id stringlengths 1 8 | text stringlengths 6 1.05M | dataset_id stringclasses 1
value |
|---|---|---|
from confluent_kafka import Consumer

# Consumer configuration: broker address and the consumer-group id.
conf = {'bootstrap.servers': 'kafka-1:9092', 'group.id': 'ch3_consumer_group'}
consumer = Consumer(conf)
consumer.subscribe(['ch3_topic_1'])

try:
    # Poll forever; Ctrl-C is the intended way to stop the loop.
    while True:
        msg = consumer.poll(timeout=1.0)
        if msg is None:
            # Nothing arrived within the poll timeout -- try again.
            continue
        if msg.error():
            print('error: {}'.format(msg.error()))
        else:
            record_key = msg.key()
            record_value = msg.value()
            print(record_key, record_value)
except KeyboardInterrupt:
    pass
finally:
    # Always leave the consumer group cleanly.
    consumer.close()
| StarcoderdataPython |
5132213 | <reponame>skearnes/color-features<filename>oe_utils/shape/overlap.py
"""
OEShape overlap utilities.
"""
__author__ = "<NAME>"
__copyright__ = "Copyright 2014, Stanford University"
__license__ = "3-clause BSD"
import collections
import numpy as np
from openeye.oechem import *
from openeye.oeshape import *
from oe_utils.shape.color import ColorForceField
class ColorOverlap(OEColorOverlap):
    """
    Color overlap engine.

    Parameters
    ----------
    color_ff : int or OEColorForceField, optional (default
            OEColorFFType_ImplicitMillsDean)
        Color force field, either a prebuilt object or a force field type
        constant used to initialize a fresh one.
    all_color : bool, optional (default True)
        Calculate full pairwise color atom overlaps.
    """

    def __init__(self, color_ff=OEColorFFType_ImplicitMillsDean,
                 all_color=True):
        super(ColorOverlap, self).__init__()
        if isinstance(color_ff, OEColorForceField):
            self.color_ff = color_ff
        else:
            self.color_ff = OEColorForceField()
            self.color_ff.Init(color_ff)
        self.SetColorForceField(self.color_ff)
        self.SetAllColor(all_color)
        self.ref_mol = None
        # Lazily built by get_color_components.
        self.color_component_engines = None

    def SetRefMol(self, ref_mol):
        """
        Set reference molecule (also remembered locally on self.ref_mol).

        Parameters
        ----------
        ref_mol : OEMol
            Reference molecule.
        """
        self.ref_mol = ref_mol
        return super(ColorOverlap, self).SetRefMol(ref_mol)

    def overlap(self, fit_mol):
        """
        Score fit_mol against the reference molecule and wrap the result.

        Parameters
        ----------
        fit_mol : OEMol
            Fit molecule.
        """
        result = OEColorResults()
        self.ColorScore(fit_mol, result)
        return ColorOverlapResult(result)

    def get_color_components(self, fit_mol):
        """
        Get overlap scores for each color type.

        The color overlap is repeated with a series of different color force
        fields that each have a single color type defined.

        Parameters
        ----------
        fit_mol : OEMol
            Fit molecule.
        """
        if self.color_component_engines is None:
            self.color_component_engines = self.get_color_component_engines()
        results = collections.defaultdict(list)
        for color_type, color_type_name, engine in self.color_component_engines:
            results['overlaps'].append(engine.overlap(fit_mol))
            results['color_types'].append(color_type)
            results['color_type_names'].append(color_type_name)
        return results

    def get_color_component_engines(self):
        """
        Create a separate ColorOverlap engine for each interaction.
        """
        color_component_engines = []
        color_ff = ColorForceField(self.color_ff)
        for this_color_ff in color_ff.isolate_interactions():
            # Get a label for this force field.
            # Assume like interactions only, and no duplicates.
            # TODO: allow more flexibility here.
            interactions = this_color_ff.get_interactions()
            assert len(interactions) == 1
            assert interactions[0][0] == interactions[0][1]
            color_type = interactions[0][0]
            color_type_name = this_color_ff.GetTypeName(color_type)
            engine = ColorOverlap(
                color_ff=this_color_ff, all_color=self.GetAllColor())
            engine.SetRefMol(self.ref_mol)
            color_component_engines.append(
                (color_type, color_type_name, engine))
        return color_component_engines

    @staticmethod
    def group_color_component_results(results):
        """
        Extract scores from each overlay into arrays for each score type.

        Parameters
        ----------
        results : array_like
            Nested array of color component results; each leaf element is
            expected to expose the four score attributes below.
        """
        results = np.atleast_2d(results)
        shape = results.shape
        keys = [
            'color_tanimoto', 'color_overlap', 'ref_self_color',
            'fit_self_color']
        data = {key: np.zeros(shape, dtype=float) for key in keys}
        for i, this_results in enumerate(results):
            for j, component_results in enumerate(this_results):
                for k, component_result in enumerate(component_results):
                    for key in keys:
                        data[key][i, j, k] = getattr(component_result, key)
        return data

    def get_ref_color_atom_overlaps(self, fit_mol):
        """
        Get overlap scores for each reference molecule color atom.

        Each color atom in the reference molecule is isolated and the color
        overlap with the fit molecule is scored.

        Parameters
        ----------
        fit_mol : OEMol
            Fit molecule.
        """
        results = []
        # Use OEMol instead of CreateCopy because otherwise color atoms are
        # added to self.ref_mol
        colored_ref_mol = OEMol(self.ref_mol)
        OEAddColorAtoms(colored_ref_mol, self.color_ff)
        assert OECountColorAtoms(self.ref_mol) == 0
        ref_color_coords = []
        ref_color_types = []
        ref_color_type_names = []
        for ref_color_atom in OEGetColorAtoms(colored_ref_mol):
            coords = colored_ref_mol.GetCoords(ref_color_atom)
            ref_color_type = OEGetColorType(ref_color_atom)
            ref_color_type_name = self.color_ff.GetTypeName(ref_color_type)
            ref_color_coords.append(coords)
            ref_color_types.append(ref_color_type)
            ref_color_type_names.append(ref_color_type_name)
            # Use OEMol instead of CreateCopy because otherwise colored_ref_mol
            # color atoms are deleted by OERemoveColorAtoms
            this_ref_mol = OEMol(colored_ref_mol)
            OERemoveColorAtoms(this_ref_mol)
            OEAddColorAtom(this_ref_mol, OEFloatArray(coords), ref_color_type,
                           ref_color_type_name)
            assert OECountColorAtoms(this_ref_mol) == 1
            super(ColorOverlap, self).SetRefMol(this_ref_mol)
            results.append(self.overlap(fit_mol))
        super(ColorOverlap, self).SetRefMol(self.ref_mol)  # reset ref mol
        return {'overlaps': results,
                'ref_color_coords': ref_color_coords,
                'ref_color_types': ref_color_types,
                'ref_color_type_names': ref_color_type_names}

    @staticmethod
    def group_ref_color_atom_overlaps(results):
        """
        Create a 3D masked array containing all overlap scores.

        Parameters
        ----------
        results : array_like
            2D array containing reference molecule color atom overlap results.
        """
        # get maximum number of ref color atoms
        # don't use `for result in it` because that gives an array of size 1
        max_size = 0
        it = np.nditer(results, flags=['multi_index', 'refs_ok'])
        for _ in it:
            max_size = max(max_size, len(results[it.multi_index]))
        # build a masked array containing results
        # don't use data[it.multi_index][:result.size] because that assigns
        # to a view and not to data
        data = np.ma.masked_all((results.shape[:2] + (max_size,)), dtype=float)
        it = np.nditer(results, flags=['multi_index', 'refs_ok'])
        for _ in it:
            i, j = it.multi_index
            result = results[i, j]
            data[i, j, :result.size] = result
        return data
class ColorOverlapResult(object):
    """
    Plain-Python snapshot of an OEColorResults object.

    Parameters
    ----------
    result : OEColorResults
        Color overlap result to copy scores from.
    """

    def __init__(self, result):
        # Copy the four overlap scores so the (C++-backed) result object
        # does not need to stay alive.
        self.color_tanimoto = result.GetTanimoto()
        self.color_overlap = result.colorscore
        self.ref_self_color = result.refSelfColor
        self.fit_self_color = result.fitSelfColor
| StarcoderdataPython |
4923310 | <gh_stars>1-10
# -*- coding: utf-8 -*-
import sys
import logging.handlers
import os
import re
# noinspection PyUnresolvedReferences
import apiclient
import httplib2
from oauth2client.service_account import ServiceAccountCredentials
from flask import Flask, request
from flask_restful import Resource, Api, abort
from flask_cache import Cache
app = Flask(__name__)
api = Api(app)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
class GetAppVersion(Resource):
    """REST resource returning the latest published version of an Android app.

    GET parameters:
        id   -- Android package name (e.g. ``com.android.sample``), required.
        mask -- version-code mask (H=Major, L=Minor, P=Patch, I=Ignore),
                defaults to ``'HILPIII'``.
    """

    credentials = None
    package_name = ''
    package_name_undefined = ("The package name undefined. Example:"
                              "com.android.sample")
    cache_empty = ("Cache for {0} is empty or expired. Try to get version"
                   " from Google. Loading key file")
    cant_load_credentials = ("Can`t load credentials. The key file is empty "
                             "or corrupted. Contact your server "
                             "administrator.")
    version_notfound = "No one valid version for {0} was found."
    cant_find = "Can`t find package {0}"
    error_400 = "Can`t get android version for some reason"

    # BUG FIX: this was decorated with @property. Flask-RESTful dispatches
    # HTTP GET requests by *calling* Resource.get(), so it must be a plain
    # method; as a property the handler was evaluated on attribute access
    # and could not be routed correctly.
    def get(self):
        args = request.args
        if args['id'].isspace():
            self.abort(400, self.package_name_undefined)
        self.package_name = args['id']
        app.logger.info('Try loading version from cache')
        # BUG FIX: was `formatted_version = cache.get`, which bound the
        # method object instead of calling it -- the cache was never
        # consulted and the "cached" branch never returned a version.
        formatted_version = cache.get(self.package_name)
        if formatted_version is not None:
            app.logger.info('Done from cache')
            return {'last_version': formatted_version}
        app.logger.info(self.cache_empty.format(self.package_name))
        try:
            self.load_credentials()
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # are not swallowed.
            abort(501, message=self.cant_load_credentials)
        try:
            return {"last_version": self.proceed_query(*self.parse_mask(args))}
        except IndexError:
            self.abort(422, self.version_notfound.format(self.package_name))
        except apiclient.errors.HttpError:
            self.abort(422, self.cant_find.format(self.package_name))
        except Exception:
            self.abort(400, self.error_400)

    def abort(self, error, message):
        """Log the current exception type, then abort with the given status."""
        app.logger.error(sys.exc_info()[0])
        abort(error, message=message)

    @staticmethod
    def parse_mask(args):
        """Split the version-code mask into ignore positions and field widths."""
        # mask of your version code. H - Major, L - Minor, P - Patch, I - ignore
        mask = 'HILPIII' if 'mask' not in args else args['mask']
        # get list of index ignored symbols
        ignore = [m.start() for m in re.finditer('I', mask)]
        major = mask.count('H')  # calc Major symbols
        minor = mask.count('L')  # calc Minor symbols
        patch = mask.count('P')  # calc Patch symbols
        return ignore, major, minor, patch

    def load_credentials(self):
        """Load service-account credentials from key.json next to this file."""
        self.credentials = ServiceAccountCredentials.from_json_keyfile_name(
            os.path.dirname(os.path.abspath(__file__)) + '/key.json',
            scopes='https://www.googleapis.com/auth/androidpublisher')

    def proceed_query(self, ignore, major, minor, patch):
        """Query the Google Play publishing API and format the latest version."""
        app.logger.info("Request for {0} started".format(self.package_name))
        http = httplib2.Http()
        http = self.credentials.authorize(http)
        service = apiclient.discovery.build('androidpublisher', 'v2',
                                            http=http)
        edit_request = service.edits().insert(body={},
                                              packageName=self.package_name)
        result = edit_request.execute()
        edit_id = result['id']
        apks_result = service.edits().apks().list(
            editId=edit_id, packageName=self.package_name).execute()
        app.logger.info("Request for {0} completed".format(self.package_name))
        last_version = str(apks_result['apks'][-1]['versionCode'])
        # remove ignored symbols (indices shift left as characters are cut)
        for i in range(len(ignore)):
            last_version = last_version[:ignore[i] - i] + \
                last_version[ignore[i] + 1 - i:]
        formatted_version = '{0}.{1}.{2}'.format(
            last_version[0:major],
            last_version[major:major + minor],
            last_version[major + minor:major + minor + patch])
        app.logger.info('Save to cache')
        # Usually releasing new app version take 4 hours. That's why we
        # save version to cache for 4 hours
        cache.set(self.package_name, formatted_version, timeout=4 * 60 * 60)
        return formatted_version
api.add_resource(GetAppVersion, '/')

if __name__ == '__main__':
    # Log to a rotating file that lives next to this script.
    log_path = (os.path.dirname(os.path.abspath(__file__)) +
                '/logs/android_version_checker.log')
    handler = logging.handlers.RotatingFileHandler(
        log_path, maxBytes=10000, backupCount=1)
    formatter = logging.Formatter(
        "[%(asctime)s] {%(pathname)s:%(lineno)d} %(levelname)s - %(message)s")
    handler.setLevel(logging.INFO)
    handler.setFormatter(formatter)
    app.logger.addHandler(handler)
    app.logger.info('Android app version checker started on port 5005')
    app.run(host="0.0.0.0", port=5005, debug=True)
| StarcoderdataPython |
8056872 | <gh_stars>0
"""main code for tree searching using
- using minimax search
- MCTS?
22.11.2020 - @yashbonde""" | StarcoderdataPython |
156851 | <reponame>tlambert03/anyfft<filename>anyfft/reikna/_version.py
# file generated by setuptools_scm
# don't change, don't track in version control
# PEP 440 version string produced by setuptools_scm.
version = "0.1.dev1+ga7b326d.d20210618"
# Same version split into comparable components.
version_tuple = (0, 1, "dev1+ga7b326d", "d20210618")
| StarcoderdataPython |
6514900 | ###########################################################################
# Imports
###########################################################################
# Standard library imports
import argparse
import time as time
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import path
# Local imports
from XFOIL import XFOIL
from helper_funcs import *
###########################################################################
# Code
###########################################################################
def cli_parser():
    """Parse command-line options for the vortex panel method solver.

    Returns
    -------
    argparse.Namespace
        Parsed arguments: numPan, Vinf, NACA, AoA, replacement_pct, dpi.
    """
    parser = argparse.ArgumentParser(
        allow_abbrev=False,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )
    parser.add_argument(
        '-n', '--n-panels', action='store', dest='numPan', type=int, default=170,
        help='Number of panel nodes.'
    )
    parser.add_argument(
        '-v', '--vinf', action='store', dest='Vinf', type=float, default=1.,
        help='Free stream velocity.'
    )
    parser.add_argument(
        '--naca', action='store', dest='NACA', type=str, default="0012",
        help='NACA airfoil to be used.'
    )
    parser.add_argument(
        '-A', '--aoa', action='store', dest='AoA', type=float, default=0.,
        help='Angle of attack.'
    )
    parser.add_argument(
        '--pct', action='store', dest='replacement_pct', type=float,
        default=100., help='Panel replacement percentage.'
    )
    # BUG FIX: default was the float literal 300. -- argparse does not run
    # `type` on defaults, so args.dpi leaked through as a float when the
    # flag was not given, despite type=int.
    parser.add_argument(
        '--dpi', action='store', dest='dpi', type=int, default=300,
        help='DPI of output image.'
    )
    args = parser.parse_args()
    return args
# KNOWNS
# Parse CLI options once; everything below reads from `args`.
args = cli_parser()
Vinf = args.Vinf
AoA = args.AoA
NACA = args.NACA
# Convert AoA to radians [rad]
AoAR = AoA * (np.pi / 180)
# Flag to specify creating or loading airfoil
flagAirfoil = [1, # Create specified NACA airfoil in XFOIL
0] # Load Selig-format airfoil from directory
# Plotting flags (each entry enables one figure in the PLOTTING section)
flagPlot = [1, # Airfoil with panel normal vectors
1, # Geometry boundary pts, control pts, first panel, second panel
1, # Cp vectors at airfoil surface panels
1, # Pressure coefficient comparison (XFOIL vs. VPM)
1, # Airfoil streamlines
1] # Pressure coefficient contour
# PPAR menu options
PPAR = [str(args.numPan + 1), # "Number of panel nodes"
'4', # "Panel bunching parameter"
'1.5', # "TE/LE panel density ratios"
'1', # "Refined area/LE panel density ratio"
'1 1', # "Top side refined area x/c limits"
'1 1'] # "Bottom side refined area x/c limits"
# Grid parameters
nGridX = 150 # X-grid for streamlines and contours
nGridY = 150 # Y-grid for streamlines and contours
xVals = [-1, 1.5] # X-grid extents [min, max]
yVals = [-1.5, 1.5] # Y-grid extents [min, max]
# %% XFOIL - CREATE/LOAD AIRFOIL
# Call XFOIL function to obtain the following:
# - Airfoil coordinates
# - Pressure coefficient along airfoil surface
# - Lift, drag, and moment coefficients
xFoilResults = XFOIL(NACA, PPAR, AoA, flagAirfoil)
# Separate out results from XFOIL function results
afName = xFoilResults[0] # Airfoil name
xFoilX = xFoilResults[1] # X-coordinate for Cp result
xFoilY = xFoilResults[2] # Y-coordinate for Cp result
xFoilCP = xFoilResults[3] # Pressure coefficient
XB = xFoilResults[4] # Boundary point X-coordinate
YB = xFoilResults[5] # Boundary point Y-coordinate
xFoilCL = xFoilResults[6] # Lift coefficient
xFoilCD = xFoilResults[7] # Drag coefficient
xFoilCM = xFoilResults[8] # Moment coefficient
# Number of boundary points and panels
numPts = len(XB) # Number of boundary points
numPan = numPts - 1 # Number of panels (control points)
# %% FUNCTIONS
# VPM pipeline: panel geometry -> influence integrals -> linear system with
# the Kutta condition -> vortex strengths (gamma) -> velocities -> forces.
XB, YB = correct_panels_orientation(numPan, XB, YB)
XC, YC, S, beta, delta, phi = compute_panel_geometries(numPan, XB, YB, AoA)
K, L = compute_kl_vpm(XC, YC, XB, YB, phi, S)
A, b = populate_matrices_vpm(numPan, K, beta, Vinf)
A, b = satisfy_kutta_condition_vpm(numPan, A, b, pct=args.replacement_pct)
gamma = np.linalg.solve(A, b)
print("\nSum of gamma: ", sum(gamma * S))
Vt, Cp = compute_panel_velocities(numPan, gamma, beta, L, Vinf)
CN, CA, CL, CD, CM = compute_force_coefficients(XC, phi, beta, AoAR, Cp, S)
# Print the results to the Console
print("\n======= RESULTS =======")
print("Lift Coefficient (CL)")
# From Kutta-Joukowski lift equation
print(f" K-J : {2*sum(gamma*S)}")
# From this VPM code
print(f" VPM : {CL}")
print(f"XFOIL : {xFoilCL}")
print("\nMoment Coefficient (CM)")
print(f" VPM : {CM}")
print(f"XFOIL : {xFoilCM}")
# %% COMPUTE STREAMLINES - REF [4]
# Only needed if we plot streamlines or the pressure coefficient contour.
if (flagPlot[4] == 1 or flagPlot[5] == 1):
    # Streamline seeding: start a fraction of the Y-grid rows at the left edge.
    slPct = 25                                                  # Percentage of streamlines of the grid
    Ysl = np.linspace(yVals[0], yVals[1], int((slPct/100)*nGridY))
    Xsl = xVals[0]*np.ones(len(Ysl))
    XYsl = np.vstack((Xsl.T, Ysl.T)).T                          # Streamline starting points

    # Evenly spaced evaluation grid.
    Xgrid = np.linspace(xVals[0], xVals[1], nGridX)
    Ygrid = np.linspace(yVals[0], yVals[1], nGridY)
    XX, YY = np.meshgrid(Xgrid, Ygrid)

    # Velocity components at every grid point.
    Vx = np.zeros([nGridX, nGridY])
    Vy = np.zeros([nGridX, nGridY])

    # Polygon path used to test whether a grid point lies inside the airfoil.
    AF = np.vstack((XB.T, YB.T)).T
    afPath = path.Path(AF)

    tic = time.perf_counter()
    for m in range(nGridX):
        for n in range(nGridY):
            XP = XX[m, n]
            YP = YY[m, n]
            # Geometric integrals of every panel at this grid point.
            Nx, Ny = streamline_vpn(XP, YP, XB, YB, phi, S)
            if afPath.contains_points([(XP, YP)]):
                # Inside the body: force zero velocity.
                Vx[m, n] = 0
                Vy[m, n] = 0
            else:
                # Freestream plus induced velocity from all vortex panels.
                Vx[m, n] = Vinf*np.cos(AoAR) + sum(-gamma*Nx/(2*np.pi))
                Vy[m, n] = Vinf*np.sin(AoAR) + sum(-gamma*Ny/(2*np.pi))
    toc = time.perf_counter()
    print("\n\nSTREAMLINE_VPM: %.2f seconds" % (toc-tic))

    # Velocity magnitude and pressure coefficient over the grid.
    Vxy = np.sqrt(Vx**2 + Vy**2)
    CpXY = 1 - (Vxy/Vinf)**2
# %% CIRCULATION AND VORTEX STRENGTH CHECK
# Sanity check: the circulation around a contour enclosing the airfoil
# should match the summed vortex strengths.
if (flagPlot[4] == 1 or flagPlot[5] == 1):
    # Ellipse enclosing the airfoil, used as the integration contour.
    aa = 0.75      # horizontal half-length
    bb = 0.25      # vertical half-length
    x0 = 0.5       # center X-coordinate
    y0 = 0         # center Y-coordinate
    numT = 5000    # number of points on the ellipse
    Circulation, xC, yC, VxC, VyC = compute_circulation(aa, bb, x0, y0,
                                                        numT, Vx, Vy,
                                                        Xgrid, Ygrid)
    print("\n\n======= CIRCULATION RESULTS =======")
    print("Sum of L : %2.8f" % sum(gamma*S))
    print("Circulation: %2.8f" % Circulation)
    print("Lift Coef : %2.8f" % (2.0*Circulation))   # Kutta-Joukowski
# %% PLOTTING
# FIGURE: Airfoil with panel normal vectors
if (flagPlot[0] == 1):
    fig = plt.figure(1)
    plt.cla()
    plt.fill(XB, YB, 'k')                       # airfoil silhouette
    X = np.zeros(2)
    Y = np.zeros(2)
    for i in range(numPan):
        # Orientation vector of panel i, drawn from its control point.
        X[0] = XC[i]
        X[1] = XC[i] + S[i]*np.cos(delta[i])
        Y[0] = YC[i]
        Y[1] = YC[i] + S[i]*np.sin(delta[i])
        if (i == 0):
            plt.plot(X, Y, 'b-', label='First Panel')
        elif (i == 1):
            plt.plot(X, Y, 'g-', label='Second Panel')
        else:
            plt.plot(X, Y, 'r-')
    plt.xlabel('X Units')
    plt.ylabel('Y Units')
    plt.title('Panel Geometry')
    plt.axis('equal')
    plt.legend()
    fname = os.path.join('figs', 'airfoil', 'airfoil_geometry.png')
    plt.savefig(fname, dpi=args.dpi, bbox_inches='tight')
# FIGURE: Geometry with the following indicated:
# - Boundary points, control points, first panel, second panel
if (flagPlot[1] == 1):
    fig = plt.figure(2)
    plt.cla()
    plt.plot(XB, YB, 'k-')
    plt.plot([XB[0], XB[1]], [YB[0], YB[1]], 'b-', label='First Panel')
    plt.plot([XB[1], XB[2]], [YB[1], YB[2]], 'g-', label='Second Panel')
    plt.plot(XB, YB, 'ko', markerfacecolor='k', label='Boundary Pts')
    plt.plot(XC, YC, 'ko', markerfacecolor='r', label='Control Pts')
    plt.xlabel('X Units')
    plt.ylabel('Y Units')
    plt.axis('equal')
    plt.legend()
    fname = os.path.join('figs', 'airfoil', 'airfoil_geometry2.png')
    plt.savefig(fname, dpi=args.dpi, bbox_inches='tight')
# FIGURE: Cp vectors at airfoil control points
if (flagPlot[2] == 1):
    fig = plt.figure(3)
    plt.cla()
    # Scale and make positive all Cp values so vector length shows magnitude.
    Cps = np.absolute(Cp*0.15)
    X = np.zeros(2)
    Y = np.zeros(2)
    # One-shot flags so each legend entry is added only once.
    posOnce = negOnce = True
    for i in range(len(Cps)):
        X[0] = XC[i]
        X[1] = XC[i] + Cps[i]*np.cos(delta[i])
        Y[0] = YC[i]
        Y[1] = YC[i] + Cps[i]*np.sin(delta[i])
        if (Cp[i] < 0):
            if posOnce:
                plt.plot(X, Y, 'r-', label=r'$C_p < 0$')
                posOnce = False
            else:
                plt.plot(X, Y, 'r-')
        elif (Cp[i] >= 0):
            if negOnce:
                plt.plot(X, Y, 'b-', label=r'$C_p \geq 0$')
                negOnce = False
            else:
                plt.plot(X, Y, 'b-')
    # Plot the airfoil as black polygon
    plt.fill(XB, YB, 'k')
    plt.xlabel('X Units')
    plt.ylabel('Y Units')
    plt.gca().set_aspect('equal')
    plt.legend(loc="lower center", bbox_to_anchor=(0.5, -1), ncol=2)
    fig.subplots_adjust(bottom=0.25)
    fname = os.path.join('figs', 'airfoil', 'airfoil_cp.png')
    plt.savefig(fname, dpi=args.dpi, bbox_inches='tight')
# FIGURE: Pressure coefficient comparison (XFOIL vs. VPM)
if (flagPlot[3] == 1):
    fig = plt.figure(4)
    plt.cla()
    # Middle indices split each Cp array into upper/lower airfoil surfaces.
    midIndX = int(np.floor(len(xFoilCP)/2))
    midIndS = int(np.floor(len(Cp)/2))
    plt.plot(xFoilX[0:midIndX], xFoilCP[0:midIndX],
             'b-', label='XFOIL Upper')
    plt.plot(xFoilX[midIndX+1:len(xFoilX)], xFoilCP[midIndX+1:len(xFoilX)],
             'r-', label='XFOIL Lower')
    plt.plot(XC[midIndS+1:len(XC)], Cp[midIndS+1:len(XC)],
             'ks', markerfacecolor='b', label='VPM Upper')
    plt.plot(XC[0:midIndS], Cp[0:midIndS],
             'ks', markerfacecolor='r', label='VPM Lower')
    plt.xlim(0, 1)
    plt.xlabel('X Coordinate')
    plt.ylabel('Cp')
    plt.title('Pressure Coefficient')
    plt.legend()
    plt.gca().invert_yaxis()        # Cp is conventionally plotted inverted
    fname = os.path.join('figs', 'airfoil', 'airfoil_cp_comparison.png')
    plt.savefig(fname, dpi=args.dpi, bbox_inches='tight')
# FIGURE: Airfoil streamlines
if (flagPlot[4] == 1):
    fig = plt.figure(5)
    plt.cla()
    np.seterr(under="ignore")       # silence underflow from streamplot math
    plt.streamplot(XX, YY, Vx, Vy, linewidth=0.5, density=40, color='r',
                   arrowstyle='-', start_points=XYsl)
    plt.clim(vmin=0, vmax=2)
    plt.fill(XB, YB, 'k')           # airfoil as black polygon
    plt.xlabel('X Units')
    plt.ylabel('Y Units')
    plt.gca().set_aspect('equal')
    plt.xlim(xVals)
    plt.ylim(yVals)
    fname = os.path.join('figs', 'airfoil', 'airfoil_streamlines.png')
    plt.savefig(fname, dpi=args.dpi, bbox_inches='tight')
# FIGURE: Pressure coefficient contour
if (flagPlot[5] == 1):
    fig = plt.figure(6)
    plt.cla()
    plt.contourf(XX, YY, CpXY, 500, cmap='jet')
    plt.fill(XB, YB, 'k')           # airfoil as black polygon
    plt.xlabel('X Units')
    plt.ylabel('Y Units')
    plt.gca().set_aspect('equal')
    plt.xlim(xVals)
    plt.ylim(yVals)
    plt.colorbar()
    # Dead-code fix: a first assignment
    # `fname = os.path.join(os.getcwd(), 'CpContour.png')` was immediately
    # overwritten by the line below and has been removed.
    fname = os.path.join('figs', 'airfoil', 'airfoil_cp_contour.png')
    plt.savefig(fname, dpi=args.dpi, bbox_inches='tight')
| StarcoderdataPython |
11232068 | <reponame>sakost/kutana<filename>kutana/backends/vkontakte/__init__.py
from .extensions import VkontaktePluginExtension
from .backend import Vkontakte
__all__ = ["VkontaktePluginExtension", "Vkontakte"]
| StarcoderdataPython |
9651999 | from .token import Token
class Scanner:
    """Character scanner that walks source text and collects Tokens.

    `start` marks the beginning of the lexeme currently being scanned;
    `current` is the index of the next unconsumed character.
    """

    def __init__(self, text):
        self.text = text
        self.start = 0
        self.current = 0
        self.tokens = []

    def at_end(self):
        """Return True once every character has been consumed."""
        return self.current >= len(self.text)

    def advance(self):
        """Consume the next character and return it."""
        ch = self.text[self.current]
        self.current += 1
        return ch

    def peek(self):
        """Return the next character without consuming it ('' at end)."""
        return "" if self.at_end() else self.text[self.current]

    def peek_next(self):
        """Return the character after next without consuming ('' if absent)."""
        if self.current + 1 >= len(self.text):  # pragma: no cover
            return ""
        return self.text[self.current + 1]

    def add_token(self, kind, literal=None):
        """Append a Token built from the current lexeme [start, current)."""
        source = self.text[self.start : self.current]
        self.tokens.append(Token(kind, source, literal))

    def scan(self):
        """Tokenize the whole input, append an EOF token, and return self."""
        while not self.at_end():
            self.start = self.current
            self.scan_token()
        self.tokens.append(Token("EOF", ""))
        return self

    def scan_token(self):  # pragma: no cover
        # Hook overridden by concrete scanners; the base class emits nothing.
        return None
| StarcoderdataPython |
367236 | # Uploads the following to Storage:
# - (A version of) CBS catalog (TODO: Decide if/what/how)
# - Kerncijfers wijken and buurten
# - Nabijheidsstatistieken
# - Bevolkingsstatistieken per pc4
# - Mapping pc6huisnummer tot buurten and wijken
# TODO: Creates a `CBS helper` dataset in BQ, with 4 (/5?) tables ??? (Concat?)
from datetime import datetime
from prefect import Flow, Client
from prefect.tasks.prefect import StartFlowRun
from nl_open_data.config import config as CONFIG
from nl_open_data.utils import get_gcs_uris
# Prefect client parameters
TENANT_SLUG = "dataverbinders"
client = Client() # Local api key has been stored previously
client.login_to_tenant(tenant_slug=TENANT_SLUG) # For user-scoped API token
# GCP env parameters
GCP_ENV = "dev"
PROD_ENV = None
# General script parameters
SOURCE = "cbs"
# Timestamp embedded in run names below so each scheduled run is unique.
RUN_TIME = f"{datetime.today().date()}_{datetime.today().time()}"
PROJECT = "nl_open_data"
################################################################################
# Upload Kerncijfers wijken and buurten to gcs (xls_flow)
# TODO: Are these the same or different then the regionaal_kwb statline datasets????
# TODO: How to concatanate?
# Taking 2013-2020 here, because earlier data has different format, so we leave integration of those for later. #TODO
# https://www.cbs.nl/nl-nl/reeksen/kerncijfers-wijken-en-buurten-2004-2020
# flow parameters
KWB_URLS = [
"https://www.cbs.nl/-/media/cbs/dossiers/nederland-regionaal/wijk-en-buurtstatistieken/_exel/kwb-2013.xls",
"https://www.cbs.nl/-/media/cbs/dossiers/nederland-regionaal/wijk-en-buurtstatistieken/_exel/kerncijfers-wijken-en-buurten-2014.xls",
"https://www.cbs.nl/-/media/cbs/dossiers/nederland-regionaal/wijk-en-buurtstatistieken/_exel/kwb-2015.xls",
"https://www.cbs.nl/-/media/cbs/dossiers/nederland-regionaal/wijk-en-buurtstatistieken/_exel/kwb-2016.xls",
"https://www.cbs.nl/-/media/cbs/dossiers/nederland-regionaal/wijk-en-buurtstatistieken/_exel/kwb-2017.xls",
"https://www.cbs.nl/-/media/_excel/2021/12/kwb-2018.xls",
# "https://www.cbs.nl/-/media/_excel/2021/12/kwb-2019.xls", # BUG: Unexpected error: ArrowInvalid('Could not convert 5,0 with type str: tried to convert to double', 'Conversion failed for column p_stadsv with type object')
# This issue stems from the mixed data types in Excel (NUMBER and TEXT).
# The column is translated as a dtype=object, and then crashes when trying to convert to parquet.
# This issue is related: https://issues.apache.org/jira/browse/ARROW-4131
# No trivial solution (skip_columns does not exist in read_excel, trying str.replace(".", ",") also fails
"https://www.cbs.nl/-/media/_excel/2021/12/kwb-2020.xls",
]
KWB_GCS_FOLDER = "cbs/kwb"
KWB_KWARGS = [
{"na_values": [".", " .", " . "]}, # 2013
{"na_values": [".", " .", " . "]}, # 2014
{"na_values": [".", " .", " . "]}, # 2015
{"na_values": [".", " .", " . "]}, # 2016
{"na_values": [".", " .", " . "]}, # 2017
{"na_values": [".", " .", " . "]}, # 2018
# {"na_values": [".", " .", " . "]}, # 2019 # BUG: (See above)
{"na_values": [".", " .", " . "]}, # 2020
]
# run parameters
VERSION_GROUP_ID = "xls_to_gcs"
RUN_NAME = f"cbs_helper_kwb_{RUN_TIME}"
PARAMETERS = {
"urls": KWB_URLS,
"gcs_folder": KWB_GCS_FOLDER,
"read_excel_kwargs": KWB_KWARGS,
}
# Schedule run
# NOTE(review): this StartFlowRun task is only referenced by the
# commented-out flow-of-flows at the bottom of the script; the actual
# scheduling here happens through client.create_flow_run below.
kwb_to_gcs_flow = StartFlowRun(
flow_name=VERSION_GROUP_ID,
project_name=PROJECT,
run_name=RUN_NAME,
parameters=PARAMETERS,
wait=True,
)
flow_run_id = client.create_flow_run(
version_group_id=VERSION_GROUP_ID, run_name=RUN_NAME, parameters=PARAMETERS,
)
################################################################################
# Upload Nabijheidsstatistieken to gcs (xls_flow + statline_gcs_flow)
######################
# xls_flow
# 2006-2016 figures are excel files
# flow parameters
NBH_URLS = [
"https://www.cbs.nl/-/media/_excel/2016/17/nabijheid-2006-2016-04-18.xls",
"https://www.cbs.nl/-/media/_excel/2016/17/nabijheid-2007-2016-04-18.xls",
"https://www.cbs.nl/-/media/_excel/2016/17/nabijheid-2008-2016-04-18.xls",
"https://www.cbs.nl/-/media/_excel/2016/17/nabijheid-2009-2016-04-18.xls",
"https://www.cbs.nl/-/media/_excel/2016/17/nabijheid-2010-2016-04-18.xls",
"https://www.cbs.nl/-/media/_excel/2016/16/nabijheid-2011-2016-04-18.xls",
"https://www.cbs.nl/-/media/_excel/2016/16/nabijheid-2012-2016-04-18.xls",
"https://www.cbs.nl/-/media/_excel/2016/51/nabijheid-2013-2016-12-19.xls",
"https://www.cbs.nl/-/media/_excel/2016/51/nabijheid-2014-2016-10-11-(1).xls",
"https://www.cbs.nl/-/media/_excel/2017/32/nabijheid_wijkbuurt_2015v3.xls",
"https://www.cbs.nl/-/media/_excel/2017/32/nabijheid_2016.xls",
]
NBH_GCS_FOLDER = "cbs/nbh"
GCP_ENV = "dev"
KWARGS = [
{"na_values": [".", " .", " . "]}, # 2006
{"na_values": [".", " .", " . "]}, # 2007
{"na_values": [".", " .", " . "]}, # 2008
{"na_values": [".", " .", " . "]}, # 2009
{"na_values": [".", " .", " . "]}, # 2010
{"na_values": [".", " .", " . "]}, # 2011
{"na_values": [".", " .", " . "]}, # 2012
{"na_values": [".", " .", " . "]}, # 2013
{"na_values": [".", " .", " . "]}, # 2014
{"skiprows": [1, 2], "na_values": [".", " .", " . "]}, # 2015
{"skiprows": [1, 2], "na_values": [".", " .", " . "]}, # 2016
]
# run parameters
VERSION_GROUP_ID = "xls_to_gcs"
RUN_NAME = f"cbs_helper_nbh_xls_{RUN_TIME}"
# Flow parameters for the NBH xls run.
PARAMETERS = {
    "urls": NBH_URLS,
    "gcs_folder": NBH_GCS_FOLDER,
    "gcp_env": GCP_ENV,
    # Consistency fix: was "PROD_ENV" -- flow parameter keys are lowercase
    # everywhere else in this script (see the gcs_to_bq parameters, which
    # use "prod_env"), so the uppercase key would not match the flow's
    # declared parameter name.
    "prod_env": PROD_ENV,
    "read_excel_kwargs": KWARGS,
}
# Schedule run
flow_run_id = client.create_flow_run(
version_group_id=VERSION_GROUP_ID, run_name=RUN_NAME, parameters=PARAMETERS,
)
######################
# statline_to_gcs flow
# 2017 onwards in datasets:
NBH_IDS = [
"84334NED", # 2017
"84463NED", # 2018
"84718NED", # 2019
]
NBH_SOURCE = SOURCE
THIRD_PARTY = False
GCP_ENV = "dev"
ENDPOINT = "gcs"
FORCE = False
# run parameters
VERSION_GROUP_ID = "statline_bq"
RUN_NAME = f"cbs_helper_nabijheid_statline_{RUN_TIME}"
PARAMETERS = {
"ids": NBH_IDS,
"source": NBH_SOURCE,
"third_party": THIRD_PARTY,
"endpoint": ENDPOINT,
"force": FORCE,
}
# Schedule run
flow_run_id = client.create_flow_run(
version_group_id=VERSION_GROUP_ID, run_name=RUN_NAME, parameters=PARAMETERS,
)
################################################################################
# Bevolkingsstatistieken per pc4 (statline_gcs_flow)
BVS_IDS = [
"83502NED",
]
BVS_SOURCE = SOURCE
THIRD_PARTY = False
ENDPOINT = "gcs"
FORCE = False
# run parameters
VERSION_GROUP_ID = "statline_bq"
RUN_NAME = f"cbs_helper_bevolking_pc4_{RUN_TIME}"
PARAMETERS = {
"ids": BVS_IDS,
"source": BVS_SOURCE,
"third_party": THIRD_PARTY,
"endpoint": ENDPOINT,
"force": FORCE,
}
# Schedule run
flow_run_id = client.create_flow_run(
version_group_id=VERSION_GROUP_ID, run_name=RUN_NAME, parameters=PARAMETERS,
)
################################################################################
# TODO: Mapping pc6huisnummer tot buurten and wijken (????)
################################################################################
# Create dataset(/s) (gcs_to_bq_flow)
# TODO: these flows should be scheduled only after the previous ones are done
# See https://docs.prefect.io/core/idioms/flow-to-flow.html#scheduling-a-flow-of-flows for more info.
#####################
# KWB dataset
# flow parameters
URIS = get_gcs_uris(
gcs_folder=KWB_GCS_FOLDER, source=SOURCE, config=CONFIG, gcp_env=GCP_ENV
)
DATASET_NAME = "cbs_kwb"
DESCRIPTION = "ADD DESCRIPTION HERE" # TODO: Add description
# run parameters
VERSION_GROUP_ID = "gcs_to_bq"
RUN_NAME = f"gcs_to_bq_kwb_{RUN_TIME}"
PARAMETERS = {
"uris": URIS,
"dataset_name": DATASET_NAME,
# "config": CONFIG,
"gcp_env": GCP_ENV,
"prod_env": PROD_ENV,
"description": DESCRIPTION,
}
# Schedule run
kwb_gcs_to_bq_flow = StartFlowRun(
flow_name=VERSION_GROUP_ID,
project_name=PROJECT,
run_name=RUN_NAME,
parameters=PARAMETERS,
wait=True,
)
flow_run_id = client.create_flow_run(
version_group_id=VERSION_GROUP_ID, run_name=RUN_NAME, parameters=PARAMETERS,
)
################################################################################
# Build flow of flows
# TODO: NOt sure how this is supposed to work.
# with Flow("cbs_helper") as flow:
# kwb_gcs_to_bq = kwb_gcs_to_bq_flow(upstream_tasks=[kwb_to_gcs_flow])
# flow.run()
# flow.register(project_name=PROJECT, version_group_id="cbs_helper")
| StarcoderdataPython |
9721001 | <reponame>JohnnyHowe/Slow-Engine-Python
""" Sample program for the SlowEngine.
Shows off basic 2D player movement. No bells or whistles. """
import slowEngine
import pygame
class Game:
    """Top-level demo object: owns the single Player and drives the frame loop."""

    player = None  # class-level default; replaced by a Player instance in __init__

    def __init__(self):
        """Create the player shown by this demo."""
        self.player = Player()

    def run(self):
        """Run the frame loop indefinitely."""
        while True:
            self.run_frame()

    def run_frame(self):
        """Advance one frame: pump events, clear the screen, draw, present."""
        slowEngine.EventHandler.run()
        white = (255, 255, 255)
        slowEngine.Display.fill(white)
        self.player.show()
        slowEngine.Display.update_display()
class Player:
    """A drawable disc in world space with a text label above it."""

    position = None  # world-space position; set to a Vector2 in __init__

    def __init__(self):
        """Place the player at the world origin."""
        self.position = slowEngine.Vector2(0, 0)

    def show(self):
        """Draw the player: filled green disc, black outline, label above."""
        green = (0, 255, 0)
        black = (0, 0, 0)
        radius = 0.5
        slowEngine.draw.draw_world_circle(green, self.position, radius)
        slowEngine.draw.draw_world_circle(black, self.position, radius, 0.05)
        label_pos = self.position + slowEngine.Vector2(0, 1)
        slowEngine.draw.draw_world_text("Hoes", black, label_pos, radius)
if __name__ == "__main__":
    # Entry point: construct the game and enter its endless frame loop.
    Game().run()
| StarcoderdataPython |
6541062 | from flask.ext.restful import fields, marshal
from flask import Blueprint as FlaskBlueprint
import logging
from pouta_blueprints.models import User
from pouta_blueprints.forms import SessionCreateForm
from pouta_blueprints.server import app, restful
# Blueprint under which the session (login) resource lives.
sessions = FlaskBlueprint('sessions', __name__)
# Marshalling template for the login response: maps response keys to
# flask-restful field types (used by SessionView.post).
token_fields = {
    'token': fields.String,
    'user_id': fields.String,
    'is_admin': fields.Boolean,
}
class SessionView(restful.Resource):
    """REST resource that exchanges login credentials for an auth token."""

    def post(self):
        """Authenticate a user.

        Validates the submitted form, checks the password against the stored
        hash and, on success, returns a signed token plus basic user info
        marshalled through ``token_fields``.

        Returns 422 on form-validation failure and 401 on bad credentials.
        """
        form = SessionCreateForm()
        if not form.validate_on_submit():
            # logging.warn is a deprecated alias; warning() is the real API
            logging.warning("validation error on user login")
            return form.errors, 422
        user = User.query.filter_by(email=form.email.data).first()
        if user and user.check_password(form.password.data):
            return marshal({
                'token': user.generate_auth_token(app.config['SECRET_KEY']),
                'is_admin': user.is_admin,
                'user_id': user.id
            }, token_fields)
        # lazy %-args: the message is only formatted if the record is emitted
        logging.warning("invalid login credentials for %s", form.email.data)
        return {
            'message': 'Unauthorized',
            'status': 401
        }, 401
| StarcoderdataPython |
3237611 | <reponame>Jumper78/pyIndego<gh_stars>0
"""Classes for states of pyIndego."""
import logging
from dataclasses import dataclass, field, is_dataclass
from datetime import date, datetime, time, timedelta
from typing import List
from .const import (
ALERT_ERROR_CODE,
DAY_MAPPING,
DEFAULT_LOOKUP_VALUE,
MOWER_MODEL_DESCRIPTION,
MOWING_MODE_DESCRIPTION,
)
from .helpers import convert_bosch_datetime, nested_dataclass
_LOGGER = logging.getLogger(__name__)
@dataclass
class Alert:
    """Alert class.

    One alert/notification reported by the backend; a human-readable
    description is derived from ``error_code`` after initialisation.
    """

    alm_sn: str = field(repr=False, default=None)  # mower serial number (hidden from repr)
    alert_id: str = None
    error_code: str = None  # key into ALERT_ERROR_CODE
    headline: str = None
    date: datetime = None  # normalised by __post_init__
    message: str = None
    read_status: str = None
    flag: str = None
    push: bool = None
    alert_description: str = None  # overwritten by __post_init__ regardless of input

    def __post_init__(self):
        """Set alert description."""
        # Map the raw error code to readable text; unknown codes fall back
        # to DEFAULT_LOOKUP_VALUE instead of raising.
        self.alert_description = ALERT_ERROR_CODE.get(
            self.error_code, DEFAULT_LOOKUP_VALUE
        )
        # NOTE(review): assumes convert_bosch_datetime tolerates None and
        # vendor-formatted timestamp strings -- confirm in helpers.
        self.date = convert_bosch_datetime(self.date)
@dataclass
class ModelVoltage:
    """Model voltage Class.

    Per-model min/max battery values used by Battery.update_percent_adjusted
    to rescale the raw battery percentage.
    """

    min: int = None  # value corresponding to 0 %
    max: int = None  # value corresponding to 100 %
# Mapping of bare tool numbers to that model's battery value range; looked up
# in GenericData.__post_init__ (unknown models get an empty ModelVoltage()).
MOWER_MODEL_VOLTAGE = {
    "3600HA2300": ModelVoltage(min=285, max=369),  # Indego 1000
    "3600HA2301": ModelVoltage(min=285, max=369),  # Indego 1200
    "3600HA2302": ModelVoltage(min=285, max=369),  # Indego 1100
    "3600HA2303": ModelVoltage(min=285, max=369),  # Indego 13C
    "3600HA2304": ModelVoltage(min=285, max=369),  # Indego 10C
    "3600HB0100": ModelVoltage(min=0, max=100),  # Indego 350
    "3600HB0101": ModelVoltage(min=0, max=100),  # Indego 400
    "3600HB0102": ModelVoltage(min=0, max=100),  # Indego S+ 350
    "3600HB0103": ModelVoltage(min=0, max=100),  # Indego S+ 400
    "3600HB0105": ModelVoltage(min=0, max=100),  # Indego S+ 350
    "3600HB0106": ModelVoltage(min=0, max=100),  # Indego S+ 400
    "3600HB0301": ModelVoltage(min=0, max=100),  # Indego M+ 700
    # '3600HB0xxx': {'min': '0','max': '100'} # Indego M+ 700
}
@dataclass
class Battery:
    """Battery Class.

    Raw battery telemetry; ``percent_adjusted`` is a 0-100 rescaling of the
    raw ``percent`` computed on demand via :meth:`update_percent_adjusted`.
    """

    percent: int = None
    voltage: float = None
    cycles: int = None
    discharge: float = None
    ambient_temp: int = None
    battery_temp: int = None
    percent_adjusted: int = None

    def update_percent_adjusted(self, voltage: "ModelVoltage"):
        """Set percent adjusted.

        Rescales the raw ``percent`` reading onto a 0-100 scale using the
        model-specific ``voltage`` range (attributes ``min``/``max``).
        Leaves ``percent_adjusted`` untouched when no reading is available.
        """
        # Explicit None check: a raw reading of 0 is a valid value and must
        # still be rescaled (the previous truthiness test silently skipped it).
        if self.percent is not None:
            self.percent_adjusted = round(
                (int(self.percent) - voltage.min) / ((voltage.max - voltage.min) / 100)
            )
@dataclass
class CalendarSlot:
    """One mowing time slot: raw hour/minute fields plus derived time objects."""

    En: bool = None  # slot enabled flag
    StHr: int = None
    StMin: int = None
    EnHr: int = None
    EnMin: int = None
    Attr: str = None
    start: time = None  # derived from StHr/StMin in __post_init__
    end: time = None  # derived from EnHr/EnMin in __post_init__
    dt: datetime = None  # filled in externally (see CalendarDay)

    def __post_init__(self):
        """Build `start`/`end` time objects when both components are present."""
        have_start = self.StHr is not None and self.StMin is not None
        if have_start:
            self.start = time(self.StHr, self.StMin)
        have_end = self.EnHr is not None and self.EnMin is not None
        if have_end:
            self.end = time(self.EnHr, self.EnMin)
@nested_dataclass
class CalendarDay:
    """Class for CalendarDays.

    One weekday of the mowing calendar; ``__post_init__`` resolves the day
    name and stamps each enabled slot with its next concrete datetime.
    """

    day: int = None  # weekday index, used as key into DAY_MAPPING
    day_name: str = None  # derived from `day` in __post_init__
    slots: List[CalendarSlot] = field(default_factory=lambda: [CalendarSlot])

    def __post_init__(self):
        """Update the dayname."""
        if self.day is not None:
            self.day_name = DAY_MAPPING[self.day]
        if self.slots:
            for slot in self.slots:
                if slot.En:
                    # Offset from today (local weekday) to this calendar day
                    # at the slot's start time.
                    today = date.today().weekday()
                    date_offset = timedelta(
                        days=self.day - today, hours=slot.StHr, minutes=slot.StMin
                    )
                    # Midnight today plus the offset gives the slot datetime.
                    new_dt = (
                        datetime.now().replace(
                            hour=0, minute=0, second=0, microsecond=0
                        )
                        + date_offset
                    )
                    # If that already passed this week, roll to next week.
                    if new_dt.date() < date.today():
                        new_dt = new_dt + timedelta(days=7)
                    slot.dt = new_dt
@nested_dataclass
class Calendar:
    """Class for Calendar.

    A calendar identifier plus its list of weekday entries.
    """

    cal: int = None  # calendar id
    days: List[CalendarDay] = field(default_factory=lambda: [CalendarDay])
@nested_dataclass
class PredictiveSchedule:
    """Class for PredictiveSchedule.

    Days the mower plans to mow and days explicitly excluded from mowing.
    """

    schedule_days: List[CalendarDay] = field(default_factory=lambda: [CalendarDay])
    exclusion_days: List[CalendarDay] = field(default_factory=lambda: [CalendarDay])
@nested_dataclass
class GenericData:
    """Static device information: name, serial, model and mowing mode."""

    alm_name: str = None
    alm_sn: str = None
    service_counter: int = None
    needs_service: bool = None
    alm_mode: str = None  # key into MOWING_MODE_DESCRIPTION
    bareToolnumber: str = None  # key into the model lookup tables
    alm_firmware_version: str = None
    model_description: str = None  # resolved in __post_init__
    model_voltage: ModelVoltage = field(default_factory=ModelVoltage)
    mowing_mode_description: str = None  # resolved in __post_init__

    def __post_init__(self):
        """Resolve model description, voltage range and mowing-mode text."""
        model = self.bareToolnumber
        self.model_description = MOWER_MODEL_DESCRIPTION.get(
            model, DEFAULT_LOOKUP_VALUE)
        self.model_voltage = MOWER_MODEL_VOLTAGE.get(model, ModelVoltage())
        self.mowing_mode_description = MOWING_MODE_DESCRIPTION.get(
            self.alm_mode, DEFAULT_LOOKUP_VALUE)
@dataclass
class Location:
    """Location Class.

    Geographic position and timezone of the mower/garden.
    """

    latitude: float = None
    longitude: float = None
    timezone: str = None
@dataclass
class Network:
    """Network Class.

    Cellular network state as reported by the device.
    """

    mcc: int = None  # mobile country code
    mnc: int = None  # mobile network code
    rssi: int = None  # signal strength
    currMode: str = None
    configMode: str = None
    steeredRssi: int = None
    networkCount: int = None
    networks: List[int] = None
@dataclass
class Config:
    """Config Class.

    Device configuration settings.
    """

    region: int = None
    language: int = None
    border_cut: int = None
    is_pin_set: bool = None
    wire_id: int = None
    bump_sensitivity: int = None
    alarm_mode: bool = None
@dataclass
class Setup:
    """Setup Class.

    Flags describing how far the device setup has progressed.
    """

    hasOwner: bool = None
    hasPin: bool = None
    hasMap: bool = None
    hasAutoCal: bool = None
    hasIntegrityCheckPassed: bool = None
@dataclass
class Security:
    """Security Class.

    Security-related device settings.
    """

    enabled: bool = None
    autolock: bool = None
@dataclass
class RuntimeDetail:
    """Operate/charge counters with a derived cutting time."""

    operate: int = None  # total operating time
    charge: int = None  # time spent charging
    cut: int = field(init=False, default=None)  # operate minus charge, via update_cut

    def update_cut(self):
        """Recompute `cut` as the rounded difference operate - charge."""
        mowing_time = self.operate - self.charge
        self.cut = round(mowing_time)
@nested_dataclass
class Runtime:  # pylint: disable=no-member,assigning-non-slot
    """Runtime Class.

    Total and per-session operate/charge counters.
    """

    total: RuntimeDetail = field(default_factory=RuntimeDetail)
    session: RuntimeDetail = field(default_factory=RuntimeDetail)

    def __post_init__(self):
        """Set cuts and calc totals."""
        # Totals are divided by 100 here; presumably the API reports them in
        # hundredths of the session unit -- TODO confirm against the API.
        # NOTE(review): truthiness tests skip values of exactly 0, so a zero
        # charge leaves total.cut unset and forces session.cut to 0.
        if self.total.charge:
            self.total.charge = round(self.total.charge / 100)
        if self.total.operate:
            self.total.operate = round(self.total.operate / 100)
        if self.total.charge:
            self.total.update_cut()
        if self.session.charge:
            self.session.update_cut()
        else:
            self.session.cut = 0
@dataclass
class Garden:
    """Garden Class.

    Statistics and metadata for one mapped garden.
    """

    id: int = None
    name: int = None
    signal_id: int = None
    size: int = None
    inner_bounds: int = None
    cuts: int = None
    runtime: int = None
    charge: int = None
    bumps: int = None
    stops: int = None
    last_mow: int = None
    map_cell_size: int = None
@nested_dataclass
class OperatingData:
    """Operating Data Class.

    Aggregates battery, garden and runtime state for the device.
    """

    hmiKeys: str = None
    battery: Battery = field(default_factory=Battery)
    garden: Garden = field(default_factory=Garden)
    runtime: Runtime = field(default_factory=Runtime)
@nested_dataclass
class State:
    """State Class.

    Snapshot of the mower's current state as reported by the backend.
    """

    state: int = None  # numeric state code
    map_update_available: bool = None
    mowed: int = None  # presumably percentage of garden mowed -- confirm
    mowmode: int = None
    error: int = None
    xPos: int = None  # position on the map
    yPos: int = None
    charge: int = None
    operate: int = None
    runtime: Runtime = field(default_factory=Runtime)
    mapsvgcache_ts: int = None
    svg_xPos: int = None  # position in the cached SVG map
    svg_yPos: int = None
    config_change: bool = None
    mow_trig: bool = None
@dataclass
class User:
    """User Class.

    Account information of the logged-in user.
    """

    email: str = None
    display_name: str = None
    language: str = None
    country: str = None
    optIn: bool = None
    optInApp: bool = None
| StarcoderdataPython |
1948136 | <gh_stars>10-100
from abc import (ABC, abstractmethod)
from queue import Queue
from ..lib import (
object_name, look_up, deep_map, inverse_deep_map)
import noodles
try:
import ujson as json
except ImportError:
import json
def _chain_fn(a, b):
def f(obj):
first = a(obj)
if first:
return first
return b(obj)
return f
class RefObject:
    """Wrapper that defers decoding of a serialised record.

    Holds the raw record until a worker actually dereferences it.
    """

    def __init__(self, rec):
        # the undecoded serialised record
        self.rec = rec
class Registry(object):
    """Serialisation registry, keeps a record of `Serialiser` objects.
    The Registry keeps a dictionary mapping (qualified) class names to
    :py:class:`Serialiser` objects. Given an object, the `__getitem__`
    method looks for the highest base class that it has a serialiser for.
    As a fall-back we install a Serialiser matching the Python
    `object` class.
    Detection by object type is not always meaningful or even possible.
    Before scanning for known base classes the look-up function passes
    the object through the `hook` function, which should return a string
    or `None`. If a string is returned that string is used to look-up
    the serialiser.
    Registries can be combined using the '+' operator. The left side argument
    is then used as `parent` to the new Registry, while the right-hand argument
    overrides and augments the Serialisers present. The `hook` functions
    are being chained, such that the right-hand registry takes precedence.
    The default serialiser is inherited from the left-hand argument.
    """
    def __init__(self, parent=None, types=None, hooks=None, hook_fn=None,
                 default=None):
        """Constructor
        :param parent:
            The new Registry takes the dictionary and hook from the parent.
            If no other arguments are given, we get a copy of `parent`.
        :type parent: `Registry`
        :param types:
            A dictionary of types to Serialiser objects. Each of these
            are added to the new Registry.
        :param hooks:
            A dictionary of strings to Serialiser objects. These are added
            directly to the dictionary internal to the new Registry.
        :param hook_fn:
            A function taking an object returning a string. The string should
            match a string in the hook dictionary. It should not be possible
            to confuse the returned string with a qualified Python name.
            One way to do this, is by enclosing the string with
            '<' '>' characters.
        :param default:
            The default fall-back for the new Registry.
        :type default: `Serialiser`"""
        # Internal table: qualified class name or hook string -> Serialiser.
        self._sers = parent._sers.copy() if parent else {}
        if types:
            for k, v in types.items():
                self[k] = v
        if hooks:
            self._sers.update(hooks)
        self.default = default if default \
            else parent.default if parent \
            else SerUnknown()
        # Chain hook functions so this registry's hook is consulted first,
        # falling back to the parent's hook.
        if hook_fn and parent and parent._hook:
            self._hook = _chain_fn(hook_fn, parent._hook)
        else:
            self._hook = hook_fn if hook_fn \
                else parent._hook if parent \
                else None
    def __add__(self, other):
        """Merge two registries. Right-side takes precedence over left-side
        argument, with exception of the default (fall-back) serialiser."""
        reg = Registry(
            parent=self, hooks=other._sers,
            hook_fn=other._hook, default=self.default)
        return reg
    @property
    def default(self):
        # The fall-back serialiser is stored under the `object` class.
        return self[object]
    @default.setter
    def default(self, ser):
        self[object] = ser
    def __getitem__(self, key):
        """Searches the most fitting serialiser based on the inheritance tree
        of the given class. We search this tree breadth-first."""
        q = Queue()  # use a queue for breadth-first descent
        q.put(key)
        while not q.empty():
            cls = q.get()
            m_n = object_name(cls)
            if m_n in self._sers:
                return self._sers[m_n]
            else:
                # Not registered: enqueue the base classes and keep looking.
                for base in cls.__bases__:
                    q.put(base)
    def __setitem__(self, cls, value):
        """Sets a new Serialiser for the given class."""
        m_n = object_name(cls)
        self._sers[m_n] = value
    def encode(self, obj, host=None):
        """Encode an object using the serialisers available
        in this registry. Objects that have a type that is one of
        [dict, list, str, int, float, bool, tuple] are send back unchanged.
        A host-name can be given as an additional argument to identify the
        host in the resulting record if the encoder yields any filenames.
        This function only treats the object for one layer deep.
        :param obj:
            The object that needs encoding.
        :param host:
            The name of the encoding host.
        :type host: str
        """
        if obj is None:
            return None
        # Primitive types pass through unchanged.
        if type(obj) in [dict, list, str, int, float, bool]:
            return obj
        # A RefObject already carries its encoded record.
        if isinstance(obj, RefObject):
            return obj.rec
        # Hook look-up takes precedence over class-based look-up.
        hook = self._hook(obj) if self._hook else None
        typename = hook if hook else '<object>'
        classname = object_name(type(obj))
        # Helper handed to the serialiser to wrap its payload with meta-data.
        def make_rec(data, ref=None, files=None):
            rec = {'_noodles': noodles.__version__,
                   'type': typename,
                   'class': classname,
                   'data': data}
            if ref is not None:
                rec['ref'] = ref
            if files:
                rec['host'] = host
                rec['files'] = files
            return rec
        if hook:
            return self._sers[hook].encode(obj, make_rec)
        enc = self[type(obj)]
        result = enc.encode(obj, make_rec)
        return result
    def decode(self, rec, deref=False):
        """Decode a record to return an object that could be considered
        equivalent to the original.
        The record is not touched if `_noodles` is not an item in the record.
        :param rec:
            A dictionary record to be decoded.
        :type rec: dict
        :param deref:
            Wether to decode a RefObject. If the encoder wrote files on a
            remote host, reading this file will be slow and result in an
            error if the file is not present.
        :type deref: bool"""
        if not isinstance(rec, dict):
            return rec
        if '_noodles' not in rec:
            return rec
        # if not deref:
        # Records flagged `ref` stay wrapped until explicitly dereferenced.
        if rec.get('ref', False) and not deref:
            return RefObject(rec)
        typename = rec['type']
        classname = rec['class']
        try:
            cls = look_up(classname) if classname else None
        except (AttributeError, ImportError):
            # The class may not be importable on this host.
            cls = None
        if typename == '<object>':
            assert cls is not None, \
                "could not lookup class '{}', decoding '{}'".format(
                    classname, rec)
            return self[cls].decode(cls, rec['data'])
        else:
            return self._sers[typename].decode(cls, rec['data'])
    def deep_encode(self, obj, host=None):
        # Recursively encode every nested object, not just one layer.
        return deep_map(lambda o: self.encode(o, host), obj)
    def deep_decode(self, rec, deref=False):
        # Inverse of deep_encode: recursively decode nested records.
        return inverse_deep_map(lambda r: self.decode(r, deref), rec)
    def to_json(self, obj, host=None, indent=None):
        """Recursively encode `obj` and convert it to a JSON string.
        :param obj:
            Object to encode.
        :param host:
            hostname where this object is being encoded.
        :type host: str"""
        if indent:
            return json.dumps(deep_map(lambda o: self.encode(o, host), obj),
                              indent=indent)
        else:
            return json.dumps(deep_map(lambda o: self.encode(o, host), obj))
    def from_json(self, data, deref=False):
        """Decode the string from JSON to return the original object (if
        `deref` is true. Uses the `json.loads` function with `self.decode`
        as object_hook.
        :param data:
            JSON encoded string.
        :type data: str
        :param deref:
            Whether to decode records that gave `ref=True` at encoding.
        :type deref: bool"""
        return self.deep_decode(json.loads(data), deref)
        # return json.loads(data, object_hook=lambda o: self.decode(o, deref))
    def dereference(self, data, host=None):
        """Dereferences RefObjects stuck in the hierarchy. This is a bit
        of an ugly hack."""
        return self.deep_decode(self.deep_encode(data, host), deref=True)
class Serialiser(ABC):
    """Base class for all serialisers.

    Subclasses override :meth:`encode` and :meth:`decode` to convert
    objects of a supported type to and from plain records.

    :param name:
        The type (or type name) this serialiser is meant to handle. It is
        kept for reference only; actual look-up happens in the Registry.
    """

    def __init__(self, name='<unknown>'):
        if isinstance(name, str):
            self.name = name
        else:
            # Accept a class and use its __name__; anything else falls back.
            self.name = getattr(name, '__name__', '<unknown>')

    @abstractmethod
    def encode(self, obj, make_rec):
        """Encode *obj* into a record.

        Implementations receive the object together with a helper of the
        form ``make_rec(data, ref=False, files=None)`` that wraps the
        encoded ``data`` with meta-data.  Pass ``ref=True`` when decoding
        should be delayed until a worker actually needs the value (for
        instance after writing an external file), and list any written
        files via ``files=[...]``.  The file list is used by noodles to
        copy files between hosts and is *not* handed back to the decoder,
        so any filename the decoder needs must also be part of the record.

        :param obj: object to be encoded.
        :param make_rec: helper packing the encoded data with meta-data.
        """
        pass

    @abstractmethod
    def decode(self, cls, data):
        """Recreate an object of type *cls* from *data*.

        :param cls:
            Class restored by importing the qualified name that was stored
            in the record at encoding time.
        :type cls: type
        :param data:
            The payload the encoder passed to ``make_rec``.
        """
        pass
class SerUnknown(Serialiser):
    """Fall-back serialiser: always raises, naming the unsupported type."""

    def encode(self, obj, make_rec):
        raise NotImplementedError(
            "Cannot encode {}: encoder for type `{}` is not implemented."
            .format(obj, type(obj).__name__))

    def decode(self, cls, data):
        raise NotImplementedError(
            "Decoder for type `{}` is not implemented."
            .format(cls.__name__))
| StarcoderdataPython |
8074076 | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._enums import *
__all__ = [
'GetDataSetResult',
'AwaitableGetDataSetResult',
'get_data_set',
'get_data_set_output',
]
@pulumi.output_type
class GetDataSetResult:
def __init__(__self__, arn=None, column_groups=None, column_level_permission_rules=None, consumed_spice_capacity_in_bytes=None, created_time=None, import_mode=None, last_updated_time=None, logical_table_map=None, name=None, output_columns=None, permissions=None, physical_table_map=None, row_level_permission_data_set=None, tags=None):
if arn and not isinstance(arn, str):
raise TypeError("Expected argument 'arn' to be a str")
pulumi.set(__self__, "arn", arn)
if column_groups and not isinstance(column_groups, list):
raise TypeError("Expected argument 'column_groups' to be a list")
pulumi.set(__self__, "column_groups", column_groups)
if column_level_permission_rules and not isinstance(column_level_permission_rules, list):
raise TypeError("Expected argument 'column_level_permission_rules' to be a list")
pulumi.set(__self__, "column_level_permission_rules", column_level_permission_rules)
if consumed_spice_capacity_in_bytes and not isinstance(consumed_spice_capacity_in_bytes, float):
raise TypeError("Expected argument 'consumed_spice_capacity_in_bytes' to be a float")
pulumi.set(__self__, "consumed_spice_capacity_in_bytes", consumed_spice_capacity_in_bytes)
if created_time and not isinstance(created_time, str):
raise TypeError("Expected argument 'created_time' to be a str")
pulumi.set(__self__, "created_time", created_time)
if import_mode and not isinstance(import_mode, str):
raise TypeError("Expected argument 'import_mode' to be a str")
pulumi.set(__self__, "import_mode", import_mode)
if last_updated_time and not isinstance(last_updated_time, str):
raise TypeError("Expected argument 'last_updated_time' to be a str")
pulumi.set(__self__, "last_updated_time", last_updated_time)
if logical_table_map and not isinstance(logical_table_map, dict):
raise TypeError("Expected argument 'logical_table_map' to be a dict")
pulumi.set(__self__, "logical_table_map", logical_table_map)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if output_columns and not isinstance(output_columns, list):
raise TypeError("Expected argument 'output_columns' to be a list")
pulumi.set(__self__, "output_columns", output_columns)
if permissions and not isinstance(permissions, list):
raise TypeError("Expected argument 'permissions' to be a list")
pulumi.set(__self__, "permissions", permissions)
if physical_table_map and not isinstance(physical_table_map, dict):
raise TypeError("Expected argument 'physical_table_map' to be a dict")
pulumi.set(__self__, "physical_table_map", physical_table_map)
if row_level_permission_data_set and not isinstance(row_level_permission_data_set, dict):
raise TypeError("Expected argument 'row_level_permission_data_set' to be a dict")
pulumi.set(__self__, "row_level_permission_data_set", row_level_permission_data_set)
if tags and not isinstance(tags, list):
raise TypeError("Expected argument 'tags' to be a list")
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter
def arn(self) -> Optional[str]:
"""
<p>The Amazon Resource Name (ARN) of the resource.</p>
"""
return pulumi.get(self, "arn")
@property
@pulumi.getter(name="columnGroups")
def column_groups(self) -> Optional[Sequence['outputs.DataSetColumnGroup']]:
"""
<p>Groupings of columns that work together in certain QuickSight features. Currently, only geospatial hierarchy is supported.</p>
"""
return pulumi.get(self, "column_groups")
@property
@pulumi.getter(name="columnLevelPermissionRules")
def column_level_permission_rules(self) -> Optional[Sequence['outputs.DataSetColumnLevelPermissionRule']]:
return pulumi.get(self, "column_level_permission_rules")
@property
@pulumi.getter(name="consumedSpiceCapacityInBytes")
def consumed_spice_capacity_in_bytes(self) -> Optional[float]:
"""
<p>The amount of SPICE capacity used by this dataset. This is 0 if the dataset isn't
imported into SPICE.</p>
"""
return pulumi.get(self, "consumed_spice_capacity_in_bytes")
@property
@pulumi.getter(name="createdTime")
def created_time(self) -> Optional[str]:
"""
<p>The time that this dataset was created.</p>
"""
return pulumi.get(self, "created_time")
@property
@pulumi.getter(name="importMode")
def import_mode(self) -> Optional['DataSetImportMode']:
return pulumi.get(self, "import_mode")
@property
@pulumi.getter(name="lastUpdatedTime")
def last_updated_time(self) -> Optional[str]:
"""
<p>The last time that this dataset was updated.</p>
"""
return pulumi.get(self, "last_updated_time")
@property
@pulumi.getter(name="logicalTableMap")
def logical_table_map(self) -> Optional['outputs.DataSetLogicalTableMap']:
return pulumi.get(self, "logical_table_map")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
<p>The display name for the dataset.</p>
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="outputColumns")
def output_columns(self) -> Optional[Sequence['outputs.DataSetOutputColumn']]:
"""
<p>The list of columns after all transforms. These columns are available in templates,
analyses, and dashboards.</p>
"""
return pulumi.get(self, "output_columns")
@property
@pulumi.getter
def permissions(self) -> Optional[Sequence['outputs.DataSetResourcePermission']]:
"""
<p>A list of resource permissions on the dataset.</p>
"""
return pulumi.get(self, "permissions")
@property
@pulumi.getter(name="physicalTableMap")
def physical_table_map(self) -> Optional['outputs.DataSetPhysicalTableMap']:
return pulumi.get(self, "physical_table_map")
@property
@pulumi.getter(name="rowLevelPermissionDataSet")
def row_level_permission_data_set(self) -> Optional['outputs.DataSetRowLevelPermissionDataSet']:
return pulumi.get(self, "row_level_permission_data_set")
@property
@pulumi.getter
def tags(self) -> Optional[Sequence['outputs.DataSetTag']]:
"""
<p>Contains a map of the key-value pairs for the resource tag or tags assigned to the dataset.</p>
"""
return pulumi.get(self, "tags")
class AwaitableGetDataSetResult(GetDataSetResult):
    """Awaitable variant of GetDataSetResult (generated)."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # `if False: yield` turns __await__ into a generator that never
        # actually yields, so awaiting it completes immediately with a copy.
        if False:
            yield self
        return GetDataSetResult(
            arn=self.arn,
            column_groups=self.column_groups,
            column_level_permission_rules=self.column_level_permission_rules,
            consumed_spice_capacity_in_bytes=self.consumed_spice_capacity_in_bytes,
            created_time=self.created_time,
            import_mode=self.import_mode,
            last_updated_time=self.last_updated_time,
            logical_table_map=self.logical_table_map,
            name=self.name,
            output_columns=self.output_columns,
            permissions=self.permissions,
            physical_table_map=self.physical_table_map,
            row_level_permission_data_set=self.row_level_permission_data_set,
            tags=self.tags)
def get_data_set(aws_account_id: Optional[str] = None,
                 data_set_id: Optional[str] = None,
                 opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetDataSetResult:
    """
    Definition of the AWS::QuickSight::DataSet Resource Type.

    Generated invoke wrapper: looks up a QuickSight DataSet by account id
    and data-set id and returns its attributes.
    """
    __args__ = dict()
    __args__['awsAccountId'] = aws_account_id
    __args__['dataSetId'] = data_set_id
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    # Delegate to the Pulumi engine; `typ` tells it how to deserialise.
    __ret__ = pulumi.runtime.invoke('aws-native:quicksight:getDataSet', __args__, opts=opts, typ=GetDataSetResult).value
    return AwaitableGetDataSetResult(
        arn=__ret__.arn,
        column_groups=__ret__.column_groups,
        column_level_permission_rules=__ret__.column_level_permission_rules,
        consumed_spice_capacity_in_bytes=__ret__.consumed_spice_capacity_in_bytes,
        created_time=__ret__.created_time,
        import_mode=__ret__.import_mode,
        last_updated_time=__ret__.last_updated_time,
        logical_table_map=__ret__.logical_table_map,
        name=__ret__.name,
        output_columns=__ret__.output_columns,
        permissions=__ret__.permissions,
        physical_table_map=__ret__.physical_table_map,
        row_level_permission_data_set=__ret__.row_level_permission_data_set,
        tags=__ret__.tags)
# The decorator supplies the implementation (lifting get_data_set into the
# Output world); the `...` body is intentional.
@_utilities.lift_output_func(get_data_set)
def get_data_set_output(aws_account_id: Optional[pulumi.Input[str]] = None,
                        data_set_id: Optional[pulumi.Input[str]] = None,
                        opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetDataSetResult]:
    """
    Definition of the AWS::QuickSight::DataSet Resource Type.
    """
    ...
| StarcoderdataPython |
3363119 | <reponame>marzy-bn/Leetcode_2022<filename>1929-concatenation-of-array/1929-concatenation-of-array.py
class Solution:
    def getConcatenation(self, nums: List[int]) -> List[int]:
        """Extend ``nums`` in place with a copy of itself and return it.

        Equivalent to ans[i] = nums[i % n] for 0 <= i < 2n.
        """
        # Snapshot the original contents first so the extension does not
        # feed back into the iteration; this also removes the unused second
        # pointer the previous version carried.
        nums.extend(nums[:])
        return nums
5011078 | <reponame>enriquecoronadozu/NEP_samples<filename>nanomsg/python/publish-subscribe/publisher.py
import nep
import time
import sys
msg_type = "json" # Message type to listen. "string" or "json"
node = nep.node("publisher_sample", "nanomsg") # Create a new node
conf = node.broker(mode = "one2many") # Select the configuration of the publisher
pub = node.new_pub("pub_sub_test_nn",msg_type,conf) # Set the topic and the configuration of the publisher
# Publish a message each second
while True:
    # --- String example ---
    if msg_type == "string":
        msg = "hello world"
        print ("sending: " + msg)
        pub.publish(msg)
        time.sleep(1)
    # --- JSON example ---
    if msg_type == "json":
        # NOTE(review): `msg = data = {...}` binds the same dict to two
        # names; `data` is never used afterwards -- confirm it can go.
        msg = data = {"node":"perception", "primitive":"speech", "input":"add", "robot":"pepper", "parameters":"0"}
        print ("sending: " + str(msg))
        pub.publish(msg)
        time.sleep(1)
8066203 | <reponame>curtiszki/CYK-character-scrape
# csv_read_write.py
# Use the CSV module to read and write dictionary files.
import re
import os.path
class CsvReadWrite(object):
    '''
    Read CC-CEDICT dictionary data and write it out as a simple CSV file.

    The instance remembers an input file, an output directory and an output
    file name; the resolved output path is computed once at construction.
    '''

    # Pattern compiled once and hoisted out of the per-line loop; the four
    # capture groups are (traditional, simplified, pinyin, definitions).
    _ENTRY_RE = re.compile(
        r'([\u4e00-\u9fff]+)?\s([\u4e00-\u9fff]+)?\s*\[*([^\]]+)?\]*\W+\/+(\[?\b[^\r\n\/(?<=\b)]*\]?)')

    def __init__(self, inputFile, outputDir, fileName="ce_text_output.csv"):
        """Store the paths and pre-compute the output destination.

        :param inputFile: path of the CC-CEDICT source file.
        :param outputDir: directory (absolute or relative) for the CSV.
        :param fileName: name of the CSV file to create.
        """
        self.input_file = inputFile
        self.output_dir = outputDir
        self.file_name = fileName
        self.output_dest = self.get_output_location()

    def get_output_location(self):
        """Return the normalised full path of the output CSV file.

        :raises ValueError: if the directory or file name is not a string.
        """
        if not isinstance(self.output_dir, str) or not isinstance(self.file_name, str):
            raise ValueError("Output destination/file must be a string.")
        if os.path.isabs(self.output_dir):
            # abspath normalizes the path similar to normpath
            return os.path.abspath(os.path.join(self.output_dir, self.file_name))
        # relative directory: normalise it as given (resolved later by open())
        return os.path.normpath(os.path.join(self.output_dir, self.file_name))

    def read_into_dictionary(self, input_file=None):
        """Parse a CC-CEDICT file into ``{headword: {"pron":..., "def":...}}``.

        Both the traditional and the simplified headword are inserted when
        they differ.  Comment lines (``#``) and blank lines are skipped.

        :param input_file: optional override for the path given at init.
        :raises AttributeError: if three or fewer entries were parsed,
            which is treated as a malformed dictionary file.
        """
        input_file = self.input_file if input_file is None else input_file
        data_dict = dict()
        with open(input_file, 'rb') as data:
            for raw in data:
                line = raw.decode('utf-8')
                if line.startswith('#') or line == '\n':
                    continue
                parts = self._ENTRY_RE.split(line)
                # A successful match yields at least five elements (prefix
                # plus four capture groups); the old `>= 4` test could raise
                # IndexError on a partial match when indexing parts[4].
                if len(parts) >= 5:
                    # NOTE(review): optional groups may be None here, which
                    # would insert a None key -- confirm inputs always match.
                    data_dict[parts[1]] = {"pron": parts[3], "def": parts[4]}
                    if parts[1] != parts[2]:
                        data_dict[parts[2]] = {"pron": parts[3], "def": parts[4]}
        if len(data_dict) > 2:
            return data_dict
        raise AttributeError(
            "Parsed too few dictionary entries from %r" % (input_file,))

    def write_csv(self, outputContent):
        """Write ``outputContent`` to ``self.output_dest`` as CSV-style text.

        Each key becomes a row: ``key, pron, def``.

        :param outputContent: mapping as produced by read_into_dictionary.
        :raises ValueError: if ``outputContent`` is not a dict.
        """
        if not isinstance(outputContent, dict):
            raise ValueError("The passed content needs to be in dict format.")
        rows = []
        for key, value in outputContent.items():
            rows.append(", ".join([key] + [value[subkey] for subkey in value]))
        # join once instead of repeated `+=` (quadratic on large inputs)
        strings_write = "\n".join(rows) + "\n" if rows else ""
        try:
            with open(self.output_dest, "wb") as f:
                f.write(strings_write.encode("utf-8"))
            print("Successfully wrote to the file: {}".format(self.output_dest))
        except IOError as e:
            print("Couldn't write to the designated file. {}".format(e))
| StarcoderdataPython |
8106064 | <reponame>pnsaevik/ladim
# Testing the nested_gridforce
import numpy as np
from nested_gridforce import Grid
# Build a nested grid with default arguments and probe it with four points.
config = dict(grid_args=[])
g = Grid(config)
# Two first in fine grid, land, sea
# Two next outside, land, sea
X = np.array([60, 80, 50, 30])
Y = np.array([40, 30, 50, 20])
print("X, Y = ", X, Y)
# Convert the shared coordinates into each subgrid's own coordinates.
X1, Y1 = g.xy2fine(X, Y)
print("X1, Y1 =", X1, Y1)
X2, Y2 = g.xy2coarse(X, Y)
print("X2, Y2 = ", X2.round(2), Y2.round(2))
# Boolean mask: which points fall inside the fine grid.
fine = g.fine_grid.ingrid(X1, Y1)
print("fine = ", fine)
print("")
# Depth sampled through the nesting wrapper vs. each grid directly.
H = g.sample_depth(X, Y)
print("depth = ", H)
H1 = g.fine_grid.sample_depth(X1[fine], Y1[fine])
print("depth1 = ", H1)
H2 = g.coarse_grid.sample_depth(X2, Y2)
print("depth2 = ", H2)
print()
# All particles in fine
H = g.sample_depth(X[fine], Y[fine])
print("All in fine", H)
# All particles in coarse
H = g.sample_depth(X[~fine], Y[~fine])
print("All in coarse", H)
print("")
# Longitude/latitude through the wrapper and through each subgrid.
lon0, lat0 = g.lonlat(X, Y)
lon1, lat1 = g.fine_grid.lonlat(X1[fine], Y1[fine])
lon2, lat2 = g.coarse_grid.lonlat(X2, Y2)
print("lon0 = ", lon0)
print("lon1 = ", lon1)
print("lon2 = ", lon2)
print("lat0 = ", lat0)
print("lat1 = ", lat1)
print("lat2 = ", lat2)
print("")
# Land masking through the wrapper and through each subgrid.
onland = g.onland(X, Y)
print("on land = ", onland)
onland = g.fine_grid.onland(X1[fine], Y1[fine])
print("fine: ", onland)
onland = g.coarse_grid.onland(X2, Y2)
print("coarse: ", onland)
| StarcoderdataPython |
4868874 | from czsc.extend.utils import push_text
from datetime import datetime
from czsc.extend.analyzeExtend import JKCzscTraderExtend as CzscTrader
import traceback
import time
import datetime
import shutil
import os
from czsc.objects import Signal, Factor, Event, Operate
from czsc.data.jq import get_kline
import pandas as pd
# 基础参数配置
ct_path = os.path.join("d:\\data", "czsc_traders")
os.makedirs(ct_path, exist_ok=True)
symbol = '399006.XSHE'
my_dic_container = {}
def _make_event(name, operate):
    """Build one monitoring Event for a buy/sell point type (e.g. "一买").

    Creates one Factor per combination of frequency (5/15/30 minutes) and
    signal family (类买卖点 "class b/s point", 基础形态 "basic shape"),
    reproducing the original hand-written factor lists exactly, in the
    same order.
    """
    factors = []
    for freq in ("5分钟", "15分钟", "30分钟"):
        for label, family in (("类", "类买卖点"), ("形", "基础形态")):
            factors.append(Factor(
                name="{}{}{}".format(freq, label, name),
                signals_all=[Signal("{}_倒1笔_{}_类{}_任意_任意_0".format(freq, family, name))],
            ))
    return Event(name=name, operate=operate, factors=factors)


def start():
    """Replay 1-minute bars for ``symbol`` over a fixed date range and push
    an alert the first time each buy/sell factor fires on a given day.

    Side effects: recreates the snapshot directory under ``ct_path``, calls
    ``push_text`` for each alert message, and writes one HTML snapshot per
    alerting bar.
    """
    moni_path = os.path.join(ct_path, "monitor")
    # Start every run from an empty snapshot directory.
    if os.path.exists(moni_path):
        shutil.rmtree(moni_path)
    os.makedirs(moni_path, exist_ok=True)
    # 开多 (open long): class 1/2/3 buy points; 平多 (close long): sell points.
    events_monitor = (
        [_make_event(name, Operate.LO) for name in ("一买", "二买", "三买")]
        + [_make_event(name, Operate.LE) for name in ("一卖", "二卖", "三卖")]
    )
    try:
        current_date = pd.to_datetime('2021-08-10')
        end_date = pd.to_datetime("2021-08-20")
        ct = CzscTrader(symbol, max_count=1000, end_date=current_date)
        data = get_kline(symbol, end_date=end_date, start_date=current_date, freq="1min")
        # Per-run de-duplication of alerts, keyed by date + factor.
        hit_event: dict = {}
        for item in data:
            msg = f"标的代码:{symbol}\n同花顺F10:http://basic.10jqka.com.cn/{symbol.split('.')[0]}\n"
            for event in events_monitor:
                m, f = event.is_match(ct.s)
                container_key = "{}{}{}".format(symbol, f, item.dt.strftime('%Y-%m-%d'))
                if m:
                    result = my_dic_container.get(container_key, None)
                    if result is None:
                        print("监控提醒:date{} {}@{}\n".format(item.dt.strftime('%Y-%m-%d %H:%M'), event.name, f))
                        key = item.dt.strftime('%Y-%m-%d') + f
                        contains = hit_event.get(key)
                        if contains is None:
                            # First hit of this factor today: record it in both
                            # the per-run and the cross-run de-dup maps.
                            hit_event[key] = '1'
                            my_dic_container[container_key] = 1
                            msg += "监控提醒:date{} {}@{}\n".format(item.dt.strftime('%Y-%m-%d %H:%M'), event.name, f)
            if "监控提醒" in msg:
                push_text(msg.strip("\n"))
                # Each alert also saves an HTML snapshot of the trader state
                # under moni_path.
                file_html = os.path.join(moni_path, f"{ct.symbol}_{item.dt.strftime('%Y%m%d%H%M')}.html")
                ct.take_snapshot(file_html, width="1400px", height="580px")
            ct.update_factors_with_bars([item])
    except Exception as e:
        traceback.print_exc()
        print("{} 执行失败 - {}".format(symbol, e))
if __name__ == '__main__':
    # Run one full replay when executed as a script.
    start()
    print("end")
| StarcoderdataPython |
4955300 | <reponame>z-btc/z-btc-main<filename>test/functional/bsv-pbv-submitblock.py<gh_stars>1-10
#!/usr/bin/env python3
# Copyright (c) 2019 Bitcoin Association
# Distributed under the Open BSV software license, see the accompanying file LICENSE.
"""
We will test the following situation where block 1 is the tip and three blocks
are sent for parallel validation:
1
/ | \
2 3 4
Blocks 2,4 are hard to validate and block 3 is easy to validate.
- Blocks 2,3 are sent via p2p.
- Block 4 is submitted via rpc command submitblock.
Block 3 should be active in the end because it was easiest to validate and
therefore won the validation race.
*This test is similar to bsv-pbv-submitminingsolution.py which uses different RPC call to
submit the block.
Additionally this test also checks that blocks with same height but later arrival
are also announced to the network after being validated. (lines marked with ***
at the beginning of comments)
"""
import threading
from test_framework.blocktools import prepare_init_chain
from test_framework.util import (
assert_equal,
p2p_port,
get_rpc_proxy,
rpc_url,
get_datadir_path,
wait_until
)
from test_framework.mininode import (
NetworkThread,
NodeConn,
NodeConnCB,
msg_block,
msg_sendcmpct,
msg_getheaders,
ToHex,
CInv
)
from test_framework.test_framework import BitcoinTestFramework, ChainManager
from bsv_pbv_common import (
wait_for_waiting_blocks,
wait_for_validating_blocks
)
class PBVSubmitBlock(BitcoinTestFramework):
    """Parallel block validation test that feeds one block in via RPC ``submitblock``.

    See the module docstring: two slow-to-validate blocks (one via p2p, one
    via RPC) race against one fast block sent over p2p; the fast block must
    end up as the active tip. Also checks early compact-block announcements.
    """
    def set_test_params(self):
        """Single node, fresh chain; whitelist localhost so p2p peers are trusted."""
        self.setup_clean_chain = True
        self.num_nodes = 1
        self.chain = ChainManager()
        self.extra_args = [["-whitelist=127.0.0.1"]]
    def run_test(self):
        """Drive the three-block validation race described in the module docstring."""
        block_count = 0
        # Create a P2P connections
        node0 = NodeConnCB()
        connection0 = NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], node0)
        node0.add_connection(connection0)
        node1 = NodeConnCB()
        connection1 = NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], node1)
        node1.add_connection(connection1)
        # *** Prepare node connection for early announcements testing
        node2 = NodeConnCB()
        node2.add_connection(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], node2))
        NetworkThread().start()
        # wait_for_verack ensures that the P2P connection is fully up.
        node0.wait_for_verack()
        node1.wait_for_verack()
        # *** Activate early announcement functionality for this connection
        # After this point the early announcements are not received yet -
        # we still need to set latest announced block (CNode::pindexBestKnownBlock)
        # which is set for e.g. by calling best headers message with locator
        # set to non-null
        node2.wait_for_verack()
        node2.send_message(msg_sendcmpct(announce=True))
        self.chain.set_genesis_hash(int(self.nodes[0].getbestblockhash(), 16))
        # Build 101 blocks so the first coinbase output becomes spendable.
        _, outs, block_count = prepare_init_chain(self.chain, 101, 1, block_0=False, start_block=0, node=node0)
        out = outs[0]
        self.log.info("waiting for block height 101 via rpc")
        self.nodes[0].waitforblockheight(101)
        tip_block_num = block_count - 1
        # adding extra transactions to get different block hashes
        block2_hard = self.chain.next_block(block_count, spend=out, extra_txns=8)
        block_count += 1
        self.chain.set_tip(tip_block_num)
        block3_easier = self.chain.next_block(block_count, spend=out, extra_txns=2)
        block_count += 1
        self.chain.set_tip(tip_block_num)
        block4_hard = self.chain.next_block(block_count, spend=out, extra_txns=10)
        block_count += 1
        # send three "hard" blocks, with waitaftervalidatingblock we artificially
        # extend validation time.
        self.log.info(f"hard block2 hash: {block2_hard.hash}")
        self.nodes[0].waitaftervalidatingblock(block2_hard.hash, "add")
        self.log.info(f"hard block4 hash: {block4_hard.hash}")
        self.nodes[0].waitaftervalidatingblock(block4_hard.hash, "add")
        # make sure block hashes are in waiting list
        wait_for_waiting_blocks({block2_hard.hash, block4_hard.hash}, self.nodes[0], self.log)
        # *** Complete early announcement setup by sending getheaders message
        #     with a non-null locator (pointing to the last block that we know
        #     of on python side - we claim that we know of all the blocks that
        #     bitcoind node knows of)
        #
        #     We also set on_cmpctblock handler as early announced blocks are
        #     announced via compact block messages instead of inv messages
        node2.send_and_ping(msg_getheaders(locator_have=[int(self.nodes[0].getbestblockhash(), 16)]))
        receivedAnnouncement = False
        waiting_for_announcement_block_hash = block2_hard.sha256
        def on_cmpctblock(conn, message):
            # Flip the flag once the expected block is announced via cmpctblock.
            nonlocal receivedAnnouncement
            message.header_and_shortids.header.calc_sha256()
            if message.header_and_shortids.header.sha256 == waiting_for_announcement_block_hash:
                receivedAnnouncement = True
        node2.on_cmpctblock = on_cmpctblock
        # send one block via p2p and one via rpc
        node0.send_message(msg_block(block2_hard))
        # *** make sure that we receive announcement of the block before it has
        #     been validated
        wait_until(lambda: receivedAnnouncement)
        # making rpc call submitblock in a separate thread because waitaftervalidation is blocking
        # the return of submitblock
        submitblock_thread = threading.Thread(target=self.nodes[0].submitblock, args=(ToHex(block4_hard),))
        submitblock_thread.start()
        # because self.nodes[0] rpc is blocked we use another rpc client
        rpc_client = get_rpc_proxy(rpc_url(get_datadir_path(self.options.tmpdir, 0), 0), 0,
                                   coveragedir=self.options.coveragedir)
        wait_for_validating_blocks({block2_hard.hash, block4_hard.hash}, rpc_client, self.log)
        # *** prepare to intercept block3_easier announcement - it will not be
        #     announced before validation is complete as early announcement is
        #     limited to announcing one block per height (siblings are ignored)
        #     but after validation is complete we should still get the announcing
        #     compact block message
        receivedAnnouncement = False
        waiting_for_announcement_block_hash = block3_easier.sha256
        self.log.info(f"easy block3 hash: {block3_easier.hash}")
        node1.send_message(msg_block(block3_easier))
        # *** Make sure that we receive compact block announcement of the block
        #     after the validation is complete even though it was not the first
        #     block that was received by bitcoind node.
        #
        #     Also make sure that we receive inv announcement of the block after
        #     the validation is complete by the nodes that are not using early
        #     announcement functionality.
        wait_until(lambda: receivedAnnouncement)
        node0.wait_for_inv([CInv(2, block3_easier.sha256)]) # 2 == GetDataMsg::MSG_BLOCK
        # node 1 was the sender but receives inv for block non the less
        # (with early announcement that's not the case - sender does not receive the announcement)
        node1.wait_for_inv([CInv(2, block3_easier.sha256)]) # 2 == GetDataMsg::MSG_BLOCK
        rpc_client.waitforblockheight(102)
        assert_equal(block3_easier.hash, rpc_client.getbestblockhash())
        # now we can remove waiting status from blocks and finish their validation
        rpc_client.waitaftervalidatingblock(block2_hard.hash, "remove")
        rpc_client.waitaftervalidatingblock(block4_hard.hash, "remove")
        submitblock_thread.join()
        # wait till validation of block or blocks finishes
        node0.sync_with_ping()
        # easier block should still be on tip
        assert_equal(block3_easier.hash, self.nodes[0].getbestblockhash())
if __name__ == '__main__':
    PBVSubmitBlock().main()
| StarcoderdataPython |
3599934 | <reponame>kaka-lin/pycon.tw
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-04-09 03:49
from __future__ import unicode_literals
from django.db import migrations, models
import sponsors.models
class Migration(migrations.Migration):
    """Initial migration for the sponsors app: creates the ``Sponsor`` model.

    Auto-generated by Django 1.9.2 (see file header); generated migrations
    are normally left unedited.
    """
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Sponsor',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100, verbose_name='name')),
                ('website_url', models.URLField(blank=True, max_length=255, verbose_name='website URL')),
                ('intro', models.TextField(verbose_name='Introduction')),
                ('logo', models.ImageField(upload_to=sponsors.models.logo_upload_to, verbose_name='logo')),
                ('level', models.PositiveSmallIntegerField(choices=[(0, 'platinum sponsor'), (1, 'gold sponsor'), (2, 'silver sponsor'), (3, 'bronze sponsor'), (4, 'special sponsor')], verbose_name='level')),
            ],
            options={
                'verbose_name': 'sponsor',
                'verbose_name_plural': 'sponsors',
            },
        ),
    ]
| StarcoderdataPython |
5127006 | <gh_stars>0
import logging
from os import getenv
from os.path import dirname, join
from typing import Optional
from dotenv import load_dotenv
# Name -> numeric level map; FATAL and WARN are the stdlib aliases of
# CRITICAL and WARNING respectively, mirroring the original if/elif chain.
_LOGGING_LEVEL_BY_NAME = {
    'CRITICAL': logging.CRITICAL,
    'FATAL': logging.FATAL,
    'ERROR': logging.ERROR,
    'WARNING': logging.WARNING,
    'WARN': logging.WARN,
    'INFO': logging.INFO,
    'DEBUG': logging.DEBUG,
    'NOTSET': logging.NOTSET,
}


def get_logging_level(level: Optional[str]) -> int:
    """Translate a level name from the environment into a ``logging`` constant.

    Args:
        level: Upper-case level name (e.g. ``"DEBUG"``) or ``None``.

    Returns:
        The matching ``logging`` level; unknown names and ``None`` fall back
        to ``logging.INFO``, exactly as the original if/elif chain did.
    """
    return _LOGGING_LEVEL_BY_NAME.get(level, logging.INFO)
# Create .env file path.
dotenv_path = join(dirname(__file__), '.env')
# Load file from the path.
load_dotenv(dotenv_path)
BOT_TOKEN = getenv('BOT_TOKEN')
# Users who can send commands to bot (e.g. /force) and do other interaction
# NOTE(review): getenv() returns None for missing variables, so
# ADMINS/ANSWERS/NEW_POLL_TIMES/REPEAT_POLL_TIMES raise AttributeError on
# .split() if the .env file omits them - presumably these are required
# settings; confirm.
ADMINS = getenv('ADMINS').split(";;")
CHANNEL_NAME = getenv('CHANNEL_NAME')
GROUP_NAME = getenv('GROUP_NAME')
QUESTION = getenv("QUESTION")
ANSWERS = getenv("ANSWERS").split(";;")
# Times to post new poll (UTC)
NEW_POLL_TIMES = getenv("NEW_POLL_TIMES").split(";;")
# Times to repeat a poll which was posted according to the previous setting (UTC)
REPEAT_POLL_TIMES = getenv("REPEAT_POLL_TIMES").split(";;")
# Timezone name for bot to determine date for poll title.
TIMEZONE = getenv("TIMEZONE") or 'Europe/Minsk'
# Amount of messages in group after previous poll which make bot to be
# allowed to forward poll again
GROUP_MESSAGES_COUNT_THRESHOLD = \
    int(getenv("GROUP_MESSAGES_COUNT_THRESHOLD") or 5)
LOGGING_LEVEL = get_logging_level(getenv("LOGGING_LEVEL"))
11356834 | from datetime import datetime
import numpy as np
from utils import read_instances, get_cost
from best_improvement import grasp, best_improvment
from dynamic_cut_stock import bottom_up
from greedy import greddy
def pipeline(algoritmo, *params):
    """Run the named cutting-stock solver, time it and print its report.

    ``algoritmo`` selects the implementation; ``params`` are forwarded to
    the solver unchanged.
    """
    # Dispatch table mapping each algorithm name to its implementation.
    solvers = {
        'greddy': greddy,
        'grasp': grasp,
        'heuristic': best_improvment,
        'dynamic': bottom_up,
    }
    started_at = datetime.now()
    solution = solvers[algoritmo](*params)
    elapsed = datetime.now() - started_at
    custom_print(algoritmo, solution, elapsed)
def custom_print(algoritmo, result, time):
    """Print a formatted report for one solver run.

    Args:
        algoritmo: Name of the algorithm, used in the banner.
        result: List of bars, each a list of cut lengths.
        time: Elapsed wall-clock time of the run.

    NOTE(review): reads the module-level global ``size_bar`` (bound only in
    the ``__main__`` block) when computing the cost - fragile when imported;
    consider passing it in explicitly.
    """
    print('='*35 + f' {algoritmo} ' + '='*35)
    for i, bar in enumerate(result):
        # Typo fixes in the report text: "Barrra" -> "Barra", "Toal" -> "Total".
        print(f'Barra {i:2} - Total {sum(bar)} - {bar}')
    print(f'\n\nTempo de execução: {time}')
    print(f'Results: {get_cost(result, size_bar)}\n')
    print('=' * 85)
if __name__ == '__main__':
    # Load one benchmark instance and run the selected solvers on it.
    cuts, size_bar, result = read_instances('instancias/Solutionsfaceis/Schwerin1_BPP100.txt')
    cuts = np.array(cuts)
    # pipeline('dynamic', cuts, size_bar)
    # pipeline('grasp', cuts, size_bar)
    pipeline('heuristic', cuts, size_bar, 100)
    # NOTE(review): 'greddy' is called as (size_bar, cuts) while the others
    # receive (cuts, size_bar) - verify the solver signatures.
    pipeline('greddy', size_bar, cuts)
11238505 | <filename>hard-gists/11526013/snippet.py
#!/usr/bin/env python
import subprocess
import itertools
import sys
from south.migration import all_migrations
from south.models import MigrationHistory
def get_migrations():
    """Collect (available, applied) south migration maps in a child process.

    Runs ``get_migrations_task`` in a separate process and returns its
    result from the queue - presumably so Django/south module state is
    re-read from the working tree after each git checkout; confirm.
    """
    from multiprocessing import Process, Queue
    queue = Queue()
    p = Process(target=get_migrations_task, args=(queue,))
    p.start()
    p.join()
    return queue.get()
def get_migrations_task(queue):
    """Worker: put a (available, applied) pair of migration maps on ``queue``.

    Both maps are dicts keyed by app label whose values are lists of
    migration names; ``available`` comes from the migration files known to
    south, ``applied`` from the MigrationHistory table (ordered by name).
    """
    from collections import defaultdict
    southified_apps = list(all_migrations())
    # Every migration shipped on disk, grouped by app label.
    available_migrations = defaultdict(list)
    for migration in itertools.chain.from_iterable(southified_apps):
        available_migrations[migration.app_label()].append(get_migration_location(migration))
    # Every migration recorded as applied in the database.
    applied_migrations = defaultdict(list)
    for history in MigrationHistory.objects.filter(app_name__in=[app.app_label() for app in southified_apps]).order_by('migration'):
        migration = history.get_migration()
        applied_migrations[migration.app_label()].append(get_migration_location(migration))
    queue.put((dict(available_migrations), dict(applied_migrations)))
def get_migration_location(migration):
    """Return the identifying name of a south migration object."""
    return migration.name()
def get_current_branch():
    """Return the name of the currently checked-out git branch.

    NOTE(review): under Python 3 ``check_output`` returns ``bytes`` and so
    would this function; the gist appears to target Python 2 - confirm.
    """
    return subprocess.check_output(["git", "rev-parse", "--abbrev-ref", "HEAD"]).strip()
def checkout(branch):
    """Check out the given git branch (raises CalledProcessError on failure)."""
    subprocess.check_call(["git", "checkout", "%s" % branch])
def migrate(app, migration):
    """Run ``manage.py migrate`` for one app to a specific migration (up or down)."""
    subprocess.check_call(["./manage.py", "migrate", app, migration])
def migrate_all():
    """Apply all outstanding migrations for every app."""
    subprocess.check_call(["./manage.py", "migrate"])
def migrate_branch(target='main', source=None):
    """Roll back migrations that exist only on ``source``, then switch to ``target``.

    Compares the south migration state of two git branches: any migration
    applied on ``source`` but unavailable on ``target`` is rolled back (per
    app, down to the newest migration ``target`` ships) before checking out
    ``target`` and running a full migrate. ``source`` defaults to the
    currently checked-out branch.
    """
    from collections import defaultdict
    if not source:
        source = get_current_branch()
    # Read available/applied migrations as seen from each branch.
    checkout(target)
    dest_avail, dest_applied = get_migrations()
    checkout(source)
    source_avail, source_applied = get_migrations()
    # Migrations applied on source that target does not ship -> to roll back.
    rollbacks = defaultdict(list)
    for app, migrations in source_applied.items():
        if app not in dest_avail:
            rollbacks[app].extend(migrations)
            continue
        for migration in migrations:
            if migration not in dest_avail[app]:
                rollbacks[app].append(migration)
    for app in rollbacks.keys():
        # Roll the app back to the newest migration the target branch has.
        # NOTE(review): apps entirely absent from target yield None here and
        # are skipped, so their schema is left untouched - confirm intended.
        migration = dest_avail.get(app, [None])[-1]
        if not migration:
            continue
        migrate(app, migration)
    checkout(target)
    migrate_all()
if __name__ == '__main__':
    # Usage: script.py <target-branch> [source-branch]
    target = sys.argv[1]
    source = None
    if len(sys.argv) > 2:
        source = sys.argv[2]
    migrate_branch(target, source)
1690428 | <filename>casino/deck.py<gh_stars>0
import random
from collections import deque
from itertools import product, chain
class Deck:
    """Creates a Deck of playing cards."""
    # Each card is a (suite, value) tuple; suites use Discord-style emoji names.
    suites = (":clubs:", ":diamonds:", ":hearts:", ":spades:")
    face_cards = ('King', 'Queen', 'Jack', 'Ace')
    # Blackjack values for face cards (Ace counted low here; see bj_count).
    bj_vals = {'Jack': 10, 'Queen': 10, 'King': 10, 'Ace': 1}
    # War ranking for face cards (Ace high).
    war_values = {'Jack': 11, 'Queen': 12, 'King': 13, 'Ace': 14}
    def __init__(self):
        # The deck starts empty and is lazily (re)filled by new()/_check().
        self._deck = deque()
    def __len__(self):
        return len(self._deck)
    def __str__(self):
        return 'Standard deck of cards with {} cards remaining.'.format(len(self._deck))
    def __repr__(self):
        return 'Deck{!r}'.format(self._deck)
    @property
    def deck(self):
        """Current deque of cards; builds a fresh shuffled deck when empty."""
        if len(self._deck) < 1:
            self.new()
        return self._deck
    def shuffle(self):
        """Shuffle the remaining cards in place."""
        random.shuffle(self._deck)
    def war_count(self, card):
        """Return the card's rank for War (face cards mapped, Ace high).

        Non-face cards miss the lookup and return the raw value, which is
        assumed to be the numeric rank 2-10 - confirm against callers.
        """
        try:
            return self.war_values[card[1]]
        except KeyError:
            return card[1]
    def bj_count(self, hand: list, hole=False):
        """Blackjack value of a hand (or of only the hole card when hole=True).

        An Ace counts as 1 unless promoting one to 11 keeps the total low
        enough (the ``count <= 11`` check); the hole card alone counts an
        Ace as 11.
        """
        hand = self._hand_type(hand)
        if hole:
            card = hand[0][1]
            count = self.bj_vals[card] if isinstance(card, str) else card
            return count if count > 1 else 11
        count = sum([self.bj_vals[y] if isinstance(y, str) else y for x, y in hand])
        if any('Ace' in pair for pair in hand) and count <= 11:
            count += 10
        return count
    @staticmethod
    def fmt_hand(hand: list):
        """Format (suite, value) pairs as 'value suite' display strings."""
        return ['{} {}'.format(y, x) for x, y in hand]
    @staticmethod
    def fmt_card(card):
        """Format one (suite, value) pair as 'value suite'."""
        return '{1} {0}'.format(*card)
    @staticmethod
    def hand_check(hand: list, card):
        """Return True if any card in the hand has the given value (e.g. 'Ace')."""
        return any(x[1] == card for x in hand)
    def split(self, position: int):
        """Cut the deck at ``position`` (rotate that many cards to the back)."""
        self._deck.rotate(-position)
    @staticmethod
    def _true_hand(hand: list):
        # Turn 'value suite' display strings back into [value, suite] lists.
        # NOTE(review): split yields value-first lists while dealt cards are
        # (suite, value) tuples; bj_count reads index 1 as the value, so
        # string hands may be read suite-first - verify the caller format.
        return [x.split(' ') for x in hand]
    def draw(self, top=True):
        """Remove and return one card, from the top of the deck by default."""
        self._check()
        if top:
            card = self._deck.popleft()
        else:
            card = self._deck.pop()
        return card
    def _check(self, num=1):
        # Refuse impossible requests and replenish the deck when it cannot
        # satisfy ``num`` cards.
        if num > 52:
            raise ValueError('Can not exceed deck limit.')
        if len(self._deck) < num:
            self.new()
    def _hand_type(self, hand: list):
        # Accept either dealt (suite, value) tuples or formatted strings.
        # NOTE(review): str.split does not raise ValueError, so the except
        # branch below likely never fires for malformed input - confirm.
        if isinstance(hand[0], tuple):
            return hand
        try:
            return self._true_hand(hand)
        except ValueError:
            raise ValueError('Invalid hand input.')
    def deal(self, num=1, top=True, hand=None):
        """Deal ``num`` cards, appending to ``hand`` (a new list when None)."""
        self._check(num=num)
        if hand is None:
            hand = []
        for x in range(0, num):
            if top:
                hand.append(self._deck.popleft())
            else:
                hand.append(self._deck.pop())
        return hand
    def burn(self, num):
        """Discard ``num`` cards from the top of the deck."""
        self._check(num=num)
        for x in range(0, num):
            del self._deck[0]
    def new(self):
        """Rebuild a full shuffled 52-card deck (ranks 2-10 plus face cards)."""
        cards = product(self.suites, chain(range(2, 11), ('King', 'Queen', 'Jack', 'Ace')))
        self._deck = deque(cards)
        self.shuffle()
4939455 | <filename>tests/apps/hello/__init__.py
#!/usr/bin/env python
import time
import webify
app = webify.defaults.app()
# Controllers
@app.subapp(path='/')
@webify.urlable()
def index(req, p):
    """Root page: writes a static greeting."""
    p(u'Hello, world!')
@app.subapp()
@webify.urlable()
def hello(req, p):
    """Greeting form that echoes the submitted name back.

    NOTE(review): ``name`` is interpolated into the HTML unescaped, so a
    crafted value can inject markup (XSS) - consider HTML-escaping it.
    """
    p(u'<form method="POST">')
    name = req.params.get('name', None)
    if name is None:
        p(u'Hello, world! <br />')
    else:
        p(u'Hello, %(name)s! <br />' % {'name': name})
    p(u'Your name: <input type="text" name="name">')
    p(u'<input type="submit">')
    p(u'</form>')
@app.subapp()
@webify.urlable()
def hello_old(req, p):
    """Redirect to the ``hello`` endpoint."""
    webify.http.status.redirect(hello.url())
# Middleware
from webify.middleware import EvalException
wrapped_app = webify.wsgify(app, EvalException)
# Server
from webify.http import server
if __name__ == '__main__':
    server.serve(wrapped_app, host='127.0.0.1', port=8080)
3247575 | <gh_stars>1-10
from mDateTime import cDate, cDateDuration;
# The rest of the imports are at the end to prevent import loops.
def fxConvertFromJSONData(xStructureDetails, xJSONData, sDataNameInError, s0BasePath, dxInheritingValues):
    """Recursively validate parsed JSON data against a structure spec and convert it.

    ``xStructureDetails`` drives the interpretation:
      * None            -> the value must be None.
      * cDataStructure  -> delegated to the instance's own converter.
      * tuple           -> alternatives; the first structure that parses wins.
      * dict            -> child specs: "name" = required, "?name" = optional,
                           "*" = spec for any unspecified children; children
                           listed in ``dxInheritingValues`` may be omitted
                           and are then inherited.
      * list            -> a JSON list; a single spec applies to every
                           element, or one spec per element when len > 1.
      * str             -> a scalar type tag ("boolean", "unsigned integer",
                           "signed integer", "string", "ascii",
                           "string:<literal>", "path", "date", "duration",
                           "version").

    ``sDataNameInError`` is the dotted path used in exception messages;
    ``s0BasePath`` is prepended to "path" values; ``dxInheritingValues`` is
    mutated as inherited values are overridden by children.

    Raises cJSONDataTypeException when the data does not match the spec.
    """
    if xStructureDetails is None:
        if xJSONData is not None:
            raise cJSONDataTypeException(
                "%s should be None, not %s" % (sDataNameInError, type(xJSONData).__name__),
                {"sName": sDataNameInError, "xValue": xJSONData},
            );
        return None;
    elif isinstance(xStructureDetails, cDataStructure):
        return xStructureDetails.fxConvertFromJSONData(xJSONData, sDataNameInError, s0BasePath, dxInheritingValues);
    elif isinstance(xStructureDetails, tuple):
        # A tuple means a list of possible structures; try each until we find one that works:
        asErrorMessages = [];
        for xPossibleStructure in xStructureDetails:
            try:
                return fxConvertFromJSONData(xPossibleStructure, xJSONData, sDataNameInError, s0BasePath, dxInheritingValues);
            except cJSONDataTypeException as oException:
                asErrorMessages.append(oException.sMessage);
        raise cJSONDataTypeException(
            "%s cannot be parsed in any known way: %s" % (sDataNameInError, ", ".join(asErrorMessages)),
            {"sName": sDataNameInError, "xValue": xJSONData},
        );
    elif isinstance(xStructureDetails, dict):
        if not isinstance(xJSONData, dict):
            raise cJSONDataTypeException(
                "%s should contain a dictionary, not %s" % (sDataNameInError, type(xJSONData).__name__),
                {"sName": sDataNameInError, "xValue": xJSONData},
            );
        dxStructureDetails_by_sChildName = xStructureDetails;
        xStructureDetailsForUnspecifiedChildren = dxStructureDetails_by_sChildName.get("*");
        # "name" is required, "?name" optional, "*" matches anything else.
        asRequiredChildNames = [s for s in dxStructureDetails_by_sChildName.keys() if s != "*" and s[0] != "?"];
        asOptionalChildNames = [s[1:] for s in dxStructureDetails_by_sChildName.keys() if s != "*" and s[0] == "?"];
        dxData = {};
        # We will process inheriting values first, so that they can be updated before we process the remaining
        axOrderedChildren = (
            [(sChildName, xChildValue) for (sChildName, xChildValue) in xJSONData.items() if sChildName in dxInheritingValues] +
            [(sChildName, xChildValue) for (sChildName, xChildValue) in xJSONData.items() if sChildName not in dxInheritingValues]
        );
        for (sChildName, xChildValue) in axOrderedChildren:
            if sChildName in asRequiredChildNames:
                xChildStructureDetails = dxStructureDetails_by_sChildName[sChildName];
                asRequiredChildNames.remove(sChildName);
            elif sChildName in asOptionalChildNames:
                xChildStructureDetails = dxStructureDetails_by_sChildName["?" + sChildName];
                asOptionalChildNames.remove(sChildName);
            elif sChildName in dxInheritingValues:
                # Missing value is not a problem if it can be inherited:
                dxData[sChildName] = dxInheritingValues[sChildName];
                # We applied a value directly, not an xStructureDetails, so there is no
                # need to parse it: we can continue immediately:
                continue;
            elif xStructureDetailsForUnspecifiedChildren:
                xChildStructureDetails = xStructureDetailsForUnspecifiedChildren;
            else:
                raise cJSONDataTypeException(
                    "%s contains a superfluous value named %s" % (sDataNameInError, repr(sChildName)),
                    {"sName": sDataNameInError, "xValue": xJSONData},
                );
            sChildNameInErrors = "%s.%s" % (sDataNameInError, sChildName);
            dxData[sChildName] = fxConvertFromJSONData(xChildStructureDetails, xChildValue, sChildNameInErrors, s0BasePath, dxInheritingValues);
            if sChildName in dxInheritingValues and dxInheritingValues[sChildName] != dxData[sChildName]:
                # If an inherited value is modified children will inherit the modified value:
                dxInheritingValues[sChildName] = dxData[sChildName];
        # We may have to inherit required child names:
        for sChildName in asRequiredChildNames[:]: # loop on a copy as we are modifying the original.
            if sChildName in dxInheritingValues:
                dxData[sChildName] = dxInheritingValues[sChildName];
                asRequiredChildNames.remove(sChildName);
        # All required names should have been found and removed. If any still exist, report one of them:
        if asRequiredChildNames:
            raise cJSONDataTypeException(
                "%s is missing a value named %s" % (sDataNameInError, repr(asRequiredChildNames[0])),
                {"sName": sDataNameInError, "xValue": xJSONData},
            );
        return dxData;
    elif isinstance(xStructureDetails, list):
        if not isinstance(xJSONData, list):
            raise cJSONDataTypeException(
                "%s should contain a list not %s" % (sDataNameInError, type(xJSONData).__name__),
                {"sName": sDataNameInError, "xValue": xJSONData},
            );
        # With more than one element spec, the JSON list length must match exactly.
        if len(xStructureDetails) > 1 and len(xJSONData) != len(xStructureDetails):
            raise cJSONDataTypeException(
                "%s should contain a list with %d values, not %d" % (sDataNameInError, len(xStructureDetails), len(xJSONData)),
                {"sName": sDataNameInError, "xValue": xJSONData},
            );
        axData = [];
        for uIndex in range(len(xJSONData)):
            xElementStructureDetails = xStructureDetails[len(xStructureDetails) > 1 and uIndex or 0];
            sElementNameInErrors = "%s[%d]" % (sDataNameInError, uIndex);
            axData.append(fxConvertFromJSONData(xElementStructureDetails, xJSONData[uIndex], sElementNameInErrors, s0BasePath, dxInheritingValues));
        return axData;
    # Scalar type tags below: validate the JSON value and convert if needed.
    elif xStructureDetails == "boolean":
        if not isinstance(xJSONData, bool):
            raise cJSONDataTypeException(
                "%s should be boolean, not %s" % (sDataNameInError, repr(xJSONData)),
                {"sName": sDataNameInError, "xValue": xJSONData},
            );
        return xJSONData;
    elif xStructureDetails == "unsigned integer":
        if not isinstance(xJSONData, int) or xJSONData < 0:
            raise cJSONDataTypeException(
                "%s should be an unsigned integer, not %s" % (sDataNameInError, type(xJSONData).__name__),
                {"sName": sDataNameInError, "xValue": xJSONData},
            );
        return int(xJSONData);
    elif xStructureDetails == "signed integer":
        if not isinstance(xJSONData, int):
            raise cJSONDataTypeException(
                "%s should be an integer, not %s" % (sDataNameInError, type(xJSONData).__name__),
                {"sName": sDataNameInError, "xValue": xJSONData},
            );
        return int(xJSONData);
    elif xStructureDetails == "string":
        if not isinstance(xJSONData, str):
            raise cJSONDataTypeException(
                "%s should be a string, not %s" % (sDataNameInError, type(xJSONData).__name__),
                {"sName": sDataNameInError, "xValue": xJSONData},
            );
        return xJSONData;
    elif xStructureDetails == "ascii":
        if not isinstance(xJSONData, str):
            raise cJSONDataTypeException(
                "%s should be an ascii string, not %s" % (sDataNameInError, type(xJSONData).__name__),
                {"sName": sDataNameInError, "xValue": xJSONData},
            );
        # Strict encode: raises UnicodeEncodeError for non-ascii characters.
        return bytes(xJSONData, "ascii", "strict");
    elif xStructureDetails.startswith("string:"):
        if not isinstance(xJSONData, str):
            raise cJSONDataTypeException(
                "%s should be a string, not %s" % (sDataNameInError, type(xJSONData).__name__),
                {"sName": sDataNameInError, "xValue": xJSONData},
            );
        # The literal after "string:" is the only accepted value.
        sExpectedString = xStructureDetails[7:];
        if str(xJSONData) != sExpectedString:
            raise cJSONDataTypeException(
                "%s should be the string %s, not %s" % (sDataNameInError, repr(sExpectedString), repr(xJSONData)),
                {"sName": sDataNameInError, "xValue": xJSONData},
            );
        return sExpectedString;
    elif xStructureDetails == "path":
        if not isinstance(xJSONData, str):
            raise cJSONDataTypeException(
                "%s should be a path string, not %s" % (sDataNameInError, type(xJSONData).__name__),
                {"sName": sDataNameInError, "xValue": xJSONData},
            );
        # Relative paths are resolved against s0BasePath (empty when None).
        return str(os.path.join(s0BasePath or "", xJSONData));
    elif xStructureDetails == "date":
        if not isinstance(xJSONData, str):
            raise cJSONDataTypeException(
                "%s should be a date string, not %s" % (sDataNameInError, type(xJSONData).__name__),
                {"sName": sDataNameInError, "xValue": xJSONData},
            );
        oDate = cDate.foFromJSON(xJSONData);
        return oDate;
    elif xStructureDetails == "duration":
        if not isinstance(xJSONData, str):
            raise cJSONDataTypeException(
                "%s should be a duration string, not %s" % (sDataNameInError, type(xJSONData).__name__),
                {"sName": sDataNameInError, "xValue": xJSONData},
            );
        oDateDuration = cDateDuration.foFromJSON(xJSONData);
        return oDateDuration;
    elif xStructureDetails == "version":
        o0Version = cVersion.fo0FromString(xJSONData) if isinstance(xJSONData, str) else None;
        if not o0Version:
            raise cJSONDataTypeException(
                "%s should be a version string, not %s" % (sDataNameInError, type(xJSONData).__name__),
                {"sName": sDataNameInError, "xValue": xJSONData},
            );
        return o0Version;
    # Unknown structure tag: a programming error in the spec, not in the data.
    raise AssertionError("Unhandled structure type %s" % repr(xStructureDetails));
from .cDataStructure import cDataStructure;
from .cVersion import cVersion;
from .mExceptions import *;
| StarcoderdataPython |
11327089 | from tensorflow.keras.layers import BatchNormalization
from tensorflow.keras.layers import Conv2DTranspose
from tensorflow.keras.layers import Concatenate
from tensorflow.keras.layers import Activation
from tensorflow.keras.layers import MaxPool2D
from tensorflow.keras.layers import Conv2D
from tensorflow.keras.layers import Input
from tensorflow.keras.models import Model
def conv_block(x, num_filters, use_bn=True):
    """Two 3x3 same-padded conv layers, each optionally batch-normalized,
    each followed by ReLU.

    Args:
        x: Input tensor.
        num_filters: Filter count for both conv layers.
        use_bn: Insert BatchNormalization between conv and activation.

    Returns:
        Output tensor of the second activation.
    """
    # The original duplicated this conv->bn->relu sequence verbatim; a loop
    # builds the identical layer stack without the repetition.
    for _ in range(2):
        x = Conv2D(num_filters, kernel_size=3, padding="same")(x)
        if use_bn:
            x = BatchNormalization()(x)
        x = Activation("relu")(x)
    return x
def encoder_block(x, num_filters, use_bn=True):
    """Conv block followed by 2x2 max-pooling.

    Returns a (skip_features, downsampled) pair; the first element feeds
    the matching decoder skip connection.
    """
    skip = conv_block(x, num_filters, use_bn)
    downsampled = MaxPool2D(pool_size=(2, 2))(skip)
    return skip, downsampled
def decoder_block(x, skip_feature, num_filters, use_bn=True):
    """Upsample ``x`` 2x with a transposed conv, concatenate the encoder
    skip features, then refine with a conv block."""
    upsampled = Conv2DTranspose(filters=num_filters, kernel_size=(2, 2), strides=2, padding="same")(x)
    merged = Concatenate()([upsampled, skip_feature])
    return conv_block(merged, num_filters, use_bn)
def build_unet(input_shape, filter_sizes, classes, use_bn=True):
    """Assemble a U-Net model.

    Args:
        input_shape: Shape tuple for the Input layer (without batch dim).
        filter_sizes: Filter counts per level; the last entry is the
            bottleneck, all earlier entries produce encoder blocks with
            skip connections.
        classes: 1 -> single sigmoid output channel, otherwise softmax over
            ``classes`` channels.
        use_bn: Toggle batch normalization inside every conv block.

    Returns:
        A compiled-free ``Model`` named "U-Net".
    """
    inputs = Input(input_shape)
    skip_blocks = []
    total_filters = len(filter_sizes)
    for index, f in enumerate(filter_sizes):
        # If it's the first filter, send inputs as x
        if index == 0:
            s, x = encoder_block(inputs, f, use_bn=use_bn)
            skip_blocks.append(s)
        elif index != total_filters-1:
            s, x = encoder_block(x, f, use_bn=use_bn)
            skip_blocks.append(s)
        else:
            # Last filter size is the bottleneck: conv only, no pooling.
            x = conv_block(x, f, use_bn=use_bn)
    # Reverse the skip blocks so that we can match the skip connections
    # with the correct decoded block
    skip_blocks = skip_blocks[::-1]
    for index, f in enumerate(filter_sizes[::-1]):
        # We don't want the first filter as it was only the base layer
        if index == 0:
            continue
        x = decoder_block(x, skip_blocks[index-1], f, use_bn=use_bn)
    if classes == 1:
        outputs = Conv2D(1, 1, padding="same", activation="sigmoid")(x)
    else:
        outputs = Conv2D(classes, 1, padding="same", activation="softmax")(x)
    model = Model(inputs, outputs, name="U-Net")
    return model
| StarcoderdataPython |
5193004 | <filename>transformers/abstract/mixin.py
import logging
from abc import ABC, abstractmethod
# Scientific
import pandas as pd
# Machine Learning
from sklearn.base import TransformerMixin
# Local
from utilities.container import Spans
# ##################################################################
# ABSTRACT FEATURE CLASS
# ##################################################################
class AbstractFeature(TransformerMixin, ABC):
    """Base class for prefix-namespaced, scikit-learn compatible features.

    Subclasses implement the TransformerMixin protocol (``fit`` /
    ``transform``) plus the local ``get_params`` / ``apply`` pair; every
    column a feature emits should be namespaced via :meth:`_prefixed`
    using ``prefix``.
    """

    def __init__(self, prefix: str):
        # Column-name prefix; coerced to str so numeric prefixes also work.
        self.__prefix: str = str(prefix)

    # ##################################################################
    # PROPERTIES
    # ##################################################################
    @property
    def prefix(self) -> str:
        """Read-only column-name prefix of this feature."""
        return self.__prefix

    # ##################################################################
    # SKLEARN TRANSFORMER MIXIN IMPLEMENTATION
    # ##################################################################
    @abstractmethod
    def fit(self, X, y=None):
        """Fit the feature on X (and optionally y); must return ``self``."""
        raise NotImplementedError("Scikit Transformer 'fit' method must be implemented by derived classes.")

    @abstractmethod
    def transform(self, X, y=None, **fit_params):
        """Transform X and return the generated feature columns."""
        raise NotImplementedError("Scikit Transformer 'transform' method must be implemented by derived classes.")

    @abstractmethod
    def fit_transform(self, X, y=None, **fit_params):
        """Fit on X, then transform it (default chaining for subclasses).

        BUGFIX: the original body called ``self.transform()`` with no
        arguments, which always raised TypeError because ``transform``
        requires X.
        """
        return self.fit(X, y).transform(X, y, **fit_params)

    # ##################################################################
    # LOCAL IMPLEMENTATION
    # ##################################################################
    @abstractmethod
    def get_params(self, *args, **kwargs):
        """Return the keyword arguments that :meth:`apply` expects."""
        raise NotImplementedError()

    @abstractmethod
    def apply(self, *args, **kwargs):
        """Apply the feature computation to a dataframe."""
        raise NotImplementedError()

    # ##################################################################
    # COLUMN PREFIX METHODS
    # ##################################################################
    @classmethod
    def _prefixed(cls, *args):
        """Join the given parts with underscores to build a column name."""
        return "_".join([str(a) for a in args])

    # ##################################################################
    # HELPER METHODS: SANITIZERS
    # ##################################################################
    @classmethod
    def _sanitize_spans(cls, spans, default: list = None):
        """Normalize ``spans`` to a sorted list of unique ints.

        Scalars are wrapped in a single-element list; ``default`` is used
        when spans is None.
        """
        if isinstance(spans, (int, float)):
            spans = [spans, ]
        elif spans is None and default is not None:
            spans = default
        assert isinstance(spans, list) or isinstance(spans, Spans), "Spans must be passed as a list of integers."
        output = []
        for s in spans:
            try:
                output.append(int(s))  # cast should avoid typing issues
            except Exception as exc:
                logging.warning(f"Span type casting produced an exception: {exc}")
                # Scikit issue with clone sanity checks (see Keras issue 13586)
                output.append(int(s[0]))
        return sorted(set(output))

    @classmethod
    def _sanitize_columns(cls, columns=None, df: pd.DataFrame = None):
        """Normalize a target-column list, validating against ``df`` if given.

        Returns None (meaning "all columns") when nothing is specified.
        """
        # No target columns means applied to all columns
        if columns is None and df is None:
            logging.warning("Transformation will be applied to all columns (no target specified).")
            return None
        if columns is None and df is not None:
            logging.warning("Transformation will be applied to all columns (dataframe was provided).")
            return df.columns
        # Make sure the type is legitimate
        assert isinstance(columns, list), "Target columns must be passed as a list of strings."
        # Deduplicate and coerce every entry to str
        output = list(set([str(t) for t in columns]))
        if df is not None:
            for c in output:
                assert c in df.columns, f"Required column {c} missing from dataframe"
        return output

    @classmethod
    def _assert_before_applying(cls, df: pd.DataFrame, columns: list, spans: list = None):
        """Assert that ``columns`` exist in ``df`` and spans (if any) are positive."""
        # Must provide some columns; fall back to all of them
        columns = columns or df.columns
        # Take a reference once rather than per iteration
        df_columns = df.columns
        for c in columns:
            assert c in df_columns, f"Transformation cannot be applied on missing column {c}."
        if spans is not None:
            for s in spans:
                assert s > 0, f"Transformation cannot be applied on negative span {s}."

    # ##################################################################
    # LEGACY METHODS
    # ##################################################################
    @classmethod
    def _legacy_sanitize_columns(cls, df: pd.DataFrame, columns: list = None):
        """Older column sanitizer kept for backward compatibility."""
        # Marshall to list of strings
        columns = [str(columns), ] if not isinstance(columns, list) else [str(c) for c in columns]
        # Check that the requested columns actually exist
        if columns:
            already_present = df.columns
            missing = [c for c in columns if c not in already_present]
            assert not missing
        else:
            columns = df.columns
        return list(set(columns))

    def __call__(self, df: pd.DataFrame):
        """Shortcut: apply the feature with its own stored parameters."""
        return self.apply(df=df, **self.get_params())
| StarcoderdataPython |
1721189 | <gh_stars>1-10
import numpy as np
import pandas as pd
class InferredParameter(object):
    """A scalar estimate with optional interval bounds.

    ``estimate`` is a real number (or None), ``bounds`` a two-element list
    whose ends may be None (unbounded), ``inclusive`` a pair of booleans
    telling whether each end of the interval is closed, and ``label`` an
    optional display name.  Every setter validates its argument; on
    failure it prints a message and leaves the stored value unchanged.
    """

    # Public
    def __init__(self):
        self.estimate = None
        self.bounds = [None, None]
        self.inclusive = [True, True]
        self.label = None

    def __repr__(self):
        return "InferredParameter()"

    def __str__(self):
        return """
        {label} parameter
        Estimate: {estimate}
        Bounds : {lbr}{lb}, {ub}{ubr}
        """.format(
            label="Unnamed" if self.label is None else self.label,
            estimate="?" if self.estimate is None else self.estimate,
            lbr="[" if self.inclusive[0] else "(",
            lb="?" if self.bounds[0] is None else self.bounds[0],
            ub="?" if self.bounds[1] is None else self.bounds[1],
            ubr="]" if self.inclusive[1] else ")"
        )

    def set_estimate(self, val):
        """Store ``val`` as the estimate when it is valid and within bounds."""
        if self.__estimate_ok(val):
            self.estimate = val

    def set_bounds(self, val):
        """Store ``val`` as the bounds when it is a valid interval."""
        if self.__bounds_ok(val):
            self.bounds = val

    def set_inclusive(self, val):
        """Store the pair of end-inclusiveness flags when valid."""
        if self.__inclusive_ok(val):
            self.inclusive = val

    def set_label(self, val):
        """Store the display label when it is a string or None."""
        if self.__label_ok(val):
            self.label = val

    # Private
    def __within_bounds(self, estimate, bounds):
        """True when ``estimate`` respects ``bounds`` (or either is unset)."""
        if estimate is None:
            return True
        lower, upper = bounds
        if lower is not None:
            below = estimate < lower if self.inclusive[0] else estimate <= lower
            if below:
                print("Estimate cannot be smaller than lower bound.")
                return False
        if upper is not None:
            above = estimate > upper if self.inclusive[1] else estimate >= upper
            if above:
                print("Estimate cannot be larger than upper bound.")
                return False
        return True

    def __estimate_ok(self, val):
        if val is not None and not isinstance(val, (int, float)):
            print("The `estimate` parameter should be a real scalar or None.")
            return False
        return self.__within_bounds(val, self.bounds)

    def __bounds_ok(self, val):
        if not isinstance(val, list):
            print("The `bounds` parameter should be a list.")
            return False
        if len(val) != 2:
            print("The `bounds` parameter should be of length 2.")
            return False
        if any(e is not None and not isinstance(e, (int, float)) for e in val):
            print("The `bounds` parameter should contain only real values or None.")
            return False
        lower, upper = val
        if isinstance(lower, (int, float)) and isinstance(upper, (int, float)) and lower > upper:
            print("The `bounds` parameter should be an interval. As such, the first value should be smaller than the second value.")
            return False
        return self.__within_bounds(self.estimate, val)

    def __inclusive_ok(self, val):
        if not isinstance(val, list):
            print("The `inclusive` parameter should be a list.")
            return False
        if len(val) != 2:
            print("The `inclusive` parameter should be of length 2.")
            return False
        if not all(isinstance(e, bool) for e in val):
            print("The `inclusive` parameter should contain only booleans.")
            return False
        return True

    def __label_ok(self, val):
        if val is None or isinstance(val, str):
            return True
        print("The `label` parameter should be a string or None.")
        return False
class InferredParameterSetIterator(object):
    """Sequential iterator over an indexable parameter collection.

    The wrapped object only needs ``__len__`` and ``__getitem__``.
    """

    def __init__(self, param_set):
        self._param_set = param_set
        # Position of the next element to hand out.
        self._index = 0

    def __next__(self):
        """Return the next element, raising StopIteration when exhausted."""
        if self._index >= len(self._param_set):
            raise StopIteration
        item = self._param_set[self._index]
        self._index += 1
        return item
class InferredParameterSet(object):
    """An ordered collection of InferredParameter objects.

    Supports len()/indexing/iteration and can expand the parameters into
    a dense grid of combinations via :meth:`create_grid`.
    """
    def __init__(self):
        # Parameters in insertion order.
        self.parameters = []
    def __repr__(self):
        return "InferredParameterSet()"
    def __str__(self):
        # One formatted summary per parameter, joined by blank lines.
        l = ["""
        {label} parameter
        Estimate: {estimate}
        Bounds : {lbr}{lb}, {ub}{ubr}
        """.format(
            label = "Unnamed" if param.label is None else param.label,
            estimate = "?" if param.estimate is None else param.estimate,
            lbr = "[" if param.inclusive[0] else "(",
            lb = "?" if param.bounds[0] is None else param.bounds[0],
            ub = "?" if param.bounds[1] is None else param.bounds[1],
            ubr = "]" if param.inclusive[1] else ")"
        ) for param in self.parameters]
        return "\n\n".join(l)
    def __len__(self):
        return len(self.parameters)
    def __getitem__(self, item):
        return self.parameters[item]
    def __iter__(self):
        '''Return a fresh iterator over the contained parameters.'''
        return InferredParameterSetIterator(self)
    def add_parameter(self, param):
        """Append ``param``; silently refuses non-InferredParameter objects."""
        if not isinstance(param, InferredParameter):
            print("The object to be added is not an InferredParameter().")
            return
        self.parameters += [param]
    def create_grid(self, n = 21):
        """Build a DataFrame with all combinations of per-parameter grids.

        For each parameter, ``n`` values run from the lower bound through
        the estimate (exact midpoint) to the upper bound; the Cartesian
        product of those grids is returned, one column per parameter label.

        NOTE(review): assumes every parameter has a non-None estimate and
        finite bounds -- confirm callers guarantee this.
        """
        # Force n odd so the estimate sits exactly in the middle.
        if n % 2 == 0:
            n += 1
        half_n = int((n - 1) / 2)
        # half_n points below the estimate, the estimate, half_n points above.
        grids = [
            list(np.linspace(param.bounds[0], param.estimate, half_n + 1))[:-1] + \
            [param.estimate] + \
            list(np.linspace(param.estimate, param.bounds[1], half_n + 1))[1:] \
            for param in self.parameters
        ]
        # Cartesian product of all grids: one row per combination.
        M = np.array(np.meshgrid(*grids)).reshape(len(grids), n**len(grids)).T
        return pd.DataFrame(M, columns = [param.label for param in self.parameters])
| StarcoderdataPython |
1874193 | """Module to test and flatten a list into a flat list"""
def isflat(untyped):
    """Return True when *untyped* is a set/list with no nested set/list.

    Anything that is not a set or list is considered trivially flat and
    also yields True.
    """
    if not isinstance(untyped, (set, list)):
        return True
    return not any(isinstance(item, (set, list)) for item in untyped)
def flatten(untyped):
    """Recursively flatten a set or list into a flat list.

    Nested sets/lists are expanded depth-first, preserving left-to-right
    order for lists.  Inputs that are not a set or list are returned
    unchanged.

    BUGFIX: removed a leftover debug ``print('not flat')`` that fired on
    every nested container; also recurse uniformly instead of special
    casing one level of nesting, and iterate directly without copying.
    """
    if not isinstance(untyped, (set, list)):
        return untyped
    flat = []
    for element in untyped:
        if isinstance(element, (set, list)):
            # Nested container: flatten it and splice the result in.
            flat.extend(flatten(element))
        else:
            flat.append(element)
    return flat
##while True:
##    x = input('?')
##    print(isflat(eval(x)))
##    print(flatten(eval(x)))
| StarcoderdataPython |
8076232 | <reponame>Slawomir-Kwiatkowski/fuel_page_parser
import requests
from lxml import html
import matplotlib.pyplot as plt
import unicodedata
import tkinter as tk
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
from datetime import datetime
import os
from PIL import Image, ImageTk
class MainWindow(tk.Frame):
    """Tk application showing wholesale fuel prices scraped from orlen.pl.

    Fetches the price archive for a year, plots it with matplotlib, caches
    the rendered chart as a PNG under ``data/`` and re-uses the cache on
    subsequent searches.  UI strings are Polish (runtime behavior).
    """
    def __init__(self, root):
        super().__init__(root)
        self.root = root
        root.title('Hurtowe ceny paliwa') #Wholesale fuel prices
        self.create_widgets()
        # Fetch the current year's data immediately at startup.
        current_year = datetime.now().year
        self.content = self.my_parser(current_year)
        # NOTE(review): my_parser returns None when the HTTP fetch fails or
        # the page has no table; the subscripts below would then raise
        # TypeError and the app would fail to start -- confirm/handle.
        last_price = self.content['prices'][-1]
        last_price_date = self.content['dates'][-1]
        self.message.set(
            f'Najnowsza cena: {last_price} z dnia: {last_price_date}') #last price
        self.create_chart(self.content)
    def create_widgets(self):
        """Build the menu bar, search entry/button, chart frame and labels."""
        menubar = tk.Menu(self.root)
        self.root.config(menu=menubar)
        file_menu = tk.Menu(menubar, tearoff=0)
        file_menu.add_command(label='Zakończ', command=self._exit)  # "Exit"
        menubar.add_cascade(label="Plik", menu=file_menu) # File menu
        # Container that hosts either the matplotlib canvas or a cached PNG.
        self.chart_canvas = tk.Frame(master=self.root)
        self.chart_canvas.grid(row=1, column=0, columnspan=2)
        self.search_variable = tk.StringVar()
        self.search_entry = tk.Entry(
            self.root, textvariable=self.search_variable)
        self.search_entry.bind('<FocusIn>', self.on_entry_in)
        self.search_entry.bind('<Return>', self.on_entry_return)
        self.search_entry.bind('<KP_Enter>', self.on_entry_return)
        # Grey placeholder text until the entry gains focus.
        self.search_entry.config(fg='grey')
        self.search_variable.set("Podaj rok")  # "Enter a year"
        self.search_entry.grid(row=0, column=0, sticky='E')
        self.button = tk.Button(
            master=self.root, text="Wyświetl", command=self._search)  # "Show"
        self.button.grid(row=0, column=1, pady=10, sticky='W')
        # Status line shown below the chart.
        self.message = tk.StringVar()
        self.label = tk.Label(master=self.root, textvariable=self.message)
        self.label.grid(row=3, column=0, columnspan=2, sticky='W')
        self.copyright = tk.Label(
            master=self.root, text='(C) S.Kwiatkowski 2020')
        self.copyright.grid(row=0, column=1, sticky='E')
    def _exit(self):
        """Quit the event loop and tear down the root window."""
        self.root.quit() # stops mainloop
        self.root.destroy() # this is necessary on Windows
    def on_entry_in(self, event):
        # Clear the grey placeholder on first focus.
        self.search_entry.config(fg='black')
        self.search_variable.set('')
    def on_entry_return(self, event):
        # Enter / keypad-Enter triggers the same action as the button.
        self._search()
    def create_chart(self, content):
        """Plot the parsed price series, cache it as PNG and embed it in Tk.

        ``content`` is the dict produced by my_parser (or None, in which
        case nothing is drawn).
        """
        if content is not None:
            year = content.get('year')
            dates = content.get('dates')
            prices = content.get('prices')
            fig, ax = plt.subplots()
            ax.set_title(f'Cena dla paliwa {self.fuel} - {year} rok') #title: price for fuel and year
            ax.set_xlabel('Data') #Date label
            ax.set_ylabel('Cena') #Price label
            fig.autofmt_xdate()
            ax.grid(True)
            ax.xaxis.set_major_locator(plt.MaxNLocator(10)) #fewer dates
            ax.plot(dates, prices, c='#CA3F62')
            # Cache the rendered chart so later searches can skip re-fetching.
            if not os.path.exists('data'):
                os.makedirs('data')
            fig.savefig(f'data/{self.fuel}-{year}.png')
            canvas = FigureCanvasTkAgg(fig, master=self.chart_canvas)
            canvas.draw()
            canvas.get_tk_widget().grid(row=0, column=0)
    def my_parser(self, year):
        """Fetch and parse the orlen.pl price archive for ``year``.

        Returns a dict with 'year', 'dates' and 'prices' keys, or None
        (implicitly) when the fetch fails or no table data is present.
        Side effects: sets self.fuel, self.content and the status message.
        """
        try:
            page = requests.get(
                f'https://www.orlen.pl/PL/DlaBiznesu/HurtoweCenyPaliw/Strony/archiwum-cen.aspx?Fuel=ONEkodiesel&Year={year}')
        except Exception:
            # Network failure: fall through to the "no data" path below.
            page = False
        if page and page.ok:
            self.message.set('Ok')
            # NFKC normalization folds non-breaking spaces etc. before parsing.
            text = unicodedata.normalize("NFKC", page.text)
            tree = html.fromstring(text)
            table_content = tree.xpath('//tr/td/span/text()')
            if table_content:
                # Reverse so entries run oldest -> newest, drop unit cells.
                table_content = table_content[::-1]
                table_content = [x for x in table_content if x != 'zł/m']
                # print(table_content)
                content = {}
                content['year'] = year
                # Last element (after reversal) is the fuel name header.
                self.fuel = table_content.pop()
                # Remaining cells alternate price, date.
                content['dates'] = table_content[1::2]
                prices = table_content[::2]
                prices = [price.replace(' ', '') for price in prices]
                prices = list(map(int, prices))
                content['prices'] = prices
                self.content = content
                return self.content
            else:
                self.message.set('Brak danych dla podanego roku')  # "No data for that year"
                # Clear any previously displayed chart.
                for child in self.chart_canvas.winfo_children():
                    child.destroy()
    def _search(self):
        """Show the chart for the year typed in the entry (cache first).

        NOTE(review): relies on self.fuel, which is only set after a
        successful my_parser() run -- an early failed fetch would make this
        raise AttributeError.
        """
        year = self.search_variable.get()
        if year.isdigit():
            if os.path.exists(f'data/{self.fuel}-{year}.png'):
                # Cached chart exists: display the PNG directly.
                image = Image.open(f'data/{self.fuel}-{year}.png')
                self.imagetk = ImageTk.PhotoImage(image)
                self.img = tk.Label(image=self.imagetk,
                                    master=self.chart_canvas)
                self.img.grid(row=0, column=0, sticky='NSWE')
                self.message.set('Ok')
            else:
                # No cache: fetch, parse and render from scratch.
                self.create_chart(self.my_parser(year))
            self.search_entry.config(fg='black')
            self.search_variable.set("")
def main():
    """Build the Tk root, attach the main window and run the event loop."""
    root = tk.Tk()
    window = MainWindow(root)
    window.mainloop()


if __name__ == "__main__":
    main()
| StarcoderdataPython |
6413544 | """
Custom exceptions.
"""
class RecordUnknown(Exception):
    """Raised when a requested record cannot be found (inferred from name)."""
    pass
class APIException(Exception):
    """Raised for API-level failures (inferred from name)."""
    pass
| StarcoderdataPython |
365699 | #!/usr/bin/env python3
"""
This is a NodeServer for Wi-Fi enabled Roomba vacuums by fahrer16 (<NAME>)
Based on template for Polyglot v2 written in Python2/3 by Einstein.42 (<NAME>) <EMAIL>
"""
import udi_interface
import sys
import json
from threading import Timer
from roomba import Roomba
LOGGER = udi_interface.LOGGER

# Read the node server metadata at import time.  BUGFIX: use a context
# manager so the file handle is closed deterministically; the original
# json.load(open('server.json')) leaked the handle.
with open('server.json') as _server_json:
    SERVERDATA = json.load(_server_json)
VERSION = SERVERDATA['credits'][0]['version']

# Map of the "phase" strings reported by the Roomba to the integer state
# values exposed to the ISY (GV1 driver).
STATES = { "charge": 1,     #"Charging"
           "new": 2,        #"New Mission"
           "run": 3,        #"Running"
           "resume": 4,     #"Running"
           "hmMidMsn": 5,   #"Recharging"
           "recharge": 6,   #"Recharging"
           "stuck": 7,      #"Stuck"
           "hmUsrDock": 8,  #"User Docking"
           "dock": 9,       #"Docking"
           "dockend": 10,   #"Docking - End Mission"
           "cancelled": 11, #"Cancelled"
           "stop": 12,      #"Stopped"
           "pause": 13,     #"Paused"
           "hmPostMsn": 14, #"End Mission"
           "": 0}           # Unknown / not reported

# STATES values that count as "running" for the ST (On/Off) driver.
RUNNING_STATES = {2, 3, 4, 5, 6}

# Roomba error codes -> human readable text (code 0 means no error).
ERROR_MESSAGES = {
    0: "None",
    1: "Roomba is stuck with its left or right wheel hanging down.",
    2: "The debris extractors can't turn.",
    5: "The left or right wheel is stuck.",
    6: "The cliff sensors are dirty, it is hanging over a drop, "\
        "or it is stuck on a dark surface.",
    8: "The fan is stuck or its filter is clogged.",
    9: "The bumper is stuck, or the bumper sensor is dirty.",
    10: "The left or right wheel is not moving.",
    11: "Roomba has an internal error.",
    14: "The bin has a bad connection to the robot.",
    15: "Roomba has an internal error.",
    16: "Roomba has started while moving or at an angle, or was bumped "\
        "while running.",
    17: "The cleaning job is incomplete.",
    18: "Roomba cannot return to the Home Base or starting position."
}
#class Controller(udi_interface.Controller):
class Controller(object):
    """Polyglot v3 controller: reads Roomba definitions from the custom
    parameters, connects to each vacuum and creates the matching ISY node.

    NOTE(review): shortPoll/query/delete iterate ``self.nodes``, which is
    never assigned on this class (the surrounding TODOs say that logic is
    being moved to the individual nodes) -- confirm before relying on them.
    """

    def __init__(self, polyglot):
        # super().__init__(polyglot)  # class used to derive from udi_interface.Controller
        self.poly = polyglot
        self.name = 'Roomba Bridge'
        self._nodeQueue = []      # node addresses awaiting addition to the ISY
        self._roombas = {}        # address -> connected Roomba client
        self.discoveryTries = 0
        # Placeholder so self.timer always exists; it is replaced by
        # _startRoombaConnectionDelayTimer() during discovery.
        _msg = "Connection timer created for roomba controller"
        self.timer = Timer(1, LOGGER.debug, [_msg])

    def start(self, params):
        """START handler: kick off discovery with the custom parameters."""
        LOGGER.info('Starting Roomba Polyglot v3 NodeServer version {}'.format(VERSION))
        self.connectionTime = 5  # TODO: Add configurable time period here
        self.discover(params)

    # TODO: Move this to each node
    def shortPoll(self):
        # NOTE(review): self.nodes is never set on this class; this will
        # raise AttributeError if invoked as-is.
        for node in self.nodes:
            self.nodes[node].updateInfo()

    def longPoll(self):
        pass

    # TODO: Move this to each node
    def query(self):
        # NOTE(review): same self.nodes caveat as shortPoll().
        for node in self.nodes:
            self.nodes[node].reportDrivers()

    def _addRoombaNodesFromQueue(self):
        """Add any queued Roombas whose first state report has arrived.

        Runs on the connection-delay timer and re-arms itself (up to 3
        attempts) while some roombas are still waiting for state.
        """
        if len(self._nodeQueue) > 0:
            LOGGER.debug('Attempting to add %i roombas that have connected', len(self._nodeQueue))
            # BUGFIX: iterate over a copy -- the loop removes entries from
            # self._nodeQueue, which would otherwise skip elements.
            for _address in list(self._nodeQueue):
                try:
                    if self.poly.getNode(_address) is not None:
                        # Node has already been added, take it out of the queue
                        self._nodeQueue.remove(_address)
                        LOGGER.debug('%s already in ISY', _address)
                    else:
                        _roomba = self._roombas[_address]
                        LOGGER.debug('Processing %s (%s) for addition', _roomba.roombaName, _address)
                        # Info has been received once capabilities are reported:
                        if len(_roomba.master_state["state"]["reported"]["cap"]) > 0:
                            try:
                                _name = str(_roomba.roombaName)
                                LOGGER.debug('Getting capabilities from %s', _name)
                                _hasPose = self._getCapability(_roomba, 'pose')
                                _hasCarpetBoost = self._getCapability(_roomba, 'carpetBoost')
                                _hasBinFullDetect = self._getCapability(_roomba, 'binFullDetect')
                                LOGGER.debug('Capabilities for %s: Position: %s, CarpetBoost: %s, BinFullDetection: %s', _name, str(_hasPose), str(_hasCarpetBoost), str(_hasBinFullDetect))
                                # Pick the most capable node class the vacuum supports.
                                if _hasCarpetBoost:
                                    LOGGER.info('Adding Roomba 980: %s (%s)', _name, _address)
                                    # BUGFIX: was Roomba980(poly.self, ...), which raises
                                    # AttributeError; sibling branches all pass self.poly.
                                    self.poly.addNode(Roomba980(self.poly, _address, _address, _name, _roomba))
                                    self._nodeQueue.remove(_address)
                                elif _hasPose:
                                    LOGGER.info('Adding Series 900 Roomba: %s (%s)', _name, _address)
                                    self.poly.addNode(Series900Roomba(self.poly, _address, _address, _name, _roomba))
                                    self._nodeQueue.remove(_address)
                                elif _hasBinFullDetect:
                                    LOGGER.info('Adding Series 800 Roomba: %s (%s)', _name, _address)
                                    self.poly.addNode(Series800Roomba(self.poly, _address, _address, _name, _roomba))
                                    self._nodeQueue.remove(_address)
                                else:
                                    LOGGER.info('Adding Base Roomba: %s (%s)', _name, _address)
                                    self.poly.addNode(BasicRoomba(self.poly, _address, _address, _name, _roomba))
                                    self._nodeQueue.remove(_address)
                            except Exception as ex:
                                LOGGER.error('Error adding %s after discovery: %s', _name, str(ex))
                        else:
                            # BUGFIX: used _name here, which is unbound on this
                            # path (it is only assigned in the capability try).
                            LOGGER.debug('Information not yet received for %s', _roomba.roombaName)
                except Exception as ex:
                    LOGGER.debug('Information not yet received from %s', _roomba.roombaName)
            if len(self._nodeQueue) > 0 and self.discoveryTries <= 2:
                # Roombas still pending: retry after another connection delay.
                LOGGER.debug('%i roombas are still pending addition', len(self._nodeQueue))
                self.discoveryTries += 1
                self._startRoombaConnectionDelayTimer()
            else:
                LOGGER.debug('No roombas pending addition')

    def discover(self, customParams):
        """Parse custom params named vacuum*/roomba* and connect each vacuum.

        Each value is a JSON object with ip / blid / password / name keys.
        Newly seen roombas are connected and queued; the actual nodes are
        created by _addRoombaNodesFromQueue once state arrives.
        """
        LOGGER.debug('Beginning Discovery on %s', str(self.name))
        self.poly.Notices.clear()
        try:
            _items = 0
            self.discoveryTries = 0
            _params = customParams
            for key, value in _params.items():
                _key = key.lower()
                if _key.startswith(('vacuum', 'roomba')):
                    _items += 1
                    try:
                        if 'ip' in value and 'blid' in value and 'password' in value and 'name' in value:
                            _value = json.loads(value)
                            _ip = _value['ip']
                            _blid = _value['blid']
                            _password = _value['password']
                            _name = _value['name']
                            # Node address: "rm" + last 10 chars of the blid.
                            _address = 'rm' + _blid[-10:].lower()
                            # Check that node hasn't already been added to ISY
                            if self.poly.getNode(_address) is None:
                                # ...and that it isn't already queued for addition
                                if _address not in self._nodeQueue:
                                    LOGGER.debug('Connecting to %s', _name)
                                    _roomba = Roomba(_ip, _blid, _password, roombaName=_name)
                                    _roomba.nodeAddress = _address
                                    _roomba.connect()
                                    # Queue for addition once state arrives.
                                    self._nodeQueue.append(_address)
                                    self._roombas[_address] = _roomba
                                else:
                                    LOGGER.debug('%s already pending addition to ISY. Skipping addition.', _name)
                            else:
                                LOGGER.debug('%s already configured. Skipping addition to ISY.', _name)
                        else:
                            # Malformed entry: don't count it as a configured vacuum.
                            _items -= 1
                    except Exception as ex:
                        LOGGER.error('Error with Roomba Connection: %s', str(ex))
            if _items == 0:
                LOGGER.error('No Roombas are configured in Polyglot. For each Roomba, add a key starting with "vacuum" and a value containing the IP address, BLID, Password, and Name. Example: "{"ip":"192.168.3.36", "blid":"6945841021309640","password":":<PASSWORD>","name":"Upstairs Roomba"}". Note the use of double quotes. Static IP\'s are strongly recommended. See here for instructions how to get BLID and Password: "https://github.com/NickWaterton/Roomba980-Python"')
                self.poly.Notices['cfg'] = 'Please add your Roombas to the custom parameters configuration.'
            elif len(self._nodeQueue) > 0:
                LOGGER.info('%i Roomba\'s identified in configuration', _items)
                self._startRoombaConnectionDelayTimer()
            else:
                LOGGER.debug('Discovery: No new roombas need to be added to ISY')
        except Exception as ex:
            LOGGER.error('Error with Roomba Discovery: %s', str(ex))

    def _startRoombaConnectionDelayTimer(self):
        """(Re)start the timer that retries queued node additions.

        Returns True when the timer was armed, False on error.
        """
        try:
            if self.timer is not None:
                self.timer.cancel()
            self.timer = Timer(self.connectionTime, self._addRoombaNodesFromQueue)
            self.timer.start()
            LOGGER.debug("Starting roomba connection delay timer for %s seconds", str(self.connectionTime))
            return True
        except Exception as ex:
            LOGGER.error('Error starting roomba connection delay timer: %s', str(ex))
            return False

    def _getCapability(self, roomba, capability):
        """True when the roomba reports the named capability as 1.

        If a capability is absent from master_state the vacuum doesn't
        have it; any lookup failure therefore maps to False.
        """
        try:
            return roomba.master_state["state"]["reported"]["cap"][capability] == 1
        except Exception:  # narrowed from a bare except
            return False

    def updateInfo(self):
        pass  # Nothing to update for controller node

    def delete(self):
        """Disconnect and remove all child nodes when the NS is deleted."""
        LOGGER.info('Deleting roomba controller node. Deleting sub-nodes...')
        # NOTE(review): self.nodes / self.address are never set on this
        # class -- confirm this handler against the udi_interface API.
        for node in self.nodes:
            if node.address != self.address:
                self.nodes[node].delete()

    id = 'controller'
    commands = {'DISCOVER': discover}
    drivers = [{'driver': 'ST', 'value': 0, 'uom': 2}]
class BasicRoomba(udi_interface.Node):
    """Base ISY node for all Roombas.

    Provides the start/stop/pause/resume/dock commands and the status
    drivers common to every model (state, connection, battery, bin
    presence, WiFi quality, runtime and error code).  Model-specific
    subclasses extend this class.
    """

    def __init__(self, poly, primary, address, name, roomba):
        super().__init__(poly, primary, address, name)
        self.roomba = roomba      # connected roomba.Roomba client
        self.quality = -1         # last WiFi quality (%) reported to the ISY
        self.connected = False    # last known connection state (for transition logs)
        poly.subscribe(poly.START, self.start, address)
        poly.subscribe(poly.POLL, self.updateInfo)

    def start(self):
        """Node START handler: push an initial status update to the ISY."""
        self.updateInfo(polltype='shortPoll')

    def _sendRoombaCommand(self, command, description):
        """Send a simple command string to the vacuum, logging the outcome.

        Returns True on success, False if the underlying client raised.
        """
        LOGGER.info('Received %s Command on %s', description, self.name)
        try:
            self.roomba.send_command(command)
            return True
        except Exception as ex:
            LOGGER.error('Error processing Roomba %s Command on %s: %s', description, self.name, str(ex))
            return False

    def setOn(self, command):
        """Roomba "start" (begin cleaning) -- not the node start() above."""
        return self._sendRoombaCommand("start", "Start")

    def setOff(self, command):
        """Roomba "stop" command."""
        return self._sendRoombaCommand("stop", "Stop")

    def setPause(self, command):
        """Roomba "pause" command."""
        return self._sendRoombaCommand("pause", "Pause")

    def setResume(self, command):
        """Roomba "resume" command."""
        return self._sendRoombaCommand("resume", "Resume")

    def setDock(self, command):
        """Roomba "dock" (return to base) command."""
        return self._sendRoombaCommand("dock", "Dock")

    def _updateBasicProperties(self):
        """Refresh the ISY drivers common to all models.

        Each driver has its own try/except so one missing key in the
        reported state doesn't block the remaining updates.
        """
        # ST (On/Off) and GV1 state enumeration, both from the mission phase
        try:
            _state = self.roomba.master_state["state"]["reported"]["cleanMissionStatus"]["phase"]
            if _state in STATES:
                self.setDriver('GV1', STATES[_state])
                _running = (STATES[_state] in RUNNING_STATES)
                self.setDriver('ST', (0, 100)[int(_running)])
        except Exception as ex:
            LOGGER.error("Error updating current state on %s: %s", self.name, str(ex))
        # GV2, Connected (True/False); log only the transitions
        try:
            _connected = self.roomba.roomba_connected
            if self.connected and not _connected:
                LOGGER.error('Roomba Disconnected: %s', self.name)
            elif _connected and not self.connected:
                LOGGER.info('Roomba Connected: %s', self.name)
            self.connected = _connected
            self.setDriver('GV2', int(_connected))
        except Exception as ex:
            LOGGER.error("Error updating connection status on %s: %s", self.name, str(ex))
        # BATLVL, Battery (Percent)
        try:
            _batPct = self.roomba.master_state["state"]["reported"]["batPct"]
            self.setDriver('BATLVL', _batPct)
        except Exception as ex:
            LOGGER.error("Error updating battery Percentage on %s: %s", self.name, str(ex))
        # GV3, Bin Present (True/False)
        try:
            _binPresent = self.roomba.master_state["state"]["reported"]["bin"]["present"]
            self.setDriver('GV3', int(_binPresent))
        except Exception as ex:
            LOGGER.error("Error updating Bin Present on %s: %s", self.name, str(ex))
        # GV4, WiFi signal: RSSI (dBm) mapped linearly onto 0-100%
        try:
            _rssi = self.roomba.master_state["state"]["reported"]["signal"]["rssi"]
            _quality = int(max(min(2. * (_rssi + 100.), 100), 0))
            # Quality changes very frequently; only report changes > 15%.
            if abs(_quality - self.quality) > 15:
                self.setDriver('GV4', _quality)
                self.quality = _quality
        except Exception as ex:
            LOGGER.error("Error updating WiFi Signal Strength on %s: %s", self.name, str(ex))
        # GV5, lifetime runtime in hours (one decimal)
        try:
            _hr = self.roomba.master_state["state"]["reported"]["bbrun"]["hr"]
            _min = self.roomba.master_state["state"]["reported"]["bbrun"]["min"]
            _runtime = round(_hr + _min / 60., 1)
            self.setDriver('GV5', _runtime)
        except Exception as ex:
            LOGGER.error("Error updating runtime on %s: %s", self.name, str(ex))
        # GV6, Error Active (True/False) and ALARM error code enumeration
        try:
            if "error" in self.roomba.master_state["state"]["reported"]["cleanMissionStatus"]:
                _error = self.roomba.master_state["state"]["reported"]["cleanMissionStatus"]["error"]
            else:
                _error = 0
            self.setDriver('GV6', int(_error != 0))
            self.setDriver('ALARM', _error)
        except Exception as ex:
            LOGGER.error("Error updating current Error Status on %s: %s", self.name, str(ex))

    def delete(self):
        """Stop communicating with the vacuum when the node is removed."""
        try:
            LOGGER.info("Deleting %s and attempting to stop communication to roomba", self.name)
            self.roomba.disconnect()
        except Exception as ex:
            LOGGER.error("Error attempting to stop communication to %s: %s", self.name, str(ex))

    def updateInfo(self, polltype):
        """POLL handler; drivers are refreshed on shortPoll only.

        BUGFIX: the original used identity comparison
        (``polltype is 'shortPoll'``), which only works by accident of
        CPython string interning; compare by value instead.
        """
        if polltype == 'shortPoll':
            self._updateBasicProperties()

    def query(self, command=None):
        """QUERY handler: refresh everything and re-report all drivers."""
        self.updateInfo(polltype='shortPoll')
        self.reportDrivers()

    drivers = [{'driver': 'ST', 'value': 0, 'uom': 78},     #Running (On/Off)
               {'driver': 'GV1', 'value': 0, 'uom': 25},    #State (Enumeration)
               {'driver': 'GV2', 'value': 0, 'uom': 2},     #Connected (True/False)
               {'driver': 'BATLVL', 'value': 0, 'uom': 51}, #Battery (percent)
               {'driver': 'GV3', 'value': 0, 'uom': 2},     #Bin Present (True/False)
               {'driver': 'GV4', 'value': 0, 'uom': 51},    #Wifi Signal (Percent)
               {'driver': 'GV5', 'value': 0, 'uom': 20},    #RunTime (Hours)
               {'driver': 'GV6', 'value': 0, 'uom': 2},     #Error Active (True/False)
               {'driver': 'ALARM', 'value': 0, 'uom': 25}   #Current Error (Enumeration)
               ]
    id = 'basicroomba'
    commands = {
                    'DON': setOn, 'DOF': setOff, 'PAUSE': setPause, 'RESUME': setResume, 'DOCK': setDock, 'QUERY': query
                }
class Series800Roomba(BasicRoomba):
    """800-series Roomba node.

    Extends BasicRoomba with bin-full detection (GV7) and the "finish
    mission when the bin is full" preference (GV8 / SET_BIN_FINISH).
    """

    # The pass-throughs below are redefined (behavior unchanged) only so the
    # method objects can be referenced in this class's `commands` mapping.
    def setOn(self, command):
        return super().setOn(command)

    def setOff(self, command):
        return super().setOff(command)

    def setPause(self, command):
        return super().setPause(command)

    def setResume(self, command):
        return super().setResume(command)

    def setDock(self, command):
        return super().setDock(command)

    def _update800SeriesProperties(self):
        """Refresh the 800-series specific drivers (bin status/settings)."""
        # GV7, Bin Full (True/False)
        try:
            _binFull = self.roomba.master_state["state"]["reported"]["bin"]["full"]
            self.setDriver('GV7', int(_binFull))
        except Exception as ex:
            LOGGER.error("Error updating Bin Full on %s: %s", self.name, str(ex))
        # GV8, Behavior on Full Bin (1=Finish mission, 0=Continue)
        try:
            _finishOnBinFull = self.roomba.master_state["state"]["reported"]["binPause"]
            self.setDriver('GV8', int(_finishOnBinFull))
        except Exception as ex:
            LOGGER.error("Error updating Behavior on Bin Full Setting on %s: %s", self.name, str(ex))

    def updateInfo(self, polltype):
        """POLL handler: base drivers (shortPoll only) plus bin drivers.

        NOTE(review): the bin drivers are refreshed on every poll type,
        unlike the base drivers -- confirm this asymmetry is intended.
        """
        super().updateInfo(polltype)
        self._update800SeriesProperties()

    def query(self, command=None):
        """QUERY handler.

        BUGFIX: the original called ``super().updateInfo()`` without the
        required polltype argument (TypeError) and never reported the
        drivers; delegate to the base query, which does both.
        """
        super().query(command)

    def setBinFinish(self, command=None):
        """SET_BIN_FINISH handler: 0 = Continue on full bin, 1 = Finish."""
        LOGGER.info('Received Command to set Bin Finish on %s: %s', self.name, str(command))
        try:
            _setting = command.get('value')
            # The vacuum stores the preference as a string flag.
            self.roomba.set_preference("binPause", ("false", "true")[int(_setting)])  # 0=Continue, 1=Finish
        except Exception as ex:
            LOGGER.error("Error setting Bin Finish Parameter on %s: %s", self.name, str(ex))

    drivers = [{'driver': 'ST', 'value': 0, 'uom': 78},     #Running (On/Off)
               {'driver': 'GV1', 'value': 0, 'uom': 25},    #State (Enumeration)
               {'driver': 'GV2', 'value': 0, 'uom': 2},     #Connected (True/False)
               {'driver': 'BATLVL', 'value': 0, 'uom': 51}, #Battery (percent)
               {'driver': 'GV3', 'value': 0, 'uom': 2},     #Bin Present (True/False)
               {'driver': 'GV4', 'value': 0, 'uom': 51},    #Wifi Signal (Percent)
               {'driver': 'GV5', 'value': 0, 'uom': 20},    #RunTime (Hours)
               {'driver': 'GV6', 'value': 0, 'uom': 2},     #Error Active (True/False)
               {'driver': 'ALARM', 'value': 0, 'uom': 25},  #Current Error (Enumeration)
               {'driver': 'GV7', 'value': 0, 'uom': 2},     #Bin Full (True/False)
               {'driver': 'GV8', 'value': 0, 'uom': 25}     #Behavior on Full Bin (Enumeration - Finish/Continue)
               ]
    id = 'series800roomba'
    commands = {
                    'DON': setOn, 'DOF': setOff, 'PAUSE': setPause, 'RESUME': setResume, 'DOCK': setDock, 'QUERY': query, 'SET_BIN_FINISH': setBinFinish
                }
class Series900Roomba(Series800Roomba):
    """
    This class builds upon the Series800Roomba class by adding position tracking present in the 900 series roombas
    """

    def setOn(self, command):
        # Although method is not different than the BasicRoomba class, this needs
        # to be defined so that it can be specified in "commands"
        super().setOn(command)

    def setOff(self, command):
        # Although method is not different than the BasicRoomba class, this needs
        # to be defined so that it can be specified in "commands"
        super().setOff(command)

    def setPause(self, command):
        # Although method is not different than the BasicRoomba class, this needs
        # to be defined so that it can be specified in "commands"
        super().setPause(command)

    def setResume(self, command):
        # Although method is not different than the BasicRoomba class, this needs
        # to be defined so that it can be specified in "commands"
        super().setResume(command)

    def setDock(self, command):
        # Although method is not different than the BasicRoomba class, this needs
        # to be defined so that it can be specified in "commands"
        super().setDock(command)

    def _update900SeriesProperties(self):
        """Push 900-series-only status (pose and cleaning preferences) from the
        device's reported state into the corresponding ISY drivers.

        Each value is read independently inside its own try/except so that one
        missing key in master_state does not prevent the others from updating.
        """
        # LOGGER.debug('Setting Position status for %s', self.name)
        # GV9, X Position
        try:
            _x = self.roomba.master_state["state"]["reported"]["pose"]["point"]["x"]
            self.setDriver('GV9', int(_x))
        except Exception as ex:
            LOGGER.error("Error updating X Position on %s: %s", self.name, str(ex))
        # GV10, Y Position
        try:
            _y = self.roomba.master_state["state"]["reported"]["pose"]["point"]["y"]
            self.setDriver('GV10', int(_y))
        except Exception as ex:
            LOGGER.error("Error updating Y Position on %s: %s", self.name, str(ex))
        # ROTATE, Theta (degrees)
        try:
            _theta = self.roomba.master_state["state"]["reported"]["pose"]["theta"]
            self.setDriver('ROTATE', int(_theta))
        except Exception as ex:
            LOGGER.error("Error updating Theta Position on %s: %s", self.name, str(ex))
        # LOGGER.debug('Getting Passes setting for %s', self.name)
        # GV11, Passes Setting (0="", 1=One, 2=Two, 3=Automatic)
        try:
            _noAutoPasses = self.roomba.master_state["state"]["reported"]["noAutoPasses"]
            _twoPass = self.roomba.master_state["state"]["reported"]["twoPass"]
            # twoPass is only meaningful when automatic passes are disabled.
            if not _noAutoPasses:
                self.setDriver('GV11', 3)
            elif _twoPass:
                self.setDriver('GV11', 2)
            else:
                self.setDriver('GV11', 1)
        except Exception as ex:
            LOGGER.error("Error updating Passes Setting on %s: %s", self.name, str(ex))
        # GV12, Edge Clean (On/Off)
        try:
            _openOnly = self.roomba.master_state["state"]["reported"]["openOnly"]
            # (100, 0)[bool]: openOnly True -> 0 (Edge Clean off),
            # openOnly False -> 100 (Edge Clean on).
            self.setDriver('GV12', (100,0)[int(_openOnly)]) #note 0,100 order (openOnly True means Edge Clean is Off)
        except Exception as ex:
            LOGGER.error("Error updating Edge Clean Setting on %s: %s", self.name, str(ex))

    def updateInfo(self, polltype):
        # Refresh the base-class drivers first, then the 900-series extras.
        super().updateInfo(polltype)
        self._update900SeriesProperties()

    def query(self, command=None):
        # NOTE(review): this calls the *parent's* updateInfo with no polltype
        # argument, so it (a) skips _update900SeriesProperties and (b) only
        # works if Series800Roomba.updateInfo declares a default for polltype
        # (not visible here) -- otherwise this raises TypeError. Confirm and
        # consider calling self.updateInfo(...) instead.
        super().updateInfo()

    def setBinFinish(self,command=None):
        # Although method is not different than the BasicRoomba class, this needs
        # to be defined so that it can be specified in "commands"
        super().setBinFinish(command)

    def setPasses(self,command=None):
        """ISY command handler: set the number of cleaning passes.

        command['value']: 1=One, 2=Two, 3=Automatic. Preference values are
        sent as the strings "true"/"false". Other values are ignored.
        """
        LOGGER.info('Received Command to set Number of Passes on %s: %s', self.name, str(command))
        try:
            _setting = int(command.get('value'))
            if _setting == 1: #One Pass
                self.roomba.set_preference("noAutoPasses", "true")
                self.roomba.set_preference("twoPass", "false")
            elif _setting == 2: #Two Passes
                self.roomba.set_preference("noAutoPasses", "true")
                self.roomba.set_preference("twoPass", "true")
            elif _setting == 3: #Automatic Passes
                # twoPass is irrelevant once automatic passes are enabled.
                self.roomba.set_preference("noAutoPasses", "false")
        except Exception as ex:
            LOGGER.error("Error setting Number of Passes on %s: %s", self.name, str(ex))

    def setEdgeClean(self,command=None):
        """ISY command handler: enable (100) or disable (other) edge cleaning.

        Note the inversion: the device preference is "openOnly", which is the
        opposite of "edge clean enabled".
        """
        LOGGER.info('Received Command to set Edge Clean on %s: %s', self.name, str(command))
        try:
            _setting = int(command.get('value'))
            if _setting == 100:
                self.roomba.set_preference("openOnly", "false")
            else:
                self.roomba.set_preference("openOnly", "true")
        except Exception as ex:
            LOGGER.error("Error setting Edge Clean on %s: %s", self.name, str(ex))

    # Driver table reported to ISY; uom codes are ISY unit-of-measure ids.
    drivers = [{'driver': 'ST', 'value': 0, 'uom': 78}, #Running (On/Off)
               {'driver': 'GV1', 'value': 0, 'uom': 25}, #State (Enumeration)
               {'driver': 'GV2', 'value': 0, 'uom': 2}, #Connected (True/False)
               {'driver': 'BATLVL', 'value': 0, 'uom': 51}, #Battery (percent)
               {'driver': 'GV3', 'value': 0, 'uom': 2}, #Bin Present (True/False)
               {'driver': 'GV4', 'value': 0, 'uom': 51}, #Wifi Signal (Percent)
               {'driver': 'GV5', 'value': 0, 'uom': 20}, #RunTime (Hours)
               {'driver': 'GV6', 'value': 0, 'uom':2}, #Error Active (True/False)
               {'driver': 'ALARM', 'value': 0, 'uom':25}, #Current Error (Enumeration)
               {'driver': 'GV7', 'value': 0, 'uom': 2}, #Bin Present (True/False)
               {'driver': 'GV8', 'value': 0, 'uom': 25}, #Behavior on Full Bin (Enumeration - Finish/Continue)
               {'driver': 'GV9', 'value': 0, 'uom': 56}, #X Position (Raw Value)
               {'driver': 'GV10', 'value': 0, 'uom': 56}, #Y Position (Raw Value)
               {'driver': 'ROTATE', 'value': 0, 'uom': 14}, #Theta (Degrees)
               {'driver': 'GV11', 'value': 0, 'uom': 25}, #Passes Setting (Enumeration, One/Two/Automatic)
               {'driver': 'GV12', 'value': 0, 'uom': 78} #Edge Clean (On/Off)
               ]
    # Node-definition ID; must match the nodedef entry in the Polyglot profile.
    id = 'series900roomba'
    commands = {
        'DON': setOn, 'DOF': setOff, 'PAUSE': setPause, 'RESUME': setResume,
        'DOCK': setDock, 'QUERY': query, 'SET_BIN_FINISH': setBinFinish,
        'SET_PASSES': setPasses, 'SET_EDGE_CLEAN': setEdgeClean
    }
class Roomba980(Series900Roomba):
    """
    This class builds upon the Series900Roomba class by adding fan settings (Carpet Boost)
    """

    def setOn(self, command):
        # Although method is not different than the BasicRoomba class, this needs
        # to be defined so that it can be specified in "commands"
        super().setOn(command)

    def setOff(self, command):
        # Although method is not different than the BasicRoomba class, this needs
        # to be defined so that it can be specified in "commands"
        super().setOff(command)

    def setPause(self, command):
        # Although method is not different than the BasicRoomba class, this needs
        # to be defined so that it can be specified in "commands"
        super().setPause(command)

    def setResume(self, command):
        # Although method is not different than the BasicRoomba class, this needs
        # to be defined so that it can be specified in "commands"
        super().setResume(command)

    def setDock(self, command):
        # Although method is not different than the BasicRoomba class, this needs
        # to be defined so that it can be specified in "commands"
        super().setDock(command)

    def _update980Properties(self):
        """Push the 980-only fan-speed status into the GV13 driver.

        GV13 values: 0="", 1=Eco, 2=Automatic (carpet boost), 3=Performance.
        """
        # LOGGER.debug('Updating status for Roomba 980 %s', self.name)
        try:
            _carpetBoost = self.roomba.master_state["state"]["reported"]["carpetBoost"]
            _vacHigh = self.roomba.master_state["state"]["reported"]["vacHigh"]
            # carpetBoost takes precedence; vacHigh alone means fixed high speed.
            if _carpetBoost:
                self.setDriver('GV13', 2)
            elif _vacHigh:
                self.setDriver('GV13', 3)
            else:
                self.setDriver('GV13', 1)
        except Exception as ex:
            LOGGER.error("Error updating Fan Speed Setting on %s: %s", self.name, str(ex))

    def updateInfo(self, polltype):
        # Refresh inherited drivers first, then the 980-specific fan status.
        super().updateInfo(polltype)
        self._update980Properties()

    def query(self, command=None):
        # BUGFIX: previously this called super().updateInfo() with no
        # arguments, which always raised TypeError because
        # Series900Roomba.updateInfo(self, polltype) has no default for
        # polltype. Calling self.updateInfo also refreshes the 980-specific
        # drivers (_update980Properties) that super() would have skipped.
        # NOTE(review): polltype=None assumes the base class treats None like
        # an ordinary refresh -- confirm against Series800Roomba.updateInfo.
        self.updateInfo(None)

    def setBinFinish(self,command=None):
        # Although method is not different than the BasicRoomba class, this needs
        # to be defined so that it can be specified in "commands"
        super().setBinFinish(command)

    def setPasses(self,command=None):
        # Although method is not different than the BasicRoomba class, this needs
        # to be defined so that it can be specified in "commands"
        super().setPasses(command)

    def setEdgeClean(self,command=None):
        # Although method is not different than the BasicRoomba class, this needs
        # to be defined so that it can be specified in "commands"
        super().setEdgeClean(command)

    def setFanSpeed(self,command=None):
        """ISY command handler: set the vacuum fan speed.

        command['value']: 1=Eco, 2=Automatic (carpet boost), 3=Performance.
        Preference values are sent as the strings "true"/"false"; unknown
        values are ignored.
        """
        LOGGER.info('Received Command to set Fan Speed on %s: %s', self.name, str(command))
        try:
            _setting = int(command.get('value'))
            #(0="", 1=Eco, 2=Automatic, 3=Performance)
            if _setting == 1: #Eco
                LOGGER.info('Setting %s fan speed to "Eco"', self.name)
                self.roomba.set_preference("carpetBoost", "false")
                self.roomba.set_preference("vacHigh", "false")
            elif _setting == 2: #Automatic
                LOGGER.info('Setting %s fan speed to "Automatic" (Carpet Boost Enabled)', self.name)
                self.roomba.set_preference("carpetBoost", "true")
                self.roomba.set_preference("vacHigh", "false")
            elif _setting == 3: #Performance
                # BUGFIX: log message previously misspelled "Perfomance".
                LOGGER.info('Setting %s fan speed to "Performance" (High Fan Speed)', self.name)
                self.roomba.set_preference("carpetBoost", "false")
                self.roomba.set_preference("vacHigh", "true")
        except Exception as ex:
            # BUGFIX: error message previously said "Number of Passes"
            # (copy-paste from Series900Roomba.setPasses).
            LOGGER.error("Error setting Fan Speed on %s: %s", self.name, str(ex))

    # Driver table reported to ISY; uom codes are ISY unit-of-measure ids.
    drivers = [{'driver': 'ST', 'value': 0, 'uom': 78}, #Running (On/Off)
               {'driver': 'GV1', 'value': 0, 'uom': 25}, #State (Enumeration)
               {'driver': 'GV2', 'value': 0, 'uom': 2}, #Connected (True/False)
               {'driver': 'BATLVL', 'value': 0, 'uom': 51}, #Battery (percent)
               {'driver': 'GV3', 'value': 0, 'uom': 2}, #Bin Present (True/False)
               {'driver': 'GV4', 'value': 0, 'uom': 51}, #Wifi Signal (Percent)
               {'driver': 'GV5', 'value': 0, 'uom': 20}, #RunTime (Hours)
               {'driver': 'GV6', 'value': 0, 'uom':2}, #Error Active (True/False)
               {'driver': 'ALARM', 'value': 0, 'uom':25}, #Current Error (Enumeration)
               {'driver': 'GV7', 'value': 0, 'uom': 2}, #Bin Present (True/False)
               {'driver': 'GV8', 'value': 0, 'uom': 25}, #Behavior on Full Bin (Enumeration - Finish/Continue)
               {'driver': 'GV9', 'value': 0, 'uom': 56}, #X Position (Raw Value)
               {'driver': 'GV10', 'value': 0, 'uom': 56}, #Y Position (Raw Value)
               {'driver': 'ROTATE', 'value': 0, 'uom': 14}, #Theta (Degrees)
               {'driver': 'GV11', 'value': 0, 'uom': 25}, #Passes Setting (Enumeration, One/Two/Automatic)
               {'driver': 'GV12', 'value': 0, 'uom': 78}, #Edge Clean (On/Off)
               {'driver': 'GV13', 'value': 0, 'uom': 25} #Fan Speed Setting (Enumeration)
               ]
    # Node-definition ID; must match the nodedef entry in the Polyglot profile.
    id = 'roomba980'
    commands = {
        'DON': setOn, 'DOF': setOff, 'PAUSE': setPause, 'RESUME': setResume,
        'DOCK': setDock, 'QUERY': query, 'SET_BIN_FINISH': setBinFinish,
        'SET_PASSES': setPasses, 'SET_EDGE_CLEAN': setEdgeClean,
        'SET_FAN_SPEED': setFanSpeed
    }
# Module-level handles shared between the entry point and the Polyglot
# parameter callback.
control = None
polyglot = None


def handleParameters(params):
    # Callback invoked by Polyglot when custom parameters are delivered or
    # changed; (re)starts the controller with the new parameter set.
    global control
    control.start(params)


if __name__ == "__main__":
    try:
        polyglot = udi_interface.Interface([])
        polyglot.start()
        control = Controller(polyglot)
        # Re-run the controller start logic whenever custom params update.
        polyglot.subscribe(polyglot.CUSTOMPARAMS, handleParameters)
        polyglot.ready()
        polyglot.updateProfile()
        polyglot.setCustomParamsDoc()
        # Blocks until Polyglot shuts the node server down.
        polyglot.runForever()
    except (KeyboardInterrupt, SystemExit):
        sys.exit(0)
| StarcoderdataPython |
1951982 | # -*- coding: utf-8 -*-
# Copyright (c) 2014 Docker.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
docker_registry.core.driver
~~~~~~~~~~~~~~~~~~~~~~~~~~
This file defines:
* a generic interface that describes a uniform "driver"
* methods to register / get these "connections"
Pretty much, the purpose of this is just to abstract the underlying storage
implementation, for a given scheme.
"""
__all__ = ["fetch", "available", "Base"]
import logging
import pkgutil
import docker_registry.drivers
from .compat import json
from .exceptions import NotImplementedError
logger = logging.getLogger(__name__)
class Base(object):
    """Storage is a convenience class that describes methods that must be
    implemented by any backend.
    You should inherit (or duck type) this if you are implementing your own.

    :param path: base path under which the storage operates (backend-specific)
    :type path: unicode
    :param config: storage configuration
    :type config: dict

    .. note:: the previous docstring documented ``host``/``port``/``basepath``
       parameters that ``__init__`` does not actually accept; it has been
       corrected to match the real signature.
    """
    # Useful if we want to change those locations later without rewriting
    # the code which uses Storage
    repositories = 'repositories'
    images = 'images'
    # Set the IO buffer to 128kB
    buffer_size = 128 * 1024
    # By default no storage plugin supports it
    supports_bytes_range = False

    def __init__(self, path=None, config=None):
        pass

    # FIXME(samalba): Move all path resolver in each module (out of the base)
    def images_list_path(self, namespace, repository):
        """Return the path of the images list for a repository."""
        repository_path = self.repository_path(
            namespace=namespace, repository=repository)
        return '{0}/_images_list'.format(repository_path)

    def image_json_path(self, image_id):
        """Return the path of an image's json metadata."""
        return '{0}/{1}/json'.format(self.images, image_id)

    def image_mark_path(self, image_id):
        """Return the path of the in-progress marker for an image upload."""
        return '{0}/{1}/_inprogress'.format(self.images, image_id)

    def image_checksum_path(self, image_id):
        """Return the path of an image's checksum file."""
        return '{0}/{1}/_checksum'.format(self.images, image_id)

    def image_layer_path(self, image_id):
        """Return the path of an image's layer tarball."""
        return '{0}/{1}/layer'.format(self.images, image_id)

    def image_ancestry_path(self, image_id):
        """Return the path of an image's ancestry list."""
        return '{0}/{1}/ancestry'.format(self.images, image_id)

    def image_files_path(self, image_id):
        """Return the path of an image's files listing."""
        return '{0}/{1}/_files'.format(self.images, image_id)

    def image_diff_path(self, image_id):
        """Return the path of an image's diff data."""
        return '{0}/{1}/_diff'.format(self.images, image_id)

    def repository_path(self, namespace, repository):
        """Return the base path of a repository."""
        return '{0}/{1}/{2}'.format(
            self.repositories, namespace, repository)

    def tag_path(self, namespace, repository, tagname=None):
        """Return a repository's path, or a specific tag's path if given."""
        repository_path = self.repository_path(
            namespace=namespace, repository=repository)
        if not tagname:
            return repository_path
        return '{0}/tag_{1}'.format(repository_path, tagname)

    def repository_json_path(self, namespace, repository):
        """Return the path of a repository's json metadata."""
        repository_path = self.repository_path(
            namespace=namespace, repository=repository)
        return '{0}/json'.format(repository_path)

    def repository_tag_json_path(self, namespace, repository, tag):
        """Return the path of a tag's json metadata.

        Note: 'tag{x}_json' (no underscore) intentionally differs from
        tag_path's 'tag_{x}'; changing it would break existing stores.
        """
        repository_path = self.repository_path(
            namespace=namespace, repository=repository)
        return '{0}/tag{1}_json'.format(repository_path, tag)

    def index_images_path(self, namespace, repository):
        """Return the path of a repository's index images listing."""
        repository_path = self.repository_path(
            namespace=namespace, repository=repository)
        return '{0}/_index_images'.format(repository_path)

    def private_flag_path(self, namespace, repository):
        """Return the path of a repository's private-flag marker file."""
        repository_path = self.repository_path(
            namespace=namespace, repository=repository)
        return '{0}/_private'.format(repository_path)

    def is_private(self, namespace, repository):
        """Return True if the repository is flagged private."""
        return self.exists(self.private_flag_path(namespace, repository))

    def content_redirect_url(self, path):
        """Get a URL for content at path
        Get a URL to which client can be redirected to get the content from
        the path. Return None if not supported by this engine.
        Note, this feature will only be used if the `storage_redirect`
        configuration key is set to `True`.
        """
        return None

    def get_json(self, path):
        """Read the content at path and parse it as JSON."""
        return json.loads(self.get_unicode(path))

    def put_json(self, path, content):
        """Serialize content as JSON and write it at path."""
        return self.put_json_raw(path, content) if False else self.put_unicode(path, json.dumps(content))

    def get_unicode(self, path):
        """Read the content at path as UTF-8 text."""
        return self.get_bytes(path).decode('utf8')

    def put_unicode(self, path, content):
        """Write text content at path, UTF-8 encoded."""
        return self.put_bytes(path, content.encode('utf8'))

    def get_bytes(self, path):
        """Read raw bytes at path."""
        return self.get_content(path)

    def put_bytes(self, path, content):
        """Write raw bytes at path."""
        return self.put_content(path, content)

    def get_content(self, path):
        """Method to get content
        """
        raise NotImplementedError(
            "You must implement get_content(self, path) on your storage %s" %
            self.__class__.__name__)

    def put_content(self, path, content):
        """Method to put content
        """
        raise NotImplementedError(
            "You must implement put_content(self, path, content) on %s" %
            self.__class__.__name__)

    def stream_read(self, path, bytes_range=None):
        """Method to stream read
        """
        # BUGFIX: the message previously contained a stray ", ," in the
        # advertised signature.
        raise NotImplementedError(
            "You must implement stream_read(self, path, bytes_range=None) " +
            "on your storage %s" %
            self.__class__.__name__)

    def stream_write(self, path, fp):
        """Method to stream write
        """
        raise NotImplementedError(
            "You must implement stream_write(self, path, fp) " +
            "on your storage %s" %
            self.__class__.__name__)

    def list_directory(self, path=None):
        """Method to list directory
        """
        raise NotImplementedError(
            "You must implement list_directory(self, path=None) " +
            "on your storage %s" %
            self.__class__.__name__)

    def exists(self, path):
        """Method to test exists
        """
        raise NotImplementedError(
            "You must implement exists(self, path) on your storage %s" %
            self.__class__.__name__)

    def remove(self, path):
        """Method to remove
        """
        raise NotImplementedError(
            "You must implement remove(self, path) on your storage %s" %
            self.__class__.__name__)

    def get_size(self, path):
        """Method to get the size
        """
        raise NotImplementedError(
            "You must implement get_size(self, path) on your storage %s" %
            self.__class__.__name__)
def fetch(name):
    """Return the Storage class of the requested driver.

    The only public method you should access if you are not implementing
    your own driver - use this to get a backend class to which you can
    delegate actual requests.

    .. note:: the previous docstring documented ``host``/``port``/``basepath``
       parameters (copied from elsewhere) and claimed an instance was
       returned; this function takes a driver *name* and returns a *class*.

    :param name: name of the storage driver to load (e.g. "file", "s3")
    :type name: unicode
    :returns: the driver's ``Storage`` class, with its ``scheme`` attribute
              set to ``name``
    :rtype: class
    :raises NotImplementedError: if the named driver is not installed
    """
    try:
        # XXX The noqa below is because of hacking being non-sensical on this
        module = __import__('docker_registry.drivers.%s' % name, globals(),
                            locals(), ['Storage'], 0)  # noqa
        logger.debug("Will return docker-registry.drivers.%s.Storage" % name)
    except ImportError as e:
        # logger.warn is a deprecated alias of logger.warning
        logger.warning("Got exception: %s" % e)
        raise NotImplementedError(
            """You requested storage driver docker_registry.drivers.%s
            which is not installed. Try `pip install docker-registry-driver-%s`
            or check your configuration. The following are currently
            available on your system: %s. Exception was: %s"""
            % (name, name, available(), e)
        )
    module.Storage.scheme = name
    return module.Storage
def available():
    """Return the names of all driver modules found under
    docker_registry.drivers."""
    driver_names = []
    for _importer, module_name, _is_pkg in pkgutil.iter_modules(
            docker_registry.drivers.__path__):
        driver_names.append(module_name)
    return driver_names
| StarcoderdataPython |
4969313 | <reponame>regisb/richie<filename>tests/apps/courses/test_templates_program_detail.py<gh_stars>0
"""
End-to-end tests for the program detail view
"""
import re
from cms.test_utils.testcases import CMSTestCase
from richie.apps.core.factories import UserFactory
from richie.apps.courses.factories import CourseFactory, ProgramFactory
class ProgramCMSTestCase(CMSTestCase):
    """
    End-to-end test suite to validate the content and Ux of the program detail view
    """

    def test_templates_program_detail_cms_published_content(self):
        """
        Validate that the important elements are displayed on a published program page
        """
        courses = CourseFactory.create_batch(4)
        program = ProgramFactory(
            page_title="Preums",
            fill_cover=True,
            fill_excerpt=True,
            fill_body=True,
            fill_courses=courses,
        )
        page = program.extended_object
        # Publish only 2 out of 4 courses
        courses[0].extended_object.publish("en")
        courses[1].extended_object.publish("en")
        # The unpublished objects may have been published and unpublished which puts them in a
        # status different from objects that have never been published.
        # We want to test both cases.
        courses[2].extended_object.publish("en")
        courses[2].extended_object.unpublish("en")
        # The page should not be visible before it is published
        url = page.get_absolute_url()
        response = self.client.get(url)
        self.assertEqual(response.status_code, 404)
        # Publish the program and ensure the content is correct
        page.publish("en")
        response = self.client.get(url)
        self.assertContains(
            response, "<title>Preums</title>", html=True, status_code=200
        )
        self.assertContains(
            response, '<h1 class="subheader__title">Preums</h1>', html=True
        )
        # Only published courses should be present on the page
        for course in courses[:2]:
            self.assertContains(
                response,
                '<p class="course-glimpse__title">{:s}</p>'.format(
                    course.extended_object.get_title()
                ),
                html=True,
            )
        # Neither the never-published nor the unpublished course should appear
        for course in courses[-2:]:
            self.assertNotContains(response, course.extended_object.get_title())

    def test_templates_program_detail_cms_draft_content(self):
        """
        A staff user should see a draft program including draft elements.
        """
        user = UserFactory(is_staff=True, is_superuser=True)
        self.client.login(username=user.username, password="password")
        courses = CourseFactory.create_batch(4)
        program = ProgramFactory(
            page_title="Preums",
            fill_cover=True,
            fill_excerpt=True,
            fill_body=True,
            fill_courses=courses,
        )
        page = program.extended_object
        # Publish only 2 out of 4 courses
        courses[0].extended_object.publish("en")
        courses[1].extended_object.publish("en")
        # The unpublished objects may have been published and unpublished which puts them in a
        # status different from objects that have never been published.
        # We want to test both cases.
        courses[3].extended_object.publish("en")
        courses[3].extended_object.unpublish("en")
        # The page should be visible as draft to the staff user
        url = page.get_absolute_url()
        response = self.client.get(url)
        self.assertContains(
            response, "<title>Preums</title>", html=True, status_code=200
        )
        self.assertContains(
            response, '<h1 class="subheader__title">Preums</h1>', html=True
        )
        # Draft and published courses should be present on the page
        for course in courses[:2]:
            self.assertContains(
                response,
                '<a class="course-glimpse" '
                'href="{:s}"'.format(course.extended_object.get_absolute_url()),
            )
            self.assertContains(
                response,
                '<p class="course-glimpse__title">{:s}</p>'.format(
                    course.extended_object.get_title()
                ),
                html=True,
            )
        # The never-published course is rendered with the "draft" modifier class
        self.assertContains(
            response,
            '<a class="course-glimpse course-glimpse--draft" '
            'href="{:s}"'.format(courses[2].extended_object.get_absolute_url()),
        )
        self.assertContains(
            response,
            '<p class="course-glimpse__title">{:s}</p>'.format(
                courses[2].extended_object.get_title()
            ),
            html=True,
        )
        # The unpublished course should not be present on the page
        self.assertNotContains(response, courses[3].extended_object.get_title())

    def test_templates_program_detail_cms_no_course(self):
        """
        Validate that a program without course doesn't show the course section
        on a published program page but does on the draft program page
        """
        program = ProgramFactory(
            page_title="Preums",
            fill_cover=True,
            fill_excerpt=True,
            fill_body=True,
        )
        page = program.extended_object
        # Publish the program and ensure the content is absent
        page.publish("en")
        url = page.get_absolute_url()
        response = self.client.get(url)
        self.assertNotContains(
            response, '<div class="program-detail__courses program-detail__block">'
        )
        # The content should be visible as draft to the staff user
        user = UserFactory(is_staff=True, is_superuser=True)
        self.client.login(username=user.username, password="password")
        response = self.client.get(url)
        self.assertContains(
            response, '<div class="program-detail__courses program-detail__block">'
        )

    def test_templates_program_detail_cms_published_content_opengraph(self):
        """The program logo should be used as opengraph image."""
        program = ProgramFactory(
            fill_cover={
                "original_filename": "cover.jpg",
                "default_alt_text": "my cover",
            },
            should_publish=True,
        )
        url = program.extended_object.get_absolute_url()
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, '<meta property="og:type" content="website" />')
        self.assertContains(
            response, f'<meta property="og:url" content="http://example.com{url:s}" />'
        )
        # The cover image is thumbnailed to the 1200x630 opengraph size; the
        # generated file name contains a hash, hence the regex match.
        pattern = (
            r'<meta property="og:image" content="http://example.com'
            r"/media/filer_public_thumbnails/filer_public/.*cover\.jpg__1200x630"
        )
        self.assertIsNotNone(re.search(pattern, str(response.content)))
        self.assertContains(
            response, '<meta property="og:image:width" content="1200" />'
        )
        self.assertContains(
            response, '<meta property="og:image:height" content="630" />'
        )
| StarcoderdataPython |
#!/usr/bin/env python3

import argparse
import sys

from . import loader

# Command-line entry point (run as `python -m csgomenumaker <file>`):
# parse the path of the menu definition file and hand it to the loader,
# which generates the CSGO console menu.
parser = argparse.ArgumentParser(
    prog="csgomenumaker",
    description="Generate a console menu for CSGO."
)
parser.add_argument(
    "file"
)

args = parser.parse_args(sys.argv[1:])

# Loading happens as a side effect of constructing the Loader.
loader.Loader(args.file)
| StarcoderdataPython |
#!/usr/bin/env python
# os and sys were imported by the original script; kept although unused here.
import os
import sys

try:
    from setuptools import setup, find_packages
except ImportError:
    # BUGFIX: the previous fallback was
    # `from distutils.core import setup, find_packages`, but distutils.core
    # does not provide find_packages, so the fallback could never work.
    # distutils only supplies setup(); provide a minimal find_packages
    # substitute listing this project's single package.
    from distutils.core import setup

    def find_packages():
        return ['centreonapi']

# Read the long-description sources, closing the files deterministically.
with open('README.rst') as readme_file:
    readme = readme_file.read()

doclink = """
Documentation
-------------
The full documentation is at http://centreonapi.rtfd.org."""

with open('HISTORY.rst') as history_file:
    history = history_file.read().replace('.. :changelog:', '')

setup(
    name='centreonapi',
    version='0.2.0',
    description='Centreon Api for use Webservice in Centreon Web 2.8.0 or later',
    long_description=readme + '\n\n' + doclink + '\n\n' + history,
    author='<NAME>',
    author_email='<EMAIL>',
    url='https://github.com/guillaumewatteeux/centreon-sdk-python',
    packages=find_packages(),
    package_dir={'centreonapi': 'centreonapi'},
    include_package_data=True,
    install_requires=[
        'requests',
        'bs4',
    ],
    license='Apache-2.0',
    zip_safe=False,
    keywords='centreonapi',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Natural Language :: English',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
    ],
)
| StarcoderdataPython |
9719479 | <reponame>yaelmi3/backslash<gh_stars>10-100
"""SCM Info
Revision ID: 37bc6a190f
Revises: <PASSWORD>
Create Date: 2015-10-03 23:08:48.308287
"""
# revision identifiers, used by Alembic.
revision = '37bc6a1<PASSWORD>'
down_revision = '3<PASSWORD>'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add SCM (source control) metadata columns to the ``test`` table."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('test', sa.Column('file_hash', sa.String(length=40), nullable=True))
    op.add_column('test', sa.Column('scm', sa.String(length=5), nullable=True))
    op.add_column('test', sa.Column('scm_dirty', sa.Boolean(), server_default='false', nullable=True))
    op.add_column('test', sa.Column('scm_revision', sa.String(length=40), nullable=True))
    ### end Alembic commands ###
def downgrade():
    """Drop the SCM metadata columns added by :func:`upgrade`."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('test', 'scm_revision')
    op.drop_column('test', 'scm_dirty')
    op.drop_column('test', 'scm')
    op.drop_column('test', 'file_hash')
    ### end Alembic commands ###
| StarcoderdataPython |
1654522 | <reponame>DadeCoderh/starlingx-stagingm<filename>dcmanager/db/api.py
# Copyright (c) 2015 Ericsson AB.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Copyright (c) 2017 Wind River Systems, Inc.
#
# The right to copy, distribute, modify, or otherwise make use
# of this software may be licensed only pursuant to the terms
# of an applicable Wind River license agreement.
#
'''
Interface for database access.
SQLAlchemy is currently the only supported backend.
'''
from oslo_config import cfg
from oslo_db import api
from dcmanager.common import consts
CONF = cfg.CONF
_BACKEND_MAPPING = {'sqlalchemy': 'dcmanager.db.sqlalchemy.api'}
IMPL = api.DBAPI.from_config(CONF, backend_mapping=_BACKEND_MAPPING)
def get_engine():
    """Return the database engine from the configured backend."""
    return IMPL.get_engine()


def get_session():
    """Return a database session from the configured backend."""
    return IMPL.get_session()
# subcloud db methods
###################
def subcloud_db_model_to_dict(subcloud):
    """Convert subcloud db model to dictionary.

    Dictionary keys use the dashed API naming style while the model
    attributes use underscores.
    """
    return {
        "id": subcloud.id,
        "name": subcloud.name,
        "description": subcloud.description,
        "location": subcloud.location,
        "software-version": subcloud.software_version,
        "management-state": subcloud.management_state,
        "availability-status": subcloud.availability_status,
        "management-subnet": subcloud.management_subnet,
        "management-start-ip": subcloud.management_start_ip,
        "management-end-ip": subcloud.management_end_ip,
        "management-gateway-ip": subcloud.management_gateway_ip,
        "systemcontroller-gateway-ip": subcloud.systemcontroller_gateway_ip,
        "created-at": subcloud.created_at,
        "updated-at": subcloud.updated_at,
    }
def subcloud_create(context, name, description, location, software_version,
                    management_subnet, management_gateway_ip,
                    management_start_ip, management_end_ip,
                    systemcontroller_gateway_ip):
    """Create a subcloud.

    Delegates to the configured backend (sqlalchemy); all arguments are
    passed through unchanged.
    """
    return IMPL.subcloud_create(context, name, description, location,
                                software_version,
                                management_subnet, management_gateway_ip,
                                management_start_ip, management_end_ip,
                                systemcontroller_gateway_ip)
def subcloud_get(context, subcloud_id):
    """Retrieve a subcloud or raise if it does not exist."""
    return IMPL.subcloud_get(context, subcloud_id)


def subcloud_get_with_status(context, subcloud_id):
    """Retrieve a subcloud and all endpoint sync statuses."""
    return IMPL.subcloud_get_with_status(context, subcloud_id)


def subcloud_get_by_name(context, name):
    """Retrieve a subcloud by name or raise if it does not exist."""
    return IMPL.subcloud_get_by_name(context, name)


def subcloud_get_all(context):
    """Retrieve all subclouds."""
    return IMPL.subcloud_get_all(context)


def subcloud_get_all_with_status(context):
    """Retrieve all subclouds and sync statuses."""
    return IMPL.subcloud_get_all_with_status(context)


def subcloud_update(context, subcloud_id, management_state=None,
                    availability_status=None, software_version=None,
                    description=None, location=None, audit_fail_count=None):
    """Update a subcloud or raise if it does not exist.

    NOTE(review): fields left as None are presumably left unchanged by the
    backend implementation -- confirm in the sqlalchemy api.
    """
    return IMPL.subcloud_update(context, subcloud_id, management_state,
                                availability_status, software_version,
                                description, location, audit_fail_count)


def subcloud_destroy(context, subcloud_id):
    """Destroy the subcloud or raise if it does not exist."""
    return IMPL.subcloud_destroy(context, subcloud_id)
###################
def subcloud_status_create(context, subcloud_id, endpoint_type):
    """Create a subcloud status for an endpoint_type."""
    return IMPL.subcloud_status_create(context, subcloud_id, endpoint_type)
def subcloud_status_db_model_to_dict(subcloud_status):
    """Convert subcloud status db model to dictionary.

    A falsy input yields the placeholder entry (id 0, status "unknown").
    """
    if not subcloud_status:
        return {"subcloud_id": 0, "sync_status": "unknown"}
    return {"subcloud_id": subcloud_status.subcloud_id,
            "sync_status": subcloud_status.sync_status}
def subcloud_endpoint_status_db_model_to_dict(subcloud_status):
    """Convert endpoint subcloud db model to dictionary.

    A falsy input yields an empty dictionary.
    """
    if not subcloud_status:
        return {}
    return {"endpoint_type": subcloud_status.endpoint_type,
            "sync_status": subcloud_status.sync_status}
def subcloud_status_get(context, subcloud_id, endpoint_type):
    """Retrieve the subcloud status for an endpoint
    Will raise if subcloud does not exist.
    """
    return IMPL.subcloud_status_get(context, subcloud_id, endpoint_type)


def subcloud_status_get_all(context, subcloud_id):
    """Retrieve all statuses for a subcloud."""
    return IMPL.subcloud_status_get_all(context, subcloud_id)


def subcloud_status_get_all_by_name(context, name):
    """Retrieve all statuses for a subcloud by name."""
    return IMPL.subcloud_status_get_all_by_name(context, name)


def subcloud_status_update(context, subcloud_id, endpoint_type, sync_status):
    """Update the status of a subcloud or raise if it does not exist."""
    return IMPL.subcloud_status_update(context, subcloud_id, endpoint_type,
                                       sync_status)


def subcloud_status_destroy_all(context, subcloud_id):
    """Destroy all the statuses for a subcloud
    Will raise if subcloud does not exist.
    """
    return IMPL.subcloud_status_destroy_all(context, subcloud_id)
###################
def sw_update_strategy_db_model_to_dict(sw_update_strategy):
    """Convert sw update db model to dictionary."""
    return {
        "id": sw_update_strategy.id,
        "type": sw_update_strategy.type,
        "subcloud-apply-type": sw_update_strategy.subcloud_apply_type,
        "max-parallel-subclouds": sw_update_strategy.max_parallel_subclouds,
        "stop-on-failure": sw_update_strategy.stop_on_failure,
        "state": sw_update_strategy.state,
        "created-at": sw_update_strategy.created_at,
        "updated-at": sw_update_strategy.updated_at,
    }
def sw_update_strategy_create(context, type, subcloud_apply_type,
                              max_parallel_subclouds, stop_on_failure, state):
    """Create a sw update."""
    return IMPL.sw_update_strategy_create(context, type, subcloud_apply_type,
                                          max_parallel_subclouds,
                                          stop_on_failure, state)


def sw_update_strategy_get(context):
    """Retrieve a sw update or raise if it does not exist."""
    return IMPL.sw_update_strategy_get(context)


def sw_update_strategy_update(context, state=None):
    """Update a sw update or raise if it does not exist."""
    return IMPL.sw_update_strategy_update(context, state)


def sw_update_strategy_destroy(context):
    """Destroy the sw update or raise if it does not exist."""
    return IMPL.sw_update_strategy_destroy(context)
###################
def strategy_step_db_model_to_dict(strategy_step):
    """Convert patch strategy db model to dictionary.

    Steps without an associated subcloud belong to the system controller.
    """
    if strategy_step.subcloud is None:
        cloud_name = consts.SYSTEM_CONTROLLER_NAME
    else:
        cloud_name = strategy_step.subcloud.name
    return {
        "id": strategy_step.id,
        "cloud": cloud_name,
        "stage": strategy_step.stage,
        "state": strategy_step.state,
        "details": strategy_step.details,
        "started-at": strategy_step.started_at,
        "finished-at": strategy_step.finished_at,
        "created-at": strategy_step.created_at,
        "updated-at": strategy_step.updated_at,
    }
def strategy_step_get(context, subcloud_id):
    """Retrieve the patch strategy step for a subcloud ID.

    Will raise if the subcloud does not exist.
    """
    return IMPL.strategy_step_get(context, subcloud_id)
def strategy_step_get_by_name(context, name):
    """Retrieve the patch strategy step for a subcloud name."""
    return IMPL.strategy_step_get_by_name(context, name)
def strategy_step_get_all(context):
    """Retrieve all patch strategy steps."""
    return IMPL.strategy_step_get_all(context)
def strategy_step_create(context, subcloud_id, stage, state, details):
    """Create a patch strategy step (delegates to the backend IMPL)."""
    return IMPL.strategy_step_create(context, subcloud_id, stage, state,
                                     details)
def strategy_step_update(context, subcloud_id, stage=None, state=None,
                         details=None, started_at=None, finished_at=None):
    """Update a patch strategy step or raise if it does not exist."""
    return IMPL.strategy_step_update(context, subcloud_id, stage, state,
                                     details, started_at, finished_at)
def strategy_step_destroy_all(context):
    """Destroy all the patch strategy steps."""
    return IMPL.strategy_step_destroy_all(context)
###################
def sw_update_opts_w_name_db_model_to_dict(sw_update_opts, subcloud_name):
    """Convert sw update options db model plus subcloud name to dictionary."""
    attr_for_key = (
        ("id", "id"),
        ("subcloud-id", "subcloud_id"),
        ("storage-apply-type", "storage_apply_type"),
        ("compute-apply-type", "compute_apply_type"),
        ("max-parallel-computes", "max_parallel_computes"),
        ("alarm-restriction-type", "alarm_restriction_type"),
        ("default-instance-action", "default_instance_action"),
        ("created-at", "created_at"),
        ("updated-at", "updated_at"),
    )
    result = {key: getattr(sw_update_opts, attr) for key, attr in attr_for_key}
    # The caller supplies the subcloud name separately from the model.
    result["name"] = subcloud_name
    return result
def sw_update_opts_create(context, subcloud_id, storage_apply_type,
                          compute_apply_type, max_parallel_computes,
                          alarm_restriction_type, default_instance_action):
    """Create sw update options for a subcloud (delegates to IMPL)."""
    return IMPL.sw_update_opts_create(context, subcloud_id,
                                      storage_apply_type,
                                      compute_apply_type,
                                      max_parallel_computes,
                                      alarm_restriction_type,
                                      default_instance_action)
def sw_update_opts_get(context, subcloud_id):
    """Retrieve sw update options for a subcloud."""
    return IMPL.sw_update_opts_get(context, subcloud_id)
def sw_update_opts_get_all_plus_subcloud_info(context):
    """Retrieve sw update options plus subcloud info."""
    return IMPL.sw_update_opts_get_all_plus_subcloud_info(context)
def sw_update_opts_update(context, subcloud_id,
                          storage_apply_type=None,
                          compute_apply_type=None,
                          max_parallel_computes=None,
                          alarm_restriction_type=None,
                          default_instance_action=None):
    """Update sw update options or raise if they do not exist.

    Parameters left as None are passed through unchanged to the backend.
    """
    return IMPL.sw_update_opts_update(context, subcloud_id,
                                      storage_apply_type,
                                      compute_apply_type,
                                      max_parallel_computes,
                                      alarm_restriction_type,
                                      default_instance_action)
def sw_update_opts_destroy(context, subcloud_id):
    """Destroy sw update options or raise if they do not exist."""
    return IMPL.sw_update_opts_destroy(context, subcloud_id)
###################
def sw_update_opts_default_create(context, storage_apply_type,
                                  compute_apply_type, max_parallel_computes,
                                  alarm_restriction_type,
                                  default_instance_action):
    """Create the system-wide default sw update options."""
    return IMPL.sw_update_opts_default_create(context,
                                              storage_apply_type,
                                              compute_apply_type,
                                              max_parallel_computes,
                                              alarm_restriction_type,
                                              default_instance_action)
def sw_update_opts_default_get(context):
    """Retrieve the default sw update options."""
    return IMPL.sw_update_opts_default_get(context)
def sw_update_opts_default_update(context,
                                  storage_apply_type=None,
                                  compute_apply_type=None,
                                  max_parallel_computes=None,
                                  alarm_restriction_type=None,
                                  default_instance_action=None):
    """Update the default sw update options."""
    return IMPL.sw_update_opts_default_update(context,
                                              storage_apply_type,
                                              compute_apply_type,
                                              max_parallel_computes,
                                              alarm_restriction_type,
                                              default_instance_action)
def sw_update_opts_default_destroy(context):
    """Destroy the default sw update options or raise if they do not exist."""
    return IMPL.sw_update_opts_default_destroy(context)
###################
def db_sync(engine, version=None):
    """Migrate the database to `version` or the most recent version."""
    return IMPL.db_sync(engine, version=version)
def db_version(engine):
    """Display the current database version."""
    return IMPL.db_version(engine)
| StarcoderdataPython |
12809174 | <gh_stars>1-10
# UCF Senior Design 2017-18
# Group 38
import cv2
import unittest
import utils.image_man as im
GOOD_EXIF = 'tests/images/test_good_exif.JPG'
EMPTY_EXIF = 'tests/images/test_empty_exif.JPG'
NONEXIF = 'tests/images/test_nonexif_image_format.png'
class ImageManipulationTestCase(unittest.TestCase):
    """Tests for the EXIF helpers in `image_man.py`."""
    # Removed the empty setUp/tearDown no-ops and the commented-out
    # test_proper_image_crop stub (dead code).

    def test_empty_exif(self):
        """An image with empty EXIF data should lead to a NoneType return."""
        image = cv2.imread(EMPTY_EXIF)
        exif_data = im.exif(EMPTY_EXIF, image)
        self.assertIsNone(exif_data)

    def test_proper_exif_loading(self):
        """An image with EXIF data should return a bytes object."""
        image = cv2.imread(GOOD_EXIF)
        exif_data = im.exif(GOOD_EXIF, image)
        self.assertIsInstance(exif_data, bytes)

    def test_nonexif_image_format(self):
        """An image format that does not typically support EXIF should lead
        to a NoneType return."""
        image = cv2.imread(NONEXIF)
        exif_data = im.exif(NONEXIF, image)
        self.assertIsNone(exif_data)
if __name__ == '__main__':
unittest.main()
| StarcoderdataPython |
88977 | <reponame>TJCSec/rcds<gh_stars>0
from textwrap import dedent
from typing import Any, Dict, List, Optional
import yaml
from jinja2 import Environment, PackageLoader, filters
# Shared Jinja environment for the k8s backend templates. autoescape stays
# off because the output is YAML manifests, not HTML; trim/lstrip keep block
# tags from injecting stray whitespace into the rendered YAML.
jinja_env = Environment(
    loader=PackageLoader("rcds.backends.k8s", "templates"),
    autoescape=False,
    trim_blocks=True,
    lstrip_blocks=True,
)
def jinja_filter_indent(data: str, *args, **kwargs) -> str:
    """Dedent *data*, then re-indent it via Jinja's built-in ``indent`` filter.

    Extra positional/keyword arguments are forwarded to ``filters.do_indent``.
    """
    return filters.do_indent(dedent(data), *args, **kwargs)
def jinja_filter_yaml(data: Dict[str, Any], indent: Optional[int] = None) -> str:
    """Serialize *data* to YAML, optionally indenting every line.

    The trailing newline emitted by ``yaml.dump`` is stripped so the result
    can be embedded inline in a template.
    """
    output = yaml.dump(data).strip()
    if indent is not None:
        output = jinja_filter_indent(output, indent)
    return output
def jinja_filter_pick(data: Dict[str, Any], keys: List[str]) -> Dict[str, Any]:
    """Return a copy of *data* restricted to the entries named in *keys*."""
    picked: Dict[str, Any] = {}
    for key, value in data.items():
        if key in keys:
            picked[key] = value
    return picked
def jinja_filter_omit(data: Dict[str, Any], keys: List[str]) -> Dict[str, Any]:
    """Return a copy of *data* with the entries named in *keys* removed."""
    kept = ((key, value) for key, value in data.items() if key not in keys)
    return dict(kept)
# Register the custom filters under the names the templates use. "indent"
# and "yaml" shadow/augment the stock Jinja filters on purpose.
jinja_env.filters["indent"] = jinja_filter_indent
jinja_env.filters["yaml"] = jinja_filter_yaml
jinja_env.filters["quote"] = lambda s: repr(str(s))  # Python-style quoting of any value
jinja_env.filters["pick"] = jinja_filter_pick
jinja_env.filters["omit"] = jinja_filter_omit
| StarcoderdataPython |
9665825 | <reponame>fossabot/autofocus
from pathlib import Path
import requests
BASE_URL = "http://localhost:8000"
def test_sample_predict_request():
    """POST a sample raccoon photo to /predict and expect a confident score."""
    filepath = Path(__file__).resolve().parents[1] / "gallery" / "raccoons.jpeg"
    # Open inside a context manager so the file handle is always closed
    # (the original leaked it).
    with open(filepath, "rb") as f:
        response = requests.post(f"{BASE_URL}/predict", files={"file": f})
    assert response.json()["raccoon"] > 0.9
def test_sample_predict_request_JPG():
    """POST a .JPG (uppercase extension) image and expect a 'deer' score."""
    filepath = Path(__file__).resolve().parents[1] / "gallery" / "fawn.JPG"
    # Context manager closes the file handle (the original leaked it).
    with open(filepath, "rb") as f:
        response = requests.post(f"{BASE_URL}/predict", files={"file": f})
    assert response.json()["deer"] > 0.9
def test_sample_predict_zip_request():
    """POST a zip of images to /predict_zip and expect one result per image."""
    filepath = Path(__file__).resolve().parents[1] / "gallery.zip"
    # Context manager closes the file handle (the original leaked it).
    with open(filepath, "rb") as f:
        response = requests.post(f"{BASE_URL}/predict_zip", files={"file": f})
    assert len(response.json()) == 4
| StarcoderdataPython |
1863186 | #!/usr/bin/python
# encoding: utf-8
from workflow import web
import json
import os.path
import urllib
# Wowhead search endpoint, local metadata file, and icon URL templates used
# to decode/cache search results.
search_url_tem = 'http://www.wowhead.com/search'
list_json = 'data/list.json'
faction_icon = 'icon/'
image_url_tem = 'http://wow.zamimg.com/images/wow/icons/large/%s.jpg'
image_suffix = ".jpg"
class WowheadGateway(object):
    """Query Wowhead's opensearch endpoint and decode its compact results.

    NOTE(review): this is Python 2 code (``unicode`` below, print statement
    in the __main__ block).
    """
    result_list = 1
    metadata_list = -1  # replaced with the parsed list.json in __init__
    def __init__(self, wf):
        # wf is an Alfred Workflow object; only its cachedir is used here.
        with open(list_json) as fp:
            self.metadata_list = json.load(fp)
        self.cachedir = wf.cachedir
    def get_image_cache(self, image_name, default_image=None):
        """Return a local path for *image_name*, downloading it on a miss."""
        if not image_name:
            return default_image
        # Faction icons are bundled with the workflow, not downloaded.
        if image_name.endswith("-icon.png"):
            return faction_icon + image_name
        image_cache_path = os.path.join(self.cachedir, image_name + image_suffix)
        if not os.path.isfile(image_cache_path):
            image_url = image_url_tem %(image_name)
            urllib.urlretrieve(image_url, image_cache_path)
        return image_cache_path
    def search(self, word):
        """Search Wowhead for *word*; return a list of parsed result dicts."""
        url, params = self.generate_search_url(word)
        response = self._send_request(url, params).json()
        ret = self._parse_json(response)
        return ret
    def generate_search_url(self, word, json=True):
        """Return (url, params); 'opensearch' requests the JSON response."""
        if json:
            return search_url_tem, {'q': word, 'opensearch': ''}
        else:
            return search_url_tem, {'q': word}
    def _parse_json(self, response):
        # Opensearch payload: response[1] is the name list, response[-1]
        # holds the matching per-result metadata tuples.
        return [self._parse_data(name, metadata) for name,metadata in zip(response[1], response[-1])]
    def _parse_data(self, name, data):
        """Decode one result: split "Name (Type)" and the metadata tuple."""
        name,_,type_desc = name.rpartition('(')
        type_desc, _, _ = type_desc.rpartition(')')
        # data: (type, id, [image | faction] [quality])
        obj_type, obj_id, obj_quality, obj_image = None, None, None, None
        obj_type_id = None
        try:
            obj_type_id = data[0]
            obj_type = self.metadata_list['type'][obj_type_id]
            obj_id = data[1]
        except IndexError:
            pass
        if len(data) >= 3:
            # Third element is either an image name (string), a 1-based
            # faction index (quests/titles), or a quality index.
            if isinstance(data[2], unicode):
                obj_image = data[2]
            elif obj_type == 'quest' or obj_type == 'title':
                obj_image = self.metadata_list['faction'][data[2]-1]
            elif isinstance(data[2], int):
                obj_quality = self.metadata_list['quality'][data[2]]
        if len(data) >= 4:
            obj_quality = self.metadata_list['quality'][data[3]]
        return {
            'name': name,
            'type_desc': type_desc,
            'type_id': obj_type_id,
            'type': obj_type,
            'id': obj_id,
            'image': obj_image,
            'quality': obj_quality,
        }
    def _send_request(self, url, params):
        # Raise on HTTP errors so callers see failures immediately.
        response = web.get(url, params=params)
        response.raise_for_status()
        return response
if __name__ == "__main__":
    # Ad-hoc CLI smoke test: search for the words given on the command line
    # and dump the parsed results (Python 2 print statement).
    import sys
    import json
    from workflow import Workflow
    gateway = WowheadGateway(Workflow())
    ret = gateway.search(' '.join(sys.argv[1:]))
    print json.dumps(ret, indent = 2)
| StarcoderdataPython |
from .wrapper import Wrapper

# GymWrapper is optional: it needs the third-party ``gym`` package.
try:
    from .gym_wrapper import GymWrapper
except ImportError:
    # Catch only the import failure; the original bare ``except:`` would
    # also swallow unrelated errors raised while importing gym_wrapper.
    print("Warning: make sure gym is installed if you want to use the GymWrapper.")
| StarcoderdataPython |
5107005 | <gh_stars>1-10
import logging
import random
import string
import pytest
import salt.config
import salt.loader
import salt.states.boto_iot as boto_iot
from tests.support.mock import MagicMock, patch
boto = pytest.importorskip("boto")
boto3 = pytest.importorskip("boto3", "1.2.1")
botocore = pytest.importorskip("botocore", "1.4.41")
log = logging.getLogger(__name__)
class GlobalConfig:
    """Shared fixture data for the boto_iot state tests below."""
    # Fake AWS connection parameters; "key" is re-randomized in each test.
    region = "us-east-1"
    access_key = "<KEY>"
    secret_key = "<KEY>"
    conn_parameters = {
        "region": region,
        "key": access_key,
        "keyid": secret_key,
        "profile": {},
    }
    # Canned botocore error fixtures.
    error_message = (
        "An error occurred (101) when calling the {0} operation: Test-defined error"
    )
    not_found_error = botocore.exceptions.ClientError(
        {
            "Error": {
                "Code": "ResourceNotFoundException",
                "Message": "Test-defined error",
            }
        },
        "msg",
    )
    topic_rule_not_found_error = botocore.exceptions.ClientError(
        {"Error": {"Code": "UnauthorizedException", "Message": "Test-defined error"}},
        "msg",
    )
    error_content = {"Error": {"Code": 101, "Message": "Test-defined error"}}
    # Canned API payloads: IoT policy, topic rule, and thing type shapes.
    policy_ret = dict(
        policyName="testpolicy",
        policyDocument='{"Version": "2012-10-17", "Statement": [{"Action": ["iot:Publish"], "Resource": ["*"], "Effect": "Allow"}]}',
        policyArn="arn:aws:iot:us-east-1:123456:policy/my_policy",
        policyVersionId=1,
        defaultVersionId=1,
    )
    topic_rule_ret = dict(
        ruleName="testrule",
        sql="SELECT * FROM 'iot/test'",
        description="topic rule description",
        createdAt="1970-01-01",
        actions=[{"iot": {"functionArn": "arn:aws:::function"}}],
        ruleDisabled=True,
    )
    principal = "arn:aws:iot:us-east-1:1234:cert/21fc104aaaf6043f5756c1b57bda84ea8395904c43f28517799b19e4c42514"
    thing_type_name = "test_thing_type"
    thing_type_desc = "test_thing_type_desc"
    thing_type_attr_1 = "test_thing_type_search_attr_1"
    thing_type_ret = dict(
        thingTypeName=thing_type_name,
        thingTypeProperties=dict(
            thingTypeDescription=thing_type_desc,
            searchableAttributes=[thing_type_attr_1],
        ),
        thingTypeMetadata=dict(
            deprecated=False, creationDate="2010-08-01 15:54:49.699000+00:00"
        ),
    )
    deprecated_thing_type_ret = dict(
        thingTypeName=thing_type_name,
        thingTypeProperties=dict(
            thingTypeDescription=thing_type_desc,
            searchableAttributes=[thing_type_attr_1],
        ),
        thingTypeMetadata=dict(
            deprecated=True,
            creationDate="2010-08-01 15:54:49.699000+00:00",
            deprecationDate="2010-08-02 15:54:49.699000+00:00",
        ),
    )
    thing_type_arn = "test_thing_type_arn"
    create_thing_type_ret = dict(
        thingTypeName=thing_type_name, thingTypeArn=thing_type_arn
    )
@pytest.fixture
def configure_loader_modules():
    """Build minimal Salt opts/loaders so the boto_iot state module can run."""
    opts = salt.config.DEFAULT_MINION_OPTS.copy()
    opts["grains"] = salt.loader.grains(opts)
    ctx = {}
    utils = salt.loader.utils(
        opts, whitelist=["boto3", "args", "systemd", "path", "platform"], context=ctx,
    )
    serializers = salt.loader.serializers(opts)
    # Fixed the duplicated assignment ("funcs = funcs = ...") from the
    # original; a single binding is sufficient.
    funcs = salt.loader.minion_mods(
        opts, context=ctx, utils=utils, whitelist=["boto_iot"]
    )
    salt_states = salt.loader.states(
        opts=opts,
        functions=funcs,
        utils=utils,
        whitelist=["boto_iot"],
        serializers=serializers,
    )
    return {
        boto_iot: {
            "__opts__": opts,
            "__salt__": funcs,
            "__utils__": utils,
            "__states__": salt_states,
            "__serializers__": serializers,
        }
    }
def test_present_when_thing_type_does_not_exist():
    """thing_type_present should create the type when describe says not-found."""
    GlobalConfig.conn_parameters["key"] = "".join(
        random.choice(string.ascii_lowercase + string.digits) for _ in range(50)
    )
    # Use patch() as a context manager so the boto3 session mock is always
    # undone; the original called patcher.start() without ever stopping it,
    # leaking the patched session into every later test.
    with patch("boto3.session.Session") as mock_session:
        session_instance = mock_session.return_value
        conn = MagicMock()
        session_instance.client.return_value = conn
        # First describe: not found (triggers create); second: the new type.
        conn.describe_thing_type.side_effect = [
            GlobalConfig.not_found_error,
            GlobalConfig.thing_type_ret,
        ]
        conn.create_thing_type.return_value = GlobalConfig.create_thing_type_ret
        result = boto_iot.__states__["boto_iot.thing_type_present"](
            "thing type present",
            thingTypeName=GlobalConfig.thing_type_name,
            thingTypeDescription=GlobalConfig.thing_type_desc,
            searchableAttributesList=[GlobalConfig.thing_type_attr_1],
            **GlobalConfig.conn_parameters
        )
    assert result["result"]
    assert (
        result["changes"]["new"]["thing_type"]["thingTypeName"]
        == GlobalConfig.thing_type_name
    )
def test_present_when_thing_type_exists():
    """No changes and no create call when the thing type already exists."""
    GlobalConfig.conn_parameters["key"] = "".join(
        random.choice(string.ascii_lowercase + string.digits) for _ in range(50)
    )
    # Context-manager patch() guarantees the mock is undone after the test
    # (the original started the patcher and never stopped it).
    with patch("boto3.session.Session") as mock_session:
        session_instance = mock_session.return_value
        conn = MagicMock()
        session_instance.client.return_value = conn
        conn.describe_thing_type.return_value = GlobalConfig.thing_type_ret
        result = boto_iot.__states__["boto_iot.thing_type_present"](
            "thing type present",
            thingTypeName=GlobalConfig.thing_type_name,
            thingTypeDescription=GlobalConfig.thing_type_desc,
            searchableAttributesList=[GlobalConfig.thing_type_attr_1],
            **GlobalConfig.conn_parameters
        )
    assert result["result"]
    assert result["changes"] == {}
    assert conn.create_thing_type.call_count == 0
def test_present_with_failure():
GlobalConfig.conn_parameters["key"] = "".join(
random.choice(string.ascii_lowercase + string.digits) for _ in range(50)
)
patcher = patch("boto3.session.Session")
mock_session = patcher.start()
session_instance = mock_session.return_value
conn = MagicMock()
session_instance.client.return_value = conn
conn.describe_thing_type.side_effect = [
GlobalConfig.not_found_error,
GlobalConfig.thing_type_ret,
]
conn.create_thing_type.side_effect = botocore.exceptions.ClientError(
GlobalConfig.error_content, "create_thing_type"
)
result = boto_iot.__states__["boto_iot.thing_type_present"](
"thing type present",
thingTypeName=GlobalConfig.thing_type_name,
thingTypeDescription=GlobalConfig.thing_type_desc,
searchableAttributesList=[GlobalConfig.thing_type_attr_1],
**GlobalConfig.conn_parameters
)
assert not result["result"]
assert "An error occurred" in result["comment"]
def test_absent_when_thing_type_does_not_exist():
GlobalConfig.conn_parameters["key"] = "".join(
random.choice(string.ascii_lowercase + string.digits) for _ in range(50)
)
patcher = patch("boto3.session.Session")
mock_session = patcher.start()
session_instance = mock_session.return_value
conn = MagicMock()
session_instance.client.return_value = conn
conn.describe_thing_type.side_effect = GlobalConfig.not_found_error
result = boto_iot.__states__["boto_iot.thing_type_absent"](
"test", "mythingtype", **GlobalConfig.conn_parameters
)
assert result["result"]
assert result["changes"] == {}
@pytest.mark.slow_test
def test_absent_when_thing_type_exists():
GlobalConfig.conn_parameters["key"] = "".join(
random.choice(string.ascii_lowercase + string.digits) for _ in range(50)
)
patcher = patch("boto3.session.Session")
mock_session = patcher.start()
session_instance = mock_session.return_value
conn = MagicMock()
session_instance.client.return_value = conn
conn.describe_thing_type.return_value = GlobalConfig.deprecated_thing_type_ret
result = boto_iot.__states__["boto_iot.thing_type_absent"](
"test", GlobalConfig.thing_type_name, **GlobalConfig.conn_parameters
)
assert result["result"]
assert result["changes"]["new"]["thing_type"] is None
assert conn.deprecate_thing_type.call_count == 0
def test_absent_with_deprecate_failure():
GlobalConfig.conn_parameters["key"] = "".join(
random.choice(string.ascii_lowercase + string.digits) for _ in range(50)
)
patcher = patch("boto3.session.Session")
mock_session = patcher.start()
session_instance = mock_session.return_value
conn = MagicMock()
session_instance.client.return_value = conn
conn.describe_thing_type.return_value = GlobalConfig.thing_type_ret
conn.deprecate_thing_type.side_effect = botocore.exceptions.ClientError(
GlobalConfig.error_content, "deprecate_thing_type"
)
result = boto_iot.__states__["boto_iot.thing_type_absent"](
"test", GlobalConfig.thing_type_name, **GlobalConfig.conn_parameters
)
assert not result["result"]
assert "An error occurred" in result["comment"]
assert "deprecate_thing_type" in result["comment"]
assert conn.delete_thing_type.call_count == 0
def test_absent_with_delete_failure():
GlobalConfig.conn_parameters["key"] = "".join(
random.choice(string.ascii_lowercase + string.digits) for _ in range(50)
)
patcher = patch("boto3.session.Session")
mock_session = patcher.start()
session_instance = mock_session.return_value
conn = MagicMock()
session_instance.client.return_value = conn
conn.describe_thing_type.return_value = GlobalConfig.deprecated_thing_type_ret
conn.delete_thing_type.side_effect = botocore.exceptions.ClientError(
GlobalConfig.error_content, "delete_thing_type"
)
result = boto_iot.__states__["boto_iot.thing_type_absent"](
"test", GlobalConfig.thing_type_name, **GlobalConfig.conn_parameters
)
assert not result["result"]
assert "An error occurred" in result["comment"]
assert "delete_thing_type" in result["comment"]
assert conn.deprecate_thing_type.call_count == 0
def test_present_when_policy_does_not_exist():
GlobalConfig.conn_parameters["key"] = "".join(
random.choice(string.ascii_lowercase + string.digits) for _ in range(50)
)
patcher = patch("boto3.session.Session")
mock_session = patcher.start()
session_instance = mock_session.return_value
conn = MagicMock()
session_instance.client.return_value = conn
conn.get_policy.side_effect = [
GlobalConfig.not_found_error,
GlobalConfig.policy_ret,
]
conn.create_policy.return_value = GlobalConfig.policy_ret
result = boto_iot.__states__["boto_iot.policy_present"](
"policy present",
policyName=GlobalConfig.policy_ret["policyName"],
policyDocument=GlobalConfig.policy_ret["policyDocument"],
)
assert result["result"]
assert (
result["changes"]["new"]["policy"]["policyName"]
== GlobalConfig.policy_ret["policyName"]
)
def test_present_when_policy_exists():
GlobalConfig.conn_parameters["key"] = "".join(
random.choice(string.ascii_lowercase + string.digits) for _ in range(50)
)
patcher = patch("boto3.session.Session")
mock_session = patcher.start()
session_instance = mock_session.return_value
conn = MagicMock()
session_instance.client.return_value = conn
conn.get_policy.return_value = GlobalConfig.policy_ret
conn.create_policy_version.return_value = GlobalConfig.policy_ret
result = boto_iot.__states__["boto_iot.policy_present"](
"policy present",
policyName=GlobalConfig.policy_ret["policyName"],
policyDocument=GlobalConfig.policy_ret["policyDocument"],
)
assert result["result"]
assert result["changes"] == {}
def test_present_again_with_failure():
GlobalConfig.conn_parameters["key"] = "".join(
random.choice(string.ascii_lowercase + string.digits) for _ in range(50)
)
patcher = patch("boto3.session.Session")
mock_session = patcher.start()
session_instance = mock_session.return_value
conn = MagicMock()
session_instance.client.return_value = conn
conn.get_policy.side_effect = [
GlobalConfig.not_found_error,
GlobalConfig.policy_ret,
]
conn.create_policy.side_effect = botocore.exceptions.ClientError(
GlobalConfig.error_content, "create_policy"
)
result = boto_iot.__states__["boto_iot.policy_present"](
"policy present",
policyName=GlobalConfig.policy_ret["policyName"],
policyDocument=GlobalConfig.policy_ret["policyDocument"],
)
assert not result["result"]
assert "An error occurred" in result["comment"]
def test_absent_when_policy_does_not_exist():
GlobalConfig.conn_parameters["key"] = "".join(
random.choice(string.ascii_lowercase + string.digits) for _ in range(50)
)
patcher = patch("boto3.session.Session")
mock_session = patcher.start()
session_instance = mock_session.return_value
conn = MagicMock()
session_instance.client.return_value = conn
conn.get_policy.side_effect = GlobalConfig.not_found_error
result = boto_iot.__states__["boto_iot.policy_absent"]("test", "mypolicy")
assert result["result"]
assert result["changes"] == {}
def test_absent_when_policy_exists():
GlobalConfig.conn_parameters["key"] = "".join(
random.choice(string.ascii_lowercase + string.digits) for _ in range(50)
)
patcher = patch("boto3.session.Session")
mock_session = patcher.start()
session_instance = mock_session.return_value
conn = MagicMock()
session_instance.client.return_value = conn
conn.get_policy.return_value = GlobalConfig.policy_ret
conn.list_policy_versions.return_value = {"policyVersions": []}
result = boto_iot.__states__["boto_iot.policy_absent"](
"test", GlobalConfig.policy_ret["policyName"]
)
assert result["result"]
assert result["changes"]["new"]["policy"] is None
def test_absent_with_failure():
GlobalConfig.conn_parameters["key"] = "".join(
random.choice(string.ascii_lowercase + string.digits) for _ in range(50)
)
patcher = patch("boto3.session.Session")
mock_session = patcher.start()
session_instance = mock_session.return_value
conn = MagicMock()
session_instance.client.return_value = conn
conn.get_policy.return_value = GlobalConfig.policy_ret
conn.list_policy_versions.return_value = {"policyVersions": []}
conn.delete_policy.side_effect = botocore.exceptions.ClientError(
GlobalConfig.error_content, "delete_policy"
)
result = boto_iot.__states__["boto_iot.policy_absent"](
"test", GlobalConfig.policy_ret["policyName"]
)
assert not result["result"]
assert "An error occurred" in result["comment"]
def test_attached_when_policy_not_attached():
GlobalConfig.conn_parameters["key"] = "".join(
random.choice(string.ascii_lowercase + string.digits) for _ in range(50)
)
patcher = patch("boto3.session.Session")
mock_session = patcher.start()
session_instance = mock_session.return_value
conn = MagicMock()
session_instance.client.return_value = conn
conn.list_principal_policies.return_value = {"policies": []}
result = boto_iot.__states__["boto_iot.policy_attached"](
"test", "myfunc", GlobalConfig.principal
)
assert result["result"]
assert result["changes"]["new"]["attached"]
def test_attached_when_policy_attached():
GlobalConfig.conn_parameters["key"] = "".join(
random.choice(string.ascii_lowercase + string.digits) for _ in range(50)
)
patcher = patch("boto3.session.Session")
mock_session = patcher.start()
session_instance = mock_session.return_value
conn = MagicMock()
session_instance.client.return_value = conn
conn.list_principal_policies.return_value = {"policies": [GlobalConfig.policy_ret]}
result = boto_iot.__states__["boto_iot.policy_attached"](
"test", GlobalConfig.policy_ret["policyName"], GlobalConfig.principal
)
assert result["result"]
assert result["changes"] == {}
def test_attached_with_failure():
GlobalConfig.conn_parameters["key"] = "".join(
random.choice(string.ascii_lowercase + string.digits) for _ in range(50)
)
patcher = patch("boto3.session.Session")
mock_session = patcher.start()
session_instance = mock_session.return_value
conn = MagicMock()
session_instance.client.return_value = conn
conn.list_principal_policies.return_value = {"policies": []}
conn.attach_principal_policy.side_effect = botocore.exceptions.ClientError(
GlobalConfig.error_content, "attach_principal_policy"
)
result = boto_iot.__states__["boto_iot.policy_attached"](
"test", GlobalConfig.policy_ret["policyName"], GlobalConfig.principal
)
assert not result["result"]
assert result["changes"] == {}
def test_detached_when_policy_not_detached():
    """policy_detached should detach and report attached=False in changes."""
    GlobalConfig.conn_parameters["key"] = "".join(
        random.choice(string.ascii_lowercase + string.digits) for _ in range(50)
    )
    # Context-manager patch() guarantees cleanup (the original leaked the
    # started patcher); also removed the stray log.warning(result) debug
    # leftover.
    with patch("boto3.session.Session") as mock_session:
        session_instance = mock_session.return_value
        conn = MagicMock()
        session_instance.client.return_value = conn
        conn.list_principal_policies.return_value = {
            "policies": [GlobalConfig.policy_ret]
        }
        result = boto_iot.__states__["boto_iot.policy_detached"](
            "test", GlobalConfig.policy_ret["policyName"], GlobalConfig.principal
        )
    assert result["result"]
    assert not result["changes"]["new"]["attached"]
def test_detached_when_policy_detached():
GlobalConfig.conn_parameters["key"] = "".join(
random.choice(string.ascii_lowercase + string.digits) for _ in range(50)
)
patcher = patch("boto3.session.Session")
mock_session = patcher.start()
session_instance = mock_session.return_value
conn = MagicMock()
session_instance.client.return_value = conn
conn.list_principal_policies.return_value = {"policies": []}
result = boto_iot.__states__["boto_iot.policy_detached"](
"test", GlobalConfig.policy_ret["policyName"], GlobalConfig.principal
)
assert result["result"]
assert result["changes"] == {}
def test_detached_with_failure():
GlobalConfig.conn_parameters["key"] = "".join(
random.choice(string.ascii_lowercase + string.digits) for _ in range(50)
)
patcher = patch("boto3.session.Session")
mock_session = patcher.start()
session_instance = mock_session.return_value
conn = MagicMock()
session_instance.client.return_value = conn
conn.list_principal_policies.return_value = {"policies": [GlobalConfig.policy_ret]}
conn.detach_principal_policy.side_effect = botocore.exceptions.ClientError(
GlobalConfig.error_content, "detach_principal_policy"
)
result = boto_iot.__states__["boto_iot.policy_detached"](
"test", GlobalConfig.policy_ret["policyName"], GlobalConfig.principal
)
assert not result["result"]
assert result["changes"] == {}
def test_present_when_topic_rule_does_not_exist():
GlobalConfig.conn_parameters["key"] = "".join(
random.choice(string.ascii_lowercase + string.digits) for _ in range(50)
)
patcher = patch("boto3.session.Session")
mock_session = patcher.start()
session_instance = mock_session.return_value
conn = MagicMock()
session_instance.client.return_value = conn
conn.get_topic_rule.side_effect = [
GlobalConfig.topic_rule_not_found_error,
{"rule": GlobalConfig.topic_rule_ret},
]
conn.create_topic_rule.return_value = {"created": True}
result = boto_iot.__states__["boto_iot.topic_rule_present"](
"topic rule present",
ruleName=GlobalConfig.topic_rule_ret["ruleName"],
sql=GlobalConfig.topic_rule_ret["sql"],
description=GlobalConfig.topic_rule_ret["description"],
actions=GlobalConfig.topic_rule_ret["actions"],
ruleDisabled=GlobalConfig.topic_rule_ret["ruleDisabled"],
)
assert result["result"]
assert (
result["changes"]["new"]["rule"]["ruleName"]
== GlobalConfig.topic_rule_ret["ruleName"]
)
def test_present_when_next_policy_exists():
GlobalConfig.conn_parameters["key"] = "".join(
random.choice(string.ascii_lowercase + string.digits) for _ in range(50)
)
patcher = patch("boto3.session.Session")
mock_session = patcher.start()
session_instance = mock_session.return_value
conn = MagicMock()
session_instance.client.return_value = conn
conn.get_topic_rule.return_value = {"rule": GlobalConfig.topic_rule_ret}
conn.create_topic_rule.return_value = {"created": True}
result = boto_iot.__states__["boto_iot.topic_rule_present"](
"topic rule present",
ruleName=GlobalConfig.topic_rule_ret["ruleName"],
sql=GlobalConfig.topic_rule_ret["sql"],
description=GlobalConfig.topic_rule_ret["description"],
actions=GlobalConfig.topic_rule_ret["actions"],
ruleDisabled=GlobalConfig.topic_rule_ret["ruleDisabled"],
)
assert result["result"]
assert result["changes"] == {}
def test_present_next_with_failure():
GlobalConfig.conn_parameters["key"] = "".join(
random.choice(string.ascii_lowercase + string.digits) for _ in range(50)
)
patcher = patch("boto3.session.Session")
mock_session = patcher.start()
session_instance = mock_session.return_value
conn = MagicMock()
session_instance.client.return_value = conn
conn.get_topic_rule.side_effect = [
GlobalConfig.topic_rule_not_found_error,
{"rule": GlobalConfig.topic_rule_ret},
]
conn.create_topic_rule.side_effect = botocore.exceptions.ClientError(
GlobalConfig.error_content, "create_topic_rule"
)
result = boto_iot.__states__["boto_iot.topic_rule_present"](
"topic rule present",
ruleName=GlobalConfig.topic_rule_ret["ruleName"],
sql=GlobalConfig.topic_rule_ret["sql"],
description=GlobalConfig.topic_rule_ret["description"],
actions=GlobalConfig.topic_rule_ret["actions"],
ruleDisabled=GlobalConfig.topic_rule_ret["ruleDisabled"],
)
assert not result["result"]
assert "An error occurred" in result["comment"]
def test_absent_when_topic_rule_does_not_exist():
GlobalConfig.conn_parameters["key"] = "".join(
random.choice(string.ascii_lowercase + string.digits) for _ in range(50)
)
patcher = patch("boto3.session.Session")
mock_session = patcher.start()
session_instance = mock_session.return_value
conn = MagicMock()
session_instance.client.return_value = conn
conn.get_topic_rule.side_effect = GlobalConfig.topic_rule_not_found_error
result = boto_iot.__states__["boto_iot.topic_rule_absent"]("test", "myrule")
assert result["result"]
assert result["changes"] == {}
def test_absent_when_topic_rule_exists():
GlobalConfig.conn_parameters["key"] = "".join(
random.choice(string.ascii_lowercase + string.digits) for _ in range(50)
)
patcher = patch("boto3.session.Session")
mock_session = patcher.start()
session_instance = mock_session.return_value
conn = MagicMock()
session_instance.client.return_value = conn
conn.get_topic_rule.return_value = GlobalConfig.topic_rule_ret
result = boto_iot.__states__["boto_iot.topic_rule_absent"](
"test", GlobalConfig.topic_rule_ret["ruleName"]
)
assert result["result"]
assert result["changes"]["new"]["rule"] is None
def test_absent_next_with_failure():
GlobalConfig.conn_parameters["key"] = "".join(
random.choice(string.ascii_lowercase + string.digits) for _ in range(50)
)
patcher = patch("boto3.session.Session")
mock_session = patcher.start()
session_instance = mock_session.return_value
conn = MagicMock()
session_instance.client.return_value = conn
conn.get_topic_rule.return_value = GlobalConfig.topic_rule_ret
conn.delete_topic_rule.side_effect = botocore.exceptions.ClientError(
GlobalConfig.error_content, "delete_topic_rule"
)
result = boto_iot.__states__["boto_iot.topic_rule_absent"](
"test", GlobalConfig.topic_rule_ret["ruleName"]
)
assert not result["result"]
assert "An error occurred" in result["comment"]
| StarcoderdataPython |
1825140 | <reponame>jawaidss/halalar-web<gh_stars>1-10
from django.conf.urls import url
from . import views
# URL routes for the static legal pages (privacy policy / terms of service),
# both served by class-based views from this app's views module.
urlpatterns = [
    url(r'^privacy-policy/$', views.PrivacyPolicyView.as_view(), name='legal-privacy_policy'),
    url(r'^terms-of-service/$', views.TermsOfServiceView.as_view(), name='legal-terms_of_service'),
]
# Read the three candidate side lengths from the user.
a = int(input('Digite a 1° reta: '))
b = int(input('Digite a 2° reta: '))
c = int(input('Digite a 3° reta: '))
# Triangle inequality: each side must lie strictly between the difference and
# the sum of the other two.
is_triangle = (b - c < a < b + c) and (a - c < b < a + c) and (a - b < c < a + b)
if not is_triangle:
    print('Não e um triangulo.')
elif a == b == c:
    # All three sides equal: equilateral.
    print('E um triangulo equelátero.')
elif a == b or b == c or a == c:
    # Exactly two sides equal: isosceles.
    print('E um triangulo isósceles.')
else:
    # All sides different: scalene.
    print('E um triangulo escaleno.')
| StarcoderdataPython |
6636708 | # coding: utf-8
"""
Cloudbreak API
Cloudbreak is a powerful left surf that breaks over a coral reef, a mile off southwest the island of Tavarua, Fiji. Cloudbreak is a cloud agnostic Hadoop as a Service API. Abstracts the provisioning and ease management and monitoring of on-demand clusters. SequenceIQ's Cloudbreak is a RESTful application development platform with the goal of helping developers to build solutions for deploying Hadoop YARN clusters in different environments. Once it is deployed in your favourite servlet container it exposes a REST API allowing to span up Hadoop clusters of arbitary sizes and cloud providers. Provisioning Hadoop has never been easier. Cloudbreak is built on the foundation of cloud providers API (Amazon AWS, Microsoft Azure, Google Cloud Platform, Openstack), Apache Ambari, Docker lightweight containers, Swarm and Consul. For further product documentation follow the link: <a href=\"http://hortonworks.com/apache/cloudbreak/\">http://hortonworks.com/apache/cloudbreak/</a>
OpenAPI spec version: 2.7.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class OperationDetails(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Attribute name -> declared Swagger type; drives to_dict() serialization.
    swagger_types = {
        'event_type': 'str',
        'resource_id': 'int',
        'resource_type': 'str',
        'timestamp': 'int',
        'account': 'str',
        'user_id': 'str',
        'user_name': 'str',
        'cloudbreak_id': 'str',
        'cloudbreak_version': 'str'
    }
    # Python attribute name -> JSON key used in the API payload.
    attribute_map = {
        'event_type': 'eventType',
        'resource_id': 'resourceId',
        'resource_type': 'resourceType',
        'timestamp': 'timestamp',
        'account': 'account',
        'user_id': 'userId',
        'user_name': 'userName',
        'cloudbreak_id': 'cloudbreakId',
        'cloudbreak_version': 'cloudbreakVersion'
    }
    def __init__(self, event_type=None, resource_id=None, resource_type=None, timestamp=None, account=None, user_id=None, user_name=None, cloudbreak_id=None, cloudbreak_version=None):
        """
        OperationDetails - a model defined in Swagger

        All fields are optional; attributes are only assigned when a non-None
        value is supplied, so unset fields stay None.
        """
        self._event_type = None
        self._resource_id = None
        self._resource_type = None
        self._timestamp = None
        self._account = None
        self._user_id = None
        self._user_name = None
        self._cloudbreak_id = None
        self._cloudbreak_version = None
        if event_type is not None:
          self.event_type = event_type
        if resource_id is not None:
          self.resource_id = resource_id
        if resource_type is not None:
          self.resource_type = resource_type
        if timestamp is not None:
          self.timestamp = timestamp
        if account is not None:
          self.account = account
        if user_id is not None:
          self.user_id = user_id
        if user_name is not None:
          self.user_name = user_name
        if cloudbreak_id is not None:
          self.cloudbreak_id = cloudbreak_id
        if cloudbreak_version is not None:
          self.cloudbreak_version = cloudbreak_version
    @property
    def event_type(self):
        """
        Gets the event_type of this OperationDetails.

        :return: The event_type of this OperationDetails.
        :rtype: str
        """
        return self._event_type
    @event_type.setter
    def event_type(self, event_type):
        """
        Sets the event_type of this OperationDetails.

        :param event_type: The event_type of this OperationDetails.
        :type: str
        """
        self._event_type = event_type
    @property
    def resource_id(self):
        """
        Gets the resource_id of this OperationDetails.

        :return: The resource_id of this OperationDetails.
        :rtype: int
        """
        return self._resource_id
    @resource_id.setter
    def resource_id(self, resource_id):
        """
        Sets the resource_id of this OperationDetails.

        :param resource_id: The resource_id of this OperationDetails.
        :type: int
        """
        self._resource_id = resource_id
    @property
    def resource_type(self):
        """
        Gets the resource_type of this OperationDetails.

        :return: The resource_type of this OperationDetails.
        :rtype: str
        """
        return self._resource_type
    @resource_type.setter
    def resource_type(self, resource_type):
        """
        Sets the resource_type of this OperationDetails.

        :param resource_type: The resource_type of this OperationDetails.
        :type: str
        """
        self._resource_type = resource_type
    @property
    def timestamp(self):
        """
        Gets the timestamp of this OperationDetails.

        :return: The timestamp of this OperationDetails.
        :rtype: int
        """
        return self._timestamp
    @timestamp.setter
    def timestamp(self, timestamp):
        """
        Sets the timestamp of this OperationDetails.

        :param timestamp: The timestamp of this OperationDetails.
        :type: int
        """
        self._timestamp = timestamp
    @property
    def account(self):
        """
        Gets the account of this OperationDetails.

        :return: The account of this OperationDetails.
        :rtype: str
        """
        return self._account
    @account.setter
    def account(self, account):
        """
        Sets the account of this OperationDetails.

        :param account: The account of this OperationDetails.
        :type: str
        """
        self._account = account
    @property
    def user_id(self):
        """
        Gets the user_id of this OperationDetails.

        :return: The user_id of this OperationDetails.
        :rtype: str
        """
        return self._user_id
    @user_id.setter
    def user_id(self, user_id):
        """
        Sets the user_id of this OperationDetails.

        :param user_id: The user_id of this OperationDetails.
        :type: str
        """
        self._user_id = user_id
    @property
    def user_name(self):
        """
        Gets the user_name of this OperationDetails.

        :return: The user_name of this OperationDetails.
        :rtype: str
        """
        return self._user_name
    @user_name.setter
    def user_name(self, user_name):
        """
        Sets the user_name of this OperationDetails.

        :param user_name: The user_name of this OperationDetails.
        :type: str
        """
        self._user_name = user_name
    @property
    def cloudbreak_id(self):
        """
        Gets the cloudbreak_id of this OperationDetails.

        :return: The cloudbreak_id of this OperationDetails.
        :rtype: str
        """
        return self._cloudbreak_id
    @cloudbreak_id.setter
    def cloudbreak_id(self, cloudbreak_id):
        """
        Sets the cloudbreak_id of this OperationDetails.

        :param cloudbreak_id: The cloudbreak_id of this OperationDetails.
        :type: str
        """
        self._cloudbreak_id = cloudbreak_id
    @property
    def cloudbreak_version(self):
        """
        Gets the cloudbreak_version of this OperationDetails.

        :return: The cloudbreak_version of this OperationDetails.
        :rtype: str
        """
        return self._cloudbreak_version
    @cloudbreak_version.setter
    def cloudbreak_version(self, cloudbreak_version):
        """
        Sets the cloudbreak_version of this OperationDetails.

        :param cloudbreak_version: The cloudbreak_version of this OperationDetails.
        :type: str
        """
        self._cloudbreak_version = cloudbreak_version
    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}
        # Recursively serialize nested models, lists and dicts of models.
        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())
    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()
    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        # Non-OperationDetails operands (including subclass mismatches via
        # __dict__ comparison) compare unequal.
        if not isinstance(other, OperationDetails):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
5136755 | <gh_stars>100-1000
import imp
import importlib
import marshal
import types
# Source compiled, round-tripped through marshal, and executed as a module.
code = '''
print('hello,worldddddddd')
'''
def t():
    """Compile ``code``, serialize its code object with marshal, print the
    serialized bytes as a C-style hex array (16 per line), then deserialize
    and execute it inside a fresh module namespace.
    """
    name = 'hello'
    co = compile(code, name, 'exec')
    payload = marshal.dumps(co)
    # Dump the marshal bytes as `0x..,` tokens, 16 per line.
    for i, byte in enumerate(payload, start=1):
        print('0x%02x,' % (byte,), end='')
        if i % 16 == 0:
            print()
    print()
    # Round-trip: load the code object back and run it in a new module.
    co = marshal.loads(payload)
    # imp.new_module is deprecated and the imp module was removed in
    # Python 3.12; types.ModuleType is the supported equivalent.
    module = types.ModuleType(name)
    exec(co, module.__dict__)
| StarcoderdataPython |
1932848 | <gh_stars>0
# encoding: utf-8
'''
@author: allen-jia
@file: auth.py
@time: 2019/2/20 0020 11:54
@desc:
'''
from rest_framework.authentication import BaseAuthentication
from rest_framework import exceptions
# Static whitelist of accepted authentication tokens (values redacted in
# this copy of the source).
token_list = [
    '<KEY>',
    '<KEY>',
]
class TestAuthentication(BaseAuthentication):
    """Query-parameter token authentication: the ``token`` query parameter
    must match one of the entries in the module-level ``token_list``."""
    def authenticate(self, request):
        # Reject any request whose token is not in the static whitelist.
        val = request.query_params.get('token')
        if val not in token_list:
            raise exceptions.AuthenticationFailed("用户认证失败")
        user = request._request.user
        print(user, val)
        # DRF expects a (user, auth) tuple on success.
        return (user, val)
    def authenticate_header(self, request):
        """
        Return a string to be used as the value of the `WWW-Authenticate`
        header in a `401 Unauthenticated` response, or `None` if the
        authentication scheme should return `403 Permission Denied` responses.
        """
        # Value of the WWW-Authenticate response header on auth failure
        # (translated from the original Chinese comment); intentionally None.
        pass
| StarcoderdataPython |
1817931 | <reponame>Manny27nyc/BitcoinArmory
################################################################################
# #
# Copyright (C) 2011-2015, Armory Technologies, Inc. #
# Distributed under the GNU Affero General Public License (AGPL v3) #
# See LICENSE or http://www.gnu.org/licenses/agpl.html #
# #
################################################################################
import Queue
import os.path
import random
import threading
import traceback
from armoryengine.ArmoryUtils import *
from armoryengine.Timer import TimeThisFunction
import CppBlockUtils as Cpp
from armoryengine.BinaryPacker import UINT64
# States of the BDM state machine (see BlockDataManager.bdmState).
BDM_OFFLINE = 'Offline'
BDM_UNINITIALIZED = 'Uninitialized'
BDM_BLOCKCHAIN_READY = 'BlockChainReady'
BDM_SCANNING = 'Scanning'
# Action tags broadcast to registered listeners by PySide_CallBack.run().
FINISH_LOAD_BLOCKCHAIN_ACTION = 'FinishLoadBlockchain'
NEW_ZC_ACTION = 'newZC'
NEW_BLOCK_ACTION = 'newBlock'
REFRESH_ACTION = 'refresh'
STOPPED_ACTION = 'stopped'
WARNING_ACTION = 'warning'
SCAN_ACTION = 'StartedWalletScan'
NODESTATUS_UPDATE = 'NodeStatusUpdate'
BDM_SCAN_PROGRESS = 'BDM_Progress'
BDV_ERROR = 'BDV_Error'
def newTheBDM(isOffline=False):
   """Replace the module-level TheBDM singleton with a fresh BlockDataManager,
   shutting down any existing instance first."""
   global TheBDM
   if TheBDM:
      # NOTE(review): BlockDataManager defined in this module exposes
      # shutdown(), not beginCleanShutdown() -- confirm this attribute exists
      # on the object being replaced, otherwise this raises AttributeError.
      TheBDM.beginCleanShutdown()
   TheBDM = BlockDataManager(isOffline=isOffline)
class PySide_CallBack(Cpp.PythonCallback):
   """Bridge from the C++ BDV notification callback into Python: translates
   Cpp.BDMAction_* codes into the module-level action-string constants and
   fans them out to every listener registered on TheBDM."""
   def __init__(self, bdm):
      Cpp.PythonCallback.__init__(self, bdm.bdv())
      self.bdm = bdm
   def run(self, action, arg, block):
      """Translate one C++ notification (action code, opaque arg, block
      height) and dispatch it to all registered Python listeners."""
      try:
         act = ''
         arglist = []
         # AOTODO replace with constants
         # Each branch casts the opaque `arg` to its concrete payload type
         # for the given action code.
         if action == Cpp.BDMAction_Ready:
            print 'BDM is ready!'
            act = FINISH_LOAD_BLOCKCHAIN_ACTION
            TheBDM.topBlockHeight = block
            TheBDM.setState(BDM_BLOCKCHAIN_READY)
         elif action == Cpp.BDMAction_ZC:
            act = NEW_ZC_ACTION
            castArg = Cpp.BtcUtils_cast_to_LedgerVector(arg)
            arglist = castArg
         elif action == Cpp.BDMAction_NewBlock:
            act = NEW_BLOCK_ACTION
            castArg = Cpp.BtcUtils_cast_to_int(arg)
            arglist.append(castArg)
            TheBDM.topBlockHeight = block
         elif action == Cpp.BDMAction_Refresh:
            act = REFRESH_ACTION
            castArg = Cpp.BtcUtils_cast_to_BinaryDataVector(arg)
            arglist = castArg
         elif action == Cpp.BDMAction_Exited:
            act = STOPPED_ACTION
         elif action == Cpp.BDMAction_ErrorMsg:
            act = WARNING_ACTION
            argstr = Cpp.BtcUtils_cast_to_string(arg)
            arglist.append(argstr)
         elif action == Cpp.BDMAction_BDV_Error:
            act = BDV_ERROR
            argBdvError = Cpp.BDV_Error_Struct_cast_to_BDVErrorStruct(arg)
            arglist.append(argBdvError)
         elif action == Cpp.BDMAction_NodeStatus:
            act = NODESTATUS_UPDATE
            argNodeStatus = Cpp.NodeStatusStruct_cast_to_NodeStatusStruct(arg)
            arglist.append(argNodeStatus)
         listenerList = TheBDM.getListenerList()
         for cppNotificationListener in listenerList:
            cppNotificationListener(act, arglist)
      except:
         LOGEXCEPT('Error in running callback')
         print sys.exc_info()
         raise
   def progress(self, phase, walletVec, prog, seconds, progressNumeric):
      """Progress notification from the C++ side. An empty walletVec means a
      global scan progress update (stored on TheBDM); otherwise it is a
      per-wallet scan progress report."""
      try:
         if len(walletVec) == 0:
            self.bdm.progressPhase = phase
            self.bdm.progressComplete = prog
            self.bdm.secondsRemaining = seconds
            self.bdm.progressNumeric = progressNumeric
            self.bdm.bdmState = BDM_SCANNING
            for cppNotificationListener in TheBDM.getListenerList():
               cppNotificationListener(BDM_SCAN_PROGRESS, [None, None])
         else:
            progInfo = [walletVec, prog]
            for cppNotificationListener in TheBDM.getListenerList():
               cppNotificationListener(SCAN_ACTION, progInfo)
      except:
         LOGEXCEPT('Error in running progress callback')
         print sys.exc_info()
def getCurrTimeAndBlock():
   """Return (current UTC time as a long, current top block height)."""
   return (long(RightNowUTC()), TheBDM.getTopBlockHeight())
# Decorator that makes every BlockDataManager method act on the module-level
# TheBDM singleton: whichever instance the method was invoked on, the call is
# redirected to TheBDM (when it exists and a `self` argument was passed).
def ActLikeASingletonBDM(func):
   def wrapper(*args, **kwargs):
      if TheBDM and args:
         # Swap the received `self` for the global singleton.
         return func(TheBDM, *args[1:], **kwargs)
      return func(*args, **kwargs)
   return wrapper
################################################################################
class BlockDataManager(object):
   """Python-side facade over the C++ BlockDataViewer (BDV): owns the BDV
   connection, the notification callback, scan-progress state, and the
   wallet/lockbox registration entry points. The module-level TheBDM is the
   canonical instance (see ActLikeASingletonBDM)."""
   #############################################################################
   def __init__(self, isOffline=False):
      super(BlockDataManager, self).__init__()
      #register callbacks
      self.armoryDBDir = ""
      self.bdv_ = None
      # Flags
      self.aboutToRescan = False
      self.errorOut = 0
      self.currentActivity = 'None'
      self.walletsToRegister = []
      if isOffline == True: self.bdmState = BDM_OFFLINE
      else: self.bdmState = BDM_UNINITIALIZED
      self.btcdir = BTC_HOME_DIR
      self.armoryDBDir = ARMORY_DB_DIR
      self.datadir = ARMORY_HOME_DIR
      self.lastPctLoad = 0
      self.topBlockHeight = 0
      # Callables notified by PySide_CallBack with (action, arglist).
      self.cppNotificationListenerList = []
      # Scan-progress fields updated by PySide_CallBack.progress().
      self.progressComplete=0
      self.secondsRemaining=0
      self.progressPhase=0
      self.progressNumeric=0
      self.remoteDB = False
      if ARMORYDB_IP != ARMORYDB_DEFAULT_IP:
         self.remoteDB = True
      self.exception = ""
      self.cookie = None
      self.witness = False
   #############################################################################
   def instantiateBDV(self, port):
      """Create the C++ BDV object connected to the DB at ARMORYDB_IP:port.
      No-op when offline."""
      if self.bdmState == BDM_OFFLINE:
         return
      # Remote DBs use HTTP unless FCGI is forced; local DBs use FCGI.
      socketType = Cpp.SocketFcgi
      if self.remoteDB and not FORCE_FCGI:
         socketType = Cpp.SocketHttp
      self.bdv_ = Cpp.BlockDataViewer_getNewBDV(\
         str(ARMORYDB_IP), str(port), socketType)
   #############################################################################
   def registerBDV(self):
      """Register the BDV with the database; stores and re-raises DB errors."""
      if self.bdmState == BDM_OFFLINE:
         return
      try:
         self.bdv_.registerWithDB(MAGIC_BYTES)
      except Cpp.DbErrorMsg as e:
         self.exception = e.what()
         LOGERROR('DB error: ' + e.what())
         raise e
   #############################################################################
   @ActLikeASingletonBDM
   def hasRemoteDB(self):
      return self.remoteDB
   #############################################################################
   @ActLikeASingletonBDM
   def getListenerList(self):
      return self.cppNotificationListenerList
   #############################################################################
   @ActLikeASingletonBDM
   def bdv(self):
      return self.bdv_
   #############################################################################
   @ActLikeASingletonBDM
   def getTxByHash(self, txHash):
      return self.bdv().getTxByHash(txHash)
   #############################################################################
   @ActLikeASingletonBDM
   def getSentValue(self, txIn):
      return self.bdv().getSentValue(txIn)
   #############################################################################
   @ActLikeASingletonBDM
   def getTopBlockHeight(self):
      return self.topBlockHeight
   #############################################################################
   @ActLikeASingletonBDM
   def registerCppNotification(self, cppNotificationListener):
      self.cppNotificationListenerList.append(cppNotificationListener)
   #############################################################################
   @ActLikeASingletonBDM
   def unregisterCppNotification(self, cppNotificationListener):
      if cppNotificationListener in self.cppNotificationListenerList:
         self.cppNotificationListenerList.remove(cppNotificationListener)
   #############################################################################
   @ActLikeASingletonBDM
   def goOnline(self):
      """Start the BDV and spin up the notification callback loop."""
      self.bdv().goOnline()
      self.callback = PySide_CallBack(self).__disown__()
      self.callback.startLoop()
   #############################################################################
   @ActLikeASingletonBDM
   def registerWallet(self, prefixedKeys, uniqueIDB58, isNew=False):
      #this returns a pointer to the BtcWallet C++ object. This object is
      #instantiated at registration and is unique for the BDV object, so we
      #should only ever set the cppWallet member here
      return self.bdv().registerWallet(uniqueIDB58, prefixedKeys, isNew)
   #############################################################################
   @ActLikeASingletonBDM
   def registerLockbox(self, uniqueIDB58, addressList, isNew=False):
      #this returns a pointer to the BtcWallet C++ object. This object is
      #instantiated at registration and is unique for the BDV object, so we
      #should only ever set the cppWallet member here
      return self.bdv().registerLockbox(uniqueIDB58, addressList, isNew)
   #############################################################################
   @ActLikeASingletonBDM
   def setSatoshiDir(self, newBtcDir):
      """Point at a new bitcoind data directory; logs and ignores missing paths."""
      if not os.path.exists(newBtcDir):
         LOGERROR('setSatoshiDir: directory does not exist: %s', newBtcDir)
         return
      self.btcdir = newBtcDir
   #############################################################################
   @ActLikeASingletonBDM
   def predictLoadTime(self):
      """Return (phase, fraction complete, seconds remaining, numeric progress)."""
      return (self.progressPhase, self.progressComplete, self.secondsRemaining, self.progressNumeric)
   #############################################################################
   @TimeThisFunction
   @ActLikeASingletonBDM
   def createAddressBook(self, cppWlt):
      return cppWlt.createAddressBook()
   #############################################################################
   @ActLikeASingletonBDM
   def setState(self, state):
      self.bdmState = state
   #############################################################################
   @ActLikeASingletonBDM
   def getState(self):
      return self.bdmState
   #############################################################################
   @ActLikeASingletonBDM
   def shutdown(self):
      """Unregister from the DB and stop the callback loop; errors during
      teardown are deliberately swallowed (best-effort shutdown)."""
      if self.bdmState == BDM_OFFLINE:
         return
      try:
         self.bdv_.unregisterFromDB()
         self.callback.shutdown()
         cookie = self.getCookie()
         self.bdv_.shutdown(cookie)
      except:
         pass
   #############################################################################
   def getCookie(self):
      # Lazily fetch and cache the DB auth cookie from the data directory.
      if self.cookie == None:
         self.cookie = Cpp.BlockDataManagerConfig_getCookie(str(self.datadir))
      return self.cookie
   #############################################################################
   @ActLikeASingletonBDM
   def runBDM(self, fn):
      return self.inject.runCommand(fn)
   #############################################################################
   @ActLikeASingletonBDM
   def RegisterEventForSignal(self, func, signal):
      """Register `func` to be invoked (with the arglist) only when the given
      signal string is broadcast."""
      def bdmCallback(bdmSignal, args):
         if bdmSignal == signal:
            func(args)
      self.registerCppNotification(bdmCallback)
   #############################################################################
   def setWitness(self, wit):
      self.witness = wit
   #############################################################################
   def isSegWitEnabled(self):
      return self.witness or FORCE_SEGWIT
################################################################################
# Make TheBDM reference the asynchronous BlockDataManager wrapper if we are
# running
TheBDM = None
if CLI_OPTIONS.offline:
   LOGINFO('Armory loaded in offline-mode. Will not attempt to load ')
   LOGINFO('blockchain without explicit command to do so.')
   TheBDM = BlockDataManager(isOffline=True)
else:
   # NOTE: "TheBDM" is sometimes used in the C++ code to reference the
   # singleton BlockDataManager_LevelDB class object. Here,
   # "TheBDM" refers to a python BlockDataManagerThead class
   # object that wraps the C++ version. It implements some of
   # it's own methods, and then passes through anything it
   # doesn't recognize to the C++ object.
   LOGINFO('Using the asynchronous/multi-threaded BlockDataManager.')
   LOGINFO('Blockchain operations will happen in the background. ')
   LOGINFO('Devs: check TheBDM.getState() before asking for data.')
   LOGINFO('Registering addresses during rescans will queue them for ')
   LOGINFO('inclusion after the current scan is completed.')
   TheBDM = BlockDataManager(isOffline=False)
   # Route C++ logging into armorycpplog.txt (and stdout); on Windows the
   # path must be utf8-encoded bytes for the C++ layer.
   cppLogFile = os.path.join(ARMORY_HOME_DIR, 'armorycpplog.txt')
   cpplf = cppLogFile
   if OS_WINDOWS and isinstance(cppLogFile, unicode):
      cpplf = cppLogFile.encode('utf8')
   Cpp.StartCppLogging(cpplf, 4)
   Cpp.EnableCppLogStdOut()
# Put the import at the end to avoid circular reference problem
from armoryengine.MultiSigUtils import MultiSigLockbox
from armoryengine.Transaction import PyTx
# kate: indent-width 3; replace-tabs on;
| StarcoderdataPython |
5186042 | <gh_stars>1-10
from typing import Union
from getnet.services.customers import Address
from getnet.services.payments.credit import Credit as BaseCredit
class Credit(BaseCredit):
    """Credit payment data extended with an optional billing address.

    ``billing_address`` may be given as an Address instance, a plain dict
    (coerced to Address), or omitted entirely.
    """
    billing_address: Address
    def __init__(self, billing_address: Union[Address, dict] = None, **kwargs):
        if billing_address is None or isinstance(billing_address, Address):
            self.billing_address = billing_address
        else:
            # Coerce a raw dict into an Address value object.
            self.billing_address = Address(**billing_address)
        super(Credit, self).__init__(**kwargs)
    def as_dict(self):
        """Serialize to the API payload shape; optional fields are only
        included when set."""
        payload = {
            "transaction_type": self.transaction_type,
            "number_installments": self.number_installments,
            "card": self.card._as_dict(),
        }
        if self.billing_address is not None:
            payload["billing_address"] = self.billing_address.as_dict()
        if self.soft_descriptor is not None:
            payload["soft_descriptor"] = self.soft_descriptor
        return payload
| StarcoderdataPython |
3521865 | <reponame>amikey/audio_scripts
#!/Users/tkirke/anaconda/bin/python
# -*- coding: utf-8 -*-
import re,sys,os,codecs
from time import sleep
from math import sqrt,log
from scipy import signal,fft
import numpy, matplotlib
from lame import *
matplotlib.use('qt4agg')
import matplotlib.pyplot as plt
import warnings
def fxn():
    # Dummy warning emitter used below to exercise the warning filter.
    warnings.warn("deprecated", DeprecationWarning)
with warnings.catch_warnings():
    warnings.simplefilter("ignore")
    fxn()
# Either take a single mp3 on the command line (2nd arg enables plotting),
# or process every mp3 in the current directory.
mp = re.compile('\.mp3')
files = []
show_plot = False
if (len(sys.argv) > 1):
    files.append(sys.argv[1])
    if (len(sys.argv) > 2): show_plot = True
else:
    files = os.listdir('.')
debug = False
count = 0
# Sample rate in Hz (assumed, not read from the file).
sr = 44100.0
for fil in files:
    if (mp.search(fil)):
        audio_in = decode_mp3(fil)
        samples = len(audio_in)
        # Largest power-of-two FFT that fits in the decoded sample count.
        # NOTE(review): `floor` is not imported above (only sqrt and log from
        # math) -- this line would raise NameError; confirm intended import.
        fft_size = 2**int(floor(log(samples)/log(2.0)))
        print 'samples,fft_size',samples,fft_size
        freq = fft(audio_in[0:fft_size])
        s_data = numpy.zeros(fft_size)
        x_data = numpy.zeros(fft_size)
        # NOTE(review): min_x is computed but never used.
        min_x = log(1.0/fft_size);
        for j in xrange(fft_size):
            # Log-frequency axis, clamped at -10 to keep the plot bounded.
            x_data[j] = log(1.0*(j+1)/fft_size);
            if (x_data[j] < -10):
                x_data[j] = -10
            # Magnitude in dB (10*log10 of the bin magnitude).
            s_data[j] = 10.0*log(abs(freq[j]))/log(10.0)
        plt.plot(x_data,s_data)
        plt.title('fft log power')
        plt.grid()
        # Save alongside the input, named <basename>_fft.png.
        fields = fil.split('.')
        plt.savefig(fields[0]+'_fft.png', bbox_inches="tight")
        plt.draw()
        plt.waitforbuttonpress(timeout=22.1)
        plt.hold(True)
| StarcoderdataPython |
6481459 | from typing import Sequence, Optional
import numpy as np
from matplotlib.pyplot import Axes
import datasets
def plot_predictions(data: datasets.BaseDataGenerator,
                     ax_arr: Sequence[Axes],
                     name: str,
                     t_seq: np.ndarray,
                     paths: np.ndarray,
                     mask: Optional[np.ndarray] = None,
                     true_paths: Optional[np.ndarray] = None,
                     y_lims: Optional[Sequence[Sequence[float]]] = None
                     ):
    """Plot sampled prediction paths, one axis per output dimension.

    Draws up to 15 individual sample paths as thin translucent lines plus the
    sample mean as a solid line. When both ``true_paths`` and ``mask`` are
    given, the ground-truth mean is overlaid in red: solid where the mask is
    1 (observed), dashed where it is 0.

    Args:
        data: data generator the paths came from; kept for interface
            compatibility, currently unused.
        ax_arr: one matplotlib axis per output dimension.
        name: variable name used in the y-axis labels (``$name_i$``).
        t_seq: per-path time points -- assumed shape (num_paths, num_steps);
            TODO confirm against callers.
        paths: sampled paths, indexed as (path, step, dimension).
        mask: optional 0/1 observation mask, same indexing as ``paths``.
        true_paths: optional ground-truth paths, same indexing as ``paths``.
        y_lims: optional (low, high) pair per dimension.
    """
    for dim, ax in enumerate(ax_arr):
        color = 'b'
        color_true = 'r'
        # Individual samples: at most 15 thin translucent lines.
        for j in range(min(paths.shape[0], 15)):
            ax.plot(t_seq[j], paths[j, :, dim], color=color, alpha=0.2, linewidth=0.5)
        # Mean over all samples.
        ax.plot(t_seq[0], np.mean(paths[:, :, dim], axis=0), color=color)
        if true_paths is not None and mask is not None:
            mask_step = np.array(mask[0, :, dim], dtype=bool)
            inverted_mask = np.array(np.abs(mask - 1, dtype=np.int32)[0, :, dim], dtype=bool)
            true_mean = np.mean(true_paths[:, :, dim], axis=0)
            # Observed portion: NaN out unobserved steps so the line breaks.
            observed = np.array(true_mean)
            observed[inverted_mask] = np.nan
            # Unobserved portion: complementary NaN pattern, drawn dashed.
            unobserved = np.array(true_mean)
            unobserved[mask_step] = np.nan
            ax.plot(t_seq[0], observed, color=color_true)
            ax.plot(t_seq[0], unobserved, "--", color=color_true)
        if y_lims is not None:
            ax.set_ylim(y_lims[dim][0], y_lims[dim][1])
        ax.set_ylabel(f"${name}_{dim + 1}$")
        ax.set_xlabel("$t$")
| StarcoderdataPython |
311318 | <gh_stars>10-100
from typing import List, Dict
import math
import numpy as np
from banditpylib.data_pb2 import Feedback, Actions, Context
from banditpylib import argmax_or_min_tuple
from banditpylib.arms import PseudoArm
from banditpylib.learners.mab_fcbai_learner import MABFixedConfidenceBAILearner
class CentralizedLilUCBHeuristic(MABFixedConfidenceBAILearner):
  """LilUCB heuristic policy :cite:`jamieson2014lil`

  Modified implementation to supplement CollaborativeAgent
  along with additional functionality to work on only a subset of arms

  :param int arm_num: number of arms of the bandit
  :param float confidence: confidence level. It should be within (0, 1). The
    algorithm should output the best arm with probability at least this value.
  :param np.ndarray assigned_arms: arm indices the learner has to work with
  :param str name: alias name
  """
  def __init__(self,
               arm_num: int,
               confidence: float,
               assigned_arms: np.ndarray = None,
               name: str = None):
    # NOTE(review): this assert evaluates np.max(assigned_arms) even when
    # assigned_arms is None (the declared default), which would raise --
    # confirm callers always pass an explicit array.
    assert np.max(assigned_arms) < arm_num and len(assigned_arms) <= arm_num, (
        "assigned arms should be a subset of [arm_num]\nReceived: " +
        str(assigned_arms))
    super().__init__(arm_num=arm_num, confidence=confidence, name=name)
    if assigned_arms is not None:
      self.__assigned_arms = assigned_arms
    else:
      self.__assigned_arms = np.arange(arm_num)
  def _name(self) -> str:
    return 'lilUCB_heur_collaborative'
  def reset(self):
    """Reset all learner state; must be called before a fresh run."""
    # create only as many local arms as num_assigned_arms
    # entire algo behaves as if there are just num_assigned_arms in the bandit
    self.__pseudo_arms = [PseudoArm() for arm_id in self.__assigned_arms]
    # Parameters suggested by the paper
    self.__beta = 0.5
    self.__a = 1 + 10 / len(self.__assigned_arms)
    self.__eps = 0
    self.__delta = (1 - self.confidence) / 5
    # Total number of pulls used
    self.__total_pulls = 0
    self.__stage = 'initialization'
    # Upper confidence bound per local (assigned) arm.
    self.__ucb = np.array([0.0] * len(self.__assigned_arms))
  def __confidence_radius(self, pulls: int) -> float:
    """
    Args:
      pulls: number of pulls

    Returns:
      confidence radius
    """
    # With eps == 0 and pulls == 1 the log argument degenerates; return inf
    # so an arm pulled once keeps an unbounded radius in that edge case.
    if (1 + self.__eps) * pulls == 1:
      return math.inf
    return (1 + self.__beta) * (1 + math.sqrt(self.__eps)) * math.sqrt(
        2 * (1 + self.__eps) *
        math.log(math.log((1 + self.__eps) * pulls) / self.__delta) / pulls)
  def __update_ucb(self, arm_id: int):
    """
    Args:
      arm_id: index of the arm whose ucb has to be updated
    """
    self.__ucb[arm_id] = self.__pseudo_arms[arm_id].em_mean +\
        self.__confidence_radius(self.__pseudo_arms[arm_id].total_pulls)
  def actions(self, context: Context = None) -> Actions:
    """Return the next batch of arm pulls (empty Actions signals stopping)."""
    del context
    if self.__stage == 'initialization':
      actions = Actions()  # default state is normal
      # 1 pull each for every assigned arm
      for arm_id in self.__assigned_arms:
        arm_pull = actions.arm_pulls.add()
        arm_pull.arm.id = arm_id
        arm_pull.times = 1
      return actions
    # self.__stage == 'main'
    actions = Actions()
    # Stopping rule: stop once one arm has been pulled far more than the rest.
    for pseudo_arm in self.__pseudo_arms:
      if pseudo_arm.total_pulls >= (
          1 + self.__a * (self.__total_pulls - pseudo_arm.total_pulls)):
        return actions
    arm_pull = actions.arm_pulls.add()
    # map local arm index to the bandits arm index
    arm_pull.arm.id = self.__assigned_arms[int(np.argmax(self.__ucb))]
    arm_pull.times = 1
    return actions
  def update(self, feedback: Feedback):
    """Fold observed rewards into the pseudo-arms and refresh their UCBs."""
    for arm_feedback in feedback.arm_feedbacks:
      # reverse map from bandit index to local index
      pseudo_arm_index = np.where(
          self.__assigned_arms == arm_feedback.arm.id)[0][0]
      self.__pseudo_arms[pseudo_arm_index].update(
          np.array(arm_feedback.rewards))
      self.__update_ucb(pseudo_arm_index)
      self.__total_pulls += len(arm_feedback.rewards)
    if self.__stage == 'initialization':
      self.__stage = 'main'
  @property
  def best_arm(self) -> int:
    # map best arm local index to actual bandit index
    return self.__assigned_arms[argmax_or_min_tuple([
        (pseudo_arm.total_pulls, arm_id)
        for (arm_id, pseudo_arm) in enumerate(self.__pseudo_arms)
    ])]
  def get_total_pulls(self) -> int:
    return self.__total_pulls
def get_num_pulls_per_round(rounds: int,
                            horizon: int,
                            use_centralized_learning: bool = False):
  """Split a pull budget across communication rounds.

  Args:
    rounds: number of total rounds allowed
    horizon: maximum number of pulls the agent can make (over all rounds
      combined)
    use_centralized_learning: when true, the returned schedule has one extra
      round: the first round gets half the budget for local arm elimination
      and the other half is split evenly over the remaining rounds.

  Returns:
    number of pulls to use in each round (sums to ``horizon``)
  """
  if use_centralized_learning:
    # Half the budget for the initial elimination round, the rest spread
    # evenly over rounds - 1 pseudo-communication rounds.
    schedule = [int(0.5 * horizon)]
    schedule += [int(0.5 * horizon / (rounds - 1))] * (rounds - 1)
  else:
    schedule = [int(horizon / (rounds - 1))] * (rounds - 1)
  # The final round never pulls.
  schedule.append(0)
  # Hand out any leftover budget one pull at a time from the front.
  for idx in range(horizon - sum(schedule)):
    schedule[idx] += 1
  return schedule
def assign_arms(active_arms: List[int],
                agent_ids: List[int]) -> Dict[int, List[int]]:
  """Randomly distribute active arms among agents.

  Args:
    active_arms: list of active arm ids
    agent_ids: list of agent ids

  Returns:
    mapping from agent id to the list of arms assigned to that agent

  Raises:
    ValueError: if either input list is empty
  """
  if not active_arms:
    raise ValueError("No arms to assign.")
  if not agent_ids:
    raise ValueError("No agents to assign.")
  if len(active_arms) == 1:
    # Single surviving arm: every agent gets it, with -1 as a placeholder
    # first entry.
    return {agent_id: [-1, active_arms[0]] for agent_id in agent_ids}
  assignment: Dict[int, List[int]] = {}
  if len(active_arms) < len(agent_ids):
    # More agents than arms: replicate the arm list, pad with random picks,
    # shuffle, then give each agent exactly one arm.
    pool = active_arms * (len(agent_ids) // len(active_arms))
    shortfall = len(agent_ids) - len(pool)
    if shortfall > 0:
      pool.extend(list(np.random.choice(active_arms, shortfall)))
    np.random.shuffle(pool)
    for agent_id, arm_id in zip(agent_ids, pool):
      assignment[agent_id] = [arm_id]
  else:
    # At least as many arms as agents: replicate the agent list, pad with
    # random picks, shuffle, then deal the arms out to agents.
    pool = agent_ids * (len(active_arms) // len(agent_ids))
    shortfall = len(active_arms) - len(pool)
    if shortfall > 0:
      pool.extend(list(np.random.choice(agent_ids, shortfall)))
    np.random.shuffle(pool)
    for arm_id, agent_id in zip(active_arms, pool):
      assignment.setdefault(agent_id, []).append(arm_id)
  return assignment
| StarcoderdataPython |
3493759 | <gh_stars>1-10
import autocomplete_light
from cities_light.models import Country, City
from django.contrib.auth.models import User, Group
class AutocompleteTaggableItems(autocomplete_light.AutocompleteGenericBase):
    """Generic autocomplete spanning users, groups, cities and countries."""
    # Querysets searched, in order; search_fields below is parallel to this.
    choices = (
        User.objects.all(),
        Group.objects.all(),
        City.objects.all(),
        Country.objects.all(),
    )
    # Lookup fields per queryset above (same order as `choices`).
    search_fields = (
        ('username', 'email'),
        ('name',),
        ('search_names',),
        ('name_ascii',),
    )
# Make the autocomplete discoverable by autocomplete_light.
autocomplete_light.register(AutocompleteTaggableItems)
| StarcoderdataPython |
292708 | <gh_stars>0
import unittest
import tempfile
import shutil
from os import path
from pbcore.io import AlignmentSet, ReferenceSet
from pbalign.pbalignrunner import PBAlignRunner
from test_setpath import ROOT_DIR
class Test_PBAlignRunner(unittest.TestCase):
    """Integration test: run PBAlignRunner end to end and verify the output
    AlignmentSet XML references the input reference file."""
    def setUp(self):
        # Input subreads/reference live under ROOT_DIR/data; outputs go to a
        # fresh temporary directory removed in tearDown.
        self.rootDir = ROOT_DIR
        self.queryFile = path.join(self.rootDir, "data/subreads_dataset1.xml")
        self.referenceFile = path.join(self.rootDir, "data/reference_lambda.xml")
        self.configFile = path.join(self.rootDir, "data/1.config")
        self.OUT_DIR = tempfile.mkdtemp()
        self.bamOut = path.join(self.OUT_DIR, "lambda_out.bam")
        self.xmlOut = path.join(self.OUT_DIR, "lambda_out.xml")
    def tearDown(self):
        shutil.rmtree(self.OUT_DIR)
    def test_init_xml(self):
        """Test PBAlignRunner.__init__() to XML."""
        argumentList = ['--minAccuracy', '70', '--maxDivergence', '30',
                        self.queryFile, self.referenceFile,
                        self.xmlOut]
        pbobj = PBAlignRunner(argumentList=argumentList)
        # Exit code 0 means the full alignment run succeeded.
        self.assertEqual(pbobj.start(), 0)
        aln = AlignmentSet(self.xmlOut)
        self.assertEqual(aln.externalResources[0].reference,
                         ReferenceSet(self.referenceFile).toExternalFiles()[0])
if __name__ == "__main__":
unittest.main()
| StarcoderdataPython |
11241781 | <reponame>Trafalcon/Parsr
import argparse
import os
import numpy as np
import pandas as pd
from sklearn import metrics
from sklearn.feature_selection import RFECV
from sklearn.svm import SVC
from sklearn.tree import DecisionTreeClassifier
from sklearn_porter import Porter
from imblearn.over_sampling import SMOTE
# Train a heading/paragraph decision tree from per-document CSVs and export
# it as JavaScript via sklearn-porter.
parser = argparse.ArgumentParser(description='Train a decision tree to recognize headings.')
parser.add_argument('dataset_dir', help='folder containing the .csv files generated by build_dataset.py')
parser.add_argument('out_dir', help='folder in which to save the trained model')
args = parser.parse_args()
dataset_dir = args.dataset_dir
paths = os.listdir(dataset_dir)
# Feature rows (one per text line) and their 0/1 labels (1 = heading).
x_train = []
y_train = []
for path in paths:
    df = pd.read_csv(os.path.join(dataset_dir, path), header=0)
    if len(df) < 3:
        # NOTE(review): `break` aborts processing of ALL remaining files as
        # soon as one file with fewer than 3 rows is seen; `continue`
        # (skip only this file) looks like the intent — confirm.
        break
    # Normalize booleans and labels to 0/1 integers.
    df['is_bold'] = df['is_bold'].apply(lambda x: 1 if x else 0)
    df['label'] = df['label'].apply(lambda x: 0 if x == 'paragraph' else 1)
    df['title_case'] = df['title_case'].apply(lambda x: 1 if x else 0)
    print(df.head())
    # First line has no predecessor, so predecessor-relative features are 0.
    x_train.append([0,
                    df['font_size'][0] / df['font_size'][1],
                    df['is_bold'][0],
                    int(df['line'][0].isupper()),
                    0,
                    df['word_count'][0] / df['word_count'][1],
                    int(df['title_case'][0]),
                    0,
                    int(df['color'][1] == df['color'][0])])
    # Interior lines: features relative to both neighbours.
    for i in range(len(df)):
        if i > 0 and i < len(df) - 1:
            prev_font_size = df['font_size'][i - 1]
            font_size = df['font_size'][i]
            next_font_size = df['font_size'][i + 1]
            x_train.append([font_size / prev_font_size,
                            font_size / next_font_size,
                            df['is_bold'][i],
                            int(df['line'][i].isupper()),
                            df['word_count'][i] / df['word_count'][i - 1],
                            df['word_count'][i] / df['word_count'][i + 1],
                            int(df['title_case'][i]),
                            int(df['color'][i] == df['color'][i - 1]),
                            int(df['color'][i] == df['color'][i + 1])])
    # Last line has no successor, so successor-relative features are 0.
    # NOTE(review): the uppercase flag below reads df['line'][0] (the first
    # line) instead of df['line'][len(df) - 1] — looks like a copy/paste
    # bug; confirm before changing.
    x_train.append([df['font_size'][len(df) - 1] / df['font_size'][len(df) - 2],
                    0,
                    df['is_bold'][len(df) - 1],
                    int(df['line'][0].isupper()),
                    df['word_count'][len(df) - 1] / df['word_count'][len(df) - 2],
                    0,
                    int(df['title_case'][len(df) - 1]),
                    int(df['color'][len(df) - 1] == df['color'][len(df) - 2]),
                    0])
    y_train = y_train + list(df['label'])
print(len(x_train))
# Oversample the minority class (headings) to balance the training set.
smt = SMOTE()
x_train, y_train = smt.fit_sample(x_train, y_train)
print(len(x_train))
clf = DecisionTreeClassifier(min_samples_leaf=3, min_samples_split=2, criterion='entropy')
#clf = DecisionTreeClassifier(criterion='entropy')
clf = clf.fit(x_train, y_train)
# Recursive feature elimination, for inspection only (the fitted clf is kept).
selector = RFECV(clf, step=1, cv=5)
selector = selector.fit(x_train, y_train)
print(selector.support_)
print(selector.ranking_)
# NOTE(review): all metrics below are computed on the TRAINING data, so they
# are optimistic; also the argument order (y_pred, y_train) is reversed from
# the sklearn convention of (y_true, y_pred) — confirm intent.
y_pred = clf.predict(x_train)
print('f1:', metrics.f1_score(y_pred, y_train))
print('IoU:', metrics.jaccard_score(y_pred, y_train))
print('AuC:', metrics.roc_auc_score(y_pred, y_train))
# Recall on the heading class only.
headings_x = []
headings_y = []
for x, y in zip(x_train, y_train):
    if y == 1:
        headings_y.append(1)
        headings_x.append(x)
y_pred = clf.predict(headings_x)
print('Accuracy:', metrics.accuracy_score(y_pred, headings_y))
# Transpile the tree to JavaScript and sanity-check the ported predictions.
porter = Porter(clf, language='js')
output = porter.export(embed_data=True)
headings_x_js = np.array(headings_x)
y_pred_js = porter.predict(headings_x_js)
print('Accuracy:', metrics.accuracy_score(y_pred_js, headings_y))
with open(os.path.join(args.out_dir, 'model.js'), mode='w+', encoding='utf8') as f:
    f.write('export ' + output)
8151987 | <gh_stars>0
# NOTE: this script uses Python 2 print statements throughout; it will not
# run under Python 3 without converting them to print() calls.
# Iterate a dict: iteration yields keys, values looked up per key.
d = {'x':1,'y':2,'z':3}
for keys in d:
    print keys, 'corresponds to',d[keys]
print '='*20
# Parallel lists traversed by index...
names=['anne','beth','george','damon']
ages=[12,45,32,102]
for i in range(len(names)):
    print names[i], 'is', ages[i], 'years old'
print '='*20
# ...and the same traversal, more idiomatically, with zip.
for name,age in zip(names,ages):
    print name, 'is', age, 'years old'
print '='*20
# for index,string in enumerate(strings)
#	if 'xxx' in string
#		strings[indext]='[censored]'
#
| StarcoderdataPython |
318971 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import unittest
from os.path import join as joinpath
import ffmpymedia
from tests import TEST_FILE_PATH
class TestMediaUse(unittest.TestCase):
    """User-story style tests for the ffmpymedia comparison API."""
    def test_compare_two_files(self):
        # User1 wants to compare two media files to see if their stream layouts are the same.
        # First he passes the same file to the API to see if they compare as the same.
        filename1 = filename2 = joinpath(TEST_FILE_PATH, 'SIN001 Sinuca.mp4')
        file1 = ffmpymedia.MediaFile.parse_file(filename1)
        file2 = ffmpymedia.MediaFile.parse_file(filename2)
        self.assertTrue(file1 == file2)
        # Then he wants to be sure and see that the difference between the two files is {}.
        self.assertEqual(ffmpymedia.MediaFile.parse_file(filename1).difference(ffmpymedia.MediaFile.parse_file(filename2)), {})
        # Then he decides to try two different files to be sure different files are treated differently.
        filename3 = joinpath(TEST_FILE_PATH, 'COLB001 Color Bar.mp4')
        file3 = ffmpymedia.MediaFile.parse_file(filename3)
        self.assertFalse(file1 == file3)
        # As he is very curious, he then wants to see the difference between the files.
        self.assertNotEqual(ffmpymedia.MediaFile.parse_file(filename1).difference(ffmpymedia.MediaFile.parse_file(filename3)), {})
        # After all these comparisons, he decided to take a look at the streams of each file.
        print(file1.__repr__())
        print(file2.__repr__())
        print(file3.__repr__())
    def test_media_analyser(self):
        # Developer1 wishes to test the MediaAnalyser API functionality.
        # With that in mind, he decides to try out all 4 API calls from this helper class.
        filename1 = filename2 = joinpath(TEST_FILE_PATH, 'SIN001 Sinuca.mp4')
        filename3 = joinpath(TEST_FILE_PATH, 'COLB001 Color Bar.mp4')
        # NOTE(review): template1 and template3 are built from identical dicts
        # (both describe SIN001), template3/filename2/filename3 are unused, and
        # the method exercises only one of the advertised 4 calls — confirm
        # whether this test is unfinished.
        template1 = ffmpymedia.MediaFileTemplate(**{'format_name': 'mov,mp4,m4a,3gp,3g2,mj2', 'duration': '12.0', 'metadata': None, 'start_time': '0.000000', 'streams': [{'type': 'video', 'height': '1080', 'bitrate': '2574', 'metadata': {'handler_name': 'VideoHandler'}, 'codec': 'h264', 'index': '0', 'disposition': {'lyrics': 0, 'default': 1, 'clean_effects': 0, 'karaoke': 0, 'hearing_impaired': 0, 'visual_impaired': 0, 'forced': 0, 'comment': 0, 'dub': 0, 'original': 0, 'attached_pic': 0}, 'codec_tag': '0x31637661', 'codec_tag_string': 'avc1', 'width': '1920', 'sample_aspect_ratio': '1:1', 'pixel_format': 'yuv420p', 'reported_frame_rate': '25', 'display_aspect_ratio': '16:9', 'container_time_base': '12800', 'average_frame_rate': '25', 'codec_time_base': '50', 'language': 'und', 'profile': 'High', 'codec_long_name': 'H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10'}], 'filename': '/home/flaviopontes/PycharmProjects/ffmpymedia/test_files/SIN001 Sinuca.mp4', 'bit_rate': '2577000'})
        template3 = ffmpymedia.MediaFileTemplate(**{'format_name': 'mov,mp4,m4a,3gp,3g2,mj2', 'duration': '12.0', 'metadata': None, 'start_time': '0.000000', 'streams': [{'type': 'video', 'height': '1080', 'bitrate': '2574', 'metadata': {'handler_name': 'VideoHandler'}, 'codec': 'h264', 'index': '0', 'disposition': {'lyrics': 0, 'default': 1, 'clean_effects': 0, 'karaoke': 0, 'hearing_impaired': 0, 'visual_impaired': 0, 'forced': 0, 'comment': 0, 'dub': 0, 'original': 0, 'attached_pic': 0}, 'codec_tag': '0x31637661', 'codec_tag_string': 'avc1', 'width': '1920', 'sample_aspect_ratio': '1:1', 'pixel_format': 'yuv420p', 'reported_frame_rate': '25', 'display_aspect_ratio': '16:9', 'container_time_base': '12800', 'average_frame_rate': '25', 'codec_time_base': '50', 'language': 'und', 'profile': 'High', 'codec_long_name': 'H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10'}], 'filename': '/home/flaviopontes/PycharmProjects/ffmpymedia/test_files/SIN001 Sinuca.mp4', 'bit_rate': '2577000'})
        self.assertFalse(ffmpymedia.MediaAnalyser.compare_media_file_with_template(filename1, template1))
| StarcoderdataPython |
179400 | <filename>django_react/json_encoder.py
# -*- coding: utf-8 -*-
# deprecated for now...
import json
from django.utils.encoding import force_unicode
from django.db.models.base import ModelBase
class LazyJSONEncoder(json.JSONEncoder):
    """
    A JSONEncoder subclass that handles querysets and model objects. Add
    your code about how to handle your type of object here to use when
    dumping json.
    """
    def default(self, o):
        """Serialize *o*: iterables become lists, Django model instances
        become their unicode representation; anything else is delegated to
        the base implementation (which raises TypeError)."""
        # This handles querysets and other iterable types.
        try:
            iterable = iter(o)
        except TypeError:
            pass
        else:
            return list(iterable)
        # This handles Django model instances. BUG FIX: the original wrapped
        # isinstance() in try/except and ignored its boolean result, so the
        # else-branch ran for EVERY non-iterable object and force_unicode()'d
        # it, masking genuine serialization errors. Test the result instead.
        if isinstance(o.__class__, ModelBase):
            return force_unicode(o)
        return super(LazyJSONEncoder, self).default(o)
def serialize_to_json(obj, *args, **kwargs):
    """Dump *obj* to JSON via json.dumps, defaulting ensure_ascii to False
    and cls to LazyJSONEncoder; callers may override either via kwargs."""
    kwargs.setdefault('ensure_ascii', False)
    kwargs.setdefault('cls', LazyJSONEncoder)
    return json.dumps(obj, *args, **kwargs)
| StarcoderdataPython |
12858058 | <reponame>brunosmmm/scoff
"""Auto-generate custom TextX AST classes."""
import re
try:
import black
except ImportError:
black = None
# Matches one whole grammar rule: "Name : body ;" (a quoted ';' inside the
# body does not terminate the rule).
GRAMMAR_RULE_REGEX = re.compile(
    r"([a-zA-Z_]\w*)\s*:(((['\"];['\"])+|[^;])+);".replace("+|", "|"), re.S
) if False else re.compile(
    r"([a-zA-Z_]\w*)\s*:(((['\"];['\"])|[^;])+);", re.S
)
def parse_textx_rule(rule_definition):
    """Parse a rule body into (member_name, assignment_operator) pairs.

    The operator is "?", "+", "*" or "" as written before the "="; a member
    whose assigned value ends with "?" is treated as optional ("?") too.
    """
    member_pattern = re.compile(
        r"([a-zA-Z_]\w*)\s*([?\+\*]?)=\s*([^\s]+)", re.S
    )
    pairs = []
    for name, operator, value in member_pattern.findall(rule_definition):
        # a trailing "?" on the value is a shortcut marking the member optional
        pairs.append((name, "?" if value.endswith("?") else operator))
    return pairs
def parse_textx_grammar(grammar_file):
    """Parse a grammar file into {rule_name: [(member, operator), ...]}.

    Rules without any named members are omitted from the result.
    """
    with open(grammar_file, "r") as handle:
        source = handle.read()
    grammar_rules = {}
    for match in re.findall(GRAMMAR_RULE_REGEX, source):
        name, body = match[0], match[1]
        members = parse_textx_rule(body.strip())
        # only rules that declare members become generated classes
        if members:
            grammar_rules[name.strip()] = members
    return grammar_rules
def build_python_class_text(class_name, subclass_of, *members):
    """Build the source text of a python AST class declaration.

    :param class_name: name of the generated class
    :param subclass_of: name of the parent class
    :param members: (member_name, textx_operator) pairs; members whose
        operator is "?" or "*" are optional and default to None
    :returns: a (class_name, class_source) tuple; the source is formatted
        with black when that package is available
    """
    member_arguments = []
    optional_arguments = []
    for member in members:
        member_name, member_operator = member
        if member_operator in ("?", "*"):
            # optional member: becomes a keyword argument defaulting to None
            optional_arguments.append("{name}=None".format(name=member_name))
        else:
            member_arguments.append(member_name)
    # keyword arguments must come after the required positional ones
    member_arguments.extend(optional_arguments)
    class_contents = """
class {name}({parent_class}):
    \"\"\"{name} AST.\"\"\"
    __slots__ = ({slots})
    def __init__(self, parent, {members}, **kwargs):
        \"\"\"Initialize.\"\"\"
        super().__init__(parent=parent, {member_assign}, **kwargs)
    """.format(
        name=class_name,
        parent_class=subclass_of,
        members=", ".join(member_arguments),
        slots=", ".join(
            [
                '"{}"'.format(member[0])
                for member in members
                # BUG FIX: the original tested `member != "parent"`, comparing
                # the whole (name, operator) tuple against a string — always
                # true, so a member named "parent" wrongly landed in
                # __slots__. Compare the member *name* instead.
                if member[0] != "parent"
            ]
        ),
        member_assign=", ".join(
            ["{name}={name}".format(name=member[0]) for member in members]
        ),
    )
    if black is not None:
        return (
            class_name,
            black.format_str(class_contents, mode=BLACK_FILE_MODE),
        )
    else:
        return (class_name, class_contents)
3398514 | # Placeholder.
# Use prepare_contest.py to get an up-to-date version.
| StarcoderdataPython |
79411 | <filename>pplbench/models/utils.py
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from typing import Tuple
import numpy as np
import xarray as xr
def log1pexp(x: np.ndarray) -> np.ndarray:
    """
    Compute log(1 + exp(x)) in a numerically stable way,
    see https://cran.r-project.org/web/packages/Rmpfr/vignettes/log1mexp-note.pdf eqn 10

    :param x: numpy array of numbers
    :returns: log(1 + exp(x))
    """
    below = x < 18
    above = x >= 18
    result = np.zeros_like(x)
    # exp(x) cannot overflow for x < 18, so the direct formula is safe
    result[below] = np.log1p(np.exp(x[below]))
    # for large x, log(1 + exp(x)) == x + log(1 + exp(-x)) ~= x + exp(-x)
    result[above] = x[above] + np.exp(-x[above])
    return result
def log1mexpm(x: np.ndarray) -> np.ndarray:
    """
    Compute log(1 - exp(-x)) in a numerically stable way for x > 0,
    see https://cran.r-project.org/web/packages/Rmpfr/vignettes/log1mexp-note.pdf eqn 7

    :param x: numpy array of numbers >= 0
    :returns: log(1 - exp(-x))
    """
    small = x < 0.693
    large = x >= 0.693
    result = np.zeros_like(x)
    # near zero, expm1 keeps precision that 1 - exp(-x) would lose
    result[small] = np.log(-(np.expm1(-x[small])))
    # away from zero, log1p of the (now well-conditioned) exp term is stable
    result[large] = np.log1p(-(np.exp(-x[large])))
    return result
def split_train_test(
    data: xr.Dataset, coord_name: str, train_frac: float
) -> Tuple[xr.Dataset, xr.Dataset]:
    """
    Split a dataset in two along the given coordinate, keeping the first
    ``train_frac`` fraction for training, and fix up the ``n`` attribute
    on both halves.

    :param data: the dataset to split
    :param coord_name: the coordinate along which to slice
    :param train_frac: fraction of entries assigned to the training half
    :returns: the (train, test) datasets
    """
    n_train = int(train_frac * len(data.coords[coord_name]))
    train = data[{coord_name: slice(None, n_train)}]
    test = data[{coord_name: slice(n_train, None)}]
    # each half gets an independent copy of the attrs with its own size
    train.attrs = data.attrs.copy()
    train.attrs["n"] = n_train
    test.attrs = data.attrs.copy()
    test.attrs["n"] = data.attrs["n"] - n_train
    return train, test
| StarcoderdataPython |
1882850 | <filename>foiamachine/apps/government/api.py<gh_stars>1-10
from tastypie.resources import ModelResource, Resource, ALL, ALL_WITH_RELATIONS
from tastypie import fields
from tastypie.authentication import Authentication
from tastypie.authorization import Authorization#need?, DjangoAuthorization
from apps.government.models import Government, Statute, FeeExemptionOther
from tastypie.serializers import Serializer
from tastypie.exceptions import BadRequest, TastypieError, ApiFieldError
from tastypie.validation import Validation
from datetime import datetime
import bleach
import logging
logger = logging.getLogger('default')
class GovernmentValidation(Validation):
    """Restricts Government edits to staff users."""
    def is_valid(self, bundle, request=None):
        # an empty dict means "valid"; a string is the tastypie error payload
        if bundle.request.user.is_staff:
            return {}
        return "You do not have permission to edit this bundle"
class GovernmentResource(ModelResource):
    """Tastypie endpoint exposing Government records."""
    class Meta:
        queryset = Government.objects.all()
        resource_name = 'governments'
        allowed_methods = ['get', 'post']
        detail_allowed_methods = ['get', 'post', 'put']
        #authorization = Authorization()
        #authentication = Authentication()
        # List filtering is allowed on any lookup of name/slug.
        filtering = {
            'name' : ALL,
            'slug' : ALL
        }
class StatuteAuthentication(Authentication):
    """Grants API access only to logged-in users."""
    def is_authenticated (self, request):
        return request.user.is_authenticated()
class FeeExemptionResource(ModelResource):
    """Tastypie endpoint for FeeExemptionOther records attached to statutes."""
    class Meta:
        queryset = FeeExemptionOther.objects.all().order_by('-created')
        resource_name = 'feeorexemption'
        allowed_methods = ['get', 'post']
        detail_allowed_methods = ['get', 'post', 'put']
        #authorization = Authorization()
        #authentication = StatuteAuthentication()
        filtering = {
            'id': ALL,
        }
    def get_object_list(self, request):
        # Staff get the all_them() manager queryset (presumably including
        # deprecated rows — confirm against the manager); others get the
        # default set.
        if request.user.is_staff:
            return FeeExemptionOther.objects.all_them().order_by('-created')
        else:
            return FeeExemptionOther.objects.all().order_by('-created')
    def dehydrate(self, bundle):
        # NOTE(review): 'deleted' is True when `deprecated` is NOT set, which
        # looks inverted — obj_update below soft-deletes by *setting*
        # `deprecated`. Confirm the intended semantics.
        bundle.data['deleted'] = bundle.obj.deprecated is None
        bundle.data['can_edit'] = bundle.request.user.is_staff
        return bundle
    def hydrate(self, bundle):
        # No inbound transformation needed.
        return bundle
    def obj_update(self, bundle, **kwargs):
        """Update an existing record; staff only. A truthy 'deleted' field
        soft-deletes by stamping `deprecated` with the current time."""
        if not bundle.request.user.is_staff:
            return bundle
        try:
            data = bundle.data
            feeexemption = bundle.obj = FeeExemptionOther.objects.all_them().get(id=data['id'])
            for field in ['source', 'name', 'description', 'deleted', 'deprecated']:
                if field in data and field != 'deleted':
                    setattr(feeexemption, field, data[field])
                elif field == 'deleted' and field in data and data[field]:
                    feeexemption.deprecated = datetime.now()
            feeexemption.save()
            return bundle
        except Exception as e:
            logger.exception(e)
            raise BadRequest(str(e))
    def obj_create(self, bundle, **kwargs):
        """Create a record and attach it to the statute named by 'statute_id';
        staff only. Template-only keys are stripped before construction."""
        if not bundle.request.user.is_staff:
            return bundle
        try:
            data = bundle.data
            statute = Statute.objects.get(id=data['statute_id'])
            del data['statute_id']#data for relationships
            del data['can_edit']#data for the template
            del data['type']#data for the template
            feeorexemption = FeeExemptionOther(**data)
            feeorexemption.save()
            bundle.obj = feeorexemption
            statute.fees_exemptions.add(feeorexemption)
            logger.info("feeorexemption %s created" % feeorexemption.id)
        except Exception as e:
            # Best-effort: failures are logged and the bundle returned
            # unchanged rather than raising (unlike obj_update).
            logger.exception(e)
        return bundle
class StatuteValidation(Validation):
    """Validates statute submissions: staff-only, data required, and a
    short title that must be unique when creating (POST)."""
    def is_valid(self, bundle, request=None):
        # staff gate first: only staff may create or edit statutes
        if not bundle.request.user.is_staff:
            return "You do not have permission to edit this statute"
        if not bundle.data:
            return "No data submitted"
        if 'short_title' not in bundle.data:
            return "Statute must have at least a distinct short title"
        short_title = bundle.data['short_title']
        # only on creation (POST) must the short title be globally unique
        is_post = bool(request and request.META and request.META['REQUEST_METHOD'] == 'POST')
        if is_post and Statute.objects.filter(short_title=short_title):
            return "A statute with that short title already exists."
        return {}
class StatuteResource(ModelResource):
    """Tastypie endpoint for Statute records with their related governments."""
    # Reverse m2m to governments via the model's `related_statutes` attribute.
    governments = fields.ToManyField(GovernmentResource, 'related_statutes', null = True, blank = True)
    class Meta:
        queryset = Statute.objects.all().order_by('-created')
        resource_name = 'statute'
        allowed_methods = ['get', 'post']
        detail_allowed_methods = ['get', 'put']
        #authorization = Authorization()
        #authentication = StatuteAuthentication()
        filtering = {
            'id': ALL,
        }
        validation = StatuteValidation()
    def get_object_list(self, request):
        # Staff get the all_them() manager queryset; others the default set.
        if request.user.is_staff:
            return Statute.objects.all_them().order_by('-created')
        else:
            return Statute.objects.all().order_by('-created')
    def dehydrate(self, bundle):
        # Collapse government resource URIs ("/api/.../<id>/") to bare ids.
        # NOTE(review): under Python 3 this map() is a lazy iterator, which may
        # not serialize as a list — confirm the runtime this targets.
        if 'governments' in bundle.data:
            bundle.data['governments'] = map(lambda x: {'id' : x.split("/")[-2]}, bundle.data['governments'])
        else:
            bundle.data['governments'] = []
        bundle.data['can_edit'] = True #TODO move this to check for a group
        # NOTE(review): 'deleted' True when `deprecated` is NOT set looks
        # inverted (obj_update soft-deletes by setting `deprecated`) — confirm.
        bundle.data['deleted'] = bundle.obj.deprecated is None
        return bundle
    def hydrate(self, bundle):
        # No inbound transformation needed.
        return bundle
    def obj_update(self, bundle, **kwargs):
        """Update a statute's fields and government links.

        NOTE(review): unlike FeeExemptionResource.obj_update there is no
        staff check here — confirm whether that is intentional.
        """
        try:
            data = bundle.data
            statute = bundle.obj = Statute.objects.all_them().get(id=data['id'])
            for field in ['days_till_due', 'text', 'short_title', 'deleted', 'deprecated']:
                if field in data and field != 'deleted':
                    setattr(statute, field, data[field])
                elif field == 'deleted' and field in data and data[field]:
                    statute.deprecated = datetime.now()
            # Replace the full set of related governments when supplied.
            if 'governments' in data:
                governments = [Government.objects.get(id=id) for id in data['governments']]
                statute.related_statutes = governments
            statute.save()
            return bundle
        except Exception as e:
            logger.exception(e)
            raise BadRequest( str(e))
3467843 | from __future__ import division
import sys
from time import sleep, time
from threading import Event
from math import atan2, degrees, hypot
try:
from inspect import getfullargspec
except ImportError:
from inspect import getargspec as getfullargspec
from .btcomm import BluetoothServer
from .threads import WrapThread
from .constants import PROTOCOL_VERSION, CHECK_PROTOCOL_TIMEOUT
from .colors import parse_color, BLUE
class BlueDotPosition(object):
    """
    A single touch position on the Blue Dot.

    Coordinates are clamped to the dot: ``x`` runs from -1 (far left)
    through 0 (centre) to 1 (far right), ``y`` from -1 (bottom) to 1 (top).

    :param float x: horizontal position.
    :param float y: vertical position.
    """
    def __init__(self, x, y):
        # record when this position was received
        self._time = time()
        self._x = self._clamped(float(x))
        self._y = self._clamped(float(y))
        # polar values are derived lazily and cached
        self._angle = None
        self._distance = None

    def _clamped(self, v):
        # restrict v to the closed interval [-1, 1]
        return min(1, max(-1, v))

    @property
    def x(self):
        """Horizontal position: -1 far left, 0 centre, 1 far right."""
        return self._x

    @property
    def y(self):
        """Vertical position: -1 bottom, 0 centre, 1 top."""
        return self._y

    @property
    def angle(self):
        """
        Angle from centre in degrees: 0 is straight up, 0..180 runs
        clockwise, -180..0 runs anti-clockwise.
        """
        if self._angle is None:
            self._angle = degrees(atan2(self._x, self._y))
        return self._angle

    @property
    def distance(self):
        """Distance from centre; the Blue Dot radius is 1."""
        if self._distance is None:
            self._distance = self._clamped(hypot(self._x, self._y))
        return self._distance

    @property
    def middle(self):
        """``True`` when the position falls within the inner half of the dot."""
        return self.distance <= 0.5

    @property
    def top(self):
        """``True`` when the position is in the outer ring, pointing up."""
        return not self.middle and -45 < self.angle <= 45

    @property
    def right(self):
        """``True`` when the position is in the outer ring, pointing right."""
        return not self.middle and 45 < self.angle <= 135

    @property
    def bottom(self):
        """``True`` when the position is in the outer ring, pointing down."""
        return not self.middle and not -135 < self.angle <= 135

    @property
    def left(self):
        """``True`` when the position is in the outer ring, pointing left."""
        return not self.middle and -135 < self.angle <= -45

    @property
    def time(self):
        """
        When this position was received from the Blue Dot app (not when
        it was sent).
        """
        return self._time
class BlueDotInteraction(object):
    """
    One press-to-release interaction with the Blue Dot.

    An interaction is *active* while the dot is still held down and
    becomes inactive once :meth:`released` is called.

    :param BlueDotPosition pressed_position:
        The position where the Blue Dot was pressed.
    """
    def __init__(self, pressed_position):
        self._active = True
        self._positions = [pressed_position]

    @property
    def active(self):
        """``True`` while the Blue Dot has not yet been released."""
        return self._active

    @property
    def positions(self):
        """
        Every :class:`BlueDotPosition` recorded so far: the press first,
        each movement in between, and (once inactive) the release last.
        """
        return self._positions

    @property
    def pressed_position(self):
        """The position where the interaction started."""
        return self._positions[0]

    @property
    def released_position(self):
        """
        The position where the interaction ended, or ``None`` while it is
        still active.
        """
        return None if self._active else self._positions[-1]

    @property
    def current_position(self):
        """
        The most recent position; for an inactive interaction this is the
        release position.
        """
        return self._positions[-1]

    @property
    def previous_position(self):
        """
        The next-to-last position, or ``None`` if only one position has
        been recorded.
        """
        if len(self._positions) > 1:
            return self._positions[-2]
        return None

    @property
    def duration(self):
        """
        Seconds between the press and either now (while active) or the
        release (once inactive).
        """
        start = self.pressed_position.time
        if self._active:
            return time() - start
        return self.released_position.time - start

    @property
    def distance(self):
        """Total path length travelled over the whole interaction."""
        total = 0
        for earlier, later in zip(self._positions, self._positions[1:]):
            total += hypot(later.x - earlier.x, later.y - earlier.y)
        return total

    def moved(self, moved_position):
        """Record a movement while the Blue Dot is still held down."""
        if self._active:
            self._positions.append(moved_position)

    def released(self, released_position):
        """
        Record the release position and finish the interaction.

        :param BlueDotPosition released_position:
            The position where the Blue Dot was released.
        """
        self._active = False
        self._positions.append(released_position)
class BlueDotSwipe(object):
    """
    Interprets a Blue Dot interaction as a swipe.

    A swipe is *valid* when the interaction has finished and the
    straight-line speed between press and release exceeds the speed
    threshold.

    :param BlueDotInteraction interaction:
        The interaction to interpret.
    """
    def __init__(self, interaction):
        self._interaction = interaction
        self._speed_threshold = 2
        self._angle = None
        self._distance = None
        self._valid = self._is_valid_swipe()

    def _is_valid_swipe(self):
        # Validity is judged on speed, so a short fast flick counts the
        # same as a long slower drag.
        self._speed = self.distance / self.interaction.duration
        return (not self.interaction.active) and self._speed > self._speed_threshold

    @property
    def interaction(self):
        """The underlying :class:`BlueDotInteraction`."""
        return self._interaction

    @property
    def valid(self):
        """``True`` when the interaction qualifies as a swipe."""
        return self._valid

    @property
    def distance(self):
        """Straight-line distance between the press and release positions."""
        # only the end points are used, not the full path length
        if self._distance is None:
            dx = self.interaction.released_position.x - self.interaction.pressed_position.x
            dy = self.interaction.released_position.y - self.interaction.pressed_position.y
            self._distance = hypot(dx, dy)
        return self._distance

    @property
    def angle(self):
        """Angle between the press and release positions, 0 being up."""
        if self._angle is None:
            dx = self.interaction.released_position.x - self.interaction.pressed_position.x
            dy = self.interaction.released_position.y - self.interaction.pressed_position.y
            self._angle = degrees(atan2(dx, dy))
        return self._angle

    @property
    def speed(self):
        """Swipe speed in Blue Dot radii per second."""
        return self._speed

    @property
    def up(self):
        """``True`` for a valid swipe pointing up."""
        return self.valid and -45 < self.angle <= 45

    @property
    def down(self):
        """``True`` for a valid swipe pointing down."""
        return self.valid and not -135 < self.angle <= 135

    @property
    def left(self):
        """``True`` for a valid swipe pointing left."""
        return self.valid and -135 < self.angle <= -45

    @property
    def right(self):
        """``True`` for a valid swipe pointing right."""
        return self.valid and 45 < self.angle <= 135
class BlueDotRotation(object):
    def __init__(self, interaction, no_of_segments):
        """
        Represents a Blue Dot rotation.

        The outer ring of the dot is divided into ``no_of_segments`` equal
        segments; a rotation is registered when the last two positions of
        the interaction both lie in the outer ring and fall in adjacent
        segments (including the wrap-around between first and last).

        :param BlueDotInteraction interaction:
            The interaction to inspect for a rotation.
        """
        # -1 = anti-clockwise, 0 = no rotation, 1 = clockwise
        self._value = 0
        self._clockwise = False
        self._anti_clockwise = False
        self._previous_segment = 0
        self._current_segment = 0
        prev_pos = interaction.previous_position
        pos = interaction.current_position
        # need at least two positions...
        if prev_pos is None:
            return
        # ...and both must be in the outer ring of the dot
        if prev_pos.distance <= 0.5 or pos.distance <= 0.5:
            return
        segment_width = 360 / no_of_segments
        self._previous_segment = int((prev_pos.angle + 180) / segment_width) + 1
        self._current_segment = int((pos.angle + 180) / segment_width) + 1
        delta = self._previous_segment - self._current_segment
        if delta == -1:
            # advanced one segment clockwise
            self._value = 1
        elif delta == 1:
            # advanced one segment anti-clockwise
            self._value = -1
        elif delta == no_of_segments - 1:
            # clockwise wrap-around from the last segment to the first
            self._value = 1
        elif delta == 1 - no_of_segments:
            # anti-clockwise wrap-around from the first segment to the last
            self._value = -1

    @property
    def valid(self):
        """``True`` if the Blue Dot was rotated."""
        return self._value != 0

    @property
    def value(self):
        """0 for no rotation, -1 for anti-clockwise, 1 for clockwise."""
        return self._value

    @property
    def anti_clockwise(self):
        """``True`` if the Blue Dot was rotated anti-clockwise."""
        return self._value == -1

    @property
    def clockwise(self):
        """``True`` if the Blue Dot was rotated clockwise."""
        return self._value == 1
class BlueDot(object):
"""
Interacts with a Blue Dot client application, communicating when and where it
has been pressed, released or held.
This class starts an instance of :class:`.btcomm.BluetoothServer`
which manages the connection with the Blue Dot client.
This class is intended for use with the Blue Dot client application.
The following example will print a message when the Blue Dot is pressed::
from bluedot import BlueDot
bd = BlueDot()
bd.wait_for_press()
print("The blue dot was pressed")
:param str device:
The Bluetooth device the server should use, the default is "hci0", if
your device only has 1 Bluetooth adapter this shouldn't need to be changed.
:param int port:
The Bluetooth port the server should use, the default is 1, and under
normal use this should never need to change.
:param bool auto_start_server:
If ``True`` (the default), the Bluetooth server will be automatically
started on initialisation; if ``False``, the method :meth:`start` will
need to be called before connections will be accepted.
:param bool power_up_device:
If ``True``, the Bluetooth device will be powered up (if required) when the
server starts. The default is ``False``.
Depending on how Bluetooth has been powered down, you may need to use :command:`rfkill`
to unblock Bluetooth to give permission to bluez to power on Bluetooth::
sudo rfkill unblock bluetooth
:param bool print_messages:
If ``True`` (the default), server status messages will be printed stating
when the server has started and when clients connect / disconect.
"""
def __init__(self,
device = "hci0",
port = 1,
auto_start_server = True,
power_up_device = False,
print_messages = True):
self._data_buffer = ""
self._device = device
self._port = port
self._power_up_device = power_up_device
self._print_messages = print_messages
self._is_pressed = False
self._is_connected_event = Event()
self._is_pressed_event = Event()
self._is_released_event = Event()
self._is_moved_event = Event()
self._is_swiped_event = Event()
self._is_double_pressed_event = Event()
self._check_protocol_event = Event()
self._waiting_for_press = Event()
self._when_pressed = None
self._when_pressed_background = False
self._when_double_pressed = None
self._when_double_pressed_background = False
self._when_released = None
self._when_released_background = False
self._when_moved = None
self._when_moved_background = False
self._when_swiped = None
self._when_swiped_background = False
self._when_rotated = None
self._when_rotated_background = False
self._when_client_connects = None
self._when_client_connects_background = False
self._when_client_disconnects = None
self._when_client_disconnects_background = False
self._position = None
self._interaction = None
self._double_press_time = 0.3
self._rotation_segments = 8
self._color = BLUE
self._square = False
self._border = False
self._visible = True
self._create_server()
if auto_start_server:
self.start()
# --- read-only configuration / server accessors ---
@property
def device(self):
    """
    The Bluetooth device the server is using. This defaults to "hci0".
    """
    return self._device

@property
def port(self):
    """
    The port the server is using. This defaults to 1.
    """
    return self._port

@property
def server(self):
    """
    The :class:`.btcomm.BluetoothServer` instance that is being used to communicate
    with clients.
    """
    return self._server

@property
def adapter(self):
    """
    The :class:`.btcomm.BluetoothAdapter` instance that is being used.
    """
    return self._server.adapter

@property
def paired_devices(self):
    """
    Returns a sequence of devices paired with this adapter
    :code:`[(mac_address, name), (mac_address, name), ...]`::

        bd = BlueDot()
        devices = bd.paired_devices
        for d in devices:
            device_address = d[0]
            device_name = d[1]
    """
    return self._server.adapter.paired_devices
# --- runtime state accessors ---
@property
def is_connected(self):
    """
    Returns ``True`` if a Blue Dot client is connected.
    """
    return self._is_connected_event.is_set()

@property
def is_pressed(self):
    """
    Returns ``True`` if the Blue Dot is pressed (or held).
    """
    return self._is_pressed

@property
def value(self):
    """
    Returns a 1 if the Blue Dot is pressed, 0 if released.
    """
    return 1 if self.is_pressed else 0

@property
def values(self):
    """
    Returns an infinite generator constantly yielding the current value.
    """
    while True:
        yield self.value

@property
def position(self):
    """
    Returns an instance of :class:`BlueDotPosition` representing the
    current or last position the Blue Dot was pressed, held or
    released.

    .. note::
        If the Blue Dot is released (and inactive), :attr:`position` will
        return the position where it was released, until it is pressed
        again. If the Blue Dot has never been pressed :attr:`position` will
        return ``None``.
    """
    return self._position

@property
def interaction(self):
    """
    Returns an instance of :class:`BlueDotInteraction` representing the
    current or last interaction with the Blue Dot.

    .. note::
        If the Blue Dot is released (and inactive), :attr:`interaction`
        will return the interaction when it was released, until it is
        pressed again. If the Blue Dot has never been pressed
        :attr:`interaction` will return ``None``.
    """
    return self._interaction
# --- press / double-press callback configuration ---
@property
def when_pressed(self):
    """
    Sets or returns the function which is called when the Blue Dot is pressed.

    The function should accept 0 or 1 parameters, if the function accepts 1 parameter an
    instance of :class:`BlueDotPosition` will be returned representing where the Blue Dot was pressed.

    The following example will print a message to the screen when the button is pressed::

        from bluedot import BlueDot

        def dot_was_pressed():
            print("The Blue Dot was pressed")

        bd = BlueDot()
        bd.when_pressed = dot_was_pressed

    This example shows how the position of where the dot was pressed can be obtained::

        from bluedot import BlueDot

        def dot_was_pressed(pos):
            print("The Blue Dot was pressed at pos x={} y={}".format(pos.x, pos.y))

        bd = BlueDot()
        bd.when_pressed = dot_was_pressed

    The function will be run in the same thread and block, to run in a separate
    thread use `set_when_pressed(function, background=True)`
    """
    return self._when_pressed

@when_pressed.setter
def when_pressed(self, value):
    self.set_when_pressed(value)

def set_when_pressed(self, callback, background=False):
    """
    Sets the function which is called when the Blue Dot is pressed.

    :param function callback:
        The function to call, setting to `None` will stop the callback.

    :param bool background:
        If set to `True` the function will be run in a separate thread
        and it will return immediately. The default is `False`.
    """
    self._when_pressed = callback
    self._when_pressed_background = background

@property
def when_double_pressed(self):
    """
    Sets or returns the function which is called when the Blue Dot is double pressed.

    The function should accept 0 or 1 parameters, if the function accepts 1 parameter an
    instance of :class:`BlueDotPosition` will be returned representing where the Blue Dot was
    pressed the second time.

    The function will be run in the same thread and block, to run in a separate
    thread use `set_when_double_pressed(function, background=True)`

    .. note::
        The double press event is fired before the 2nd press event e.g. events would be
        appear in the order, pressed, released, double pressed, pressed.
    """
    return self._when_double_pressed

@when_double_pressed.setter
def when_double_pressed(self, value):
    self.set_when_double_pressed(value)

def set_when_double_pressed(self, callback, background=False):
    """
    Sets the function which is called when the Blue Dot is double pressed.

    :param function callback:
        The function to call, setting to `None` will stop the callback.

    :param bool background:
        If set to `True` the function will be run in a separate thread
        and it will return immediately. The default is `False`.
    """
    self._when_double_pressed = callback
    self._when_double_pressed_background = background

@property
def double_press_time(self):
    """
    Sets or returns the time threshold in seconds for a double press. Defaults to 0.3.
    """
    return self._double_press_time

@double_press_time.setter
def double_press_time(self, value):
    self._double_press_time = value
# --- release / move / swipe callback configuration ---
@property
def when_released(self):
    """
    Sets or returns the function which is called when the Blue Dot is released.

    The function should accept 0 or 1 parameters, if the function accepts 1 parameter an
    instance of :class:`BlueDotPosition` will be returned representing where the Blue Dot was held
    when it was released.

    The function will be run in the same thread and block, to run in a separate
    thread use `set_when_released(function, background=True)`
    """
    return self._when_released

@when_released.setter
def when_released(self, value):
    self.set_when_released(value)

def set_when_released(self, callback, background=False):
    """
    Sets the function which is called when the Blue Dot is released.

    :param function callback:
        The function to call, setting to `None` will stop the callback.

    :param bool background:
        If set to `True` the function will be run in a separate thread
        and it will return immediately. The default is `False`.
    """
    self._when_released = callback
    self._when_released_background = background

@property
def when_moved(self):
    """
    Sets or returns the function which is called when the position the Blue Dot is pressed is moved.

    The function should accept 0 or 1 parameters, if the function accepts 1 parameter an
    instance of :class:`BlueDotPosition` will be returned representing the new position of where the
    Blue Dot is held.

    The function will be run in the same thread and block, to run in a separate
    thread use `set_when_moved(function, background=True)`
    """
    return self._when_moved

@when_moved.setter
def when_moved(self, value):
    self.set_when_moved(value)

def set_when_moved(self, callback, background=False):
    """
    Sets the function which is called when the position the Blue Dot is pressed is moved.

    :param function callback:
        The function to call, setting to `None` will stop the callback.

    :param bool background:
        If set to `True` the function will be run in a separate thread
        and it will return immediately. The default is `False`.
    """
    self._when_moved = callback
    self._when_moved_background = background

@property
def when_swiped(self):
    """
    Sets or returns the function which is called when the Blue Dot is swiped.

    The function should accept 0 or 1 parameters, if the function accepts 1 parameter an
    instance of :class:`BlueDotSwipe` will be returned representing the how the Blue Dot was
    swiped.

    The function will be run in the same thread and block, to run in a separate
    thread use `set_when_swiped(function, background=True)`
    """
    return self._when_swiped

@when_swiped.setter
def when_swiped(self, value):
    self.set_when_swiped(value)

def set_when_swiped(self, callback, background=False):
    """
    Sets the function which is called when the position the Blue Dot is swiped.

    :param function callback:
        The function to call, setting to `None` will stop the callback.

    :param bool background:
        If set to `True` the function will be run in a separate thread
        and it will return immediately. The default is `False`.
    """
    self._when_swiped = callback
    self._when_swiped_background = background
# --- rotation and client connect/disconnect callback configuration ---
@property
def rotation_segments(self):
    """
    Sets or returns the number of virtual segments the Blue Dot is split into for rotating.
    Defaults to 8.
    """
    return self._rotation_segments

@rotation_segments.setter
def rotation_segments(self, value):
    self._rotation_segments = value

@property
def when_rotated(self):
    """
    Sets or returns the function which is called when the Blue Dot is rotated (like an
    iPod clock wheel).

    The function should accept 0 or 1 parameters, if the function accepts 1 parameter an
    instance of :class:`BlueDotRotation` will be returned representing how the Blue Dot was
    rotated.

    The function will be run in the same thread and block, to run in a separate
    thread use `set_when_rotated(function, background=True)`
    """
    return self._when_rotated

@when_rotated.setter
def when_rotated(self, value):
    self.set_when_rotated(value)

def set_when_rotated(self, callback, background=False):
    """
    Sets the function which is called when the position the Blue Dot is rotated (like an
    iPod clock wheel).

    :param function callback:
        The function to call, setting to `None` will stop the callback.

    :param bool background:
        If set to `True` the function will be run in a separate thread
        and it will return immediately. The default is `False`.
    """
    self._when_rotated = callback
    self._when_rotated_background = background

@property
def when_client_connects(self):
    """
    Sets or returns the function which is called when a Blue Dot connects.

    The function will be run in the same thread and block, to run in a separate
    thread use `set_when_client_connects(function, background=True)`
    """
    return self._when_client_connects

@when_client_connects.setter
def when_client_connects(self, value):
    self.set_when_client_connects(value)

def set_when_client_connects(self, callback, background=False):
    """
    Sets the function which is called when a Blue Dot connects.

    :param function callback:
        The function to call, setting to `None` will stop the callback.

    :param bool background:
        If set to `True` the function will be run in a separate thread
        and it will return immediately. The default is `False`.
    """
    self._when_client_connects = callback
    self._when_client_connects_background = background

@property
def when_client_disconnects(self):
    """
    Sets or returns the function which is called when a Blue Dot disconnects.

    The function will be run in the same thread and block, to run in a separate
    thread use `set_when_client_disconnects(function, background=True)`
    """
    return self._when_client_disconnects

@when_client_disconnects.setter
def when_client_disconnects(self, value):
    self.set_when_client_disconnects(value)

def set_when_client_disconnects(self, callback, background=False):
    """
    Sets the function which is called when a Blue Dot disconnects.

    :param function callback:
        The function to call, setting to `None` will stop the callback.

    :param bool background:
        If set to `True` the function will be run in a separate thread
        and it will return immediately. The default is `False`.
    """
    self._when_client_disconnects = callback
    self._when_client_disconnects_background = background
# --- appearance and miscellaneous configuration ---
@property
def print_messages(self):
    """
    When set to ``True`` results in messages relating to the status of the Bluetooth server
    to be printed.
    """
    return self._print_messages

@print_messages.setter
def print_messages(self, value):
    self._print_messages = value

@property
def running(self):
    """
    Returns a ``True`` if the server is running.
    """
    return self._server.running

@property
def color(self):
    """
    Sets or returns the color of the dot. Defaults to BLUE.

    An instance of :class:`.colors.Color` is returned.

    Value can be set as a :class:`.colors.Color` object, a hex color value
    in the format `#rrggbb` or `#rrggbbaa`, a tuple of `(red, green, blue)`
    or `(red, green, blue, alpha)` values between `0` & `255` or a text
    description of the color, e.g. "red".

    A dictionary of available colors can be obtained from `bluedot.COLORS`.
    """
    return self._color

@color.setter
def color(self, value):
    self._color = parse_color(value)
    # push the new appearance to any connected client
    self._send_dot_config()

@property
def square(self):
    """
    When set to `True` the 'dot' is made square. Default is `False`.
    """
    return self._square

@square.setter
def square(self, value):
    self._square = value
    self._send_dot_config()

@property
def border(self):
    """
    When set to `True` adds a border to the dot. Default is `False`.
    """
    return self._border

@border.setter
def border(self, value):
    self._border = value
    self._send_dot_config()

@property
def visible(self):
    """
    When set to ``False`` the dot is hidden. Default is ``True``.

    .. note::
        Events (press, release, moved) are still sent from the dot
        when it is not visible.
    """
    return self._visible

@visible.setter
def visible(self, value):
    self._visible = value
    self._send_dot_config()
def start(self):
    """
    Start the :class:`.btcomm.BluetoothServer` if it is not already running. By default the server is started at
    initialisation.
    """
    self._server.start()
    self._print_message("Server started {}".format(self.server.server_address))
    self._print_message("Waiting for connection")

def _create_server(self):
    # Build the underlying Bluetooth server; it is started separately by
    # start() so auto_start_server can be honoured.
    self._server = BluetoothServer(
        self._data_received,
        when_client_connects = self._client_connected,
        when_client_disconnects = self._client_disconnected,
        device = self.device,
        port = self.port,
        power_up_device = self._power_up_device,
        auto_start = False)

def stop(self):
    """
    Stop the Bluetooth server.
    """
    self._server.stop()

def wait_for_connection(self, timeout = None):
    """
    Waits until a Blue Dot client connects.
    Returns ``True`` if a client connects.

    :param float timeout:
        Number of seconds to wait for a connection; if ``None`` (the default),
        it will wait indefinitely for a connection from a Blue Dot client.
    """
    return self._is_connected_event.wait(timeout)

def wait_for_press(self, timeout = None):
    """
    Waits until a Blue Dot is pressed.
    Returns ``True`` if the Blue Dot was pressed.

    :param float timeout:
        Number of seconds to wait for a Blue Dot to be pressed, if ``None``
        (the default), it will wait indefinitely.
    """
    return self._is_pressed_event.wait(timeout)

def wait_for_double_press(self, timeout = None):
    """
    Waits until a Blue Dot is double pressed.
    Returns ``True`` if the Blue Dot was double pressed.

    :param float timeout:
        Number of seconds to wait for a Blue Dot to be double pressed, if ``None``
        (the default), it will wait indefinitely.
    """
    return self._is_double_pressed_event.wait(timeout)

def wait_for_release(self, timeout = None):
    """
    Waits until a Blue Dot is released.
    Returns ``True`` if the Blue Dot was released.

    :param float timeout:
        Number of seconds to wait for a Blue Dot to be released, if ``None``
        (the default), it will wait indefinitely.
    """
    return self._is_released_event.wait(timeout)

def wait_for_move(self, timeout = None):
    """
    Waits until the position where the Blue Dot is pressed is moved.
    Returns ``True`` if the position pressed on the Blue Dot was moved.

    :param float timeout:
        Number of seconds to wait for the position that the Blue Dot
        is pressed to move, if ``None`` (the default), it will wait indefinitely.
    """
    return self._is_moved_event.wait(timeout)

def wait_for_swipe(self, timeout = None):
    """
    Waits until the Blue Dot is swiped.
    Returns ``True`` if the Blue Dot was swiped.

    :param float timeout:
        Number of seconds to wait for the Blue Dot to be swiped, if ``None``
        (the default), it will wait indefinitely.
    """
    return self._is_swiped_event.wait(timeout)

def allow_pairing(self, timeout = 60):
    """
    Allow a Bluetooth device to pair with your Raspberry Pi by putting the adapter
    into discoverable and pairable mode.

    :param int timeout:
        The time in seconds the adapter will remain pairable. If set to ``None``
        the device will be discoverable and pairable indefinitely.
    """
    self.server.adapter.allow_pairing(timeout = timeout)
def _client_connected(self):
    """Called by the Bluetooth server when a client connects: flags the
    connection, sends the dot configuration, runs the user callback and
    waits for the client to announce its protocol version."""
    self._is_connected_event.set()
    self._print_message("Client connected {}".format(self.server.client_address))
    self._send_dot_config()
    if self.when_client_connects:
        self._process_callback(self.when_client_connects, None, self._when_client_connects_background)
    # wait for the protocol version to be checked.
    if not self._check_protocol_event.wait(CHECK_PROTOCOL_TIMEOUT):
        self._print_message("Protocol version not received from client - update the client to the latest version.")
        self._server.disconnect_client()
def _client_disconnected(self):
    """Called by the Bluetooth server when a client disconnects: clears the
    connection/protocol flags and runs the user callback."""
    self._is_connected_event.clear()
    self._check_protocol_event.clear()
    self._print_message("Client disconnected")
    if self.when_client_disconnects:
        self._process_callback(self.when_client_disconnects, None, self._when_client_disconnects_background)
def _data_received(self, data):
    """Accumulate incoming Bluetooth data and dispatch every complete,
    newline-terminated command; any trailing partial command stays buffered."""
    self._data_buffer += data
    # split() leaves the text after the final "\n" (possibly empty) as the
    # last element - that is the unfinished remainder to keep for next time.
    *complete, remainder = self._data_buffer.split("\n")
    self._data_buffer = remainder
    if complete:
        self._process_commands(complete)
def _process_commands(self, commands):
    """
    Parse and dispatch a list of complete command strings.

    Each command is "<operation>,<param1>,<param2>" where operation
    0 = released, 1 = pressed, 2 = moved (params are the x,y position)
    and 3 = protocol check (params are protocol version and client name).
    """
    for command in commands:
        try:
            position = None
            operation, param1, param2 = command.split(",")
            if operation != "3":
                position = BlueDotPosition(param1, param2)
                self._position = position
        except ValueError:
            # ignore the occasional corrupt command; XXX warn here?
            pass
        else:
            #dot released
            if operation == "0":
                self._released(position)
            #dot pressed
            elif operation == "1":
                self._pressed(position)
            #dot pressed position moved
            elif operation == "2":
                self._moved(position)
            #protocol check
            elif operation == "3":
                self._check_protocol_version(param1, param2)
def _pressed(self, position):
    """Handle a "pressed" command: update state, pulse the pressed event,
    check for a double press and begin a new interaction."""
    self._is_pressed = True
    # set-then-clear pulses the event so wait_for_press() callers wake once
    self._is_pressed_event.set()
    self._is_pressed_event.clear()
    # double-press detection must run before the old interaction is replaced
    self._double_pressed(position)
    #create new interaction
    self._interaction = BlueDotInteraction(position)
    self._process_callback(self.when_pressed, position, self._when_pressed_background)
def _double_pressed(self, position):
    """Pulse the double-press event and run the callback if this press
    happened within ``double_press_time`` of the previous, completed
    interaction being released."""
    #was there a previous interaction
    if self._interaction:
        # was the previous interaction complete (i.e. had it been released)
        if not self._interaction.active:
            # was it less than the time threshold (0.3 seconds)
            if self._interaction.duration < self._double_press_time:
                #was the dot pressed again in less than the threshold
                if time() - self._interaction.released_position.time < self._double_press_time:
                    self._is_double_pressed_event.set()
                    self._is_double_pressed_event.clear()
                    self._process_callback(self.when_double_pressed, position, self._when_double_pressed_background)
def _released(self, position):
    """Handle a "released" command: update state, pulse the released event,
    close the current interaction, run callbacks and check for a swipe."""
    self._is_pressed = False
    self._is_released_event.set()
    self._is_released_event.clear()
    # Robustness: a release can arrive with no open interaction (e.g. the
    # matching press command was corrupt and discarded by
    # _process_commands); previously this raised AttributeError on None.
    if self._interaction is not None:
        self._interaction.released(position)
    self._process_callback(self.when_released, position, self._when_released_background)
    if self._interaction is not None:
        self._process_swipe()
def _moved(self, position):
    """Handle a "moved" command: pulse the moved event, extend the current
    interaction, run callbacks and check for rotation."""
    self._is_moved_event.set()
    self._is_moved_event.clear()
    # Robustness: ignore a move with no open interaction (press command
    # lost/corrupt); previously this raised AttributeError on None.
    if self._interaction is not None:
        self._interaction.moved(position)
    self._process_callback(self.when_moved, position, self._when_moved_background)
    if self.when_rotated and self._interaction is not None:
        self._process_rotation()
def _process_callback(self, callback, arg, background):
    """
    Invoke *callback* in a :class:`WrapThread`, passing *arg* only if the
    callback accepts a positional argument; block until it finishes unless
    *background* is ``True``.

    NOTE(review): getfullargspec raises TypeError for callables without an
    inspectable signature (e.g. some builtins) - assumed callers pass plain
    functions/bound methods; confirm before widening the accepted types.
    """
    if callback:
        args_expected = getfullargspec(callback).args
        no_args_expected = len(args_expected)
        if len(args_expected) > 0:
            # if someone names the first arg of a class function to something
            # other than self, this will fail! or if they name the first argument
            # of a non class function to self this will fail!
            if args_expected[0] == "self":
                no_args_expected -= 1
        if no_args_expected == 0:
            call_back_t = WrapThread(target=callback)
        else:
            call_back_t = WrapThread(target=callback, args=(arg, ))
        call_back_t.start()
        # if this callback is not running in the background wait for it
        if not background:
            call_back_t.join()
def _process_swipe(self):
    """Fire the swipe event and callback if the interaction that just
    finished qualifies as a swipe."""
    swipe = BlueDotSwipe(self._interaction)
    if not swipe.valid:
        return
    # pulse the event so wait_for_swipe() callers wake up
    self._is_swiped_event.set()
    self._is_swiped_event.clear()
    if self.when_swiped:
        self._process_callback(self.when_swiped, swipe, self._when_swiped_background)
def _process_rotation(self):
    """Run the rotation callback if the latest movement crossed into a new
    rotation segment."""
    rotation = BlueDotRotation(self._interaction, self._rotation_segments)
    if not rotation.valid:
        return
    self._process_callback(self.when_rotated, rotation, self._when_rotated_background)
def _check_protocol_version(self, protocol_version, client_name):
    """
    Validate the protocol version announced by the client (operation 3),
    disconnecting it with an explanatory message on a mismatch.

    :param str protocol_version: version number string sent by the client.
    :param str client_name: the name the client reported for itself.
    :raises ValueError: if *protocol_version* is not numeric.
    """
    try:
        version_no = int(protocol_version)
    except ValueError:
        raise ValueError("protocol version number must be numeric, received {}.".format(protocol_version))
    # signal _client_connected() that a protocol message was received
    self._check_protocol_event.set()
    if version_no == PROTOCOL_VERSION:
        return
    # Build the mismatch message in one pass. The original formatted a
    # shared template twice with differing argument counts, which was
    # fragile; the resulting text is unchanged.
    msg = ("Client '{}' was using protocol version {}, bluedot python "
           "library is using version {}. ").format(client_name, protocol_version, PROTOCOL_VERSION)
    if version_no > PROTOCOL_VERSION:
        msg += "Update the bluedot python library, using 'sudo pip3 --upgrade install bluedot'."
    else:
        msg += "Update the {}.".format(client_name)
    self._server.disconnect_client()
    print(msg)
# called whenever the dot is changed or a client connects
def _send_dot_config(self):
    """Send the current dot appearance to the connected client using
    protocol operation 4: "4,<rgba color>,<square>,<border>,<visible>\\n"."""
    if self.is_connected:
        self._server.send("4,{},{},{},{}\n".format(
            self._color.str_rgba,
            int(self._square),
            int(self._border),
            int(self._visible)))
def _print_message(self, message):
    """Print *message* unless status messages have been disabled."""
    if not self.print_messages:
        return
    print(message)
| StarcoderdataPython |
3213833 | <reponame>owen198/kslab-atrisk-prediction
# coding: utf-8
import matplotlib.pyplot as plt
def generate_boxplot(data, title, datasets_small_name):
    """
    Draw a black-and-white boxplot (one box per dataset) and save it to
    ``result/<title>.png``.

    :param data: sequence of value sequences, one per dataset.
    :param title: figure title; also used as the output file name.
    :param datasets_small_name: x-axis tick labels, one per dataset.
    """
    # the Figure handle is not needed afterwards; plt keeps it current
    plt.figure(figsize=(8, 6))
    bplot = plt.boxplot(data,
                        notch=False,  # box instead of notch shape
                        vert=True)    # vertical box alignment
    plt.xticks([y + 1 for y in range(len(data))], datasets_small_name)
    plt.xlabel('Dataset')
    # recolor every component (boxes, whiskers, caps, medians, fliers) black
    for component_lines in bplot.values():
        for line in component_lines:
            line.set_color('black')
    plt.title(title)
    plt.savefig('result/' + title + '.png')
| StarcoderdataPython |
5122067 | <reponame>MickaelRigault/pysedm
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from glob import glob
#################################
#
# MAIN
#
#################################
# Command-line entry point: builds pysedm cube-builder products (trace match,
# hexagonal grid, wavelength solution, flat field, e3d cubes) for one night.
if __name__ == "__main__":
    import argparse
    from pysedm.script.ccd_to_cube import *
    # ================= #
    #   Options         #
    # ================= #
    parser = argparse.ArgumentParser(
        description="""pysedm pipeline to build the cubebuilder objects
""", formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('infile', type=str, default=None,
                        help='The date YYYYMMDD')
    parser.add_argument('--rebuild', action="store_true", default=False,
                        help='If the object you want to build already exists, nothing happens except if this is set')
    parser.add_argument('--noguider', action="store_true", default=False,
                        help='Avoid having a guider stack created. [part of the --build]')
    parser.add_argument('--solvewcs', action="store_true", default=False,
                        help='Shall the wcs solution of the guider be solved (ignored if --noguider). [part of the --build]')
    parser.add_argument('--ncore', type=int, default=None,
                        help='Number of cores to use for multiprocessing. ncore=1 means no multiprocessing.')
    # --------------- #
    #  Cube Building  #
    # --------------- #
    parser.add_argument('--build', type=str, default=None,
                        help='Build a e3d cube of the given target (accepting regex) or target list (csv) e.g. --build dome or --build dome,Hg,Cd')
    parser.add_argument('--quickbuild', action="store_true", default=False,
                        help='Faster cube extraction: No background No j-flexure, no i-flexure. Keyword "quickbuild" added to the cube.')
    parser.add_argument('--nobackground', action="store_true", default=False,
                        help='Shall the ccd background be 0 instead of the usual pipeline background?')
    parser.add_argument('--noflexure', action="store_true", default=False,
                        help='build cubes without flexure correction')
    parser.add_argument('--notraceflexure', action="store_true", default=False,
                        help='build cubes without trace flexure (j-ccd correction)')
    parser.add_argument('--buildbkgd', type=str, default=None,
                        help='Build a ccd background of the given target or target list (csv) e.g. --build dome or --build dome,Hg,Cd')
    parser.add_argument('--buildcal', type=str, default=None,
                        help='Build the flux calibrated e3d cubes. Set this to "*" to use the --build arguments.')
    parser.add_argument('--calsource', type=str, default=None,
                        help='The Inverse sensitivity spectrum used to calibrated the cubes. By default this uses the latest one.')
    # - Trace Matching
    parser.add_argument('--tracematch', action="store_true", default=False,
                        help='build the tracematch solution for the given night. This option saves masks (see tracematchnomasks)')
    parser.add_argument('--tracematchnomasks', action="store_true", default=False,
                        help='build te tracematch solution for the given night without saved the masks')
    # - Hexagonal Grid
    parser.add_argument('--hexagrid', action="store_true", default=False,
                        help='build the hexagonal grid (index<->qr<->xy) for the given night')
    # - Wavelength Solution
    parser.add_argument('--wavesol', action="store_true", default=False,
                        help='build the wavelength solution for the given night.')
    parser.add_argument('--spaxelrange', type=str, default="None",
                        help='Provide a range of spaxel indexe A,B ; only traces with index i>=A and i<B will be loaded. Indicated in saved filename.')
    parser.add_argument('--wavesoltest', type=str, default="None",
                        help='to be used with --wavesol. By setting --wavesoltest N one N random wavelength solution will be performed.')
    parser.add_argument('--wavesolplots', action="store_true", default=False,
                        help='Set this to save individual wavelength solution fit results')
    # ----------------- #
    #  Raw Calibration  #
    # ----------------- #
    parser.add_argument('--flat', action="store_true", default=False,
                        help='Build the flat fielding for the night [see flatref to the reference object]')
    parser.add_argument('--flatref', type=str, default="dome",
                        help='Build the flat fielding for the night ')
    parser.add_argument('--flatlbda', type=str, default="5000,8500",
                        help='The wavelength range for the flat field. Format: min,max [in Angstrom] ')
    # ----------------- #
    #  Short Cuts       #
    # ----------------- #
    parser.add_argument('--allcalibs', action="store_true", default=False,
                        help='')
    parser.add_argument('--allscience', action="store_true", default=False,
                        help='')
    parser.add_argument('--nofig', action="store_true", default=False,
                        help='')
    args = parser.parse_args()
    # Matplotlib
    # ================= #
    #   The Scripts     #
    # ================= #
    # --------- #
    #  Date     #
    # --------- #
    date = args.infile
    # ------------ #
    #  Short Cuts  #
    # ------------ #
    # --allcalibs implies building all calibration products plus the dome cube
    if args.allcalibs:
        args.tracematch = True
        args.hexagrid = True
        args.wavesol = True
        args.build = "dome"
        args.flat = True
    # ================= #
    #   Actions         #
    # ================= #
    # - Builds
    # quickbuild disables background and flexure corrections for speed
    if args.quickbuild:
        args.noflexure = True
        args.notraceflexure = True
        args.nobackground = True
        fileindex = "quickbuild"
    else:
        fileindex = ""
    if args.build is not None and len(args.build) >0:
        for target in args.build.split(","):
            build_night_cubes(date, target=target,
                              lamps=True, only_lamps=False, skip_calib=True,
                              fileindex=fileindex,
                              nobackground=bool(args.nobackground),
                              # - options
                              build_guider = False if args.noguider else True,
                              solve_wcs = args.solvewcs,
                              savefig = False if args.nofig else True,
                              flexure_corrected = False if args.noflexure else True,
                              traceflexure_corrected = False if args.notraceflexure else True,
                              ncore=args.ncore)
    if args.buildcal is not None:
        # "*" means: flux-calibrate the same targets as --build
        if args.buildcal=="*": args.buildcal=args.build
        if len(args.buildcal) >0:
            for target in args.buildcal.split(","):
                calibrate_night_cubes(date, target=target,
                                      lamps=True, only_lamps=False, skip_calib=True)
    # - Background
    if args.buildbkgd is not None and len(args.buildbkgd) > 0:
        for target in args.buildbkgd.split(","):
            build_backgrounds(date, target=target,
                              show_progress=True,
                              multiprocess=False, # Force no multiprocessing here
                              lamps=True, only_lamps=True, skip_calib=True,
                              ncore=args.ncore)
    # -----------
    #
    # -----------
    # - TraceMatch
    if args.tracematch or args.tracematchnomasks:
        build_tracematcher(date, save_masks= True if not args.tracematchnomasks else False,
                           rebuild=args.rebuild, ncore=args.ncore)
    # - Hexagonal Grid
    if args.hexagrid:
        build_hexagonalgrid(date)
    # - Wavelength Solution
    if args.wavesol:
        ntest = None if "None" in args.wavesoltest else int(args.wavesoltest)
        spaxelrange = None if "None" in args.spaxelrange else np.asarray(args.spaxelrange.split(","), dtype="int")
        build_wavesolution(date, ntest=ntest, use_fine_tuned_traces=False,
                           idxrange=spaxelrange,
                           lamps=["Hg","Cd","Xe"], saveindividuals=args.wavesolplots,
                           savefig = False if args.nofig else True,
                           rebuild=args.rebuild)
    # - Flat Fielding
    if args.flat:
        lbda_min,lbda_max = np.asarray(args.flatlbda.split(","), dtype="float")
        build_flatfield(date,
                        lbda_min=lbda_min,
                        lbda_max=lbda_max,
                        ref=args.flatref, build_ref=True,
                        savefig=~args.nofig, ncore=args.ncore)
        # Now calc stats
        # NOTE(review): the original indentation was lost; the dome trace
        # statistics below are assumed to belong to the --flat branch since
        # they rely on the dome trace products - confirm against upstream.
        from pysedm import ccd
        from pysedm.io import get_datapath
        import numpy as np
        dome = ccd.get_dome("dome.fits", background=0, load_sep=True)
        a, b = dome.sepobjects.get(["a", "b"]).T
        savefile = get_datapath(date) + "%s_dome_stats.txt" % date
        stat_f = open(savefile, "w")
        stat_f.write("NSpax: %d\n" % len(b))
        stat_f.write("MinWid: %.3f\n" % min(b))
        stat_f.write("MaxWid: %.3f\n" % max(b))
        stat_f.write("MedWid: %.3f\n" % np.nanmedian(b))
        stat_f.write("AvgWid: %.3f\n" % np.nanmean(b))
        stat_f.write("MinLen: %.3f\n" % min(a))
        stat_f.write("MaxLen: %.3f\n" % max(a))
        stat_f.write("MedLen: %.3f\n" % np.nanmedian(a))
        stat_f.write("AvgLen: %.3f\n" % np.nanmean(a))
        stat_f.close()
        print("nspax, min, avg, med, max Wid: %d, %.3f, %.3f, %.3f, %.3f" %
              (len(b), min(b), np.nanmean(b), np.nanmedian(b), max(b)))
| StarcoderdataPython |
3461332 | <filename>micromagneticmodel/energy/zeeman.py
import ubermagutil as uu
import discretisedfield as df
import ubermagutil.typesystem as ts
from .energyterm import EnergyTerm
@uu.inherit_docs
@ts.typesystem(H=ts.Parameter(descriptor=ts.Vector(size=3),
otherwise=df.Field),
wave=ts.Subset(sample_set={'sin', 'sinc'}, unpack=False),
f=ts.Scalar(positive=True),
t0=ts.Scalar(),
# time_dependence=ts.Typed(expected_type=callable),
tstep=ts.Scalar(positive=True),
tcl_strings=ts.Dictionary(
key_descriptor=ts.Subset(
sample_set=('proc', 'energy', 'type', 'script_args',
'script'), unpack=False),
value_descriptor=ts.Typed(expected_type=str))
)
class Zeeman(EnergyTerm):
r"""Zeeman energy term.
.. math::
w = -\mu_{0}M_\text{s} \mathbf{m} \cdot \mathbf{H}
Zeeman energy term allows defining time-dependent as well as
time-independent external magnetic field. If only external magnetic field
``H`` is passed, a time-constant field is defined. On the other hand, in
order to define a time-dependent field, ``wave`` must be passed as a
string. ``wave`` can be either ``'sine'`` or ``'sinc'``. If time-dependent
external magnetic field is defined, apart from ``wave``, ``f`` and ``t0``
must be passed. For ``wave='sine'``, energy density is:
.. math::
w = -\mu_{0}M_\text{s} \mathbf{m} \cdot \mathbf{H} \sin[2\pi
f(t-t_{0})]
whereas for ``wave='sinc'``, the energy density is:
.. math::
w = -\mu_{0}M_\text{s} \mathbf{m} \cdot \mathbf{H}
\text{sinc}[2\pi f(t-t_{0})]
and ``f`` is a cut-off frequency.
Parameters
----------
H : (3,) array_like, dict, discretisedfield.Field
If a single length-3 array_like (tuple, list, ``numpy.ndarray``) is
passed, which consists of ``numbers.Real``, a spatially constant
parameter is defined. For a spatially varying parameter, either a
dictionary, e.g. ``H={'region1': (0, 0, 3e6), 'region2': (0, 0,
-3e6)}`` (if the parameter is defined "per region") or
``discretisedfield.Field`` is passed.
wave : str
For a time dependent field, either ``'sine'`` or ``'sinc'`` is passed.
f : numbers.Real
(Cut-off) frequency in Hz.
t0 : numbers.Real
Time for adjusting the phase (time-shift) of a wave.
Examples
--------
1. Defining the Zeeman energy term using a vector.
>>> import micromagneticmodel as mm
...
>>> zeeman = mm.Zeeman(H=(0, 0, 1e6))
2. Defining the Zeeman energy term using dictionary.
>>> zeeman = mm.Zeeman(H={'region1': (0, 0, 1e6), 'region2': (0, 0, -1e6)})
3. Defining the Zeeman energy term using ``discretisedfield.Field``.
>>> import discretisedfield as df
...
>>> region = df.Region(p1=(0, 0, 0), p2=(5e-9, 5e-9, 10e-9))
>>> mesh = df.Mesh(region=region, n=(5, 5, 10))
>>> H = df.Field(mesh, dim=3, value=(1e6, -1e6, 0))
>>> zeeman = mm.Zeeman(H=H)
4. Defining the Zeeman energy term using a vector which changes as a sine
wave.
>>> zeeman = mm.Zeeman(H=(0, 0, 1e6), wave='sin', f=1e9, t0=0)
5. An attempt to define the Zeeman energy term using a wrong value.
>>> zeeman = mm.Zeeman(H=(0, -1e7)) # length-2 vector
Traceback (most recent call last):
...
ValueError: ...
"""
_allowed_attributes = ['H', 'wave', 'f', 't0', 'time_dependence', 'tstep',
'tcl_strings']
@property
def _reprlatex(self):
    """LaTeX string for this term's energy density.

    A time-dependent factor is appended when ``self.wave`` is ``'sin'``
    or ``'sinc'``; any other value yields the static Zeeman expression.
    """
    latex_by_wave = {
        'sin': (r'-\mu_{0}M_\text{s} \mathbf{m}'
                r'\cdot \mathbf{H} \sin[2 \pi f (t-t_{0})]'),
        'sinc': (r'-\mu_{0}M_\text{s} \mathbf{m} \cdot \mathbf{H}\, '
                 r'\text{sinc}[2 \pi f (t-t_{0})]'),
    }
    return latex_by_wave.get(
        self.wave, r'-\mu_{0}M_\text{s} \mathbf{m} \cdot \mathbf{H}')
def effective_field(self, m):
    """Effective field of this term; intentionally not implemented here.

    The computation is deferred to the micromagnetic calculator -- this
    method only reserves the interface.
    """
    raise NotImplementedError
| StarcoderdataPython |
#!/usr/bin/python
# -*- encoding: utf-8 -*-
from utils import config_loop, start_loop, set_ams, display_message
config_loop(gui=True)
from agent import Agent
from messages import ACLMessage
from aid import AID
class Teste(Agent):
    """Toy agent pair: the initiator greets the participant, who answers."""

    def __init__(self, aid):
        Agent.__init__(self, aid)

    def on_start(self):
        """Announce startup; the initiator additionally sends a greeting."""
        Agent.on_start(self)
        display_message(self.aid.name, "Hello World")
        if 'test_agent_initiator' not in self.aid.name:
            return
        greeting = ACLMessage(ACLMessage.INFORM)
        greeting.add_receiver('test_agent_participant')
        greeting.set_content('Hello Agent!')
        self.send(greeting)
        display_message(self.aid.name, 'Sending Message...')

    def react(self, message):
        """Log every incoming message; the participant replies to it."""
        Agent.react(self, message)
        display_message(self.aid.name, 'One message received')
        if 'test_agent_participant' in self.aid.name:
            reply = message.create_reply()
            reply.set_content('Hello to you too, Agent!')
            self.send(reply)
if __name__ == '__main__':
    # Wire two agents to a local AMS and run the event loop.
    set_ams('localhost', 8000)
    test_agent_initiator = Teste(AID('test_agent_initiator'))
    test_agent_participant = Teste(AID('test_agent_participant'))
    agents = list()
    # Parenthesized single-argument print() behaves identically under the
    # Python 2 print statement and Python 3; the bare ``print id(...)``
    # statement is a SyntaxError on Python 3.
    print(id(test_agent_initiator))
    print(id(test_agent_participant))
    agents.append(test_agent_participant)
    agents.append(test_agent_initiator)
    start_loop(agents)
243756 | # -*- coding: utf-8 -*-
"""
biothings_explorer.dispatcher
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module contains code that biothings_explorer use to communicate to and \
receive from APIs. It serves as a glue between "apicall" module and "api_output_parser" module.
"""
from .json_transformer import Transformer
class OutputParser:
    """Parse raw API responses into a unified dictionary structure.

    Glue between the "apicall" and "api_output_parser" layers: BioThings
    responses are aggregated per query, everything else is passed through a
    single JSON transform.
    """

    def __init__(self, res, mapping, batch_mode=False, api=None, api_type=None):
        self.api = api
        self.api_type = api_type
        self.response = res
        self.mapping = mapping
        self.batch_mode = batch_mode

    @staticmethod
    def _merge_transformed(target, transformed):
        """Merge a transformed JSON document into *target* in place.

        '@context'/'@type' entries overwrite; every other key accumulates
        its values in a list. Non-dict documents are ignored. (Extracted
        from the previously duplicated merge loops in the GET and POST
        parsers.)
        """
        if not isinstance(transformed, dict):
            return
        for key, value in transformed.items():
            if key in ("@context", "@type"):
                target[key] = value
            else:
                bucket = target.setdefault(key, [])
                if isinstance(value, list):
                    bucket += value
                else:
                    bucket.append(value)

    def parse_biothings_get_res(self):
        """Parse the API response from a BioThings API using GET.

        Returns None when the query matched nothing; otherwise a single
        merged dictionary over all hits.
        """
        if self.response['total'] == 0:
            return None
        new_res = {}
        for hit in self.response['hits']:
            self._merge_transformed(
                new_res, Transformer(hit, self.mapping).transform())
        return new_res

    def parse_biothings_post_res(self):
        """Parse the API response from a BioThings API using POST.

        Returns a dict keyed by the original query id.
        """
        new_res = {}
        for single_res in self.response:
            if not isinstance(single_res, dict):
                continue
            if single_res.get('notfound'):
                # Queried item not found: record an empty result unless the
                # same query id already produced something.
                if single_res['query'] not in new_res:
                    new_res[single_res['query']] = {}
                continue
            # The semmed and cord APIs are already structured to conform to
            # the biolink model, so no JSON transform is needed for them.
            if self.api[:4] in ['semm', 'cord']:
                transformed_json = single_res
            else:
                transformed_json = Transformer(single_res,
                                               self.mapping).transform()
            if single_res['query'] not in new_res:
                new_res[single_res['query']] = transformed_json
            else:
                self._merge_transformed(new_res[single_res['query']],
                                        transformed_json)
        return dict(new_res)

    def parse(self):
        """Dispatch to the right parser based on API type and call mode."""
        if not self.response:
            return None
        if self.api_type == 'biothings':
            if self.batch_mode:
                return self.parse_biothings_post_res()
            return self.parse_biothings_get_res()
        # Non-BioThings APIs: a single JSON transform is enough.
        return Transformer(self.response, self.mapping).transform()
| StarcoderdataPython |
3451444 | """
For a new client:
1) send FULL_DATASET (to erase remaining data)
2) send DIFFERENTIAL for each new live update
"""
import sys
import time

import iso8601
import zmq

from gtfs_realtime_pb2 import FeedMessage, FeedHeader
from utils import getVehiclePosition
KV6_ZMQ = "tcp://127.0.0.1:6006"

sequence = 0  # monotonically increasing id for every emitted feed entity
context = zmq.Context()
while True:
    # (Re)connect and subscribe to everything published on the KV6 socket.
    sys.stderr.write("Connecting to %s...\n" % (KV6_ZMQ))
    receiver = context.socket(zmq.SUB)
    receiver.connect(KV6_ZMQ)
    receiver.setsockopt(zmq.SUBSCRIBE, '')
    poll = zmq.Poller()
    poll.register(receiver, zmq.POLLIN)
    while True:
        sockets = dict(poll.poll(60000))
        if receiver in sockets and sockets[receiver] == zmq.POLLIN:
            kv6 = receiver.recv_json()
            feedmessage = FeedMessage()
            feedmessage.header.gtfs_realtime_version = "1.0"
            # BUG FIX: only ``FeedMessage`` was imported from
            # gtfs_realtime_pb2, so referencing the bare module name here
            # raised NameError at runtime; use the imported FeedHeader enum.
            feedmessage.header.incrementality = FeedHeader.DIFFERENTIAL
            feedmessage.header.timestamp = int(time.mktime(
                iso8601.parse_date(kv6['timestamp']).timetuple()))
            # NOTE(review): this assumes the decoded JSON iterates over
            # position-info dicts (i.e. a list of records) -- confirm the
            # KV6 wire format, since kv6['timestamp'] above suggests a dict.
            for posinfo in kv6:
                if posinfo['messagetype'] in ['DELAY', 'INIT', 'END']:
                    continue
                feedentity = feedmessage.entity.add()
                feedentity.id = str(sequence)
                getVehiclePosition(feedentity, posinfo)
                sequence += 1
        else:
            # Poll timeout: tear the socket down and reconnect.
            break
| StarcoderdataPython |
3557998 | #This script is intended to find the top and the mid pedestal of the H mod plasma profile for the pre and post processing of the simulation
#Developed by <NAME> on 02/03/2020
import numpy as np
import matplotlib.pyplot as plt
import scipy.optimize as opt
import re
from max_stat_tool import *
# some_file.py
import sys
#The command of this script will be "python max_parity_calculator.py 0001(# of the scan)"
def parity_finder_long(zgrid,f,name,plot,report): #this function is for local linear simulation with long range of z
    """Determine the parity (even/odd) of *f* on a wide z-grid.

    For local linear simulation output whose z range extends beyond
    [-2, 2]. Candidate symmetry centres z = -1, 0, 1 are scored, plus a
    periodically "looped" copy of the data, and the centre/parity with the
    smallest residual wins.

    Arguments: ``zgrid`` (1-D grid, assumed evenly spaced), ``f`` (field;
    only its real part is used), ``name`` (label for plot titles and file
    names), ``plot``/``report`` (1 to enable plotting / printed report).

    Returns ``(parity0, location0, ratio0)``: parity string ('even'/'odd'),
    best symmetry centre, and a length-2 array [oddness, evenness] for it.
    """
    f=np.real(f)
    #For the case the zgrid is evenly distributed.
    zmin=np.min(zgrid)
    zmax=np.max(zgrid)
    nz=len(zgrid)
    #print(zmax)
    #print(zmin)
    z0=int(nz/(zmax-zmin)) #nz from zmin to zmax -- i.e. grid points per unit of z
    parity_name=['even','odd']
    location=[-1,0,1]  # candidate symmetry centres (in z units)
    # parity[j] = [even-test residual, odd-test residual] for each candidate
    # centre; the extra last row is for the looped data.
    parity=np.zeros((len(location)+1,2))
    parity_plot_abs0=[]
    parity_plot_abs1=[]
    for j in range(len(location)):
        for i in range(z0):
            ztemp=int(nz/2+location[j]*z0) #around -1,0,1
            #even parity sum(f(x)-f(-x)) = 0 if even
            parity[j][0]=parity[j][0]+abs(f[ztemp+i-1]-f[ztemp-i])
            #odd parity sum(f(x)+f(-x)) = 0 if odd
            parity[j][1]=parity[j][1]+abs(f[ztemp+i-1]+f[ztemp-i])
    #********Doing the same thing for loop***********
    # ``loop`` comes from max_stat_tool; presumably it folds the data onto
    # one period in [-1, 1] -- TODO confirm.
    z_loop, f_loop=loop(zgrid,f,-1,1)
    for i in range(z0):
        ztemp=z0
        parity_plot_abs0.append(abs(f_loop[ztemp+i-1]-f_loop[ztemp-i]))
        parity_plot_abs1.append(abs(f_loop[ztemp+i-1]+f_loop[ztemp-i]))
        #even parity sum(f(x)-f(-x)) = 0 if even
        ntemp=len(location)
        parity[ntemp][0]=parity[ntemp][0]+abs(f_loop[ztemp+i-1]-f_loop[ztemp-i])
        #odd parity sum(f(x)+f(-x)) = 0 if odd
        parity[ntemp][1]=parity[ntemp][1]+abs(f_loop[ztemp+i-1]+f_loop[ztemp-i])
    #***********Find the ratio at different locations****************
    ratio=np.zeros((len(location)+1)*2)
    for i in range(len(location)+1):
        ratio[2*i+0]=parity[i][0]/(parity[i][0]+parity[i][1]) #percentage of oddness
        ratio[2*i+1]=parity[i][1]/(parity[i][0]+parity[i][1]) #percentage of evenness
    #ratio[2*0+0,2*0+1]=-1 {percentage of oddness, percentage of eveness}
    #ratio[2*1+0,2*1+1]= 0 {percentage of oddness, percentage of eveness}
    #ratio[2*2+0,2*2+1]= 1 {percentage of oddness, percentage of eveness}
    #ratio[2*3+0,2*3+1]= loop {percentage of oddness, percentage of eveness}
    # NOTE(review): the slice 0:len(location)*2-1 is 0:5, which drops
    # index 5 (the evenness entry of z=1); 0:len(location)*2 may have been
    # intended -- confirm.
    location0=location[int(np.argmin(ratio[0:len(location)*2-1])/2)]
    parity0=parity_name[int(np.argmin(ratio[0:len(location)*2-1])%2)]
    #********calc the ratio for loop**********************
    if parity[3][1]<parity[3][0]:
        parity1='odd'
    else:
        parity1='even'
    #*********Print out report*****************
    if report==1:
        for i in range(len(location)+1):
            if i < len(location):
                print('Around z=',location[i])
            else:
                print('For the loop')
            print(ratio[2*i+0]*100,'% Odd', ratio[2*i+1]*100, "% Even")
        print('The location of the center is', location0)
        print('The function is largely', parity0)
        print('Based on the loop, the function is largely ',parity1)
        if parity1==parity0:
            print('Result checked')
        else:
            print('location=', location0 ,'and loop mismatch, please check the Parity_plot.png or function.png to determine the parity manually')
    #*********End of Print out report**********
    #********Plot the result*******************
    if plot ==1:
        x_zoom,y_zoom = zoom1D(zgrid,f,-2,2)
        plt.clf()
        plt.title('Parity of'+ name+ 'calculation')
        plt.xlabel(r'$z/\pi$',fontsize=10)
        plt.ylabel(r'$f$',fontsize=13)
        #plt.plot(np.arange(0,1,1/len(parity_plot0)),parity_plot_abs0,label="Even function abs")
        #plt.plot(np.arange(0,1,1/len(parity_plot0)),parity_plot_abs1,label="Odd function abs")
        plt.plot(x_zoom,y_zoom,label=name)
        plt.plot(z_loop,f_loop,label=name+'_loop')
        plt.axhline(y=0, color="red")
        plt.axvline(x=location0, label='Center of the symmetry', color="red")
        #plt.axvline(x=midped, label="Mid-pedestal", color="red")
        #lt.axvline(x=topped, label="Top-pedestal", color="green")
        plt.legend()
        plt.savefig('Parity_plot_'+ name +'.png')
        plt.show()
        plt.clf()
        plt.title(name)
        plt.xlabel(r'$z/\pi$',fontsize=10)
        plt.ylabel(r'$f$',fontsize=13)
        #plt.plot(z_loop,f_loop,label="loop_f")
        plt.plot(zgrid,f,label=name)
        #plt.axvline(x=midped, label="Mid-pedestal", color="red")
        #lt.axvline(x=topped, label="Top-pedestal", color="green")
        plt.legend()
        plt.savefig('function_'+name+'.png')
        plt.show()
    # Re-derive the winning candidate index and return its [odd, even] ratio.
    ntemp=int(np.argmin(ratio[0:len(location)*2-1])/2)
    ratio0=np.zeros(2)
    ratio0[0]=ratio[ntemp*2+0]
    ratio0[1]=ratio[ntemp*2+1]
    return parity0,location0,ratio0
def parity_finder_short(zgrid,f,name,plot,report): #this function is for local linear simulation with short range of z
    """Determine the parity (even/odd) of *f* about z=0 on a narrow z-grid.

    The grid is first trimmed so the retained window is (approximately)
    symmetric about z = 0; then summed residuals |f(z)-f(-z)| (even test)
    and |f(z)+f(-z)| (odd test) decide the parity.

    Returns ``(parity1, 0, ratio)`` analogous to ``parity_finder_long``.
    """
    f=np.real(f)
    #For the case the zgrid is evenly distributed.
    zmin=np.min(zgrid)
    zmax=np.max(zgrid)
    # Trim the longer, asymmetric end of the grid.
    if abs(zmin) < abs(zmax):
        idx=find_nearest_index(zgrid,-zmax)
        new_z=zgrid[idx:-1]
        f=f[idx:-1]
    elif abs(zmin) > abs(zmax):
        idx=find_nearest_index(zgrid,-zmin)
        new_z=zgrid[0:idx]
        f=f[0:idx]
    else:
        new_z=zgrid
        f=f
    idx=find_nearest_index(new_z,0)
    #print(f'index of z=0 is {idx}')
    #print(f'index of nz/2 is {int(len(new_z)/2)}')
    z0=int(len(new_z)/2)
    location0=0
    parity=np.zeros(2)  # [even-test residual, odd-test residual]
    for i in range(z0):
        ztemp=int(z0) #around -1,0,1
        #even parity sum(f(x)-f(-x)) = 0 if even
        parity[0]=parity[0]+abs(f[ztemp+i-1]-f[ztemp-i])
        #odd parity sum(f(x)+f(-x)) = 0 if odd
        parity[1]=parity[1]+abs(f[ztemp+i-1]+f[ztemp-i])
    #Determine function's parity
    if parity[1]<parity[0]:
        parity1='odd'
    else:
        parity1='even'
    if plot ==1:
        plt.clf()
        plt.title('Parity of'+name+' calculation')
        plt.xlabel(r'$z/\pi$',fontsize=10)
        plt.ylabel(name,fontsize=13)
        #plt.plot(np.arange(0,1,1/len(parity_plot0)),parity_plot_abs0,label="Even function abs")
        #plt.plot(np.arange(0,1,1/len(parity_plot0)),parity_plot_abs1,label="Odd function abs")
        plt.plot(new_z,f,label=name)
        plt.axhline(y=0, color="red")
        plt.axvline(x=location0, label='Center of the symmetry', color="red")
        #plt.axvline(x=midped, label="Mid-pedestal", color="red")
        #lt.axvline(x=topped, label="Top-pedestal", color="green")
        plt.legend()
        plt.savefig('Parity_plot_'+name+'.png')
        plt.show()
    ratio=np.zeros(2)
    ratio[0]=parity[0]/(parity[0]+parity[1]) #percentage of oddness
    ratio[1]=parity[1]/(parity[0]+parity[1]) #percentage of evenness
    return parity1,location0,ratio
def parity_finder_general(zgrid, f, name, plot, report):
    """Pick the wide- or narrow-range parity finder based on the z extent."""
    covers_wide_range = np.max(zgrid) >= 2 and np.min(zgrid) <= -2
    finder = parity_finder_long if covers_wide_range else parity_finder_short
    return finder(zgrid, f, name, plot, report)
9644252 | <reponame>BoyanZhou/starstr
#!/usr/bin/python
# -*- coding: utf-8 -*-
import numpy as np
def calculate_d_stat(distance_data, individual_index):
    """Accumulate a closeness statistic ("D_stat") per individual.

    Row layout of ``distance_data``: column 0 is the owning individual's
    id and column 3 a distance; each row contributes 1/(distance + 1) to
    that individual's total. Totals are returned as a numpy array ordered
    like ``individual_index``.
    """
    closeness = 1.0 / (distance_data[:, 3] + 1.0)
    totals = np.zeros(len(individual_index))
    for owner, weight in zip(distance_data[:, 0], closeness):
        totals[individual_index.index(owner)] += weight
    return totals
def grouping(distance_data, individual_index, individual_D_stat):
    """Assign a group number to every individual via hill-climbing chains.

    Starting from each ungrouped individual, repeatedly hop to the
    surrounding neighbour with the highest D_stat. If the chain ends at a
    local maximum, a new group is created; if it reaches an already-grouped
    individual, the whole chain joins that group. Zero-distance neighbours
    of any chain member are pulled into the same group.

    Returns a numpy int array of group numbers aligned with
    ``individual_index`` (groups are numbered from 1).
    """
    group = np.array([0]*len(individual_index)) # store group number of each individual; 0 = not yet grouped
    group_number_temp = 0
    for i in range(len(group)):
        if group[i] != 0: # the individual has been grouped
            continue
        else:
            ID_1 = individual_index[i] # the start person
            ID_in_chain = [ID_1]
            D_stat_1 = individual_D_stat[i] # the start D
            # Neighbour ids of ID_1 are taken from column 1 of the rows
            # whose column 0 equals ID_1.
            ID_surround = distance_data[np.where(distance_data[:, 0] == ID_1), 1][0]
            D_stat_surround = individual_D_stat[[individual_index.index(k) for k in ID_surround]]
            # get surround ID and D_stat, then find the max
            max_pos = D_stat_surround.argmax()
            ID_2 = ID_surround[max_pos]
            D_stat_2 = D_stat_surround[max_pos]
            group_number_temp2 = group[individual_index.index(ID_2)]
            # when the D of tested one is smaller than surround ones and not grouped
            while D_stat_2 > D_stat_1 and group_number_temp2 == 0:
                ID_in_chain.append(ID_2)
                ID_1 = ID_2
                D_stat_1 = D_stat_2
                ID_surround = distance_data[np.where(distance_data[:, 0] == ID_1), 1][0]
                D_stat_surround = individual_D_stat[[individual_index.index(k) for k in ID_surround]]
                max_pos = D_stat_surround.argmax()
                ID_2 = ID_surround[max_pos] # max_D of surround individuals
                D_stat_2 = D_stat_surround[max_pos]
                group_number_temp2 = group[individual_index.index(ID_2)]
            # Expand the chain with every zero-distance ("identical")
            # neighbour of each chain member.
            ID_in_chain2 = set(ID_in_chain[:])
            for j in ID_in_chain:
                surround_j = distance_data[np.where(distance_data[:, 0] == j)]
                j_same = surround_j[np.where(surround_j[:, 3] == 0)][:, 1]
                ID_in_chain2 = ID_in_chain2 | set(j_same)
            ID_in_chain2 = list(ID_in_chain2)
            if D_stat_1 >= D_stat_2:
                group_number_temp += 1 # it is a new group: chain ended at a local maximum
                group[[individual_index.index(k) for k in ID_in_chain2]] = group_number_temp
            else:
                # Chain reached an already-grouped individual: adopt its group.
                group[[individual_index.index(k) for k in ID_in_chain2]] = group_number_temp2
    return group
| StarcoderdataPython |
8168044 | <gh_stars>10-100
"""
Support for Sense Hat LEDs.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/light.sensehat/
"""
import logging
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.components.light import (
ATTR_BRIGHTNESS, SUPPORT_BRIGHTNESS, ATTR_HS_COLOR, SUPPORT_COLOR,
Light, PLATFORM_SCHEMA)
from homeassistant.const import CONF_NAME
import homeassistant.util.color as color_util
REQUIREMENTS = ['sense-hat==2.2.0']

_LOGGER = logging.getLogger(__name__)

# This light supports dimming and colour only (no colour temperature,
# effects, etc.).
SUPPORT_SENSEHAT = (SUPPORT_BRIGHTNESS | SUPPORT_COLOR)

DEFAULT_NAME = 'sensehat'

# Only a friendly name can be configured for this platform.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
})
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the Sense Hat Light platform."""
    # Imported lazily so the module can load without the sense-hat package.
    from sense_hat import SenseHat

    add_devices([SenseHatLight(SenseHat(), config.get(CONF_NAME))])
class SenseHatLight(Light):
    """Home Assistant light entity backed by the Sense Hat LED matrix."""

    def __init__(self, sensehat, name):
        """Initialize the light: off, full brightness, white."""
        self._sensehat = sensehat
        self._name = name
        self._is_on = False
        self._brightness = 255
        self._hs_color = [0, 0]

    @property
    def name(self):
        """Display name of this light."""
        return self._name

    @property
    def is_on(self):
        """Whether the light is currently on."""
        return self._is_on

    @property
    def brightness(self):
        """Last brightness set on the light."""
        return self._brightness

    @property
    def hs_color(self):
        """Last hue/saturation colour set on the light."""
        return self._hs_color

    @property
    def supported_features(self):
        """Feature bitmask (brightness + colour)."""
        return SUPPORT_SENSEHAT

    @property
    def should_poll(self):
        """The entity pushes its own state; never poll."""
        return False

    @property
    def assumed_state(self) -> bool:
        """The real hardware state cannot be read back."""
        return True

    def turn_on(self, **kwargs):
        """Turn the light on, applying any requested brightness/colour."""
        self._brightness = kwargs.get(ATTR_BRIGHTNESS, self._brightness)
        self._hs_color = kwargs.get(ATTR_HS_COLOR, self._hs_color)
        hue, saturation = self._hs_color
        value = self._brightness / 255 * 100
        rgb = color_util.color_hsv_to_RGB(hue, saturation, value)
        self._sensehat.clear(*rgb)
        self._is_on = True
        self.schedule_update_ha_state()

    def turn_off(self, **kwargs):
        """Turn the light off by blanking the LED matrix."""
        self._sensehat.clear()
        self._is_on = False
        self.schedule_update_ha_state()
| StarcoderdataPython |
9639437 | <filename>manim_sandbox/utils/import.py
from manim_sandbox.utils.functions.calculation import *
from manim_sandbox.utils.functions.debugTeX import *
from manim_sandbox.utils.functions.ratefunc import *
from manim_sandbox.utils.functions.MyClass import *
from manim_sandbox.utils.functions.MathTools import *
from manim_sandbox.utils.mobjects.Arc_group import *
from manim_sandbox.utils.mobjects.MyText import *
from manim_sandbox.utils.mobjects.Trail import *
from manim_sandbox.utils.mobjects.MyBoxes import *
from manim_sandbox.utils.mobjects.Right_angle import *
from manim_sandbox.utils.mobjects.angle import *
from manim_sandbox.utils.mobjects.PeriodicTable import *
# from manim_sandbox.utils.mobjects.Shadow_around import *
from manim_sandbox.utils.mobjects.ThreeBody import *
from manim_sandbox.utils.mobjects.Rubik_Cube import *
from manim_sandbox.utils.mobjects.Gear import *
from manim_sandbox.utils.mobjects.MyTriangle import *
from manim_sandbox.utils.mobjects.VideoProgressBar import *
# from manim_sandbox.utils.mobjects.video_mobject import *
from manim_sandbox.utils.mobjects.ColorText import *
from manim_sandbox.utils.mobjects.ThreeDVector import *
from manim_sandbox.utils.mobjects.BezierGenerator import BezierGenerator
from manim_sandbox.utils.mobjects.BezierGenerator import BezierFunc
from manim_sandbox.utils.scenes.bilibili import *
from manim_sandbox.utils.scenes.parabola import *
# from manim_sandbox.utils.scenes.NewGraphScene import *
from manim_sandbox.utils.scenes.Rubik_Cube_Scene import *
from manim_sandbox.utils.animations.paperclip import *
from manim_sandbox.utils.animations.RandomScene import *
| StarcoderdataPython |
8028209 | # Generated by Django 3.1.7 on 2021-05-03 02:15
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django 3.1.7: creates the initial TargetDomain
    # table. Do not hand-edit applied migrations.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='TargetDomain',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('organization', models.CharField(max_length=255, verbose_name='Organization')),
                ('domains', models.TextField(verbose_name="Target's Domains")),
                ('overview', models.CharField(default='N/A', max_length=255, verbose_name='Overview')),
                ('actions', models.CharField(choices=[('LIST', 'List Up'), ('SCAN', 'Vulnerability assessment')], max_length=4, verbose_name='Action mode')),
            ],
        ),
    ]
| StarcoderdataPython |
6562787 | #!/usr/bin/env python
from __future__ import print_function, unicode_literals
import argparse
import ast
from datetime import datetime
import glob
import io
import json
import os
from pkg_resources import Requirement
import re
import sys
import textwrap
# Todo: This should use a common omit logic once ci scripts are refactored into ci_tools
skip_pkgs = [
    'azure-mgmt-documentdb', # deprecated
    'azure-sdk-for-python', # top-level package
    'azure-sdk-tools', # internal tooling for automation
    'azure-servicemanagement-legacy', # legacy (not officially deprecated)
    'azure-common',
    'azure',
    'azure-keyvault'
]


def report_should_skip_lib(lib_name):
    """True when *lib_name* is excluded from the dependency report."""
    if lib_name.endswith('-nspkg'):
        return True
    return lib_name in skip_pkgs


def dump_should_skip_lib(lib_name):
    """True when *lib_name* is excluded from the JSON dependency dump.

    Stricter than the report filter: management-plane packages and
    anything not named azure-* are dropped as well.
    """
    if report_should_skip_lib(lib_name):
        return True
    return '-mgmt' in lib_name or not lib_name.startswith('azure')
def locate_libs(base_dir):
    """Return sorted directories under *base_dir* containing a setup.py.

    Both the legacy top-level layout (azure*/setup.py) and the sdk/
    service-folder layout (sdk/*/azure*/setup.py) are searched.
    """
    patterns = (
        os.path.join(base_dir, 'azure*', 'setup.py'),
        os.path.join(base_dir, 'sdk/*/azure*', 'setup.py'),
    )
    setup_files = []
    for pattern in patterns:
        setup_files.extend(glob.glob(pattern))
    return sorted(os.path.dirname(path) for path in setup_files)
def locate_wheels(base_dir):
    """Return the sorted paths of all .whl files directly in *base_dir*."""
    return sorted(glob.glob(os.path.join(base_dir, '*.whl')))
def parse_req(req):
    """Split a requirement string into ``(name, spec)``.

    The environment-marker part (after ';') is discarded; ``spec`` is the
    requirement string with the (normalized) name removed. On parse
    failure a message is printed and None is implicitly returned, so
    callers that unpack the result will raise.
    """
    try:
        req_object = Requirement.parse(req.split(";")[0])
        req_name = req_object.key
        spec = str(req_object).replace(req_name, '')
        return (req_name, spec)
    # BUG FIX: the previous bare ``except:`` also swallowed SystemExit and
    # KeyboardInterrupt; catch only real errors.
    except Exception:
        print('Failed to parse requirement %s' % (req))
def record_dep(dependencies, req_name, spec, lib_name):
    """Register that *lib_name* declares *req_name* with version *spec*.

    ``dependencies`` is a nested mapping: requirement name -> version
    spec -> list of declaring libraries; missing levels are created.
    """
    dependencies.setdefault(req_name, {}).setdefault(spec, []).append(lib_name)
def get_lib_deps(base_dir):
    """Collect packages and declared dependencies from source trees.

    Returns ``(packages, dependencies)``: ``packages`` maps package name
    -> {version, source, deps}; ``dependencies`` maps requirement name ->
    spec -> [declaring packages], excluding report-skipped libraries.
    Unparseable setup.py files are reported and skipped.
    """
    packages = {}
    dependencies = {}
    for lib_dir in locate_libs(base_dir):
        # BUG FIX: computed before the try block so the error message in
        # the handler can never reference an unbound name.
        setup_path = os.path.join(lib_dir, 'setup.py')
        try:
            lib_name, version, requires = parse_setup(setup_path)
            packages[lib_name] = {
                'version': version,
                'source': lib_dir,
                'deps': []
            }
            for req in requires:
                req_name, spec = parse_req(req)
                packages[lib_name]['deps'].append({
                    'name': req_name,
                    'version': spec
                })
                if not report_should_skip_lib(lib_name):
                    record_dep(dependencies, req_name, spec, lib_name)
        # BUG FIX: bare ``except:`` would also swallow KeyboardInterrupt.
        except Exception:
            print('Failed to parse %s' % (setup_path))
    return packages, dependencies
def get_wheel_deps(wheel_dir):
    """Collect packages and declared dependencies from built wheels.

    Same return shape as ``get_lib_deps`` but reads METADATA out of each
    .whl in *wheel_dir* instead of executing setup.py.
    """
    from wheel.pkginfo import read_pkg_info_bytes
    from wheel.wheelfile import WheelFile
    packages = {}
    dependencies = {}
    for whl_path in locate_wheels(wheel_dir):
        try:
            with WheelFile(whl_path) as whl:
                pkg_info = read_pkg_info_bytes(whl.read(whl.dist_info_path + '/METADATA'))
                lib_name = pkg_info.get('Name')
                packages[lib_name] = {
                    'version': pkg_info.get('Version'),
                    'source': whl_path,
                    'deps': []
                }
                requires = pkg_info.get_all('Requires-Dist')
                for req in requires:
                    req = req.split(';')[0] # Extras conditions appear after a semicolon
                    req = re.sub(r'[\s\(\)]', '', req) # Version specifiers appear in parentheses
                    req_name, spec = parse_req(req)
                    packages[lib_name]['deps'].append({
                        'name': req_name,
                        'version': spec
                    })
                    if not report_should_skip_lib(lib_name):
                        record_dep(dependencies, req_name, spec, lib_name)
        # NOTE(review): bare except also catches KeyboardInterrupt; consider
        # narrowing to ``except Exception``.
        except:
            print('Failed to parse METADATA from %s' % (whl_path))
    return packages, dependencies
def parse_setup(setup_filename):
    """Extract ``(name, version, requires)`` from a setup.py without installing.

    The file's AST is rewritten so that the ``setup(...)`` call resolves to
    a stub that records its arguments, then the module is exec'd.
    ``requires`` is install_requires plus every extras_require entry.
    """
    # Stub injected ahead of the real setup() call; after injection the
    # name ``setup`` resolves to this recorder instead of setuptools'.
    mock_setup = textwrap.dedent('''\
    def setup(*args, **kwargs):
        __setup_calls__.append((args, kwargs))
    ''')
    parsed_mock_setup = ast.parse(mock_setup, filename=setup_filename)
    with io.open(setup_filename, 'r', encoding='utf-8-sig') as setup_file:
        parsed = ast.parse(setup_file.read())
        for index, node in enumerate(parsed.body[:]):
            # Look for a top-level expression statement of the form
            # ``setup(...)``.
            if (
                not isinstance(node, ast.Expr) or
                not isinstance(node.value, ast.Call) or
                not hasattr(node.value.func, 'id') or
                node.value.func.id != 'setup'
            ):
                continue
            parsed.body[index:index] = parsed_mock_setup.body
            break
    fixed = ast.fix_missing_locations(parsed)
    codeobj = compile(fixed, setup_filename, 'exec')
    local_vars = {}
    global_vars = {'__setup_calls__': []}
    current_dir = os.getcwd()
    working_dir = os.path.dirname(setup_filename)
    # setup.py files often read relative paths (README, version modules),
    # so exec with the package directory as CWD, then restore it.
    os.chdir(working_dir)
    exec(codeobj, global_vars, local_vars)
    os.chdir(current_dir)
    _, kwargs = global_vars['__setup_calls__'][0]
    version = kwargs['version']
    name = kwargs['name']
    requires = []
    if 'install_requires' in kwargs:
        requires += kwargs['install_requires']
    if 'extras_require' in kwargs:
        for extra in kwargs['extras_require'].values():
            requires += extra
    return name, version, requires
def dict_compare(d1, d2):
    """Compare two dicts; return ``(added, removed, modified)``.

    ``added``/``removed`` are key sets present only in *d1* / only in
    *d2*; ``modified`` maps each shared key with differing values to the
    pair ``(d1[key], d2[key])``.
    """
    keys1, keys2 = set(d1), set(d2)
    shared = keys1 & keys2
    modified = {key: (d1[key], d2[key]) for key in shared if d1[key] != d2[key]}
    return keys1 - keys2, keys2 - keys1, modified
def render_report(output_path, report_context):
    """Render the Jinja2 HTML dependency report to *output_path*.

    The template ``deps.html.j2`` is loaded from this script's directory.

    NOTE(review): ``Environment``/``FileSystemLoader`` are imported inside
    the ``__main__`` guard (only when --out is given), so this function
    must not be called from elsewhere without jinja2 names in scope.
    """
    env = Environment(
        loader=FileSystemLoader(os.path.dirname(os.path.realpath(__file__)))
    )
    template = env.get_template('deps.html.j2')
    with io.open(output_path, 'w', encoding='utf-8') as output:
        output.write(template.render(report_context))
def get_dependent_packages(data_pkgs):
    """Return the set of azure SDK package names required by any package."""
    dependent = set()
    for pkg_info in data_pkgs.values():
        for dep in pkg_info['deps']:
            if not dump_should_skip_lib(dep['name']):
                dependent.add(dep['name'])
    return dependent
def dump_packages(data_pkgs):
    """Build the JSON-serializable dependency graph of internal packages.

    A package is included when it requires another azure SDK package or is
    itself required by one; entries are keyed by ``name:version``.
    """
    dump_data = {}
    required_by_others = get_dependent_packages(data_pkgs)
    for pkg_name, pkg_data in data_pkgs.items():
        internal_deps = [d for d in pkg_data['deps']
                         if not dump_should_skip_lib(d['name'])]
        if not internal_deps and pkg_name not in required_by_others:
            continue
        dump_data[pkg_name + ':' + pkg_data['version']] = {
            'name': pkg_name,
            'version': pkg_data['version'],
            'type': 'internal',
            'deps': internal_deps
        }
    return dump_data
def resolve_lib_deps(dump_data, data_pkgs, pkg_id):
    """Resolve the dependency entries of *pkg_id* in *dump_data* in place.

    Dependencies satisfied by an in-repo package get their spec pinned to
    that package's version; every other dependency is added to
    ``dump_data`` as an 'internalbinary' node (in-repo name whose version
    does not satisfy the spec) or an 'external' node, with no recursion.
    """
    for dep in dump_data[pkg_id]['deps']:
        dep_req = Requirement.parse(dep['name'] + dep['version'])
        if dep['name'] in data_pkgs and data_pkgs[dep['name']]['version'] in dep_req:
            # If the internal package version matches the dependency spec,
            # rewrite the dep version to match the internal package version
            dep['version'] = data_pkgs[dep['name']]['version']
        else:
            dep_id = dep['name'] + ':' + dep['version']
            if not dep_id in dump_data:
                dump_data[dep_id] = {
                    'name': dep['name'],
                    'version': dep['version'],
                    'type': 'internalbinary' if dep['name'] in data_pkgs else 'external',
                    'deps': []
                }
if __name__ == '__main__':
    # CLI driver: discover dependencies, check them for consistency, then
    # compare against (or freeze to) shared_requirements.txt.
    base_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
    parser = argparse.ArgumentParser(description='''\
Analyze dependencies in Python packages. First, all declared dependencies
and the libraries that declare them will be discovered (visible with
--verbose). Next, all declared dependency version specs will be analyzed to
ensure they are consistent across all libraries. Finally, all declared
dependency version specs will be compared to the frozen version specs in
shared_requirements.txt, or if --freeze is provided, all declared dependency
version specs will be frozen to shared_requirements.txt.
''')
    parser.add_argument('--verbose', help='verbose output', action='store_true')
    parser.add_argument('--freeze', help='freeze dependencies after analyzing (otherwise, validate dependencies against frozen list)', action='store_true')
    parser.add_argument('--out', metavar='FILE', help='write HTML-formatted report to FILE')
    parser.add_argument('--dump', metavar='FILE', help='write JSONP-formatted dependency data to FILE')
    parser.add_argument('--wheeldir', metavar='DIR', help='analyze wheels in DIR rather than source packages in this repository')
    args = parser.parse_args()
    # Fail early if the HTML report was requested without jinja2 installed.
    if args.out:
        try:
            from jinja2 import Environment, FileSystemLoader
        except:
            print("Jinja2 is required to render the dependency report. Please install with 'pip install Jinja2' to use this option.")
            sys.exit(1)
    # Discover packages + declared dependencies from wheels or source trees.
    if args.wheeldir:
        all_packages, dependencies = get_wheel_deps(args.wheeldir)
    else:
        all_packages, dependencies = get_lib_deps(base_dir)
    packages = {k: v for k,v in all_packages.items() if not report_should_skip_lib(k)}
    if args.verbose:
        print('Packages analyzed')
        print('=================')
        for package in sorted(packages.keys()):
            info = packages[package]
            print("%s %s" % (package, info['version']))
            print("  from %s" % (info['source']))
        print('\n\nRequirements discovered')
        print('=======================')
        for requirement in sorted(dependencies.keys()):
            specs = dependencies[requirement]
            libs = []
            print('%s' % (requirement))
            for spec in specs.keys():
                print('%s' % (spec if spec else '(empty)'))
                for lib in specs[spec]:
                    print(' * %s' % (lib))
            print('')
    # A requirement is "inconsistent" when different libraries declare it
    # with different version specifiers.
    inconsistent = []
    for requirement in sorted(dependencies.keys()):
        specs = dependencies[requirement]
        num_specs = len(specs)
        if num_specs == 1:
            continue
        if not inconsistent and args.verbose:
            print('\nInconsistencies detected')
            print('========================')
        inconsistent.append(requirement)
        if args.verbose:
            print("Requirement '%s' has %s unique specifiers:" % (requirement, num_specs))
            for spec in sorted(specs.keys()):
                libs = specs[spec]
                friendly_spec = '(none)' if spec == '' else spec
                print("  '%s'" % (friendly_spec))
                print('  ' + ('-' * (len(friendly_spec) + 2)))
                for lib in sorted(libs):
                    print('    * %s' % (lib))
                print('')
    frozen_filename = os.path.join(base_dir, 'shared_requirements.txt')
    # --freeze mode: write the current (consistent) specs and exit.
    if args.freeze:
        if inconsistent:
            print('Unable to freeze requirements due to incompatible dependency versions')
            sys.exit(1)
        else:
            with io.open(frozen_filename, 'w', encoding='utf-8') as frozen_file:
                for requirement in sorted(dependencies.keys()):
                    spec = list(dependencies[requirement].keys())[0]
                    if spec == '':
                        print("Requirement '%s' being frozen with no version spec" % requirement)
                    frozen_file.write(requirement + spec + '\n')
            print('Current requirements frozen to %s' % (frozen_filename))
            sys.exit(0)
    # Read the frozen requirement list; '#override LIB REQ' lines allow a
    # specific library to deviate from the frozen spec.
    frozen = {}
    overrides = {}
    override_count = 0
    try:
        with io.open(frozen_filename, 'r', encoding='utf-8-sig') as frozen_file:
            for line in frozen_file:
                if line.startswith('#override'):
                    _, lib_name, req_override = line.split(' ', 2)
                    req_override_name, override_spec = parse_req(req_override)
                    record_dep(overrides, req_override_name, override_spec, lib_name)
                    override_count += 1
                elif not line.startswith('#'):
                    req_name, spec = parse_req(line)
                    frozen[req_name] = [spec]
    except:
        print('Unable to open shared_requirements.txt, shared requirements have not been validated')
    # Validate current declarations against the frozen list.
    missing_reqs, new_reqs, changed_reqs = {}, {}, {}
    non_overridden_reqs_count = 0
    exitcode = 0
    if frozen:
        flat_deps = {req: sorted(dependencies[req].keys()) for req in dependencies}
        missing_reqs, new_reqs, changed_reqs = dict_compare(frozen, flat_deps)
        if args.verbose and len(overrides) > 0:
            print('\nThe following requirement overrides are in place:')
            for overridden_req in overrides:
                for spec in overrides[overridden_req]:
                    libs = ', '.join(sorted(overrides[overridden_req][spec]))
                    print(' * %s is allowed for %s' % (overridden_req + spec, libs))
        if args.verbose and len(missing_reqs) > 0:
            print('\nThe following requirements are frozen but do not exist in any current library:')
            for missing_req in missing_reqs:
                [spec] = frozen[missing_req]
                print(' * %s' % (missing_req + spec))
        if len(new_reqs) > 0:
            exitcode = 1
            if args.verbose:
                for new_req in new_reqs:
                    for spec in dependencies[new_req]:
                        libs = dependencies[new_req][spec]
                        print("\nRequirement '%s' is declared in the following libraries but has not been frozen:" % (new_req + spec))
                        for lib in libs:
                            print("  * %s" % (lib))
        if len(changed_reqs) > 0:
            for changed_req in changed_reqs:
                frozen_specs, current_specs = changed_reqs[changed_req]
                unmatched_specs = set(current_specs) - set(frozen_specs)
                override_specs = overrides.get(changed_req, [])
                for spec in unmatched_specs:
                    # Libraries covered by an override do not count as
                    # violations.
                    if spec in override_specs:
                        non_overridden_libs = set(dependencies[changed_req][spec]) - set(override_specs[spec])
                    else:
                        non_overridden_libs = dependencies[changed_req][spec]
                    if len(non_overridden_libs) > 0:
                        exitcode = 1
                        non_overridden_reqs_count += 1
                        if args.verbose:
                            print("\nThe following libraries declare requirement '%s' which does not match the frozen requirement '%s':" % (changed_req + spec, changed_req + frozen_specs[0]))
                            for lib in non_overridden_libs:
                                print("  * %s" % (lib))
        if exitcode == 0:
            if args.verbose:
                print('')
            print('All library dependencies validated against frozen requirements')
        elif not args.verbose:
            print('Library dependencies do not match frozen requirements, run this script with --verbose for details')
    elif inconsistent:
        exitcode = 1
    if exitcode == 1:
        if not args.verbose:
            print('\nIncompatible dependency versions detected in libraries, run this script with --verbose for details')
    else:
        print('\nAll library dependencies verified, no incompatible versions detected')
    # Optional HTML report: inconsistent and external requirements sort first.
    if args.out:
        external = [k for k in dependencies if k not in packages and not report_should_skip_lib(k)]
        def display_order(k):
            if k in inconsistent:
                return 'a' + k if k in external else 'b' + k
            else:
                return 'c' + k if k in external else 'd' + k
        render_report(args.out, {
            'changed_reqs': changed_reqs,
            'curtime': datetime.utcnow(),
            'dependencies': dependencies,
            'env': os.environ,
            'external': external,
            'frozen': frozen,
            'inconsistent': inconsistent,
            'missing_reqs': missing_reqs,
            'new_reqs': new_reqs,
            'non_overridden_reqs_count': non_overridden_reqs_count,
            'ordered_deps': sorted(dependencies.keys(), key=display_order),
            'override_count': override_count,
            'overrides': overrides,
            'packages': packages,
            'repo_name': 'azure-sdk-for-python'
        })
    # Optional JSONP/JSON dump of the resolved dependency graph.
    if args.dump:
        data_pkgs = {k: v for k, v in all_packages.items() if not dump_should_skip_lib(k)}
        dump_data = dump_packages(data_pkgs)
        pkg_ids = [k for k in dump_data.keys()]
        for pkg_id in pkg_ids:
            resolve_lib_deps(dump_data, data_pkgs, pkg_id)
        with io.open(f"{args.dump}/data.js", 'w', encoding='utf-8') as dump_file:
            dump_file.write('const data = ' + json.dumps(dump_data) + ';')
        with io.open(f"{args.dump}/arcdata.json", 'w', encoding='utf-8') as dump_file:
            dump_file.write(json.dumps(dump_data))
    sys.exit(exitcode)
| StarcoderdataPython |
287999 | # pluto.py
# My brother Steven and father Jim collaborated buiding this program
# I modified the output - to print the vector and return the string "Done"
# === import random.py and statistics.py
import random
import statistics
# === initialize temperatures in a vector that represents n layers of pluto's surface
# === the top is temp_low, the bottom is temp_high, and smooth gradient between
# === note: n > 2 (n must be greater than 2, since first layer is temp_low constant, and last layer is temp_high constant)
# === test case
# >>> init_temp(10,40,45)
# [40, 40.553479088950986, 41.10695817790197, 41.66140886865191, 42.21585955940185, 42.77179883047424, 43.32773810154663, 43.88498640818992, 44.442234714833205, 45]
def init_temp(n,temp_low,temp_high):
    """Print a vector of n temperatures smoothed from temp_low (surface) to
    temp_high (deepest layer) by 100 neighbour-averaging passes, then
    return the string "Done".

    Requires n > 2: the first and last layers are held constant while the
    interior diffuses toward a smooth gradient.
    """
    layers = [temp_low] * n
    layers[n - 1] = temp_high  # deepest layer pinned at temp_high
    scratch = [0] * n          # holds freshly averaged interior values
    # 100 diffusion sweeps: each interior layer becomes the mean of its two
    # neighbours, computed entirely from the previous sweep's values.
    for _ in range(100):
        for j in range(1, n - 1):
            scratch[j] = (layers[j - 1] + layers[j + 1]) / 2.0
        for j in range(1, n - 1):
            layers[j] = scratch[j]
    print(layers)  # Adam modification
    return("Done") # Adam modification
# === initialize probability of absorption in a vector that represents n layers of pluto's surface
# === transmission coefficient is a probability between 0 and 100 percent
# === test case
# >>> init_absorb(10,80)
# [0, 120, 99, 75, 76, 62, 48, 34, 28, 0]
def init_absorb(n,transmission_coeff):
    """Simulate 1000 photons entering layer 1 of an n-layer stack and
    return a length-n vector counting absorptions per layer.

    At each step a photon is transmitted (keeps direction), reflected
    (reverses direction) or absorbed; reflection and absorption split the
    probability left over after transmission equally.  A photon reaching
    layer 0 or layer n-1 leaves the stack without being counted.
    """
    # transmission + reflection + absorption = 100%
    reflect_coeff = absorb_coeff = (100.0 - transmission_coeff)/2.0
    absorbed_counts = [0] * n
    for _ in range(0, 1000):
        direction = 1  # 1 = down, -1 = up
        layer = 1
        while True:
            roll = random.uniform(0, 100)
            if roll <= transmission_coeff:
                # transmitted: keep moving in the current direction
                layer = layer + direction
            elif roll <= transmission_coeff + reflect_coeff:
                # reflected: reverse and step back the way it came
                direction = -1 * direction
                layer = layer + direction
            else:
                # absorbed here: count it and end this photon's walk
                absorbed_counts[layer] = absorbed_counts[layer] + 1
                break
            # leaving through the top or bottom ends the walk, uncounted
            if (layer == 0) or (layer == n - 1):
                break
    return(absorbed_counts)
| StarcoderdataPython |
1727215 | import scipy.stats as spst
import scipy.special as spsp
import numpy as np
from . import opt_abc as opt
from . import opt_smile_abc as smile
class Cev(opt.OptAnalyticABC, smile.OptSmileABC, smile.MassZeroABC):
    """
    Constant Elasticity of Variance (CEV) model.
    Underlying price is assumed to follow CEV process:
    dS_t = (r - q) S_t dt + sigma S_t^beta dW_t, where dW_t is a standard Brownian motion.
    Examples:
        >>> import numpy as np
        >>> import pyfeng as pf
        >>> m = pf.Cev(sigma=0.2, beta=0.5, intr=0.05, divr=0.1)
        >>> m.price(np.arange(80, 121, 10), 100, 1.2)
        array([16.11757214, 10.00786871, 5.64880408, 2.89028476, 1.34128656])
    """
    sigma = None  # model volatility
    beta = 0.5  # elasticity parameter; only beta <= 1 is supported (see price_formula)
    is_bsm_sigma = False  # NOTE(review): not used in this class as shown -- presumably consumed by a base class; confirm
    def __init__(self, sigma, beta=0.5, intr=0.0, divr=0.0, is_fwd=False):
        """
        Args:
            sigma: model volatility
            beta: elasticity parameter. 0.5 by default
            intr: interest rate (domestic interest rate)
            divr: dividend/convenience yield (foreign interest rate)
            is_fwd: if True, treat `spot` as forward price. False by default.
        """
        super().__init__(sigma, intr=intr, divr=divr, is_fwd=is_fwd)
        self.beta = beta
    def params_kw(self):
        # Extend the base-class parameter dict with this model's beta.
        params = super().params_kw()
        extra = {"beta": self.beta}
        return {**params, **extra}  # Py 3.9, params | extra
    def mass_zero(self, spot, texp, log=False):
        """Return the probability mass absorbed at S=0 by time texp.

        Computed from the survival function of a gamma distribution; when
        log=True an asymptotic expansion is used for large x, where the
        exact sf underflows.
        """
        fwd = self.forward(spot, texp)
        betac = 1.0 - self.beta
        a = 0.5 / betac
        sigma_std = np.maximum(
            self.sigma / np.power(fwd, betac) * np.sqrt(texp), np.finfo(float).eps
        )
        x = 0.5 / np.square(betac * sigma_std)
        if log:
            # Asymptotic series for log Gamma(a, x); the exact log-sf value
            # is substituted back wherever x <= 100.
            log_mass = (a - 1) * np.log(x) - x - np.log(spsp.gamma(a))
            log_mass += np.log(
                1
                + (a - 1)
                / x
                * (1 + (a - 2) / x * (1 + (a - 3) / x * (1 + (a - 4) / x)))
            )
            with np.errstate(divide="ignore"):
                log_mass = np.where(x > 100, log_mass, np.log(spst.gamma.sf(x=x, a=a)))
            return log_mass
        else:
            return spst.gamma.sf(x=x, a=a)
    def mass_zero_t0(self, spot, texp):
        """
        Limit value of -T log(M_T) as T -> 0, where M_T is the mass at zero.
        Args:
            spot: spot (or forward) price
            texp: time to expiry (used only through the forward)
        Returns:
            - lim_{T->0} T log(M_T)
        """
        fwd = self.forward(spot, texp)
        betac = 1.0 - self.beta
        alpha = self.sigma / np.power(fwd, betac)
        t0 = 0.5 / (betac * alpha) ** 2
        return t0
    @staticmethod
    def price_formula(
        strike, spot, texp, sigma=None, cp=1, beta=0.5, intr=0.0, divr=0.0, is_fwd=False
    ):
        """
        Vanilla option price under the CEV model via noncentral chi-square
        distribution functions.
        Args:
            strike: strike price
            spot: spot (or forward) price
            texp: time to expiry
            sigma: model volatility
            cp: 1/-1 for call/put option
            beta: elasticity parameter
            intr: interest rate (domestic interest rate)
            divr: dividend/convenience yield (foreign interest rate)
            is_fwd: if True, treat `spot` as forward price. False by default.
        Returns:
            discounted vanilla option price
        """
        disc_fac = np.exp(-texp * intr)
        fwd = spot * (1.0 if is_fwd else np.exp(-texp * divr) / disc_fac)
        betac = 1.0 - beta
        betac_inv = 1.0 / betac
        alpha = sigma / np.power(fwd, betac)
        sigma_std = np.maximum(alpha * np.sqrt(texp), np.finfo(float).eps)
        kk = strike / fwd
        x = 1.0 / np.square(betac * sigma_std)
        y = np.power(kk, 2 * betac) * x
        # Need to clean up the case beta > 0
        if beta > 1.0:
            raise ValueError("Cannot handle beta value higher than 1.0")
        ncx2_sf = spst.ncx2.sf
        ncx2_cdf = spst.ncx2.cdf
        # Computing call and put is a bit of computation waste, but do this for vectorization.
        price = np.where(
            cp > 0,
            fwd * ncx2_sf(y, 2 + betac_inv, x) - strike * ncx2_cdf(x, betac_inv, y),
            strike * ncx2_sf(x, betac_inv, y) - fwd * ncx2_cdf(y, 2 + betac_inv, x),
        )
        return disc_fac * price
    def delta(self, strike, spot, texp, cp=1):
        """Analytic option delta.

        NOTE(review): the expressions appear to differentiate the ncx2
        pricing representation with respect to the forward; verify against
        a reference before relying on the beta > 1 branch.
        """
        fwd, df, divf = self._fwd_factor(spot, texp)
        betac_inv = 1 / (1 - self.beta)
        k_star = 1.0 / np.square(self.sigma / betac_inv) / texp
        x = k_star * np.power(fwd, 2 / betac_inv)
        y = k_star * np.power(strike, 2 / betac_inv)
        if self.beta < 1.0:
            delta = (
                0.5 * (cp - 1)
                + spst.ncx2.sf(y, 2 + betac_inv, x)
                + 2
                * x
                / betac_inv
                * (
                    spst.ncx2.pdf(y, 4 + betac_inv, x)
                    - strike / fwd * spst.ncx2.pdf(x, betac_inv, y)
                )
            )
        else:
            delta = (
                0.5 * (cp - 1)
                + spst.ncx2.sf(x, -betac_inv, y)
                - 2
                * x
                / betac_inv
                * (
                    spst.ncx2.pdf(x, -betac_inv, y)
                    - strike / fwd * spst.ncx2.pdf(y, 4 - betac_inv, x)
                )
            )
        # cp-dependent shift above handles put parity; scale by the proper
        # discount/dividend factor depending on spot vs forward quoting.
        delta *= df if self.is_fwd else divf
        return delta
    def cdf(self, strike, spot, texp, cp=1):
        """Risk-neutral cdf P(S_T <= strike) for cp > 0, survival otherwise."""
        fwd = self.forward(spot, texp)
        betac = 1.0 - self.beta
        betac_inv = 1.0 / betac
        alpha = self.sigma / np.power(fwd, betac)
        sigma_std = np.maximum(alpha * np.sqrt(texp), np.finfo(float).eps)
        kk = strike / fwd
        x = 1.0 / np.square(betac * sigma_std)
        y = np.power(kk, 2 * betac) * x
        cdf = np.where(
            cp > 0, spst.ncx2.cdf(x, betac_inv, y), spst.ncx2.sf(x, betac_inv, y)
        )
        return cdf
    def gamma(self, strike, spot, texp, cp=1):
        """Analytic option gamma (same for calls and puts).

        NOTE(review): built from ncx2 pdf terms analogous to delta; verify
        the beta > 1 branch against a reference.
        """
        fwd, df, divf = self._fwd_factor(spot, texp)
        betac_inv = 1 / (1 - self.beta)
        k_star = 1.0 / np.square(self.sigma / betac_inv) / texp
        x = k_star * np.power(fwd, 2 / betac_inv)
        y = k_star * np.power(strike, 2 / betac_inv)
        if self.beta < 1.0:
            gamma = (
                (2 + betac_inv - x) * spst.ncx2.pdf(y, 4 + betac_inv, x)
                + x * spst.ncx2.pdf(y, 6 + betac_inv, x)
                + strike
                / fwd
                * (
                    x * spst.ncx2.pdf(x, betac_inv, y)
                    - y * spst.ncx2.pdf(x, 2 + betac_inv, y)
                )
            )
        else:
            gamma = (
                x * spst.ncx2.pdf(x, -betac_inv, y)
                - y * spst.ncx2.pdf(x, 2 - betac_inv, y)
            ) + strike / fwd * (
                (2 - betac_inv - x) * spst.ncx2.pdf(y, 4 - betac_inv, x)
                + x * spst.ncx2.pdf(y, 6 - betac_inv, x)
            )
        gamma *= 2 * (divf / betac_inv) ** 2 / df * x / fwd
        if self.is_fwd:
            gamma *= (df / divf) ** 2
        return gamma
    def vega(self, strike, spot, texp, cp=1):
        """Analytic option vega with respect to the BSM-equivalent
        spot-level volatility sigma * spot^(beta-1)."""
        fwd, df, divf = self._fwd_factor(spot, texp)
        spot = fwd * df / divf
        betac_inv = 1 / (1 - self.beta)
        k_star = 1.0 / np.square(self.sigma / betac_inv) / texp
        x = k_star * np.power(fwd, 2 / betac_inv)
        y = k_star * np.power(strike, 2 / betac_inv)
        if self.beta < 1.0:
            vega = -fwd * spst.ncx2.pdf(y, 4 + betac_inv, x) + strike * spst.ncx2.pdf(
                x, betac_inv, y
            )
        else:
            vega = fwd * spst.ncx2.pdf(x, -betac_inv, y) - strike * spst.ncx2.pdf(
                y, 4 - betac_inv, x
            )
        sigma = self.sigma * spot ** (self.beta - 1)
        vega *= df * 2 * x / sigma
        return vega
    def theta(self, strike, spot, texp, cp=1):
        """Option theta; no closed form implemented yet, so fall back to
        the numerical differentiation provided by the base class."""
        ### Need to implement this
        return self.theta_numeric(strike, spot, texp, cp=cp)
| StarcoderdataPython |
329479 | <filename>python/runner.py
import argparse
import re
from sandbox import iterative_placement
from sandbox.core import CoreScene
from sandbox.explainer import Explainer
from sandbox.hunter import Hunter
from sandbox.propertyset import PropertySet
def run_sample(scene, *props):
    """Analyse `scene` with the sandbox Explainer and report, per the CLI
    flags, whether each property in `props` can be explained.

    Command-line arguments (argparse) select the maximum scene layer,
    extra rule sets, optional numeric hunting, profiling, and which
    sections to dump.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--max-layer', default='user', choices=CoreScene.layers)
    parser.add_argument('--dump', nargs='+', choices=('scene', 'constraints', 'stats', 'result', 'properties', 'explanation'), default=('stats', 'result'))
    parser.add_argument('--run-hunter', action='store_true')
    parser.add_argument('--extra-rules', nargs='+', choices=('advanced', 'circles', 'trigonometric'), default=())
    parser.add_argument('--profile', action='store_true')
    args = parser.parse_args()
    if 'scene' in args.dump:
        scene.dump(include_constraints='constraints' in args.dump, max_layer=args.max_layer)
    # Optionally hunt a numeric placement for candidate properties to feed
    # into the explainer's statistics.
    if args.run_hunter:
        placement = iterative_placement(scene)
        hunter = Hunter(placement)
        hunter.hunt()
        properties = hunter.properties
    else:
        properties = []
    options = { 'max_layer': args.max_layer }
    for extra in args.extra_rules:
        options[extra] = True
    explainer = Explainer(scene, options=options)
    if args.profile:
        import cProfile
        cProfile.runctx('explainer.explain()', {'explainer': explainer}, {})
    else:
        explainer.explain()
    if 'properties' in args.dump:
        explainer.dump(properties)
    if 'stats' in args.dump:
        explainer.stats(properties).dump()
    if 'result' in args.dump:
        for prop in props:
            explanation = explainer.explanation(prop)
            if explanation:
                print('\tExplained: %s' % explanation)
            else:
                print('\tNot explained: %s' % prop)
    if 'explanation' in args.dump:
        # Pretty-print a property and its premise tree, indented by level.
        def dump(prop, level=0):
            print('\t' + ' ' * level + str(prop) + ': ' + str(prop.reason.comment))
            if prop.reason.premises:
                for premise in prop.reason.premises:
                    dump(premise, level + 1)
        # Depth of the explanation tree (0 for a leaf).
        def depth(prop):
            if prop.reason.premises:
                return 1 + max(depth(p) for p in prop.reason.premises)
            return 0
        # Total node count of the tree, counting repeated premises.
        def full_size(prop):
            if prop.reason.premises:
                return 1 + sum(full_size(p) for p in prop.reason.premises)
            return 1
        # Collect the transitive premises into a PropertySet for stats.
        def all_premises(prop):
            premises = PropertySet(explainer.context.points)
            for p in prop.reason.all_premises:
                premises.add(p)
            return premises
        for prop in props:
            explanation = explainer.explanation(prop)
            if explanation:
                dump(explanation)
                print('Depth = %s' % depth(explanation))
                print('Full size = %s' % full_size(explanation))
                cumulative_priorities = {}
                # Memoised "cumulative priority": 70% own priority plus 30%
                # of the strongest premise's cumulative priority.
                def cumu(prop):
                    cached = cumulative_priorities.get(prop)
                    if cached is not None:
                        return cached
                    if prop.reason.premises:
                        cu = 0.7 * prop.priority + 0.3 * max(cumu(p) for p in prop.reason.premises)
                    else:
                        cu = prop.priority
                    cumulative_priorities[prop] = cu
                    return cu
                priorities = {}
                for p in explanation.reason.all_premises:
                    priority = cumu(p)
                    priorities[priority] = priorities.get(priority, 0) + 1
                pairs = list(priorities.items())
                pairs.sort(key=lambda pair: -pair[0])
                count_all = len(explanation.reason.all_premises)
                print('Props = %d (%s)' % (count_all, ', '.join(['%.3f: %d' % p for p in pairs])))
                all_premises(explanation).stats().dump()
                # Histogram of which rule produced each premise.
                rules_map = {}
                for prop in explanation.reason.all_premises:
                    key = type(prop.rule).__name__ if hasattr(prop, 'rule') else 'Unknown'
                    rules_map[key] = rules_map.get(key, 0) + 1
                items = list(rules_map.items())
                items.sort(key=lambda pair: -pair[1])
                print('Rules:')
                for pair in items:
                    print('\t%s: %s' % pair)
| StarcoderdataPython |
5040525 | #!env/bin/python3
from termcolor import colored
import json
import os
import sys
import zmq
import fclient
import ffile
# client data
# Module-level mutable state: round-trip an empty template through JSON so
# every expected key exists up front.
client = json.dumps({'ip': '', 'port': '', 'serverIP': '', 'serverPort': ''})
client = json.loads(client)
context = zmq.Context()
socket = context.socket(zmq.REP)       # listening socket, bound in get_id()
socket_send = context.socket(zmq.REQ)  # outgoing socket, re-created per send in main()
def get_id(my_ip, server_ip):
    """Record our own ip:port and the server's ip:port in the module-level
    client dict, binding the REP socket on our port."""
    own_host, own_port = my_ip.split(':')
    client['ip'] = own_host
    client['port'] = own_port
    socket.bind('tcp://*:' + client['port'])
    srv_host, srv_port = server_ip.split(':')
    client['serverIP'] = srv_host
    client['serverPort'] = srv_port
def main():
    """Interactive REPL for the map-reduce demo client.

    Expects `client_ip:port server_ip:port` on the command line, then
    loops reading commands: `help`/`-h`, `send <file>`/`-s <file>`, `exit`.
    """
    global client, socket, socket_send
    my_ip = some_ip = ''
    if len(sys.argv) == 3:
        some_ip = sys.argv[2]
        # print(some_ip)
    elif len(sys.argv) == 2:
        print(colored('ERROR: Server IP missing', 'red', attrs=['bold']))
    else:
        print(colored('ERROR: Client IP and Server IP are missing', 'red', attrs=['bold']))
    if len(sys.argv) >= 2:
        my_ip = sys.argv[1]
    # NOTE(review): when the server IP is missing, execution still reaches
    # get_id with an empty string and will raise in split(':') -- confirm
    # whether an early exit was intended here.
    get_id(my_ip, some_ip) # Arguments to variables python
    try:
        fclient.clear()
        fclient.client_info(client)
        print (colored(
            'Welcome to Map Reduce simulation', 'yellow', attrs=['bold']), colored('Terminal', 'yellow'))
        while True:
            inp = input(colored('$ >> ', 'cyan'))
            inp = inp.split()
            if inp[0] == 'help' or inp[0] == '-h':
                fclient.options()
            elif inp[0] == '-s' or inp[0] == 'send':
                file = ffile.get_file(inp[1])
                if file:
                    # Build a send request and ship the file over a fresh
                    # REQ socket connected to the server.
                    print(colored('Sending the file to -> ' + client['serverIP'] + ':' + client['serverPort'], 'yellow'))
                    send_req = ffile.create_req('send', client['ip'] + ':' + client['port'], client['serverIP'] + ':' + client['serverPort'], {'origin': client['ip'] + ':' + client['port'], 'filename': os.path.basename(inp[1]), 'data': file})
                    socket_send = context.socket(zmq.REQ)
                    socket_send.connect('tcp://' + send_req['to'])
                    socket_send.send_string(json.dumps(send_req))
                    message = socket_send.recv()
                    print(colored(message.decode("utf-8"), 'green'))
                else:
                    print(colored('Invalid File' ,'red', attrs=['bold']))
            elif inp[0] == 'exit':
                print (colored('See you later', 'yellow'))
                break
            else:
                print (colored('Type a correct option', 'red'))
    except KeyboardInterrupt:
        # Ctrl-C exits cleanly instead of dumping a traceback.
        print('')
        print(colored('See you later', 'yellow'))
        exit(0)
if __name__ == '__main__':
    main()
11227631 | <gh_stars>1-10
class RewardPerformanceScore:
    """Reward scheme combining final placement points with a performance score."""
    rewardName = "PerformanceScore"

    def getReward(self, thisPlayerPosition, performanceScore, matchFinished):
        """Return a small step penalty while the match runs; at the end,
        return placement points ((3 - position) / 3) plus the score."""
        if not matchFinished:
            return -0.001
        placement_points = (3 - thisPlayerPosition) / 3
        return placement_points + performanceScore
| StarcoderdataPython |
9645899 | from datetime import datetime
import logging
import sys
import traceback
import click
import colorama
from colorama import Fore, Style
from dateutil import tz as dutz
import rasterio
from . import horizon, KM, sunrise_sunset, sunrise_sunset_details, sunrise_sunset_year
colorama.init()
logger = logging.getLogger(__package__)
# specify colors for different logging levels
LOG_COLORS = {logging.ERROR: Fore.RED, logging.WARNING: Fore.YELLOW}
class ColorFormatter(logging.Formatter):
    """Formatter that wraps a record's message in the ANSI color assigned
    to its level in LOG_COLORS (warnings/errors), leaving others plain."""
    def format(self, record, *args, **kwargs):
        level_color = LOG_COLORS.get(record.levelno)
        if level_color is not None:
            record.msg = "{color_begin}{message}{color_end}".format(
                message=record.msg,
                color_begin=level_color,
                color_end=Style.RESET_ALL,
            )
        return super().format(record, *args, **kwargs)
def setup_logging(is_debug):
    """Attach a colorized stdout handler to the module logger and set its
    verbosity: DEBUG when is_debug, INFO otherwise."""
    logger.setLevel(logging.DEBUG if is_debug else logging.INFO)
    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.setLevel(logging.DEBUG)
    stdout_handler.setFormatter(ColorFormatter("%(message)s"))
    logger.addHandler(stdout_handler)
class LatLonParamType(click.ParamType):
    """Click parameter type parsing a 'lat,lon' string into a float tuple."""
    name = "latlon"

    def convert(self, value, param, ctx):
        try:
            lat_text, lon_text = value.split(",")[:2]
            return float(lat_text.strip()), float(lon_text.strip())
        except Exception:
            self.fail(
                f"{value!r} is not a valid Lat Lon (eg '45.235555,5.83890')", param, ctx
            )
class CatchAllExceptionsCommand(click.Command):
    """Command subclass converting any unexpected exception into an
    UnrecoverableJNCEPError so click reports it through show()."""
    def invoke(self, ctx):
        try:
            return super().invoke(ctx)
        except Exception as ex:
            # Keep the original traceback so it can be logged at debug level.
            raise UnrecoverableJNCEPError(str(ex), sys.exc_info())
class UnrecoverableJNCEPError(click.ClickException):
    """Fatal error wrapper; show() logs the message, plus the captured
    traceback when debug logging is enabled."""
    def __init__(self, message, exc_info):
        super().__init__(message)
        self.exc_info = exc_info  # (type, value, traceback) from sys.exc_info()
    def show(self):
        logger.error("*** An unrecoverable error occured ***")
        logger.error(self.message)
        logger.debug("".join(traceback.format_exception(*self.exc_info)))
def colored(s, color):
    """Wrap `s` in the given colorama color code, resetting afterwards."""
    return "{}{}{}".format(color, s, Fore.RESET)
# Reusable click option decorators shared by the `day` and `year` commands.
# NOTE(review): "-position" uses a single dash; looks like a typo for
# "--position" -- confirm the intended CLI spelling before changing it.
position_option = click.option(
    "-p",
    "-position",
    "latlon",
    help="Latitude and Longitude of the location to consider (eg 45.2315,5.8389)",
    required=True,
    type=LatLonParamType(),
)
dem_option = click.option(
    "-m",
    "--dem",
    "dem_filepath",
    help="DEM in TIFF format and Geographic CRS (eg WGS4)",
    required=True,
    type=click.Path(exists=True, resolve_path=True, dir_okay=False),
)
timezone_option = click.option(
    "-t",
    "--timezone",
    "timezone",
    help="Timezone for the result [Default: Timezone of the local machine]",
)
distance_option = click.option(
    "--distance",
    "distance",
    help="Distance from the position to consider when computing the horizon (in KM)",
    default=25,
    type=int,
    show_default=True,
)
angle_option = click.option(
    "--angle-precision",
    "angle_precision",
    help="Precision of horizon angles (for each degree)",
    default=1,
    type=int,
    show_default=True,
)
time_option = click.option(
    "--time-precision",
    "time_precision",
    help="Precision of times (for each hour)",
    default=60,
    type=int,
    show_default=True,
)
height_option = click.option(
    "-h",
    "--height",
    "height",
    help="Height of position in meters",
    default=2,
    type=int,
    show_default=True,
)
# Root command group: configures logging and stores the debug flag on the
# click context for subcommands.
@click.group()
@click.option(
    "-d",
    "--debug",
    "is_debug",
    is_flag=True,
    help=("Flag to activate debug mode"),
    required=False,
)
@click.pass_context
def main(ctx, is_debug):
    """Computes sunset / sunrise time taking into account local topography"""
    setup_logging(is_debug)
    # special attribute of context
    ctx.obj = {"DEBUG": is_debug}
@main.command(
"day",
help="Compute sunset / sunrise time for a single day",
cls=CatchAllExceptionsCommand,
)
@position_option
@dem_option
@click.option(
"-j",
"--day",
"day",
metavar="DATE",
help="Date to consider (in YYYY-MM-DD format)",
required=True,
type=click.DateTime(formats=["%Y-%m-%d"]),
)
@click.option(
"-v",
"--details",
"is_details",
is_flag=True,
help="Show additional details",
)
@timezone_option
@distance_option
@height_option
@angle_option
@time_option
@click.pass_context
def tppss_day(
ctx,
latlon,
dem_filepath,
day,
is_details,
timezone,
distance,
height,
angle_precision,
time_precision,
):
tz = dutz.gettz(timezone)
if timezone is None:
zone_name = datetime.now(tz).tzname()
logger.warning(f"Timezone set to local: '{zone_name}'")
with rasterio.open(dem_filepath) as dataset:
logger.info("Compute horizon...")
horizon_ = horizon(
latlon,
dataset,
distance=distance * KM,
height=height,
precision=angle_precision,
)
logger.info("Compute sunrise / sunset...")
if not is_details:
res = sunrise_sunset(latlon, horizon_, day, tz, precision=time_precision)
sunrise, sunset, is_light_all_day = res
if is_light_all_day is None:
text = f"Sunrise: {sunrise} / Sunset: {sunset}"
else:
if is_light_all_day:
text = "Light all day!"
else:
text = "Night all day!"
logger.info(colored(text, Fore.GREEN))
else:
res = sunrise_sunset_details(
latlon, horizon_, day, tz, precision=time_precision
)
sunrises, sunsets, is_light_all_day = res
if is_light_all_day is None:
logger.info(colored(f"{len(sunrises)} sunrises", Fore.GREEN))
for i in range(len(sunrises)):
sunrise = sunrises[i]
sunset = sunsets[i]
text = f"Sunrise: {sunrise} / Sunset: {sunset}"
logger.info(colored(text, Fore.GREEN))
else:
if is_light_all_day:
text = "Light all day!"
else:
text = "Night all day!"
logger.info(colored(text, Fore.GREEN))
@main.command(
"year",
help="Compute sunset / sunrise time for a whole year",
cls=CatchAllExceptionsCommand,
)
@position_option
@dem_option
@click.option(
"-y",
"--year",
"year",
metavar="YEAR",
help="Year to take into account",
required=True,
type=int,
)
@timezone_option
@click.option(
"-o",
"--csv",
"csv_filepath",
help="CSV to export",
type=click.Path(resolve_path=True, dir_okay=False, writable=True),
required=True,
)
@distance_option
@height_option
@angle_option
@time_option
@click.pass_context
def tppss_year(
ctx,
latlon,
dem_filepath,
year,
timezone,
csv_filepath,
distance,
height,
angle_precision,
time_precision,
):
if not 1901 <= year <= 2099:
logger.warning(
"Sun position computation may not be accurate outside years 1901 to 2099!"
)
tz = dutz.gettz(timezone)
if timezone is None:
zone_name = datetime.now(tz).tzname()
logger.warning(f"Timezone set to local: '{zone_name}'")
with rasterio.open(dem_filepath) as dataset:
logger.info("Compute horizon...")
horizon_ = horizon(
latlon,
dataset,
distance=distance * KM,
height=height,
precision=angle_precision,
)
logger.info(f"Compute sunrise / sunset for year {year}...")
sunsuns = sunrise_sunset_year(
latlon, horizon_, year, tz, precision=time_precision
)
logger.info(f"Write results to {csv_filepath}...")
with open(csv_filepath, "w", encoding="utf-8") as f:
print_output(f, year, sunsuns)
def print_output(file_, year, sunsuns):
    """Write the per-day sunrise/sunset rows as CSV to `file_`.

    `sunsuns` holds (day, sunrise, sunset, all_day_flag) tuples; when the
    flag is not None the day has no sun event and NA columns are emitted.
    `year` is kept for interface compatibility but is not used.
    """
    day_fmt = "%Y-%m-%d"
    event_fmt = "%H:%M:%S%z"
    file_.write("DAY,SUNRISE,SUNSET\n")
    for day, sunrise, sunset, all_day_flag in sunsuns:
        if all_day_flag is None:
            row = ",".join(
                (
                    day.strftime(day_fmt),
                    sunrise.strftime(event_fmt),
                    sunset.strftime(event_fmt),
                )
            )
            file_.write(row + "\n")
        else:
            file_.write(day.strftime(day_fmt) + ",NA,NA\n")
if __name__ == "__main__":
main()
| StarcoderdataPython |
6581091 | <reponame>srujanpanuganti/elsa
#! /usr/bin/env python
from __future__ import division
import rospy
from math import pi, asin
# from geometry_msgs.msg import Twist, Pose
# from nav_msgs.msg import Odometry
from std_msgs.msg import Int32
import numpy as np
import RPi.GPIO as gpio
class TickPublisher:
    """ROS node polling two GPIO pins (board pins 7 and 12) for wheel
    encoder level changes and publishing the accumulated tick counts."""
    def __init__(self):
        # Tick accumulators plus the last sampled pin level for each wheel
        # (BL = back-left on pin 12, BR = back-right on pin 7).
        self.counterBL = np.uint64(0)
        self.counterBR = np.uint64(0)
        self.buttonBL = int(0)
        self.buttonBR = int(0)
        self.left = self.counterBL
        self.right = self.counterBR
        self.initialize_pins()
    def initialize_pins(self):
        # Board pin numbering; both encoder inputs use internal pull-ups.
        gpio.setmode(gpio.BOARD)
        gpio.setup(7, gpio.IN, pull_up_down = gpio.PUD_UP)
        gpio.setup(12, gpio.IN, pull_up_down = gpio.PUD_UP)
    def main(self):
        """Initialise the node and publish tick counts at `~rate` Hz."""
        rospy.init_node('ticks_pub')
        # Private (~) topic names scope the publishers under this node.
        self.leftPub = rospy.Publisher('~lwheel_ticks',
                                       Int32, queue_size=10)
        self.rightPub = rospy.Publisher('~rwheel_ticks',
                                        Int32, queue_size=10)
        self.nodeName = rospy.get_name()
        rospy.loginfo("{0} started".format(self.nodeName))
        self.rate = rospy.get_param('~rate', 10.0)
        self.timeout = rospy.get_param('~timeout', 0.5)
        rate = rospy.Rate(self.rate)
        self.lastTime = rospy.get_time()
        while not rospy.is_shutdown():
            self.publish()
            rate.sleep()
    def publish(self):
        """Sample both pins, count a tick on every level change (both
        edges), and publish the running totals."""
        if int(gpio.input(7)) != int(self.buttonBR):
            self.buttonBR = int(gpio.input(7))
            self.counterBR += 1
        if int(gpio.input(12)) != int(self.buttonBL):
            self.buttonBL = int(gpio.input(12))
            self.counterBL += 1
        self.left = self.counterBL
        self.right = self.counterBR
        # print(self.left, self.right)
        # NOTE(review): counters are np.uint64 but published as Int32 --
        # confirm overflow behaviour is acceptable for long runs.
        self.leftPub.publish(self.left)
        self.rightPub.publish(self.right)
if __name__ == '__main__':
    # Standard ROS entry point; ROSInterruptException is raised on node
    # shutdown and is safe to swallow.
    try:
        node = TickPublisher()
        node.main()
    except rospy.ROSInterruptException:
        pass
| StarcoderdataPython |
274449 | import csv
from typing import List
def write_dict_to_csv(data, file):
    """Write a list of homogeneous dicts to `file` as CSV.

    The header row comes from the keys of the first dict, so `data` must
    be non-empty and every row should share the same keys.
    """
    csv_columns = list(data[0].keys())
    # newline="" is required by the csv module; without it every row is
    # followed by a blank line on Windows.
    with open(file, "w", newline="") as csvfile:
        writer = csv.DictWriter(csvfile, fieldnames=csv_columns)
        writer.writeheader()
        writer.writerows(data)
def read_csv_to_dict(file):
    """Read a CSV file with a header row into a list of dicts (one per row)."""
    # newline="" lets the csv module perform its own newline handling, as
    # required by the csv documentation for reader file objects.
    with open(file, "r", newline="") as csvfile:
        return list(csv.DictReader(csvfile))
def compare_rows_helper(expected: List[dict], actual: List[dict]):
    """Assert that two lists of row dicts agree column by column.

    Row counts must match and, for every column of the first expected row,
    the set of values must be identical across both lists (order is
    ignored).  Columns whose name starts with "date" compare only their
    first 10 characters (the YYYY-MM-DD part).

    Raises:
        AssertionError: listing every mismatching column at once.
    """
    assert len(expected) == len(
        actual
    ), f"Unequal row counts: {len(expected)} != {len(actual)}"
    if not expected:
        # Two empty result sets are trivially equal; previously this
        # crashed with IndexError on expected[0].
        return
    errors = []
    for k in expected[0].keys():
        if k.startswith("date"):
            exp = {e[k][:10] for e in expected}
            act = {a[k][:10] for a in actual}
        else:
            exp = {e[k] for e in expected}
            act = {a[k] for a in actual}
        if exp != act:
            errors.append(f"Unequal value sets: {exp}, {act}")
    error_str = "\n".join(errors)
    assert not errors, f"Failed with the following unequal columns:\n{error_str}"
| StarcoderdataPython |
9772792 | from uzuwiki.settings_static_file_engine import *
from commons.file.backends.file_engine_s3 import _put_s3
from logging import getLogger
import os
import hashlib
import mimetypes
import tempfile
from datetime import datetime
logger = getLogger(__name__)
def initialize_dirs(wiki_id):
    """Create the wiki's S3 "directory" by uploading an empty init marker
    object under the static-file prefix."""
    bucket = STATIC_FILE_S3_PATH["bugget_name"]
    marker_key = "/".join([STATIC_FILE_S3_PATH["path"], wiki_id, "init"])
    _put_s3(bucket, marker_key, "")
def put_static_file(wiki_id, file_name, file_data):
    """Upload an attachment to S3 under a unique hashed name and return a
    record describing it (id, mime, size, original name, relative path).

    The S3 object is made public.  `file_data` is expected to be a
    Django-style uploaded file exposing `.chunks()` -- TODO confirm.
    """
    logger.debug("put_static_file:start")
    base_name, ext = os.path.splitext(file_name)
    # Hash the name together with a microsecond timestamp so repeated
    # uploads of the same file never collide.
    timestamp = datetime.now().strftime("%Y%m%d%H%M%S%f")
    digest_file_name = hashlib.md5((base_name + timestamp).encode("UTF-8")).hexdigest()
    dir = STATIC_FILE_S3_PATH
    path = "/".join([dir["path"], wiki_id, digest_file_name + ext])
    _put_s3(dir["bugget_name"], path, file_data, public_flg=True)
    # Spool the upload to disk once to measure its size and guess its MIME
    # type; the context managers guarantee cleanup (the original leaked
    # the TemporaryDirectory and relied on garbage collection).
    with tempfile.TemporaryDirectory() as temp_dir:
        attach_file_path = os.path.join(temp_dir, file_name)
        with open(attach_file_path, 'wb') as destination:
            for chunk in file_data.chunks():
                destination.write(chunk)
        mime = mimetypes.guess_type(attach_file_path)
        size = os.path.getsize(attach_file_path)
    record = {
        "type": "s3",
        "id": digest_file_name,
        "mime": mime[0],
        "size": size,
        "name": file_name,
        "path": path[len(dir["path"]):],
        "bugget": dir["bugget_name"],
        "timestamp": datetime.now().isoformat(),
    }
    logger.debug("record=%s", record)
    logger.debug("put_static_file:end")
    return record
def get_static_file_url(attachment_record):
    """Build the public URL for an attachment record produced by
    put_static_file (its "path" field carries a leading slash)."""
    relative_path = attachment_record["path"][1:]  # drop the leading "/"
    return STATIC_URL + relative_path
| StarcoderdataPython |
5091881 | import sys
import re
# Parse a benchmark log given as argv[1]: drop the header line and blank
# lines, then repeatedly extract "name: ... <float>\n" segments and print
# "name:\t<float>" pairs.
filename = sys.argv[1]
# print(filename)
with open(filename) as f:
    lines = f.readlines()
no_header = [line for line in lines[1:]
             if line != '\n']
data = "".join(no_header)
# print(data)
# A performance figure is a float immediately followed by a newline.
re_matcher = re.compile('[0-9]+\.[0-9]+\n')
rest = data
while True:
    temp = rest.split(":")
    if len(temp) == 1:
        # no ":" left, so no more "bench: value" pairs remain
        break
    bench = temp[0]
    # print("Bench:", bench)
    rest = ":".join(temp[1:])
    # The first float after the bench name is its result; everything after
    # it is carried into the next iteration.
    perf = re_matcher.findall(rest)[0]
    # perf = re.findall('[0-9]+\.[0-9]+\n', rest)[0]
    # print("Perf:", perf)
    rest = rest.split(perf)[1]
    # print(rest)
    print(f'{bench}:\t{perf.rstrip()}')
    # break
# break
| StarcoderdataPython |
1726756 | """Script defined to test the paystack class."""
import unittest
import httpretty
from paystackapi.paystack import Paystack
# Test-mode ("sk_test_") secret key used against Paystack's sandbox; not a
# production credential, but still best kept out of source control.
paystack_secret_key = "sk_test_0a246ef179dc841f42d20959bebdd790f69605d8"
paystack = Paystack(secret_key=paystack_secret_key)
class TestPaystackClass(unittest.TestCase):
    """Tests for dynamic API attribute dispatch on the Paystack client."""
    @httpretty.activate
    def test_transaction_init(self):
        """paystack.transaction.list() should hit the mocked endpoint and
        surface the JSON body's status flag."""
        mock_body = '{"status": true, "contributors": true}'
        httpretty.register_uri(
            httpretty.GET,
            "https://api.paystack.co/transaction",
            content_type='text/json',
            status=201,
            body=mock_body,
        )
        listing = paystack.transaction.list()
        self.assertTrue(listing['status'])
| StarcoderdataPython |
8011487 | <filename>model.py
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""=================================================
@Author :蒋虎成
@Date :2021/9/24 19:12
@Desc :模型训练
=================================================="""
import csv
import os
from settings import DATACENTER_ID,WORKER_ID,SEQUENCE,color_distance
from colors import ColorMultiImage
import numpy as np
from function.snowflake import IdWorker
def training(color_data_path):
    """Extract the dominant colors of the images under `color_data_path`,
    keep those farther than `color_distance` from black, append them as
    (R, G, B, distance) rows to a uniquely named CSV, and return its path.
    """
    black_rgb = [0, 0, 0]
    color_rows = []
    # A snowflake id gives the output file a collision-free name;
    # os.path.join keeps the path portable (original hard-coded "\\").
    csv_name = str(IdWorker(DATACENTER_ID, WORKER_ID, SEQUENCE).get_id()) + ".csv"
    color_distance_filepath = os.path.join(os.getcwd(), "output", "csv", csv_name)
    get_model = ColorMultiImage()
    color_data = get_model.get_main_colors(color_data_path)
    for rbg in color_data:
        color_rows.append(rbg + [get_model.colour_distance(rbg, black_rgb)])
    # Sort by distance-from-black (column 3) and drop colors too close to
    # black per the configured threshold.
    color_data_sort = np.array(sorted(color_rows, key=lambda row: row[3]))
    color_data_sort = color_data_sort[color_data_sort[:, 3] > color_distance]
    # newline="" per the csv docs; the with-block also fixes the original's
    # leaked file handle (it was opened and never closed).
    with open(color_distance_filepath, "a+", newline="", encoding="utf-8-sig") as csv_file:
        writer = csv.writer(csv_file)
        for rbg in color_data_sort:
            writer.writerow(tuple(rbg))
    return color_distance_filepath
3526646 | import pathlib
import geopandas as gpd
# Resolve the repository layout relative to this file: <repo>/data holds
# the GeoJSON inputs and receives the CSV output.
THIS_DIR = pathlib.Path(__file__).parent.absolute()
PARENT_DIR = THIS_DIR.parent.absolute()
DATA_DIR = PARENT_DIR / "data"
def main():
    """Spatially join neighborhoods to ZIP polygons and write the
    hood/zipcode pairs to data/zips2hoods.csv."""
    zip_shapes = gpd.read_file(DATA_DIR / 'zips.geojson')
    hood_shapes = gpd.read_file(DATA_DIR / 'hoods.geojson')
    # inner join keeps only neighborhoods intersecting some ZIP polygon
    joined = gpd.sjoin(hood_shapes, zip_shapes, how="inner")
    pairs = joined[['name_left', 'name_right']].rename(columns={
        "name_left": "hood",
        "name_right": "zipcode",
    })
    pairs.to_csv(DATA_DIR / "zips2hoods.csv", index=False)


if __name__ == '__main__':
    main()
| StarcoderdataPython |
3455955 | # https://leetcode.com/problems/maximum-subarray/
# Approach 1: brute force over all subarrays with a running sum.
# O(N^2) time -- the original recomputed sum(nums[i:j]) for every pair,
# which made it O(N^3) despite its comment claiming O(N^2).
def maxSubArray(nums):
    """Return the largest sum of any non-empty contiguous subarray.

    Seeds the running maximum with -10**4, the LeetCode lower bound on
    element values.
    """
    best = -10 ** 4
    for i in range(len(nums)):
        running = 0
        for j in range(i, len(nums)):
            running += nums[j]  # sum of nums[i..j] maintained incrementally
            best = max(best, running)
    return best
# Approach 2: Kadane's algorithm -- single pass, O(N) time.
def maxSubArray(nums):
    """Return the largest sum of any non-empty contiguous subarray.

    Tracks the best subarray sum ending at the current element: extend the
    previous run when it helps, otherwise restart at the current value.
    """
    best = -10 ** 4
    ending_here = 0
    for value in nums:
        ending_here = max(ending_here + value, value)
        best = max(best, ending_here)
    return best
| StarcoderdataPython |
3499936 | import logging
from osconfeed import load
from frozenjson import *
from frozenattr import *
# Demo script: exercise the FrozenJSON and FrozenAttr wrappers over the
# OSCON schedule feed loaded by osconfeed.load().
logging.basicConfig(level=logging.DEBUG)
raw_feed = load()
feed = FrozenJSON(raw_feed)
print(len(feed.Schedule.keys()))
# Record count per schedule section (e.g. conferences, events, speakers).
for key, value in sorted(feed.Schedule.items()):
    print('{:3} {}'.format(len(value), key))
sudeepFrozen = FrozenAttr(raw_feed)
print(sudeepFrozen)
print(len(sudeepFrozen.Schedule))
for key, value in sudeepFrozen.Schedule.items():
    print(key, value)
5190856 | <gh_stars>10-100
#!/usr/bin/python
# after runing this file you MUST modify nsIdentityinfo.cpp to change the
# fingerprint of the evroot
import tempfile, os, sys
import random
import pexpect
import subprocess
import shutil
libpath = os.path.abspath('../psm_common_py')
sys.path.append(libpath)
import CertUtils
dest_dir = os.getcwd()
db = tempfile.mkdtemp()
CA_basic_constraints = "basicConstraints = critical, CA:TRUE\n"
CA_min_ku = "keyUsage = critical, digitalSignature, keyCertSign, cRLSign\n"
subject_key_ident = "subjectKeyIdentifier = hash\n"
def generate_root_cert(db_dir, dest_dir, prefix, ext_text):
serial_num = 12343299546
name = prefix
key_name = dest_dir + "/" + name + ".key"
os.system ("openssl genpkey -algorithm RSA -out " + key_name +
" -pkeyopt rsa_keygen_bits:2048")
csr_name = dest_dir + "/" + name + ".csr"
os.system ("openssl req -new -key " + key_name + " -days 3650" +
" -extensions v3_ca -batch -out " + csr_name +
" -utf8 -subj '/C=US/ST=CA/L=Mountain View" +
"/O=Mozilla - EV debug test CA/OU=Security Engineering" +
"/CN=XPCShell EV Testing (untrustworthy) CA'")
extensions_filename = db_dir + "/openssl-exts"
f = open(extensions_filename, 'w')
f.write(ext_text)
f.close()
cert_name = dest_dir + "/" + name + ".der"
signer_key_filename = key_name
os.system ("openssl x509 -req -sha256 -days 3650 -in " + csr_name +
" -signkey " + signer_key_filename +
" -set_serial " + str(serial_num) +
" -extfile " + extensions_filename +
" -outform DER -out " + cert_name)
return key_name, cert_name
prefix = "evroot"
[ca_key, ca_cert] = generate_root_cert(db, dest_dir, prefix,
CA_basic_constraints +
CA_min_ku + subject_key_ident)
CertUtils.generate_pkcs12(db, dest_dir, ca_cert, ca_key, prefix)
print ("You now MUST modify nsIdentityinfo.cpp to ensure the xpchell debug " +
"certificate there matches this newly generated one\n")
| StarcoderdataPython |
4898814 | from rest_framework import serializers
from accounts.models import User
from pwuDB.models import Categories, Coupon, Orders, Products
class CategoriesSerializer(serializers.ModelSerializer):
    """Serializes every field of the Categories model."""

    class Meta:
        model = Categories
        fields = '__all__'
class CategorySerializerForProduct(serializers.ModelSerializer):
    """Slim category serializer for embedding inside product payloads;
    exposes only the human-readable description."""

    class Meta:
        model = Categories
        fields = ('description',)
class ProductSerializer(serializers.ModelSerializer):
    """Full product serializer; replaces the category FK with a nested
    object containing the category description."""

    # Nested serializer exposing only the category's description.
    category = CategorySerializerForProduct()

    class Meta:
        model = Products
        fields = "__all__"
class ProductSerializerForOrder(serializers.ModelSerializer):
    """Product serializer for embedding inside orders; drops the image
    fields to keep the order payload small."""

    class Meta:
        model = Products
        exclude = ['image1','image2']
class UserSerializerForOrder(serializers.ModelSerializer):
    """User serializer for embedding inside orders; omits credentials and
    internal account flags so they never leak into order responses."""

    class Meta:
        model = User
        exclude = ['password', 'admin', 'is_active', 'last_login', 'staff']
class OrderSerializer(serializers.ModelSerializer):
    """Order detail serializer with nested customer and product objects."""

    # Nested representations of the related user and product rows.
    customer = UserSerializerForOrder()
    product = ProductSerializerForOrder()

    class Meta:
        model = Orders
        fields = ['ord_id', 'customer', 'product', 'ord_quantity', 'ord_price', 'ord_feedback']
class OrderListSerializer(serializers.ModelSerializer):
    """Flat order serializer for list views: every Orders field, foreign
    keys left as primary keys (no nested objects).

    (Removed a stale commented-out ``fields`` tuple that no longer matched
    the model.)
    """

    class Meta:
        model = Orders
        fields = "__all__"
class CouponSerializer(serializers.ModelSerializer):
    """Serializes every field of the Coupon model.

    (Stripped a stray dataset-artifact token that was fused onto the last
    line and made the class definition invalid Python.)
    """

    class Meta:
        model = Coupon
        fields = "__all__"
# Python Version: 3.x
"""
the module for yosupo's Library Checker (https://judge.yosupo.jp)
"""
import glob
import os
import pathlib
import re
import subprocess
import sys
import urllib.parse
from typing import *
import requests
import onlinejudge._implementation.logging as log
import onlinejudge._implementation.testcase_zipper
import onlinejudge._implementation.utils as utils
import onlinejudge.type
from onlinejudge.type import TestCase
class LibraryCheckerService(onlinejudge.type.Service):
    """Service wrapper for yosupo's Library Checker (https://judge.yosupo.jp)."""

    def get_url(self) -> str:
        return 'https://judge.yosupo.jp/'

    def get_name(self) -> str:
        return 'Library Checker'

    @classmethod
    def from_url(cls, url: str) -> Optional['LibraryCheckerService']:
        # example: https://judge.yosupo.jp/
        parsed = urllib.parse.urlparse(url)
        if parsed.netloc != 'judge.yosupo.jp':
            return None
        if parsed.scheme not in ('', 'http', 'https'):
            return None
        return cls()

    @classmethod
    def _get_cloned_repository_path(cls) -> pathlib.Path:
        # Location of the local clone of library-checker-problems.
        return utils.user_cache_dir / 'library-checker-problems'

    # Process-wide flag: the clone is synchronized at most once per run.
    is_repository_updated = False

    @classmethod
    def _update_cloned_repository(cls) -> None:
        """Clone the problems repository on first use, or pull it up to date."""
        if cls.is_repository_updated:
            return
        # Fail early with a clear message when git is not installed.
        try:
            subprocess.check_call(['git', '--version'], stdout=sys.stderr, stderr=sys.stderr)
        except FileNotFoundError:
            log.error('git command not found')
            raise
        repo = LibraryCheckerService._get_cloned_repository_path()
        if repo.exists():
            # sync the problem repository
            log.status('$ git -C %s pull', str(repo))
            subprocess.check_call(['git', '-C', str(repo), 'pull'], stdout=sys.stderr, stderr=sys.stderr)
        else:
            # init the problem repository
            url = 'https://github.com/yosupo06/library-checker-problems'
            log.status('$ git clone %s %s', url, repo)
            subprocess.check_call(['git', 'clone', url, str(repo)], stdout=sys.stderr, stderr=sys.stderr)
        cls.is_repository_updated = True
class LibraryCheckerProblem(onlinejudge.type.Problem):
    """A single Library Checker problem, identified by its slug
    (e.g. ``unionfind`` in https://judge.yosupo.jp/problem/unionfind)."""

    def __init__(self, *, problem_id: str):
        self.problem_id = problem_id

    def download_sample_cases(self, *, session: Optional[requests.Session] = None) -> List[TestCase]:
        """Return only the example test cases (files named ``example_*``)."""
        self._generate_test_cases_in_cloned_repository()
        path = self._get_problem_directory_path()
        files = []  # type: List[Tuple[str, bytes]]
        files += [(file.name, file.read_bytes()) for file in path.glob('in/*.in') if file.name.startswith('example_')]
        files += [(file.name, file.read_bytes()) for file in path.glob('out/*.out') if file.name.startswith('example_')]
        return onlinejudge._implementation.testcase_zipper.extract_from_files(iter(files))

    def download_system_cases(self, *, session: Optional[requests.Session] = None) -> List[TestCase]:
        """Return the full system test set (all generated in/out files)."""
        self._generate_test_cases_in_cloned_repository()
        path = self._get_problem_directory_path()
        files = []  # type: List[Tuple[str, bytes]]
        files += [(file.name, file.read_bytes()) for file in path.glob('in/*.in')]
        files += [(file.name, file.read_bytes()) for file in path.glob('out/*.out')]
        return onlinejudge._implementation.testcase_zipper.extract_from_files(iter(files))

    def _generate_test_cases_in_cloned_repository(self, compile_checker: bool = False) -> None:
        """Run the repository's ``generate.py`` so the in/out files exist locally.

        Raises subprocess.CalledProcessError when the generator fails.
        """
        LibraryCheckerService._update_cloned_repository()
        path = LibraryCheckerService._get_cloned_repository_path()
        # generate.py is only expected to work on POSIX with Python >= 3.6.
        if sys.version_info < (3, 6):
            log.warning("generate.py may not work on Python 3.5 or older")
        if os.name == 'nt':
            log.warning("generate.py may not work on Windows")
        problem_spec = str(self._get_problem_directory_path() / 'info.toml')
        # Re-use the current interpreter to run the generator script.
        command = [sys.executable, str(path / 'generate.py'), problem_spec]
        if compile_checker:
            command.append('--compile-checker')
        log.status('$ %s', ' '.join(command))
        try:
            subprocess.check_call(command, stdout=sys.stderr, stderr=sys.stderr)
        except subprocess.CalledProcessError:
            log.error("the generate.py failed: check https://github.com/yosupo06/library-checker-problems/issues")
            raise

    def _get_problem_directory_path(self) -> pathlib.Path:
        """Locate this problem's directory in the clone via its ``info.toml``.

        Raises RuntimeError when the problem is missing or matched more
        than once.
        """
        path = LibraryCheckerService._get_cloned_repository_path()
        # glob.escape guards against glob metacharacters in the problem id.
        info_tomls = list(path.glob('**/{}/info.toml'.format(glob.escape(self.problem_id))))
        if len(info_tomls) != 1:
            log.error("the problem %s not found or broken", self.problem_id)
            raise RuntimeError()
        return info_tomls[0].parent

    def get_url(self) -> str:
        return 'https://judge.yosupo.jp/problem/{}'.format(self.problem_id)

    def get_service(self) -> LibraryCheckerService:
        return LibraryCheckerService()

    @classmethod
    def from_url(cls, url: str) -> Optional['LibraryCheckerProblem']:
        # example: https://judge.yosupo.jp/problem/unionfind
        result = urllib.parse.urlparse(url)
        if result.scheme in ('', 'http', 'https') \
                and result.netloc == 'judge.yosupo.jp':
            m = re.match(r'/problem/(\w+)/?', result.path)
            if m:
                return cls(problem_id=m.group(1))
        return None

    def download_checker_binary(self) -> pathlib.Path:
        """Build (if needed) and return the path to the problem's checker binary."""
        self._generate_test_cases_in_cloned_repository(compile_checker=True)
        return self._get_problem_directory_path() / "checker"
# Register the service/problem classes with the onlinejudge dispatcher so
# URL lookups can resolve to them.
onlinejudge.dispatch.services += [LibraryCheckerService]
onlinejudge.dispatch.problems += [LibraryCheckerProblem]
| StarcoderdataPython |
9702604 | <gh_stars>0
# -*- coding: utf-8 -*-
import cv2
import os
import re
# Directory holding the images to shrink to 32x32 thumbnails (in place).
TARGET_DIR = '/home/ringo/Prog/StarField/python3/OpenCV/test/'

for name in os.listdir(TARGET_DIR):
    print(name)
    # Match JPEG files by extension.  The original compiled a regex inside
    # the loop and matched "jpg" anywhere in the name, which also hit
    # names like "not_a_jpg.png".
    if not name.lower().endswith('.jpg'):
        continue
    # Read/write via the full path: os.listdir returns bare names, so the
    # original only worked when the CWD happened to be TARGET_DIR.
    path = os.path.join(TARGET_DIR, name)
    img = cv2.imread(path)
    if img is None:
        # imread returns None instead of raising on unreadable files.
        continue
    # Dropped the original cv2.imshow call: without a waitKey event-loop
    # pump it never rendered anything.
    resized = cv2.resize(img, (32, 32))
    cv2.imwrite(path, resized)
| StarcoderdataPython |
import abc
from sparclur._metaclass import Meta
from sparclur._renderer import Renderer
from sparclur._text_extractor import TextExtractor
class Hybrid(TextExtractor, Renderer, metaclass=Meta):
    """
    Abstract class to handle parsers that both render and have text extraction.
    """

    @abc.abstractmethod
    def __init__(self, doc,
                 temp_folders_dir,
                 skip_check,
                 timeout,
                 hash_exclude,
                 page_hashes,
                 validate_hash,
                 dpi,
                 cache_renders,
                 ocr,
                 *args,
                 **kwargs):
        """
        Parameters
        ----------
        ocr : bool
            Flag for whether or not text extraction calls should be made using OCR or the built-in parser feature.
        """
        # Forward all shared configuration to the TextExtractor/Renderer bases.
        super().__init__(doc=doc,
                         skip_check=skip_check,
                         dpi=dpi,
                         cache_renders=cache_renders,
                         timeout=timeout,
                         temp_folders_dir=temp_folders_dir,
                         hash_exclude=hash_exclude,
                         page_hashes=page_hashes,
                         validate_hash=validate_hash,
                         *args,
                         **kwargs)
        # Advertise the hybrid-only API entry alongside the inherited ones.
        hybrid_apis = {'compare_ocr': "Compares the OCR of the document with the text extraction"}
        self._api.update(hybrid_apis)
        # Current extraction mode: True -> OCR, False -> parser-native text.
        self._ocr: bool = ocr

    @property
    def ocr(self):
        # Whether text extraction currently goes through OCR.
        return self._ocr

    @ocr.setter
    def ocr(self, o: bool):
        # Flipping the mode invalidates any cached extraction results.
        if self._ocr != o:
            self.clear_text()
            self._can_extract = None
        self._ocr = o

    def compare_ocr(self, page=None, shingle_size=4):
        """
        Method that compares the OCR result to the built-in text extraction.

        Parameters
        ----------
        page : int
            Indicates which page the comparison should be run over. If 'None', all pages are compared.
        shingle_size : int, default=4
            The size of the token shingles used in the Jaccard similarity comparison between the OCR and the text
            extraction.

        Returns
        -------
        float
            The Jaccard similarity between the OCR and the text extraction (for the specified shingle size).
        """
        # Build a twin parser on the same document with the opposite OCR mode.
        # NOTE(review): only doc/dpi/cache_renders/timeout/ocr are forwarded;
        # the remaining __init__ parameters fall back to the subclass's
        # defaults — confirm that is intended.
        other = self.__class__(doc=self._doc,
                               dpi=self._dpi,
                               cache_renders=self._caching,
                               timeout=self._timeout,
                               ocr=not self._ocr)
        metric = self.compare_text(other, page=page, shingle_size=shingle_size)
        return metric

    @abc.abstractmethod
    def _extract_doc(self):
        # Subclasses: extract text for the whole document.
        pass

    @abc.abstractmethod
    def _extract_page(self, page: int):
        # Subclasses: extract text for a single page.
        pass

    @property
    def validity(self):
        # Delegates to the base implementation (kept explicit here).
        return super().validity

    @property
    def sparclur_hash(self):
        # Delegates to the base implementation (kept explicit here).
        return super().sparclur_hash
| StarcoderdataPython |
# Source: thinkAmi-sandbox/wsgi_webtest-sample — e.g._get_post_app/get_post_app.py
import datetime
import cgi
import io
from jinja2 import Environment, FileSystemLoader
# WSGIアプリとして、以下より移植
# https://github.com/thinkAmi-sandbox/wsgi_application-sample
class Message(object):
    """A single bulletin-board post plus its creation timestamp."""

    def __init__(self, title, handle, message):
        self.title, self.handle, self.message = title, handle, message
        # Timestamp captured at construction time.
        self.created_at = datetime.datetime.now()
class MyWSGIApplication(object):
    """Minimal bulletin-board WSGI app: POST appends a Message and
    redirects to '/', any other method renders the message list."""

    def __init__(self):
        # In-memory message store; lost on restart.
        self.messages = []

    # https://knzm.readthedocs.io/en/latest/pep-3333-ja.html#the-application-framework-side
    def __call__(self, environ, start_response):
        if environ['REQUEST_METHOD'].upper() == "POST":
            # The POST headers and body arrive together in wsgi.input.
            # To feed cgi.FieldStorage we:
            #   - decode the request,
            #   - split headers from the body,
            #   - re-encode the body,
            #   - hand the body to io.BytesIO.
            # NOTE(review): this assumes headers and body are separated by
            # '\r\n' and the body contains none — fragile for multipart.
            decoded = environ['wsgi.input'].read().decode('utf-8')
            header_body_list = decoded.split('\r\n')
            body = header_body_list[-1]
            encoded_body = body.encode('utf-8')
            # http://docs.python.jp/3/library/io.html#io.BytesIO
            with io.BytesIO(encoded_body) as bytes_body:
                fs = cgi.FieldStorage(
                    fp=bytes_body,
                    environ=environ,
                    keep_blank_values=True,
                )
                # Debug: dump the parsed FieldStorage contents.
                print('-'*20 + '\nFieldStorage:{}\n'.format(fs) + '-'*20)
                self.messages.append(Message(
                    title=fs['title'].value,
                    handle=fs['handle'].value,
                    message=fs['message'].value,
                ))
            # For the redirect, attaching a Location header is enough —
            # the browser takes care of the rest.
            location = "{scheme}://{name}:{port}/".format(
                scheme = environ['wsgi.url_scheme'],
                name = environ['SERVER_NAME'],
                port = environ['SERVER_PORT'],
            )
            start_response(
                '301 Moved Permanently',
                [('Location', location), ('Content-Type', 'text/plain')])
            # Return a placeholder body; the client follows the redirect.
            return [b'1']
        else:
            # GET (and everything else): render the message list template.
            jinja2_env = Environment(loader=FileSystemLoader('./templates', encoding='utf8'))
            template = jinja2_env.get_template('bbs.html')
            html = template.render({'messages': self.messages})
            start_response('200 OK', [('Content-Type', 'text/html')])
            return [html.encode('utf-8')]
app = MyWSGIApplication() | StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.