id stringlengths 1 8 | text stringlengths 6 1.05M | dataset_id stringclasses 1
value |
|---|---|---|
1725211 | from typing import Any
from typing import Tuple
from .config_base import ConfigBase
from .config_env import ConfigEnv
class Config(object):
    # todo use args ?
    def __init__(self, order=(ConfigEnv(),)):
        # type: (Tuple[ConfigBase, ...]) -> None
        """
        Args:
            order (tuple): tuple of ConfigBase instances defining the order
            of config files
        """
        if order is None:
            order = (ConfigEnv(),)
        self.__configs = order
        for c in self.__configs:
            assert isinstance(c, ConfigBase)

    def __get(self, key, return_type=None):
        # type: (str, type) -> Any
        """Return the first non-None value for `key`, searching the configs
        in their configured order; None if no config provides the key.

        Bug fix: the previous implementation returned None as soon as ANY
        config lacked the key (even when a later config had it) and,
        conversely, let later configs overwrite values found earlier.  Now
        the earliest config providing a value wins, matching the documented
        "order of config files" semantics.
        """
        for c in self.__configs:
            if isinstance(c, ConfigEnv):
                # Environment lookups are namespaced by the concrete class
                # name, e.g. key "HOST" of subclass MyConfig -> "MyConfig_HOST".
                v = c.get("{}_{}".format(self.__class__.__name__, key))
            else:
                v = c.get(key)
            if v is not None:
                # Coerce to the requested type when one was given.
                return return_type(v) if return_type else v
        return None

    def configure(self):
        # type: () -> None
        """
        Configures object based on its initialization

        Every public attribute of the instance is looked up in the
        configured sources; a found value replaces the default, coerced to
        the type of the current attribute value.
        """
        for i in vars(self):
            if i.startswith("_"):
                continue
            val = self.__get(i, return_type=type(getattr(self, i)))
            if val is not None:
                setattr(self, i, val)

    def __str__(self):
        # Tab-separated "name<TAB>value" dump of all public attributes.
        s = ""
        for i in vars(self):
            if i.startswith("_"):
                continue
            s += "{}\t{}\n".format(i, getattr(self, i))
        return s
| StarcoderdataPython |
389728 | <reponame>ParspooyeshFanavar/pyibsng
"""Search batch API method."""
from ibsng.handler.handler import Handler
class searchBatch(Handler):
    """API method class for searching batches."""
    def control(self):
        """Validate the parameters stored by setup().

        :return: None
        :rtype: None
        """
        # Table-driven validation: (value, expected type) pairs, checked
        # in the same order as before.
        checks = (
            (self.conds, dict),
            (self.index_from, int),
            (self.index_to, int),
            (self.sort_by, str),
            (self.desc, bool),
        )
        for value, expected_type in checks:
            self.is_valid(value, expected_type)
    def setup(self, conds, index_from, index_to, sort_by, desc):
        """Store the search parameters on the instance.

        :param dict conds: conditions
        :param int index_from: from index
        :param int index_to: end index
        :param str sort_by: sort result by specific field (batch_id,
                            batch_name, batch_credit)
        :param bool desc: descending
        :return: None
        :rtype: None
        """
        (self.conds, self.index_from, self.index_to,
         self.sort_by, self.desc) = (conds, index_from, index_to,
                                     sort_by, desc)
| StarcoderdataPython |
1727261 | <filename>fastapi_mail/errors.py
class NotAvilableService(Exception):
    """Raised when a requested mail service is not available.

    NOTE: the misspelled class name ("Avilable") is kept for backward
    compatibility with existing callers.
    """
    def __init__(self, expression, message):
        self.expression, self.message = expression, message
class TypeExecption(TypeError):
    """TypeError variant carrying the offending expression.

    NOTE: the misspelled class name ("Execption") is kept for backward
    compatibility with existing callers.
    """
    def __init__(self, expression):
        super().__init__(expression)
        self.expression = expression
class ConnectionErrors(Exception):
    """Raised for failures while connecting to the mail server."""
    def __init__(self, expression):
        super().__init__(expression)
        self.expression = expression
class WrongPort(Exception):
    """Raised when an invalid mail server port is supplied."""
    def __init__(self, expression, message):
        self.expression, self.message = expression, message
class WrongFormat(Exception):
    """Raised when input data does not match the expected format."""
    def __init__(self, expression, message):
        self.expression, self.message = expression, message
class WrongFile(Exception):
    """Raised when an attachment file is missing or unusable."""
    def __init__(self, expression, message):
        self.expression, self.message = expression, message
| StarcoderdataPython |
12842174 | import argparse
from datetime import datetime, timedelta
from pathlib import Path
from typing import NamedTuple
import dask
import numpy as np
from snobedo.lib import ModisGeoTiff
from snobedo.lib.command_line_helpers import add_dask_options, \
add_water_year_option
from snobedo.lib.dask_utils import run_with_client
from snobedo.modis.geotiff_to_zarr import write_zarr
from snobedo.modis.matlab_to_geotiff import matlab_to_geotiff, warp_to
# Step size used when iterating over the days of a water year.
ONE_DAY = timedelta(days=1)
class ConversionConfig(NamedTuple):
    """Settings bundle for one MODIS matlab-to-zarr conversion run."""
    variable: str        # variable name to extract from the matlab files
    source_dir: Path     # base input directory (one sub-folder per water year)
    output_dir: Path     # directory the zarr output is written to
    modis_us: ModisGeoTiff  # template GeoTiff (see ModisGeoTiff.WESTERN_US_TEMPLATE)
    target_srs: str      # target spatial reference system, e.g. 'EPSG:32613'
def argument_parser():
    """Build the CLI parser for the matlab-to-zarr conversion script."""
    cli = argparse.ArgumentParser(
        description='Convert matlab files to zarr',
    )
    cli.add_argument(
        '--source-dir',
        required=True,
        type=Path,
        help='Base directory. The files to convert are expected to be in a '
             'folder with the water year. Example: 2018'
             ' Other required file expected under this folder is the template '
             f'MODIS file with name: {ModisGeoTiff.WESTERN_US_TEMPLATE}',
    )
    cli.add_argument(
        '--variable',
        required=True,
        type=str,
        help='Variable to extract from the matlab files'
    )
    cli.add_argument(
        '--t-srs',
        type=str,
        default='EPSG:32613',
        help='Target EPSG. Default: EPSG:32613'
    )
    # Shared option groups provided by the snobedo command line helpers.
    cli = add_dask_options(cli)
    cli = add_water_year_option(cli)
    return cli
def config_for_arguments(arguments):
    """Translate parsed CLI arguments into a ConversionConfig.

    Creates the per-water-year zarr output folder when it does not exist.
    """
    zarr_dir = arguments.source_dir / f'wy{arguments.water_year}-zarr/'
    zarr_dir.mkdir(exist_ok=True)
    return ConversionConfig(
        variable=arguments.variable,
        source_dir=arguments.source_dir,
        output_dir=zarr_dir,
        modis_us=ModisGeoTiff(arguments.source_dir),
        target_srs=arguments.t_srs,
    )
def date_range(water_year):
    """Return one datetime per day of the given water year.

    NOTE(review): the range starts on Sep 30 of the preceding year, one day
    before the conventional Oct 1 water-year start, and runs through Sep 30
    of `water_year` - presumably intentional; confirm with callers.
    """
    first = datetime(water_year - 1, 9, 30)
    last = datetime(water_year, 10, 1)
    return np.arange(first, last, ONE_DAY).astype(datetime)
@dask.delayed
def write_date(date, config):
    """Convert a single day: matlab -> GeoTiff -> warped GeoTiff -> zarr."""
    geotiff = matlab_to_geotiff(
        config.source_dir,
        config.output_dir,
        config.modis_us,
        date,
        config.variable,
    )
    warped = warp_to(geotiff, config.target_srs)
    write_zarr(warped, date, config.variable, config.output_dir)
def main():
    """Entry point: convert a whole water year of matlab files to zarr."""
    arguments = argument_parser().parse_args()
    if not arguments.source_dir.exists():
        raise IOError(
            f'Given source folder does not exist: {arguments.source_dir}'
        )
    with run_with_client(arguments.cores, arguments.memory):
        config = config_for_arguments(arguments)
        # Build one delayed task per day, then execute them all at once.
        tasks = []
        for date in date_range(arguments.water_year):
            tasks.append(write_date(date, config))
        dask.compute(tasks)
if __name__ == '__main__':
    main()
| StarcoderdataPython |
3201536 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Jan 10 13:17:52 2020
@author: <NAME>, Finnish Meteorological Institute
@licence: MIT licence Copyright
"""
import matplotlib
import numpy
import os
import pathlib
import seaborn
from Data import Data
from FileSystem import FileSystem
from PlotTweak import PlotTweak
from Simulation import Simulation
from decimal import Decimal
class Plot:
    """Plotting helpers for Simulation time-series (TS), profile (PS) and
    3-D (NC) datasets.

    NOTE(review): every method below is defined WITHOUT ``self`` and is
    called through the class as a plain namespace (see the recursive call
    ``Plot.getTimeseries(...)`` inside getTimeseries); calling them on an
    instance would mis-bind the first argument.  Several identifiers are
    Finnish: ``muuttuja`` = variable, ``bini`` = bin, ``kuvakansio`` =
    picture folder, ``lahtoarvo`` = initial value.
    """
    def getVerticalLine(ax, x, color = 'k', linestyle = '--' ):
        # Draw a vertical reference line at x on the given axes.
        ax.axvline( x, color = color , linestyle = linestyle )
    def getHorizontalLine(ax, y, color = 'k', linestyle = '--' ):
        # Draw a horizontal reference line at y on the given axes.
        ax.axhline( y, color = color , linestyle = linestyle )
    def getTimeseries(ax,
                      simulation : Simulation,
                      muuttuja,
                      conversionFactor = 1.0):
        # Plot the time series of `muuttuja` for one Simulation (or,
        # recursively, for each Simulation in a list), scaling values by
        # conversionFactor.  Returns the axes, or None when the variable
        # is missing from the TS dataset.
        if isinstance(simulation, list):
            for simulationInstance in simulation:
                ax = Plot.getTimeseries(ax, simulationInstance, muuttuja, conversionFactor)
            return ax
        ts = simulation.getTSDataset()
        try:
            dataset = ts[muuttuja]
        except KeyError:
            print("KeyError")
            return None
        # conversion
        dataset = dataset*conversionFactor
        dataset.plot(ax = ax,
                     color = simulation.getColor(),
                     label = simulation.getLabel(),
                     linewidth = simulation.getLineWidth())
        return ax
    #REVISEu
    def getTimeseriesOfProfile(ax,
                               simulation : Simulation,
                               muuttuja,
                               levels = None,
                               useLogaritmic = False,
                               useColorBar = False,
                               colors = None):
        # Filled time/height contour plot of a PS variable.  With
        # useLogaritmic the data is log10-transformed and, when `levels` is
        # not given, contour levels come from Data.getLogScale.
        # Returns (axes, contourf image, levels used) on success.
        ps = simulation.getPSDataset()
        try:
            data = ps[muuttuja]
        except KeyError:
            print("KeyError", simulation, muuttuja, "Plot.getTimeseriesOfProfile")
            return
        if useLogaritmic:
            if levels is None:
                levels, rangePotenssi, minimiPotenssi, maksimiPotenssi = Data.getLogScale(data.values)
                levels = rangePotenssi
            data.values = numpy.log10(data.values)
        im = data.plot.contourf("time","zt", ax = ax, levels=levels, add_colorbar = useColorBar, colors = colors) #
        return ax, im, levels
    def getContourLine(ax,
                       simulation : Simulation,
                       muuttuja,
                       value,
                       color = "black",
                       epsilon = 1e-12):
        # Draw a single iso-line of a PS variable at `value` by clamping
        # the contour range to value +/- epsilon.
        ps = simulation.getPSDataset()
        try:
            data = ps[muuttuja]
        except KeyError:
            print("KeyError", simulation, muuttuja, "Plot.getContourLine")
            return
        data.plot.contour(x="time", y="zt",ax=ax, colors = color, vmin = value - epsilon , vmax = value + epsilon)
        return ax
    def getColorBar(im, ax, levels = None):
        # Attach a horizontal colorbar for image `im` into axes `ax`; when
        # `levels` are log10 exponents, ticks are rendered as 10^n labels.
        cb = matplotlib.pyplot.colorbar(im, cax = ax, ticks = levels, orientation='horizontal') #, pad=0.21
        if levels is not None:
            cb.ax.set_xticklabels([r"$10^{" + str(int(elem)) + "}$" for elem in levels])
            colorbarLabelListShowBoolean = Data.getIntegerExponentsAsBoolean( levels )
            cb = PlotTweak.hideColorbarXLabels(cb, colorbarLabelListShowBoolean)
        cb.ax.tick_params(labelsize=36)
    # REVISE
    def getTimeseriesOfProportions(axes,
                                   simulation : Simulation,
                                   muuttuja,
                                   mode = "inCloud", cmap = "bright", limit = 1e-6, height = None, packing = None,
                                   timeStartH = 2.05, timeEndH = 48, analysis = False,
                                   fontsize = None, useLegend = True,
                                   figurePrefix = "",
                                   kuvakansio = "/home/aholaj/OneDrive/000_WORK/000_ARTIKKELIT/000-Manuscript-ICE/kuvat/bini/",
                                   kuvakansioPDF = "/home/aholaj/OneDrive/000_WORK/000_ARTIKKELIT/000-Manuscript-ICE/figures_pdf",
                                   filenamePDF = "figure6"):
        # Per-bin fractions of aerosol / cloud / ice particle numbers
        # (B bins) over time, one subplot per bin, selected by `mode`
        # ("inCloud", "belowCloud", "aboveCloud" or fixed "height").
        # NOTE(review): parameters muuttuja, cmap, analysis, figurePrefix,
        # kuvakansio, kuvakansioPDF and filenamePDF are never referenced in
        # this body.
        print(mode, end = " ")
        if height is not None:
            print(height)
        else:
            print()
        ps = simulation.getPSDataset()
        if ps is None:
            return "FileNotFound"
        ts = simulation.getTSDataset()
        if ts is None:
            return "FileNotFound"
        timeStartInd = Data.getClosestIndex( ps.time.values, timeStartH*3600 )
        timeEndInd = Data.getClosestIndex( ps.time.values, timeEndH*3600 )
        ps = ps.isel(time = slice(timeStartInd,timeEndInd))
        try:
            if mode == "inCloud":
                # ps = ps.sel(zt = slice(665,745)).mean(dim = "zt")
                ps = ps.where( (ps.P_rl > limit) & (ps.P_ri > limit), drop = True).mean(dim = "zt", skipna = True) #ps.where(ps.zt > ts.zb).where(ps.zt < ts.zc).mean(dim = "zt")#
            elif mode == "belowCloud":
                #ps = ps.sel(zt = slice(5,410)).mean(dim = "zt")
                ps = ps.where(ps.P_rl < limit, drop = True).where(ps.zt < ts.zb, drop = True).mean(dim = "zt", skipna = True) #.where(ps.P_rl < 1e-6, drop = True)
            elif mode == "aboveCloud":
                ps = ps.where(ps.zt > ts.zc, drop = True).mean(dim = "zt", skipna = True) #.where(ps.P_rl < 1e-6, drop = True)
            elif mode == "height":
                ps = ps.sel(zt = height, method = 'nearest')
        except KeyError:
            return
        # Convert the time coordinate from seconds to hours.
        ps = ps.assign_coords(time = (ps.time / 3600))
        try:
            aero = ps["P_Nabb"]
            cloud = ps["P_Ncbb"]
            ice = ps["P_Nibb"]
        except KeyError:
            return
        # Unify the bin dimension name so the three arrays can be summed.
        newname = "dryRadiusBinB"
        aero = aero.rename({"aeb":newname})
        cloud = cloud.rename({"clb":newname})
        ice = ice.rename({"icb":newname})
        total = aero + cloud + ice
        if packing is not None:
            # Collapse bins >= `packing` into one combined last bin.
            for daatta in aero, cloud, ice, total:
                daatta[:,packing] = numpy.sum(daatta[:,packing:], axis = 1)
            binNumber = min( numpy.shape(total.values)[1], packing +1 )
        # NOTE(review): binNumber is only assigned when packing is not
        # None; the loop below raises NameError otherwise - confirm
        # callers always pass `packing`.
        matplotlib.rcParams['lines.linewidth'] = 6
        yTicks = [0, 0.5, 1]
        # NOTE(review): this is a one-shot map iterator; it is consumed by
        # the first set_yticklabels call in the loop below, so later
        # subplots receive an exhausted iterator - likely why axes[2] is
        # re-labelled explicitly at the end.
        yTickLabels = map(str, yTicks)
        matplotlib.pyplot.subplots_adjust(hspace=0.05, wspace = 0.05)
        xLabelListShow = numpy.arange(8, 48+1, 8)
        xLabelListShow = numpy.insert(xLabelListShow, 0, 2)
        xLabelListMajorLine = numpy.arange(4, 48+1, 4)
        xLabelListMajorLine = numpy.insert(xLabelListMajorLine, 0, 2)
        for bini in range(binNumber):
            ax = axes[bini]
            aeroBin = aero[:,bini]
            cloudBin = cloud[:,bini]
            iceBin = ice[:,bini]
            totalBin = total[:,bini]
            aeroFrac = aeroBin/totalBin
            cloudFrac = cloudBin/totalBin
            iceFrac = iceBin/totalBin
            totalBinRelative = totalBin / totalBin.values[0]
            aeroFrac.plot(ax=ax, color = "#e6194B")
            cloudFrac.plot(ax=ax, color = "#000075")
            iceFrac.plot(ax=ax, color = "#42d4f4")
            totalBinRelative.plot(ax = ax, color = "black")
            ax.set_yticks( yTicks )
            ax.set_yticklabels( yTickLabels )
            ax.set_ylim( 0, 1.5)
            ax.set_title("")
            matplotlib.pyplot.setp(ax.get_yticklabels()[1], visible=False)
            if packing is not None and bini == (binNumber - 1):
                bininame = str(bini + 1 ) + " - 7"
            else:
                bininame = str(bini +1)
            if useLegend:
                legend_handles = [matplotlib.patches.Patch( facecolor = "black",
                                                            label = " ".join([ "Bin", bininame + ",", "Total", r"$N_0$", str(int(totalBin.values[0])) + ",", "Min", r"$N$", str(int(numpy.min(totalBin))), "$(kg^{-1})$" ]))]
                legend = ax.legend(handles = legend_handles, loc = "best", fontsize = fontsize)
                ax.add_artist(legend)
                if bini == 0:
                    header_handles = [matplotlib.patches.Patch(facecolor = "#e6194B", label="Aerosol"),
                                      matplotlib.patches.Patch(facecolor = "#000075", label="Cloud"),
                                      matplotlib.patches.Patch(facecolor = "#42d4f4", label="Ice")]
                    header_legend = ax.legend(handles = header_handles, loc =(0.3,1.05), ncol = 3, frameon = True, framealpha = 1.0, fontsize = fontsize)
                    ax.add_artist(header_legend)
            ########## END USELEGEND
            if bini in [2,3]:
                setXlabel= True
            else:
                setXlabel =False
            ax = PlotTweak.setXTicksLabelsAsTime(ax, ps.time.values, xLabelListShow = xLabelListShow, xLabelListMajorLine = xLabelListMajorLine, setXlabel = setXlabel)
            if bini in [0,1]:
                ax.set_xticklabels([])
        axes[2].set_yticklabels([str(item) for item in yTicks])
        for tick in axes[2].get_yticklabels():
            print(tick)
            tick.set_visible(True)
        return axes
    # REVISE
    def getTimeseriesOfBinMass(ax,
                               simulation : Simulation,
                               muuttuja,
                               height,
                               cmap = "OrRd", relative = True, limiter = 1e-3):
        # Relative change of a per-bin PS variable over time at a fixed
        # height, one coloured line per bin, with a legend of initial
        # values.  For aerosol variables the matching cloud/aerosol bins
        # are added to the denominator of the relative change.
        # NOTE(review): parameter `relative` is never referenced in this
        # body; also `biniTieto` is only assigned for muuttuja "P_Naba" /
        # "P_Nabb", so any other variable raises NameError at the label
        # line below - confirm intended call pattern.
        ps = simulation.getPSDataset()
        if ps is None:
            return "FileNotFound"
        zt = ps.zt
        psSliced = ps.sel(zt = height, method = "nearest").isel(time = slice(61,1440))
        try:
            data = psSliced[muuttuja]
        except KeyError:
            return
        aerosolsUsed = False
        AerosolAbins = False
        AerosolBbins = False
        parallelAeroB = None
        parallelAeroA = None
        ################################
        if muuttuja == "P_Naba":
            aerosolsUsed = True
        if muuttuja == "P_Naba":
            AerosolAbins = True
        elif muuttuja == "P_Nabb":
            AerosolBbins = True
        if aerosolsUsed:
            parallelCloudA = psSliced["P_Ncba"]
            parallelCloudB = psSliced["P_Ncbb"]
            if AerosolAbins:
                parallelAeroB = psSliced["P_Nabb"]
            elif AerosolBbins:
                parallelAeroA = psSliced["P_Naba"]
            biniTieto = muuttuja[-1].upper()
        ################################
        dataAllBins = data
        # Convert the time coordinate from seconds to hours.
        dataAllBins = dataAllBins.assign_coords(time = (dataAllBins.time / 3600))
        size = numpy.shape(dataAllBins.values)[1]
        colorpalette = seaborn.color_palette(cmap, 10)
        skip = 0
        aero = None
        includeOtherAero = False
        includeParallelOthercloud = False
        includeParallelCloud = True
        label = biniTieto + " bin |" + r"$N_0\ (\#/kg)$"
        legend_elements = [matplotlib.patches.Patch(facecolor="white",label=label)]
        for bini in range(size):
            plottable = dataAllBins[:,bini]
            vertailuData = numpy.zeros( numpy.shape(plottable.values))
            # Skip bins whose values stay at (or below) the limiter.
            if Data.isCloseToEpsilon(plottable, limiter):
                skip += 1
                continue
            if AerosolAbins:
                parallelBaeroBini = bini - 3
                parallelAcloudBini = bini - 3
                parallelBcloudBini = bini - 3
            elif AerosolBbins:
                parallelAaeroBini = bini + 3
                parallelAcloudBini = bini
                parallelBcloudBini = bini
            if aerosolsUsed:# and (parallelbini >= 0):
                if includeOtherAero:
                    if AerosolAbins and parallelBaeroBini > 0:
                        aero = parallelAeroB[:, parallelBaeroBini]
                    elif AerosolBbins:
                        aero = parallelAeroA[:, parallelAaeroBini]
                    vertailuData = vertailuData + aero.values
                if includeParallelOthercloud:
                    if AerosolAbins and parallelBcloudBini>0:
                        parallelOtherCloud = parallelCloudB[:, parallelBcloudBini ]
                    elif AerosolBbins and parallelAcloudBini>0:
                        parallelOtherCloud = parallelCloudA[:, parallelAcloudBini ]
                    vertailuData = vertailuData + parallelOtherCloud.values
                if includeParallelCloud:
                    if AerosolAbins:
                        parallelCloud = parallelCloudA[:, parallelAcloudBini]
                    elif AerosolBbins:
                        parallelCloud = parallelCloudB[:, parallelBcloudBini]
                    vertailuData = vertailuData + parallelCloud.values
            denom = plottable.values + vertailuData
            plottable, lahtoarvo = Data.getRelativeChange(plottable, denominator=denom, limiter = limiter)
            color = Data.getColorBin(colorpalette, bini, plottable)
            plottable.plot(ax=ax, color = color)
            # Legend entry: bin number plus its initial value, scientific
            # notation for very large/small initial values.
            if lahtoarvo > 1000 or lahtoarvo < 0.1:
                label = '{0:8d}{1:11.1E}'.format(bini + 1, Decimal(lahtoarvo))
            else:
                label = '{0:8d}{1:11.1f}'.format(bini + 1, lahtoarvo)
            legend_elements.append(matplotlib.patches.Patch(facecolor=color,label=label))
        if skip == size:
            matplotlib.pyplot.close()
            return None
        #matplotlib.pyplot.axvline( 2, color = "k" , linestyle = "--" )
        #matplotlib.pyplot.legend()
        ax.legend(handles=legend_elements, loc='best', frameon = True, framealpha = 1.0 )
        heightTosi = str(int(zt.sel(zt = height, method = 'nearest' ).values))
        matplotlib.pyplot.title("zt =" + heightTosi + "(m)" )
        # print(time.values)
        ax = PlotTweak.setXTicksLabelsAsTime(ax, plottable["time"].values, startPoint=8)
        #matplotlib.pyplot.ylim(0, 5)
        ax.set_yscale('log')
        return ax
    # REVISE
    def getSizeDistributionHeightTimeSpecified(ax,
                                               simulation : Simulation,
                                               muuttuja,
                                               label = None,
                                               color = "b",
                                               height =745, timeH = 2.5):
        # Log-log size distribution of a PS variable at one height and one
        # time instant (timeH in hours).
        # NOTE(review): the zt selection has no method='nearest', so
        # `height` must match a grid level exactly; also xarray usually
        # exposes attrs['long_name'] rather than `.longname` - confirm
        # both against the datasets used.
        ps = simulation.getPSDataset()
        if ps is None:
            return "FileNotFound"
        ps = ps.sel(zt=height).sel(time=timeH*3600, method = "nearest")
        try:
            dataarray = ps[muuttuja]
        except KeyError:
            return
        if label is None:
            label = dataarray.longname
        dataarray.plot.line(ax=ax, color = color , marker="o", label=label) #aero "#e6194B" cloud "#000075" ice "#42d4f4"
        matplotlib.pyplot.legend()
        ax.set_yscale("log")
        ax.set_xscale("log")
        return ax
    ### getAnimation2D NEEDS REVISION ####
    def getAnimation2D(ax,
                       simulation : Simulation,
                       muuttuja = "S_Nc",
                       kuvakansio = "/home/aholaj/OneDrive/000_WORK/000_ARTIKKELIT/000-Manuscript-ICE/kuvat/anim",
                       useAverage=False, ytValue = 0, useLog = False):
        # Render one PNG frame per time step of an x/z cross-section (yt
        # averaged or sliced) and assemble them into an animated GIF with
        # ImageMagick's `convert`.
        # NOTE(review): plot_alustus and saveFig are neither defined nor
        # imported in this module - presumably missing imports; confirm.
        nc = simulation.getNCDataset()
        if nc is None:
            return "FileNotFound"
        try:
            dataAnim = nc[muuttuja]#.sel(zt = slice(395, 850))
        except KeyError:
            return
        # Convert the time coordinate from seconds to hours.
        dataAnim = dataAnim.assign_coords(time = (dataAnim.time / 3600))
        print(" ")
        print("animate", muuttuja)
        if useLog:
            dataAnim.values = numpy.ma.log10(dataAnim.values).filled(0)
        if useAverage:
            dataAnim = dataAnim.mean(dim="yt")
        else:
            dataAnim = dataAnim.sel(yt = ytValue, method="nearest")
        dataAnim= dataAnim.sel(time = slice(2.5,49))
        timeSpan = numpy.shape(dataAnim)[0]
        subkuvakansio = FileSystem.createSubfolder(kuvakansio, muuttuja)
        for i in range(timeSpan):
            fig, ax = plot_alustus()
            plottable = dataAnim.isel(time=i)
            plottable.plot(x = "xt", y = "zt",ax = ax, add_colorbar = False, cmap = "Blues_r") #, levels = levels
            ax.set_title("time = " +"{:5.1f} (h)".format(plottable.time.values) )
            ax.set_ylabel("height (m)")
            ax.set_xlabel("East-west displacement of cell centers (m)")
            saveFig(subkuvakansio, muuttuja + "_{:04d}".format(i))
        origDir = os.getcwd()
        os.chdir(subkuvakansio)
        os.system("convert -delay 50 -loop 0 *.png animation.gif")
        os.chdir(origDir)
| StarcoderdataPython |
3584426 | <gh_stars>1-10
from update import *
from ui import *
from session import *
| StarcoderdataPython |
11216707 | import fresh_tomatoes
import media
# Catalogue of media.Movie instances; positional arguments are apparently
# (title, storyline, poster image URL, YouTube trailer URL) - confirm
# against the media.Movie constructor.
toy_story = media.Movie("Toy Story",
                        "A story of a boy and his toys that come to life",
                        "http://upload.wikimedia.org/wikipedia/en/1/13/Toy_Story.jpg",
                        "https://www.youtube.com/watch?v=vwyZH85NQC4")
#print(toy_story.storyline)
avatar = media.Movie("Avatar","A marine on an alien planet",
                     "http://upload.wikimedia.org/wikipedia/en/b/b0/Avatar-Teaser-Poster.jpg",
                     "http://www.youtube.com/watch?v=5PSNL1qE6VY")
dawn = media.Movie("Dawn Of The Planet Of The Apes",
                   "A story about an ape",
                   "http://upload.wikimedia.org/wikipedia/en/7/77/Dawn_of_the_Planet_of_the_Apes.jpg",
                   "http://www.youtube.com/watch?v=eq1sTNGDXo0")
gonegirl = media.Movie("Gone Girl",
                       "A sad story",
                       "http://upload.wikimedia.org/wikipedia/en/0/05/Gone_Girl_Poster.jpg",
                       "http://www.youtube.com/watch?v=Ym3LB0lOJ0o")
avenger = media.Movie("Avenger",
                      "A story about superheroes",
                      "http://upload.wikimedia.org/wikipedia/en/3/37/Captain_America_The_First_Avenger_poster.jpg",
                      "http://www.youtube.com/watch?v=hIR8Ar-Z4hw")
dark_knight = media.Movie("Dark knight rises",
                          "A story about batman",
                          "http://upload.wikimedia.org/wikipedia/en/8/83/Dark_knight_rises_poster.jpg",
                          "http://www.youtube.com/watch?v=g8evyE9TuYk")
# Collection handed to the page generator; the generator call itself is
# currently commented out.
movies = [toy_story, avatar, dawn, gonegirl, avenger, dark_knight]
#fresh_tomatoes.open_movies_page(movies)
#print (media.Movie.VALID_RATINGS)
print (media.Movie.__doc__)
| StarcoderdataPython |
9733304 | <gh_stars>10-100
from core.Config import alphabet
class Vigenere_Square:
    """
    Name: {cyan}The Vigenere Square{reset}
    Description: {cyan}An encryption and decryption technique that use word or string provided as key and adding it to each number that represent letter in alphabet.{reset}
    Possibility: {cyan}limitless{reset}
    Author: {yellow}@<NAME>{reset}
    [+] You only need to input characters in the as the key
    {cyan}HINT{reset}:
    Your keyword: dog
    """
    def __init__(self,msg, key):
        # Bug fix: previously self.keyword was only assigned when `key` was
        # None, so passing an explicit key made encrypt()/decrypt() fail
        # with AttributeError.  The provided key is now stored as well.
        self.msg = msg
        if key is None:
            try:
                self.keyword = input('Your keyword: ')
            except ValueError as e:
                raise ValueError()
        else:
            self.keyword = key
    def encrypt(self):
        # Repeat the keyword so it is at least as long as the message,
        # then shift each message letter by the matching key letter:
        # (m + k - 1) mod 26, where 0 maps to 'Z'.  Result is upper-case.
        key_list = self.keyword * len(self.msg)
        encrypt_num = [alphabet.get(letter) for letter in self.msg]
        encrypt_key = [alphabet.get(key_num) for key_num in key_list]
        new_num = []
        result = []
        for i in range(len(encrypt_num)):
            new_num.append((encrypt_num[i] + encrypt_key[i] - 1) % 26)
        for number in new_num:
            if number == 0:
                result.append('Z')
            else:
                for letter,index in alphabet.items():
                    if number == index:
                        result.append(letter.upper())
        return ''.join(result)
    def decrypt(self):
        # Inverse of encrypt(): (c - k + 1) mod 26, with 0 mapping to 'Z'.
        # Result is lower-case (except the 'Z' sentinel).
        key_list = self.keyword * len(self.msg)
        decrypt_num = [alphabet.get(letter.lower()) for letter in self.msg]
        decrypt_key = [alphabet.get(key_num.lower()) for key_num in key_list]
        new_num = []
        result = []
        for i in range(len(decrypt_num)):
            new_num.append((decrypt_num[i] - decrypt_key[i] + 1) % 26)
        for number in new_num:
            if number == 0:
                result.append('Z')
            else:
                for letter,index in alphabet.items():
                    if number == index:
                        result.append(letter)
        return ''.join(result)
6689332 | <reponame>pombredanne/quine-mccluskey
#!/usr/bin/env python
# qm.py -- A Quine McCluskey Python implementation
#
# Copyright (c) 2006-2013 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""An implementation of the Quine McCluskey algorithm.
This implementation of the Quine McCluskey algorithm has no inherent limits
(other than the calculation time) on the size of the inputs.
Also, in the limited tests of the author of this module, this implementation is
considerably faster than other public Python implementations for non-trivial
inputs.
Another unique feature of this implementation is the possibility to use the XOR
and XNOR operators, in addition to the normal AND operator, to minimise the
terms. This slows down the algorithm, but in some cases it can be a big win in
terms of complexity of the output.
"""
from __future__ import print_function
import math
class QuineMcCluskey:
    """The Quine McCluskey class.
    The QuineMcCluskey class minimises boolean functions using the Quine
    McCluskey algorithm.
    If the class was instantiiated with the use_xor set to True, then the
    resulting boolean function may contain XOR and XNOR operators.
    """
    # Version of this implementation, independent of any package version.
    __version__ = "0.2"
    def __init__(self, use_xor = False):
        # type: (bool) -> None
        """The class constructor.
        Kwargs:
            use_xor (bool): if True, try to use XOR and XNOR operations to give
                a more compact return.
        """
        self.use_xor = use_xor # Whether or not to use XOR and XNOR operations.
        self.n_bits = 0 # number of bits (i.e. self.n_bits == len(ones[i]) for every i).
def __num2str(self, i):
"""
Convert an integer to its bit-representation in a string.
Args:
i (int): the number to convert.
Returns:
The binary string representation of the parameter i.
"""
x = ['1' if i & (1 << k) else '0' for k in range(self.n_bits - 1, -1, -1)]
return "".join(x)
def simplify(self, ones, dc = []):
"""Simplify a list of terms.
Args:
ones (list of int): list of integers that describe when the output
function is '1', e.g. [1, 2, 6, 8, 15].
Kwargs:
dc (list of int): list of numbers for which we don't care if they
have one or zero in the output.
Returns:
see: simplify_los.
Example:
ones = [2, 6, 10, 14]
dc = []
This will produce the ouput: ['--10']
This means x = b1 & ~b0, (bit1 AND NOT bit0)
Example:
ones = [1, 2, 5, 6, 9, 10, 13, 14]
dc = []
This will produce the ouput: ['--^^'].
In other words, x = b1 ^ b0, (bit1 XOR bit0).
"""
terms = ones + dc
if len(terms) == 0:
return None
# Calculate the number of bits to use
# Needed internally by __num2str()
self.n_bits = int(math.ceil(math.log(max(terms) + 1, 2)))
# Generate the sets of ones and dontcares
ones = set(self.__num2str(i) for i in ones)
dc = set(self.__num2str(i) for i in dc)
return self.simplify_los(ones, dc)
def simplify_los(self, ones, dc = []):
"""The simplification algorithm for a list of string-encoded inputs.
Args:
ones (list of str): list of strings that describe when the output
function is '1', e.g. ['0001', '0010', '0110', '1000', '1111'].
Kwargs:
dc: (list of str)set of strings that define the don't care
combinations.
Returns:
Returns a set of strings which represent the reduced minterms. The
length of the strings is equal to the number of bits in the input.
Character 0 of the output string stands for the most significant
bit, Character n - 1 (n is the number of bits) stands for the least
significant bit.
The following characters are allowed in the return string:
'-' don't care: this bit can be either zero or one.
'1' the bit must be one.
'0' the bit must be zero.
'^' all bits with the caret are XOR-ed together.
'~' all bits with the tilde are XNOR-ed together.
Example:
ones = ['0010', '0110', '1010', '1110']
dc = []
This will produce the ouput: ['--10'].
In other words, x = b1 & ~b0, (bit1 AND NOT bit0).
Example:
ones = ['0001', '0010', '0101', '0110', '1001', '1010' '1101', '1110']
dc = []
This will produce the ouput: ['--^^'].
In other words, x = b1 ^ b0, (bit1 XOR bit0).
"""
self.profile_cmp = 0 # number of comparisons (for profiling)
self.profile_xor = 0 # number of comparisons (for profiling)
self.profile_xnor = 0 # number of comparisons (for profiling)
terms = ones | dc
if len(terms) == 0:
return None
# Calculate the number of bits to use
self.n_bits = max(len(i) for i in terms)
if self.n_bits != min(len(i) for i in terms):
return None
# First step of Quine-McCluskey method.
prime_implicants = self.__get_prime_implicants(terms)
# Remove essential terms.
essential_implicants = self.__get_essential_implicants(prime_implicants, set(dc))
# Insert here the Quine McCluskey step 2: prime implicant chart.
# Insert here Petrick's Method.
return essential_implicants
def __reduce_simple_xor_terms(self, t1, t2):
"""Try to reduce two terms t1 and t2, by combining them as XOR terms.
Args:
t1 (str): a term.
t2 (str): a term.
Returns:
The reduced term or None if the terms cannot be reduced.
"""
difft10 = 0
difft20 = 0
ret = []
for (t1c, t2c) in zip(t1, t2):
if t1c == '^' or t2c == '^' or t1c == '~' or t2c == '~':
return None
elif t1c != t2c:
ret.append('^')
if t2c == '0':
difft10 += 1
else:
difft20 += 1
else:
ret.append(t1c)
if difft10 == 1 and difft20 == 1:
return "".join(ret)
return None
def __reduce_simple_xnor_terms(self, t1, t2):
"""Try to reduce two terms t1 and t2, by combining them as XNOR terms.
Args:
t1 (str): a term.
t2 (str): a term.
Returns:
The reduced term or None if the terms cannot be reduced.
"""
difft10 = 0
difft20 = 0
ret = []
for (t1c, t2c) in zip(t1, t2):
if t1c == '^' or t2c == '^' or t1c == '~' or t2c == '~':
return None
elif t1c != t2c:
ret.append('~')
if t1c == '0':
difft10 += 1
else:
difft20 += 1
else:
ret.append(t1c)
if (difft10 == 2 and difft20 == 0) or (difft10 == 0 and difft20 == 2):
return "".join(ret)
return None
def __get_prime_implicants(self, terms):
"""Simplify the set 'terms'.
Args:
terms (set of str): set of strings representing the minterms of
ones and dontcares.
Returns:
A list of prime implicants. These are the minterms that cannot be
reduced with step 1 of the Quine McCluskey method.
This is the very first step in the Quine McCluskey algorithm. This
generates all prime implicants, whether they are redundant or not.
"""
# Sort and remove duplicates.
n_groups = self.n_bits + 1
marked = set()
# Group terms into the list groups.
# groups is a list of length n_groups.
# Each element of groups is a set of terms with the same number
# of ones. In other words, each term contained in the set
# groups[i] contains exactly i ones.
groups = [set() for i in range(n_groups)]
for t in terms:
n_bits = t.count('1')
groups[n_bits].add(t)
if self.use_xor:
# Add 'simple' XOR and XNOR terms to the set of terms.
# Simple means the terms can be obtained by combining just two
# bits.
for gi, group in enumerate(groups):
for t1 in group:
for t2 in group:
t12 = self.__reduce_simple_xor_terms(t1, t2)
if t12 != None:
terms.add(t12)
if gi < n_groups - 2:
for t2 in groups[gi + 2]:
t12 = self.__reduce_simple_xnor_terms(t1, t2)
if t12 != None:
terms.add(t12)
done = False
while not done:
# Group terms into groups.
# groups is a list of length n_groups.
# Each element of groups is a set of terms with the same
# number of ones. In other words, each term contained in the
# set groups[i] contains exactly i ones.
groups = dict()
for t in terms:
n_ones = t.count('1')
n_xor = t.count('^')
n_xnor = t.count('~')
# The algorithm can not cope with mixed XORs and XNORs in
# one expression.
assert n_xor == 0 or n_xnor == 0
key = (n_ones, n_xor, n_xnor)
if key not in groups:
groups[key] = set()
groups[key].add(t)
terms = set() # The set of new created terms
used = set() # The set of used terms
# Find prime implicants
for key in groups:
key_next = (key[0]+1, key[1], key[2])
if key_next in groups:
group_next = groups[key_next]
for t1 in groups[key]:
# Optimisation:
# The Quine-McCluskey algorithm compares t1 with
# each element of the next group. (Normal approach)
# But in reality it is faster to construct all
# possible permutations of t1 by adding a '1' in
# opportune positions and check if this new term is
# contained in the set groups[key_next].
for i, c1 in enumerate(t1):
if c1 == '0':
self.profile_cmp += 1
t2 = t1[:i] + '1' + t1[i+1:]
if t2 in group_next:
t12 = t1[:i] + '-' + t1[i+1:]
used.add(t1)
used.add(t2)
terms.add(t12)
# Find XOR combinations
for key in [k for k in groups if k[1] > 0]:
key_complement = (key[0] + 1, key[2], key[1])
if key_complement in groups:
for t1 in groups[key]:
t1_complement = t1.replace('^', '~')
for i, c1 in enumerate(t1):
if c1 == '0':
self.profile_xor += 1
t2 = t1_complement[:i] + '1' + t1_complement[i+1:]
if t2 in groups[key_complement]:
t12 = t1[:i] + '^' + t1[i+1:]
used.add(t1)
terms.add(t12)
# Find XNOR combinations
for key in [k for k in groups if k[2] > 0]:
key_complement = (key[0] + 1, key[2], key[1])
if key_complement in groups:
for t1 in groups[key]:
t1_complement = t1.replace('~', '^')
for i, c1 in enumerate(t1):
if c1 == '0':
self.profile_xnor += 1
t2 = t1_complement[:i] + '1' + t1_complement[i+1:]
if t2 in groups[key_complement]:
t12 = t1[:i] + '~' + t1[i+1:]
used.add(t1)
terms.add(t12)
# Add the unused terms to the list of marked terms
for g in list(groups.values()):
marked |= g - used
if len(used) == 0:
done = True
# Prepare the list of prime implicants
pi = marked
for g in list(groups.values()):
pi |= g
return pi
    def __get_essential_implicants(self, terms, dc):
        """Simplify the set 'terms'.
        Args:
            terms (set of str): set of strings representing the minterms of
                ones and dontcares.
            dc (set of str): set of strings representing the dontcares.
        Returns:
            A set of essential implicants. These are the minterms that cannot
            be reduced with step 1 of the Quine McCluskey method.
        This function is usually called after __get_prime_implicants and its
        objective is to remove non-essential minterms.
        In reality this function omits all terms that can be covered by at
        least one other term in the list.
        """
        # Create all permutations for each term in terms.
        # Dontcare permutations are excluded: they never need covering.
        perms = {}
        for t in terms:
            perms[t] = set(p for p in self.permutations(t) if p not in dc)
        # Now group the remaining terms and see if any term can be covered
        # by a combination of terms.
        ei_range = set()   # union of all minterms already covered
        ei = set()         # the essential implicants picked so far
        groups = dict()    # terms keyed by their rank (see __get_term_rank)
        for t in terms:
            n = self.__get_term_rank(t, len(perms[t]))
            if n not in groups:
                groups[n] = set()
            groups[n].add(t)
        # Visit highest-rank terms first: terms with more dontcares/XORs
        # are preferred because they cover more minterms.
        for t in sorted(list(groups.keys()), reverse=True):
            for g in groups[t]:
                # Keep a term only if it covers at least one minterm that
                # is not yet covered by the terms selected so far.
                if not perms[g] <= ei_range:
                    ei.add(g)
                    ei_range |= perms[g]
        if len(ei) == 0:
            # No implicants at all means the function is constant one.
            ei = set(['-' * self.n_bits])
        return ei
def __get_term_rank(self, term, term_range):
    """Calculate the "rank" of a term.

    Args:
        term (str): one single term in string format.
        term_range (int): the rank of the class of term.

    Returns:
        The "rank" of the term, a non-negative integer.

    A term of all fixed '0' bits has rank 0. Each symbol contributes a
    weight -- dontcare '-' (8) outweighs xor '^' (4), which outweighs
    xnor '~' (2), which outweighs '1' (1); '0' adds nothing. The higher
    the rank, the more desirable it is to keep the term in the result.
    """
    symbol_weight = {"-": 8, "^": 4, "~": 2, "1": 1}
    detail = sum(symbol_weight.get(symbol, 0) for symbol in term)
    return 4 * term_range + detail
def permutations(self, value = ''):
    """Iterator to generate all possible values out of a string.

    Args:
        value (str): A string containing any of the above characters.

    Returns:
        The output strings contain only '0' and '1'.

    Example:
        from qm import QuineMcCluskey
        qm = QuineMcCluskey()
        for i in qm.permutations('1--^^'):
            print(i)

    The operation performed by this generator function can be seen as the
    inverse of binary minimisation methods such as Karnaugh maps, Quine
    McCluskey or Espresso. It takes as input a minterm and generates all
    possible maxterms from it. Inputs and outputs are strings.

    Possible input characters:
        '0': the bit at this position will always be zero.
        '1': the bit at this position will always be one.
        '-': don't care: this bit can be zero or one.
        '^': all bits with the caret are XOR-ed together.
        '~': all bits with the tilde are XNOR-ed together.

    Algorithm description:
        This lovely piece of spaghetti code generates all possible
        permutations of a given string describing logic operations.
        This could be achieved by recursively running through all
        possibilities, but a more linear approach has been preferred.
        The basic idea of this algorithm is to consider all bit
        positions from 0 upwards (direction = +1) until the last bit
        position. When the last bit position has been reached, then the
        generated string is yielded. At this point the algorithm works
        its way backward (direction = -1) until it finds an operator
        like '-', '^' or '~'. The bit at this position is then flipped
        (generally from '0' to '1') and the direction flag again
        inverted. This way the bit position pointer (i) runs forth and
        back several times until all possible permutations have been
        generated.
        When the position pointer reaches position -1, all possible
        combinations have been visited.
    """
    n_bits = len(value)
    # Total number of parity-linked positions ('^' and '~' share one parity).
    n_xor = value.count('^') + value.count('~')
    xor_value = 0   # running parity of the '1's emitted at xor/xnor positions
    seen_xors = 0   # how many xor/xnor positions the pointer has passed
    res = ['0' for i in range(n_bits)]
    i = 0
    direction = +1  # +1: moving right (filling), -1: backtracking left
    while i >= 0:
        # binary constant
        if value[i] == '0' or value[i] == '1':
            res[i] = value[i]
        # dontcare operator
        elif value[i] == '-':
            if direction == +1:
                res[i] = '0'
            elif res[i] == '0':
                # Backtracking hit an unflipped dontcare: flip it and resume
                # moving right.
                res[i] = '1'
                direction = +1
        # XOR operator
        elif value[i] == '^':
            seen_xors = seen_xors + direction
            if direction == +1:
                if seen_xors == n_xor and xor_value == 0:
                    # Last parity position: force overall parity to odd.
                    res[i] = '1'
                else:
                    res[i] = '0'
            else:
                if res[i] == '0' and seen_xors < n_xor - 1:
                    res[i] = '1'
                    direction = +1
                    seen_xors = seen_xors + 1
            if res[i] == '1':
                xor_value = xor_value ^ 1
        # XNOR operator
        elif value[i] == '~':
            seen_xors = seen_xors + direction
            if direction == +1:
                if seen_xors == n_xor and xor_value == 1:
                    # Last parity position: force overall parity to even.
                    res[i] = '1'
                else:
                    res[i] = '0'
            else:
                if res[i] == '0' and seen_xors < n_xor - 1:
                    res[i] = '1'
                    direction = +1
                    seen_xors = seen_xors + 1
            if res[i] == '1':
                xor_value = xor_value ^ 1
        # unknown input
        else:
            res[i] = '#'
        i = i + direction
        if i == n_bits:
            # Right edge reached: emit one permutation, then backtrack.
            direction = -1
            i = n_bits - 1
            yield "".join(res)
| StarcoderdataPython |
# NOTE(review): this snippet is IDE-inspection test data, not runnable
# Python. The undefined `@abstract` decorator and the
# <weak_warning>/<caret> tags are intentional fixture markup -- do not
# "fix" them.
from abc import ABC, abstractproperty


class Base(ABC):
    @abstract
    @property
    def some_method(self):
        pass


class <weak_warning descr="Class Sub must implement all abstract methods">S<caret>ub</weak_warning>(Base):
    pass
8056891 | # -*- coding: utf-8 -*-
import sys
import base64
import requests
# Older `requests` releases do not vendor urllib3 under requests.packages;
# suppress InsecureRequestWarning only when the attribute exists.
try:
    requests.packages.urllib3.disable_warnings()
except AttributeError:
    pass
class Rest():
    """Small Python 2 helper that POSTs inventory records to a REST API.

    NOTE(review): the `<PASSWORD>` token is a credential-scrubbing
    placeholder left by the dataset and makes this file non-importable
    as-is; restore `SECRET` (or equivalent) before use.
    """

    def __init__(self, BASE_URL, USERNAME, SECRET, DEBUG, VERBOSE, DRY_RUN, LOGGER):
        # Store connection settings and behaviour flags verbatim.
        self.base_url = BASE_URL
        self.username = USERNAME
        self.password = <PASSWORD>
        self.debug = DEBUG
        self.verbose = VERBOSE
        self.dry_run = DRY_RUN
        self.logger = LOGGER

    def uploader(self, data, url):
        """POST form-encoded *data* to *url* with HTTP Basic auth.

        Logs and exits the whole process on any request failure;
        otherwise returns the decoded JSON response body.
        """
        payload = data
        headers = {
            'Authorization': 'Basic ' + base64.b64encode(self.username + ':' + self.password),
            'Content-Type': 'application/x-www-form-urlencoded'
        }
        try:
            # verify=False: TLS certificates are deliberately not validated.
            r = requests.post(url, data=payload, headers=headers, verify=False)
        except Exception as e:
            self.logger.exception(e)
            print e
            sys.exit()
        else:
            msg = unicode(payload)
            if self.verbose:
                print msg
            msg = 'Status code: %s' % str(r.status_code)
            if self.verbose:
                print msg
            msg = str(r.text)
            if self.verbose:
                print msg
            return r.json()

    def post_device(self, data):
        """Upload one device record; no-op (returns None) when dry_run is set."""
        if self.dry_run == False:
            url = self.base_url+'/api/device/'
            msg = '\r\nPosting data to %s ' % url
            if self.verbose:
                print msg
            result = self.uploader(data, url)
            return result

    def post_ip(self, data):
        """Upload one IP record; no-op (returns None) when dry_run is set."""
        if self.dry_run == False:
            url = self.base_url+'/api/ip/'
            msg = '\r\nPosting IP data to %s ' % url
            if self.verbose:
                print msg
            result = self.uploader(data, url)
            return result

    def post_mac(self, data):
        """Upload one MAC record; no-op (returns None) when dry_run is set."""
        if self.dry_run == False:
            url = self.base_url+'/api/1.0/macs/'
            msg = '\r\nPosting MAC data to %s ' % url
            if self.verbose:
                print msg
            result = self.uploader(data, url)
            return result

    def post_parts(self, data):
        """Upload one HDD-parts record; no-op (returns None) when dry_run is set."""
        if self.dry_run == False:
            url = self.base_url+'/api/1.0/parts/'
            msg = '\r\nPosting HDD parts to %s ' % url
            if self.verbose:
                print msg
            result = self.uploader(data, url)
            return result
| StarcoderdataPython |
3361795 | <filename>styling/views.py<gh_stars>1-10
import json
from api.view_utils import JsonResponse, pass_errors_to_response
from styling.utils import rendering_task, get_available_options
@pass_errors_to_response
def options(request):
    """Return the serialized styling options available for the requested kind."""
    body = json.loads(request.body)
    requested_kind = body.get("kind")
    serialized = [option.serialize() for option in get_available_options(requested_kind)]
    return JsonResponse(serialized)
@pass_errors_to_response
def restyle(request):
    """Deploy a re-rendering task for the styling kind named in the request body.

    The task is deployed purely for its side effect; the response carries
    no payload.
    """
    params = json.loads(request.body)
    kind = params.get("kind")
    task_class = rendering_task(kind)
    # Deploy for its side effect only; the returned task handle was bound
    # to an unused local in the original.
    task_class(request).deploy()
    return JsonResponse()
| StarcoderdataPython |
292888 | <filename>vestaboard/vbUrls.py
# Vestaboard platform API endpoints.
# List/create subscriptions for the authenticated installable.
subscription = 'https://platform.vestaboard.com/subscriptions'
# Format with a subscription id to POST a message to that subscription.
post = "https://platform.vestaboard.com/subscriptions/{}/message"
8170284 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#Author: <NAME> and <NAME>
#Email: <EMAIL> and <EMAIL>
#For licensing see the LICENSE file in the top level directory.
"""
Allows for the computation of the PQ-Gram edit distance of two trees. To
calculate the distance, a Profile object must first be created for each tree,
then the edit_distance function can be called.
For more information on the PQ-Gram algorithm, please see the README.
"""
import tree, copy
class Profile(object):
    """Represents a PQ-Gram Profile, which is a list of PQ-Grams. Each PQ-Gram
    is represented by a ShiftRegister. This class relies on both the
    ShiftRegister and tree.Node classes."""

    def __init__(self, root, p=2, q=3):
        """Builds the PQ-Gram Profile of the given tree, using the p and q
        parameters specified. The p and q parameters do not need to be
        specified, however, different values will have an effect on the
        distribution of the calculated edit distance. In general, smaller
        values of p and q are better, though a value of (1, 1) is not
        recommended, and anything lower is invalid."""
        super(Profile, self).__init__()
        ancestors = ShiftRegister(p)
        self.list = list()
        self.profile(root, p, q, ancestors)
        self.sort()

    def profile(self, root, p, q, ancestors):
        """Recursively builds the PQ-Gram profile of the given subtree. This
        method should not be called directly and is called from __init__."""
        ancestors.shift(root.label)
        siblings = ShiftRegister(q)
        if len(root.children) == 0:
            # Leaf: emit a single gram of ancestors plus empty sibling window.
            self.append(ancestors.concatenate(siblings))
        else:
            for child in root.children:
                siblings.shift(child.label)
                self.append(ancestors.concatenate(siblings))
                # deepcopy so each child extends its own ancestor register.
                self.profile(child, p, q, copy.deepcopy(ancestors))
            # Pad the sibling window past the last child with null labels.
            for i in range(q-1):
                siblings.shift("*")
                self.append(ancestors.concatenate(siblings))

    def edit_distance(self, other):
        """Computes the edit distance between two PQ-Gram Profiles. This value
        is always between 0.0 (identical) and 1.0 (disjoint). Relies on the
        intersection method."""
        union = len(self) + len(other)
        if union == 0:
            # Two empty profiles are identical; avoid division by zero.
            return 0.0
        return 1.0 - 2.0*(self.intersection(other)/union)

    def intersection(self, other):
        """Computes the set intersection of two PQ-Gram Profiles and returns
        the number of elements in the intersection. Both profiles must be
        sorted (guaranteed by __init__)."""
        intersect = 0.0
        i = j = 0
        while i < len(self) and j < len(other):
            intersect += self.gram_edit_distance(self[i], other[j])
            if self[i] == other[j]:
                i += 1
                j += 1
            elif self[i] < other[j]:
                i += 1
            else:
                j += 1
        return intersect

    def gram_edit_distance(self, gram1, gram2):
        """Computes the edit distance between two different PQ-Grams. If the
        two PQ-Grams are the same then the distance is 1.0, otherwise the
        distance is 0.0. Changing this will break the metrics of the
        algorithm."""
        distance = 0.0
        if gram1 == gram2:
            distance = 1.0
        return distance

    def sort(self):
        """Sorts the PQ-Grams by the concatenation of their labels. This step
        is automatically performed when a PQ-Gram Profile is created to ensure
        the intersection algorithm functions properly and efficiently."""
        # BUG FIX: the key used to be `lambda x: ''.join`, which returned the
        # bound join method itself instead of calling it, so grams were never
        # compared by their concatenated labels (and sorting raised TypeError
        # on Python 3). Pass the join method directly as the key.
        self.list.sort(key=''.join)

    def append(self, value):
        self.list.append(value)

    def __len__(self):
        return len(self.list)

    def __repr__(self):
        return str(self.list)

    def __str__(self):
        return str(self.list)

    def __getitem__(self, key):
        return self.list[key]

    def __iter__(self):
        for x in self.list: yield x
class ShiftRegister(object):
    """A register acting as a fixed-size queue.

    Only two operations exist: shift (push a value in at the end, dropping
    the oldest value at the front, which cannot be recovered) and
    concatenate (join two registers into a plain list)."""

    def __init__(self, size):
        """Create the internal list of length *size*, filled with the
        default null label "*". The size is fixed for the register's
        lifetime."""
        self.register = ["*"] * size

    def concatenate(self, reg):
        """Return a new list holding this register's contents followed by
        *reg*'s contents; neither register is modified."""
        return self.register + reg.register

    def shift(self, el):
        """Push *el* onto the end of the register, dropping the value at
        the front and sliding everything else one slot left."""
        del self.register[0]
        self.register.append(el)
"""The following methods are provided for visualization of the PQ-Gram Profile
structure. They are NOT intended for other use, and play no role in using the
PQ-Gram algorithm."""
def build_extended_tree(root, p=1, q=1):
    """This method will take a normal tree structure and the given values
    for p and q, returning a new tree which represents the so-called
    PQ-Extended-Tree.

    To do this, the following algorithm is used:
        1. Add p-1 null ancestors to the root
        2. Traverse tree, add q-1 null children before the first and
           after the last child of every non-leaf node
        3. For each leaf node add q null children
    """
    original_root = root  # store for later
    # Step 1: add p-1 null ancestors above the root.
    for i in range(p-1):
        # CONSISTENCY FIX: every other null node in this module is built
        # with tree.Node("*"); tree.node(label="*") was a typo referring
        # to a non-existent lowercase constructor.
        node = tree.Node("*")
        node.addkid(root)
        root = node
    # Steps 2 and 3: breadth-first walk, padding each real node's children.
    list_of_children = original_root.children
    if(len(list_of_children) == 0):
        q_append_leaf(original_root, q)
    else:
        q_append_non_leaf(original_root, q)
    while(len(list_of_children) > 0):
        temp_list = list()
        for child in list_of_children:
            # Null nodes ("*") are padding and are never padded themselves.
            if(child.label != "*"):
                if(len(child.children) == 0):
                    q_append_leaf(child, q)
                else:
                    q_append_non_leaf(child, q)
                temp_list.extend(child.children)
        list_of_children = temp_list
    return root
##### Extended Tree Functions #####
def q_append_non_leaf(node, q):
    """Pad a non-leaf *node* with q-1 null children on each side (step 2).

    Null nodes must flank the real children on both sides, so each
    iteration adds one null child at the front (before=True) and one at
    the back of the child list."""
    for _ in range(q - 1):
        node.addkid(tree.Node("*"), before=True)
        node.addkid(tree.Node("*"))
def q_append_leaf(node, q):
    """Append q null children to the leaf *node* (step 3)."""
    for _ in range(q):
        node.addkid(tree.Node("*"))
| StarcoderdataPython |
3202057 | ##########################################################
# detect_lanes.py
#
# SPDX-FileCopyrightText: Copyright 2021 <NAME>
#
# SPDX-License-Identifier: MIT
#
# Lane detection on video files
#
# ########################################################
#
# Import libraries
import cv2
#
import numpy as np
import os
#import lane_detection
import lane_detection.utils as utils
import lane_detection.frame_transformer as ftf
import lane_detection.lane_detector as det
import lane_detection.config as cfg
from lane_detection.get_coordinates import set_polygon
DEBUG = False
#
# Print Video Information, return video resolution (width, height)
def get_video_information(cap, filename=None):
    """Print metadata (FPS, frame count, resolution) for an opened capture.

    Args:
        cap: an opened cv2.VideoCapture.
        filename: display name for the clip; "-" when None.

    NOTE(review): `width` and `height` are module-level globals set in the
    __main__ block, not read from `cap` -- confirm they match the clip.
    """
    if filename is None:
        filename = "-"
    print("""
    File: {}
    FPS: {}
    # Frames: {}
    Width: {}
    Height: {}
    """.format(
        filename,
        int(cap.get(cv2.CAP_PROP_FPS)),
        int(cap.get(cv2.CAP_PROP_FRAME_COUNT)),
        width,
        height
    ))
#
# Print Image Information, return image resolution (width, height, channel)
def get_image_information(filename=None):
    """Print the module-global image resolution together with *filename*
    ("-" when no name is given)."""
    display_name = "-" if filename is None else filename
    info = """
    File: {}
    Width: {}
    Height: {}
    """.format(display_name, width, height)
    print(info)
#
# Video pipeline
def process_video(vid_files, mtx, dist, Ftf):
    """Run lane detection over every clip in *vid_files*.

    Args:
        vid_files: iterable of video file paths.
        mtx, dist: camera matrix and distortion coefficients
            (NOTE(review): undefined in main() when calibration is off --
            confirm).
        Ftf: FrameTransformer instance used by the per-frame pipeline.

    Keyboard controls: ESC skips to the next file, 'q' ends the application.
    """
    global START_TICK_FRAME
    # For manual application exit
    end_application = False
    # Loop through found video files
    for vid in vid_files:
        # Open next video file
        cap = cv2.VideoCapture(vid)
        if not cap.isOpened():
            print("Error opening file {}".format(vid))
        filename = utils.get_filename(vid)
        # Save output
        # capture = cv2.VideoWriter(str(filename) + '.avi',
        #                           cv2.VideoWriter_fourcc(*'MJPG'),
        #                           20, (Detector.width, Detector.height))
        # Reset detector for next clip
        Detector.reset_detector()
        # Check video orientation: portrait clips are rotated to match the
        # configured width.
        flip = False
        if cap.get(cv2.CAP_PROP_FRAME_WIDTH) != width:
            flip = True
        # Display file information
        get_video_information(cap, filename)
        # Enter video processing; force a fresh ROI for this clip.
        Detector.vertices = None
        processed_frames = 0
        while cap.isOpened():
            START_TICK_FRAME = cv2.getTickCount()
            # Read next frame
            ret, bgr_frame = cap.read()
            # Flip if necessary
            if flip:
                bgr_frame = cv2.rotate(bgr_frame, cv2.ROTATE_90_COUNTERCLOCKWISE)#cv2.ROTATE_90_COUNTERCLOCKWISE
            # Check for error while retrieving frame
            if ret:
                # Number of frames
                processed_frames += 1
                # Start lane detection
                result = process_lane_detection(bgr_frame, mtx, dist, Ftf, filename)
                # Write clip
                #capture.write(result)
                # 'ESC' to skip to next file and 'q' to end application
                pressed_key = cv2.waitKey(1) & 0xFF
                if pressed_key == 27:
                    cv2.destroyAllWindows()
                    break
                elif pressed_key == ord('q'):
                    end_application = True
                    break
            else:
                # Error while retrieving frame or video ended
                break
        # Release video file
        cap.release()
        # Check for manual end command
        if end_application:
            break
#
# Image pipeline
def process_image(image_files, mtx, dist, Ftf):
    """Run lane detection once per still image in *image_files*.

    Blocks on cv2.waitKey(0) after each image; ESC closes the current
    windows and moves on, 'q' stops the whole run.
    """
    global START_TICK_FRAME
    for image_path in image_files:
        START_TICK_FRAME = cv2.getTickCount()
        filename = utils.get_filename(image_path)
        image = cv2.imread(image_path)
        # Reset detector for next image
        Detector.reset_detector()
        process_lane_detection(image, mtx, dist, Ftf, filename)
        # 'ESC' to skip to next file and 'q' to end application
        pressed_key = cv2.waitKey(0) & 0xFF
        if pressed_key == 27:
            cv2.destroyAllWindows()
        elif pressed_key == ord('q'):
            break
    cv2.destroyAllWindows()
#
# Lane detection
def process_lane_detection(bgr_frame, mtx, dist, Ftf, filename):
    """Full per-frame lane-detection pipeline; returns the annotated frame.

    Stages: undistort -> ROI selection/perspective warp -> brightness
    adjustment -> color-space conversion -> edge detection -> color
    masking -> lane fitting -> unwarp and overlay -> FPS annotation.
    """
    #
    # Lane Detection
    #
    global START_TICK_FRAME
    # Undistort frame using the calibration data (pass-through otherwise).
    if is_calibration:
        undist_frame = Ftf.undistort_frame(bgr_frame, mtx, dist)
    else:
        undist_frame = bgr_frame
    # Select ROI: reuse, load, or interactively define the polygon.
    if is_man_roi or Detector.vertices is None:
        # Check for saved ROI
        if not is_man_roi:
            try:
                Detector.vertices = utils.save_load_np_var(filename, save = False)
            except:
                Detector.vertices = None
        if is_man_roi and Detector.vertices is None:
            # Manual mode: always ask the user, never persist.
            vertices = set_polygon(undist_frame)
            Detector.set_vertices(vertices)
        elif is_video and Detector.vertices is None:
            vertices = set_polygon(undist_frame)
            Detector.set_vertices(vertices)
            utils.save_load_np_var(filename, data = vertices)
        elif not is_video and Detector.vertices is None:
            vertices = set_polygon(undist_frame)
            Detector.set_vertices(vertices)
            utils.save_load_np_var(filename, data = vertices)
        else:
            vertices = Detector.vertices
    else:
        vertices = Detector.vertices
    # Note the swapped vertex order for the perspective transform below.
    vert_poly = np.array([[vertices[0], vertices[1], vertices[2], vertices[3]]], dtype=np.int32)
    vert_trans = np.array([[vertices[1], vertices[0], vertices[2], vertices[3]]], dtype=np.float32)
    # Generate ROI on frame and transform to Bird-Eye view
    roi_frame = Ftf.region_of_interest(undist_frame, vert_poly)
    trans_frame, M, minv = Ftf.transform_frame(roi_frame, vert_trans)
    # Adjust brightness
    bright_fac = Ftf.brightness_estimation(trans_frame)
    bright_frame = Ftf.adjust_frame(trans_frame, bright_fac)
    # Convert color space
    hls_frame = Ftf.bgr_to_x(bright_frame, 'hls')
    gray_frame= Ftf.bgr_to_x(trans_frame, 'gray')
    # Edge detection (Sobel; Canny kept for reference)
    blur_frame = Ftf.apply_gaussian_blur(gray_frame)
    #canny_frame = Ftf.apply_canny_edge_det(blur_frame)
    sobel_frame = Ftf.apply_sobel_edge_det(blur_frame)
    filter_frame = sobel_frame
    # White color mask
    lower = np.uint8(cfg.white_lower)
    upper = np.uint8(cfg.white_upper)
    white_mask = Ftf.create_mask(bright_frame, (lower, upper))
    # Yellow color mask
    lower = np.uint8(cfg.yellow_lower)
    upper = np.uint8(cfg.yellow_upper)
    yellow_mask = Ftf.create_mask(hls_frame, (lower, upper))
    # Combine the masks and intersect with the edge image.
    comb_mask = Ftf.combine_frames(white_mask, yellow_mask)
    intersect = Ftf.intersect_mask(comb_mask, filter_frame)
    # Draw lanes
    lanes, avg_rad = Detector.find_lanes(intersect)
    if avg_rad is not None:
        Detector.insert_direction(bgr_frame, avg_rad)
    # Return from Bird-eye view to normal view
    unwarp = Ftf.untransform_frame(lanes, minv)
    # Overlay drawings on bgr frame
    result = cv2.addWeighted(bgr_frame, 1, unwarp, 1, 0)
    # FPS
    fps = 1 / (utils.get_passed_time(
        START_TICK_FRAME,
        cv2.getTickCount()))
    Detector.insert_fps(result, fps)
    # Display results (intermediate stages only in DEBUG mode)
    if DEBUG:
        cv2.imshow("Combined", comb_mask)
        cv2.imshow("White", white_mask)
        cv2.imshow("Edge", filter_frame)
        cv2.imshow("Bright", bright_frame)
        cv2.imshow("original", bgr_frame)
        cv2.imshow("intersect", intersect)
        cv2.imshow("lanes", unwarp)
        cv2.imshow("result", result)
        cv2.setWindowTitle("result", filename)
    else:
        cv2.imshow("result", result)
        cv2.setWindowTitle("result", filename)
    return result
#
# Main application
def main():
    """Entry point: gather input files, load calibration, dispatch to the
    video or image pipeline based on the module-level flags set in
    __main__.
    """
    Ftf = ftf.FrameTransformer()
    # Print General Version information
    utils.print_version_information()
    # Search for available video files
    if not is_video:
        folder_name = image_folder_name
        file_format = image_file_format
    else:
        folder_name = video_folder_name
        file_format = video_file_format
    files = utils.get_list_of_input_files(folder_name, file_format)
    # Load calibration data
    # NOTE(review): when is_calibration is False, mtx/dist below are never
    # assigned and the process_* calls raise NameError -- confirm intended.
    if is_calibration:
        import pickle
        objpoints, imgpoints = pickle.load(open(calibration_data_path, "rb"))
        mtx, dist = Ftf.calibrate_camera(width, height, objpoints, imgpoints)
    if is_video:
        process_video(files, mtx, dist, Ftf)
    else:
        process_image(files, mtx, dist, Ftf)
    # Close Window
    cv2.destroyAllWindows()
#
# Run as script
if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description='Start lane detection. Press <q> to quit and <ESC> for next frame.')
    # NOTE(review): type=bool on argparse flags does not parse "False" --
    # any non-empty string is truthy; confirm before relying on --image.
    parser.add_argument('--image', type=bool, default=False,
                        help='Detect lanes on video files or images <False>')
    parser.add_argument('--nocal', type=bool, default=False,
                        help='Don\' calibrate input files <False>')
    parser.add_argument('--format', type=str, default=None,
                        help='File format <"mov">/<"jpg">')
    parser.add_argument('--size', nargs=2, type=int, default=[1280, 720],
                        help='Image width and height <1280 720>')
    parser.add_argument('--roi', type=bool, default=False,
                        help='Manual ROI selection <False>')
    args = parser.parse_args()
    # Module-level flags consumed by main() and the pipeline functions.
    is_video = not args.image
    is_calibration = not args.nocal
    is_man_roi = args.roi
    width = args.size[0]
    height = args.size[1]
    START_TICK_FRAME = 0
    video_file_format = 'mov'
    image_file_format = 'jpg'
    if is_video:
        video_folder_name = 'input_video'
        if args.format is not None:
            video_file_format = args.format
    else:
        image_folder_name = 'input_image'
        if args.format is not None:
            image_file_format = args.format
    calibration_data_path = os.path.join('lane_detection', 'calibration_data', "calibration.p")
    # Object for finding and drawing lanes, configured from config module.
    Detector = det.LaneDetector(is_video=is_video, queue_len=cfg.queue_len)
    # Drawing
    Detector.draw_area = cfg.draw_area
    Detector.l_lane_color = cfg.l_lane_color
    Detector.r_lane_color = cfg.r_lane_color
    Detector.lane_thickness = cfg.lane_thickness
    Detector.road_color = cfg.road_color
    # Frame dimensions
    Detector.width = cfg.width
    Detector.height = cfg.height
    # Sliding window
    Detector.n_windows = cfg.n_windows
    Detector.margin = cfg.margin
    Detector.nb_margin = cfg.nb_margin
    Detector.radii_threshold = cfg.radii_threshold
    # Conversion pixel to meter
    Detector.px_to_m_y = cfg.px_to_m_y
    Detector.px_to_m_x = cfg.px_to_m_x
    # Lanes and poly
    Detector.min_lane_dis = cfg.min_lane_dis
    Detector.poly_thr_a = cfg.poly_thr_a
    Detector.poly_thr_b = cfg.poly_thr_b
    Detector.poly_thr_c = cfg.poly_thr_c
    # Start process
    main()
| StarcoderdataPython |
1784699 | <reponame>shirou/dipus
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import urllib
import os
from sphinx.builders import Builder
from sphinx.util.osutil import ensuredir, os_path
from sphinx.util.nodes import inline_all_toctrees
from sphinx.util.console import bold, darkgreen, brown
from sphinx.errors import SphinxError
from docutils import nodes, writers
from docutils.nodes import Text
import simplejson
import search_html_t # template of search_dipus.html
import search_js_t # template of search_dipus.js
DEFAULT_HOST = "localhost"
DEFAULT_PORT = "9876"
class DipusWriter(writers.Writer):
    """Python 2 docutils writer that POSTs each document to a Dipus
    full-text-search server instead of writing files."""

    def __init__(self, builder):
        self.builder = builder
        writers.Writer.__init__(self)

    def getTitle(self, doctree):
        ''' return first text node as Title'''
        for node in doctree.traverse(Text):
            t = node.astext()
            if t:
                return t
        # Implicitly returns None when the document has no text nodes.

    def write(self, docname, doctree, conf):
        """POST the document's path, title and full text to the Dipus index
        named in *conf*; prints the server response on failure."""
        title = self.getTitle(doctree)
        if title is None:
            title = docname
        params = urllib.urlencode({
            'path': docname.encode('utf-8'),
            'title': title.encode('utf-8'),
            'message': doctree.astext().encode('utf-8'),
            '_index': conf['_index'],
            'doc_url': conf['doc_url'],
            'password': conf['password']
        })
        url = conf['server_url'] + '/' + conf['_index']
        ret = urllib.urlopen(url, params).read()
        result = simplejson.loads(ret)
        if 'ok' not in result or result['ok'] is not True:
            print "Error"
            print result
class DipusBuilder(Builder):
    """Sphinx builder (Python 2) that indexes documents into a Dipus
    search server and emits the client-side search page assets."""

    name = 'dipus'
    format = 'search'
    out_suffix = ''

    def output_templates(self, server_url, _index):
        """Write search_dipus.js and search_dipus.html into the project's
        static/template dirs, pointing at the Dipus _search endpoint."""
        dipus_url = "/".join([server_url, _index, "_search"])
        # TODO: fix if multiple html_static_path
        # TODO: should check files exist?
        path = self.config.html_static_path[0]
        if not os.path.exists(path):
            os.mkdir(path)
        js_path = os.path.join(path, "search_dipus.js")
        js = search_js_t.template.format(dipus_url=dipus_url)
        with open(js_path, "w") as fp:
            fp.write(js)
        path = self.config.templates_path[0]
        if not os.path.exists(path):
            os.mkdir(path)
        html_path = os.path.join(path, "search_dipus.html")
        html = search_html_t.template.format()
        with open(html_path, "w") as fp:
            fp.write(html)

    def get_outdated_docs(self):
        # Always rebuild everything: the index has no per-doc freshness info.
        return 'pass'

    def get_target_uri(self, docname, typ=None):
        return ''

    def check_deprecated_params(self):
        """Fail loudly when the removed dipus_host_url option is still set."""
        if self.config.dipus_host_url is not None:
            raise SphinxError("dipus_host_url is deprecated. \
use dipus_server_url")

    def prepare_writing(self, docnames):
        """Resolve config defaults and emit the client-side templates."""
        self.check_deprecated_params()
        self.writer = DipusWriter(self)
        if self.config.dipus_server_url is None:
            self.config.dipus_server_url = "http://{0}:{1}".format(
                DEFAULT_HOST, DEFAULT_PORT)
        if self.config.dipus_doc_url is None:
            html_dir = os.path.join(self.outdir, 'html')
            self.config.dipus_doc_url = "file:///{0}".format(html_dir)
        if self.config.dipus_index is None:
            # if dipus_index is not set, use project name
            p_name = self.config.project.encode('utf-8')
            self.config.dipus_index = urllib.quote(p_name)
        self.config.dipus_server_url = self.config.dipus_server_url.rstrip("/")
        self.config.dipus_index = self.config.dipus_index.rstrip("/")
        self.output_templates(self.config.dipus_server_url,
                              self.config.dipus_index)

    def write_doc(self, docname, doctree):
        conf = {
            'server_url': self.config.dipus_server_url,
            'doc_url': self.config.dipus_doc_url,
            '_index': self.config.dipus_index,
            'password': self.config.dipus_password
        }
        self.writer.write(docname, doctree, conf)
def setup(app):
    """Register the Dipus config values and builder with Sphinx."""
    config_names = (
        'dipus_server_url',
        'dipus_doc_url',
        'dipus_index',
        'dipus_password',
        'dipus_host_url',  # deprecated; kept so old conf.py files still load
    )
    for name in config_names:
        app.add_config_value(name, None, '')
    app.add_builder(DipusBuilder)
| StarcoderdataPython |
296545 | import os
os.environ["OMP_NUM_THREADS"] = "1"
# os.environ['KMP_AFFINITY']='compact,1,0'
import multiprocessing
import numpy as np
import ConfigSpace as CS
from hpbandster.core.worker import Worker
from src.learna.agent import NetworkConfig, get_network, AgentConfig
from src.learna.environment import RnaDesignEnvironment, RnaDesignEnvironmentConfig
from src.learna.design_rna import design_rna
from src.data.parse_dot_brackets import parse_dot_brackets
class LearnaWorker(Worker):
    """HpBandSter worker that evaluates one LEARNA hyperparameter
    configuration by training/evaluating the agent on a set of RNA
    validation sequences in parallel."""

    def __init__(self, data_dir, num_cores, train_sequences, **kwargs):
        super().__init__(**kwargs)
        # Size of the multiprocessing pool used in _evaluate.
        self.num_cores = num_cores
        self.train_sequences = parse_dot_brackets(
            dataset="rfam_learn/validation",
            data_dir=data_dir,
            target_structure_ids=train_sequences,
        )

    def compute(self, config, budget, **kwargs):
        """
        Parameters
        ----------
        budget: float
            cutoff for the agent on a single sequence

        Returns a dict with the BOHB loss (sum of per-sequence minimum
        distances) and the full validation info.
        """
        config = self._fill_config(config)
        network_config = NetworkConfig(
            conv_sizes=[config["conv_size1"], config["conv_size2"]],
            conv_channels=[config["conv_channels1"], config["conv_channels2"]],
            num_fc_layers=config["num_fc_layers"],
            fc_units=config["fc_units"],
            num_lstm_layers=config["num_lstm_layers"],
            lstm_units=config["lstm_units"],
            embedding_size=config["embedding_size"],
        )
        agent_config = AgentConfig(
            learning_rate=config["learning_rate"],
            batch_size=config["batch_size"],
            entropy_regularization=config["entropy_regularization"],
        )
        env_config = RnaDesignEnvironmentConfig(
            reward_exponent=config["reward_exponent"], state_radius=config["state_radius"]
        )
        validation_info = self._evaluate(
            budget, config["restart_timeout"], network_config, agent_config, env_config
        )
        return {
            "loss": validation_info["sum_of_min_distances"],
            "info": {"validation_info": validation_info},
        }

    def _evaluate(
        self,
        evaluation_timeout,
        restart_timeout,
        network_config,
        agent_config,
        env_config,
    ):
        """Run design_rna on every validation sequence (one process per
        sequence, up to num_cores at a time) and aggregate the results."""
        evaluation_arguments = [
            [
                [train_sequence],
                evaluation_timeout,  # timeout
                None,  # restore_path
                False,  # stop_learning
                restart_timeout,  # restart_timeout
                network_config,
                agent_config,
                env_config,
            ]
            for train_sequence in self.train_sequences
        ]
        with multiprocessing.Pool(self.num_cores) as pool:
            evaluation_results = pool.starmap(design_rna, evaluation_arguments)
        evaluation_sequence_infos = {}
        evaluation_sum_of_min_distances = 0
        evaluation_sum_of_first_distances = 0
        evaluation_num_solved = 0
        for r in evaluation_results:
            sequence_id = r[0].target_id
            # Order episodes chronologically before computing statistics.
            r.sort(key=lambda e: e.time)
            times = np.array(list(map(lambda e: e.time, r)))
            dists = np.array(list(map(lambda e: e.normalized_hamming_distance, r)))
            evaluation_sum_of_min_distances += dists.min()
            evaluation_sum_of_first_distances += dists[0]
            # A sequence counts as solved when any episode reached distance 0.
            evaluation_num_solved += dists.min() == 0.0
            evaluation_sequence_infos[sequence_id] = {
                "num_episodes": len(r),
                "mean_time_per_episode": float((times[1:] - times[:-1]).mean()),
                "min_distance": float(dists.min()),
                "last_distance": float(dists[-1]),
            }
        evaluation_info = {
            "num_solved": int(evaluation_num_solved),
            "sum_of_min_distances": float(evaluation_sum_of_min_distances),
            "sum_of_first_distances": float(evaluation_sum_of_first_distances),
            # NOTE(review): "squence_infos" typo is load-bearing -- consumers
            # may read this exact key; do not rename silently.
            "squence_infos": evaluation_sequence_infos,
        }
        return evaluation_info

    @staticmethod
    def get_configspace():
        """Build the BOHB search space: PPO hyperparameters plus the
        network architecture (conv/fc/lstm/embedding) parameters."""
        config_space = CS.ConfigurationSpace()
        # parameters for PPO here
        config_space.add_hyperparameter(
            CS.UniformFloatHyperparameter(
                "learning_rate", lower=1e-5, upper=1e-3, log=True, default_value=5e-4
            )
        )
        config_space.add_hyperparameter(
            CS.UniformIntegerHyperparameter(
                "batch_size", lower=32, upper=128, log=True, default_value=32
            )
        )
        config_space.add_hyperparameter(
            CS.UniformFloatHyperparameter(
                "entropy_regularization",
                lower=1e-5,
                upper=1e-2,
                log=True,
                default_value=1.5e-3,
            )
        )
        config_space.add_hyperparameter(
            CS.UniformFloatHyperparameter(
                "reward_exponent", lower=1, upper=10, default_value=1
            )
        )
        # Relative state radius in [0, 1]; mapped to an absolute radius
        # in _fill_config based on the chosen conv sizes.
        config_space.add_hyperparameter(
            CS.UniformFloatHyperparameter(
                "state_radius_relative", lower=0, upper=1, default_value=0
            )
        )
        # parameters for the architecture: radii are converted to odd
        # kernel sizes (or 0 = layer disabled) in _fill_config.
        config_space.add_hyperparameter(
            CS.UniformIntegerHyperparameter(
                "conv_radius1", lower=0, upper=8, default_value=1
            )
        )
        config_space.add_hyperparameter(
            CS.UniformIntegerHyperparameter(
                "conv_channels1", lower=1, upper=32, log=True, default_value=32
            )
        )
        config_space.add_hyperparameter(
            CS.UniformIntegerHyperparameter(
                "conv_radius2", lower=0, upper=4, default_value=0
            )
        )
        config_space.add_hyperparameter(
            CS.UniformIntegerHyperparameter(
                "conv_channels2", lower=1, upper=32, log=True, default_value=1
            )
        )
        config_space.add_hyperparameter(
            CS.UniformIntegerHyperparameter(
                "num_fc_layers", lower=1, upper=2, default_value=2
            )
        )
        config_space.add_hyperparameter(
            CS.UniformIntegerHyperparameter(
                "fc_units", lower=8, upper=64, log=True, default_value=50
            )
        )
        config_space.add_hyperparameter(
            CS.UniformIntegerHyperparameter(
                "num_lstm_layers", lower=0, upper=2, default_value=0
            )
        )
        config_space.add_hyperparameter(
            CS.UniformIntegerHyperparameter(
                "lstm_units", lower=1, upper=64, log=True, default_value=1
            )
        )
        config_space.add_hyperparameter(
            CS.UniformIntegerHyperparameter(
                "embedding_size", lower=0, upper=4, default_value=1
            )
        )
        return config_space

    @staticmethod
    def _fill_config(config):
        """Translate sampled values into concrete model parameters:
        radii -> kernel sizes, relative -> absolute state radius, and a
        fixed restart_timeout of None."""
        # Kernel size = 2*radius + 1; radius 0 disables the conv layer.
        config["conv_size1"] = 1 + 2 * config["conv_radius1"]
        if config["conv_radius1"] == 0:
            config["conv_size1"] = 0
        del config["conv_radius1"]
        config["conv_size2"] = 1 + 2 * config["conv_radius2"]
        if config["conv_radius2"] == 0:
            config["conv_size2"] = 0
        del config["conv_radius2"]
        # Minimum state radius must accommodate the (first active) conv size.
        if config["conv_size1"] != 0:
            min_state_radius = config["conv_size1"] + config["conv_size1"] - 1
            max_state_radius = 32
            config["state_radius"] = int(
                min_state_radius
                + (max_state_radius - min_state_radius) * config["state_radius_relative"]
            )
        else:
            min_state_radius = config["conv_size2"] + config["conv_size2"] - 1
            max_state_radius = 32
            config["state_radius"] = int(
                min_state_radius
                + (max_state_radius - min_state_radius) * config["state_radius_relative"]
            )
        del config["state_radius_relative"]
        config["restart_timeout"] = None
        return config
| StarcoderdataPython |
def century(year):
    """Return the 1-based century containing *year* (e.g. 1905 -> 20)."""
    quotient, remainder = divmod(year, 100)
    return quotient if remainder == 0 else quotient + 1
12854575 | # coding: utf-8
from flask.ext import wtf
import flask
import wtforms
import auth
import config
import model
import util
from main import app
###############################################################################
# Admin Stuff
###############################################################################
@app.route('/admin/')
@auth.admin_required
def admin():
    """Render the admin dashboard; access restricted to admins."""
    template_kwargs = {
        'title': 'Admin',
        'html_class': 'admin',
    }
    return flask.render_template('admin/dashboard/dashboard.html', **template_kwargs)
| StarcoderdataPython |
4883508 | <filename>src/TB3Util/tb3step.py<gh_stars>0
import midi
#Represents a single step in the TB-3 sequencer
class TB3Step:
    """A single step in the TB-3 sequencer.

    A step is a MIDI note plus per-step flags (accent, clear, slide).
    It can be built from a dict keyed by the KEY_* constants, as shown in
    __init__; the same keys are presumably used when serializing elsewhere.
    """
    # Dict keys consumed by __init__ when unpacking a step dict.
    KEY_NOTE = "NOTE"
    KEY_ACCENT = "ACCENT"
    KEY_CLEAR = "CLEAR"
    KEY_SLIDE = "SLIDE"
    def __init__(self,step_dict=None):
        # Default step when no dict is given; otherwise unpack the dict.
        if(step_dict == None):
            self._init()
        else:
            self._init(step_dict[TB3Step.KEY_NOTE],step_dict[TB3Step.KEY_ACCENT],step_dict[TB3Step.KEY_CLEAR],step_dict[TB3Step.KEY_SLIDE])
    def __repr__(self):
        return "TB3Step(note=%s,accent=%s,clear=%s,slide=%s)" % (self.note,self.accent,self.clear,self.slide)
    def _init(self,note=midi.C_3,accent=False,clear=True,slide=False):
        # Shared initializer; note defaults to C3 (constant from the
        # project's midi module), and a fresh step starts out "clear".
        self.note = note
        self.accent = accent
        self.clear = clear
        self.slide = slide
    def get_note(self):
        return self.note
    def get_accent(self):
        return self.accent
    def get_clear(self):
        return self.clear
    def get_slide(self):
        return self.slide | StarcoderdataPython |
22662 | # Copyright 2017 Rice University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from program_helper.ast.ops import DAPIInvoke
from synthesis.ops.candidate_ast import SYMTAB_MOD, TYPE_NODE, API_NODE, VAR_NODE, OP_NODE, METHOD_NODE, CLSTYPE_NODE, \
VAR_DECL_NODE
class AstReverseMapper:
    """Accumulates flattened AST path data and decodes it back into
    human-readable token names via the vocabulary's reverse lookup tables.

    All per-node sequences (nodes, edges, targets, ...) are kept as parallel
    lists of equal length.
    """
    def __init__(self, vocab):
        # vocab exposes chars_* tables mapping ids back to token strings
        # (chars_concept, chars_var, chars_type, chars_api, chars_op,
        # chars_method).
        self.vocab = vocab
        self.nodes, self.edges, self.targets = [], [], []
        self.var_decl_ids = []
        self.node_type_numbers = []
        self.type_helper_val, self.expr_type_val, self.ret_type_val = [], [], []
        self.num_data = 0
        return
    def add_data(self, nodes, edges, targets,
                 var_decl_ids,
                 node_type_number,
                 type_helper_val, expr_type_val, ret_type_val):
        """Append one batch of parallel per-node sequences."""
        self.nodes.extend(nodes)
        self.edges.extend(edges)
        self.targets.extend(targets)
        self.var_decl_ids.extend(var_decl_ids)
        self.node_type_numbers.extend(node_type_number)
        self.type_helper_val.extend(type_helper_val)
        self.expr_type_val.extend(expr_type_val)
        self.ret_type_val.extend(ret_type_val)
        self.num_data += len(nodes)
    def get_element(self, id):
        """Return the id-th stored example as a tuple of parallel sequences."""
        return self.nodes[id], self.edges[id], self.targets[id], \
               self.var_decl_ids[id], \
               self.node_type_numbers[id], \
               self.type_helper_val[id], self.expr_type_val[id], self.ret_type_val[id]
    def decode_ast_paths(self, ast_element, partial=True):
        """Print a decoded view of one AST element.

        Emits comma-separated lines: node concepts, edges, then the target of
        each node decoded according to its node-type number. With
        partial=False the var-decl ids and the three type sequences are
        printed as well.
        """
        nodes, edges, targets, \
        var_decl_ids, \
        node_type_numbers, \
        type_helper_vals, expr_type_vals, ret_type_vals = ast_element
        for node in nodes:
            print(self.vocab.chars_concept[node], end=',')
        print()
        #
        for edge in edges:
            print(edge, end=',')
        print()
        # Decode each target with the lookup table matching its node type.
        # NOTE: the loop variable node_type_numbers deliberately shadows the
        # unpacked list above; zip(*ast_element) re-reads the raw tuple.
        for _, _, target, \
            var_decl_id, \
            node_type_numbers, \
            type_helper_val, expr_type_val, ret_type_val in zip(*ast_element):
            if node_type_numbers == SYMTAB_MOD:
                print('--symtab--', end=',')
            elif node_type_numbers == VAR_NODE:
                print(self.vocab.chars_var[target], end=',')
            elif node_type_numbers == VAR_DECL_NODE:
                print(self.vocab.chars_var[target], end=',')
            elif node_type_numbers == TYPE_NODE:
                print(self.vocab.chars_type[target], end=',')
            elif node_type_numbers == CLSTYPE_NODE:
                print(self.vocab.chars_type[target], end=',')
            elif node_type_numbers == API_NODE:
                # API vocab entries bundle extra data after a delimiter; only
                # the leading API name is printed.
                api = self.vocab.chars_api[target]
                api = api.split(DAPIInvoke.delimiter())[0]
                print(api, end=',')
            elif node_type_numbers == OP_NODE:
                op = self.vocab.chars_op[target]
                print(op, end=',')
            elif node_type_numbers == METHOD_NODE:
                op = self.vocab.chars_method[target]
                print(op, end=',')
            else:
                print(self.vocab.chars_concept[target], end=',')
        print()
        if not partial:
            for var_decl_id in var_decl_ids:
                print(var_decl_id, end=',')
            print()
            for type_helper_val in type_helper_vals:
                print(self.vocab.chars_type[type_helper_val], end=',')
            print()
            for expr_type_val in expr_type_vals:
                print(self.vocab.chars_type[expr_type_val], end=',')
            print()
            for ret_type_val in ret_type_vals:
                print(self.vocab.chars_type[ret_type_val], end=',')
            print()
        print()
    def reset(self):
        """Drop all accumulated data, returning to the freshly-built state."""
        self.nodes, self.edges, self.targets = [], [], []
        self.var_decl_ids = []
        self.node_type_numbers = []
        self.type_helper_val, self.expr_type_val, self.ret_type_val = [], [], []
        self.num_data = 0 | StarcoderdataPython |
3546958 | # This file is part of App3 (http://code.google.com/p/app3).
#
# Copyright (C) 2009 <NAME> http://geewax.org/
# All rights reserved.
#
# This software is licensed as described in the file COPYING.txt,
# which you should have received as part of this distribution.
import base64
import hashlib
import hmac
import logging

from datetime import datetime
# RFC 2822 date format, always rendered in UTC (hence the fixed "+0000").
TIMEFORMAT = "%a, %d %b %Y %H:%M:%S +0000"
def generate_auth(request):
    """Compute the base64-encoded HMAC signature for *request*.

    The signed message is "<path>\\n<sorted params>\\n<timestamp>".
    The timestamp format should be as specified in RFC 2822 and in UTC:
    "%a, %d %b %Y %H:%M:%S +0000"
    - See http://www.faqs.org/rfcs/rfc2822.html

    Returns the signature as an ASCII string.
    """
    params = request.params or {}
    params = '&'.join("%s=%s" % (key, params[key]) for key in sorted(params))
    message = "%s\n%s\n%s" % (request.path, params, request.app3_timestamp)
    key = request.secret_key
    if isinstance(key, str):
        key = key.encode('utf-8')
    # digestmod is mandatory on Python 3.8+; MD5 matches the historical
    # implicit default so previously issued signatures keep verifying.
    # NOTE(review): consider migrating clients to SHA-256.
    auth = hmac.new(
        key=key,
        msg=message.encode('utf-8'),
        digestmod=hashlib.md5,
    ).digest()
    # base64.encodebytes replaces base64.encodestring, removed in Python 3.9.
    return base64.encodebytes(auth).strip().decode('ascii')
def generate_timestamp():
    """Return the current UTC time formatted with TIMEFORMAT (RFC 2822)."""
    now = datetime.utcnow()
    return now.strftime(TIMEFORMAT)
def is_within_n_minutes(sent_time, n=15):
    """Return True iff *sent_time* is within *n* minutes of now.

    *sent_time* is a TIMEFORMAT-formatted string; all times are in UTC.

    Bug fix: the previous check used timedelta.seconds, which is only the
    seconds-within-a-day component, so timestamps that were whole days off
    (but close in time-of-day) were wrongly accepted. total_seconds()
    accounts for the days as well.
    """
    sent = datetime.strptime(sent_time, TIMEFORMAT)
    diff = abs(datetime.utcnow() - sent)
    return diff.total_seconds() < n * 60
def is_authorized(request):
    """Return whether *request* carries a valid, fresh HMAC signature.

    Requires both app3 headers to be present, the timestamp to be within
    15 minutes of now (replay protection), and the signature to match the
    one we compute ourselves from the request.
    """
    # Need all of the headers to have been passed for authentication
    if not all((request.app3_auth, request.app3_timestamp)):
        logging.info('Failed authorization: missing headers.')
        return False
    # Time skew... Could be replay attack?
    if not is_within_n_minutes(request.app3_timestamp, 15):
        logging.info('Failed authorization: not within n minutes.')
        return False
    # Constant-time comparison so the expected signature cannot be probed
    # byte-by-byte through timing differences.
    if not hmac.compare_digest(request.app3_auth, generate_auth(request)):
        logging.info('Failed authorization: auth mismatch.')
        return False
    return True
| StarcoderdataPython |
5024767 | import itertools
from math import ceil, floor
import numpy as np
from numpy.random import randint, random
from .utils import ensure_multiplicity
from scipy.ndimage import zoom
# Tiling strategies accepted by extract_tiles / extract_tiles_random_zoom;
# see the extract_tiles docstring for the semantics of each mode.
OVERLAP_MODE = ["NO_OVERLAP", "ALLOW", "FORCE"]
def extract_tile_function(tile_shape, perform_augmentation=True, overlap_mode=OVERLAP_MODE[1], min_overlap=1, n_tiles=None, random_stride=False, augmentation_rotate=True):
    """Bind the tiling options into a (batch, is_mask) -> tiles callable.

    Rotation augmentation is only applied when the tile is square (all sides
    equal), since a 90-degree rotation would otherwise change the tile shape.
    """
    n_dims = len(tile_shape)
    square = all(s == tile_shape[0] for s in tile_shape)
    def func(batch, is_mask):
        tiles = extract_tiles(
            batch,
            tile_shape=tile_shape,
            overlap_mode=overlap_mode,
            min_overlap=min_overlap,
            n_tiles=n_tiles,
            random_stride=random_stride,
            return_coords=False,
        )
        if not perform_augmentation:
            return tiles
        return augment_tiles_inplace(tiles, rotate=augmentation_rotate and square, n_dims=n_dims)
    return func
def extract_tiles(batch, tile_shape, overlap_mode=OVERLAP_MODE[1], min_overlap=1, n_tiles=None, random_stride=False, return_coords=False):
    """Extract tiles.
    Parameters
    ----------
    batch : numpy array
        dimensions BYXC or BZYXC (B = batch)
    tile_shape : tuple
        tile shape, dimensions YX or ZYX. Z,Y,X must be inferior or equal to batch dimensions
    overlap_mode : string
        one of ["NO_OVERLAP", "ALLOW", "FORCE"]
        "NO_OVERLAP" maximum number of tiles so that they do not overlap
        "ALLOW" maximum number of tiles that fit in the image, allowing overlap
        "FORCE" maximum number of tiles that fit in the image while enforcing a minimum overlap defined by min_overlap. If min_overlap is less than zero, it enforces a distance between tiles
    min_overlap : integer or tuple
        min overlap along each spatial dimension. only used in mode "FORCE"
    n_tiles : int
        if provided overlap_mode and min_overlap are ignored
    random_stride : bool
        whether tile coordinates should be randomized, within the gap / overlap zone
    return_coords : bool
        whether tile coordinates should be returned
    Returns
    -------
    numpy array, ([numpy array])
        tiles concatenated along first axis, (tile coordinates)
    """
    image_shape = batch[0].shape[1:-1] if isinstance(batch, (list, tuple)) else batch.shape[1:-1]
    tile_shape = ensure_multiplicity(len(image_shape), tile_shape)
    if n_tiles is None:
        tile_coords = _get_tile_coords_overlap(image_shape, tile_shape, overlap_mode, min_overlap, random_stride)
    else:
        assert len(image_shape)==2, "only 2d images supported when specifying n_tiles"
        _, n_tiles_yx = get_stride_2d(image_shape, tile_shape, n_tiles)
        tile_coords = _get_tile_coords(image_shape, tile_shape, n_tiles_yx, random_stride)
    if len(image_shape)==2:
        tile_fun = lambda b : np.concatenate([b[:, tile_coords[0][i]:tile_coords[0][i] + tile_shape[0], tile_coords[1][i]:tile_coords[1][i] + tile_shape[1]] for i in range(len(tile_coords[0]))])
    else:
        tile_fun = lambda b : np.concatenate([b[:, tile_coords[0][i]:tile_coords[0][i] + tile_shape[0], tile_coords[1][i]:tile_coords[1][i] + tile_shape[1], tile_coords[2][i]:tile_coords[2][i] + tile_shape[2]] for i in range(len(tile_coords[0]))])
    # Bug fix: the original returned here unconditionally, which made the
    # return_coords branch below unreachable (the parameter was silently
    # ignored). Build the tiles first, then honor return_coords.
    if isinstance(batch, (list, tuple)):
        tiles = [tile_fun(b) for b in batch]
    else:
        tiles = tile_fun(batch)
    if return_coords:
        return tiles, tile_coords
    return tiles
def extract_tile_random_zoom_function(tile_shape, perform_augmentation=True, overlap_mode=OVERLAP_MODE[1], min_overlap=1, n_tiles=None, random_stride=False, augmentation_rotate=True, zoom_range=[0.6, 1.6], aspect_ratio_range=[0.6, 1.6], interpolation_order=1):
    """Bind the tiling + random-zoom options into a (batch, is_mask) callable.

    Mask channels are always resampled with order 0 (nearest neighbour) so
    label values are not blended by interpolation.
    """
    def func(batch, is_mask):
        if isinstance(batch, (list, tuple)):
            is_mask = ensure_multiplicity(len(batch), is_mask)
            order = [0 if m else interpolation_order for m in is_mask]
        else:
            # Bug fix: `order` was previously only assigned in the list/tuple
            # branch, raising NameError for single-array batches.
            order = 0 if is_mask else interpolation_order
        tiles = extract_tiles_random_zoom(batch, tile_shape=tile_shape, overlap_mode=overlap_mode, min_overlap=min_overlap, n_tiles=n_tiles, random_stride=random_stride, zoom_range=zoom_range, aspect_ratio_range=aspect_ratio_range, interpolation_order=order)
        if perform_augmentation:
            tiles = augment_tiles_inplace(tiles, rotate = augmentation_rotate and all([s==tile_shape[0] for s in tile_shape]), n_dims=len(tile_shape))
        return tiles
    return func
def extract_tiles_random_zoom(batch, tile_shape, overlap_mode=OVERLAP_MODE[1], min_overlap=1, n_tiles=None, random_stride=False, zoom_range=[0.6, 1.6], aspect_ratio_range=[0.6, 1.6], interpolation_order=1):
    """Extract tiles with random zoom.
    Parameters
    ----------
    batch : numpy array
        dimensions BYXC or BZYXC (B = batch)
    tile_shape : tuple
        tile shape, dimensions YX or ZYX. Z,Y,X,must be inferior or equal to batch dimensions
    overlap_mode : string
        one of ["NO_OVERLAP", "ALLOW", "FORCE"]
        "NO_OVERLAP" maximum number of tiles so that they do not overlap
        "ALLOW" maximum number of tiles that fit in the image, allowing overlap
        "FORCE" maximum number of tiles that fit in the image while enforcing a minimum overlap defined by min_overlap. If min_overlap is less than zero, it enforces a distance between tiles
    min_overlap : integer or tuple
        min overlap along each spatial dimension. only used in mode "FORCE"
    n_tiles : int
        if provided overlap_mode and min_overlap are ignored
    random_stride : bool
        whether tile coordinates should be randomized, within the gap / overlap zone
    zoom_range : list
        [min zoom ratio, max zoom ratio]
    aspect_ratio_range : list
        aspect ratio relative to the first axis.
        [min aspect ratio, max aspect ratio]
    interpolation_order : int
        The order of the spline interpolation passed to scipy.ndimage.zoom
    Returns
    -------
    numpy array
        tiles concatenated along first axis
    """
    # NOTE(review): zoom_range / aspect_ratio_range are mutable defaults;
    # harmless here since they are rebound rather than mutated.
    image_shape = batch[0].shape[1:-1] if isinstance(batch, (list, tuple)) else batch.shape[1:-1]
    rank = len(image_shape)
    assert rank in [2, 3], "only 2D or 3D images are supported"
    aspect_ratio_range = ensure_multiplicity(2, aspect_ratio_range)
    assert aspect_ratio_range[0]<=aspect_ratio_range[1], "invalid aspect_ratio_range"
    # Invert so the range applies to the *sampled* crop size: a larger crop
    # zoomed down to tile_shape corresponds to a smaller aspect factor.
    aspect_ratio_range = [1./aspect_ratio_range[1], 1./aspect_ratio_range[0]]
    zoom_range = ensure_multiplicity(2, zoom_range)
    assert zoom_range[0]<=zoom_range[1], "invalid zoom range"
    tile_shape = ensure_multiplicity(len(image_shape), tile_shape)
    if n_tiles is None:
        tile_coords = _get_tile_coords_overlap(image_shape, tile_shape, overlap_mode, min_overlap, random_stride)
    else:
        assert len(image_shape)==2, "only 2d images supported when specifying n_tiles"
        _, n_tiles_yx = get_stride_2d(image_shape, tile_shape, n_tiles)
        tile_coords = _get_tile_coords(image_shape, tile_shape, n_tiles_yx, random_stride)
    # One random zoom factor per tile; extra per-axis aspect factors for all
    # axes after the first. The local name `zoom` shadows scipy's zoom here
    # (the module-level _zoom helper still sees the scipy function).
    zoom = random(tile_coords[0].shape[0]) * (zoom_range[1] - zoom_range[0]) + zoom_range[0]
    aspect_ratio = [random(tile_coords[0].shape[0]) * (aspect_ratio_range[1] - aspect_ratio_range[0]) + aspect_ratio_range[0] for ax in range(1, len(image_shape)) ]
    # Per-tile crop size: axis 0 scales by zoom alone, the other axes by
    # zoom * aspect_ratio, rounded to integer pixel counts.
    tile_size_fun = lambda ax : np.rint(zoom * tile_shape[ax]).astype(int) if ax==0 else np.rint(zoom * aspect_ratio[ax-1] * tile_shape[ax]).astype(int)
    r_tile_shape = [tile_size_fun(ax) for ax in range(len(image_shape))]
    # Crop each (variable-size) region and rescale it back to tile_shape.
    if rank==2:
        tile_fun = lambda b,o : np.concatenate([_zoom(b[:, tile_coords[0][i]:tile_coords[0][i] + r_tile_shape[0][i], tile_coords[1][i]:tile_coords[1][i] + r_tile_shape[1][i]], tile_shape, o) for i in range(len(tile_coords[0]))])
    else:
        tile_fun = lambda b,o : np.concatenate([_zoom(b[:, tile_coords[0][i]:tile_coords[0][i] + r_tile_shape[0][i], tile_coords[1][i]:tile_coords[1][i] + r_tile_shape[1][i], tile_coords[2][i]:tile_coords[2][i] + r_tile_shape[2][i]], tile_shape, o) for i in range(len(tile_coords[0]))])
    if isinstance(batch, (list, tuple)): # multi-channel case
        interpolation_order= ensure_multiplicity(len(batch), interpolation_order)
        return [tile_fun(b, interpolation_order[i]) for i, b in enumerate(batch)]
    else:
        return tile_fun(batch, interpolation_order)
def _zoom(batch, target_shape, order):
ratio = [i / j for i, j in zip(target_shape, batch.shape[1:-1])]
return zoom(batch, zoom = [1] + ratio + [1], order=order)
def get_stride_2d(image_shape, tile_shape, n_tiles):
    """Solve for the (stride_y, stride_x) and (n_tiles_y, n_tiles_x) grid
    that yields approximately n_tiles tiles over a 2D image.

    With Sy = H - tile_h and Sx = W - tile_w, the tile counts per axis are
    Sy/s + 1 and Sx/s + 1 for a common stride s, so requiring their product
    to equal n_tiles gives the quadratic (1-n) s^2 + (Sy+Sx) s + Sx*Sy = 0
    solved below. The axis whose count is closest to an integer is rounded
    first, and the other count is derived so the product stays <= n_tiles.
    """
    if n_tiles == 1:
        # Degenerate case: a single tile covering the whole image.
        return (image_shape[0], image_shape[1]), (1, 1)
    assert len(image_shape)==2, "only available for 2d images"
    tile_shape = ensure_multiplicity(2, tile_shape)
    Sy = image_shape[0] - tile_shape[0]
    Sx = image_shape[1] - tile_shape[1]
    assert Sy>=0, "tile size is too high on first axis"
    assert Sx>=0, "tile size is too high on second axis"
    # Quadratic coefficients for (1-n) s^2 + (Sy+Sx) s + Sx*Sy = 0.
    a = - n_tiles + 1
    b = Sy + Sx
    c = Sx*Sy
    d = b**2 - 4*a*c
    d = np.sqrt(d)
    r1 = (-b+d)/(2*a)
    r2 = (-b-d)/(2*a)
    # Keep the positive (larger) root as the common stride.
    stride = r1 if r1>r2 else r2
    n_tiles_x = (Sx / stride) + 1
    n_tiles_y = (Sy / stride) + 1
    n_tiles_x_i = round(n_tiles_x)
    n_tiles_y_i = round(n_tiles_y)
    # Round the axis closest to an integer count, derive the other by
    # integer division so n_tiles_y * n_tiles_x <= n_tiles.
    if abs(n_tiles_x_i-n_tiles_x)<abs(n_tiles_y_i-n_tiles_y):
        n_tiles_x = n_tiles_x_i
        n_tiles_y = n_tiles // n_tiles_x
    else:
        n_tiles_y = n_tiles_y_i
        n_tiles_x = n_tiles // n_tiles_y
    # Final per-axis strides; a single tile on an axis uses the full extent.
    stride_x = Sx // (n_tiles_x - 1) if n_tiles_x > 1 else image_shape[1]
    stride_y = Sy // (n_tiles_y - 1) if n_tiles_y > 1 else image_shape[0]
    return (stride_y, stride_x), (n_tiles_y, n_tiles_x)
def _get_tile_coords(image_shape, tile_shape, n_tiles, random_stride=False):
    """Return per-axis flattened coordinate grids for every tile origin."""
    assert len(image_shape) == len(tile_shape), "tile rank should be equal to image rank"
    assert len(image_shape) == len(n_tiles), "n_tiles should have same rank as image"
    per_axis = [
        _get_tile_coords_axis(size, tile, n, random_stride=random_stride)
        for size, tile, n in zip(image_shape, tile_shape, n_tiles)
    ]
    grids = np.meshgrid(*per_axis, sparse=False, indexing='ij')
    return [g.flatten() for g in grids]
def _get_tile_coords_overlap(image_shape, tile_shape, overlap_mode=OVERLAP_MODE[1], min_overlap=1, random_stride=False):
    """Per-axis flattened tile-origin grids honoring the overlap strategy."""
    rank = len(image_shape)
    min_overlap = ensure_multiplicity(rank, min_overlap)
    assert rank == len(tile_shape), "tile shape should be equal to image shape"
    per_axis = [
        _get_tile_coords_axis_overlap(image_shape[ax], tile_shape[ax], overlap_mode, min_overlap[ax], random_stride)
        for ax in range(rank)
    ]
    grids = np.meshgrid(*per_axis, sparse=False, indexing='ij')
    return [g.flatten() for g in grids]
def _get_tile_coords_axis_overlap(size, tile_size, overlap_mode=OVERLAP_MODE[1], min_overlap=1, random_stride=False):
    """Derive the tile count along one axis from the overlap mode, then
    delegate coordinate placement to _get_tile_coords_axis."""
    if tile_size==size:
        # Tile covers the axis exactly: one tile at origin.
        return [0]
    assert tile_size<size, "tile size must be inferior or equal to size"
    o_mode = OVERLAP_MODE.index(overlap_mode)
    assert o_mode>=0 and o_mode<=2, "invalid overlap mode"
    if o_mode==0:
        # NO_OVERLAP: as many whole tiles as fit without overlapping.
        n_tiles = int(size/tile_size)
    elif o_mode==1:
        # ALLOW: cover the whole axis, letting tiles overlap if needed.
        n_tiles = ceil(size/tile_size)
    elif o_mode==2:
        # FORCE: enforce a minimum overlap (or, if negative, a minimum gap).
        assert min_overlap<tile_size, "invalid min_overlap: value: {} should be <{}".format(min_overlap, tile_size)
        if min_overlap>=0:
            n_tiles = 1 + ceil((size - tile_size)/(tile_size - min_overlap)) # size = tile_size + (n-1) * (tile_size - min_overlap)
        else:
            n_tiles = floor((size - min_overlap)/(tile_size - min_overlap)) # n-1 gaps and n tiles: size = n * tile_size + (n-1)*-min_overlap
    return _get_tile_coords_axis(size, tile_size, n_tiles, random_stride)
def _get_tile_coords_axis(size, tile_size, n_tiles, random_stride=False):
    """Place n_tiles start coordinates for tiles of tile_size along an axis
    of length size, optionally jittering them with random_stride."""
    if n_tiles==1:
        # Single tile: centered; jitter may move it anywhere it still fits.
        # (list + randint array broadcasts to a 1-element array.)
        coords = [(size - tile_size)//2]
        if random_stride and coords[0]>0:
            coords += randint(-coords[0], size-(coords[0]+tile_size), size=1)
        return coords
    if n_tiles==2:
        # Two tiles flush with both ends; jitter moves each toward the
        # center by at most half of the gap between them.
        coords = [0, size-tile_size]
        if random_stride:
            gap = size - 2 * tile_size
            if gap>1:
                delta = randint(0, gap//2, size=2)
                coords[0] += delta[0]
                coords[1] -= delta[1]
        return coords
    # General case: distribute the total overshoot/undershoot (sum_stride)
    # as evenly as possible across the n_tiles-1 junctions, handing the
    # remainder out one pixel at a time to the first junctions.
    sum_stride = np.abs(n_tiles * tile_size - size)
    stride = np.array([0]+[sum_stride//(n_tiles-1)]*(n_tiles-1), dtype=int)
    remains = sum_stride%(n_tiles-1)
    stride[1:remains+1] += 1
    if np.sign(n_tiles * tile_size - size)>0:
        # Tiles overshoot the axis: junction offsets must shift tiles back.
        stride=-stride
    stride = np.cumsum(stride)
    coords = np.array([tile_size*idx + stride[idx] for idx in range(n_tiles)])
    # print("before random: n_tiles: {}, tile_size: {} size: {}, stride: {}, coords: {}".format(n_tiles, tile_size, size, stride, coords))
    if random_stride:
        # Jitter each coordinate by up to ~half the inter-tile spacing (or
        # overlap), then clamp the first/last tiles back inside the axis.
        spacing = (size-tile_size)//(n_tiles-1)
        if spacing >= tile_size: # no overlap
            half_mean_gap = floor(0.5 * (spacing-tile_size) )
        else: # overlap
            half_mean_gap = ceil(0.5 * spacing )
        coords += randint(-half_mean_gap, half_mean_gap+1, size=n_tiles)
        coords[0] = max(coords[0], 0)
        coords[-1] = min(coords[-1], size-tile_size)
        # print("after random: spacing: {}, gap: {}, coords: {}".format(spacing, half_mean_gap, coords))
    return coords
def augment_tiles(tiles, rotate, n_dims=2):
    """Return *tiles* stacked with flipped (and optionally rotated) copies.

    The output batch is 4x the input (identity + flip on each spatial axis +
    flip on both), or 8x when *rotate* is True (each variant additionally
    rotated by 90 degrees). Spatial axes are (1, 2) for 2D batches and
    (2, 3) for 3D batches.
    """
    axes = (1, 2) if n_dims == 2 else (2, 3)
    variants = [
        tiles,
        np.flip(tiles, axis=axes[0]),
        np.flip(tiles, axis=axes[1]),
        np.flip(tiles, axis=axes),
    ]
    augmented = np.concatenate(variants, axis=0)
    if rotate:
        augmented = np.concatenate((augmented, np.rot90(augmented, k=1, axes=axes)))
    return augmented
# The 8 symmetries of the square (flips and 90-degree rotations) for a 2D
# tile laid out as (Y, X, ...). Index order matters: augment_tiles_inplace
# draws indices 0-3 (identity + flips only) when rotation is disabled, so
# the rotated variants must stay in the second half of the list.
AUG_FUN_2D = [
    lambda img : img,
    lambda img : np.flip(img, axis=0),
    lambda img : np.flip(img, axis=1),
    lambda img : np.flip(img, axis=(0, 1)),
    lambda img : np.rot90(img, k=1, axes=(0,1)),
    lambda img : np.rot90(img, k=3, axes=(0,1)), # rot + flip0
    lambda img : np.rot90(np.flip(img, axis=1), k=1, axes=(0,1)),
    lambda img : np.rot90(np.flip(img, axis=(0, 1)), k=1, axes=(0,1))
]
# Same 8 symmetries applied in the (Y, X) plane of a 3D tile (Z, Y, X, ...):
# axis indices are shifted by one to skip the Z axis.
AUG_FUN_3D = [
    lambda img : img,
    lambda img : np.flip(img, axis=1),
    lambda img : np.flip(img, axis=2),
    lambda img : np.flip(img, axis=(1, 2)),
    lambda img : np.rot90(img, k=1, axes=(1,2)),
    lambda img : np.rot90(img, k=3, axes=(1,2)), # rot + flip0
    lambda img : np.rot90(np.flip(img, axis=2), k=1, axes=(1,2)),
    lambda img : np.rot90(np.flip(img, axis=(1, 2)), k=1, axes=(1,2))
]
def augment_tiles_inplace(tiles, rotate, n_dims=2):
    """Apply one randomly chosen flip/rotation symmetry to each tile,
    modifying *tiles* in place (and also returning it).

    *tiles* is either a single batch array or a list/tuple of batch arrays
    (multi-channel case); in the latter case the same symmetry index is
    applied at each tile position across all channel arrays.
    """
    aug_fun = AUG_FUN_2D if n_dims==2 else AUG_FUN_3D
    n_tiles = tiles[0].shape[0] if isinstance(tiles, (tuple, list)) else tiles.shape[0]
    # Without rotation only the first half of the table (identity + flips)
    # may be drawn. Bug fix: use integer division — len(aug_fun) / 2 is a
    # float, which np.random.randint only accepts via silent truncation.
    n_choices = len(aug_fun) if rotate else len(aug_fun) // 2
    aug = randint(0, n_choices, size=n_tiles)
    if isinstance(tiles, (tuple, list)):
        for bidx in range(len(tiles)):
            for b in range(n_tiles):
                if aug[b] > 0:  # 0 is identity
                    tiles[bidx][b] = aug_fun[aug[b]](tiles[bidx][b])
    else:
        for b in range(n_tiles):
            if aug[b] > 0:  # 0 is identity
                tiles[b] = aug_fun[aug[b]](tiles[b])
    return tiles
| StarcoderdataPython |
5141528 | <reponame>edwinb-ai/segnet
import tensorflow.keras as K
from segmed.models import Unet
from segmed.metrics.metrics import jaccard_index
import numpy as np
from . import SimpleDataset
class TestSimpleUnet(SimpleDataset):
    """Smoke tests for the "simple" U-Net variant: model construction plus a
    tiny end-to-end fit/predict cycle."""
    @staticmethod
    def test_simple_unet_is_model():
        """Test that the model is an actual Keras model,
        and that the layers are valid ones.
        """
        model = Unet((256, 256, 3), variant="simple").collect()
        assert isinstance(model, K.Model)
    def test_simple_unet_segmentation(self):
        """Test that the UNet model can train correctly, and that it
        returns a valid result.
        """
        # Import some sample images from within the directory
        x_train, x_test, y_train, y_test = self._create_dataset()
        # Create the model and train it, test the results
        model = Unet(x_train[0].shape, variant="simple").collect()
        model.compile(
            loss=K.losses.BinaryCrossentropy(),
            optimizer=K.optimizers.Adam(),
            metrics=[jaccard_index],
        )
        # Two epochs with batch size 1 keep the test fast while still
        # exercising the full training loop.
        model.fit(
            x=x_train,
            y=y_train,
            batch_size=1,
            epochs=2,
            validation_data=(x_test, y_test),
        )
        result = model.predict(x_test)
        # Predicted masks must have the same shape as the ground truth.
        assert result[0].shape == y_test[0].shape
class TestCustomUnet(SimpleDataset):
    """Smoke tests for the "custom" U-Net variant, which takes an explicit
    convolution-parameter dict (activation, padding, batch norm, L2 reg)."""
    @staticmethod
    def test_custom_unet_is_model():
        """Test that the model is an actual Keras model,
        and that the layers are valid ones.
        """
        conv = {
            "activation": "relu",
            "padding": "same",
            "batch_norm": True,
            "l2_reg": 0.995,
        }
        model = Unet((256, 256, 3), variant="custom", parameters=conv).collect()
        assert isinstance(model, K.Model)
    def test_custom_unet_segmentation(self):
        """Test that the UNet model can train correctly, and that it
        returns a valid result.
        """
        # Import some sample images from within the directory
        x_train, x_test, y_train, y_test = self._create_dataset()
        # Create the model and train it, test the results
        conv = {
            "activation": "relu",
            "padding": "same",
            "batch_norm": True,
            "l2_reg": 0.995,
        }
        model = Unet(x_train[0].shape, variant="custom", parameters=conv).collect()
        model.compile(
            loss=K.losses.BinaryCrossentropy(),
            optimizer=K.optimizers.Adam(),
            metrics=[jaccard_index],
        )
        # Minimal training run: enough to verify fit/predict work end-to-end.
        model.fit(
            x=x_train,
            y=y_train,
            batch_size=1,
            epochs=2,
            validation_data=(x_test, y_test),
        )
        result = model.predict(x_test)
        # Predicted masks must have the same shape as the ground truth.
        assert result[0].shape == y_test[0].shape
| StarcoderdataPython |
222165 | <filename>src/plotting/plotting.py
import os
import sys
import pickle
import matplotlib.pyplot as plt
import numpy as np
import h5py
import argparse
from mpl_toolkits.axes_grid1 import make_axes_locatable
import pylab as PL
import matplotlib
from matplotlib import cbook
import matplotlib.gridspec as gridspec
from matplotlib.colors import Normalize
sys.path.insert(0, os.getcwd())
from src.utils.helpers import *
# Root under which results/ are read and plotting/ outputs are written.
GTEx_directory = '.'
# CLI selects which plotting class ("group") and static method ("name") to
# run; the dispatch happens in the __main__ block at the bottom of the file.
parser = argparse.ArgumentParser(description='Collection of plotting results. Runs on local computer.')
parser.add_argument('-g', '--group', help='Plotting group', required=True)
parser.add_argument('-n', '--name', help='Plotting name', required=True)
args = vars(parser.parse_args())
group = args['group']
name = args['name']
class Classifier():
    """Plots for classifier results, keyed by the module-level group/name."""
    @staticmethod
    def validation_accuracy_across_patchsize():
        """Plot validation accuracy as a function of patch size and save it
        as both EPS and PNG under plotting/<group>/."""
        import matplotlib as mpl
        import seaborn as sns
        sns.set_style("dark")
        validation_accuracies = np.loadtxt(GTEx_directory + '/results/{group}/{name}.txt'.format(group=group, name=name))
        fig = plt.figure(figsize=(5,4))
        plt.plot(validation_accuracies)
        plt.ylabel('Validation accuracy', size=15)
        # Tick positions 0..5 correspond to the six patch sizes evaluated.
        plt.xticks([0, 1, 2, 3, 4, 5], ['128', '256', '512', '1024', '2056', '4096'])
        plt.xlabel('Patch size', size=15)
        label_size = 100
        # NOTE(review): rcParams are set after the plot is drawn, so they
        # likely have no effect on this figure — confirm intent.
        mpl.rcParams['xtick.labelsize'] = label_size
        mpl.rcParams['ytick.labelsize'] = label_size
        os.makedirs(GTEx_directory + '/plotting/{}'.format(group), exist_ok=True)
        plt.savefig(GTEx_directory + '/plotting/{group}/{name}.eps'.format(group=group, name=name), format='eps', dpi=100)
        plt.savefig(GTEx_directory + '/plotting/{group}/{name}.png'.format(group=group, name=name), format='png', dpi=100)
        plt.show()
class InflationPvalues():
    """QQ-plots of association p-values (raw vs. various corrections),
    saved as EPS and PNG under plotting/<group>/."""
    @staticmethod
    def raw_pvalues():
        """QQ-plot the real p-values alongside three permuted sets."""
        import seaborn as sns
        sns.set_style("dark")
        # NOTE(review): file handle from open() is left to the GC; a
        # with-block would be cleaner.
        results = pickle.load(open(GTEx_directory + '/results/{group}/{name}.pickle'.format(group=group, name=name), 'rb'))
        Rs_real, pvs_real, pvs_1, pvs_2, pvs_3 = results
        from limix.plot import qqplot
        qqplot(pvs_real.flatten())
        qqplot(pvs_1.flatten())
        qqplot(pvs_2.flatten())
        qqplot(pvs_3.flatten())
        os.makedirs(GTEx_directory + '/plotting/{}'.format(group), exist_ok=True)
        plt.savefig(GTEx_directory + '/plotting/{group}/{name}.eps'.format(group=group, name=name), format='eps', dpi=100)
        plt.savefig(GTEx_directory + '/plotting/{group}/{name}.png'.format(group=group, name=name), format='png', dpi=100)
        plt.show()
    @staticmethod
    def raw_vs_corrected_pvalues():
        """QQ-plot raw vs. PC-corrected p-values, annotating each curve with
        its estimated inflation factor lambda."""
        print("Loading pvalues")
        raw_results = pickle.load(open(GTEx_directory + '/results/InflationPvalues/raw_pvalues.pickle', 'rb'))
        corrected_results = pickle.load(open(GTEx_directory + '/results/InflationPvalues/pc_corrected_pvalues.pickle', 'rb'))
        import seaborn as sns
        sns.set_style("dark")
        from limix.plot import qqplot
        _, pvs_real_raw, _, _, _ = raw_results
        # Corrected results are a list; entry 0 holds the p-value tuple.
        _, pvs_real_corrected, _, _, _ = corrected_results[0]
        print('Estimating lambda for raw pvalues')
        raw_lamb = estimate_lambda(pvs_real_raw.flatten())
        print('Estimating lambda for corrected pvalues')
        corrected_lamb = estimate_lambda(pvs_real_corrected.flatten())
        print('Plotting raw pvalues')
        qqplot(pvs_real_raw.flatten(), label='raw $\lambda={:0.2f}$'.format(raw_lamb))
        print('Plotting corrected pvalues')
        qqplot(pvs_real_corrected.flatten(), label='corrected $\lambda={:0.2f}$'.format(corrected_lamb))
        plt.legend(prop={'size':15})
        os.makedirs(GTEx_directory + '/plotting/{}'.format(group), exist_ok=True)
        plt.savefig(GTEx_directory + '/plotting/{group}/{name}.eps'.format(group=group, name=name), format='eps', dpi=100)
        plt.savefig(GTEx_directory + '/plotting/{group}/{name}.png'.format(group=group, name=name), format='png', dpi=100)
        plt.show()
    @staticmethod
    def tf_corrected_pvalues():
        """QQ-plot raw vs. transcription-factor-corrected p-values with
        estimated lambdas."""
        print("Loading pvalues")
        raw_results = pickle.load(open(GTEx_directory + '/results/InflationPvalues/raw_pvalues.pickle', 'rb'))
        tf_corrected_results = pickle.load(open(GTEx_directory + '/results/InflationPvalues/tf_corrected_pvalues.pickle', 'rb'))
        import seaborn as sns
        sns.set_style("dark")
        from limix.plot import qqplot
        _, pvs_real_raw, _, _, _ = raw_results
        _, pvs_real_tf_corrected, _, _, _ = tf_corrected_results
        print('Estimating lambda for raw pvalues')
        raw_lamb = estimate_lambda(pvs_real_raw.flatten())
        print('Estimating lambda for TF corrected pvalues')
        tf_corrected_lamb = estimate_lambda(pvs_real_tf_corrected.flatten())
        print('Plotting raw pvalues')
        qqplot(pvs_real_raw.flatten(), label='raw $\lambda={:0.2f}$'.format(raw_lamb))
        print('Plotting TF corrected pvalues')
        qqplot(pvs_real_tf_corrected.flatten(), label='corrected $\lambda={:0.2f}$'.format(tf_corrected_lamb))
        plt.legend(prop={'size':15})
        os.makedirs(GTEx_directory + '/plotting/{}'.format(group), exist_ok=True)
        plt.savefig(GTEx_directory + '/plotting/{group}/{name}.eps'.format(group=group, name=name), format='eps', dpi=100)
        plt.savefig(GTEx_directory + '/plotting/{group}/{name}.png'.format(group=group, name=name), format='png', dpi=100)
        plt.show()
if __name__ == '__main__':
    # Resolve the plotting class ("group") and static method ("name") by
    # lookup instead of eval(): identical behavior for valid arguments, but
    # arbitrary code passed on the command line is no longer executed.
    getattr(globals()[group](), name)()
| StarcoderdataPython |
11399902 | <filename>video_gray.py<gh_stars>1-10
import argparse
from cv2 import *
import numpy as np
# Parse the input video path; the default 0 selects the first webcam.
arg = argparse.ArgumentParser()
arg.add_argument("--path", help="INPUT VIDEO PATH ", default=0)
args = vars(arg.parse_args())

# NOTE(review): `cv2.VideoCapture` requires `import cv2`; only a star import
# from cv2 is visible here — confirm the module name is in scope.
cap = cv2.VideoCapture(args['path'])
while True:
    check, frame = cap.read()
    # Bug fix: stop when the stream ends or the camera fails. Previously a
    # None frame reached cvtColor and crashed with an opaque OpenCV error.
    if not check:
        break
    # Display the frame in several color spaces side by side.
    gray = cvtColor(frame, COLOR_RGB2GRAY)
    img_brg = cvtColor(frame, COLOR_RGB2BGR)
    img_hsv = cvtColor(frame, COLOR_BGR2HSV)
    imshow("displaying GRAY IMAGE -->", gray)
    imshow("displaying IMAGE -->", frame)
    imshow("displaying BRG IMAGE -->", img_brg)
    imshow("displaying HSV IMAGE -->", img_hsv)
    # ~10 fps refresh; press 'q' to quit.
    if waitKey(100) & 0xFF == ord('q'):
        break
cap.release()
destroyAllWindows()
| StarcoderdataPython |
11397163 | <gh_stars>1-10
import abjad
from abjad.tools import abctools
from abjad.tools import scoretools
from abjad.tools import systemtools
class StopTrillSpan(abctools.AbjadValueObject):
__slots__ = ()
def _get_lilypond_format_bundle(self, component):
import consort
parentage = abjad.inspect(component).get_parentage()
prototype = scoretools.GraceContainer
grace_container = None
for parent in parentage:
if isinstance(parent, prototype):
grace_container = parent
break
if grace_container is None:
return
prototype = consort.ConsortTrillSpanner
carrier = grace_container._carrier
spanners = abjad.inspect(carrier).get_spanners(prototype)
if not spanners:
return
bundle = systemtools.LilyPondFormatBundle()
bundle.right.spanner_stops.append(r'\stopTrillSpan')
return bundle
| StarcoderdataPython |
12811860 | import json
import torch
import numpy as np
def convert_sentence_to_adjacency_matrix(sentence):
    """Build the gold-standard head->dependent adjacency matrix.

    Entry [head][dependent] is 1 for every dependency arc in the sentence;
    the root arc (head == -1) is skipped.
    """
    words = sentence['words']
    n = len(words)
    matrix = np.zeros((n, n))
    for word in words:
        head = int(word['head'])
        if head == -1:
            # Root token has no in-sentence head.
            continue
        matrix[head][int(word['id'])] = 1
    return matrix
def adjacency_matrix_to_tensor(matrix):
    """Convert an adjacency matrix into a head-index LongTensor.

    Position j holds the head index i where matrix[i][j] == 1; positions
    with no incoming arc (e.g. the root) default to 0.
    """
    heads = [0] * matrix.shape[0]
    # argwhere yields (row, col) pairs in row-major order, matching the
    # original nested-loop traversal.
    for head, dependent in np.argwhere(matrix == 1):
        heads[int(dependent)] = int(head)
    return torch.LongTensor(heads)
def get_labels():
    """Read "name: ..." lines and write a bidirectional label index as JSON.

    Lines that do not split into exactly two colon-separated parts are
    skipped. The resulting dict maps both index -> name and name -> index
    and is written alongside the source file as '../data/labels.json'.
    """
    path = '../data/labels'
    labels = {}
    index = 1
    with open(path) as f:
        for line in f:
            parts = line.split(':')
            if len(parts) != 2:
                continue
            label_name = parts[0]
            labels[index] = label_name
            labels[label_name] = index
            index += 1
    with open(path + '.json', 'w+') as f:
        f.write(json.dumps(labels, indent=4))
| StarcoderdataPython |
3267941 | <gh_stars>1-10
import unittest
from tests import initialize_screenshot
from tft import game, board, utils, tracker, parser, debugger
# Fixture metadata file and hard-coded sample screenshots used by the tests
# below (paths are specific to the original author's machine).
TestFile = "parser_test_data.json"
Test1080PDefault = "/Users/henry/Downloads/TFT Screenshots/board_1080_1.png"
Test1440PDefault = "/Users/henry/Downloads/TFT Screenshots/board_1440_5.png"
class TestParser(unittest.TestCase):
    """Screenshot-driven tests for the TFT screen parser at 1080p/1440p.

    All fixtures are local screenshots; several tests only print results for
    manual inspection rather than asserting exact values.
    """
    def setUp(self):
        # Fresh unit-name lookup table and debugger per test.
        self.unit_lookup = tracker.initialize_unit_lookup_table()
        self.debug = debugger.Debugger()
    def test_players(self):
        gameWindow, gameBoard = initialize_screenshot("/Users/henry/Downloads/TFT Screenshots/players_1080_2.png")
        players = game.retrieve_player_list(gameWindow, gameBoard)
        # A TFT lobby always has 8 players, each with a non-empty name.
        self.assertEqual(len(players), 8)
        for player in players:
            self.assertNotEqual("", player)
    def test_healthbars(self):
        # Circle detection first, then crop/parse the bars it found.
        gameWindow, gameBoard = initialize_screenshot(Test1440PDefault)
        img = gameWindow.captureWindow()
        cropped_circles = board.crop_healthbar_circles(img, gameBoard)
        result = parser.parse_healthbar_circles(cropped_circles, self.debug)
        values = board.crop_healthbars(img, gameBoard, result)
        print(parser.parse_healthbars(values, debug=self.debug))
    def test_healthbars_legacy(self):
        # Legacy path scans in both directions (0: top-down, 1: bottom-up).
        gameWindow, gameBoard = initialize_screenshot(Test1080PDefault)
        img = gameWindow.captureWindow()
        top_to_bottom = board.crop_healthbars_legacy(img, gameBoard, 0)
        bottom_to_top = board.crop_healthbars_legacy(img, gameBoard, 1)
        healthbars = parser.parse_healthbars_legacy(top_to_bottom, bottom_to_top)
        print(healthbars)
    def test_level(self):
        gameWindow, gameBoard = initialize_screenshot(Test1080PDefault)
        img = gameWindow.captureWindow()
        level = parser.parse_level(board.crop_level(img, gameBoard))
        self.assertEqual(level, 6)
    def test_stage(self):
        gameWindow, gameBoard = initialize_screenshot(Test1080PDefault)
        img = gameWindow.captureWindow()
        stage = parser.parse_stage(board.crop_stage(img, gameBoard))
        self.assertEqual(stage, "4-5")
    def test_stage_early(self):
        # Early-game stages render in a different screen location.
        gameWindow, gameBoard = initialize_screenshot("/Users/henry/Downloads/TFT Screenshots/board_1080_1.png")
        img = gameWindow.captureWindow()
        stage = parser.parse_stage(board.crop_stage_early(img, gameBoard))
        self.assertEqual(stage, "1-3")
    def test_shop(self):
        self.debug.validation_mode()
        self.debug.enable_parse_shop()
        gameWindow, gameBoard = initialize_screenshot(Test1440PDefault)
        img = gameWindow.captureWindow()
        shop = parser.parse_shop(board.crop_shop(img, gameBoard), self.debug)
        print(shop)
        self.debug.show()
    def test_gold(self):
        gameWindow, gameBoard = initialize_screenshot(Test1080PDefault)
        img = gameWindow.captureWindow()
        gold = parser.parse_gold(board.crop_gold(img, gameBoard))
        self.assertEqual(gold, 50)
    def test_timer(self):
        gameWindow, gameBoard = initialize_screenshot(Test1440PDefault)
        img = gameWindow.captureWindow()
        timer = parser.parse_timer(board.crop_timer_early(img, gameBoard))
        print(timer)
    def test_parser_complete_1080p(self):
        # Run every 1080p fixture defined in parser_test_data.json.
        initialize_complete_test(self, "players", "1080")
        initialize_complete_test(self, "board", "1080")
    def test_parser_complete_1440p(self):
        self.debug.validation_mode()
        self.debug.enable_parse_timer()
        initialize_complete_test(self, "players", "1440")
        initialize_complete_test(self, "board", "1440")
def initialize_complete_test(testcase, type, resolution):
    """Run every recorded parser scenario of *type* at *resolution*."""
    scenarios = utils.open_json_file("tests/parser_test_data.json")[type][resolution]
    for scenario in scenarios:
        screenshot = "/Users/henry/Downloads/TFT Screenshots/{}".format(scenario["file_name"])
        print("Testing Screenshot: {}".format(screenshot))
        window, layout = initialize_screenshot(screenshot)
        capture = window.captureWindow()
        run_complete_parser_test(testcase, capture, scenario, layout)
def run_complete_parser_test(testcase, img, data, gameBoard):
    """Assert each field present in *data* against what the parsers extract.

    :param testcase: the unittest.TestCase running (supplies assert* and debug)
    :param img: captured window image for one screenshot
    :param data: expected-values dict loaded from the JSON test-data file
    :param gameBoard: board layout used to crop regions from ``img``
    """
    if "shop" in data:
        shop = parser.parse_shop(board.crop_shop(img, gameBoard), testcase.debug)
        # Normalize OCR output to canonical unit names before comparing.
        shop = [utils.find_matching_string_in_list(unit, testcase.unit_lookup) for unit in shop]
        print("Asserting shop: {}".format(shop))
        testcase.assertEqual(data["shop"], shop)
    if "level" in data:
        level = parser.parse_level(board.crop_level(img, gameBoard), testcase.debug)
        print("Asserting level: {}".format(level))
        testcase.assertEqual(data["level"], level)
    if "stage" in data:
        stage = parser.parse_stage(board.crop_stage(img, gameBoard), testcase.debug)
        # Fall back to the early-game crop when the normal crop fails.
        if not utils.assert_stage_string_format(stage):
            stage = parser.parse_stage(board.crop_stage_early(img, gameBoard), testcase.debug)
        print("Asserting stage: {}".format(stage))
        testcase.assertEqual(data["stage"], stage)
    if "timer" in data:
        timer = parser.parse_timer(board.crop_timer(img, gameBoard), testcase.debug)
        # -1 signals a failed parse here; retry with the early-game crop.
        if timer == - 1:
            timer = parser.parse_timer(board.crop_timer_early(img, gameBoard), testcase.debug)
        print("Asserting timer: {}".format(timer))
        testcase.debug.show()
        testcase.assertEqual(data["timer"], timer)
    if "gold" in data:
        gold = parser.parse_gold(board.crop_gold(img, gameBoard), testcase.debug)
        print("Asserting gold: {}".format(gold))
        testcase.assertEqual(data["gold"], gold)
    if "players" in data:
        if isinstance(data["players"], list):
            players = parser.parse_players(board.crop_players(img, gameBoard), testcase.debug)
            print("Asserting players: {}".format(players))
            for player in players:
                if player == "You":
                    continue
                res = utils.find_matching_string_in_list(player, data["players"])
                testcase.assertIsNot(res, "", "Unable to find match for {}".format(player))
        elif isinstance(data["players"], dict):
            cropped_circles = board.crop_healthbar_circles(img, gameBoard)
            result = parser.parse_healthbar_circles(cropped_circles, testcase.debug)
            cropped_healthbars = board.crop_healthbars(img, gameBoard, result)
            healthbars = parser.parse_healthbars(cropped_healthbars, testcase.debug)
            print("Asserting healthbars: {}".format(healthbars))
            player_names = data["players"].keys()
            for healthbar in healthbars:
                res = utils.find_matching_string_in_list(healthbar[0], player_names)
                health = healthbar[1]
                # NOTE(review): when res == "" and the name is NOT a digit,
                # data["players"][res] below raises KeyError — confirm whether
                # an unmatched non-numeric healthbar can occur.
                if res == "" and healthbar[0].isdigit():  # Own HP Testcase
                    health = utils.convert_string_to_integer(healthbar[0])
                testcase.assertEqual(health, data["players"][res])
# Allow running this test module directly with the standard unittest runner.
if __name__ == '__main__':
    unittest.main()
| StarcoderdataPython |
4991298 | # -*- coding: utf-8 -*-
import ccxt
import time
import json
import argparse
class Argv (object):
    """Bare namespace used as the target object for argparse results."""
    pass
# Parse command-line arguments into the Argv namespace:
# optional --nonce, plus optional positional exchange id and symbol.
argv = Argv()
parser = argparse.ArgumentParser()
parser.add_argument('--nonce', type=int, help='integer')
parser.add_argument('exchange', type=str, help='exchange id in lowercase', nargs='?')
parser.add_argument('symbol', type=str, help='symbol in uppercase', nargs='?')
parser.parse_args(namespace=argv)
# Registry of instantiated ccxt exchanges, filled in at the bottom of the file.
exchanges = {}
# ------------------------------------------------------------------------------
# string coloring functions
# Coloring is currently disabled: every helper falls through to style(),
# which just stringifies its argument. The second parameter used to be named
# `style`, shadowing the function itself — that would break the commented-out
# colored implementation, so it is renamed to `code`.
def style(s, code): return str(s)  # code + str(s) + '\033[0m'
def green(s): return style(s, '\033[92m')
def blue(s): return style(s, '\033[94m')
def yellow(s): return style(s, '\033[93m')
def red(s): return style(s, '\033[91m')
def pink(s): return style(s, '\033[95m')
def bold(s): return style(s, '\033[1m')
def underline(s): return style(s, '\033[4m')
# print a colored string (a no-op colour-wise while style() ignores its code)
def dump(*args):
    print(' '.join([str(arg) for arg in args]))
# ------------------------------------------------------------------------------
def test_exchange_symbol_orderbook(exchange, symbol):
    """Fetch the order book for *symbol* and print its best bid/ask."""
    time.sleep(int(exchange.rateLimit / 1000))  # honour the exchange rate limit
    dump(green(exchange.id), green(symbol), 'fetching order book...')
    orderbook = exchange.fetch_order_book(symbol)
    bids = orderbook['bids']
    asks = orderbook['asks']
    dump(
        green(exchange.id),
        green(symbol),
        'order book',
        orderbook['datetime'],
        'bid: ' + str(bids[0][0] if len(bids) else 'N/A'),
        'bidVolume: ' + str(bids[0][1] if len(bids) else 'N/A'),
        'ask: ' + str(asks[0][0] if len(asks) else 'N/A'),
        'askVolume: ' + str(asks[0][1] if len(asks) else 'N/A'))
# ------------------------------------------------------------------------------
def test_exchange_symbol_ohlcv(exchange, symbol):
    """Fetch OHLCV candles for *symbol* when the exchange supports them."""
    time.sleep(int(exchange.rateLimit / 1000))  # honour the exchange rate limit
    if not exchange.hasFetchOHLCV:
        dump(yellow(exchange.id), 'fetching OHLCV not supported')
        return
    ohlcvs = exchange.fetch_ohlcv(symbol)
    dump(green(exchange.id), 'fetched', green(len(ohlcvs)), 'OHLCVs')
# ------------------------------------------------------------------------------
def test_exchange_all_tickers(exchange):
    """Fetch every ticker in a single call when the exchange supports it."""
    time.sleep(int(exchange.rateLimit / 1000))  # honour the exchange rate limit
    dump(green(exchange.id), 'fetching all tickers at once...')
    if not exchange.hasFetchTickers:
        dump(yellow(exchange.id), 'fetching all tickers at once not supported')
        return
    tickers = exchange.fetch_tickers()
    dump(green(exchange.id), 'fetched', green(len(list(tickers.keys()))), 'tickers')
# ------------------------------------------------------------------------------
def test_exchange_symbol_ticker(exchange, symbol):
    """Fetch the ticker for *symbol* and print its key fields."""
    time.sleep(int(exchange.rateLimit / 1000))  # honour the exchange rate limit
    dump(green(exchange.id), green(symbol), 'fetching ticker...')
    ticker = exchange.fetch_ticker(symbol)
    details = [field + ': ' + str(ticker[field]) for field in ('high', 'low', 'bid', 'ask')]
    dump(
        green(exchange.id),
        green(symbol),
        'ticker',
        ticker['datetime'],
        *details,
        'volume: ' + str(ticker['quoteVolume']))
# ------------------------------------------------------------------------------
def test_exchange_symbol_trades(exchange, symbol):
    """Fetch recent public trades for *symbol*, tolerating unsupported exchanges."""
    delay = int(exchange.rateLimit / 1000)  # honour the exchange rate limit
    time.sleep(delay)
    dump(green(exchange.id), green(symbol), 'fetching trades...')
    try:
        trades = exchange.fetch_trades(symbol)
        dump(green(exchange.id), green(symbol), 'fetched', green(len(list(trades))), 'trades')
    except ccxt.NotSupported as e:
        # Bug fix: ccxt.NotSupported subclasses ccxt.ExchangeError, so it must
        # be caught first — in the original order this handler was unreachable.
        dump(yellow(type(e).__name__), e.args)
    except ccxt.ExchangeError as e:
        dump(yellow(type(e).__name__), e.args)
# ------------------------------------------------------------------------------
def test_exchange_symbol(exchange, symbol):
    """Exercise the public market-data API of *exchange* for one *symbol*."""
    dump(green('SYMBOL: ' + symbol))
    test_exchange_symbol_ticker(exchange, symbol)
    if exchange.id != 'coinmarketcap':
        test_exchange_symbol_orderbook(exchange, symbol)
        test_exchange_symbol_trades(exchange, symbol)
    else:
        # coinmarketcap has no order book / trades; show global stats instead.
        dump(green(exchange.fetchGlobal()))
    test_exchange_all_tickers(exchange)
    test_exchange_symbol_ohlcv(exchange, symbol)
test_exchange_symbol_ohlcv(exchange, symbol)
# ------------------------------------------------------------------------------
def load_exchange(exchange):
    # Fetch the exchange's market list (required before any symbol lookup).
    exchange.load_markets()
def test_exchange(exchange):
    """Run the public-API smoke tests on *exchange*, then — if API keys are
    configured — fetch the account balance. The commented-out order placement
    below is deliberately disabled."""
    dump(green('EXCHANGE: ' + exchange.id))
    # delay = 2
    keys = list(exchange.markets.keys())
    # ..........................................................................
    # public API
    # Prefer a well-known symbol when the exchange lists one; otherwise fall
    # back to the first market key.
    symbol = keys[0]
    symbols = [
        'BTC/USD',
        'BTC/CNY',
        'BTC/EUR',
        'BTC/ETH',
        'ETH/BTC',
        'BTC/JPY',
        'LTC/BTC',
        'USD/SLL',
    ]
    for s in symbols:
        if s in keys:
            symbol = s
            break
    # Skip darkpool markets (suffixed '.d').
    if symbol.find('.d') < 0:
        test_exchange_symbol(exchange, symbol)
    # ..........................................................................
    # private API
    # Bail out unless an API key has been configured for this exchange.
    if (not hasattr(exchange, 'apiKey') or (len(exchange.apiKey) < 1)):
        return
    dump(green(exchange.id), 'fetching balance...')
    balance = exchange.fetch_balance()
    dump(green(exchange.id), 'balance', balance)
    # time.sleep(delay)
    # amount = 1
    # price = 0.0161
    # marketBuy = exchange.create_market_buy_order(symbol, amount)
    # print(marketBuy)
    # time.sleep(delay)
    # marketSell = exchange.create_market_sell_order(symbol, amount)
    # print(marketSell)
    # time.sleep(delay)
    # limitBuy = exchange.create_limit_buy_order(symbol, amount, price)
    # print(limitBuy)
    # time.sleep(delay)
    # limitSell = exchange.create_limit_sell_order(symbol, amount, price)
    # print(limitSell)
    # time.sleep(delay)
# ------------------------------------------------------------------------------
def try_all_proxies(exchange, proxies):
    """Load and test *exchange*, rotating through *proxies* until one works."""
    # a special case for ccex: start on the first CORS proxy, not direct
    proxy_index = 1 if exchange.id == 'ccex' else 0
    for _ in range(len(proxies)):
        try:
            exchange.proxy = proxies[proxy_index]
            proxy_index = (proxy_index + 1) % len(proxies)
            load_exchange(exchange)
            test_exchange(exchange)
            break
        except ccxt.RequestTimeout as e:
            dump(yellow(type(e).__name__), str(e))
        except ccxt.NotSupported as e:
            dump(yellow(type(e).__name__), e.args)
        except ccxt.DDoSProtection as e:
            dump(yellow(type(e).__name__), e.args)
        except ccxt.ExchangeNotAvailable as e:
            dump(yellow(type(e).__name__), e.args)
        except ccxt.AuthenticationError as e:
            dump(yellow(type(e).__name__), str(e))
        except ccxt.ExchangeError as e:
            dump(yellow(type(e).__name__), e.args)
# ------------------------------------------------------------------------------
# CORS proxies rotated through by try_all_proxies on network failures.
proxies = [
    '',
    'https://cors-anywhere.herokuapp.com/',
    'https://crossorigin.me/',
    # 'http://cors-proxy.htmldriven.com/?url=', # we don't want this for now
]
# load the api keys from config
# NOTE(review): `file` here shadows the Python 2 builtin of the same name.
with open('./keys.json') as file:
    config = json.load(file)
# instantiate all exchanges
# NOTE(review): the loop variable `id` shadows the builtin id().
for id in ccxt.exchanges:
    exchange = getattr(ccxt, id)
    exchanges[id] = exchange({'verbose': False})
# set up api keys appropriately
tuples = list(ccxt.Exchange.keysort(config).items())
for (id, params) in tuples:
    # NOTE(review): `options` is computed but never used.
    options = list(params.items())
    for key in params:
        setattr(exchanges[id], key, params[key])
# move gdax to sandbox
exchanges['gdax'].urls['api'] = 'https://api-public.sandbox.gdax.com'
# ------------------------------------------------------------------------------
# Dispatch: a specific exchange+symbol, a single exchange, or everything.
if argv.exchange:
    exchange = exchanges[argv.exchange]
    symbol = argv.symbol
    if symbol:
        load_exchange(exchange)
        test_exchange_symbol(exchange, symbol)
    else:
        try_all_proxies(exchange, proxies)
else:
    tuples = list(ccxt.Exchange.keysort(exchanges).items())
    for (id, params) in tuples:
        exchange = exchanges[id]
        try_all_proxies(exchange, proxies)
3255006 | <gh_stars>1-10
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 17-9-22 下午1:39
# @Author : <NAME>
# @Site : http://github.com/TJCVRS
# @File : data_provider.py
# @IDE: PyCharm Community Edition
"""
Provide the training and testing data for shadow net
"""
import os.path as ops
import numpy as np
import copy
import cv2
import os
try:
from cv2 import cv2
except ImportError:
pass
import sys
sys.path.append(os.getcwd()+'/model/CRNN/data_provider')
import base_data_provider
#from data_provider import base_data_provider
class TextDataset(base_data_provider.Dataset):
    """
    Implement a dataset class providing the image and it's corresponding text
    """
    def __init__(self, images, labels, imagenames, shuffle=None, normalization=None):
        """
        :param images: image datasets [nums, H, W, C] 4D ndarray
        :param labels: label dataset [nums, :] 2D ndarray
        :param imagenames: per-image file names, aligned with images/labels
        :param shuffle: None (keep order), 'once_prior_train' (shuffle once
                        before training) or 'every_epoch' (reshuffle per epoch)
        :param normalization: None (no normalization), 'divide_255' or
                              'divide_256' (divide all pixels accordingly)
        """
        super(TextDataset, self).__init__()
        self.__normalization = normalization
        if self.__normalization not in [None, 'divide_255', 'divide_256']:
            raise ValueError('normalization parameter wrong')
        self.__images = self.normalize_images(images, self.__normalization)
        self.__labels = labels
        self.__imagenames = imagenames
        self._epoch_images = copy.deepcopy(self.__images)
        self._epoch_labels = copy.deepcopy(self.__labels)
        self._epoch_imagenames = copy.deepcopy(self.__imagenames)
        self.__shuffle = shuffle
        if self.__shuffle not in [None, 'once_prior_train', 'every_epoch']:
            raise ValueError('shuffle parameter wrong')
        # Bug fix: the original test was
        #     if self.__shuffle == 'every_epoch' or 'once_prior_train':
        # which is always true ('once_prior_train' is a truthy literal), so
        # the data was shuffled even when shuffle=None. Use membership.
        if self.__shuffle in ('once_prior_train', 'every_epoch'):
            self._epoch_images, self._epoch_labels, self._epoch_imagenames = self.shuffle_images_labels(
                self._epoch_images, self._epoch_labels, self._epoch_imagenames)
        self.__batch_counter = 0
        return
    @property
    def num_examples(self):
        """Total number of samples in the dataset."""
        assert self.__images.shape[0] == self.__labels.shape[0]
        return self.__labels.shape[0]
    @property
    def images(self):
        """Images of the current epoch (possibly shuffled)."""
        return self._epoch_images
    @property
    def labels(self):
        """Labels of the current epoch, aligned with ``images``."""
        return self._epoch_labels
    @property
    def imagenames(self):
        """Image file names of the current epoch, aligned with ``images``."""
        return self._epoch_imagenames
    def next_batch(self, batch_size):
        """Return the next (images, labels, imagenames) slice of ``batch_size``.

        When fewer than ``batch_size`` samples remain, the epoch restarts
        (reshuffling if configured) and the batch is taken from the new epoch.
        """
        start = self.__batch_counter * batch_size
        end = (self.__batch_counter + 1) * batch_size
        self.__batch_counter += 1
        images_slice = self._epoch_images[start:end]
        labels_slice = self._epoch_labels[start:end]
        imagenames_slice = self._epoch_imagenames[start:end]
        # if overflow restart from the beginning
        if images_slice.shape[0] != batch_size:
            self.__start_new_epoch()
            return self.next_batch(batch_size)
        else:
            return images_slice, labels_slice, imagenames_slice
    def __start_new_epoch(self):
        """Reset the batch counter; reshuffle when in 'every_epoch' mode."""
        self.__batch_counter = 0
        if self.__shuffle == 'every_epoch':
            self._epoch_images, self._epoch_labels, self._epoch_imagenames = self.shuffle_images_labels(
                self._epoch_images, self._epoch_labels, self._epoch_imagenames)
        return
class TextDataProvider(object):
    """
    Implement the text data provider for training and testing the shadow net
    """
    def __init__(self, dataset_dir, annotation_name, validation_set=None, validation_split=None, shuffle=None,
                 normalization=None):
        """
        :param dataset_dir: str, dataset root containing 'Train' and 'Test'
        :param annotation_name: annotation file name inside each split dir
        :param validation_set: if truthy and no split is given, the validation
                               set becomes an alias of the test set
        :param validation_split: `float` or None. float: that fraction of the
                               train set is split off as the validation set.
        :param shuffle: None, 'once_prior_train' or 'every_epoch'
                        (see :class:`TextDataset`)
        :param normalization: None, 'divide_255' or 'divide_256'
                        (see :class:`TextDataset`)
        """
        self.__dataset_dir = dataset_dir
        self.__validation_split = validation_split
        self.__shuffle = shuffle
        self.__normalization = normalization
        self.__train_dataset_dir = ops.join(self.__dataset_dir, 'Train')
        self.__test_dataset_dir = ops.join(self.__dataset_dir, 'Test')
        assert ops.exists(dataset_dir)
        assert ops.exists(self.__train_dataset_dir)
        assert ops.exists(self.__test_dataset_dir)
        # add test dataset
        # Cleanup: the original contained leftover debug prints ("here1"...)
        # and explicit anno_file.close() calls inside the `with` blocks; both
        # have been removed (the context manager already closes the file).
        test_anno_path = ops.join(self.__test_dataset_dir, annotation_name)
        assert ops.exists(test_anno_path)
        with open(test_anno_path, 'r') as anno_file:
            info = np.array([tmp.strip().split() for tmp in anno_file.readlines()])
            test_images = np.array([cv2.imread(ops.join(self.__test_dataset_dir, tmp), cv2.IMREAD_COLOR)
                                    for tmp in info[:, 0]])
            test_labels = np.array([tmp for tmp in info[:, 1]])
            test_imagenames = np.array([ops.basename(tmp) for tmp in info[:, 0]])
            self.test = TextDataset(test_images, test_labels, imagenames=test_imagenames,
                                    shuffle=shuffle, normalization=normalization)
        # add train and validation dataset
        train_anno_path = ops.join(self.__train_dataset_dir, annotation_name)
        assert ops.exists(train_anno_path)
        with open(train_anno_path, 'r') as anno_file:
            info = np.array([tmp.strip().split() for tmp in anno_file.readlines()])
            train_images = np.array([cv2.imread(ops.join(self.__train_dataset_dir, tmp), cv2.IMREAD_COLOR)
                                     for tmp in info[:, 0]])
            train_labels = np.array([tmp for tmp in info[:, 1]])
            train_imagenames = np.array([ops.basename(tmp) for tmp in info[:, 0]])
            if validation_set is not None and validation_split is not None:
                split_idx = int(train_images.shape[0] * (1 - validation_split))
                self.train = TextDataset(images=train_images[:split_idx], labels=train_labels[:split_idx],
                                         shuffle=shuffle, normalization=normalization,
                                         imagenames=train_imagenames[:split_idx])
                self.validation = TextDataset(images=train_images[split_idx:], labels=train_labels[split_idx:],
                                              shuffle=shuffle, normalization=normalization,
                                              imagenames=train_imagenames[split_idx:])
            else:
                self.train = TextDataset(images=train_images, labels=train_labels, shuffle=shuffle,
                                         normalization=normalization, imagenames=train_imagenames)
            # NOTE(review): when validation_set is falsy and no split is given,
            # self.validation is never assigned, so __str__ would raise
            # AttributeError — confirm whether that combination is ever used.
            if validation_set and not validation_split:
                self.validation = self.test
        return
    def __str__(self):
        provider_info = 'Dataset_dir: {:s} contain training images: {:d} validation images: {:d} testing images: {:d}'.\
            format(self.__dataset_dir, self.train.num_examples, self.validation.num_examples, self.test.num_examples)
        return provider_info
    @property
    def dataset_dir(self):
        """Root directory of the dataset."""
        return self.__dataset_dir
    @property
    def train_dataset_dir(self):
        """Directory holding the training split."""
        return self.__train_dataset_dir
    @property
    def test_dataset_dir(self):
        """Directory holding the testing split."""
        return self.__test_dataset_dir
| StarcoderdataPython |
29203 | <reponame>akshshar/bigmuddy-network-telemetry-proto
#!/usr/bin/env python
# Standard python libs
import os,sys
sys.path.append("./src/genpy")
import ast, pprint
import pdb
import yaml, json
import telemetry_pb2
from mdt_grpc_dialin import mdt_grpc_dialin_pb2
from mdt_grpc_dialin import mdt_grpc_dialin_pb2_grpc
import json_format
import grpc
#
# Get the GRPC Server IP address and port number
#
def get_server_ip_port():
    """Read the gRPC server address from the SERVER_IP / SERVER_PORT env vars.

    Returns a ``(ip_string, port_int)`` tuple. If either variable is missing,
    prints usage help and exits with a non-zero status.

    Fixes: the original called ``os._exit(0)`` on error, which both skipped
    interpreter cleanup and reported *success* to the shell; membership is
    tested on ``os.environ`` directly instead of ``.keys()``.
    """
    # Get GRPC Server's IP from the environment
    if 'SERVER_IP' not in os.environ:
        print("Need to set the SERVER_IP env variable e.g.")
        print("export SERVER_IP='10.30.110.214'")
        sys.exit(1)
    # Get GRPC Server's Port from the environment
    if 'SERVER_PORT' not in os.environ:
        print("Need to set the SERVER_PORT env variable e.g.")
        print("export SERVER_PORT='57777'")
        sys.exit(1)
    return (os.environ['SERVER_IP'], int(os.environ['SERVER_PORT']))
#
# Setup the GRPC channel with the server, and issue RPCs
#
if __name__ == '__main__':
    server_ip, server_port = get_server_ip_port()
    print "Using GRPC Server IP(%s) Port(%s)" %(server_ip, server_port)
    # Create the channel for gRPC.
    channel = grpc.insecure_channel(str(server_ip)+":"+str(server_port))
    # NOTE(review): unmarshal is always True here, so the raw-print branch
    # below is effectively dead code.
    unmarshal = True
    # Create the gRPC stub.
    stub = mdt_grpc_dialin_pb2_grpc.gRPCConfigOperStub(channel)
    metadata = [('username', 'vagrant'), ('password', '<PASSWORD>')]
    Timeout = 3600*24*365 # Seconds
    # Subscribe to the pre-configured 'BGP-SESSION' telemetry subscription,
    # requesting self-describing GPB/JSON (encode=3).
    sub_args = mdt_grpc_dialin_pb2.CreateSubsArgs(ReqId=99, encode=3, subidstr='BGP-SESSION')
    stream = stub.CreateSubs(sub_args, timeout=Timeout, metadata=metadata)
    for segment in stream:
        if not unmarshal:
            print segment
        else:
            # Go straight for telemetry data
            telemetry_pb = telemetry_pb2.Telemetry()
            encoding_path = 'Cisco-IOS-XR-ipv4-bgp-oper:bgp/instances/'+\
                'instance/instance-active/default-vrf/sessions/session'
            # Return in JSON format instead of protobuf.
            # Print the first matching BGP-session segment, then exit.
            if json.loads(segment.data)["encoding_path"] == encoding_path:
                print json.dumps(json.loads(segment.data), indent=3)
                os._exit(0)
| StarcoderdataPython |
11378307 | from celery import shared_task
from .models import TransferBatch, TransferFile
@shared_task(bind=True, time_limit=600, default_retry_delay=30, max_retries=3)
def file_pending_to_downloading(self, fileId):
    """Move a pending TransferFile to DOWNLOADING, download it, then chain on.

    On a provider mismatch (ValueError) the file is marked failed and the
    error re-raised; on any other exception the failure is recorded and the
    task retries (up to ``max_retries``).
    """
    transfer = TransferFile.objects.get(id=fileId)
    transfer.status = 'DL'
    transfer.save()
    print("File Pending and downloading now %d" % fileId)
    try:
        transfer.download()
        file_downloading_to_download_succeeded.delay(fileId)
    except ValueError:
        print("Provider not matched on download. %d" % fileId)
        file_downloading_to_download_failed.delay(fileId)
        raise
    except Exception:
        # Narrowed from a bare `except:`, which would also have swallowed
        # SystemExit / KeyboardInterrupt.
        print("Some fatal error occurred %d" % fileId)
        import traceback
        traceback.print_exc()
        file_downloading_to_download_failed.delay(fileId)
        # Task.retry() raises celery.exceptions.Retry, so nothing after it
        # executes; the original trailing `raise` was unreachable and removed.
        self.retry(countdown=10)
@shared_task
def file_downloading_to_download_succeeded(fileId):
    """Mark the file as downloaded ('DS') and kick off the upload stage."""
    transfer = TransferFile.objects.get(id=fileId)
    transfer.status = 'DS'
    transfer.save()
    print("file DL succeeded %d" % fileId)
    file_download_succeeded_to_uploading.delay(fileId)
@shared_task
def file_downloading_to_download_failed(fileId):
    """Record a failed download (terminal status 'DF')."""
    transfer = TransferFile.objects.get(id=fileId)
    transfer.status = 'DF'
    transfer.save()
    print("file DL failed %d" % fileId)
@shared_task(bind=True, time_limit=600, default_retry_delay=30, max_retries=3)
def file_download_succeeded_to_uploading(self, fileId):
    """Move a downloaded TransferFile to UPLOADING, upload it, then chain on.

    Mirrors the download task: ValueError marks the transfer failed and
    re-raises; any other exception records the failure and retries.
    """
    transfer = TransferFile.objects.get(id=fileId)
    transfer.status = 'UP'
    transfer.save()
    print("Currently uploading to %d" % fileId)
    try:
        transfer.upload()
        file_uploading_to_upload_succeeded.delay(fileId)
    except ValueError:
        print("Provider not matched on upload %d" % fileId)
        file_uploading_to_upload_failed.delay(fileId)
        raise
    except Exception:
        # Narrowed from a bare `except:`, which would also have swallowed
        # SystemExit / KeyboardInterrupt.
        print("Some fatal error occurred %d" % fileId)
        import traceback
        traceback.print_exc()
        file_uploading_to_upload_failed.delay(fileId)
        # Task.retry() raises celery.exceptions.Retry; the original trailing
        # `raise` after it was unreachable and has been removed.
        self.retry(countdown=10)
@shared_task
def file_uploading_to_upload_failed(fileId):
    """Record a failed upload (terminal status 'UF')."""
    transfer = TransferFile.objects.get(id=fileId)
    transfer.status = 'UF'
    transfer.save()
    print("file UP failed %d" % fileId)
@shared_task
def file_uploading_to_upload_succeeded(fileId):
    """Mark the upload done ('US'), clean local data, chain to completion."""
    transfer = TransferFile.objects.get(id=fileId)
    transfer.status = 'US'
    transfer.save()
    transfer.cleanup()
    print("file UP succeeded %d" % fileId)
    file_upload_succeeded_to_complete.delay(fileId)
@shared_task
def file_upload_succeeded_to_complete(fileId):
    """Final state transition: mark the transfer complete ('CP')."""
    transfer = TransferFile.objects.get(id=fileId)
    transfer.status = 'CP'
    transfer.save()
    print("file transfer complete!!! %d" % fileId)
    print("file was transfered from %s to %s" % (transfer.fromPath, transfer.toPath))
11276252 | <gh_stars>10-100
#!/usr/bin/python
import sys
class juniper(object):
    """Decoder for Juniper $9$ ("type 9") obfuscated passwords."""
    def __init__(self):
        # Character families; a character's family index determines how many
        # salt characters follow it, and the joined families form the alphabet
        # used for the modular "gap" arithmetic.
        self.FAMILY = ["QzF3n6/9CAtpu0O", "B1IREhcSyrleKvMW8LXx", "7N-dVbwsY2g4oaJZGUDj", "iHkq.mPf5T"]
        self.EXTRA = dict()
        for family_index, family in enumerate(self.FAMILY):
            for ch in family:
                self.EXTRA[ch] = 3 - family_index
        self.NUM_ALPHA = list("".join(self.FAMILY))
        self.ALPHA_NUM = {ch: pos for pos, ch in enumerate(self.NUM_ALPHA)}
        # Per-position weight vectors; the cycle length is 7 output bytes.
        self.ENCODING = [[1, 4, 32], [1, 16, 32], [1, 8, 32], [1, 64], [1, 32], [1, 4, 16, 128], [1, 32, 64]]
    def init(self):
        """Return the plugin configuration describing the supported hashes."""
        conf = {
            'name': 'juniper',
            'author': 'tautology, mangled from a script by <NAME>',
            'hashes': [
                {
                    'name': '9',
                    'decode': self.type9decode,
                }
            ]
        }
        return conf
    def _nibble(self, cref, length):
        """Split off the first *length* chars of *cref*; abort if too short."""
        nib = cref[:length]
        rest = cref[length:]
        if len(nib) != length:
            print("Ran out of characters: hit '%s', expecting %s chars" % (nib, length))
            sys.exit(1)
        return nib, rest
    def _gap(self, c1, c2):
        """Modular distance between two alphabet characters, minus one."""
        distance = self.ALPHA_NUM[str(c2)] - self.ALPHA_NUM[str(c1)]
        return distance % len(self.NUM_ALPHA) - 1
    def _gap_decode(self, gaps, dec):
        """Combine the gap values with their decode weights into one char."""
        if len(gaps) != len(dec):
            print("Nibble and decode size not the same!")
            sys.exit(1)
        total = sum(gap * weight for gap, weight in zip(gaps, dec))
        return chr(total % 256)
    def juniper_decrypt(self, crypt):
        """Decode the body of a $9$ string into the plaintext secret."""
        chars = crypt.split("$9$", 1)[1]
        first, chars = self._nibble(chars, 1)
        toss, chars = self._nibble(chars, self.EXTRA[first])
        prev = first
        decrypt = ""
        while chars:
            decode = self.ENCODING[len(decrypt) % len(self.ENCODING)]
            nibble, chars = self._nibble(chars, len(decode))
            gaps = []
            for ch in nibble:
                gaps.append(self._gap(prev, ch))
                prev = ch
            decrypt += self._gap_decode(gaps, decode)
        return decrypt
    def type9decode(self, data, option):
        """Validate the $9$ prefix and decode *data* (option is unused)."""
        if data[:3] != "$9$":
            print("Does not look like a type 9 hash")
            sys.exit(1)
        return self.juniper_decrypt(data)
| StarcoderdataPython |
8004902 | <gh_stars>1-10
# -*- coding: utf-8 -*-
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import six
import mock
from st2common.exceptions.param import ParamException
from st2common.models.system.common import ResourceReference
from st2common.models.db.liveaction import LiveActionDB
from st2common.models.db.keyvalue import KeyValuePairDB
from st2common.models.utils import action_param_utils
from st2common.persistence.keyvalue import KeyValuePair
from st2common.transport.publishers import PoolPublisher
from st2common.util import date as date_utils
from st2common.util import param as param_utils
from st2common.util.config_loader import get_config
from st2tests import DbTestCase
from st2tests.fixturesloader import FixturesLoader
# Fixture pack and models loaded once at import time, shared by all tests.
FIXTURES_PACK = 'generic'
TEST_MODELS = {
    'actions': ['action_4_action_context_param.yaml', 'action_system_default.yaml'],
    'runners': ['testrunner1.yaml']
}
FIXTURES = FixturesLoader().load_models(fixtures_pack=FIXTURES_PACK,
                                        fixtures_dict=TEST_MODELS)
# Publishing is mocked out so DB writes in the tests do not hit the queue.
@mock.patch.object(PoolPublisher, 'publish', mock.MagicMock())
class ParamsUtilsTest(DbTestCase):
    # Shared fixture models (loaded above) used by every test in this class.
    action_db = FIXTURES['actions']['action_4_action_context_param.yaml']
    action_system_default_db = FIXTURES['actions']['action_system_default.yaml']
    runnertype_db = FIXTURES['runners']['testrunner1.yaml']
    def test_get_finalized_params(self):
        """End-to-end resolution of runner and action params with overrides,
        immutables, defaults and action-context values."""
        params = {
            'actionstr': 'foo',
            'some_key_that_aint_exist_in_action_or_runner': 'bar',
            'runnerint': 555,
            'runnerimmutable': 'failed_override',
            'actionimmutable': 'failed_override'
        }
        liveaction_db = self._get_liveaction_model(params)
        runner_params, action_params = param_utils.get_finalized_params(
            ParamsUtilsTest.runnertype_db.runner_parameters,
            ParamsUtilsTest.action_db.parameters,
            liveaction_db.parameters,
            liveaction_db.context)
        # Asserts for runner params.
        # Assert that default values for runner params are resolved.
        self.assertEqual(runner_params.get('runnerstr'), 'defaultfoo')
        # Assert that a runner param from action exec is picked up.
        self.assertEqual(runner_params.get('runnerint'), 555)
        # Assert that a runner param can be overridden by action param default.
        self.assertEqual(runner_params.get('runnerdummy'), 'actiondummy')
        # Assert that a runner param default can be overridden by 'falsey' action param default,
        # (timeout: 0 case).
        self.assertEqual(runner_params.get('runnerdefaultint'), 0)
        # Assert that an immutable param cannot be overridden by action param or execution param.
        self.assertEqual(runner_params.get('runnerimmutable'), 'runnerimmutable')
        # Asserts for action params.
        self.assertEqual(action_params.get('actionstr'), 'foo')
        # Assert that a param that is provided in action exec that isn't in action or runner params
        # isn't in resolved params.
        self.assertEqual(action_params.get('some_key_that_aint_exist_in_action_or_runner'), None)
        # Assert that an immutable param cannot be overridden by execution param.
        self.assertEqual(action_params.get('actionimmutable'), 'actionimmutable')
        # Assert that an action context param is set correctly.
        self.assertEqual(action_params.get('action_api_user'), 'noob')
        # Assert that none of runner params are present in action_params.
        for k in action_params:
            self.assertTrue(k not in runner_params, 'Param ' + k + ' is a runner param.')
    def test_get_finalized_params_system_values(self):
        """Resolution when action defaults reference datastore items.

        NOTE(review): the KeyValuePair entries seeded here are presumably
        consumed via system-scoped defaults in action_system_default.yaml —
        confirm against that fixture.
        """
        KeyValuePair.add_or_update(KeyValuePairDB(name='actionstr', value='foo'))
        KeyValuePair.add_or_update(KeyValuePairDB(name='actionnumber', value='1.0'))
        params = {
            'runnerint': 555
        }
        liveaction_db = self._get_liveaction_model(params)
        runner_params, action_params = param_utils.get_finalized_params(
            ParamsUtilsTest.runnertype_db.runner_parameters,
            ParamsUtilsTest.action_system_default_db.parameters,
            liveaction_db.parameters,
            liveaction_db.context)
        # Asserts for runner params.
        # Assert that default values for runner params are resolved.
        self.assertEqual(runner_params.get('runnerstr'), 'defaultfoo')
        # Assert that a runner param from action exec is picked up.
        self.assertEqual(runner_params.get('runnerint'), 555)
        # Assert that an immutable param cannot be overridden by action param or execution param.
        self.assertEqual(runner_params.get('runnerimmutable'), 'runnerimmutable')
        # Asserts for action params.
        self.assertEqual(action_params.get('actionstr'), 'foo')
        self.assertEqual(action_params.get('actionnumber'), 1.0)
    def test_get_finalized_params_action_immutable(self):
        """An immutable action param cannot be overridden by execution params;
        the action context carries a None api_user here."""
        params = {
            'actionstr': 'foo',
            'some_key_that_aint_exist_in_action_or_runner': 'bar',
            'runnerint': 555,
            'actionimmutable': 'failed_override'
        }
        liveaction_db = self._get_liveaction_model(params)
        action_context = {'api_user': None}
        runner_params, action_params = param_utils.get_finalized_params(
            ParamsUtilsTest.runnertype_db.runner_parameters,
            ParamsUtilsTest.action_db.parameters,
            liveaction_db.parameters,
            action_context)
        # Asserts for runner params.
        # Assert that default values for runner params are resolved.
        self.assertEqual(runner_params.get('runnerstr'), 'defaultfoo')
        # Assert that a runner param from action exec is picked up.
        self.assertEqual(runner_params.get('runnerint'), 555)
        # Assert that a runner param can be overridden by action param default.
        self.assertEqual(runner_params.get('runnerdummy'), 'actiondummy')
        # Asserts for action params.
        self.assertEqual(action_params.get('actionstr'), 'foo')
        # Assert that a param that is provided in action exec that isn't in action or runner params
        # isn't in resolved params.
        self.assertEqual(action_params.get('some_key_that_aint_exist_in_action_or_runner'), None)
def test_get_finalized_params_empty(self):
params = {}
runner_param_info = {}
action_param_info = {}
action_context = {}
r_runner_params, r_action_params = param_utils.get_finalized_params(
runner_param_info, action_param_info, params, action_context)
self.assertEqual(r_runner_params, params)
self.assertEqual(r_action_params, params)
def test_get_finalized_params_none(self):
params = {
'r1': None,
'a1': None
}
runner_param_info = {'r1': {}}
action_param_info = {'a1': {}}
action_context = {'api_user': None}
r_runner_params, r_action_params = param_utils.get_finalized_params(
runner_param_info, action_param_info, params, action_context)
self.assertEqual(r_runner_params, {'r1': None})
self.assertEqual(r_action_params, {'a1': None})
    def test_get_finalized_params_no_cast(self):
        """Without declared types, Jinja-rendered values stay strings
        (r1 becomes u'1') while untemplated values keep their type."""
        params = {
            'r1': '{{r2}}',
            'r2': 1,
            'a1': True,
            'a2': '{{r1}} {{a1}}',
            'a3': '{{action_context.api_user}}'
        }
        runner_param_info = {'r1': {}, 'r2': {}}
        action_param_info = {'a1': {}, 'a2': {}, 'a3': {}}
        action_context = {'api_user': 'noob'}
        r_runner_params, r_action_params = param_utils.get_finalized_params(
            runner_param_info, action_param_info, params, action_context)
        self.assertEqual(r_runner_params, {'r1': u'1', 'r2': 1})
        self.assertEqual(r_action_params, {'a1': True, 'a2': u'1 True', 'a3': 'noob'})
def test_get_finalized_params_with_cast(self):
    """Declared schema types cause rendered string values to be cast."""
    # Note : In this test runner_params.r1 has a string value. However per runner_param_info the
    # type is an integer. The expected type is considered and cast is performed accordingly.
    params = {
        'r1': '{{r2}}',
        'r2': 1,
        'a1': True,
        'a2': '{{a1}}',
        'a3': '{{action_context.api_user}}'
    }
    runner_param_info = {'r1': {'type': 'integer'}, 'r2': {'type': 'integer'}}
    action_param_info = {'a1': {'type': 'boolean'}, 'a2': {'type': 'boolean'}, 'a3': {}}
    action_context = {'api_user': 'noob'}
    r_runner_params, r_action_params = param_utils.get_finalized_params(
        runner_param_info, action_param_info, params, action_context)
    self.assertEqual(r_runner_params, {'r1': 1, 'r2': 1})
    self.assertEqual(r_action_params, {'a1': True, 'a2': True, 'a3': 'noob'})
def test_get_finalized_params_with_cast_overriden(self):
    """An action-level schema for a runner parameter overrides the runner's cast type."""
    params = {
        'r1': '{{r2}}',
        'r2': 1,
        'a1': '{{r1}}',
        'a2': '{{r1}}',
        'a3': '{{r1}}'
    }
    runner_param_info = {'r1': {'type': 'integer'}, 'r2': {'type': 'integer'}}
    # r1 also appears here with type string, overriding the runner's integer type
    # for the action-parameter copy.
    action_param_info = {'a1': {'type': 'boolean'}, 'a2': {'type': 'string'},
                         'a3': {'type': 'integer'}, 'r1': {'type': 'string'}}
    action_context = {'api_user': 'noob'}
    r_runner_params, r_action_params = param_utils.get_finalized_params(
        runner_param_info, action_param_info, params, action_context)
    self.assertEqual(r_runner_params, {'r1': 1, 'r2': 1})
    self.assertEqual(r_action_params, {'a1': 1, 'a2': u'1', 'a3': 1})
def test_get_finalized_params_cross_talk_no_cast(self):
    """Runner params may reference action params (and vice versa) when rendering."""
    params = {
        'r1': '{{a1}}',
        'r2': 1,
        'a1': True,
        'a2': '{{r1}} {{a1}}',
        'a3': '{{action_context.api_user}}'
    }
    runner_param_info = {'r1': {}, 'r2': {}}
    action_param_info = {'a1': {}, 'a2': {}, 'a3': {}}
    action_context = {'api_user': 'noob'}
    r_runner_params, r_action_params = param_utils.get_finalized_params(
        runner_param_info, action_param_info, params, action_context)
    # Untyped cross-references render to their string forms.
    self.assertEqual(r_runner_params, {'r1': u'True', 'r2': 1})
    self.assertEqual(r_action_params, {'a1': True, 'a2': u'True True', 'a3': 'noob'})
def test_get_finalized_params_cross_talk_with_cast(self):
    """Cross-referenced params are cast per each referencing schema's type."""
    params = {
        'r1': '{{a1}}',
        'r2': 1,
        'r3': 1,
        'a1': True,
        'a2': '{{r1}},{{a1}},{{a3}},{{r3}}',
        'a3': '{{a1}}'
    }
    runner_param_info = {'r1': {'type': 'boolean'}, 'r2': {'type': 'integer'}, 'r3': {}}
    action_param_info = {'a1': {'type': 'boolean'}, 'a2': {'type': 'array'}, 'a3': {}}
    action_context = {}
    r_runner_params, r_action_params = param_utils.get_finalized_params(
        runner_param_info, action_param_info, params, action_context)
    self.assertEqual(r_runner_params, {'r1': True, 'r2': 1, 'r3': 1})
    # a2 is cast to an array (tuple) of the rendered comma-separated values.
    self.assertEqual(r_action_params, {'a1': True, 'a2': (True, True, True, 1), 'a3': u'True'})
def test_get_finalized_params_order(self):
    """Precedence: liveaction params > action defaults > runner defaults."""
    # Scenario 1: explicit liveaction params win over all defaults.
    params = {
        'r1': 'p1',
        'r2': 'p2',
        'r3': 'p3',
        'a1': 'p4',
        'a2': 'p5'
    }
    runner_param_info = {'r1': {}, 'r2': {'default': 'r2'}, 'r3': {'default': 'r3'}}
    action_param_info = {'a1': {}, 'a2': {'default': 'a2'}, 'r3': {'default': 'a3'}}
    action_context = {'api_user': 'noob'}
    r_runner_params, r_action_params = param_utils.get_finalized_params(
        runner_param_info, action_param_info, params, action_context)
    self.assertEqual(r_runner_params, {'r1': u'p1', 'r2': u'p2', 'r3': u'p3'})
    self.assertEqual(r_action_params, {'a1': u'p4', 'a2': u'p5'})
    # Scenario 2: with no liveaction params, the action default for r3 beats
    # the runner default.
    params = {}
    runner_param_info = {'r1': {}, 'r2': {'default': 'r2'}, 'r3': {'default': 'r3'}}
    action_param_info = {'a1': {}, 'a2': {'default': 'a2'}, 'r3': {'default': 'a3'}}
    action_context = {'api_user': 'noob'}
    r_runner_params, r_action_params = param_utils.get_finalized_params(
        runner_param_info, action_param_info, params, action_context)
    self.assertEqual(r_runner_params, {'r1': None, 'r2': u'r2', 'r3': u'a3'})
    self.assertEqual(r_action_params, {'a1': None, 'a2': u'a2'})
    # Scenario 3: runner defaults survive when the action redeclares the param
    # without a default; action defaults still override when present.
    params = {}
    runner_param_info = {'r1': {}, 'r2': {'default': 'r2'}, 'r3': {}}
    action_param_info = {'r1': {}, 'r2': {}, 'r3': {'default': 'a3'}}
    action_context = {'api_user': 'noob'}
    r_runner_params, r_action_params = param_utils.get_finalized_params(
        runner_param_info, action_param_info, params, action_context)
    self.assertEqual(r_runner_params, {'r1': None, 'r2': u'r2', 'r3': u'a3'})
def test_get_finalized_params_non_existent_template_key_in_action_context(self):
    """Rendering a param that dereferences a missing action_context key must fail."""
    params = {
        'r1': 'foo',
        'r2': 2,
        'a1': 'i love tests',
        'a2': '{{action_context.lorem_ipsum}}'
    }
    runner_param_info = {'r1': {'type': 'string'}, 'r2': {'type': 'integer'}}
    action_param_info = {'a1': {'type': 'string'}, 'a2': {'type': 'string'}}
    action_context = {'api_user': 'noob', 'source_channel': 'reddit'}
    try:
        # Return values are irrelevant: this call is expected to raise.
        param_utils.get_finalized_params(
            runner_param_info, action_param_info, params, action_context)
        self.fail('This should have thrown because we are trying to deref a key in ' +
                  'action context that ain\'t exist.')
    except ParamException as e:
        error_msg = 'Failed to render parameter "a2": \'dict object\' ' + \
                    'has no attribute \'lorem_ipsum\''
        # assertIn gives a far better failure message than assertTrue(x in y).
        self.assertIn(error_msg, str(e))
def test_unicode_value_casting(self):
    """_cast_params must not mangle non-ASCII characters in string params."""
    rendered = {'a1': 'unicode1 ٩(̾●̮̮̃̾•̃̾)۶ unicode2'}
    parameter_schemas = {'a1': {'type': 'string'}}
    result = param_utils._cast_params(rendered=rendered,
                                      parameter_schemas=parameter_schemas)
    # Python 2 and 3 differ in how the unicode payload round-trips.
    if six.PY3:
        expected = {
            'a1': (u'unicode1 ٩(̾●̮̮̃̾•̃̾)۶ unicode2')
        }
    else:
        expected = {
            'a1': (u'unicode1 \xd9\xa9(\xcc\xbe\xe2\x97\x8f\xcc\xae\xcc\xae\xcc'
                   u'\x83\xcc\xbe\xe2\x80\xa2\xcc\x83\xcc\xbe)\xdb\xb6 unicode2')
        }
    self.assertEqual(result, expected)
def test_get_finalized_params_with_casting_unicode_values(self):
    """End-to-end finalization must preserve non-ASCII characters."""
    params = {'a1': 'unicode1 ٩(̾●̮̮̃̾•̃̾)۶ unicode2'}
    runner_param_info = {}
    action_param_info = {'a1': {'type': 'string'}}
    action_context = {}
    r_runner_params, r_action_params = param_utils.get_finalized_params(
        runner_param_info, action_param_info, params, action_context)
    # Python 2 and 3 differ in how the unicode payload round-trips.
    if six.PY3:
        expected_action_params = {
            'a1': (u'unicode1 ٩(̾●̮̮̃̾•̃̾)۶ unicode2')
        }
    else:
        expected_action_params = {
            'a1': (u'unicode1 \xd9\xa9(\xcc\xbe\xe2\x97\x8f\xcc\xae\xcc\xae\xcc'
                   u'\x83\xcc\xbe\xe2\x80\xa2\xcc\x83\xcc\xbe)\xdb\xb6 unicode2')
        }
    self.assertEqual(r_runner_params, {})
    self.assertEqual(r_action_params, expected_action_params)
def test_get_finalized_params_with_dict(self):
    """Templates nested inside dict values are rendered and cast recursively."""
    # Note : In this test runner_params.r1 has a string value. However per runner_param_info the
    # type is an integer. The expected type is considered and cast is performed accordingly.
    params = {
        'r1': '{{r2}}',
        'r2': {'r2.1': 1},
        'a1': True,
        'a2': '{{a1}}',
        'a3': {
            'test': '{{a1}}',
            'test1': '{{a4}}',
            'test2': '{{a5}}',
        },
        'a4': 3,
        'a5': ['1', '{{a1}}']
    }
    runner_param_info = {'r1': {'type': 'object'}, 'r2': {'type': 'object'}}
    action_param_info = {
        'a1': {
            'type': 'boolean',
        },
        'a2': {
            'type': 'boolean',
        },
        'a3': {
            'type': 'object',
        },
        'a4': {
            'type': 'integer',
        },
        'a5': {
            'type': 'array',
        },
    }
    r_runner_params, r_action_params = param_utils.get_finalized_params(
        runner_param_info, action_param_info, params, {})
    self.assertEqual(
        r_runner_params, {'r1': {'r2.1': 1}, 'r2': {'r2.1': 1}})
    # Nested template references inside a3 resolve to their typed values,
    # including the list reference to a5.
    self.assertEqual(
        r_action_params,
        {
            'a1': True,
            'a2': True,
            'a3': {
                'test': True,
                'test1': 3,
                'test2': [
                    '1',
                    True
                ],
            },
            'a4': 3,
            'a5': [
                '1',
                True
            ],
        }
    )
def test_get_finalized_params_with_list(self):
    """Templates nested inside (possibly nested) lists are rendered recursively."""
    # Note : In this test runner_params.r1 has a string value. However per runner_param_info the
    # type is an integer. The expected type is considered and cast is performed accordingly.
    self.maxDiff = None
    params = {
        'r1': '{{r2}}',
        'r2': ['1', '2'],
        'a1': True,
        'a2': 'Test',
        'a3': 'Test2',
        'a4': '{{a1}}',
        'a5': ['{{a2}}', '{{a3}}'],
        'a6': [
            ['{{r2}}', '{{a2}}'],
            ['{{a3}}', '{{a1}}'],
            [
                '{{a7}}',
                'This should be rendered as a string {{a1}}',
                '{{a1}} This, too, should be rendered as a string {{a1}}',
            ]
        ],
        'a7': 5,
    }
    runner_param_info = {'r1': {'type': 'array'}, 'r2': {'type': 'array'}}
    action_param_info = {
        'a1': {'type': 'boolean'},
        'a2': {'type': 'string'},
        'a3': {'type': 'string'},
        'a4': {'type': 'boolean'},
        'a5': {'type': 'array'},
        'a6': {'type': 'array'},
        'a7': {'type': 'integer'},
    }
    r_runner_params, r_action_params = param_utils.get_finalized_params(
        runner_param_info, action_param_info, params, {})
    self.assertEqual(r_runner_params, {'r1': ['1', '2'], 'r2': ['1', '2']})
    # A lone template reference keeps the referenced value's type; templates
    # embedded inside larger strings render to strings.
    self.assertEqual(
        r_action_params,
        {
            'a1': True,
            'a2': 'Test',
            'a3': 'Test2',
            'a4': True,
            'a5': ['Test', 'Test2'],
            'a6': [
                [['1', '2'], 'Test'],
                ['Test2', True],
                [
                    5,
                    u'This should be rendered as a string True',
                    u'True This, too, should be rendered as a string True'
                ]
            ],
            'a7': 5,
        }
    )
def test_get_finalized_params_with_cyclic_dependency(self):
    """Mutually referencing parameters must raise a 'Cyclic' ParamException."""
    params = {'r1': '{{r2}}', 'r2': '{{r1}}'}
    runner_param_info = {'r1': {}, 'r2': {}}
    action_param_info = {}
    # assertRaises replaces the manual test_pass-flag pattern; the message
    # check replaces the unidiomatic str(e).find(...) == 0 test.
    with self.assertRaises(ParamException) as ctx:
        param_utils.get_finalized_params(runner_param_info, action_param_info, params, {})
    self.assertTrue(str(ctx.exception).startswith('Cyclic'))
def test_get_finalized_params_with_missing_dependency(self):
    """Parameters referencing an undefined variable must raise a 'Dependency' error."""
    params = {'r1': '{{r3}}', 'r2': '{{r3}}'}
    runner_param_info = {'r1': {}, 'r2': {}}
    action_param_info = {}
    # assertRaises replaces the manual test_pass-flag pattern; the message
    # check replaces the unidiomatic str(e).find(...) == 0 test.
    with self.assertRaises(ParamException) as ctx:
        param_utils.get_finalized_params(runner_param_info, action_param_info, params, {})
    self.assertTrue(str(ctx.exception).startswith('Dependency'))
    # The same failure must occur when the dangling reference lives in a default.
    params = {}
    runner_param_info = {'r1': {'default': '{{r3}}'}, 'r2': {'default': '{{r3}}'}}
    action_param_info = {}
    with self.assertRaises(ParamException) as ctx:
        param_utils.get_finalized_params(runner_param_info, action_param_info, params, {})
    self.assertTrue(str(ctx.exception).startswith('Dependency'))
def test_get_finalized_params_no_double_rendering(self):
    """Template-like text produced by a first render pass must not be re-rendered."""
    params = {
        'r1': '{{ action_context.h1 }}{{ action_context.h2 }}'
    }
    runner_param_info = {'r1': {}}
    action_param_info = {}
    # Concatenating h1 + h2 yields the literal text '{{ missing }}', which
    # would blow up (undefined variable) if rendered a second time.
    action_context = {
        'h1': '{',
        'h2': '{ missing }}'
    }
    r_runner_params, r_action_params = param_utils.get_finalized_params(
        runner_param_info, action_param_info, params, action_context)
    self.assertEqual(r_runner_params, {'r1': '{{ missing }}'})
    self.assertEqual(r_action_params, {})
def test_get_finalized_params_jinja_filters(self):
    """Custom Jinja filters (e.g. version_bump_minor) are usable in params."""
    liveaction_params = {'cmd': 'echo {{"1.6.0" | version_bump_minor}}'}
    _, finalized_action = param_utils.get_finalized_params(
        {'r1': {}}, {'cmd': {}}, liveaction_params, {})
    self.assertEqual(finalized_action['cmd'], "echo 1.7.0")
def test_get_finalized_params_param_rendering_failure(self):
    """Attribute access on a plain string param must surface as a ParamException."""
    # a2 is a string, so '{{a2.foo}}' cannot be rendered.
    params = {'cmd': '{{a2.foo}}', 'a2': 'test'}
    action_param_info = {'cmd': {}, 'a2': {}}
    expected_msg = 'Failed to render parameter "cmd": .*'
    self.assertRaisesRegexp(ParamException,
                            expected_msg,
                            param_utils.get_finalized_params,
                            runnertype_parameter_info={},
                            action_parameter_info=action_param_info,
                            liveaction_parameters=params,
                            action_context={})
def test_get_finalized_param_object_contains_template_notation_in_the_value(self):
    """Templates inside an object-typed default are rendered from other params."""
    runner_param_info = {'r1': {}}
    action_param_info = {
        'params': {
            'type': 'object',
            'default': {
                'host': '{{host}}',
                'port': '{{port}}',
                'path': '/bar'}
        }
    }
    params = {
        'host': 'lolcathost',
        'port': 5555
    }
    action_context = {}
    r_runner_params, r_action_params = param_utils.get_finalized_params(
        runner_param_info, action_param_info, params, action_context)
    # port keeps its integer type; path passes through unrendered.
    expected_params = {
        'host': 'lolcathost',
        'port': 5555,
        'path': '/bar'
    }
    self.assertEqual(r_action_params['params'], expected_params)
def test_cast_param_referenced_action_doesnt_exist(self):
    """cast_params must raise ValueError for an unknown action ref."""
    # Make sure the function throws if the action doesnt exist
    expected_msg = 'Action with ref "foo.doesntexist" doesn\'t exist'
    self.assertRaisesRegexp(ValueError, expected_msg, action_param_utils.cast_params,
                            action_ref='foo.doesntexist', params={})
def test_get_finalized_params_with_config(self):
    """Params may reference pack config values via the config_context namespace."""
    # Mock the pack config loader so no real config files are needed.
    with mock.patch('st2common.util.config_loader.ContentPackConfigLoader') as config_loader:
        config_loader().get_config.return_value = {
            'generic_config_param': 'So generic'
        }
        params = {
            'config_param': '{{config_context.generic_config_param}}',
        }
        # with_config_context=True attaches pack/user info needed to load config.
        liveaction_db = self._get_liveaction_model(params, True)
        _, action_params = param_utils.get_finalized_params(
            ParamsUtilsTest.runnertype_db.runner_parameters,
            ParamsUtilsTest.action_db.parameters,
            liveaction_db.parameters,
            liveaction_db.context)
        self.assertEqual(
            action_params.get('config_param'),
            'So generic'
        )
def test_get_config(self):
    """get_config returns {} unless both pack and user are provided."""
    with mock.patch('st2common.util.config_loader.ContentPackConfigLoader') as config_loader:
        mock_config_return = {
            'generic_config_param': 'So generic'
        }
        config_loader().get_config.return_value = mock_config_return
        # Any missing argument short-circuits to an empty config.
        self.assertEqual(get_config(None, None), {})
        self.assertEqual(get_config('pack', None), {})
        self.assertEqual(get_config(None, 'user'), {})
        self.assertEqual(
            get_config('pack', 'user'), mock_config_return
        )
        config_loader.assert_called_with(pack_name='pack', user='user')
        config_loader().get_config.assert_called_once()
def _get_liveaction_model(self, params, with_config_context=False):
    """Build a LiveActionDB fixture pointing at the shared test action.

    When with_config_context is True, the context also carries the pack/user
    pair required for pack-config resolution.
    """
    action_ref = ResourceReference(
        name=ParamsUtilsTest.action_db.name,
        pack=ParamsUtilsTest.action_db.pack).ref
    liveaction_db = LiveActionDB(
        status='initializing',
        start_timestamp=date_utils.get_datetime_utc_now(),
        action=action_ref,
        parameters=params)
    context = {
        'api_user': 'noob',
        'source_channel': 'reddit',
    }
    if with_config_context:
        context.update({'pack': 'generic', 'user': 'st2admin'})
    liveaction_db.context = context
    return liveaction_db
def test_get_live_params_with_additional_context(self):
    """render_live_params can resolve references into extra caller-supplied contexts."""
    runner_param_info = {
        'r1': {
            'default': 'some'
        }
    }
    action_param_info = {
        'r2': {
            'default': '{{ r1 }}'
        }
    }
    params = {
        'r3': 'lolcathost',
        'r1': '{{ additional.stuff }}'
    }
    action_context = {}
    # 'additional' is only resolvable through this extra context namespace.
    additional_contexts = {
        'additional': {
            'stuff': 'generic'
        }
    }
    live_params = param_utils.render_live_params(
        runner_param_info, action_param_info, params, action_context, additional_contexts)
    expected_params = {
        'r1': 'generic',
        'r2': 'generic',
        'r3': 'lolcathost'
    }
    self.assertEqual(live_params, expected_params)
def test_cyclic_dependency_friendly_error_message(self):
    """A cycle error must name every self-referencing variable, sorted."""
    runner_param_info = {
        'r1': {
            'default': 'some',
            'cyclic': 'cyclic value',
            'morecyclic': 'cyclic value'
        }
    }
    action_param_info = {
        'r2': {
            'default': '{{ r1 }}'
        }
    }
    # cyclic and morecyclic each reference themselves.
    params = {
        'r3': 'lolcathost',
        'cyclic': '{{ cyclic }}',
        'morecyclic': '{{ morecyclic }}'
    }
    action_context = {}
    expected_msg = 'Cyclic dependency found in the following variables: cyclic, morecyclic'
    self.assertRaisesRegexp(ParamException, expected_msg, param_utils.render_live_params,
                            runner_param_info, action_param_info, params, action_context)
def test_unsatisfied_dependency_friendly_error_message(self):
    """A dangling reference error must name the missing variable."""
    runner_param_info = {
        'r1': {
            'default': 'some',
        }
    }
    action_param_info = {
        'r2': {
            'default': '{{ r1 }}'
        }
    }
    # variable_not_defined appears nowhere in the schemas or params.
    params = {
        'r3': 'lolcathost',
        'r4': '{{ variable_not_defined }}',
    }
    action_context = {}
    expected_msg = 'Dependency unsatisfied in variable "variable_not_defined"'
    self.assertRaisesRegexp(ParamException, expected_msg, param_utils.render_live_params,
                            runner_param_info, action_param_info, params, action_context)
def test_add_default_templates_to_live_params(self):
    """Test addition of template values in defaults to live params

    Note: the deprecated ``assertEquals`` alias has been replaced with
    ``assertEqual`` for consistency with the rest of this test module.
    """
    # Ensure parameter is skipped if the parameter has immutable set to true in schema
    schemas = [
        {
            'templateparam': {
                'default': '{{ 3 | int }}',
                'type': 'integer',
                'immutable': True
            }
        }
    ]
    context = {
        'templateparam': '3'
    }
    result = param_utils._cast_params_from({}, context, schemas)
    self.assertEqual(result, {})
    # Test with no live params, and two parameters - one should make it through because
    # it was a template, and the other shouldn't because its default wasn't a template
    schemas = [
        {
            'templateparam': {
                'default': '{{ 3 | int }}',
                'type': 'integer'
            }
        }
    ]
    context = {
        'templateparam': '3'
    }
    result = param_utils._cast_params_from({}, context, schemas)
    self.assertEqual(result, {'templateparam': 3})
    # Ensure parameter is skipped if the value in context is identical to default
    schemas = [
        {
            'nottemplateparam': {
                'default': '4',
                'type': 'integer'
            }
        }
    ]
    context = {
        'nottemplateparam': '4',
    }
    result = param_utils._cast_params_from({}, context, schemas)
    self.assertEqual(result, {})
    # Ensure parameter is skipped if the parameter doesn't have a default
    schemas = [
        {
            'nottemplateparam': {
                'type': 'integer'
            }
        }
    ]
    context = {
        'nottemplateparam': '4',
    }
    result = param_utils._cast_params_from({}, context, schemas)
    self.assertEqual(result, {})
    # Skip if the default value isn't a Jinja expression
    schemas = [
        {
            'nottemplateparam': {
                'default': '5',
                'type': 'integer'
            }
        }
    ]
    context = {
        'nottemplateparam': '4',
    }
    result = param_utils._cast_params_from({}, context, schemas)
    self.assertEqual(result, {})
    # Ensure parameter is skipped if the parameter is being overridden
    schemas = [
        {
            'templateparam': {
                'default': '{{ 3 | int }}',
                'type': 'integer'
            }
        }
    ]
    context = {
        'templateparam': '4',
    }
    result = param_utils._cast_params_from({'templateparam': '4'}, context, schemas)
    self.assertEqual(result, {'templateparam': 4})
| StarcoderdataPython |
6635312 | # Copyright 2017-2020 The GPflow Contributors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional
import numpy as np
import tensorflow as tf
from ..base import Parameter, TensorType
from ..utilities import positive
from ..utilities.ops import difference_matrix
from .base import ActiveDims, Kernel, NormalizedActiveDims
from .stationaries import IsotropicStationary
class Periodic(Kernel):
    """
    Periodic wrapper around an isotropic stationary base kernel.

    Following MacKay (1998, Eq. 47 of "Introduction to Gaussian processes",
    in C.M. Bishop (ed.), Neural Networks and Machine Learning, Springer),
    the inputs are implicitly mapped through u = (cos(x), sin(x)), which
    turns the base kernel into a periodic one. With a SquaredExponential
    base kernel the result is

        k(r) = σ² exp{-0.5 sin²(π r / γ) / ℓ²}

    where r is the Euclidean distance between inputs, ℓ the lengthscales
    parameter, σ² the variance parameter and γ the period parameter.

    NOTE: the usual factor of 4 in front of the sine term is absorbed into
    the lengthscales hyperparameter.

    NOTE: `active_dims` is taken from the base kernel, so the constructor
    does not accept it as an argument.
    """

    def __init__(self, base_kernel: IsotropicStationary, period: TensorType = 1.0) -> None:
        """
        :param base_kernel: the kernel to periodise; must be an
            IsotropicStationary kernel, and should carry any desired
            `active_dims`.
        :param period: the period; to obtain a different period per active
            dimension, pass an array with one entry per active dimension,
            e.g. [1., 1., 1.]
        """
        if not isinstance(base_kernel, IsotropicStationary):
            raise TypeError("Periodic requires an IsotropicStationary kernel as the `base_kernel`")

        super().__init__()
        self.base_kernel = base_kernel
        self.period = Parameter(period, transform=positive())
        self.base_kernel._validate_ard_active_dims(self.period)

    @property
    def active_dims(self) -> NormalizedActiveDims:
        # Delegated entirely to the wrapped kernel.
        return self.base_kernel.active_dims

    @active_dims.setter
    def active_dims(self, value: ActiveDims) -> None:
        # The getter and setter deliberately use different types, which mypy
        # cannot express — hence the ignore.
        self.base_kernel.active_dims = value  # type: ignore

    def K_diag(self, X: TensorType) -> tf.Tensor:
        # The periodic warp leaves the diagonal unchanged (r = 0).
        return self.base_kernel.K_diag(X)

    def K(self, X: TensorType, X2: Optional[TensorType] = None) -> tf.Tensor:
        angles = np.pi * difference_matrix(X, X2) / self.period
        scaled_sine = tf.sin(angles) / self.base_kernel.lengthscales
        if hasattr(self.base_kernel, "K_r"):
            # Kernels defined in terms of r consume the summed |sin| term.
            sine_r = tf.reduce_sum(tf.abs(scaled_sine), -1)
            return self.base_kernel.K_r(sine_r)
        # Otherwise fall back to the squared-distance formulation.
        sine_r2 = tf.reduce_sum(tf.square(scaled_sine), -1)
        return self.base_kernel.K_r2(sine_r2)
| StarcoderdataPython |
3278956 | from __future__ import print_function
# third party libraries
from theano.tensor import matrix, lvector
from opendeep.log.logger import config_root_logger
from opendeep.models import Prototype, Dense, Softmax
from opendeep.optimization import AdaDelta
from opendeep.optimization.loss import Neg_LL
from opendeep.data import MNIST
def run_mlp():
    """Train a three-layer MLP on MNIST with AdaDelta and print predictions."""
    # Build the network layer by layer; after the first explicit Dense layer,
    # later layers infer their inputs from the previous one automatically.
    mlp = Prototype()
    input_spec = ((None, 784), matrix("x"))
    mlp.add(Dense(inputs=input_spec, outputs=1000, activation='rectifier'))
    mlp.add(Dense, outputs=1500, activation='tanh')
    mlp.add(Softmax, outputs=10, out_as_probs=False)

    # Negative log-likelihood against integer targets
    # (MNIST labels are int64 numbers 0-9).
    y = lvector('y')
    loss = Neg_LL(inputs=mlp.models[-1].p_y_given_x, targets=y, one_hot=False)

    mnist = MNIST()
    optimizer = AdaDelta(model=mlp, loss=loss, dataset=mnist, epochs=10)
    optimizer.train()

    # Predict on a small slice of the test set and show it next to the labels.
    test_data = mnist.test_inputs[:25]
    test_labels = mnist.test_targets[:25]
    predictions = mlp.run(test_data)
    print('-------')
    print('Prediction: %s' % str(predictions))
    print('Actual: %s' % str(test_labels.astype('int32')))
if __name__ == '__main__':
    # Configure OpenDeep's root logger before running the example end-to-end.
    config_root_logger()
    run_mlp()
| StarcoderdataPython |
185728 | <reponame>SDomarecki/Specusticc<filename>specusticc/agent/agent.py
from specusticc.configs_init.model.agent_config import AgentConfig
from specusticc.data_postprocessing.data_postprocessor import DataPostprocessor
from specusticc.data_preprocessing.preprocessed_data import PreprocessedData
from specusticc.model_creating.network_builder import NetworkBuilder
from specusticc.model_creating.optimizer import Optimizer
from specusticc.model_testing.tester import Tester
from specusticc.model_training.trainer import Trainer
from specusticc.reporting.reporter import Reporter
class Agent:
    """Runs one train/test fold for a single model.

    Pipeline: build the network (or obtain it via hyperparameter
    optimisation, which trains as a side effect), train, test, postprocess
    the predictions and persist a report.
    """

    def __init__(
        self,
        model_name: str,
        fold_number: int,
        data: PreprocessedData,
        config: AgentConfig,
    ):
        self.config: AgentConfig = config
        # Unique identifier for this (model, fold) pair, used for report naming.
        self._name = f"{model_name}_{fold_number}"
        self.model_name: str = model_name
        self._fold_number: int = fold_number
        self._data: PreprocessedData = data

    def run(self):
        """Execute the full pipeline for this fold."""
        if self.config.hyperparam_optimization_method in ("grid", "random", "bayes"):
            # Optimisation returns an already-trained model.
            self._optimize_model()
        else:
            self._create_model()
            self._train_model()
        self._test_model()
        self._postprocess_data()
        self._save_report()

    def _optimize_model(self):
        self._model = Optimizer(self._data, self.model_name, self.config).optimize()

    def _create_model(self):
        self._model = NetworkBuilder(self.model_name, self.config).build()

    def _train_model(self):
        self._model = Trainer(self._data, self.model_name, self.config).train(self._model)

    def _test_model(self):
        tester = Tester(self._model, self.model_name, self._data)
        tester.test()
        self._test_results = tester.get_test_results()

    def _postprocess_data(self):
        self._postprocessed_data = DataPostprocessor(self._data, self._test_results).get_data()

    def _save_report(self):
        reporter = Reporter(self._postprocessed_data, self.config.save_path, self._name)
        reporter.save_results()
| StarcoderdataPython |
11375705 | <reponame>xiangruhuang/mmdetection3d
# MMDetection3D config: CenterPoint on Waymo, extending the 0.075 m voxel,
# SECOND + SECFPN + DCN + circle-NMS base config.
_base_ = './centerpoint_0075voxel_second_secfpn_dcn_' \
    'circlenms_4x8_cyclic_20e_waymo.py'
# Detection range in metres: [x_min, y_min, z_min, x_max, y_max, z_max].
point_cloud_range = [-75.2, -75.2, -2.0, 75.2, 75.2, 4.0]
file_client_args = dict(backend='disk')
class_names = [
    'Car', 'Pedestrian', 'Cyclist'
]
dataset_type = 'WaymoDataset'
data_root = 'data/waymo/kitti_format/'
# NOTE(review): duplicate of the file_client_args assignment above — harmless
# (identical value) but one of the two could be removed.
file_client_args = dict(backend='disk')
# Ground-truth database sampler for copy-paste (GT-AUG) augmentation.
db_sampler = dict(
    data_root=data_root,
    info_path=data_root + 'waymo_dbinfos_train.pkl',
    rate=1.0,
    prepare=dict(
        filter_by_difficulty=[-1],
        filter_by_min_points=dict(Car=5, Pedestrian=10, Cyclist=10)),
    classes=class_names,
    sample_groups=dict(Car=15, Pedestrian=10, Cyclist=10),
    points_loader=dict(
        type='LoadPointsFromFile',
        coord_type='LIDAR',
        load_dim=5,
        use_dim=[0, 1, 2, 3, 4],
        file_client_args=file_client_args))
train_pipeline = [
    dict(
        type='LoadPointsFromFile',
        coord_type='LIDAR',
        load_dim=6,
        use_dim=5,
        file_client_args=file_client_args),
    dict(
        type='LoadAnnotations3D',
        with_bbox_3d=True,
        with_label_3d=True,
        file_client_args=file_client_args),
    dict(type='ObjectSample', db_sampler=db_sampler),
    dict(
        type='GlobalRotScaleTrans',
        rot_range=[-0.78539816, 0.78539816],
        scale_ratio_range=[0.95, 1.05],
        translation_std=[0, 0, 0]),
    dict(
        type='RandomFlip3D',
        sync_2d=False,
        flip_ratio_bev_horizontal=0.5,
        flip_ratio_bev_vertical=0.5),
    dict(type='PointsRangeFilter', point_cloud_range=point_cloud_range),
    dict(type='ObjectRangeFilter', point_cloud_range=point_cloud_range),
    dict(type='PointShuffle'),
    dict(type='DefaultFormatBundle3D', class_names=class_names),
    dict(type='Collect3D', keys=['points', 'gt_bboxes_3d', 'gt_labels_3d'])
]
test_pipeline = [
    dict(
        type='LoadPointsFromFile',
        coord_type='LIDAR',
        load_dim=6,
        use_dim=5,
        file_client_args=file_client_args),
    #dict(
    #    type='LoadPointsFromMultiSweeps',
    #    sweeps_num=9,
    #    use_dim=[0, 1, 2, 3, 4],
    #    file_client_args=file_client_args,
    #    pad_empty_sweeps=True,
    #    remove_close=True),
    dict(
        type='MultiScaleFlipAug3D',
        img_scale=(2048, 2048),
        pts_scale_ratio=1,
        # Add double-flip augmentation
        flip=True,
        pcd_horizontal_flip=True,
        pcd_vertical_flip=True,
        transforms=[
            dict(
                type='GlobalRotScaleTrans',
                rot_range=[0, 0],
                scale_ratio_range=[1., 1.],
                translation_std=[0, 0, 0]),
            dict(type='RandomFlip3D', sync_2d=False),
            dict(
                type='PointsRangeFilter', point_cloud_range=point_cloud_range),
            dict(
                type='DefaultFormatBundle3D',
                class_names=class_names,
                with_label=False),
            dict(type='Collect3D', keys=['points'])
        ])
]
data = dict(
    samples_per_gpu=2,
    workers_per_gpu=2,
    train=dict(
        type='RepeatDataset',
        times=2,
        dataset=dict(
            type=dataset_type,
            data_root=data_root,
            ann_file=data_root + 'waymo_infos_train.pkl',
            pipeline=train_pipeline,
            load_interval=1)),
    val=dict(pipeline=test_pipeline),
    test=dict(pipeline=test_pipeline)
)
# Evaluate every 25 epochs.
evaluation = dict(interval=25)
workflow=[('train', 1)]
| StarcoderdataPython |
11326 | <filename>tests/integration/storage_memory/test_storage_memory_write.py
import logging
import pytest
from moto import mock_ec2, mock_iam, mock_sts
from cloudwanderer.cloud_wanderer_resource import CloudWandererResource
from cloudwanderer.storage_connectors import MemoryStorageConnector
from cloudwanderer.urn import URN
from tests.pytest_helpers import create_ec2_instances
logger = logging.getLogger(__name__)
@pytest.fixture(scope="function")
def memory_connector():
    """Provide a fresh, initialised in-memory storage connector for each test.

    The unused ``request`` parameter has been removed; pytest only injects it
    when declared, and nothing here needed it.
    """
    connector = MemoryStorageConnector()
    connector.init()
    return connector
def get_inferred_ec2_instances(cloudwanderer_boto3_session):
    """Wrap every EC2 instance visible to the session as a CloudWandererResource."""
    resources = []
    for instance in cloudwanderer_boto3_session.resource("ec2").instances.all():
        instance_urn = URN(
            account_id="111111111111",
            region="eu-west-2",
            service="ec2",
            resource_type="instance",
            resource_id_parts=[instance.instance_id],
        )
        resources.append(
            CloudWandererResource(urn=instance_urn, resource_data=instance.meta.data)
        )
    return resources
def inferred_ec2_vpcs(cloudwanderer_boto3_session):
    """Wrap every EC2 VPC visible to the session as a CloudWandererResource."""
    vpc_resources = []
    for vpc in cloudwanderer_boto3_session.resource("ec2").vpcs.all():
        vpc_urn = URN(
            account_id="111111111111",
            region="eu-west-2",
            service="ec2",
            resource_type="vpc",
            resource_id_parts=[vpc.vpc_id],
        )
        vpc_resources.append(
            CloudWandererResource(urn=vpc_urn, resource_data=vpc.meta.data)
        )
    return vpc_resources
@pytest.fixture
def iam_role():
    """An IAM role resource fixture with one dependent role-policy URN attached."""
    return CloudWandererResource(
        urn=URN(
            account_id="111111111111",
            region="us-east-1",
            service="iam",
            resource_type="role",
            resource_id_parts=["test-role"],
        ),
        resource_data={"RoleName": "test-role", "InlinePolicyAttachments": [{"PolicyNames": ["test-role"]}]},
        # This dependent URN is what the write/read round-trip tests verify.
        dependent_resource_urns=[
            URN(
                account_id="111111111111",
                region="us-east-1",
                service="iam",
                resource_type="role_policy",
                resource_id_parts=["test-role", "test-role-policy"],
            )
        ],
    )
@pytest.fixture
def iam_role_policies():
    """Two role-policy resources that are children of the iam_role fixture."""
    return [
        CloudWandererResource(
            urn=URN(
                account_id="111111111111",
                region="us-east-1",
                service="iam",
                resource_type="role_policy",
                resource_id_parts=["test-role", "test-role-policy-1"],
            ),
            resource_data={},
            # parent_urn links the policy to its owning role for cascade deletes.
            parent_urn=URN(
                account_id="111111111111",
                region="us-east-1",
                service="iam",
                resource_type="role",
                resource_id_parts=["test-role"],
            ),
        ),
        CloudWandererResource(
            urn=URN(
                account_id="111111111111",
                region="us-east-1",
                service="iam",
                resource_type="role_policy",
                resource_id_parts=["test-role", "test-role-policy-2"],
            ),
            resource_data={},
            parent_urn=URN(
                account_id="111111111111",
                region="us-east-1",
                service="iam",
                resource_type="role",
                resource_id_parts=["test-role"],
            ),
        ),
    ]
@mock_sts
@mock_iam
def test_write_resource_and_attribute(memory_connector, iam_role):
    """Writing a role persists its data, inline policies and dependent URNs."""
    memory_connector.write_resource(resource=iam_role)
    result = memory_connector.read_resource(urn=iam_role.urn)
    assert result.urn == iam_role.urn
    assert result.role_name == "test-role"
    logger.info(result.cloudwanderer_metadata.resource_data)
    assert result.inline_policy_attachments == [{"PolicyNames": ["test-role"]}]
    # The dependent role-policy URN recorded on the fixture must round-trip intact.
    assert result.dependent_resource_urns == [
        URN(
            account_id="111111111111",
            region="us-east-1",
            service="iam",
            resource_type="role_policy",
            resource_id_parts=["test-role", "test-role-policy"],
        )
    ]
@mock_sts
@mock_ec2
def test_write_and_delete_instances(memory_connector, cloudwanderer_boto3_session):
    """A written instance is readable until deleted, after which reads return None."""
    create_ec2_instances()
    instance_resource = get_inferred_ec2_instances(cloudwanderer_boto3_session)[0]

    memory_connector.write_resource(resource=instance_resource)
    before_delete = memory_connector.read_resource(urn=instance_resource.urn)

    memory_connector.delete_resource(urn=instance_resource.urn)
    after_delete = memory_connector.read_resource(urn=instance_resource.urn)

    assert before_delete.urn == instance_resource.urn
    assert after_delete is None
@mock_sts
@mock_ec2
def test_write_and_delete_resource_of_type_in_account_region(memory_connector, cloudwanderer_boto3_session):
    """Bulk-deleting a (service, type, account, region) combination removes all matches."""
    create_ec2_instances(count=5)
    inferred_ec2_instances = get_inferred_ec2_instances(cloudwanderer_boto3_session)
    for i in range(5):
        memory_connector.write_resource(resource=inferred_ec2_instances[i])

    # cutoff=None means no timestamp filtering: everything matching is deleted.
    memory_connector.delete_resource_of_type_in_account_region(
        cloud_name="aws",
        service="ec2",
        resource_type="instance",
        account_id="111111111111",
        region="eu-west-2",
        cutoff=None,
    )
    remaining_urns = [
        resource.urn for resource in memory_connector.read_resources(service="ec2", resource_type="instance")
    ]
    assert remaining_urns == []
def test_delete_subresources_from_resource(memory_connector, iam_role, iam_role_policies):
    """If we are deleting a parent resource we should delete all its subresources."""
    memory_connector.write_resource(resource=iam_role)
    memory_connector.write_resource(resource=iam_role_policies[0])
    memory_connector.write_resource(resource=iam_role_policies[1])
    # Sanity check: parent and both children are readable before deletion.
    role_before_delete = memory_connector.read_resource(urn=iam_role.urn)
    role_policy_1_before_delete = memory_connector.read_resource(urn=iam_role_policies[0].urn)
    role_policy_2_before_delete = memory_connector.read_resource(urn=iam_role_policies[1].urn)

    # Delete the parent and ensure the subresources are also deleted
    memory_connector.delete_resource(urn=iam_role.urn)
    role_after_delete = memory_connector.read_resource(urn=iam_role.urn)
    role_policy_1_after_delete = memory_connector.read_resource(urn=iam_role_policies[0].urn)
    role_policy_2_after_delete = memory_connector.read_resource(urn=iam_role_policies[1].urn)

    assert role_before_delete.urn == iam_role.urn
    assert role_policy_1_before_delete.urn == iam_role_policies[0].urn
    assert role_policy_2_before_delete.urn == iam_role_policies[1].urn
    assert role_after_delete is None
    assert role_policy_1_after_delete is None
    assert role_policy_2_after_delete is None
| StarcoderdataPython |
11215861 | <filename>lab4/src/imageproc_cl.py
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import os
from pathlib import Path
import glob
import yaml
from datetime import date
from scipy import ndimage
import cv2 as cv
from itertools import combinations
from itertools import product
from imageprocessing import ImageProcessing
from roi import PlotCoordinates
import sys
import numpy
#https://realpython.com/python-command-line-arguments/
class ImageProcCL:
"""class for processing images without notebook"""
def define_camera_parameters(self):
camera_parameters = {
"blue-444": 444,
"blue": 475,
"green-531": 531,
"green": 560,
"red-650": 650,
"red": 668,
"red-edge-705": 705,
"red-edge": 717,
"red-edge-740": 740,
"nir": 842,
}
return camera_parameters
# define camera wavelengths and file image labels in a dict
# RedEdge-MX Dual Camera Imaging System bands
# channel names: blue-444, blue, green-531, green, red-650, red, red-edge-705, red-edge, red-edge-740, nir
# define notebook parameters
params = {
    "project_stub": "Potato_Fertilizer_Othello",
    "image_format": "*.tif",
    "data_acquisition_date": "Jun22_2020",
    "NDVI_threshold": 0.3,
    "data_import_path": Path.cwd() / "data" / "raw" / "Jun22_2020",
    "data_export_path": Path.cwd() / "data" / "processed" / "Jun22_2020",
    "plot_export_path": Path.cwd() / "image_export",
    "ground_truth_path": Path.cwd() / "data" / "raw" / "ground_truth.csv",
}
improc = ImageProcessing(params=params)
# import images with given parameters
# NOTE(review): `rededge_mx_band_wl` is not defined anywhere in this module —
# presumably the band-name -> wavelength map (cf. define_camera_parameters);
# confirm where it is supposed to come from before running this script.
field_image = np.stack(
    [
        ndimage.rotate(
            improc.load_img(channel_name), angle=182.4, reshape=True
        )
        for channel_name in rededge_mx_band_wl
    ]
)
# origin is upper left
y_limits = [2400, 9800]
x_limits = [1460, 3050]
# crop to desired size (channels, y axis, x axis)
field_image = field_image[
    :, y_limits[0] : y_limits[1], x_limits[0] : x_limits[1]
]
print(f"final field_image.shape: {field_image.shape}")
improc.show_image(field_image[9], size=(4, 5))
# # Test threshold values and create a mask
# 1. Calculate NDVI
# 2. Choose NDVI threshold
# 3. Create a boolean mask using the threshold
# 4. Apply the mask to the NDVI image.
# 5. Display masked NDVI image for verification
# 1. calculate NDVI
# NDVI = (nir - red) / (nir + red); channels 9 and 5 are nir and red.
ndvi = improc.calc_spec_idx((9, 5), field_image)
# 2. choose ndvi threshold
ndvi_th = 0.3
# 3. create a boolean mask of pixels > ndvi_th
mask = np.where(ndvi > ndvi_th, True, False)
# 4. apply mask to cropped image
ndvi_masked = np.multiply(ndvi, mask)
# 5. Display the images as one figure.
fig, axs = plt.subplots(1, 2)
axs[0].imshow(ndvi, cmap="gray")
axs[0].set_title("NDVI")
axs[1].imshow(ndvi_masked, cmap="gray")
axs[1].set_title(f"NDVI_masked, th:{ndvi_th}")
fig.tight_layout()
fig.set_figheight(8)
# Congratulations! You have a field_image with all ten bands, and a mask to segment the background out from the vegetation.
# save the field_image array to disk for use in other notebooks
# this will be an np.array object, which can then be loaded using np.load()
array_filename = os.path.join(
    params["data_export_path"],
    f"{params['project_stub']}_{params['data_acquisition_date']}_fieldimage",
)
mask_filename = os.path.join(
    params["data_export_path"],
    f"{params['project_stub']}_{params['data_acquisition_date']}_mask",
)
np.save(file=array_filename, arr=field_image)
np.save(file=mask_filename, arr=mask)
# # Define the NDSIs
# NDSIs are defined by the equation $\frac{a-b}{a+b}$, where a and b are different color bands taken from the available channels in the field image. There are 10 channels available on data taken from the camera utilized for this dataset, which would yield $\frac {10!}{(10-2)!}=90$ combinations. However, half of those would be the inverse of the other half: $\frac{a-b}{a+b}$ vs. $\frac{b-a}{b+a}$.
# The inversed NDSI features would be very highly autocorrelated with their partner, and would ultimately need to be removed from the dataset during dimensional reduction. Instead we choose to take only the unique permutations, resulting in $\frac {10!}{2!(10-2)!}=45$ NDSI features to calculate.
img_chan = {
    0: "blue",
    1: "blue_444",
    2: "green",
    3: "green_531",
    4: "red_650",
    5: "red",
    6: "red_edge_705",
    7: "red_edge",
    8: "red_edge_740",
    9: "nir",
}
# Unique unordered pairs of channel indices -> 45 NDSI features.
ndsi_list = [combo for combo in combinations(iter(img_chan), 2)]
ndsi_name_list = [
    f"{img_chan.get(combo[0])}-{img_chan.get(combo[1])}"
    for combo in ndsi_list
]
print(
    f"There are {len(ndsi_list)} unique combinations in contained in ndsi_list."
)
# create an image stack with a channel for each NDSI in our list
# We need to perform the calculations to generate a new image stack, with one channel for each NDSI. We can use the function calc_spec_idx_from_combo() to create the stack of np.arrays. It takes a tuple of two ints, with each tuple representing a combination of two image channels.
# # create ndsi stack
ndsi_stack = np.stack(
    [improc.calc_spec_idx(combo, field_image) for combo in ndsi_list]
)
print(f"ndsi_stack.shape={ndsi_stack.shape}")
# BUG FIX: this previously reused the "_mask" suffix, which made this
# np.save overwrite the NDVI mask written to mask_filename above.  Save
# the NDSI stack under its own distinct suffix instead.
ndsistack_filename = os.path.join(
    params["data_export_path"],
    f"{params['project_stub']}_{params['data_acquisition_date']}_ndsistack",
)
np.save(file=ndsistack_filename, arr=ndsi_stack)
# Calculate boundary of plots
# In the case of the potato, the lower left of the field is plot 0, with plot_id incrementing with range, then starting again from the bottom.
pc = PlotCoordinates()
# variables
plot_shape = (200, 492)  # w,h
edge_buf = 40  # buffer around edge of plot
roi_shape = pc.get_roi_shape(
    plot_shape, edge_buf
)  # smaller coordinates within plot_shape
num_ranges = 13
bottom_offset = 50  # offset from bottom of image
# set the x origins for the plots, and the y origins will be calculated
x_origins = [50, 355, 555, 850, 1050, 1350]
# y origins count upwards from the bottom edge of the image, one per range.
y_origins = [
    ndsi_stack.shape[1] - bottom_offset - plot_shape[1] * y
    for y in range(1, num_ranges + 1)
]
# use these values to calculate the plot coordinates
plot_coords = list(product(x_origins, y_origins))
roi_coords = [
    pc.get_roi_coord(plot_coord=plot_coord, edge_buf=edge_buf)
    for plot_coord in plot_coords
]
# now plot them for verification on the NDVI image
plot_id_list = pc.plot_boundaries(
    img=ndvi,
    plot_coords=plot_coords,
    roi_coords=roi_coords,
    plot_shape=plot_shape,
    roi_shape=roi_shape,
)
# A note on calculating mean values.
### You have to exclude the background!
# We need to be careful when we calculate our mean values for the roi. We can't include values from the background. To exclude these, we utilize the NDVI thresholded mask we created above. Just to see how it works, here is a simple test of the mask on a very small array. A mask is provided that excludes values less than 1. They are not included in the number of pixels when the average value is calculated, as seen below.
# Small worked example of masked averaging (demonstration only;
# roi_avg4/roi_avg3 are computed but intentionally unused below).
roi = np.array([0, 1, 2, 3])
roi_mask = np.where(roi >= 1, True, False)
roi_avg4 = (1 + 2 + 3) / 4
roi_avg3 = (1 + 2 + 3) / 3
print(f" sum(roi)/4 = {np.sum(roi)/4}, sum(roi)/3 = {sum(roi)/3}")
print(roi_mask)
print(f"np.mean(roi) = {np.mean(roi)}")
print(f"np.mean(roi, where=mask) = {np.mean(roi, where=roi_mask)}")
# calculate the NDSI means and export the dataframe as a *.csv
# We want to use this data in other notebooks for modeling, so lets combine it with our ground truth data. After it is joined on the plot id, we export it to the processed data path.
# Rows: one per ROI origin; columns: one per NDSI channel.
ndsi_means = np.stack(
    [
        [
            improc.ndsi_mean(
                arr=ndsi, origin=origin, shape=roi_shape, mask=mask
            )
            for ndsi in ndsi_stack
        ]
        for origin in roi_coords
    ]
)
df = pd.read_csv(params["ground_truth_path"])[["plot_id", "yield"]]
ndsi_df = pd.concat(
    [
        pd.DataFrame(plot_id_list, columns=["plot_id"]),
        pd.DataFrame(ndsi_means, columns=ndsi_name_list),
    ],
    axis=1,
)
# Join NDSI feature means onto the ground-truth yields by plot_id.
export_df = df.join(ndsi_df.set_index("plot_id"), on="plot_id")
export_df.to_csv(os.path.join(params["data_export_path"], "df.csv"))
# Deep Learning image export
# For deep learning, we need images. The exact format of those images is determined by the model and type of deep learning you're doing. I don't know that yet. So this notebook ends here.
def main():
    """CLI entry point: print the mean of each row of a CSV file.

    Usage: imageproc_cl.py <data.csv>

    Raises:
        IndexError: if no filename argument was supplied.
    """
    # sys.argv[0] (the script name) was previously bound to an unused local.
    filename = sys.argv[1]
    data = numpy.loadtxt(filename, delimiter=",")
    # axis=1 averages across columns, giving one mean per row.
    for row_mean in numpy.mean(data, axis=1):
        print(row_mean)


if __name__ == "__main__":
    main()
| StarcoderdataPython |
11255853 | <filename>autolavadox/urls.py
"""autolavadox URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from django.conf.urls.static import static
from exileui.admin import exileui
import settings
# URL routing table; Django tries these patterns in order.
urlpatterns = [
    # ExileUI admin site.
    url(r'^dashboard/', exileui.urls),
    # Third-party apps.
    url(r'^nested_admin/', include('nested_admin.urls')),
    url(r'^select2/', include('django_select2.urls')),
    # NOTE(review): this catch-all include sits before the app prefixes
    # below; that works in Django (resolution falls through when no
    # sub-pattern inside the include matches), but keep the ordering in
    # mind when adding routes to interface.urls.
    url(r'^', include('interface.urls')),
    # Project apps, each namespaced for reverse() lookups.
    url(r'^empleados/', include('empleados.urls', namespace='empleado')),
    url(r'^cliente/', include('cliente.urls', namespace='cliente')),
    url(r'^operacion/', include('operacion.urls', namespace='operacion')),
    url(r'^cierre/', include('cierre.urls', namespace='cierre')),
]
# Serve static and media files from Django itself only while DEBUG is on;
# in production the front-end web server should serve these instead.
if settings.DEBUG:
    urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| StarcoderdataPython |
22497 | <filename>python/paddle/fluid/tests/unittests/test_python_bf16_numpy_datatype.py<gh_stars>10-100
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
from paddle_bfloat import bfloat16
import unittest
class TestBF16DataType(unittest.TestCase):
    """Check that numpy matmul over bfloat16 arrays tracks the float32 result."""

    def test_matmul(self):
        lhs_bf16 = np.random.random((6, 7)).astype(bfloat16)
        rhs_bf16 = np.random.random((7, 8)).astype(bfloat16)
        product_bf16 = np.matmul(lhs_bf16, rhs_bf16)

        # Redo the same product after widening both operands to float32 and
        # compare the two results.
        lhs_fp32 = lhs_bf16.astype(np.float32)
        rhs_fp32 = rhs_bf16.astype(np.float32)
        product_fp32 = np.matmul(lhs_fp32, rhs_fp32)

        self.assertTrue(np.allclose(product_bf16, product_fp32))


if __name__ == "__main__":
    unittest.main()
| StarcoderdataPython |
347741 | <reponame>Aditya-AS/Question-Answering-System
import os
import sys
import inspect
# Make the project's lib/ directory importable.  If the required libraries
# are present under lib/ they take precedence over copies installed into
# Python's site-packages/, and work even when nothing was installed at all.
# Note: the lib_dir variable remains importable from here if needed.
_this_file = inspect.getfile(inspect.currentframe())
module_path = os.path.split(_this_file)[0]
lib_dir = os.path.realpath(os.path.abspath(os.path.join(module_path, "../lib")))
if lib_dir not in sys.path:
    # Prepend so lib/ wins over site-packages on import.
    sys.path.insert(0, lib_dir)
| StarcoderdataPython |
128535 | # -*- coding: utf-8 -*-
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Copyright 2016 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from snap_plugin.v1.namespace import Namespace
from snap_plugin.v1.namespace_element import NamespaceElement
from snap_plugin.v1.plugin_pb2 import Metric
def test_namespace():
    """Incremental and up-front Namespace construction must agree."""
    # Build a namespace one element at a time.
    incremental = Namespace(Metric().Namespace)
    incremental.add_static_element("runc").add_static_element("libcontainer")
    assert len(incremental) == 2
    assert incremental[0].value == "runc"
    assert incremental[1].value == "libcontainer"
    incremental.add_dynamic_element("container-id", "container id")
    incremental.add_static_element("status")
    assert len(incremental) == 4
    assert incremental[2].name == "container-id"
    assert incremental[2].description == "container id"
    assert incremental[3].value == "status"

    # Build the equivalent namespace in one constructor call.
    prebuilt = Namespace(
        Metric().Namespace,
        NamespaceElement.static_namespace_element("runc"),
        NamespaceElement.static_namespace_element("libcontainer"),
        NamespaceElement.dynamic_namespace_element("container-id",
                                                   "container id"),
        NamespaceElement.static_namespace_element("status"),
    )
    assert len(prebuilt) == 4
    assert prebuilt[0].value == "runc"
    assert prebuilt[1].value == "libcontainer"
    assert prebuilt[2].name == "container-id"
    assert prebuilt[2].description == "container id"
    assert prebuilt[3].value == "status"

    # Plain strings are accepted as static elements.
    from_strings = Namespace(
        Metric().Namespace,
        "runc",
        "libcontainer"
    )
    assert len(from_strings) == 2
    assert from_strings[0].value == "runc"
    assert from_strings[1].value == "libcontainer"
| StarcoderdataPython |
1997199 | <filename>hydrus/core/HydrusEncryption.py
import os
import stat
# Optional dependency: pyOpenSSL is only needed for self-signed certificate
# generation below.  OPENSSL_OK records whether the import succeeded.
try:
    import OpenSSL
    OPENSSL_OK = True
except Exception:
    # Previously a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt.  Exception still covers ImportError plus any
    # binding/initialization failures raised by pyOpenSSL's own imports.
    OPENSSL_OK = False
def GenerateOpenSSLCertAndKeyFile( cert_path, key_path ):
    """Generate a throwaway self-signed TLS certificate and RSA key.

    Writes PEM-encoded files to cert_path and key_path and marks both
    read-only afterwards.
    """

    def write_read_only( path, data ):
        # Write the PEM bytes, then mark the file read-only.
        with open( path, 'wb' ) as f:
            f.write( data )
        os.chmod( path, stat.S_IREAD )

    private_key = OpenSSL.crypto.PKey()
    private_key.generate_key( OpenSSL.crypto.TYPE_RSA, 2048 )

    # create a self-signed cert
    certificate = OpenSSL.crypto.X509()
    subject = certificate.get_subject()
    subject.countryName = 'HN'
    subject.organizationName = 'hydrus network'
    # Random OU so every generated certificate is distinguishable.
    subject.organizationalUnitName = os.urandom( 32 ).hex()
    certificate.set_serial_number( 1 )
    certificate.gmtime_adj_notBefore( 0 )
    certificate.gmtime_adj_notAfter( 10 * 365 * 24 * 60 * 60 )
    # Self-signed: issuer is the subject itself.
    certificate.set_issuer( certificate.get_subject() )
    certificate.set_pubkey( private_key )
    certificate.sign( private_key, 'sha256' )

    write_read_only( cert_path, OpenSSL.crypto.dump_certificate( OpenSSL.crypto.FILETYPE_PEM, certificate ) )
    write_read_only( key_path, OpenSSL.crypto.dump_privatekey( OpenSSL.crypto.FILETYPE_PEM, private_key ) )
'''
# old crypto code experiments
import Crypto.Cipher.AES
import Crypto.Cipher.PKCS1_OAEP
import Crypto.PublicKey.RSA
AES_KEY_LENGTH = 32
AES_BLOCK_SIZE = 16
def DecryptAES( aes_key, encrypted_message ):
iv = encrypted_message[:AES_BLOCK_SIZE]
enciphered_message = encrypted_message[AES_BLOCK_SIZE:]
aes_cipher = Crypto.Cipher.AES.new( aes_key, Crypto.Cipher.AES.MODE_CFB, iv )
padded_message = aes_cipher.decrypt( enciphered_message )
message = UnpadAES( padded_message )
return message
def DecryptAESStream( aes_key, stream_in, stream_out ):
iv = stream_in.read( AES_BLOCK_SIZE )
aes_cipher = Crypto.Cipher.AES.new( aes_key, Crypto.Cipher.AES.MODE_CFB, iv )
next_block = stream_in.read( HC.READ_BLOCK_SIZE )
while True:
block = next_block
next_block = stream_in.read( HC.READ_BLOCK_SIZE )
decrypted_block = aes_cipher.decrypt( block )
if next_block == '':
decrypted_block = UnpadAES( decrypted_block )
stream_out.write( decrypted_block )
if next_block == '':
break
def DecryptPKCS( private_key, encrypted_message ):
rsa_cipher = Crypto.Cipher.PKCS1_OAEP.new( private_key )
message = rsa_cipher.decrypt( encrypted_message )
return message
def DeserialiseRSAKey( text ):
return Crypto.PublicKey.RSA.importKey( text )
def EncryptAES( aes_key, message ):
iv = GenerateIV()
padded_message = PadAES( message )
aes_cipher = Crypto.Cipher.AES.new( aes_key, Crypto.Cipher.AES.MODE_CFB, iv )
enciphered_message = aes_cipher.encrypt( padded_message )
encrypted_message = iv + enciphered_message
return encrypted_message
def EncryptAESStream( aes_key, stream_in, stream_out ):
iv = GenerateIV()
stream_out.write( iv )
aes_cipher = Crypto.Cipher.AES.new( aes_key, Crypto.Cipher.AES.MODE_CFB, iv )
next_block = stream_in.read( HC.READ_BLOCK_SIZE )
while True:
block = next_block
next_block = stream_in.read( HC.READ_BLOCK_SIZE )
if next_block == '':
block = PadAES( block )
encrypted_block = aes_cipher.encrypt( block )
stream_out.write( encrypted_block )
if next_block == '':
break
def EncryptPKCS( public_key, message ):
rsa_cipher = Crypto.Cipher.PKCS1_OAEP.new( public_key )
# my understanding is that I don't have to manually pad this, cause OAEP does it for me.
encrypted_message = rsa_cipher.encrypt( message )
return encrypted_message
def GenerateAESKey():
return os.urandom( AES_KEY_LENGTH )
def GenerateIV():
return os.urandom( AES_BLOCK_SIZE )
def GenerateFilteredRandomBytes( byte_to_exclude, num_bytes ):
bytes = []
while len( bytes ) < num_bytes:
new_byte = os.urandom( 1 )
if new_byte != byte_to_exclude:
bytes.append( new_byte )
return ''.join( bytes )
def GenerateRSAKeyPair():
private_key = Crypto.PublicKey.RSA.generate( 2048 )
public_key = private_key.publickey()
return ( private_key, public_key )
def PadAES( message ):
block_size = AES_BLOCK_SIZE
# get last byte
# add random gumpf (except for last byte), then add last byte again
last_byte = message[-1]
num_bytes_to_add = block_size - ( len( message ) % block_size )
pad = GenerateFilteredRandomBytes( last_byte, num_bytes_to_add - 1 ) + last_byte
return message + pad
def SerialiseRSAKey( key ):
return key.exportKey()
def UnpadAES( message ):
block_size = AES_BLOCK_SIZE
# check last byte, jump back to previous instance of that byte
last_byte = message[-1]
i = 2
while True:
if message[-i] == last_byte: break
i += 1
index_of_correct_end = len( message ) - i
return message[:index_of_correct_end + 1]
'''
| StarcoderdataPython |
1706614 | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import random
import unittest
import numpy as np
from paddle.fluid.tests.unittests.op_test import _set_use_system_allocator
from typing import Optional
import paddle.fluid.compiler as compiler
SEED = 2021
ipu_compiler_ref: Optional[compiler.IPUCompiledProgram] = None
# Identity mapping from numpy dtype names to fluid dtype strings.  Written
# as a mapping (rather than returning dtype.name directly) so that it
# doubles as an allow-list: unsupported dtypes raise KeyError on lookup.
map_np_dtype_to_fluid_dtype = {
    name: name
    for name in (
        "bool",
        "int8",
        "uint8",
        "int32",
        "int64",
        "float16",
        "float32",
        "float64",
    )
}


def np_dtype_to_fluid_str(dtype: np.dtype) -> str:
    """Return the fluid dtype string for *dtype*; KeyError if unsupported."""
    return map_np_dtype_to_fluid_dtype[dtype.name]
class IPUOpTest(unittest.TestCase):
    """Base class for IPU op unit tests.

    Pins the numpy and stdlib random seeds for reproducibility and
    restores the saved RNG/allocator state when the test class finishes.
    """

    @classmethod
    def setUpClass(cls):
        # Snapshot global RNG state so tearDownClass can restore it.
        cls._np_rand_state = np.random.get_state()
        cls._py_rand_state = random.getstate()
        cls.SEED = SEED
        np.random.seed(cls.SEED)
        random.seed(cls.SEED)
        cls._use_system_allocator = _set_use_system_allocator(True)

    @classmethod
    def tearDownClass(cls):
        """Restore random seeds"""
        np.random.set_state(cls._np_rand_state)
        random.setstate(cls._py_rand_state)
        _set_use_system_allocator(cls._use_system_allocator)
        # unittest will not trigger IPUCompiledProgram.__del__ automatically,
        # so release the compiler explicitly if one was created.  (Replaced
        # the former `x is not None and x.clean()` and-trick statement with
        # an explicit `if` — same behavior, clearer intent.)
        global ipu_compiler_ref
        if ipu_compiler_ref is not None:
            ipu_compiler_ref.clean()

    def set_atol(self):
        # Default absolute tolerance used when comparing op outputs.
        self.atol = 1e-5

    def set_training(self):
        # Default: inference mode, single epoch.
        self.is_training = False
        self.epoch = 1
| StarcoderdataPython |
280988 | <filename>topaz/parser.py
from rpython.rlib.objectmodel import specialize
from rpython.rlib.rbigint import rbigint
from rply import ParserGenerator, Token, ParsingError
from rply.token import BaseBox, SourcePosition
from topaz import ast
from topaz.astcompiler import (
SymbolTable, BlockSymbolTable, SharedScopeSymbolTable)
from topaz.utils import regexp
class Parser(object):
    # Grammar-action helpers for the rply-generated LR parser.  Each
    # `new_*` method builds a topaz AST node and wraps it in a Box*
    # container so rply can pass values between productions.  NOTE: this
    # is RPython source, so the style is deliberately conservative.

    def __init__(self, lexer):
        # `lexer` supplies the token stream and owns the current symbol table.
        self.lexer = lexer
        # Stack of symbol tables temporarily set aside by hide_scope().
        self._hidden_scopes = []

    def parse(self):
        # Tokenize and run the LR parser; `state=self` exposes this Parser
        # instance to the production callbacks.
        l = LexerWrapper(self.lexer.tokenize())
        return self.parser.parse(l, state=self)

    def error(self, msg):
        # Build (not raise) a ParsingError for the caller to raise.
        # TODO: this should use a real SourcePosition
        return ParsingError(msg, SourcePosition(-1, -1, -1))

    def push_local_scope(self):
        # Enter a fresh local scope (new symbol table child).
        self.lexer.symtable = SymbolTable(self.lexer.symtable)

    def push_block_scope(self):
        # Enter a block scope (block symbol-table child).
        self.lexer.symtable = BlockSymbolTable(self.lexer.symtable)

    def push_shared_scope(self):
        # Enter a scope that shares state with its parent symbol table.
        self.lexer.symtable = SharedScopeSymbolTable(self.lexer.symtable)

    def save_and_pop_scope(self, node):
        # Record the finished scope's symbol table against its AST node,
        # then restore the parent scope.
        child_symtable = self.lexer.symtable
        child_symtable.parent_symtable.add_subscope(node, child_symtable)
        self.lexer.symtable = child_symtable.parent_symtable

    def hide_scope(self):
        # Temporarily step out to the parent scope; undone by unhide_scope().
        self._hidden_scopes.append(self.lexer.symtable)
        self.lexer.symtable = self.lexer.symtable.parent_symtable

    def unhide_scope(self):
        # Restore the most recently hidden scope.
        self.lexer.symtable = self._hidden_scopes.pop()

    def new_token(self, orig, name, value):
        # Clone `orig`'s source position onto a new token.
        return Token(name, value, orig.getsourcepos())

    def new_list(self, box=None):
        # Start a BoxASTList, optionally seeded with one element.
        if box is None:
            contents = []
        else:
            contents = [box.getast()]
        return self._new_list(contents)

    def _new_list(self, nodes):
        return BoxASTList(nodes)

    def append_to_list(self, box_list, box):
        # Non-destructive append: builds a fresh BoxASTList.
        base = box_list.getastlist() if box_list is not None else []
        return BoxASTList(base + [box.getast()])

    def new_stmt(self, box):
        return self._new_stmt(box.getast())

    def _new_stmt(self, node):
        # Wrap bare expressions so every statement node is a BaseStatement.
        if not isinstance(node, ast.BaseStatement):
            node = ast.Statement(node)
        return BoxAST(node)

    def new_assignable_list(self, boxes):
        return self._new_assignable_list([box.getast() for box in boxes])

    def _new_assignable_list(self, nodes):
        return BoxAssignableList(nodes)

    def append_to_assignable_list(self, box_list, box):
        return self._append_to_assignable_list(box_list.getvars(), [box.getast()])

    def _append_to_assignable_list(self, vars, nodes):
        return BoxAssignableList(vars + nodes)

    def new_augmented_assignment(self, op, lhs, rhs):
        # `op` arrives as e.g. "+="; strip the trailing "=" to get the
        # underlying operator.
        op = op.getstr()[:-1]
        target = lhs.getast()
        value = rhs.getast()
        # ||= and &&= get dedicated short-circuit assignment nodes.
        if op == "||":
            node = ast.OrEqual(target, value)
        elif op == "&&":
            node = ast.AndEqual(target, value)
        else:
            node = ast.AugmentedAssignment(op, target, value)
        return BoxAST(node)
    def assignable(self, box):
        # Validate an assignment target and register the write in the
        # current symbol table.  __FILE__ / __LINE__ are not assignable.
        node = box.getast()
        if isinstance(node, ast.File):
            raise self.error("Can't assign to __FILE__")
        elif isinstance(node, ast.Line):
            raise self.error("Can't assign to __LINE__")
        elif isinstance(node, ast.Variable):
            self.lexer.symtable.declare_write(node.name)
        return box

    def _arg_to_variable(self, node):
        # Convert a parsed argument into a readable Variable node; nested
        # destructuring targets (MultiAssignable) pass through unchanged.
        if isinstance(node, ast.Argument):
            self.lexer.symtable.declare_local(node.name)
            return ast.Variable(node.name, -1)
        elif isinstance(node, ast.MultiAssignable):
            return node
        else:
            raise SystemError

    def arg_to_variable(self, box):
        return self._arg_to_variable(box.getast())

    def args_to_variables(self, listbox):
        # Pre-sized list build (RPython-friendly) over the boxed arg list.
        astlist = listbox.getastlist()
        variables = [None] * len(astlist)
        for i, node in enumerate(astlist):
            variables[i] = self._arg_to_variable(node)
        return variables

    def new_binary_call(self, lhs, op, rhs):
        # Binary operators are ordinary method sends: a + b is a.+(b).
        return self._new_call(lhs.getast(), op, [rhs.getast()], None)

    def new_call(self, receiver, method, box_args):
        args = box_args.getcallargs() if box_args is not None else []
        block = box_args.getcallblock() if box_args is not None else None
        return self._new_call(receiver.getast(), method, args, block)

    def new_fcall(self, method, args):
        # "Function" call: no explicit receiver, so send to self.
        receiver = ast.Self(method.getsourcepos().lineno)
        return self._new_call(
            receiver, method,
            args.getcallargs() if args is not None else [],
            args.getcallblock() if args is not None else None,
        )

    def _new_call(self, receiver, method, args, block):
        return BoxAST(ast.Send(receiver, method.getstr(), args, block, method.getsourcepos().lineno))

    def new_and(self, lhs, rhs):
        return BoxAST(ast.And(lhs.getast(), rhs.getast()))

    def new_or(self, lhs, rhs):
        return BoxAST(ast.Or(lhs.getast(), rhs.getast()))
    def new_args(self, args=None, optargs=None, splat_arg=None, post_args=None, args_tail=None):
        # Assemble a full parameter list: required + optional args, the
        # splat (*rest), post-splat args, and the tail (keyword args,
        # **kwrest, &block).
        if args_tail:
            assert isinstance(args_tail, BoxArgsTail)
            kwargs = args_tail.getkwargsbox()
            kwrest = args_tail.getkwrestbox()
            block_arg = args_tail.getblockargbox()
        else:
            kwargs = None
            kwrest = None
            block_arg = None
        arguments = (
            (args.getastlist() if args is not None else []) +
            (optargs.getastlist() if optargs is not None else [])
        )
        return BoxArgs(
            arguments,
            splat_arg.getstr() if splat_arg is not None else None,
            post_args.getastlist() if post_args is not None else [],
            kwargs.getastlist() if kwargs is not None else [],
            kwrest.getstr() if kwrest is not None else None,
            block_arg.getstr() if block_arg is not None else None
        )

    def new_argstail(self, keywords=None, kwrest=None, block_arg=None):
        return BoxArgsTail(keywords, kwrest, block_arg)

    def new_call_args(self, box_arg=None, box_block=None):
        # Start a call argument list with an optional first arg and block.
        args = [box_arg.getast()] if box_arg else []
        block = box_block.getast() if box_block is not None else None
        return self._new_call_args(args, block)

    def _new_call_args(self, args, block):
        return BoxCallArgs(args, block)

    def call_arg_block_pass(self, box_args, box_block_pass):
        # Attach a &block-pass argument to a call argument list, if present.
        if box_block_pass is None:
            return box_args
        return self._new_call_args(box_args.getcallargs(), box_block_pass.getast())

    def append_call_arg(self, box_arg, box):
        return self._new_call_args(box_arg.getcallargs() + [box.getast()], box_arg.getcallblock())

    def _block_with_destructuring_arguments(self, lineno, args, postargs):
        # Multi-Assignables are destructuring arguments
        # Replace each destructuring parameter with a synthetic positional
        # argument (named after its index) plus a prologue statement that
        # unpacks it.  NOTE: mutates `args` and `postargs` in place and
        # returns the prologue statements.
        extra_stmts = []
        idx = 0
        for arg in args:
            if isinstance(arg, ast.MultiAssignable):
                new_arg = ast.Argument(str(idx))
                asgn = ast.MultiAssignment(arg, ast.Variable(new_arg.name, lineno))
                args[idx] = new_arg
                self.lexer.symtable.declare_argument(new_arg.name)
                extra_stmts.append(ast.Statement(asgn))
            idx += 1
        # Post-splat args continue the synthetic numbering after `args`.
        offset = idx
        idx = 0
        for arg in postargs:
            if isinstance(arg, ast.MultiAssignable):
                new_arg = ast.Argument(str(idx + offset))
                asgn = ast.MultiAssignment(arg, ast.Variable(new_arg.name, lineno))
                postargs[idx] = new_arg
                self.lexer.symtable.declare_argument(new_arg.name)
                extra_stmts.append(ast.Statement(asgn))
            idx += 1
        # Prologue statements are emitted in reverse collection order.
        extra_stmts.reverse()
        return extra_stmts
    def new_function(self, lineno, parent, fname, params, bodybox):
        # Build a method definition (`def fname` or `def parent.fname`).
        args = params.getargs() if params is not None else []
        splat = params.getsplatarg() if params is not None else None
        postargs = params.getpostargs() if params is not None else []
        kwargs = params.getkwargs() if params is not None else []
        kwrest = params.getkwrestarg() if params is not None else None
        block_arg = params.getblockarg() if params is not None else None
        extra_stmts = self._block_with_destructuring_arguments(lineno, args, postargs)
        body = bodybox.getast()
        # Prepend the destructuring prologue, wrapping the body into a
        # Block of statements as needed.
        if extra_stmts:
            if isinstance(body, ast.Nil):
                body = ast.Block(extra_stmts)
            elif isinstance(body, ast.Block):
                body = ast.Block(extra_stmts + body.stmts)
            elif isinstance(body, ast.BaseStatement):
                body = ast.Block(extra_stmts + [body])
            else:
                body = ast.Block(extra_stmts + [ast.Statement(body)])
        return BoxAST(ast.Function(
            lineno,
            parent.getast() if parent is not None else None,
            fname.getstr(),
            args,
            splat,
            postargs,
            kwargs,
            kwrest,
            block_arg,
            body
        ))

    def new_send_block(self, lineno, params, body):
        # Build the AST for a literal block (do ... end / { ... }).
        stmts = body.getastlist() if body is not None else []
        args = params.getargs() if params is not None else []
        splat = params.getsplatarg() if params is not None else None
        postargs = params.getpostargs() if params is not None else []
        kwargs = params.getkwargs() if params is not None else []
        kwrest = params.getkwrestarg() if params is not None else None
        block_arg = params.getblockarg() if params is not None else None
        extra_stmts = self._block_with_destructuring_arguments(lineno, args, postargs)
        stmts = extra_stmts + stmts
        # An empty block body becomes Nil.
        block = ast.Block(stmts) if stmts else ast.Nil()
        return BoxAST(ast.SendBlock(
            lineno,
            args,
            splat,
            postargs,
            kwargs,
            kwrest,
            block_arg,
            block
        ))

    def combine_send_block(self, send_box, block_box):
        # Attach a literal block to a previously-built send.  The send may
        # be wrapped in Break/Next/Return; unwrap, attach, then re-wrap.
        sendast = send_box.getast()
        if isinstance(sendast, ast.BaseSend):
            send = send_box.getast(ast.BaseSend)
        elif isinstance(sendast, ast.Break):
            send = BoxAST(sendast.expr).getast(ast.BaseSend)
        elif isinstance(sendast, ast.Next):
            send = BoxAST(sendast.expr).getast(ast.BaseSend)
        elif isinstance(sendast, ast.Return):
            send = BoxAST(sendast.expr).getast(ast.BaseSend)
        else:
            raise SystemError
        block = block_box.getast()
        # A call cannot have both an &block argument and a literal block.
        if send.block_arg is not None:
            raise self.error("Both block arg and actual block given.")
        # Rebuild the send with the block attached (nodes are immutable).
        if isinstance(send, ast.Send):
            node = ast.Send(
                send.receiver,
                send.method,
                send.args,
                block,
                send.lineno
            )
        elif isinstance(send, ast.Super):
            node = ast.Super(
                send.args,
                block,
                send.lineno,
            )
        else:
            raise SystemError
        # Re-apply the original Break/Next/Return wrapper, if any.
        if isinstance(sendast, ast.Break):
            return BoxAST(ast.Break(node))
        elif isinstance(sendast, ast.Next):
            return BoxAST(ast.Next(node))
        elif isinstance(sendast, ast.Return):
            return BoxAST(ast.Return(node))
        else:
            return BoxAST(node)
    def _array_or_node(self, box):
        # A single value is used as-is; several become an implicit Array
        # (e.g. `return 1, 2`).
        args = box.getcallargs()
        if len(args) == 1:
            [node] = args
        else:
            node = ast.Array(args)
        return node

    def new_return(self, box):
        return BoxAST(ast.Return(self._array_or_node(box)))

    def new_next(self, box):
        return BoxAST(ast.Next(self._array_or_node(box)))

    def new_break(self, box):
        return BoxAST(ast.Break(self._array_or_node(box)))

    def new_super(self, args, token):
        return BoxAST(ast.Super(
            args.getcallargs() if args is not None else [],
            args.getcallblock() if args is not None else None,
            token.getsourcepos().lineno
        ))

    def new_splat(self, box):
        return BoxAST(ast.Splat(box.getast()))

    def new_kw_arg(self, box, default_value):
        # Keyword argument, optionally with a default expression.
        return BoxAST(ast.Argument(
            box.getstr(),
            default_value.getast() if default_value else None
        ))

    def new_colon2(self, box, constant):
        # Scoped constant lookup: Foo::Bar.
        return BoxAST(ast.LookupConstant(box.getast(), constant.getstr(), constant.getsourcepos().lineno))

    def new_colon3(self, constant):
        # Top-level constant lookup (::Bar); receiver None marks top level.
        return BoxAST(ast.LookupConstant(None, constant.getstr(), constant.getsourcepos().lineno))

    def new_defined(self, box, token):
        return BoxAST(ast.Defined(box.getast(), token.getsourcepos().lineno))

    def new_symbol(self, token):
        return BoxAST(ast.ConstantSymbol(token.getstr()))

    def new_hash(self, box):
        # Build a Hash literal.  `raw_items` alternates key, value, key,
        # value, ... except for HashSplat (**h) entries, which occupy one
        # slot and are merged in at their position via a `merge` send.
        items = []
        raw_items = box.getastlist()
        i = 0
        current_hash = None
        while i < len(raw_items):
            node = raw_items[i]
            if isinstance(node, ast.HashSplat):
                # DSTAR element, its a `to_hash' send, we need to merge it at
                # this position
                if not current_hash:
                    current_hash = ast.Hash(items)
                current_hash = ast.Send(
                    current_hash,
                    "merge",
                    [node],
                    None,
                    node.lineno
                )
                items = []
                i += 1
            else:
                items.append((node, raw_items[i + 1]))
                i += 2
        # Merge any trailing key/value pairs collected after the last splat.
        if current_hash and len(items) > 0:
            current_hash = ast.Send(
                current_hash,
                "merge",
                [ast.Hash(items)],
                None,
                current_hash.lineno
            )
        elif current_hash is None:
            current_hash = ast.Hash(items)
        return BoxAST(current_hash)

    def new_global(self, box):
        return BoxAST(ast.Global(box.getstr()))

    def new_instance_var(self, box):
        return BoxAST(ast.InstanceVariable(box.getstr()))

    def new_class_var(self, box):
        return BoxAST(ast.ClassVariable(box.getstr(), box.getsourcepos().lineno))
def concat_literals(self, head, tail):
if head is None:
return tail
if tail is None:
return head
dynamic = False
const_str = ""
dyn_str_components = []
for part in [head.getast(), tail.getast()]:
if not dynamic:
if isinstance(part, ast.ConstantString):
const_str += part.strvalue
else:
dynamic = True
if const_str:
dyn_str_components.append(ast.ConstantString(const_str))
if isinstance(part, ast.DynamicString):
dyn_str_components.extend(part.strvalues)
else:
dyn_str_components.append(part)
else:
if isinstance(part, ast.DynamicString):
dyn_str_components.extend(part.strvalues)
else:
dyn_str_components.append(part)
if dynamic:
node = ast.DynamicString(dyn_str_components)
else:
node = ast.ConstantString(const_str)
return BoxAST(node)
def _parse_numeric_string(self, s):
ls = list(s)
ls.pop()
s = "".join(ls)
if ("X" in s) or ("O" in s) or ("B" in s):
return self._parse_int(s)
else:
return ast.ConstantFloat(float(s))
def _parse_rational(self, s, lineno):
return ast.Send(
ast.LookupConstant(None, "Kernel", lineno),
"Rational",
[self._parse_numeric_string(s)],
None,
lineno
)
    def _parse_imaginary(self, s, lineno):
        # An imaginary literal (suffix ``i``) compiles to Kernel.Complex(n).
        return ast.Send(
            ast.LookupConstant(None, "Kernel", lineno),
            "Complex",
            [self._parse_numeric_string(s)],
            None,
            lineno
        )
    def _parse_int(self, s):
        """Parse an integer literal, honoring an upcased 0X/0O/0B prefix.

        Digits are accumulated into an rbigint so arbitrarily large
        literals work; the result is demoted to a machine-int node when it
        fits, otherwise kept as a bignum node.
        """
        if "X" in s:
            base = 16
        elif "O" in s:
            base = 8
        elif "B" in s:
            base = 2
        else:
            base = 10
        if base != 10:
            # Strip off the leading 0[xob]
            s = s[2:]
        val = rbigint()
        i = 0
        while i < len(s):
            c = ord(s[i])
            if ord("a") <= c <= ord("z"):
                digit = c - ord("a") + 10
            elif ord("A") <= c <= ord("Z"):
                digit = c - ord("A") + 10
            elif ord("0") <= c <= ord("9"):
                digit = c - ord("0")
            else:
                # Any non-alphanumeric character ends the scan.
                break
            if digit >= base:
                # Digit out of range for the radix: stop, keep what we have.
                break
            val = val.mul(rbigint.fromint(base)).add(rbigint.fromint(digit))
            i += 1
        try:
            return ast.ConstantInt(val.toint())
        except OverflowError:
            # Too large for a machine int: emit a bignum constant instead.
            return ast.ConstantBigInt(val)
pg = ParserGenerator([
"CLASS", "MODULE", "DEF", "UNDEF", "BEGIN", "RESCUE", "ENSURE", "END",
"IF", "UNLESS", "THEN", "ELSIF", "ELSE", "CASE", "WHEN", "WHILE",
"UNTIL", "FOR", "BREAK", "NEXT", "REDO", "RETRY", "IN", "DO",
"DO_COND", "DO_BLOCK", "RETURN", "YIELD", "SUPER", "SELF", "NIL",
"TRUE", "FALSE", "AND", "OR", "NOT", "IF_MOD", "UNLESS_MOD",
"WHILE_MOD", "UNTIL_MOD", "RESCUE_MOD", "ALIAS", "DEFINED",
"lBEGIN", "lEND", "__LINE__", "__FILE__", "__ENCODING__", "DO_LAMBDA",
"IDENTIFIER", "FID", "GVAR", "IVAR", "CONSTANT", "CVAR", "LABEL",
"CHAR", "UPLUS", "UMINUS", "UMINUS_NUM", "POW", "CMP", "EQ", "EQQ",
"NEQ", "GEQ", "LEQ", "ANDOP", "OROP", "MATCH", "NMATCH", "DOT", "DOT2",
"DOT3", "AREF", "ASET", "LSHFT", "RSHFT", "COLON2", "COLON3", "ANDDOT",
"OP_ASGN", "ASSOC", "LPAREN", "LPAREN2", "RPAREN", "LPAREN_ARG",
"LBRACK", "RBRACK", "LBRACE", "LBRACE_ARG", "STAR", "STAR2", "DSTAR",
"AMPER", "AMPER2", "TILDE", "PERCENT", "DIVIDE", "PLUS", "MINUS",
"LT", "GT", "PIPE", "BANG", "CARET", "LCURLY", "RCURLY", "BACK_REF2",
"SYMBEG", "STRING_BEG", "XSTRING_BEG", "REGEXP_BEG", "WORDS_BEG",
"QWORDS_BEG", "STRING_DBEG", "STRING_DVAR", "STRING_END", "LAMBDA",
"LAMBEG", "NTH_REF", "BACK_REF", "STRING_CONTENT", "INTEGER", "FLOAT",
"REGEXP_END", "SYMBOLS_BEG", "QSYMBOLS_BEG", "RATIONAL", "IMAGINARY",
"LABEL_END",
"LITERAL_EQUAL", "LITERAL_COLON", "LITERAL_COMMA", "LITERAL_LBRACKET",
"LITERAL_SEMICOLON", "LITERAL_QUESTION_MARK", "LITERAL_SPACE",
"LITERAL_NEWLINE",
], precedence=[
("nonassoc", ["LOWEST"]),
("nonassoc", ["LBRACE_ARG"]),
("nonassoc", ["IF_MOD", "UNLESS_MOD", "WHILE_MOD", "UNTIL_MOD"]),
("left", ["OR", "AND"]),
("right", ["NOT"]),
("nonassoc", ["DEFINED"]),
("right", ["LITERAL_EQUAL", "OP_ASGN"]),
("left", ["RESCUE_MOD"]),
("right", ["LITERAL_QUESTION_MARK", "LITERAL_COLON"]),
("nonassoc", ["DOT2", "DOT3"]),
("left", ["OROP"]),
("left", ["ANDOP"]),
("nonassoc", ["CMP", "EQ", "EQQ", "NEQ", "MATCH", "NMATCH"]),
("left", ["GT", "GEQ", "LT", "LEQ"]),
("left", ["PIPE", "CARET"]),
("left", ["AMPER2"]),
("left", ["LSHFT", "RSHFT"]),
("left", ["PLUS", "MINUS"]),
("left", ["STAR2", "DIVIDE", "PERCENT"]),
("right", ["UMINUS_NUM", "UMINUS"]),
("right", ["POW"]),
("right", ["BANG", "TILDE", "UPLUS"]),
], cache_id="topaz")
    def error_handler(state, token):
        # rply error callback: surface any parse failure as a ParsingError
        # carrying the offending token's type, text and source position.
        raise ParsingError(
            "Token(%s, %s)" % (token.gettokentype(), token.getstr()),
            token.getsourcepos()
        )
    pg.error(error_handler)
@pg.production("program : top_compstmt")
def program(self, p):
"""
program : {
lexer.setState(LexState.EXPR_BEG);
support.initTopLocalVariables();
} top_compstmt {
// ENEBO: Removed !compile_for_eval which probably is to reduce warnings
if ($2 != null) {
/* last expression should not be void */
if ($2 instanceof BlockNode) {
support.checkUselessStatement($<BlockNode>2.getLast());
} else {
support.checkUselessStatement($2);
}
}
support.getResult().setAST(support.addRootNode($2, support.getPosition($2)));
}
"""
# TODO: sym table setup, and useless statement
return BoxAST(ast.Main(ast.Block(p[0].getastlist()) if p[0] is not None else ast.Nil()))
@pg.production("top_compstmt : top_stmts opt_terms")
def top_compstmt(self, p):
return p[0]
@pg.production("top_stmts : none")
def top_stmts_none(self, p):
return p[0]
@pg.production("top_stmts : top_stmt")
def top_stmts_top_stmt(self, p):
return self.new_list(p[0])
@pg.production("top_stmts : top_stmts terms top_stmt")
def top_stmts(self, p):
return self.append_to_list(p[0], p[2])
@pg.production("top_stmts : error top_stmt")
def top_stmts_error(self, p):
return p[1]
@pg.production("top_stmt : stmt")
def top_stmt_stmt(self, p):
return p[0]
@pg.production("top_stmt : lBEGIN LCURLY top_compstmt RCURLY")
def top_stmt_lbegin(self, p):
"""
top_stmt : stmt
| klBEGIN {
if (support.isInDef() || support.isInSingle()) {
support.yyerror("BEGIN in method");
}
} tLCURLY top_compstmt tRCURLY {
support.getResult().addBeginNode(new PreExe19Node($1.getPosition(), support.getCurrentScope(), $4));
$$ = null;
}
"""
raise NotImplementedError(p)
@pg.production("bodystmt : compstmt opt_rescue opt_else opt_ensure")
def bodystmt(self, p):
body = ast.Block(p[0].getastlist()) if p[0] is not None else ast.Nil()
if p[1] is not None:
except_handlers = p[1].getastlist()
body = ast.TryExcept(body, except_handlers, ast.Nil())
elif p[2] is not None:
body = ast.TryExcept(body, [], p[2].getast())
if p[3] is not None:
body = ast.TryFinally(body, ast.Block(p[3].getastlist()))
return BoxAST(body)
@pg.production("compstmt : stmts opt_terms")
def compstmt(self, p):
"""
compstmt : stmts opt_terms {
if ($1 instanceof BlockNode) {
support.checkUselessStatements($<BlockNode>1);
}
$$ = $1;
}
"""
# TODO: checkUslessStatements?
return p[0]
@pg.production("stmts : none")
def stmts_none(self, p):
return p[0]
@pg.production("stmts : stmt_or_begin")
def stmts_stmt(self, p):
return self.new_list(p[0])
@pg.production("stmts : stmts terms stmt_or_begin")
def stmts(self, p):
return self.append_to_list(p[0], p[2])
@pg.production("stmts : error stmt")
def stmts_error(self, p):
return p[1]
@pg.production("stmt_or_begin : stmt")
def stmt_or_begin(self, p):
return p[0]
@pg.production("stmt_or_begin : lBEGIN LCURLY top_compstmt RCURLY")
def stmt_or_begin_curly(self, p):
raise NotImplementedError
@pg.production("stmt : ALIAS fitem alias_after_fitem fitem")
def stmt_alias_fitem(self, p):
return BoxAST(ast.Alias(p[1].getast(), p[3].getast(), p[0].getsourcepos().lineno))
@pg.production("alias_after_fitem : ")
def alias_after_fitem(self, p):
self.lexer.state = self.lexer.EXPR_FNAME
@pg.production("stmt : ALIAS GVAR GVAR")
def stmt_alias_gvar(self, p):
"""
kALIAS tGVAR tGVAR {
$$ = new VAliasNode($1.getPosition(), (String) $2.getValue(), (String) $3.getValue());
}
"""
raise NotImplementedError(p)
@pg.production("stmt : ALIAS GVAR BACK_REF")
def stmt_alias_gvar_backref(self, p):
"""
kALIAS tGVAR tBACK_REF {
$$ = new VAliasNode($1.getPosition(), (String) $2.getValue(), "$" + $<BackRefNode>3.getType());
}
"""
raise NotImplementedError(p)
@pg.production("stmt : ALIAS GVAR NTH_REF")
def stmt_alias_gvar_nref(self, p):
"""
kALIAS tGVAR tNTH_REF {
support.yyerror("can't make alias for the number variables");
}
"""
raise NotImplementedError(p)
@pg.production("stmt : UNDEF undef_list")
def stmt_undef(self, p):
return BoxAST(ast.Undef(p[1].getastlist(), p[0].getsourcepos().lineno))
@pg.production("stmt : stmt IF_MOD expr_value")
def stmt_ifmod(self, p):
return self._new_stmt(ast.If(
p[2].getast(),
ast.Block([p[0].getast()]),
ast.Nil(),
))
@pg.production("stmt : stmt UNLESS_MOD expr_value")
def stmt_unlessmod(self, p):
return self._new_stmt(ast.If(
p[2].getast(),
ast.Nil(),
ast.Block([p[0].getast()]),
))
@pg.production("stmt : BEGIN bodystmt END WHILE_MOD expr_value")
def stmt_while_mod(self, p):
return self._new_stmt(ast.While(
p[4].getast(),
p[1].getast(),
post_check=True
))
@pg.production("stmt : stmt WHILE_MOD expr_value")
def stmt_while_mod(self, p):
return self._new_stmt(ast.While(
p[2].getast(),
ast.Block([p[0].getast()])
))
@pg.production("stmt : BEGIN bodystmt END UNTIL_MOD expr_value")
def stmt_until_mod(self, p):
return self._new_stmt(ast.Until(
p[4].getast(),
p[1].getast(),
post_check=True
))
@pg.production("stmt : stmt UNTIL_MOD expr_value")
def stmt_until_mod(self, p):
return self._new_stmt(ast.Until(
p[2].getast(),
ast.Block([p[0].getast()])
))
@pg.production("stmt : stmt RESCUE_MOD stmt")
def stmt_rescue_mod(self, p):
lineno = p[1].getsourcepos().lineno
return self._new_stmt(ast.TryExcept(
ast.Block([p[0].getast()]),
[
ast.ExceptHandler(
[ast.LookupConstant(ast.Scope(lineno), "StandardError", lineno)],
None,
ast.Block([p[2].getast()]),
)
],
ast.Nil()
))
@pg.production("stmt : lEND LCURLY compstmt RCURLY")
def stmt_lend(self, p):
"""
klEND tLCURLY compstmt tRCURLY {
if (support.isInDef() || support.isInSingle()) {
support.warn(ID.END_IN_METHOD, $1.getPosition(), "END in method; use at_exit");
}
$$ = new PostExeNode($1.getPosition(), $3);
}
"""
raise NotImplementedError(p)
@pg.production("stmt : command_asgn")
def stmt_command_assign(self, p):
return self.new_stmt(p[0])
@pg.production("stmt : mlhs LITERAL_EQUAL command_call")
def stmt_mlhs_equal_command_call(self, p):
return self._new_stmt(ast.MultiAssignment(
p[0].getassignment(),
p[2].getast()
))
@pg.production("stmt : lhs LITERAL_EQUAL mrhs")
def stmt_lhs_equal_mrhs(self, p):
return self._new_stmt(ast.Assignment(p[0].getast(), ast.Array(p[2].getastlist())))
@pg.production("stmt : mlhs LITERAL_EQUAL mrhs_arg")
def stmt_mlhs_equal_arg_value(self, p):
return self._new_stmt(ast.MultiAssignment(
p[0].getassignment(),
p[2].getast()
))
@pg.production("stmt : expr")
def stmt_expr(self, p):
return self.new_stmt(p[0])
@pg.production("command_asgn : lhs LITERAL_EQUAL command_rhs")
def command_asgn_lhs_equal_command_call(self, p):
return BoxAST(ast.Assignment(
p[0].getast(),
p[2].getast()
))
@pg.production("command_asgn : var_lhs OP_ASGN command_rhs")
def command_asgn_var(self, p):
return self.new_augmented_assignment(p[1], p[0], p[2])
@pg.production("command_asgn : primary_value LITERAL_LBRACKET opt_call_args rbracket OP_ASGN command_rhs")
def command_asgn_subscript_op_asgn_command_call(self, p):
raise NotImplementedError(p)
@pg.production("command_asgn : primary_value call_op IDENTIFIER OP_ASGN command_rhs")
def command_asgn_method_op_asgn_command_call(self, p):
raise NotImplementedError(p)
@pg.production("command_asgn : primary_value call_op CONSTANT OP_ASGN command_rhs")
def command_asgnmethod_constant_op_asgn_command_call(self, p):
raise NotImplementedError(p)
@pg.production("command_asgn : primary_value COLON2 CONSTANT OP_ASGN command_rhs")
def command_asgnprimary_value_colon_constant_op_asgn_command_call(self, p):
raise NotImplementedError(p)
@pg.production("command_asgn : primary_value COLON2 IDENTIFIER OP_ASGN command_rhs")
def command_asgnconstant_op_asgn_command_call(self, p):
raise NotImplementedError(p)
@pg.production("command_asgn : backref OP_ASGN command_rhs")
def command_asgnbackref_op_asgn_command_call(self, p):
raise NotImplementedError(p)
self.backref_assign_error(p[0])
@pg.production("command_rhs : command_call", precedence="OP_ASGN")
def command_rhs_call(self, p):
return p[0]
@pg.production("command_rhs : command_call RESCUE_MOD stmt")
def command_rhs_call_rescue(self, p):
lineno = p[1].getsourcepos().lineno
return self._new_stmt(ast.TryExcept(
ast.Block([p[0].getast()]),
[
ast.ExceptHandler(
[ast.LookupConstant(ast.Scope(lineno), "StandardError", lineno)],
None,
ast.Block([p[2].getast()]),
)
],
ast.Nil()
))
@pg.production("command_rhs : command_asgn")
def command_rhs_asgn(self, p):
return p[0]
@pg.production("expr : command_call")
def expr_command_call(self, p):
return p[0]
@pg.production("expr : expr AND expr")
def expr_and(self, p):
return self.new_and(p[0], p[2])
@pg.production("expr : expr OR expr")
def expr_or(self, p):
return self.new_or(p[0], p[2])
@pg.production("expr : NOT opt_nl expr")
def expr_not(self, p):
return self.new_call(p[2], self.new_token(p[0], "!", "!"), None)
@pg.production("expr : BANG command_call")
def expr_bang_command_call(self, p):
return self.new_call(p[1], self.new_token(p[0], "!", "!"), None)
@pg.production("expr : arg")
def expr_arg(self, p):
return p[0]
@pg.production("expr_value : expr")
def expr_value(self, p):
"""
expr {
support.checkExpression($1);
}
"""
# TODO: checkExpression?
return p[0]
@pg.production("command_call : command")
def command_call_command(self, p):
return p[0]
@pg.production("command_call : block_command")
def command_call_block_command(self, p):
return p[0]
@pg.production("block_command : block_call")
def block_command_block_call(self, p):
return p[0]
@pg.production("block_command : block_call call_op2 operation2 command_args")
def block_command_dot(self, p):
return self.new_call(p[0], p[2], p[3])
@pg.production("cmd_brace_block : LBRACE_ARG brace_body RCURLY")
def cmd_brace_block(self, p):
box = self.new_send_block(p[0].getsourcepos().lineno, p[1].getblockparam(), p[1].getblockstmts())
self.save_and_pop_scope(box.getast())
return box
@pg.production("fcall : operation")
def fcall(self, p):
return p[0]
@pg.production("command : fcall command_args", precedence="LOWEST")
def command_operation_command_args(self, p):
return self.new_fcall(p[0], p[1])
@pg.production("command : fcall command_args cmd_brace_block")
def command_operation_command_args_cmd_brace_block(self, p):
return self.combine_send_block(self.new_fcall(p[0], p[1]), p[2])
@pg.production("command : primary_value call_op operation2 command_args", precedence="LOWEST")
def command_method_call_args(self, p):
return self.new_call(p[0], p[2], p[3])
@pg.production("command : primary_value call_op operation2 command_args cmd_brace_block")
def command_method_call_args_brace_block(self, p):
return self.combine_send_block(self.new_call(p[0], p[2], p[3]), p[4])
@pg.production("command : primary_value COLON2 operation2 command_args", precedence="LOWEST")
def command_colon_call_args(self, p):
return self.new_call(p[0], p[2], p[3])
@pg.production("command : primary_value COLON2 operation2 command_args cmd_brace_block")
def command_colon_call_args_brace_block(self, p):
return self.combine_send_block(self.new_call(p[0], p[2], p[3]), p[4])
@pg.production("command : SUPER command_args")
def command_super(self, p):
return self.new_super(p[1], p[0])
@pg.production("command : YIELD command_args")
def command_yield(self, p):
return BoxAST(ast.Yield(p[1].getcallargs(), p[0].getsourcepos().lineno))
@pg.production("command : RETURN call_args")
def command_call_return(self, p):
return self.new_return(p[1])
@pg.production("command : BREAK call_args")
def command_call_break(self, p):
return self.new_break(p[1])
@pg.production("command : NEXT call_args")
def command_call_next(self, p):
return self.new_next(p[1])
@pg.production("mlhs : mlhs_basic")
def mlhs(self, p):
return p[0]
@pg.production("mlhs : LPAREN mlhs_inner rparen")
def mlhs_paren(self, p):
return p[1]
@pg.production("mlhs_inner : mlhs_basic")
def mlhs_inner(self, p):
return p[0]
@pg.production("mlhs_inner : LPAREN mlhs_inner rparen")
def mlhs_inner_paren(self, p):
return p[0]
@pg.production("mlhs_basic : mlhs_head")
def mlhs_basic_mlhs_head(self, p):
return p[0]
@pg.production("mlhs_basic : mlhs_head mlhs_item")
def mlhs_basic_mlhs_head_mlhs_item(self, p):
return self.append_to_assignable_list(p[0], p[1])
@pg.production("mlhs_basic : mlhs_head STAR mlhs_node")
def mlhs_basic_mlhs_head_star_node(self, p):
return self.append_to_assignable_list(p[0], self.new_splat(p[2]))
@pg.production("mlhs_basic : mlhs_head STAR mlhs_node LITERAL_COMMA mlhs_post")
def mlhs_basic_mlhs_head_star_node_comma_post(self, p):
box = self.append_to_assignable_list(p[0], self.new_splat(p[2]))
return self._append_to_assignable_list(box.getvars(), p[4].getastlist())
@pg.production("mlhs_basic : mlhs_head STAR")
def mlhs_basic_mlhs_head_star(self, p):
return self._append_to_assignable_list(p[0].getvars(), [ast.Splat(None)])
@pg.production("mlhs_basic : mlhs_head STAR LITERAL_COMMA mlhs_post")
def mlhs_basic_mlhs_head_star_comma_post(self, p):
return self._append_to_assignable_list(p[0].getvars(), [ast.Splat(None)] + p[3].getastlist())
@pg.production("mlhs_basic : STAR mlhs_node")
def mlhs_basic_star_mlhs_node(self, p):
return self.new_assignable_list([self.new_splat(p[1])])
@pg.production("mlhs_basic : STAR mlhs_node LITERAL_COMMA mlhs_post")
def mlhs_basic_star_mlhs_node_comma_post(self, p):
return self._new_assignable_list([self.new_splat(p[1]).getast()] + p[3].getastlist())
@pg.production("mlhs_basic : STAR")
def mlhs_basic_star(self, p):
return self._new_assignable_list([ast.Splat(None)])
@pg.production("mlhs_basic : STAR LITERAL_COMMA mlhs_post")
def mlhs_basic_star_comma_post(self, p):
return self._new_assignable_list([ast.Splat(None)] + p[2].getastlist())
@pg.production("mlhs_item : mlhs_node")
def mlhs_item_node(self, p):
return p[0]
@pg.production("mlhs_item : LPAREN mlhs_inner rparen")
def mlhs_item_paren(self, p):
return BoxAST(p[1].getassignment())
@pg.production("mlhs_head : mlhs_item LITERAL_COMMA")
def mlhs_head_item(self, p):
return self.new_assignable_list([p[0]])
@pg.production("mlhs_head : mlhs_head mlhs_item LITERAL_COMMA")
def mlhs_head_head_item(self, p):
return self.append_to_assignable_list(p[0], p[1])
@pg.production("mlhs_post : mlhs_item")
def mlhs_post_item(self, p):
return self.new_list(p[0])
@pg.production("mlhs_post : mlhs_post LITERAL_COMMA mlhs_item")
def mlhs_post_post_item(self, p):
return self.append_to_list(p[0], p[2])
@pg.production("mlhs_node : keyword_variable")
@pg.production("mlhs_node : user_variable")
def mlhs_node_variable(self, p):
return self.assignable(p[0])
@pg.production("mlhs_node : primary_value LITERAL_LBRACKET opt_call_args rbracket")
def mlhs_node_subscript(self, p):
return BoxAST(ast.Subscript(
p[0].getast(),
p[2].getcallargs(),
p[1].getsourcepos().lineno
))
@pg.production("mlhs_node : primary_value call_op IDENTIFIER")
def mlhs_node_attr(self, p):
return self.new_call(p[0], p[2], None)
@pg.production("mlhs_node : primary_value COLON2 IDENTIFIER")
def mlhs_node_colon_attr(self, p):
return self.new_call(p[0], p[2], None)
@pg.production("mlhs_node : primary_value call_op CONSTANT")
def mlhs_node_attr_constant(self, p):
return self.new_call(p[0], p[2], None)
@pg.production("mlhs_node : primary_value COLON2 CONSTANT")
def mlhs_node_constant(self, p):
return self.new_colon2(p[0], p[2])
@pg.production("mlhs_node : COLON3 CONSTANT")
def mlhs_node_colon_constant(self, p):
return self.new_colon3(p[1])
@pg.production("mlhs_node : backref")
def mlhs_node_backref(self, p):
raise NotImplementedError(p)
self.backref_assign_error(p[0])
@pg.production("lhs : keyword_variable")
@pg.production("lhs : user_variable")
def lhs_variable(self, p):
return self.assignable(p[0])
@pg.production("lhs : primary_value LITERAL_LBRACKET opt_call_args rbracket")
def lhs_subscript(self, p):
args = p[2].getcallargs() if p[2] is not None else []
return BoxAST(ast.Subscript(p[0].getast(), args, p[1].getsourcepos().lineno))
@pg.production("lhs : primary_value call_op IDENTIFIER")
def lhs_dot_identifier(self, p):
return self.new_call(p[0], p[2], None)
@pg.production("lhs : primary_value COLON2 IDENTIFIER")
def lhs_colon_identifier(self, p):
return self.new_call(p[0], p[2], None)
@pg.production("lhs : primary_value call_op CONSTANT")
def lhs_dot_constant(self, p):
return self.new_call(p[0], p[2], None)
@pg.production("lhs : primary_value COLON2 CONSTANT")
def lhs_colon_constant(self, p):
return self.new_colon2(p[0], p[2])
@pg.production("lhs : COLON3 CONSTANT")
def lhs_unbound_colon_constant(self, p):
return self.new_colon3(p[1])
@pg.production("lhs : backref")
def lhs_backref(self, p):
raise NotImplementedError(p)
self.backref_assign_error()
@pg.production("cname : IDENTIFIER")
def cname_identifier(self, p):
raise self.error("class/module name must be CONSTANT")
@pg.production("cname : CONSTANT")
def cname_constant(self, p):
return p[0]
@pg.production("cpath : COLON3 cname")
def cpath_unbound_colon_cname(self, p):
return self.new_colon3(p[1])
@pg.production("cpath : cname")
def cpath_cname(self, p):
lineno = p[0].getsourcepos().lineno
return BoxAST(ast.LookupConstant(ast.Scope(lineno), p[0].getstr(), lineno))
@pg.production("cpath : primary_value COLON2 cname")
def cpath_colon_cname(self, p):
return self.new_colon2(p[0], p[2])
@pg.production("fname : IDENTIFIER")
@pg.production("fname : CONSTANT")
@pg.production("fname : FID")
def fname_identifier(self, p):
return p[0]
@pg.production("fname : op")
def fname_op(self, p):
self.lexer.state = self.lexer.EXPR_ENDFN
return p[0]
@pg.production("fname : reswords")
def fname_reswords(self, p):
self.lexer.state = self.lexer.EXPR_ENDFN
return p[0]
@pg.production("fsym : fname")
@pg.production("fsym : symbol")
def fsym_fname(self, p):
return self.new_symbol(p[0])
@pg.production("fitem : fsym")
def fitem_fsym(self, p):
return p[0]
@pg.production("fitem : dsym")
def fitem_dsym(self, p):
return p[0]
@pg.production("undef_list : fitem")
def undef_list_fitem(self, p):
return self.new_list(p[0])
@pg.production("undef_list : undef_list LITERAL_COMMA UNDEF_LIST_SET_LEX_STATE fitem")
def undef_list_undef_list(self, p):
"""
undef_list ',' {
lexer.setState(LexState.EXPR_FNAME);
} fitem {
$$ = support.appendToBlock($1, support.newUndef($1.getPosition(), $4));
}
"""
return self.append_to_list(p[0], p[3])
@pg.production("UNDEF_LIST_SET_LEX_STATE : ")
def undef_list_UNDEF_LIST_SET_LEX_STATE(self, p):
self.lexer.state = self.lexer.EXPR_FNAME
@pg.production("op : PIPE")
@pg.production("op : CARET")
@pg.production("op : AMPER2")
@pg.production("op : CMP")
@pg.production("op : EQ")
@pg.production("op : EQQ")
@pg.production("op : MATCH")
@pg.production("op : NMATCH")
@pg.production("op : GT")
@pg.production("op : GEQ")
@pg.production("op : LT")
@pg.production("op : LEQ")
@pg.production("op : NEQ")
@pg.production("op : LSHFT")
@pg.production("op : RSHFT")
@pg.production("op : PLUS")
@pg.production("op : MINUS")
@pg.production("op : STAR2")
@pg.production("op : STAR")
@pg.production("op : DIVIDE")
@pg.production("op : PERCENT")
@pg.production("op : POW")
@pg.production("op : DSTAR")
@pg.production("op : BANG")
@pg.production("op : TILDE")
@pg.production("op : UPLUS")
@pg.production("op : UMINUS")
@pg.production("op : AREF")
@pg.production("op : ASET")
@pg.production("op : BACK_REF2")
def op(self, p):
return p[0]
@pg.production("reswords : __LINE__")
@pg.production("reswords : __FILE__")
@pg.production("reswords : __ENCODING__")
@pg.production("reswords : lBEGIN")
@pg.production("reswords : lEND")
@pg.production("reswords : ALIAS")
@pg.production("reswords : AND")
@pg.production("reswords : BEGIN")
@pg.production("reswords : BREAK")
@pg.production("reswords : CASE")
@pg.production("reswords : CLASS")
@pg.production("reswords : DEF")
@pg.production("reswords : DEFINED")
@pg.production("reswords : DO")
@pg.production("reswords : ELSE")
@pg.production("reswords : ELSIF")
@pg.production("reswords : END")
@pg.production("reswords : ENSURE")
@pg.production("reswords : FALSE")
@pg.production("reswords : FOR")
@pg.production("reswords : IN")
@pg.production("reswords : MODULE")
@pg.production("reswords : NEXT")
@pg.production("reswords : NIL")
@pg.production("reswords : NOT")
@pg.production("reswords : OR")
@pg.production("reswords : REDO")
@pg.production("reswords : RESCUE")
@pg.production("reswords : RETRY")
@pg.production("reswords : RETURN")
@pg.production("reswords : SELF")
@pg.production("reswords : SUPER")
@pg.production("reswords : THEN")
@pg.production("reswords : TRUE")
@pg.production("reswords : UNDEF")
@pg.production("reswords : WHEN")
@pg.production("reswords : YIELD")
@pg.production("reswords : IF_MOD")
@pg.production("reswords : UNLESS_MOD")
@pg.production("reswords : WHILE_MOD")
@pg.production("reswords : UNTIL_MOD")
@pg.production("reswords : RESCUE_MOD")
def reswords(self, p):
return p[0]
@pg.production("arg : lhs LITERAL_EQUAL arg_rhs")
def arg_lhs_equal_arg(self, p):
return BoxAST(ast.Assignment(p[0].getast(), p[2].getast()))
@pg.production("arg : var_lhs OP_ASGN arg_rhs")
def arg_var_lhs_op_asgn_arg(self, p):
return self.new_augmented_assignment(p[1], p[0], p[2])
@pg.production("arg : primary_value LITERAL_LBRACKET opt_call_args rbracket OP_ASGN arg_rhs")
def arg_subscript_op_asgn_arg(self, p):
args = p[2].getcallargs() if p[2] is not None else []
return self.new_augmented_assignment(
p[4],
BoxAST(ast.Subscript(p[0].getast(), args, p[1].getsourcepos().lineno)),
p[5],
)
@pg.production("arg : primary_value call_op IDENTIFIER OP_ASGN arg_rhs")
def arg_method_op_asgn_arg(self, p):
return self.new_augmented_assignment(
p[3],
self.new_call(p[0], p[2], None),
p[4]
)
@pg.production("arg : primary_value call_op CONSTANT OP_ASGN arg_rhs")
def arg_method_constant_op_asgn_arg(self, p):
return self.new_augmented_assignment(
p[3],
self.new_call(p[0], p[2], None),
p[4]
)
@pg.production("arg : primary_value COLON2 IDENTIFIER OP_ASGN arg_rhs")
def arg_colon_method_op_asgn_arg(self, p):
"""
primary_value tCOLON2 tIDENTIFIER tOP_ASGN arg {
$$ = new OpAsgnNode(support.getPosition($1), $1, $5, (String) $3.getValue(), (String) $4.getValue());
}
"""
raise NotImplementedError(p)
@pg.production("arg : primary_value COLON2 CONSTANT OP_ASGN arg_rhs")
def arg_constant_op_asgn_arg(self, p):
# self.warning("constant re-assignment")
return self.new_augmented_assignment(
p[3],
self.new_colon2(p[0], p[2]),
p[4]
)
@pg.production("arg : COLON3 CONSTANT OP_ASGN arg_rhs")
def arg_unbound_constant_op_asgn_arg(self, p):
# self.warning("constant re-assignment")
return self.new_augmented_assignment(
p[2],
self.new_colon3(p[1]),
p[3]
)
@pg.production("arg : backref OP_ASGN arg_rhs")
def arg_backref_op_asgn_arg(self, p):
raise NotImplementedError(p)
self.backref_assign_error()
@pg.production("arg : arg DOT2 arg")
def arg_dot2(self, p):
return BoxAST(ast.Range(p[0].getast(), p[2].getast(), False))
@pg.production("arg : arg DOT3 arg")
def arg_dot3(self, p):
return BoxAST(ast.Range(p[0].getast(), p[2].getast(), True))
@pg.production("arg : arg POW arg")
@pg.production("arg : arg PERCENT arg")
@pg.production("arg : arg DIVIDE arg")
@pg.production("arg : arg STAR2 arg")
@pg.production("arg : arg MINUS arg")
@pg.production("arg : arg PLUS arg")
def arg_binop(self, p):
return self.new_binary_call(p[0], p[1], p[2])
@pg.production("arg : UMINUS_NUM simple_numeric POW arg")
def arg_uminus_num_integer_pow_arg(self, p):
lineno = p[0].getsourcepos().lineno
return BoxAST(ast.Send(
self.new_binary_call(p[1], p[2], p[3]).getast(),
"-@",
[],
None,
lineno
))
@pg.production("arg : UPLUS arg")
def arg_uplus_arg(self, p):
return BoxAST(ast.Send(p[1].getast(), "+@", [], None, p[0].getsourcepos().lineno))
@pg.production("arg : UMINUS arg")
def arg_uminus_arg(self, p):
return BoxAST(ast.Send(p[1].getast(), "-@", [], None, p[0].getsourcepos().lineno))
@pg.production("arg : arg NEQ arg")
@pg.production("arg : arg EQQ arg")
@pg.production("arg : arg EQ arg")
@pg.production("arg : arg LEQ arg")
@pg.production("arg : arg LT arg")
@pg.production("arg : arg GEQ arg")
@pg.production("arg : arg GT arg")
@pg.production("arg : arg CMP arg")
@pg.production("arg : arg AMPER2 arg")
@pg.production("arg : arg CARET arg")
@pg.production("arg : arg PIPE arg")
def arg_binop2(self, p):
return self.new_binary_call(p[0], p[1], p[2])
@pg.production("arg : arg NMATCH arg")
@pg.production("arg : arg MATCH arg")
def arg_match_arg(self, p):
return self.new_binary_call(p[0], p[1], p[2])
@pg.production("arg : BANG arg")
def arg_bang_arg(self, p):
return self.new_call(p[1], p[0], None)
@pg.production("arg : TILDE arg")
def arg_tilde_arg(self, p):
return self.new_call(p[1], p[0], None)
@pg.production("arg : arg RSHFT arg")
@pg.production("arg : arg LSHFT arg")
def arg_binop3(self, p):
return self.new_binary_call(p[0], p[1], p[2])
@pg.production("arg : arg ANDOP arg")
def arg_andop_arg(self, p):
return self.new_and(p[0], p[2])
@pg.production("arg : arg OROP arg")
def arg_orop_arg(self, p):
return self.new_or(p[0], p[2])
@pg.production("arg : DEFINED opt_nl arg")
def arg_defined(self, p):
return self.new_defined(p[2], p[0])
@pg.production("arg : arg LITERAL_QUESTION_MARK arg opt_nl LITERAL_COLON arg")
def arg_ternary(self, p):
return BoxAST(ast.If(
p[0].getast(),
p[2].getast(),
p[5].getast()
))
@pg.production("arg : primary")
def arg_primary(self, p):
return p[0]
@pg.production("arg_value : arg")
def arg_value(self, p):
"""
arg {
support.checkExpression($1);
$$ = $1 != null ? $1 : NilImplicitNode.NIL;
}
"""
# TODO: check_expression, none handling
return p[0]
@pg.production("aref_args : none")
def aref_args_none(self, p):
return p[0]
@pg.production("aref_args : args trailer")
def aref_args_args_trailer(self, p):
return p[0]
@pg.production("aref_args : args LITERAL_COMMA assocs trailer")
def aref_args_args_comma_assocs_trailer(self, p):
return self.append_call_arg(p[0], self.new_hash(p[2]))
@pg.production("aref_args : assocs trailer")
def aref_args_assocs_trailer(self, p):
return self.new_call_args(self.new_hash(p[0]))
@pg.production("arg_rhs : arg", precedence="OP_ASGN")
def arg_rhs_arg(self, p):
return p[0]
@pg.production("arg_rhs : arg RESCUE_MOD arg")
def arg_rhs_arg_rescue_arg(self, p):
lineno = p[1].getsourcepos().lineno
return BoxAST(ast.TryExcept(
p[0].getast(),
[
ast.ExceptHandler(
[ast.LookupConstant(ast.Scope(lineno), "StandardError", lineno)],
None,
p[2].getast()
)
],
ast.Nil()
))
@pg.production("paren_args : LPAREN2 opt_call_args rparen")
def paren_args(self, p):
return p[1]
@pg.production("opt_paren_args : none")
def opt_paren_args_none(self, p):
return p[0]
@pg.production("opt_paren_args : paren_args")
def opt_paren_args(self, p):
return p[0]
@pg.production("opt_call_args : none")
def opt_call_args_none(self, p):
return p[0]
@pg.production("opt_call_args : call_args")
def opt_call_args(self, p):
return p[0]
@pg.production("opt_call_args : args LITERAL_COMMA")
def opt_call_args_args_comma(self, p):
return p[0]
@pg.production("opt_call_args : args LITERAL_COMMA assocs LITERAL_COMMA")
def opt_call_args_args_comma_assocs_comma(self, p):
return self.append_call_arg(p[0], self.new_hash(p[2]))
@pg.production("opt_call_args : assocs LITERAL_COMMA")
def opt_call_args_assocs_comma(self, p):
return self.new_call_args(self.new_hash(p[0]))
@pg.production("call_args : command")
def call_args_command(self, p):
return self.new_call_args(p[0])
@pg.production("call_args : args opt_block_arg")
def call_args_args_opt_block_arg(self, p):
return self.call_arg_block_pass(p[0], p[1])
@pg.production("call_args : assocs opt_block_arg")
def call_args_assocs_opt_block_arg(self, p):
box = self.new_call_args(self.new_hash(p[0]))
return self.call_arg_block_pass(box, p[1])
@pg.production("call_args : args LITERAL_COMMA assocs opt_block_arg")
def call_args_args_comma_assocs_opt_block_arg(self, p):
box = self.append_call_arg(p[0], self.new_hash(p[2]))
return self.call_arg_block_pass(box, p[3])
@pg.production("call_args : block_arg")
def call_args_block_arg(self, p):
return self.new_call_args(None, box_block=p[0])
@pg.production("command_args : start_command_args call_args")
def command_args(self, p):
self.lexer.cmd_argument_state.reset(p[0].getint())
return p[1]
@pg.production("start_command_args : ")
def start_command_args(self, p):
return BoxInt(self.lexer.cmd_argument_state.begin())
@pg.production("block_arg : AMPER arg_value")
def block_arg(self, p):
return BoxAST(ast.BlockArgument(p[1].getast()))
@pg.production("opt_block_arg : LITERAL_COMMA block_arg")
def opt_block_arg(self, p):
return p[1]
@pg.production("opt_block_arg : none")
def opt_block_arg_none(self, p):
return p[0]
@pg.production("args : arg_value")
def args_arg_value(self, p):
return self.new_call_args(p[0])
@pg.production("args : STAR arg_value")
def args_star_arg_value(self, p):
return self.new_call_args(self.new_splat(p[1]))
@pg.production("args : args LITERAL_COMMA arg_value")
def args_comma_arg_value(self, p):
return self.append_call_arg(p[0], p[2])
@pg.production("args : args LITERAL_COMMA STAR arg_value")
def args_comma_star_arg_value(self, p):
return self.append_call_arg(p[0], self.new_splat(p[3]))
@pg.production("mrhs_arg : arg_value")
def mrhs_arg_arg_value(self, p):
return p[0]
@pg.production("mrhs_arg : mrhs")
def mrhs_arg_mrhs(self, p):
return BoxAST(ast.Array(p[0].getastlist()))
@pg.production("mrhs : args LITERAL_COMMA arg_value")
def mrhs_args_comma_arg_value(self, p):
return self.append_to_list(self._new_list(p[0].getcallargs()), p[2])
@pg.production("mrhs : args LITERAL_COMMA STAR arg_value")
def mrhs_args_comma_star_arg_value(self, p):
return self.append_to_list(self._new_list(p[0].getcallargs()), self.new_splat(p[3]))
@pg.production("mrhs : STAR arg_value")
def mrhs_star_arg_value(self, p):
return self.new_list(self.new_splat(p[1]))
@pg.production("primary : literal")
def primary_literal(self, p):
return p[0]
@pg.production("primary : strings")
def primary_strings(self, p):
return p[0]
@pg.production("primary : xstring")
def primary_xstring(self, p):
return p[0]
@pg.production("primary : regexp")
def primary_regexp(self, p):
return p[0]
@pg.production("primary : words")
def primary_words(self, p):
return p[0]
@pg.production("primary : qwords")
def primary_qwords(self, p):
return p[0]
@pg.production("primary : symbols")
def primary_qwords(self, p):
return p[0]
@pg.production("primary : qsymbols")
def primary_qwords(self, p):
return p[0]
@pg.production("primary : var_ref")
def primary_var_ref(self, p):
return p[0]
@pg.production("primary : backref")
def primary_backref(self, p):
return p[0]
@pg.production("primary : FID")
def primary_fid(self, p):
return self.new_fcall(p[0], None)
@pg.production("primary : BEGIN bodystmt END")
def primary_begin_end(self, p):
return p[1]
@pg.production("primary : LPAREN_ARG paren_post_expr rparen")
def primary_paren_arg(self, p):
return None
@pg.production("primary : LPAREN_ARG stmt paren_post_expr rparen")
def primary_paren_arg(self, p):
stmt = p[1].getast()
if isinstance(stmt, ast.Statement): # simplify here for posterity
return BoxAST(stmt.expr)
else:
return p[1]
@pg.production("paren_post_expr : ")
def paren_post_expr(self, p):
self.lexer.state = self.lexer.EXPR_ENDARG
@pg.production("primary : LPAREN compstmt RPAREN")
def primary_lparen(self, p):
node = ast.Block(p[1].getastlist()) if p[1] is not None else ast.Nil()
return BoxAST(node)
@pg.production("primary : primary_value COLON2 CONSTANT")
def primary_constant_lookup(self, p):
return self.new_colon2(p[0], p[2])
@pg.production("primary : COLON3 CONSTANT")
def primary_unbound_constant(self, p):
return self.new_colon3(p[1])
@pg.production("primary : LBRACK aref_args RBRACK")
def primary_array(self, p):
if p[1] is None:
items = []
else:
items = p[1].getcallargs()
return BoxAST(ast.Array(items))
@pg.production("primary : LBRACE assoc_list RCURLY")
def primary_hash(self, p):
return self.new_hash(p[1])
@pg.production("primary : RETURN")
def primary_return(self, p):
return BoxAST(ast.Return(ast.Nil()))
@pg.production("primary : YIELD LPAREN2 call_args rparen")
def primary_yield_paren_args(self, p):
return BoxAST(ast.Yield(p[2].getcallargs(), p[0].getsourcepos().lineno))
@pg.production("primary : YIELD LPAREN2 rparen")
def primary_yield_paren(self, p):
return BoxAST(ast.Yield([], p[0].getsourcepos().lineno))
@pg.production("primary : YIELD")
def primary_yield(self, p):
return BoxAST(ast.Yield([], p[0].getsourcepos().lineno))
@pg.production("primary : DEFINED opt_nl LPAREN2 expr rparen")
def primary_defined(self, p):
return self.new_defined(p[3], p[0])
@pg.production("primary : NOT LPAREN2 expr rparen")
def primary_not_paren_expr(self, p):
return self.new_call(p[2], self.new_token(p[0], "!", "!"), None)
@pg.production("primary : NOT LPAREN2 rparen")
def primary_not_paren(self, p):
return self.new_call(BoxAST(ast.Nil()), self.new_token(p[0], "!", "!"), None)
@pg.production("primary : fcall brace_block")
def primary_operation_brace_block(self, p):
return self.new_fcall(p[0], self.new_call_args(box_block=p[1]))
@pg.production("primary : method_call")
def primary_method_call(self, p):
return p[0]
@pg.production("primary : method_call brace_block")
def primary_method_call_brace_block(self, p):
return self.combine_send_block(p[0], p[1])
@pg.production("primary : LAMBDA lambda")
def primary_lambda(self, p):
return p[1]
@pg.production("primary : IF expr_value then compstmt if_tail END")
def primary_if(self, p):
return BoxAST(ast.If(
p[1].getast(),
ast.Block(p[3].getastlist()) if p[3] else ast.Nil(),
p[4].getast() if p[4] else ast.Nil()
))
@pg.production("primary : UNLESS expr_value then compstmt opt_else END")
def primary_unless(self, p):
return BoxAST(ast.If(
p[1].getast(),
p[4].getast() if p[4] is not None else ast.Nil(),
ast.Block(p[3].getastlist()) if p[3] else ast.Nil(),
))
@pg.production("primary : while expr_value do post_while_do compstmt END")
def primary_while(self, p):
body = ast.Block(p[4].getastlist()) if p[4] is not None else ast.Nil()
return BoxAST(ast.While(p[1].getast(), body))
@pg.production("while : WHILE")
def while_token(self, p):
self.lexer.condition_state.begin()
@pg.production("post_while_do : ")
def post_while_do(self, p):
self.lexer.condition_state.end()
@pg.production("primary : until expr_value do post_while_do compstmt END")
def primary_until(self, p):
body = ast.Block(p[4].getastlist()) if p[4] is not None else ast.Nil()
return BoxAST(ast.Until(p[1].getast(), body))
@pg.production("until : UNTIL")
def until_token(self, p):
self.lexer.condition_state.begin()
@pg.production("primary : CASE expr_value opt_terms case_body END")
def primary_case_expr_value(self, p):
elsebody = p[3].getastlist()[-1]
assert isinstance(elsebody, ast.When)
assert elsebody.conds is None
return BoxAST(ast.Case(
p[1].getast(),
p[3].getastlist()[:-1],
elsebody.block,
))
@pg.production("primary : CASE opt_terms case_body END")
def primary_case(self, p):
elsebody = p[2].getastlist()[-1]
assert isinstance(elsebody, ast.When)
assert elsebody.conds is None
conditions = []
for when in p[2].getastlist()[:-1]:
assert isinstance(when, ast.When)
cond = when.conds[0]
for expr in when.conds[1:]:
cond = ast.Or(cond, expr)
conditions.append((cond, when.block))
else_block = elsebody.block
for idx in range(len(conditions) - 1, 0, -1):
cond, block = conditions[idx]
else_block = ast.If(cond, block, else_block)
return BoxAST(ast.If(conditions[0][0], conditions[0][1], else_block))
@pg.production("primary : for for_var IN post_for_in expr_value do post_for_do compstmt END")
def primary_for(self, p):
lineno = p[0].getsourcepos().lineno
for_vars = p[1].get_for_var()
arg = p[1].getargument()
target = ast.Variable(arg.name, lineno)
if isinstance(for_vars, BoxAST):
asgn = ast.Assignment(for_vars.getast(), target)
elif isinstance(for_vars, BoxAssignableList):
asgn = ast.MultiAssignment(for_vars.getassignment(), target)
else:
raise SystemError
stmts = p[7].getastlist() if p[7] is not None else []
stmts = [ast.Statement(asgn)] + stmts
block = ast.SendBlock(lineno, [arg], None, [], [], None, None, ast.Block(stmts))
self.save_and_pop_scope(block)
return BoxAST(ast.Send(p[4].getast(), "each", [], block, lineno))
@pg.production("for : FOR")
def for_prod(self, p):
self.push_shared_scope()
return p[0]
@pg.production("post_for_in : ")
def post_for_in(self, p):
self.lexer.condition_state.begin()
self.hide_scope()
@pg.production("post_for_do : ")
def post_for_do(self, p):
self.lexer.condition_state.end()
self.unhide_scope()
@pg.production("primary : CLASS cpath superclass push_local_scope bodystmt END")
def primary_class(self, p):
node = p[1].getast(ast.LookupConstant)
node = ast.Class(
node.scope,
node.name,
p[2].getast() if p[2] is not None else None,
p[4].getast(),
)
self.save_and_pop_scope(node)
return BoxAST(node)
@pg.production("push_local_scope : ")
def push_local_scope_prod(self, p):
self.push_local_scope()
@pg.production("primary : CLASS LSHFT expr term push_local_scope bodystmt END")
def primary_singleton_class(self, p):
node = ast.SingletonClass(
p[2].getast(),
p[5].getast(),
p[0].getsourcepos().lineno
)
self.save_and_pop_scope(node)
return BoxAST(node)
@pg.production("primary : MODULE cpath push_local_scope bodystmt END")
def primary_module(self, p):
node = p[1].getast(ast.LookupConstant)
node = ast.Module(node.scope, node.name, p[3].getast())
self.save_and_pop_scope(node)
return BoxAST(node)
@pg.production("primary : DEF fname push_local_scope f_arglist bodystmt END")
def primary_def(self, p):
node = self.new_function(
p[0].getsourcepos().lineno,
None,
p[1],
p[3],
p[4]
)
self.save_and_pop_scope(node.getast())
return node
@pg.production("primary : DEF singleton dot_or_colon singleton_method_post_dot_colon fname push_local_scope singleton_method_post_fname f_arglist bodystmt END")
def primary_def_singleton(self, p):
node = self.new_function(
p[0].getsourcepos().lineno,
p[1],
p[4],
p[7],
p[8]
)
self.save_and_pop_scope(node.getast())
return node
@pg.production("singleton_method_post_dot_colon : ")
def singleton_method_post_dot_colon(self, p):
self.lexer.state = self.lexer.EXPR_FNAME
@pg.production("singleton_method_post_fname : ")
def singleton_method_post_fname(self, p):
self.lexer.state = self.lexer.EXPR_ENDFN
self.lexer.label_state = self.lexer.EXPR_LABEL
@pg.production("primary : BREAK")
def primary_break(self, p):
return BoxAST(ast.Break(ast.Nil()))
@pg.production("primary : NEXT")
def primary_next(self, p):
return BoxAST(ast.Next(ast.Nil()))
@pg.production("primary : REDO")
def primary_redo(self, p):
"""
kREDO {
$$ = new RedoNode($1.getPosition());
}
"""
raise NotImplementedError(p)
@pg.production("primary : RETRY")
def primary_retry(self, p):
"""
kRETRY {
$$ = new RetryNode($1.getPosition());
}
"""
raise NotImplementedError(p)
@pg.production("primary_value : primary")
def primary_value(self, p):
"""
primary {
support.checkExpression($1);
$$ = $1;
if ($$ == null) $$ = NilImplicitNode.NIL;
}
"""
# TODO: checkExpression, implicit Nil
return p[0]
@pg.production("then : term THEN")
@pg.production("then : THEN")
@pg.production("then : term")
def then(self, p):
return p[0]
@pg.production("do : DO_COND")
@pg.production("do : term")
def do(self, p):
return p[0]
@pg.production("if_tail : opt_else")
def if_tail_opt_else(self, p):
return p[0]
@pg.production("if_tail : ELSIF expr_value then compstmt if_tail")
def if_tail_elsif(self, p):
return BoxAST(ast.If(
p[1].getast(),
ast.Block(p[3].getastlist()),
p[4].getast() if p[4] else ast.Nil(),
))
@pg.production("opt_else : none")
def opt_else_none(self, p):
return p[0]
@pg.production("opt_else : ELSE compstmt")
def opt_else(self, p):
return BoxAST(ast.Block(p[1].getastlist()) if p[1] is not None else ast.Nil())
@pg.production("for_var : mlhs")
@pg.production("for_var : lhs")
def for_var(self, p):
box = BoxForVars(p[0])
self.lexer.symtable.declare_local(box.getargument().name)
return box
@pg.production("f_marg : f_norm_arg")
def f_marg_f_norm_arg(self, p):
return p[0]
@pg.production("f_marg : LPAREN f_margs rparen")
def f_marg_paren(self, p):
return BoxAST(p[1].getassignment())
@pg.production("f_marg_list : f_marg")
def f_marg_list_f_marg(self, p):
return self.new_list(p[0])
@pg.production("f_marg_list : f_marg_list LITERAL_COMMA f_marg")
def f_marg_list(self, p):
return self.append_to_list(p[0], p[2])
@pg.production("f_margs : f_marg_list")
def f_margs_f_marg_list(self, p):
return self._new_assignable_list(self.args_to_variables(p[0]))
@pg.production("f_margs : f_marg_list LITERAL_COMMA STAR f_norm_arg")
def f_margs_f_marg_list_comma_star_f_norm_Arg(self, p):
return self._new_assignable_list(self.args_to_variables(p[0]) + [ast.Splat(self.arg_to_variable(p[3]))])
@pg.production("f_margs : f_marg_list LITERAL_COMMA STAR f_norm_arg LITERAL_COMMA f_marg_list")
def f_margs_f_marg_list_comma_star_f_norm_arg_comm_f_marg_list(self, p):
return self._new_assignable_list(
self.args_to_variables(p[0]) +
[ast.Splat(self.arg_to_variable(p[3]))] +
[self._arg_to_variable(node) for node in p[5].getastlist()]
)
@pg.production("f_margs : f_marg_list LITERAL_COMMA STAR")
def f_margs_f_marg_list_comma_star(self, p):
return self._new_assignable_list(self.args_to_variables(p[0]) + [ast.Splat(None)])
@pg.production("f_margs : f_marg_list LITERAL_COMMA STAR LITERAL_COMMA f_marg_list")
def f_margs_f_marg_list_comma_star_comma_f_marg_list(self, p):
return self._new_assignable_list(
self.args_to_variables(p[0]) +
[ast.Splat(None)] +
[self._arg_to_variable(node) for node in p[4].getastlist()]
)
@pg.production("f_margs : STAR f_norm_arg")
def f_margs_star_f_norm_arg(self, p):
return self._new_assignable_list([ast.Splat(self.arg_to_variable(p[1]))])
@pg.production("f_margs : STAR f_norm_arg LITERAL_COMMA f_marg_list")
def f_margs_star_f_norm_arg_comma_f_marg_list(self, p):
return self._new_assignable_list(
[ast.Splat(self.arg_to_variable(p[1]))] +
[self._arg_to_variable(node) for node in p[3].getastlist()]
)
@pg.production("f_margs : STAR")
def f_margs_star(self, p):
return self._new_assignable_list([ast.Splat(None)])
@pg.production("f_margs : STAR LITERAL_COMMA f_marg_list")
def f_margs_star_comma_f_marg_list(self, p):
return self._new_assignable_list(
[ast.Splat(None)] +
[self._arg_to_variable(node) for node in p[2].getastlist()]
)
@pg.production("block_args_tail : f_block_kwarg LITERAL_COMMA f_kwrest opt_f_block_arg")
def block_args_tail_1(self, p):
return self.new_argstail(keywords=p[0], kwrest=p[2], block_arg=p[3])
@pg.production("block_args_tail : f_block_kwarg opt_f_block_arg")
def block_args_tail_2(self, p):
return self.new_argstail(keywords=p[0], block_arg=p[1])
@pg.production("block_args_tail : f_kwrest opt_f_block_arg")
def block_args_tail_3(self, p):
return self.new_argstail(kwrest=p[0], block_arg=p[1])
@pg.production("block_args_tail : f_block_arg")
def block_args_tail_4(self, p):
return self.new_argstail(block_arg=p[0])
@pg.production("opt_block_args_tail : LITERAL_COMMA block_args_tail")
def opt_block_args_tail(self, p):
return p[1]
@pg.production("opt_block_args_tail : ")
def opt_block_args_tail_empty(self, p):
return None
@pg.production("block_param : f_arg LITERAL_COMMA f_block_optarg LITERAL_COMMA f_rest_arg opt_block_args_tail")
def block_param_f_arg_comma_f_block_optarg_comma_f_rest_arg_opt_f_block_arg(self, p):
return self.new_args(
args=p[0],
optargs=p[2],
splat_arg=p[4],
args_tail=p[5]
)
@pg.production("block_param : f_arg LITERAL_COMMA f_block_optarg LITERAL_COMMA f_rest_arg LITERAL_COMMA f_arg opt_block_args_tail")
def block_param_f_arg_comma_f_block_optarg_comma_f_rest_arg_comma_f_arg_opt_f_block_arg(self, p):
return self.new_args(
args=p[0],
optargs=p[2],
splat_arg=p[4],
post_args=p[6],
args_tail=p[7]
)
@pg.production("block_param : f_arg LITERAL_COMMA f_block_optarg opt_block_args_tail")
def block_param_f_arg_comma_f_block_optarg_opt_f_block_arg(self, p):
return self.new_args(
args=p[0],
optargs=p[2],
args_tail=p[3]
)
@pg.production("block_param : f_arg LITERAL_COMMA f_block_optarg LITERAL_COMMA f_arg opt_block_args_tail")
def block_param_f_arg_comma_f_block_optarg_comma_f_arg_opt_f_block_arg(self, p):
"""
f_arg ',' f_block_optarg ',' f_arg opt_f_block_arg {
$$ = support.new_args($1.getPosition(), $1, $3, null, $5, $6);
}
"""
return self.new_args(
args=p[0],
optargs=p[2],
post_args=p[4],
args_tail=p[5]
)
@pg.production("block_param : f_arg LITERAL_COMMA f_rest_arg opt_block_args_tail")
def block_param_f_arg_comma_f_rest_arg_opt_f_block_arg(self, p):
return self.new_args(args=p[0], splat_arg=p[2], args_tail=p[3])
@pg.production("block_param : f_arg LITERAL_COMMA")
def block_param_f_arg_comma(self, p):
self.lexer.symtable.declare_argument("*")
tok = self.new_token(p[1], "IDENTIFIER", "*")
return self.new_args(args=p[0], splat_arg=tok)
@pg.production("block_param : f_arg LITERAL_COMMA f_rest_arg LITERAL_COMMA f_arg opt_block_args_tail")
def block_param_f_arg_comma_f_rest_arg_comma_f_arg_opt_f_block_arg(self, p):
"""
f_arg ',' f_rest_arg ',' f_arg opt_f_block_arg {
$$ = support.new_args($1.getPosition(), $1, null, $3, $5, $6);
}
"""
return self.new_args(
args=p[0],
splat_arg=p[2],
post_args=p[4],
args_tail=p[5]
)
@pg.production("block_param : f_arg opt_block_args_tail")
def block_param_f_arg_opt_f_block_arg(self, p):
return self.new_args(args=p[0], args_tail=p[1])
@pg.production("block_param : f_block_optarg LITERAL_COMMA f_rest_arg opt_block_args_tail")
def block_param_f_block_optarg_comma_f_rest_arg_opt_f_block_arg(self, p):
return self.new_args(args=p[0], splat_arg=p[2], args_tail=p[3])
@pg.production("block_param : f_block_optarg LITERAL_COMMA f_rest_arg LITERAL_COMMA f_arg opt_block_args_tail")
def block_param_f_block_optarg_comma_f_rest_arg_comma_f_arg_opt_f_block_arg(self, p):
"""
f_block_optarg ',' f_rest_arg ',' f_arg opt_f_block_arg {
$$ = support.new_args(support.getPosition($1), null, $1, $3, $5, $6);
}
"""
return self.new_args(
optargs=p[0],
splat_arg=p[2],
post_args=p[4],
args_tail=p[5]
)
@pg.production("block_param : f_block_optarg opt_block_args_tail")
def block_param_f_block_optarg_opt_f_block_arg(self, p):
return self.new_args(args=p[0], args_tail=p[1])
@pg.production("block_param : f_block_optarg LITERAL_COMMA f_arg opt_block_args_tail")
def block_param_f_block_optarg_comma_f_arg_opt_f_block_arg(self, p):
"""
f_block_optarg ',' f_arg opt_f_block_arg {
$$ = support.new_args($1.getPosition(), null, $1, null, $3, $4);
}
"""
return self.new_args(
args=p[0],
post_args=p[2],
args_tail=p[3]
)
@pg.production("block_param : f_rest_arg opt_block_args_tail")
def block_param_f_rest_arg_opt_f_block_arg(self, p):
return self.new_args(splat_arg=p[0], args_tail=p[1])
@pg.production("block_param : f_rest_arg LITERAL_COMMA f_arg opt_block_args_tail")
def block_param_f_rest_arg_comma_f_arg_opt_f_block_arg(self, p):
"""
f_rest_arg ',' f_arg opt_f_block_arg {
$$ = support.new_args($1.getPosition(), null, null, $1, $3, $4);
}
"""
return self.new_args(
splat_arg=p[0],
post_args=p[2],
args_tail=p[3]
)
@pg.production("block_param : block_args_tail")
def block_param_f_block_arg(self, p):
return self.new_args(args_tail=p[0])
@pg.production("opt_block_param : none")
def opt_block_param_none(self, p):
return self.new_args()
@pg.production("opt_block_param : block_param_def")
def opt_block_param(self, p):
self.lexer.command_start = True
return p[0]
@pg.production("block_param_def : PIPE opt_bv_decl PIPE")
def block_param_def_pipe_opt_bv_decl_pipe(self, p):
return self.new_args()
@pg.production("block_param_def : OROP")
def block_param_def_orop(self, p):
return self.new_args()
@pg.production("block_param_def : PIPE block_param opt_bv_decl PIPE")
def block_param_def_pipe_block_param_opt_bv_decl_pipe(self, p):
return p[1]
@pg.production("opt_bv_decl : opt_nl")
def opt_bv_decl_opt_nl(self, p):
return None
@pg.production("opt_bv_decl : opt_nl LITERAL_SEMICOLON bv_decls opt_nl")
def opt_bv_decl(self, p):
return None
@pg.production("bv_decls : bvar")
def bv_decls_bvar(self, p):
return None
@pg.production("bv_decls : bv_decls LITERAL_COMMA bvar")
def bv_decls(self, p):
return None
@pg.production("bvar : IDENTIFIER")
def bvar_identifier(self, p):
self.lexer.symtable.declare_local(p[0].getstr())
@pg.production("bvar : f_bad_arg")
def bvar_f_bad_arg(self, p):
return None
@pg.production("lambda : PRE_LAMBDA f_larglist lambda_body")
def lambda_prod(self, p):
self.lexer.left_paren_begin = p[0].getint()
node = self.new_send_block(self.lexer.lineno, p[1], p[2]).getast()
self.save_and_pop_scope(node)
return BoxAST(ast.Lambda(node))
@pg.production("PRE_LAMBDA :")
def pre_lambda(self, p):
self.push_block_scope()
left_paren_begin = self.lexer.left_paren_begin
self.lexer.paren_nest += 1
self.lexer.left_paren_begin = self.lexer.paren_nest
return BoxInt(left_paren_begin)
@pg.production("f_larglist : LPAREN_ARG f_args opt_bv_decl RPAREN")
@pg.production("f_larglist : LPAREN2 f_args opt_bv_decl RPAREN")
def f_larglist_parens(self, p):
return p[1]
@pg.production("f_larglist : f_args opt_bv_decl")
def f_larglist(self, p):
return p[0]
@pg.production("lambda_body : LAMBEG compstmt RCURLY")
def lambda_body_lambeg(self, p):
return p[1]
@pg.production("lambda_body : DO_LAMBDA compstmt END")
def lambda_body_do(self, p):
return p[1]
@pg.production("do_block : DO_BLOCK do_body END")
def do_block(self, p):
box = self.new_send_block(p[0].getsourcepos().lineno, p[1].getblockparam(), p[1].getblockstmts())
self.save_and_pop_scope(box.getast())
return box
@pg.production("block_call : command do_block")
def block_call_command_do_block(self, p):
return self.combine_send_block(p[0], p[1])
@pg.production("block_call : block_call call_op2 operation2 opt_paren_args")
def block_call_op2_operation_opt_paren_args(self, p):
return self.new_call(p[0], p[2], p[3])
@pg.production("block_call : block_call call_op2 operation2 opt_paren_args brace_block")
def block_call_op2_operation_opt_paren_args_brace_block(self, p):
return self.combine_send_block(self.new_call(p[0], p[2], p[3]), p[4])
@pg.production("block_call : block_call call_op2 operation2 command_args do_block")
def block_call_op2_operation_command_args_do_block(self, p):
return self.combine_send_block(self.new_call(p[0], p[2], p[3]), p[4])
@pg.production("method_call : fcall paren_args")
def method_call_operation_paren_args(self, p):
return self.new_fcall(p[0], p[1])
@pg.production("method_call : primary_value call_op operation2 opt_paren_args")
def method_call_primary_value_dot_operation_opt_paren_args(self, p):
return self.new_call(p[0], p[2], p[3])
@pg.production("method_call : primary_value COLON2 operation2 paren_args")
def method_call_primary_value_colon_operation_paren_args(self, p):
return self.new_call(p[0], p[2], p[3])
@pg.production("method_call : primary_value COLON2 operation3")
def method_call_primary_value_colon_operation(self, p):
return self.new_call(p[0], p[2], None)
@pg.production("method_call : primary_value call_op paren_args")
def method_call_primary_value_dot_paren_args(self, p):
return self.new_call(p[0], self.new_token(p[1], "call", "call"), p[2])
@pg.production("method_call : primary_value COLON2 paren_args")
def method_call_primary_value_colon_paren_args(self, p):
return self.new_call(p[0], self.new_token(p[1], "call", "call"), p[2])
@pg.production("method_call : SUPER paren_args")
def method_call_super_paren_args(self, p):
return self.new_super(p[1], p[0])
@pg.production("method_call : SUPER")
def method_call_super(self, p):
lineno = p[0].getsourcepos().lineno
args = []
for n, tp in self.lexer.symtable.arguments:
if tp == self.lexer.symtable.BLOCK_ARG:
continue
node = ast.Variable(n, lineno)
if tp == self.lexer.symtable.SPLAT_ARG:
node = ast.Splat(node)
args.append(node)
return BoxAST(ast.Super(args, None, lineno))
@pg.production("method_call : primary_value LITERAL_LBRACKET opt_call_args rbracket")
def method_call_primary_value_lbracket_opt_call_args_rbracket(self, p):
return self.new_call(p[0], self.new_token(p[1], "[]", "[]"), p[2])
@pg.production("brace_block : LCURLY brace_body RCURLY")
def brace_block_curly(self, p):
box = self.new_send_block(p[0].getsourcepos().lineno, p[1].getblockparam(), p[1].getblockstmts())
self.save_and_pop_scope(box.getast())
return box
@pg.production("brace_block : DO do_body END")
def brace_block_do(self, p):
box = self.new_send_block(p[0].getsourcepos().lineno, p[1].getblockparam(), p[1].getblockstmts())
self.save_and_pop_scope(box.getast())
return box
@pg.production("brace_body : push_block_scope opt_block_param compstmt")
def brace_body(self, p):
return BoxBraceBody(p[1], p[2])
@pg.production("do_body : push_block_scope opt_block_param compstmt")
def do_body(self, p):
return BoxBraceBody(p[1], p[2])
@pg.production("push_block_scope : ")
def push_block_scope_prod(self, p):
self.push_block_scope()
@pg.production("case_body : WHEN args then compstmt cases")
def case_body(self, p):
body = ast.Block(p[3].getastlist()) if p[3] is not None else ast.Nil()
items = [
ast.When(p[1].getcallargs(), body, p[0].getsourcepos().lineno)
]
items.extend(p[4].getastlist())
return self._new_list(items)
@pg.production("cases : opt_else")
def cases_opt_else(self, p):
body = p[0].getast() if p[0] is not None else ast.Nil()
# TODO: a real line number here
return self.new_list(BoxAST(ast.When(None, body, -1)))
@pg.production("cases : case_body")
def cases_case_body(self, p):
return p[0]
@pg.production("opt_rescue : RESCUE exc_list exc_var then compstmt opt_rescue")
def opt_rescue(self, p):
handlers = [
ast.ExceptHandler(
p[1].getastlist() if p[1] is not None else [],
p[2].getast() if p[2] is not None else None,
ast.Block(p[4].getastlist()) if p[4] is not None else ast.Nil(),
)
]
if p[5] is not None:
handlers.extend(p[5].getastlist())
return BoxASTList(handlers)
@pg.production("opt_rescue : ")
def opt_rescue_empty(self, p):
return None
@pg.production("exc_list : arg_value")
def exc_list_arg_value(self, p):
return self.new_list(p[0])
@pg.production("exc_list : mrhs")
def exc_list_mrhs(self, p):
return p[0]
@pg.production("exc_list : none")
def exc_list_none(self, p):
return p[0]
@pg.production("exc_var : ASSOC lhs")
def exc_var(self, p):
return p[1]
@pg.production("exc_var : none")
def exc_var_none(self, p):
return p[0]
@pg.production("opt_ensure : ENSURE compstmt")
def opt_ensure(self, p):
return p[1]
@pg.production("opt_ensure : none")
def opt_ensure_none(self, p):
return p[0]
@pg.production("literal : numeric")
def literal_numeric(self, p):
return p[0]
@pg.production("literal : symbol")
def literal_symbol(self, p):
return self.new_symbol(p[0])
@pg.production("literal : dsym")
def literal_dsym(self, p):
return p[0]
@pg.production("strings : string")
def strings(self, p):
return p[0]
@pg.production("string : CHAR")
def string_char(self, p):
# TODO: encoding
return BoxAST(ast.ConstantString(p[0].getstr()))
@pg.production("string : string1")
def string_string1(self, p):
return p[0]
@pg.production("string : string string1")
def string_string_string1(self, p):
return self.concat_literals(p[0], p[1])
@pg.production("string1 : STRING_BEG string_contents STRING_END")
def string1(self, p):
return p[1]
@pg.production("xstring : XSTRING_BEG xstring_contents STRING_END")
def xstring(self, p):
return self.new_fcall(self.new_token(p[0], "`", "`"), self.new_call_args(p[1]))
@pg.production("regexp : REGEXP_BEG regexp_contents REGEXP_END")
def regexp(self, p):
str_flags = p[2].getstr()
flags = 0
for f in str_flags:
flags |= regexp.OPTIONS_MAP[f]
if p[1] is not None:
n = p[1].getast()
if isinstance(n, ast.ConstantString):
node = ast.ConstantRegexp(n.strvalue, flags, p[0].getsourcepos().lineno)
else:
node = ast.DynamicRegexp(n, flags)
else:
node = ast.ConstantRegexp("", flags, p[0].getsourcepos().lineno)
return BoxAST(node)
@pg.production("words : WORDS_BEG LITERAL_SPACE STRING_END")
def words_space(self, p):
return BoxAST(ast.Array([]))
@pg.production("words : WORDS_BEG word_list STRING_END")
def words_word_list(self, p):
return BoxAST(ast.Array(p[1].getastlist()))
@pg.production("word_list : ")
def word_list_empty(self, p):
return self.new_list()
@pg.production("word_list : word_list word LITERAL_SPACE")
def word_list(self, p):
return self.append_to_list(p[0], p[1])
@pg.production("word : string_content")
def word_string_content(self, p):
return p[0]
@pg.production("word : word string_content")
def word(self, p):
return self.concat_literals(p[0], p[1])
@pg.production("symbols : SYMBOLS_BEG LITERAL_SPACE STRING_END")
def symbols_empty(self, p):
return BoxAST(ast.Array([]))
@pg.production("symbols : SYMBOLS_BEG symbol_list STRING_END")
def symbols(self, p):
return BoxAST(ast.Array(p[1].getastlist()))
@pg.production("symbol_list : ")
def symbol_list(self, p):
return self.new_list()
@pg.production("symbol_list : symbol_list word LITERAL_SPACE")
def symbol_list_word(self, p):
word = p[1].getast()
if word is None:
sym = ast.ConstantSymbol("")
elif isinstance(word, ast.ConstantString):
sym = ast.ConstantSymbol(word.strvalue)
else:
sym = ast.Symbol(word, p[2].getsourcepos().lineno)
return self.append_to_list(p[0], BoxAST(sym))
@pg.production("qwords : QWORDS_BEG LITERAL_SPACE STRING_END")
def qwords_space(self, p):
return BoxAST(ast.Array([]))
@pg.production("qwords : QWORDS_BEG qword_list STRING_END")
def qwords_qword_list(self, p):
return BoxAST(ast.Array(p[1].getastlist()))
@pg.production("qsymbols : QSYMBOLS_BEG LITERAL_SPACE STRING_END")
def qsymbols_space(self, p):
return BoxAST(ast.Array([]))
@pg.production("qsymbols : QSYMBOLS_BEG qsym_list STRING_END")
def qsymbols_space(self, p):
return BoxAST(ast.Array(p[1].getastlist()))
@pg.production("qword_list : ")
def qword_list_empty(self, p):
return self.new_list()
@pg.production("qword_list : qword_list STRING_CONTENT LITERAL_SPACE")
def qword_list(self, p):
return self.append_to_list(p[0], BoxAST(ast.ConstantString(p[1].getstr())))
@pg.production("qsym_list : ")
def qsym_list_empty(self, p):
return self.new_list()
@pg.production("qsym_list : qsym_list STRING_CONTENT LITERAL_SPACE")
def qsym_list(self, p):
return self.append_to_list(p[0], self.new_symbol(p[1]))
@pg.production("string_contents : ")
def string_contents_empty(self, p):
# TODO: Encoding?
return BoxAST(ast.ConstantString(""))
@pg.production("string_contents : string_contents string_content")
def string_contents(self, p):
return self.concat_literals(p[0], p[1])
@pg.production("xstring_contents : ")
def xstring_contents_empty(self, p):
return None
@pg.production("xstring_contents : xstring_contents string_content")
def xstring_contents(self, p):
return self.concat_literals(p[0], p[1])
@pg.production("regexp_contents : ")
def regexp_contents_empty(self, p):
return None
@pg.production("regexp_contents : regexp_contents string_content")
def regexp_contents(self, p):
return self.concat_literals(p[0], p[1])
@pg.production("string_content : STRING_CONTENT")
def string_content_string_content(self, p):
return BoxAST(ast.ConstantString(p[0].getstr()))
@pg.production("string_content : string_dvar_prod string_dvar")
def string_content_string_dvar(self, p):
self.lexer.str_term = p[0].getstrterm()
return p[1]
@pg.production("string_dvar_prod : STRING_DVAR")
def string_dvar_prod(self, p):
str_term = self.lexer.str_term
self.lexer.str_term = None
self.lexer.state = self.lexer.EXPR_BEG
return BoxStrTerm(str_term)
@pg.production("string_content : string_dbeg compstmt RCURLY STRING_DEND")
def string_content_string_dbeg(self, p):
self.lexer.condition_state.restart()
self.lexer.cmd_argument_state.restart()
self.lexer.str_term = p[0].getstrterm()
if p[1]:
return BoxAST(ast.DynamicString([ast.Block(p[1].getastlist())]))
else:
return None
@pg.production("STRING_DEND : ")
def string_dend(self, p):
return None
@pg.production("string_dbeg : STRING_DBEG")
def string_dbeg(self, p):
str_term = self.lexer.str_term
self.lexer.condition_state.stop()
self.lexer.cmd_argument_state.stop()
self.lexer.str_term = None
self.lexer.state = self.lexer.EXPR_BEG
return BoxStrTerm(str_term)
@pg.production("string_dvar : GVAR")
def string_dvar_gvar(self, p):
return self.new_global(p[0])
@pg.production("string_dvar : IVAR")
def string_dvar_ivar(self, p):
return self.new_instance_var(p[0])
@pg.production("string_dvar : CVAR")
def string_dvar_cvar(self, p):
return self.new_class_var(p[0])
@pg.production("string_dvar : backref")
def string_dvar_backref(self, p):
return p[0]
@pg.production("symbol : SYMBEG sym")
def symbol(self, p):
self.lexer.state = self.lexer.EXPR_END
return p[1]
@pg.production("sym : CVAR")
@pg.production("sym : GVAR")
@pg.production("sym : IVAR")
@pg.production("sym : fname")
def sym(self, p):
return p[0]
@pg.production("dsym : SYMBEG xstring_contents STRING_END")
def dsym(self, p):
box = p[1]
if box is None:
return BoxAST(ast.ConstantSymbol(""))
node = box.getast()
if isinstance(node, ast.ConstantString):
return BoxAST(ast.ConstantSymbol(node.strvalue))
else:
return BoxAST(ast.Symbol(node, p[0].getsourcepos().lineno))
@pg.production("numeric : simple_numeric")
def numeric_simple(self, p):
return p[0]
@pg.production("numeric : UMINUS_NUM simple_numeric", precedence="LOWEST")
def numeric_minus_integer(self, p):
return p[1].negate()
@pg.production("simple_numeric : INTEGER")
def simple_numeric_integer(self, p):
return BoxNumericAST(self._parse_int(p[0].getstr()))
@pg.production("simple_numeric : FLOAT")
def simple_numeric_float(self, p):
return BoxNumericAST(ast.ConstantFloat(float(p[0].getstr())))
@pg.production("simple_numeric : RATIONAL")
def simple_numeric_rational(self, p):
return BoxNumericAST(self._parse_rational(
p[0].getstr(),
p[0].getsourcepos().lineno
))
@pg.production("simple_numeric : IMAGINARY")
def simple_numeric_imaginary(self, p):
return BoxNumericAST(self._parse_imaginary(
p[0].getstr(),
p[0].getsourcepos().lineno
))
@pg.production("user_variable : IDENTIFIER")
def variable_identifier(self, p):
return BoxAST(ast.Variable(p[0].getstr(), p[0].getsourcepos().lineno))
@pg.production("user_variable : IVAR")
def variable_ivar(self, p):
return self.new_instance_var(p[0])
@pg.production("user_variable : GVAR")
def variable_gvar(self, p):
return self.new_global(p[0])
@pg.production("user_variable : CONSTANT")
def variable_constant(self, p):
return BoxAST(ast.Constant(
p[0].getstr(),
p[0].getsourcepos().lineno
))
@pg.production("user_variable : CVAR")
def variable_cvar(self, p):
return self.new_class_var(p[0])
@pg.production("keyword_variable : NIL")
def variable_nil(self, p):
return BoxAST(ast.Nil())
@pg.production("keyword_variable : SELF")
def variable_self(self, p):
return BoxAST(ast.Self(p[0].getsourcepos().lineno))
@pg.production("keyword_variable : TRUE")
def variable_true(self, p):
return BoxAST(ast.ConstantBool(True))
@pg.production("keyword_variable : FALSE")
def variable_false(self, p):
return BoxAST(ast.ConstantBool(False))
@pg.production("keyword_variable : __FILE__")
def variable__file__(self, p):
return BoxAST(ast.File())
@pg.production("keyword_variable : __LINE__")
def variable__line__(self, p):
return BoxAST(ast.Line(p[0].getsourcepos().lineno))
@pg.production("keyword_variable : __ENCODING__")
def variable__encoding__(self, p):
raise NotImplementedError(p)
return BoxAST(ast.Encoding())
    @pg.production("var_ref : keyword_variable")
    @pg.production("var_ref : user_variable")
    def var_ref(self, p):
        """Resolve a bare name in expression position.

        A plain identifier is a local variable read if the symbol table knows
        it, otherwise an implicit zero-argument method call on self.
        """
        node = p[0].getast()
        if isinstance(node, ast.Variable):
            if self.lexer.symtable.is_defined(node.name):
                # known local: record the read and keep the Variable node
                self.lexer.symtable.declare_read(node.name)
                return p[0]
            else:
                # unknown name: treat as self.<name>() with no args or block
                return BoxAST(ast.Send(ast.Self(node.lineno), node.name, [], None, node.lineno))
        else:
            return p[0]
@pg.production("var_lhs : keyword_variable")
@pg.production("var_lhs : user_variable")
def var_lhs(self, p):
return self.assignable(p[0])
@pg.production("backref : BACK_REF")
@pg.production("backref : NTH_REF")
def backref(self, p):
return p[0]
# TODO: check
@pg.production("superclass : term")
def superclass_term(self, p):
return None
@pg.production("superclass : superclass_lt expr_value term")
def superclass(self, p):
return p[1]
@pg.production("superclass_lt : LT")
def superclass_lt(self, p):
self.lexer.state = self.lexer.EXPR_BEG
@pg.production("superclass : error term")
def superclass_error(self, p):
return None
@pg.production("superclass : ")
def superclass_none(self, p):
return None
@pg.production("f_arglist : LPAREN2 f_args rparen")
def f_arglist_parens(self, p):
self.lexer.state = self.lexer.EXPR_BEG
self.lexer.command_start = True
return p[1]
@pg.production("f_arglist : PRE_F_ARGLIST_LABEL_STATE f_args term")
def f_arglist(self, p):
self.lexer.state = self.lexer.EXPR_BEG
self.lexer.command_start = True
return p[1]
@pg.production("PRE_F_ARGLIST_LABEL_STATE : ")
def pre_args_arglist_fargs_term(self, p):
self.lexer.label_state = self.lexer.EXPR_LABEL
@pg.production("args_tail : f_kwarg LITERAL_COMMA f_kwrest opt_f_block_arg")
def args_tail_kwarg_kwrest_block(self, p):
return self.new_argstail(keywords=p[0], kwrest=p[2], block_arg=p[3])
@pg.production("args_tail : f_kwarg opt_f_block_arg")
def args_tail_kwarg_block(self, p):
return self.new_argstail(keywords=p[0], block_arg=p[1])
@pg.production("args_tail : f_kwrest opt_f_block_arg")
def args_tail_kwarg_block(self, p):
return self.new_argstail(kwrest=p[0], block_arg=p[1])
@pg.production("args_tail : f_block_arg")
def args_tail_kwarg_block(self, p):
return self.new_argstail(block_arg=p[0])
@pg.production("opt_args_tail : LITERAL_COMMA args_tail")
def opt_args_tail(self, p):
return p[1]
@pg.production("opt_args_tail : ")
def opt_args_tail(self, p):
return None
@pg.production("f_args : f_arg LITERAL_COMMA f_optarg LITERAL_COMMA f_rest_arg opt_args_tail")
def f_args_f_arg_comma_f_optarg_comma_f_rest_arg_opt_f_block_arg(self, p):
return self.new_args(
args=p[0],
optargs=p[2],
splat_arg=p[4],
args_tail=p[5],
)
@pg.production("f_args : f_arg LITERAL_COMMA f_optarg LITERAL_COMMA f_rest_arg LITERAL_COMMA f_arg opt_args_tail")
def f_args_f_arg_comma_f_optarg_comma_f_rest_arg_comma_f_arg_opt_f_block_arg(self, p):
"""
f_arg ',' f_optarg ',' f_rest_arg ',' f_arg opt_f_block_arg {
$$ = support.new_args($1.getPosition(), $1, $3, $5, $7, $8);
}
"""
return self.new_args(
args=p[0],
optargs=p[2],
splat_arg=p[4],
post_args=p[6],
args_tail=p[7]
)
@pg.production("f_args : f_arg LITERAL_COMMA f_optarg opt_args_tail")
def f_args_f_arg_comma_f_optarg_opt_f_block_arg(self, p):
return self.new_args(
args=p[0],
optargs=p[2],
args_tail=p[3],
)
@pg.production("f_args : f_arg LITERAL_COMMA f_optarg LITERAL_COMMA f_arg opt_args_tail")
def f_args_f_arg_comma_f_optarg_comma_f_arg_opt_f_block_arg(self, p):
return self.new_args(
args=p[0],
optargs=p[2],
post_args=p[4],
args_tail=p[5],
)
@pg.production("f_args : f_arg LITERAL_COMMA f_rest_arg opt_args_tail")
def f_args_f_arg_comma_f_rest_arg_opt_f_block_arg(self, p):
return self.new_args(
args=p[0],
splat_arg=p[2],
args_tail=p[3],
)
@pg.production("f_args : f_arg LITERAL_COMMA f_rest_arg LITERAL_COMMA f_arg opt_args_tail")
def f_args_f_arg_comma_f_rest_arg_comma_f_arg_opt_f_block_arg(self, p):
"""
f_arg ',' f_rest_arg ',' f_arg opt_f_block_arg {
$$ = support.new_args($1.getPosition(), $1, null, $3, $5, $6);
}
"""
return self.new_args(
args=p[0],
splat_arg=p[2],
post_args=p[4],
args_tail=p[5]
)
@pg.production("f_args : f_arg opt_args_tail")
def f_args_f_arg_opt_f_block_arg(self, p):
return self.new_args(args=p[0], args_tail=p[1])
@pg.production("f_args : f_optarg LITERAL_COMMA f_rest_arg opt_args_tail")
def f_args_f_optarg_comma_f_rest_arg_opt_f_block_arg(self, p):
return self.new_args(args=p[0], splat_arg=p[2], args_tail=p[3])
@pg.production("f_args : f_optarg LITERAL_COMMA f_rest_arg LITERAL_COMMA f_arg opt_args_tail")
def f_args_f_optarg_comma_f_rest_arg_comma_f_arg_opt_f_block_arg(self, p):
"""
f_optarg ',' f_rest_arg ',' f_arg opt_f_block_arg {
$$ = support.new_args($1.getPosition(), null, $1, $3, $5, $6);
}
"""
return self.new_args(
args=p[0],
splat_arg=p[2],
post_args=p[4],
args_tail=p[5]
)
@pg.production("f_args : f_optarg opt_args_tail")
def f_args_f_optarg_opt_f_block_arg(self, p):
return self.new_args(args=p[0], args_tail=p[1])
@pg.production("f_args : f_optarg LITERAL_COMMA f_arg opt_args_tail")
def f_args_f_optarg_comma_f_arg_opt_f_block_arg(self, p):
return self.new_args(
optargs=p[0],
post_args=p[2],
args_tail=p[3]
)
@pg.production("f_args : f_rest_arg opt_args_tail")
def f_args_f_rest_arg_opt_f_block_arg(self, p):
return self.new_args(splat_arg=p[0], args_tail=p[1])
@pg.production("f_args : f_rest_arg LITERAL_COMMA f_arg opt_args_tail")
def f_args_f_rest_arg_comma_f_arg_opt_f_block_arg(self, p):
return self.new_args(
splat_arg=p[0],
post_args=p[2],
args_tail=p[3]
)
@pg.production("f_args : args_tail")
def f_args_f_block_arg(self, p):
return self.new_args(args_tail=p[0])
@pg.production("f_args : ")
def f_args_none(self, p):
return self.new_args()
@pg.production("f_bad_arg : CONSTANT")
def f_bad_arg_constant(self, p):
raise self.error("formal argument cannot be a constant")
@pg.production("f_bad_arg : IVAR")
def f_bad_arg_ivar(self, p):
raise self.error("formal argument cannot be an instance variable")
@pg.production("f_bad_arg : GVAR")
def f_bad_arg_gvar(self, p):
raise self.error("formal argument cannot be a global variable")
@pg.production("f_bad_arg : CVAR")
def f_bad_arg_cvar(self, p):
raise self.error("formal argument cannot be a class variable")
@pg.production("f_norm_arg : f_bad_arg")
def f_norm_arg_f_bad_arg(self, p):
return p[0]
@pg.production("f_norm_arg : IDENTIFIER")
def f_norm_arg_identifier(self, p):
return BoxAST(ast.Argument(p[0].getstr()))
@pg.production("f_arg_asgn : f_norm_arg")
def f_arg_asgn(self, p):
return p[0]
@pg.production("f_arg_item : f_arg_asgn")
def f_arg_item_f_norm_arg(self, p):
node = p[0].getast(ast.Argument)
self.lexer.symtable.declare_argument(node.name)
return p[0]
@pg.production("f_arg_item : LPAREN f_margs rparen")
def f_arg_item_paren(self, p):
return BoxAST(p[1].getassignment())
@pg.production("f_arg : f_arg_item")
def f_arg_f_arg_item(self, p):
return self.new_list(p[0])
@pg.production("f_arg : f_arg LITERAL_COMMA f_arg_item")
def f_arg(self, p):
return self.append_to_list(p[0], p[2])
@pg.production("f_label : LABEL")
def f_label(self, p):
return p[0]
@pg.production("f_kw : f_label arg_value")
@pg.production("f_block_kw : f_label primary_value")
def f_kw_label_value(self, p):
self.lexer.symtable.declare_argument(p[0].getstr())
return self.new_kw_arg(p[0], p[1])
@pg.production("f_kw : f_label")
@pg.production("f_block_kw : f_label")
def f_kw_label(self, p):
self.lexer.symtable.declare_argument(p[0].getstr())
return self.new_kw_arg(p[0], None)
@pg.production("f_block_kwarg : f_block_kw")
@pg.production("f_kwarg : f_kw")
def f_kwarg(self, p):
return self.new_list(p[0])
@pg.production("f_block_kwarg : f_block_kwarg LITERAL_COMMA f_block_kw")
@pg.production("f_kwarg : f_kwarg LITERAL_COMMA f_kw")
def f_kwarg_f_kw(self, p):
return self.append_to_list(p[0], p[2])
@pg.production("kwrest_mark : POW")
@pg.production("kwrest_mark : DSTAR")
def kwrest_mark(self, p):
return p[0]
@pg.production("f_kwrest : kwrest_mark IDENTIFIER")
def f_kwrest_ident(self, p):
self.lexer.symtable.declare_argument(p[1].getstr(), self.lexer.symtable.KW_ARG)
return p[1]
@pg.production("f_kwrest : kwrest_mark")
def f_kwrest(self, p):
self.lexer.symtable.declare_argument("**", self.lexer.symtable.KW_ARG)
return self.new_token(p[0], "IDENTIFIER", "**")
@pg.production("f_opt : f_arg_asgn LITERAL_EQUAL arg_value")
def f_opt(self, p):
node = p[0].getast(ast.Argument)
self.lexer.symtable.declare_argument(node.name)
return BoxAST(ast.Argument(node.name, p[2].getast()))
@pg.production("f_block_opt : f_arg_asgn LITERAL_EQUAL primary_value")
def f_block_opt(self, p):
node = p[0].getast(ast.Argument)
self.lexer.symtable.declare_argument(node.name)
return BoxAST(ast.Argument(node.name, p[2].getast()))
@pg.production("f_block_optarg : f_block_opt")
def f_block_optarg_f_block_opt(self, p):
return self.new_list(p[0])
@pg.production("f_block_optarg : f_block_optarg LITERAL_COMMA f_block_opt")
def f_block_optarg(self, p):
return self.append_to_list(p[0], p[2])
@pg.production("f_optarg : f_opt")
def f_optarg_f_opt(self, p):
return self.new_list(p[0])
@pg.production("f_optarg : f_optarg LITERAL_COMMA f_opt")
def f_optarg(self, p):
return self.append_to_list(p[0], p[2])
@pg.production("restarg_mark : STAR")
@pg.production("restarg_mark : STAR2")
def restarg_mark(self, p):
return p[0]
@pg.production("f_rest_arg : restarg_mark IDENTIFIER")
def f_rest_arg_restarg_mark_identifer(self, p):
self.lexer.symtable.declare_argument(p[1].getstr(), self.lexer.symtable.SPLAT_ARG)
return p[1]
@pg.production("f_rest_arg : restarg_mark")
def f_rest_arg_restarg_mark(self, p):
self.lexer.symtable.declare_argument("*", self.lexer.symtable.SPLAT_ARG)
return self.new_token(p[0], "IDENTIFIER", "*")
@pg.production("blkarg_mark : AMPER")
@pg.production("blkarg_mark : AMPER2")
def blkarg_mark(self, p):
return p[0]
@pg.production("f_block_arg : blkarg_mark IDENTIFIER")
def f_block_arg(self, p):
self.lexer.symtable.declare_argument(p[1].getstr(), self.lexer.symtable.BLOCK_ARG)
return p[1]
@pg.production("opt_f_block_arg : LITERAL_COMMA f_block_arg")
def opt_f_block_arg(self, p):
return p[1]
@pg.production("opt_f_block_arg : ")
def opt_f_block_arg_empty(self, p):
return None
@pg.production("singleton : var_ref")
def singleton_var_ref(self, p):
return p[0]
@pg.production("singleton : LPAREN2 SINGLETON_SET_LEX_STATE expr rparen")
@pg.production("singleton : LPAREN SINGLETON_SET_LEX_STATE expr rparen")
def singleton_paren(self, p):
if p[2] is None:
self.error("Can't define singleton method for ().")
elif isinstance(p[2].getast(), ast.ConstantNode):
self.error("Can't define singleton method for literals")
else:
return p[2]
@pg.production("SINGLETON_SET_LEX_STATE : ")
def SINGLETON_SET_LEX_STATE(self, p):
self.lexer.state = self.lexer.EXPR_BEG
@pg.production("assoc_list : none")
def assoc_list_none(self, p):
return self.new_list()
@pg.production("assoc_list : assocs trailer")
def assoc_list(self, p):
return p[0]
@pg.production("assocs : assoc")
def assocs_assoc(self, p):
return p[0]
@pg.production("assocs : assocs LITERAL_COMMA assoc")
def assocs(self, p):
return self._new_list(p[0].getastlist() + p[2].getastlist())
@pg.production("assoc : arg_value ASSOC arg_value")
def assoc_arg_value(self, p):
return self.append_to_list(self.new_list(p[0]), p[2])
@pg.production("assoc : LABEL arg_value")
def assoc_label(self, p):
return self.append_to_list(self.new_list(self.new_symbol(p[0])), p[1])
@pg.production("assoc : STRING_BEG string_contents LABEL_END arg_value")
def assoc_string_contents(self, p):
return self.append_to_list(self.new_list(self.new_symbol(p[1])), p[1])
    @pg.production("assoc : POW arg_value")
    @pg.production("assoc : DSTAR arg_value")
    def assoc_dstar(self, p):
        """Double-splat entry (``**h``) inside a hash literal."""
        node = p[1].getast()
        if isinstance(node, ast.Hash):
            # literal hash operand: flatten its (key, value) pairs directly
            # into the flat assoc list, avoiding a runtime merge
            items = node.items
            raw_items = []
            for k, v in items:
                raw_items.append(k)
                raw_items.append(v)
            return self._new_list(raw_items)
        else:
            # we need to later merge this dynamically
            return self._new_list([ast.HashSplat(
                node,
                p[0].getsourcepos().lineno
            )])
@pg.production("operation : FID")
@pg.production("operation : CONSTANT")
@pg.production("operation : IDENTIFIER")
def operation(self, p):
return p[0]
@pg.production("operation2 : op")
@pg.production("operation2 : FID")
@pg.production("operation2 : CONSTANT")
@pg.production("operation2 : IDENTIFIER")
def operation2(self, p):
return p[0]
@pg.production("operation3 : op")
@pg.production("operation3 : FID")
@pg.production("operation3 : IDENTIFIER")
def operation3(self, p):
return p[0]
@pg.production("dot_or_colon : COLON2")
@pg.production("dot_or_colon : DOT")
def dot_or_colon(self, p):
return p[0]
@pg.production("call_op : DOT")
@pg.production("call_op : ANDDOT")
def call_op(self, p):
return p[0]
@pg.production("call_op2 : call_op")
@pg.production("call_op2 : COLON2")
def call_op2(self, p):
return p[0]
@pg.production("opt_terms : ")
def opt_terms_none(self, p):
return None
@pg.production("opt_terms : terms")
def opt_terms(self, p):
return p[0]
@pg.production("opt_nl : ")
def opt_nl_none(self, p):
return None
@pg.production("opt_nl : LITERAL_NEWLINE")
def opt_nl(self, p):
return None
@pg.production("rparen : opt_nl RPAREN")
def rparen(self, p):
return p[1]
@pg.production("rbracket : opt_nl RBRACK")
def rbracket(self, p):
return p[1]
@pg.production("trailer : ")
def trailer_none(self, p):
return None
@pg.production("trailer : LITERAL_COMMA")
@pg.production("trailer : LITERAL_NEWLINE")
def trailer(self, p):
return p[0]
@pg.production("term : LITERAL_NEWLINE")
@pg.production("term : LITERAL_SEMICOLON")
def term(self, p):
return p[0]
@pg.production("terms : term")
def terms_term(self, p):
return p[0]
@pg.production("terms : terms LITERAL_SEMICOLON")
def terms(self, p):
return p[0]
@pg.production("none : ")
def none(self, p):
return None
parser = pg.build()
class LexerWrapper(object):
    """Adapt a lexer so that exhaustion yields ``None`` instead of raising
    ``StopIteration`` (the convention expected by the rply parser)."""

    def __init__(self, lexer):
        self.lexer = lexer

    def next(self):
        """Return the next token from the wrapped lexer, or None when done."""
        try:
            token = self.lexer.next()
        except StopIteration:
            return None
        return token
class BoxAST(BaseBox):
    """Box wrapping a single AST node so it can travel through the rply
    parser (which requires BaseBox instances)."""

    def __init__(self, node):
        self.node = node

    @specialize.arg(1)
    def getast(self, cls=None):
        """Return the wrapped node; when *cls* is given, assert its type.

        The @specialize.arg(1) decorator makes RPython compile one version
        per *cls* argument so the isinstance assert is statically checkable.
        """
        node = self.node
        if cls is not None:
            assert isinstance(node, cls)
        return node
class BoxASTList(BaseBox):
    """Box wrapping a list of AST nodes."""

    def __init__(self, nodes):
        self.nodes = nodes

    def getastlist(self):
        """Return the wrapped list of nodes."""
        return self.nodes
class BoxCallArgs(BaseBox):
    """
    A box for the arguments of a call/send: the positional/keyword argument
    nodes plus an optional block argument node.
    """

    def __init__(self, args, block):
        self.args = args
        self.block = block

    def getcallargs(self):
        """Return the list of argument nodes."""
        return self.args

    def getcallblock(self):
        """Return the block argument node, or None."""
        return self.block
class BoxInt(BaseBox):
    """Box wrapping a plain integer value."""

    def __init__(self, intvalue):
        self.intvalue = intvalue

    def getint(self):
        """Return the wrapped integer."""
        return self.intvalue
class BoxArgs(BaseBox):
    """
    A box for the arguments of a function/block definition.

    Holds the parsed parameter groups: positional args, splat (*) arg,
    post-splat args, keyword args, keyword-rest (**) arg and block (&) arg.
    """

    def __init__(self, args, splat_arg, post_args, kwargs, kwrest_arg, block_arg):
        self.args = args
        self.splat_arg = splat_arg
        self.post_args = post_args
        self.kwargs = kwargs
        self.kwrest_arg = kwrest_arg
        self.block_arg = block_arg

    def getargs(self):
        return self.args

    def getpostargs(self):
        return self.post_args

    def getsplatarg(self):
        return self.splat_arg

    def getkwargs(self):
        return self.kwargs

    def getkwrestarg(self):
        return self.kwrest_arg

    def getblockarg(self):
        return self.block_arg
class BoxArgsTail(BaseBox):
    """Box for the trailing portion of a parameter list: keyword args,
    keyword-rest (**) arg and block (&) arg."""

    def __init__(self, kwargs, kwrest, block_arg):
        self.kwargs = kwargs
        self.kwrest = kwrest
        self.block_arg = block_arg

    def getkwargsbox(self):
        return self.kwargs

    def getkwrestbox(self):
        return self.kwrest

    def getblockargbox(self):
        return self.block_arg
class BoxStrTerm(BaseBox):
    """Box carrying the lexer's saved string-terminator state across an
    interpolation (``#{...}``) boundary so it can be restored afterwards."""

    def __init__(self, str_term):
        self.str_term = str_term

    def getstrterm(self):
        """Return the saved string terminator state."""
        return self.str_term
class BoxAssignableList(BaseBox):
    """Box for a list of assignment targets (multiple-assignment LHS)."""

    def __init__(self, vars):
        self.vars = vars

    def getassignment(self):
        """Wrap the targets into a single MultiAssignable node."""
        return ast.MultiAssignable(self.vars)

    def getvars(self):
        """Return the raw list of assignable targets."""
        return self.vars
class BoxForVars(BaseBox):
    """Box pairing a for-loop variable with a synthetic block argument.

    NOTE(review): the Argument is named "0" -- presumably a hidden receiver
    for the value yielded to the for-loop body, later destructured into
    ``for_var``; confirm against the ``for`` production's usage.
    """

    def __init__(self, for_var):
        self.for_var = for_var
        self.argument = ast.Argument("0")

    def getargument(self):
        """Return the synthetic Argument node."""
        return self.argument

    def get_for_var(self):
        """Return the user-visible for-loop variable(s)."""
        return self.for_var
class BoxNumericAST(BoxAST):
    """BoxAST specialization for numeric literals, supporting negation for
    the unary-minus-literal production."""

    def negate(self):
        # delegate to the wrapped numeric node's own negate()
        return BoxNumericAST(self.node.negate())
class BoxBraceBody(BaseBox):
    """Box for a brace-block body: its parameter list and its statements."""

    def __init__(self, block_param, compstmt):
        self.block_param = block_param
        self.compstmt = compstmt

    def getblockparam(self):
        """Return the block's parameter list box (may be None)."""
        return self.block_param

    def getblockstmts(self):
        """Return the block's statement list box."""
        return self.compstmt
| StarcoderdataPython |
6690942 | <gh_stars>10-100
# -*- coding: utf-8 -*-
from ..auth import auth
#
# Copyright (c) 2019 <NAME>.
#
# This file is part of Dremio Client
# (see https://github.com/rymurr/dremio_client).
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from .config_parser import build_config
def get_base_url_token(args=None):
    """Build the Dremio base URL from config and authenticate against it.

    :param args: optional overrides forwarded to :func:`build_config`
    :return: tuple of (base_url, auth token, ssl-verify setting)
    """
    config = build_config(args)
    scheme = "https" if config["ssl"].get(bool) else "http"
    base_url = "{}://{}:{}".format(
        scheme, config["hostname"].get(), config["port"].get(int)
    )
    return base_url, auth(base_url, config), config["verify"].get()
| StarcoderdataPython |
1894289 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# ======================================================================
# Copyright 2016 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ======================================================================
from time import time
from typing import Any, Optional
from supervisor.loggers import Logger
from .context import Context
from .strategy import conciliate_conflicts
from .ttypes import AddressStates, RunningFailureStrategies, SupvisorsStates, NameList, Payload, PayloadList
class AbstractState(object):
    """ Base class for a state with simple entry / next / exit actions.

    Attributes are:
        - supvisors: the reference to the global Supvisors structure,
        - address_name: the name of the local node.
    """

    def __init__(self, supvisors: Any) -> None:
        """ Initialization of the attributes.

        :param supvisors: the global Supvisors structure
        """
        self.supvisors = supvisors
        # shortcuts to the most frequently used members
        self.context = supvisors.context
        self.logger = supvisors.logger
        self.local_node_name = supvisors.address_mapper.local_node_name

    def enter(self) -> None:
        """ Actions performed when entering the state.
        May be redefined in subclasses.

        :return: None
        """

    def next(self) -> Optional[SupvisorsStates]:
        """ Actions performed upon reception of an event.
        May be redefined in subclasses.

        :return: the next state, or None
        """

    def exit(self) -> None:
        """ Actions performed when leaving the state.
        May be redefined in subclasses.

        :return: None
        """

    def check_nodes(self) -> Optional[SupvisorsStates]:
        """ Check that local and Master nodes are still RUNNING.
        If their ticks are not received anymore, back to INITIALIZATION state to force a synchronization phase.

        :return: the suggested state if local or Master node is not active anymore, None otherwise
        """
        if self.context.nodes[self.local_node_name].state != AddressStates.RUNNING:
            self.logger.critical('AbstractState.check_nodes: local node not RUNNING anymore')
            return SupvisorsStates.INITIALIZATION
        if self.context.nodes[self.context.master_node_name].state != AddressStates.RUNNING:
            self.logger.warn('AbstractState.check_nodes: Master node not RUNNING anymore')
            return SupvisorsStates.INITIALIZATION
        # implicit None: both local and Master nodes are healthy

    def abort_jobs(self) -> None:
        """ Abort starting jobs in progress.

        :return: None
        """
        self.supvisors.failure_handler.abort()
        self.supvisors.starter.abort()
class InitializationState(AbstractState):
    """ In the INITIALIZATION state, Supvisors synchronizes to all known instances.

    Attributes are:
        - start_date: the date when entering this state.
    """

    def enter(self) -> None:
        """ When entering in the INITIALIZATION state, reset the context.

        :return: None
        """
        # clear any existing job
        self.abort_jobs()
        # reset context, keeping the isolation status
        self.context.reset()

    def next(self) -> SupvisorsStates:
        """ Wait for nodes to publish until:
            - all are active,
            - or all core nodes defined in the optional *force_synchro_if* option are active,
            - or timeout is reached.

        :return: the new Supvisors state
        """
        uptime = time() - self.context.start_date
        if uptime > self.supvisors.options.synchro_timeout:
            # NOTE(review): the timeout only logs; leaving the state still
            # requires the local node to be RUNNING (checked just below)
            self.logger.warn('InitializationState.next: synchro timed out')
        # cannot get out of this state without local node RUNNING
        running_nodes = self.context.running_nodes()
        if self.local_node_name in running_nodes:
            # synchro done if the state of all nodes is known
            if len(self.context.unknown_nodes()) == 0:
                self.logger.info('InitializationState.next: all nodes are in a known state')
                return SupvisorsStates.DEPLOYMENT
            # for a partial end of sync, cannot get out of this state before SYNCHRO_TIMEOUT_MIN
            # in case of a Supervisor restart, this gives a chance to all nodes to send their tick
            if uptime > self.supvisors.options.SYNCHRO_TIMEOUT_MIN:
                # check synchro on core nodes
                if self.context.running_core_nodes():
                    # if ok, master must be running if already known
                    if self.context.master_node_name and self.context.master_node_name not in running_nodes:
                        self.logger.info('InitializationState.next: all core nodes are RUNNING but not Master={}'
                                         .format(self.context.master_node_name))
                        return SupvisorsStates.INITIALIZATION
                    self.logger.info('InitializationState.next: all core nodes are RUNNING')
                    return SupvisorsStates.DEPLOYMENT
            self.logger.debug('InitializationState.next: still waiting for remote Supvisors instances to publish')
        else:
            self.logger.debug('InitializationState.next: local node {} still not RUNNING'.format(self.local_node_name))
        return SupvisorsStates.INITIALIZATION

    def exit(self) -> None:
        """ When leaving the INITIALIZATION state, the working nodes are defined.
        One of them is elected as the MASTER.

        :return: None
        """
        # force state of missing Supvisors instances
        node_names = self.context.running_nodes()
        self.logger.info('InitializationState.exit: working with nodes {}'.format(node_names))
        # elect master node among working nodes only if not fixed before
        # of course master node must be running
        if not self.context.master_node_name or self.context.master_node_name not in node_names:
            # choose Master among the core nodes because these nodes are expected to be more present
            if self.supvisors.options.force_synchro_if:
                running_core_node_names = set(node_names).intersection(self.supvisors.options.force_synchro_if)
                if running_core_node_names:
                    node_names = running_core_node_names
            # arbitrary choice: the Master is the node with the 'lowest' node_name among running nodes
            self.context.master_node_name = min(node_names)
class MasterDeploymentState(AbstractState):
    """ In the DEPLOYMENT state, Supvisors starts automatically the applications having a starting model. """

    def enter(self) -> None:
        """ Trigger the automatic start and stop. """
        # entering DEPLOYMENT consumes any pending redeploy request
        self.supvisors.fsm.redeploy_mark = False
        self.supvisors.starter.start_applications()

    def next(self) -> SupvisorsStates:
        """ Check if the starting tasks are completed.

        :return: the new Supvisors state
        """
        # common check on local and Master nodes
        next_state = self.check_nodes()
        if next_state:
            return next_state
        # Master goes to OPERATION when starting is completed
        if self.supvisors.starter.is_starting_completed():
            return SupvisorsStates.OPERATION
        # stay in current state
        return SupvisorsStates.DEPLOYMENT
class MasterOperationState(AbstractState):
    """ In the OPERATION state, Supvisors is waiting for requests. """

    def next(self) -> SupvisorsStates:
        """ Check that all nodes are still active.
        Look after possible conflicts due to multiple running instances of the same program.

        :return: the new Supvisors state
        """
        # common check on local and Master nodes
        next_state = self.check_nodes()
        if next_state:
            return next_state
        # normal behavior: only evaluate transitions when no start/stop job is in progress
        if self.supvisors.starter.is_starting_completed() and self.supvisors.stopper.is_stopping_completed():
            # check duplicated processes
            if self.context.conflicting():
                return SupvisorsStates.CONCILIATION
            # a redeploy mark has been set due to a new node in Supvisors
            # back to DEPLOYMENT state to repair what may have failed before
            if self.supvisors.fsm.redeploy_mark:
                return SupvisorsStates.DEPLOYMENT
        return SupvisorsStates.OPERATION
class MasterConciliationState(AbstractState):
    """ In the CONCILIATION state, Supvisors conciliates the conflicts. """

    def enter(self) -> None:
        """ When entering in the CONCILIATION state, conciliate automatically the conflicts. """
        conciliate_conflicts(self.supvisors,
                             self.supvisors.options.conciliation_strategy,
                             self.context.conflicts())

    def next(self) -> SupvisorsStates:
        """ Check that all addresses are still active.
        Wait for all conflicts to be conciliated.

        :return: the new Supvisors state
        """
        # common check on local and Master nodes
        next_state = self.check_nodes()
        if next_state:
            return next_state
        # only evaluate transitions when no start/stop job is in progress
        if self.supvisors.starter.is_starting_completed() and self.supvisors.stopper.is_stopping_completed():
            # back to OPERATION when there is no conflict anymore
            if not self.context.conflicting():
                return SupvisorsStates.OPERATION
            # new conflicts may happen while conciliation is in progress
            # call enter again to trigger a new conciliation
            self.enter()
        return SupvisorsStates.CONCILIATION
class MasterRestartingState(AbstractState):
    """ In the RESTARTING state, Supvisors stops all applications before triggering a full restart. """

    def enter(self) -> None:
        """ When entering in the RESTARTING state, stop all applications.
        The current design is that the current node drives the job and not necessarily the Master.

        :return: None
        """
        self.abort_jobs()
        self.supvisors.stopper.stop_applications()

    def next(self) -> SupvisorsStates:
        """ Wait for all processes to be stopped.

        :return: the new Supvisors state
        """
        next_state = self.check_nodes()
        if next_state:
            # no way going back to INITIALIZATION state at this point
            return SupvisorsStates.SHUTDOWN
        if self.supvisors.stopper.is_stopping_completed():
            return SupvisorsStates.SHUTDOWN
        return SupvisorsStates.RESTARTING

    def exit(self) -> None:
        """ When leaving the RESTARTING state, request the full restart. """
        self.supvisors.zmq.pusher.send_restart(self.local_node_name)
        # other nodes will shutdown on reception of SHUTDOWN state
        # due to Supvisors design, the state publication will be fired before the send_shutdown
class MasterShuttingDownState(AbstractState):
    """ In the SHUTTING_DOWN state, Supvisors stops all applications before triggering a full shutdown. """

    def enter(self) -> None:
        """ When entering in the SHUTTING_DOWN state, stop all applications. """
        self.abort_jobs()
        self.supvisors.stopper.stop_applications()

    def next(self) -> SupvisorsStates:
        """ Wait for all processes to be stopped.

        :return: the new Supvisors state
        """
        # check eventual jobs in progress
        next_state = self.check_nodes()
        if next_state:
            # no way going back to INITIALIZATION state at this point
            return SupvisorsStates.SHUTDOWN
        if self.supvisors.stopper.is_stopping_completed():
            return SupvisorsStates.SHUTDOWN
        return SupvisorsStates.SHUTTING_DOWN

    def exit(self) -> None:
        """ When leaving the SHUTTING_DOWN state, request the Supervisor shutdown. """
        self.supvisors.zmq.pusher.send_shutdown(self.local_node_name)
        # other nodes will shutdown on reception of SHUTDOWN state
        # due to Supvisors design, the state publication will be fired before the send_shutdown
class ShutdownState(AbstractState):
    """ This is the final state: no entry / next / exit behaviour. """
class SlaveMainState(AbstractState):
    """ Default state of a non-Master instance: follow the Master's state. """

    def next(self) -> Optional[SupvisorsStates]:
        """ The non-master instances are just checking if local and Master instances are still running.

        :return: the next state, or None to keep following the state published by the Master
        """
        # common check on local and Master nodes
        next_state = self.check_nodes()
        if next_state:
            return next_state
        # implicit None: nothing to decide locally
class SlaveRestartingState(AbstractState):
    """ In the RESTARTING state, Supvisors stops all applications before triggering a full restart. """

    def enter(self) -> None:
        """ When entering in the RESTARTING state, abort all pending tasks applications.

        :return: None
        """
        self.abort_jobs()

    def next(self) -> Optional[SupvisorsStates]:
        """ Wait for all processes to be stopped.

        :return: SHUTDOWN if local or Master node failed, None otherwise (the Master drives the stop)
        """
        next_state = self.check_nodes()
        if next_state:
            # no way going back to INITIALIZATION state at this point
            return SupvisorsStates.SHUTDOWN
        # implicit None: keep following the Master while stopping is in progress

    def exit(self) -> None:
        """ When leaving the RESTARTING state, request the Supervisor restart. """
        self.supvisors.zmq.pusher.send_restart(self.local_node_name)
class SlaveShuttingDownState(SlaveRestartingState):
    """ In the SHUTTING_DOWN state, Supvisors stops all applications before triggering a full shutdown.

    Only the exit actions are different from the RESTARTING state.
    """

    def exit(self) -> None:
        """ When leaving the SHUTTING_DOWN state, request the Supervisor shutdown. """
        self.supvisors.zmq.pusher.send_shutdown(self.local_node_name)
class FiniteStateMachine:
    """ This class implements a very simple behaviour of FiniteStateMachine based on a single event.
    A state is able to evaluate itself for transitions. """
    def __init__(self, supvisors: Any) -> None:
        """ Reset the state machine and the internal context.
        :param supvisors: the Supvisors global structure
        """
        self.supvisors = supvisors
        self.context: Context = supvisors.context
        self.logger: Logger = supvisors.logger
        self.state: Optional[SupvisorsStates] = None
        self.instance: AbstractState = AbstractState(supvisors)
        # set when a new node joins while running; checked later to consider a deferred re-DEPLOYMENT
        self.redeploy_mark: bool = False
        # Trigger first state / INITIALIZATION
        self.set_state(SupvisorsStates.INITIALIZATION)
    def next(self) -> None:
        """ Send the event to the state and transitions if possible.
        The state machine re-sends the event as long as it transitions.
        :return: None
        """
        self.set_state(self.instance.next())
    def set_state(self, next_state: SupvisorsStates, force_transition: bool = None) -> None:
        """ Update the current state of the state machine and transitions as long as possible.
        The transition can be forced, especially when getting the first Master state.
        :param next_state: the new state
        :param force_transition: if True, transition validity is not checked
        :return: None
        """
        # loop as long as the freshly entered state immediately proposes a further transition
        while next_state is not None and next_state != self.state:
            # check that the transition is allowed
            if not force_transition and next_state not in self._Transitions[self.state]:
                self.logger.critical('FiniteStateMachine.set_state: unexpected transition from {} to {}'
                                     .format(self.state, next_state))
                break
            # exit the current state
            self.instance.exit()
            # assign the new state and publish SupvisorsStatus event internally and externally
            self.state = next_state
            self.logger.info('FiniteStateMachine.set_state: Supvisors in {}'.format(self.state.name))
            if self.supvisors.zmq:
                # the zmq does not exist yet for the first occurrence here
                self.supvisors.zmq.internal_publisher.send_state_event(self.serial())
                self.supvisors.zmq.publisher.send_supvisors_status(self.serial())
            # create the new state and enters it
            # Master and slave nodes use different state implementations for the same state enum
            if self.context.is_master:
                self.instance = self._MasterStateInstances[self.state](self.supvisors)
            else:
                self.instance = self._SlaveStateInstances[self.state](self.supvisors)
            self.instance.enter()
            # evaluate current state
            next_state = self.instance.next()
    def on_timer_event(self) -> NameList:
        """ Periodic task used to check if remote Supvisors instances are still active.
        This is also the main event on this state machine. """
        process_failures = self.context.on_timer_event()
        self.logger.debug('FiniteStateMachine.on_timer_event: process_failures={}'.format(process_failures))
        # get invalidated nodes / use next / update processes on invalidated nodes ?
        self.next()
        # fix failures if any (can happen after a node invalidation, a process crash or a conciliation request)
        if self.context.is_master:
            for process in process_failures:
                self.supvisors.failure_handler.add_default_job(process)
            self.supvisors.failure_handler.trigger_jobs()
        # check if new isolating remotes and return the list of newly isolated nodes
        return self.context.handle_isolation()
    def on_tick_event(self, node_name: str, event: Payload):
        """ This event is used to refresh the data related to the address. """
        self.context.on_tick_event(node_name, event)
    def on_process_state_event(self, node_name: str, event: Payload) -> None:
        """ This event is used to refresh the process data related to the event and address.
        This event also triggers the application starter and/or stopper.
        :param node_name: the node that sent the event
        :param event: the process event
        :return: None
        """
        process = self.context.on_process_state_event(node_name, event)
        # returned process may be None if the event is linked to an unknown or an isolated node
        if process:
            # feed starter with event
            self.supvisors.starter.on_event(process)
            # feed stopper with event
            self.supvisors.stopper.on_event(process)
            # trigger an automatic (so master only) behaviour for a running failure
            # process crash triggered only if running failure strategy related to application
            # Supvisors does not replace Supervisor in the present matter (use autorestart if necessary)
            if self.context.is_master and process.crashed():
                # local variables to keep it readable
                strategy = process.rules.running_failure_strategy
                stop_strategy = strategy == RunningFailureStrategies.STOP_APPLICATION
                restart_strategy = strategy == RunningFailureStrategies.RESTART_APPLICATION
                # to avoid infinite application restart, exclude the case where process state is forced
                # indeed the process state forced to FATAL can only happen during a starting sequence (no node found)
                # retry is useless
                # NOTE(review): by Python precedence this reads as
                # stop_strategy or (restart_strategy and process.forced_state is None);
                # the comment above suggests the forced-state exclusion may have been
                # intended for both strategies -- confirm before changing
                if stop_strategy or restart_strategy and process.forced_state is None:
                    self.supvisors.failure_handler.add_default_job(process)
    def on_process_added_event(self, node_name: str, event: Payload) -> None:
        """ This event is used to fill the internal structures when a process has been added on a node.
        :param node_name: the node that sent the information
        :param event: the process information
        :return: None
        """
        self.context.load_processes(node_name, [event])
    def on_process_removed_event(self, node_name: str, event: Payload) -> None:
        """ This event is used to fill the internal structures when a process has been added on a node.
        :param node_name: the node that sent the event
        :param event: the process identification
        :return: None
        """
        self.context.on_process_removed_event(node_name, event)
    def on_state_event(self, node_name: str, event: Payload) -> None:
        """ This event is used to get the FSM state of the master node.
        :param node_name: the node that sent the event
        :param event: the state event
        :return: None
        """
        # slaves follow the Master state; events from any other node are ignored
        if not self.context.is_master and node_name == self.context.master_node_name:
            master_state = SupvisorsStates(event['statecode'])
            self.logger.info('FiniteStateMachine.on_state_event: Master node_name={} transitioned to state={}'
                             .format(node_name, master_state))
            self.set_state(master_state)
    def on_process_info(self, node_name: str, info: PayloadList) -> None:
        """ This event is used to fill the internal structures with processes available on node.
        :param node_name: the node that sent the event
        :param info: the process information
        :return: None
        """
        self.context.load_processes(node_name, info)
    def on_authorization(self, node_name: str, authorized: bool, master_node_name: str,
                         supvisors_state: SupvisorsStates) -> None:
        """ This event is used to finalize the port-knocking between Supvisors instances.
        When a new node comes in the group, back to DEPLOYMENT for a possible deployment.
        :param node_name: the node name from which the event comes
        :param authorized: the authorization status as seen by the remote node
        :param master_node_name: the master node perceived by the remote node
        :param supvisors_state: the Supvisors state perceived by the remote node
        :return: None
        """
        self.logger.info('FiniteStateMachine.on_authorization: node_name={} authorized={} master_node_name={}'
                         ' supvisors_state={}'.format(node_name, authorized, master_node_name, supvisors_state))
        if self.context.on_authorization(node_name, authorized):
            # a new node comes in group
            # a DEPLOYMENT phase is considered as applications could not be fully started due to this missing node
            # the idea of simply going back to INITIALIZATION is rejected as it would imply a re-synchronization
            if self.context.is_master:
                if self.state in [SupvisorsStates.DEPLOYMENT, SupvisorsStates.OPERATION, SupvisorsStates.CONCILIATION]:
                    # it may not be relevant to transition directly to DEPLOYMENT from here
                    # the DEPLOYMENT and CONCILIATION states are temporary and pending on actions to be completed
                    # so mark the context to remember that a re-DEPLOYMENT can be considered at OPERATION level
                    self.redeploy_mark = True
                    self.logger.info('FiniteStateMachine.on_authorization: new node={}. defer re-DEPLOYMENT'
                                     .format(node_name))
            if master_node_name:
                if not self.context.master_node_name:
                    # local Supvisors doesn't know about a master yet but remote Supvisors does
                    # typically happens when the local Supervisor has just been started whereas a Supvisors group
                    # was already operating, so accept remote perception
                    self.logger.warn('FiniteStateMachine.on_authorization: accept master_node={} declared by node={}'
                                     .format(master_node_name, node_name))
                    self.context.master_node_name = master_node_name
                if master_node_name != self.context.master_node_name:
                    # 2 different perceptions of the master, likely due to a split-brain situation
                    # so going back to INITIALIZATION to fix
                    self.logger.warn('FiniteStateMachine.on_authorization: master node conflict. '
                                     ' local declares {} - remote ({}) declares {}'
                                     .format(self.context.master_node_name, node_name, master_node_name))
                    # no need to restrict to [DEPLOYMENT, OPERATION, CONCILIATION] as other transitions are forbidden
                    self.set_state(SupvisorsStates.INITIALIZATION)
                elif master_node_name == node_name:
                    # accept the remote Master state
                    # FIXME: not possible as long as local node itself is not authorized !
                    self.logger.info('FiniteStateMachine.on_authorization: Master node_name={} is in state={}'
                                     .format(node_name, supvisors_state))
                    self.set_state(supvisors_state, True)
    def on_restart(self) -> None:
        """ This event is used to transition the state machine to the RESTARTING state.
        :return: None
        """
        if self.context.is_master:
            self.set_state(SupvisorsStates.RESTARTING)
        else:
            # re-route command to Master
            self.supvisors.zmq.pusher.send_restart_all(self.context.master_node_name)
    def on_shutdown(self) -> None:
        """ This event is used to transition the state machine to the SHUTTING_DOWN state.
        :return: None
        """
        if self.context.is_master:
            self.set_state(SupvisorsStates.SHUTTING_DOWN)
        else:
            # re-route command to Master
            self.supvisors.zmq.pusher.send_shutdown_all(self.context.master_node_name)
    # serialization
    def serial(self) -> Payload:
        """ Return a serializable form of the SupvisorsState.
        :return: the Supvisors state as a dictionary
        """
        return {'statecode': self.state.value, 'statename': self.state.name}
    # Map between state enumerations and classes
    _MasterStateInstances = {SupvisorsStates.INITIALIZATION: InitializationState,
                             SupvisorsStates.DEPLOYMENT: MasterDeploymentState,
                             SupvisorsStates.OPERATION: MasterOperationState,
                             SupvisorsStates.CONCILIATION: MasterConciliationState,
                             SupvisorsStates.RESTARTING: MasterRestartingState,
                             SupvisorsStates.SHUTTING_DOWN: MasterShuttingDownState,
                             SupvisorsStates.SHUTDOWN: ShutdownState}
    _SlaveStateInstances = {SupvisorsStates.INITIALIZATION: InitializationState,
                            SupvisorsStates.DEPLOYMENT: SlaveMainState,
                            SupvisorsStates.OPERATION: SlaveMainState,
                            SupvisorsStates.CONCILIATION: SlaveMainState,
                            SupvisorsStates.RESTARTING: SlaveRestartingState,
                            SupvisorsStates.SHUTTING_DOWN: SlaveShuttingDownState,
                            SupvisorsStates.SHUTDOWN: ShutdownState}
    # Transitions allowed between states
    _Transitions = {None: [SupvisorsStates.INITIALIZATION],
                    SupvisorsStates.INITIALIZATION: [SupvisorsStates.DEPLOYMENT],
                    SupvisorsStates.DEPLOYMENT: [SupvisorsStates.INITIALIZATION,
                                                 SupvisorsStates.OPERATION,
                                                 SupvisorsStates.RESTARTING,
                                                 SupvisorsStates.SHUTTING_DOWN],
                    SupvisorsStates.OPERATION: [SupvisorsStates.CONCILIATION,
                                                SupvisorsStates.DEPLOYMENT,
                                                SupvisorsStates.INITIALIZATION,
                                                SupvisorsStates.RESTARTING,
                                                SupvisorsStates.SHUTTING_DOWN],
                    SupvisorsStates.CONCILIATION: [SupvisorsStates.OPERATION,
                                                   SupvisorsStates.INITIALIZATION,
                                                   SupvisorsStates.RESTARTING,
                                                   SupvisorsStates.SHUTTING_DOWN],
                    SupvisorsStates.RESTARTING: [SupvisorsStates.SHUTDOWN],
                    SupvisorsStates.SHUTTING_DOWN: [SupvisorsStates.SHUTDOWN],
                    SupvisorsStates.SHUTDOWN: []}
| StarcoderdataPython |
6534008 | __author__ = 'rcj1492'
__created__ = '2018.02'
__license__ = 'MIT'
def compile_run_kwargs(service_config, service_repo, service_alias, service_tag, service_path, system_envvar):
    """Build the keyword arguments needed to run a container for a service.

    :param service_config: docker-compose style service options; may contain
        'environment', 'ports', 'volumes', 'command' and 'networks'
    :param service_repo: image repository name
    :param service_alias: container alias (name)
    :param service_tag: image tag
    :param service_path: root path of the service, used to resolve bind mounts
    :param system_envvar: dict of system environment variables (not mutated)
    :return: dict of container run options
    """
    import re
    from os import path
    run_kwargs = {
        'image_name': service_repo,
        'container_alias': service_alias,
        'image_tag': service_tag,
        # copy so the caller's mapping is not mutated by the merge below
        'environmental_variables': dict(system_envvar),
        'mapped_ports': {},
        'mounted_volumes': {},
        'start_command': '',
        'network_name': ''
    }
    envvars = run_kwargs['environmental_variables']
    # merge service environment; system variables take precedence
    for key, value in service_config.get('environment', {}).items():
        envvars.setdefault(key.upper(), value)
    # "HOST:CONTAINER" port declarations
    for port in service_config.get('ports', []):
        sys_port, con_port = port.split(':')[:2]
        run_kwargs['mapped_ports'][sys_port] = con_port
    # bind mounts are resolved relative to the service path
    for volume in service_config.get('volumes', []):
        if volume['type'] == 'bind':
            volume_path = path.join(service_path, volume['source'])
            run_kwargs['mounted_volumes'][volume_path] = volume['target']
    if 'command' in service_config:
        # raw strings so the regex metacharacter escapes survive Python string escaping
        bracket_pattern = re.compile(r'\$\{.*?\}')
        space_pattern = re.compile(r'\$.*?(\s|$)')
        def _replace_bracket(match):
            envvar_key = match.group()[2:-1]
            # leave unknown variables untouched: the previous callback returned
            # None for them, which made re.sub raise TypeError
            return envvars.get(envvar_key, match.group())
        def _replace_space(match):
            envvar_key = match.group().strip()[1:]
            if envvar_key in envvars:
                # keep the captured separator: the previous implementation
                # swallowed it, gluing the value to the next word
                return envvars[envvar_key] + match.group(1)
            return match.group()
        start_command = bracket_pattern.sub(_replace_bracket, service_config['command'])
        start_command = space_pattern.sub(_replace_space, start_command)
        run_kwargs['start_command'] = start_command
    if service_config.get('networks'):
        # only the first declared network is used for 'docker run --network'
        run_kwargs['network_name'] = service_config['networks'][0]
    return run_kwargs
def compile_run_command(run_kwargs, root_path='./', os='Linux'):
    """Compose the ``docker run`` shell command from the run keyword arguments.

    :param run_kwargs: dict produced by compile_run_kwargs
    :param root_path: path prefix for mounted volume sources
    :param os: host operating system name ('Linux' or 'Windows');
        NOTE: this parameter shadows the stdlib ``os`` module in this scope
    :return: the docker run command as a string
    """
    from os import path
    # on Windows an extra leading slash prevents shell path mangling of the mount spec
    windows_path = ''
    if os == 'Windows':  # BUGFIX: was "os in ('Windows')", a substring test that also matched 'Win', 'dow', ...
        windows_path = '/'
    sys_cmd = 'docker run --name %s' % run_kwargs['container_alias']
    for key, value in run_kwargs['environmental_variables'].items():
        sys_cmd += ' -e %s=%s' % (key.upper(), value)
    for key, value in run_kwargs['mapped_ports'].items():
        sys_cmd += ' -p %s:%s' % (key, value)
    for key, value in run_kwargs['mounted_volumes'].items():
        sys_cmd += ' -v %s"${pwd}/%s":%s' % (windows_path, path.join(path.relpath(root_path), key), value)
    if run_kwargs['network_name']:
        sys_cmd += ' --network %s' % run_kwargs['network_name']
    sys_cmd += ' -d %s' % run_kwargs['image_name']
    if run_kwargs['image_tag']:
        sys_cmd += ':%s' % run_kwargs['image_tag']
    if run_kwargs['start_command']:
        sys_cmd += ' %s' % run_kwargs['start_command'].strip()
    return sys_cmd
3581519 | <reponame>willhayslett/gehome<gh_stars>10-100
from .ge_exception import GeException
class GeDuplicateApplianceError(GeException):
"""Error raised when a duplicate appliance is attempted to be added"""
pass
| StarcoderdataPython |
310431 | <gh_stars>0
#!/usr/local/bin/python
# -*- coding:utf-8 -*-
# Publish demo for RedisHelper (original comment said "订阅"/subscribe, but this script publishes)
from RedisHelper import RedisHelper
# Instantiate the helper and publish one message through it.
obj = RedisHelper()
obj.publish('nihao')  # publish ('nihao' is presumably the message; confirm against RedisHelper.publish's signature)
| StarcoderdataPython |
1604342 | <reponame>SpencerEricksen/PCBA_downloads_oxphos
import pandas as pd
import numpy as np
# Load the (AID, CID) pairs and the pipe-delimited assay descriptions.
df1 = pd.read_csv('all_oxphos_aids_cids.csv')
df2 = pd.read_csv('./assay_descriptions/all_assays_desc.csv', sep="|")
# Right merge on AID: keep every (AID, CID) row, attaching descriptions where available.
df3 = df2.merge( df1, how='right', on='AID')
df3.to_csv('all_oxphos_aids_cids_assaydesc.csv', sep="|", index=False )
| StarcoderdataPython |
1988306 | <filename>blockchain-api/utils.py
import hashlib
import json
# Creates a SHA-256 hash of the provided data (JSON-serializing it first unless already serialized)
def hash(data, serialized=False):
    """Return the hex SHA-256 digest of *data*.
    When *serialized* is False the data is first serialized to canonical JSON
    bytes via serialize(); otherwise it must already be a bytes-like payload.
    Note: this intentionally shadows the ``hash`` builtin within this module.
    """
    payload = data if serialized else serialize(data)
    return hashlib.sha256(payload).hexdigest()
def serialize(data):
    """Serialize *data* to canonical JSON bytes (keys sorted for stable hashing).
    Dicts are encoded directly; any other object is encoded via its __dict__.
    """
    mapping = data if isinstance(data, dict) else data.__dict__
    return json.dumps(mapping, sort_keys=True).encode()
def deserialize(data):
    """Parse JSON *data* (str or bytes) back into a Python object.
    BUGFIX: the previous implementation called ``json.loads(data).decode()``,
    i.e. .decode() on the *parsed* object, which raised AttributeError for any
    input; bytes must be decoded before (or by) json.loads, not after.
    """
    if isinstance(data, (bytes, bytearray)):
        data = data.decode()
    return json.loads(data)
| StarcoderdataPython |
9731715 | from __future__ import unicode_literals
from django.conf import settings
from django.utils import translation
from parler.models import TranslationDoesNotExist
from parler import appsettings
from .utils import AppTestCase
from .testapp.models import SimpleModel, AnyLanguageModel, EmptyModel
class ModelAttributeTests(AppTestCase):
    """
    Test model construction
    """
    # NOTE: self.conf_fallback / self.other_lang1 / self.other_lang2 used below are
    # presumably provided by AppTestCase -- confirm in the test utilities.
    def test_untranslated_get(self):
        """
        Test the metaclass of the model.
        """
        try:
            value = SimpleModel().tr_title
        except Exception as e:
            # the exception doubles as an AttributeError so getattr()/hasattr() behave sensibly
            self.assertIsInstance(e, TranslationDoesNotExist)
            self.assertIsInstance(e, AttributeError)
        else:
            self.fail("Expected exception from reading untranslated title, got {0}.".format(repr(value)))
        # Raising attribute error gives some additional benefits:
        self.assertEqual(getattr(SimpleModel(), 'tr_title', 'FOO'), 'FOO')
        self.assertFalse(hasattr(SimpleModel(), 'tr_title'))
    def test_default_language(self):
        """
        Test whether simple language assignments work.
        """
        with translation.override('ca-fr'):
            x = SimpleModel()  # should use get_language()
            self.assertEqual(x.get_current_language(), translation.get_language())
            self.assertEqual(translation.get_language(), 'ca-fr')
            x.shared = 'SHARED'
            x.tr_title = 'TRANS_CA'
            x.save()
        # Refetch
        with translation.override('en'):
            x = SimpleModel.objects.get(pk=x.pk)
            self.assertRaises(TranslationDoesNotExist, lambda: x.tr_title)
            # Switch context
            x.set_current_language('ca-fr')
            self.assertEqual(x.tr_title, 'TRANS_CA')
    def test_init_args(self):
        """
        Test whether passing translated attributes to __init__() works.
        """
        x = SimpleModel(tr_title='TRANS_TITLE')
        self.assertEqual(x.tr_title, "TRANS_TITLE")
        # _current_language selects the language the kwargs are stored under
        y = SimpleModel(tr_title='TRANS_TITLE', _current_language='nl')
        self.assertEqual(y.get_current_language(), 'nl')
        self.assertEqual(y.tr_title, "TRANS_TITLE")
    def test_save_multiple(self):
        """
        Test the save_translations() function to store multiple languages.
        """
        x = SimpleModel()
        x.set_current_language('en')
        x.tr_title = "TITLE_EN"
        x.set_current_language('fr')
        x.tr_title = "TITLE_FR"
        x.set_current_language('es')
        x.tr_title = "TITLE_ES"
        x.set_current_language('nl')
        x.tr_title = "TITLE_NL"
        x.save()
        # Check if all translations are saved.
        self.assertEqual(sorted(x.translations.values_list('tr_title', flat=True)), ['TITLE_EN', 'TITLE_ES', 'TITLE_FR', 'TITLE_NL'])
        self.assertEqual(sorted(x.get_available_languages()), ['en', 'es', 'fr', 'nl'])
        self.assertTrue(x.has_translation('en'))
        self.assertTrue(x.has_translation('es'))
        self.assertFalse(x.has_translation('fi'))
        # Update 2 translations.
        # Only those should be updated in the database.
        x.set_current_language('es')
        x.tr_title = "TITLE_ES2"
        x.set_current_language('nl')
        x.tr_title = "TITLE_NL2"
        self.assertNumQueries(2, x.save_translations())
        # Any unmodified language is not saved.
        x.set_current_language('it', initialize=True)
        self.assertTrue(x.has_translation('it')) # does return true for this object.
        self.assertNumQueries(0, x.save_translations())
        self.assertEqual(sorted(x.get_available_languages()), ['en', 'es', 'fr', 'nl'])
    def test_empty_model(self):
        """
        Test whether a translated model without any fields still works.
        """
        x = EmptyModel()
        x.set_current_language('en', initialize=True)
        x.set_current_language('fr', initialize=True)
        x.set_current_language('es')
        x.set_current_language('nl', initialize=True)
        x.save()
        # 'es' was never initialized, so it is not part of the saved languages
        self.assertEqual(sorted(x.get_available_languages()), ['en', 'fr', 'nl'])
    def test_fallback_language(self):
        """
        Test whether the fallback language will be returned.
        """
        x = SimpleModel()
        x.set_current_language(self.conf_fallback)
        x.tr_title = "TITLE_FALLBACK"
        x.set_current_language(self.other_lang1)
        x.tr_title = 'TITLE_XX'
        x.save()
        with translation.override(self.other_lang2):
            # no translation for other_lang2 -> the configured fallback is used
            x = SimpleModel.objects.get(pk=x.pk)
            self.assertEqual(x.tr_title, 'TITLE_FALLBACK')
    def test_any_fallback_model(self):
        """
        Test whether a failure in the fallback language can return any saved language (if configured for it).
        """
        x = AnyLanguageModel()
        x.set_current_language(self.other_lang1)
        x.tr_title = "TITLE_XX"
        x.save()
        with translation.override(self.other_lang2):
            x = AnyLanguageModel.objects.get(pk=x.pk)
            self.assertRaises(TranslationDoesNotExist, lambda: x._get_translated_model(use_fallback=True))
            self.assertEqual(x.tr_title, 'TITLE_XX') # Even though there is no current language, there is a value.
            self.assertNumQueries(0, lambda: x._get_any_translated_model()) # Can fetch from cache next time.
            self.assertEqual(x._get_any_translated_model().language_code, self.other_lang1)
    def test_any_fallback_function(self):
        """
        Test that safe_translation_getter() can fall back to any saved language.
        """
        x = SimpleModel()
        x.set_current_language(self.other_lang1)
        x.tr_title = "TITLE_XX"
        x.save()
        with translation.override(self.other_lang2):
            x = SimpleModel.objects.get(pk=x.pk)
            self.assertRaises(TranslationDoesNotExist, lambda: x._get_translated_model(use_fallback=True))
            self.assertIs(x.safe_translation_getter('tr_title', 'DEFAULT'), 'DEFAULT') # No language, gives default
            self.assertEqual(x.safe_translation_getter('tr_title', any_language=True), 'TITLE_XX') # Even though there is no current language, there is a value.
            self.assertNumQueries(0, lambda: x._get_any_translated_model()) # Can fetch from cache next time.
            self.assertEqual(x._get_any_translated_model().language_code, self.other_lang1)
    def test_save_ignore_fallback_marker(self):
        """
        Test whether the ``save_translations()`` method skips fallback languages
        """
        x = SimpleModel()
        x.set_current_language(self.other_lang1)
        x.tr_title = "TITLE_XX"
        x.set_current_language(self.other_lang2)
        # try fetching, causing an fallback marker
        x.safe_translation_getter('tr_title', any_language=True)
        # Now save. This should not raise errors
        x.save()
| StarcoderdataPython |
3400131 | from flask import Flask, jsonify, request, render_template, url_for, flash
from flask_wtf import form, Form
from wtforms import PasswordField
from wtforms.validators import DataRequired
import dbconn, student_auth,hash,os,re,json_gen, redis_updater
from flask_mail import Mail,Message
from itsdangerous import URLSafeTimedSerializer
app = Flask(__name__)
app.config.from_pyfile('flask.cfg')
app.config["SECRET_KEY"] = os.environ["SECRET_KEY"]
app.config["MAIL_PASSWORD"] = os.environ["MAIL_PASSWORD"]
m = Mail(app)
class PasswordForm(Form):
    # Minimal WTForms form used by the password-reset view: one required password field.
    password = PasswordField('Password', validators=[DataRequired()])
@app.route("/", methods=['GET'])
def hello():
    # Root endpoint: simple landing/liveness response.
    return "<h1>Hello flask</h1>"
@app.route('/api/v1/test/<int:id>', methods=['GET'])
def tester(id):
    # Database connectivity probe: delegates to dbconn.dbtest with the path id.
    # (The parameter name 'id' shadows the builtin, but is dictated by the route.)
    return dbconn.dbtest(id)
@app.route('/api/v1/signup/student', methods=['POST'])
def signup_handler():
    """Register a student account from a JSON body with name/email/password."""
    data = request.get_json()
    try:
        return student_auth.sign_up(data["name"], data["email"], data["password"])
    except Exception:
        # Narrowed from a bare "except:": a bare clause also swallowed
        # SystemExit/KeyboardInterrupt. KeyError/TypeError here means a
        # missing field or a non-object JSON body.
        return jsonify(status=0, message='Missing fields!/Error Occured! :/'), 400
@app.route('/api/v1/login/student', methods=["POST"])
def login_handler():
    """Authenticate a student from a JSON body with email/password."""
    data = request.get_json()
    try:
        return student_auth.login(data['email'], data['password'])
    except Exception:
        # Narrowed from a bare "except:" so SystemExit/KeyboardInterrupt
        # propagate; anything else (missing field, auth-layer failure) -> 400.
        return jsonify(status=0, message='Missing fields!/Error Occured! :/'), 400
def send_email(subject, recipients, html_body):
    """Send an HTML email through the application-wide Flask-Mail instance."""
    message = Message(subject, recipients=recipients)
    message.html = html_body
    m.send(message)
@app.route('/api/v1/forgot/', methods=["POST"])
def send_password_reset_email():
    """Email a time-limited password-reset link to a registered address."""
    data = request.get_json()
    user_email = data.get('email') if data else None
    # Check presence BEFORE validating: the original indexed data['email']
    # (KeyError -> 500 on a missing field) and only tested "is None" after
    # re.match had already been called, so 'Missing parameter' was unreachable.
    if user_email is None:
        return jsonify(status='Failed', message='Missing parameter')
    if not re.match(r"[^@]+@[^@]+\.[^@]+", user_email):
        return jsonify(status='Failed', message='Please enter valid email!')
    if not dbconn.email_already_exists(user_email):
        return jsonify(status='Failed', message='Email not exists!')
    # The serializer signs the email into the token; loads() enforces max_age later.
    password_reset_serializer = URLSafeTimedSerializer(app.config['SECRET_KEY'])
    password_reset_url = url_for(
        'reset_with_token',
        token=password_reset_serializer.dumps(user_email, salt='password-reset-salt'),
        _external=True)
    html = render_template('email_password_reset.html', password_reset_url=password_reset_url)
    send_email('Password Reset Requested', [user_email], html)
    return jsonify(status="Link Sent!")
@app.route('/reset/<token>', methods=["GET", "POST"])
def reset_with_token(token):
    # Verify the signed token (valid for 3600 s) and, on POST, store the new password.
    try:
        password_reset_serializer = URLSafeTimedSerializer(app.config['SECRET_KEY'])
        # loads() raises if the signature is invalid or the token is older than max_age
        email = password_reset_serializer.loads(token, salt='password-reset-salt', max_age=3600)
        print(email)
    except:
        # NOTE(review): bare except hides unrelated errors; debug print above leaks the email to logs
        flash('The password reset link is invalid or has expired.', 'error')
        #return redirect(url_for('users.login'))
        return 'inv'
    form = PasswordForm()
    if form.validate_on_submit():
        # hash the submitted password and persist it for the token's email
        hashed_password = hash.hash_pwd(form.password.data)
        status = dbconn.forgot(email,hashed_password)
        # try:
        #     user = User.query.filter_by(email=email).first_or_404()
        # except:
        #     flash('Invalid email address!', 'error')
        #     return redirect(url_for('users.login'))
        #
        # user.password = form.password.data
        # db.session.add(user)
        # db.session.commit()
        # flash('Your password has been updated!', 'success')
        # return redirect(url_for('users.login'))
        return status
    # GET (or invalid POST): render the reset form again
    return render_template('reset_password_with_token.html', form=form, token=token)
@app.route('/api/v1/get_timetable/', methods=['GET'])
def get_timetable():
    """Return the timetable for the given date/shift/batch query parameters."""
    try:
        date = request.args['date']
        shift = request.args['shift']
        batch = request.args['batch']
    except KeyError:
        # a required query parameter is absent (werkzeug raises a KeyError subclass);
        # narrowed from a bare "except:" which also hid unrelated failures
        return jsonify(status=0, message='Missing fields!/Error Occured! :/'), 400
    list1, status = json_gen.generate(date, shift, batch)
    if not status:
        return jsonify(status=status, msg="No data found!"), 400
    return jsonify(status=status, result_set=list1), 200
@app.route('/api/v1/update_cache/', methods=['POST'])
def update_cache():
    """Refresh the Redis cache for the form-posted date."""
    try:
        date = request.form.get('date')
        redis_updater.cache_to_redis(date)
        return jsonify(status=1), 200
    except Exception:
        # form.get() never raises; this guards against failures inside the
        # cache update. Narrowed from a bare "except:" so SystemExit and
        # KeyboardInterrupt are no longer swallowed.
        return jsonify(status=0, message='Missing fields!/Error Occured! :/'), 400
if __name__ == '__main__':
    # Development entry point; serve behind a WSGI server (gunicorn/uwsgi) in production.
    app.run()
| StarcoderdataPython |
8158019 | <gh_stars>0
#!/usr/bin/env python3
"""A basic CLI for the API. It allows to list, create and
delete services, exploits and targets."""
import sys
import logging
import argparse
import requests
LOGGER = logging.getLogger()
ARGS = argparse.ArgumentParser(description='CTF-PWN CLI')
SUBARGS = ARGS.add_subparsers(dest='cmd')
ARGS.add_argument(
'--api', action='store', dest='api_server',
default='127.0.0.1', help='The IP of the CTF-PWN API')
ARGS.add_argument(
'--port', action='store', dest='api_port', type=int,
default=8080, help='Port of the CTF-PWN API')
pservices = SUBARGS.add_parser('services', help='List and create services')
pservices.add_argument('--list', action='store_true', help='List all services')
pservices.add_argument('--delete', default=None, help='Delete a service')
pservices.add_argument('--name', help='Name of the service')
pservices.add_argument('--type', default='port',
help='Type of the service (port of url, default: port)')
pservices.add_argument('--port', help='Port of the service')
pservices.add_argument('--url', help='URL of the service (optional)')
pservices.add_argument('--meta', help='Any other information (optional)')
pexploits = SUBARGS.add_parser('exploits', help='List and create exploits')
pexploits.add_argument('--list', action='store_true', help='List all exploits')
pexploits.add_argument('--delete', default=None, help='Delete an exploits')
pexploits.add_argument('--service', help='The service which is exploited')
pexploits.add_argument('--exploit', help='The path of the exploit on the API host')
pexploits.add_argument('--port', help='The service port')
pexploits.add_argument('--enabled', action='store_true', dest='enabled',
default=True, help='Enable or disable the exploit (default: True)')
ptargets = SUBARGS.add_parser('targets', help='List targets')
class CtfpwnCli(object):
    """Thin command-line client around the CTF-PWN HTTP API.
    Builds the base URL from the parsed arguments and exposes one method per
    CLI action (list/create/delete for services and exploits, list targets).
    """
    def __init__(self, args):
        """Store the parsed arguments and precompute the endpoint URLs."""
        self.args = args
        self.api = 'http://' + self.args.api_server
        # omit the port suffix for the HTTP default port
        if str(self.args.api_port) != str(80):
            self.api += ':' + str(self.args.api_port)
        self.services = self.api + '/services'
        self.exploits = self.api + '/exploits'
        self.targets = self.api + '/targets'
    def create_service(self):
        """POST a new service; --name and --port are mandatory."""
        if not self.args.name or not self.args.port:
            LOGGER.error('--name and --port are mandatory')
            # BUGFIX: without this return, the request below was still sent
            # with incomplete (None) fields
            return
        data = dict(name=self.args.name,
                    type=self.args.type,
                    port=self.args.port,
                    url=self.args.url,
                    meta=self.args.meta)
        resp = requests.post(self.services, data=data)
        if resp.status_code == 201:
            print('Successfully created service')
        elif resp.status_code == 500:
            LOGGER.error('Creation of service failed')
        else:
            LOGGER.error('Unknown HTTP status: ' + str(resp.status_code))
    def delete_service(self):
        """DELETE the service identified by --delete."""
        resp = requests.delete(self.services + '/' + self.args.delete)
        if resp.status_code == 200:
            print('Successfully deleted service')
        elif resp.status_code == 404:
            LOGGER.error('Service not found')
        elif resp.status_code == 500:
            LOGGER.error('Could not delete service')
        else:
            LOGGER.error('Unknown HTTP status: ' + str(resp.status_code))
    def list_services(self):
        """GET and print all services."""
        resp = requests.get(self.services)
        print(resp.text)
    def create_exploit(self):
        """POST a new exploit; --service, --exploit and --port are mandatory."""
        if not self.args.service or not self.args.exploit \
                or not self.args.port or not isinstance(self.args.enabled, bool):
            LOGGER.error('--service, --exploit and --port are mandatory')
            # BUGFIX: early return, matching create_service
            return
        data = dict(service=self.args.service,
                    exploit=self.args.exploit,
                    port=self.args.port,
                    enabled=self.args.enabled)
        resp = requests.post(self.exploits, data=data)
        if resp.status_code == 201:
            # BUGFIX: message said 'service' (copy-paste from create_service)
            print('Successfully created exploit')
        elif resp.status_code == 500:
            LOGGER.error('Creation of exploit failed')
        else:
            LOGGER.error('Unknown HTTP status: ' + str(resp.status_code))
    def delete_exploit(self):
        """DELETE the exploit identified by --delete."""
        resp = requests.delete(self.exploits + '/' + self.args.delete)
        if resp.status_code == 200:
            print('Successfully deleted exploit')
        elif resp.status_code == 404:
            LOGGER.error('Exploit not found')
        elif resp.status_code == 500:
            LOGGER.error('Could not delete exploit')
        else:
            LOGGER.error('Unknown HTTP status: ' + str(resp.status_code))
    def list_exploits(self):
        """GET and print all exploits."""
        resp = requests.get(self.exploits)
        print(resp.text)
    def list_targets(self):
        """GET and print all targets."""
        resp = requests.get(self.targets)
        print(resp.text)
def main():
    """Dispatch the parsed sub-command to the matching client call.
    Returns 0 on success and 1 when no valid sub-command was given.
    """
    args = ARGS.parse_args()
    cli = CtfpwnCli(args)
    if args.cmd == 'services':
        if args.list:
            cli.list_services()
        elif args.delete:
            cli.delete_service()
        else:
            cli.create_service()
        return 0
    if args.cmd == 'exploits':
        if args.list:
            cli.list_exploits()
        elif args.delete:
            cli.delete_exploit()
        else:
            cli.create_exploit()
        return 0
    if args.cmd == 'targets':
        cli.list_targets()
        return 0
    LOGGER.error('Invalid argument')
    return 1
if __name__ == '__main__':
    # Exit with main()'s return code (0 on success, 1 on invalid arguments).
    sys.exit(main())
5023886 | #!/usr/bin/python
import sys
import subprocess
import getopt
import random
# Program to format a benchmark run and submit it to the latedays job queue
def usage(name):
print "Usage: %s -h -J -s NAME -a ARGS -r ROOT -d DIGITS"
print " -h Print this message"
print " -J Don't submit job (just generate command file)"
print " -s NAME Specify command name"
print " -a ARGS Arguments for benchmark.py (can be quoted string)"
print " -r ROOT Specify root name of benchmark output file"
print " -d DIGITS Specify number of randomly generated digits in command and benchmark output file names"
sys.exit(0)
uniqueId = ""
def generateId(digits):
    # Return a string of `digits` randomly chosen decimal characters ('0'-'9').
    chars = [chr(random.randint(ord('0'), ord('9'))) for _ in range(digits)]
    return "".join(chars)
def generateFileName(root, extension):
    # Return "ROOT.EXT", or "ROOT-ID.EXT" once a unique id has been generated.
    if uniqueId == "":
        return "%s.%s" % (root, extension)
    return "%s-%s.%s" % (root, uniqueId, extension)
# Create shell script to submit to qsub
# Results stored in file 'OUTROOT-XXXX.out' with specified number of digits
def generateScript(scriptName = "latedays.sh", argString = "", outputName = "benchmark.out"):
    # Write the qsub submission script; returns True on success, False if the
    # script file could not be opened. (Python 2 print statements.)
    try:
        scriptFile = open(scriptName, 'w')
    except Exception as e:
        print "Couldn't open file '%s' (%s)" % (scriptName, str(e))
        return False
    # the benchmark summary destination is appended to the benchmark arguments
    argString += " -f " + outputName
    scriptFile.write("#!/bin/bash\n")
    scriptFile.write("# This script lets you submit jobs for execution on the latedays cluster\n")
    scriptFile.write("# You should submit it using qsub:\n")
    scriptFile.write("# 'qsub latedays.sh'\n")
    scriptFile.write("# Upon completion, the output generated on stdout will show up in the\n")
    scriptFile.write("# file latedays.sh.oNNNNN where NNNNN is the job number. The output\n")
    scriptFile.write("# generated on stderr will show up in the file latedays.sh.eNNNNN.\n")
    scriptFile.write("\n")
    scriptFile.write("# Limit execution time to 30 minutes\n")
    scriptFile.write("#PBS -lwalltime=0:30:00\n")
    scriptFile.write("# Allocate all available CPUs on a single node\n")
    scriptFile.write("#PBS -l nodes=1:ppn=24\n")
    scriptFile.write("\n")
    scriptFile.write("# Go to the directory from which you submitted your job\n")
    scriptFile.write("cd $PBS_O_WORKDIR\n")
    scriptFile.write("\n")
    scriptFile.write("# Execute the performance evaluation program and store summary in %s\n" % outputName)
    scriptFile.write("./benchmark.py %s\n" % argString)
    scriptFile.close()
    return True
def submit(scriptName):
    # Submit the generated script to the cluster queue via qsub and wait for
    # qsub itself to finish, reporting launch or return-code errors.
    cmd = ["qsub", scriptName]
    cmdline = " ".join(cmd)
    try:
        process = subprocess.Popen(cmd)
    except Exception as e:
        print "Couldn't execute '%s' (%s)" % (cmdline, str(e))
        return
    process.wait()
    if process.returncode != 0:
        print "Error. Executing '%s' gave return code %d" % (cmdline, process.returncode)
def run(name, args):
    """Parse CLI options, generate the qsub script, and optionally submit it.

    Options:
      -h         print usage and exit
      -J         generate the script but do NOT submit it
      -s ROOT    script file root name (default "latedays")
      -a ARGS    extra argument string forwarded to benchmark.py
      -r ROOT    results file root name (default "benchmark")
      -d DIGITS  digits in the random unique id (0 disables the id)
    """
    global uniqueId
    submitJob = True
    scriptRoot = "latedays"
    scriptExtension = "sh"
    argString = ""
    outputRoot = "benchmark"
    outputExtension = "out"
    digits = 4
    optlist, args = getopt.getopt(args, "hJs:a:r:d:")
    for (opt, val) in optlist:
        if opt == '-h':
            usage(name)
        elif opt == '-J':
            submitJob = False
        elif opt == '-s':
            scriptRoot = val
        elif opt == '-a':
            argString = val
        elif opt == '-r':
            outputRoot = val
        elif opt == '-d':
            digits = int(val)
    # Tag both file names (and benchmark invocation) with the same random id.
    uniqueId = generateId(digits)
    scriptName = generateFileName(scriptRoot, scriptExtension)
    outputName = generateFileName(outputRoot, outputExtension)
    if digits > 0:
        argString += " -i %s" % uniqueId
    if generateScript(scriptName, argString, outputName):
        print "Generated script %s" % scriptName
        if submitJob:
            submit(scriptName)
if __name__ == "__main__":
    # Invoke with the program name and the remaining command-line arguments.
    run(sys.argv[0], sys.argv[1:])
| StarcoderdataPython |
5087047 | import os
import re
import pickle
import msession
from ocr import ocr
def login(username: str, password: str):
    """Log in to the campus CAS portal and persist the session cookies.

    Flow: fetch the CAS page to extract the one-time ``lt`` token, download
    and OCR the captcha image, POST the login form, then hit the SSO
    endpoint and pickle the cookies to ``cookies/<username>``.
    Prints an error message and returns early on wrong credentials/captcha.
    """
    session = msession.session
    session.cookies.clear()
    res = session.get(msession.urls.cas, verify=False)
    # Hidden one-time login token embedded in the CAS form.
    lt = re.findall(r'name="lt" value="(.*)"', res.text)
    captcha_url = msession.urls.captcha
    captcha_path = 'captcha.jpg'
    # Save the captcha image locally so the OCR helper can read it.
    with session.get(captcha_url) as captcha:
        with open(captcha_path, mode='wb') as captcha_jpg:
            captcha_jpg.write(captcha.content)
    captcha = ocr(captcha_path)
    login_form = {
        'username': username,
        'password': password,
        'captcha': captcha,
        'warn': 'true',
        'lt': lt[0],
        'execution': 'e1s1',
        '_eventId': 'submit',
        'submit': '登录'
    }
    post_res = session.post(msession.urls.cas, data=login_form)
    # "wrong account or password" marker in the response page.
    if '账号或密码错误' in post_res.text:
        print ('账号或密码错误')
        return
    # "captcha incorrect" marker in the response page.
    if '验证码不正确' in post_res.text:
        print ('验证码不正确')
        return
    os.remove('captcha.jpg')
    # Visit the SSO endpoint so the service cookies get issued.
    session.get(msession.urls.sso, verify=False)
    cookies = session.cookies
    if not os.path.exists('cookies'):
        os.mkdir('cookies')
    if not cookies:
        print ('No cookies!')
    else:
        file_name = 'cookies' + os.sep + username
        with open(file_name, mode='wb') as cookies_file:
            pickle.dump(session.cookies, cookies_file)
3488766 | <reponame>vysakh-menon-aot/lear<gh_stars>1-10
# Copyright © 2021 Province of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Validation for the Alteration filing."""
from http import HTTPStatus
from typing import Dict, Final
from flask_babel import _ as babel # noqa: N81
from legal_api.core.filing import Filing
from legal_api.errors import Error
from legal_api.models import Business
from legal_api.services import namex
from legal_api.services.utils import get_str
from .common_validations import has_at_least_one_share_class, validate_court_order, validate_share_structure
def validate(business: Business, filing: Dict) -> Error:  # pylint: disable=too-many-branches
    """Validate the Alteration filing.

    Returns an ``Error`` (HTTP 400) carrying all collected messages, or
    ``None`` when the filing is valid.
    """
    if not business or not filing:
        return Error(HTTPStatus.BAD_REQUEST, [{'error': babel('A valid business and filing are required.')}])
    msg = []
    # Collect messages from each independent rule set.
    msg.extend(company_name_validation(filing))
    msg.extend(share_structure_validation(filing))
    msg.extend(court_order_validation(filing))
    msg.extend(type_change_validation(filing))
    # An alteration must describe at least one share class.
    if err := has_at_least_one_share_class(filing, 'alteration'):
        msg.append({'error': babel(err), 'path': '/filing/alteration/shareStructure'})
    if msg:
        return Error(HTTPStatus.BAD_REQUEST, msg)
    return None
def court_order_validation(filing):
    """Return validation messages for the court order section, if present."""
    court_order_path: Final = '/filing/alteration/courtOrder'
    # Nothing to validate when the filing carries no court order.
    if not get_str(filing, court_order_path):
        return []
    court_order = filing['filing']['alteration']['courtOrder']
    err = validate_court_order(court_order_path, court_order)
    return err if err else []
def share_structure_validation(filing):
    """Return validation messages for an altered share structure, if present."""
    share_structure_path: Final = '/filing/alteration/shareStructure'
    # Nothing to validate when the filing does not alter the share structure.
    if not get_str(filing, share_structure_path):
        return []
    err = validate_share_structure(filing, Filing.FilingTypes.ALTERATION.value)
    return err if err else []
def company_name_validation(filing):
    """Validate the company-name part of the alteration.

    Two paths: with a Name Request (NR) the NR must be consumable, of an
    allowed request type, and its approved name must match the filing's
    legal name; without one, the business must have a valid legal type and
    an existing (named) legal name.
    """
    msg = []
    nr_path: Final = '/filing/alteration/nameRequest/nrNumber'
    if nr_number := get_str(filing, nr_path):
        # ensure NR is approved or conditionally approved
        nr_response = namex.query_nr_number(nr_number).json()
        validation_result = namex.validate_nr(nr_response)
        # Only change-of-name style request types are allowed for alterations.
        if not nr_response['requestTypeCd'] in ('CCR', 'CCP', 'BEC', 'BECV'):
            msg.append({'error': babel('Alteration only available for Change of Name Name requests.'), 'path': nr_path})
        if not validation_result['is_consumable']:
            msg.append({'error': babel('Alteration of Name Request is not approved.'), 'path': nr_path})
        # ensure NR request has the same legal name
        legal_name_path: Final = '/filing/alteration/nameRequest/legalName'
        legal_name = get_str(filing, legal_name_path)
        nr_name = namex.get_approved_name(nr_response)
        if nr_name != legal_name:
            msg.append({'error': babel('Alteration of Name Request has a different legal name.'),
                        'path': legal_name_path})
    else:
        # ensure legalType is valid
        legal_type_path: Final = '/filing/business/legalType'
        if get_str(filing, legal_type_path) not in \
                (Business.LegalTypes.BC_ULC_COMPANY.value,
                 Business.LegalTypes.COMP.value,
                 Business.LegalTypes.BCOMP.value):
            msg.append({'error': babel('Alteration not valid for selected Legal Type.'), 'path': legal_type_path})
        # ensure company is named if being altered to numbered
        legal_name_path: Final = '/filing/business/legalName'
        if not get_str(filing, legal_name_path):
            msg.append({'error': babel('Alteration to Numbered Company can only be done for a Named Company.'),
                        'path': legal_name_path})
    return msg
def type_change_validation(filing):
    """Validate that the alteration changes the business type to a BC Benefit Company.

    Returns a list of error-message dicts; empty when the target legal type
    is BCOMP.
    """
    msg = []
    legal_type_path: Final = '/filing/alteration/business/legalType'
    # you must alter to a bc benefit company
    if get_str(filing, legal_type_path) != Business.LegalTypes.BCOMP.value:
        msg.append({'error': babel('Your business type has not been updated to a BC Benefit Company.'),
                    'path': legal_type_path})
    # Fix: removed an unreachable `return []` that followed `return msg`.
    return msg
| StarcoderdataPython |
1690534 | # -*- coding: utf-8 -*-
# Copyright (C) 2013-2015 MUJIN Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import requests
import requests.auth
import requests.adapters
from . import json
from . import APIServerError, ControllerClientError
import logging
log = logging.getLogger(__name__)
class ControllerWebClient(object):
    """HTTP client for the Mujin controller web API.

    Maintains a ``requests.Session`` configured with HTTP basic auth, a CSRF
    token and locale/author headers, and translates API error payloads into
    ``APIServerError`` exceptions.
    """

    _baseurl = None  # Base URL of the controller
    _username = None  # Username to login with
    _password = None  # Password to login with (fix: was an invalid redacted placeholder token)
    _headers = None  # Prepared headers for all requests
    _isok = False  # Flag to stop
    _session = None  # Requests session object

    def __init__(self, baseurl, username, password, locale=None, author=None):
        self._baseurl = baseurl
        self._username = username
        self._password = password
        self._headers = {}
        self._isok = True

        # Create session
        self._session = requests.Session()

        # Use basic auth
        self._session.auth = requests.auth.HTTPBasicAuth(self._username, self._password)

        # Set referer
        self._headers['Referer'] = baseurl

        # Set csrftoken
        # Any string can be the csrftoken
        self._headers['X-CSRFToken'] = 'csrftoken'
        self._session.cookies.set('csrftoken', self._headers['X-CSRFToken'], path='/')

        # Add retry to deal with closed keep alive connections
        self._session.mount('https://', requests.adapters.HTTPAdapter(max_retries=3))
        self._session.mount('http://', requests.adapters.HTTPAdapter(max_retries=3))

        # Set locale headers
        self.SetLocale(locale)

        # Set author header
        self.SetAuthor(author)

    def __del__(self):
        self.Destroy()

    def Destroy(self):
        self.SetDestroy()

    def SetDestroy(self):
        # Signal any in-flight users of this client to stop.
        self._isok = False

    def SetLocale(self, locale=None):
        """Set the Accept-Language header from a locale string (falls back to $LANG, then 'en')."""
        locale = locale or os.environ.get('LANG', None)

        # Convert locale to language code for http requests
        # en_US.UTF-8 => en-us
        # en_US => en-us
        # en => en
        language = 'en'  # default to en
        if locale is not None and len(locale) > 0:
            language = locale.split('.', 1)[0].replace('_', '-').lower()
        self._headers['Accept-Language'] = language

    def SetAuthor(self, author=None):
        """Set the X-Author header when an author name is provided."""
        if author is not None and len(author) > 0:
            self._headers['X-Author'] = author

    def Request(self, method, path, timeout=5, headers=None, **kwargs):
        """Perform a raw HTTP request against the controller and return the response."""
        if timeout < 1e-6:
            raise ControllerClientError('timeout value (%s sec) is too small' % timeout)

        url = self._baseurl + path

        # Set all the headers prepared for this client
        headers = dict(headers or {})
        headers.update(self._headers)

        return self._session.request(method=method, url=url, timeout=timeout, headers=headers, **kwargs)

    # Python port of the javascript API Call function
    def APICall(self, method, path='', params=None, fields=None, data=None, headers=None, expectedStatusCode=None, timeout=5):
        """Call a /api/v1 endpoint, parse the JSON response and raise APIServerError on failure."""
        path = '/api/v1/' + path.lstrip('/')
        if not path.endswith('/'):
            path += '/'

        if params is None:
            params = {}

        params['format'] = 'json'

        if fields is not None:
            params['fields'] = fields

        # TODO(ziyan): implicit order by pk, is this necessary?
        # if 'order_by' not in params:
        #     params['order_by'] = 'pk'

        if data is None:
            data = {}

        if headers is None:
            headers = {}

        # Default to json content type
        if 'Content-Type' not in headers:
            headers['Content-Type'] = 'application/json'
            data = json.dumps(data)

        if 'Accept' not in headers:
            headers['Accept'] = 'application/json'

        method = method.upper()
        # log.debug('%s %s', method, self._baseurl + path)
        response = self.Request(method, path, params=params, data=data, headers=headers, timeout=timeout)

        # Try to parse response
        raw = response.content.decode('utf-8', 'replace').strip()
        content = None
        if len(raw) > 0:
            try:
                content = json.loads(raw)
            except ValueError as e:
                log.exception('caught exception parsing json response: %s: %s', e, raw)

        # First check error
        if content is not None and 'error_message' in content:
            raise APIServerError(content['error_message'], errorcode=content.get('error_code', None), inputcommand=path, detailInfoType=content.get('detailInfoType', None), detailInfo=content.get('detailInfo', None))

        if content is not None and 'error' in content:
            raise APIServerError(content['error'].get('message', raw), inputcommand=path)

        if response.status_code >= 400:
            raise APIServerError(raw)

        # TODO(ziyan): Figure out the expected status code from method
        #              Some APIs were mis-implemented to not return standard status code.
        if not expectedStatusCode:
            expectedStatusCode = {
                'GET': 200,
                'POST': 201,
                'DELETE': 204,
                'PUT': 202,
            }.get(method, 200)

        # Check expected status code
        if response.status_code != expectedStatusCode:
            log.error('response status code is %d, expecting %d for %s %s: %s', response.status_code, expectedStatusCode, method, path, raw)
            raise APIServerError(raw)

        return content
| StarcoderdataPython |
8199899 | from lux.core import LuxCommand, Setting
from lux.extensions.rest import session_backend
class Command(LuxCommand):
    """Management command that clears stored sessions for an application."""
    help = "Clear Sessions"
    option_list = (
        Setting('app_name',
                nargs='?',
                desc=('Optional app name. If omitted the default '
                      'application name is used (APP_NAME)')),
    )

    def run(self, options, **params):
        """Clear sessions for the given (or default) app and report the count cleared."""
        result = session_backend(self.app).clear(options.app_name)
        self.write('Clear %d sessions' % result)
        return result
| StarcoderdataPython |
4804293 | import time
from pathlib import Path
import cv2
from ir_tracker.utils import calibration_manager, debug_server, picam_wrapper
def draw_info(image, text):
    # Overlay status text near the top-left corner of the frame (drawn in place).
    origin = (10, 50)
    color = (255, 255, 0)
    cv2.putText(image, text, origin, cv2.FONT_HERSHEY_SIMPLEX, 1,
                color, 2, cv2.LINE_AA)
CHESSBOARD_HEIGHT = 8  # inner-corner count along one board edge (passed to findChessboardCorners)
CHESSBOARD_WIDTH = 5  # inner-corner count along the other edge
PICTURE_TIME = 3  # presumably seconds between captures (only referenced by commented-out code) -- TODO confirm
NUMBER_OF_IMAGES = 10  # target number of calibration frames to collect
def main():
    """Collect chessboard frames from the PiCamera, calibrate, save, and preview the result."""
    debug_image_container = debug_server.create_image_server()
    with picam_wrapper.picamera_opencv_video(resolution=(640, 480),
                                             framerate=30) as video_stream:
        calibration_images = []
        for frame in video_stream:
            # Stop once enough calibration frames have been collected
            # (fix: use the NUMBER_OF_IMAGES constant instead of a magic 10).
            if len(calibration_images) >= NUMBER_OF_IMAGES:
                break
            # NOTE(review): nothing in this loop ever appends to
            # calibration_images, so the break above never fires -- the
            # capture trigger appears to have been commented out below.
            # TODO confirm the intended capture logic.
            # draw_info(
            #     frame,
            #     f"{PICTURE_TIME - time_delta:.1f}s left, {len(calibration_images)}/{NUMBER_OF_IMAGES}"
            # )
            # detect chessboard
            gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
            found_chessboard, corners = cv2.findChessboardCorners(
                gray, (CHESSBOARD_HEIGHT, CHESSBOARD_WIDTH), None)
            cv2.drawChessboardCorners(frame,
                                      (CHESSBOARD_HEIGHT, CHESSBOARD_WIDTH),
                                      corners, found_chessboard)
            debug_image_container["calib"] = frame
    # Persist the raw calibration frames for later inspection.
    image_directory = Path.home().joinpath("calibration_images")
    image_directory.mkdir(parents=True, exist_ok=True)
    print(f"Saving images to {image_directory}")
    for i, image in enumerate(calibration_images):
        cv2.imwrite(f"{str(image_directory)}/image_{i}.png", image)
    print("images saved")
    print("Calibrating")
    calibration = calibration_manager.calibarate_from_images(
        calibration_images, CHESSBOARD_HEIGHT, CHESSBOARD_WIDTH, 500)
    calibration_dir = Path.home().joinpath("calibration")
    calibration_dir.mkdir(parents=True, exist_ok=True)
    calibration_path = calibration_dir.joinpath("picamera_calibration.yml")
    print(f"Saving calibration to {calibration_path}")
    calibration.save_yaml(str(calibration_path))
    # Round-trip the calibration file and show original/undistorted pairs.
    calibration_read = calibration_manager.ImageCalibration.load_yaml(
        calibration_path)
    for image in calibration_images:
        undistorted = calibration_read.undistort_image(image, False)
        combined = cv2.vconcat((image, undistorted))
        debug_image_container["calib"] = combined
if __name__ == "__main__":
    # Script entry point.
    main()
4958775 | <filename>dopamine/thesis/scratch/test_rng_pytree.py
# NOTE keeping rngs on their own and splitting them as necessary is
# faster than using my custom PyTree generator class. For now I will
# keep using because it provides a cleaner interface, but it ideally
# needs improvements. Look into https://cgarciae.github.io/treeo/ or
# flax.struct for some alternatives; also, I do not know if my class
# is the proper Jax way to accomplish the design I want.
import functools as ft
import jax
from jax import numpy as jnp
from jax import random as jrand
def show_example(structured):
    """Flatten *structured* into leaves + treedef, rebuild it, and print every stage."""
    leaves, treedef = jax.tree_util.tree_flatten(structured)
    rebuilt = jax.tree_util.tree_unflatten(treedef, leaves)
    print(
        "structured={}\n  flat={}\n  tree={}\n  unflattened={}".format(
            structured, leaves, treedef, rebuilt
        )
    )
# wrong! a is unchanged at top level
@jax.jit
def cip(a):
    """Deliberately broken example (see the "wrong!" note above).

    Draws two uniforms from a stateful key wrapper inside jit; the mutation
    of *a* is not visible at the top level, so the caller's key state goes
    stale.  Kept as a counter-example, not for use.
    """
    e = jax.random.uniform(next(a), (4,))
    c = jax.random.uniform(next(a), (4,))
    return e, c
@ft.partial(jax.jit, static_argnums=(1, 2, 3))
def ciop(rng, eps, num_actions, net, params, state):
    """Epsilon-greedy action selection with explicit PRNG key splitting.

    Returns (new_rng, action): a random action with probability eps,
    otherwise the argmax of the network's output for *state*.
    """
    rng, key_eps, key_act = jrand.split(rng, 3)
    explore = jrand.randint(key_act, (), 0, num_actions)
    exploit = jnp.argmax(net.apply(params, state))
    action = jnp.where(jrand.uniform(key_eps) <= eps, explore, exploit)
    return rng, action
@ft.partial(jax.jit, static_argnums=(1, 2, 3))
def pippo(rng, eps, num_actions, net, params, state):
    """Epsilon-greedy action selection drawing subkeys from a stateful wrapper.

    *rng* is assumed to be the custom PRNGKeyWrap iterator (``next`` yields
    a fresh subkey) -- TODO confirm; a plain jax PRNGKey is not iterable.
    Returns (rng, action).
    """
    # print(f"changed: {a}")
    return rng, jnp.where(
        jrand.uniform(next(rng)) <= eps,
        jrand.randint(next(rng), (), 0, num_actions),
        jnp.argmax(net.apply(params, state)),
    )
def time_pippo(k, net, params):
    # Benchmark helper: 100 epsilon-greedy selections using the stateful key wrapper.
    for i in range(100):
        state = jrand.uniform(next(k), (4,))
        k, _ = pippo(k, 0.01, 2, net, params, state)
def time_ciop(k, net, params):
    # Benchmark helper: 100 selections, splitting a plain PRNGKey each step.
    for i in range(100):
        k, sk = jrand.split(k)
        state = jrand.uniform(sk, (4,))
        k, _ = ciop(k, 0.01, 2, net, params, state)
# import timeit
# from thesis import PRNGKeyWrap
# from thesis.jax import networks
# net = networks.mlp(2, [512, 512])
# k = PRNGKeyWrap.PRNGKeyWrap(0)
# nk = jrand.PRNGKey(0)
# params = net.init(next(k), jnp.ones((4,)))
# pt = timeit.timeit("time_pippo(k, net, params)", number=10000, globals=locals())
# ct = timeit.timeit("time_ciop(nk, net, params)", number=10000, globals=locals())
# k, _ = pippo(k, 0.01, 2, net, params, jnp.ones((4,)))
# k = PRNGKeyWrap.PRNGKeyWrap(5)
# show_example(k)
# a, b, k = pippo(k)
# print(k)
# c, d, k = pippo(k)
# print(k)
# print(jax.make_jaxpr(pippo)(k))
# print(jax.make_jaxpr(cip)(k))
# print(jax.make_jaxpr(ciop)(jax.random.PRNGKey(0)))
# a, b = cip(k)
# k
| StarcoderdataPython |
6673811 | <gh_stars>100-1000
from django.conf.urls import url
from usaspending_api.accounts.views import tas as views
from usaspending_api.common.views import RemovedEndpointView
# bind ViewSets to URLs
tas_list = RemovedEndpointView.as_view({"get": "retrieve", "post": "retrieve"})
tas_detail = RemovedEndpointView.as_view({"get": "retrieve", "post": "retrieve"})
tas_balances_list = RemovedEndpointView.as_view({"get": "retrieve", "post": "retrieve"})
tas_balances_quarters_list = RemovedEndpointView.as_view({"get": "retrieve", "post": "retrieve"})
tas_balances_quarters_total = views.TASBalancesQuarterAggregate.as_view({"get": "list", "post": "list"})
tas_balances_total = views.TASBalancesAggregate.as_view({"get": "list", "post": "list"})
tas_categories_list = RemovedEndpointView.as_view({"get": "retrieve", "post": "retrieve"})
tas_categories_total = views.TASCategoryAggregate.as_view({"get": "list", "post": "list"})
tas_categories_quarters_list = RemovedEndpointView.as_view({"get": "retrieve", "post": "retrieve"})
tas_categories_quarters_total = views.TASCategoryQuarterAggregate.as_view({"get": "list", "post": "list"})

urlpatterns = [
    url(r"^balances/$", tas_balances_list),
    url(r"^balances/total/", tas_balances_total),
    url(r"^balances/quarters/$", tas_balances_quarters_list),
    url(r"^balances/quarters/total/$", tas_balances_quarters_total),
    url(r"^categories/$", tas_categories_list),
    url(r"^categories/total/", tas_categories_total),
    url(r"^categories/quarters/$", tas_categories_quarters_list),
    url(r"^categories/quarters/total/$", tas_categories_quarters_total),
    url(r"^$", tas_list),
    # Fix: anchor the detail pattern at the start like its siblings; Django
    # matches url() patterns with re.search, so without "^" the digits could
    # match anywhere in the path.
    url(r"^(?P<pk>[0-9]+)/$", tas_detail),
    url(r"^autocomplete/", views.TreasuryAppropriationAccountAutocomplete.as_view()),
]
| StarcoderdataPython |
1715169 | """
_speech_synthesizer.py
Copyright 1999-present Alibaba Group Holding Ltd.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
import uuid
import json
import threading
from nls._core import NlsCore
from . import _logging
__SPEECH_SYNTHESIZER_NAMESPACE__ = "SpeechSynthesizer"  # protocol namespace field for requests
__SPEECH_SYNTHESIZER_REQUEST_CMD__ = {
    # request name sent for each supported command
    "start": "StartSynthesis"
}
__URL__ = "wss://nls-gateway.cn-shanghai.aliyuncs.com/ws/v1"  # default NLS gateway endpoint
__all__ = ["NlsSpeechSynthesizer"]
class NlsSpeechSynthesizer:
    """
    Api for text-to-speech
    """
    def __init__(self, url=__URL__,
                 akid=None, aksecret=None,
                 token=None, appkey=None,
                 on_metainfo=None,
                 on_data=None,
                 on_completed=None,
                 on_error=None, on_close=None,
                 callback_args=None):
        """
        NlsSpeechSynthesizer initialization

        Parameters:
        -----------
        url: str
            websocket url.
        akid: str
            access id from aliyun. if you provide a token, ignore this argument.
        aksecret: str
            access secret key from aliyun. if you provide a token, ignore this
            argument.
        token: str
            access token. if you do not have a token, provide access id and key
            secret from your aliyun account.
        appkey: str
            appkey from aliyun
        on_metainfo: function
            Callback object which is called when metainfo arrives.
            on_metainfo has two arguments.
            The 1st argument is message which is a json format string.
            The 2nd argument is *args which is callback_args.
        on_data: function
            Callback object which is called when a partial synthesis result
            arrives.
            on_data has two arguments.
            The 1st argument is binary data corresponding to aformat in start
            method.
            The 2nd argument is *args which is callback_args.
        on_completed: function
            Callback object which is called when synthesis is completed.
            on_completed has two arguments.
            The 1st argument is message which is a json format string.
            The 2nd argument is *args which is callback_args.
        on_error: function
            Callback object which is called when any error occurs.
            on_error has two arguments.
            The 1st argument is message which is a json format string.
            The 2nd argument is *args which is callback_args.
        on_close: function
            Callback object which is called when connection closed.
            on_close has one arguments.
            The 1st argument is *args which is callback_args.
        callback_args: list
            callback_args will return in callbacks above for *args.
            Defaults to an empty list.
        """
        # Fix: avoid a shared mutable default argument.
        if callback_args is None:
            callback_args = []
        self.__response_handler__ = {
            "MetaInfo": self.__metainfo,
            "SynthesisCompleted": self.__synthesis_completed,
            "TaskFailed": self.__task_failed
        }
        self.__callback_args = callback_args
        self.__url = url
        self.__appkey = appkey
        self.__akid = akid
        self.__aksecret = aksecret
        self.__token = token
        self.__start_cond = threading.Condition()
        self.__start_flag = False
        self.__on_metainfo = on_metainfo
        self.__on_data = on_data
        self.__on_completed = on_completed
        self.__on_error = on_error
        self.__on_close = on_close
        # Fix: created lazily in start(); initialize so shutdown() before
        # start() is a safe no-op instead of an AttributeError.
        self.__nls = None
        self.__allow_aformat = (
            "pcm", "wav", "mp3"
        )
        self.__allow_sample_rate = (
            8000, 11025, 16000, 22050,
            24000, 32000, 44100, 48000
        )

    def __handle_message(self, message):
        # Dispatch a gateway JSON message to the matching response handler.
        _logging.debug("__handle_message")
        try:
            __result = json.loads(message)
            if __result["header"]["name"] in self.__response_handler__:
                __handler = self.__response_handler__[__result["header"]["name"]]
                __handler(message)
            else:
                _logging.error("cannot handle cmd{}".format(
                    __result["header"]["name"]))
                return
        except json.JSONDecodeError:
            _logging.error("cannot parse message:{}".format(message))
            return

    def __syn_core_on_open(self):
        # Connection established: wake the thread blocked in start().
        _logging.debug("__syn_core_on_open")
        with self.__start_cond:
            self.__start_flag = True
            self.__start_cond.notify()

    def __syn_core_on_data(self, data, opcode, flag):
        # Binary audio frame from the gateway; forward to the user callback.
        _logging.debug("__syn_core_on_data")
        if self.__on_data:
            self.__on_data(data, *self.__callback_args)

    def __syn_core_on_msg(self, msg, *args):
        _logging.debug("__syn_core_on_msg:msg={} args={}".format(msg, args))
        self.__handle_message(msg)

    def __syn_core_on_error(self, msg, *args):
        # Transport-level errors are only logged; protocol errors arrive as TaskFailed.
        _logging.debug("__sr_core_on_error:msg={} args={}".format(msg, args))

    def __syn_core_on_close(self):
        # Connection closed: notify the user and unblock any waiter.
        _logging.debug("__sr_core_on_close")
        if self.__on_close:
            self.__on_close(*self.__callback_args)
        with self.__start_cond:
            self.__start_flag = False
            self.__start_cond.notify()

    def __metainfo(self, message):
        _logging.debug("__metainfo")
        if self.__on_metainfo:
            self.__on_metainfo(message, *self.__callback_args)

    def __synthesis_completed(self, message):
        # Final message for a task: tear down the connection, then notify.
        _logging.debug("__synthesis_completed")
        self.__nls.shutdown()
        _logging.debug("__synthesis_completed shutdown done")
        if self.__on_completed:
            self.__on_completed(message, *self.__callback_args)
        with self.__start_cond:
            self.__start_flag = False
            self.__start_cond.notify()

    def __task_failed(self, message):
        # Server reported failure: unblock waiters, then surface the error.
        _logging.debug("__task_failed")
        with self.__start_cond:
            self.__start_flag = False
            self.__start_cond.notify()
        if self.__on_error:
            self.__on_error(message, *self.__callback_args)

    def start(self, text="", voice="xiaoyun",
              aformat="pcm", sample_rate=16000,
              volume=50, speech_rate=0, pitch_rate=0,
              wait_complete=True,
              start_timeout=10,
              completed_timeout=60,
              ex=None):
        """
        Synthesis start

        Parameters:
        -----------
        text: str
            utf-8 text
        voice: str
            voice for text-to-speech, default is xiaoyun
        aformat: str
            audio binary format, support: "pcm", "wav", "mp3", default is "pcm"
        sample_rate: int
            audio sample rate, default is 16000, support:8000, 11025, 16000, 22050,
            24000, 32000, 44100, 48000
        volume: int
            audio volume, from 0~100, default is 50
        speech_rate: int
            speech rate from -500~500, default is 0
        pitch_rate: int
            pitch for voice from -500~500, default is 0
        wait_complete: bool
            whether block until synthesis completed or timeout for completed timeout
        start_timeout: int
            timeout for connection established
        completed_timeout: int
            timeout for waiting synthesis completed from connection established
        ex: dict
            dict which will merge into "payload" field in request
            (default: no extra fields)
        """
        # Fix: avoid a shared mutable default argument.
        if ex is None:
            ex = {}
        self.__nls = NlsCore(
            url=self.__url, akid=self.__akid,
            aksecret=self.__aksecret,
            token=self.__token,
            on_open=self.__syn_core_on_open,
            on_message=self.__syn_core_on_msg,
            on_data=self.__syn_core_on_data,
            on_close=self.__syn_core_on_close,
            on_error=self.__syn_core_on_error,
            callback_args=[])
        if aformat not in self.__allow_aformat:
            raise ValueError("format {} not support".format(aformat))
        if sample_rate not in self.__allow_sample_rate:
            raise ValueError("samplerate {} not support".format(sample_rate))
        if volume < 0 or volume > 100:
            raise ValueError("volume {} not support".format(volume))
        if speech_rate < -500 or speech_rate > 500:
            raise ValueError("speech_rate {} not support".format(speech_rate))
        if pitch_rate < -500 or pitch_rate > 500:
            raise ValueError("pitch rate {} not support".format(pitch_rate))
        __id4 = uuid.uuid4().hex
        self.__task_id = uuid.uuid4().hex
        __header = {
            "message_id": __id4,
            "task_id": self.__task_id,
            "namespace": __SPEECH_SYNTHESIZER_NAMESPACE__,
            "name": __SPEECH_SYNTHESIZER_REQUEST_CMD__["start"],
            "appkey": self.__appkey
        }
        __payload = {
            "text": text,
            "voice": voice,
            "format": aformat,
            "sample_rate": sample_rate,
            "volume": volume,
            "speech_rate": speech_rate,
            "pitch_rate": pitch_rate
        }
        # Merge user-supplied extra fields into the request payload.
        for key in ex:
            __payload[key] = ex[key]
        __msg = {
            "header": __header,
            "payload": __payload
        }
        __jmsg = json.dumps(__msg)
        with self.__start_cond:
            if self.__start_flag:
                _logging.debug("already start...")
                return False
            if self.__nls.start(__jmsg, ping_interval=0, ping_timeout=None):
                # Wait for the connection to be established (on_open sets the flag).
                if self.__start_flag == False:
                    if not self.__start_cond.wait(start_timeout):
                        _logging.debug("syn start timeout")
                        return False
                if not wait_complete:
                    _logging.debug("do not wait completed")
                    return self.__start_flag == True
                if not self.__start_flag:
                    _logging.debug("started but flag not true")
                    return False
            else:
                _logging.debug("nls core start failed")
                return False
            # Block until SynthesisCompleted / TaskFailed clears the flag.
            if self.__start_flag:
                if not self.__start_cond.wait(completed_timeout):
                    _logging.debug("wait completed timeout")
                    return False
                else:
                    return self.__start_flag == False
            else:
                _logging.debug("wait completed but start flag is false")
                return True

    def shutdown(self):
        """
        Shutdown connection immediately (no-op if start() was never called)
        """
        # Fix: guard against shutdown() before start().
        if self.__nls is not None:
            self.__nls.shutdown()
| StarcoderdataPython |
4992396 | from tmps.star.propagator.factory import get, get_from_hamiltonian | StarcoderdataPython |
3450519 | import pandas as pd
import pysam
import argparse
import pickle
def parse_args():
    """Build and parse the command-line arguments for the GC-content script."""
    parser = argparse.ArgumentParser(description="get gc content from a bed file")
    for flag in ("--input_bed", "--ref_fasta", "--out_pickle"):
        parser.add_argument(flag)
    return parser.parse_args()
def main():
    """Bucket BED intervals by GC fraction and pickle the mapping.

    Builds {gc_fraction (rounded to 2 dp): [tab-joined BED rows]} from the
    reference FASTA and writes it to --out_pickle.
    """
    args = parse_args()
    ref = pysam.FastaFile(args.ref_fasta)
    outputs = dict()
    data = pd.read_csv(args.input_bed, header=None, sep='\t')
    print("loaded bed file")
    num_rows = str(data.shape[0])
    print("num_rows:" + num_rows)
    for index, row in data.iterrows():
        # Progress indicator every 1000 rows.
        if index % 1000 == 0:
            print(str(index))
        # Uppercase so counts also catch soft-masked (lowercase) bases.
        seq = ref.fetch(row[0], row[1], row[2]).upper()
        g_count = seq.count('G')
        c_count = seq.count('C')
        # NOTE(review): a zero-length interval would raise ZeroDivisionError here.
        gc_fract = round((g_count + c_count) / len(seq), 2)
        # Fix: removed the unused `outf = None` local; group rows per GC bucket.
        outputs.setdefault(gc_fract, []).append('\t'.join(str(i) for i in row))
    print("pickling")
    with open(args.out_pickle, 'wb') as handle:
        pickle.dump(outputs, handle, protocol=pickle.HIGHEST_PROTOCOL)
    print("done!")
if __name__=="__main__":
    # Script entry point.
    main()
| StarcoderdataPython |
1958122 | <gh_stars>1-10
from fastapi import Depends, HTTPException, Request
from aim.web.api.utils import APIRouter # wrapper for fastapi.APIRouter
from sqlalchemy.orm import Session
from aim.web.api.dashboards.models import Dashboard
from aim.web.api.dashboard_apps.models import ExploreState
from aim.web.api.dashboards.serializers import dashboard_response_serializer
from aim.web.api.db import get_session
dashboards_router = APIRouter()
@dashboards_router.get('/')
async def dashboards_list_api(session: Session = Depends(get_session)):
    """Return all non-archived dashboards ordered by last update time."""
    dashboards_query = session.query(Dashboard) \
        .filter(Dashboard.is_archived == False) \
        .order_by(Dashboard.updated_at)  # noqa
    result = []
    for dashboard in dashboards_query:
        result.append(dashboard_response_serializer(dashboard, session))
    return result
@dashboards_router.post('/', status_code=201)
async def dashboards_post_api(request: Request, session: Session = Depends(get_session)):
    """Create a dashboard; optionally attach an existing app (ExploreState) by `app_id`."""
    # create the dashboard object
    request_data = await request.json()
    dashboard_name = request_data.get('name')
    dashboard_description = request_data.get('description')
    dashboard = Dashboard(dashboard_name, dashboard_description)
    session.add(dashboard)
    # update the app object's foreign key relation (missing/unknown app_id is ignored)
    app_id = request_data.get('app_id')
    app = session.query(ExploreState).filter(ExploreState.uuid == app_id).first()
    if app:
        app.dashboard_id = dashboard.uuid
    # commit db session
    session.commit()
    return dashboard_response_serializer(dashboard, session)
@dashboards_router.get('/{dashboard_id}/')
async def dashboards_get_api(dashboard_id: str, session: Session = Depends(get_session)):
    """Fetch a single non-archived dashboard by uuid; 404 if not found."""
    dashboard = session.query(Dashboard) \
        .filter(Dashboard.uuid == dashboard_id, Dashboard.is_archived == False) \
        .first()  # noqa
    if not dashboard:
        raise HTTPException(status_code=404)
    return dashboard_response_serializer(dashboard, session)
@dashboards_router.put('/{dashboard_id}/')
async def dashboards_put_api(dashboard_id: str, request: Request, session: Session = Depends(get_session)):
    """Update name/description of a non-archived dashboard; 404 if not found.

    NOTE(review): falsy values (e.g. empty strings) are skipped, so a field
    cannot be cleared through this endpoint -- confirm that is intended.
    """
    dashboard = session.query(Dashboard) \
        .filter(Dashboard.uuid == dashboard_id, Dashboard.is_archived == False) \
        .first()  # noqa
    if not dashboard:
        raise HTTPException(status_code=404)
    request_data = await request.json()
    dashboard_name = request_data.get('name')
    if dashboard_name:
        dashboard.name = dashboard_name
    dashboard_description = request_data.get('description')
    if dashboard_description:
        dashboard.description = dashboard_description
    session.commit()
    return dashboard_response_serializer(dashboard, session)
@dashboards_router.delete('/{dashboard_id}/')
async def dashboards_delete_api(dashboard_id: str, session: Session = Depends(get_session)):
    """Soft-delete a dashboard by marking it archived; 404 if not found."""
    dashboard = session.query(Dashboard) \
        .filter(Dashboard.uuid == dashboard_id, Dashboard.is_archived == False) \
        .first()  # noqa
    if not dashboard:
        raise HTTPException(status_code=404)
    dashboard.is_archived = True
    session.commit()
| StarcoderdataPython |
6580877 | <gh_stars>100-1000
""" Example on how to finetune on the HardHat dataset
using custom layers. This script assume the dataset is already download
on your computer in raw and Tensorflow Object detection csv format.
Please, for more information, checkout the following notebooks:
- DETR : How to setup a custom dataset
"""
import argparse
import matplotlib.pyplot as plt
import tensorflow as tf
import numpy as np
import time
import os
from detr_tf.data import load_tfcsv_dataset
from detr_tf.networks.detr import get_detr_model
from detr_tf.optimizers import setup_optimizers
from detr_tf.logger.training_logging import train_log, valid_log
from detr_tf.loss.loss import get_losses
from detr_tf.inference import numpy_bbox_to_image
from detr_tf.training_config import TrainingConfig, training_config_parser
from detr_tf import training
try:
# Should be optional if --log is not set
import wandb
except:
wandb = None
import time
def build_model(config):
    """Build DETR with pretrained weights and fresh heads for finetuning.

    nb_class=3 covers the background class plus the two kept dataset classes
    (head, helmet); the person class is excluded from the dataset elsewhere.
    """
    # Load the pretrained model with new heads at the top
    # 3 class : background head and helmet (we exclude here person from the dataset)
    detr = get_detr_model(config, include_top=False, nb_class=3, weights="detr", num_decoder_layers=6, num_encoder_layers=6)
    detr.summary()
    return detr
def run_finetuning(config):
    """Finetune DETR on the hard-hat dataset.

    Epoch 0 trains only the freshly added layers; from epoch 1 on the
    transformers are unfrozen as well.  The backbone stays frozen throughout.
    """
    model = build_model(config)

    # Datasets -- the "person" class is excluded from both splits.
    train_dt, class_names = load_tfcsv_dataset(
        config, config.batch_size, augmentation=True, exclude=["person"],
        ann_file="train/_annotations.csv", img_dir="train")
    valid_dt, _ = load_tfcsv_dataset(
        config, 4, augmentation=False, exclude=["person"],
        ann_file="test/_annotations.csv", img_dir="test")

    # Freeze backbone and transformers; train only the new layers first.
    # The frozen parts get LR 0 purely so the freeze is explicit in the logs.
    config.train_backbone = tf.Variable(False)
    config.train_transformers = tf.Variable(False)
    config.train_nlayers = tf.Variable(True)
    config.backbone_lr = tf.Variable(0.0)
    config.transformers_lr = tf.Variable(0.0)
    config.nlayers_lr = tf.Variable(1e-3)

    optimizers = setup_optimizers(model, config)

    for epoch_nb in range(180):
        if epoch_nb > 0:
            # After the first epoch, also finetune the transformers.
            config.train_transformers.assign(True)
            config.transformers_lr.assign(1e-4)
            config.nlayers_lr.assign(1e-3)
        training.eval(model, valid_dt, config, class_names, evaluation_step=100)
        training.fit(model, train_dt, optimizers, config, epoch_nb, class_names)
if __name__ == "__main__":
    # Let TensorFlow grow GPU memory on demand instead of reserving it all
    # up-front (only applied when exactly one GPU is visible).
    physical_devices = tf.config.list_physical_devices('GPU')
    if len(physical_devices) == 1:
        tf.config.experimental.set_memory_growth(physical_devices[0], True)

    # Build the training config from the command line.
    config = TrainingConfig()
    args = training_config_parser().parse_args()
    config.update_from_args(args)

    # Optional Weights & Biases logging (wandb import is best-effort above).
    if config.log:
        wandb.init(project="detr-tensorflow", reinit=True)

    # Run training
    run_finetuning(config)
| StarcoderdataPython |
1613779 | <filename>tests/test_root.py
def test_namespace():
    """Check that the 'jaraco' namespace package is importable on all platforms.

    Uses importlib.import_module instead of the discouraged direct
    __import__ call (per the stdlib documentation).
    """
    import importlib
    importlib.import_module('jaraco')
| StarcoderdataPython |
6478291 | <reponame>shinnng/platon-utils
import decimal
from hypothesis import given
from hypothesis import strategies as st
import pytest
from platon_utils.currency import MAX_VON, MIN_VON, from_von, to_von, units
@given(
    amount_in_von=st.integers(min_value=MIN_VON, max_value=MAX_VON),
    intermediate_unit=st.sampled_from(tuple(units.keys())),
)
def test_conversion_round_trip(amount_in_von, intermediate_unit):
    """Property test: converting von -> any unit -> von is the identity."""
    intermediate_amount = from_von(amount_in_von, intermediate_unit)
    result_amount = to_von(intermediate_amount, intermediate_unit)
    assert result_amount == amount_in_von
MAX_ETHER_WHOLE = 115792089237316195423570985008687907853269984665640564039457
MAX_ETHER_DECIMAL_MAX = 584007913129639935
MAX_ETHER_DECIMAL = 999999999999999999
def make_lat_string_value(amount_in_von):
    """Render an integer von amount as its decimal LAT string (1 LAT = 1e18 von).

    Examples: 0 -> "0", 1 -> "0.000000000000000001", 15 * 10**17 -> "1.5".
    """
    digits = str(amount_in_von)
    integral = digits[:-18] or "0"
    fractional = digits[-18:].zfill(18).rstrip("0")
    # Drop the trailing dot when the fractional part is empty (whole LAT).
    return (integral + "." + fractional).rstrip(".")
@given(st.integers(min_value=0, max_value=MAX_VON).map(make_lat_string_value))
def test_conversion_revers_round_trip_trip(amount_in_lat):
    """Property test: lat string -> von -> lat reproduces the decimal value."""
    intermediate_amount = to_von(amount_in_lat, "lat")
    result_amount = from_von(intermediate_amount, "lat")
    assert decimal.Decimal(result_amount) == decimal.Decimal(str(amount_in_lat))
@pytest.mark.parametrize(
    "value,expected",
    [
        ([1000000000000000000, "von"], "1000000000000000000"),
        ([1000000000000000000, "kvon"], "1000000000000000"),
        ([1000000000000000000, "mvon"], "1000000000000"),
        ([1000000000000000000, "gvon"], "1000000000"),
        ([1000000000000000000, "microlat"], "1000000"),
        ([1000000000000000000, "millilat"], "1000"),
        ([1000000000000000000, "lat"], "1"),
        ([1000000000000000000, "klat"], "0.001"),
        # NOTE(review): the "klat" row above is duplicated verbatim.
        ([1000000000000000000, "klat"], "0.001"),
        ([1000000000000000000, "mlat"], "0.000001"),
        ([1000000000000000000, "glat"], "0.000000001"),
        ([1000000000000000000, "tlat"], "0.000000000001"),
    ],
)
def test_from_von(value, expected):
    """Table test: one LAT (10**18 von) expressed in every denomination."""
    assert from_von(*value) == decimal.Decimal(expected)
@pytest.mark.parametrize(
    "value,expected",
    [
        ([1, "von"], "1"),
        # Unit names are accepted with several capitalisations (kvon/Kvon...).
        ([1, "kvon"], "1000"),
        ([1, "Kvon"], "1000"),
        ([1, "kvon"], "1000"),
        ([1, "mvon"], "1000000"),
        ([1, "Mvon"], "1000000"),
        ([1, "mvon"], "1000000"),
        ([1, "gvon"], "1000000000"),
        ([1, "Gvon"], "1000000000"),
        ([1, "gvon"], "1000000000"),
        ([1, "microlat"], "1000000000000"),
        ([1, "millilat"], "1000000000000000"),
        ([1, "lat"], "1000000000000000000"),
        ([1, "klat"], "1000000000000000000000"),
        ([1, "klat"], "1000000000000000000000"),
        ([1, "mlat"], "1000000000000000000000000"),
        ([1, "glat"], "1000000000000000000000000000"),
        ([1, "tlat"], "1000000000000000000000000000000"),
        # Fractional LAT amounts must convert exactly.
        ([0.05, "lat"], "50000000000000000"),
        ([1.2, "lat"], "1200000000000000000"),
    ],
)
def test_to_von(value, expected):
    """Table test: one unit of each denomination converted to von."""
    assert to_von(*value) == decimal.Decimal(expected)
@pytest.mark.parametrize("value,unit", ((1, "von1"), (1, "not-a-unit"), (-1, "lat")))
def test_invalid_to_von_values(value, unit):
    """Unknown unit names and negative amounts must raise ValueError both ways."""
    with pytest.raises(ValueError):
        to_von(value, unit)
    with pytest.raises(ValueError):
        from_von(value, unit)
| StarcoderdataPython |
1986941 | # AUTOGENERATED! DO NOT EDIT! File to edit: 00_core.ipynb (unless otherwise specified).
__all__ = ['device', 'INDUCING_POINT_STRIDE', 'get_data']
# Cell
import numpy as np
import matplotlib.pyplot as plt
import torch
import torch.distributions as dist
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import Dataset, DataLoader
from torch.optim import Adam
# Cell
from .simulations import get_time_from_intervals, simulate_data, TrueParameters, visualize
from .models import Encoder_h, Encoder_x, Model_z, marginalize_z, marginalize_z_paper, Decoder_h, Decoder_i, Decoder_y
from .GP import inference_X, get_covariance_matrix_from_RBFkernel_new, add_jitter_covar, plot_predictions, ExactGPModelLayer
# Cell
device = 'cuda' if torch.cuda.is_available() else 'cpu'
# Cell
INDUCING_POINT_STRIDE = 10
# Cell
def get_data(Params, visualize_data=False):
    # returns I,Y,T,X,Z,H
    """Simulate a dataset from *Params* and return it as a dict.

    Returns keys I, Y, T, X, H; the simulated Z is discarded here (the
    comment above still lists it -- TODO confirm whether Z is needed).
    With visualize_data=True the simulation is also plotted via visualize().
    """
    I,Y,T,X,_,H = simulate_data(Params)
    data = {'I': I, 'Y': Y, 'T': T, 'X': X, 'H': H}
    if visualize_data:
        visualize(Params, X, T, I, H)
    return data
8021932 | <reponame>ngbla/memoire_bigdata_face
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Jun 22 06:15:43 2021
@author: ngbla
"""
from __future__ import print_function
# import the necessary packages
import cv2 as cv
print(cv.__version__)
import numpy as np
print(np.__version__)
#-- Import de la fonction dans le meme dossier
from detectAndDisplay import *
from reconnaissance import *
#-- 2. Read the video stream
#camera_device = args.camera
#cap = cv.VideoCapture(camera_device)
cap = cv.VideoCapture(0)

# Build the display label for each known user id found in the training folder.
names = []
for users in os.listdir("img_training"):
    if users == "1":
        txt = "{0} - {1} "
        names.append(txt.format(users, "NGBLA"))
#-- END Infos User for recongnition

# BUGFIX: isOpened is a method and must be *called*.  The original tested the
# bound method object itself (`not cap.isOpened`), which is always truthy, so
# the "camera busy" branch could never trigger.
if not cap.isOpened():
    print('--(!) Caméra vidéo en cour d utilisation !')
    cap.release()
    cv.destroyAllWindows()
    exit(0)

# Main capture loop: run recognition on every frame until ESC (27) is pressed
# or the camera stops delivering frames.
while True:
    ret, frame = cap.read()
    if frame is None:
        print('--(!) Image Absente -- Exit !')
        cap.release()
        cv.destroyAllWindows()
        break
    #detectAndDisplay(frame)
    recongition(frame, ret, names)
    if cv.waitKey(10) == 27:
        break
4881901 | <gh_stars>10-100
r"""
Reward Tracing
==============
.. autosummary::
:nosignatures:
coax.reward_tracing.NStep
coax.reward_tracing.MonteCarlo
coax.reward_tracing.TransitionBatch
----
The term **reward tracing** refers to the process of turning raw experience into
:class:`TransitionBatch <coax.reward_tracing.TransitionBatch>` objects. These
:class:`TransitionBatch <coax.reward_tracing.TransitionBatch>` objects are then used to learn, i.e.
to update our function approximators.
Reward tracing typically entails keeping some episodic cache in order to relate a state :math:`S_t`
or state-action pair :math:`(S_t, A_t)` to a collection of objects that can be used to construct a
target (feedback signal):
.. math::
\left(R^{(n)}_t, I^{(n)}_t, S_{t+n}, A_{t+n}\right)
where
.. math::
R^{(n)}_t\ &=\ \sum_{k=0}^{n-1}\gamma^kR_{t+k} \\
I^{(n)}_t\ &=\ \left\{\begin{matrix}
0 & \text{if $S_{t+n}$ is a terminal state} \\
\gamma^n & \text{otherwise}
\end{matrix}\right.
For example, in :math:`n`-step SARSA target is constructed as:
.. math::
G^{(n)}_t\ =\ R^{(n)}_t + I^{(n)}_t\,q(S_{t+n}, A_{t+n})
Object Reference
----------------
.. autoclass:: coax.reward_tracing.NStep
.. autoclass:: coax.reward_tracing.MonteCarlo
.. autoclass:: coax.reward_tracing.TransitionBatch
"""
from ._transition import TransitionBatch
from ._montecarlo import MonteCarlo
from ._nstep import NStep
__all__ = (
'TransitionBatch',
'MonteCarlo',
'NStep',
)
| StarcoderdataPython |
6683895 | #python2.*
__author__ = 'wangchao'
import hashlib
def main():
    # Python 2 "print" statements: emit a marker, then the MD5 hex digest of
    # the sample request-signing payload below (gateway request JSON + key).
    print 1
    print MD5encrypt('10000Rb498d0f8bfab48b7a2abdf25ee51c5a51480402506335{"MsgInfo":{"CommType":"REQ","TxnType":"ActivateCertificate","TxnVersion":"0.0.1","Charset":"UTF-8"},"ChannelInfo":{"ChannelLang":"zh_CN","ChannelHostIP":"10.100.70.100","ChannelRefNo":"Rb498d0f8bfab48b7a2abdf25ee51c5a51480402506335","ChannelTimeStamp":"1477466428035","ChannelTimeZone":"CCT","SAFFlag":"","SAFTimeStamp":""},"RetailerInfo":{"StoreID":"20160427","StoreName":"zihexintest","StoreTimeZone":"CCT","TerminalID":"110","TerminalRefNo":"101152027452","TerminalTimeStamp":"1477451436297","TerminalLang":"zh_CN","MerchantId":"1001","MerchantName":"TEST2","MerchantDivId":"1001126"},"ServiceData":{"CardList":[{"CardNo":"8886660127000888888","InputType":"02","TrackI":"","TrackII":"","TrackIII":"","FaceValue":"10000","CNYCode":"CHN"}]}}*&(%$#@WS@!S')
def MD5encrypt(str):
    """Return the hex MD5 digest of *str*.

    hashlib requires bytes on Python 3, so text input is UTF-8 encoded first;
    bytes input is hashed as-is (on Python 2, str *is* bytes, so behaviour is
    unchanged).  The parameter keeps its original builtin-shadowing name so
    keyword callers are unaffected.
    """
    data = str if isinstance(str, bytes) else str.encode('utf-8')
    m = hashlib.md5()
    m.update(data)
    return m.hexdigest()
if __name__=='__main__':
main() | StarcoderdataPython |
137750 | import time
import os
import pandas as pd
import re
# %a 星期的简写。如 星期三为Web
# %A 星期的全写。如 星期三为Wednesday
# %b 月份的简写。如4月份为Apr
# %B 月份的全写。如4月份为April
# %c: 日期时间的字符串表示。(如: 04/07/10 10:43:39)
# %d: 日在这个月中的天数(是这个月的第几天)
# %f: 微秒(范围[0,999999])
# %H: 小时(24小时制,[0, 23])
# %I: 小时(12小时制,[0, 11])
# %j: 日在年中的天数 [001,366](是当年的第几天)
# %m: 月份([01,12])
# %M: 分钟([00,59])
# %p: AM或者PM
# %S: 秒(范围为[00,61],为什么不是[00, 59],参考python手册~_~)
# %U: 周在当年的周数当年的第几周),星期天作为周的第一天
# %w: 今天在这周的天数,范围为[0, 6],6表示星期天
# %W: 周在当年的周数(是当年的第几周),星期一作为周的第一天
# %x: 日期字符串(如:04/07/10)
# %X: 时间字符串(如:10:43:39)
# %y: 2个数字表示的年份
# %Y: 4个数字表示的年份
# %z: 与utc时间的间隔 (如果是本地时间,返回空字符串)
# %Z: 时区名称(如果是本地时间,返回空字符串)
# %%: %% => %
# Oct 19, 2017 12:00:00 AM
# May 27, 2015 12:00:00 AM
def get_short_date(date):
    """Convert an ISO date string 'YYYY-MM-DD' into compact 'YYYYMMDD' form."""
    return time.strftime("%Y%m%d", time.strptime(date, "%Y-%m-%d"))
def get_standard_date(date):
    """Convert a date like 'Oct 19, 2017 12:00:00 AM' into 'YYYY-MM-DD'.

    Month names are parsed with the locale-dependent %b directive.
    """
    parsed = time.strptime(date, "%b %d, %Y %X %p")
    return time.strftime("%Y-%m-%d", parsed)
def get_standard_date2(date):
    """Strip the time-of-day from 'YYYY-MM-DD HH:MM:SS', keeping 'YYYY-MM-DD'."""
    return time.strftime("%Y-%m-%d", time.strptime(date, "%Y-%m-%d %X"))
# 将字符串时间转换为时间戳
def get_mktime(date_string):
    """Convert 'YYYY-MM-DD' into a local-time POSIX timestamp (float)."""
    parsed = time.strptime(date_string, '%Y-%m-%d')
    return time.mktime(parsed)
# 将字符串时间转换为时间戳
def get_mktime2(date_string):
    """Convert a Chinese-format date 'YYYY年MM月DD日' into a POSIX timestamp."""
    parsed = time.strptime(date_string, '%Y年%m月%d日')
    return time.mktime(parsed)
# 将时间戳转化为标准时间
def get_standard_time_from_mktime(mktime):
    """Format a POSIX timestamp as a local-time 'YYYY-MM-DD' date string."""
    local = time.localtime(mktime)
    return time.strftime("%Y-%m-%d", local)
def get_standard_time_from_mktime2(mktime):
    """Truncate a timestamp to local midnight and return it as a timestamp."""
    day_string = time.strftime("%Y-%m-%d", time.localtime(mktime))
    return get_mktime(day_string)
def get_full_time_from_mktime(mktime):
    """Format a POSIX timestamp as 'YYYY-MM-DD <locale time>' in local time."""
    local = time.localtime(mktime)
    return time.strftime("%Y-%m-%d %X", local)
def get_month(date):
    """Extract 'YYYY-MM' from a 'YYYY-MM-DD' value (str() is applied first)."""
    return time.strftime("%Y-%m", time.strptime(str(date), "%Y-%m-%d"))
def check_dir_exist(dir):
    """Create directory *dir* (including parents) if it does not exist yet.

    Uses ``os.makedirs(..., exist_ok=True)`` so the check-then-create race in
    the original (``exists()`` followed by ``makedirs()``) cannot raise
    FileExistsError when two processes create the directory concurrently.
    Note: if *dir* exists but is a file, this now raises instead of silently
    doing nothing.
    """
    os.makedirs(dir, exist_ok=True)
def open_file_list(path, open_data_frame=False):
    """Open every file found directly under *path*.

    With ``open_data_frame=True`` each file is parsed via do_read_csv() and
    all frames are concatenated row-wise into a single DataFrame; otherwise
    each file's raw text (from do_open_file()) is collected into a list.
    *path* is joined to entry names by plain concatenation, so it should end
    with a separator.
    """
    frames = []
    pages = []
    for entry in os.listdir(path):
        print('open dir:', entry, '...')
        full_name = path + entry
        if open_data_frame:
            frames.append(do_read_csv(full_name))
        else:
            pages.append(do_open_file(file_name=full_name))
    if open_data_frame:
        # Seed with an empty frame so zero files still yields a DataFrame.
        return pd.concat([pd.DataFrame()] + frames, axis=0)
    return pages
def do_open_file(file_name):
    """Read a UTF-8 text file and return its contents (None on any failure).

    Errors are reported through format_error() rather than raised, matching
    this module's best-effort style.
    """
    with open(file_name, 'r', encoding='utf-8') as handle:
        try:
            content = handle.read()
        except BaseException as err:
            format_error(err, file_name + "file error")
        else:
            print(file_name)
            return content
def get_file_full_path(path):
    """Return ``path + name`` for every entry in directory *path*.

    The path is joined by plain string concatenation, so callers are expected
    to pass a path that ends with a separator.
    """
    return [path + name for name in os.listdir(path)]
def get_file_list(path):
    """Return the entry names (files and subdirectories) inside *path*."""
    return os.listdir(path)
def do_read_csv(file_name):
    """Load a .csv or .xlsx file into a DataFrame; anything else yields an
    empty DataFrame.

    Matching is by substring (same semantics as the original ``str.find``),
    so any name *containing* '.csv' or '.xlsx' is accepted, '.csv' first.
    """
    if '.csv' in file_name:
        return pd.read_csv(file_name)
    if '.xlsx' in file_name:
        return pd.read_excel(file_name)
    return pd.DataFrame()
def format_error(e, msg=""):
    """Print an exception plus optional context between ERROR banner lines."""
    banner = 'ERROR==================='
    print(banner)
    print(e)
    print(msg)
    print(banner)
def date_to_millis(d):
    """Convert a date/datetime into a millisecond POSIX timestamp (local time)."""
    seconds = int(time.mktime(d.timetuple()))
    return seconds * 1000
def remove_waste_emoji(text):
    """Strip QQ-style emoji markup ``[em]...[/em]`` and ``@{...}`` mention blobs.

    The original built the patterns from non-raw strings containing ``\\[``
    and ``\\/`` -- invalid string escapes that emit warnings on modern Python.
    Raw strings fix that; ``re.sub`` replaces the ``re.subn(...)[0]`` dance.
    """
    text = re.sub(r'\[em\].*?\[/em\]', '', text)
    text = re.sub(r'@\{.*?\}', '', text)
    return text
if __name__ =='__main__':
    # Ad-hoc smoke test: both single-digit spellings must map to the same
    # timestamp, then a fixed timestamp is printed in full local-time form.
    print(get_mktime('2018-09-6'))
    print(get_mktime('2018-9-06'))
    print(get_full_time_from_mktime(1566545874))
231534 | #!/usr/bin/python
# -*- coding: utf-8 -*-
'''
Created on 04.20.2021
Updated on 04.23.2021
Author: <EMAIL>
'''
import os
import cv2
import copy
import numpy as np
from PIL import Image
from PyQt5.QtWidgets import QLabel, QApplication
from PyQt5.QtCore import Qt, QSize, QPoint, QRect, QLineF, pyqtSlot
from PyQt5.QtGui import QImage, QPixmap, QPainter, QPen, QColor
def format_image(image): # Gray to RGB
    """Normalize an image array to 3 channels.

    4-channel (e.g. BGRA/RGBA) input is truncated to its first 3 channels;
    anything whose last dimension is not 3 is treated as grayscale and
    converted with OpenCV.  Returns None for None input.

    NOTE(review): for a 2-D grayscale array ``shape[-1]`` is the *width*,
    not a channel count -- a 3- or 4-pixel-wide gray image would be
    misclassified.  Confirm callers always pass (H, W) gray or (H, W, C)
    color frames.
    """
    if image is None: return
    if image.shape[-1] == 4:
        image = image[:,:,:3]
    elif image.shape[-1] != 3:
        image = cv2.cvtColor(image, cv2.COLOR_GRAY2RGB)
    return image
class Canvas(QLabel):
    """Zoomable/pannable image canvas.

    Displays one or more images concatenated horizontally and supports
    dragging (pan), mouse-wheel zoom, and double-click maximize/minimize.
    ``setConfig`` must be called before zooming (it supplies zoom_factor,
    max_scale and the widget offsets).
    """

    def __init__(self, parent=None):
        super(Canvas, self).__init__(parent)
        self.pixmap = None
        self.initParams()

    def initParams(self):
        """Reset all view state to its defaults."""
        self.mode = 'live'  # Canvas mode: live or mark
        self.scale = 1.0    # Concatenated image width / widget width, should be larger than 1
        # BUGFIX: the original wrote ``self.is_drag: False`` -- a bare
        # annotation that never assigns the attribute, so a mouse-move
        # arriving before any press raised AttributeError.
        self.is_drag = False        # Whether the drag has been started
        self.is_register = False    # Whether the image info has been registered
        self.cursor = QPoint(self.width()/2, self.height()/2)  # Cursor position
        self.registerWidget()

    def setConfig(self, params):
        """Store zoom configuration (zoom_factor, max_scale, offsets)."""
        self.zoom_factor = params['zoom_factor']
        self.max_scale = params['max_scale']
        self.params = params

    def registerWidget(self, logo=None, overview=None):
        """Attach optional companion widgets (logo and overview mini-map)."""
        self.logo = logo
        self.overview = overview

    def refresh(self, image_list):
        """Concatenate, crop to the current view, and repaint the images."""
        if not isinstance(image_list, list): return
        image = self.concatImageList(image_list)
        if not self.is_register: self.registerImageInfo(image)
        image = self.cropResizeImage(image)
        self.convertPixmap(image)
        self.update()

    def resizeWidget(self, size):
        """Fit the canvas inside *size*, honoring the configured offsets."""
        off_w = int(self.params['off_w'])
        off_h = int(self.params['off_h'])
        width = max(10, size.width() - 2*off_w)
        height = max(10, size.height() - 2*off_h)
        self.setGeometry(off_w, off_h, width, height)
        if self.is_register: self.setWidgetGeometry()

    def setWidgetGeometry(self):
        """Propagate the current view geometry to the overview widget."""
        image_w = self.image_w
        image_h = self.image_h
        disp_h = image_h * (self.scale*self.width()/image_w)  # Image height displayed on the canvas
        disp_w = image_w * (self.scale*self.width()/image_w)  # Image width displayed on the canvas
        disp_size = QSize(disp_w, disp_h)
        if self.overview is not None:
            self.overview.refresh(self.size(), disp_size, self.scale, self.left_index, self.top_index)

    def mousePressEvent(self, ev):
        # In 'live' mode a press starts a drag; 'mark' mode is not implemented.
        if self.mode == 'live':
            self.is_drag = True
            self.cursor = ev.pos()
        elif self.mode == 'mark':
            pass

    def mouseMoveEvent(self, ev):
        # Pan while dragging in 'live' mode.
        if self.mode == 'live' and self.is_drag:
            self.moveContent(ev.pos())
            self.setWidgetGeometry()
        elif self.mode == 'mark':
            pass

    def mouseReleaseEvent(self, ev):
        self.is_drag = False

    def mouseDoubleClickEvent(self, ev):
        # Double click toggles between maximum zoom and the reset view.
        if self.scale < self.max_scale:
            self.resizeContent(ev.pos(), op='maximize')
        else:
            self.resizeContent(ev.pos(), op='minimize')
        self.setWidgetGeometry()

    def wheelEvent(self, ev):
        angle = ev.angleDelta().y()
        position = ev.pos()
        self.resizeContent(position, angle)
        self.setWidgetGeometry()

    def moveContent(self, position):
        """Shift the view by the cursor delta, clamped to the image bounds."""
        x0, y0 = self.cursor.x(), self.cursor.y()
        x, y = position.x(), position.y()

        image_h, image_w = self.image_h, self.image_w  # Original image height and width
        disp_h = image_h * (self.scale*self.width()/image_w)  # Image height displayed on the canvas
        disp_w = image_w * (self.scale*self.width()/image_w)  # Image width displayed on the canvas

        # Update the over-widget pixels
        off_x = x - x0
        off_y = y - y0
        self.left_index = max(0, self.left_index - off_x)
        self.left_index = min(int(disp_w-self.width()), self.left_index)
        if disp_h > self.height():  # vertical pan only when the image overflows
            self.top_index = max(0, self.top_index - off_y)
            self.top_index = min(int(disp_h-self.height()), self.top_index)
        self.cursor = position

    def resizeContent(self, position, angle=0, op='normal'):
        """Zoom around *position*; op selects wheel/maximize/minimize mode."""
        x, y = position.x(), position.y()
        # Update the scale and scale factor
        if op in ['maximize', 'minimize']:
            # Ignore double clicks landing on the letterbox bands above/below
            # the image (negative top_index means the image is centered).
            if self.top_index < 0 and (y<=abs(self.top_index) \
                or y>=abs(self.height()-abs(self.top_index))):
                return
            if op == 'maximize':
                scale = self.max_scale
            elif op == 'minimize':
                self.resetContent()
                return
        elif angle > 0: scale = min(self.max_scale, self.scale*self.zoom_factor)
        else: scale = max(1.0, self.scale/self.zoom_factor)
        scale_factor = scale / self.scale

        image_h, image_w = self.image_h, self.image_w  # Image height and width
        disp_h = image_h * (scale*self.width()/image_w)  # Image height displayed on the canvas
        disp_w = image_w * (scale*self.width()/image_w)  # Image width displayed on the canvas

        # Distance from the cursor to the displayed image's boundary after resize
        left_image_len = int((x + self.left_index) * scale_factor)
        self.left_index = max(0, left_image_len - x)

        if disp_h < self.height():
            # Image shorter than the widget: keep it vertically centered.
            self.top_index = int((disp_h - self.height())/2)
        else:
            if self.top_index >= 0:
                top_image_len = (y + self.top_index) * scale_factor
                self.top_index = int(top_image_len - y)
            else:
                abs_top_index = abs(self.top_index)
                if y < abs_top_index: y = abs_top_index + 1
                elif y > self.height() - abs_top_index: y = self.height() - abs_top_index - 1
                dist_to_image_top = y - abs_top_index
                if dist_to_image_top * scale_factor < y:
                    self.top_index = 0
                else:
                    self.top_index = min(disp_h-self.height(), dist_to_image_top*scale_factor-y)
        self.scale = scale

    def resetContent(self):
        """Return to 1:1 width fit with the image vertically centered."""
        self.left_index = 0
        self.top_index = int((self.height() - self.width()/self.image_w*self.image_h) / 2) * -1
        self.scale = 1.0

    def registerImageInfo(self, image):
        """Record the source image size and initialise the view (done once)."""
        assert self.scale == 1.0, 'Initial scale should be 1.0'
        self.image_h, self.image_w = image.shape[:2]
        self.resetContent()
        self.setWidgetGeometry()
        self.is_register = True

    def concatImageList(self, image_list):
        """Horizontally stack the (validated) images into one array."""
        image_list = self.checkImageList(image_list)
        if len(image_list) == 0: return None
        elif len(image_list) == 1: return image_list[0]
        image_list = tuple(image_list)
        image_concat = np.hstack(image_list)
        return image_concat

    def checkImageList(self, image_list):
        """Drop None entries and normalize the rest to 3-channel RGB."""
        image_list_checked = list()
        for image in image_list:
            if image is None: continue
            image = format_image(image)
            image_list_checked.append(image)
        return image_list_checked

    def cropResizeImage(self, image):
        """Crop the source image to the region currently visible on screen."""
        image_h, image_w = self.image_h, self.image_w
        disp_h = int(image_h * (self.scale*self.width()/image_w))  # Image height displayed on the canvas
        disp_w = int(image_w * (self.scale*self.width()/image_w))  # Image width displayed on the canvas

        left = int(self.left_index * image_w / disp_w)
        right = int((self.left_index + self.width()) * image_w / disp_w)
        if self.top_index > 0:
            top = int(self.top_index * image_w / disp_w)
            bottom = int((self.top_index + self.height()) * image_w / disp_w)
        else:
            top = 0
            bottom = int(self.height() * image_w / disp_w)
        image = image[top:bottom, left:right, :]
        return image

    def convertPixmap(self, image):
        """Convert an RGB numpy array into the widget's scaled QPixmap."""
        h, w, ch = image.shape[:3]
        bytesPerLine = ch*w
        convertToQtFormat = QImage(image.data.tobytes(), w, h, bytesPerLine, QImage.Format_RGB888)
        self.pixmap = QPixmap.fromImage(convertToQtFormat).scaled(self.size(), Qt.KeepAspectRatio,
                                                                  Qt.SmoothTransformation)

    def paintEvent(self, event):
        painter = QPainter(self)
        if self.pixmap is not None:
            # Center the pixmap inside the widget.
            off_x = (self.width() - self.pixmap.width()) / 2
            off_y = (self.height() - self.pixmap.height()) / 2
            painter.drawPixmap(off_x, off_y, self.pixmap)
painter.drawPixmap(off_x, off_y, self.pixmap)
| StarcoderdataPython |
6479354 | <reponame>Carlososuna11/codewars-handbook
import codewars_test as test
import timeit
from solution import alphametics
test.describe('Example Tests')

# Each entry: (puzzle string, expected unique solution).  The trailing
# comments appear to be the author's timing notes for different digit
# search orders -- TODO confirm.
example_tests = (
    ('SEND + MORE == MONEY','9567 + 1085 = 10652'),# 0..9: 0.4 / 0..5,9..6
    ('ZEROES + ONES == BINARY','698392 + 3192 = 701584'),# 0..9: 9.7 / 0..5,9..6
    ('COUPLE + COUPLE == QUARTET','653924 + 653924 = 1307848'),# 0..9: 8.7 / 0..5,9..6
    ('DO + YOU + FEEL == LUCKY','57 + 870 + 9441 = 10368'),# 0..9: 5.80 / 0..5,9..6
    ('ELEVEN + NINE + FIVE + FIVE == THIRTY','797275 + 5057 + 4027 + 4027 = 810386'),# 0..9:4.97 / 0..5,9..6
    # ('GQRAOO + COGPY + RKYKPPC + CACRK + GGAOOCHY + ORHRKYHP + CCHYRCGG = QPQAYOHPA','823755 + 65814 + 3949116 + 67639 + 88755604 + 53039401 + 66043688 = 212745017'),
)

# Run every puzzle through the solver, timing each assertion and the total.
starttime = timeit.default_timer()
for inp,out in example_tests:
    assertime =timeit.default_timer()
    test.assert_equals(alphametics(inp),out)
    print("assert time", timeit.default_timer() - assertime)
print("\nTOTAL TIME", timeit.default_timer() - starttime)
3491655 | <reponame>vtheno/pyLexical
from datatype import *
# Root of the expression algebraic data type; TypeMeta tracks its subclasses.
class Expr(metaclass=TypeMeta):pass
class SYM(Expr):
    """Symbol expression: a leaf wrapping a name."""
    def __init__(self,name):
        self.name = name
class IF(Expr):
    """Conditional expression with condition e1, then-branch e2, else-branch e3."""
    def __init__(self,e1,e2,e3):
        self.e1 = e1
        self.e2 = e2
        self.e3 = e3
def test1():
    """Smoke-test the Expr ADT and TypeMeta's custom isinstance semantics."""
    a = SYM('a')
    b = IF(a,a,a)
    print( Expr.__subs__ )
    # Classes count as instances of their ADT root, but not of each other;
    # plain instances are instances of their own class and of the root.
    assert isinstance(Expr,Expr) is False
    assert isinstance(SYM,Expr) is True
    assert isinstance(IF,Expr) is True
    assert isinstance(SYM,IF) is False
    assert isinstance(IF,SYM) is False
    assert isinstance(b,IF) is True
    assert isinstance(b,SYM) is False
    assert isinstance(a,IF) is False
    assert isinstance(a,SYM) is True
    assert isinstance(a,Expr) is True
    assert isinstance(b,Expr) is True
    print( a, b)
test1()
# Root of the Peano-style natural-number ADT (see Zero / Succ below).
class Nat(metaclass=TypeMeta): pass
class Zero(Nat):
    """The natural number 0; eval is exposed at class level via class_prop."""
    @class_prop
    def eval(cls):
        return 0
class Succ(Nat):
    """Successor of another natural number: eval = 1 + predecessor's eval."""
    def __init__(self,o):
        self.o = o
    @prop
    def eval(self):
        return 1 + self.o.eval
    def __repr__(self):
        # NOTE(review): relies on instances exposing __name__ -- presumably
        # supplied by the TypeMeta/prop machinery in the datatype module;
        # confirm there.
        return "{} {}".format(self.__name__,self.o)
# Smoke-test the Nat ADT: build a few numbers and check the custom
# isinstance semantics plus the recursive eval property.
one = Succ(Zero)
two = Succ(one)
three = Succ(Succ(Succ(Zero)))
print( Zero )
assert isinstance(Zero,Nat) is True
assert isinstance(one,Nat) is True
assert isinstance(one,Succ) is True
assert isinstance(Zero,Succ) is False
assert isinstance(one,Zero) is False
assert isinstance(Zero,Zero) is False
print( Nat.__subs__ )
print( one.eval )
print( two.eval )
print( three.eval )
print( dir(Nat) )
| StarcoderdataPython |
11267185 | #!/usr/bin/python
"""
Script to copy images to Wikimedia Commons, or to another wiki.
Syntax:
python pwb.py imagetransfer {<pagename>|<generator>} [<options>]
The following parameters are supported:
-interwiki Look for images in pages found through interwiki links.
-keepname Keep the filename and do not verify description while replacing
-tolang:x Copy the image to the wiki in code x
-tofamily:y Copy the image to a wiki in the family y
-tosite:s Copy the image to the given site like wikipedia:test
-file:z Upload many files from textfile: [[Image:x]]
[[Image:y]]
If pagename is an image description page, offers to copy the image to the
target site. If it is a normal page, it will offer to copy any of the images
used on that page, or if the -interwiki argument is used, any of the images
used on a page reachable via interwiki links.
¶ms;
"""
#
# (C) Pywikibot team, 2004-2020
#
# Distributed under the terms of the MIT license.
#
import re
import sys
import pywikibot
from pywikibot.bot import SingleSiteBot
from pywikibot import config, i18n, pagegenerators, textlib
from pywikibot.specialbots import UploadRobot
from pywikibot.tools.formatter import color_format
docuReplacements = {
'¶ms;': pagegenerators.parameterHelp
}
nowCommonsTemplate = {
'ar': '{{الآن كومنز|%s}}',
'ary': '{{Now Commons|%s}}',
'arz': '{{Now Commons|%s}}',
'de': '{{NowCommons|%s}}',
'fr': '{{Désormais sur Commons|%s}}',
'en': '{{subst:ncd|Image:%s}}',
'fa': '{{موجود در انبار|%s}}',
'he': '{{גם בוויקישיתוף|%s}}',
'hu': '{{azonnali-commons|Kép:%s}}',
'ia': '{{OraInCommons|Imagine:%s}}',
'it': '{{NowCommons unlink|%s}}',
'ja': '{{NowCommons|Image:%s}}',
'kk': '{{NowCommons|Image:%s}}',
'li': '{{NowCommons|%s}}',
'lt': '{{NowCommons|Image:%s}}',
'nds-nl': '{{NoenCommons|File:%s}}',
'nl': '{{NuCommons|Image:%s}}',
'pl': '{{NowCommons|%s}}',
'pt': '{{NowCommons|%s}}',
'sr': '{{NowCommons|%s}}',
'zh': '{{NowCommons|Image:%s}}',
}
# Translations for license templates.
# Must only be given when they are in fact different.
licenseTemplates = {
('wikipedia:ar', 'commons:commons'): {
'رخصة جنو للوثائق الحرة': 'GFDL',
'رخصة جنو للوثائق الحرة - شخصي': 'GFDL-self',
'ملكية عامة': 'PD',
'ملكية عامة - شخصي': 'PD-self',
'ملكية عامة - فن': 'PD-Art',
'ملكية عامة - الحكومة الأمريكية': 'PD-USGov',
},
('wikipedia:de', 'commons:commons'): {
'Bild-GFDL': 'GFDL',
'Bild-GFDL-OpenGeoDB': 'GFDL-OpenGeoDB',
'Bild-Innweb-Lizenz': 'Map-Austria-GNU',
'Bild-PD': 'PD',
'Bild-PD-alt': 'PD-old',
'Bild-PD-Kunst': 'PD-Art',
'Bild-PD-US': 'PD-USGov',
},
('wikipedia:fa', 'commons:commons'): {
'مالکیت عمومی': 'PD',
'مالکیت عمومی-خود': 'PD-self',
'مجوز گنو': 'GFDL',
'مجوز گنو-خود': 'GFDL-self',
'نگاره قدیمی': 'PD-Iran',
'نگاره نوشتاری': 'PD-textlogo',
'نگاره عراقی': 'PD-Iraq',
'نگاره بریتانیا': 'PD-UK',
'نگاره هابل': 'PD-Hubble',
'نگاره آمریکا': 'PD-US',
'نگاره دولت آمریکا': 'PD-USGov',
'کک-یاد-دو': 'Cc-by-2.0',
'کک-یاد-حفظ-دونیم': 'Cc-by-sa-2.5',
'کک-یاد-سه': 'Cc-by-3.0',
},
('wikipedia:fr', 'commons:commons'): {
'Domaine public': 'PD'
},
('wikipedia:he', 'commons:commons'): {
'שימוש חופשי': 'PD-self',
'שימוש חופשי מוגן': 'Copyrighted free use',
'שימוש חופשי מוגן בתנאי': 'Copyrighted free use provided that',
'תמונה ישנה': 'PD-Israel',
'ייחוס': 'Attribution',
'לוגו ויקימדיה': 'Copyright by Wikimedia',
},
('wikipedia:hu', 'commons:commons'): {
'Közkincs': 'PD',
'Közkincs-régi': 'PD-old',
},
('wikipedia:pt', 'commons:commons'): {
'Domínio público': 'PD',
},
}
class ImageTransferBot(SingleSiteBot):

    """Image transfer bot: copies files (and their descriptions) between wikis."""

    def __init__(self, **kwargs):
        """Initializer.

        @keyword generator: the pages to work on
        @type generator: iterable
        @keyword target_site: Site to send image to, default none
        @type target_site: pywikibot.site.APISite
        @keyword interwiki: Look for images in interwiki links, default false
        @type interwiki: boolean
        @keyword keep_name: Keep the filename and do not verify description
            while replacing, default false
        @type keep_name: boolean
        """
        self.available_options.update({
            'ignore_warning': False,  # not implemented yet
            'interwiki': False,
            'keepname': False,
            'target': None,
        })
        super().__init__(**kwargs)
        # Default target is the source wiki's shared image repository.
        if self.opt.target is None:
            self.opt.target = self.site.image_repository()
        else:
            self.opt.target = pywikibot.Site(self.opt.target)

    def transfer_image(self, sourceImagePage):
        """
        Download image and its description, and upload it to another site.

        @return: the filename which was used to upload the image
        """
        sourceSite = sourceImagePage.site
        url = sourceImagePage.get_file_url()
        pywikibot.output('URL should be: ' + url)
        # localize the text that should be printed on image description page
        try:
            description = sourceImagePage.get()
            # try to translate license templates
            if (sourceSite.sitename,
                    self.opt.target.sitename) in licenseTemplates:
                for old, new in licenseTemplates[
                        (sourceSite.sitename,
                         self.opt.target.sitename)].items():
                    new = '{{%s}}' % new
                    old = re.compile('{{%s}}' % old)
                    description = textlib.replaceExcept(description, old, new,
                                                        ['comment', 'math',
                                                         'nowiki', 'pre'])

            description = i18n.twtranslate(self.opt.target,
                                           'imagetransfer-file_page_message',
                                           {'site': sourceSite,
                                            'description': description})
            description += '\n\n'
            description += sourceImagePage.getFileVersionHistoryTable()
            # add interwiki link
            if sourceSite.family == self.opt.target.family:
                description += '\n\n{0}'.format(sourceImagePage)
        except pywikibot.NoPage:
            pywikibot.output(
                'Image does not exist or description page is empty.')
        except pywikibot.IsRedirectPage:
            pywikibot.output('Image description page is redirect.')
        else:
            bot = UploadRobot(url=url, description=description,
                              target_site=self.opt.target,
                              url_encoding=sourceSite.encoding(),
                              keep_filename=self.opt.keepname,
                              verify_description=not self.opt.keepname,
                              ignore_warning=self.opt.ignore_warning)
            # try to upload
            # BUGFIX: was ``bot.self.skip_run()`` -- UploadRobot has no
            # ``self`` attribute, so this raised AttributeError on every run.
            if bot.skip_run():
                return
            target_filename = bot.upload_file(url)

            if target_filename \
               and self.opt.target.sitename == 'commons:commons':
                # upload to Commons was successful
                reason = i18n.twtranslate(sourceSite,
                                          'imagetransfer-nowcommons_notice')
                # try to delete the original image if we have a sysop account
                if sourceSite.has_right('delete'):
                    if sourceImagePage.delete(reason):
                        return
                if sourceSite.lang in nowCommonsTemplate \
                   and sourceSite.family.name in config.usernames \
                   and sourceSite.lang in \
                   config.usernames[sourceSite.family.name]:
                    # add the nowCommons template.
                    pywikibot.output('Adding nowCommons template to '
                                     + sourceImagePage.title())
                    sourceImagePage.put(sourceImagePage.get() + '\n\n'
                                        + nowCommonsTemplate[sourceSite.lang]
                                        % target_filename,
                                        summary=reason)

    def show_image_list(self, imagelist):
        """Print image list."""
        pywikibot.output('-' * 60)
        for i, image in enumerate(imagelist):
            pywikibot.output('{}. Found image: {}'
                             .format(i, image.title(as_link=True)))
            try:
                # Show the image description page's contents
                pywikibot.output(image.get())
            except pywikibot.NoPage:
                pass
            else:
                # look if page already exists with this name.
                # TODO: consider removing this: a different image of the same
                # name may exist on the target wiki, and the bot user may want
                # to upload anyway, using another name.
                try:
                    # Maybe the image is on the target site already
                    targetTitle = 'File:' + image.title().split(':', 1)[1]
                    targetImage = pywikibot.Page(self.opt.target, targetTitle)
                    targetImage.get()
                    pywikibot.output('Image with this name is already on {}.'
                                     .format(self.opt.target))
                    pywikibot.output('-' * 60)
                    pywikibot.output(targetImage.get())
                    sys.exit()
                except pywikibot.NoPage:
                    # That's the normal case
                    pass
                except pywikibot.IsRedirectPage:
                    pywikibot.output(
                        'Description page on target wiki is redirect?!')
        pywikibot.output('=' * 60)

    def treat(self, page):
        """Treat a single page."""
        # Collect candidate images from the page itself, its interwiki
        # counterparts, or directly if the page is a file page.
        if self.opt.interwiki:
            imagelist = []
            for linkedPage in page.interwiki():
                linkedPage = pywikibot.Page(linkedPage)
                imagelist.extend(linkedPage.imagelinks())
        elif page.is_filepage():
            imagePage = pywikibot.FilePage(page.site, page.title())
            imagelist = [imagePage]
        else:
            imagelist = list(page.imagelinks())

        while imagelist:
            self.show_image_list(imagelist)
            if len(imagelist) == 1:
                # no need to query the user, only one possibility
                todo = 0
            else:
                pywikibot.output('Give the number of the image to transfer.')
                todo = pywikibot.input('To end uploading, press enter:')
                if not todo:
                    break
                # Robustness: don't crash on non-numeric interactive input.
                try:
                    todo = int(todo)
                except ValueError:
                    pywikibot.output(
                        color_format('{yellow}Not a number.{default}'))
                    continue

            if 0 <= todo < len(imagelist):
                if self.transfer_allowed(imagelist[todo]):
                    self.transfer_image(imagelist[todo])
                # remove the selected image from the list
                imagelist.pop(todo)
            else:
                pywikibot.output(
                    color_format('{yellow}No such image number.{default}'))

    def transfer_allowed(self, image):
        """Check whether transfer is allowed."""
        target_repo = self.opt.target.image_repository()

        if image.file_is_shared() \
           and image.site.image_repository() == target_repo:
            pywikibot.output(color_format(
                '{yellow}The image is already shared on {}.{default}',
                target_repo))
            return False
        return True
def main(*args):
    """
    Process command line arguments and invoke bot.

    If args is an empty list, sys.argv is used.

    @param args: command line arguments
    @type args: str
    """
    target_code = None
    target_family = None
    options = {}

    local_args = pywikibot.handle_args(args)
    generator_factory = pagegenerators.GeneratorFactory(
        positional_arg_name='page')

    # Split our own options from generic page-generator options.
    for arg in local_args:
        opt, _, value = arg.partition(':')
        if opt in ('-ignore_warning', '-interwiki', '-keepname'):
            options[opt[1:]] = True
        elif opt == '-tolang':
            target_code = value
        elif opt == '-tofamily':
            target_family = value
        elif opt == '-tosite':
            options['target'] = value
        else:
            generator_factory.handle_arg(arg)

    gen = generator_factory.getCombinedGenerator()
    if not gen:
        pywikibot.bot.suggest_help(
            missing_parameters=['page'],
            additional_text='and no other generator was defined.')
        return

    # -tolang/-tofamily are merged into a single "target" site spec; an
    # explicit -tosite takes precedence (setdefault does not override it).
    if target_code or target_family:
        site = pywikibot.Site()
        options.setdefault('target',
                           '{}:{}'.format(target_code or site.lang,
                                          target_family or site.family))

    bot = ImageTransferBot(generator=gen, **options)
    bot.run()
bot.run()
if __name__ == '__main__':
main()
| StarcoderdataPython |
377659 | <filename>emol/emol/initialize/errors.py
# -*- coding: utf-8 -*-
"""Set up custom error handlers."""
# standard library imports
# third-party imports
from flask import render_template, request, Response, current_app
# application imports
def init_error_handlers():
    """Register custom error pages for the app.

    Installs Flask error handlers for 401, 403, 404 and 500 that render
    the matching template from ``templates/errors/``.
    """
    current_app.logger.info('Initialize error handling')

    # pylint really hates these 'unused' decorated functions.
    # In reality, they are callbacks for Flask
    # pylint: disable=unused-variable

    @current_app.errorhandler(404)
    def not_found(error):
        """Custom 404 handler to return error page."""
        current_app.logger.debug(error)
        if request.form:
            # Requests with form data are likely AJAX
            return Response(None, 404)
        return render_template('errors/404.html', http_error=True), 404

    @current_app.errorhandler(403)
    def forbidden(error):
        """Custom 403 handler to return error page."""
        current_app.logger.debug(error)
        return render_template('errors/403.html', http_error=True), 403

    @current_app.errorhandler(401)
    def unauthorized(error):
        """Custom 401 handler to return error page."""
        current_app.logger.debug(error)
        return render_template('errors/401.html', http_error=True), 401

    @current_app.errorhandler(500)
    def uhoh(error):
        """Custom 500 handler to return error page."""
        current_app.logger.error(error)
        return render_template('errors/500.html', http_error=True), 500
| StarcoderdataPython |
9686094 | <gh_stars>10-100
from invoke import task
from pyinvokedepends import depends
@depends(on=["./hello.c"], creates=["./a.out"])
@task
def test(c):
    """Demo task: produce ./a.out from ./hello.c."""
    # NOTE(review): @depends presumably skips the body when a.out is newer
    # than hello.c — confirm against the pyinvokedepends documentation.
    #c.run("gcc hello.c")
    c.run("touch a.out", echo=True)
| StarcoderdataPython |
6565469 | # Quest will use QPool questions tagged 'quest'
import os
import logging
import datetime
import subprocess
from django.db import models
from django.utils.translation import ugettext_noop
from django.conf import settings
from django.db.models import Sum
from wouso.core.user.models import Player
from wouso.core.game.models import Game
from wouso.core import scoring, signals
from wouso.core.scoring import History
from wouso.core.scoring.models import Formula
from wouso.core.qpool import register_category
from wouso.core.qpool.models import Question
(TYPE_CLASSIC,
TYPE_CHECKER,
TYPE_EXTERNAL) = range(3)
class QuestUser(Player):
    """Player extension tracking per-user quest progress."""

    # Quest the user is currently attempting, if any.
    current_quest = models.ForeignKey('Quest', null=True, blank=True, default=None)
    # Index of the next level/question to answer within current_quest.
    current_level = models.IntegerField(default=0, blank=True)
    started_time = models.DateTimeField(default=datetime.datetime.now, blank=True, null=True)
    finished_time = models.DateTimeField(default=None, blank=True, null=True)
    finished = models.BooleanField(default=False, blank=True)

    def is_current(self, quest):
        """Return True if *quest* is this user's current quest.

        Compares ids when both sides are set; otherwise compares the raw
        (possibly None) values.
        """
        return (self.current_quest.id == quest.id) if (self.current_quest and quest) else (self.current_quest == quest)

    @property
    def started(self):
        """
        Check if we started the current quest.
        """
        quest = QuestGame.get_current()
        if (not quest) or (not self.current_quest_id):
            return False
        return self.current_quest_id == quest.id

    @property
    def current_question(self):
        """Return the question for the user's current level, or None."""
        if not self.current_quest:
            return None
        try:
            return self.current_quest.levels[self.current_level]
        except IndexError:
            # Past the last level: no question remains.
            return None

    @property
    def time_took(self):
        """Return the time spent on the current quest.

        While unfinished, this is elapsed time so far, capped at the
        quest's end; 0 when there is no quest data at all.
        """
        if not self.finished_time:
            if self.current_quest:
                if self.current_quest.end < datetime.datetime.now():
                    return self.current_quest.end - self.started_time
                else:
                    return datetime.datetime.now() - self.started_time
            else:
                return 0
        else:
            return self.finished_time - self.started_time

    def pass_level(self, quest):
        """
        Pass current level. Increment current level and score.
        """
        if self.current_quest.id != quest.id:
            # Stale request: *quest* is no longer the user's current quest.
            return None
        scoring.score(self, QuestGame, quest.get_formula('quest-ok'), level=(self.current_level + 1), external_id=quest.id)
        self.current_level += 1
        if self.current_level == quest.count:
            # Last level passed: record the finish and award the bonus.
            self.finish_quest()
            scoring.score(self, QuestGame, quest.get_formula('quest-finish-ok'), external_id=quest.id)
        self.save()
        self.user.get_profile().save()
        return self.current_level

    def finish_quest(self):
        """Record completion of the current quest (result row, activity
        signal, finished flags). No-op unless all levels were passed."""
        if not self.finished:
            if self.current_level < self.current_quest.count:
                return
            qr = QuestResult(user=self, quest=self.current_quest, level=self.current_level)
            qr.save()
            # sending the signal
            signal_msg = ugettext_noop("has finished quest {quest}")
            signals.addActivity.send(sender=None, user_from=self,
                                     user_to=self, message=signal_msg,
                                     arguments=dict(quest=self.current_quest.title),
                                     game=QuestGame.get_instance())
            # saving finish data
            self.finished = True
            self.finished_time = datetime.datetime.now()
            self.save()

    def register_quest_result(self):
        """
        Create a QuestResult entry for the QuestUser's current quest
        """
        if not self.finished:
            qr, created = QuestResult.objects.get_or_create(user=self,
                quest=self.current_quest, level=self.current_level)

    def set_current(self, quest):
        """Make *quest* the user's current quest, resetting progress."""
        self.started_time = datetime.datetime.now()
        self.current_quest = quest
        self.current_level = 0
        self.finished = False
        self.finished_time = None
        self.save()
        # send activity signal
        signal_msg = ugettext_noop('has started quest {quest}')
        signals.addActivity.send(sender=None,
                                 user_from=self, user_to=self,
                                 message=signal_msg,
                                 arguments=dict(quest=quest.title),
                                 game=QuestGame.get_instance())
class QuestResult(models.Model):
    """Final level a user reached in a given quest."""
    user = models.ForeignKey('QuestUser')
    quest = models.ForeignKey('Quest')
    # Highest level the user reached in this quest.
    level = models.IntegerField(default=0)
class Quest(models.Model):
    """A timed series of questions that players answer in order."""

    QUEST_TYPES = (
        (TYPE_CLASSIC, 'In site text answers'),
        (TYPE_CHECKER, 'In site answers, verified by checker'),
        (TYPE_EXTERNAL, 'External levels and answers'),
    )
    start = models.DateTimeField()
    end = models.DateTimeField()
    title = models.CharField(default="", max_length=100)
    questions = models.ManyToManyField(Question)
    # Comma-separated question ids defining the level order; empty means
    # the default queryset order.
    order = models.CharField(max_length=1000, default="", blank=True)
    type = models.IntegerField(default=TYPE_CLASSIC, choices=QUEST_TYPES)
    # True once results were recorded after the quest ended.
    registered = models.BooleanField(default=False)

    def get_formula(self, type='quest-ok'):
        """ Allow specific formulas for specific quests.
        Hackish by now, think of a better approach in next version
        TODO
        """
        if type not in ('quest-ok', 'quest-finish-ok', 'finalquest-ok', 'quest-finish-bonus'):
            return None
        # TODO: use Formula.get here
        try:
            # Per-quest override: formula named "<type>-<quest id>".
            formula = Formula.objects.get(name='%s-%d' % (type, self.id))
        except Formula.DoesNotExist:
            formula = Formula.objects.get(name=type)
        return formula

    def is_final(self):
        """Return True if this quest is actually a FinalQuest row."""
        final = FinalQuest.objects.filter(id=self.id).count()
        return final > 0

    def is_answerable(self):
        """Return True if answers are submitted through the site."""
        return self.type == TYPE_CLASSIC or self.type == TYPE_CHECKER

    @property
    def count(self):
        # Number of levels (= number of attached questions).
        return self.questions.count()

    @property
    def levels(self):
        """ Get questions/levels in specified order """
        if not self.order:
            return self.questions.all()
        else:
            order = [int(i) for i in self.order.split(',')]
            qs = {}
            for q in self.questions.all():
                qs[q.id] = q
            return [qs[i] for i in order]

    @property
    def elapsed(self):
        # Time since the quest started.
        return datetime.datetime.now() - self.start

    @property
    def remaining(self):
        # Time until the quest ends.
        return self.end - datetime.datetime.now()

    @property
    def is_active(self):
        """Return True while now is within [start, end]."""
        acum = datetime.datetime.now()
        if self.end < acum:
            return False
        elif self.start > acum:
            return False
        return True

    @property
    def status(self):
        """ Current activity status.
        Note (alexef): I'm not particulary happy with this
        """
        acum = datetime.datetime.now()
        if self.end < acum:
            return "Passed"
        if self.start > acum:
            return "Scheduled"
        return "Active"

    def check_answer(self, user, answer):
        """Check *answer* for *user*'s current level; advance on success.

        If this quest is not the user's current one, the user is switched
        to it (recording the previous result) and False is returned.
        """
        if user.current_quest.id != self.id:
            user.register_quest_result()
            user.set_current(self)
            return False
        try:
            question = self.levels[user.current_level]
        except IndexError:
            logging.error("No such question")
            return False
        if not user.current_level == self.count:
            if self.answer_correct(user.current_level, question, answer, user):
                user.pass_level(self)
                return True
        return False

    def answer_correct(self, level, question, answer, user):
        """
        Check if an answer is correct for a question and level.
        """
        if self.type == TYPE_EXTERNAL:
            # External quests are never validated in-site.
            return False
        elif self.type == TYPE_CHECKER:
            # Run an external "check" executable for this level.
            path = os.path.join(settings.FINAL_QUEST_CHECKER_PATH, 'task-%02d' % (level + 0), 'check')
            if not os.path.exists(path):
                self.error = 'No checker for level %d' % level
                return False
            p = subprocess.Popen([path, user.user.username, answer], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=os.path.dirname(path))
            retval = p.wait()
            # NOTE(review): exit code 0 = correct, 1 = presumably wrong
            # answer, >1 = checker failure — confirm with the checker
            # scripts' convention.
            if retval > 1:
                self.error = 'Error running checker for %d' % level
                return False
            return retval == 0
        elif self.type == TYPE_CLASSIC:
            # Case-insensitive match against any accepted answer.
            answers = [a.__unicode__().lower() for a in question.answers]
            return answer.strip().lower() in answers
        return False

    def reorder(self, order):
        """Persist *order* (iterable of id strings) as the level order."""
        self.order = ''
        for i in order:
            self.order += i + ','
        self.order = self.order[:-1]
        self.save()

    def players_count(self):
        """
        Number of players who attempted the quest
        """
        # exclude players not belonging to a 'can play' race
        return self.questresult_set.exclude(user__race__can_play=False).values('user').distinct().count()

    def players_completed(self):
        """
        Number of players who finished the quest
        """
        return self.questresult_set.filter(level=self.count).exclude(user__race__can_play=False).count()

    def top_results(self):
        """
        Return the first 10 players who finished this quest
        """
        top_results = self.questresult_set.exclude(user__race__can_play=False).order_by('id')
        top_results = [entry for entry in top_results if entry.level == self.count][:10]
        return top_results

    def give_bonus(self):
        """Award the finish bonus to the top finishers, by position."""
        for i, r in enumerate(self.top_results()):
            player = r.user.get_extension(Player)
            scoring.score(player, QuestGame, 'quest-finish-bonus', position=i + 1, external_id=self.id)

    def register(self):
        """
        Register the result of all the users who attempted this quest
        """
        if not self.is_active:
            for user in self.questuser_set.all():
                user.register_quest_result()
            self.registered = True
            self.save()

    def __unicode__(self):
        return "%s - %s %s" % (self.start, self.end, self.title)
class QuestGame(Game):
    """ Each game must extend Game """
    class Meta:
        proxy = True

    # qpool category this game draws its questions from.
    QPOOL_CATEGORY = 'quest'

    def __init__(self, *args, **kwargs):
        # Set parent's fields
        self._meta.get_field('verbose_name').default = "Weekly Quest"
        self._meta.get_field('short_name').default = ""
        # the url field takes as value only a named url from module's urls.py
        self._meta.get_field('url').default = "quest_index_view"
        super(QuestGame, self).__init__(*args, **kwargs)

    @classmethod
    def get_current(cls):
        """Return the quest running right now, or None.

        A running FinalQuest takes precedence over a regular Quest.
        """
        now = datetime.datetime.now()
        # Bare `except:` clauses previously swallowed *every* exception
        # here (including KeyboardInterrupt); catch only the expected
        # Django lookup failures instead.
        try:
            return FinalQuest.objects.get(start__lte=now, end__gte=now)
        except (FinalQuest.DoesNotExist, FinalQuest.MultipleObjectsReturned):
            pass
        try:
            return Quest.objects.get(start__lte=now, end__gte=now)
        except (Quest.DoesNotExist, Quest.MultipleObjectsReturned):
            return None

    @classmethod
    def get_staff_and_permissions(cls):
        """Return the staff group/permission spec for this game."""
        return [{'name': 'Quest Staff', 'permissions': ['change_quest']}]

    @classmethod
    def get_formulas(kls):
        """ Returns a list of formulas used by qotd """
        fs = []
        quest_game = kls.get_instance()
        fs.append(dict(name='quest-ok', expression='points={level}',
                       owner=quest_game.game,
                       description='Points earned when finishing a level. Arguments: level.')
                  )
        fs.append(dict(name='quest-finish-ok', expression='points=10',
                       owner=quest_game.game,
                       description='Bonus points earned when finishing the entire quest. No arguments.')
                  )
        fs.append(dict(name='quest-finish-bonus', expression='points=fib(12 - {position})',
                       owner=quest_game.game,
                       description='Bonus points earned when finishing a quest. Given to first 10, argument: position.')
                  )
        fs.append(dict(name='finalquest-ok', expression='points={level}+{level_users}',
                       owner=quest_game.game,
                       description='Bonus points earned when finishing the final quest. Arguments: level, level_users')
                  )
        return fs

    @classmethod
    def get_api(kls):
        """Return the REST url -> handler mapping for this game."""
        from api import QuestAdminHandler, QuestAdminUserHandler
        return {r'^quest/admin/$': QuestAdminHandler,
                r'^quest/admin/quest=(?P<quest_id>\d+)/username=(?P<username>[^/]+)/$': QuestAdminUserHandler
                }

    @classmethod
    def final_exists(cls):
        """Return True if a final quest has been created."""
        return FinalQuest.objects.all().count() != 0

    @classmethod
    def get_final(cls):
        """Return the (single) final quest, or None if there is none."""
        try:
            return FinalQuest.objects.all()[0]
        except IndexError:
            return None


register_category(QuestGame.QPOOL_CATEGORY, QuestGame)
class FinalQuest(Quest):
    """The special end-of-season quest with per-level bonus scoring."""

    def give_level_bonus(self):
        """Award the 'finalquest-ok' bonus to every playable user, once
        per level they reached (levels are cumulative via __gte)."""
        final = QuestGame.get_final()
        if not final:
            return
        for level in xrange(len(self.levels) + 1):
            if level == 0:
                # Level 0 means no progress; nothing to reward.
                continue
            users = QuestUser.objects.filter(current_quest=final, current_level__gte=level, race__can_play=True)
            for user in users:
                scoring.score(
                    user,
                    QuestGame,
                    self.get_formula('finalquest-ok'),
                    level=level,
                    level_users=users.count()
                )
                signal_msg = ugettext_noop("received bonus for reaching level {level} in the final quest")
                signals.addActivity.send(sender=None, user_from=user,
                                         user_to=user, message=signal_msg,
                                         arguments=dict(level=level),
                                         game=QuestGame.get_instance()
                                         )

    def fetch_levels(self):
        """Return a list of {'id': level, 'users': [...]} dicts, one per
        level, with each user annotated with their bonus amount."""
        levels = []
        for level in xrange(len(self.levels) + 1):
            level_data = {'id': level, 'users': []}
            for user in QuestUser.objects.filter(current_quest=self, current_level=level):
                # Check finalquest bonus amount
                amount = History.objects.filter(user=user.user, formula__name='finalquest-ok').aggregate(sum=Sum('amount'))['sum']
                user.amount = amount
                level_data['users'].append(user)
            levels.append(level_data)
        return levels
def extrair_digitos(numero):
    """Return the (milhar, centena, dezena, unidade) digits of *numero*.

    Digits above the thousands place are ignored, matching the original
    script's behaviour for larger numbers.
    """
    unidade = numero % 10          # original used `numero // 1`, a no-op
    dezena = numero // 10 % 10
    centena = numero // 100 % 10
    milhar = numero // 1000 % 10
    return milhar, centena, dezena, unidade


if __name__ == '__main__':
    numero = int(input('digite um número:'))
    m, c, d, u = extrair_digitos(numero)
    print('analizando o número {}'.format(numero))
    print('unidade = {}'.format(u))
    print('dezena = {}'.format(d))
    print('centena = {}'.format(c))
    print('milhar = {}'.format(m))
205102 | <reponame>muelli/twisted
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.conch.checkers}.
"""
try:
import crypt
except ImportError:
cryptSkip = 'cannot run without crypt module'
else:
cryptSkip = ''
import os
from base64 import encodebytes
from collections import namedtuple
from io import BytesIO
from zope.interface.verify import verifyObject
from twisted.python import util
from twisted.python.failure import Failure
from twisted.python.reflect import requireModule
from twisted.trial.unittest import TestCase
from twisted.python.filepath import FilePath
from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse
from twisted.cred.credentials import UsernamePassword, IUsernamePassword, \
SSHPrivateKey, ISSHPrivateKey
from twisted.cred.error import UnhandledCredentials, UnauthorizedLogin
from twisted.python.fakepwd import UserDatabase, ShadowDatabase
from twisted.test.test_process import MockOS
if requireModule('cryptography') and requireModule('pyasn1'):
dependencySkip = ''
from twisted.conch.ssh import keys
from twisted.conch import checkers
from twisted.conch.error import NotEnoughAuthentication, ValidPublicKey
from twisted.conch.test import keydata
else:
dependencySkip = "can't run without cryptography and PyASN1"
if getattr(os, 'geteuid', None) is None:
euidSkip = "Cannot run without effective UIDs (questionable)"
else:
euidSkip = ''
class HelperTests(TestCase):
    """
    Tests for helper functions L{verifyCryptedPassword}, L{_pwdGetByName} and
    L{_shadowGetByName}.
    """
    skip = cryptSkip or dependencySkip

    def setUp(self):
        self.mockos = MockOS()

    def test_verifyCryptedPassword(self):
        """
        L{verifyCryptedPassword} returns C{True} if the plaintext password
        passed to it matches the encrypted password passed to it.
        """
        # Restored concrete fixture values that had been redacted out of
        # this block, which left the tests meaningless.
        password = 'secret string'
        salt = 'salty'
        crypted = crypt.crypt(password, salt)
        self.assertTrue(
            checkers.verifyCryptedPassword(crypted, password),
            '%r supposed to be valid encrypted password for %r' % (
                crypted, password))

    def test_verifyCryptedPasswordMD5(self):
        """
        L{verifyCryptedPassword} returns True if the provided cleartext password
        matches the provided MD5 password hash.
        """
        password = 'password'
        # '$1$' prefix selects the MD5 crypt scheme.
        salt = '$1$salt'
        crypted = crypt.crypt(password, salt)
        self.assertTrue(
            checkers.verifyCryptedPassword(crypted, password),
            '%r supposed to be valid encrypted password for %s' % (
                crypted, password))

    def test_refuteCryptedPassword(self):
        """
        L{verifyCryptedPassword} returns C{False} if the plaintext password
        passed to it does not match the encrypted password passed to it.
        """
        # The two values must differ for this test to be meaningful.
        password = 'string secret'
        wrong = 'secret string'
        crypted = crypt.crypt(password, password)
        self.assertFalse(
            checkers.verifyCryptedPassword(crypted, wrong),
            '%r not supposed to be valid encrypted password for %s' % (
                crypted, wrong))

    def test_pwdGetByName(self):
        """
        L{_pwdGetByName} returns a tuple of items from the UNIX /etc/passwd
        database if the L{pwd} module is present.
        """
        userdb = UserDatabase()
        userdb.addUser(
            'alice', 'secrit', 1, 2, 'first last', '/foo', '/bin/sh')
        self.patch(checkers, 'pwd', userdb)
        self.assertEqual(
            checkers._pwdGetByName('alice'), userdb.getpwnam('alice'))

    def test_pwdGetByNameWithoutPwd(self):
        """
        If the C{pwd} module isn't present, L{_pwdGetByName} returns L{None}.
        """
        self.patch(checkers, 'pwd', None)
        self.assertIsNone(checkers._pwdGetByName('alice'))

    def test_shadowGetByName(self):
        """
        L{_shadowGetByName} returns a tuple of items from the UNIX /etc/shadow
        database if the L{spwd} is present.
        """
        userdb = ShadowDatabase()
        userdb.addUser('bob', 'passphrase', 1, 2, 3, 4, 5, 6, 7)
        self.patch(checkers, 'spwd', userdb)

        self.mockos.euid = 2345
        self.mockos.egid = 1234
        self.patch(util, 'os', self.mockos)

        self.assertEqual(
            checkers._shadowGetByName('bob'), userdb.getspnam('bob'))
        # Reading /etc/shadow requires a temporary switch to root.
        self.assertEqual(self.mockos.seteuidCalls, [0, 2345])
        self.assertEqual(self.mockos.setegidCalls, [0, 1234])

    def test_shadowGetByNameWithoutSpwd(self):
        """
        L{_shadowGetByName} returns L{None} if C{spwd} is not present.
        """
        self.patch(checkers, 'spwd', None)

        self.assertIsNone(checkers._shadowGetByName('bob'))
        self.assertEqual(self.mockos.seteuidCalls, [])
        self.assertEqual(self.mockos.setegidCalls, [])
class SSHPublicKeyDatabaseTests(TestCase):
    """
    Tests for L{SSHPublicKeyDatabase}.
    """
    skip = euidSkip or dependencySkip

    def setUp(self):
        self.checker = checkers.SSHPublicKeyDatabase()
        self.key1 = encodebytes(b"foobar")
        self.key2 = encodebytes(b"eggspam")
        self.content = (b"t1 " + self.key1 + b" foo\nt2 " + self.key2 +
                        b" egg\n")
        self.mockos = MockOS()
        self.mockos.path = FilePath(self.mktemp())
        self.mockos.path.makedirs()
        self.patch(util, 'os', self.mockos)
        self.sshDir = self.mockos.path.child('.ssh')
        self.sshDir.makedirs()
        userdb = UserDatabase()
        userdb.addUser(
            b'user', b'password', 1, 2, b'first last',
            self.mockos.path.path, b'/bin/shell')
        self.checker._userdb = userdb

    def test_deprecated(self):
        """
        L{SSHPublicKeyDatabase} is deprecated as of version 15.0
        """
        warningsShown = self.flushWarnings(
            offendingFunctions=[self.setUp])
        self.assertEqual(warningsShown[0]['category'], DeprecationWarning)
        self.assertEqual(
            warningsShown[0]['message'],
            "twisted.conch.checkers.SSHPublicKeyDatabase "
            "was deprecated in Twisted 15.0.0: Please use "
            "twisted.conch.checkers.SSHPublicKeyChecker, "
            "initialized with an instance of "
            "twisted.conch.checkers.UNIXAuthorizedKeysFiles instead.")
        self.assertEqual(len(warningsShown), 1)

    def _testCheckKey(self, filename):
        # Shared helper: write both known keys to *filename* and verify
        # checkKey accepts them and rejects an unknown blob. The username
        # must match the fixture user created in setUp.
        self.sshDir.child(filename).setContent(self.content)
        user = UsernamePassword(b"user", b"password")
        user.blob = b"foobar"
        self.assertTrue(self.checker.checkKey(user))
        user.blob = b"eggspam"
        self.assertTrue(self.checker.checkKey(user))
        user.blob = b"notallowed"
        self.assertFalse(self.checker.checkKey(user))

    def test_checkKey(self):
        """
        L{SSHPublicKeyDatabase.checkKey} should retrieve the content of the
        authorized_keys file and check the keys against that file.
        """
        self._testCheckKey("authorized_keys")
        self.assertEqual(self.mockos.seteuidCalls, [])
        self.assertEqual(self.mockos.setegidCalls, [])

    def test_checkKey2(self):
        """
        L{SSHPublicKeyDatabase.checkKey} should retrieve the content of the
        authorized_keys2 file and check the keys against that file.
        """
        self._testCheckKey("authorized_keys2")
        self.assertEqual(self.mockos.seteuidCalls, [])
        self.assertEqual(self.mockos.setegidCalls, [])

    def test_checkKeyAsRoot(self):
        """
        If the key file is readable, L{SSHPublicKeyDatabase.checkKey} should
        switch its uid/gid to the ones of the authenticated user.
        """
        keyFile = self.sshDir.child("authorized_keys")
        keyFile.setContent(self.content)
        # Fake permission error by changing the mode
        keyFile.chmod(0o000)
        self.addCleanup(keyFile.chmod, 0o777)
        # And restore the right mode when seteuid is called
        savedSeteuid = self.mockos.seteuid
        def seteuid(euid):
            keyFile.chmod(0o777)
            return savedSeteuid(euid)
        self.mockos.euid = 2345
        self.mockos.egid = 1234
        self.patch(self.mockos, "seteuid", seteuid)
        self.patch(util, 'os', self.mockos)
        user = UsernamePassword(b"user", b"password")
        user.blob = b"foobar"
        self.assertTrue(self.checker.checkKey(user))
        self.assertEqual(self.mockos.seteuidCalls, [0, 1, 0, 2345])
        self.assertEqual(self.mockos.setegidCalls, [2, 1234])

    def test_requestAvatarId(self):
        """
        L{SSHPublicKeyDatabase.requestAvatarId} should return the avatar id
        passed in if its C{_checkKey} method returns True.
        """
        def _checkKey(ignored):
            return True
        self.patch(self.checker, 'checkKey', _checkKey)
        credentials = SSHPrivateKey(
            b'test', b'ssh-rsa', keydata.publicRSA_openssh, b'foo',
            keys.Key.fromString(keydata.privateRSA_openssh).sign(b'foo'))
        d = self.checker.requestAvatarId(credentials)
        def _verify(avatarId):
            self.assertEqual(avatarId, b'test')
        return d.addCallback(_verify)

    def test_requestAvatarIdWithoutSignature(self):
        """
        L{SSHPublicKeyDatabase.requestAvatarId} should raise L{ValidPublicKey}
        if the credentials represent a valid key without a signature.  This
        tells the user that the key is valid for login, but does not actually
        allow that user to do so without a signature.
        """
        def _checkKey(ignored):
            return True
        self.patch(self.checker, 'checkKey', _checkKey)
        credentials = SSHPrivateKey(
            b'test', b'ssh-rsa', keydata.publicRSA_openssh, None, None)
        d = self.checker.requestAvatarId(credentials)
        return self.assertFailure(d, ValidPublicKey)

    def test_requestAvatarIdInvalidKey(self):
        """
        If L{SSHPublicKeyDatabase.checkKey} returns False,
        C{_cbRequestAvatarId} should raise L{UnauthorizedLogin}.
        """
        def _checkKey(ignored):
            return False
        self.patch(self.checker, 'checkKey', _checkKey)
        d = self.checker.requestAvatarId(None)
        return self.assertFailure(d, UnauthorizedLogin)

    def test_requestAvatarIdInvalidSignature(self):
        """
        Valid keys with invalid signatures should cause
        L{SSHPublicKeyDatabase.requestAvatarId} to return a {UnauthorizedLogin}
        failure
        """
        def _checkKey(ignored):
            return True
        self.patch(self.checker, 'checkKey', _checkKey)
        # Sign with the DSA key but present the RSA public key: the
        # signature cannot verify.
        credentials = SSHPrivateKey(
            b'test', b'ssh-rsa', keydata.publicRSA_openssh, b'foo',
            keys.Key.fromString(keydata.privateDSA_openssh).sign(b'foo'))
        d = self.checker.requestAvatarId(credentials)
        return self.assertFailure(d, UnauthorizedLogin)

    def test_requestAvatarIdNormalizeException(self):
        """
        Exceptions raised while verifying the key should be normalized into an
        C{UnauthorizedLogin} failure.
        """
        def _checkKey(ignored):
            return True
        self.patch(self.checker, 'checkKey', _checkKey)
        credentials = SSHPrivateKey(b'test', None, b'blob', b'sigData', b'sig')
        d = self.checker.requestAvatarId(credentials)
        def _verifyLoggedException(failure):
            errors = self.flushLoggedErrors(keys.BadKeyError)
            self.assertEqual(len(errors), 1)
            return failure
        d.addErrback(_verifyLoggedException)
        return self.assertFailure(d, UnauthorizedLogin)
class SSHProtocolCheckerTests(TestCase):
    """
    Tests for L{SSHProtocolChecker}.
    """
    skip = dependencySkip

    def test_registerChecker(self):
        """
        L{SSHProtocolChecker.registerChecker} should add the given checker to
        the list of registered checkers.
        """
        checker = checkers.SSHProtocolChecker()
        self.assertEqual(checker.credentialInterfaces, [])
        checker.registerChecker(checkers.SSHPublicKeyDatabase(), )
        self.assertEqual(checker.credentialInterfaces, [ISSHPrivateKey])
        self.assertIsInstance(checker.checkers[ISSHPrivateKey],
                              checkers.SSHPublicKeyDatabase)

    def test_registerCheckerWithInterface(self):
        """
        If a specific interface is passed into
        L{SSHProtocolChecker.registerChecker}, that interface should be
        registered instead of what the checker specifies in
        credentialInterfaces.
        """
        checker = checkers.SSHProtocolChecker()
        self.assertEqual(checker.credentialInterfaces, [])
        checker.registerChecker(checkers.SSHPublicKeyDatabase(),
                                IUsernamePassword)
        self.assertEqual(checker.credentialInterfaces, [IUsernamePassword])
        self.assertIsInstance(checker.checkers[IUsernamePassword],
                              checkers.SSHPublicKeyDatabase)

    def test_requestAvatarId(self):
        """
        L{SSHProtocolChecker.requestAvatarId} should defer to one of its
        registered checkers to authenticate a user.
        """
        checker = checkers.SSHProtocolChecker()
        passwordDatabase = InMemoryUsernamePasswordDatabaseDontUse()
        passwordDatabase.addUser(b'test', b'test')
        checker.registerChecker(passwordDatabase)
        d = checker.requestAvatarId(UsernamePassword(b'test', b'test'))
        def _callback(avatarId):
            self.assertEqual(avatarId, b'test')
        return d.addCallback(_callback)

    def test_requestAvatarIdWithNotEnoughAuthentication(self):
        """
        If the client indicates that it is never satisfied, by always returning
        False from _areDone, then L{SSHProtocolChecker} should raise
        L{NotEnoughAuthentication}.
        """
        checker = checkers.SSHProtocolChecker()
        def _areDone(avatarId):
            return False
        self.patch(checker, 'areDone', _areDone)
        passwordDatabase = InMemoryUsernamePasswordDatabaseDontUse()
        passwordDatabase.addUser(b'test', b'test')
        checker.registerChecker(passwordDatabase)
        d = checker.requestAvatarId(UsernamePassword(b'test', b'test'))
        return self.assertFailure(d, NotEnoughAuthentication)

    def test_requestAvatarIdInvalidCredential(self):
        """
        If the passed credentials aren't handled by any registered checker,
        L{SSHProtocolChecker} should raise L{UnhandledCredentials}.
        """
        checker = checkers.SSHProtocolChecker()
        d = checker.requestAvatarId(UsernamePassword(b'test', b'test'))
        return self.assertFailure(d, UnhandledCredentials)

    def test_areDone(self):
        """
        The default L{SSHProtocolChecker.areDone} should simply return True.
        """
        self.assertTrue(checkers.SSHProtocolChecker().areDone(None))
class UNIXPasswordDatabaseTests(TestCase):
    """
    Tests for L{UNIXPasswordDatabase}.
    """
    skip = cryptSkip or dependencySkip

    def assertLoggedIn(self, d, username):
        """
        Assert that the L{Deferred} passed in is called back with the value
        'username'.  This represents a valid login for this TestCase.

        NOTE: To work, this method's return value must be returned from the
        test method, or otherwise hooked up to the test machinery.

        @param d: a L{Deferred} from an L{IChecker.requestAvatarId} method.
        @type d: L{Deferred}
        @rtype: L{Deferred}
        """
        result = []
        d.addBoth(result.append)
        self.assertEqual(len(result), 1, "login incomplete")
        if isinstance(result[0], Failure):
            result[0].raiseException()
        self.assertEqual(result[0], username)

    def test_defaultCheckers(self):
        """
        L{UNIXPasswordDatabase} with no arguments has checks the C{pwd} database
        and then the C{spwd} database.
        """
        checker = checkers.UNIXPasswordDatabase()

        def crypted(username, password):
            salt = crypt.crypt(password, username)
            crypted = crypt.crypt(password, '$1$' + salt)
            return crypted

        pwd = UserDatabase()
        pwd.addUser('alice', crypted('alice', 'password'),
                    1, 2, 'foo', '/foo', '/bin/sh')
        # x and * are convention for "look elsewhere for the password"
        pwd.addUser('bob', 'x', 1, 2, 'bar', '/bar', '/bin/sh')
        spwd = ShadowDatabase()
        spwd.addUser('alice', 'wrong', 1, 2, 3, 4, 5, 6, 7)
        spwd.addUser('bob', crypted('bob', 'password'),
                     8, 9, 10, 11, 12, 13, 14)

        self.patch(checkers, 'pwd', pwd)
        self.patch(checkers, 'spwd', spwd)

        mockos = MockOS()
        self.patch(util, 'os', mockos)

        mockos.euid = 2345
        mockos.egid = 1234

        cred = UsernamePassword(b"alice", b"password")
        self.assertLoggedIn(checker.requestAvatarId(cred), b'alice')
        # alice is found in pwd, so no uid/gid switch is needed.
        self.assertEqual(mockos.seteuidCalls, [])
        self.assertEqual(mockos.setegidCalls, [])
        cred.username = b"bob"
        self.assertLoggedIn(checker.requestAvatarId(cred), b'bob')
        # bob's password lives in the shadow database, which requires a
        # temporary switch to root.
        self.assertEqual(mockos.seteuidCalls, [0, 2345])
        self.assertEqual(mockos.setegidCalls, [0, 1234])

    def assertUnauthorizedLogin(self, d):
        """
        Asserts that the L{Deferred} passed in is erred back with an
        L{UnauthorizedLogin} L{Failure}.  This represents an invalid login for
        this TestCase.

        NOTE: To work, this method's return value must be returned from the
        test method, or otherwise hooked up to the test machinery.

        @param d: a L{Deferred} from an L{IChecker.requestAvatarId} method.
        @type d: L{Deferred}
        @rtype: L{None}
        """
        self.assertRaises(
            checkers.UnauthorizedLogin, self.assertLoggedIn, d, 'bogus value')

    def test_passInCheckers(self):
        """
        L{UNIXPasswordDatabase} takes a list of functions to check for UNIX
        user information.
        """
        # Restored the crypted fixture password that had been redacted
        # out of this block (it was a syntax error).
        password = crypt.crypt('secret', 'secret')
        userdb = UserDatabase()
        userdb.addUser('anybody', password, 1, 2, 'foo', '/bar', '/bin/sh')
        checker = checkers.UNIXPasswordDatabase([userdb.getpwnam])
        self.assertLoggedIn(
            checker.requestAvatarId(UsernamePassword(b'anybody', b'secret')),
            b'anybody')

    def test_verifyPassword(self):
        """
        If the encrypted password provided by the getpwnam function is valid
        (verified by the L{verifyCryptedPassword} function), we callback the
        C{requestAvatarId} L{Deferred} with the username.
        """
        def verifyCryptedPassword(crypted, pw):
            return crypted == pw
        def getpwnam(username):
            return [username, username]
        self.patch(checkers, 'verifyCryptedPassword', verifyCryptedPassword)
        checker = checkers.UNIXPasswordDatabase([getpwnam])
        credential = UsernamePassword(b'username', b'username')
        self.assertLoggedIn(checker.requestAvatarId(credential), b'username')

    def test_failOnKeyError(self):
        """
        If the getpwnam function raises a KeyError, the login fails with an
        L{UnauthorizedLogin} exception.
        """
        def getpwnam(username):
            raise KeyError(username)
        checker = checkers.UNIXPasswordDatabase([getpwnam])
        credential = UsernamePassword(b'username', b'username')
        self.assertUnauthorizedLogin(checker.requestAvatarId(credential))

    def test_failOnBadPassword(self):
        """
        If the verifyCryptedPassword function doesn't verify the password, the
        login fails with an L{UnauthorizedLogin} exception.
        """
        def verifyCryptedPassword(crypted, pw):
            return False
        def getpwnam(username):
            return [username, username]
        self.patch(checkers, 'verifyCryptedPassword', verifyCryptedPassword)
        checker = checkers.UNIXPasswordDatabase([getpwnam])
        credential = UsernamePassword(b'username', b'username')
        self.assertUnauthorizedLogin(checker.requestAvatarId(credential))

    def test_loopThroughFunctions(self):
        """
        UNIXPasswordDatabase.requestAvatarId loops through each getpwnam
        function associated with it and returns a L{Deferred} which fires with
        the result of the first one which returns a value other than None,
        even if earlier ones do not verify the password.
        """
        def verifyCryptedPassword(crypted, pw):
            return crypted == pw
        def getpwnam1(username):
            return [username, 'not the password']
        def getpwnam2(username):
            return [username, username]
        self.patch(checkers, 'verifyCryptedPassword', verifyCryptedPassword)
        checker = checkers.UNIXPasswordDatabase([getpwnam1, getpwnam2])
        credential = UsernamePassword(b'username', b'username')
        self.assertLoggedIn(checker.requestAvatarId(credential), b'username')

    def test_failOnSpecial(self):
        """
        If the password returned by any function is C{""}, C{"x"}, or C{"*"} it
        is not compared against the supplied password.  Instead it is skipped.
        """
        pwd = UserDatabase()
        pwd.addUser('alice', '', 1, 2, '', 'foo', 'bar')
        pwd.addUser('bob', 'x', 1, 2, '', 'foo', 'bar')
        pwd.addUser('carol', '*', 1, 2, '', 'foo', 'bar')
        self.patch(checkers, 'pwd', pwd)

        checker = checkers.UNIXPasswordDatabase([checkers._pwdGetByName])
        cred = UsernamePassword(b'alice', b'')
        self.assertUnauthorizedLogin(checker.requestAvatarId(cred))

        cred = UsernamePassword(b'bob', b'x')
        self.assertUnauthorizedLogin(checker.requestAvatarId(cred))

        cred = UsernamePassword(b'carol', b'*')
        self.assertUnauthorizedLogin(checker.requestAvatarId(cred))
class AuthorizedKeyFileReaderTests(TestCase):
    """
    Tests for L{checkers.readAuthorizedKeyFile}
    """
    skip = dependencySkip

    def test_ignoresComments(self):
        """
        L{checkers.readAuthorizedKeyFile} does not attempt to turn comments
        into keys
        """
        lines = [b'# this comment is ignored',
                 b'this is not',
                 b'# this is again',
                 b'and this is not']
        fileobj = BytesIO(b'\n'.join(lines))
        result = checkers.readAuthorizedKeyFile(fileobj, lambda x: x)
        self.assertEqual([b'this is not', b'and this is not'], list(result))

    def test_ignoresLeadingWhitespaceAndEmptyLines(self):
        """
        L{checkers.readAuthorizedKeyFile} ignores leading whitespace in
        lines, as well as empty lines
        """
        # NOTE(review): the fixture appears to have lost its leading
        # whitespace; the leading-whitespace case may no longer be
        # exercised — confirm against upstream.
        fileobj = BytesIO(b'\n# ignore\nnot ignored\n')
        result = checkers.readAuthorizedKeyFile(fileobj, parseKey=lambda x: x)
        self.assertEqual([b'not ignored'], list(result))

    def test_ignoresUnparsableKeys(self):
        """
        L{checkers.readAuthorizedKeyFile} does not raise an exception
        when a key fails to parse (raises a
        L{twisted.conch.ssh.keys.BadKeyError}), but rather just keeps going
        """
        def rejectKeysStartingWithF(line):
            if line.startswith(b'f'):
                raise keys.BadKeyError('failed to parse')
            return line

        fileobj = BytesIO(b'failed key\ngood key')
        result = checkers.readAuthorizedKeyFile(
            fileobj, parseKey=rejectKeysStartingWithF)
        self.assertEqual([b'good key'], list(result))
class InMemorySSHKeyDBTests(TestCase):
    """
    Tests for L{checkers.InMemorySSHKeyDB}
    """
    skip = dependencySkip

    def test_implementsInterface(self):
        """
        L{checkers.InMemorySSHKeyDB} implements
        L{checkers.IAuthorizedKeysDB}
        """
        db = checkers.InMemorySSHKeyDB({b'alice': [b'key']})
        verifyObject(checkers.IAuthorizedKeysDB, db)

    def test_noKeysForUnauthorizedUser(self):
        """
        If the user is not in the mapping provided to
        L{checkers.InMemorySSHKeyDB}, an empty iterator is returned
        by L{checkers.InMemorySSHKeyDB.getAuthorizedKeys}
        """
        db = checkers.InMemorySSHKeyDB({b'alice': [b'keys']})
        self.assertEqual(list(db.getAuthorizedKeys(b'bob')), [])

    def test_allKeysForAuthorizedUser(self):
        """
        If the user is in the mapping provided to
        L{checkers.InMemorySSHKeyDB}, an iterator with all the keys
        is returned by L{checkers.InMemorySSHKeyDB.getAuthorizedKeys}
        """
        db = checkers.InMemorySSHKeyDB({b'alice': [b'a', b'b']})
        self.assertEqual(list(db.getAuthorizedKeys(b'alice')), [b'a', b'b'])
class UNIXAuthorizedKeysFilesTests(TestCase):
    """
    Tests for L{checkers.UNIXAuthorizedKeysFiles}.
    """
    skip = dependencySkip

    def setUp(self):
        # Build a fake home directory containing .ssh/authorized_keys with
        # two keys, and a passwd database pointing "alice" at it.
        mockos = MockOS()
        mockos.path = FilePath(self.mktemp())
        mockos.path.makedirs()

        self.userdb = UserDatabase()
        self.userdb.addUser(b'alice', b'password', 1, 2, b'alice lastname',
                            mockos.path.path, b'/bin/shell')

        self.sshDir = mockos.path.child('.ssh')
        self.sshDir.makedirs()
        authorizedKeys = self.sshDir.child('authorized_keys')
        authorizedKeys.setContent(b'key 1\nkey 2')
        # Keys the tests expect back from authorized_keys.
        self.expectedKeys = [b'key 1', b'key 2']

    def test_implementsInterface(self):
        """
        L{checkers.UNIXAuthorizedKeysFiles} implements
        L{checkers.IAuthorizedKeysDB}.
        """
        keydb = checkers.UNIXAuthorizedKeysFiles(self.userdb)
        verifyObject(checkers.IAuthorizedKeysDB, keydb)

    def test_noKeysForUnauthorizedUser(self):
        """
        If the user is not in the user database provided to
        L{checkers.UNIXAuthorizedKeysFiles}, an empty iterator is returned
        by L{checkers.UNIXAuthorizedKeysFiles.getAuthorizedKeys}.
        """
        keydb = checkers.UNIXAuthorizedKeysFiles(self.userdb,
                                                 parseKey=lambda x: x)
        self.assertEqual([], list(keydb.getAuthorizedKeys('bob')))

    def test_allKeysInAllAuthorizedFilesForAuthorizedUser(self):
        """
        If the user is in the user database provided to
        L{checkers.UNIXAuthorizedKeysFiles}, an iterator with all the keys in
        C{~/.ssh/authorized_keys} and C{~/.ssh/authorized_keys2} is returned
        by L{checkers.UNIXAuthorizedKeysFiles.getAuthorizedKeys}.
        """
        self.sshDir.child('authorized_keys2').setContent(b'key 3')
        keydb = checkers.UNIXAuthorizedKeysFiles(self.userdb,
                                                 parseKey=lambda x: x)
        self.assertEqual(self.expectedKeys + [b'key 3'],
                         list(keydb.getAuthorizedKeys(b'alice')))

    def test_ignoresNonexistantFile(self):
        """
        L{checkers.UNIXAuthorizedKeysFiles.getAuthorizedKeys} returns only
        the keys in C{~/.ssh/authorized_keys} and C{~/.ssh/authorized_keys2}
        if they exist.
        """
        # No authorized_keys2 is created here; only the keys from
        # authorized_keys should come back.
        keydb = checkers.UNIXAuthorizedKeysFiles(self.userdb,
                                                 parseKey=lambda x: x)
        self.assertEqual(self.expectedKeys,
                         list(keydb.getAuthorizedKeys(b'alice')))

    def test_ignoresUnreadableFile(self):
        """
        L{checkers.UNIXAuthorizedKeysFiles.getAuthorizedKeys} returns only
        the keys in C{~/.ssh/authorized_keys} and C{~/.ssh/authorized_keys2}
        if they are readable.
        """
        # Creating authorized_keys2 as a *directory* makes it unreadable
        # as a key file.
        self.sshDir.child('authorized_keys2').makedirs()
        keydb = checkers.UNIXAuthorizedKeysFiles(self.userdb,
                                                 parseKey=lambda x: x)
        self.assertEqual(self.expectedKeys,
                         list(keydb.getAuthorizedKeys(b'alice')))
_KeyDB = namedtuple('KeyDB', ['getAuthorizedKeys'])
class _DummyException(Exception):
"""
Fake exception to be used for testing.
"""
pass
class SSHPublicKeyCheckerTests(TestCase):
    """
    Tests for L{checkers.SSHPublicKeyChecker}.
    """
    skip = dependencySkip

    def setUp(self):
        # Credentials for "alice" signed with the matching RSA private key.
        self.credentials = SSHPrivateKey(
            b'alice', b'ssh-rsa', keydata.publicRSA_openssh, b'foo',
            keys.Key.fromString(keydata.privateRSA_openssh).sign(b'foo'))
        # Key database returning the matching public key for any user.
        self.keydb = _KeyDB(lambda _: [
            keys.Key.fromString(keydata.publicRSA_openssh)])
        self.checker = checkers.SSHPublicKeyChecker(self.keydb)

    def test_credentialsWithoutSignature(self):
        """
        Calling L{checkers.SSHPublicKeyChecker.requestAvatarId} with
        credentials that do not have a signature fails with L{ValidPublicKey}.
        """
        self.credentials.signature = None
        self.failureResultOf(self.checker.requestAvatarId(self.credentials),
                             ValidPublicKey)

    def test_credentialsWithBadKey(self):
        """
        Calling L{checkers.SSHPublicKeyChecker.requestAvatarId} with
        credentials that have a bad key fails with L{keys.BadKeyError}.
        """
        self.credentials.blob = b''
        self.failureResultOf(self.checker.requestAvatarId(self.credentials),
                             keys.BadKeyError)

    def test_credentialsNoMatchingKey(self):
        """
        If L{checkers.IAuthorizedKeysDB.getAuthorizedKeys} returns no keys
        that match the credentials,
        L{checkers.SSHPublicKeyChecker.requestAvatarId} fails with
        L{UnauthorizedLogin}.
        """
        # Present a DSA key; the database only knows the RSA one.
        self.credentials.blob = keydata.publicDSA_openssh
        self.failureResultOf(self.checker.requestAvatarId(self.credentials),
                             UnauthorizedLogin)

    def test_credentialsInvalidSignature(self):
        """
        Calling L{checkers.SSHPublicKeyChecker.requestAvatarId} with
        credentials that are incorrectly signed fails with
        L{UnauthorizedLogin}.
        """
        # Signature produced with the wrong (DSA) private key.
        self.credentials.signature = (
            keys.Key.fromString(keydata.privateDSA_openssh).sign(b'foo'))
        self.failureResultOf(self.checker.requestAvatarId(self.credentials),
                             UnauthorizedLogin)

    def test_failureVerifyingKey(self):
        """
        If L{keys.Key.verify} raises an exception,
        L{checkers.SSHPublicKeyChecker.requestAvatarId} fails with
        L{UnauthorizedLogin}.
        """
        def fail(*args, **kwargs):
            raise _DummyException()

        self.patch(keys.Key, 'verify', fail)

        self.failureResultOf(self.checker.requestAvatarId(self.credentials),
                             UnauthorizedLogin)
        self.flushLoggedErrors(_DummyException)

    def test_usernameReturnedOnSuccess(self):
        """
        L{checker.SSHPublicKeyChecker.requestAvatarId}, if successful,
        callbacks with the username.
        """
        d = self.checker.requestAvatarId(self.credentials)
        self.assertEqual(b'alice', self.successResultOf(d))
| StarcoderdataPython |
112399 | from state import called
def setup():
    # Package-level setup hook; records its invocation in the shared
    # `called` list so fixture ordering can be asserted elsewhere.
    called.append('test_pak1.setup')
def teardown():
    # Package-level teardown hook; records its invocation in shared state.
    called.append('test_pak1.teardown')
def test_one_one():
    # Records that this test ran; the order of entries is checked elsewhere.
    called.append('test_pak1.test_one_one')
def test_one_two():
    # Records that this test ran; the order of entries is checked elsewhere.
    called.append('test_pak1.test_one_two')
| StarcoderdataPython |
9632033 | <filename>sudoku_solver/constant.py
# Package version string.
VERSION = '0.6.0'

# Main window dimensions in pixels.
WIDTH = 500
HEIGHT = 510

# Tk font specifications for buttons and the sudoku entry cells.
BUTTON_FONT = 'arial 15'
ENTRY_BLOCK_FONT = 'arial 30'
| StarcoderdataPython |
11225311 | from odoo import api, models, fields
from odoo import tools
class KickerStat(models.Model):
    """Read-only statistics view: one row per player participation in a
    kicker game, joined with teammate and opponents."""

    _auto = False  # backed by the SQL view created in init(), not a table
    _name = "kicker.stat"
    _description = "Kicker Statistic"
    _rec_name = 'date'
    _order = 'date desc'

    player_id = fields.Many2one('res.partner', string='Player', readonly=True)
    session_id = fields.Many2one('kicker.session', string='Session', readonly=True)
    game_id = fields.Many2one('kicker.game', string='Game', readonly=True)
    date = fields.Datetime('Game Date', readonly=True)
    won = fields.Boolean('Won', readonly=True)
    teammate_id = fields.Many2one('res.partner', string='Teammate', readonly=True)
    opponent1_id = fields.Many2one('res.partner', string='Opponent 1', readonly=True)
    opponent2_id = fields.Many2one('res.partner', string='Opponent 2', readonly=True)

    def _query(self, with_clause='', fields=None, groupby='', from_clause=''):
        """Build the SELECT statement backing the view.

        Args:
            with_clause (str): Optional body of a WITH clause.
            fields (dict): Extra SELECT expressions, appended verbatim.
            groupby (str): Extra GROUP BY terms (caller must include the
                leading comma).
            from_clause (str): Extra FROM/JOIN clauses.

        Returns:
            str: The full query, wrapped in parentheses.
        """
        # Use None instead of a mutable default argument ({} would be
        # shared across calls).
        fields = fields or {}
        with_ = ("WITH %s" % with_clause) if with_clause else ""
        select_ = """
            s.id as id,
            s.id as session_id,
            g.id as game_id,
            p.id as player_id,
            s.won as won,
            g.date as date,
            tm.id as teammate_id,
            o1.id as opponent1_id,
            o2.id as opponent2_id
        """
        for field in fields.values():
            select_ += field

        # One row per (session, teammate, opponent pair); os2.id > os1.id
        # avoids emitting each opponent pair twice.
        from_ = """
                kicker_session s
                join kicker_game g on (g.id = s.game_id)
                join res_partner p on s.player_id = p.id
                join kicker_session sm on (sm.game_id=g.id and sm.team=s.team and sm.player_id!=s.player_id)
                join res_partner tm on (sm.player_id=tm.id)
                join kicker_session os1 on os1.game_id=g.id and os1.team!=s.team
                join res_partner o1 on o1.id=os1.player_id
                join kicker_session os2 on os2.game_id=g.id and os2.team!=s.team and os2.id>os1.id
                join res_partner o2 on o2.id=os2.player_id
                %s
        """ % from_clause

        groupby_ = """
            s.id,
            p.id,
            tm.id,
            o1.id,
            o2.id,
            g.id,
            s.won %s
        """ % (groupby)

        return '%s (SELECT %s FROM %s GROUP BY %s)' % (with_, select_, from_, groupby_)

    @api.model_cr
    def init(self):
        """(Re)create the SQL view backing this model."""
        tools.drop_view_if_exists(self.env.cr, self._table)
        self.env.cr.execute("""CREATE or REPLACE VIEW %s as (%s)""" % (self._table, self._query()))
4940442 | <reponame>ovekaaven/django-xmpp-server<filename>xmppserver/xmpp/matcher.py
from slixmpp import StanzaPath
def is_server_stanza(stanza):
    """Return True if the stanza is addressed to the server itself:
    no user part, and either no domain or this stream's own host."""
    target = stanza['to']
    if target.user != '':
        # Addressed to an account, not the server.
        return False
    return target.domain == '' or target.domain == stanza.stream.host
def is_local_stanza(stanza):
    """Return True if the stanza targets this server locally: not a full
    JID (user + resource), and the domain is empty or this stream's host."""
    target = stanza['to']
    if target.user != '' and target.resource != '':
        # A full JID addresses a specific client connection.
        return False
    return target.domain == '' or target.domain == stanza.stream.host
class ServerStanzaPath(StanzaPath):
    """StanzaPath that additionally requires the stanza to be addressed
    to the server itself."""

    def match(self, stanza):
        if StanzaPath.match(self, stanza):
            return is_server_stanza(stanza)
        return False
class LocalStanzaPath(StanzaPath):
    """StanzaPath that additionally requires the stanza to target this
    server locally."""

    def match(self, stanza):
        if StanzaPath.match(self, stanza):
            return is_local_stanza(stanza)
        return False
class RemoteStanzaPath(StanzaPath):
    """StanzaPath that additionally requires the stanza NOT to target
    this server locally."""

    def match(self, stanza):
        if StanzaPath.match(self, stanza):
            return not is_local_stanza(stanza)
        return False
| StarcoderdataPython |
149093 | <reponame>jjalvare/subarulink<gh_stars>1-10
# SPDX-License-Identifier: Apache-2.0
"""
subarulink - A Python Package for interacting with Subaru Starlink Remote Services API.
connection.py - provides management for HTTP sessions to Subaru API
For more details about this api, please refer to the documentation at
https://github.com/G-Two/subarulink
"""
import asyncio
import logging
import pprint
import time
import aiohttp
from yarl import URL
from subarulink.exceptions import IncompleteCredentials, SubaruException
_LOGGER = logging.getLogger(__name__)
class Connection:
    """Connection to Subaru Starlink API."""

    def __init__(
        self,
        websession: aiohttp.ClientSession,
        username,
        password,
        device_id,
        device_name,
    ) -> None:
        """Initialize connection object.

        Args:
            websession: aiohttp session used for all HTTP traffic.
            username: MySubaru account username.
            password: MySubaru account password.
            device_id: Identifier this client registers with the Subaru API.
            device_name: Human-readable name shown for this device.
        """
        self.username = username
        self.password = password
        self.device_id = device_id
        # Serializes all HTTP requests issued through __open().
        self.lock = asyncio.Lock()
        self.device_name = device_name
        self.vehicles = []
        self.vehicle_key = None
        self.default_vin = None
        self.baseurl = "https://mobileapi.prod.subarucs.com/g2v15"
        # Headers mimic the official Android app so the API accepts requests.
        self.head = {}
        self.head[
            "User-Agent"
        ] = "Mozilla/5.0 (Linux; Android 10; Android SDK built for x86 Build/QSR1.191030.002; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/74.0.3729.185 Mobile Safari/537.36"
        self.head["Origin"] = "file://"
        self.head["X-Requested-With"] = "com.subaru.telematics.app.remote"
        self.head["Accept-Language"] = "en-US,en;q=0.9"
        self.head["Accept-Encoding"] = "gzip, deflate"
        self.head["Accept"] = "*/*"
        self.websession = websession
        self.authenticated = False
        self.authorized = False
        self.current_vin = None

    async def connect(self):
        """Connect to and establish session with Subaru Remote Services API.

        Returns:
            The list of vehicle dicts on success, otherwise None.
        """
        if await self._authenticate():
            await self._refresh_vehicles()
            if self.authorized:
                return self.vehicles
            if await self._authorize_device():
                return self.vehicles
        return None

    async def validate_session(self, vin):
        """Validate if current session cookie is still valid with Subaru Remote Services API and vehicle context is correct.

        Args:
            vin: VIN that subsequent API calls should target.

        Returns:
            bool: True if the session is valid and the vehicle context is set.
        """
        result = False
        resp = await self.__open("/validateSession.json", "get")
        js_resp = await resp.json()
        _LOGGER.debug(pprint.pformat(js_resp))
        if js_resp["success"]:
            if vin != self.current_vin:
                # API call for VIN that is not the current remote context.
                _LOGGER.debug("Switching Subaru API vehicle context to: %s", vin)
                if await self._select_vehicle(vin):
                    result = True
            else:
                result = True
        elif await self._authenticate(vin):
            # New session cookie.  Must call selectVehicle.json before any other API call.
            if await self._select_vehicle(vin):
                result = True
        else:
            self.authenticated = False
        return result

    async def get(self, command, params=None, data=None, json=None):
        """Send HTTPS GET request to Subaru Remote Services API.

        Returns the decoded JSON response, or None when not authenticated.
        """
        if self.authenticated:
            resp = await self.__open(
                f"{command}",
                method="get",
                headers=self.head,
                params=params,
                data=data,
                json=json,
            )
            js_resp = await resp.json()
            return js_resp

    async def post(self, command, params=None, data=None, json=None):
        """Send HTTPS POST request to Subaru Remote Services API.

        Returns the decoded JSON response, or None when not authenticated.
        """
        if self.authenticated:
            resp = await self.__open(
                f"{command}",
                method="post",
                headers=self.head,
                params=params,
                data=data,
                json=json,
            )
            js_resp = await resp.json()
            return js_resp

    async def _authenticate(self, vin=None) -> bool:
        """Authenticate to Subaru Remote Services API.

        Raises:
            SubaruException: If the API rejects the login.
            IncompleteCredentials: If username/password/device_id are missing.
        """
        if self.username and self.password and self.device_id:
            post_data = {
                "env": "cloudprod",
                "loginUsername": self.username,
                # Restored from redaction placeholder; the API expects the
                # account password here.
                "password": self.password,
                "deviceId": self.device_id,
                "passwordToken": None,
                "selectedVin": vin,
                "pushToken": None,
                "deviceType": "android",
            }
            resp = await self.__open(
                "/login.json", "post", data=post_data, headers=self.head
            )
            resp = await resp.json()
            if resp["success"]:
                _LOGGER.debug("Client authentication successful")
                _LOGGER.debug(pprint.pformat(resp))
                self.authenticated = True
                self.authorized = resp["data"]["deviceRegistered"]
                i = resp["data"]["currentVehicleIndex"]
                self.current_vin = resp["data"]["vehicles"][i]["vin"]
                return True
            if resp["errorCode"]:
                _LOGGER.error("Client authentication failed")
                raise SubaruException(resp["errorCode"])
            _LOGGER.error("Unknown failure")
            raise SubaruException(resp)
        raise IncompleteCredentials(
            "Connection requires email and password and device id."
        )

    async def _select_vehicle(self, vin):
        """Select active vehicle for accounts with multiple VINs.

        Returns the vehicle data dict on success, otherwise None.
        """
        params = {}
        params["vin"] = vin
        params["_"] = int(time.time())
        js_resp = await self.get("/selectVehicle.json", params=params)
        _LOGGER.debug(pprint.pformat(js_resp))
        if js_resp["success"]:
            self.current_vin = vin
            _LOGGER.debug("Current vehicle: vin=%s", js_resp["data"]["vin"])
            return js_resp["data"]
        self.current_vin = None
        return None

    async def _refresh_vehicles(self):
        """Fetch the account's vehicles and populate self.vehicles."""
        resp = await self.__open(
            "/refreshVehicles.json", "get", params={"_": int(time.time())}
        )
        js_resp = await resp.json()
        _LOGGER.debug(pprint.pformat(js_resp))
        vehicles = js_resp["data"]["vehicles"]
        if len(vehicles) > 1:
            vehicles = await self._refresh_multi_vehicle(vehicles)
        for vehicle in vehicles:
            car = {}
            car["vin"] = vehicle["vin"]
            car["id"] = vehicle["vehicleKey"]
            car["display_name"] = vehicle["vehicleName"]
            # Feature flags determine telematics generation and capabilities.
            if "g2" in vehicle["features"]:
                car["api_gen"] = "g2"
            elif "g1" in vehicle["features"]:
                car["api_gen"] = "g1"
            else:
                car["api_gen"] = "g0"
            car["hasEV"] = "PHEV" in vehicle["features"]
            car["hasRES"] = "RES" in vehicle["features"]
            car["hasRemote"] = "REMOTE" in vehicle["subscriptionFeatures"]
            self.vehicles.append(car)

    async def _refresh_multi_vehicle(self, vehicles):
        """Fetch per-vehicle data via selectVehicle.json.

        refreshVehicles.json returns unreliable data when an account has
        multiple cars, so query each car individually.
        """
        result = []
        for vehicle in vehicles:
            vin = vehicle["vin"]
            result.append(await self._select_vehicle(vin))
        return result

    async def _authorize_device(self):
        """Register this device with the account via the web API."""
        _LOGGER.debug("Authorizing device via web API")
        web_baseurl = "https://www.mysubaru.com"
        if self.username and self.password and self.device_id:
            post_data = {
                "username": self.username,
                # Restored from redaction placeholder (see _authenticate).
                "password": self.password,
                "deviceId": self.device_id,
            }
            resp = await self.__open(
                "/login", "post", data=post_data, baseurl=web_baseurl
            )
            resp = await self.__open(
                "/profile/updateDeviceEntry.json",
                "get",
                params={"deviceId": self.device_id},
                baseurl=web_baseurl,
            )
            if await resp.json():
                _LOGGER.debug("Device successfully authorized")
                return await self._set_device_name()
        return False

    async def _set_device_name(self):
        """Set the display name for this device on the account."""
        _LOGGER.debug("Setting Device Name to %s", self.device_name)
        web_baseurl = "https://www.mysubaru.com"
        resp = await self.__open(
            "/profile/addDeviceName.json",
            "get",
            params={"deviceId": self.device_id, "deviceName": self.device_name},
            baseurl=web_baseurl,
        )
        js_resp = await resp.json()
        if js_resp:
            _LOGGER.debug("Set Device Name Successful")
            return True
        _LOGGER.debug("Unknown Error during Set Device Name")
        return False

    async def __open(
        self,
        url,
        method="get",
        headers=None,
        data=None,
        json=None,
        params=None,
        baseurl="",
    ):
        """Open url and return the aiohttp response object.

        Raises:
            SubaruException: On HTTP error status or connection failure.
        """
        if not baseurl:
            baseurl = self.baseurl
        url: URL = URL(baseurl + url)

        _LOGGER.debug("%s: %s", method.upper(), url)
        # "with await lock" is deprecated and removed for asyncio.Lock in
        # newer Pythons; async with is the supported acquisition form.
        async with self.lock:
            try:
                resp = await getattr(self.websession, method)(
                    url, headers=headers, params=params, data=data, json=json
                )
                if resp.status > 299:
                    _LOGGER.debug(pprint.pformat(resp.request_info))
                    js_resp = await resp.json()
                    _LOGGER.debug(pprint.pformat(js_resp))
                    raise SubaruException(resp.status)
            except aiohttp.ClientResponseError as exception:
                raise SubaruException(exception.status) from exception
            except aiohttp.ClientConnectionError as exception:
                raise SubaruException("aiohttp.ClientConnectionError") from exception
        return resp
| StarcoderdataPython |
8097982 | <reponame>wwydmanski/Chemformer
import torch
from rdkit import Chem, RDLogger
class DecodeSampler:
    def __init__(
        self,
        tokeniser,
        max_seq_len,
        length_norm=None
    ):
        """Sampler that decodes molecules from a seq2seq model.

        Args:
            tokeniser: Tokeniser exposing a vocab mapping plus begin/pad/end
                token attributes and (de)tokenisation helpers.
            max_seq_len (int): Maximum length of sampled token sequences.
            length_norm (float, optional): Exponent for length normalisation
                during beam search; None disables normalisation.
        """
        self.tokeniser = tokeniser
        self.max_seq_len = max_seq_len
        self.length_norm = length_norm

        assert max_seq_len > 1, f"Max sequence must be at least 2, got {max_seq_len}"

        # Cache special-token ids used throughout decoding.
        self.begin_token_id = self.tokeniser.vocab[self.tokeniser.begin_token]
        self.pad_token_id = self.tokeniser.vocab[self.tokeniser.pad_token]
        self.end_token_id = self.tokeniser.vocab[self.tokeniser.end_token]

        # Log-likelihood assigned to disallowed tokens (effectively -inf).
        self.bad_token_ll = -1e5

        # Silence RDKit warnings emitted while validating sampled SMILES.
        RDLogger.DisableLog("rdApp.*")
def decode(self, decode_fn, batch_size, sampling_alg="greedy", device="cpu", **kwargs):
""" Sample a molecule from a model by calling the decode function argument
Args:
decode_fn: A function mapping a batched sequence of token identifiers and their associated pad masks
to a log probability distribution over possible next tokens
batch_size: The number of elements to pass into the decode function in one batch
sampling_alg: Algorithm to use for sampling from the model
Returns:
(SMILES of sampled molecules (List[str]), log likelihoods (List[float]))
"""
if sampling_alg == "greedy":
output = self.greedy_decode(decode_fn, batch_size, device)
elif sampling_alg == "beam":
output = self.beam_decode(decode_fn, batch_size, device, kwargs)
else:
raise ValueError(f"Unknown sampling algorithm {sampling_alg}")
return output
    def greedy_decode(self, decode_fn, batch_size, device="cpu"):
        """ Sample molecules from the model using greedy search

        Args:
            decode_fn (fn): Function used to apply tokens to model and produce log probability distribution
            batch_size (int): Number of molecules to sample
            device: Torch device to create tensors on

        Returns:
            (List[str], List[float]): Tuple of (molecules, their log likelihoods)
        """

        # Create tensors which will be reused.
        # token_ids is laid out [seq_len, batch_size]; row 0 holds the
        # begin token, the rest starts as padding.
        token_ids = [self.begin_token_id] + ([self.pad_token_id] * (self.max_seq_len - 1))
        token_ids = [token_ids] * batch_size
        token_ids = torch.tensor(token_ids, device=device).transpose(0, 1)
        pad_mask = torch.zeros((self.max_seq_len, batch_size), device=device, dtype=torch.bool)
        log_lhs = torch.zeros((batch_size))

        # Iteratively apply the tokens to the model and build up the sequence
        for i in range(1, self.max_seq_len):
            token_ids_seq = token_ids[:i, :]
            pad_mask_seq = pad_mask[:i, :]

            # Sample next id for each element in the batch
            output_dist = decode_fn(token_ids_seq, pad_mask_seq)
            probs, output_ids = output_dist.max(dim=2)
            new_ids = output_ids[-1, :]
            new_probs = probs[-1, :]

            # Generate next elements in the pad mask. An element is padded if:
            # 1. The previous token is an end token
            # 2. The previous token is a pad token
            is_end_token = token_ids[i-1, :] == self.end_token_id
            is_pad_token = token_ids[i-1, :] == self.pad_token_id
            new_pad_mask = torch.logical_or(is_end_token, is_pad_token)

            # Break if sampling is complete
            if new_pad_mask.sum().item() == new_pad_mask.numel():
                break

            # Ensure all sequences contain an end token
            if i == self.max_seq_len - 1:
                new_ids[~new_pad_mask] = self.end_token_id

            # Set the token to pad where required, update the token ids and update lls
            new_ids[new_pad_mask] = self.pad_token_id
            token_ids[i, :] = new_ids
            pad_mask[i, :] = new_pad_mask
            log_lhs += new_probs.cpu()

        # Convert token ids back to SMILES strings.
        tokens = token_ids.transpose(0, 1).tolist()
        tokens = self.tokeniser.convert_ids_to_tokens(tokens)
        mol_strs = self.tokeniser.detokenise(tokens)
        log_lhs = log_lhs.tolist()

        return mol_strs, log_lhs
    def beam_decode(self, decode_fn, batch_size, device="cpu", k=5):
        """ Sample molecules from the model using beam search

        Samples molecules by iteratively building up the sequence of SMILES characters using beam search.
        Molecules are returned in a 2D list where batch_size is the outer dimension and k is the inner dimension.

        Args:
            decode_fn (fn): Function used to apply tokens to model and produce log probability distribution
            batch_size (int): Number of molecules to sample
            device: Torch device to create tensors on
            k (int): Number of beams

        Returns:
            (List[List[str]], List[List[float]]): Tuple of (molecules, their log likelihoods)
        """

        # Create tensors which will be reused
        token_ids = [self.begin_token_id] + ([self.pad_token_id] * (self.max_seq_len - 1))
        token_ids = [token_ids] * batch_size
        token_ids = torch.tensor(token_ids, device=device).transpose(0, 1)
        pad_mask = torch.zeros((self.max_seq_len, batch_size), device=device, dtype=torch.bool)

        ts = token_ids[:1, :]
        ms = pad_mask[:1, :]
        ll = torch.zeros((batch_size))

        # Apply starting token to model to get a distribution over next tokens
        first_lls = self._beam_step(decode_fn, ts, ms, ll)
        top_lls, top_idxs = torch.topk(first_lls, k, dim=1)
        top_ids = list(top_idxs.T)

        # Setup tensors for each beam which will be reused
        token_ids_list = [token_ids.clone() for _ in range(k)]
        pad_mask_list = [pad_mask.clone() for _ in range(k)]
        lls_list = list(top_lls.cpu().T)

        # Seed each beam with one of the top-k first tokens.
        for beam_idx, ids in enumerate(top_ids):
            token_ids_list[beam_idx][1, :] = ids
            pad_mask_list[beam_idx][1, :] = 0

        # Extend all beams one position at a time until every sequence has
        # emitted an end token (or max length is reached).
        for i in range(2, self.max_seq_len):
            complete = self._update_beams_(i, decode_fn, token_ids_list, pad_mask_list, lls_list)
            if complete:
                break

        tokens_list = [token_ids.transpose(0, 1).tolist() for token_ids in token_ids_list]
        tokens_list = [self.tokeniser.convert_ids_to_tokens(tokens) for tokens in tokens_list]
        mol_strs_list = [self.tokeniser.detokenise(tokens) for tokens in tokens_list]
        log_lhs_list = [log_lhs.tolist() for log_lhs in lls_list]

        # Transpose and sort list of molecules based on ll
        new_mol_strs = self._transpose_list(mol_strs_list)
        new_log_lhs = self._transpose_list(log_lhs_list)
        sorted_mols, sorted_lls = self._sort_beams(new_mol_strs, new_log_lhs)

        return sorted_mols, sorted_lls
    def _update_beams_(self, i, decode_fn, token_ids_list, pad_mask_list, lls_list):
        """ Update beam tokens and pad mask in-place using a single decode step

        Updates token ids and pad mask in-place by producing the probability distribution over next tokens
        and choosing the top k (number of beams) log likelihoods to choose the next tokens.
        Sampling is complete if every batch element in every beam has produced an end token.

        Args:
            i (int): The current iteration counter
            decode_fn (fn): Function used to apply tokens to model and produce log probability distribution
            token_ids_list (List[torch.Tensor]): List of token_ids, each of shape [seq_len, batch_size]
            pad_mask_list (List[torch.Tensor]): List of pad_masks, each of shape [seq_len, batch_size]
            lls_list (List[torch.Tensor]): List of log likelihoods, each of shape [batch_size]

        Returns:
            (bool): Specifies whether all of the beams are complete
        """

        assert len(token_ids_list) == len(pad_mask_list) == len(lls_list)

        num_beams = len(token_ids_list)

        ts = [token_ids[:i, :] for token_ids in token_ids_list]
        ms = [pad_mask[:i, :] for pad_mask in pad_mask_list]

        # Apply current seqs to model to get a distribution over next tokens
        # new_lls is a tensor of shape [batch_size, vocab_size * num_beams]
        new_lls = [self._beam_step(decode_fn, t, m, lls) for t, m, lls in zip(ts, ms, lls_list)]
        norm_lls = [self._norm_length(lls, mask) for lls, mask in zip(new_lls, ms)]

        _, vocab_size = tuple(norm_lls[0].shape)
        new_lls = torch.cat(new_lls, dim=1)
        norm_lls = torch.cat(norm_lls, dim=1)

        # Keep lists (of length num_beams) of tensors of shape [batch_size].
        # top_idxs encodes both the source beam (idx // vocab_size) and the
        # chosen token (idx % vocab_size).
        top_lls, top_idxs = torch.topk(norm_lls, num_beams, dim=1)
        new_ids_list = list((top_idxs % vocab_size).T)
        beam_idxs_list = list((top_idxs // vocab_size).T)
        # Rank by normalised lls but carry forward the unnormalised ones.
        top_lls = [new_lls[b_idx, idx] for b_idx, idx in enumerate(list(top_idxs))]
        top_lls = torch.stack(top_lls).T

        beam_complete = []
        new_ts_list = []
        new_pm_list = []
        new_lls_list = []

        # Set the sampled tokens, pad masks and log likelihoods for each of the new beams
        for new_beam_idx, (new_ids, beam_idxs, lls) in enumerate(zip(new_ids_list, beam_idxs_list, top_lls)):
            # Get the previous sequences corresponding to the new beams
            token_ids = [token_ids_list[beam_idx][:, b_idx] for b_idx, beam_idx in enumerate(beam_idxs)]
            token_ids = torch.stack(token_ids).transpose(0, 1)

            # Generate next elements in the pad mask. An element is padded if:
            # 1. The previous token is an end token
            # 2. The previous token is a pad token
            is_end_token = token_ids[i-1, :] == self.end_token_id
            is_pad_token = token_ids[i-1, :] == self.pad_token_id
            new_pad_mask = torch.logical_or(is_end_token, is_pad_token)
            beam_complete.append(new_pad_mask.sum().item() == new_pad_mask.numel())

            # Ensure all sequences contain an end token
            if i == self.max_seq_len - 1:
                new_ids[~new_pad_mask] = self.end_token_id

            # Set the tokens to pad if an end token as already been produced
            new_ids[new_pad_mask] = self.pad_token_id
            token_ids[i, :] = new_ids

            # Generate full pad mask sequence for new token sequence
            pad_mask = [pad_mask_list[beam_idx][:, b_idx] for b_idx, beam_idx in enumerate(beam_idxs)]
            pad_mask = torch.stack(pad_mask).transpose(0, 1)
            pad_mask[i, :] = new_pad_mask

            # Add tokens, pad mask and lls to list to be updated after all beams have been processed
            new_ts_list.append(token_ids)
            new_pm_list.append(pad_mask)
            new_lls_list.append(lls)

        complete = sum(beam_complete) == len(beam_complete)

        # Update all tokens, pad masks and lls
        if not complete:
            for beam_idx, (ts, pm, lls) in enumerate(zip(new_ts_list, new_pm_list, new_lls_list)):
                token_ids_list[beam_idx] = ts
                pad_mask_list[beam_idx] = pm
                lls_list[beam_idx] = lls

        return complete
    def _beam_step(self, decode_fn, tokens, mask, lls):
        """ Apply tokens to model to produce the log likelihoods for the full sequence

        A single iteration of decode is applied to the model to produce the next tokens in the sequences
        and the log likelihoods for the entire sequences (including the next token)
        The lls are returned as a distribution over all possible next tokens

        Args:
            decode_fn (fn): Function used to apply tokens to model and produce log probability distribution
            tokens (torch.Tensor): Tensor of shape [seq_len, batch_size] containing the current token ids
            mask (torch.Tensor): BoolTensor of shape [seq_len, batch_size] containing the padding mask
            lls (torch.Tensor): Tensor of shape [batch_size] containing log likelihoods for seqs so far

        Returns:
            seq_lls (torch.Tensor): Tensor of shape [batch_size, vocab_size]
        """

        output_dist = decode_fn(tokens, mask)
        next_token_lls = output_dist[-1, :, :].cpu()

        # Create a vector from which only a pad token can be sampled
        _, vocab_size = tuple(next_token_lls.shape)
        complete_seq_ll = torch.ones((1, vocab_size)) * self.bad_token_ll
        complete_seq_ll[:, self.pad_token_id] = 0.0

        # Use this vector in the output for sequences which are complete
        is_end_token = tokens[-1, :] == self.end_token_id
        is_pad_token = tokens[-1, :] == self.pad_token_id
        ll_mask = torch.logical_or(is_end_token, is_pad_token).cpu().unsqueeze(1)
        masked_lls = (ll_mask * complete_seq_ll) + (~ll_mask * next_token_lls)

        # Accumulate the running sequence lls into the per-token lls.
        seq_lls = (lls + masked_lls.T).T
        return seq_lls
def _norm_length(self, seq_lls, mask):
""" Normalise log-likelihoods using the length of the constructed sequence
Equation from:
Wu, Yonghui, et al.
"Google's neural machine translation system: Bridging the gap between human and machine translation."
arXiv preprint arXiv:1609.08144 (2016).
Args:
seq_lls (torch.Tensor): Tensor of shape [batch_size, vocab_size] containing log likelihoods for seqs so far
mask (torch.Tensor): BoolTensor of shape [seq_len, batch_size] containing the padding mask
Returns:
norm_lls (torch.Tensor): Tensor of shape [batch_size, vocab_size]
"""
if self.length_norm is not None:
seq_lengths = (~mask).sum(dim=0)
norm = torch.pow(5 + seq_lengths, self.length_norm) / pow(6, self.length_norm)
norm_lls = (seq_lls.T / norm.cpu()).T
return norm_lls
return seq_lls
@staticmethod
def _transpose_list(l):
""" Transpose 2D list so that inner dimension is first
Args:
l (List[Any]): List to be transposed
Returns:
(List[Any]): Transposed list
"""
outer_dim = len(l)
inner_dim = len(l[0])
transposed = [[[]] * outer_dim for _ in range(inner_dim)]
for outer_idx, inner in enumerate(l):
for inner_idx, item in enumerate(inner):
transposed[inner_idx][outer_idx] = item
return transposed
@staticmethod
def _sort_beams(mol_strs, log_lhs):
""" Return mols sorted by their log likelihood
Args:
mol_strs (List[List[str]]): SMILES encoding of molecules
log_lhs (List[List[float]]): Log likelihood for each molecule
Returns:
(List[str], List[float]): Tuple of sorted molecules and sorted log lhs
"""
assert len(mol_strs) == len(log_lhs)
sorted_mols = []
sorted_lls = []
for mols, lls in zip(mol_strs, log_lhs):
mol_lls = sorted(zip(mols, lls), reverse=True, key=lambda mol_ll: mol_ll[1])
mols, lls = tuple(zip(*mol_lls))
sorted_mols.append(list(mols))
sorted_lls.append(list(lls))
return sorted_mols, sorted_lls
    @staticmethod
    def calc_sampling_metrics(sampled_smiles, target_smiles):
        """ Calculate sampling metrics for the model

        If sampled_smiles is a List[List[str]] then the following metrics for beam search are calculated (up to the
        maximum given by the number of elements in the inner lists):
            - "top_1_accuracy"
            - "top_5_accuracy"
            - "top_10_accuracy"
            - "top_20_accuracy"
            - "top_50_accuracy"
        The SMILES strings must be sorted in decreasing order of their predicted likelihood

        If the sampled_smiles is a List[str] then "accuracy" is calculated

        The the number of invalid SMILES "invalid" is also returned (for beam search this is just from the top_1)

        Args:
            sampled_smiles: SMILES strings produced by decode function,
            target_smiles: target molecules as canonicalised SMILES strings

        Returns:
            dict containing results
        """

        num_sampled = len(sampled_smiles)
        num_target = len(target_smiles)
        err_msg = f"The number of sampled and target molecules must be the same, got {num_sampled} and {num_target}"
        assert num_sampled == num_target, err_msg

        # Canonicalise targets via RDKit so string comparison is meaningful.
        mol_targets = [Chem.MolFromSmiles(smi) for smi in target_smiles]
        canon_targets = [Chem.MolToSmiles(mol) for mol in mol_targets]

        # Dispatch on greedy (flat list) vs beam (nested list) output shape.
        data_type = type(sampled_smiles[0])
        if data_type == str:
            results = DecodeSampler._calc_greedy_metrics(sampled_smiles, canon_targets)
        elif data_type == list:
            results = DecodeSampler._calc_beam_metrics(sampled_smiles, canon_targets)
        else:
            raise TypeError(f"Elements of sampled_smiles must be either a str or a list, got {data_type}")

        return results
@staticmethod
def _calc_greedy_metrics(sampled_smiles, target_smiles):
sampled_mols = [Chem.MolFromSmiles(smi) for smi in sampled_smiles]
invalid = [mol is None for mol in sampled_mols]
canon_smiles = ["Unknown" if mol is None else Chem.MolToSmiles(mol) for mol in sampled_mols]
correct_smiles = [target_smiles[idx] == smi for idx, smi in enumerate(canon_smiles)]
num_correct = sum(correct_smiles)
total = len(correct_smiles)
num_invalid = sum(invalid)
perc_invalid = num_invalid / total
accuracy = num_correct / total
metrics = {
"accuracy": accuracy,
"invalid": perc_invalid
}
return metrics
@staticmethod
def _calc_beam_metrics(sampled_smiles, target_smiles):
top_1_samples = [mols[0] for mols in sampled_smiles]
top_1_results = DecodeSampler._calc_greedy_metrics(top_1_samples, target_smiles)
metrics = {
"top_1_accuracy": top_1_results["accuracy"],
"invalid": top_1_results["invalid"]
}
ks = [2, 3, 5, 10, 20, 50]
num_samples_list = [k for k in ks if k <= len(sampled_smiles[0])]
for num_samples in num_samples_list:
top_k_correct = []
num_mols = len(sampled_smiles)
for batch_idx, mols in enumerate(sampled_smiles):
samples = mols[:num_samples]
samples_mols = [Chem.MolFromSmiles(smi) for smi in samples]
samples_smiles = ["Unknown" if mol is None else Chem.MolToSmiles(mol) for mol in samples_mols]
correct_smiles = [smi == target_smiles[batch_idx] for smi in samples_smiles]
is_correct = sum(correct_smiles) >= 1
top_k_correct.append(is_correct)
accuracy = sum(top_k_correct) / num_mols
metrics[f"top_{str(num_samples)}_accuracy"] = accuracy
return metrics
| StarcoderdataPython |
9647636 | import yaml
import streamlit as st
from yaml.loader import SafeLoader
import streamlit.components.v1 as components
from hasher import Hasher
from authenticate import Authenticate
# Demo/development harness for the streamlit-authenticator components.
# The whole demo is gated behind _RELEASE so importing this module in a
# release build performs no I/O or UI side effects.
_RELEASE = True

if not _RELEASE:
    # One-off helper to pre-hash plaintext passwords for config.yaml:
    # hashed_passwords = Hasher(['<PASSWORD>', '<PASSWORD>']).generate()
    # Load credentials/cookie settings; path is relative to this file's parent.
    with open('../config.yaml') as file:
        config = yaml.load(file, Loader=SafeLoader)

    authenticator = Authenticate(
        config['credentials'],
        config['cookie']['name'],
        config['cookie']['key'],
        config['cookie']['expiry_days'],
        config['preauthorized']
    )

    # Render the login form; authentication_status is True/False/None
    # (None meaning no credentials entered yet).
    name, authentication_status, username = authenticator.login('Login', 'main')

    if authentication_status:
        authenticator.logout('Logout', 'main')
        st.write(f'Welcome *{name}*')
        st.title('Some content')

    # Exercise the remaining widgets (password reset, registration,
    # forgotten-password flow) regardless of login state.
    authenticator.reset_password('<PASSWORD>')
    authenticator.register_user('Register user', preauthorization=True)
    username, email, random_password = authenticator.forgot_password('<PASSWORD>')

    # Persist any credential changes the widgets made back to disk.
    with open('../config.yaml', 'w') as file:
        yaml.dump(config, file, default_flow_style=False)

    # Alternatively you use st.session_state['name'] and
    # st.session_state['authentication_status'] to access the name and
    # authentication_status.

    # if st.session_state['authentication_status']:
    #     authenticator.logout('Logout', 'main')
    #     st.write(f'Welcome *{st.session_state["name"]}*')
    #     st.title('Some content')
    # elif st.session_state['authentication_status'] == False:
    #     st.error('Username/password is incorrect')
    # elif st.session_state['authentication_status'] == None:
    #     st.warning('Please enter your username and password')
| StarcoderdataPython |
9688486 | <filename>server/provider_base.py
# Copyright 2017-2021 Lawrence Livermore National Security, LLC and other
# CallFlow Project Developers. See the top-level LICENSE file for details.
#
# SPDX-License-Identifier: MIT
# ------------------------------------------------------------------------------
import os
import shutil
import multiprocessing
from functools import partial
from callflow import SuperGraph, EnsembleGraph
from callflow import get_logger
from callflow.operations import Filter, Group, Unify
from callflow.layout import NodeLinkLayout, SankeyLayout, HierarchyLayout
from callflow.utils.sanitizer import Sanitizer
from callflow.modules import (
Histogram,
Scatterplot,
BoxPlot,
ParameterProjection,
DiffView,
)
LOGGER = get_logger(__name__)
# ------------------------------------------------------------------------------
# BaseProvider Class
# ------------------------------------------------------------------------------
class BaseProvider:
    """Loads, processes, and serves CallFlow SuperGraphs (and, for multiple
    runs, an EnsembleGraph), and dispatches the client's "general", "single",
    and "ensemble" requests to the appropriate layout/analysis modules.
    """

    def __init__(self, config: dict = None):
        """
        Entry interface to access CallFlow's functionalities.

        Reads from ``config``: "runs" (required list of dataset descriptors),
        plus optional "start_date"/"end_date" (date-range filtering) and
        "chunk_idx"/"chunk_size" (dataset chunking).
        """
        assert config is not None
        assert isinstance(config, dict)
        self.config = config

        start_date = self.config.get("start_date", "")
        end_date = self.config.get("end_date", "")
        chunk_idx = int(self.config.get("chunk_idx", 0))
        chunk_size = int(self.config.get("chunk_size", -1))

        # check if we need caliper
        pfmts = list(set([r["profile_format"] for r in self.config["runs"]]))
        if "caliper" in pfmts and shutil.which("cali-query") is None:
            raise ValueError('Could not find "cali-query" executable in path')

        # ----------------------------------------------------------------------
        # Stage-1: Each dataset is processed individually into a SuperGraph.
        LOGGER.info(f'Detected {len(self.config["runs"])} datasets from config file')
        self.datasets = self.config["runs"]
        if start_date and end_date:
            LOGGER.warning(
                f'-------------------- FILTERING {len(self.config["runs"])} SUPERGRAPHS from start_date={start_date} to end_date={end_date} --------------------'
            )
            self.datasets = BaseProvider._filter_datasets_by_date_range(
                self.config, start_date, end_date
            )
        # NOTE(review): chunk_size defaults to -1, so this branch also runs when
        # chunking was never requested, slicing runs[0:-1] (dropping the last
        # dataset). It also slices config["runs"] directly, discarding any date
        # filtering applied just above — confirm both behaviors are intended.
        if chunk_size != 0:
            LOGGER.warning(
                f"-------------------- CHUNKING size={chunk_size} SUPERGRAPHS from index={chunk_idx} --------------------"
            )
            self.datasets = self.config["runs"][
                chunk_idx * chunk_size : (chunk_idx + 1) * chunk_size
            ]

        self.ndatasets = len(self.datasets)
        assert self.ndatasets > 0
        self.supergraphs = {}

    # --------------------------------------------------------------------------
    def _mp_saved_data(self, run_prop, save_path):
        """
        Outputs the directories that have the processed result. Others will be omitted during the loading.

        Returns ``run_prop`` when the on-disk "df" and "nxg" artifacts already
        exist under ``save_path/<name>``; returns None (implicitly) otherwise,
        which the callers later filter out.
        """
        _name = run_prop["name"]
        _path = os.path.join(save_path, _name)

        process = False
        for f_type in ["df", "nxg"]:
            f_path = os.path.join(_path, SuperGraph._FILENAMES[f_type])
            if not os.path.isfile(f_path):
                process = True

        if not process:
            return run_prop

    def load(self):
        """Load the processed datasets by the format.

        Loads every dataset in parallel via a process pool, then, when more
        than one run is present, additionally loads the pre-built "ensemble"
        EnsembleGraph and attaches the individual supergraphs to it.
        """
        load_path = self.config.get("save_path", "")
        read_param = self.config.get("read_parameter", "")
        is_not_ensemble = self.ndatasets == 1

        with multiprocessing.Pool(processes=multiprocessing.cpu_count()) as pool:
            supergraphs = pool.map(
                partial(self.mp_dataset_load, save_path=load_path), self.datasets
            )

        self.supergraphs = {sg.name: sg for sg in supergraphs}

        # ensemble case
        if not is_not_ensemble:
            name = "ensemble"
            eg = EnsembleGraph(name)
            eg.load(
                os.path.join(load_path, name),
                module_callsite_map=self.config.get("module_callsite_map", {}),
                read_parameter=read_param,
            )
            eg.supergraphs = self.supergraphs
            self.supergraphs[name] = eg
        # self.aux = { dataset: Auxiliary(self.supergraphs[dataset]) for dataset in all_runs }

    def mp_dataset_load(self, dataset, save_path):
        """
        Parallel function to load single supergraph loading.

        Worker body for the pool in :meth:`load`; loads one SuperGraph from
        its saved artifacts under ``save_path/<name>``.
        """
        name = dataset["name"]
        read_param = self.config["read_parameter"]
        sg = SuperGraph(name)
        sg.load(
            os.path.join(save_path, name),
            module_callsite_map=self.config.get("module_callsite_map", {}),
            read_parameter=read_param,
        )
        return sg

    def split_process_load_datasets(self):
        """Partition config["runs"] into (to-process, already-processed).

        A run must be processed when any of its "df", "nxg", or "maps" files
        is missing under save_path/<name>.
        """
        save_path = self.config["save_path"]
        ret = []
        unret = []
        # TODO: Parallelize this.
        for dataset in self.config["runs"]:
            process = False
            _name = dataset["name"]
            _path = os.path.join(save_path, _name)
            for f_type in ["df", "nxg", "maps"]:
                f_path = os.path.join(_path, SuperGraph._FILENAMES[f_type])
                if not os.path.isfile(f_path):
                    LOGGER.debug(f"{f_path} not found!!")
                    process = True
            if process:
                ret.append(dataset)
            else:
                unret.append(dataset)
        return ret, unret

    @staticmethod
    def _filter_datasets_by_date_range(config, start_date, end_date):
        """Return the runs whose (time-encoded) names fall in [start, end]."""
        _start = Sanitizer.fmt_timestr_to_datetime(Sanitizer.fmt_time(start_date))
        _end = Sanitizer.fmt_timestr_to_datetime(Sanitizer.fmt_time(end_date))

        LOGGER.info(
            f"Filtering datasets by start_date [{_start}] and end_date [{_end}]"
        )

        ret = []
        # Parallelize this.
        for dataset in config["runs"]:
            # Dataset names encode a timestamp; compare after sanitizing.
            is_in_range = (
                _start
                <= Sanitizer.fmt_timestr_to_datetime(
                    Sanitizer.fmt_time(dataset["name"])
                )
                <= _end
            )
            if is_in_range:
                ret.append(dataset)
        return ret

    def process_single(self, process_datasets, save_supergraphs):
        """Create, group, filter, and write a SuperGraph for each dataset.

        Args:
            process_datasets: run descriptors that still need processing.
            save_supergraphs: when True, also keep the processed graphs in
                ``self.supergraphs`` (used by the ensemble step).
        """
        if len(process_datasets) == 0:
            return

        append_path = self.config.get("append_path", "")
        load_path = self.config["data_path"]
        m2c = self.config.get("m2c", {})
        m2m = self.config.get("m2m", {})
        group_by = self.config["group_by"]
        filter_by = self.config.get("filter_by", "")
        filter_perc = self.config.get("filter_perc", 0)
        save_path = self.config.get("save_path", "")

        # Map run name -> (data subdirectory, profile format)
        run_props = {
            _["name"]: (
                os.path.join(_["path"], append_path)
                if len(append_path) > 0
                else _["path"],
                _["profile_format"],
            )
            for _ in self.config["runs"]
        }

        if save_supergraphs:
            self.supergraphs = {}

        for idx, dataset in enumerate(process_datasets):
            name = dataset["name"]
            _prop = run_props[name]
            LOGGER.info(
                f"Processing dataset [{idx+1}/{len(process_datasets)}] ({name}) (save={save_supergraphs})"
            )

            data_path = os.path.join(load_path, _prop[0])

            # HPCToolkit runs are unusable without their experiment.xml.
            if _prop[1] == "hpctoolkit" and not os.path.isfile(
                os.path.join(data_path, "experiment.xml")
            ):
                LOGGER.debug(
                    f"Skipping {data_path} as it is missing the experiment.xml file"
                )
                continue

            sg = SuperGraph(name)
            sg.create(path=data_path, profile_format=_prop[1], m2c=m2c, m2m=m2m)
            LOGGER.info(f"Created supergraph ({name})")

            # Group and filter mutate sg in place before it is written out.
            Group(sg, group_by=group_by)
            LOGGER.info(f"Grouped supergraph {name}")

            Filter(sg, filter_by=filter_by, filter_perc=filter_perc)
            LOGGER.info(f"Filtered supergraph {name}")

            sg.write(os.path.join(save_path, name))

            if save_supergraphs:
                self.supergraphs[sg.name] = sg
                LOGGER.debug(f"Stored in dictionary ({name})")

    def load_single(self, load_datasets):
        """Load already-processed supergraphs from disk in parallel."""
        if len(load_datasets) == 0:
            return

        LOGGER.info(f"Loading {len(load_datasets)} supergraphs")
        save_path = self.config.get("save_path", "")

        with multiprocessing.Pool(processes=multiprocessing.cpu_count()) as pool:
            processed_folders = pool.map(
                partial(self._mp_saved_data, save_path=save_path),
                self.config["runs"],
            )

        # NOTE(review): the pool.map above is repeated verbatim inside this
        # guard (the guard is always true here because of the early return),
        # so the saved-data scan runs twice — likely a copy/paste leftover.
        if len(load_datasets) > 0:
            with multiprocessing.Pool(processes=multiprocessing.cpu_count()) as pool:
                processed_folders = pool.map(
                    partial(self._mp_saved_data, save_path=save_path),
                    self.config["runs"],
                )
            self.config["runs"] = [
                d for d in processed_folders if d is not None
            ]  # Filter the none values

        # self.mp_dataset_load(load_datasets[0] , save_path=save_path)
        with multiprocessing.Pool(processes=multiprocessing.cpu_count()) as pool:
            load_supergraphs = pool.map(
                partial(self.mp_dataset_load, save_path=save_path), load_datasets
            )

        for sg in load_supergraphs:
            self.supergraphs[sg.name] = sg

    def process_ensemble(self, save_path):
        """Unify all loaded supergraphs into a single "ensemble" graph on disk."""
        if len(self.supergraphs) <= 1:
            return

        LOGGER.info("Processing Ensemble supergraph")
        name = "ensemble"
        sg = EnsembleGraph(name)

        Unify(sg, self.supergraphs)
        LOGGER.info(f"Created supergraph ({name})")

        sg.write(os.path.join(save_path, name))
        self.supergraphs[name] = sg
        LOGGER.debug(f"Stored in dictionary ({name})")

    # --------------------------------------------------------------------------
    def process(self, reset=False):
        """Process the datasets using a Pipeline of operations.

        1. Each dataset is processed individually into a SuperGraph. Each
        SuperGraph is then processed according the provided config
        variables, e.g., filter_perc, filter_by.
        2. EnsembleGraph is then constructed from the processed SuperGraphs.

        Args:
            reset: when True, reprocess every dataset even if saved artifacts
                already exist on disk.
        """
        save_path = self.config.get("save_path", "")
        ensemble_process = self.config.get("ensemble_process", False)

        # Do not process, if already processed.
        if reset:
            process_datasets, load_datasets = self.datasets, []
        else:
            process_datasets, load_datasets = self.split_process_load_datasets()

        self.process_single(process_datasets, save_supergraphs=ensemble_process)
        self.load_single(load_datasets)
        self.process_ensemble(save_path)

    def request_general(self, operation):
        """
        Handles general requests

        Supported operation names: "init" (configuration summary for the
        client), "summary" (per-supergraph summaries), "timeline" (top-n
        nodes over all runs), and "cct" (node-link layout of one dataset).
        """
        _OPERATIONS = ["init", "summary", "timeline", "cct"]

        assert "name" in operation
        assert operation["name"] in _OPERATIONS

        operation_name = operation["name"]

        if operation_name == "init":
            # Prefer the ensemble graph's metadata when multiple runs exist.
            if len(self.datasets) > 1:
                sg = self.supergraphs["ensemble"]
            else:
                sg = self.supergraphs[self.datasets[0]["name"]]

            time_columns = sg.time_columns
            return {
                **self.config,
                "time_columns": time_columns,
                "profile_format_summary": list(
                    set(map(lambda d: d["profile_format"], self.datasets))
                ),
                "module_callsite_map": sg.module2callsite,
                "callsite_module_map": sg.callsite2module,
            }

        elif operation_name == "summary":
            return {sg: self.supergraphs[sg].summary() for sg in self.supergraphs}

        elif operation_name == "timeline":
            operation["ncount"] = int(operation["ncount"])
            assert operation["ntype"] in ["module", "callsite"]
            assert isinstance(operation["ncount"], int)
            assert operation["metric"] in ["time", "time (inc)"]

            if len(self.supergraphs) == 1:
                supergraph = self.supergraphs[self.datasets[0]["name"]]
            else:
                supergraph = self.supergraphs["ensemble"]

            # Get the top-n nodes from the "ensemble" based on the ntype.
            top_nodes_idx = supergraph.df_get_top_by_attr(
                operation["ntype"], operation["ncount"], operation["metric"]
            )
            all_nodes_idx = supergraph.df_get_top_by_attr(
                operation["ntype"], -1, operation["metric"]
            )

            # Convert the indexs to the modules.
            top_nodes = [
                supergraph.get_name(node_idx, operation["ntype"])
                for node_idx in top_nodes_idx
            ]
            all_nodes = [
                supergraph.get_name(node_idx, operation["ntype"])
                for node_idx in all_nodes_idx
            ]

            # Construct the per-supergraph timeline data.
            data = {}
            data["d"] = {
                sg: self.supergraphs[sg].timeline(
                    top_nodes, operation["ntype"], operation["metric"]
                )
                for sg in self.supergraphs
                if sg != "ensemble"
            }
            # Attach the keys as the top_nodes
            data["top_nodes"] = top_nodes
            data["all_nodes"] = all_nodes
            return data

        elif operation_name == "cct":
            sg = self.supergraphs[operation["dataset"]]
            nll = NodeLinkLayout(sg=sg, selected_runs=operation["dataset"])
            return nll.nxg

    def request_single(self, operation):
        """
        Handles requests connected to Single CallFlow.

        Supported operation names: "supergraph", "split_ranks", "histogram",
        "scatterplot", "boxplots" (plus "cct", validated but handled by
        request_general).
        """
        assert isinstance(operation, dict)
        _OPERATIONS = [
            "cct",
            "supergraph",
            "split_ranks",
            "histogram",
            "scatterplot",
            "boxplots",
        ]
        assert "name" in operation
        assert operation["name"] in _OPERATIONS

        LOGGER.info(f"[Single Mode] {operation}")
        operation_name = operation["name"]
        sg = self.supergraphs[operation["dataset"]]

        # NOTE(review): ntype is only bound when present in the request, but
        # the histogram/scatterplot/boxplots branches below use it
        # unconditionally — a request without "ntype" would raise NameError.
        if "ntype" in operation:
            ntype = operation["ntype"]

        if operation_name == "supergraph":
            ssg = SankeyLayout(
                grp_column="group_path",
                sg=sg,
                esg=None,
                nbins=operation.get("nbins", 20),
                reveal_callsites=operation.get("reveal_callsites", []),
                split_entry_module=operation.get("split_entry_module", []),
                split_callee_module=operation.get("split_callee_module", []),
            )
            return ssg.nxg

        elif operation_name == "split_ranks":
            selected_ranks = operation["ranks"]
            # NOTE(review): both layouts are constructed with identical
            # arguments (including the same ranks); the "non_selected" layout
            # presumably should use the complement of selected_ranks — confirm.
            selected_sg = SankeyLayout(
                sg=sg,
                path_column="group_path",
                selected_runs=[operation["dataset"]],
                ranks=selected_ranks,
            )
            non_selected_sg = SankeyLayout(
                sg=sg,
                path_column="group_path",
                selected_runs=[operation["dataset"]],
                ranks=selected_ranks,
            )
            return {"selected": selected_sg.nxg, "non_selected": non_selected_sg.nxg}

        elif operation_name == "histogram":
            node = operation.get("node", None)
            nbins = int(operation.get("nbins", 20))
            hist = Histogram(
                sg=sg,
                rel_sg=None,
                name=node,
                ntype=ntype,
                histo_types=["rank"],
                bins=nbins,
            )
            return hist.unpack()

        elif operation_name == "scatterplot":
            node = operation["node"]
            orientation = operation["orientation"]
            scatterplot = Scatterplot(
                sg=sg,
                rel_sg=None,
                name=node,
                ntype=ntype,
                orientation=orientation,
            )
            return scatterplot.unpack()

        elif operation_name == "boxplots":
            callsites = operation["callsites"]

            result = {}
            for callsite in callsites:
                bp = BoxPlot(sg=sg, name=callsite, ntype=ntype)
                result[callsite] = bp.unpack()
            return result

    def request_ensemble(self, operation):  # noqa: C901
        """
        Handles all the socket requests connected to Single CallFlow.

        Supported operation names: "supergraph", "module_hierarchy",
        "projection", "compare", "histogram", "boxplots", "scatterplot",
        "gradients". Most compare a single run against the ensemble graph.
        """
        _OPERATIONS = [
            "supergraph",
            "module_hierarchy",
            "projection",
            "compare",
            "histogram",
            "boxplots",
            "scatterplot",
            "gradients",
        ]

        assert "name" in operation
        assert operation["name"] in _OPERATIONS

        # These operations act on the ensemble alone and need no "dataset".
        _OPERATIONS_WO_DATASET = ["projection", "module_hierarchy", "compare"]
        if not (operation["name"] in _OPERATIONS_WO_DATASET):
            assert "dataset" in operation

        _OPERATION_W_COMPARE = ["compare"]
        if operation["name"] in _OPERATION_W_COMPARE:
            assert "targetRun" in operation
            assert "compareRun" in operation

        LOGGER.info(f"[Ensemble Mode] {operation}")
        operation_name = operation["name"]

        # The relative/background graph defaults to the ensemble.
        if "background" not in operation:
            e_sg = self.supergraphs["ensemble"]
        else:
            e_sg = self.supergraphs[operation["background"]]

        # NOTE(review): as in request_single, ntype/sg are bound conditionally
        # but used unconditionally in some branches below.
        if "ntype" in operation:
            ntype = operation["ntype"]

        if "dataset" in operation:
            sg = self.supergraphs[operation["dataset"]]

        if operation_name == "supergraph":
            ssg = SankeyLayout(
                grp_column="group_path",
                sg=sg,
                esg=e_sg,
                nbins=int(operation.get("nbins", 20)),
                reveal_callsites=operation.get("reveal_callsites", []),
                split_entry_module=operation.get("split_entry_module", []),
                split_callee_module=operation.get("split_callee_module", []),
            )
            return ssg.nxg

        elif operation_name == "module_hierarchy":
            nbins = int(operation.get("nbins", 20))
            dataset = operation.get("dataset")
            hl = HierarchyLayout(
                esg=e_sg,
                dataset=dataset,
                node=operation.get("node"),
                nbins=nbins,
            )
            return hl.nxg

        elif operation_name == "projection":
            selected_runs = operation.get("selected_runs", [])
            n_cluster = operation.get("n_cluster", 3)

            pp = ParameterProjection(
                sg=e_sg,
                selected_runs=selected_runs,
                n_cluster=n_cluster,
            )
            return pp.result.to_json(orient="columns")

        elif operation_name == "compare":
            compare_dataset = operation.get("compareRun", None)
            target_dataset = operation.get("targetRun", None)
            # NOTE(review): "selectedMtric" (sic) — key must match the
            # client's (misspelled) payload; renaming it here would break it.
            selected_metric = operation.get("selectedMtric", "time")

            dv = DiffView(e_sg, compare_dataset, target_dataset, selected_metric)
            return dv.result

        elif operation_name == "histogram":
            node = operation["node"]
            nbins = int(operation.get("nbins", 20))
            hist = Histogram(
                sg=sg,
                rel_sg=e_sg,
                name=node,
                ntype=ntype,
                histo_types=["rank"],
                bins=nbins,
            )
            return hist.unpack()

        elif operation_name == "scatterplot":
            node = operation["node"]
            orientation = operation["orientation"]
            scatterplot = Scatterplot(
                sg=sg,
                rel_sg=e_sg,
                name=node,
                ntype=ntype,
                orientation=orientation,
            )
            return scatterplot.unpack()

        elif operation_name == "boxplots":
            callsites = operation.get("callsites", [])
            iqr = float(operation.get("iqr", 1.5))
            result = {}
            for callsite in callsites:
                bp = BoxPlot(
                    sg=sg,
                    rel_sg=e_sg,
                    name=callsite,
                    ntype=ntype,
                    iqr_scale=iqr,
                )
                result[callsite] = bp.unpack()
            return result

        elif operation_name == "gradients":
            name = operation.get("node", None)
            ntype = operation.get("ntype", None)
            nbins = int(operation.get("nbins", 20))

            # Gradients are computed only for the ensemble mode.
            esg = self.supergraphs["ensemble"]
            node = {"id": esg.get_idx(name, ntype), "type": ntype}
            return esg.get_gradients(node, nbins)
| StarcoderdataPython |
5042638 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Derived Parameters
------------------
The engineering archive has pseudo-MSIDs that are derived via computation from
telemetry MSIDs. All derived parameter names begin with the characters "DP_"
(not case sensitive as usual). Otherwise there is no difference from standard
MSIDs.
"""
from .base import * # noqa
from .thermal import * # noqa
from .test import * # noqa
from .acispow import * # noqa
from .pcad import * # noqa
from .orbit import * # noqa
from .eps import * # noqa
| StarcoderdataPython |
174480 |
class SearchUI:
    '''SearchUI provides a means of easily assembling a search interface
    for either Elasticsearch or LunrJS powered search services'''

    def __init__(self, cfg):
        """Initialize the UI from a configuration mapping.

        Args:
            cfg (dict): user-supplied configuration; a shallow copy is stored
                in ``self.cfg`` so the caller's dict is never mutated.

        Missing keys are filled with defaults: "id" -> "id", "href" -> "href",
        and empty lists for "display_fields", "aggregated_fields", and
        "sort_fields".
        """
        self.cfg = {}
        for key in cfg.keys():
            self.cfg[key] = cfg.get(key, None)
        if 'id' not in self.cfg:
            self.cfg['id'] = 'id'
        if 'href' not in self.cfg:
            self.cfg['href'] = 'href'
        if 'display_fields' not in self.cfg:
            self.cfg['display_fields'] = []
        if 'aggregated_fields' not in self.cfg:
            self.cfg['aggregated_fields'] = []
        if 'sort_fields' not in self.cfg:
            # BUG FIX: previously this replaced the entire self.cfg dict with
            # an empty list, destroying all configuration; now it only
            # defaults the missing "sort_fields" entry.
            self.cfg['sort_fields'] = []
| StarcoderdataPython |
4930227 | from __future__ import absolute_import
from flask import (
Blueprint, abort, flash, g, jsonify, redirect, render_template, request,
session, url_for
)
# Root blueprint: its only route forwards to the todos blueprint.
bp = Blueprint('main', __name__)


@bp.route('/')
def index():
    """Redirect the site root ('/') to the todos index view."""
    return redirect(url_for('todos.show_index_view'))
| StarcoderdataPython |
6431184 | <gh_stars>0
# Generated by Django 2.0.1 on 2018-02-15 22:11
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter classy.Classification.created_by: CharField(max_length=50)
    now allows NULL values.
    """

    dependencies = [
        ('classy', '0009_classification_logs_user_id'),
    ]

    operations = [
        migrations.AlterField(
            model_name='classification',
            name='created_by',
            field=models.CharField(max_length=50, null=True),
        ),
    ]
| StarcoderdataPython |
1685103 | <reponame>ShapedRogue/python-youtube-downloader<gh_stars>1-10
# Interactive CLI: prompt for a YouTube URL, list the available streams,
# and download the one the user picks to a user-chosen directory.
# NOTE(review): uses the legacy pytube API (YouTube.get_videos, Stream
# printing by index, YouTube.filename) — presumably written for pytube < 9;
# confirm against the installed pytube version.
import pytube # this is where i imported the pytube module which is necessary for this script

print(
    "Enter the YouTube video URL") # This is where we tell the user to input the url in which then pytube grabs the download
a = input()
b = pytube.YouTube(a) # This is where pytube comes in and pulls info from the video
videos = b.get_videos()
s = 1
for v in videos:
    # Show each available stream with a 1-based index for selection below.
    print(f"{str(s)}. {str(v)}") # Here is where pytube shows the user what it can download and what file format
    s += 1
print(
    "Type the number of which quality/format you would like: ") # this is where the user inputs what kind of quality and file format he wants to download
c = int(input())
d = videos[c - 1]
print("Enter the video file path: ") # This is where the user inputs where they want to save the video
path = input()
d.download(path) # the download process is initiated
print("The video",
      b.filename + " has been downloaded") # If the download has successfully finished it will display this
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.