3320857 | # The MIT License (MIT)
#
# Copyright (c) 2019 <NAME> and <NAME>
# for Adafruit Industries LLC
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
`adafruit_st7735`
====================================================
Displayio driver for ST7735 based displays.
* Author(s): <NAME>
Implementation Notes
--------------------
**Hardware:**
**Software and Dependencies:**
* Adafruit CircuitPython firmware for the supported boards:
https://github.com/adafruit/circuitpython/releases
"""
import displayio
__version__ = "0.0.0-auto.0"
__repo__ = "https://github.com/adafruit/Adafruit_CircuitPython_ST7735.git"
_INIT_SEQUENCE = (
b"\x01\x80\x32" # _SWRESET and Delay 50ms
b"\x11\x80\xFF" # _SLPOUT
b"\x3A\x81\x05\x0A" # _COLMOD
b"\xB1\x83\x00\x06\x03\x0A" # _FRMCTR1
b"\x36\x01\x08" # _MADCTL
b"\xB6\x02\x15\x02" # _DISSET5
# 1 clk cycle nonoverlap, 2 cycle gate, rise, 3 cycle osc equalize, Fix on VTL
b"\xB4\x01\x00" # _INVCTR line inversion
b"\xC0\x82\x02\x70\x0A" # _PWCTR1 GVDD = 4.7V, 1.0uA, 10 ms delay
b"\xC1\x01\x05" # _PWCTR2 VGH = 14.7V, VGL = -7.35V
b"\xC2\x02\x01\x02" # _PWCTR3 Opamp current small, Boost frequency
b"\xC5\x82\x3C\x38\x0A" # _VMCTR1
b"\xFC\x02\x11\x15" # _PWCTR6
b"\xE0\x10\x09\x16\x09\x20\x21\x1B\x13\x19\x17\x15\x1E\x2B\x04\x05\x02\x0E" # _GMCTRP1 Gamma
b"\xE1\x90\x0B\x14\x08\x1E\x22\x1D\x18\x1E\x1B\x1A\x24\x2B\x06\x06\x02\x0F\x0A" # _GMCTRN1
b"\x13\x80\x0a" # _NORON
b"\x29\x80\xFF" # _DISPON
)
# pylint: disable=too-few-public-methods
class ST7735(displayio.Display):
"""ST7735 driver"""
def __init__(self, bus, **kwargs):
super().__init__(bus, _INIT_SEQUENCE, **kwargs)
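# A minimal usage sketch for CircuitPython hardware; the SPI pin assignments
# below are assumptions and will differ per board:
#
#     import board
#     displayio.release_displays()
#     spi = board.SPI()
#     display_bus = displayio.FourWire(spi, command=board.D6,
#                                      chip_select=board.D5, reset=board.D9)
#     display = ST7735(display_bus, width=128, height=128)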
| StarcoderdataPython |
3200185 | """
<NAME>
orientation.py
Implement the keypoint gradient direction estimation technique based on the
Lecture 9 notes
/\
/**\
/****\ /\
/ \ /**\
/ /\ / \ /\ /\ /\ /\ /\/\/\ /\
/ / \ / \ / \/\/ \/ \ /\/ \/\ /\ /\/ / / \/ \
/ / \/ /\ \ / \ \ / \/ / / \/ \/ \ / \ \
/ / \/ \/\ \ / \ / / \
__/__/_______/___/__\___\__________________________________________________
I had considerable trouble finding the peaks...here they are...let's go ski
"""
import sys
import cv2 as cv
import numpy as np
def getGradient(image, sigma):
"""
Gradient magnitude calculation from lecture
Takes in image and sigma
Calculates gradient magnitudes and directions(angles) based on sigma
Returns magnitude and directions
"""
kernelSize = (int(4*sigma+1), int(4*sigma+1))
imgGauss = cv.GaussianBlur(image, kernelSize, sigma)
kx,ky = cv.getDerivKernels(1,1,3)
kx = np.transpose(kx/2)
ky = ky/2
imgDx = cv.filter2D(imgGauss,-1,kx)
imgDy = cv.filter2D(imgGauss,-1,ky)
imgGradient = np.sqrt(imgDx**2 + imgDy**2)
imgDir = np.arctan2(imgDy, imgDx)
imgDir = 180*imgDir/np.pi
return imgGradient, imgDir
def pixelNeighborhood(point, image, sigma):
"""
Takes in a point, an image, and sigma
Calculates the width from sigma and creates a pixel "neighborhood"
with the point as the center
Returns the neighborhood
"""
width = int(8*sigma)//2
x,y = point
neighborhood = image[x-width:x+width+1, y-width:y+width+1]
return neighborhood
def getWeights(mag, sigma):
"""
Takes in gradient magnitudes and sigma
Generates Gaussian kernel based on sigma
Creates 2D Gaussian kernel from outer product of the 1D kernel
Multiplies the magnitudes by the kernel to get the weights of the pixels
"""
gaussian = cv.getGaussianKernel(int(8*sigma+1), 2*sigma)
window = np.outer(gaussian,gaussian)
weights = mag * window
return weights
def assignBins(angles, weights):
"""
Takes in sorted angles(directions) and sorted weights
Goes through each bin center (i.e. -175, -165, etc.):
Gets the sum of the weights that fall in that bin
    Adds the sum to a list that represents a histogram
i.e. bins has 36 slots with each slot a sum corresponding to each bin center
Return the histogram
"""
binCenters = np.arange(-175, 185, 10)
hist = []
for center in binCenters:
sum = getSum(angles, weights, center - 10, center + 10, center)
if (center == 175):
sum += getSum(angles, weights, -180, -175, -185)
elif (center == -175):
sum += getSum(angles, weights, 175, 180, 185)
hist.append(sum)
return hist
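# Worked example of the interpolation performed by getSum below: an angle of
# -172 degrees lies 3 degrees from the -175 bin center and 7 degrees from the
# -165 center, so it contributes 0.7 of its weight to the -175 bin and 0.3 to
# the -165 bin.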
def getSum(angles, weights, start, end, center):
"""
Take in sorted list of angles, sorted list of weights, a start index,
an end index, and a center index
Gets the distance each angle is to the center
Calculates the percent of the weight that falls into that bin
Gets the indices of angles that fall between start and end
Get the weights at those indices (i.e. weights at those angles)
Multiply weights times the percentages and sum the result
Return the sum
"""
dist = np.abs(angles[(angles>=start) & (angles<=end)] - center)
percent = 1 - dist / 10
rangeInd = np.where((angles>=start) & (angles<=end))
sum = 0
if len(rangeInd[0]) != 0:
rangeStart = np.min(rangeInd)
rangeEnd = np.max(rangeInd) + 1
weightRange = weights[rangeStart:rangeEnd]
sum = (weightRange * percent).sum()
return sum
def smoothHistogram(hist):
"""
Take in histogram(bins with sum in each)
Iterates through and "smooths" the weight based on the weight of its
neighbor bins
Returns the smoothed histogram
"""
smoothedHist = []
for i in range(len(hist)):
if i == 0:
neighborWeights = hist[1] + hist[-1]
elif i == len(hist) - 1:
neighborWeights = hist[-2] + hist[0]
else:
neighborWeights = hist[i-1] + hist[i+1]
smoothWeight = (hist[i] + (neighborWeights/2))/2
smoothedHist.append(smoothWeight)
return smoothedHist
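# Smoothing example: each bin becomes (bin + mean of its two circular
# neighbors) / 2, so a histogram of [4, 8, 4] smooths to [5, 6, 5].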
def findPeaks(hist):
"""
Take in histogram
Go through each bin in the histogram and:
Find local maximum and:
Fit a parabola around the two neighbor bins and local max bin
Calculate the critical point that produces the max of the parabola
(critical point represents orientation, max is the peak)
Add both to list of peaks
Return sorted list of peaks
"""
    peaks = []
    binRanges = np.arange(-175, 185, 10)
    histMax = np.max(hist)  # overall histogram max; kept fixed for the threshold
    for i in range(len(hist)):
        if i == 0:
            left, right = -1, 1
        elif i == len(hist) - 1:
            left, right = -2, 0
        else:
            left, right = i-1, i+1
        if (hist[i] - hist[left]) >= (0.01*histMax) \
                and (hist[i] - hist[right]) >= (0.01*histMax):
            # fit a parabola a*x^2 + b*x + c through the local max and its neighbors
            a = (hist[right] - 2*hist[i] + hist[left]) / 2
            b = (hist[right] - hist[left]) / 2
            c = hist[i]
            # critical point (vertex) of the parabola: x = -b / (2a),
            # measured in bin-width units from the local max bin
            x = -b / (2*a)
            # peak value at the vertex
            peakValue = a*(x**2) + b*x + c
            offset = (x*10) + binRanges[i]
            peaks.append((peakValue, offset))
    return sorted(peaks, reverse=True)
def output(point, num, histogram, smoothHistogram, peaks):
"""
Take in the point being evaluated, the number of that point, the histogram
calculated, the smoothed histogram, and the list of peaks
Output histogram and smoothed histogram info
Output peak info and strong orientation peak info
"""
print("\n Point {}: ({},{})\nHistograms:".format(num, point[0], point[1]))
binRanges = np.arange(-180, 190, 10)
for i in range(36):
br1, br2 = binRanges[i], binRanges[i+1]
h, sh = histogram[i], smoothHistogram[i]
print("[{},{}]: {:.2f} {:.2f}".format(br1, br2, h, sh))
maxPeak = 0
for i in range(len(peaks)):
peak, offset = peaks[i]
print("Peak {}: theta {:.1f}, value {:.2f}".format(i, offset, peak))
if peak > maxPeak:
maxPeak = peak
print("Number of strong orientation peaks: {}".format(strongPeaks(maxPeak, peaks)))
def strongPeaks(maxPeak, peaks):
    """
    Take in the max peak and the list of peaks
    Count how many have a strong orientation (i.e. are at 80% or above of the max)
    Return the count
    """
    count = 0
    for i in range(len(peaks)):
        peak = peaks[i][0]
        if peak >= 0.8*maxPeak:
            count += 1
    return count
if __name__ == "__main__":
"""
Handle command line arguments
"""
if len(sys.argv) != 4:
print("Correct usage: p2_compare.py sigma img points")
sys.exit()
else:
sig = sys.argv[1]
inImgName = sys.argv[2]
pointsFileName = sys.argv[3]
try:
sig = float(sig)
except ValueError:
print("Sigma must be real number!")
sys.exit()
try:
inImg = cv.imread(inImgName,0).astype(np.float64)
except AttributeError:
print("{} is not a valid image!".format(inImgName))
sys.exit()
try:
points = np.loadtxt(pointsFileName, dtype=np.uint16)
except ValueError:
print("Malformed points file: {}, must be numbers".format(pointsFile))
sys.exit()
"""
Iterate through all the points in the file:
Get gradient magnitudes and directions of whole image
Crop those magnitudes and directions down to neighborhood around point
Sort the directions and the weights (making sure each element still
corresponds to other element in the other list)
Assign bins (make histogram)
Smooth the histogram
Get the peaks
Output all the info
"""
for i in range(len(points)):
point = points[i]
gradientMag, gradientDir = getGradient(inImg, sig)
gradientMag = pixelNeighborhood(point, gradientMag, sig)
gradientDir = pixelNeighborhood(point, gradientDir, sig)
dirSort = np.sort(gradientDir, axis=None)
dirInd = np.argsort(gradientDir, axis=None)
weights = getWeights(gradientMag, sig).flatten()
weights = weights[dirInd]
bins = assignBins(dirSort, weights)
smoothedBins = smoothHistogram(bins)
peaks = findPeaks(smoothedBins)
output(points[i], i, bins, smoothedBins, peaks)
| StarcoderdataPython |
1672622 | #!/usr/bin/env python2.7
# coding=utf-8
"""
Sopel - An IRC Bot
Copyright 2008, <NAME>, inamidst.com
Copyright © 2012-2014, <NAME> <<EMAIL>>
Licensed under the Eiffel Forum License 2.
https://sopel.chat
"""
from __future__ import unicode_literals, absolute_import, print_function, division
import argparse
import os
import platform
import signal
import sys
import time
import traceback
from sopel import bot, config, logger, tools, __version__
from . import utils
if sys.version_info < (2, 7):
tools.stderr('Error: Requires Python 2.7 or later. Try python2.7 sopel')
sys.exit(1)
if sys.version_info.major == 2:
tools.stderr('Warning: Python 2.x is near end of life. Sopel support at that point is TBD.')
if sys.version_info.major == 3 and sys.version_info.minor < 3:
tools.stderr('Error: When running on Python 3, Python 3.3 is required.')
sys.exit(1)
ERR_CODE = 1
"""Error code: program exited with an error"""
ERR_CODE_NO_RESTART = 2
"""Error code: program exited with an error and should not be restarted
This error code is used to prevent systemd from restarting the bot when it
encounters such an error case.
"""
def run(settings, pid_file, daemon=False):
delay = 20
# Inject ca_certs from config to web for SSL validation of web requests
if not settings.core.ca_certs:
tools.stderr(
'Could not open CA certificates file. SSL will not work properly!')
def signal_handler(sig, frame):
if sig == signal.SIGUSR1 or sig == signal.SIGTERM or sig == signal.SIGINT:
tools.stderr('Got quit signal, shutting down.')
p.quit('Closing')
elif sig == signal.SIGUSR2 or sig == signal.SIGILL:
tools.stderr('Got restart signal.')
p.restart('Restarting')
# Define empty variable `p` for bot
p = None
while True:
if p and p.hasquit: # Check if `hasquit` was set for bot during disconnected phase
break
try:
p = bot.Sopel(settings, daemon=daemon)
if hasattr(signal, 'SIGUSR1'):
signal.signal(signal.SIGUSR1, signal_handler)
if hasattr(signal, 'SIGTERM'):
signal.signal(signal.SIGTERM, signal_handler)
if hasattr(signal, 'SIGINT'):
signal.signal(signal.SIGINT, signal_handler)
if hasattr(signal, 'SIGUSR2'):
signal.signal(signal.SIGUSR2, signal_handler)
if hasattr(signal, 'SIGILL'):
signal.signal(signal.SIGILL, signal_handler)
logger.setup_logging(p)
p.run(settings.core.host, int(settings.core.port))
except KeyboardInterrupt:
break
except Exception: # TODO: Be specific
trace = traceback.format_exc()
try:
tools.stderr(trace)
except Exception: # TODO: Be specific
pass
logfile = open(os.path.join(settings.core.logdir, settings.basename + '.exceptions.log'), 'a')
            logfile.write('Critical exception in core\n')
logfile.write(trace)
logfile.write('----------------------------------------\n\n')
logfile.close()
# TODO: This should be handled by command_start
# All we should need here is a return value, but replacing the
# os._exit() call below (at the end) broke ^C.
# This one is much harder to test, so until that one's sorted it
# isn't worth the risk of trying to remove this one.
os.unlink(pid_file)
os._exit(1)
if not isinstance(delay, int):
break
if p.wantsrestart:
return -1
if p.hasquit:
break
tools.stderr(
'Warning: Disconnected. Reconnecting in %s seconds...' % delay)
time.sleep(delay)
# TODO: This should be handled by command_start
# All we should need here is a return value, but making this
# a return makes Sopel hang on ^C after it says "Closed!"
os.unlink(pid_file)
os._exit(0)
def add_legacy_options(parser):
parser.add_argument("-d", '--fork', action="store_true",
dest="daemonize", help="Daemonize Sopel")
parser.add_argument("-q", '--quit', action="store_true", dest="quit",
help=(
"Gracefully quit Sopel "
"(deprecated, and will be removed in Sopel 8; "
"use `sopel stop` instead)"))
parser.add_argument("-k", '--kill', action="store_true", dest="kill",
help=(
"Kill Sopel "
"(deprecated, and will be removed in Sopel 8; "
"use `sopel stop --kill` instead)"))
parser.add_argument("-r", '--restart', action="store_true", dest="restart",
help=(
"Restart Sopel "
"(deprecated, and will be removed in Sopel 8; "
"use `sopel restart` instead)"))
parser.add_argument("-l", '--list', action="store_true",
dest="list_configs",
help="List all config files found")
parser.add_argument('--quiet', action="store_true", dest="quiet",
help="Suppress all output")
parser.add_argument('-w', '--configure-all', action='store_true',
dest='wizard',
help=(
"Run the configuration wizard "
"(deprecated, and will be removed in Sopel 8; "
"use `sopel configure` instead)"))
parser.add_argument('--configure-modules', action='store_true',
dest='mod_wizard',
help=(
"Run the configuration wizard, but only for the "
"module configuration options "
"(deprecated, and will be removed in Sopel 8; "
"use `sopel configure --modules` instead)"))
parser.add_argument('-v', action="store_true",
dest='version_legacy',
help=(
"Show version number and exit "
"(deprecated, and will be removed in Sopel 8; "
"use -V/--version instead)"))
parser.add_argument('-V', '--version', action='store_true',
dest='version',
help='Show version number and exit')
def build_parser():
"""Build an ``argparse.ArgumentParser`` for the bot"""
parser = argparse.ArgumentParser(description='Sopel IRC Bot',
usage='%(prog)s [options]')
add_legacy_options(parser)
utils.add_common_arguments(parser)
subparsers = parser.add_subparsers(
title='sub-commands',
description='List of Sopel\'s sub-commands',
dest='action',
metavar='{start,configure,stop,restart}')
# manage `legacy` sub-command
parser_legacy = subparsers.add_parser('legacy')
add_legacy_options(parser_legacy)
utils.add_common_arguments(parser_legacy)
# manage `start` sub-command
parser_start = subparsers.add_parser(
'start',
description='Start a Sopel instance',
help='Start a Sopel instance')
parser_start.add_argument(
'-d', '--fork',
dest='daemonize',
action='store_true',
default=False,
help='Run Sopel as a daemon (fork)')
parser_start.add_argument(
'--quiet',
action="store_true",
dest="quiet",
help="Suppress all output")
utils.add_common_arguments(parser_start)
# manage `configure` sub-command
parser_configure = subparsers.add_parser(
'configure', help='Sopel\'s Wizard tool')
parser_configure.add_argument(
'--modules',
action='store_true',
default=False,
dest='modules')
utils.add_common_arguments(parser_configure)
# manage `stop` sub-command
parser_stop = subparsers.add_parser(
'stop',
description='Stop a running Sopel instance',
help='Stop a running Sopel instance')
parser_stop.add_argument(
'-k', '--kill',
action='store_true',
default=False,
help='Kill Sopel without a graceful quit')
parser_stop.add_argument(
'--quiet',
action="store_true",
dest="quiet",
help="Suppress all output")
utils.add_common_arguments(parser_stop)
# manage `restart` sub-command
parser_restart = subparsers.add_parser(
'restart',
description='Restart a running Sopel instance',
help='Restart a running Sopel instance')
parser_restart.add_argument(
'--quiet',
action="store_true",
dest="quiet",
help="Suppress all output")
utils.add_common_arguments(parser_restart)
return parser
def check_not_root():
"""Check if root is running the bot.
It raises a ``RuntimeError`` if the user has root privileges on Linux or
if it is the ``Administrator`` account on Windows.
"""
opersystem = platform.system()
if opersystem in ["Linux", "Darwin"]:
# Linux/Mac
if os.getuid() == 0 or os.geteuid() == 0:
raise RuntimeError('Error: Do not run Sopel with root privileges.')
elif opersystem in ["Windows"]:
# Windows
if os.environ.get("USERNAME") == "Administrator":
raise RuntimeError('Error: Do not run Sopel as Administrator.')
else:
tools.stderr(
"Warning: %s is an uncommon operating system platform. "
"Sopel should still work, but please contact Sopel's developers "
"if you experience issues."
% opersystem)
def print_version():
"""Print Python version and Sopel version on stdout."""
py_ver = '%s.%s.%s' % (sys.version_info.major,
sys.version_info.minor,
sys.version_info.micro)
print('Sopel %s (running on Python %s)' % (__version__, py_ver))
print('https://sopel.chat/')
def print_config():
"""Print list of available configurations from default homedir."""
configs = utils.enumerate_configs(config.DEFAULT_HOMEDIR)
print('Config files in %s:' % config.DEFAULT_HOMEDIR)
configfile = None
for configfile in configs:
print('\t%s' % configfile)
if not configfile:
print('\tNone found')
print('-------------------------')
def get_configuration(options):
"""Get or create a configuration object from ``options``.
:param options: argument parser's options
:type options: ``argparse.Namespace``
:return: a configuration object
:rtype: :class:`sopel.config.Config`
This may raise a :exc:`sopel.config.ConfigurationError` if the
configuration file is invalid.
.. seealso::
The configuration file is loaded by
:func:`~sopel.cli.run.utils.load_settings` or created using the
configuration wizard.
"""
try:
settings = utils.load_settings(options)
except config.ConfigurationNotFound as error:
print(
"Welcome to Sopel!\n"
"I can't seem to find the configuration file, "
"so let's generate it!\n")
settings = utils.wizard(error.filename)
settings._is_daemonized = options.daemonize
return settings
def get_pid_filename(options, pid_dir):
"""Get the pid file name in ``pid_dir`` from the given ``options``.
:param options: command line options
:param str pid_dir: path to the pid directory
:return: absolute filename of the pid file
By default, it's ``sopel.pid``, but if a configuration filename is given
in the ``options``, its basename is used to generate the filename, as:
``sopel-{basename}.pid`` instead.
"""
name = 'sopel.pid'
if options.config:
basename = os.path.basename(options.config)
if basename.endswith('.cfg'):
basename = basename[:-4]
name = 'sopel-%s.pid' % basename
return os.path.abspath(os.path.join(pid_dir, name))
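# For example, with ``--config /home/user/.sopel/freenode.cfg`` the pid file
# resolves to ``<pid_dir>/sopel-freenode.pid``.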
def get_running_pid(filename):
"""Retrieve the PID number from the given ``filename``.
:param str filename: path to file to read the PID from
:return: the PID number of a Sopel instance if running, ``None`` otherwise
:rtype: integer
This function tries to retrieve a PID number from the given ``filename``,
as an integer, and returns ``None`` if the file is not found or if the
content is not an integer.
"""
if not os.path.isfile(filename):
return
with open(filename, 'r') as pid_file:
try:
return int(pid_file.read())
except ValueError:
pass
def command_start(opts):
"""Start a Sopel instance"""
# Step One: Get the configuration file and prepare to run
try:
config_module = get_configuration(opts)
except config.ConfigurationError as e:
tools.stderr(e)
return ERR_CODE_NO_RESTART
if config_module.core.not_configured:
tools.stderr('Bot is not configured, can\'t start')
return ERR_CODE_NO_RESTART
# Step Two: Manage logfile, stdout and stderr
utils.redirect_outputs(config_module, opts.quiet)
# Step Three: Handle process-lifecycle options and manage the PID file
pid_dir = config_module.core.pid_dir
pid_file_path = get_pid_filename(opts, pid_dir)
pid = get_running_pid(pid_file_path)
if pid is not None and tools.check_pid(pid):
tools.stderr('There\'s already a Sopel instance running '
'with this config file.')
tools.stderr('Try using either the `sopel stop` '
'or the `sopel restart` command.')
return ERR_CODE
if opts.daemonize:
child_pid = os.fork()
if child_pid != 0:
return
with open(pid_file_path, 'w') as pid_file:
pid_file.write(str(os.getpid()))
# Step Four: Run Sopel
ret = run(config_module, pid_file_path)
# Step Five: Shutdown Clean-Up
os.unlink(pid_file_path)
if ret == -1:
# Restart
os.execv(sys.executable, ['python'] + sys.argv)
else:
# Quit
return ret
def command_configure(opts):
"""Sopel Configuration Wizard"""
configpath = utils.find_config(
config.DEFAULT_HOMEDIR, opts.config or 'default')
if getattr(opts, 'modules', False):
utils.plugins_wizard(configpath)
else:
utils.wizard(configpath)
def command_stop(opts):
"""Stop a running Sopel instance"""
# Get Configuration
try:
settings = utils.load_settings(opts)
except config.ConfigurationNotFound as error:
tools.stderr('Configuration "%s" not found' % error.filename)
return ERR_CODE
if settings.core.not_configured:
tools.stderr('Sopel is not configured, can\'t stop')
return ERR_CODE
# Redirect Outputs
utils.redirect_outputs(settings, opts.quiet)
# Get Sopel's PID
filename = get_pid_filename(opts, settings.core.pid_dir)
pid = get_running_pid(filename)
if pid is None or not tools.check_pid(pid):
tools.stderr('Sopel is not running!')
return ERR_CODE
# Stop Sopel
if opts.kill:
tools.stderr('Killing the Sopel')
os.kill(pid, signal.SIGKILL)
return
tools.stderr('Signaling Sopel to stop gracefully')
if hasattr(signal, 'SIGUSR1'):
os.kill(pid, signal.SIGUSR1)
else:
# Windows will not generate SIGTERM itself
# https://docs.microsoft.com/en-us/cpp/c-runtime-library/reference/signal
os.kill(pid, signal.SIGTERM)
def command_restart(opts):
"""Restart a running Sopel instance"""
# Get Configuration
try:
settings = utils.load_settings(opts)
except config.ConfigurationNotFound as error:
tools.stderr('Configuration "%s" not found' % error.filename)
return ERR_CODE
if settings.core.not_configured:
tools.stderr('Sopel is not configured, can\'t stop')
return ERR_CODE
# Redirect Outputs
utils.redirect_outputs(settings, opts.quiet)
# Get Sopel's PID
filename = get_pid_filename(opts, settings.core.pid_dir)
pid = get_running_pid(filename)
if pid is None or not tools.check_pid(pid):
tools.stderr('Sopel is not running!')
return ERR_CODE
tools.stderr('Asking Sopel to restart')
if hasattr(signal, 'SIGUSR2'):
os.kill(pid, signal.SIGUSR2)
else:
# Windows will not generate SIGILL itself
# https://docs.microsoft.com/en-us/cpp/c-runtime-library/reference/signal
os.kill(pid, signal.SIGILL)
def command_legacy(opts):
"""Legacy Sopel run script
The ``legacy`` command manages the old-style ``sopel`` command line tool.
Most of its features are replaced by the following commands:
* ``sopel start`` replaces the default behavior (run the bot)
* ``sopel stop`` replaces the ``--quit/--kill`` options
* ``sopel restart`` replaces the ``--restart`` option
* ``sopel configure`` replaces the
``-w/--configure-all/--configure-modules`` options
The ``-v`` option for "version" is deprecated, ``-V/--version`` should be
used instead.
.. seealso::
The github issue `#1471`__ tracks various changes requested for future
versions of Sopel, some of them related to this legacy command.
.. __: https://github.com/sopel-irc/sopel/issues/1471
"""
# Step One: Handle "No config needed" options
if opts.version:
print_version()
return
elif opts.version_legacy:
tools.stderr(
'WARNING: option -v is deprecated; '
'use `sopel -V/--version` instead')
print_version()
return
# TODO: allow to use a different homedir
configpath = utils.find_config(
config.DEFAULT_HOMEDIR, opts.config or 'default')
if opts.wizard:
tools.stderr(
'WARNING: option -w/--configure-all is deprecated; '
'use `sopel configure` instead')
utils.wizard(configpath)
return
if opts.mod_wizard:
tools.stderr(
'WARNING: option --configure-modules is deprecated; '
'use `sopel configure --modules` instead')
utils.plugins_wizard(configpath)
return
if opts.list_configs:
print_config()
return
# Step Two: Get the configuration file and prepare to run
try:
config_module = get_configuration(opts)
except config.ConfigurationError as e:
tools.stderr(e)
return ERR_CODE_NO_RESTART
if config_module.core.not_configured:
tools.stderr('Bot is not configured, can\'t start')
return ERR_CODE_NO_RESTART
# Step Three: Manage logfile, stdout and stderr
utils.redirect_outputs(config_module, opts.quiet)
# Step Four: Handle process-lifecycle options and manage the PID file
pid_dir = config_module.core.pid_dir
pid_file_path = get_pid_filename(opts, pid_dir)
old_pid = get_running_pid(pid_file_path)
if old_pid is not None and tools.check_pid(old_pid):
if not opts.quit and not opts.kill and not opts.restart:
tools.stderr(
'There\'s already a Sopel instance running with this config file')
tools.stderr(
'Try using either the `sopel stop` command or the `sopel restart` command')
return ERR_CODE
elif opts.kill:
tools.stderr(
'WARNING: option -k/--kill is deprecated; '
'use `sopel stop --kill` instead')
tools.stderr('Killing the Sopel')
os.kill(old_pid, signal.SIGKILL)
return
elif opts.quit:
tools.stderr(
'WARNING: options -q/--quit is deprecated; '
'use `sopel stop` instead')
tools.stderr('Signaling Sopel to stop gracefully')
if hasattr(signal, 'SIGUSR1'):
os.kill(old_pid, signal.SIGUSR1)
else:
# Windows will not generate SIGTERM itself
# https://docs.microsoft.com/en-us/cpp/c-runtime-library/reference/signal
os.kill(old_pid, signal.SIGTERM)
return
elif opts.restart:
tools.stderr(
'WARNING: options --restart is deprecated; '
'use `sopel restart` instead')
tools.stderr('Asking Sopel to restart')
if hasattr(signal, 'SIGUSR2'):
os.kill(old_pid, signal.SIGUSR2)
else:
# Windows will not generate SIGILL itself
# https://docs.microsoft.com/en-us/cpp/c-runtime-library/reference/signal
os.kill(old_pid, signal.SIGILL)
return
elif opts.kill or opts.quit or opts.restart:
tools.stderr('Sopel is not running!')
return ERR_CODE
if opts.daemonize:
child_pid = os.fork()
if child_pid != 0:
return
with open(pid_file_path, 'w') as pid_file:
pid_file.write(str(os.getpid()))
# Step Five: Initialize and run Sopel
ret = run(config_module, pid_file_path)
os.unlink(pid_file_path)
if ret == -1:
os.execv(sys.executable, ['python'] + sys.argv)
else:
return ret
def main(argv=None):
"""Sopel run script entry point"""
try:
# Step One: Parse The Command Line
parser = build_parser()
# make sure to have an action first (`legacy` by default)
# TODO: `start` should be the default in Sopel 8
argv = argv or sys.argv[1:]
if not argv:
argv = ['legacy']
elif argv[0].startswith('-') and argv[0] not in ['-h', '--help']:
argv = ['legacy'] + argv
opts = parser.parse_args(argv)
# Step Two: "Do not run as root" checks
try:
check_not_root()
except RuntimeError as err:
tools.stderr('%s' % err)
return ERR_CODE
# Step Three: Handle command
action = getattr(opts, 'action', 'legacy')
command = {
'legacy': command_legacy,
'start': command_start,
'configure': command_configure,
'stop': command_stop,
'restart': command_restart,
}.get(action)
return command(opts)
except KeyboardInterrupt:
print("\n\nInterrupted")
return ERR_CODE
if __name__ == '__main__':
sys.exit(main())
| StarcoderdataPython |
895 | <reponame>zsimic/sandbox
import click
import poyo
import ruamel.yaml
import runez
import strictyaml
import yaml as pyyaml
from zyaml import load_path, load_string, tokens_from_path, tokens_from_string
from zyaml.marshal import decode, default_marshal, represented_scalar
from . import TestSettings
class ImplementationCollection(object):
def __init__(self, names, default="zyaml,ruamel"):
av = [ZyamlImplementation, RuamelImplementation, PyyamlBaseImplementation, PoyoImplementation, StrictImplementation]
self.available = dict((m.name, m()) for m in av)
self.unknown = []
self.selected = []
if names.startswith("+"):
names = "%s,%s" % (names[1:], default)
names = [s.strip() for s in names.split(",")]
names = [s for s in names if s]
seen = {}
for name in names:
found = 0
for i in self.available.values():
if name == "all" or name in i.name:
if i.name not in seen:
seen[i.name] = True
self.selected.append(i)
found += 1
if found == 0:
self.unknown.append(name)
self.combinations = None
def track_result_combination(self, impl, data):
if isinstance(data, Exception):
value = runez.stringified(data)
else:
value = runez.represented_json(data, stringify=decode, keep_none=True, none_key="-null-")
name = impl.name
if self.combinations is None:
self.combinations = {}
for i1 in self.selected:
for i2 in self.selected:
if i1.name < i2.name:
self.combinations[(i1.name, i2.name)] = set()
for names, values in self.combinations.items():
if name in names:
values.add(value)
def __repr__(self):
return ",".join(str(i) for i in self.selected)
def __len__(self):
return len(self.selected)
def __iter__(self):
for i in self.selected:
yield i
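# Selection is by substring match against the known implementation names, so
# e.g. ImplementationCollection("yaml") selects both zyaml and pyyaml, while a
# leading "+" adds to the defaults: "+poyo" selects poyo, zyaml and ruamel.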
class Implementation(object):
"""Implementation of loading a yml file"""
name = None # type: str
def __repr__(self):
return self.name
@classmethod
def option(cls, default="zyaml,ruamel", count=None, **kwargs):
"""
Args:
default (str | None): Default implementation(s) to use
count (int | None): Optional: exact number of implementations that have to specified
**kwargs: Passed-through to click
"""
kwargs["default"] = default
def _callback(_ctx, _param, value):
if not value:
return None
impls = ImplementationCollection(value, default=default)
if impls.unknown:
raise click.BadParameter("Unknown implementation(s): %s" % ", ".join(impls.unknown))
if count and len(impls) != count:
if count == 1:
raise click.BadParameter("Need exactly 1 implementation")
raise click.BadParameter("Need exactly %s" % runez.plural(count, "implementation"))
if count == 1:
return impls.selected[0]
return impls
metavar = "I1,..."
hlp = "Implementation(s)"
if count:
hlp = runez.plural(count, "implementation")
metavar = ",".join("I%s" % (i + 1) for i in range(count))
kwargs.setdefault("help", "%s to use" % hlp)
kwargs.setdefault("show_default", True)
kwargs.setdefault("metavar", metavar)
name = "implementation" if count == 1 else "implementations"
return click.option(name, "-i", callback=_callback, **kwargs)
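    # ``option`` is intended to decorate a click command; a sketch:
    #     @click.command()
    #     @Implementation.option(count=1)
    #     def show(implementation):
    #         ...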
def show_result(self, data, tokens=False):
rtype = "tokens" if tokens else data.__class__.__name__ if data is not None else "None"
rep = data
if not tokens or isinstance(data, Exception):
rep = TestSettings.represented(data)
message = "---- %s: %s" % (runez.bold(self.name), runez.dim(rtype))
if isinstance(data, NotImplementedError):
print("%s - %s" % (message, rep))
return
print(message)
print(rep)
def get_outcome(self, content, tokens=False):
if tokens:
data = self.tokens(content)
if isinstance(data, list):
data = "\n".join(self.represented_token(t) for t in data)
return data
return self.deserialized(content)
def deserialized(self, source):
value = TestSettings.protected_call(self._deserialized, source)
return self._simplified(value)
def tokens(self, source):
return TestSettings.protected_call(self._tokenize, source)
def represented_token(self, token):
return str(token)
def _deserialized(self, source):
if hasattr(source, "path"):
return self._deserialized_from_path(source.path)
return self._deserialized_from_string(source)
def _deserialized_from_path(self, path):
with open(path) as fh:
return self._deserialized_from_string(fh.read())
def _deserialized_from_string(self, source):
raise NotImplementedError()
def _tokenize(self, source):
if hasattr(source, "path"):
return self._tokens_from_path(source.path)
return self._tokens_from_string(source)
def _tokens_from_path(self, path):
with open(path) as fh:
return TestSettings.unwrapped(self._tokens_from_string(fh.read()))
def _tokens_from_string(self, source):
raise NotImplementedError()
def _simplified(self, value):
if isinstance(value, list) and len(value) == 1:
return value[0]
return value
class ZyamlImplementation(Implementation):
name = "zyaml"
def _deserialized_from_path(self, path):
return load_path(path)
def _deserialized_from_string(self, source):
return load_string(source)
def _tokens_from_path(self, path):
return tokens_from_path(path)
def _tokens_from_string(self, source):
return tokens_from_string(source)
def _simplified(self, value):
return value
def ruamel_passthrough_tags(loader, tag, node):
name = node.__class__.__name__
if "Seq" in name:
result = []
for v in node.value:
result.append(ruamel_passthrough_tags(loader, tag, v))
return result
if "Map" in name:
result = {}
for k, v in node.value:
k = ruamel_passthrough_tags(loader, tag, k)
v = ruamel_passthrough_tags(loader, tag, v)
result[k] = v
return result
return default_marshal(node.value)
class RuamelImplementation(Implementation):
name = "ruamel"
def _deserialized_from_string(self, source):
y = ruamel.yaml.YAML(typ="safe")
ruamel.yaml.add_multi_constructor("", ruamel_passthrough_tags, Loader=ruamel.yaml.SafeLoader)
return y.load_all(source)
def _tokens_from_string(self, source):
return ruamel.yaml.main.scan(source)
class PyyamlBaseImplementation(Implementation):
name = "pyyaml"
def _deserialized_from_string(self, source):
return pyyaml.load_all(source, Loader=pyyaml.BaseLoader)
def _tokens_from_string(self, source):
yaml_loader = pyyaml.BaseLoader(source)
curr = yaml_loader.get_token()
while curr is not None:
yield curr
curr = yaml_loader.get_token()
def represented_token(self, token):
linenum = token.start_mark.line + 1
column = token.start_mark.column + 1
result = "%s[%s,%s]" % (token.__class__.__name__, linenum, column)
value = getattr(token, "value", None)
if value is not None:
if token.id == "<scalar>":
value = represented_scalar(token.style, value)
elif token.id == "<anchor>":
value = "&%s" % value
elif token.id == "<alias>":
value = "*%s" % value
elif token.id == "<tag>":
assert isinstance(value, tuple)
value = " ".join(str(s) for s in runez.flattened(value))
elif token.id == "<directive>":
result += " %s" % token.name
value = " ".join(str(s) for s in runez.flattened(value))
else:
assert False
result = "%s %s" % (result, value)
return result
class PoyoImplementation(Implementation):
name = "poyo"
def _deserialized_from_string(self, source):
return [poyo.parse_string(source)]
class StrictImplementation(Implementation):
name = "strict"
def _deserialized_from_string(self, source):
obj = strictyaml.dirty_load(source, allow_flow_style=True)
return obj.data
| StarcoderdataPython |
3373200 | import tensorflow_datasets as tfds
dataset = 'cityscapes'
ds_info = tfds.builder(dataset).info
dataset_name='cityscapes_corrupted/semantic_segmentation_gaussian_noise_2'
builder = tfds.builder(dataset_name)
builder.download_and_prepare()
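# Once prepared, the data can be loaded via the standard tfds API, e.g.
# (assuming the corrupted variant exposes the usual cityscapes splits):
# ds = builder.as_dataset(split="train")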
#%%
| StarcoderdataPython |
4800268 | <filename>Veiculos.py
from datetime import datetime
class organizacao (object):
def __init__(self):
self.veiculo = []
self.prazos = []
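        # Data layout: self.veiculo holds one formatted string per vehicle whose
        # last character is an availability flag ('1' = available, '0' = rented
        # or reserved); self.prazos holds tuples of (vehicle index, due date
        # "d/m/yyyy", and - for rentals - the renter name and rental days).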
    def getVeiculos(self):  # number of registered vehicles
return len(self.veiculo)
    def getIndisponiveis(self):  # number of vehicles currently rented or reserved
return len(self.prazos)
def adicionar(self,marca,modelo,ano,aluguel):
veiculo = "Veiculo: " + str(len(self.veiculo) + 1) + " Modelo: " + modelo + " Marca: " + marca + " Ano: " + str(ano) + " Aluguel: " + str(aluguel) + " 1"
self.veiculo.append(veiculo)
input()
def consultar(self):
i = 0
while i < len(self.veiculo):
aux = self.veiculo[i].find("Marca")
print(self.veiculo[i][:aux-1])
if self.veiculo[i][-1::] == '1':
print("Veiculo disponível")
else:
print("Veiculo indisponível")
i += 1
detalhes = input("Para mais detalhes digite 1")
if detalhes == '1':
i = 0
while i < len(self.veiculo):
print(self.veiculo[i])
if self.veiculo[i][-1::] == '1':
print("Veiculo disponível")
else:
print("Veiculo indisponível")
i += 1
def alugar(self):
nomeLocatario = input("Nome do Locatario: ")
prazo = int(input("Por quanto tempo (dias) deseja alugar o veiculo: "))
now = datetime.now()
dia = now.day + prazo
mes = now.month
ano = now.year
if dia > 30:
dia -= 30
mes = now.month + 1
if mes > 12:
mes -= 12
ano = now.year + 1
if len(self.veiculo) == 0:
print("Não temos veiculos!!! sorry")
elif prazo > 30:
print("Aluguel e reservas somente poderão ser realizadas para no máximo 30 dias")
else:
escolha = int(input("Escolha o veiculo a ser alugado: "))
escolha -= 1
aux = int(self.veiculo[escolha][-1::])
if aux != 1:
print("Veiculo indisponível")
else:
self.veiculo[escolha] = self.veiculo[escolha].replace(self.veiculo[escolha][-1::], '0')
dataVencimento = escolha, str(dia) + "/" + str(mes) + "/" + str(ano), nomeLocatario, prazo
self.prazos.append(dataVencimento)
def reservar(self):
nomeLocatario = input("Nome do Locatario: ")
prazo = int(input("Por quanto tempo (dias) deseja alugar o veiculo: "))
if prazo > 30:
print("Aluguel e reservas somente poderão ser realizadas para no máximo 30 dias")
else:
escolha = int(input("Qual carro deseja rerservar: "))
escolha -= 1
dataReserva = input("Data para a reserva(d/m/aaaa): ")
reserva = dataReserva.split('/')
dia = int(reserva[0])
mes = int(reserva[1])
ano = int(reserva[2])
i = 0
while i < len(self.prazos):
if self.prazos[i][0] == escolha:
                    reservaVeiculo = self.prazos[i][1].split('/')  # date the vehicle is reserved until
diaV = int(reservaVeiculo[0])
mesV = int(reservaVeiculo[1])
anoV = int(reservaVeiculo[2])
if ano == anoV:
if mes == mesV:
if dia >= diaV:
dataVencimento = escolha, str(dia) + "/" + str(mes) + "/" + str(ano)
self.prazos.append(dataVencimento)
self.veiculo[escolha] = self.veiculo[escolha].replace(self.veiculo[escolha][-1::], '0')
else:
print("Veiculo indisponível")
else:
dataVencimento = escolha, str(dia) + "/" + str(mes) + "/" + str(ano)
self.prazos.append(dataVencimento)
self.veiculo[escolha] = self.veiculo[escolha].replace(self.veiculo[escolha][-1::], '0')
else:
dataVencimento = escolha, str(dia) + "/" + str(mes) + "/" + str(ano)
self.prazos.append(dataVencimento)
self.veiculo[escolha] = self.veiculo[escolha].replace(self.veiculo[escolha][-1::], '0')
i +=1
if i == len(self.prazos):
dataVencimento = escolha, str(dia) + "/" + str(mes) + "/" + str(ano), nomeLocatario, prazo
self.prazos.append(dataVencimento)
self.veiculo[escolha] = self.veiculo[escolha].replace(self.veiculo[escolha][-1::], '0')
def devolver(self, dia, mes, ano):
veiculo = int(input("Veiculo a ser devolvido: "))
veiculo -=1
i = 0
while i < len(self.prazos):
print("Veiculo alugado: %d"%(self.prazos[i][0] + 1))
input()
if self.prazos[i][0] == veiculo:
                prazoVeiculo = self.prazos[i][1].split('/')  # date the vehicle is due back
diaV = int(prazoVeiculo[0])
mesV = int(prazoVeiculo[1])
anoV = int(prazoVeiculo[2])
aux1 = self.veiculo[i].find("Aluguel")
aux1 += 10
valor = float(self.veiculo[i][aux1: -2])
if mes == mesV:
if dia == diaV:
print("Nome do Locatario: %s"%(self.prazos[i][2]))
print("Pagar: %d"%(valor*self.prazos[i][3]))
self.prazos.remove(self.prazos[i])
self.veiculo[veiculo] = self.veiculo[veiculo].replace(self.veiculo[veiculo][-1::], '1')
else:
print("Nome do Locatario: %s"%(self.prazos[i][2]))
print("Veiculo está atrasado, Pagar: %d"%(valor*self.prazos[i][3]+(valor*(dia - diaV))))
self.prazos.remove(self.prazos[i])
self.veiculo[veiculo] = self.veiculo[veiculo].replace(self.veiculo[veiculo][-1::], '1')
else:
mesA = mes - mesV
diasA = 30 - diaV + 30*(mesA-1) + dia
print("Nome do Locatario: %s"%(self.prazos[i][2]))
print("Veiculo está atrasado, Pagar: %d"%(valor*self.prazos[i][3]+(valor*(diasA))))
self.prazos.remove(self.prazos[i])
self.veiculo[veiculo] = self.veiculo[veiculo].replace(self.veiculo[veiculo][-1::], '1')
i += 1
def liberar(self):
veiculo = int(input("Veiculo a ser devolvido: "))
veiculo -= 1
i = 0
while i < len(self.prazos):
print("Veiculo alugado: %d"%(self.prazos[i][0] + 1))
input()
            if self.prazos[i][0] == veiculo:
                self.prazos.remove(self.prazos[i])
                self.veiculo[veiculo] = self.veiculo[veiculo].replace(self.veiculo[veiculo][-1::], '1')
                break  # stop iterating once the matching entry has been removed
            i += 1
def excluir(self):
remover = int(input("Veiculo a ser removido: "))
remover -= 1
if self.veiculo[remover][-1::] == '1':
self.veiculo.remove(self.veiculo[remover])
else:
print("O veiculo têm rezervas")
| StarcoderdataPython |
184103 | # upload a pile of images from the given list. the given list has one path per
# line, where each path points to a specific image to upload. it outputs the
# uploaded image info into a comma separated list, where each line is
# image_pk,status,path.
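# For example (illustrative paths only): "17,ok,/photos/cat.jpg" on success,
# or ",too_big,/photos/huge.tiff" with an empty pk column on failure.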
from django.db import transaction
from django.core.management.base import BaseCommand, CommandError
import pathlib
from browser.models import User
from image_mgr.models import Image
from image_mgr.process_image import (process_image, MAX_IMAGE_SIZE,
ProcessingFailure, UnacceptableImage)
class Command(BaseCommand):
help = "Regenerate thumbnails for all images in the database."
def add_arguments(self, parser):
parser.add_argument("file_list", type=str)
parser.add_argument("output_list", type=str)
def handle(self, *args, **options):
file_list = pathlib.Path(options["file_list"]).resolve(strict=True)
output_list = pathlib.Path(options["output_list"])
if output_list.exists():
raise Exception("output list already exists")
file_list = open(file_list, "r")
output_list = open(output_list, "w")
# figure out who to upload as. we use the superuser with the lowest ID
# because they're like the ultra-super user.
uploader = User.objects.filter(
is_superuser=True).order_by('pk')[:1].get()
print("Uploading as '{}'".format(uploader))
for file_name in file_list:
try:
file_name = pathlib.Path(file_name.replace("\n", ""))
image_file = open(file_name, "rb")
image_data = image_file.read(MAX_IMAGE_SIZE+1)
image_file.close()
except Exception as e:
msg = ",read_error:{},{}\n".format(
str(e).replace(","," "), file_name)
print(msg, end="")
output_list.write(msg)
continue
if len(image_data) > MAX_IMAGE_SIZE:
msg = ",too_big,{}\n".format(file_name)
print(msg, end="")
output_list.write(msg)
continue
try:
new_image = process_image(uploader=uploader,
name=file_name.name, orig_data=image_data)
except ProcessingFailure as e:
msg = ",corrupt:{},{}\n".format(
str(e).replace(","," "), file_name)
print(msg, end="")
output_list.write(msg)
except UnacceptableImage as e:
msg = ",unacceptable:{},{}\n".format(
str(e).replace(","," "), file_name)
print(msg, end="")
output_list.write(msg)
else:
msg = "{},ok,{}\n".format(
new_image.pk, file_name)
print(msg, end="")
output_list.write(msg)
file_list.close()
output_list.close()
print("Complete")
| StarcoderdataPython |
99147 | <filename>plugins/lighthouse/ui/module_selector.py
import os
import logging
from lighthouse.util import lmsg
from lighthouse.util.qt import *
from lighthouse.util.misc import human_timestamp
from lighthouse.util.python import *
logger = logging.getLogger("Lighthouse.UI.ModuleSelector")
#------------------------------------------------------------------------------
# Coverage Xref Dialog
#------------------------------------------------------------------------------
class ModuleSelector(QtWidgets.QDialog):
"""
A Qt Dialog to list all the coverage modules in a coverage file.
This class makes up a rudimentary selector dialog. It does not follow Qt
'best practices' because it does not need to be super flashy, nor does
it demand much facetime.
"""
def __init__(self, target_name, module_names, coverage_file):
super(ModuleSelector, self).__init__()
self._target_name = target_name
self._module_names = module_names
self._coverage_file = os.path.basename(coverage_file)
# dialog attributes
self.selected_name = None
# configure the widget for use
self._ui_init()
@property
def remember_alias(self):
return self._checkbox_remember.isChecked()
@property
def ignore_missing(self):
return self._checkbox_ignore_missing.isChecked()
#--------------------------------------------------------------------------
# Initialization - UI
#--------------------------------------------------------------------------
def _ui_init(self):
"""
Initialize UI elements.
"""
self.setWindowTitle("Select module matching this database")
self.setWindowFlags(self.windowFlags() & ~QtCore.Qt.WindowContextHelpButtonHint)
self.setModal(True)
self._font = self.font()
self._font.setPointSizeF(normalize_to_dpi(10))
self._font_metrics = QtGui.QFontMetricsF(self._font)
# initialize module selector table
self._ui_init_header()
self._ui_init_table()
self._populate_table()
# layout the populated UI just before showing it
self._ui_layout()
def _ui_init_header(self):
"""
Initialize the module selector header UI elements.
"""
description_text = \
"Lighthouse could not automatically identify the target module in the given coverage file:<br />" \
"<br />" \
"-- <b>Target:</b> %s<br />" \
"-- <b>Coverage File:</b> %s<br />" \
"<br />" \
"Please double click the name of the module that matches this database, or close this dialog<br />" \
"if you do not see your binary listed in the table below..." % (self._target_name, self._coverage_file)
self._label_description = QtWidgets.QLabel(description_text)
self._label_description.setTextFormat(QtCore.Qt.RichText)
self._label_description.setFont(self._font)
#self._label_description.setWordWrap(True)
# a checkbox to save the user selected alias to the database
self._checkbox_remember = QtWidgets.QCheckBox("Remember target module alias for this session")
self._checkbox_remember.setFont(self._font)
# a checkbox to ignore future 'missing coverage' / select module warnings
self._checkbox_ignore_missing = QtWidgets.QCheckBox("Suppress this dialog for the remaining coverage files")
self._checkbox_ignore_missing.setFont(self._font)
def _ui_init_table(self):
"""
Initialize the module selector table UI elements.
"""
self._table = QtWidgets.QTableWidget()
self._table.verticalHeader().setVisible(False)
self._table.setHorizontalScrollMode(QtWidgets.QAbstractItemView.ScrollPerPixel)
self._table.horizontalHeader().setFont(self._font)
self._table.setFont(self._font)
# Create a simple table / list
self._table.setColumnCount(1)
self._table.setHorizontalHeaderLabels(["Module Name"])
# left align text in column headers
self._table.horizontalHeaderItem(0).setTextAlignment(QtCore.Qt.AlignLeft)
# disable bolding of column headers when selected
self._table.horizontalHeader().setHighlightSections(False)
# stretch the last column of the table (aesthetics)
self._table.horizontalHeader().setStretchLastSection(True)
# make table read only, select a full row by default
self._table.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
self._table.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectRows)
# catch double click events on table rows
self._table.cellDoubleClicked.connect(self._ui_cell_double_click)
def _populate_table(self):
"""
Populate the module table with the module names provided to this dialog.
"""
self._table.setSortingEnabled(False)
self._table.setRowCount(len(self._module_names))
for i, module_name in enumerate(self._module_names, 0):
self._table.setItem(i, 0, QtWidgets.QTableWidgetItem(module_name))
self._table.resizeRowsToContents()
self._table.setSortingEnabled(True)
def _ui_layout(self):
"""
Layout the major UI elements of the widget.
"""
layout = QtWidgets.QVBoxLayout()
#layout.setContentsMargins(0,0,0,0)
# layout child widgets
layout.addWidget(self._label_description)
layout.addWidget(self._table)
layout.addWidget(self._checkbox_remember)
layout.addWidget(self._checkbox_ignore_missing)
# scale widget dimensions based on DPI
height = get_dpi_scale() * 250
width = get_dpi_scale() * 400
self.setMinimumHeight(height)
self.setMinimumWidth(width)
# apply the widget layout
self.setLayout(layout)
#--------------------------------------------------------------------------
# Signal Handlers
#--------------------------------------------------------------------------
def _ui_cell_double_click(self, row, column):
"""
A cell/row has been double clicked in the module table.
"""
self.selected_name = self._table.item(row, 0).text()
        self.accept()
 | StarcoderdataPython |
3353364 | <filename>hw1_code/scripts/evaluator.py<gh_stars>0
#!/bin/python2.5
import sys
import os
from sklearn.metrics import average_precision_score
if __name__=="__main__":
# load the ground-truth file list
y_true_dir = sys.argv[1]
y_pred_dir = sys.argv[2]
event = y_pred_dir.split('/')[1].split('_')[0]
with open(y_true_dir, 'r') as f:
y_true = [line.split()[1] for line in f.readlines()]
y_true = list(map(lambda x: float(x==event), y_true))
#print y_true[:10]
with open(y_pred_dir, 'r') as f:
y_pred = f.read().split()
y_pred = list(map(lambda x: float(x), y_pred))
print "Average precision: ",average_precision_score(y_true,y_pred)
| StarcoderdataPython |
1721873 | from app.core.crud import CrudRouter
from .models import DataType
from .serializers import DataTypeSerializer
from .views import DataTypeView
data_types_router = CrudRouter(
model=DataType,
serializer=DataTypeSerializer,
view=DataTypeView,
prefix="/api/v1/constructor/data-types",
tags=["data-types"],
).get_router()
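# A sketch of mounting this router, assuming the project-local CrudRouter
# wraps a FastAPI APIRouter:
#     from fastapi import FastAPI
#     app = FastAPI()
#     app.include_router(data_types_router)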
| StarcoderdataPython |
1631567 | from .algorithm1 import Algorithm
from .cp_ortools import CPModel1
solvers = \
dict(default=Algorithm,
ortools=CPModel1)
# factory of solvers
def get_solver(name='default'):
return solvers.get(name)
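# e.g. get_solver() returns Algorithm, get_solver('ortools') returns CPModel1,
# and an unknown name returns None (plain dict.get behavior).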
| StarcoderdataPython |
1695426 | # Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""The `JointDistributionCoroutine` class."""
import collections
import warnings
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.distributions import joint_distribution as joint_distribution_lib
from tensorflow_probability.python.internal import structural_tuple
from tensorflow.python.util import nest # pylint: disable=g-direct-tensorflow-import
__all__ = [
'JointDistributionCoroutine',
]
JAX_MODE = False
# Cause all warnings to always be triggered.
# Not having this means subsequent calls wont trigger the warning.
warnings.filterwarnings(
'always',
module='tensorflow_probability.*joint_distribution_coroutine',
append=True) # Don't override user-set filters.
class JointDistributionCoroutine(joint_distribution_lib.JointDistribution):
"""Joint distribution parameterized by a distribution-making generator.
This distribution enables both sampling and joint probability computation from
a single model specification.
A joint distribution is a collection of possibly interdependent distributions.
The `JointDistributionCoroutine` is specified by a generator that
generates the elements of this collection.
#### Mathematical Details
The `JointDistributionCoroutine` implements the chain rule of probability.
That is, the probability function of a length-`d` vector `x` is,
```none
p(x) = prod{ p(x[i] | x[:i]) : i = 0, ..., (d - 1) }
```
The `JointDistributionCoroutine` is parameterized by a generator
that yields `tfp.distributions.Distribution`-like instances.
Each element yielded implements the `i`-th *full conditional distribution*,
`p(x[i] | x[:i])`. Within the generator, the return value from the yield
is a sample from the distribution that may be used to construct subsequent
yielded `Distribution`-like instances. This allows later instances
to be conditional on earlier ones.
**Name resolution**: The names of `JointDistributionCoroutine` components
may be specified by passing `name` arguments to distribution constructors (
`tfd.Normal(0., 1., name='x')). Components without an explicit name will be
assigned a dummy name.
#### Vectorized sampling and model evaluation
When a joint distribution's `sample` method is called with
a `sample_shape` (or the `log_prob` method is called on an input with
multiple sample dimensions) the model must be equipped to handle
additional batch dimensions. This may be done manually, or automatically
by passing `use_vectorized_map=True`. Manual vectorization has historically
been the default, but we now recommend that most users enable automatic
vectorization unless they are affected by a specific issue; some
known issues are listed below.
When using manually-vectorized joint distributions, each operation in the
model must account for the possibility of batch dimensions in Distributions
and their samples. By contrast, auto-vectorized models need only describe
a *single* sample from the joint distribution; any batch evaluation is
automated as required using `tf.vectorized_map` (`vmap` in JAX). In many
  cases this allows for significant simplifications. For example, the following
manually-vectorized `tfd.JointDistributionCoroutine` model:
```python
def model_fn():
x = yield tfd.JointDistributionCoroutine.Root(
tfd.Normal(0., tf.ones([3])))
y = yield tfd.JointDistributionCoroutine.Root(
tfd.Normal(0., 1.))
z = yield tfd.Normal(x[..., :2] + y[..., tf.newaxis], 1.)
  ```
can be written in auto-vectorized form as
```python
def model_fn():
x = yield tfd.Normal(0., tf.ones([3]))
y = yield tfd.Normal(0., 1.)
z = yield tfd.Normal(x[:2] + y, 1.)
```
in which we were able to drop the specification of `Root` nodes and to
avoid explicitly accounting for batch dimensions when indexing and slicing
computed quantities in the third line.
**Root annotations**: When the `sample` method for a manually-vectorized
`JointDistributionCoroutine` is called with a `sample_shape`, the `sample`
method for each of the yielded distributions is called.
The distributions that have been wrapped in the
`JointDistributionCoroutine.Root` class will be called with `sample_shape`
as the `sample_shape` argument, and the unwrapped distributions
will be called with `()` as the `sample_shape` argument. It is the user's
responsibility to ensure that each of the distributions generates samples
with the specified sample size; generally this means applying `Root` wrappers
around any distributions whose parameters are not already a function of other
random variables. The `Root` annotation can be omitted if you never intend to
use a `sample_shape` other than `()`.
**Known limitations of automatic vectorization:**
- A small fraction of TensorFlow ops are unsupported; models that use an
unsupported op will raise an error and must be manually vectorized.
- Sampling large batches may be slow under automatic vectorization because
TensorFlow's stateless samplers are currently converted using a
non-vectorized `while_loop`. This limitation applies only in TensorFlow;
vectorized samplers in JAX should be approximately as fast as manually
vectorized code.
- Calling `sample_distributions` with nontrivial `sample_shape` will raise
an error if the model contains any distributions that are not registered as
CompositeTensors (TFP's basic distributions are usually fine, but support
for wrapper distributions like `tfd.Sample` is a work in progress).
#### Batch semantics and (log-)densities
**tl;dr:** pass `batch_ndims=0` unless you have a good reason not to.
Joint distributions now support 'auto-batching' semantics, in which
the distribution's batch shape is derived by broadcasting the leftmost
`batch_ndims` dimensions of its components' batch shapes. All remaining
dimensions are considered to form a single 'event' of the joint distribution.
If `batch_ndims==0`, then the joint distribution has batch shape `[]`, and all
component dimensions are treated as event shape. For example, the model
```python
def model_fn():
x = yield tfd.Normal(0., tf.ones([3]))
y = yield tfd.Normal(x[..., tf.newaxis], tf.ones([3, 2]))
jd = tfd.JointDistributionCoroutine(model_fn, batch_ndims=0)
```
creates a joint distribution with batch shape `[]` and event shape
`([3], [3, 2])`. The log-density of a sample always has shape
`batch_shape`, so this guarantees that
`jd.log_prob(jd.sample())` will evaluate to a scalar value. We could
alternately construct a joint distribution with batch shape `[3]` and event
shape `([], [2])` by setting `batch_ndims=1`, in which case
`jd.log_prob(jd.sample())` would evaluate to a value of shape `[3]`.
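As a minimal sketch of the `batch_ndims=1` construction just described (using
the same `model_fn`):
```python
jd = tfd.JointDistributionCoroutine(model_fn, batch_ndims=1)
print(jd.batch_shape)  # ==> [3]
lp = jd.log_prob(jd.sample())  # shape [3]
```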
Setting `batch_ndims=None` recovers the 'classic' batch semantics (currently
still the default for backwards-compatibility reasons), in which the joint
distribution's `log_prob` is computed by naively summing log densities from
the component distributions. Since these component densities have shapes equal
to the batch shapes of the individual components, to avoid broadcasting
errors it is usually necessary to construct the components with identical
batch shapes. For example, the component distributions in the model above
have batch shapes of `[3]` and `[3, 2]` respectively, which would raise an
error if summed directly, but can be aligned by wrapping with
`tfd.Independent`, as in this model:
```python
def model_fn():
x = yield tfd.Normal(0., tf.ones([3]))
y = yield tfd.Independent(tfd.Normal(x[..., tf.newaxis], tf.ones([3, 2])),
reinterpreted_batch_ndims=1)
jd = tfd.JointDistributionCoroutine(model_fn, batch_ndims=None)
```
Here the components both have batch shape `[3]`, so
`jd.log_prob(jd.sample())` returns a value of shape `[3]`, just as in the
`batch_ndims=1` case above. In fact, auto-batching semantics are equivalent to
implicitly wrapping each component `dist` as `tfd.Independent(dist,
reinterpreted_batch_ndims=(dist.batch_shape.ndims - jd.batch_ndims))`; the only
vestigial difference is that under auto-batching semantics, the joint
distribution has a single batch shape `[3]`, while under the classic semantics
the value of `jd.batch_shape` is a *structure* of the component batch shapes
`([3], [3])`. Such structured batch shapes will be deprecated in the future,
since they are inconsistent with the definition of batch shapes used
elsewhere in TFP.
#### Examples
```python
tfd = tfp.distributions
def model():
global_log_rate = yield tfd.Normal(loc=0., scale=1.)
local_log_rates = yield tfd.Normal(loc=0., scale=tf.ones([20]))
observed_counts = yield tfd.Poisson(
rate=tf.exp(global_log_rate + local_log_rates))
joint = tfd.JointDistributionCoroutine(model,
use_vectorized_map=True,
batch_ndims=0)
print(joint.event_shape)
# ==> [[], [20], [20]]
print(joint.batch_shape)
# ==> []
xs = joint.sample()
print([x.shape for x in xs])
# ==> [[], [20], [20]]
lp = joint.log_prob(xs)
print(lp.shape)
# ==> []
```
Note that the component distributions of this model would, by themselves,
return batches of log-densities (because they are constructed with batch
shape); the joint model implicitly sums over these to compute the single
joint log-density.
```python
ds, xs = joint.sample_distributions()
print([d.event_shape for d in ds])
# ==> [[], [], []] != model.event_shape
print([d.batch_shape for d in ds])
# ==> [[], [20], [20]] != model.batch_shape
print([d.log_prob(x).shape for (d, x) in zip(ds, xs)])
# ==> [[], [20], [20]]
```
For improved readability of sampled values, the yielded distributions can also
be named:
```python
tfd = tfp.distributions
def model():
global_log_rate = yield tfd.Normal(
loc=0., scale=1., name='global_log_rate')
local_log_rates = yield tfd.Normal(
loc=0., scale=tf.ones([20]), name='local_log_rates')
observed_counts = yield tfd.Poisson(
rate=tf.exp(global_log_rate + local_log_rates), name='observed_counts')
joint = tfd.JointDistributionCoroutine(model,
use_vectorized_map=True,
batch_ndims=0)
print(joint.event_shape)
# ==> StructTuple(global_log_rate=[], local_log_rates=[20],
# observed_counts=[20])
print(joint.batch_shape)
# ==> []
xs = joint.sample()
print(['{}: {}'.format(k, x.shape) for k, x in xs._asdict().items()])
# ==> global_log_rate: []
# local_log_rates: [20]
# observed_counts: [20]
lp = joint.log_prob(xs)
print(lp.shape)
# ==> []
# Passing via `kwargs` also works.
lp = joint.log_prob(**xs._asdict())
# Or:
lp = joint.log_prob(
    global_log_rate=...,
local_log_rates=...,
observed_counts=...,
)
```
If any of the yielded distributions are not explicitly named, they will
automatically be given a name of the form `var#` where `#` is the index of the
associated distribution. E.g. the first yielded distribution will have a
default name of `var0`.
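For example (a minimal sketch):
```python
def model():
  x = yield tfd.Normal(0., 1.)           # default name: 'var0'
  y = yield tfd.Normal(x, 1., name='y')  # explicit name: 'y'
```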
#### References
[1] <NAME>, <NAME>, and <NAME>. Joint distributions for
TensorFlow Probability. _arXiv preprint arXiv:2001.11819_,
2020. https://arxiv.org/abs/2001.11819
"""
def __init__(self,
model,
sample_dtype=None,
batch_ndims=None,
use_vectorized_map=False,
validate_args=False,
experimental_use_kahan_sum=False,
name=None):
"""Construct the `JointDistributionCoroutine` distribution.
Args:
model: A generator that yields a sequence of `tfd.Distribution`-like
instances.
sample_dtype: Samples from this distribution will be structured like
`tf.nest.pack_sequence_as(sample_dtype, list_)`. `sample_dtype` is only
used for `tf.nest.pack_sequence_as` structuring of outputs, never
casting (which is the responsibility of the component distributions).
Default value: `None` (i.e. `namedtuple`).
batch_ndims: `int` `Tensor` number of batch dimensions. The `batch_shape`s
of all component distributions must be such that the prefixes of
length `batch_ndims` broadcast to a consistent joint batch shape.
Default value: `None`.
use_vectorized_map: Python `bool`. Whether to use `tf.vectorized_map`
to automatically vectorize evaluation of the model. This allows the
model specification to focus on drawing a single sample, which is often
simpler, but some ops may not be supported.
Default value: `False`.
validate_args: Python `bool`. Whether to validate input with asserts.
If `validate_args` is `False`, and the inputs are invalid,
correct behavior is not guaranteed.
Default value: `False`.
experimental_use_kahan_sum: Python `bool`. When `True`, we use Kahan
summation to aggregate independent underlying log_prob values, which
      improves on the precision of a naive float32 sum. This can be
noticeable in particular for large dimensions in float32. See CPU caveat
on `tfp.math.reduce_kahan_sum`. This argument has no effect if
`batch_ndims is None`.
Default value: `False`.
name: The name for ops managed by the distribution.
Default value: `None` (i.e., `JointDistributionCoroutine`).
"""
parameters = dict(locals())
with tf.name_scope(name or 'JointDistributionCoroutine') as name:
self._model_coroutine = model
# Hint `no_dependency` to tell tf.Module not to screw up the sample dtype
# with extraneous wrapping (list => ListWrapper, etc.).
self._sample_dtype = self._no_dependency(sample_dtype)
super(JointDistributionCoroutine, self).__init__(
dtype=sample_dtype,
batch_ndims=batch_ndims,
use_vectorized_map=use_vectorized_map,
validate_args=validate_args,
parameters=parameters,
experimental_use_kahan_sum=experimental_use_kahan_sum,
name=name)
@property
def model(self):
return self._model_coroutine
def _model_unflatten(self, xs):
if self._sample_dtype is None:
return structural_tuple.structtuple(self._flat_resolve_names())(*xs)
# Cast `xs` as `tuple` so we can handle generators.
return tf.nest.pack_sequence_as(self._sample_dtype, tuple(xs))
def _model_flatten(self, xs):
if self._sample_dtype is None:
return tuple((xs[k] for k in self._flat_resolve_names())
if isinstance(xs, collections.abc.Mapping) else xs)
return nest.flatten_up_to(self._sample_dtype, xs)
| StarcoderdataPython |
1665851 | <filename>wiseguy/__init__.py
from translationstring import TranslationStringFactory
_ = TranslationStringFactory('wiseguy')
from wiseguy.schema import StrictSchema # API
from wiseguy.schema import Url # API
from wiseguy.schema import WSGIApp # API
class WSGIComponent(object):
def __init__(self, schema, factory):
self.schema = schema
self.factory = factory
| StarcoderdataPython |
178081 | import streamlit as st
import numpy as np
import pandas as pd
from sklearn.datasets import load_iris
from .generic import Tool
iris = pd.DataFrame(load_iris()["data"])
df = pd.DataFrame(
np.random.randn(50, 20),
columns=('col %d' % i for i in range(20)))
# st.dataframe(df) # Same as st.write(df)
class Dataframe(Tool):
name = "Dataframes"
description = """
Render tabular input in formats like [CSV](https://en.wikipedia.org/wiki/Comma-separated_values) to [dataframes](https://databricks.com/glossary/what-are-dataframes).
"""
# https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html.
def __init__(self):
self.text = ""
def make_examples(self):
return {
"Example 1": iris,
"Example 2": pd.read_csv("https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data"),
}
def make_input(self):
pass
def make_config(self):
use_container_width = st.checkbox("Use container width")
st.session_state.config = dict(
use_container_width = use_container_width
)
def make_output(self):
use_container_width = st.session_state.config["use_container_width"]
        st.dataframe(iris, use_container_width=use_container_width)
# st.write(st.session_state)
| StarcoderdataPython |
1649952 | #!/usr/bin/env python
# coding: utf8
def cram(text, maxlen):
    """Omit part of a string if needed to make it fit in a
    maximum length."""
    if isinstance(text, bytes):
        text = text.decode('utf-8')
    if len(text) > maxlen:
        pre = max(0, maxlen - 3)
        text = text[:pre] + '...'
    return text.encode('utf-8')
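
# Example: cram(b"hello world", 8) -> b'hello...' (8 bytes).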
| StarcoderdataPython |
3293547 | # Author : <NAME>
# Email : <EMAIL>
#
# This file is part of LibNeuralArt
''' artistic oprs, used for creating arts '''
import numpy as np
import tensorflow as tf
def na_content_loss(inp, ref):
n = ref.shape[1] * ref.shape[2]
c = ref.shape[3]
loss = (1. / (2. * n ** 0.5 * c ** 0.5)) * tf.reduce_sum(tf.pow((inp - ref), 2))
return loss
def na_style_loss(inp, ref):
n = ref.shape[1] * ref.shape[2]
c = ref.shape[3]
ref = ref.reshape(n, c)
inp = tf.reshape(inp, (n, c))
gref = np.dot(ref.T, ref)
ginp = tf.matmul(tf.transpose(inp), inp)
loss = (1. / (4. * n ** 2 * c ** 0.5)) * tf.reduce_sum(tf.pow(ginp - gref, 2))
return loss
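
# Example usage (illustrative sketch; `gen_feats`, `content_feats` and
# `style_feats` are assumed to be 4-D activation tensors from a feature
# extractor, with `alpha`/`beta` user-chosen weights):
#   total_loss = (alpha * na_content_loss(gen_feats, content_feats)
#                 + beta * na_style_loss(gen_feats, style_feats))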
| StarcoderdataPython |
3381579 | from enable.tools.viewport_zoom_tool import ViewportZoomTool
from traits.api import DelegatesTo, Property
class MappingZoomTool(ViewportZoomTool):
"""Zoom tool for a map viewport.
self.component is the viewport
self.component.component is the canvas
"""
zoom_level = DelegatesTo('component')
min_level = Property(lambda self: self.component.min_level)
max_level = Property(lambda self: self.component.max_level)
def _min_zoom_default(self):
if self.zoom_level == self.min_level:
return 1.0
else:
return 0.5
def _max_zoom_default(self):
if self.zoom_level == self.max_level:
return 1.0
else:
return 2.0
def _zoom_level_changed(self, old, new):
self.min_zoom = 0.5
self.max_zoom = 2.0
if new == self.min_level:
self.min_zoom = 1.0
if new == self.max_level:
self.max_zoom = 1.0
def normal_mouse_wheel(self, event):
""" Handles the mouse wheel being used when the tool is in the 'normal'
state.
Scrolling the wheel "up" zooms in; scrolling it "down" zooms out.
"""
if self.enable_wheel and event.mouse_wheel != 0:
self.do_zoom(x=event.x, y=event.y,
zoom_step=self.wheel_zoom_step,
zoom_dir=event.mouse_wheel)
event.handled = True
def do_zoom(self, x, y, zoom_step, zoom_dir):
""" Zoom around pixel coordinates (x, y) by 'zoom_step'.
'zoom_dir' is <0 for zooming out, >0 for zooming in, 0 for no zoom.
"""
position = self.component.view_position
scale = self.component.zoom
transformed_x = x / scale + position[0]
transformed_y = y / scale + position[1]
# Calculate zoom
new_zoom = 1.0
if zoom_dir < 0:
zoom = 1.0 / (1.0 + 0.5 * zoom_step)
new_zoom = self.component.zoom * zoom
elif zoom_dir > 0:
zoom = 1.0 + 0.5 * zoom_step
new_zoom = self.component.zoom * zoom
# Change zoom level if necessary
factor = 1
if new_zoom < self.min_zoom:
new_zoom = 1.0
factor = 0.5
if self.zoom_level > self.min_level:
self.zoom_level -= 1
else:
return
elif new_zoom > self.max_zoom:
new_zoom = 1.0
factor = 2
if self.zoom_level < self.max_level:
self.zoom_level += 1
else:
return
x_pos = (transformed_x - (transformed_x - position[0]) / zoom) * factor
y_pos = (transformed_y - (transformed_y - position[1]) / zoom) * factor
self.component.zoom = new_zoom
bounds = self.component.view_bounds
self.component.set(
view_bounds=[bounds[0] / zoom, bounds[1] / zoom],
view_position=[x_pos, y_pos]
)
self.component.request_redraw()
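
# Typical wiring (illustrative sketch; assumes `viewport` is an enable
# Viewport exposing `zoom_level`, `min_level` and `max_level` traits):
#   viewport.tools.append(MappingZoomTool(component=viewport))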
| StarcoderdataPython |
3210968 | from stdnet.exceptions import *
from structures import pipelines, Structure
novalue = object()
try:
import cPickle as pickle
except ImportError:
import pickle
#default_pickler = jsonPickler()
default_pickler = pickle
class NoPickle(object):
def loads(self, s):
return s
def dumps(self, obj):
return obj
nopickle = NoPickle()
class Keys(object):
def __init__(self,id,timeout,pipeline):
self.timeout = timeout
self.value = None
pipeline[id] = self
def add(self, value):
self.value = value
class BackendDataServer(object):
'''Generic interface for a backend database:
* *name* name of database, such as **redis**, **couchdb**, etc..
* *params* dictionary of configuration parameters
* *pickler* calss for serializing and unserializing data. It must implement the *loads* and *dumps* methods.
'''
structure_module = None
def __init__(self, name, params, pickler = None):
self.__name = name
timeout = params.get('timeout', 0)
try:
timeout = int(timeout)
except (ValueError, TypeError):
timeout = 0
self.default_timeout = timeout
self._cachepipe = {}
self._keys = {}
self.params = params
self.pickler = pickler or default_pickler
@property
def name(self):
return self.__name
def __repr__(self):
return '%s backend' % self.__name
def __str__(self):
return self.__repr__()
def createdb(self, name):
pass
def delete(self, *key):
"Delete one or more keys specified by ``keys``"
raise NotImplementedError
def get_object(self, meta, name, value):
        '''Retrieve an object from the database. If the object is not available, it raises
an :class:`stdnet.exceptions.ObjectNotFund` exception.
* *meta* :ref:`database metaclass <database-metaclass>` or model
* *name* name of field (must be unique)
* *value* value of field to search.'''
if name != 'id':
id = self._get(meta.basekey(name,value))
else:
id = value
if id is None:
raise ObjectNotFund
data = self.hash(meta.basekey()).get(id)
        if data is None:
raise ObjectNotFund
return meta.make(id,data)
def _get_pipe(self, id, typ, timeout):
cache = self._cachepipe
cvalue = cache.get(id,None)
if cvalue is None:
cvalue = pipelines(typ, timeout)
cache[id] = cvalue
return cvalue
def add_object(self, obj, data, indexes, commit = True):
'''Add a model object to the database:
* *obj* instance of :ref:`StdModel <model-model>` to add to database
* *commit* If True, *obj* is saved to database, otherwise it remains in local cache.
'''
meta = obj._meta
timeout = meta.timeout
cache = self._cachepipe
hash = meta.table()
objid = obj.id
hash.add(objid, data)
# Create indexes if possible
for field,value in indexes:
key = meta.basekey(field.name,value)
if field.unique:
index = self.index_keys(key, timeout)
else:
if field.ordered:
index = self.ordered_set(key, timeout, pickler = nopickle)
else:
index = self.unordered_set(key, timeout, pickler = nopickle)
index.add(objid)
if commit:
self.commit()
def commit(self):
'''Commit cache objects to database'''
cache = self._cachepipe
keys = self._keys
# flush cache
self._cachepipe = {}
self._keys = {}
# commit
for id,pipe in cache.iteritems():
el = getattr(self,pipe.method)(id, pipeline = pipe)
el.save()
if keys:
self._set_keys(keys)
def delete_object(self, obj, deleted = None):
'''Delete an object from the data server and clean up indices.'''
deleted = deleted if deleted is not None else []
meta = obj._meta
timeout = meta.timeout
hash = meta.table()
bkey = meta.basekey
objid = obj.id
if not hash.delete(objid):
return 0
for field in meta.fields:
name = field.name
if field.index:
key = bkey(name,field.serialize(getattr(obj,name,None)))
if field.unique:
deleted.append(self.delete(key))
else:
if field.ordered:
idx = self.ordered_set(key, timeout, pickler = nopickle)
else:
idx = self.unordered_set(key, timeout, pickler = nopickle)
deleted.append(idx.discard(objid))
fid = field.id(obj)
if fid:
deleted.append(self.delete(fid))
return 1
def set(self, id, value, timeout = None):
value = self.pickler.dumps(value)
return self._set(id,value,timeout)
def get(self, id, default = None):
v = self._get(id)
if v:
return self.pickler.loads(v)
else:
return default
def get_many(self, keys):
"""
Fetch a bunch of keys from the cache. For certain backends (memcached,
pgsql) this can be *much* faster when fetching multiple values.
Returns a dict mapping each key in keys to its value. If the given
key is missing, it will be missing from the response dict.
"""
d = {}
for k in keys:
val = self.get(k)
if val is not None:
d[k] = val
return d
def has_key(self, key):
"""
Returns True if the key is in the cache and has not expired.
"""
return self.get(key) is not None
def incr(self, key, delta=1):
"""
Add delta to value in the cache. If the key does not exist, raise a
ValueError exception.
"""
if key not in self:
raise ValueError("Key '%s' not found" % key)
new_value = self.get(key) + delta
self.set(key, new_value)
return new_value
def decr(self, key, delta=1):
"""
Subtract delta from value in the cache. If the key does not exist, raise
a ValueError exception.
"""
return self.incr(key, -delta)
def __contains__(self, key):
"""
Returns True if the key is in the cache and has not expired.
"""
# This is a separate method, rather than just a copy of has_key(),
# so that it always has the same functionality as has_key(), even
# if a subclass overrides it.
return self.has_key(key)
def delete_many(self, keys):
"""
Set a bunch of values in the cache at once. For certain backends
(memcached), this is much more efficient than calling delete() multiple
times.
"""
for key in keys:
self.delete(key)
def clear(self):
"""Remove *all* values from the database at once."""
raise NotImplementedError
# VIRTUAL METHODS
def keys(self, pattern = '*'):
raise NotImplementedError
def _set(self, id, value, timeout):
raise NotImplementedError
def _get(self, id):
raise NotImplementedError
    def _set_keys(self, keys):
raise NotImplementedError
# DATASTRUCTURES
def index_keys(self, id, timeout):
return Keys(id,timeout,self._keys)
def list(self, id, timeout = 0, pipeline = None, **kwargs):
'''Return an instance of :class:`stdnet.List`
for a given *id*.'''
pip = pipeline if pipeline is not None else self._get_pipe(id,'list',timeout)
return self.structure_module.List(self, id, pip.pipe, **kwargs)
def hash(self, id, timeout = 0, pipeline = None, **kwargs):
'''Return an instance of :class:`stdnet.HashTable` structure
for a given *id*.'''
pip = pipeline if pipeline is not None else self._get_pipe(id,'hash',timeout)
return self.structure_module.HashTable(self, id, pip.pipe, **kwargs)
def unordered_set(self, id, timeout = 0, pipeline = None, **kwargs):
'''Return an instance of :class:`stdnet.Set` structure
for a given *id*.'''
pip = pipeline if pipeline is not None else self._get_pipe(id,'set',timeout)
return self.structure_module.Set(self, id, pip.pipe, **kwargs)
def ordered_set(self, id, timeout = 0, pipeline = None, **kwargs):
'''Return an instance of :class:`stdnet.OrderedSet` structure
for a given *id*.'''
pip = pipeline if pipeline is not None else self._get_pipe(id,'oset',timeout)
return self.structure_module.OrderedSet(self, id, pip.pipe, **kwargs)
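
# Illustrative sketch of how a concrete backend might be used (names are
# assumptions; a real subclass must implement _set, _get, keys, clear, etc.
# and provide a structure_module):
#   server = MyRedisBackend('redis', {'timeout': 30})
#   server.set('greeting', {'hello': 'world'})
#   server.get('greeting')  # -> {'hello': 'world'}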
| StarcoderdataPython |
3374275 | <gh_stars>0
from .client import *
from .protocols import *
from .server import *
from .service import *
from .utils import *
from .common import *
from .exceptions import *
| StarcoderdataPython |
4822171 | def sanitize_tag(tag: str) -> str:
"""Clean tag by replacing empty spaces with underscore.
Parameters
----------
tag: str
Returns
-------
str
Cleaned tag
Examples
--------
>>> sanitize_tag(" Machine Learning ")
"Machine_Learning"
"""
return tag.strip().replace(" ", "_")
| StarcoderdataPython |
3206325 | <reponame>gavinshark/stayHungryStayFoolish
#!/usr/bin/env python
#-*- coding:utf-8 -*-
import utilAlgorithm
from numpy import *
from logger import logger
from utilfile import *
from utilconfigration import cfg
class utilAlg_Mean(utilAlgorithm.utilAlgorithm):
def __init__(self):
print('utilAlg_Mean __init__', self.__class__.__name__)
def trainData(self, trainX, trainY, train_attri_dict, crxvalX, crxvalY):
logger.info("%s trainData", self.__class__.__name__)
trainCarSell = {}
num_of_car_month = 0
for idx in range(shape(trainX)[0]):
cartype = int(trainX[idx, train_attri_dict[CLASS_ID]])
month = int(trainX[idx, train_attri_dict[SALE_DATE]])
if cartype in trainCarSell:
if month in trainCarSell[cartype]:
trainCarSell[cartype][month] += trainY[idx][0]
else:
trainCarSell[cartype][month] = trainY[idx][0]
num_of_car_month += 1
else:
trainCarSell[cartype] = {}
trainCarSell[cartype][month] = trainY[idx][0]
num_of_car_month += 1
        conditionX = zeros((num_of_car_month, 2))
        sellcountY = zeros((num_of_car_month, 1))
trainW = {}
totalsell = 0
totalcarmonthcnt = 0
totalcartype_num = 0
idx_of_car_month = 0
for cartype, selldict in trainCarSell.items():
monthnum = 0
sellsum = 0
for month, monthsell in selldict.items():
monthnum += 1
sellsum += monthsell
if 1 == cfg.getint("mean_method", "genfile"):
conditionX[idx_of_car_month][PREDICT_IDX_CLASS_ID] = cartype
conditionX[idx_of_car_month][PREDICT_IDX_DATE] = month
sellcountY[idx_of_car_month][0] = monthsell
idx_of_car_month += 1
trainW[cartype] = sellsum/monthnum
totalsell = totalsell + sellsum
totalcarmonthcnt = totalcarmonthcnt + monthnum
totalcartype_num += 1
trainW[0] = totalsell/totalcarmonthcnt
if 1 == cfg.getint("mean_method", "genfile"):
output_file_path = cfg.get("mean_method", "outputfile")
utilf = utilfile("", "", output_file_path,"")
callabels = [SALE_DATE, CLASS_ID, SALE_QUANTITY]
utilf.writePredictData(conditionX, callabels, sellcountY)
logger.info("total car type num is %d, total car*month is %d" % (totalcartype_num, num_of_car_month))
return trainW
def predictData(self, trainW, predictX):
logger.info("%s predictData", self.__class__.__name__)
predictY = ones((shape(predictX)[0], 1))*trainW[0]
for idx in range(shape(predictX)[0]):
if predictX[idx, PREDICT_IDX_CLASS_ID] in trainW:
                car_idx = predictX[idx, PREDICT_IDX_CLASS_ID]
                predictY[idx, 0] = round(float(trainW[car_idx]))
            else:
                predictY[idx, 0] = round(float(trainW[0]))
                logger.info('car type %d is not in trainW, using mean data instead',
                            predictX[idx, PREDICT_IDX_CLASS_ID])
return predictY | StarcoderdataPython |
56904 | import setuptools
import os
def get_files_in_dir(dirName):
listOfFile = os.listdir(dirName)
completeFileList = list()
for file in listOfFile:
completePath = os.path.join(dirName, file)
if os.path.isdir(completePath):
completeFileList = completeFileList + get_files_in_dir(completePath)
else:
completeFileList.append(completePath)
return completeFileList
def find_json_files():
json_files = []
files = get_files_in_dir(".")
for file in files:
root, extension = os.path.splitext(file)
if extension == ".json":
json_files.append(file)
return json_files
with open("MANIFEST.in", "w") as mfs:
for file in find_json_files():
mfs.write("include " + file + "\n")
with open("README.md", "r", encoding="utf-8") as fh:
long_description = fh.read()
setuptools.setup(
name="py-money-legos",
version="0.1.1",
author="<NAME>",
author_email="<EMAIL>",
description="money-legos for Python",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/gokhanbaydar/py-money-legos",
packages=setuptools.find_packages(),
include_package_data=True,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires=">=3.6",
) | StarcoderdataPython |
from tkinter import *
from tkinter import filedialog, simpledialog
from tkinter import messagebox
from editor.settings import backgroundcolor as bc
from editor.settings import forgroundcolor as fc
from editor.settings import back as b
from editor.settings import fore as f
from editor.settings import size
from editor.settings import font as fontx
from gtts import gTTS
import playsound
import os
# Imported everything here
| StarcoderdataPython |
90006 | from PyQt5.QtWidgets import *
from PyQt5 import QtCore, QtWidgets, QtGui
import BLL.ClientSocket
import BLL.FileSystem
# Login window
class LoginWin(QWidget):
def __init__(self):
super(LoginWin, self).__init__()
        # Set the window background color to white
        pe = QtGui.QPalette()
        pe.setColor(pe.Background, QtGui.QColor(255, 255, 255))
        self.setPalette(pe)
        # Center the window on the screen
        screen = QDesktopWidget().screenGeometry()
        size = self.geometry()
        self.move((screen.width() - size.width()) // 2,
                  (screen.height() - size.height()) // 2)
        self.MainWin = None
        # Set the window to have only a close button
self.setWindowFlags(QtCore.Qt.WindowCloseButtonHint)
self.setWindowFlags(QtCore.Qt.FramelessWindowHint)
        # Overall window layout
        layout = QHBoxLayout(self.window())
        # Set the inner margins of the overall layout
        layout.setContentsMargins(0, 0, 0, 0)
        # Widget holding the image
        imgWidget = QtWidgets.QWidget()
        imgLayout = QVBoxLayout(imgWidget)
        # Image label
        imgLabel = QtWidgets.QLabel()
        # Load the image
fileSystem = BLL.FileSystem.FileSystem()
iconPath = fileSystem.iconPath
path = iconPath + '/' + "loginImg.png"
img = QtGui.QImage(path)
        # Set the maximum width and height
        maxSize = QtCore.QSize(500, 500)
        # Scale proportionally (max size constrained by the given QSize)
        loginImg = QtGui.QPixmap.fromImage(img.scaled(maxSize, QtCore.Qt.KeepAspectRatio,
                                                      QtCore.Qt.SmoothTransformation))
        imgLabel.setPixmap(loginImg)
        # Center the image
        imgLabel.setAlignment(QtCore.Qt.AlignCenter)
        # Set the inner margins of the image widget
        imgLayout.setContentsMargins(0, 0, 0, 0)
        # Add to the layout
        imgLayout.addWidget(imgLabel)
        # Widget holding the login form
loginWidget = QtWidgets.QWidget()
loginWidget.setMinimumSize(300, 400)
loginWidget.setContentsMargins(0, 5, 5, 0)
loginLayout = QVBoxLayout(loginWidget)
        # Close button
closeButton = QPushButton()
path = iconPath + '/' + "close.png"
closeButton.setStyleSheet("QPushButton{border-image: url(%s)}" % path)
closeButton.setFixedSize(30, 30)
closeButton.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
        # Title text
welcomeLabel = QTextBrowser()
welcomeLabel.setText("艺术云博客")
welcomeLabel.setAlignment(QtCore.Qt.AlignCenter)
welcomeLabel.setFont(QtGui.QFont("华文彩云", 24, QtGui.QFont.Bold))
welcomeLabel.setStyleSheet("background:transparent;border-width:0;border-style:outset")
        # Account and password fields
self.accountEdit = QLineEdit()
self.accountEdit.setStyleSheet(
"""background:white;
padding-left:10px ;
padding-top:1px ;
border: 2px solid rgb(209 , 209 , 209);
border-top:transparent;
border-left:transparent;
border-right:transparent;
""")
self.accountEdit.setPlaceholderText("请输入用户名")
self.accountEdit.setMinimumSize(240, 40)
self.passwordEdit = QLineEdit()
self.passwordEdit.setStyleSheet(
"""background:white;
padding-left:10px ;
padding-top:1px ;
border: 2px solid rgb(209 , 209 , 209);
border-top:transparent;
border-left:transparent;
border-right:transparent;
""")
self.passwordEdit.setPlaceholderText("请输入密码")
self.passwordEdit.setMinimumSize(240, 40)
        # Hide the password characters
self.passwordEdit.setEchoMode(QLineEdit.Password)
        # Buttons
loginButton = QPushButton()
loginButton.setFixedSize(240, 40)
loginButton.setText("登 录")
loginButton.setStyleSheet("""
color:white;
background-color:rgb(14 , 150 , 254);
border-radius:10px;
""")
loginButton.setFont(QtGui.QFont("微软雅黑", 10, QtGui.QFont.Normal))
loginButton.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
registerButton = QPushButton("还没有账号?点此注册")
registerButton.setFlat(True)
registerButton.setStyleSheet("QPushButton{background: transparent;}")
registerButton.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
forgotPswButton = QPushButton("忘记密码")
forgotPswButton.setFlat(True)
forgotPswButton.setStyleSheet("color:blue;")
forgotPswButton.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
        # Assemble the login layout
        loginLayout.setContentsMargins(0, 0, 0, 0)
loginLayout.addWidget(closeButton, 1, QtCore.Qt.AlignRight | QtCore.Qt.AlignTop)
loginLayout.addWidget(welcomeLabel, 1, QtCore.Qt.AlignCenter)
loginLayout.addWidget(self.accountEdit, 1, QtCore.Qt.AlignCenter)
loginLayout.addWidget(self.passwordEdit, 1, QtCore.Qt.AlignCenter)
loginLayout.addWidget(forgotPswButton, 1, QtCore.Qt.AlignRight)
loginLayout.addWidget(loginButton, 1, QtCore.Qt.AlignCenter)
loginLayout.addWidget(registerButton, 1, QtCore.Qt.AlignCenter)
        # Assemble the overall layout
layout.addWidget(imgWidget)
layout.addWidget(loginWidget)
        # Button handlers
loginButton.clicked.connect(self.login)
registerButton.clicked.connect(self.register)
forgotPswButton.clicked.connect(self.forgotPsw)
closeButton.clicked.connect(self.close)
        # Pressing Enter in the password field triggers login
self.passwordEdit.returnPressed.connect(self.login)
        # Set the initial focus
self.accountEdit.setFocus()
    # Set the main window to open later and pass parameters to
def setMainWin(self, MainWin):
self.MainWin = MainWin
    # Log in
def login(self):
account = self.accountEdit.text()
password = self.passwordEdit.text()
if not account or not password:
simpleMessageBox('提示', '请完整填写输入框')
return
try:
client = BLL.ClientSocket.ClientSocket()
response = client.login(account, password)
except Exception as e:
print(e)
simpleMessageBox('错误', '无法连接到服务器')
return
# 登录失败
if not response:
self.accountEdit.clear()
self.passwordEdit.clear()
simpleMessageBox('提示', '用户名或密码错误')
# 登录成功
else:
self.MainWin.setUserInfo(response, account)
self.MainWin.show()
self.close()
    # Show the registration dialog
def register(self):
self.setVisible(False)
registerDialog = RegisterDialog()
registerDialog.setWindowModality(QtCore.Qt.ApplicationModal)
registerDialog.exec_()
self.setVisible(True)
    # Show the forgot-password dialog
def forgotPsw(self):
        # First fetch the security question
        # Pop up a dialog to get the username from the user
account, okPressed = QtWidgets.QInputDialog.getText(self, "忘记密码", "请输入用户名:",
QtWidgets.QLineEdit.Normal)
# 若用户未输入或未点击确定按钮,则返回
if not okPressed or not account:
return
        # Call the client to fetch the security question
client = BLL.ClientSocket.ClientSocket()
response = client.getSecurityQes(account)
        # Lookup failed
if not response:
self.accountEdit.clear()
self.passwordEdit.clear()
simpleMessageBox('提示', '用户名不存在')
return
        # Lookup succeeded; show the dialog
self.setVisible(False)
forgotPswDialog = ForgotPswDialog(response, account)
forgotPswDialog.setWindowModality(QtCore.Qt.ApplicationModal)
forgotPswDialog.exec_()
self.setVisible(True)
    # Skip the input stage for a quick start
def quickStart(self):
token = "7<PASSWORD>"
account = "123"
self.MainWin.setUserInfo(token, account)
self.MainWin.show()
self.close()
# Registration dialog
class RegisterDialog(QDialog):
def __init__(self):
super().__init__()
        # Set the window background color to white
        pe = QtGui.QPalette()
        pe.setColor(pe.Background, QtGui.QColor(255, 255, 255))
        self.setPalette(pe)
        # Center the window on the screen
        screen = QDesktopWidget().screenGeometry()
        size = self.geometry()
        self.move((screen.width() - size.width()) // 2,
                  (screen.height() - size.height()) // 2)
        # Set the window to have only a close button
self.setWindowFlags(QtCore.Qt.WindowCloseButtonHint)
self.setWindowFlags(QtCore.Qt.FramelessWindowHint)
        # Overall window layout
        layout = QHBoxLayout(self.window())
        # Set the inner margins of the overall layout
        layout.setContentsMargins(0, 0, 0, 0)
        # Widget holding the image
        imgWidget = QtWidgets.QWidget()
        imgLayout = QVBoxLayout(imgWidget)
        # Image label
        imgLabel = QtWidgets.QLabel()
        # Load the image
fileSystem = BLL.FileSystem.FileSystem()
iconPath = fileSystem.iconPath
path = iconPath + '/' + "loginImg.png"
img = QtGui.QImage(path)
        # Set the maximum width and height
        maxSize = QtCore.QSize(500, 500)
        # Scale proportionally (max size constrained by the given QSize)
        loginImg = QtGui.QPixmap.fromImage(img.scaled(maxSize, QtCore.Qt.KeepAspectRatio,
                                                      QtCore.Qt.SmoothTransformation))
        imgLabel.setPixmap(loginImg)
        # Center the image
        imgLabel.setAlignment(QtCore.Qt.AlignCenter)
        # Set the inner margins of the image widget
        imgLayout.setContentsMargins(0, 0, 0, 0)
        # Add to the layout
        imgLayout.addWidget(imgLabel)
        # Widget holding the registration form
loginWidget = QtWidgets.QWidget()
loginWidget.setMinimumSize(300, 400)
loginWidget.setContentsMargins(0, 5, 5, 0)
loginLayout = QVBoxLayout(loginWidget)
        # Apply a shared stylesheet
loginWidget.setStyleSheet(
"""QLineEdit{background:white;
padding-left:10px ;
padding-top:1px ;
border: 2px solid rgb(209 , 209 , 209);
border-top:transparent;
border-left:transparent;
border-right:transparent;
}
""")
        # Close button
closeButton = QPushButton()
path = iconPath + '/' + "close.png"
closeButton.setStyleSheet("QPushButton{border-image: url(%s)}" % path)
closeButton.setFixedSize(30, 30)
closeButton.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
        # Title text
welcomeLabel = QTextBrowser()
welcomeLabel.setText("艺术云博客")
welcomeLabel.setAlignment(QtCore.Qt.AlignCenter)
welcomeLabel.setFont(QtGui.QFont("华文彩云", 24, QtGui.QFont.Bold))
welcomeLabel.setStyleSheet("background:transparent;border-width:0;border-style:outset")
        # Account, password, security question and answer fields
self.accountEdit = QLineEdit()
self.accountEdit.setPlaceholderText("请输入用户名")
self.accountEdit.setMinimumSize(240, 40)
self.passwordEdit = QLineEdit()
self.passwordEdit.setPlaceholderText("请输入密码")
self.passwordEdit.setMinimumSize(240, 40)
self.questionEdit = QLineEdit()
self.questionEdit.setPlaceholderText("请输入密码保护问题")
self.questionEdit.setMinimumSize(240, 40)
self.answerEdit = QLineEdit()
self.answerEdit.setPlaceholderText("请输入密码保护答案")
self.answerEdit.setMinimumSize(240, 40)
        # Hide the password characters
self.passwordEdit.setEchoMode(QLineEdit.Password)
        # Buttons
registerButton = QPushButton()
registerButton.setFixedSize(240, 40)
registerButton.setText("注 册")
registerButton.setStyleSheet("""
color:white;
background-color:rgb(14 , 150 , 254);
border-radius:10px;
""")
registerButton.setFont(QtGui.QFont("微软雅黑", 10, QtGui.QFont.Normal))
registerButton.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
forgotPswButton = QPushButton("忘记密码")
forgotPswButton.setFlat(True)
forgotPswButton.setStyleSheet("color:blue;")
forgotPswButton.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
        # Assemble the form layout
loginLayout.setContentsMargins(0, 0, 0, 0)
loginLayout.addWidget(closeButton, 1, QtCore.Qt.AlignRight | QtCore.Qt.AlignTop)
loginLayout.addWidget(welcomeLabel, 1, QtCore.Qt.AlignCenter)
loginLayout.addWidget(self.accountEdit, 1, QtCore.Qt.AlignCenter)
loginLayout.addWidget(self.passwordEdit, 1, QtCore.Qt.AlignCenter)
loginLayout.addWidget(self.questionEdit, 1, QtCore.Qt.AlignCenter)
loginLayout.addWidget(self.answerEdit, 1, QtCore.Qt.AlignCenter)
loginLayout.addWidget(registerButton, 1, QtCore.Qt.AlignCenter)
        # Assemble the overall layout
layout.addWidget(imgWidget)
layout.addWidget(loginWidget)
        # Button handlers
registerButton.clicked.connect(self.register)
closeButton.clicked.connect(self.close)
        # Set the initial focus
self.accountEdit.setFocus()
    # Register
def register(self):
account = self.accountEdit.text()
password = self.passwordEdit.text()
question = self.questionEdit.text()
answer = self.answerEdit.text()
if not account or not password or not question or not answer:
simpleMessageBox('提示', '请完整填写输入框')
return
try:
client = BLL.ClientSocket.ClientSocket()
response = client.register(account, password, question, answer)
except Exception as e:
print(e)
simpleMessageBox('错误', '无法连接到服务器')
return
if not response:
simpleMessageBox('提示', '用户名已存在')
else:
simpleMessageBox('提示', '账号注册成功')
self.close()
# Forgot-password dialog
class ForgotPswDialog(QDialog):
def __init__(self, question, account):
super().__init__()
self.account = account
        # Set the window background color to white
        pe = QtGui.QPalette()
        pe.setColor(pe.Background, QtGui.QColor(255, 255, 255))
        self.setPalette(pe)
        # Center the window on the screen
        screen = QDesktopWidget().screenGeometry()
        size = self.geometry()
        self.move((screen.width() - size.width()) // 2,
                  (screen.height() - size.height()) // 2)
        # Set the window to have only a close button
self.setWindowFlags(QtCore.Qt.WindowCloseButtonHint)
self.setWindowFlags(QtCore.Qt.FramelessWindowHint)
        # Overall window layout
        layout = QHBoxLayout(self.window())
        # Set the inner margins of the overall layout
        layout.setContentsMargins(0, 0, 0, 0)
        # Widget holding the image
        imgWidget = QtWidgets.QWidget()
        imgLayout = QVBoxLayout(imgWidget)
        # Image label
        imgLabel = QtWidgets.QLabel()
        # Load the image
fileSystem = BLL.FileSystem.FileSystem()
iconPath = fileSystem.iconPath
path = iconPath + '/' + "loginImg.png"
img = QtGui.QImage(path)
        # Set the maximum width and height
        maxSize = QtCore.QSize(500, 500)
        # Scale proportionally (max size constrained by the given QSize)
        loginImg = QtGui.QPixmap.fromImage(img.scaled(maxSize, QtCore.Qt.KeepAspectRatio,
                                                      QtCore.Qt.SmoothTransformation))
        imgLabel.setPixmap(loginImg)
        # Center the image
        imgLabel.setAlignment(QtCore.Qt.AlignCenter)
        # Set the inner margins of the image widget
        imgLayout.setContentsMargins(0, 0, 0, 0)
        # Add to the layout
        imgLayout.addWidget(imgLabel)
        # Widget holding the password-reset form
loginWidget = QtWidgets.QWidget()
loginWidget.setMinimumSize(300, 400)
loginWidget.setContentsMargins(0, 5, 5, 0)
loginLayout = QVBoxLayout(loginWidget)
        # Apply a shared stylesheet
loginWidget.setStyleSheet(
"""QLineEdit{background:white;
padding-left:10px ;
padding-top:1px ;
border: 2px solid rgb(209 , 209 , 209);
border-top:transparent;
border-left:transparent;
border-right:transparent;
}
""")
        # Close button
closeButton = QPushButton()
path = iconPath + '/' + "close.png"
closeButton.setStyleSheet("QPushButton{border-image: url(%s)}" % path)
closeButton.setFixedSize(30, 30)
closeButton.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
        # Title text
welcomeLabel = QTextBrowser()
welcomeLabel.setText("艺术云博客")
welcomeLabel.setAlignment(QtCore.Qt.AlignCenter)
welcomeLabel.setFont(QtGui.QFont("华文彩云", 24, QtGui.QFont.Bold))
welcomeLabel.setStyleSheet("background:transparent;border-width:0;border-style:outset")
        # New password, security question and answer fields
self.passwordEdit = QLineEdit()
self.passwordEdit.setPlaceholderText("请输入新的密码")
self.passwordEdit.setMinimumSize(240, 40)
self.questionLabel = QLabel()
self.questionLabel.setText('问题:' + question)
self.questionLabel.setAlignment(QtCore.Qt.AlignCenter)
self.questionLabel.setMinimumSize(240, 40)
self.answerEdit = QLineEdit()
self.answerEdit.setPlaceholderText("请输入密码保护答案")
self.answerEdit.setMinimumSize(240, 40)
        # Hide the password characters
self.passwordEdit.setEchoMode(QLineEdit.Password)
        # Buttons
forgotPswButton = QPushButton()
forgotPswButton.setFixedSize(240, 40)
forgotPswButton.setText("重置密码")
forgotPswButton.setStyleSheet("""
color:white;
background-color:rgb(14 , 150 , 254);
border-radius:10px;
""")
forgotPswButton.setFont(QtGui.QFont("微软雅黑", 10, QtGui.QFont.Normal))
forgotPswButton.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
        # Assemble the form layout
loginLayout.setContentsMargins(0, 0, 0, 0)
loginLayout.addWidget(closeButton, 1, QtCore.Qt.AlignRight | QtCore.Qt.AlignTop)
loginLayout.addWidget(welcomeLabel, 1, QtCore.Qt.AlignCenter)
loginLayout.addWidget(self.questionLabel, 1, QtCore.Qt.AlignCenter)
loginLayout.addWidget(self.answerEdit, 1, QtCore.Qt.AlignCenter)
loginLayout.addWidget(self.passwordEdit, 1, QtCore.Qt.AlignCenter)
loginLayout.addWidget(forgotPswButton, 1, QtCore.Qt.AlignCenter)
        # Assemble the overall layout
layout.addWidget(imgWidget)
layout.addWidget(loginWidget)
        # Button handlers
forgotPswButton.clicked.connect(self.forgotPsw)
closeButton.clicked.connect(self.close)
        # Set the initial focus
self.answerEdit.setFocus()
    # Reset the password
def forgotPsw(self):
answer = self.answerEdit.text()
password = self.passwordEdit.text()
account = self.account
if not account or not password or not answer:
simpleMessageBox('提示', '请完整填写输入框')
return
try:
client = BLL.ClientSocket.ClientSocket()
response = client.forgotPsw(account, password, answer)
if response:
simpleMessageBox('提示', '密码重置成功,新的密码为:' + response)
self.close()
else:
simpleMessageBox('提示', '密保答案错误,密码重置失败')
except Exception as e:
print(e)
simpleMessageBox('错误', '无法连接到服务器')
return
def simpleMessageBox(title, text):
messageBox = QMessageBox()
messageBox.setWindowTitle(title)
messageBox.setText(text)
messageBox.setStandardButtons(QMessageBox.Yes)
buttonY = messageBox.button(QMessageBox.Yes)
buttonY.setText('确定')
messageBox.exec_()
| StarcoderdataPython |
3321151 | <reponame>OakInn/ysLineidGen
# SL_Common_Test.py
# python v3.6 at least (due to f-string)
# Tests for SL Common class
# Functionality tests - read file, backup file, write file,\
# list of file pathes found by extension
import os
from tempfile import gettempdir
import unittest
from SL_Common import Common
class CommonTest(unittest.TestCase):
def setUp(self):
self.common = Common()
self.testFileName = "testFile.ttxt"
self.tempFolderPath = gettempdir()
self.testText = """
title: Start
---
<<if true>>
Player: Hey, Sally. #line:794945
Sally: Oh! Hi. #line:2dc39b
Sally: You snuck up on me. #line:34de2f
Sally: Don't do that. #line:dcc2bc
<<else>>
Player: Hey. #line:a8e70c
Sally: Hi. #line:305cde
<<endif>>
==="""
self.testFilePath = os.path.join(self.tempFolderPath, self.testFileName)
self.testFileCreate = self.common.writeFile(self.testText.split("\n"), f"{self.testFilePath}")
def tearDown(self) -> None:
file = f"{self.testFilePath}.fc_1.fc"
if os.path.exists(file):
os.remove(file)
return super().tearDown()
def testCommon01Backup(self):
fileForBackup = self.testFilePath
backupPath = self.tempFolderPath
expectedPath = os.path.join(self.tempFolderPath, f"{self.testFileName}.fc")
backup = self.common.backupFile(fileForBackup, backupPath)
with self.subTest(f"00. File exist - {expectedPath}"):
self.assertTrue(os.path.exists(f"{expectedPath}"))
with self.subTest(f"01. Content is the same - {self.common.readFile(fileForBackup)}"):
self.assertTrue(self.common.readFile(expectedPath))
def testCommon02ReadFile(self):
filepath = self.testFilePath
expectedNLines = 14
expectedTextInLine8 = "Sally: Don't do that. #line:dcc2bc"
expectedData = self.testText.split("\n")
readFile = self.common.readFile(filepath)
with self.subTest(f"00. {len(readFile)} != Expected {expectedNLines}"):
self.assertTrue(len(readFile) == expectedNLines)
with self.subTest(f"01. {readFile[8]} != Expected {expectedTextInLine8}"):
self.assertTrue(readFile[8] == expectedTextInLine8)
with self.subTest(f"02. {readFile} != Expected {expectedData}"):
self.assertTrue(readFile == expectedData)
def testCommon03WriteFile(self):
writeData = self.testText.split("\n")
writePath = f"{self.tempFolderPath}/testWriteDataInThisFile.ttxt"
expectedData = writeData
        expectedlen = len(writeData)  # 14
writeFile = self.common.writeFile(writeData, writePath)
readFile = self.common.readFile(writePath)
with self.subTest(f"00. No file - {writePath}"):
self.assertTrue(os.path.exists(str(writePath)))
with self.subTest(f"01. {len(readFile)} != Expected {expectedlen}"):
self.assertTrue(len(readFile) == expectedlen)
with self.subTest(f"02. {readFile} != Expected {expectedData}"):
self.assertTrue(readFile == expectedData)
# Commented lines below valid for testing recursion.
def testCommon04FilePathCollector(self):
file1 = os.path.join(self.tempFolderPath, "testFile.ttxt")
file2 = os.path.join(self.tempFolderPath, "testWriteDataInThisFile.ttxt")
file3 = os.path.join(self.tempFolderPath, "testFile.ttxt.fc")
# file4 = os.path.join(self.tempFolderPath, "testCommon", "testWriteDataInThisFile.ttxt")
folderPath = self.tempFolderPath
searchFileByExtension = (".ttxt", ".fc")
expectedFileList = (file1, file3, file2)
# expectedFileList = (file1, file2, file3, file4)
fileList = tuple(self.common.filePathCollector(folderPath, searchFileByExtension))
with self.subTest(f"\n00. {len(fileList)} != Expected {len(expectedFileList)}"):
self.assertTrue(len(fileList) == len(expectedFileList))
with self.subTest(f"\n01. {fileList} != Expected {expectedFileList}"):
self.assertTrue(sorted(fileList) == sorted(expectedFileList)) | StarcoderdataPython |
3313743 | <reponame>httpsgithu/mindspore
# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mindspore import context
from mindspore.nn import Cell
from mindspore.ops import operations as P
from parallel.utils.utils import ParallelValidator, compile_net
SEED_ = 1
SEED2_ = 1
class Net(Cell):
def __init__(self, seed, seed2, strategy=None):
super(Net, self).__init__()
self.uniform_real = P.UniformReal(seed, seed2).shard(strategy)
def construct(self, shape):
out = self.uniform_real(shape)
return out
def test_uniform_real_auto_parallel():
"""
Features: test UniformReal auto parallel
Description: auto parallel
Expectation: compile success
"""
context.set_auto_parallel_context(parallel_mode="auto_parallel", device_num=8, global_rank=0)
net = Net(SEED_, SEED2_)
shape = (4, 4, 4)
compile_net(net, shape)
def test_uniform_real_data_parallel():
"""
Features: test UniformReal data parallel
Description: data parallel
Expectation: compile success
"""
context.set_auto_parallel_context(parallel_mode="semi_auto_parallel", device_num=8, global_rank=1)
net = Net(SEED_, SEED2_)
shape = (8, 8)
phase = compile_net(net, shape)
validator = ParallelValidator(net, phase)
assert validator.check_node_attrs("UniformReal-0", {"seed": 2, "seed2": 2})
def test_uniform_real_model_parallel():
"""
Features: test UniformReal model parallel
Description: model parallel
Expectation: compile success
"""
context.set_auto_parallel_context(parallel_mode="semi_auto_parallel", device_num=8, global_rank=5)
shape = (8, 8)
strategy = ((2, 2),)
net = Net(SEED_, SEED2_, strategy)
phase = compile_net(net, shape)
validator = ParallelValidator(net, phase)
assert validator.check_node_attrs("UniformReal-0", {"seed": 3, "seed2": 3})
| StarcoderdataPython |
3349775 | <reponame>denyingmxd/Torchssc<filename>model/sketch.nyu/resnet.py
import torch.nn as nn
class Bottleneck(nn.Module):
expansion = 4
def __init__(self, inplanes, planes, stride=1, dilation=1, downsample=None, fist_dilation=1, multi_grid=1, bn_momentum=0.0003, BatchNorm2d=nn.BatchNorm2d):
super(Bottleneck, self).__init__()
self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
self.bn1 = BatchNorm2d(planes, momentum=bn_momentum)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
padding=dilation*multi_grid, dilation=dilation*multi_grid, bias=False)
self.bn2 = BatchNorm2d(planes, momentum=bn_momentum)
self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
self.bn3 = BatchNorm2d(planes * 4, momentum=bn_momentum)
self.relu = nn.ReLU(inplace=False)
self.relu_inplace = nn.ReLU(inplace=True)
self.downsample = downsample
self.dilation = dilation
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.bn3(out)
if self.downsample is not None:
residual = self.downsample(x)
out = out + residual
out = self.relu_inplace(out)
return out
class ResNet(nn.Module):
def __init__(self, block, layers, num_classes, BatchNorm2d=nn.BatchNorm2d, dilation=[1,1,1,1], bn_momentum=0.0003, is_fpn=False):
self.inplanes = 128
self.is_fpn = is_fpn
super(ResNet, self).__init__()
self.conv1 = nn.Conv2d(3, 64, 3, stride=2, padding=1, bias=False)
self.bn1 = BatchNorm2d(64, momentum=bn_momentum)
self.relu1 = nn.ReLU(inplace=False)
self.conv2 = nn.Conv2d(64, 64, 3, stride=1, padding=1, bias=False)
self.bn2 = BatchNorm2d(64, momentum=bn_momentum)
self.relu2 = nn.ReLU(inplace=False)
self.conv3 = nn.Conv2d(64, 128, 3, stride=1, padding=1, bias=False)
self.bn3 = BatchNorm2d(128, momentum=bn_momentum)
self.relu3 = nn.ReLU(inplace=False)
self.maxpool = nn.MaxPool2d(3, stride=2, padding=1, ceil_mode=False)
self.relu = nn.ReLU(inplace=False)
self.layer1 = self._make_layer(block, 64, layers[0], stride=1, dilation=dilation[0], bn_momentum=bn_momentum, BatchNorm2d=BatchNorm2d)
self.layer2 = self._make_layer(block, 128, layers[1], stride=1 if dilation[1]!=1 else 2, dilation=dilation[1], bn_momentum=bn_momentum, BatchNorm2d=BatchNorm2d)
self.layer3 = self._make_layer(block, 256, layers[2], stride=1 if dilation[2]!=1 else 2, dilation=dilation[2], bn_momentum=bn_momentum, BatchNorm2d=BatchNorm2d)
self.layer4 = self._make_layer(block, 512, layers[3], stride=1 if dilation[3]!=1 else 2, dilation=dilation[3], bn_momentum=bn_momentum, BatchNorm2d=BatchNorm2d)
def _make_layer(self, block, planes, blocks, stride=1, dilation=1, multi_grid=1, bn_momentum=0.0003, BatchNorm2d=nn.BatchNorm2d):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
nn.Conv2d(self.inplanes, planes * block.expansion,
kernel_size=1, stride=stride, bias=False),
BatchNorm2d(planes * block.expansion, affine = True, momentum=bn_momentum))
layers = []
generate_multi_grid = lambda index, grids: grids[index%len(grids)] if isinstance(grids, tuple) else 1
layers.append(block(self.inplanes, planes, stride, dilation=dilation, downsample=downsample, multi_grid=generate_multi_grid(0, multi_grid), bn_momentum=bn_momentum, BatchNorm2d=BatchNorm2d))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes, dilation=dilation, multi_grid=generate_multi_grid(i, multi_grid), bn_momentum=bn_momentum, BatchNorm2d=BatchNorm2d))
return nn.Sequential(*layers)
def forward(self, x, start_module=1, end_module=5):
if start_module <= 1:
x = self.relu1(self.bn1(self.conv1(x)))
x = self.relu2(self.bn2(self.conv2(x)))
x = self.relu3(self.bn3(self.conv3(x)))
x = self.maxpool(x)
start_module = 2
features = []
for i in range(start_module, end_module+1):
x = eval('self.layer%d'%(i-1))(x)
features.append(x)
if self.is_fpn:
if len(features) == 1:
return features[0]
else:
return tuple(features)
else:
return x
def get_resnet101(num_classes=19, dilation=[1,1,1,1], bn_momentum=0.0003, is_fpn=False, BatchNorm2d=nn.BatchNorm2d):
model = ResNet(Bottleneck,[3, 4, 23, 3], num_classes, dilation=dilation, bn_momentum=bn_momentum, is_fpn=is_fpn, BatchNorm2d=BatchNorm2d)
return model
def get_resnet50(num_classes=19, dilation=[1,1,1,1], bn_momentum=0.0003, is_fpn=False, BatchNorm2d=nn.BatchNorm2d):
model = ResNet(Bottleneck,[3, 4, 6, 3], num_classes, dilation=dilation, bn_momentum=bn_momentum, is_fpn=is_fpn, BatchNorm2d=BatchNorm2d)
return model
if __name__ == '__main__':
net = get_resnet50().cuda() | StarcoderdataPython |
61103 |
#c.execute("CREATE TABLE aud(RollNO text, date integer, starttime integer,endtime integer)")
def check(roll,date,starttime,endtime):
import sqlite3
message=""
conn=sqlite3.connect('aud.db')
c=conn.cursor()
tup=tuple([roll,date,starttime,endtime])
audopen=9
audclose=24
if int(starttime)<int(audopen) or int(endtime)<int(audopen) or int(starttime)>int(audclose) or int(endtime)>int(audclose):
message= "enter a valid time frame, Auditorium functions from 9AM to 12AM"
elif int(starttime)>int(endtime):
message= "start time should be greater than endtime, Please try again"
else:
c.execute("SELECT * FROM aud")
items=c.fetchall()
booked=[]
for i in items:
if i[1]==date:
for j in range(i[2],i[3]):
booked.append(int(j))
t=0
for i in range(int(starttime),int(endtime)):
if i in booked:
t=1
break
if t==1:
message= "That slot is already booked please try another one"
else:
c.execute("INSERT INTO aud VALUES(?,?,?,?)",tup)
message="You have booked the slot sucessfully"
conn.commit()
conn.close()
return message
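
# Example call (illustrative; date and times are integers per the schema above):
#   print(check("21BCE123", 20240115, 10, 12))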
| StarcoderdataPython |
108220 | <gh_stars>0
# -- LICENSE file in the root directory of this source tree. An additional grant
# -- of patent rights can be found in the PATENTS file in the same directory.
# --
# -- Author: <NAME> <<EMAIL>>
# -- <NAME> <<EMAIL>>
# -- <NAME> <<EMAIL>>
# -- The utility tool box
import random
util = {}
def string_shortfloat(t):
    return '%2.4g' % t
# function util.string_shortfloat(t)
# return string.format('%2.4g', t)
# end
def shuffleTable(t):
    # In-place Fisher-Yates shuffle (port of the Lua loop `for i = #t, 2, -1`).
    iterations = len(t)
    for i in range(iterations - 1, 0, -1):
        j = random.randint(0, i)
        t[i], t[j] = t[j], t[i]
# function util.shuffleTable(t)
# local rand = math.random
# local iterations = #t
# local j
# for i = iterations, 2, -1 do
# j = rand(i)
# t[i], t[j] = t[j], t[i]
# end
# end
def string_split(s, c=None):
if not c:
c=' '
temp = s.split(c)
return [x for x in temp if x]
#print(string_split("helloworld",'l'))
# function util.string_split(s, c)
# if c==nil then c=' ' end
# local t={}
# while true do
# local f=s:find(c)
# if f==nil then
# if s:len()>0 then
# table.insert(t, s)
# end
# break
# end
# if f > 1 then
# table.insert(t, s:sub(1,f-1))
# end
# s=s:sub(f+1,s:len())
# end
# return t
# end
def add(t, key):
    # Walk (and create as needed) nested dicts for all but the last key,
    # then mark the leaf as present (mirrors the Lua version below).
    cur = t
    for i in range(len(key) - 1):
        if key[i] not in cur:
            cur[key[i]] = {}
        cur = cur[key[i]]
    cur[key[len(key) - 1]] = True
# function util.add(tab, key)
# local cur = tab
# for i = 1, #key-1 do
# local new_cur = cur[key[i]]
# if new_cur == nil then
# cur[key[i]] = {}
# new_cur = cur[key[i]]
# end
# cur = new_cur
# end
# cur[key[#key]] = true
# end
def has(t, key):
    # Descend through the nested dicts; a missing level means the key is absent.
    cur = t
    for i in range(len(key)):
        if key[i] not in cur:
            return False
        cur = cur[key[i]]
    return True
# print(has({2: {3: True}}, [2, 3]))  # True
# print(has({2: {3: True}}, [2, 4]))  # False
# function util.has(tab, key)
# local cur = tab
# for i = 1, #key do
# cur = cur[key[i]]
# if cur == nil then
# return false
# end
# end
# return true
# end
def isnan(x):
return x!=x
# function util.isnan(x)
# return x ~= x
# end
# return util
| StarcoderdataPython |
3268549 | <filename>main_gui.py
from gui_displayer import MainMenuWindow
main_menu = MainMenuWindow()
main_menu.show()
| StarcoderdataPython |
124687 | # -*- coding: utf-8 -*-
"""
Created on Fri Sep 18 12:51:14 2020
@author: apurv
"""
import os
from os import listdir
from os.path import isfile, join
import pdfplumber
import pyttsx3
##Setting the current working directory
os.chdir("C://Users//apurv//OneDrive//Documents//Projects//2020//pdf-to-audiofile")
pdf_path = 'pdf_files//'
audio_path = 'audio_files//'
##Getting list of all files in pdf folder
filenames = [f for f in listdir(pdf_path) if isfile(join(pdf_path, f))]
## Setting the voice properties
#initialize the voice engine
engine = pyttsx3.init()
##Choosing voice type
voices = engine.getProperty('voices')
engine.setProperty('voice', voices[0].id) #0 for Male, 1 for Female
##Choosing voice rate
rate = engine.getProperty('rate')
engine.setProperty('rate', 200)
##Reading all PDF files one by one and saving the audio files for respective files
for f in filenames:
with pdfplumber.open(r''.join([pdf_path,f])) as pdf:
text = ''
for page in pdf.pages:
text = text + str(' ' if page.extract_text() is None else page.extract_text())
text.replace("_"," ").replace(" \n"," ")
engine.save_to_file(text, "".join([audio_path,f.split('.pdf')[0],'.mp3']))
engine.runAndWait()
| StarcoderdataPython |
1620406 | import sqlalchemy
import os
connection_name = os.environ["DB_CONN_NAME"]
db_name = os.environ["DB_NAME"]
db_user = os.environ["DB_USER"]
db_password = os.environ["DB_PASS"]
driver_name = 'postgres+pg8000'
query_string = dict({"unix_sock": "/cloudsql/{}/.s.PGSQL.5432".format(connection_name)})
def create_engine():
return sqlalchemy.create_engine(
sqlalchemy.engine.url.URL(
drivername=driver_name,
username=db_user,
            password=db_password,
database=db_name,
query=query_string,
),
pool_size=5,
max_overflow=2,
pool_timeout=30,
pool_recycle=1800
)
db = create_engine() | StarcoderdataPython |
1717958 | <gh_stars>1-10
"""
Copyright (c) 2014-2015 F-Secure
See LICENSE for details
"""
from datetime import datetime
import unittest
import mock
from werkzeug.test import Client as HttpClient
from resource_api.errors import ValidationError, DoesNotExist, Forbidden
from resource_api.schema import DateTimeField, IntegerField
from resource_api_http.http import Application
from werkzeug.wrappers import Response
from resource_api_http_client.client import(Client, RootResourceCollection, ResourceInstance, ResourceCollection,
LinkHolder, LinkToOne, RootLinkCollection, LinkCollection, LinkInstance)
from resource_api_http_client.transport import JsonClient
from .base_test import BaseTest
from .simulators import TestService, TestResource, TestLink
class JsonTest(unittest.TestCase):
def _validate_exception(self, exception_class, status_code):
resp = mock.Mock()
resp.data = "666"
resp.status_code = status_code
client = mock.Mock()
client.open.return_value = resp
cli = JsonClient(client)
self.assertRaises(exception_class, cli.open, "some_url")
def test_item_does_not_exist(self):
self._validate_exception(DoesNotExist, 404)
def test_unknown_error(self):
self._validate_exception(Exception, 500)
self._validate_exception(Exception, 430)
def test_ok(self):
resp = mock.Mock()
resp.data = "666"
resp.status_code = 200
client = mock.Mock()
client.open.return_value = resp
cli = JsonClient(client)
self.assertEqual(cli.open("foo"), 666)
def test_validation_error(self):
self._validate_exception(ValidationError, 400)
def test_not_implemented_error(self):
self._validate_exception(NotImplementedError, 501)
def test_not_allowed(self):
self._validate_exception(Forbidden, 405)
class BaseClientTest(BaseTest):
def setUp(self):
super(BaseClientTest, self).setUp()
self.client = Client("/", JsonClient(HttpClient(Application(self.srv), Response)))
class ResourceTest(BaseClientTest):
def test_get_schema(self):
self.assertEqual(self.client.schema, {"foo.Target": mock.ANY, "foo.Source": mock.ANY})
def test_get_root_resource_collection(self):
collection = self.client.get_resource_by_name("foo.Source")
self.assertIsInstance(collection, RootResourceCollection)
def test_get_resource(self):
collection = self.client.get_resource_by_name("foo.Source")
item = collection.get(1)
self.assertIsInstance(item, ResourceInstance)
self.assertTrue(item.pk, 1)
self.assertEqual({"pk": 1, "more_data": "bla", "extra": "foo"}, item.data)
def test_get_count(self):
count = self.client.get_resource_by_name("foo.Source").count()
self.assertEqual(count, 2)
length = len(self.client.get_resource_by_name("foo.Source"))
self.assertEqual(length, 2)
def test_filter(self):
collection = self.client.get_resource_by_name("foo.Source").filter(params={"foo": "bar"})
self.assertNotIsInstance(collection, RootResourceCollection)
self.assertIsInstance(collection, ResourceCollection)
def test_iteration(self):
collection = self.client.get_resource_by_name("foo.Source")
items = list(collection)
self.assertIsInstance(items[0], ResourceInstance)
def test_access_by_index(self):
item = self.client.get_resource_by_name("foo.Source")[0]
self.assertIsInstance(item, ResourceInstance)
def test_create(self):
data = dict(pk=5, extra="Foo", more_data="Bar")
item = self.client.get_resource_by_name("foo.Source").create(data)
self.assertIsInstance(item, ResourceInstance)
self.assertEqual(item.pk, 5)
self.assertEqual(item.data, data)
def test_update(self):
item = self.client.get_resource_by_name("foo.Source")[0]
item.update(data={"extra": "Zool!!!!"})
self.assertEqual(item.data, {"extra": "Zool!!!!", "more_data": "bla", "pk": 1})
def test_delete(self):
collection = self.client.get_resource_by_name("foo.Source")
item = collection.get(1)
item.delete()
self.assertRaises(DoesNotExist, collection.get, 1)
class LinkToOneTest(BaseClientTest):
def test_get_link_holder(self):
links = self.client.get_resource_by_name("foo.Source")[0].links
self.assertIsInstance(links, LinkHolder)
def test_get_link_to_one(self):
link = self.client.get_resource_by_name("foo.Source")[0].links.the_target
self.assertIsInstance(link, LinkToOne)
def test_get_link_to_one_target(self):
link = self.client.get_resource_by_name("foo.Source")[0].links.the_target
self.assertEqual(link.item.target.pk, 2)
def test_get_link_to_one_data(self):
link = self.client.get_resource_by_name("foo.Source")[0].links.the_target
self.assertEqual(link.item.data, {"extra": "foo", "more_data": "bla"})
def test_update(self):
link = self.client.get_resource_by_name("foo.Source")[0].links.the_target
link.item.update({"extra": "Baga fel"})
self.assertEqual(link.item.data, {"extra": "Baga fel", "more_data": "bla"})
def test_set(self):
link = self.client.get_resource_by_name("foo.Source")[0].links.the_target
link.set({"@target": 1, "extra": "Baga fel"})
self.assertEqual(link.item.target.pk, 1)
def test_delete(self):
link = self.client.get_resource_by_name("foo.Source")[0].links.the_target
link.item.delete()
self.assertRaises(DoesNotExist, lambda: link.item.data)
class LinkToManyTest(BaseClientTest):
def test_get_root_link_collection(self):
links = self.client.get_resource_by_name("foo.Source")[0].links.targets
self.assertIsInstance(links, RootLinkCollection)
def test_filter(self):
links = self.client.get_resource_by_name("foo.Source")[0].links.targets.filter()
self.assertNotIsInstance(links, RootLinkCollection)
self.assertIsInstance(links, LinkCollection)
def test_iteration(self):
links = list(self.client.get_resource_by_name("foo.Source")[0].links.targets)
link = links[0]
self.assertIsInstance(link, LinkInstance)
def test_access_by_index(self):
link = self.client.get_resource_by_name("foo.Source")[0].links.targets[0]
self.assertIsInstance(link, LinkInstance)
def test_get_count(self):
links = self.client.get_resource_by_name("foo.Source")[0].links.targets
count = links.count()
self.assertEqual(count, 1)
length = len(links)
self.assertEqual(length, 1)
def test_update(self):
link = self.client.get_resource_by_name("foo.Source")[0].links.targets[0]
link.update({"extra": "Baga fel"})
self.assertEqual(link.data, {"extra": "Baga fel", "more_data": "bla"})
def test_delete(self):
link = self.client.get_resource_by_name("foo.Source")[0].links.targets[0]
link.delete()
self.assertRaises(DoesNotExist, lambda: link.data)
def test_create(self):
links = self.client.get_resource_by_name("foo.Source")[0].links.targets
link = links.create({"@target": 2})
self.assertIsInstance(link, LinkInstance)
self.assertEqual(links.count(), 2)
def test_get(self):
link = self.client.get_resource_by_name("foo.Source")[0].links.targets.get(1)
self.assertEqual(link.target.pk, 1)
class SerializationTest(unittest.TestCase):
def setUp(self):
class Source(TestResource):
class Schema:
pk = IntegerField(pk=True)
datetieme_field = DateTimeField(required=False)
class Links:
class targets(TestLink):
target = "Target"
one_way = True
class Schema:
datetieme_field = DateTimeField(required=False)
class Target(TestResource):
class Schema:
pk = IntegerField(pk=True)
self.srv = srv = TestService()
srv.register(Target, "foo.Target")
srv.register(Source, "foo.Source")
srv.setup()
def _c(model, pk):
srv.storage.set(model.get_name(), pk, {"pk": pk, "datetieme_field": datetime(1, 1, 1, 1, 1, 1)})
_c(Source, 1)
_c(Target, 1)
src = srv._resources_py[Source.get_name()]
srv.storage.set((1, src.links.targets.get_name()), 1, {"datetieme_field": datetime(1, 1, 1, 1, 1, 1)})
self.entry_point = ep = srv.get_entry_point({})
self.storage = srv.storage
self.src = ep.get_resource(Source)
self.target = ep.get_resource(Target)
self.client = Client("/", JsonClient(HttpClient(Application(srv), Response)))
def test_get_resource_datetime(self):
collection = self.client.get_resource_by_name("foo.Source")
item = collection.get(1)
self.assertEqual({"pk": 1, "datetieme_field": datetime(1, 1, 1, 1, 1, 1)}, item.data)
def test_get_link_datetime(self):
collection = self.client.get_resource_by_name("foo.Source")
item = collection.get(1)
link = item.links.targets.get(1)
self.assertEqual({"datetieme_field": datetime(1, 1, 1, 1, 1, 1)}, link.data)
| StarcoderdataPython |
1685158 | #!/usr/bin/env python3
# Fix an (any) KHARMA restart file so that KHARMA can restart from it
# this works around a bug in Parthenon w.r.t. mesh sizes
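# Usage sketch (the filename is an example): python fix_restart.py restart.h5
# The file is opened in "r+" mode and modified in place.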
import sys
import numpy as np
import h5py
outf = h5py.File(sys.argv[1], "r+")
# Parthenon records the full size here,
# but pretty clearly expects the size without ghost zones.
# TODO running this script twice will cause errors
outf['Info'].attrs.modify('MeshBlockSize',
np.maximum(outf['Info'].attrs['MeshBlockSize'][()] - 2*outf['Info'].attrs['IncludesGhost'][()]*outf['Info'].attrs['NGhost'][()],
np.ones_like(outf['Info'].attrs['MeshBlockSize'][()])))
outf.close() | StarcoderdataPython |
3218330 | from . import deterministic as spectralPDE
from . import stochastic as spectralSPDE
from . import version
spectralPDE = spectralPDE.setup_solver
spectralSPDE = spectralSPDE.setup_solver
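# Note: the module names imported above are rebound to their setup_solver
# callables, so callers invoke spectralPDE(...) / spectralSPDE(...) directly.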
| StarcoderdataPython |
3276196 | from struct import pack
import sys
def printPacket(packet, split):
    i = 0
    for c in packet:
        # bytes iterate as ints on Python 3 and as 1-char strings on Python 2
        byte = c if isinstance(c, int) else ord(c)
        sys.stdout.write("%02x" % byte)
sys.stdout.write(" ")
i += 1
if i == split:
i = 0
sys.stdout.write('\n')
sys.stdout.write('\n\n')
class RecordType:
A = 1
NS = 2
CNAME = 5
class RR:
def __init__(self):
self.rname = 0
self.rtype = 0
self.rclass = 0
self.ttl = 0
self.len = 0
self.record = [0,0,0,0]
def generateRecord(self):
record = pack("!HHHIH",
self.rname,
self.rtype,
self.rclass,
self.ttl,
self.len)
for part in self.record:
record += pack("!B", int(part))
return record;
class DNSResponse:
def __init__(self, transaction):
self.transaction = transaction
self.flags = 0
self.questions = 0
self.answerRR = 0
self.authorityRR = 0
self.additionalRR = 0
self.query = []
self.answer = []
def addQuery(self, domain):
self.query.append(domain)
self.questions += 1
def addAnswer(self, address, rtype):
ip = address.split(".")
rr = RR()
if rtype == RecordType.A:
rr.rname = 0xc00c
elif rtype == RecordType.CNAME:
rr.rname = 0xc010
rr.rtype = rtype
rr.rclass = 1
rr.record = ip
rr.len = len(ip)
rr.ttl = 30
self.answer.append(rr)
self.answerRR += 1
def generatePacket(self):
header = pack('!HHHHHH', self.transaction, 0x8180, self.questions, self.answerRR, self.authorityRR, self.additionalRR)
for dom in self.query:
header += dom
header += pack("!B", 0x00)
header += pack("!H", RecordType.A)
header += pack("!H", 0x1)
for record in self.answer:
header += record.generateRecord()
return header
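
# Minimal usage sketch (not part of the original module). The label-encoded
# domain bytes passed to addQuery() are an assumption about what callers pass.
if __name__ == "__main__":
    resp = DNSResponse(0x1234)
    resp.addQuery(b"\x07example\x03com")
    resp.addAnswer("1.2.3.4", RecordType.A)
    printPacket(resp.generatePacket(), 16)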
| StarcoderdataPython |
1745805 | <filename>tests/make_testing_data.py
import rasterio as rio
from rasterio import Affine
import numpy as np
import click
def makehappytiff(dst_path, seams_path):
kwargs = {
'blockxsize': 256,
'blockysize': 256,
'compress': 'lzw',
'count': 4,
'crs': {'init': u'epsg:3857'},
'driver': u'GTiff',
'dtype': 'uint8',
'height': 1065,
'nodata': None,
'tiled': True,
'transform': Affine(4.595839562240513, 0.0, -13550756.3744, 0.0, -4.595839562240513, 6315533.02503),
'width': 1065}
imsize = 1065
testdata = [(np.random.rand(imsize,imsize)*255).astype(np.uint8) for i in range(4)]
for i in range(4):
testdata[i][0:100,:] = 0
testdata[i][:,900:] = 0
with rio.open(dst_path, 'w', **kwargs) as dst:
for i, arr in enumerate(testdata, 1):
dst.write(arr, i)
if seams_path:
frto = np.sort(np.random.rand(2) * imsize).astype(int)
rInds = np.arange(frto[0], frto[1], (frto[1] - frto[0]) / float(imsize)).astype(int)
inds = np.arange(imsize)
for i in range(4):
testdata[i][rInds, inds] = 0
testdata[i][rInds-1, inds] = 0
testdata[i][rInds+1, inds] = 0
testdata[i][inds, rInds] = 0
testdata[i][inds, rInds-1] = 0
testdata[i][inds, rInds+1] = 0
with rio.open(seams_path, 'w', **kwargs) as dst:
for i, arr in enumerate(testdata, 1):
dst.write(arr, i)
def getnulldiff(in1, in2, threshold):
with rio.open(in1, 'r') as src:
msk1 = src.read_masks()
with rio.open(in2, 'r') as src:
msk2 = src.read_masks()
    allmsk1 = ((msk1[0] == 0) & (msk1[1] == 0) & (msk1[2] == 0)).astype(int)
    allmsk2 = ((msk2[0] == 0) & (msk2[1] == 0) & (msk2[2] == 0)).astype(int)
diff = np.count_nonzero(allmsk1) - np.count_nonzero(allmsk2)
assert diff >= threshold, "input 1 has more than %d nodata pixels than input 2" % (threshold)
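# Example call (hypothetical paths): assert input 1 has at least `threshold`
# more all-band nodata pixels than input 2.
#   getnulldiff('/tmp/happy.tif', '/tmp/happy-seams.tif', 0)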
if __name__ == '__main__':
    # placeholder paths for a quick manual run; makehappytiff needs both arguments
    makehappytiff('/tmp/happy.tif', '/tmp/happy-seams.tif')
| StarcoderdataPython |
3229893 | # Copyright (C) 2015-2016 Regents of the University of California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
def clusterFactory(provisioner, clusterName=None, zone=None, nodeStorage=50, sseKey=None):
"""
:param clusterName: The name of the cluster.
:param provisioner: The cloud type of the cluster.
:param zone: The cloud zone
    :return: A cluster object for the cloud type.
"""
if provisioner == 'aws':
try:
from toil.provisioners.aws.awsProvisioner import AWSProvisioner
except ImportError:
logger.error('The aws extra must be installed to use this provisioner')
raise
return AWSProvisioner(clusterName, zone, nodeStorage, sseKey)
elif provisioner == 'gce':
try:
from toil.provisioners.gceProvisioner import GCEProvisioner
except ImportError:
logger.error('The google extra must be installed to use this provisioner')
raise
return GCEProvisioner(clusterName, zone, nodeStorage, sseKey)
elif provisioner == 'azure':
try:
from toil.provisioners.azure.azureProvisioner import AzureProvisioner
except ImportError:
logger.error('The azure extra must be installed to use this provisioner')
raise
return AzureProvisioner(clusterName, zone, nodeStorage)
else:
raise RuntimeError("Invalid provisioner '%s'" % provisioner)
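
# Usage sketch (cluster name and zone are placeholders; requires the matching
# provisioner extra to be installed):
#   cluster = clusterFactory('aws', clusterName='my-toil-cluster', zone='us-west-2a')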
class NoSuchClusterException(Exception):
"""Indicates that the specified cluster does not exist."""
def __init__(self, clusterName):
super(NoSuchClusterException, self).__init__("The cluster '%s' could not be found" % clusterName)
| StarcoderdataPython |
10953 | <gh_stars>0
from tests.base import TestCase, main, assets
from ocrd_models.ocrd_page import (
AlternativeImageType,
PcGtsType,
PageType,
TextRegionType,
TextLineType,
WordType,
GlyphType,
parseString,
parse,
to_xml
)
simple_page = """\
<PcGts xmlns="http://schema.primaresearch.org/PAGE/gts/pagecontent/2013-07-15" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://schema.primaresearch.org/PAGE/gts/pagecontent/2013-07-15 http://schema.primaresearch.org/PAGE/gts/pagecontent/2013-07-15/pagecontent.xsd">
<Metadata>
<Creator>OCR-D</Creator>
<Created>2016-09-20T11:09:27.041+02:00</Created>
<LastChange>2018-04-25T17:44:49.605+01:00</LastChange>
</Metadata>
<Page
imageFilename="https://github.com/OCR-D/assets/raw/master/data/kant_aufklaerung_1784/data/OCR-D-IMG/INPUT_0017.tif"
imageWidth="1457"
imageHeight="2083"
type="content">
<TextRegion type="heading" id="r_1_1" custom="readingOrder {index:0;} structure {type:heading;}">
<Coords points="113,365 919,365 919,439 113,439"/>
<TextLine id="tl_1" primaryLanguage="German" custom="readingOrder {index:0;} textStyle {offset:0; length:26;fontFamily:Arial; fontSize:17.0; bold:true;}">
<Coords points="114,366 918,366 918,438 114,438"/>
<Baseline points="114,429 918,429"/>
<Word id="w_w1aab1b1b2b1b1ab1" language="German" custom="readingOrder {index:0;} textStyle {offset:0; length:11;fontFamily:Arial; fontSize:17.0; bold:true;}">
<Coords points="114,368 442,368 442,437 114,437"/>
<TextEquiv conf="0.987654321">
<Unicode>Berliniſche</Unicode>
</TextEquiv>
</Word>
</TextLine>
</TextRegion>
</Page>
</PcGts>
"""
# pylint: disable=protected-access
class TestOcrdPage(TestCase):
def setUp(self):
with open(assets.path_to('glyph-consistency/data/OCR-D-GT-PAGE/FAULTY_GLYPHS.xml'), 'rb') as f:
self.xml_as_str = f.read()
self.pcgts = parseString(self.xml_as_str, silence=True)
def test_to_xml(self):
# with open('/tmp/test.xml', 'w') as f:
# f.write(to_xml(self.pcgts))
self.assertIn(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15 http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15/pagecontent.xsd"', to_xml(self.pcgts)[:1000])
self.assertIn('</TextRegion', to_xml(self.pcgts))
def test_issue_269(self):
"""
@conf is parsed as str but should be float
https://github.com/OCR-D/core/issues/269
"""
# GIGO
self.pcgts.get_Page().get_TextRegion()[0].get_TextEquiv()[0].set_conf(1.0)
self.assertEqual(type(self.pcgts.get_Page().get_TextRegion()[0].get_TextEquiv()[0].get_conf()), float)
self.pcgts.get_Page().get_TextRegion()[0].get_TextEquiv()[0].set_conf('1.0')
self.assertEqual(type(self.pcgts.get_Page().get_TextRegion()[0].get_TextEquiv()[0].get_conf()), str)
# test with parseString that @conf in TextEquiv won't throw an error
parseString(simple_page, silence=True)
# self.assertTrue(True)
def test_pcGtsId(self):
self.assertEqual(self.pcgts.pcGtsId, 'glyph-test')
def test_delete_region(self):
pcgts = parseString(simple_page, silence=True)
self.assertEqual(len(pcgts.get_Page().get_TextRegion()), 1)
del pcgts.get_Page().get_TextRegion()[0]
self.assertEqual(len(pcgts.get_Page().get_TextRegion()), 0)
def test_imageFileName(self):
# print(self.pcgts.export(sys.stdout, 0))
self.assertEqual(self.pcgts.get_Page().imageFilename, '00000259.sw.tif')
self.pcgts.get_Page().imageFilename = 'foo'
self.assertEqual(self.pcgts.get_Page().imageFilename, 'foo')
def test_alternativeImage(self):
pcgts = PcGtsType(pcGtsId="foo")
self.assertEqual(pcgts.pcGtsId, 'foo')
# Page/AlternativeImage
page = PageType()
pcgts.set_Page(page)
page.add_AlternativeImage(AlternativeImageType())
# TextRegion/AlternativeImage
region = TextRegionType()
page.add_TextRegion(region)
region.add_AlternativeImage(AlternativeImageType())
# TextLine/AlternativeImage
line = TextLineType()
region.add_TextLine(line)
line.add_AlternativeImage(AlternativeImageType())
# Word/AlternativeImage
word = WordType()
line.add_Word(word)
word.add_AlternativeImage(AlternativeImageType())
# Glyph/AlternativeImage
glyph = GlyphType()
word.add_Glyph(glyph)
glyph.add_AlternativeImage(AlternativeImageType())
def test_simpletypes(self):
pcgts = parseString(simple_page, silence=True)
self.assertTrue(isinstance(pcgts.get_Page().imageWidth, int))
el = pcgts.get_Page().get_TextRegion()[0].get_TextLine()[0].get_Word()[0].get_TextEquiv()[0]
self.assertTrue(isinstance(el.conf, float))
# XXX no validation on setting attributes :-(
# c.f. https://www.davekuhlman.org/generateDS.html#simpletype
# el.set_conf('2.0987')
# self.assertTrue(isinstance(el.conf, float))
with self.assertRaisesRegex(TypeError, ''):
el.set_conf('I AM NOT A FLOAT DEAL WITH IT')
parseString(to_xml(pcgts).encode('utf8'))
if __name__ == '__main__':
main()
| StarcoderdataPython |
5755 | <reponame>philippWassibauer/django-activity-stream
from distutils.core import setup
""" django-activity-stream instalation script """
setup(
name = 'activity_stream',
description = 'generic activity feed system for users',
author = '<NAME>',
author_email = '<EMAIL>',
url='http://github.com/philippWassibauer/django-activity-stream',
download_url='http://github.com/philippWassibauer/django-activity-stream/tarball/master',
license='MIT',
version = __import__('activity_stream').__version__,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
| StarcoderdataPython |
1765569 | <filename>reverse-templating.py
# https://github.com/pal03377/reverse-templating
# reverse-templating.py
# reverse-templating is licensed under MIT.
# https://github.com/pal03377/reverse-templating/blob/master/LICENSE
# author: <NAME>
# 2018-01-05
# Reverse templating is a lib to reverse simple templates with {mustache} placeholder notation.
# The main function is get_placeholders_in(template, text, case_sensitive=True), which returns
# a list of dictionaries that contain all placeholders and all possible values for them so that
# the template matches at least a part of the text.
# Examples:
# get_placeholders_in("This is a {whatIsThis}.", "This is a test.")
# => [{'whatIsThis': 'test'}]
# get_placeholders_in("What a {adjective} {whatIsThis}!", "What a great tool!")
# => [{'adjective': 'great', 'whatIsThis': 'tool'}]
# get_placeholders_in("What a {adjective} {whatIsThis}!", "WHAT a great tool!") # case sensitive by default => WHAT does not match
# => []
# get_placeholders_in("What a {adjective} {whatIsThis}!", "WHAT a great tool!", case_sensitive=False)
# => [{'adjective': 'great', 'whatIsThis': 'tool'}]
# get_placeholders_in("Here is a {thing} for you: {smiley}", "Here is a smiley for you: :-)") # You'll get multiple possibilities as the lib doesn't know how long the smiley should be.
# => [{'thing': 'smiley', 'smiley': ':'}, {'thing': 'smiley', 'smiley': ':-'}, {'thing': 'smiley', 'smiley': ':-)'}]
# get_dict_with_longest_values(get_placeholders_in(
# "Here is a {thing} for you: {smiley}", "Here is a smiley for you: :-)"))
# => {'thing': 'smiley', 'smiley': ':-)'}
import re, itertools
def get_all_occurrence_ends(sub, a_str):
start = 0
while True:
start = a_str.find(sub, start)
if start == -1: return
yield start + len(sub)
start += 1
def is_ascending_sequence(seq):
"""2 same values next to each other not allowed"""
if seq == []:
return True
for index in range(1, len(seq)):
if seq[index] <= seq[index-1]:
return False
return True
def placeholder_find_helper(template, text, case_sensitive=True):
"""
helper function of get_placeholders_in
returns list of list of potential placeholders in text
takes template as a list of strings, between the strings are the placeholders"""
text_to_search_in = text
template_to_search_in = template
if not case_sensitive:
text_to_search_in = text_to_search_in.lower()
template_to_search_in = [part.lower() for part in template_to_search_in]
# get all occurrences of all template text parts
part_occ = [get_all_occurrence_ends(
part, text_to_search_in) for part in template_to_search_in]
# get all combinations with an element from the first
# part_occ sublist, than with an element from the next one and
# so on
combi_occ = itertools.product(*part_occ)
# filter those out with an ascending number sequence
# => placeholder indices must be in order
combi_occ = filter(is_ascending_sequence, combi_occ)
# trick for later
template.append("")
# now make the indices ~great~ text again
to_return = []
for placeholder_indices in combi_occ:
to_return.append([])
template_index = 0
if placeholder_indices[0] > 0:
# text before the 1st placeholder in template, not needed
# remove it
template_index = 1
for p_i_index in range(len(placeholder_indices)):
placeholder_index = placeholder_indices[p_i_index]
if p_i_index+1 >= len(placeholder_indices):
next_placeholder_index = -1
else:
next_placeholder_index = placeholder_indices[p_i_index+1]
# start is placeholder_index,
# end is next_placeholder_index-len(template[template_index])
to_return[-1].append(
text[placeholder_index:(
next_placeholder_index-len(template[template_index]))]
)
template_index += 1
return to_return
def get_placeholders_in(template, text, case_sensitive=True):
"""takes a template and a text onto which the template (partly) possibly matches
and returns a dict of placeholder names and their values
Placeholders in templates are marked with "{}", e.g. This is {adjective}!
where {adjective} could be anything (e.g. cool)
returns a dict with the values being the placeholder names (here: "adjective")
and the keys the text parts (here: "cool").
case_sensitive=False ignores the case for the search, but still returns cased values. """
# seperate the text from the placeholders in template
seperated = [split_up.split("}") for split_up in template.split("{")]
# flatten it
seperated = [a for b in seperated for a in b]
# now, every second element in seperated is text and the others
# (with an odd index) are placeholders
# extract them
template_text = seperated[::2]
template_placeholders = seperated[1::2]
    # great, now let's get potential candidates for the placeholders
potential_placeholders = placeholder_find_helper(
template_text, text, case_sensitive)
to_return = []
for placeholders in potential_placeholders:
# match the placeholder names onto the potential placeholders
matched_placeholders = dict(zip(template_placeholders, placeholders))
# format the text with it
to_return.append(
matched_placeholders
)
return to_return
def get_values_lengthes(dictionary):
"""returns the sum of the lengthes of values of a dictionary"""
return sum(map(lambda value: len(value), dictionary.values()))
def sort_by_values_length(dictionaries):
"""looks at the sum of the lengthes of the values of dicts in a list and returns them sorted"""
return sorted(dictionaries, key=get_values_lengthes)
def get_dict_with_longest_values(dictionaries):
"""returns the dict of a dict list with the longest values (in sum)"""
return sort_by_values_length(dictionaries)[-1]
def get_dict_with_shortest_values(dictionaries):
"""returns the dict of a dict list with the shortest values (in sum)"""
return sort_by_values_length(dictionaries)[0]
def apply_vars_to_template(placeholder_vars, template):
    """takes a return value of get_placeholders_in and produces strings out of it with the help of the template"""
    to_return = []
    for placeholders in placeholder_vars:
        # each element is already a {placeholder: value} dict, so it can be
        # applied to the template directly
        to_return.append(template.format(**placeholders))
    return to_return
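# Example (sketch), applying every candidate match back onto the template:
#   apply_vars_to_template(
#       get_placeholders_in("This is a {x}.", "This is a test."),
#       "This is a {x}.")
#   => ['This is a test.']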
if __name__ == "__main__":
print(get_placeholders_in("This is a {whatIsThis}.", "This is a test."))
print(get_placeholders_in("What a {adjective} {whatIsThis}!", "What a great tool!"))
print(get_placeholders_in("What a {adjective} {whatIsThis}!", "WHAT a great tool!")) # case sensitive by default => WHAT does not match
print(get_placeholders_in("What a {adjective} {whatIsThis}!", "WHAT a great tool!", case_sensitive=False))
print(get_placeholders_in("Here is a {thing} for you: {smiley}", "Here is a smiley for you: :-)")) # You'll get multiple possibilities as the lib doesn't know how long the smiley should be.
print(get_dict_with_longest_values(get_placeholders_in(
"Here is a {thing} for you: {smiley}", "Here is a smiley for you: :-)")))
| StarcoderdataPython |
1716680 | <gh_stars>1-10
from typing import Optional, Set, Union
import logging
from overrides import overrides
from allennlp.common.file_utils import cached_path
from allennlp.data.dataset_readers.dataset_reader import DatasetReader
from contexteval.contextualizers import Contextualizer
from contexteval.data.dataset_readers import TaggingDatasetReader
logger = logging.getLogger(__name__)
@DatasetReader.register("billion_word_benchmark_language_modeling")
class LanguageModelingDatasetReader(TaggingDatasetReader):
"""
Reads a file with a sentence per line (billion-word benchmark format), and
returns instances for language modeling. Each instances is a line in the dataset,
and they are predicted independently of each other.
Parameters
----------
max_length: int, optional (default=50)
The maximum length of the sequences to use in the LM task. Any sequences that are
longer than this value will be discarded.
backward: bool, optional (default=False)
If so, generate instances suitable for evaluating the a backward language model.
For example, if the sentence is [a, b, c, d], the forward instance would have tokens of
[a, b, c] and labels of [b, c, d], whereaas the backward instance would have tokens of
[b, c, d] and labels of [a, b, c].
vocabulary_path: str, optional (default=None)
If provided, words in the input files that are not in this vocabulary are set to "<UNK>".
contextualizer: Contextualizer, optional (default=``None``)
If provided, it is used to produce contextualized representations of the text.
max_instances: int or float, optional (default=``None``)
The number of instances to use during training. If int, this value is taken
to be the absolute amount of instances to use. If float, this value indicates
that we should use that proportion of the total training data. If ``None``,
all instances are used.
seed: int, optional (default=``0``)
The random seed to use.
lazy : ``bool``, optional (default=``False``)
If this is true, ``instances()`` will return an object whose ``__iter__`` method
reloads the dataset each time it's called. Otherwise, ``instances()`` returns a list.
"""
def __init__(self,
max_length: int = 50,
backward: bool = False,
vocabulary_path: Optional[str] = None,
contextualizer: Optional[Contextualizer] = None,
max_instances: Optional[Union[int, float]] = None,
seed: int = 0,
lazy: bool = False) -> None:
super().__init__(
contextualizer=contextualizer,
max_instances=max_instances,
seed=seed,
lazy=lazy)
self._max_length = max_length
self._vocabulary_path = vocabulary_path
self._vocabulary: Set[str] = set()
if vocabulary_path:
# Load the vocabulary
cached_vocabulary_path = cached_path(vocabulary_path)
with open(cached_vocabulary_path) as cached_vocabulary_file:
for line in cached_vocabulary_file:
token = line.rstrip("\n")
self._vocabulary.add(token)
self._backward = backward
@overrides
def _read_dataset(self,
file_path: str,
count_only: bool = False,
keep_idx: Optional[Set[int]] = None):
"""
Yield instances from the file_path.
Parameters
----------
file_path: str, required
The path to the data file.
count_only: bool, optional (default=``False``)
If True, no instances are returned and instead a dummy object is
returned. This is useful for quickly counting the number of instances
in the data file, since creating instances is relatively expensive.
keep_idx: Set[int], optional (default=``None``)
If not None, only yield instances whose index is in this set.
"""
# if `file_path` is a URL, redirect to the cache
file_path = cached_path(file_path)
if count_only:
logger.info("Counting instances (backward: %s) in LM file at: %s",
self._backward, file_path)
else:
logger.info("Reading instances (backward: %s) from lines in LM file at: %s",
self._backward, file_path)
index = 0
with open(file_path) as input_file:
for line in input_file:
clean_line = line.rstrip("\n")
if line.startswith("#"):
continue
# Get tokens and the labels of the instance
tokenized_line = clean_line.split(" ")
if not tokenized_line or len(tokenized_line) > self._max_length:
continue
if count_only:
yield 1
continue
if keep_idx is not None and index not in keep_idx:
index += 1
continue
# Replace OOV tokens in tokenized_line
if self._vocabulary:
tokenized_line = [word if word in self._vocabulary else "<UNK>" for
word in tokenized_line]
if self._backward:
# Tokens are all tokens, labels are a BOS indicator + all except last token
labels = ["<S>"] + tokenized_line[:-1]
else:
# Tokens are all tokens, and labels
# are all except first token + a EOS indicator
labels = tokenized_line[1:] + ["</S>"]
# Contextualize the tokens if a Contextualizer was provided.
if self._contextualizer:
token_representations = self._contextualizer([tokenized_line])[0]
else:
token_representations = None
yield self.text_to_instance(tokenized_line,
token_representations,
labels)
index += 1
| StarcoderdataPython |
3266139 | <filename>src/sensing/drivers/radar/umrr_driver/setup.py
#!/usr/bin/env python
from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup
setup_args = generate_distutils_setup(
packages=['smartmicro'],
package_dir={'': 'src'}
)
setup(**setup_args)
| StarcoderdataPython |
4826884 | <filename>p1_basic/day08_15filefunction/day11/05_global和nonlocal.py
# a = 10 # global variables are inherently unsafe and should not be mutated freely; contrast with closures
# def func():
#     global a # 1. brings the global name into the function body; 2. can create a variable in the global scope
# #a = 20
# a += 10 # a = a+10
# print(a)
#
# func()
# print(a)
# a = 10
# def outer():
#     def inner(): # change the value of a inside inner
#         nonlocal a # binds to the nearest matching variable in an enclosing function
# a = 20
# inner()
# outer()
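
# Illustrative runnable addition (not in the original file): `global` rebinds
# a module-level name, while `nonlocal` rebinds the nearest enclosing
# function's local.
counter = 0
def bump_global():
    global counter
    counter += 1
def make_counter():
    n = 0
    def bump():
        nonlocal n
        n += 1
        return n
    return bump
bump_global()
c = make_counter()
print(counter, c(), c())   # 1 1 2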
# a = 1
# def fun_1():
# a = 2
# def fun_2():
# global a
# a = 3
# def fun_3():
# a = 4
# print(a)
# print(a)
# fun_3()
# print(a)
# print(a)
# fun_2()
# print(a)
#
# print(a)
# fun_1()
# print(a) | StarcoderdataPython |
1673924 | # Generated by Django 3.2.7 on 2021-09-20 08:27
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('store', '0004_address_subregion'),
]
operations = [
migrations.RemoveField(
model_name='address',
name='city',
),
]
| StarcoderdataPython |
166397 | import requests
import ast
"""some modifactions to coinCommand to allow an easier way to test cli"""
def cp(coin,currency):
"""gets coin price """
try:
return cp_Request_to_Url(coin,currency)
except Exception as err:
return "coin or currency doesnt exist"
def cp_Request_to_Url(coin,price):
url="https://min-api.cryptocompare.com/data/price?fsym=%s&tsyms=%s"%(coin,price)
    request_to_url = requests.get(url)
    return formatting_Unicode_Currency(request_to_url.text, price)
def formatting_Unicode_Currency(request,price):
unicode_Format = ast.literal_eval(request)
euro = u'\u20ac'
if(price == 'USD'):
return '$'+str(unicode_Format[price])
if(price =='EUR'):
return euro+str(unicode_Format[price])
def mined(coin):
try:
return mined_Request_To_Url(coin)
except Exception as err:
        return "coin doesn't exist"
def mined_Request_To_Url(coin):
url = 'https://www.cryptocompare.com/api/data/coinsnapshot/?fsym=%s&tsym=USD'%coin
request_To_Url = requests.get(url)
return formatting_Unicode_Mined(request_To_Url.text)
def formatting_Unicode_Mined(request):
formatting_Mined_Information = ast.literal_eval(request)
return formatting_Mined_Information['Data']['TotalCoinsMined']
def algo(coin):
""" gets the coin implementation algorithm"""
try:
return algo_Request_To_Url(coin)
except Exception as err:
        return "coin doesn't exist"
def algo_Request_To_Url(coin):
url = 'https://www.cryptocompare.com/api/data/coinsnapshot/?fsym=%s&tsym=USD'%coin
request_To_Url = requests.get(url)
return formatting_Unicode_CoinSnapShot(request_To_Url.text)
def formatting_Unicode_CoinSnapShot(request):
formatting_Of_CoinSnapShot = ast.literal_eval(request)
return formatting_Of_CoinSnapShot['Data']['Algorithm']
def news():
    return request_to_News()
def request_to_News():
url = 'https://min-api.cryptocompare.com/data/v2/news/?lang=EN'
request_To_url = requests.get(url)
return formatting_Unicode_news(request_To_url.text)
def formatting_Unicode_news(request):
formatting_News_unicode = ast.literal_eval(request)
return formatting_News_unicode
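
# Usage sketch (requires network access; ticker symbols are examples):
#   print(cp("BTC", "USD"))
#   print(algo("BTC"))
#   print(mined("BTC"))
#   print(news())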
| StarcoderdataPython |
1671433 | from ctypes import *
import math
import random
import os
import cv2
import numpy as np
import time
import darknet
#
import threading
import ini
import datetime
import json
from Connserver import Connserver
####
from time import sleep
from threading import Thread
##from pynput import keyboard
#wa
"""
def convertBack2(x, y, w, h):
xmin = int(round(x - (w / 2)))
xmax = int(round(x + (w / 2)))
ymin = int(round(y - (h / 2)))
ymax = int(round(y + (h / 2)))
return xmin, ymin, xmax, ymax
def cvDrawBoxes2(detections, img):
for detection in detections:
x, y, w, h = detection[2][0],\
detection[2][1],\
detection[2][2],\
detection[2][3]
xmin, ymin, xmax, ymax = convertBack2(
float(x), float(y), float(w), float(h))
pt1 = (xmin, ymin)
pt2 = (xmax, ymax)
cv2.rectangle(img, pt1, pt2, (0, 255, 0), 1)
cv2.circle(img, (int(x),int(y)), 1, (255,0,0), 2)#
cv2.putText(img,
detection[0].decode() +
" [" + str(round(detection[1] * 100, 2)) + "]",
(pt1[0], pt1[1] - 5), cv2.FONT_HERSHEY_SIMPLEX, 0.5,
[0, 255, 0], 2)
return img
"""
def init():
global preLeft,preRight,LCheck,RCheck
global LeftNow,RightNow
global LeftCount,RightCount
global forward_temp, forward_appear
global forward_sum_threshold, forward_pixel, forward_road_coord, forward_gaussian_range, Fgaussian_range, darknet_road_line, count
global forward_pixel_threshold, two_way_frist, forward_road_number, count_clear
global forward_pMOG2
global forward_foreground
global Croad, Cline, ExistM, Sspeed, Bspeed, Speed, l_Maxsp, l_Minsp, l_Ct, kname, Csum, count
global Sp_time1, Sfp_time1, Sp_time2, Sfp_time2, Sp_time3, Sfp_time3, Vsc, Vbc, Svs, Svb, Rhold
Sspeed = [0, 0, 0]
Bspeed = [0, 0, 0]
Vsc = [0, 0, 0]
Vbc = [0, 0, 0]
Svs = [0, 0, 0]
Svb = [0, 0, 0]
Rhold = [0.0, 0.0, 0.0]
Croad,Cline = 0, 0
l_Maxsp = 200
l_Minsp = 0
l_Ct = 200
Speed = 0
count_clear = 0
preLeft,preRight,LCheck,RCheck=0,0,0,0
LeftNow,RightNow,LeftCount,RightCount=0,0,0,0
global lock
lock=threading.Lock()
def set_area(x,y,w,h):
global x_sc,y_sc,w_sc,h_sc
x_sc,y_sc,w_sc,h_sc=x,y,w,h
def set_count_area(x,y,w,h):
global c_x,c_y,c_w,c_h
c_x,c_y,c_w,c_h=x,y,w,h
def set_road_line(x1,y1,x2,y2):
global road_point1,road_point2
global _d_x,_d_y,_d_c
road_point1,road_point2=(x1*416,y1*416),(x2*416,y2*416)
_d_x,_d_y=(y2-y1),(x1-x2)
_d_c=-(x1*_d_x+y1*_d_y)
def set_yolo_range(msg):
global Fgaussian_range, darknet_road_line, count_clear
global ExistM, forward_road_number, l_Maxsp, l_Minsp, l_Ct
if(msg=='Error'): return
msg=msg[:-5]
#print("debug:",msg)
#print("_msg=",msg)
_json=json.loads(msg)
if(_json['requset'] == 1):
forward_road_number = _json['road'] + 1
l_Maxsp = _json['lim_Maxsp']
l_Minsp = _json['lim_Minsp']
l_Ct = _json['lim_Ct']
if(_json['cl'] == 1):
count_clear = 1;
if(_json['road'] == 0):
Fgaussian_range[0, 0] = _json['yolo1'][0]
Fgaussian_range[0, 1] = _json['yolo1'][1]
Fgaussian_range[0, 2] = _json['yolo1'][2]
Fgaussian_range[0, 3] = _json['yolo1'][3]
darknet_road_line[0, 0] = _json['road1'][0]
darknet_road_line[0, 1] = _json['road1'][1]
darknet_road_line[0, 2] = _json['road1'][2]
darknet_road_line[0, 3] = _json['road1'][3] #DOWM TO UP
darknet_road_line[1, 0] = _json['road2'][0]
darknet_road_line[1, 1] = _json['road2'][1]
darknet_road_line[1, 2] = _json['road2'][2]
darknet_road_line[1, 3] = _json['road2'][3]
elif(_json['road'] == 1):
Fgaussian_range[0, 0] = _json['yolo1'][0]
Fgaussian_range[0, 1] = _json['yolo1'][1]
Fgaussian_range[0, 2] = _json['yolo1'][2]
Fgaussian_range[0, 3] = _json['yolo1'][3]
Fgaussian_range[1, 0] = _json['yolo2'][0]
Fgaussian_range[1, 1] = _json['yolo2'][1]
Fgaussian_range[1, 2] = _json['yolo2'][2]
Fgaussian_range[1, 3] = _json['yolo2'][3]
darknet_road_line[0, 0] = _json['road1'][0]
darknet_road_line[0, 1] = _json['road1'][1]
darknet_road_line[0, 2] = _json['road1'][2]
darknet_road_line[0, 3] = _json['road1'][3]
darknet_road_line[1, 0] = _json['road2'][0]
darknet_road_line[1, 1] = _json['road2'][1]
darknet_road_line[1, 2] = _json['road2'][2]
darknet_road_line[1, 3] = _json['road2'][3]
darknet_road_line[2, 0] = _json['road3'][0]
darknet_road_line[2, 1] = _json['road3'][1]
darknet_road_line[2, 2] = _json['road3'][2]
darknet_road_line[2, 3] = _json['road3'][3]
elif(_json['road'] == 2):
Fgaussian_range[0, 0] = _json['yolo1'][0]
Fgaussian_range[0, 1] = _json['yolo1'][1]
Fgaussian_range[0, 2] = _json['yolo1'][2]
Fgaussian_range[0, 3] = _json['yolo1'][3]
Fgaussian_range[1, 0] = _json['yolo2'][0]
Fgaussian_range[1, 1] = _json['yolo2'][1]
Fgaussian_range[1, 2] = _json['yolo2'][2]
Fgaussian_range[1, 3] = _json['yolo2'][3]
Fgaussian_range[2, 0] = _json['yolo3'][0]
Fgaussian_range[2, 1] = _json['yolo3'][1]
Fgaussian_range[2, 2] = _json['yolo3'][2]
Fgaussian_range[2, 3] = _json['yolo3'][3]
darknet_road_line[0, 0] = _json['road1'][0]
darknet_road_line[0, 1] = _json['road1'][1]
darknet_road_line[0, 2] = _json['road1'][2]
darknet_road_line[0, 3] = _json['road1'][3]
darknet_road_line[1, 0] = _json['road2'][0]
darknet_road_line[1, 1] = _json['road2'][1]
darknet_road_line[1, 2] = _json['road2'][2]
darknet_road_line[1, 3] = _json['road2'][3]
darknet_road_line[2, 0] = _json['road3'][0]
darknet_road_line[2, 1] = _json['road3'][1]
darknet_road_line[2, 2] = _json['road3'][2]
darknet_road_line[2, 3] = _json['road3'][3]
darknet_road_line[3, 0] = _json['road4'][0]
darknet_road_line[3, 1] = _json['road4'][1]
darknet_road_line[3, 2] = _json['road4'][2]
darknet_road_line[3, 3] = _json['road4'][3]
if(_json['scooterRoad'] == 1):
ExistM = 1
def chk_left(x,y):
#global _d_x,_d_y,_d_c
#is_right=false
#2
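    # Cross-product side test: (_d_x, _d_y) come from set_road_line, so the
    # sign of _cc tells which side of the road line the point (x, y) lies
    # on; positive is treated as left.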
_cc=(-_d_y*(y-road_point1[1]))-(_d_x*(x-road_point1[0]))
#print("_cc=",_cc)
if(_cc>0):#/
#print("left")
return True
else:
#print("right")
return False
#count
def counting(detections):
global preLeft,preRight,LCheck,RCheck # L=North R=South
global lock
global LeftNow,RightNow #northnow sourthnow
global LeftCount,RightCount #northCouter sourthcounter
#
Left,Right=0,0
#
for detection in detections:
x, y, w, h = detection[2][0],detection[2][1],detection[2][2],detection[2][3]
#
_is_left=chk_left(x,y) # RIGHT : LEFT
#
if(_is_left):Left=Left+1
else:Right=Right+1
## max
if(Left<preLeft):LCheck=LCheck+1
else:
preLeft=Left
LCheck=0
if(Right<preRight):RCheck=RCheck+1
else:
preRight=Right
RCheck=0
#print("count:",Left,":",Right) ##########################
#
lock.acquire()
LeftNow,RightNow=Left,Right
if(LCheck>4):
LeftCount=preLeft-Left
LCheck=0
preLeft=Left
if(RCheck>4):
RightCount=preRight-Right
RCheck=0
preRight=Right
lock.release()
############################
def convertBack(x, y, w, h):
xmin = int(round(x - (w / 2)))
xmax = int(round(x + (w / 2)))
ymin = int(round(y - (h / 2)))
ymax = int(round(y + (h / 2)))
return xmin, ymin, xmax, ymax
def cvDrawBoxes(detections, img):
for detection in detections:
x, y, w, h = detection[2][0],\
detection[2][1],\
detection[2][2],\
detection[2][3]
xmin, ymin, xmax, ymax = convertBack(
float(x), float(y), float(w), float(h))
pt1 = (xmin, ymin)
pt2 = (xmax, ymax)
cv2.rectangle(img, pt1, pt2, (0, 255, 0), 1)
cv2.circle(img, (int(x),int(y)), 1, (255,0,0), 2)#
cv2.putText(img,
detection[0].decode() +
" [" + str(round(detection[1] * 100, 2)) + "]",
(pt1[0], pt1[1] - 5), cv2.FONT_HERSHEY_SIMPLEX, 0.5,
[0, 255, 0], 2)
return img
#
netMain = None
metaMain = None
altNames = None
def GMM():
global metaMain, netMain, altNames,frame_resized,darknet_road_line,Fgaussian_range,kname
configPath = "./cfg/yolov4_t74.cfg"
weightPath = "./yolov4-t74_1230_last.weights"
metaPath = "./cfg/TESTcoco_old.data"
if not os.path.exists(configPath):
raise ValueError("Invalid config path `" +
os.path.abspath(configPath)+"`")
if not os.path.exists(weightPath):
raise ValueError("Invalid weight path `" +
os.path.abspath(weightPath)+"`")
if not os.path.exists(metaPath):
raise ValueError("Invalid data file path `" +
os.path.abspath(metaPath)+"`")
if netMain is None:
netMain = darknet.load_net_custom(configPath.encode(
"ascii"), weightPath.encode("ascii"), 0, 1)
if metaMain is None:
metaMain = darknet.load_meta(metaPath.encode("ascii"))
if altNames is None:
try:
with open(metaPath) as metaFH:
metaContents = metaFH.read()
import re
match = re.search("names *= *(.*)$", metaContents,
re.IGNORECASE | re.MULTILINE)
if match:
result = match.group(1)
else:
result = None
try:
if os.path.exists(result):
with open(result) as namesFH:
namesList = namesFH.read().strip().split("\n")
altNames = [x.strip() for x in namesList]
except TypeError:
pass
except Exception:
pass
#chk cap
for kk in range(24):
global Csum, count
count = np.array([[0,0,0,0,0,0],[0,0,0,0,0,0],[0,0,0,0,0,0]])
kname = str(kk) + ".txt"
cap = cv2.VideoCapture(str(kk) + ".mp4")
cap.set(cv2.CAP_PROP_BUFFERSIZE, 3);
cap.set(3, 640)
cap.set(4, 480)
print("Starting the YOLO loop...")
darknet_image = darknet.make_image(darknet.network_width(netMain),
darknet.network_height(netMain),3)
while _Keep_Run:
prev_time = time.time()
ret, frame_read = cap.read()
if(not ret):
print("Starting the YOLO loop...")
print("RTSP error! Please Check Internert")
break;
frame_rgb = cv2.cvtColor(frame_read, cv2.COLOR_BGR2RGB)
yolox=int(frame_rgb.shape[1]*x_sc)
yoloy=int(frame_rgb.shape[0]*y_sc)
yolow=int(frame_rgb.shape[1]*w_sc)
yoloh=int(frame_rgb.shape[0]*h_sc)
frame_crop=frame_rgb[yoloy:yoloy+yoloh,yolox:yolox+yolow].copy()
frame_resized0 = cv2.resize(frame_crop,
(darknet.network_width(netMain),
darknet.network_height(netMain)),
interpolation=cv2.INTER_LINEAR)
#
frame_resized=cv2.resize(frame_crop,
(darknet.network_width(netMain),
darknet.network_height(netMain)),
interpolation=cv2.INTER_LINEAR)
forward_road_number = 3
darknet_road_line = np.array([[0.575,0.448611111111111,0.21796875,0.661111111111111],
[0.69140625,0.455555555555556,0.3296875,0.780555555555556],
[0.75,0.45,0.43203125,0.844444444444444],
[0.8,0.441666666666667,0.621875,0.851388888888889]])
Fgaussian_range = np.array([[0.45,0.543055555555556,0.06484375,0.0513888888888889],
[0.578125,0.538888888888889,0.0671875,0.0472222222222222],
[0.5984375,0.643055555555556,0.08359375,0.0555555555555556]])
count_area = np.array( [ [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0] ] )
for i in range(forward_road_number):
count_area[i, 0]=int(darknet.network_width(netMain) * Fgaussian_range[i, 0])
count_area[i, 1]=int(darknet.network_height(netMain) * Fgaussian_range[i, 1])
count_area[i, 2]=int(darknet.network_width(netMain) * Fgaussian_range[i, 2])
count_area[i, 3]=int(darknet.network_height(netMain) * Fgaussian_range[i, 3])
global count, count_clear, Vsc, Vbc, Bspeed, Sspeed, Rhold
if (count_clear == 1):
Vsc = [0, 0, 0]
Vbc = [0, 0, 0]
Bspeed = [0, 0, 0]
Sspeed = [0, 0, 0]
Rhold = [0, 0, 0]
for i in range(3):
for j in range(6):
count[i, j] = 0;
count_clear = 0
global forward_road_coord
forward_road_coord = np.array( [ [int(frame_read.shape[1] * Fgaussian_range[0, 0]), int(frame_read.shape[0] * Fgaussian_range[0, 1]), int(frame_read.shape[1] * Fgaussian_range[0, 2]), int(frame_read.shape[0] * Fgaussian_range[0, 3])],[int(frame_read.shape[1] * Fgaussian_range[1, 0]), int(frame_read.shape[0] * Fgaussian_range[1, 1]), int(frame_read.shape[1] * Fgaussian_range[1, 2]), int(frame_read.shape[0] * Fgaussian_range[1, 3])],[int(frame_read.shape[1] * Fgaussian_range[2, 0]), int(frame_read.shape[0] * Fgaussian_range[2, 1]), int(frame_read.shape[1] * Fgaussian_range[2, 2]), int(frame_read.shape[0] * Fgaussian_range[2, 3])] ] )
minx, miny, maxw, maxh = forward_road_coord[0, 0], forward_road_coord[0, 1], 0, 0
for i in range(forward_road_number):
if (minx > forward_road_coord[i, 0]):
minx = forward_road_coord[i, 0]
if (miny > forward_road_coord[i, 1]):
miny = forward_road_coord[i, 1]
if (maxh < forward_road_coord[i, 1] + forward_road_coord[i, 3]):
maxh = forward_road_coord[i, 1] + forward_road_coord[i, 3]
if (maxw < forward_road_coord[i, 0] + forward_road_coord[i, 2]):
maxw = forward_road_coord[i, 0] + forward_road_coord[i, 2]
forward_gaussian_range = [minx, miny, maxw - minx, maxh - miny]
global two_way_frist
global Sp_time1, Sfp_time1, Sp_time2, Sfp_time2, Sp_time3, Sfp_time3
global forward_pMOG2
global forward_foreground
forward_sum = [ 0, 0, 0, 0 ]
forward_detect_img = frame_rgb[forward_gaussian_range[1]:forward_gaussian_range[1]+forward_gaussian_range[3] , forward_gaussian_range[0]:forward_gaussian_range[0]+forward_gaussian_range[2]].copy()
forward_frameGray = cv2.cvtColor(forward_detect_img, cv2.COLOR_BGR2GRAY)
forward_foreground = forward_frameGray
forward_pMOG2.apply(forward_frameGray, forward_foreground, 0.009)
forward_frameGray2 = cv2.cvtColor(frame_rgb, cv2.COLOR_BGR2GRAY)
forward_foreground2 = forward_frameGray2
forward_pMOG3.apply(forward_frameGray2, forward_foreground2, 0.009)
if (two_way_frist < 3):
two_way_frist += 1
if (two_way_frist == 3):
if (forward_road_number == 1):
global forward_temp, forward_appear
global forward_sum_threshold, forward_pixel
forward_road_1 = forward_foreground
forward_road_1 = forward_foreground[forward_road_coord[0, 1] - forward_gaussian_range[1]:forward_road_coord[0, 1] - forward_gaussian_range[1] + forward_road_coord[0, 3],forward_road_coord[0, 0] - forward_gaussian_range[0]:forward_road_coord[0, 0] - forward_gaussian_range[0] + forward_road_coord[0, 2]]
for i_1 in range(0, forward_road_coord[0, 3]):
for j_1 in range(0, forward_road_coord[0, 2]):
forward_pixel[0] = forward_road_1[i_1, j_1]
if (forward_pixel[0] > forward_pixel_threshold):
forward_sum[0] += 1
if (forward_sum[0] > forward_sum_threshold):
Sfp_time1 = datetime.datetime.now().strftime('%f')
Sp_time1 = datetime.datetime.now().strftime('%S')
forward_appear[0] = 1
else:
forward_appear[0] = 0
if (forward_appear[0] == 0 and forward_temp[0] == 1): #when car leave gaussain
YOLO(frame_resized, darknet_image, 0)
if (forward_appear[0] == 1):
forward_temp[0] = 1
else:
forward_temp[0] = 0
elif (forward_road_number == 2): #2 road
forward_road_1 = forward_foreground
forward_road_1 = forward_foreground[forward_road_coord[0, 1] - forward_gaussian_range[1]:forward_road_coord[0, 1] - forward_gaussian_range[1] + forward_road_coord[0, 3],forward_road_coord[0, 0] - forward_gaussian_range[0]:forward_road_coord[0, 0] - forward_gaussian_range[0] + forward_road_coord[0, 2]]
forward_road_2 = forward_foreground
forward_road_2 = forward_foreground[forward_road_coord[1, 1] - forward_gaussian_range[1]:forward_road_coord[1, 1] - forward_gaussian_range[1] + forward_road_coord[1, 3],forward_road_coord[1, 0] - forward_gaussian_range[0]:forward_road_coord[1, 0] - forward_gaussian_range[0] + forward_road_coord[1, 2]]
for i_1 in range(forward_road_coord[0, 3]):
for j_1 in range(forward_road_coord[0, 2]):
forward_pixel[0] = forward_road_1[i_1, j_1]
if (forward_pixel[0] > forward_pixel_threshold):
forward_sum[0] += 1
for i_2 in range(forward_road_coord[1, 3]):
for j_2 in range(forward_road_coord[1, 2]):
forward_pixel[1] = forward_road_2[i_2, j_2]
if (forward_pixel[1] > forward_pixel_threshold):
forward_sum[1] += 1
if (forward_sum[0] > forward_sum_threshold):
forward_appear[0] = 1
Sfp_time1 = datetime.datetime.now().strftime('%f')
Sp_time1 = datetime.datetime.now().strftime('%S')
else:
forward_appear[0] = 0
if (forward_sum[1] > forward_sum_threshold):
forward_appear[1] = 1
Sfp_time2 = datetime.datetime.now().strftime('%f')
Sp_time2 = datetime.datetime.now().strftime('%S')
else:
forward_appear[1] = 0
if (forward_appear[0] == 0 and forward_temp[0] == 1):
YOLO(frame_resized, darknet_image, 0)
if (forward_appear[1] == 0 and forward_temp[1] == 1):
YOLO(frame_resized, darknet_image, 1)
if (forward_appear[0] == 1):
forward_temp[0] = 1
else:
forward_temp[0] = 0
if (forward_appear[1] == 1):
forward_temp[1] = 1
else:
forward_temp[1] = 0
elif (forward_road_number == 3): #3 road
forward_road_1 = forward_foreground
forward_road_1 = forward_foreground[forward_road_coord[0, 1] - forward_gaussian_range[1]:forward_road_coord[0, 1] - forward_gaussian_range[1] + forward_road_coord[0, 3],forward_road_coord[0, 0] - forward_gaussian_range[0]:forward_road_coord[0, 0] - forward_gaussian_range[0] + forward_road_coord[0, 2]]
forward_road_2 = forward_foreground
forward_road_2 = forward_foreground[forward_road_coord[1, 1] - forward_gaussian_range[1]:forward_road_coord[1, 1] - forward_gaussian_range[1] + forward_road_coord[1, 3],forward_road_coord[1, 0] - forward_gaussian_range[0]:forward_road_coord[1, 0] - forward_gaussian_range[0] + forward_road_coord[1, 2]]
forward_road_3 = forward_foreground
forward_road_3 = forward_foreground[forward_road_coord[2, 1] - forward_gaussian_range[1]:forward_road_coord[2, 1] - forward_gaussian_range[1] + forward_road_coord[2, 3],forward_road_coord[2, 0] - forward_gaussian_range[0]:forward_road_coord[2, 0] - forward_gaussian_range[0] + forward_road_coord[2, 2]]
for i_1 in range(0, forward_road_coord[0, 3]):
for j_1 in range(0, forward_road_coord[0, 2]):
forward_pixel[0] = forward_road_1[i_1, j_1]
if (forward_pixel[0] > forward_pixel_threshold):
forward_sum[0] += 1
for i_2 in range(0, forward_road_coord[1, 3]):
for j_2 in range(0, forward_road_coord[1, 2]):
forward_pixel[1] = forward_road_2[i_2, j_2]
if (forward_pixel[1] > forward_pixel_threshold):
forward_sum[1] += 1
for i_3 in range(0, forward_road_coord[2, 3]):
for j_3 in range(0, forward_road_coord[2, 2]):
forward_pixel[2] = forward_road_3[i_3, j_3]
if (forward_pixel[2] > forward_pixel_threshold):
forward_sum[2] += 1
if (forward_sum[0] > forward_sum_threshold):
forward_appear[0] = 1
Sfp_time1 = datetime.datetime.now().strftime('%f')
Sp_time1 = datetime.datetime.now().strftime('%S')
else:
forward_appear[0] = 0
if (forward_sum[1] > forward_sum_threshold):
forward_appear[1] = 1
Sfp_time2 = datetime.datetime.now().strftime('%f')
Sp_time2 = datetime.datetime.now().strftime('%S')
else:
forward_appear[1] = 0
if (forward_sum[2] > forward_sum_threshold):
forward_appear[2] = 1
Sfp_time3 = datetime.datetime.now().strftime('%f')
Sp_time3 = datetime.datetime.now().strftime('%S')
else:
forward_appear[2] = 0
if (forward_appear[0] == 0 and forward_temp[0] == 1):
YOLO(frame_resized, darknet_image, 0)
if (forward_appear[1] == 0 and forward_temp[1] == 1):
YOLO(frame_resized, darknet_image, 1)
if (forward_appear[2] == 0 and forward_temp[2] == 1):
YOLO(frame_resized, darknet_image, 2)
if (forward_appear[0] == 1):
forward_temp[0] = 1
else:
forward_temp[0] = 0
if (forward_appear[1] == 1):
forward_temp[1] = 1
else:
forward_temp[1] = 0
if (forward_appear[2] == 1):
forward_temp[2] = 1
else:
forward_temp[2] = 0
for i in range(forward_road_number): #draw frame
frame_resized0=cv2.rectangle(frame_resized0, (count_area[i, 0],count_area[i, 1]),(count_area[i, 0]+count_area[i, 2],count_area[i,1]+count_area[i, 3]), (255, 255, 255), 1)
image = frame_resized0#detections
#
for i in range(forward_road_number + 1):
image = cv2.line(image,(int(darknet_road_line[i, 0] * darknet.network_width(netMain)), int(darknet_road_line[i, 1] * darknet.network_height(netMain))), (int(darknet_road_line[i, 2] * darknet.network_width(netMain)), int(darknet_road_line[i, 3] * darknet.network_height(netMain))), (0, 255, 255),1)
image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
_tmp_str="L" if chk_left(0,208) else "R"
_tmp_str2="L" if chk_left(400,200) else "R"
cv2.putText(image,_tmp_str,(0,208),cv2.FONT_HERSHEY_COMPLEX,.5,(255,255,255),1)#L
cv2.putText(image,_tmp_str2,(400,200),cv2.FONT_HERSHEY_COMPLEX,.5,(255,255,255),1)#R
cv2.waitKey(3)
cv2.imshow('Demo', image)
cv2.waitKey(3)
np.savetxt(kname,Csum,delimiter=',',fmt = '%d')
cv2.destroyWindow("Demo")
cap.release()
print("yolo exist")
_Keep_Run=True
#
def YOLO(Cimg, Dimg, Count_Road):
Sfn_time = int(datetime.datetime.now().strftime('%f'))
Sn_time = int(datetime.datetime.now().strftime('%S'))
darknet.copy_image_from_bytes(Dimg,Cimg.tobytes())
#
global Sp_time, Sfp_time, Tdis, Sspeed, Bspeed, Rhold, Csum, count
if (Count_Road == 0):
Sp_time = int(Sp_time1)
Sfp_time = int(Sfp_time1)
elif (Count_Road == 1):
Sp_time = int(Sp_time2)
Sfp_time = int(Sfp_time2)
elif (Count_Road == 2):
Sp_time = int(Sp_time3)
Sfp_time = int(Sfp_time3)
if (Sn_time < Sp_time):
Sn_time += 60
Tdis = ((Sn_time - Sp_time + 0.3) * 1000000 + Sfn_time - Sfp_time) * 0.000001
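    # Tdis: elapsed seconds between the Gaussian trigger and this frame
    # (the +0.3 appears to be a fixed processing-latency offset, in seconds)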
detections = darknet.detect_image(netMain, metaMain, Dimg, thresh=0.25)
#print(detections)
want=[]
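    # Greedy overlap suppression over the raw detections: when one box's
    # corner falls inside another, keep the higher-confidence detection.
    # The list is mutated during iteration, hence the repeated bounds checks.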
for i in range(len(detections)):
if(i >= len(detections)):
break
Mitem = detections[i]
trust = Mitem[1]
Ikey = i
for j in range(i + 1, len(detections)):
if(j >= len(detections)):
break
Compitem = detections[j]
CompitemX = Compitem[2][0] + Compitem[2][2] / 2
CompitemY = Compitem[2][1] + Compitem[2][3] / 2
if (CompitemX > Mitem[2][0] and CompitemX < Mitem[2][0] + Mitem[2][2] and CompitemY > Mitem[2][1] and CompitemY < Mitem[2][1] + Mitem[2][3]):
if (Compitem[1] > trust):
trust = Mitem[1]
detections.remove(detections[Ikey])
j -= 1
Ikey = j
continue
detections.remove(detections[j])
j -= 1
want.append(detections[Ikey])
LCP = 0
Lastcar = 0
Csum = []
GCenter = np.array( [ [int(Fgaussian_range[0, 0] * 416 + Fgaussian_range[0, 2] * 416 / 2),int(Fgaussian_range[0, 1] * 416 + Fgaussian_range[0, 3] * 416 / 2)],[int(Fgaussian_range[1, 0] * 416 + Fgaussian_range[1, 2] * 416 / 2),int(Fgaussian_range[1, 1] * 416 + Fgaussian_range[1, 3] * 416 / 2)],[int(Fgaussian_range[2, 0] * 416 + Fgaussian_range[2, 2] * 416 / 2),int(Fgaussian_range[2, 1] * 416 + Fgaussian_range[2, 3] * 416 / 2)] ] ) #gaussain frame center
LastXY = [0, 0]
category = [ "sedan", "truck", "scooter", "bus", "Flinkcar", "Hlinkcar"]
XYrate0 = (float(darknet_road_line[Count_Road, 2] * 416 - darknet_road_line[Count_Road, 0]*416)) / (float(darknet_road_line[Count_Road, 3] *416 - darknet_road_line[Count_Road, 1]*416))
XYrate1 = (float(darknet_road_line[Count_Road + 1, 2]*416 - darknet_road_line[Count_Road + 1, 0]*416)) / (float(darknet_road_line[Count_Road + 1, 3]*416 - darknet_road_line[Count_Road + 1, 1]*416))
global count, count_clear, Speed, Vsc, Vbc, Svs, Svb, l_Minsp, l_Maxsp, l_Ct,frame_resized
if (count_clear == 1):
Vsc = [0, 0, 0]
Vbc = [0, 0, 0]
Bspeed = [0, 0, 0]
Sspeed = [0, 0, 0]
Rhold = [0, 0, 0]
for i in range(3):
for j in range(6):
count[i, j] = 0;
count_clear = 0
for detection in want: #counting
x, y, w, h = detection[2][0],\
detection[2][1],\
detection[2][2],\
detection[2][3]
xmin, ymin, xmax, ymax = convertBack(float(x), float(y), float(w), float(h))
for i in range(len(category)):
if (ExistM == 1):
if (Count_Road == forward_road_number - 1 and detection[0] != category[2]):
continue
elif (Count_Road < forward_road_number - 1 and detection[0] == category[2]):
continue
if (detection[0] == category[i]):
RLine = int(XYrate0 * (ymax - (darknet_road_line[Count_Road, 1] * 416)) + (darknet_road_line[Count_Road, 0] * 416))
LLine = int(XYrate1 * (ymax - (darknet_road_line[Count_Road + 1, 1] * 416)) + (darknet_road_line[Count_Road + 1, 0] * 416))
                print('xmax '+str(xmax)+' xmin '+str(xmin)+' Rline '+str(RLine*1.1)+' Lline '+str(LLine/1.3))
if (xmin - (xmax-xmin)/2 < RLine *1.108 and xmin + (xmax-xmin) > LLine ): #detect frame cneter between right line and left line
Rhold[Count_Road] += (((Sn_time - Sp_time) * 1000000 + Sfn_time - Sfp_time) * 0.000001) * 10
if (LCP == 0):
count[Count_Road, i] += 1
LastXY[0] = GCenter[Count_Road, 0] - (x + w / 2)
LastXY[1] = GCenter[Count_Road, 1] - (y + h / 2)
Lastcar = i
Speed = (1.3 / (Fgaussian_range[Count_Road, 3] * 416) * 3.6 * h) / Tdis
if (Speed > l_Minsp and Speed < l_Maxsp):
if (detection[0] == category[0]):
Vsc[Count_Road] += 1
Sspeed[Count_Road] += Speed
Svs[Count_Road] = int(Sspeed[Count_Road] / Vsc[Count_Road])
elif (detection[0] == category[1] or detection[0] == category[3] or detection[0] == category[4] or detection[0] == category[5]):
Vbc[Count_Road] += 1
Bspeed[Count_Road] += Speed
Svb[Count_Road] = int(Bspeed[Count_Road] / Vbc[Count_Road])
else:
if (pow(pow(GCenter[Count_Road, 1] - (y + h / 2), 2) + pow(GCenter[Count_Road, 0] - (x + w / 2), 2), 0.5) < pow(pow(LastXY[0], 2) + pow(LastXY[1], 2), 0.5) and (y + h) > GCenter[Count_Road, 1]):
Speed = (1.3 / (Fgaussian_range[Count_Road, 3] * 416) * 3.6 * h) / Tdis
if (Speed > l_Minsp and Speed < l_Maxsp):
if (detection[0] == category[0]):
Sspeed[Count_Road] += Speed
Vsc[Count_Road] += 1
Svs[Count_Road] = int(Sspeed[Count_Road] / Vsc[Count_Road])
elif (detection[0] == category[1] or detection[0] == category[3] or detection[0] == category[4] or detection[0] == category[5]):
Vbc[Count_Road] += 1
Bspeed[Count_Road] += Speed
Svb[Count_Road] = int(Bspeed[Count_Road] / Vbc[Count_Road])
count[Count_Road, Lastcar] -= 1
count[Count_Road, i] += 1
LastXY[0] = GCenter[Count_Road, 0] - (x + w / 2)
LastXY[1] = GCenter[Count_Road, 1] - (y + h / 2)
Lastcar = i
LCP += 1
break
Csum = [0, 0, 0, 0, 0, 0]
for i in range(3):
for j in range(6):
Csum[j] += count[i, j]
for i in range(3):
print("SedanRoad" + str(i) + " sedan: " + str(count[i, 0]) + " truck: " + str(count[i, 1]) + " scooter: " + str(count[i, 2]) + " bus: " + str(count[i, 3]) + " Flinkcar: " + str(count[i, 4]) + " Hlinkcar: " + str(count[i, 5]))
def catch_exit():
    global _Keep_Run
    # block until the user presses Enter, then signal the main loop to stop
    print(raw_input())
    _Keep_Run = False
if __name__ == "__main__":
    ## set up roads, counting areas, and detection lines before starting the threads
thread_list=[]
#init
init()
#
set_area(0,0,1,1)
set_count_area(0,0,1,1)
    set_road_line(0.1115,0,0.7665,1) # DOWN TO UP
ExistM = 0
count = np.array( [ [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0] ] )
forward_appear = [0, 0, 0, 0]
forward_temp = [0, 0, 0, 0]
darknet_road_line = np.array( [ [0.0,0.0,0.0,0.0], [0.0,0.0,0.0,0.0], [0.0,0.0,0.0,0.0], [0.0,0.0,0.0,0.0] ] )
Fgaussian_range = np.array( [ [0.01,0.01,0.01,0.01], [0.01,0.01,0.01,0.01], [0.01,0.01,0.01,0.01] ] )
forward_road_number = 2
two_way_frist = 1
forward_sum_threshold = 100 #200
forward_pixel_threshold = 125
forward_pixel = [0, 0, 0]
forward_pMOG2 = cv2.createBackgroundSubtractorMOG2()
forward_pMOG3 = cv2.createBackgroundSubtractorMOG2()
#
thread_list.append(threading.Thread(target=catch_exit))
#thread_list.append(threading.Thread(target=tcp_run))
thread_list.append(threading.Thread(target=GMM))
for th in thread_list:
th.start()
#
for th in thread_list:
th.join()
| StarcoderdataPython |
144999 | from collections import defaultdict
class Graph:
def __init__(self,no_of_vertices,list_of_v):
self.no_of_vertices = no_of_vertices
self.graph = defaultdict(list)
for v in list_of_v:
self.graph[v] = []
def addEdge(self, u, v):
self.graph[u].append(v)
    def isSink(self):
        # A universal sink has no outgoing edges and an incoming edge from
        # every other vertex. Only vertices with an empty adjacency list
        # can be candidates.
        keys = list(self.graph.keys())
        for i in keys:
            if(self.graph[i]==[]):
                # candidate sink: verify every other vertex points to it
                flag = 0
                for j in keys:
                    if(i!=j):
                        if(not (i in self.graph[j])):
                            flag = 1
                            break
                if(flag==0):
                    return True
                else:
                    # if this candidate fails, no other empty-adjacency vertex
                    # can succeed either (it would need an edge from this one),
                    # so it is safe to stop early
                    return False
        return False
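# A minimal alternative sketch (an addition, not part of the original class):
# the classic candidate-elimination approach finds the only possible
# universal sink with O(V) edge checks before a final verification pass.
def find_universal_sink(graph):
    vertices = list(graph.graph.keys())
    candidate = vertices[0]
    for v in vertices[1:]:
        # if candidate has an outgoing edge to v, candidate cannot be a sink
        if v in graph.graph[candidate]:
            candidate = v
    # verify: no outgoing edges, and every other vertex points at candidate
    if graph.graph[candidate]:
        return None
    for v in vertices:
        if v != candidate and candidate not in graph.graph[v]:
            return None
    return candidate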
g = Graph(4,["A","B","C","D"])
g.addEdge("A","B")
g.addEdge("B","C")
g.addEdge("A","D")
g.addEdge("B","D")
g.addEdge("C","D")
g.addEdge("C","A")
print(g.isSink()) | StarcoderdataPython |
149983 | <reponame>buckets1337/UOMUMM
# move.py
# handles movement in the world
def toRoom(server, player, command):
'''
moves player from their currentRoom to newRoom
'''
newRoom = None
#print "cmd:" + str(command)
#print "cmd0:" + str(command[0])
#print str(player.currentRoom.orderedExits)
    # args = <some int>
    if command[0].isdigit() and int(command[0]) <= len(player.currentRoom.orderedExits):
#print player.currentRoom.orderedExits
#print player.currentRoom.orderedExits[int(command[0])-1]
targetRoom = player.currentRoom.orderedExits[int(command[0])-1][0]
#print "tg:" + str(targetRoom)
for room in server.structureManager.masterRooms:
#print room.name, room.exits
if room.ID == targetRoom:
#print room.ID, room.exits
newRoom = room
#print 'nr:' + str(newRoom) + str(newRoom.exits)
    elif command[0].isdigit() and int(command[0]) > len(player.currentRoom.orderedExits):
        player.connection.send_cc("^! There are only " + str(len(player.currentRoom.orderedExits)) + " exits!^~\n")
        return
# args = <exit description text>
cmdStr = " ".join(command)
#print "cmdStr:" + cmdStr
for exit in player.currentRoom.orderedExits:
if cmdStr == exit[1]:
newRoom = exit[0]
if newRoom != None:
#print player.currentRoom.players
player.currentRoom.players.remove(player)
#print player.currentRoom.players
#print player
for plyr in player.currentRoom.players:
plyr.connection.send_cc(player.name + " left.\n")
for room in server.structureManager.masterRooms:
if room.ID == newRoom:
newRoom = room
player.currentRoom = newRoom
server.Renderer.roomDisplay(player.connection, player.currentRoom)
for plyr in player.currentRoom.players:
plyr.connection.send_cc(player.name + " entered.\n")
player.currentRoom.players.append(player)
else:
# args does not point to an exit
player.connection.send_cc("^!I am not sure where I want to go!^~\n")
| StarcoderdataPython |
4834811 | #MoRequiem 2015
#The following line asks for the cost of your meal, not including tax.
meal = float(input("-What is the cost of the meal? \n"))
#The tax rate is Kenosha's restaurant tax (edit if needed); the tip is 15 percent, with a prompt for adjusting it planned for later.
tax = 0.055
tip = 0.15
#The following two lines will calculate the tax on your meal, and the total with taxes
tax = meal * tax
meal = meal + tax
#The following two lines calculate the tip to give and the subtotal
tip = meal * tip
total = meal + tip
#The following set of lines displays the finished calculations: subtotal (with tax), tip, and total
print ("""-Subtotal: ${0:10.2f}
-Tip: ${1:10.2f}
-Total: ${2:10.2f}""".format(meal, tip, total))
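# A minimal generalization sketch (the rates are assumptions mirroring the
# constants above): returns (subtotal_with_tax, tip, total).
def meal_totals(cost, tax_rate=0.055, tip_rate=0.15):
    subtotal = cost * (1 + tax_rate)  # meal cost plus sales tax
    tip = subtotal * tip_rate         # tip computed on the taxed subtotal
    return subtotal, tip, subtotal + tip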
| StarcoderdataPython |
53572 | <filename>tools/w3af/w3af/core/controllers/misc/decorators.py
"""
decorators.py
Copyright 2011 <NAME>
This file is part of w3af, http://w3af.org/ .
w3af is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation version 2 of the License.
w3af is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with w3af; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
import math
import time
import threading
import collections
import functools
from functools import wraps
import w3af.core.controllers.output_manager as om
# pylint: disable=E0401
from darts.lib.utils.lru import SynchronizedLRUDict
# pylint: enable=E0401
def runonce(exc_class=Exception):
"""
Function to decorate methods that should be called only once.
:param exc_class: The Exception class to be raised when the method has
already been called.
"""
def runonce_meth(meth):
@wraps(meth)
def inner_runonce_meth(self, *args):
if not getattr(self, '_already_executed', False):
self._already_executed = True
return meth(self, *args)
raise exc_class()
return inner_runonce_meth
return runonce_meth
def retry(tries, delay=1, backoff=2, exc_class=None, err_msg='', log_msg=None):
"""
Retries a function or method if an exception was raised.
:param tries: Number of attempts. Must be >= 1.
:param delay: Initial delay before retrying. Must be non negative.
:param backoff: Indicates how much the delay should lengthen after
each failure. Must greater than 1.
:param exc_class: Exception class to use if all attempts have been
exhausted.
:param err_msg: Error message to use when an instance of `exc_class`
is raised. If no value is passed the string representation
of the current exception is used.
"""
if backoff <= 1:
raise ValueError("'backoff' must be greater than 1")
tries = math.floor(tries)
if tries < 1:
raise ValueError("'tries' must be 1 or greater.")
if delay < 0:
raise ValueError("'delay' must be non negative.")
def deco_retry(f):
@wraps(f)
def f_retry(*args, **kwargs):
mtries, mdelay = tries - 1, delay
while mtries >= 0:
try:
rv = f(*args, **kwargs)
except Exception, ex:
# Ok, fail!
if mtries == 0:
if exc_class:
raise exc_class(err_msg or str(ex))
raise
else:
return rv
mtries -= 1
time.sleep(mdelay)
mdelay *= backoff
if log_msg is not None:
om.out.debug(log_msg)
return f_retry
return deco_retry
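# Usage sketch (hypothetical function and message, not part of w3af): retry
# an HTTP fetch up to three times, doubling the delay after each failure,
# and raise ValueError with a custom message once attempts are exhausted.
#
#     @retry(tries=3, delay=1, backoff=2, exc_class=ValueError,
#            err_msg='fetch failed after retries')
#     def fetch_page(url):
#         return urllib2.urlopen(url).read()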
def cached_property(fun):
"""
A memoize decorator for class properties.
"""
@wraps(fun)
def get(self):
try:
return self._cache[fun]
except AttributeError:
self._cache = {}
except KeyError:
pass
ret = self._cache[fun] = fun(self)
return ret
return property(get)
class memoized(object):
"""
Decorator. Caches a function's return value each time it is called.
If called later with the same arguments, the cached value is returned
(not reevaluated).
"""
def __init__(self, func, lru_size=10):
self.func = func
self.cache = SynchronizedLRUDict(lru_size)
def __call__(self, *args, **kwargs):
if not isinstance(args, collections.Hashable) or\
not isinstance(tuple(kwargs.items()), collections.Hashable):
# uncacheable. a list, for instance.
# better to not cache than blow up.
return self.func(*args, **kwargs)
try:
return self.cache[(args, tuple(kwargs.items()))]
except KeyError:
value = self.func(*args, **kwargs)
self.cache[(args, tuple(kwargs.items()))] = value
return value
def __repr__(self):
"""
Return the function's docstring.
"""
return self.func.__doc__
def __get__(self, obj, objtype):
"""
Support instance methods.
"""
return functools.partial(self.__call__, obj)
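# Usage sketch (hypothetical): because memoized implements __get__, it can
# decorate plain functions as well as instance methods; repeated calls with
# the same hashable arguments are served from the LRU cache.
#
#     @memoized
#     def fib(n):
#         return n if n < 2 else fib(n - 1) + fib(n - 2)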
def rate_limited(max_per_second):
"""
Decorator that make functions not be called faster than
"""
lock = threading.Lock()
min_interval = 1.0 / float(max_per_second)
def decorate(func):
last_time_called = [0.0]
@wraps(func)
def rate_limited_function(*args, **kwargs):
lock.acquire()
elapsed = time.clock() - last_time_called[0]
left_to_wait = min_interval - elapsed
if left_to_wait > 0:
time.sleep(left_to_wait)
lock.release()
ret = func(*args, **kwargs)
last_time_called[0] = time.clock()
return ret
return rate_limited_function
return decorate | StarcoderdataPython |
1635109 | <gh_stars>0
# <NAME>
# Data Structures and Algorithms in Python
# Copyright 2018
| StarcoderdataPython |
3344446 | <reponame>Wolfmarsh/mpf<filename>mpf/tests/test_AssetManager.py
"""Test assets."""
import time
from mpf.tests.MpfTestCase import MpfTestCase
class TestAssets(MpfTestCase):
def get_machine_path(self):
return 'tests/machine_files/asset_manager'
def get_config_file(self):
return 'test_asset_loading.yaml'
def test_asset_loading(self):
# TODO: instantiate a fresh machine between test groups
self.expected_duration = 1.5
self._test_machine_wide_asset_loading()
self._test_random_asset_group()
self._test_random_asset_group_with_weighting()
self._test_random_force_all()
self._test_random_force_next()
self._test_sequence_asset_group()
self._test_sequence_asset_group_with_count()
self._test_conditional_random_asset_group()
self._test_conditional_sequence_asset_group()
def _test_machine_wide_asset_loading(self):
# test that the shows asset class gets built correctly
self.assertTrue(self.machine, 'shows')
self.assertTrue(self.machine.asset_manager._asset_classes)
self.assertEqual(self.machine.asset_manager._asset_classes[0].path_string, 'shows')
# tests that assets are registered as expected with various conditions
# /shows folder
self.assertIn('show1', self.machine.shows)
self.assertIn('show2', self.machine.shows)
self.assertIn('show3', self.machine.shows)
# test subfolders listed in assets:shows machine-wide config folders
self.assertIn('show4', self.machine.shows) # /shows/preload
self.assertIn('show4b', self.machine.shows) # /shows/preload/subfolder
self.assertIn('show5', self.machine.shows) # /shows/on_demand
# test shows from subfolder not listed in assets:shows
self.assertIn('show11', self.machine.shows) # /shows/custom1
# test shows from the shows: section that have names configured to be
# different from their file names
self.assertIn('show_12_new_name', self.machine.shows) # show12.png
# custom1/show13.png
self.assertIn('show_13_new_name', self.machine.shows)
# test that the shows that were renamed were not also loaded based on
# their original names
self.assertNotIn('show12', self.machine.shows)
self.assertNotIn('show13', self.machine.shows)
# test that config dicts are merged and/or overwritten properly
# test custom k/v pair from default config based on the folder the
# asset was in
self.assertEqual(self.machine.shows['show4'].config['test_key'],
'test_value')
# test custom k/v pair from default config based on the folder the
# asset was in (custom pair should be inherited from parent folder)
self.assertEqual(self.machine.shows['show4b'].config['test_key'],
'test_value')
# test custom k/v pair from asset entry in the shows: section
self.assertEqual(self.machine.shows['show3'].config['test_key'],
'test_value_override3')
# same as above, but test that it also works when the asset name is
# different from the file name
self.assertEqual(self.machine.shows['show_12_new_name'].config['test_key'], 'test_value_override12')
# Test that mode assets were loaded properly
self.assertIn('show6', self.machine.shows)
self.assertIn('show7', self.machine.shows)
self.assertIn('show8', self.machine.shows)
self.assertIn('show9', self.machine.shows)
self.assertIn('show10', self.machine.shows)
# Make sure all the assets are loaded. Wait if not
        while (self.machine.asset_manager.num_assets_loaded <
                self.machine.asset_manager.num_assets_to_load):
            time.sleep(.0001)
self.advance_time_and_run(.1)
# Need to wait a bit since the loading was a separate thread
self.advance_time_and_run(.1)
# Make sure the ones that should have loaded on startup actually loaded
self.assertTrue(self.machine.shows['show1'].loaded)
self.assertFalse(self.machine.shows['show1'].loading)
self.assertFalse(self.machine.shows['show1'].unloading)
self.assertTrue(self.machine.shows['show2'].loaded)
self.assertFalse(self.machine.shows['show2'].loading)
self.assertFalse(self.machine.shows['show2'].unloading)
self.assertTrue(self.machine.shows['show3'].loaded)
self.assertFalse(self.machine.shows['show3'].loading)
self.assertFalse(self.machine.shows['show3'].unloading)
self.assertTrue(self.machine.shows['show8'].loaded)
self.assertFalse(self.machine.shows['show8'].loading)
self.assertFalse(self.machine.shows['show8'].unloading)
self.assertTrue(self.machine.shows['show2'].loaded)
self.assertFalse(self.machine.shows['show2'].loading)
self.assertFalse(self.machine.shows['show2'].unloading)
self.assertTrue(self.machine.shows['show4'].loaded)
self.assertFalse(self.machine.shows['show4'].loading)
self.assertFalse(self.machine.shows['show4'].unloading)
self.assertTrue(self.machine.shows['show7'].loaded)
self.assertFalse(self.machine.shows['show7'].loading)
self.assertFalse(self.machine.shows['show7'].unloading)
self.assertTrue(self.machine.shows['show11'].loaded)
self.assertFalse(self.machine.shows['show11'].loading)
self.assertFalse(self.machine.shows['show11'].unloading)
self.assertTrue(self.machine.shows['show_12_new_name'].loaded)
self.assertFalse(self.machine.shows['show_12_new_name'].loading)
self.assertFalse(self.machine.shows['show_12_new_name'].unloading)
self.assertTrue(self.machine.shows['show_13_new_name'].loaded)
self.assertFalse(self.machine.shows['show_13_new_name'].loading)
self.assertFalse(self.machine.shows['show_13_new_name'].unloading)
# Make sure the ones that should not have loaded on startup didn't load
self.assertFalse(self.machine.shows['show5'].loaded)
self.assertFalse(self.machine.shows['show5'].loading)
self.assertFalse(self.machine.shows['show5'].unloading)
self.assertFalse(self.machine.shows['show9'].loaded)
self.assertFalse(self.machine.shows['show9'].loading)
self.assertFalse(self.machine.shows['show9'].unloading)
self.assertFalse(self.machine.shows['show10'].loaded)
self.assertFalse(self.machine.shows['show10'].loading)
self.assertFalse(self.machine.shows['show10'].unloading)
# Start the mode and make sure those assets load
self.mock_event("loading_assets")
self.mock_event("asset_loading_complete")
self.machine.modes['mode1'].start()
self.advance_time_and_run()
# Give it a second to load. This file is tiny, so it shouldn't take
# this long
start_time = time.time()
while (not self.machine.shows['show9'].loaded and
time.time() < start_time + 5):
self.assertTrue(self.machine.shows['show9'].loading)
time.sleep(.0001)
self.advance_time_and_run(.1)
self.assertEventCalled("loading_assets")
self.assertEventCalled("asset_loading_complete")
self.assertTrue(self.machine.shows['show9'].loaded)
self.assertFalse(self.machine.shows['show9'].loading)
self.assertFalse(self.machine.shows['show9'].unloading)
# test mode stop which should unload those assets
self.machine.modes['mode1'].stop()
self.advance_time_and_run(.1)
start_time = time.time()
while (self.machine.shows['show9'].loaded and
time.time() < start_time + 5):
self.assertTrue(self.machine.shows['show9'].unloading)
time.sleep(.0001)
self.advance_time_and_run(.1)
self.assertFalse(self.machine.shows['show9'].loaded)
self.assertFalse(self.machine.shows['show9'].loading)
self.assertFalse(self.machine.shows['show9'].unloading)
def _test_random_asset_group(self):
# three assets, no weights
# make sure the asset group was created
self.assertIn('group1', self.machine.shows)
# make sure the randomness is working. To test this, we request the
# asset 10,000 times and then count the results and assume that each
# should be 3,333 +- 500 just to make sure the test never fails/
res = list()
for x in range(10000):
res.append(self.machine.shows['group1'].show)
self.assertAlmostEqual(3333, res.count(self.machine.shows['show1']),
delta=500)
self.assertAlmostEqual(3333, res.count(self.machine.shows['show2']),
delta=500)
self.assertAlmostEqual(3333, res.count(self.machine.shows['show3']),
delta=500)
def _test_random_asset_group_with_weighting(self):
# three assets, third one has a weight of 2
# make sure the asset group was created
self.assertIn('group2', self.machine.shows)
        # make sure the randomness is working. To test this, we request the
        # asset 10,000 times and count the results; with weights 1, 1, 2 we
        # expect roughly 2,500 / 2,500 / 5,000 (+- 500) so the test never fails.
res = list()
for x in range(10000):
res.append(self.machine.shows['group2'].show)
self.assertAlmostEqual(2500, res.count(self.machine.shows['show1']),
delta=500)
self.assertAlmostEqual(2500, res.count(self.machine.shows['show2']),
delta=500)
self.assertAlmostEqual(5000, res.count(self.machine.shows['show3']),
delta=500)
def _test_sequence_asset_group(self):
# three assets, no weights
self.assertIn('group3', self.machine.shows)
# Should always return in order, 1, 2, 3, 1, 2, 3...
self.assertIs(self.machine.shows['group3'].show, self.machine.shows['show1'])
self.assertIs(self.machine.shows['group3'].show, self.machine.shows['show2'])
self.assertIs(self.machine.shows['group3'].show, self.machine.shows['show3'])
self.assertIs(self.machine.shows['group3'].show, self.machine.shows['show1'])
self.assertIs(self.machine.shows['group3'].show, self.machine.shows['show2'])
self.assertIs(self.machine.shows['group3'].show, self.machine.shows['show3'])
self.assertIs(self.machine.shows['group3'].show, self.machine.shows['show1'])
self.assertIs(self.machine.shows['group3'].show, self.machine.shows['show2'])
self.assertIs(self.machine.shows['group3'].show, self.machine.shows['show3'])
def _test_sequence_asset_group_with_count(self):
# three assets, no weights
self.assertIn('group4', self.machine.shows)
# Should always return in order, 1, 1, 1, 1, 2, 2, 3, 1, 1, 1, 1 ...
self.assertIs(self.machine.shows['group4'].show, self.machine.shows['show1'])
self.assertIs(self.machine.shows['group4'].show, self.machine.shows['show1'])
self.assertIs(self.machine.shows['group4'].show, self.machine.shows['show1'])
self.assertIs(self.machine.shows['group4'].show, self.machine.shows['show1'])
self.assertIs(self.machine.shows['group4'].show, self.machine.shows['show2'])
self.assertIs(self.machine.shows['group4'].show, self.machine.shows['show2'])
self.assertIs(self.machine.shows['group4'].show, self.machine.shows['show3'])
self.assertIs(self.machine.shows['group4'].show, self.machine.shows['show1'])
self.assertIs(self.machine.shows['group4'].show, self.machine.shows['show1'])
self.assertIs(self.machine.shows['group4'].show, self.machine.shows['show1'])
self.assertIs(self.machine.shows['group4'].show, self.machine.shows['show1'])
self.assertIs(self.machine.shows['group4'].show, self.machine.shows['show2'])
self.assertIs(self.machine.shows['group4'].show, self.machine.shows['show2'])
self.assertIs(self.machine.shows['group4'].show, self.machine.shows['show3'])
def _test_random_force_next(self):
# random, except it ensures the same one does not show up twice in a
# row
self.assertIn('group5', self.machine.shows)
# do it 10,000 times just to be sure. :)
last = self.machine.shows['group5'].show
res = list()
for x in range(10000):
show = self.machine.shows['group5'].show
self.assertIsNot(last, show)
last = show
res.append(show)
# Also check that the weights were right
# BTW these weights are non-intuitive since the last asset is not
# considered for the next round. e.g. show1 = 1, show2 = 5,
# show3 = 1, so you'd think they would be 1400, 7200, 1400, but in
# reality, 50% of the time, asset2 is not in contention, so really
# asset2 has a 6-to-1 (84%) chance of being selected 66% of the time,
# but a 0% chance of being selected 33% of the time, etc. So trust that
# these numbers are right. :)
self.assertAlmostEqual(2733, res.count(self.machine.shows['show1']),
delta=500)
self.assertAlmostEqual(4533, res.count(self.machine.shows['show2']),
delta=500)
self.assertAlmostEqual(2733, res.count(self.machine.shows['show3']),
delta=500)
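    # Sanity sketch for the force_next expectations above (illustrative,
    # not executed by the suite): simulating the policy directly reproduces
    # the rough 2733 / 4533 / 2733 split for weights 1, 5, 1.
    #
    #     import random
    #     weights, last, counts = {0: 1, 1: 5, 2: 1}, 0, [0, 0, 0]
    #     for _ in range(10000):
    #         pool = [k for k in weights for _ in range(weights[k]) if k != last]
    #         last = random.choice(pool)
    #         counts[last] += 1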
def _test_random_force_all(self):
# random, except it ensures the same one does not show up twice before
# they're all shown
self.assertIn('group6', self.machine.shows)
for x in range(1000):
this_set = set()
this_set.add(self.machine.shows['group6'].show)
this_set.add(self.machine.shows['group6'].show)
this_set.add(self.machine.shows['group6'].show)
self.assertEqual(len(this_set), 3)
def _test_conditional_random_asset_group(self):
# make sure the asset group was created
self.assertIn('group1', self.machine.shows)
# ONE valid show
# Request the show 1,000 times and ensure that only one show was picked
res = list()
for x in range(1000):
res.append(self.machine.shows['group7'].show)
self.assertEqual(1000, res.count(self.machine.shows['show1']))
self.assertEqual(0, res.count(self.machine.shows['show2']))
self.assertEqual(0, res.count(self.machine.shows['show3']))
# TWO valid shows
# Request the show 10,000 times and ensure that two shows are fairly split
self.machine.modes["mode1"].start()
self.advance_time_and_run()
res = list()
for x in range(10000):
res.append(self.machine.shows['group7'].show)
self.assertAlmostEqual(5000, res.count(self.machine.shows['show1']),
delta=250)
self.assertAlmostEqual(5000, res.count(self.machine.shows['show2']),
delta=250)
self.assertEqual(0, res.count(self.machine.shows['show3']))
# THREE valid shows
# Request the show 10,000 times and ensure that all three shows are fairly split
self.machine.modes["mode1"].stop()
res = list()
for x in range(10000):
res.append(self.machine.shows['group7'].show)
self.assertAlmostEqual(3333, res.count(self.machine.shows['show1']),
delta=250)
self.assertAlmostEqual(3333, res.count(self.machine.shows['show2']),
delta=250)
self.assertAlmostEqual(3333, res.count(self.machine.shows['show3']),
delta=250)
def _test_conditional_sequence_asset_group(self):
# These tests are not independent, and mode1 is still running/stopping from the above test :(
self.advance_time_and_run()
self.assertIn('group8', self.machine.shows)
# ONE valid show
self.assertIs(self.machine.shows['group8'].show, self.machine.shows['show2'])
self.assertIs(self.machine.shows['group8'].show, self.machine.shows['show2'])
self.assertIs(self.machine.shows['group8'].show, self.machine.shows['show2'])
self.assertIs(self.machine.shows['group8'].show, self.machine.shows['show2'])
self.assertIs(self.machine.shows['group8'].show, self.machine.shows['show2'])
self.assertIs(self.machine.shows['group8'].show, self.machine.shows['show2'])
self.assertIs(self.machine.shows['group8'].show, self.machine.shows['show2'])
self.assertIs(self.machine.shows['group8'].show, self.machine.shows['show2'])
self.assertIs(self.machine.shows['group8'].show, self.machine.shows['show2'])
# TWO valid shows
self.machine.modes["mode1"].start()
self.advance_time_and_run()
self.assertIs(self.machine.shows['group8'].show, self.machine.shows['show1'])
self.assertIs(self.machine.shows['group8'].show, self.machine.shows['show2'])
self.assertIs(self.machine.shows['group8'].show, self.machine.shows['show1'])
self.assertIs(self.machine.shows['group8'].show, self.machine.shows['show2'])
self.assertIs(self.machine.shows['group8'].show, self.machine.shows['show1'])
self.assertIs(self.machine.shows['group8'].show, self.machine.shows['show2'])
self.assertIs(self.machine.shows['group8'].show, self.machine.shows['show1'])
self.assertIs(self.machine.shows['group8'].show, self.machine.shows['show2'])
# THREE valid shows
self.machine.modes["mode1"].stop()
self.assertIs(self.machine.shows['group8'].show, self.machine.shows['show3'])
self.assertIs(self.machine.shows['group8'].show, self.machine.shows['show1'])
self.assertIs(self.machine.shows['group8'].show, self.machine.shows['show2'])
self.assertIs(self.machine.shows['group8'].show, self.machine.shows['show3'])
self.assertIs(self.machine.shows['group8'].show, self.machine.shows['show1'])
self.assertIs(self.machine.shows['group8'].show, self.machine.shows['show2'])
self.assertIs(self.machine.shows['group8'].show, self.machine.shows['show3'])
self.assertIs(self.machine.shows['group8'].show, self.machine.shows['show1'])
self.assertIs(self.machine.shows['group8'].show, self.machine.shows['show2'])
self.assertIs(self.machine.shows['group8'].show, self.machine.shows['show3'])
| StarcoderdataPython |
3328470 | import eel
import traceback
import HandPose
import cv2
import win32gui, win32con
start_flg = 0 # start flag for HandPose.py; set to 1 to start it
end_flg = 0 # system shutdown flag; set to 1 to exit
# To hide the console, uncomment the two lines below.
#The_program_to_hide = win32gui.GetForegroundWindow()
#win32gui.ShowWindow(The_program_to_hide , win32con.SW_HIDE)
@eel.expose
def start_flg():
    # set the flag to start the system
global start_flg
start_flg = 1
@eel.expose
def end_flg():
    # set the flag for a clean shutdown
global end_flg
end_flg = 1
if __name__ == '__main__':
    continue_flg = 0 # whether Start.html is running; 1 means running
#eel.init("GUI/web")
# def my_other_thread():
# while True:
# print("I'm a thread")
# eel.sleep(1.0) # Use eel.sleep(), not time.sleep()
# eel.spawn(my_other_thread)
#eel.start('html/Start.html',size=(640,320),block=False)
while True:
        keep_flg = 0 # whether connect.html was launched before starting HandPose.py; 1 means already launched (prevents two test.html windows)
if(continue_flg == 0):
try:
eel.init("GUI/web")
eel.start('html/Start.html',size=(640,320),block=False)
continue_flg = 1
eel.sleep(0.01)
except:
#SystemExit および OSError をキャッチ
traceback.print_exc()
continue
#print("I'm a main loop")
#eel.sleep(1.0)
        elif(start_flg == 1):
            # handle the Start button being pressed
            continue_flg = 0
            webcam_flg = 0 # whether connect.html is running; 1 means running
            # check whether a camera is connected
while(True):
cap = cv2.VideoCapture(0)
ret, frame = cap.read()
if(ret is True):
if(webcam_flg == 1):
eel.windowclose()
print("webcamあったよ!!!!!")
break
else:
if(webcam_flg == 0):
eel.init('GUI/web')
eel.start('html/connect.html',
mode='chrome',
                                  size=(500,600), # window size (width, height)
                                  #position=(width/2-250, height/2-300), # window position (left, top)
block=False)
eel.sleep(0.01)
webcam_flg = 1
keep_flg = 1
else:
eel.sleep(0.01)
print("HandPose.py を実行するよ!!!")
HandPose.HandPose_main(keep_flg) #HandPose.py が終了するまで、 Main.py の以降の処理を行わない
start_flg = 0
elif(end_flg == 1):
            # handle the Exit button being pressed
            print("Shutting down!!!")
break
else:
eel.sleep(0.01)
# while(i<10000):
# eel.sleep(0.01)
# print(i)
# i+=1
#traceback.print_exc()
print("終了したよ!!!!")
| StarcoderdataPython |
105639 | #!/usr/bin/python
import console80v2
singleton = console80v2.singleIntValue()
print "=== Singleton Time ==="
print ""
print singleton
print ""
print "Set a max number of items in history"
singleton.setMaxHistoryLength(4)
print singleton
print ""
print "Add 0 through 7 to history"
for i in range(1,8):
singleton.updateValue(i)
print singleton
print "Increase the history length"
singleton.setMaxHistoryLength(5)
print "Add 1 through 19 to history"
for i in range(1,20):
singleton.updateValue(i)
print singleton
print ""
print "History statistics"
print "Avg: {}".format(singleton.getHistoryAvg())
print "Max: {}".format(singleton.getHistoryMax())
print "Min: {}".format(singleton.getHistoryMin())
print "Last 1: {}".format(singleton.getHistoryLast())
print "Last 3: {}".format(singleton.getHistoryLast(3))
print ""
print "Get current Value"
print "Current: {}".format(singleton.getValue())
print "Scaled: {}".format(singleton.getScaledValue())
print ""
print "Add values outside the range"
singleton.updateValue(101)
print "Get the value back {}".format(singleton.getValue())
print "Get a scaled value back {}".format(singleton.getScaledValue())
print "Am I clipping {}".format(singleton.isClipped())
print ""
singleton.updateValue(-1)
print "Get the value back {}".format(singleton.getValue())
print "Get a scaled value back {}".format(singleton.getScaledValue())
print "Am I clipping {}".format(singleton.isClipped())
print ""
singleton.updateValue(5)
print singleton
print "Get the value back {}".format(singleton.getValue())
print "Am I clipping {}".format(singleton.isClipped())
print ""
print "Set the range 1 - 10"
singleton.setRange((0,10))
print singleton
print ""
print "Sweep to 10 and back"
for i in range(22):
singleton.sweepValue()
print singleton
print ""
print ""
coord = console80v2.singleCoordValue()
print "=== Co-Ordinates Time ==="
print ""
print coord
print ""
print "Set a max number of items in history to 4"
coord.setMaxHistoryLength(4)
print coord
print ""
print "Add 0 through 7 to history"
for i in range(1,8):
coord.updateValue((i**2,i**2))
print coord
print ""
print "Set a max number of items in history to 5"
coord.setMaxHistoryLength(5)
print coord
print ""
print "Add 1 through 19 to history"
for i in range(4,8):
coord.updateValue((i**2,i**2))
print coord
print ""
print "History statistics"
print "Avg: {}".format(coord.getHistoryAvg())
print "Max: {}".format(coord.getHistoryMax())
print "Min: {}".format(coord.getHistoryMin())
print "Last 1: {}".format(coord.getHistoryLast())
print "Last 3: {}".format(coord.getHistoryLast(3))
print ""
print "Get current Value"
print "Current: {}".format(coord.getValue())
print ""
print "Add values outside the range"
coord.updateValue((101,101))
print "Get the value back {}".format(coord.getValue())
print "Am I clipping {}".format(coord.isClipped())
print ""
coord.updateValue((-1,-1))
print "Get the value back {}".format(coord.getValue())
print "Am I clipping {}".format(coord.isClipped())
print ""
coord.updateValue((0,0))
print "Get the value back {}".format(coord.getValue())
print "Am I clipping {}".format(coord.isClipped())
print ""
print "Set the range -5,-5 - 5,5"
coord.setRange(((-5 , -5) , (5 , 5)))
print coord
print ""
print "Sweep to 5 and back to -5"
for i in range(22):
coord.sweepValue()
print coord
print ""
| StarcoderdataPython |
1609503 | <reponame>rthartley/reacombiner<gh_stars>1-10
#!/usr/bin/python3
# -*- coding: utf-8 -*-
from string import Template
import os, sys
HEADER = """\
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# Created by: $created_by
from distutils.core import setup
import py2exe
class Target(object):
'''Target is the baseclass for all executables that are created.
It defines properties that are shared by all of them.
'''
def __init__(self, **kw):
self.__dict__.update(kw)
# the VersionInfo resource, uncomment and fill in those items
# that make sense:
# The 'version' attribute MUST be defined, otherwise no versioninfo will be built:
# self.version = "1.0"
# self.company_name = "Company Name"
# self.copyright = "Copyright Company Name © 2013"
# self.legal_copyright = "Copyright Company Name © 2013"
# self.legal_trademark = ""
# self.product_version = "1.0.0.0"
# self.product_name = "Product Name"
# self.private_build = "foo"
# self.special_build = "bar"
def copy(self):
return Target(**self.__dict__)
def __setitem__(self, name, value):
self.__dict__[name] = value
RT_BITMAP = 2
RT_MANIFEST = 24
# A manifest which specifies the executionlevel
# and windows common-controls library version 6
manifest_template = '''\
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
<assemblyIdentity
version="5.0.0.0"
processorArchitecture="*"
name="%(prog)s"
type="win32"
/>
<description>%(prog)s</description>
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel
level="%(level)s"
uiAccess="false">
</requestedExecutionLevel>
</requestedPrivileges>
</security>
</trustInfo>
<dependency>
<dependentAssembly>
<assemblyIdentity
type="win32"
name="Microsoft.Windows.Common-Controls"
version="6.0.0.0"
processorArchitecture="*"
publicKeyToken="<KEY>"
language="*"
/>
</dependentAssembly>
</dependency>
</assembly>
'''
"""
TARGET = """
$myapp = Target(
# We can extend or override the VersionInfo of the base class:
# version = "1.0",
# file_description = "File Description",
# comments = "Some Comments",
# internal_name = "spam",
script="$script", # path of the main script
# Allows to specify the basename of the executable, if different from '$myapp'
# dest_base = "$myapp",
# Icon resources:[(resource_id, path to .ico file), ...]
# icon_resources=[(1, r"$myapp.ico")]
other_resources = [(RT_MANIFEST, 1, (manifest_template % dict(prog="$myapp", level="asInvoker")).encode("utf-8")),
# for bitmap resources, the first 14 bytes must be skipped when reading the file:
# (RT_BITMAP, 1, open("bitmap.bmp", "rb").read()[14:]),
]
)
"""
OPTIONS = """
# ``zipfile`` and ``bundle_files`` options explained:
# ===================================================
#
# zipfile is the Python runtime library for your exe/dll-files; it
# contains in a ziparchive the modules needed as compiled bytecode.
#
# If 'zipfile=None' is used, the runtime library is appended to the
# exe/dll-files (which will then grow quite large), otherwise the
# zipfile option should be set to a pathname relative to the exe/dll
# files, and a library-file shared by all executables will be created.
#
# The py2exe runtime *can* use extension module by directly importing
# the from a zip-archive - without the need to unpack them to the file
# system. The bundle_files option specifies where the extension modules,
# the python dll itself, and other needed dlls are put.
#
# bundle_files == 3:
# Extension modules, the Python dll and other needed dlls are
# copied into the directory where the zipfile or the exe/dll files
# are created, and loaded in the normal way.
#
# bundle_files == 2:
# Extension modules are put into the library ziparchive and loaded
# from it directly.
# The Python dll and any other needed dlls are copied into the
# directory where the zipfile or the exe/dll files are created,
# and loaded in the normal way.
#
# bundle_files == 1:
# Extension modules and the Python dll are put into the zipfile or
# the exe/dll files, and everything is loaded without unpacking to
# the file system. This does not work for some dlls, so use with
# caution.
#
# bundle_files == 0:
# Extension modules, the Python dll, and other needed dlls are put
# into the zipfile or the exe/dll files, and everything is loaded
# without unpacking to the file system. This does not work for
# some dlls, so use with caution.
py2exe_options = dict(
packages = [$packages],
## excludes = "tof_specials Tkinter".split(),
## ignores = "dotblas gnosis.xml.pickle.parsers._cexpat mx.DateTime".split(),
## dll_excludes = "MSVCP90.dll mswsock.dll powrprof.dll".split(),
optimize=$optimize,
compressed=$compressed, # uncompressed may or may not have a faster startup
bundle_files=$bundle_files,
dist_dir=$destdir,
)
"""
SETUP = """
# Some options can be overridden by command line options...
setup(name="name",
# console based executables
console=[$console],
# windows subsystem executables (no console)
windows=[$windows],
# py2exe options
zipfile=$zipfile,
options={"py2exe": py2exe_options},
)
"""
def write_setup(args):
with open(args.setup_path, "w", encoding="utf-8") as ofi:
header = Template(HEADER)
created_by = " ".join([os.path.basename(sys.executable), "-m", "py2exe"] + sys.argv[1:])
print(header.substitute(locals()), file=ofi)
console = []
for target in args.script:
script = target.script
myapp = os.path.splitext(target.script)[0]
target = Template(TARGET)
print(target.substitute(locals()), file=ofi)
console.append(myapp)
console = ", ".join(console)
windows = ""
optimize = args.optimize or 0
compressed = args.compress or False
destdir = repr(args.destdir)
zipfile = repr(args.libname)
packages = ", ".join(args.packages or [])
bundle_files = args.bundle_files
options = Template(OPTIONS)
print(options.substitute(locals()), file=ofi)
setup = Template(SETUP)
print(setup.substitute(locals()), file=ofi)
print("Created %s." % args.setup_path)
| StarcoderdataPython |
30832 | #!/usr/bin/env python3
from datetime import datetime, timezone, date
import os
import sys
import boto3
import logging
import json
#setup global logger
logger = logging.getLogger("SnapTool")
#set log level
LOGLEVEL = os.environ['LogLevel'].strip()
logger.setLevel(LOGLEVEL.upper())
logging.getLogger("botocore").setLevel(logging.ERROR)
#setup global RDS client
rds = boto3.client("rds")
#rds snapshot tool tag name
toolTagKey="SnapTool"
def json_serial(obj):
"""JSON serializer for objects not serializable by default json code"""
if isinstance(obj, (datetime, date)):
return obj.isoformat()
raise TypeError ("Type %s not serializable" % type(obj))
def startTool(timeNow):
dbClusters=[]
if os.environ['DatabaseNames'] == "ALL":
resp=rds.describe_db_clusters()
for db in resp['DBClusters']:
dbClusters.append(db['DBClusterIdentifier'])
else:
dbClusters=os.environ['DatabaseNames'].split(",")
#make all lowercase
dbClusters=[x.lower() for x in dbClusters]
verifyClusters(dbClusters)
backupConfig=[]
backupConfig.append({
"timePeriod": "yearly",
"retention": int(os.environ['YearlyRetention'])
})
backupConfig.append({
"timePeriod": "monthly",
"retention": int(os.environ['MonthlyRetention'])
})
backupConfig.append({
"timePeriod": "weekly",
"retention": int(os.environ['WeeklyRetention'])
})
for db in dbClusters:
logger.info("Analyzing snapshot status for DB:" + db)
newSnapPeriod = []
snapsToDelete = {}
for period in backupConfig:
if(period['retention']> 0):
if (validBackupTime(timeNow, period['timePeriod'])):
newSnapPeriod.append(period['timePeriod'])
#check if there are snaps to delete keeping in mind we will be creating a new one soon
snapsToDelete[period['timePeriod']] = checkDeleteNeeded(db, period['timePeriod'], period['retention']-1)
else:
#check if there are snaps to delete
snapsToDelete[period['timePeriod']] = checkDeleteNeeded(db, period['timePeriod'], period['retention'])
else:
logger.info("No " + period['timePeriod'] + " retention specified.")
# delete any snaps if present
deleteAllSnaps(db, period['timePeriod'])
if(newSnapPeriod != []):
createSnap(db, newSnapPeriod)
else:
logger.info("No snapshot needed today.")
#delete snaps if needed
for timePeriod in snapsToDelete.keys():
for snap in snapsToDelete[timePeriod]:
deleteSnap(snap, timePeriod)
def validBackupTime(timeNow, timePeriod):
backupDate = int(os.environ['BackupDate'])
backupMonth = os.environ['BackupMonth']
weeklyBackupDay = os.environ['WeeklyBackupDay']
logger.debug("Checking if " + timePeriod + " retention policy is satisfied.")
if (timePeriod == "yearly"):
if(timeNow.day == backupDate and timeNow.strftime("%B") == backupMonth):
logger.debug("Backup date matches specifications")
return True
elif (timePeriod == "monthly"):
if (timeNow.day == backupDate):
logger.debug("Backup date matches specifications")
return True
elif (timePeriod == "weekly"):
if(timeNow.strftime("%A") ==weeklyBackupDay):
logger.debug("Backup date matches specifications")
return True
else:
logger.error("Invalid time period. Exiting")
sys.exit(1)
logger.debug("Backup date does not match specifications. Skipping snapshot")
return False
def checkDeleteNeeded(db, timePeriod, retention):
snaps=getSnaps(db,timePeriod)
if(snaps is not None and len(snaps)>=retention):
return snaps[:-retention]
else:
return []
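# Worked slice example (illustrative values):
#
#     snaps = ["s1", "s2", "s3", "s4", "s5"]  # sorted oldest-first
#     snaps[:len(snaps) - 3]                  # -> ["s1", "s2"]: delete the two oldest
#     snaps[:len(snaps) - 0]                  # -> all five when retention is 0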
def deleteAllSnaps(db,timePeriod):
snaps = getSnaps(db, timePeriod)
if(snaps is not None):
logger.info("Removing any old " + timePeriod + " snapshots.")
for snap in snaps:
deleteSnap(snap, timePeriod)
def getSnaps(db, timePeriod):
validSnaps = []
if ("dateSimulationDebugFile" in os.environ):
# snapshot info is stored in local file for debugging
snapStore = {}
try:
with open(os.environ['dateSimulationDebugFile'], 'r') as fp:
snapStore = json.load(fp)
except Exception:
logger.exception("Failed to load snapshot store file. Failing")
sys.exit(1)
for snap in snapStore[db]:
if (timePeriod in snap['Tag']):
# time period matches
# convert date strings to datetime objects
snap['SnapshotCreateTime'] = datetime.strptime(snap['SnapshotCreateTime'],
"%Y-%m-%dT%H:%M:%S.%f+00:00").replace(tzinfo=timezone.utc)
validSnaps.append(snap)
else:
snaps = rds.describe_db_cluster_snapshots(
DBClusterIdentifier=db,
SnapshotType="manual"
)
for s in snaps['DBClusterSnapshots']:
tags = rds.list_tags_for_resource(ResourceName=s['DBClusterSnapshotArn'])
for t in tags['TagList']:
if t['Key'] == toolTagKey and timePeriod in t['Value']:
validSnaps.append(s)
if (len(validSnaps) > 0):
# sort snaps by date
sortedArray = sorted(
validSnaps,
key=lambda x: x['SnapshotCreateTime'],
reverse=False
)
return sortedArray
else:
return None
def createSnap(db, tags):
logger.info("Creating snapshot on DB:" + db + " with tags:" + str(tags))
if ("dateSimulationDebugFile" in os.environ):
# snapshot info is stored in local file for debugging
# get simulated date from env var
simDate = datetime.strptime(os.environ['debugDate'],
"%Y-%m-%dT%H:%M:%S.%f+00:00").replace(tzinfo=timezone.utc)
snap = {
"Tag": " ".join(tags),
"SnapshotCreateTime": simDate,
"DBClusterIdentifier" : db
}
try:
with open(os.environ['dateSimulationDebugFile'], 'r') as json_data:
snapJson= json.load(json_data)
snapJson[db].append(snap)
with open(os.environ['dateSimulationDebugFile'], 'w') as json_data:
json.dump(snapJson, json_data, default=json_serial)
except Exception:
logger.exception("Failed to read or write snapshot store file. Failing")
sys.exit(1)
else:
snapshotName=db + "-" + datetime.now().strftime('%Y-%m-%d')
rds.create_db_cluster_snapshot(
DBClusterSnapshotIdentifier=snapshotName,
DBClusterIdentifier=db,
Tags=[
{
"Key": toolTagKey,
"Value": " ".join(tags)
}
])
def deleteSnap(snapToDelete, timePeriod):
logger.debug("Received a delete request for the " + timePeriod + " time period.")
if ("dateSimulationDebugFile" in os.environ):
# snapshot info is stored in local file for debugging
#read local file
snapJson={}
try:
with open(os.environ['dateSimulationDebugFile'], 'r') as json_data:
snapJson = json.load(json_data)
except Exception:
logger.exception("Failed to read snapshot store file. Failing")
sys.exit(1)
#check all snaps to see if date matches
newSnapList=[]
for snap in snapJson[snapToDelete['DBClusterIdentifier']]:
# convert date strings to datetime objects
snap['SnapshotCreateTime'] = datetime.strptime(snap['SnapshotCreateTime'], "%Y-%m-%dT%H:%M:%S.%f+00:00").replace(tzinfo=timezone.utc)
if (snap['SnapshotCreateTime'].date() == snapToDelete['SnapshotCreateTime'].date()):
#found snap with correct date
tags = snap['Tag'].split(" ")
if(len(tags) ==1 and tags[0]==timePeriod):
#we can delete it
logger.info("Deleting " + timePeriod + " snap from test file")
continue
else:
#update tag to remove time period
tags.remove(timePeriod)
snap['Tag']=" ".join(tags)
#if we are NOT deleting the snap we add its info to a new list
newSnapList.append(snap)
snapJson[snapToDelete['DBClusterIdentifier']]=newSnapList
try:
#write to file
with open(os.environ['dateSimulationDebugFile'], 'w') as json_data:
json.dump(snapJson, json_data, default=json_serial)
except Exception:
logger.exception("Failed to write snapshot store file. Failing")
sys.exit(1)
else:
#using RDS information for snapshots
# check tags on snapshot
tags = rds.list_tags_for_resource(ResourceName=snapToDelete['DBClusterSnapshotArn'])
for t in tags['TagList']:
if t['Key'] == toolTagKey:
tags = t['Value'].split(" ")
if (len(tags) == 1 and tags[0] == timePeriod):
# if the time period specified is the only remaining timeperiod we can delete it
logger.info("Deleting snapshot: " + snapToDelete['DBClusterSnapshotIdentifier'] + " from RDS.")
#delete from RDS
rds.delete_db_cluster_snapshot(DBClusterSnapshotIdentifier=snapToDelete['DBClusterSnapshotArn'])
else:
# update tag to remove time period
logger.info("Removing time period tag:" + timePeriod + " from snapshot:" + snapToDelete['DBClusterSnapshotIdentifier'])
tags.remove(timePeriod)
#rds update tag on snapshot
t['Value']= " ".join(tags)
rds.add_tags_to_resource(ResourceName=snapToDelete['DBClusterSnapshotArn'], Tags=[t])
break
def verifyClusters(dbClusters):
existingDBClusters=[d['DBClusterIdentifier'] for d in rds.describe_db_clusters()['DBClusters']]
for db in dbClusters:
logger.debug("Checking if DB:" + db + " is an existing Aurora Cluster.")
if(db in existingDBClusters):
logger.debug("DB:" + db + " is a valid cluster.")
else:
logger.error("DB:" + db + " is NOT a valid cluster. Failing")
sys.exit(1)
def lambda_handler(event, context):
logger.info("Starting Aurora Snapshot Generator tool")
logger.debug("Environment Variables:")
for key in os.environ:
logger.debug("Found {}={}".format(key, os.environ[key]))
logger.debug("Checking for required env vars.")
requiredEnvVars = ['DatabaseNames', 'WeeklyRetention', 'MonthlyRetention', 'YearlyRetention','WeeklyBackupDay', 'BackupDate', 'BackupMonth']
for r in requiredEnvVars:
if r not in os.environ.keys():
logger.error("Required variable:" + r + " not found. Exiting.")
sys.exit(1)
timeNow=datetime.now(timezone.utc)
logger.debug("Month:" + str(timeNow.strftime("%B")) + " Day:" + str(timeNow.day) + " DOW:" + str(timeNow.strftime("%A")))
startTool(timeNow)
logger.info("End of Aurora Snapshot Generator tool")
| StarcoderdataPython |
171931 | <gh_stars>10-100
from datetime import datetime, timedelta
import logging
import re
import boto3
from dart.util.s3 import get_bucket_name, get_key_name
from dart.util.strings import substitute_date_tokens
_logger = logging.getLogger(__name__)
def data_check(s3_engine, datastore, action):
"""
:type s3_engine: dart.engine.s3.s3.S3Engine
:type datastore: dart.model.datastore.Datastore
:type action: dart.model.action.Action
"""
action = s3_engine.dart.patch_action(action, progress=.1)
args = action.data.args
offset = args.get('date_offset_in_seconds')
now = datetime.utcnow()
s3_path_prefix = substitute_date_tokens(args['s3_path_prefix'], now, offset)
bucket_name = get_bucket_name(s3_path_prefix)
prefix = get_key_name(s3_path_prefix)
last_modified = args.get('s3_file_last_modified')
s3_paginator = boto3.client('s3').get_paginator('list_objects')
for page in s3_paginator.paginate(Bucket=bucket_name, Prefix=prefix):
for element in (page.get('Contents') or []):
path = 's3://' + bucket_name + '/' + element['Key']
s3_path_regex = args.get('s3_path_regex')
if s3_path_regex and not re.match(substitute_date_tokens(s3_path_regex, now, offset), path):
continue
if args.get('min_file_size_in_bytes') and element['Size'] < args['min_file_size_in_bytes']:
continue
if last_modified and element['LastModified'] < now + timedelta(seconds=offset):
continue
return
raise Exception('Data check failed')
| StarcoderdataPython |
47215 | from tqdm.notebook import tqdm_notebook
def corpus_file_transform(src_file,dst_file):
import os
assert os.path.isfile(src_file),'Src File Not Exists.'
with open(src_file,'r',encoding = 'utf-8') as text_corpus_src:
with open(dst_file,'w',encoding = 'utf-8') as text_corpus_dst:
text_corpus_dst.write(''.join([(text_word + "\tS\n" if len(text_word) == 1 else (text_word[0] + "\tB\n" + ''.join([(w + "\tM\n") for w in text_word[1 : -1]]) + text_word[-1] + "\tE\n")) for text_line in tqdm_notebook(text_corpus_src.readlines()) for text_word in text_line.strip().split()]))
def IOForFeature(file,feature = None,mode = 'rb',featureList = ['A','B','C']):
assert (mode == 'rb') or (mode == 'wb'),'The third parameter must be \'r\' or \'w\''
assert not((mode == 'wb') and not feature),'The second parameter feature must not be empty.'
try:
import pickle
with open(file,mode) as f:
if mode == 'rb':
feature = pickle.load(f)
elif mode == 'wb':
pickle.dump(feature,f)
except:
feature = {label : {} for label in featureList}
return feature
def TrainingFeatureA(corpus,featureA,wordLabel = {'B' : 0, 'M' : 1, 'E' : 2, 'S' : 3}):
# p(y_i|x_i)
if not featureA:
featureA = {}
for word in tqdm_notebook(corpus):
if not featureA.get(word[0]):
featureA[word[0]] = [0,0,0,0]
featureA[word[0]][wordLabel[word[2]]] += 1
return featureA
def TrainingFeatureB(corpus,featureB,wordLabel = {'B' : 0, 'M' : 1, 'E' : 2, 'S' : 3}):
# p(y_(i+1)|x_i,y_i)
if not featureB:
featureB = {}
for word,nextword in tqdm_notebook(zip(corpus[:-1],corpus[1:])):
if not featureB.get(word[0]):
featureB[word[0]] = [[0,0,0,0] for i in range(4)]
featureB[word[0]][wordLabel[word[2]]][wordLabel[nextword[2]]] += 1
return featureB
def TrainingFeatureC(corpus,featureC,wordLabel = {'B' : 0, 'M' : 1, 'E' : 2, 'S' : 3}):
# p(x_(i-1)|x_i,y_i),p(x_(i+1)|x_i,y_i)
if not featureC:
featureC = {}
for lastWord,word,nextWord in tqdm_notebook(zip(corpus[:-2],corpus[1:-1],corpus[2:])):
if not featureC.get(word[0]):
featureC[word[0]] = {label : {} for label in wordLabel}
if not featureC[word[0]][word[2]].get(lastWord[0]):
featureC[word[0]][word[2]][lastWord[0]] = [0,0]
featureC[word[0]][word[2]][lastWord[0]][0] += 1
if not featureC[word[0]][word[2]].get(nextWord[0]):
featureC[word[0]][word[2]][nextWord[0]] = [0,0]
featureC[word[0]][word[2]][nextWord[0]][1] += 1
    return featureC
def featureTraining(feature,train_corpus,
featureList = ['A','B','C'],
featureFunction = {'A' : TrainingFeatureA, 'B' : TrainingFeatureB,'C' : TrainingFeatureC},
wordLabel = {'B' : 0, 'M' : 1, 'E' : 2, 'S' : 3}):
for featureLabel in featureList:
feature[featureLabel] = featureFunction[featureLabel](train_corpus,feature[featureLabel],wordLabel)
def getTestFeatureABC(test_str,feature,wordLabel):
import numpy as np
test_featureA = {word : (-np.log(np.array(feature['A'][word]) / sum(feature['A'][word]))).tolist()
if feature['A'].get(word) else [0,0,0,0] for word in test_str}
test_featureB = {word : (-np.log(np.array(feature['B'][word]).T / np.array(feature['B'][word]).sum(axis = 1)).T).tolist()
if feature['B'].get(word) else [[0,0,0,0] for label in wordLabel.keys()] for word in test_str}
test_featureC = {word :{d1_key : {d2_key : d2_value for d2_key,d2_value in
zip(d1_value.keys(),(np.array(list(d1_value.values())) / np.array(list(d1_value.values())).sum(axis = 0)).tolist())}
for d1_key,d1_value in feature['C'][word].items()} if feature['C'].get(word) else {label : {} for label in wordLabel.keys()} for word in test_str}
return test_featureA,test_featureB,test_featureC
def getDividedResult(wordLabel,relationDict,test_str):
wordLabelk = list(wordLabel.keys())
thisIndex = relationDict[-1][0].index(min(relationDict[-1][0]))
dividedResult, lastIndex = [[test_str[-1],wordLabelk[thisIndex]]],relationDict[-1][1][thisIndex]
for w_id in range(len(test_str) - 2,-1,-1):
dividedResult.append([test_str[w_id],wordLabelk[lastIndex]])
lastIndex = relationDict[w_id][1][lastIndex]
dividedResult.reverse()
resultString = ''.join([(' ' if d_R[1] == 'S' or d_R[1] == 'B' else '') + d_R[0] + (' ' if d_R[1] == 'S' or d_R[1] == 'E' else '') for d_R in dividedResult])
return dividedResult,resultString
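# Illustrative helper sketch (an addition mirroring resultString above): the
# B/M/E/S labels mark word boundaries, so a labeled character sequence can
# be joined back into a list of words.
def labels_to_words(chars, labels):
    words, buf = [], ""
    for ch, lab in zip(chars, labels):
        buf += ch
        if lab in ("E", "S"):  # E closes a multi-character word, S is a single-character word
            words.append(buf)
            buf = ""
    if buf:
        words.append(buf)
    return words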
def CRFWordSeperate(test_str,feature,wordLabel = {'B' : 0, 'M' : 1, 'E' : 2, 'S' : 3} ):
import numpy as np
test_featureA,test_featureB,test_featureC = getTestFeatureABC(test_str,feature,wordLabel)
relationDict = [[[test_featureA[test_str[w_id]][wordLabel[l_id]] *
(1 - (0 if w_id == 0 else test_featureC[test_str[w_id]][l_id].get(test_str[w_id - 1], [0,0])[0])) *
(1 - (0 if w_id == len(test_str) - 1 else test_featureC[test_str[w_id]][l_id].get(test_str[w_id + 1], [0,0])[1]))
for l_id in wordLabel],[0 for l_id in wordLabel]] for w_id in range(len(test_str))]
relationDict[0][0][wordLabel['E']] = relationDict[0][0][wordLabel['M']] = float('inf')
for w_id in range(1,len(test_str)):
for l_id in wordLabel:
candidateList = [test_featureB[test_str[w_id - 1]][wordLabel[l]][wordLabel[l_id]]
* (1 - (0 if w_id == 0 else test_featureC[test_str[w_id]][l_id].get(test_str[w_id - 1], [0,0])[0]))
* (1 - (0 if w_id == len(test_str) - 1 else test_featureC[test_str[w_id]][l_id].get(test_str[w_id + 1], [0,0])[1]))
+ relationDict[w_id - 1][0][wordLabel[l]] for l in wordLabel]
candidateList = [float('inf') if np.isnan(c_l) else c_l for c_l in candidateList]
relationDict[w_id][0][wordLabel[l_id]] += min(candidateList)
relationDict[w_id][1][wordLabel[l_id]] = candidateList.index(min(candidateList))
relationDict[-1][0][wordLabel['B']] = relationDict[-1][0][wordLabel['M']] = float('inf')
return getDividedResult(wordLabel,relationDict,test_str)
if __name__=="__main__":
train_corpus_src = 'msr_training.utf8'
train_corpus_dst = 'msr_training.utf8.pr'
corpus_file_transform(train_corpus_src,train_corpus_dst)
with open(train_corpus_dst,'r',encoding = 'utf-8') as f:
train_corpus = f.readlines()
print(train_corpus[:10])
featureFile = 'feature.pkl'
wordLabel = {'B' : 0, 'M' : 1, 'E' : 2, 'S' : 3}
feature = IOForFeature(featureFile,mode='rb')
featureTraining(feature,train_corpus)
feature = IOForFeature(featureFile,feature,mode='wb')
t_str = '最近内存在涨价,不能用以前等价值的物品交换了'
dividedResult,resultString = CRFWordSeperate(t_str,feature,wordLabel)
dividedSequences = ''.join([result[1] for result in dividedResult])
print(resultString)
print(dividedSequences)
print(dividedResult)
test_corpus_src = 'pku_training.utf8'
test_corpus_dst = 'pku_training.utf8.pr'
corpus_file_transform(test_corpus_src,test_corpus_dst)
    # convert the segmented training file into unsegmented test sentences
with open(test_corpus_src,'r',encoding = 'utf-8') as f:
test_sentences = f.readlines()
test_sentences = [sentence.replace(' ','') for sentence in test_sentences]
test_sentences = [sentence.replace('\n','') for sentence in test_sentences]
    # obtain the gold labels for the test file
with open(test_corpus_dst,'r',encoding = 'utf-8') as f:
test_corpus = f.readlines()
test_label = ''.join([result[2] for result in test_corpus])
print(test_sentences[0])
print(test_corpus[:len(test_sentences[0])])
print(test_label[:len(test_sentences[0])])
dividedSequences = ''
dividedResults = []
resultStrings = []
for sentences in tqdm_notebook(test_sentences[:500]):
dividedResult,resultString = CRFWordSeperate(sentences,feature,wordLabel)
dividedResults.append(dividedResult)
resultStrings.append(resultString)
dividedSequences += ''.join([result[1] for result in dividedResult])
for d_R,r_S in zip(dividedResults[:10],resultStrings[:10]):
print(r_S)
print(d_R)
count = [0,0,0,0]
for d_S in dividedSequences:
count[wordLabel[d_S]] += 1
print(list(zip(wordLabel.keys(),count)))
accurate = [0,0]
for d_S in range(len(dividedSequences)):
accurate[test_label[d_S] == dividedSequences[d_S]] += 1
print('Wrong : %.2f%%, Right : %.2f%%' % (accurate[0] / sum(accurate) * 100,accurate[1] / sum(accurate) * 100)) | StarcoderdataPython |
3252984 | class Solution:
def XXX(self, a: str, b: str) -> str:
a_list,b_list=[],[]
res=[]
a_length=len(a)
b_length=len(b)
if(a_length>b_length):
for i in range(a_length-b_length):
b_list.append(0)
else:
for i in range(b_length-a_length):
a_list.append(0)
for word in a:
a_list.append(int(word))
for word in b:
b_list.append(int(word))
additional=0
j=len(a_list)-1
while(j>=0):
if(a_list[j]+b_list[j]==0):
if(additional==0):
res.insert(0,0)
additional=0
else:
res.insert(0,1)
additional=0
elif(a_list[j]+b_list[j]==1):
if(additional==0):
res.insert(0,1)
additional=0
else:
res.insert(0,0)
additional=1
elif(a_list[j]+b_list[j]==2):
if(additional==0):
res.insert(0,0)
additional=1
else:
res.insert(0,1)
additional=1
else:
print("error nums")
j=j-1
if(additional==1):
res.insert(0,1)
res=''.join([str(x) for x in res])
return res
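if __name__ == "__main__":
    # Quick demo (illustrative): binary "11" + "1" carries through to "100".
    print(Solution().XXX("11", "1"))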
| StarcoderdataPython |
3201380 | """ Data objects in group "energyplus"
"""
from collections import OrderedDict
import logging
from pyidf.helper import DataObject
logger = logging.getLogger("pyidf")
logger.addHandler(logging.NullHandler())
class LeadInput(DataObject):
"""Corresponds to IDD object `Lead Input`"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict(),
'format': None,
'group': 'energyplus',
'min-fields': 0,
'name': u'Lead Input',
'pyname': u'LeadInput',
'required-object': False,
'unique-object': False}
class SimulationData(DataObject):
"""Corresponds to IDD object `Simulation Data`"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict(),
'format': None,
'group': 'energyplus',
'min-fields': 0,
'name': u'Simulation Data',
'pyname': u'SimulationData',
'required-object': False,
'unique-object': False}
| StarcoderdataPython |
3244822 | <filename>tests/test_validation.py
"""Test validation functions."""
# pylint: disable=missing-docstring
from imaps.base.validation import (
validate_bam_file,
validate_bed_file,
validate_date,
validate_integer,
validate_string,
)
from ngs_test_utils.testcase import NgsTestCase
class TestValidation(NgsTestCase):
def test_validate_bed_file(self):
message = "Bed file file.txt should have a valid bed extension."
with self.assertRaisesRegex(ValueError, message):
validate_bed_file("file.txt", check_exist=False)
message = "Bed file file.bed does not exist."
with self.assertRaisesRegex(ValueError, message):
validate_bed_file("file.bed", check_exist=True)
bed = self.make_bed(intervals=[["chr1", 10, 20, ".", 12, "+"]])
validate_bed_file(bed, check_exist=True)
def test_validate_bam_file(self):
message = "Bam file file.txt should have a valid bam extension."
with self.assertRaisesRegex(ValueError, message):
validate_bam_file("file.txt", check_exist=False)
message = "Bam file file.bam does not exist."
with self.assertRaisesRegex(ValueError, message):
validate_bam_file("file.bam", check_exist=True)
bam = self.make_bam(chroms=[("chr1", 100)], segments=[dict(cigar=[(0, 75)])])
validate_bam_file(bam, check_exist=True)
def test_validate_string(self):
message = "Value 123 should be a string."
with self.assertRaisesRegex(ValueError, message):
validate_string(123)
message = "Value C should be one of A, B."
with self.assertRaisesRegex(ValueError, message):
validate_string("C", choices=["A", "B"])
validate_string("A")
validate_string("B", choices=["A", "B"])
validate_string("", allow_empty=True)
def test_validate_integer(self):
message = "Value AAA should be an integer."
with self.assertRaisesRegex(ValueError, message):
validate_integer("AAA")
validate_integer(123)
def test_validate_date(self):
message = "Incorrect date format \\(1.2.1990\\), should be YYYY-MM-DD."
with self.assertRaisesRegex(ValueError, message):
validate_date("1.2.1990")
validate_date("1900-2-1")
validate_date("", allow_empty=True)
| StarcoderdataPython |
1786497 | <gh_stars>0
import os
import pytest
@pytest.fixture
def api_email():
return os.getenv('API_EMAIL') or 'mock-email'
@pytest.fixture
def api_key():
return os.getenv('API_KEY') or 'mock-key'
@pytest.fixture
def base_url():
return os.getenv('BASE_URL') or 'http://tuneapp.localhost/api'
| StarcoderdataPython |
4839584 | """
Useful parameters for the model
"""
| StarcoderdataPython |
3201780 | # Read three angles, which are given on separate lines, from the
# input and print in the following format whether they form a
# triangle: "The triangle is valid!" or "The triangle is not valid!"
a = int(input())
b = int(input())
c = int(input())
print("The triangle is valid!" if a + b + c == 180 else "The triangle is not valid!") | StarcoderdataPython |
1665358 | from django.core import serializers
from django.http import HttpResponse
def export_as_json(modeladmin, request, queryset):
response = HttpResponse(content_type="application/json")
model_name = modeladmin.model._meta.model_name
response["Content-Disposition"] = "attachment;filename={model_name}.json".format(model_name=model_name)
serializers.serialize("json", queryset, stream=response, indent=2, use_natural_primary_keys=True)
return response
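
# Illustrative registration (an added sketch; the admin class and model names
# are assumptions, not part of the original file):
# class ArticleAdmin(admin.ModelAdmin):
#     actions = [export_as_json]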
| StarcoderdataPython |
3393446 | <reponame>vgoliber/points-on-maps<filename>map_viz.py
import pandas as pd
import matplotlib.pyplot as plt
import geopandas as gpd
from shapely.geometry import Polygon
def viz_results(build_sites, existing_chargers, radius):
street_map = gpd.read_file('data/dubai.shp')
filename = "UAE_Emirate.geojson"
file = open(filename)
emirates_map = gpd.read_file(file)
polygon = Polygon([(55.0, 24.9), (55.0, 25.4), (55.5, 25.4), (55.5, 24.9), (55.0, 24.9)])
street_map = gpd.clip(street_map, polygon)
emirates_map = gpd.clip(emirates_map, polygon)
fig = plt.figure(figsize=(16, 12), dpi=160)
ax = fig.add_axes([0, 0, 1, 1])
ax.axis('off')
# Draw new build sites
with open(build_sites) as f:
lines = f.readlines()
points = []
lats = []
longs = []
for line in lines:
temp = line.split()
points.append(temp[0])
lats.append(temp[1])
longs.append(temp[2])
df = pd.DataFrame({'TYPE': ['build site']*len(points), 'Latitude': lats, 'Longitude':longs})
gdf = gpd.GeoDataFrame(
df, geometry=gpd.points_from_xy(df.Longitude, df.Latitude))
gdf.plot(ax=ax, color='r', zorder=10)
# Draw radius around new build sites
new_df = gdf.copy()
    new_df['geometry'] = new_df['geometry'].buffer(radius / 111)  # km to degrees (~111 km per degree of latitude)
new_df.plot(ax=ax, color='r', alpha=0.1, zorder=9)
# Draw existing chargers
with open(existing_chargers) as f:
lines = f.readlines()
points = []
lats = []
longs = []
for line in lines:
temp = line.split()
points.append(temp[0])
lats.append(temp[1])
longs.append(temp[2])
df_charger = pd.DataFrame({'TYPE': ['charger']*len(points), 'Latitude': lats, 'Longitude':longs})
gdf_charger = gpd.GeoDataFrame(
df_charger, geometry=gpd.points_from_xy(df_charger.Longitude, df_charger.Latitude))
gdf_charger.plot(ax=ax, color='y', zorder=8)
# Draw radius around chargers
new_df = gdf_charger.copy()
new_df['geometry'] = new_df['geometry'].buffer(radius/111)
new_df.plot(ax=ax, color='y', alpha=0.1, zorder=7)
street_map.plot(ax = ax, color = '#545454', zorder=5, linewidth=0.5)
emirates_map.plot(ax = ax, color = '#d3d3d3', zorder=0)
plt.savefig('map.png')
if __name__ == '__main__':
viz_results('points.txt', 'chargers.txt', radius=5)
| StarcoderdataPython |
77383 | from detectron2.engine import DefaultTrainer, HookBase
from detectron2.config import get_cfg
from detectron2.data.datasets import register_coco_instances, load_coco_json
from detectron2.modeling import build_model
from detectron2.checkpoint import DetectionCheckpointer
from detectron2.data import DatasetCatalog, MetadataCatalog
import cv2
from detectron2.utils.visualizer import Visualizer
def reg_dataset(name_of_dataset, imgs_root, f_path_annotation):
# register_coco_instances(name_of_dataset, {}, f_path_annotation,imgs_root)
load_coco_json(f_path_annotation, imgs_root, dataset_name=name_of_dataset)
DatasetCatalog.register(
name_of_dataset, lambda: load_coco_json(
f_path_annotation, imgs_root, name_of_dataset
)
)
return name_of_dataset
def create_cfg(weights_root, name_of_dataset):
cfg = get_cfg()
cfg.OUTPUT_DIR = weights_root
cfg.DATASETS.TRAIN = (name_of_dataset,)
cfg.DATASETS.TEST = () # no metrics implemented for this dataset
cfg.MODEL.DEVICE = "cpu" # cpu or cuda
# cfg.INPUT.RANDOM_FLIP = "horizontal"
# cfg.DATALOADER.NUM_WORKERS = 8
# cfg.SOLVER.IMS_PER_BATCH = 1
cfg.DATALOADER.NUM_WORKERS = 0
cfg.SOLVER.IMS_PER_BATCH = 1
cfg.SOLVER.CHECKPOINT_PERIOD = 50
cfg.SOLVER.BASE_LR = 0.00025
cfg.SOLVER.MAX_ITER = 1000
cfg.MODEL.ROI_HEADS.NUM_CLASSES = 4
cfg.MODEL.ANCHOR_GENERATOR.SIZES = [[122.48, 158.97, 86.08, 71.7]]
# P3_C, KC_135, C_5, B_52
cfg.MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS = [
[1.5, 2.17, 2.28, 1.83]
]
# cfg.MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS = [
# [0.5, 1.0, 1.8, 2.17, 2.3]
# ]
# cfg.MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS = [
# [1.0, 1.8], [1.0, 2.3], [1.0, 2.17]
# ]
return cfg
def write_cfg(cfg, full_cfg_path):
with open(full_cfg_path, "w") as f:
f.write(cfg.dump())
return full_cfg_path
def write_weights_from_cfg(cfg, saving_dir, weights_name):
cfg.OUTPUT_DIR = saving_dir
model = build_model(cfg)
checkpointer = DetectionCheckpointer(model, save_dir=saving_dir)
checkpointer.save(weights_name)
return saving_dir + "/" + weights_name
def visualize_img(name_of_dataset, ind_img):
res = DatasetCatalog.get(name_of_dataset)
if ind_img > len(res):
return
img = cv2.imread(res[ind_img]["file_name"])
visualizer = Visualizer(
img[:, :, ::-1],
metadata=MetadataCatalog.get(name_of_dataset),
scale=0.5
)
vis = visualizer.draw_dataset_dict(res[ind_img])
cv2.imshow(f"Image #{ind_img}", vis.get_image()[:, :, ::-1])
cv2.waitKey(0)
def main():
# registration dataset
name_of_dataset = "Planes_detection_Train"
imgs_root = "C:/Users/savchenko.bs/Desktop/new_placement/detectron2/Dataset_30k"
f_path_annotation = "C:/Users/savchenko.bs/Desktop/new_placement/detectron2/Dataset_30k/Train_Data.json"
name_of_dataset = reg_dataset(
name_of_dataset, imgs_root, f_path_annotation
)
# MetadataCatalog.get(name_of_dataset).thing_classes = [
# "B_52", "C_5", "KC_135", "P_3C"
# ]
print(MetadataCatalog.get(name_of_dataset))
# visualize_img(name_of_dataset, 25654)
# config params function
weights_root = "C:/Users/savchenko.bs/Desktop/new_placement/detectron2/weights/results_learning_fixed_anchors"
cfg = create_cfg(weights_root, name_of_dataset)
# write weights specific from config
# name_model = write_weights_from_cfg(cfg, weights_root,"detectron2_model")
cfg_name = "detectron2_config.yaml"
cfg_name = write_cfg(cfg, weights_root + "/" + cfg_name)
cfg_from_file = get_cfg()
cfg_from_file.merge_from_file(cfg_name)
trainer = DefaultTrainer(cfg_from_file)
trainer.resume_or_load(resume=False)
trainer.train()
if __name__ == "__main__":
main()
| StarcoderdataPython |
1629992 | <gh_stars>100-1000
from office365.runtime.client_value import ClientValue
class SimpleDataTable(ClientValue):
pass
| StarcoderdataPython |
3262539 | <filename>utils/draw_fit.py
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
sns.set_context("paper", rc={'font.sans-serif': 'Helvetica',
'font.size': 12})
df_green = pd.read_csv('~/Resources/Experiments/dcfnex-12/dcstfn-green/train/history.csv')
df_red = pd.read_csv('~/Resources/Experiments/dcfnex-12/dcstfn-red/train/history.csv')
df_nir = pd.read_csv('~/Resources/Experiments/dcfnex-12/dcstfn-nir/train/history.csv')
df_green = df_green.head(50)
df_red = df_red.head(50)
df_nir = df_nir.head(50)
epoch = df_green['epoch']
metrics = ('r2', 'val_r2')
labels = ('Green', 'Red', 'NIR')
colors = ('green', 'red', 'orange')
linestyles = ('-', '--')
fig, ax = plt.subplots()
for metric, linestyle in zip(metrics, linestyles):
score = (df_green[metric], df_red[metric], df_nir[metric])
for i in range(3):
ax.plot(epoch + 1, score[i], label=labels[i], color=colors[i],
linestyle=linestyle)
ax.set_xlabel('Epoch', fontsize=12)
ax.set_ylabel(r'$R^2$', fontsize=12)
ax.tick_params(axis='both', which='both', labelsize=9)
ax.set_xticks(range(0, epoch.size + 1, 10))
ax.set_ylim([0.5, 0.9])
ax.grid(True, color=(0.95, 0.95, 0.95))
for i in range(2):
ax.plot([], [], color='black', linestyle=linestyles[i])
ax.grid(True)
lines = ax.get_lines()
color_legend = ax.legend(handles=[lines[i] for i in range(3)], labels=labels,
loc=4, bbox_to_anchor=(0.967, 0.0), fontsize=10, frameon=False)
line_legend = ax.legend(handles=[lines[i] for i in range(-2, 0)], labels=('Training', 'Validation'),
loc=4, bbox_to_anchor=(0.778, 0.0), fontsize=10, frameon=False)
ax.add_artist(color_legend)
ax.add_artist(line_legend)
ax.set_title('Fitted Curve', fontsize=14, fontweight='bold')
plt.savefig('r2.png', dpi=900)
plt.close()
| StarcoderdataPython |
3277810 | <reponame>dpalmasan/python-user-posts-microservice
import logging
import os
from pathlib import Path
from config import Config
from db.session import create_db_session
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
ch = logging.StreamHandler()
ch.setLevel(logging.INFO)
# create formatter
formatter = logging.Formatter(
"%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
# add formatter to ch
ch.setFormatter(formatter)
logger.addHandler(ch)
cwd = os.getcwd()
logger.info(f"Working directory {cwd}")
logger.info(f"Files {os.listdir(os.path.join(cwd, 'app_config'))}")
config_file = Path("app_config/config.yaml")
if not config_file.exists():
logger.info("No app_config/config.yaml was found ")
config_file = Path("app_config/config.default.yaml")
logger.info(f"Using {config_file}")
config = Config.load_from_file(config_file)
session = create_db_session(config)
| StarcoderdataPython |
1665508 | <reponame>hkvh/sonata-archives
#!/usr/bin/env python
"""
A module designed to render all lilypond files
"""
import glob
import logging
import os
from typing import List
from directories import DATA_DIR
from general_utils.lilypond_utils import render_lilypond_png_into_app_directory
log = logging.getLogger(__name__)
def render_all_lilypond(filenames_list: List[str] = None, remove_temp_dir: bool = True) -> None:
"""
This function recursively iterates over and all lilypond files in the 'data' folder and renders them all into pngs
that it moves to the app directory.
Will throw an error if any single lilypond file does not render properly.
If you provide a filename_list, will skip all files whose name (i.e. the filename itself, not the full path) are
not in the filename list (this is to make it easier to focus on only the specific files you care about)
:param filenames_list: an optional parameter that if provided will filter the files considered to those in the list
(no need to include the .ly since all files have it)
:param remove_temp_dir: an optional parameter that if False will not removes the root-level lilypond_temp directory
containing the extraneous files made by this rendering process. Defaults to True.
"""
# Get all lilypond files recursively under the data dir
data_file_full_path_list = glob.glob(os.path.join(DATA_DIR, '**/*.ly'), recursive=True)
log.info('#' * 40)
log.info('#' * 40)
log.info('#' * 40)
log.info("RENDERING ALL LILYPOND FILES")
log.info('#' * 40)
log.info('#' * 40)
log.info('#' * 40 + "\n")
for data_file_full_path in data_file_full_path_list:
path_name, file_name = os.path.split(data_file_full_path)
# If no list provided, run everything, else check if the filename (minus .ly since we know all files have it)
# is in the filename_list
if filenames_list is None or file_name.split('.ly')[0] in filenames_list:
log.info('\n' * 5)
log.info('#' * 150)
log.info('#' * 150)
log.info("PROCESSING: {}".format(file_name))
log.info('#' * 150)
log.info('#' * 150)
render_lilypond_png_into_app_directory(data_file_full_path, remove_temp_dir=remove_temp_dir)
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(name)s %(levelname)s: %(message)s')
filenames_list = [
'beethoven3_1',
]
# Comment this out to use the filename_list
filenames_list = None
render_all_lilypond(filenames_list=filenames_list, remove_temp_dir=True)
| StarcoderdataPython |
65710 | import os
import json
def evaluation(results, all_res, bug_data, storage_path):
map_value = 0
map_value_all = 0
ap_value = {}
count = 0
for bug_id, bug_cont in bug_data.items():
temp1 = 0
temp2 = 0
ap_tmp = 0
all_ap_tmp = 0
truth_num = 0
file_paths = bug_cont["fixed_files"]
if not len(file_paths) == 0:
for file_path in file_paths:
for i in range(all_res.shape[0]):
if all_res[i]["bug"][0] == bug_id.encode():
for j in range(all_res.shape[1]):
if all_res[i][j]["file"] == file_path.encode():
truth_num += 1
            # count this bug once if at least one of its fixed files was retrieved
            if truth_num > 0:
                count += 1
if not truth_num == 0:
ap_value[bug_id] = {}
for i in range(results.shape[0]):
if results[i]["bug"][0] == bug_id.encode():
for j in range(results.shape[1]):
if results[i][j]["file"].decode() in file_paths:
temp1 += 1
ap_tmp += temp1/(j+1.0)
for i in range(all_res.shape[0]):
if all_res[i]["bug"][0] == bug_id.encode():
for j in range(all_res.shape[1]):
if all_res[i][j]["file"].decode() in file_paths:
temp2 += 1
all_ap_tmp += temp2/(j+1.0)
ap_value[bug_id]["AP@top10"] = ap_tmp / len(file_paths)
ap_value[bug_id]["AP@all"] = all_ap_tmp / len(file_paths)
past_ap_value = {}
if os.path.exists(os.path.join(storage_path, "evaluation.json")):
with open(os.path.join(storage_path, "evaluation.json"), "r") as f:
past_ap_value = json.load(f)
past_ap_value.update(ap_value)
with open(os.path.join(storage_path, "evaluation.json"), "w") as f:
json.dump(past_ap_value, f)
if not count == 0:
for ap in ap_value.values():
map_value_all += ap["AP@all"]
map_value += ap["AP@top10"]
map_value /= count
map_value_all /= count
else:
map_value = 0
map_value_all = 0
print("The MAP @ top 10 is", map_value)
print("The MAP @ all results is", map_value_all) | StarcoderdataPython |
100086 | <reponame>naamara/blink
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import accounts.models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='profile',
name='id_expiry',
field=models.CharField(default=1, max_length=30, blank=True),
preserve_default=False,
),
migrations.AddField(
model_name='profile',
name='id_scan_ref',
field=models.CharField(default=1, max_length=50, blank=True),
preserve_default=False,
),
migrations.AddField(
model_name='profile',
name='id_scanned',
field=models.BooleanField(default=False),
preserve_default=True,
),
migrations.AddField(
model_name='profile',
name='id_type',
field=models.CharField(default=1, max_length=30, blank=True),
preserve_default=False,
),
migrations.AddField(
model_name='profile',
name='id_verify_ref',
field=models.CharField(default=1, max_length=50, blank=True),
preserve_default=False,
),
migrations.AddField(
model_name='profile',
name='send_country_code',
field=models.CharField(default=b'256', max_length=10),
preserve_default=True,
),
migrations.AddField(
model_name='profile',
name='verification_attempts',
field=models.IntegerField(default=0),
preserve_default=True,
),
]
| StarcoderdataPython |
3200301 | <reponame>hsmtknj/programming-contest
# a = ['a', 't', 'c', 'o', 'd', 'e', 'r']
# t = [chr(i) for i in range(97,97+26)]
# tt = [i for i in range(97, 97+26)]
# print(tt)
# print(tt[0:3] + [100])
for i in reversed(range(10)):
print(i)
| StarcoderdataPython |
71262 | <filename>sdk/apis/market_service.py
# -*- coding: utf-8 -*-
# Service marketplace service
class MarketService:
__client = None
def __init__(self, client):
self.__client = client
def sync_market_messages(self, start, end, offset, limit):
"""
        Sync service-marketplace messages within a given time range
        :param start: start time
        :param end: end time
        :param offset: message offset
        :param limit: number of messages to query
"""
return self.__client.call("eleme.market.syncMarketMessages", {"start": start, "end": end, "offset": offset, "limit": limit})
def create_order(self, request):
"""
        Create an in-app-purchase order
        :param request: order-creation request payload
"""
return self.__client.call("eleme.market.createOrder", {"request": request})
def query_order(self, order_no):
"""
        Query a service-marketplace order
        :param orderNo: service-marketplace order number
"""
return self.__client.call("eleme.market.queryOrder", {"orderNo": order_no})
def confirm_order(self, order_no):
"""
        Confirm a service-marketplace order
        :param orderNo: service-marketplace order number
"""
return self.__client.call("eleme.market.confirmOrder", {"orderNo": order_no})
| StarcoderdataPython |
110107 | #!/usr/bin/env python3
import numpy as np
M = np.array(
(
[1, -1, 0, 0, 0, 0, 0, 0],
[0.4, 0.4, 0, -1, 0, 0, 0, 0],
[0.6, 0.6, -1, 0, 0, 0, 0, 0],
[0, 0, 0, -0.75, 0, 1, 0, 0],
[-1, 0, 0, 0, 1, 1, 0, 0],
[0, -1, 0, 0, 0, 0, 1, 1],
[0, 0, 0, -1, 0, 1, 0, 1],
[1, 1, 0, 0, 0, 0, 0, 0],
)
)
M_inv = np.linalg.inv(M)
V = np.array((0, 0, 0, 0, 0, 0, 0, 100))
R = np.matmul(M_inv, V)
# print(R)
print("réponse:", R[6])
| StarcoderdataPython |
8020 | <reponame>cbeall123/E3SM
"""
Interface to the env_build.xml file. This class inherits from EnvBase
"""
from CIME.XML.standard_module_setup import *
from CIME.XML.env_base import EnvBase
logger = logging.getLogger(__name__)
class EnvBuild(EnvBase):
# pylint: disable=unused-argument
def __init__(self, case_root=None, infile="env_build.xml",components=None):
"""
initialize an object interface to file env_build.xml in the case directory
"""
schema = os.path.join(get_cime_root(), "config", "xml_schemas", "env_entry_id.xsd")
EnvBase.__init__(self, case_root, infile, schema=schema)
| StarcoderdataPython |
3270624 | import matplotlib.pyplot as plt
def plot(x, y, ind):
'''Plots the original data with the peaks that were identified
Parameters
----------
x : array-like
Data on the x-axis
y : array-like
Data on the y-axis
ind : array-like
Indexes of the identified peaks
'''
plt.plot(x, y, '--')
plt.plot(x[ind], y[ind], 'r+', ms=5, mew=2,
label='{} peaks'.format(len(ind)))
plt.legend()
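
if __name__ == '__main__':
    # Illustrative usage (an added sketch; assumes numpy and scipy are
    # available and that peak indexes come from scipy.signal.find_peaks):
    import numpy as np
    from scipy.signal import find_peaks

    x = np.linspace(0, 10, 200)
    y = np.sin(x)
    ind, _ = find_peaks(y)
    plot(x, y, ind)
    plt.show()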
| StarcoderdataPython |
3306505 | <gh_stars>10-100
import unittest
import torch
from allennlp.common.params import Params
from torch.jit import Error
from zsl_kg.knowledge_graph.kg import KG
class TestKG(unittest.TestCase):
def setUp(
self,
):
"""creates an instance of KG with sample data."""
params = Params({"embedding_path": ""})
nodes = [
"cat",
"dog",
"elephant",
]
edges = [
(0, 1),
(0, 2),
]
features = torch.randn((3, 10))
self.kg_obj = KG(
nodes,
features,
edges,
params=params,
)
# self.kg_empty_param = KG(
# nodes,
# edges,
# )
self.dir_path = "tests/save_data/kg/"
def test_to(
self,
):
"""test the .to(device) function"""
self.assertEqual(
self.kg_obj.to(torch.device("cpu")),
True,
)
def test_cuda(
self,
):
"""
test the .cuda() function
"""
# Cannot be tested on a cpu machine
pass
def test_save_to_disk(
self,
):
"""test the .save_to_disk function"""
self.assertEqual(
self.kg_obj.save_to_disk(self.dir_path),
self.dir_path,
)
def test_nodes(
self,
):
"""test the .nodes property"""
self.assertEqual(
self.kg_obj.nodes,
[
"cat",
"dog",
"elephant",
],
)
def test_get_node_ids(
self,
):
"""test the .edges property"""
self.assertEqual(
self.kg_obj.get_node_ids(
[
"cat",
"dog",
]
),
[0, 1],
)
with self.assertRaises(Exception):
self.kg_obj.get_node_ids(
[
"cat",
"seal",
]
)
def test_edges(
self,
):
"""test the .edges property"""
self.assertEqual(
self.kg_obj.edges,
[
(
0,
1,
),
(
0,
2,
),
],
)
def test_load_from_disk(self):
kg = self.kg_obj.load_from_disk(
"tests/test_data/subgraphs/snips/train_graph/"
)
self.assertNotEqual(kg.adj_lists, None)
self.assertNotEqual(kg.rw_adj_lists, None)
| StarcoderdataPython |
1729543 | <filename>userbot/plugins/goodbyesahyri2.py
from telethon import events
import asyncio
import os
import sys
import random
from userbot.utils import admin_cmd
@borg.on(admin_cmd(pattern="byeq ?(.*)"))
async def _(event):
if event.fwd_from:
return
await event.edit("@veryhelpful making goodbye quote")
await asyncio.sleep(1)
    g=(random.randrange(1,19))  # pick 1..18 so every quote, including g==18, is reachable
if g==1:
await event.edit(" जिंदगी में तन्हा रहना तो मुमकिन नहीं,\nतेरे साथ चलना दुनिया को गवारा भी नहीं,\nइसलिए, तेरा-मेरा दूर जाना ही बेहतर है।")
if g==2:
await event.edit("कुछ दिन साथ चलने वाले,\nथोड़ा और साथ चलने की तमन्ना थी,\nमजबूरी है कहना ही पड़ेगा अलविदा।")#creadit to kraken,sawan
if g==3:
await event.edit("न कहा न कुछ सुना, बस चुपके से चल दिए,\nमोहब्बत के उन्होंने सारे मायने बदल दिए,\अब तो तन्हा गलियों में गुजरेगी हर शाम,\nमर भी गए, तो भी नहीं भूलेंगे उनका नाम।")
if g==4:
await event.edit("पास थे, तो रोने की वजह बनते थे,\nदूर जाकर शायद मुस्कुराना सीख लें आप।")
if g==5:
await event.edit("दोबारा मिलें जिंदगी में यह दुआ करेंगे,\nदूर रहकर भी नजदीक होने की चाह करेंगे।")#creadit to kraken,sawan
if g==6:
await event.edit("माफ करना मुझे दूर तो जाना पड़ेगा,\nपास होकर भी तुम्हे अब भूल जाना पड़ेगा।")#creadit to kraken,sawan
if g==7:
await event.edit("वो शाम सुहानी थी जो गुजरी तेरे साथ,\nबिन तेरे अब कैसे कटेगी सारी रात,\nसमझ लो तुम भी यह मजबूरी है दिल की,\nनहीं गए, तो कैसे कल फिर होगी मुलाकात।")#creadit to kraken,sawan
if g==8:
        await event.edit("तेरे साथ मुस्कुराना और ठोकरों से संभलना सीखा है,\nआता नहीं अलविदा कहना बस रोकर जताना सीखा है।")
if g==9:
await event.edit("यार तेरी दोस्ती को सलाम है,\nअलविदा कहकर भी हंसा दिया,\nयह बस तेरी यारी का कमाल है।")#creadit to kraken,sawan
if g==10:
await event.edit("ताउम्र तेरे साथ बीती रातों को फिर याद करेंगे,\nकह सकें अलविदा तुझसे इसलिए मेरे यार,\nआंसू का एक भी कतरा बहाए बिना बात करेंगे।")#creadit to kraken,sawan
if g==11:
await event.edit("रूठा जमाना जिंदगी भी रूठी,\nतभी तो तेरे-मेरे बीच ये दूरी छूटी,\nसमझ लेना तुम है ये मेरी मजबूरी,\nवरना न आने देता तेरे-मेरे बीच यह दूरी।")#creadit to kraken,sawan
if g==12:
await event.edit("करीब आते-आते तू कुछ दूर सा हो गया है,\nशाम को अलविदा कह तू कहीं गुम सा गया है,\nचाहता हूं मैं करीब होने का एहसास तेरे पर,\nखुशी के खातिर तेरी तुझे अलविदा कह गया हूं।")
if g==13:
await event.edit("खुश हूं फिर भी ये आंखे नम हैं,\nन चाहते हुए भी दूर जाने का गम है।")
if g==14:
await event.edit("दूर जाने की खबर सुनकर ये धड़कने रुक जाती हैं,\nअलविदा कहने के वक्त यार मेरी आंखें भर आती हैं।")#creadit to kraken,sawan
if g==15:
await event.edit(" अब हर लम्हा तुम्हारे बिना सूना सा लगेगा,\nअलविदा कहकर तुम्हारी यादों में जीना पड़ेगा।")
if g==16:
await event.edit("अब हलचल है दिल में नई उम्मीद की तलाश के लिए,\nकहना पड़ेगा अलविदा नई मंजिल की तलाश के लिए")
if g==17:
await event.edit(" जब तुम जाते हो, तो गुलिस्तां के सभी फूल झड़ जाते हैं,\nसंभलकर कहो अलविदा जाते-जाते पेड़ों से क्यों टकरा जाते हो।")
if g==18:
await event.edit("14. तिरछी निगाहों से जो देखा उन्होंने,\nतो हम मदहोश हो चले,\nजब पता चला कि वो अलविदा कहने आए,\nतो हम बेहोश हो चले।")
| StarcoderdataPython |
1754574 | """beerup_django URL Configuration
"""
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('admin/', admin.site.urls),
path('api/v1/', include('djoser.urls')),
path('api/v1/', include('djoser.urls.authtoken')),
path('api/v1/', include('product.urls')),
path('api/v1/', include('category.urls')),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| StarcoderdataPython |
1624249 | <gh_stars>0
"""Additional NeuroDriver Components for ANTcircuits"""
from .NoisyConnorStevens import NoisyConnorStevens
from .OTP import OTP
| StarcoderdataPython |
1688228 | <reponame>restlessankyyy/Python<gh_stars>0
import matplotlib.pyplot as plt

x = [101, 102, 103]
y = [5500, 6000, 4000]
x1 = [101, 102, 103]
y1 = [5500, 4800, 1800]
plt.plot(x, y, label="Salary for 2015", color='red')
plt.plot(x1, y1, label="Salary for 2016")
plt.xlabel("Employee ID")
plt.ylabel("Salary in $s")
plt.title("Employee Salary")
plt.legend()
plt.show()
| StarcoderdataPython |
1609041 | <gh_stars>0
# Encoding: UTF-8
# --
# Copyright (c) 2008-2021 Net-ng.
# All rights reserved.
#
# This software is licensed under the BSD License, as described in
# the file LICENSE.txt, which you should have received as part of
# this distribution.
# --
from nagare.renderers import rml
def sample(output):
r = rml.Renderer()
with r.document(invariant=1):
with r.template(pageSize='letter', leftMargin=72, showBoundary=1):
with r.pageTemplate(id='main', pageSize='letter portrait'):
with r.pageGraphics:
r << r.setFont(name='Helvetica-BoldOblique', size=18)
r << r.drawRightString('RML2PDF Test Suite', x=523, y=800)
with r.textAnnotation:
r << r.param('0,0,1,1', name='Rect')
r << r.param(3, name='F')
r << r.param(6, name='escape')
r << '''X::PDF
PX(S)
MT(PINK)
'''
r << r.frame(id='first', x1='1in', y1='1in', width='6.27in', height='9.69in')
with r.stylesheet:
with r.initialize:
r << r.alias(id='style.normal', value='style.Normal')
r << r.paraStyle(name='h1', fontName='Helvetica-BoldOblique', fontSize=32, leading=36)
r << r.paraStyle(name='normal', fontName='Helvetica', fontSize=10, leading=12)
r << r.paraStyle(name='spaced', fontName='Helvetica', fontSize=10, leading=12, spaceBefore=12, spaceAfter=12)
with r.story:
with r.para(style='normal'):
r << u'Il était là. Hello World. This is a normal paragraph. Blah '
r << r.font('IPO ', color='red')
r << 'blah blah blah blah growth forecast blah '
r << 'blah blah forecast blah.Blah blah blah blah blah blah blah blah blah blah blah profit blah blah blah blah blah '
r << 'blah blah blah blah blah IPO.Blah blah blah blah blah blah blah reengineering blah growth blah blah blah '
r << 'proactive direction strategic blah blah blah forward-thinking blah.Blah blah doubletalk blah blah blah blah '
r << 'blah profit blah blah growth blah blah blah blah blah profit.Blah blah blah blah venture capital blah blah blah '
r << 'blah blah forward-thinking blah.'
with r.para(style='normal'):
r << 'This is another normal paragraph. Blah IPO blah blah blah blah growth forecast blah '
r << 'blah blah forecast blah.Blah blah blah blah blah blah blah blah blah blah blah profit blah blah blah blah blah '
r << 'blah blah blah blah blah IPO.Blah blah blah blah blah blah blah reengineering blah growth blah blah blah '
r << 'proactive direction strategic blah blah blah forward-thinking blah.Blah blah doubletalk blah blah blah blah '
r << 'blah profit blah blah growth blah blah blah blah blah profit.Blah blah blah blah venture capital blah blah blah '
r << 'blah blah forward-thinking blah.'
r << r.para('I should NOT have a tiny leading space in front of me!', style='normal')
r << r.para('This is spaced. There should be 12 points before and after.', style='spaced')
with r.para(style='normal'):
r << 'Hello World. This is a normal paragraph. Blah IPO blah blah blah blah growth forecast blah '
r << 'blah blah forecast blah.Blah blah blah blah blah blah blah blah blah blah blah profit blah blah blah blah blah '
r << 'blah blah blah blah blah IPO.Blah blah blah blah blah blah blah reengineering blah growth blah blah blah '
r << 'proactive direction strategic blah blah blah forward-thinking blah.Blah blah doubletalk blah blah blah blah '
r << 'blah profit blah blah growth blah blah blah blah blah profit.Blah blah blah blah venture capital blah blah blah '
r << 'blah blah forward-thinking blah.'
return r.root.topdffile(output)
if __name__ == '__main__':
sample('/tmp/sample.pdf')
| StarcoderdataPython |
3287916 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2017-2018 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Top-level package for chartify."""
from chartify._core.chart import Chart
from chartify._core.colors import color_palettes
from chartify._core.options import options
from chartify import examples
__author__ = """<NAME>"""
__email__ = '<EMAIL>'
__version__ = '2.3.5'
_IPYTHON_INSTANCE = False
def set_display_settings():
"""Enable notebook output settings if running in a jupyter notebook"""
from IPython.core.getipython import get_ipython
from ipykernel.zmqshell import ZMQInteractiveShell
from bokeh.io import output_notebook
from bokeh.resources import Resources
ipython_instance = get_ipython()
if ipython_instance is not None:
if isinstance(ipython_instance, ZMQInteractiveShell):
_IPYTHON_INSTANCE = True
# Inline resources uses bokeh.js from the local version.
# This enables offline usage.
output_notebook(Resources('inline'), hide_banner=True)
set_display_settings()
del set_display_settings
| StarcoderdataPython |
115577 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This module implements Deep QL with two networks: Q-network and target-network
the DQL uses an MLP Q-network
it is retrained only after 'episodes' iterations
the training uses replay memory
"""
from copy import deepcopy
import numpy as np
import logging
from DQL.deepQL import DeepQL
from DQL.clone import clone_model
class DQL(DeepQL):
"""
ref. https://keon.io/deep-q-learning/
https://github.com/simoninithomas/deep_q_learning/blob/master/DQL%20Cartpole.ipynb
https://medium.com/@gtnjuvin/my-journey-into-deep-q-learning-with-keras-and-gym-3e779cc12762
"""
def __init__(self,
env, # the environment object
model,
memory,
timesteps=1,
epsilon=0.1, # exploration rate
epsilon_min=0.1,
epsilon_decay=0.995,
learning_rate=0.001,
gamma=0.95,
batch_size=32, # size of the batch in the replay
episodes=30, # number of iterations before replay occurs
epochs=1,
log_level=logging.DEBUG,
interaction_interval=30, # wait 30 second, before another cycle
**kwargs):
# call super
super().__init__(env=env,
model=model,
memory=memory,
timesteps=timesteps,
epsilon=epsilon,
epsilon_min=epsilon_min,
epsilon_decay=epsilon_decay,
learning_rate=learning_rate,
gamma=gamma,
batch_size=batch_size,
episodes=episodes,
epochs=epochs,
log_level=log_level,
interaction_interval=interaction_interval,
)
# create a target model based on the self.model
self.target = self.model
self.copy_to_target()
def copy_to_target(self):
self.log.debug("copy model to target")
try:
model_copy = deepcopy(self.model)
except TypeError:
model_copy = clone_model(self.model)
self.target = model_copy
def copy_weights(self):
weights = self.model.get_weights()
self.target.set_weights(weights)
def save_model(self, model_filename='model.json'):
""" save the model and target networks to a json file and the weights to a h5 file
overwritten method to save both networks
@param model_filename: the filename with '.json' extension
"""
super().save_model(model_filename=model_filename) # this saves self.model
self.target.save_weights(model_filename.replace('.json', '-target.h5'))
def get_q_max(self, sprime):
""" the Q_max is calculated using the target network
@param sprime: the sequence of next states (s')
@return: the Qmax value used in the TD-error, defined as the greedy move
Q_max = max Q_target(s', a')
a'
"""
q_prediction = self.target.predict(sprime)
q_max = np.amax(q_prediction, axis=1)
self.log.debug("s': {} Q max: {}".format(sprime, q_max))
return q_max
def replay(self):
"""
produces the replay, that trains the model's parameters
and if C replays occur then update target's parameters
"""
# call the parent replay
if super().replay() and self.replay_counter == self.episodes:
# after C replays, the theta from model is copied to target
self.copy_to_target()
self.log.info("Updated target network in #{}".format(self.runs))
self.replay_counter = 0 # zeros the count
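
# Illustrative construction (an added sketch; env, model and memory are
# assumptions that must satisfy the interfaces DeepQL expects):
# agent = DQL(env=my_env, model=my_q_network, memory=my_replay_memory)
# agent.replay()  # trains the model and, every `episodes` replays, syncs the target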
| StarcoderdataPython |
1661649 | from typing import Any, Dict, NoReturn
from chaosplt_auth.storage.interface import BaseAuthStorage
__all__ = ["MyAuthStorage"]
class MyAuthStorage(BaseAuthStorage):
def __init__(self, config: Dict[str, Any]):
self.some_flag = True
def release(self) -> NoReturn:
self.some_flag = False
| StarcoderdataPython |
42372 | import json
def filePath():
""" ask for file path"""
filepath = hou.ui.selectFile()
return filepath
def getData(filename):
    # eval-parses a JSON-like file, mapping JS-style booleans to Python's;
    # json.load would be the safer choice for strict JSON input
    return eval(open(filename).read(), {"false": False, "true":True})
temp_data = getData(filePath())
for i in range(len(temp_data)):
#print(dict[i])
data = temp_data[i]
# Create Root Null
sceneroot = hou.node('/obj')
globalnull = sceneroot.createNode('null', 'size_locator')
globalnull.setParms({'scale': 1})
# Create RS_Light
light = hou.node("/obj").createNode('rslight', 'Key')
light.setInput(0, globalnull)
hou.node("obj").layoutChildren()
light.parmTuple('t').set(tuple(data["translate"][0]))
light.parmTuple('r').set(tuple(data["rotate"][0]))
light.parm('RSL_intensityMultiplier').set(data["intensity"])
light.parm('Light1_exposure').set(data["exposure"])
light.parm('RSL_affectDiffuse').set(data["affectsDiffuse"])
light.parm('RSL_bidirectional').set(data["areaBidirectional"])
light.parm('RSL_visible').set(data["areaVisibleInRender"])
light.parm('RSL_volumeScale').set(data["volumeRayContributionScale"])
light.parm('RSL_areaShape').set(data["areaShape"])
light.setGenericFlag(hou.nodeFlag.DisplayComment, True)
light.setComment(data["name"])
#attributes = ['scale', 'rotate', 'translate', 'intensity', 'color', 'affectsDiffuse', 'affectsSpecular','areaVisibleInRender', 'areaBidirectional', 'volumeRayContributionScale',
# 'exposure', 'areaShape','spotConeAngle', 'areaSamples','areaSpread','on', 'colorR', 'colorG','colorB','temperature','colorMode', 'intensity',
# 'exposure', 'unitsType','lumensperwatt','decayType','falloffStart', 'falloffStop', 'shadow', 'shadowTransparency',
# 'SAMPLINGOVERRIDES_shadowSamplesScale','SAMPLINGOVERRIDES_numShadowSamples', 'spotConeFalloffAngle',
# 'spotConeFalloffCurve','affectedByRefraction', 'emitGiPhotons', 'emitCausticPhotons','normalize',
# 'photonIntensityMultiplierGI','photonIntensityMultiplierCaustics','diffuseRayContributionScale',
# 'glossyRayContributionScale','singleScatteringRayContributionScale','multipleScatteringRayContributionScale',
# 'indirectRayContributionScale', 'indirectMaxTraceDepth', 'volumeRayContributionScale','volumeNumSamples','dropoff']
| StarcoderdataPython |
3317423 | <filename>phoenix/supervisor/views/supervisor.py
from pyramid.view import view_config, view_defaults
from pyramid.httpexceptions import HTTPFound
from phoenix.views import MyView
from phoenix.grid import CustomGrid
@view_defaults(permission='admin', layout='default')
class Supervisor(MyView):
def __init__(self, request):
super(Supervisor, self).__init__(request, name='supervisor', title='Supervisor')
self.settings = self.request.registry.settings
import xmlrpclib
# TODO: dont use hardcoded url
self.server = xmlrpclib.Server(self.settings.get('supervisor.url'))
@view_config(route_name="supervisor_process")
def supervisor_process(self):
action = self.request.matchdict.get('action')
name = self.request.matchdict.get('name')
if action == 'start':
self.server.supervisor.startProcess(name)
self.session.flash("Service {0} started.".format(name), queue="success")
elif action == 'stop':
self.server.supervisor.stopProcess(name)
self.session.flash("Service {0} stopped.".format(name), queue="danger")
elif action == 'restart':
self.server.supervisor.stopProcess(name)
self.server.supervisor.startProcess(name)
self.session.flash("Service {0} restarted.".format(name), queue="success")
elif action == 'clear':
self.server.supervisor.clearProcessLogs(name)
self.session.flash("Logs of service {0} cleared.".format(name), queue="success")
return HTTPFound(location=self.request.route_path(self.name))
@view_config(route_name="supervisor", renderer='../templates/supervisor/supervisor.pt')
def view(self):
# TODO: show only wps processes
grid = Grid(self.request, self.server.supervisor.getAllProcessInfo(), ['state', 'description', 'name', ''])
return dict(grid=grid)
class Grid(CustomGrid):
def __init__(self, request, *args, **kwargs):
super(Grid, self).__init__(request, *args, **kwargs)
self.column_formats['state'] = self.state_td
self.column_formats[''] = self.buttongroup_td
self.exclude_ordering = self.columns
def state_td(self, col_num, i, item):
return self.render_td(
renderer="supervisor_state_td.mako",
state=item.get('state'),
statename=item.get('statename'))
def buttongroup_td(self, col_num, i, item):
from phoenix.utils import ActionButton
buttons = []
if item.get('state') == 20:
buttons.append(ActionButton(
'restart', css_class="btn btn-success", icon="fa fa-refresh",
href=self.request.route_path('supervisor_process', action='restart', name=item.get('name'))))
buttons.append(ActionButton(
'stop', css_class="btn btn-danger", icon="fa fa-stop",
href=self.request.route_path('supervisor_process', action='stop', name=item.get('name'))))
else:
buttons.append(ActionButton(
'start', icon="fa fa-play",
href=self.request.route_path('supervisor_process', action='start', name=item.get('name'))))
# TODO: enable clear button again
buttons.append(ActionButton(
'tail', icon="fa fa-align-left",
href=self.request.route_path('supervisor_log', name=item.get('name'), offset=0)))
return self.render_buttongroup_td(buttons=buttons)
| StarcoderdataPython |
4808724 | <reponame>roeap/flight-fusion
import flight_fusion
def test_import_flight_fusion():
assert flight_fusion.__name__ == "flight_fusion"
def test_flight_fusion_python_version():
assert flight_fusion.__version__ > "0.0.0"
| StarcoderdataPython |
3213728 | # -*- coding: UTF-8 -*-
from nonebot.default_config import *
#Add super administrators by QQ number (integer), e.g. SUPERUSERS.add(12345678)
SUPERUSERS.add(12345)
#Host and port nonebot listens on
HOST = '127.0.0.1'
PORT = 9100
#SECRET = ''
#ACCESS_TOKEN = ''
#API_ROOT = 'http://127.0.0.1:5700'
#nonebot debug switch
DEBUG = False
COMMAND_START = {'!','!'}
NICKNAME = {'bot', 'bot哥', '工具人', '最菜群友'}
SESSION_CANCEL_EXPRESSION = ("好 我爬","好 我现在就爬",)
#SHORT_MESSAGE_MAX_LENGTH = 500
#Default bot QQ: the bot used for pushes by default; error messages are sent through this bot. (If this QQ does not exist, the resulting errors will affect program operation.)
default_bot_QQ : int = 12345
#QQ number that receives bot error messages; nothing is pushed when empty
feedback_push_switch : bool = True #push feedback messages
error_push_switch : bool = True #push error messages
bot_waring_printID : int = 12345
#Voice-file path mapping for sending voice messages
music_path = r"file:///E:\CQ\python-okayu_two\cache" | StarcoderdataPython |
1779653 | <gh_stars>1-10
import numpy
from datetime import datetime
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
plt.style.use('classic')
from matplotlib.ticker import FormatStrFormatter
from visuallib import candlestick2_ohlc
def volume_analysis(client,market,num_hours):
candles=numpy.array(client.get_historical_klines(market,'3m',str(num_hours+1)+' hours ago'),dtype='float')
closes=[candle[4] for candle in candles]
volumes=[candle[5] for candle in candles]
close_min=numpy.amin(closes)
close_max=numpy.amax(closes)
close_step=(close_max-close_min)/12
close_range=[[close_min+(i-1)*close_step,close_min+i*close_step] for i in numpy.arange(1,13)]
unit_volumes=[]
unit_closes=[]
    for i in numpy.arange(0,12):  # cover all 12 price buckets
unit_closes.append(0.5*(close_range[i][0]+close_range[i][1]))
unit_volume=sum([volumes[j] for j in numpy.arange(0,len(closes)) if close_range[i][0]<=closes[j]<=close_range[i][1]])
unit_volumes.append(unit_volume)
index_max=unit_volumes.index(max(unit_volumes))
return unit_closes,unit_volumes,index_max
def volume_profile(client,market):
coinInfo=client.get_symbol_info(market)['filters']
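    # assumes filters[0] is Binance's PRICE_FILTER; tickSize is the minimum price increment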
priceUnit=float(coinInfo[0]['tickSize'])
units=['5m','15m','30m','1h','4h']
intervals=['1 day ago','1 week ago','1 month ago','3 months ago','1 year ago']
infos=['1D','1W','1M','3M','1Y']
msg='Vol. profile:'
for k in range(len(units)):
try:
candles=numpy.array(client.get_historical_klines(market,units[k],intervals[k]),dtype='float')
closes=[candle[4] for candle in candles]
volumes=[candle[5] for candle in candles]
close_min=numpy.amin(closes)
close_max=numpy.amax(closes)
close_step=(close_max-close_min)/24
close_range=[[close_min+(i-1)*close_step,close_min+i*close_step] for i in numpy.arange(1,25)]
unit_volumes=[]
unit_closes=[]
            for i in numpy.arange(0,24):  # cover all 24 price buckets
unit_closes.append(0.5*(close_range[i][0]+close_range[i][1]))
unit_volume=sum([volumes[j] for j in numpy.arange(0,len(closes)) if close_range[i][0]<=closes[j]<=close_range[i][1]])
unit_volumes.append(unit_volume)
index_max=unit_volumes.index(max(unit_volumes))
msg=msg+" "+infos[k]+": "+('%.8f' % float(int(numpy.floor(unit_closes[index_max]/priceUnit))*priceUnit)).rstrip('0').rstrip('.')
except Exception:
pass
return msg
def getOrderBook(client,market):
orders=client.get_order_book(symbol=market)
bids=orders['bids']
asks=orders['asks']
bid_prices=[float(bid[0]) for bid in bids]
bid_qties=[float(bid[1]) for bid in bids]
ask_prices=[float(ask[0]) for ask in asks]
ask_qties=[float(ask[1]) for ask in asks]
i1=bid_qties.index(max(bid_qties))
i2=ask_qties.index(max(ask_qties))
return bid_prices,ask_prices,bid_qties,ask_qties,i1,i2
def trade_analysis_h1(client,market,numTrades):
toId=client.get_historical_trades(symbol=market,limit=1)[0]['id']
listId=numpy.arange(toId-numTrades+1,toId-10,500)
trades=[]
for fromId in listId:
trades=trades+client.get_historical_trades(symbol=market,fromId=str(fromId))
trade_orders=numpy.arange(0,numTrades)
trade_days=[datetime.fromtimestamp(int(trade['time']/1000)).day for trade in trades]
indexes=numpy.unique(trade_days,return_index=True)[1]
day_counters=[trade_days[index] for index in sorted(indexes)]
total_coin_buy=[]
total_coin_sell=[]
for day_counter in day_counters:
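        # Binance semantics: isBuyerMaker == False means the buyer was the
        # taker (an aggressive buy); True means an aggressive sell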
day_buy_orders=[i for i in trade_orders if trades[i]['isBuyerMaker']==False and datetime.fromtimestamp(int(trades[i]['time']/1000)).day==day_counter]
day_sell_orders=[i for i in trade_orders if trades[i]['isBuyerMaker']==True and datetime.fromtimestamp(int(trades[i]['time']/1000)).day==day_counter]
hour_counters=numpy.unique([datetime.fromtimestamp(int(trade['time']/1000)).hour for trade in trades if datetime.fromtimestamp(int(trade['time']/1000)).day==day_counter])
for hour_counter in hour_counters:
total_coin_buy.append(sum([float(trades[i]['qty']) for i in day_buy_orders if datetime.fromtimestamp(int(trades[i]['time']/1000)).hour==hour_counter]))
total_coin_sell.append(sum([float(trades[i]['qty']) for i in day_sell_orders if datetime.fromtimestamp(int(trades[i]['time']/1000)).hour==hour_counter]))
total_coin=[x+y for x,y in zip(total_coin_buy,total_coin_sell)]
return total_coin_buy,total_coin_sell,total_coin
def trade_msg_h1(client,market,numTrades):
total_coin_buy,total_coin_sell,total_coin=trade_analysis_h1(client,market,numTrades)
unit_closes,unit_volumes,index_max=volume_analysis(client,market,len(total_coin))
f,(ax1,ax2)=plt.subplots(2,1,gridspec_kw={'height_ratios':[1,1]})
f.set_size_inches(20,15)
ax1p=ax1.twiny()
ax1p.barh(unit_closes,unit_volumes,color='gray',edgecolor='w',height=unit_closes[1]-unit_closes[0],align='center',alpha=0.35)
ax1p.set_xticks([])
for tic in ax1p.xaxis.get_major_ticks():
tic.tick1On = tic.tick2On = False
tic.label1On = tic.label2On = False
candles=numpy.array(client.get_historical_klines(market,'1h','1 month ago'),dtype='float')[-len(total_coin):]
candlestick2_ohlc(ax1,candles[:,1],candles[:,2],candles[:,3],candles[:,4],width=0.6,alpha=1)
ax1.yaxis.grid(True)
for tic in ax1.xaxis.get_major_ticks():
tic.tick1On = tic.tick2On = False
tic.label1On = tic.label2On = False
ax1.set_xticks([])
ax1.set_xlim(.5,len(total_coin))
ax1.set_ylim(numpy.amin(candles[:,3]),numpy.amax(candles[:,2]))
ax1.yaxis.set_major_formatter(FormatStrFormatter('%.8f'))
ax1.get_yaxis().set_label_coords(-0.075,0.5)
ax1.set_ylabel("Volume Profile Visible Range",fontsize=20)
ax1.set_title('Exchange: Binance Market: '+market+' Time Frame: 1 [hour]'+' Time Length: '+str(len(candles[:,1]))+' [hours]'+'\nTotal Trades: '+"{:,}".format(numTrades)+' Total Buy Volume: '+"{:,}".format((sum(total_coin_buy)))+' Total Sell Volume: '+"{:,}".format((sum(total_coin_sell))),fontsize=25,y=1.03,loc='left')
candlestick2_ohlc(ax2,numpy.zeros(len(total_coin)),total_coin,numpy.zeros(len(total_coin)),total_coin,width=0.6,alpha=.35)
candlestick2_ohlc(ax2,numpy.zeros(len(total_coin)),total_coin_buy,numpy.zeros(len(total_coin)),total_coin_buy,width=0.29,alpha=1,shift=-0.15)
candlestick2_ohlc(ax2,total_coin_sell,total_coin_sell,numpy.zeros(len(total_coin)),numpy.zeros(len(total_coin)),width=0.29,alpha=1,shift=+0.15)
ax2.yaxis.grid(True)
for tic in ax2.xaxis.get_major_ticks():
tic.tick1On = tic.tick2On = False
tic.label1On = tic.label2On = False
ax2.set_xticks([])
ax2.set_xlim(.5,len(total_coin))
ax2.get_yaxis().set_label_coords(-0.075,0.5)
ax2.set_ylabel("Buy versus Sell Volume",fontsize=20)
ax2.yaxis.set_major_formatter(FormatStrFormatter('%.2f'))
f.tight_layout()
plt.savefig(market+'.png',bbox_inches='tight')
def trade_analysis_m30(client,market,numTrades):
toId=client.get_historical_trades(symbol=market,limit=1)[0]['id']
listId=numpy.arange(toId-numTrades+1,toId-10,500)
trades=[]
for fromId in listId:
trades=trades+client.get_historical_trades(symbol=market,fromId=str(fromId))
trade_orders=numpy.arange(0,numTrades)
trade_days=[datetime.fromtimestamp(int(trade['time']/1000)).day for trade in trades]
indexes=numpy.unique(trade_days,return_index=True)[1]
day_counters=[trade_days[index] for index in sorted(indexes)]
volume_buy_m30=[]
volume_sell_m30=[]
for day_counter in day_counters:
day_buy_orders=[i for i in trade_orders if trades[i]['isBuyerMaker']==False and datetime.fromtimestamp(int(trades[i]['time']/1000)).day==day_counter]
day_sell_orders=[i for i in trade_orders if trades[i]['isBuyerMaker']==True and datetime.fromtimestamp(int(trades[i]['time']/1000)).day==day_counter]
hour_counters=numpy.unique([datetime.fromtimestamp(int(trade['time']/1000)).hour for trade in trades if datetime.fromtimestamp(int(trade['time']/1000)).day==day_counter])
for hour_counter in hour_counters:
volume_buy_m30.append(sum([float(trades[i]['qty']) for i in day_buy_orders if datetime.fromtimestamp(int(trades[i]['time']/1000)).hour==hour_counter and datetime.fromtimestamp(int(trades[i]['time']/1000)).minute<30]))
volume_buy_m30.append(sum([float(trades[i]['qty']) for i in day_buy_orders if datetime.fromtimestamp(int(trades[i]['time']/1000)).hour==hour_counter and datetime.fromtimestamp(int(trades[i]['time']/1000)).minute>=30]))
volume_sell_m30.append(sum([float(trades[i]['qty']) for i in day_sell_orders if datetime.fromtimestamp(int(trades[i]['time']/1000)).hour==hour_counter and datetime.fromtimestamp(int(trades[i]['time']/1000)).minute<30]))
volume_sell_m30.append(sum([float(trades[i]['qty']) for i in day_sell_orders if datetime.fromtimestamp(int(trades[i]['time']/1000)).hour==hour_counter and datetime.fromtimestamp(int(trades[i]['time']/1000)).minute>=30]))
volume_m30=[x+y for x,y in zip(volume_buy_m30,volume_sell_m30)]
if volume_m30[0]==0:
volume_m30=volume_m30[1:]
volume_buy_m30=volume_buy_m30[1:]
volume_sell_m30=volume_sell_m30[1:]
if volume_m30[-1]==0:
volume_m30=volume_m30[:-1]
volume_buy_m30=volume_buy_m30[:-1]
volume_sell_m30=volume_sell_m30[:-1]
return volume_buy_m30,volume_sell_m30,volume_m30
def trade_msg_m30(client,market,numTrades):
total_coin_buy,total_coin_sell,total_coin=trade_analysis_m30(client,market,numTrades)
bid_prices,ask_prices,bid_qties,ask_qties,i1,i2=getOrderBook(client,market)
f,(ax1,ax2)=plt.subplots(2,1,gridspec_kw={'height_ratios':[1,1]})
f.set_size_inches(20,15)
ax1p=ax1.twiny()
ax1p.step(bid_qties,bid_prices,'k',linewidth=2.5,alpha=0.25)
ax1p.step(ask_qties,ask_prices,'r',linewidth=2.5,alpha=0.25)
ax1p.set_xticks([])
for tic in ax1p.xaxis.get_major_ticks():
tic.tick1On = tic.tick2On = False
tic.label1On = tic.label2On = False
candles=numpy.array(client.get_historical_klines(market,'30m','1 month ago'),dtype='float')[-len(total_coin):]
candlestick2_ohlc(ax1,candles[:,1],candles[:,2],candles[:,3],candles[:,4],width=0.6,alpha=1)
ax1.yaxis.grid(True)
for tic in ax1.xaxis.get_major_ticks():
tic.tick1On = tic.tick2On = False
tic.label1On = tic.label2On = False
ax1.set_xticks([])
ax1.set_xlim(.5,len(total_coin))
ax1.yaxis.set_major_formatter(FormatStrFormatter('%.8f'))
ax1.get_yaxis().set_label_coords(-0.075,0.5)
ax1.set_ylabel("Order Book",fontsize=20)
msg=volume_profile(client,market)
ax1.set_title('Exchange: Binance Market: '+market+' Time Frame: 30 [minute]\n'+msg,fontsize=25,y=1.03,loc='left')
candlestick2_ohlc(ax2,numpy.zeros(len(total_coin)),total_coin,numpy.zeros(len(total_coin)),total_coin,width=0.6,alpha=.35)
candlestick2_ohlc(ax2,numpy.zeros(len(total_coin)),total_coin_buy,numpy.zeros(len(total_coin)),total_coin_buy,width=0.29,alpha=1,shift=-0.15)
candlestick2_ohlc(ax2,total_coin_sell,total_coin_sell,numpy.zeros(len(total_coin)),numpy.zeros(len(total_coin)),width=0.29,alpha=1,shift=+0.15)
ax2.yaxis.grid(True)
for tic in ax2.xaxis.get_major_ticks():
tic.tick1On = tic.tick2On = False
tic.label1On = tic.label2On = False
ax2.set_xticks([])
ax2.set_xlim(.5,len(total_coin))
ax2.get_yaxis().set_label_coords(-0.075,0.5)
ax2.set_ylabel("Buy versus Sell Volume",fontsize=20)
ax2.yaxis.set_major_formatter(FormatStrFormatter('%.2f'))
f.tight_layout()
plt.savefig(market+'.png',bbox_inches='tight')
def trade_analysis_500(client,market,opt):
trades=client.get_historical_trades(symbol=market)
minute_counters=[datetime.fromtimestamp(int(trade['time']/1000)).minute for trade in trades]
market_price=trades[-1]['price']
market_price=('%.8f' % float(market_price)).rstrip('0').rstrip('.')
buy_trades=[trade for trade in trades if trade['isBuyerMaker']==False]
sell_trades=[trade for trade in trades if trade['isBuyerMaker']==True]
buy_prices=[float(trade['price']) for trade in buy_trades]
buy_qties=[float(trade['qty']) for trade in buy_trades]
buy_values=numpy.array([price*qty for price,qty in zip(buy_prices,buy_qties)])
sell_prices=[float(trade['price']) for trade in sell_trades]
sell_qties=[float(trade['qty']) for trade in sell_trades]
sell_values=numpy.array([price*qty for price,qty in zip(sell_prices,sell_qties)])
if market[-3:]=='BTC':
n_bot_buy=len(numpy.where(buy_values<0.001)[0])
n_bot_sell=len(numpy.where(sell_values<0.001)[0])
elif market[-3:]=='ETH':
n_bot_buy=len(numpy.where(buy_values<0.01)[0])
n_bot_sell=len(numpy.where(sell_values<0.01)[0])
else:
n_bot_buy=-1
n_bot_sell=-1
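    # convert quote-asset trade values to USD via the latest BTC/ETH price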
if market[-3:]=='BTC':
btcPrice=float(client.get_recent_trades(symbol='BTCUSDT')[-1]['price'])
buy_values=buy_values*btcPrice
sell_values=sell_values*btcPrice
if market[-3:]=='ETH':
ethPrice=float(client.get_recent_trades(symbol='ETHUSDT')[-1]['price'])
buy_values=buy_values*ethPrice
sell_values=sell_values*ethPrice
total_buy=int(sum(buy_values))
total_sell=int(sum(sell_values))
n_buy=len(buy_values)
n_sell=len(sell_values)
sig_buy=[]
sig_sell=[]
thresholds=[100,200,500,1000,2000,5000,10000]
for threshold in thresholds:
sig_buy.append(len(numpy.where(buy_values>threshold)[0]))
sig_sell.append(len(numpy.where(sell_values>threshold)[0]))
n_buy_small=len(numpy.where(buy_values<10)[0])
n_sell_small=len(numpy.where(sell_values<10)[0])
time_duration='From '+str(datetime.fromtimestamp(int(trades[0]['time'])/1000))+' to '+str(datetime.fromtimestamp(int(trades[-1]['time'])/1000))+' (UTC)'
msg='#'+market+': '+market_price+'\n*Transactions statistics* (Last 500 trades)\n'+time_duration
msg=msg+'\n~ 0$: Buy '+str(n_bot_buy)+' vs Sell '+str(n_bot_sell)
msg=msg+'\n~ 1-10$: Buy '+str(n_buy_small-n_bot_buy)+' vs Sell '+str(n_sell_small-n_bot_sell)
for i in numpy.arange(0,len(thresholds),1):
msg=msg+'\n> '+"{:,}".format(thresholds[i])+'$: Buy '+str(sig_buy[i])+' vs Sell '+str(sig_sell[i])
msg=msg+'\nTotal: Buy '+str(n_buy)+' ('+"{:,}".format(total_buy)+'$) vs Sell '+str(n_sell)+' ('+"{:,}".format(total_sell)+'$)'
trade_prices=[float(trade['price']) for trade in trades]
trade_orders=numpy.arange(0,500)
buy_orders=[i for i in trade_orders if trades[i]['isBuyerMaker']==False]
sell_orders=[i for i in trade_orders if trades[i]['isBuyerMaker']==True]
if opt==1:
f,ax=plt.subplots(1,1)
f.set_size_inches(20,5)
ax.bar(buy_orders,buy_qties,color='g',edgecolor='g',width=0.9,align='center',alpha=0.75,label='Buy quantities')
ax.bar(sell_orders,sell_qties,color='r',edgecolor='r',width=0.9,align='center',alpha=0.75,label='Sell quantities')
ax.get_yaxis().set_label_coords(-0.075,0.5)
ax.set_ylabel("Trade volumes",fontsize=20)
axt=ax.twinx()
axt.step(trade_orders,trade_prices,color='b',linewidth=2,linestyle='-',label='Trade prices')
axt.set_ylabel("Trade prices",fontsize=20)
axt.get_yaxis().set_label_coords(1.075,0.5)
axs=ax.twinx()
axs.step(trade_orders,minute_counters,color='violet',linewidth=2,alpha=.5,linestyle='-',label='Time minute counters')
axs.set_yticks([])
ax.set_xlim(0,500)
ax.yaxis.grid(True)
for tic in ax.xaxis.get_major_ticks():
tic.tick1On = tic.tick2On = False
tic.label1On = tic.label2On = False
ax.yaxis.set_major_formatter(FormatStrFormatter('%.2f'))
axt.yaxis.set_major_formatter(FormatStrFormatter('%.8f'))
ax.set_title(str(market),fontsize=20,loc='left',y=1.03)
ax.legend(loc='upper left', bbox_to_anchor=(0.3, 1.15), shadow=True, fontsize='x-large', ncol=2)
axt.legend(loc='upper left', bbox_to_anchor=(0.634, 1.15), shadow=True, fontsize='x-large', ncol=2)
axs.legend(loc='upper left', bbox_to_anchor=(0.783, 1.15), shadow=True, fontsize='x-large', ncol=1)
f.tight_layout()
plt.savefig(market+'.png',bbox_inches='tight')
return msg
| StarcoderdataPython |
3323134 | <filename>larq_zoo/training/sota_experiments.py<gh_stars>0
import larq as lq
import tensorflow as tf
from zookeeper import ComponentField, Field, cli, task
from larq_zoo.sota.quicknet import (
QuickNetFactory,
QuickNetLargeFactory,
QuickNetSmallFactory,
)
from larq_zoo.training.learning_schedules import CosineDecayWithWarmup
from larq_zoo.training.train import TrainLarqZooModel
@task
class TrainQuickNet(TrainLarqZooModel):
model = ComponentField(QuickNetFactory)
epochs = Field(150)
batch_size = Field(512)
lab_blocks = Field((True, True, True, True))
model.lab_blocks = lab_blocks
resume_from = Field(None)
@Field
def optimizer(self):
return tf.keras.optimizers.Adam(
learning_rate=CosineDecayWithWarmup(
max_learning_rate=2.5e-3,
warmup_steps=self.steps_per_epoch * 5,
decay_steps=self.steps_per_epoch * self.epochs,
)
)
@task
class TrainQuickNetSmall(TrainQuickNet):
model = ComponentField(QuickNetSmallFactory)
@task
class TrainQuickNetLarge(TrainQuickNet):
model = ComponentField(QuickNetLargeFactory)
if __name__ == "__main__":
cli() | StarcoderdataPython |
3227332 | <reponame>sleepinhoo/Python
# tip: the condition could be written in the reduced form >>> pr = dist * 0.50 if dist <= 200 else dist * 0.45 >>> use it for shorter code; otherwise use the traditional version, which keeps the code easier to read
dist = float(input("QUal é a distância da sua passagem? "))
print(f"Você está prestes a começar uma viagem de {dist}Km.")
if dist <= 200:
pr = dist * 0.50
else:
pr = dist * 0.45
print(f"O preço da sua passagem será de R${pr}")
| StarcoderdataPython |
3394001 | <gh_stars>0
import click
import sys
import logging
from config import init_logging
from network.funcnet import FN
init_logging()
logger = logging.getLogger(__name__)
@click.command()
def make():
fn = FN()
if __name__ == '__main__':
make() | StarcoderdataPython |
151865 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.urls import url, include
from appxs.account.views.role import list_role, edit_role, add_role, del_role
from ..apps import app_name
urlpatterns = [
url(r'^add/$', add_role, name='add'),
url(r'^list/$', list_role, name='list'),
# url(r'^detail/(?P<role_id>\d+)/$', profile_role, name='detail'),
url(r'^edit/(?P<role_id>\d+)/$', edit_role, name='edit'),
url(r'^del/(?P<role_id>\d+)/$', del_role, name='del'),
]
| StarcoderdataPython |
73061 | import bitmath
import ipaddress
import re
from ipaddress import AddressValueError
from insights.parsers.installed_rpms import InstalledRpm
from kerlescan.constants import SYSTEM_ID_KEY
from kerlescan.constants import SYSTEM_PROFILE_STRINGS, SYSTEM_PROFILE_INTEGERS
from kerlescan.constants import SYSTEM_PROFILE_BOOLEANS
from kerlescan.constants import SYSTEM_PROFILE_LISTS_OF_STRINGS_ENABLED
from kerlescan.constants import SYSTEM_PROFILE_LISTS_OF_STRINGS_INSTALLED
from kerlescan.exceptions import UnparsableNEVRAError
def parse_profile(system_profile, display_name, logger):
"""
    Break complex data structures into simpler structures that can be compared easily.
    display_name is added to the data structure for sorting and is later stripped out.
"""
def _parse_lists_of_strings(names, verb):
"""
helper method to convert lists of strings to comparable facts
"""
for list_of_strings in names:
for item in system_profile.get(list_of_strings, []):
parsed_profile[list_of_strings + "." + item] = verb
def _parse_running_processes(processes):
"""
helper method to convert running process lists to facts. We output a
fact for each process name, with its count as a value. The count is
returned as a string to match the API spec.
"""
for process in processes:
process_fact_name = "running_processes." + process
if process_fact_name in parsed_profile:
parsed_profile[process_fact_name] = str(
int(parsed_profile[process_fact_name]) + 1
)
else:
parsed_profile[process_fact_name] = "1"
def _parse_yum_repo(name):
"""
helper method to convert yum repo objects to comparable facts
"""
parsed_profile["yum_repos." + name + ".base_url"] = yum_repo.get(
"base_url", "N/A"
)
parsed_profile["yum_repos." + name + ".enabled"] = str(
yum_repo.get("enabled", "N/A")
)
parsed_profile["yum_repos." + name + ".gpgcheck"] = str(
yum_repo.get("gpgcheck", "N/A")
)
def _canonicalize_ipv6_addr(addr):
"""
helper method to display ipv6 address strings unambiguously. If the address
is not parsable (for example: 'N/A'), just keep the string as-is.
"""
try:
return str(ipaddress.IPv6Address(addr).compressed)
except AddressValueError:
return addr
def _parse_interface(name):
"""
helper method to convert network interface objects to comparable facts
"""
ipv6_addresses = [
_canonicalize_ipv6_addr(addr)
for addr in interface.get("ipv6_addresses", ["N/A"])
]
parsed_profile["network_interfaces." + name + ".ipv4_addresses"] = ",".join(
interface.get("ipv4_addresses", ["N/A"])
)
parsed_profile["network_interfaces." + name + ".ipv6_addresses"] = ",".join(
ipv6_addresses
)
parsed_profile["network_interfaces." + name + ".mac_address"] = interface.get(
"mac_address", "N/A"
)
parsed_profile["network_interfaces." + name + ".mtu"] = str(
interface.get("mtu", "N/A")
)
parsed_profile["network_interfaces." + name + ".state"] = interface.get(
"state", "N/A"
)
parsed_profile["network_interfaces." + name + ".type"] = interface.get(
"loopback", "N/A"
)
# start with metadata that we have brought down from the system record
parsed_profile = {"id": system_profile[SYSTEM_ID_KEY], "name": display_name}
# add all strings as-is
for key in SYSTEM_PROFILE_STRINGS:
parsed_profile[key] = system_profile.get(key, None)
# add all integers, converting to str
for key in SYSTEM_PROFILE_INTEGERS | SYSTEM_PROFILE_BOOLEANS:
parsed_profile[key] = str(system_profile.get(key, "N/A"))
_parse_lists_of_strings(SYSTEM_PROFILE_LISTS_OF_STRINGS_ENABLED, "enabled")
_parse_lists_of_strings(SYSTEM_PROFILE_LISTS_OF_STRINGS_INSTALLED, "installed")
_parse_running_processes(system_profile.get("running_processes", []))
# convert bytes to human readable format
if "system_memory_bytes" in system_profile:
with bitmath.format(fmt_str="{value:.2f} {unit}"):
formatted_size = bitmath.Byte(
system_profile["system_memory_bytes"]
).best_prefix()
parsed_profile["system_memory"] = str(formatted_size)
system_profile.pop("system_memory_bytes")
for package in system_profile.get("installed_packages", []):
try:
name, vra = _get_name_vra_from_string(package)
if name != "gpg-pubkey":
parsed_profile["installed_packages." + name] = vra
except UnparsableNEVRAError as e:
            logger.warning(e.message)
for interface in system_profile.get("network_interfaces", []):
try:
name = interface["name"]
_parse_interface(name)
except KeyError:
logger.warn("network interface has no name, skipping")
continue
for yum_repo in system_profile.get("yum_repos", []):
try:
name = yum_repo["name"]
_parse_yum_repo(name)
except KeyError:
logger.warn("yum repo has no name, skipping")
continue
return parsed_profile
def _get_name_vra_from_string(rpm_string):
"""
small helper to pull name + version/release/arch from string
This supports two styles: ENVRA and NEVRA. The latter is preferred.
"""
try:
if re.match("^[0-9]+:", rpm_string):
_, remainder = rpm_string.split(":", maxsplit=1)
rpm = InstalledRpm.from_package(remainder)
else:
rpm = InstalledRpm.from_package(rpm_string)
except TypeError:
raise UnparsableNEVRAError("unable to parse %s into nevra" % rpm_string)
vra = rpm.version if rpm.version else ""
if rpm.release:
vra = vra + "-" + rpm.release
if rpm.arch:
vra = vra + "." + rpm.arch
return rpm.name, vra
def get_name(system):
# this mimics how the inventory service modal displays names.
name = system["id"]
if system.get("fqdn"):
name = system.get("fqdn")
if system.get("display_name"):
name = system.get("display_name")
return name
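# Illustrative smoke test, not part of the original kerlescan module; the
# package strings are hypothetical examples of the two supported styles.
if __name__ == "__main__":
    # plain NEVRA string
    print(_get_name_vra_from_string("bash-5.0.17-1.fc32.x86_64"))
    # ENVRA string with an epoch prefix, which the helper strips first
    print(_get_name_vra_from_string("1:openssl-1.1.1g-1.fc32.x86_64"))
    # display_name wins over fqdn, which wins over id
    print(get_name({"id": "1234", "fqdn": "host.example.com"}))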
| StarcoderdataPython |
30688 | <filename>torcharc/module/merge.py<gh_stars>1-10
from abc import ABC, abstractmethod
from torch import nn
from typing import Dict, List
import torch
class Merge(ABC, nn.Module):
'''A Merge module merges a dict of tensors into one tensor'''
@abstractmethod
def forward(self, xs: dict) -> torch.Tensor: # pragma: no cover
raise NotImplementedError
class ConcatMerge(Merge):
'''Merge layer to merge a dict of tensors by concatenating along dim=1. Reverse of Split'''
def forward(self, xs: dict) -> torch.Tensor:
return torch.cat(list(xs.values()), dim=1)
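# Illustrative shapes (annotation added for clarity): ConcatMerge()({"a":
# torch.rand(2, 3), "b": torch.rand(2, 5)}).shape -> torch.Size([2, 8])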
class FiLMMerge(Merge):
'''
    Merge layer to merge a dict of 2 tensors via a Feature-wise Linear Modulation (FiLM) layer https://distill.pub/2018/feature-wise-transformations/
    Takes a feature tensor and a conditioning tensor, and affine-transforms the feature using the conditioner:
output = conditioner_scale * feature + conditioner_shift
The conditioning tensor is a vector, and will be passed through a Linear layer with out_features = number of features or channels (image), and the operation is element-wise on the features or channels.
'''
def __init__(self, names: Dict[str, str], shapes: Dict[str, List[int]]) -> None:
super().__init__()
self.feature_name = names['feature']
self.conditioner_name = names['conditioner']
assert len(shapes) == 2, f'shapes {shapes} should specify only two keys for feature and conditioner'
self.feature_size = shapes[self.feature_name][0]
self.conditioner_size = shapes[self.conditioner_name][0]
self.conditioner_scale = nn.Linear(self.conditioner_size, self.feature_size)
self.conditioner_shift = nn.Linear(self.conditioner_size, self.feature_size)
@classmethod
def affine_transform(cls, feature: torch.Tensor, conditioner_scale: torch.Tensor, conditioner_shift: torch.Tensor) -> torch.Tensor:
'''Apply affine transform with safe-broadcast across the entire features/channels of the feature tensor'''
view_shape = list(conditioner_scale.shape) + [1] * (feature.dim() - conditioner_scale.dim())
return conditioner_scale.view(*view_shape) * feature + conditioner_shift.view(*view_shape)
def forward(self, xs: dict) -> torch.Tensor:
'''Apply FiLM affine transform on feature using conditioner'''
feature = xs[self.feature_name]
conditioner = xs[self.conditioner_name]
conditioner_scale = self.conditioner_scale(conditioner)
conditioner_shift = self.conditioner_shift(conditioner)
return self.affine_transform(feature, conditioner_scale, conditioner_shift)
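# Usage sketch, not part of the original torcharc module; the tensor shapes
# are hypothetical: a 16-channel 8x8 feature map modulated by a 4-dim vector.
if __name__ == "__main__":
    merge = FiLMMerge(
        names={'feature': 'image', 'conditioner': 'vector'},
        shapes={'image': [16, 8, 8], 'vector': [4]},
    )
    xs = {'image': torch.rand(2, 16, 8, 8), 'vector': torch.rand(2, 4)}
    out = merge(xs)
    # scale and shift broadcast over the spatial dims, so the shape is kept
    assert out.shape == (2, 16, 8, 8)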
| StarcoderdataPython |
1640175 | """
! #1 BASICS
@app.route("/user_teste/<name>")
def user_test(name):
return f"Hello {name}"
@app.route("/admin")
def admin():
return redirect(url_for("user", name="admin"))
"""
"""
! #2 HTML
@app.route("/<text>")
def home_page(text):
names_list = ["lucas", "luana", "lukita", "luanita"]
return render_template("video1.html"
,content = text
,names=names_list
)
# <body>
# <h1> HELLO </h1>
# <!-- receives a variable from the Python code -->
# <!-- {% ... %} is a Jinja statement; {{ ... }} is an expression -->
# <p> TEXT : {{content}}</p>
# {% for x in names %}
# <!-- {{variable}} -->
# <p> ODD: {{x}} </p>
# {% endfor %}
# </body>
pass
"""
"""
! #3 BOOTSTRAP - INHERITANCE
@app.route("/boot")
def boot_page():
text = "Some content! "
return render_template("index.html"
,content = text
)
pass
""" | StarcoderdataPython |