hexsha stringlengths 40 40 | size int64 4 996k | ext stringclasses 8
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 996k | avg_line_length float64 1.33 58.2k | max_line_length int64 2 323k | alphanum_fraction float64 0 0.97 | content_no_comment stringlengths 0 946k | is_comment_constant_removed bool 2
classes | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f71da88383379817259c926e3dd530d8bc03e35e | 1,211 | py | Python | setup.py | kleinesfilmroellchen/fancytables | 9bf63fa27662c8b5f1df9f4af7d3747108a72bf2 | [
"Apache-2.0"
] | 1 | 2019-07-28T18:50:13.000Z | 2019-07-28T18:50:13.000Z | setup.py | kleinesfilmroellchen/fancytables | 9bf63fa27662c8b5f1df9f4af7d3747108a72bf2 | [
"Apache-2.0"
] | null | null | null | setup.py | kleinesfilmroellchen/fancytables | 9bf63fa27662c8b5f1df9f4af7d3747108a72bf2 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
from setuptools import setup, find_packages
from fancytables import __version__
with open("README.md", "r") as f:
long_description = f.read()
# Distribution metadata; long_description is the README text loaded above.
setup(
    name="fancytables",
    version=__version__,  # single-sourced from fancytables.__version__
    author="kleinesfilmröllchen",
    description="Fancy table formatting that builds on prettytable",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/kleinesfilmroellchen/fancytables",
    license="Apache 2.0",
    python_requires=">=3",
    packages=find_packages(),
    test_suite="test_bootstrap.test_suite",
    classifiers=[
        "Development Status :: 2 - Pre-Alpha",
        "Intended Audience :: Developers",
        "Topic :: Software Development :: User Interfaces",
        "Topic :: Software Development :: Libraries :: Python Modules",
        "Topic :: Software Development :: Libraries",
        "Programming Language :: Python :: 3 :: Only",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: OS Independent"
    ],
    keywords=["prettytable", "fancytable", "table", "asciitable",
              "unicodetable", "nicetable", "table formatting", "cli"]
)
| 36.69697 | 71 | 0.669694 |
from setuptools import setup, find_packages
from fancytables import __version__
with open("README.md", "r") as f:
long_description = f.read()
setup(
name="fancytables",
version=__version__,
author="kleinesfilmröllchen",
description="Fancy table formatting that builds on prettytable",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/kleinesfilmroellchen/fancytables",
license="Apache 2.0",
python_requires=">=3",
packages=find_packages(),
test_suite="test_bootstrap.test_suite",
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: User Interfaces",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Software Development :: Libraries",
"Programming Language :: Python :: 3 :: Only",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent"
],
keywords=["prettytable", "fancytable", "table", "asciitable",
"unicodetable", "nicetable", "table formatting", "cli"]
)
| true | true |
f71da8f6228385b82094f40fddf9fda8725396bb | 222 | py | Python | src/python/squarepattern.py | helara/a-patterns | 42c6fb713371fae8c6c38a2a17c04915e9fef8b3 | [
"MIT"
] | 9 | 2020-10-02T03:40:07.000Z | 2021-10-17T11:55:01.000Z | src/python/squarepattern.py | helara/a-patterns | 42c6fb713371fae8c6c38a2a17c04915e9fef8b3 | [
"MIT"
] | 62 | 2020-10-02T03:02:20.000Z | 2021-10-12T09:14:18.000Z | src/python/squarepattern.py | helara/a-patterns | 42c6fb713371fae8c6c38a2a17c04915e9fef8b3 | [
"MIT"
] | 58 | 2020-10-02T03:19:24.000Z | 2021-10-12T07:28:14.000Z |
def square_pattern(n):
    """Print an n-by-n square of asterisks, one row per line.

    Each row is "* " repeated n times (trailing space included), matching
    the classic nested-loop formulation.
    """
    row = "* " * n
    for _ in range(n):
        print(row)
square_pattern(5)
'''
python3 squarepattern.py
* * * * *
* * * * *
* * * * *
* * * * *
* * * * *
''' | 12.333333 | 34 | 0.414414 |
def square_pattern(n):
for i in range(n):
for j in range(n):
print("*",end=" ")
print()
square_pattern(5)
| true | true |
f71da97d64c3b539f23c5f8231fda32b53eaeed1 | 3,994 | py | Python | hapi_demo.py | hbatta/client-python | 1c1d32fce9e84bc1a4938ae7adc30cef8d682aa4 | [
"BSD-3-Clause"
] | null | null | null | hapi_demo.py | hbatta/client-python | 1c1d32fce9e84bc1a4938ae7adc30cef8d682aa4 | [
"BSD-3-Clause"
] | null | null | null | hapi_demo.py | hbatta/client-python | 1c1d32fce9e84bc1a4938ae7adc30cef8d682aa4 | [
"BSD-3-Clause"
] | null | null | null | # Basic demo of hapiclient. Install package using
# pip install hapiclient --upgrade
# from command line.
# Note:
# In IPython, enter
# %matplotlib qt
# on command line to open plots in new window. Enter
# %matplotlib inline
# to revert.
# For more extensive demos and examples, see
# https://colab.research.google.com/drive/11Zy99koiE90JKJ4u_KPTaEBMQFzbfU3P?usp=sharing
def main():
    """Run every demo in turn, printing (not raising) any failure in red."""
    for demo in (omniweb, sscweb, cdaweb, cassini, lisird):
        try:
            demo()
        except Exception as exc:
            print("\033[0;31mError:\033[0m " + str(exc))
def omniweb():
    """Fetch three months of the OMNI2 DST1800 parameter from the CDAWeb
    HAPI server and plot it (presumably the hourly Dst index — confirm
    against the OMNI2_H0_MRG1HR dataset documentation)."""
    from hapiclient import hapi
    from hapiplot import hapiplot
    server = 'https://cdaweb.gsfc.nasa.gov/hapi'
    dataset = 'OMNI2_H0_MRG1HR'
    start = '2003-09-01T00:00:00'
    stop = '2003-12-01T00:00:00'
    parameters = 'DST1800'
    # usecache=False forces a fresh download on every run
    opts = {'logging': True, 'usecache': False}
    # Get data
    data, meta = hapi(server, dataset, parameters, start, stop, **opts)
    # Plot all parameters
    hapiplot(data, meta)
def sscweb():
    """Fetch five hours of ACE GSE position data from the SSCWeb HAPI
    server and plot it."""
    from hapiclient import hapi
    from hapiplot import hapiplot
    request_opts = {'logging': True, 'usecache': True}
    data, meta = hapi('http://hapi-server.org/servers/SSCWeb/hapi', 'ace',
                      'X_GSE,Y_GSE,Z_GSE', '2001-01-01T05:00:00',
                      '2001-01-01T10:00:00', **request_opts)
    hapiplot(data, meta, **request_opts)
def cdaweb():
    """Demonstrate the three CDAWeb HAPI request types: data for one
    dataset, metadata for one dataset, and the full dataset catalog."""
    from hapiclient import hapi
    from hapiplot import hapiplot
    # CDAWeb data - Magnitude and BGSEc from dataset AC_H0_MFI
    server = 'https://cdaweb.gsfc.nasa.gov/hapi'
    dataset = 'AC_H0_MFI'
    start = '2001-01-01T05:00:00'
    stop = '2001-01-01T10:00:00'
    parameters = 'Magnitude,BGSEc'
    opts = {'logging': True, 'usecache': True}
    data, meta = hapi(server, dataset, parameters, start, stop, **opts)
    hapiplot(data, meta, **opts)
    # CDAWeb metadata for AC_H0_MFI: calling hapi() without a time range
    # returns metadata only
    server = 'https://cdaweb.gsfc.nasa.gov/hapi'
    dataset = 'AC_H0_MFI'
    meta = hapi(server, dataset, **opts)
    print('Parameters in %s' % dataset)
    for i in range(0, len(meta['parameters'])):
        print(' %s' % meta['parameters'][i]['name'])
    print('')
    # CDAWeb metadata for all datasets: server-only call returns the catalog
    server = 'https://cdaweb.gsfc.nasa.gov/hapi'
    meta = hapi(server, **opts)
    print('%d CDAWeb datasets' % len(meta['catalog']))
    for i in range(0, 3):
        print(' %d. %s' % (i, meta['catalog'][i]['id']))
    print(' ...')
    print(' %d. %s' % (len(meta['catalog']), meta['catalog'][-1]['id']))
    print('')
    # List all servers
    servers = hapi(logging=True)  # servers is an array of URLs
    print('')
def cassini():
    """Fetch two hours of Cassini CHEMS HPlus_BEST_T1 data from the
    datashop HAPI server and plot it with logarithmic y and z axes."""
    from hapiclient import hapi
    from hapiplot import hapiplot
    fetch_opts = {'usecache': True, 'logging': True}
    data, meta = hapi('http://datashop.elasticbeanstalk.com/hapi',
                      'CHEMS_PHA_BOX_FLUXES_FULL_TIME_RES',
                      'HPlus_BEST_T1',
                      '2004-07-01T04:00:00Z',
                      '2004-07-01T06:00:00Z', **fetch_opts)
    plot_opts = {'logging': False, 'logy': True, 'logz': True}
    hapiplot(data, meta, **plot_opts)
def lisird():
    """Fetch five days of SME solar spectral irradiance from the LASP
    LISIRD HAPI server and plot it."""
    from hapiclient import hapi
    from hapiplot import hapiplot
    request_opts = {'usecache': True, 'logging': True}
    data, meta = hapi('http://lasp.colorado.edu/lisird/hapi', 'sme_ssi',
                      'irradiance', '1981-10-09T00:00:00.000Z',
                      '1981-10-14T00:00:00.000Z', **request_opts)
    hapiplot(data, meta)
if __name__ == '__main__':
    # Plotting is optional: probe for hapiplot up front and warn instead of
    # failing when it is absent. Catch only ImportError — a bare `except:`
    # would also swallow unrelated errors raised while importing hapiplot.
    try:
        from hapiplot import hapiplot
    except ImportError:
        print('Package hapiplot is not installed. Will not plot results.')
    main()
| 29.80597 | 87 | 0.605658 |
def main():
demos = [omniweb, sscweb, cdaweb, cassini, lisird]
for demo in demos:
try:
demo()
except Exception as e:
print("\033[0;31mError:\033[0m " + str(e))
def omniweb():
from hapiclient import hapi
from hapiplot import hapiplot
server = 'https://cdaweb.gsfc.nasa.gov/hapi'
dataset = 'OMNI2_H0_MRG1HR'
start = '2003-09-01T00:00:00'
stop = '2003-12-01T00:00:00'
parameters = 'DST1800'
opts = {'logging': True, 'usecache': False}
data, meta = hapi(server, dataset, parameters, start, stop, **opts)
hapiplot(data, meta)
def sscweb():
from hapiclient import hapi
from hapiplot import hapiplot
server = 'http://hapi-server.org/servers/SSCWeb/hapi'
dataset = 'ace'
start = '2001-01-01T05:00:00'
stop = '2001-01-01T10:00:00'
parameters = 'X_GSE,Y_GSE,Z_GSE'
opts = {'logging': True, 'usecache': True}
data, meta = hapi(server, dataset, parameters, start, stop, **opts)
hapiplot(data, meta, **opts)
def cdaweb():
from hapiclient import hapi
from hapiplot import hapiplot
server = 'https://cdaweb.gsfc.nasa.gov/hapi'
dataset = 'AC_H0_MFI'
start = '2001-01-01T05:00:00'
stop = '2001-01-01T10:00:00'
parameters = 'Magnitude,BGSEc'
opts = {'logging': True, 'usecache': True}
data, meta = hapi(server, dataset, parameters, start, stop, **opts)
hapiplot(data, meta, **opts)
server = 'https://cdaweb.gsfc.nasa.gov/hapi'
dataset = 'AC_H0_MFI'
meta = hapi(server, dataset, **opts)
print('Parameters in %s' % dataset)
for i in range(0, len(meta['parameters'])):
print(' %s' % meta['parameters'][i]['name'])
print('')
server = 'https://cdaweb.gsfc.nasa.gov/hapi'
meta = hapi(server, **opts)
print('%d CDAWeb datasets' % len(meta['catalog']))
for i in range(0, 3):
print(' %d. %s' % (i, meta['catalog'][i]['id']))
print(' ...')
print(' %d. %s' % (len(meta['catalog']), meta['catalog'][-1]['id']))
print('')
servers = hapi(logging=True)
print('')
def cassini():
from hapiclient import hapi
from hapiplot import hapiplot
server = 'http://datashop.elasticbeanstalk.com/hapi';
dataset = 'CHEMS_PHA_BOX_FLUXES_FULL_TIME_RES';
parameters = 'HPlus_BEST_T1';
start = '2004-07-01T04:00:00Z';
stop = '2004-07-01T06:00:00Z';
opts = {'usecache': True, 'logging': True}
data, meta = hapi(server, dataset, parameters, start, stop, **opts)
popts = {'logging': False, 'logy': True, 'logz': True}
hapiplot(data, meta, **popts)
def lisird():
from hapiclient import hapi
from hapiplot import hapiplot
server = 'http://lasp.colorado.edu/lisird/hapi';
dataset = 'sme_ssi';
parameters = 'irradiance';
start = '1981-10-09T00:00:00.000Z';
stop = '1981-10-14T00:00:00.000Z';
opts = {'usecache': True, 'logging': True}
data, meta = hapi(server, dataset, parameters, start, stop, **opts)
hapiplot(data, meta)
if __name__ == '__main__':
try:
from hapiplot import hapiplot
except:
print('Package hapiplot is not installed. Will not plot results.')
main()
| true | true |
f71da982f16665ea81e00e48c0b297a273d3faab | 1,269 | py | Python | jina/types/request/mixin.py | slettner/jina | 4140961c62359e3acd540a6d88931665c6313824 | [
"Apache-2.0"
] | null | null | null | jina/types/request/mixin.py | slettner/jina | 4140961c62359e3acd540a6d88931665c6313824 | [
"Apache-2.0"
] | null | null | null | jina/types/request/mixin.py | slettner/jina | 4140961c62359e3acd540a6d88931665c6313824 | [
"Apache-2.0"
] | null | null | null | from ..arrays import DocumentArray
from ...proto import jina_pb2
class DocsPropertyMixin:
    """Mixin class of docs property."""
    @property
    def docs(self) -> 'DocumentArray':
        """Get the :class: `DocumentArray` with sequence `body.docs` as content.
        :return: requested :class: `DocumentArray`
        """
        # Side effect: reading the docs marks the request as consumed.
        self.is_used = True
        return DocumentArray(self.body.docs)
class GroundtruthPropertyMixin:
    """Mixin class of groundtruths property."""
    @property
    def groundtruths(self) -> 'DocumentArray':
        """Get the groundtruths in :class: `DocumentArray` type.
        :return: requested groundtruths :class: `DocumentArray`
        """
        # Side effect: reading the groundtruths marks the request as consumed.
        self.is_used = True
        return DocumentArray(self.body.groundtruths)
class IdsMixin:
    """Mixin class of ids property."""
    @property
    def ids(self):
        """Get the ids carried in the request body.
        :return: ids
        """
        body = self.body
        return body.ids
class CommandMixin:
    """Mixin class of command property."""
    @property
    def command(self) -> str:
        """Get the command.
        :return: command
        """
        # Side effect: reading the command marks the request as consumed.
        self.is_used = True
        # Convert the protobuf enum value into its symbolic name string.
        return jina_pb2.RequestProto.ControlRequestProto.Command.Name(
            self.proto.control.command
        )
| 24.403846 | 80 | 0.616233 | from ..arrays import DocumentArray
from ...proto import jina_pb2
class DocsPropertyMixin:
@property
def docs(self) -> 'DocumentArray':
self.is_used = True
return DocumentArray(self.body.docs)
class GroundtruthPropertyMixin:
@property
def groundtruths(self) -> 'DocumentArray':
self.is_used = True
return DocumentArray(self.body.groundtruths)
class IdsMixin:
@property
def ids(self):
return self.body.ids
class CommandMixin:
@property
def command(self) -> str:
self.is_used = True
return jina_pb2.RequestProto.ControlRequestProto.Command.Name(
self.proto.control.command
)
| true | true |
f71daa30288191894bea8d2352348972b7d9dab7 | 1,139 | py | Python | src/generator/AutoRest.Python.Tests/Expected/AcceptanceTests/BodyFormData/setup.py | ljhljh235/AutoRest | b9ab4000e9b93d16925db84d08bafc225b098f8e | [
"MIT"
] | 3 | 2018-03-20T22:36:32.000Z | 2021-07-15T02:36:51.000Z | src/generator/AutoRest.Python.Tests/Expected/AcceptanceTests/BodyFormData/setup.py | ljhljh235/AutoRest | b9ab4000e9b93d16925db84d08bafc225b098f8e | [
"MIT"
] | null | null | null | src/generator/AutoRest.Python.Tests/Expected/AcceptanceTests/BodyFormData/setup.py | ljhljh235/AutoRest | b9ab4000e9b93d16925db84d08bafc225b098f8e | [
"MIT"
] | 1 | 2019-07-20T12:20:03.000Z | 2019-07-20T12:20:03.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# coding: utf-8
from setuptools import setup, find_packages
NAME = "autorestswaggerbatformdataservice"
VERSION = "1.0.0"
# To install the library, run the following
#
# python setup.py install
#
# prerequisite: setuptools
# http://pypi.python.org/pypi/setuptools
REQUIRES = ["msrest>=0.2.0"]
setup(
name=NAME,
version=VERSION,
description="AutoRestSwaggerBATFormDataService",
author_email="",
url="",
keywords=["Swagger", "AutoRestSwaggerBATFormDataService"],
install_requires=REQUIRES,
packages=find_packages(),
include_package_data=True,
long_description="""\
Test Infrastructure for AutoRest Swagger BAT
"""
)
| 27.780488 | 76 | 0.634767 |
from setuptools import setup, find_packages
NAME = "autorestswaggerbatformdataservice"
VERSION = "1.0.0"
REQUIRES = ["msrest>=0.2.0"]
setup(
name=NAME,
version=VERSION,
description="AutoRestSwaggerBATFormDataService",
author_email="",
url="",
keywords=["Swagger", "AutoRestSwaggerBATFormDataService"],
install_requires=REQUIRES,
packages=find_packages(),
include_package_data=True,
long_description="""\
Test Infrastructure for AutoRest Swagger BAT
"""
)
| true | true |
f71daab921001a2e7c9422eb3c8b3b95ef64ffa7 | 5,243 | py | Python | ps3000aExamples/ps3000aBlockMSOExample.py | joe-jordan/picosdk-python-wrappers | 76f393b500200de168b4f2b74b74aad74d89fd92 | [
"ISC"
] | null | null | null | ps3000aExamples/ps3000aBlockMSOExample.py | joe-jordan/picosdk-python-wrappers | 76f393b500200de168b4f2b74b74aad74d89fd92 | [
"ISC"
] | null | null | null | ps3000aExamples/ps3000aBlockMSOExample.py | joe-jordan/picosdk-python-wrappers | 76f393b500200de168b4f2b74b74aad74d89fd92 | [
"ISC"
] | null | null | null | #
# Copyright (C) 2018 Pico Technology Ltd. See LICENSE file for terms.
#
# PS3000A BLOCK MODE MSO EXAMPLE
# This example opens a 3000a driver device, sets up one digital port and a trigger to collect a block of data.
# This data is then split into the indivual digital channels and plotted as the binary value against time in ns.
import ctypes
from picosdk.ps3000a import ps3000a as ps
from picosdk.functions import splitMSODataPort0, assert_pico_ok
import numpy as np
import matplotlib.pyplot as plt
import time
from array import *
# Gives the device a handle
status = {}
chandle = ctypes.c_int16()
# Opens the device/s
status["openunit"] = ps.ps3000aOpenUnit(ctypes.byref(chandle), None)
try:
assert_pico_ok(status["openunit"])
except:
# powerstate becomes the status number of openunit
powerstate = status["openunit"]
# If powerstate is the same as 282 then it will run this if statement
if powerstate == 282:
# Changes the power input to "PICO_POWER_SUPPLY_NOT_CONNECTED"
status["ChangePowerSource"] = ps.ps3000aChangePowerSource(chandle, 282)
# If the powerstate is the same as 286 then it will run this if statement
elif powerstate == 286:
# Changes the power input to "PICO_USB3_0_DEVICE_NON_USB3_0_PORT"
status["ChangePowerSource"] = ps.ps3000aChangePowerSource(chandle, 286)
else:
raise
assert_pico_ok(status["ChangePowerSource"])
# set up digital port
# handle = chandle
# PS3000a_DIGITAL_PORT = 0x80
# Enable = 1
# logicLevel = 10000
status["SetDigitalPort"] = ps.ps3000aSetDigitalPort( chandle, 0x80, 1, 10000)
assert_pico_ok(status["SetDigitalPort"])
# Setting the number of sample to be collected
preTriggerSamples = 400
postTriggerSamples = 400
maxsamples = preTriggerSamples + postTriggerSamples
# Gets timebase innfomation
# Handle = chandle
# Timebase = 2 = timebase
# Nosample = maxsamples
# TimeIntervalNanoseconds = ctypes.byref(timeIntervalns)
# MaxSamples = ctypes.byref(returnedMaxSamples)
# Segement index = 0
timebase = 8
timeIntervalns = ctypes.c_float()
returnedMaxSamples = ctypes.c_int16()
status["GetTimebase"] = ps.ps3000aGetTimebase2(chandle, timebase, maxsamples, ctypes.byref(timeIntervalns), 1, ctypes.byref(returnedMaxSamples), 0)
assert_pico_ok(status["GetTimebase"])
# Creates a overlow location for data
overflow = ctypes.c_int16()
# Creates converted types maxsamples
cmaxSamples = ctypes.c_int32(maxsamples)
# Create buffers ready for assigning pointers for data collection
bufferAMax = (ctypes.c_int16 * maxsamples)()
bufferAMin = (ctypes.c_int16 * maxsamples)()
# Setting the data buffer location for data collection from PS3000A_DIGITAL_PORT0
# Handle = Chandle
# source = PS3000A_DIGITAL_PORT0 = 0x80
# Buffer max = ctypes.byref(bufferAMax)
# Buffer min = ctypes.byref(bufferAMin)
# Buffer length = maxsamples
# Segment index = 0
# Ratio mode = ps3000A_Ratio_Mode_None = 0
status["SetDataBuffers"] = ps.ps3000aSetDataBuffers(chandle, 0x80, ctypes.byref(bufferAMax), ctypes.byref(bufferAMin), maxsamples, 0, 0)
assert_pico_ok(status["SetDataBuffers"])
# Starts the block capture
# Handle = chandle
# Number of prTriggerSamples
# Number of postTriggerSamples
# Timebase = 2 = 4ns (see Programmer's guide for more information on timebases)
# time indisposed ms = None (This is not needed within the example)
# Segment index = 0
# LpRead = None
# pParameter = None
status["runblock"] = ps.ps3000aRunBlock(chandle, preTriggerSamples, postTriggerSamples, timebase, 1, None, 0, None, None)
assert_pico_ok(status["runblock"])
# Creates a overlow location for data
overflow = (ctypes.c_int16 * 10)()
# Creates converted types maxsamples
cmaxSamples = ctypes.c_int32(maxsamples)
# Checks data collection to finish the capture
ready = ctypes.c_int16(0)
check = ctypes.c_int16(0)
while ready.value == check.value:
status["isReady"] = ps.ps3000aIsReady(chandle, ctypes.byref(ready))
# Handle = chandle
# start index = 0
# noOfSamples = ctypes.byref(cmaxSamples)
# DownSampleRatio = 0
# DownSampleRatioMode = 0
# SegmentIndex = 0
# Overflow = ctypes.byref(overflow)
status["GetValues"] = ps.ps3000aGetValues(chandle, 0, ctypes.byref(cmaxSamples), 0, 0, 0, ctypes.byref(overflow))
assert_pico_ok(status["GetValues"])
bufferAMaxBinaryD0, bufferAMaxBinaryD1, bufferAMaxBinaryD2, bufferAMaxBinaryD3, bufferAMaxBinaryD4, bufferAMaxBinaryD5, bufferAMaxBinaryD6, bufferAMaxBinaryD7 = splitMSODataPort0(cmaxSamples, bufferAMax)
# Creates the time data
time = np.linspace(0, (cmaxSamples.value) * timeIntervalns.value, cmaxSamples.value)
# Plots the data from digital channel onto a graph
plt.plot(time, bufferAMaxBinaryD0[:])
plt.plot(time, bufferAMaxBinaryD1[:])
plt.plot(time, bufferAMaxBinaryD2[:])
plt.plot(time, bufferAMaxBinaryD3[:])
plt.plot(time, bufferAMaxBinaryD4[:])
plt.plot(time, bufferAMaxBinaryD5[:])
plt.plot(time, bufferAMaxBinaryD6[:])
plt.plot(time, bufferAMaxBinaryD7[:])
plt.xlabel('Time (ns)')
plt.ylabel('Binary')
plt.show()
# Stops the scope
# Handle = chandle
status["stop"] = ps.ps3000aStop(chandle)
assert_pico_ok(status["stop"])
# Closes the unit
# Handle = chandle
status["stop"] = ps.ps3000aCloseUnit(chandle)
assert_pico_ok(status["stop"])
# Displays the staus returns
print(status) | 34.493421 | 203 | 0.764639 |
import ctypes
from picosdk.ps3000a import ps3000a as ps
from picosdk.functions import splitMSODataPort0, assert_pico_ok
import numpy as np
import matplotlib.pyplot as plt
import time
from array import *
status = {}
chandle = ctypes.c_int16()
status["openunit"] = ps.ps3000aOpenUnit(ctypes.byref(chandle), None)
try:
assert_pico_ok(status["openunit"])
except:
powerstate = status["openunit"]
if powerstate == 282:
status["ChangePowerSource"] = ps.ps3000aChangePowerSource(chandle, 282)
elif powerstate == 286:
status["ChangePowerSource"] = ps.ps3000aChangePowerSource(chandle, 286)
else:
raise
assert_pico_ok(status["ChangePowerSource"])
status["SetDigitalPort"] = ps.ps3000aSetDigitalPort( chandle, 0x80, 1, 10000)
assert_pico_ok(status["SetDigitalPort"])
preTriggerSamples = 400
postTriggerSamples = 400
maxsamples = preTriggerSamples + postTriggerSamples
timebase = 8
timeIntervalns = ctypes.c_float()
returnedMaxSamples = ctypes.c_int16()
status["GetTimebase"] = ps.ps3000aGetTimebase2(chandle, timebase, maxsamples, ctypes.byref(timeIntervalns), 1, ctypes.byref(returnedMaxSamples), 0)
assert_pico_ok(status["GetTimebase"])
overflow = ctypes.c_int16()
cmaxSamples = ctypes.c_int32(maxsamples)
bufferAMax = (ctypes.c_int16 * maxsamples)()
bufferAMin = (ctypes.c_int16 * maxsamples)()
status["SetDataBuffers"] = ps.ps3000aSetDataBuffers(chandle, 0x80, ctypes.byref(bufferAMax), ctypes.byref(bufferAMin), maxsamples, 0, 0)
assert_pico_ok(status["SetDataBuffers"])
# time indisposed ms = None (This is not needed within the example)
# Segment index = 0
# LpRead = None
# pParameter = None
status["runblock"] = ps.ps3000aRunBlock(chandle, preTriggerSamples, postTriggerSamples, timebase, 1, None, 0, None, None)
assert_pico_ok(status["runblock"])
# Creates a overlow location for data
overflow = (ctypes.c_int16 * 10)()
# Creates converted types maxsamples
cmaxSamples = ctypes.c_int32(maxsamples)
# Checks data collection to finish the capture
ready = ctypes.c_int16(0)
check = ctypes.c_int16(0)
while ready.value == check.value:
status["isReady"] = ps.ps3000aIsReady(chandle, ctypes.byref(ready))
# Handle = chandle
# start index = 0
# noOfSamples = ctypes.byref(cmaxSamples)
# DownSampleRatio = 0
# DownSampleRatioMode = 0
# SegmentIndex = 0
# Overflow = ctypes.byref(overflow)
status["GetValues"] = ps.ps3000aGetValues(chandle, 0, ctypes.byref(cmaxSamples), 0, 0, 0, ctypes.byref(overflow))
assert_pico_ok(status["GetValues"])
bufferAMaxBinaryD0, bufferAMaxBinaryD1, bufferAMaxBinaryD2, bufferAMaxBinaryD3, bufferAMaxBinaryD4, bufferAMaxBinaryD5, bufferAMaxBinaryD6, bufferAMaxBinaryD7 = splitMSODataPort0(cmaxSamples, bufferAMax)
# Creates the time data
time = np.linspace(0, (cmaxSamples.value) * timeIntervalns.value, cmaxSamples.value)
# Plots the data from digital channel onto a graph
plt.plot(time, bufferAMaxBinaryD0[:])
plt.plot(time, bufferAMaxBinaryD1[:])
plt.plot(time, bufferAMaxBinaryD2[:])
plt.plot(time, bufferAMaxBinaryD3[:])
plt.plot(time, bufferAMaxBinaryD4[:])
plt.plot(time, bufferAMaxBinaryD5[:])
plt.plot(time, bufferAMaxBinaryD6[:])
plt.plot(time, bufferAMaxBinaryD7[:])
plt.xlabel('Time (ns)')
plt.ylabel('Binary')
plt.show()
# Stops the scope
# Handle = chandle
status["stop"] = ps.ps3000aStop(chandle)
assert_pico_ok(status["stop"])
# Closes the unit
# Handle = chandle
status["stop"] = ps.ps3000aCloseUnit(chandle)
assert_pico_ok(status["stop"])
# Displays the staus returns
print(status) | true | true |
f71dab8a82af3d313e92b214f7c9f4a85f08258e | 2,721 | py | Python | wavetorch/io.py | Kshitiz-Bansal/wavetorch | 7958e512ceda7dfa8d2228d0961157dac4362b58 | [
"MIT"
] | 470 | 2019-04-30T00:49:21.000Z | 2022-03-20T08:31:59.000Z | wavetorch/io.py | geofiber/wavetorch | 927ad02dc9db83f72b8df1d91418a6681e60fd56 | [
"MIT"
] | 8 | 2019-04-30T01:06:36.000Z | 2021-07-18T06:24:56.000Z | wavetorch/io.py | geofiber/wavetorch | 927ad02dc9db83f72b8df1d91418a6681e60fd56 | [
"MIT"
] | 76 | 2019-04-30T09:40:39.000Z | 2022-03-08T18:38:13.000Z | import copy
import os
import torch
from . import geom
from .cell import WaveCell
from .probe import WaveIntensityProbe
from .rnn import WaveRNN
from .source import WaveSource
from .utils import set_dtype
def save_model(model,
               name,
               savedir='./study/',
               history=None,
               history_geom_state=None,
               cfg=None,
               verbose=True):
    """Save the model state and history to ``<savedir>/<name>.pt``.

    Parameters
    ----------
    model :
        Model whose ``cell.geom`` supplies the geometry and whose
        ``state_dict()`` supplies the weights.
    name :
        Base filename (without extension) for the checkpoint.
    savedir :
        Target directory; created (including parents) if missing.
    history :
        Optional training history stored alongside the state.
    history_geom_state :
        Full history of geometry states; defaults to a one-element list
        holding the current geometry's reconstruction args.
    cfg :
        Optional configuration dict stored in the checkpoint.
    verbose :
        When True, print the path being written.
    """
    # os.path.join works whether or not savedir has a trailing separator;
    # the previous string concatenation silently produced 'mydirname.pt'
    # for savedir='mydir'.
    os.makedirs(savedir, exist_ok=True)
    str_savepath = os.path.join(savedir, name + '.pt')

    if history_geom_state is None:
        history_geom_state = [model.cell.geom.state_reconstruction_args()]

    data = {'model_geom_class_str': model.cell.geom.__class__.__name__,
            # Class name so we know which constructor to call in load()
            'model_state': model.state_dict(),
            # For now just store model state without history (only geom is likely to change)
            'history': history,
            'history_geom_state': history_geom_state,  # Full history of the geometry state
            'cfg': cfg}

    if verbose:
        print("Saving model to %s" % str_savepath)

    torch.save(data, str_savepath)
def new_geometry(class_str, state):
    """Instantiate the geometry class named *class_str* from the ``geom``
    module, constructed from a deep copy of *state* so the caller's dict
    is never shared with the new object."""
    geometry_cls = getattr(geom, class_str)
    return geometry_cls(**copy.deepcopy(state))
def load_model(str_filename, which_iteration=-1):
    """Load a previously saved model and its history from a file.

    :param str_filename: path to a checkpoint produced by :func:`save_model`
    :param which_iteration: index into the saved geometry-state history to
        restore (default -1, i.e. the final state)
    :return: tuple ``(model, history, history_geom_state, cfg)``
    """
    print("Loading model from %s" % str_filename)
    data = torch.load(str_filename)
    # Set the type for floats from the save
    set_dtype(data['cfg']['dtype'])
    # Reconstruct Geometry from the requested point in its history
    new_geom = new_geometry(data['model_geom_class_str'], data['history_geom_state'][which_iteration])
    # Get model state to recreate probes and sources
    model_state = copy.deepcopy(data['model_state'])
    # Parse out the probe and source coords by matching state-dict key names
    px = [model_state[k].item() for k in model_state if 'probes' in k and 'x' in k]
    py = [model_state[k].item() for k in model_state if 'probes' in k and 'y' in k]
    sx = [model_state[k].item() for k in model_state if 'sources' in k and 'x' in k]
    sy = [model_state[k].item() for k in model_state if 'sources' in k and 'y' in k]
    # Manually add the probes and sources
    new_probes = []
    for (x, y) in zip(px, py):
        new_probes.append(WaveIntensityProbe(x, y))
        # TODO(ian): here we should actually try to infer the type of probe (e.g. intensity or not)
    new_sources = []
    for (x, y) in zip(sx, sy):
        new_sources.append(WaveSource(x, y))
    new_cell = WaveCell(model_state['cell.dt'].item(), new_geom)
    new_model = WaveRNN(new_cell, new_sources, new_probes)
    # Put into eval mode (doesn't really matter for us but whatever)
    new_model.eval()
    return new_model, data['history'], data['history_geom_state'], data['cfg']
| 30.920455 | 99 | 0.720691 | import copy
import os
import torch
from . import geom
from .cell import WaveCell
from .probe import WaveIntensityProbe
from .rnn import WaveRNN
from .source import WaveSource
from .utils import set_dtype
def save_model(model,
name,
savedir='./study/',
history=None,
history_geom_state=None,
cfg=None,
verbose=True):
str_filename = name + '.pt'
if not os.path.exists(savedir):
os.makedirs(savedir)
str_savepath = savedir + str_filename
if history_geom_state is None:
history_geom_state = [model.cell.geom.state_reconstruction_args()]
data = {'model_geom_class_str': model.cell.geom.__class__.__name__,
'model_state': model.state_dict(),
'history': history,
'history_geom_state': history_geom_state,
'cfg': cfg}
if verbose:
print("Saving model to %s" % str_savepath)
torch.save(data, str_savepath)
def new_geometry(class_str, state):
WaveGeometryClass = getattr(geom, class_str)
geom_state = copy.deepcopy(state)
return WaveGeometryClass(**geom_state)
def load_model(str_filename, which_iteration=-1):
print("Loading model from %s" % str_filename)
data = torch.load(str_filename)
set_dtype(data['cfg']['dtype'])
new_geom = new_geometry(data['model_geom_class_str'], data['history_geom_state'][which_iteration])
model_state = copy.deepcopy(data['model_state'])
px = [model_state[k].item() for k in model_state if 'probes' in k and 'x' in k]
py = [model_state[k].item() for k in model_state if 'probes' in k and 'y' in k]
sx = [model_state[k].item() for k in model_state if 'sources' in k and 'x' in k]
sy = [model_state[k].item() for k in model_state if 'sources' in k and 'y' in k]
new_probes = []
for (x, y) in zip(px, py):
new_probes.append(WaveIntensityProbe(x, y))
new_sources = []
for (x, y) in zip(sx, sy):
new_sources.append(WaveSource(x, y))
new_cell = WaveCell(model_state['cell.dt'].item(), new_geom)
new_model = WaveRNN(new_cell, new_sources, new_probes)
new_model.eval()
return new_model, data['history'], data['history_geom_state'], data['cfg']
| true | true |
f71dadd500ddc556382c08754efe696aea4fc7e4 | 565 | py | Python | modules/pastebin.py | f0ur0ne/vicky | f4ede29480a14bd10e72066a57dc5bd2139deab9 | [
"MIT"
] | 1 | 2020-05-19T03:42:49.000Z | 2020-05-19T03:42:49.000Z | modules/pastebin.py | f0ur0ne/vicky | f4ede29480a14bd10e72066a57dc5bd2139deab9 | [
"MIT"
] | null | null | null | modules/pastebin.py | f0ur0ne/vicky | f4ede29480a14bd10e72066a57dc5bd2139deab9 | [
"MIT"
] | null | null | null | import requests
dev_key = "redacted"
username = "redacted"
password = "redacted"
header = {"Content-Type": "application/json; charset=utf8"}
privatepaste = 1 #limits for this are confusing http://192.184.83.59/SPG%20All/pastebin.com/faq.html#11a
def pastebin(pastedata):
    """Post *pastedata* to pastebin.com and return the raw API response text.

    On success the response body appears to be the new paste's URL —
    TODO confirm against the pastebin API documentation.
    """
    # NOTE(review): posts as a guest — "api_user_key" is empty and the
    # module-level username/password above are never used here.
    # NOTE(review): the dev key travels over plain http; presumably this
    # should be https — confirm before shipping.
    params = {"api_option": "paste", "api_user_key": "", "api_paste_private": privatepaste, "api_dev_key": dev_key, "api_paste_expire_date": "10M", "api_paste_format": "php", "api_paste_code": pastedata}
    req = requests.post("http://pastebin.com/api/api_post.php", data=params)
    return req.text
| 43.461538 | 200 | 0.739823 | import requests
dev_key = "redacted"
username = "redacted"
password = "redacted"
header = {"Content-Type": "application/json; charset=utf8"}
privatepaste = 1 f pastebin(pastedata):
params = {"api_option": "paste", "api_user_key": "", "api_paste_private": privatepaste, "api_dev_key": dev_key, "api_paste_expire_date": "10M", "api_paste_format": "php", "api_paste_code": pastedata}
req = requests.post("http://pastebin.com/api/api_post.php", data=params)
return req.text
| true | true |
f71daedf5d358af52f6cde39e1fc0f8bde6f2e51 | 5,466 | py | Python | saleor/product/utils/__init__.py | dnordio/saleor | 323963748e6a2702265ec6635b930a234abde4f5 | [
"BSD-3-Clause"
] | 1 | 2019-05-02T17:24:05.000Z | 2019-05-02T17:24:05.000Z | saleor/product/utils/__init__.py | valentine217/saleor | 323963748e6a2702265ec6635b930a234abde4f5 | [
"BSD-3-Clause"
] | null | null | null | saleor/product/utils/__init__.py | valentine217/saleor | 323963748e6a2702265ec6635b930a234abde4f5 | [
"BSD-3-Clause"
] | 1 | 2019-05-23T07:30:50.000Z | 2019-05-23T07:30:50.000Z | from urllib.parse import urlencode
from django.conf import settings
from django.db.models import F
from ...checkout.utils import (
get_checkout_from_request, get_or_create_checkout_from_request)
from ...core.utils import get_paginator_items
from ...core.utils.filters import get_now_sorted_by
from ...core.utils.taxes import ZERO_TAXED_MONEY, TaxedMoney
from ..forms import ProductForm
from .availability import products_with_availability
def products_visible_to_user(user):
# pylint: disable=cyclic-import
from ..models import Product
if user.is_authenticated and user.is_active and user.is_staff:
return Product.objects.all()
return Product.objects.published()
def products_with_details(user):
products = products_visible_to_user(user)
products = products.prefetch_related(
'translations', 'category__translations', 'collections__translations',
'images', 'variants__variant_images__image',
'attributes__values__translations',
'product_type__product_attributes__translations',
'product_type__product_attributes__values__translations')
return products
def products_for_products_list(user):
products = products_visible_to_user(user)
products = products.prefetch_related(
'translations', 'images', 'variants__variant_images__image')
return products
def products_for_homepage(user, homepage_collection):
products = products_visible_to_user(user)
products = products.prefetch_related(
'translations', 'images', 'variants__variant_images__image',
'collections')
products = products.filter(collections=homepage_collection)
return products
def get_product_images(product):
"""Return list of product images that will be placed in product gallery."""
return list(product.images.all())
def handle_checkout_form(request, product, create_checkout=False):
if create_checkout:
checkout = get_or_create_checkout_from_request(request)
else:
checkout = get_checkout_from_request(request)
form = ProductForm(
checkout=checkout, product=product, data=request.POST or None,
discounts=request.discounts, taxes=request.taxes)
return form, checkout
def products_for_checkout(user):
products = products_visible_to_user(user)
products = products.prefetch_related('variants__variant_images__image')
return products
def get_variant_url_from_product(product, attributes):
return '%s?%s' % (product.get_absolute_url(), urlencode(attributes))
def get_variant_url(variant):
attributes = {
str(attribute.pk): attribute
for attribute in variant.product.product_type.variant_attributes.all()}
return get_variant_url_from_product(variant.product, attributes)
def allocate_stock(variant, quantity):
variant.quantity_allocated = F('quantity_allocated') + quantity
variant.save(update_fields=['quantity_allocated'])
def deallocate_stock(variant, quantity):
variant.quantity_allocated = F('quantity_allocated') - quantity
variant.save(update_fields=['quantity_allocated'])
def decrease_stock(variant, quantity):
variant.quantity = F('quantity') - quantity
variant.quantity_allocated = F('quantity_allocated') - quantity
variant.save(update_fields=['quantity', 'quantity_allocated'])
def increase_stock(variant, quantity, allocate=False):
"""Return given quantity of product to a stock."""
variant.quantity = F('quantity') + quantity
update_fields = ['quantity']
if allocate:
variant.quantity_allocated = F('quantity_allocated') + quantity
update_fields.append('quantity_allocated')
variant.save(update_fields=update_fields)
def get_product_list_context(request, filter_set):
"""
:param request: request object
:param filter_set: filter set for product list
:return: context dictionary
"""
# Avoiding circular dependency
from ..filters import SORT_BY_FIELDS
qs = filter_set.qs
if not filter_set.form.is_valid():
qs = qs.none()
products_paginated = get_paginator_items(
qs, settings.PAGINATE_BY, request.GET.get('page'))
products_and_availability = list(products_with_availability(
products_paginated, request.discounts, request.taxes,
request.currency))
now_sorted_by = get_now_sorted_by(filter_set)
arg_sort_by = request.GET.get('sort_by')
is_descending = arg_sort_by.startswith('-') if arg_sort_by else False
return {
'filter_set': filter_set,
'products': products_and_availability,
'products_paginated': products_paginated,
'sort_by_choices': SORT_BY_FIELDS,
'now_sorted_by': now_sorted_by,
'is_descending': is_descending}
def collections_visible_to_user(user):
# pylint: disable=cyclic-import
from ..models import Collection
if user.is_authenticated and user.is_active and user.is_staff:
return Collection.objects.all()
return Collection.objects.published()
def calculate_revenue_for_variant(variant, start_date):
"""Calculate total revenue generated by a product variant."""
revenue = ZERO_TAXED_MONEY
for order_line in variant.order_lines.all():
if order_line.order.created >= start_date:
net = order_line.unit_price_net * order_line.quantity
gross = order_line.unit_price_gross * order_line.quantity
revenue += TaxedMoney(net, gross)
return revenue
| 35.493506 | 79 | 0.742042 | from urllib.parse import urlencode
from django.conf import settings
from django.db.models import F
from ...checkout.utils import (
get_checkout_from_request, get_or_create_checkout_from_request)
from ...core.utils import get_paginator_items
from ...core.utils.filters import get_now_sorted_by
from ...core.utils.taxes import ZERO_TAXED_MONEY, TaxedMoney
from ..forms import ProductForm
from .availability import products_with_availability
def products_visible_to_user(user):
from ..models import Product
if user.is_authenticated and user.is_active and user.is_staff:
return Product.objects.all()
return Product.objects.published()
def products_with_details(user):
products = products_visible_to_user(user)
products = products.prefetch_related(
'translations', 'category__translations', 'collections__translations',
'images', 'variants__variant_images__image',
'attributes__values__translations',
'product_type__product_attributes__translations',
'product_type__product_attributes__values__translations')
return products
def products_for_products_list(user):
products = products_visible_to_user(user)
products = products.prefetch_related(
'translations', 'images', 'variants__variant_images__image')
return products
def products_for_homepage(user, homepage_collection):
products = products_visible_to_user(user)
products = products.prefetch_related(
'translations', 'images', 'variants__variant_images__image',
'collections')
products = products.filter(collections=homepage_collection)
return products
def get_product_images(product):
return list(product.images.all())
def handle_checkout_form(request, product, create_checkout=False):
if create_checkout:
checkout = get_or_create_checkout_from_request(request)
else:
checkout = get_checkout_from_request(request)
form = ProductForm(
checkout=checkout, product=product, data=request.POST or None,
discounts=request.discounts, taxes=request.taxes)
return form, checkout
def products_for_checkout(user):
products = products_visible_to_user(user)
products = products.prefetch_related('variants__variant_images__image')
return products
def get_variant_url_from_product(product, attributes):
return '%s?%s' % (product.get_absolute_url(), urlencode(attributes))
def get_variant_url(variant):
attributes = {
str(attribute.pk): attribute
for attribute in variant.product.product_type.variant_attributes.all()}
return get_variant_url_from_product(variant.product, attributes)
def allocate_stock(variant, quantity):
variant.quantity_allocated = F('quantity_allocated') + quantity
variant.save(update_fields=['quantity_allocated'])
def deallocate_stock(variant, quantity):
variant.quantity_allocated = F('quantity_allocated') - quantity
variant.save(update_fields=['quantity_allocated'])
def decrease_stock(variant, quantity):
variant.quantity = F('quantity') - quantity
variant.quantity_allocated = F('quantity_allocated') - quantity
variant.save(update_fields=['quantity', 'quantity_allocated'])
def increase_stock(variant, quantity, allocate=False):
variant.quantity = F('quantity') + quantity
update_fields = ['quantity']
if allocate:
variant.quantity_allocated = F('quantity_allocated') + quantity
update_fields.append('quantity_allocated')
variant.save(update_fields=update_fields)
def get_product_list_context(request, filter_set):
from ..filters import SORT_BY_FIELDS
qs = filter_set.qs
if not filter_set.form.is_valid():
qs = qs.none()
products_paginated = get_paginator_items(
qs, settings.PAGINATE_BY, request.GET.get('page'))
products_and_availability = list(products_with_availability(
products_paginated, request.discounts, request.taxes,
request.currency))
now_sorted_by = get_now_sorted_by(filter_set)
arg_sort_by = request.GET.get('sort_by')
is_descending = arg_sort_by.startswith('-') if arg_sort_by else False
return {
'filter_set': filter_set,
'products': products_and_availability,
'products_paginated': products_paginated,
'sort_by_choices': SORT_BY_FIELDS,
'now_sorted_by': now_sorted_by,
'is_descending': is_descending}
def collections_visible_to_user(user):
from ..models import Collection
if user.is_authenticated and user.is_active and user.is_staff:
return Collection.objects.all()
return Collection.objects.published()
def calculate_revenue_for_variant(variant, start_date):
revenue = ZERO_TAXED_MONEY
for order_line in variant.order_lines.all():
if order_line.order.created >= start_date:
net = order_line.unit_price_net * order_line.quantity
gross = order_line.unit_price_gross * order_line.quantity
revenue += TaxedMoney(net, gross)
return revenue
| true | true |
f71db06cb92d25ecf2ddda7df1fc07ed62c5f692 | 7,559 | py | Python | tests/unit/test_indicator_node.py | philtrade/gQuant | 08b2a82a257c234b92f097b925f25cab16fd0926 | [
"Apache-2.0"
] | 1 | 2021-07-09T14:49:08.000Z | 2021-07-09T14:49:08.000Z | tests/unit/test_indicator_node.py | philtrade/gQuant | 08b2a82a257c234b92f097b925f25cab16fd0926 | [
"Apache-2.0"
] | null | null | null | tests/unit/test_indicator_node.py | philtrade/gQuant | 08b2a82a257c234b92f097b925f25cab16fd0926 | [
"Apache-2.0"
] | 1 | 2021-03-22T19:54:38.000Z | 2021-03-22T19:54:38.000Z | '''
Technical Indicator Node Unit Tests
To run unittests:
# Using standard library unittest
python -m unittest -v
python -m unittest tests/unit/test_indicator_node.py -v
or
python -m unittest discover <test_directory>
python -m unittest discover -s <directory> -p 'test_*.py'
# Using pytest
# "conda install pytest" or "pip install pytest"
pytest -v tests
pytest -v tests/unit/test_indicator_node.py
'''
import warnings
import unittest
import cudf
import gquant.cuindicator as gi
from gquant.plugin_nodes.transform.indicatorNode import IndicatorNode
from gquant.dataframe_flow.task import Task
from .utils import make_orderer
import numpy as np
import copy
ordered, compare = make_orderer()
unittest.defaultTestLoader.sortTestMethodsUsing = compare
class TestIndicatorNode(unittest.TestCase):
def setUp(self):
warnings.simplefilter('ignore', category=ImportWarning)
warnings.simplefilter('ignore', category=DeprecationWarning)
# ignore importlib warnings.
size = 200
half = size // 2
self.size = size
self.half = half
np.random.seed(10)
random_array = np.random.rand(size)
open_array = np.random.rand(size)
close_array = np.random.rand(size)
high_array = np.random.rand(size)
low_array = np.random.rand(size)
volume_array = np.random.rand(size)
indicator = np.zeros(size, dtype=np.int32)
indicator[0] = 1
indicator[half] = 1
df = cudf.DataFrame()
df['in'] = random_array
df['open'] = open_array
df['close'] = close_array
df['high'] = high_array
df['low'] = low_array
df['volume'] = volume_array
df['indicator'] = indicator
self._cudf_data = df
self.conf = {
"indicators": [
{"function": "port_chaikin_oscillator",
"columns": ["high", "low", "close", "volume"],
"args": [10, 20]},
{"function": "port_bollinger_bands",
"columns": ["close"],
"args": [10],
"outputs": ["b1", "b2"]}
],
"remove_na": True
}
def tearDown(self):
pass
@ordered
def test_colums(self):
'''Test node columns requirments'''
node_obj = {"id": "abc",
"type": "IndicatorNode",
"conf": self.conf,
"inputs": []}
task = Task(node_obj)
inN = IndicatorNode(task)
col = "indicator"
msg = "bad error: %s is missing" % (col)
self.assertTrue(col in inN.required, msg)
col = "high"
msg = "bad error: %s is missing" % (col)
self.assertTrue(col in inN.required, msg)
col = "low"
msg = "bad error: %s is missing" % (col)
self.assertTrue(col in inN.required, msg)
col = "close"
msg = "bad error: %s is missing" % (col)
self.assertTrue(col in inN.required, msg)
col = "volume"
msg = "bad error: %s is missing" % (col)
self.assertTrue(col in inN.required, msg)
col = "CH_OS_10_20"
msg = "bad error: %s is missing" % (col)
self.assertTrue(col in inN.addition, msg)
col = "BO_BA_b1_10"
msg = "bad error: %s is missing" % (col)
self.assertTrue(col in inN.addition, msg)
col = "BO_BA_b2_10"
msg = "bad error: %s is missing" % (col)
self.assertTrue(col in inN.addition, msg)
@ordered
def test_drop(self):
'''Test node columns drop'''
node_obj = {"id": "abc",
"type": "IndicatorNode",
"conf": self.conf,
"inputs": []}
task = Task(node_obj)
inN = IndicatorNode(task)
o = inN.process([self._cudf_data])
msg = "bad error: df len %d is not right" % (len(o))
self.assertTrue(len(o) == 162, msg)
newConf = copy.deepcopy(self.conf)
newConf['remove_na'] = False
node_obj = {"id": "abc",
"type": "IndicatorNode",
"conf": newConf,
"inputs": []}
task = Task(node_obj)
inN = IndicatorNode(task)
o = inN.process([self._cudf_data])
msg = "bad error: df len %d is not right" % (len(o))
self.assertTrue(len(o) == 200, msg)
@ordered
def test_signal(self):
'''Test signal computation'''
newConf = copy.deepcopy(self.conf)
newConf['remove_na'] = False
node_obj = {"id": "abc",
"type": "IndicatorNode",
"conf": newConf,
"inputs": []}
task = Task(node_obj)
inN = IndicatorNode(task)
o = inN.process([self._cudf_data])
# check chaikin oscillator computation
r_cudf = gi.chaikin_oscillator(self._cudf_data[:self.half]['high'],
self._cudf_data[:self.half]['low'],
self._cudf_data[:self.half]['close'],
self._cudf_data[:self.half]['volume'],
10, 20)
computed = o[:self.half]['CH_OS_10_20'].to_array('pandas')
ref = r_cudf.to_array('pandas')
err = np.abs(computed[~np.isnan(computed)] - ref[~np.isnan(ref)]).max()
msg = "bad error %f\n" % (err,)
self.assertTrue(np.isclose(err, 0, atol=1e-6), msg)
r_cudf = gi.chaikin_oscillator(self._cudf_data[self.half:]['high'],
self._cudf_data[self.half:]['low'],
self._cudf_data[self.half:]['close'],
self._cudf_data[self.half:]['volume'],
10, 20)
computed = o[self.half:]['CH_OS_10_20'].to_array('pandas')
ref = r_cudf.to_array('pandas')
err = np.abs(computed[~np.isnan(computed)] - ref[~np.isnan(ref)]).max()
msg = "bad error %f\n" % (err,)
self.assertTrue(np.isclose(err, 0, atol=1e-6), msg)
# check bollinger bands computation
r_cudf = gi.bollinger_bands(self._cudf_data[:self.half]['close'], 10)
computed = o[:self.half]["BO_BA_b1_10"].to_array('pandas')
ref = r_cudf.b1.to_array('pandas')
err = np.abs(computed[~np.isnan(computed)] - ref[~np.isnan(ref)]).max()
msg = "bad error %f\n" % (err,)
self.assertTrue(np.isclose(err, 0, atol=1e-6), msg)
computed = o[:self.half]["BO_BA_b2_10"].to_array('pandas')
ref = r_cudf.b2.to_array('pandas')
err = np.abs(computed[~np.isnan(computed)] - ref[~np.isnan(ref)]).max()
msg = "bad error %f\n" % (err,)
self.assertTrue(np.isclose(err, 0, atol=1e-6), msg)
r_cudf = gi.bollinger_bands(self._cudf_data[self.half:]['close'], 10)
computed = o[self.half:]["BO_BA_b1_10"].to_array('pandas')
ref = r_cudf.b1.to_array('pandas')
err = np.abs(computed[~np.isnan(computed)] - ref[~np.isnan(ref)]).max()
msg = "bad error %f\n" % (err,)
self.assertTrue(np.isclose(err, 0, atol=1e-6), msg)
computed = o[self.half:]["BO_BA_b2_10"].to_array('pandas')
ref = r_cudf.b2.to_array('pandas')
err = np.abs(computed[~np.isnan(computed)] - ref[~np.isnan(ref)]).max()
msg = "bad error %f\n" % (err,)
self.assertTrue(np.isclose(err, 0, atol=1e-6), msg)
if __name__ == '__main__':
unittest.main()
| 36.341346 | 79 | 0.546765 | import warnings
import unittest
import cudf
import gquant.cuindicator as gi
from gquant.plugin_nodes.transform.indicatorNode import IndicatorNode
from gquant.dataframe_flow.task import Task
from .utils import make_orderer
import numpy as np
import copy
ordered, compare = make_orderer()
unittest.defaultTestLoader.sortTestMethodsUsing = compare
class TestIndicatorNode(unittest.TestCase):
def setUp(self):
warnings.simplefilter('ignore', category=ImportWarning)
warnings.simplefilter('ignore', category=DeprecationWarning)
size = 200
half = size // 2
self.size = size
self.half = half
np.random.seed(10)
random_array = np.random.rand(size)
open_array = np.random.rand(size)
close_array = np.random.rand(size)
high_array = np.random.rand(size)
low_array = np.random.rand(size)
volume_array = np.random.rand(size)
indicator = np.zeros(size, dtype=np.int32)
indicator[0] = 1
indicator[half] = 1
df = cudf.DataFrame()
df['in'] = random_array
df['open'] = open_array
df['close'] = close_array
df['high'] = high_array
df['low'] = low_array
df['volume'] = volume_array
df['indicator'] = indicator
self._cudf_data = df
self.conf = {
"indicators": [
{"function": "port_chaikin_oscillator",
"columns": ["high", "low", "close", "volume"],
"args": [10, 20]},
{"function": "port_bollinger_bands",
"columns": ["close"],
"args": [10],
"outputs": ["b1", "b2"]}
],
"remove_na": True
}
def tearDown(self):
pass
@ordered
def test_colums(self):
node_obj = {"id": "abc",
"type": "IndicatorNode",
"conf": self.conf,
"inputs": []}
task = Task(node_obj)
inN = IndicatorNode(task)
col = "indicator"
msg = "bad error: %s is missing" % (col)
self.assertTrue(col in inN.required, msg)
col = "high"
msg = "bad error: %s is missing" % (col)
self.assertTrue(col in inN.required, msg)
col = "low"
msg = "bad error: %s is missing" % (col)
self.assertTrue(col in inN.required, msg)
col = "close"
msg = "bad error: %s is missing" % (col)
self.assertTrue(col in inN.required, msg)
col = "volume"
msg = "bad error: %s is missing" % (col)
self.assertTrue(col in inN.required, msg)
col = "CH_OS_10_20"
msg = "bad error: %s is missing" % (col)
self.assertTrue(col in inN.addition, msg)
col = "BO_BA_b1_10"
msg = "bad error: %s is missing" % (col)
self.assertTrue(col in inN.addition, msg)
col = "BO_BA_b2_10"
msg = "bad error: %s is missing" % (col)
self.assertTrue(col in inN.addition, msg)
@ordered
def test_drop(self):
node_obj = {"id": "abc",
"type": "IndicatorNode",
"conf": self.conf,
"inputs": []}
task = Task(node_obj)
inN = IndicatorNode(task)
o = inN.process([self._cudf_data])
msg = "bad error: df len %d is not right" % (len(o))
self.assertTrue(len(o) == 162, msg)
newConf = copy.deepcopy(self.conf)
newConf['remove_na'] = False
node_obj = {"id": "abc",
"type": "IndicatorNode",
"conf": newConf,
"inputs": []}
task = Task(node_obj)
inN = IndicatorNode(task)
o = inN.process([self._cudf_data])
msg = "bad error: df len %d is not right" % (len(o))
self.assertTrue(len(o) == 200, msg)
@ordered
def test_signal(self):
newConf = copy.deepcopy(self.conf)
newConf['remove_na'] = False
node_obj = {"id": "abc",
"type": "IndicatorNode",
"conf": newConf,
"inputs": []}
task = Task(node_obj)
inN = IndicatorNode(task)
o = inN.process([self._cudf_data])
r_cudf = gi.chaikin_oscillator(self._cudf_data[:self.half]['high'],
self._cudf_data[:self.half]['low'],
self._cudf_data[:self.half]['close'],
self._cudf_data[:self.half]['volume'],
10, 20)
computed = o[:self.half]['CH_OS_10_20'].to_array('pandas')
ref = r_cudf.to_array('pandas')
err = np.abs(computed[~np.isnan(computed)] - ref[~np.isnan(ref)]).max()
msg = "bad error %f\n" % (err,)
self.assertTrue(np.isclose(err, 0, atol=1e-6), msg)
r_cudf = gi.chaikin_oscillator(self._cudf_data[self.half:]['high'],
self._cudf_data[self.half:]['low'],
self._cudf_data[self.half:]['close'],
self._cudf_data[self.half:]['volume'],
10, 20)
computed = o[self.half:]['CH_OS_10_20'].to_array('pandas')
ref = r_cudf.to_array('pandas')
err = np.abs(computed[~np.isnan(computed)] - ref[~np.isnan(ref)]).max()
msg = "bad error %f\n" % (err,)
self.assertTrue(np.isclose(err, 0, atol=1e-6), msg)
r_cudf = gi.bollinger_bands(self._cudf_data[:self.half]['close'], 10)
computed = o[:self.half]["BO_BA_b1_10"].to_array('pandas')
ref = r_cudf.b1.to_array('pandas')
err = np.abs(computed[~np.isnan(computed)] - ref[~np.isnan(ref)]).max()
msg = "bad error %f\n" % (err,)
self.assertTrue(np.isclose(err, 0, atol=1e-6), msg)
computed = o[:self.half]["BO_BA_b2_10"].to_array('pandas')
ref = r_cudf.b2.to_array('pandas')
err = np.abs(computed[~np.isnan(computed)] - ref[~np.isnan(ref)]).max()
msg = "bad error %f\n" % (err,)
self.assertTrue(np.isclose(err, 0, atol=1e-6), msg)
r_cudf = gi.bollinger_bands(self._cudf_data[self.half:]['close'], 10)
computed = o[self.half:]["BO_BA_b1_10"].to_array('pandas')
ref = r_cudf.b1.to_array('pandas')
err = np.abs(computed[~np.isnan(computed)] - ref[~np.isnan(ref)]).max()
msg = "bad error %f\n" % (err,)
self.assertTrue(np.isclose(err, 0, atol=1e-6), msg)
computed = o[self.half:]["BO_BA_b2_10"].to_array('pandas')
ref = r_cudf.b2.to_array('pandas')
err = np.abs(computed[~np.isnan(computed)] - ref[~np.isnan(ref)]).max()
msg = "bad error %f\n" % (err,)
self.assertTrue(np.isclose(err, 0, atol=1e-6), msg)
if __name__ == '__main__':
unittest.main()
| true | true |
f71db0b1d9066233cc40f717f39f446dad22cf25 | 219 | py | Python | test.py | bobbysoon/Taxi3 | 48e01ed063152c834b0e3f43eef9494c7d56b02b | [
"Unlicense"
] | null | null | null | test.py | bobbysoon/Taxi3 | 48e01ed063152c834b0e3f43eef9494c7d56b02b | [
"Unlicense"
] | 1 | 2019-11-13T13:52:12.000Z | 2020-01-31T02:21:25.000Z | test.py | bobbysoon/Taxi3 | 48e01ed063152c834b0e3f43eef9494c7d56b02b | [
"Unlicense"
] | null | null | null | #!/usr/bin/python
from Demo import Demo
from Swarm import Swarm
from Taxi import Taxi
swarm= Swarm(count=24)
demo= Demo(swarm)
while demo.is_open:
demo.draw(Taxi(swarm))
if not demo.paused:
swarm.move(demo.step)
| 15.642857 | 23 | 0.744292 |
from Demo import Demo
from Swarm import Swarm
from Taxi import Taxi
swarm= Swarm(count=24)
demo= Demo(swarm)
while demo.is_open:
demo.draw(Taxi(swarm))
if not demo.paused:
swarm.move(demo.step)
| true | true |
f71db1164a4a62b58d179f38b09f4c707a5ebaf0 | 107 | py | Python | ckanext-hdx_service_checker/ckanext/hdx_service_checker/tests/test_plugin.py | alexandru-m-g/hdx-ckan | 647f1f23f0505fa195601245b758edcaf4d25985 | [
"Apache-2.0"
] | 1 | 2020-03-07T02:47:15.000Z | 2020-03-07T02:47:15.000Z | ckanext-hdx_service_checker/ckanext/hdx_service_checker/tests/test_plugin.py | datopian/hdx-ckan | 2d8871c035a18e48b53859fec522b997b500afe9 | [
"Apache-2.0"
] | null | null | null | ckanext-hdx_service_checker/ckanext/hdx_service_checker/tests/test_plugin.py | datopian/hdx-ckan | 2d8871c035a18e48b53859fec522b997b500afe9 | [
"Apache-2.0"
] | null | null | null | """Tests for plugin.py."""
import ckanext.hdx_service_checker.plugin as plugin
def test_plugin():
pass | 21.4 | 51 | 0.747664 | import ckanext.hdx_service_checker.plugin as plugin
def test_plugin():
pass | true | true |
f71db16a0f56dddf2f635176ae6a0cb63823d0dc | 1,370 | py | Python | src/gwauth/mailer/templates.py | gravitationalwavedc/gwcloud_auth | 83d2a4928aaf86884e0bfc0fff938106a7fcd132 | [
"MIT"
] | null | null | null | src/gwauth/mailer/templates.py | gravitationalwavedc/gwcloud_auth | 83d2a4928aaf86884e0bfc0fff938106a7fcd132 | [
"MIT"
] | 25 | 2020-06-01T05:18:30.000Z | 2022-02-28T03:29:48.000Z | src/gwauth/mailer/templates.py | gravitationalwavedc/gwcloud_auth | 83d2a4928aaf86884e0bfc0fff938106a7fcd132 | [
"MIT"
] | null | null | null | """
Distributed under the MIT License. See LICENSE.txt for more info.
"""
# Templates for different emails
VERIFY_EMAIL_ADDRESS = dict()
VERIFY_EMAIL_ADDRESS['subject'] = '[GW Cloud] Please verify your email address'
VERIFY_EMAIL_ADDRESS['message'] = '<p>Dear {{first_name}} {{last_name}}: </p>' \
'<p>We have received a new account request with our GW Cloud system from this ' \
'email address. Please verify your email address by clicking on the following ' \
'<a href="{{link}}" target="_blank">link</a>: </p>' \
'<p><a href="{{link}}" target="_blank">{{link}}</a> </p>' \
'<p>If you believe that the email has been sent by mistake or you have not ' \
'requested for an account please <strong>do not</strong> click on the link. </p>' \
'<p>Alternatively you can report this incident to <a ' \
'href="mailto:paul.lasky@monash.edu" target="_top">paul.lasky@monash.edu</a> for ' \
'investigation. </p>' \
'<p> </p>' \
'<p>Regards, </p>' \
'<p>GW Cloud Team</p>' | 68.5 | 118 | 0.476642 |
VERIFY_EMAIL_ADDRESS = dict()
VERIFY_EMAIL_ADDRESS['subject'] = '[GW Cloud] Please verify your email address'
VERIFY_EMAIL_ADDRESS['message'] = '<p>Dear {{first_name}} {{last_name}}: </p>' \
'<p>We have received a new account request with our GW Cloud system from this ' \
'email address. Please verify your email address by clicking on the following ' \
'<a href="{{link}}" target="_blank">link</a>: </p>' \
'<p><a href="{{link}}" target="_blank">{{link}}</a> </p>' \
'<p>If you believe that the email has been sent by mistake or you have not ' \
'requested for an account please <strong>do not</strong> click on the link. </p>' \
'<p>Alternatively you can report this incident to <a ' \
'href="mailto:paul.lasky@monash.edu" target="_top">paul.lasky@monash.edu</a> for ' \
'investigation. </p>' \
'<p> </p>' \
'<p>Regards, </p>' \
'<p>GW Cloud Team</p>' | true | true |
f71db23eb8f046faa45c2b20bfa0b74fca05cf1b | 5,071 | py | Python | scipy/fft/tests/test_real_transforms.py | avivajpeyi/scipy | dbfe06e6618232b26c241cbe8861e2ea1489b535 | [
"BSD-3-Clause"
] | 353 | 2020-12-10T10:47:17.000Z | 2022-03-31T23:08:29.000Z | scipy/fft/tests/test_real_transforms.py | avivajpeyi/scipy | dbfe06e6618232b26c241cbe8861e2ea1489b535 | [
"BSD-3-Clause"
] | 80 | 2020-12-10T09:54:22.000Z | 2022-03-30T22:08:45.000Z | scipy/fft/tests/test_real_transforms.py | avivajpeyi/scipy | dbfe06e6618232b26c241cbe8861e2ea1489b535 | [
"BSD-3-Clause"
] | 63 | 2020-12-10T17:10:34.000Z | 2022-03-28T16:27:07.000Z |
import numpy as np
from numpy.testing import assert_allclose, assert_array_equal
import pytest
from scipy.fft import dct, idct, dctn, idctn, dst, idst, dstn, idstn
import scipy.fft as fft
from scipy import fftpack
# scipy.fft wraps the fftpack versions but with normalized inverse transforms.
# So, the forward transforms and definitions are already thoroughly tested in
# fftpack/test_real_transforms.py
@pytest.mark.parametrize("forward, backward", [(dct, idct), (dst, idst)])
@pytest.mark.parametrize("type", [1, 2, 3, 4])
@pytest.mark.parametrize("n", [2, 3, 4, 5, 10, 16])
@pytest.mark.parametrize("axis", [0, 1])
@pytest.mark.parametrize("norm", [None, 'ortho'])
def test_identity_1d(forward, backward, type, n, axis, norm):
# Test the identity f^-1(f(x)) == x
x = np.random.rand(n, n)
y = forward(x, type, axis=axis, norm=norm)
z = backward(y, type, axis=axis, norm=norm)
assert_allclose(z, x)
pad = [(0, 0)] * 2
pad[axis] = (0, 4)
y2 = np.pad(y, pad, mode='edge')
z2 = backward(y2, type, n, axis, norm)
assert_allclose(z2, x)
@pytest.mark.parametrize("forward, backward", [(dct, idct), (dst, idst)])
@pytest.mark.parametrize("type", [1, 2, 3, 4])
@pytest.mark.parametrize("dtype", [np.float16, np.float32, np.float64,
np.complex64, np.complex128])
@pytest.mark.parametrize("axis", [0, 1])
@pytest.mark.parametrize("norm", [None, 'ortho'])
@pytest.mark.parametrize("overwrite_x", [True, False])
def test_identity_1d_overwrite(forward, backward, type, dtype, axis, norm,
overwrite_x):
# Test the identity f^-1(f(x)) == x
x = np.random.rand(7, 8)
x_orig = x.copy()
y = forward(x, type, axis=axis, norm=norm, overwrite_x=overwrite_x)
y_orig = y.copy()
z = backward(y, type, axis=axis, norm=norm, overwrite_x=overwrite_x)
if not overwrite_x:
assert_allclose(z, x, rtol=1e-6, atol=1e-6)
assert_array_equal(x, x_orig)
assert_array_equal(y, y_orig)
else:
assert_allclose(z, x_orig, rtol=1e-6, atol=1e-6)
@pytest.mark.parametrize("forward, backward", [(dctn, idctn), (dstn, idstn)])
@pytest.mark.parametrize("type", [1, 2, 3, 4])
@pytest.mark.parametrize("shape, axes",
[
((4, 4), 0),
((4, 4), 1),
((4, 4), None),
((4, 4), (0, 1)),
((10, 12), None),
((10, 12), (0, 1)),
((4, 5, 6), None),
((4, 5, 6), 1),
((4, 5, 6), (0, 2)),
])
@pytest.mark.parametrize("norm", [None, 'ortho'])
def test_identity_nd(forward, backward, type, shape, axes, norm):
# Test the identity f^-1(f(x)) == x
x = np.random.random(shape)
if axes is not None:
shape = np.take(shape, axes)
y = forward(x, type, axes=axes, norm=norm)
z = backward(y, type, axes=axes, norm=norm)
assert_allclose(z, x)
if axes is None:
pad = [(0, 4)] * x.ndim
elif isinstance(axes, int):
pad = [(0, 0)] * x.ndim
pad[axes] = (0, 4)
else:
pad = [(0, 0)] * x.ndim
for a in axes:
pad[a] = (0, 4)
y2 = np.pad(y, pad, mode='edge')
z2 = backward(y2, type, shape, axes, norm)
assert_allclose(z2, x)
@pytest.mark.parametrize("forward, backward", [(dctn, idctn), (dstn, idstn)])
@pytest.mark.parametrize("type", [1, 2, 3, 4])
@pytest.mark.parametrize("shape, axes",
[
((4, 5), 0),
((4, 5), 1),
((4, 5), None),
])
@pytest.mark.parametrize("dtype", [np.float16, np.float32, np.float64,
np.complex64, np.complex128])
@pytest.mark.parametrize("norm", [None, 'ortho'])
@pytest.mark.parametrize("overwrite_x", [False, True])
def test_identity_nd_overwrite(forward, backward, type, shape, axes, dtype,
norm, overwrite_x):
# Test the identity f^-1(f(x)) == x
x = np.random.random(shape).astype(dtype)
x_orig = x.copy()
if axes is not None:
shape = np.take(shape, axes)
y = forward(x, type, axes=axes, norm=norm)
y_orig = y.copy()
z = backward(y, type, axes=axes, norm=norm)
if overwrite_x:
assert_allclose(z, x_orig, rtol=1e-6, atol=1e-6)
else:
assert_allclose(z, x, rtol=1e-6, atol=1e-6)
assert_array_equal(x, x_orig)
assert_array_equal(y, y_orig)
@pytest.mark.parametrize("func", ['dct', 'dst', 'dctn', 'dstn'])
@pytest.mark.parametrize("type", [1, 2, 3, 4])
@pytest.mark.parametrize("norm", [None, 'ortho'])
def test_fftpack_equivalience(func, type, norm):
x = np.random.rand(8, 16)
fft_res = getattr(fft, func)(x, type, norm=norm)
fftpack_res = getattr(fftpack, func)(x, type, norm=norm)
assert_allclose(fft_res, fftpack_res)
| 34.972414 | 78 | 0.559259 |
import numpy as np
from numpy.testing import assert_allclose, assert_array_equal
import pytest
from scipy.fft import dct, idct, dctn, idctn, dst, idst, dstn, idstn
import scipy.fft as fft
from scipy import fftpack
@pytest.mark.parametrize("forward, backward", [(dct, idct), (dst, idst)])
@pytest.mark.parametrize("type", [1, 2, 3, 4])
@pytest.mark.parametrize("n", [2, 3, 4, 5, 10, 16])
@pytest.mark.parametrize("axis", [0, 1])
@pytest.mark.parametrize("norm", [None, 'ortho'])
def test_identity_1d(forward, backward, type, n, axis, norm):
x = np.random.rand(n, n)
y = forward(x, type, axis=axis, norm=norm)
z = backward(y, type, axis=axis, norm=norm)
assert_allclose(z, x)
pad = [(0, 0)] * 2
pad[axis] = (0, 4)
y2 = np.pad(y, pad, mode='edge')
z2 = backward(y2, type, n, axis, norm)
assert_allclose(z2, x)
@pytest.mark.parametrize("forward, backward", [(dct, idct), (dst, idst)])
@pytest.mark.parametrize("type", [1, 2, 3, 4])
@pytest.mark.parametrize("dtype", [np.float16, np.float32, np.float64,
np.complex64, np.complex128])
@pytest.mark.parametrize("axis", [0, 1])
@pytest.mark.parametrize("norm", [None, 'ortho'])
@pytest.mark.parametrize("overwrite_x", [True, False])
def test_identity_1d_overwrite(forward, backward, type, dtype, axis, norm,
overwrite_x):
x = np.random.rand(7, 8)
x_orig = x.copy()
y = forward(x, type, axis=axis, norm=norm, overwrite_x=overwrite_x)
y_orig = y.copy()
z = backward(y, type, axis=axis, norm=norm, overwrite_x=overwrite_x)
if not overwrite_x:
assert_allclose(z, x, rtol=1e-6, atol=1e-6)
assert_array_equal(x, x_orig)
assert_array_equal(y, y_orig)
else:
assert_allclose(z, x_orig, rtol=1e-6, atol=1e-6)
@pytest.mark.parametrize("forward, backward", [(dctn, idctn), (dstn, idstn)])
@pytest.mark.parametrize("type", [1, 2, 3, 4])
@pytest.mark.parametrize("shape, axes",
                         [
                             ((4, 4), 0),
                             ((4, 4), 1),
                             ((4, 4), None),
                             ((4, 4), (0, 1)),
                             ((10, 12), None),
                             ((10, 12), (0, 1)),
                             ((4, 5, 6), None),
                             ((4, 5, 6), 1),
                             ((4, 5, 6), (0, 2)),
                         ])
@pytest.mark.parametrize("norm", [None, 'ortho'])
def test_identity_nd(forward, backward, type, shape, axes, norm):
    """N-D round trip: backward(forward(x)) == x, also with a padded spectrum."""
    x = np.random.random(shape)
    if axes is not None:
        # Keep only the lengths of the transformed axes; these are later
        # passed to the inverse so padding can be truncated away.
        shape = np.take(shape, axes)
    y = forward(x, type, axes=axes, norm=norm)
    z = backward(y, type, axes=axes, norm=norm)
    assert_allclose(z, x)
    # Build per-axis pad widths: 4 trailing samples on every transformed axis.
    if axes is None:
        pad = [(0, 4)] * x.ndim
    elif isinstance(axes, int):
        pad = [(0, 0)] * x.ndim
        pad[axes] = (0, 4)
    else:
        pad = [(0, 0)] * x.ndim
        for a in axes:
            pad[a] = (0, 4)
    y2 = np.pad(y, pad, mode='edge')
    # Passing the original lengths makes the inverse ignore the padding.
    z2 = backward(y2, type, shape, axes, norm)
    assert_allclose(z2, x)
@pytest.mark.parametrize("forward, backward", [(dctn, idctn), (dstn, idstn)])
@pytest.mark.parametrize("type", [1, 2, 3, 4])
@pytest.mark.parametrize("shape, axes",
                         [
                             ((4, 5), 0),
                             ((4, 5), 1),
                             ((4, 5), None),
                         ])
@pytest.mark.parametrize("dtype", [np.float16, np.float32, np.float64,
                                   np.complex64, np.complex128])
@pytest.mark.parametrize("norm", [None, 'ortho'])
@pytest.mark.parametrize("overwrite_x", [False, True])
def test_identity_nd_overwrite(forward, backward, type, shape, axes, dtype,
                               norm, overwrite_x):
    """Round-trip an N-D transform while exercising the overwrite_x flag.

    Bug fix: ``overwrite_x`` was parametrized but never forwarded to the
    transforms, so the True cases silently re-tested the False behaviour; it
    is now passed through.  (A dead ``shape = np.take(shape, axes)``
    recomputation was removed -- ``shape`` only sizes the input here.)
    """
    x = np.random.random(shape).astype(dtype)
    x_orig = x.copy()
    y = forward(x, type, axes=axes, norm=norm, overwrite_x=overwrite_x)
    y_orig = y.copy()
    z = backward(y, type, axes=axes, norm=norm, overwrite_x=overwrite_x)
    if overwrite_x:
        # Buffers may have been reused; only the saved copy is trustworthy.
        assert_allclose(z, x_orig, rtol=1e-6, atol=1e-6)
    else:
        assert_allclose(z, x, rtol=1e-6, atol=1e-6)
        assert_array_equal(x, x_orig)
        assert_array_equal(y, y_orig)
@pytest.mark.parametrize("func", ['dct', 'dst', 'dctn', 'dstn'])
@pytest.mark.parametrize("type", [1, 2, 3, 4])
@pytest.mark.parametrize("norm", [None, 'ortho'])
def test_fftpack_equivalience(func, type, norm):
    """scipy.fft transforms must agree numerically with scipy.fftpack's."""
    samples = np.random.rand(8, 16)
    new_impl = getattr(fft, func)
    legacy_impl = getattr(fftpack, func)
    assert_allclose(new_impl(samples, type, norm=norm),
                    legacy_impl(samples, type, norm=norm))
| true | true |
f71db30154f5a7f8493945b56142710aef1d8b07 | 401 | py | Python | spotter/spotter_proj/asgi.py | gulpinhenry/spotter | 2a3f828f2e09dc4835861e2be489f537a197b19a | [
"MIT"
] | 1 | 2022-02-05T23:04:04.000Z | 2022-02-05T23:04:04.000Z | spotter/spotter_proj/asgi.py | gulpinhenry/spotter | 2a3f828f2e09dc4835861e2be489f537a197b19a | [
"MIT"
] | null | null | null | spotter/spotter_proj/asgi.py | gulpinhenry/spotter | 2a3f828f2e09dc4835861e2be489f537a197b19a | [
"MIT"
] | 1 | 2022-02-06T23:16:16.000Z | 2022-02-06T23:16:16.000Z | """
ASGI config for spotter_proj project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/4.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
# Point Django at the project's settings module before building the app.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'spotter_proj.settings')
# Module-level ASGI callable that ASGI servers import and serve.
application = get_asgi_application()
| 23.588235 | 78 | 0.790524 |
import os
from django.core.asgi import get_asgi_application
# Configure the settings module before the application object is created.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'spotter_proj.settings')
# ASGI entry point used by the deployment server.
application = get_asgi_application()
| true | true |
f71db3b46c1a74a09b80fe88cca4e193ac811a77 | 783 | py | Python | starling_sim/basemodel/agent/spatial_agent.py | tellae/starling | 56121c728eb5de3dfc77cdf08da89548f3315c87 | [
"CECILL-B"
] | 19 | 2021-02-16T12:32:22.000Z | 2022-01-06T11:16:44.000Z | starling_sim/basemodel/agent/spatial_agent.py | tellae/starling | 56121c728eb5de3dfc77cdf08da89548f3315c87 | [
"CECILL-B"
] | 20 | 2021-01-13T20:58:07.000Z | 2022-03-21T15:53:07.000Z | starling_sim/basemodel/agent/spatial_agent.py | tellae/starling | 56121c728eb5de3dfc77cdf08da89548f3315c87 | [
"CECILL-B"
] | null | null | null | from starling_sim.basemodel.agent.agent import Agent
class SpatialAgent(Agent):
    """An agent that occupies a location in the simulation environment.

    The starting location is kept in ``origin`` and the current location in
    ``position``; both are identical right after construction.
    """

    # JSON-schema fragment describing the "origin" input parameter.
    SCHEMA = {
        "properties": {
            "origin": {
                "type": ["number", "string"],
                "title": "Origin position",
                "description": "origin position id (inferred from geometry)",
            }
        },
        "required": ["origin"]
    }

    def __init__(self, simulation_model, agent_id, origin, **kwargs):
        """Initialise the base agent, then place it at its origin position."""
        Agent.__init__(self, simulation_model, agent_id, **kwargs)
        self.position = origin
        self.origin = origin

    def __str__(self):
        """Short textual tag used in logs and traces."""
        template = "[id={}, origin={}]"
        return template.format(self.id, self.origin)
| 25.258065 | 95 | 0.56705 | from starling_sim.basemodel.agent.agent import Agent
class SpatialAgent(Agent):
    """Agent with an origin and a current position in the simulation environment."""
    # JSON-schema fragment for the "origin" input parameter.
    SCHEMA = {
        "properties": {
            "origin": {
                "type": ["number", "string"],
                "title": "Origin position",
                "description": "origin position id (inferred from geometry)",
            }
        },
        "required": ["origin"]
    }
    def __init__(self, simulation_model, agent_id, origin, **kwargs):
        """Initialise the base agent and place it at its origin."""
        Agent.__init__(self, simulation_model, agent_id, **kwargs)
        self.origin = origin
        # The current position starts at the origin.
        self.position = origin
    def __str__(self):
        """Short textual tag used in logs."""
        return "[id={}, origin={}]".format(self.id, self.origin)
| true | true |
f71db3fb67e6c69828ce6a77378bfdcbb73779fb | 1,235 | py | Python | src/dsloader/kronecker.py | willshiao/brgan | 99d1627176a59811bf9032ef1f99d6e7261095fb | [
"MIT"
] | 1 | 2021-10-03T15:58:48.000Z | 2021-10-03T15:58:48.000Z | src/dsloader/kronecker.py | willshiao/brgan | 99d1627176a59811bf9032ef1f99d6e7261095fb | [
"MIT"
] | null | null | null | src/dsloader/kronecker.py | willshiao/brgan | 99d1627176a59811bf9032ef1f99d6e7261095fb | [
"MIT"
] | null | null | null | import networkx as nx
import numpy as np
import torch
from torch.utils.data import Dataset
from dsloader.util import kron_graph, random_binary, make_fractional
class KroneckerDataset(Dataset):
    """Dataset of perturbed Kronecker-graph adjacency matrices.

    For each of ``num_graphs`` random binary seed matrices (or one
    caller-supplied ``fixed_seed``), the Kronecker graph is expanded
    ``kron_iter`` times and ``perms_per_graph`` noisy "fractional" copies of
    its adjacency matrix are stored as samples.

    Fixes: ``np.float`` (removed in NumPy >= 1.24) is replaced by the builtin
    ``float`` it aliased, and the fixed-seed graph is built once instead of
    once per generated graph.
    """

    def __init__(self, kron_iter=4, seed_size=4, fixed_seed=None, num_graphs=1, perms_per_graph=256, progress_bar=False):
        self.kron_iter = kron_iter
        self.seed_size = seed_size
        # Node count of the expanded graph: seed_size ** (kron_iter + 1).
        self.num_nodes = seed_size ** (kron_iter + 1)
        self.seeds = []
        self.matrices = []
        num_iter = range(num_graphs)
        if progress_bar:
            from tqdm import tqdm  # local import keeps tqdm optional
            num_iter = tqdm(num_iter)
        # With a fixed seed the expanded graph never changes, so build it once.
        fixed_k_g = None
        if fixed_seed is not None:
            fixed_k_g = kron_graph(fixed_seed, n=kron_iter).astype(float)
        for i in num_iter:
            seed = random_binary(seed_size, use_sparsity=False)
            self.seeds.append(seed)
            if fixed_k_g is not None:
                k_g = fixed_k_g
            else:
                k_g = kron_graph(seed, n=kron_iter).astype(float)
            for j in range(perms_per_graph):
                # inplace=False: every sample perturbs a fresh copy of k_g.
                self.matrices.append(make_fractional(k_g, inplace=False))

    def __len__(self):
        """Number of stored samples (num_graphs * perms_per_graph)."""
        return len(self.matrices)

    def __getitem__(self, idx):
        """Return sample *idx* as a torch tensor."""
        return torch.tensor(self.matrices[idx])
| 30.875 | 121 | 0.630769 | import networkx as nx
import numpy as np
import torch
from torch.utils.data import Dataset
from dsloader.util import kron_graph, random_binary, make_fractional
class KroneckerDataset(Dataset):
    """Torch dataset yielding noisy adjacency matrices of Kronecker graphs.

    Fix: ``np.float`` was removed from NumPy (>= 1.24); it was an alias of the
    builtin ``float``, which is used instead, preserving the dtype exactly.
    """

    def __init__(self, kron_iter=4, seed_size=4, fixed_seed=None, num_graphs=1, perms_per_graph=256, progress_bar=False):
        """Generate num_graphs seed matrices and perms_per_graph samples each."""
        self.kron_iter = kron_iter
        self.seed_size = seed_size
        # Node count of the expanded Kronecker graph.
        self.num_nodes = seed_size ** (kron_iter + 1)
        self.seeds = []
        self.matrices = []
        num_iter = range(num_graphs)
        if progress_bar:
            from tqdm import tqdm  # optional dependency, imported lazily
            num_iter = tqdm(num_iter)
        for i in num_iter:
            seed = random_binary(seed_size, use_sparsity=False)
            self.seeds.append(seed)
            if fixed_seed is not None:
                k_g = kron_graph(fixed_seed, n=kron_iter).astype(float)
            else:
                k_g = kron_graph(seed, n=kron_iter).astype(float)
            for j in range(perms_per_graph):
                self.matrices.append(make_fractional(k_g, inplace=False))

    def __len__(self):
        """Total number of stored samples."""
        return len(self.matrices)

    def __getitem__(self, idx):
        """Return sample *idx* as a torch.Tensor."""
        return torch.tensor(self.matrices[idx])
| true | true |
f71db4024d86975fa3fe5c0de9ee417e25b9a19b | 2,132 | py | Python | examples/assignment3/MH.py | koriavinash1/pgm | 89e11b61f7141a75d8991ff4ea229ef66d7a4a0c | [
"MIT"
] | 4 | 2020-02-25T06:14:16.000Z | 2020-12-07T11:08:18.000Z | examples/assignment3/MH.py | koriavinash1/pgm | 89e11b61f7141a75d8991ff4ea229ef66d7a4a0c | [
"MIT"
] | 2 | 2020-03-24T05:37:44.000Z | 2020-04-02T04:48:57.000Z | examples/assignment3/MH.py | koriavinash1/pgm | 89e11b61f7141a75d8991ff4ea229ef66d7a4a0c | [
"MIT"
] | 2 | 2020-03-23T16:07:04.000Z | 2020-04-02T04:48:50.000Z | import sys
import numpy as np
sys.path.append('../..')
from pgm.inference.MetropolisHastings import MH
from matplotlib import pyplot as plt
def Gamma(theta, k = 1):
    """Return a Gamma-like pdf callable with scale *theta* and shape *k*.

    The returned function evaluates the density at |x| (symmetric in x); the
    normalising constant uses the recursive product G(k) = k * G(k - 1), with
    G(0.5) = sqrt(pi) and the recursion stopping at k <= 0.
    """
    def G(k):
        if k <= 0:
            return 1
        if k == 0.5:
            return np.pi ** 0.5
        return k * G(k - 1)

    def distribution(x):
        magnitude = np.abs(x)
        return (magnitude ** (k - 1)) * np.exp(-magnitude / theta) / ((theta ** k) * G(k))

    return distribution
def proposalDistribution(sigma=0.1):
    """Gaussian proposal for Metropolis-Hastings with a fixed standard deviation.

    Returns a pair ``(QDistribution, sampler)``:
      * ``QDistribution(param)`` -> pdf of N(param, sigma**2)
      * ``sampler(x)`` -> one random draw from N(x, sigma**2)

    Bug fix: the exponent previously read ``-(x-param)**2 / sigma**2``, which
    is not the normal density and disagreed with the ``np.random.normal``
    sampler; the missing factor of 2 is restored (``2 * sigma**2``).  The
    proposal remains symmetric, so MH acceptance ratios are unaffected.
    """
    def QDistribution(param=0):
        # pdf of a normal distribution centred at *param*.
        return lambda x: (1 / (((2 * np.pi) ** 0.5) * sigma)) * np.exp(
            -((x - param) ** 2) / (2 * sigma ** 2))

    return QDistribution, lambda x: np.random.normal(x, sigma)
# ==========================================
# Target density and candidate hyper-parameter grids for the MH experiment.
function = Gamma(theta=5.5, k=1)
sigma = [0.1, 1.0, 2.0]
burnin = [2, 5, 10, 100, 200]
"""
for sig in sigma:
    for _burnin in burnin:
        proposalDist, proposalSamp = proposalDistribution(sig)
        mh = MH(function, _burnin, proposalDist, proposalSamp)
        nMontecarlo = 1000
        for _ in range(nMontecarlo):
            next(mh.sampler())
        sampledvalues = np.array(mh.x_seq)
        print("sig, burin, mean, bacc, cacc: ", sig, _burnin, np.mean(sampledvalues), np.mean(mh.burninAcc), np.mean(mh.collectionAcc))
"""
# Draw 1000 MH samples (burn-in 100, proposal sigma 2.0) and compare the
# sample histogram against the target pdf.
x = np.linspace(-20, 20, 500)
fx = function(x)
proposalDist, proposalSamp = proposalDistribution(sigma = 2.0)
mh = MH(function, 100, proposalDist, proposalSamp)
for _ in range(1000):
    next(mh.sampler())
sampledvalues = np.array(mh.x_seq)
plt.plot(x, fx, 'b--', linewidth=2.0)
hist = np.histogram(sampledvalues, bins=50)
x = hist[1][1:]
hist = hist[0]
print(hist.shape, x.shape)
# Rescale the histogram so its peak matches the pdf's peak for display.
hist = hist*np.max(fx)/np.max(hist)
plt.bar(x, hist, color = 'g', width=1.8, alpha=0.7)
# plt.hist(sampledvalues, 50, density=True, stacked=True, facecolor='g', alpha=0.7, linewidth=0)
plt.legend(['target pdf', 'sampled histogram'])
plt.show()
# Trace plot of the sampled chain.
plt.plot(sampledvalues, linewidth=2.0)
plt.ylim(-20.0, 20.0)
plt.show()
| 27.688312 | 136 | 0.626173 | import sys
import numpy as np
sys.path.append('../..')
from pgm.inference.MetropolisHastings import MH
from matplotlib import pyplot as plt
def Gamma(theta, k = 1):
    """Return a pdf-like callable for a Gamma(theta, k) density evaluated at |x|."""
    def G(k):
        # Recursive product k*G(k-1); G(0.5) = sqrt(pi), recursion stops at k <= 0.
        if k <= 0: return 1
        elif k == 0.5: return np.pi **0.5
        return k*G(k-1)
    def distribution(x):
        # The density is symmetric: it is evaluated on |x|.
        x = np.abs(x)
        return (x**(k-1))*np.exp(-x/theta)/((theta**k) * G(k))
    return distribution
def proposalDistribution(sigma=0.1):
    """Return (pdf-factory, sampler) for a Gaussian MH proposal with fixed sigma.

    NOTE(review): the exponent is -(x-param)**2 / sigma**2, i.e. it lacks the
    factor 2 of the true normal density and so disagrees with the
    np.random.normal sampler; harmless for symmetric MH ratios, but confirm.
    """
    def QDistribution(param = 0):
        return lambda x: (1/(((2*np.pi)**0.5) * sigma))*np.exp(-((x-param)**2)/ (sigma**2))
    return QDistribution, lambda x: np.random.normal(x, sigma)
# Target density and candidate hyper-parameter grids for the MH experiment.
function = Gamma(theta=5.5, k=1)
sigma = [0.1, 1.0, 2.0]
burnin = [2, 5, 10, 100, 200]
# Draw 1000 samples (burn-in 100, proposal sigma 2.0) from the target.
x = np.linspace(-20, 20, 500)
fx = function(x)
proposalDist, proposalSamp = proposalDistribution(sigma = 2.0)
mh = MH(function, 100, proposalDist, proposalSamp)
for _ in range(1000):
    next(mh.sampler())
sampledvalues = np.array(mh.x_seq)
# Overlay the target pdf with the (peak-normalised) sample histogram.
plt.plot(x, fx, 'b--', linewidth=2.0)
hist = np.histogram(sampledvalues, bins=50)
x = hist[1][1:]
hist = hist[0]
print(hist.shape, x.shape)
hist = hist*np.max(fx)/np.max(hist)
plt.bar(x, hist, color = 'g', width=1.8, alpha=0.7)
plt.legend(['target pdf', 'sampled histogram'])
plt.show()
# Trace plot of the sampled chain.
plt.plot(sampledvalues, linewidth=2.0)
plt.ylim(-20.0, 20.0)
plt.show()
| true | true |
f71db4e455ac4c2288344181a36e874628a54146 | 1,959 | py | Python | projects/20130381/3rd/impassion_community/impassionuser/views.py | sisobus/WebStudio2019 | 2f659a84647110bcf975525905722931fa7055b3 | [
"MIT"
] | 14 | 2019-03-06T10:32:40.000Z | 2021-11-18T01:44:28.000Z | projects/20130381/3rd/impassion_community/impassionuser/views.py | sisobus/WebStudio2019 | 2f659a84647110bcf975525905722931fa7055b3 | [
"MIT"
] | 35 | 2019-03-13T07:04:02.000Z | 2019-10-08T06:26:45.000Z | projects/20130381/3rd/impassion_community/impassionuser/views.py | sisobus/WebStudio2019 | 2f659a84647110bcf975525905722931fa7055b3 | [
"MIT"
] | 22 | 2019-03-11T11:00:24.000Z | 2019-09-14T06:53:30.000Z | from django.shortcuts import render, redirect
from .models import Impassionuser
from django.http import HttpResponse
from django.contrib.auth.hashers import make_password, check_password
from .forms import LoginForm
# Create your views here.
def home(request):
    """Render the landing page."""
    return render(request, 'home.html')
def about_us(request):
    """Render the about-us page."""
    return render(request, 'about_us.html')
def open_session(request):
    """Render the open-session page."""
    return render(request, 'open_session.html')
def login(request):
    """Log a user in by storing their Impassionuser id in the session.

    NOTE(review): the lookup uses ``form.useremail`` -- this only works if
    LoginForm's clean() stores that attribute on the form instance; otherwise
    it should read ``form.cleaned_data``.  Confirm against the form class.
    NOTE(review): the submitted password is never verified in this view
    (check_password is imported but unused) -- presumably validation happens
    inside ``form.is_valid()``; confirm.
    """
    if request.method =='POST':
        form = LoginForm(request.POST)
        if form.is_valid():
            impassionuser = Impassionuser.objects.get(useremail=form.useremail)
            request.session['user']= impassionuser.id
            return redirect('/')
    else:
        form=LoginForm()
    # An invalid POST falls through and re-renders the login page with errors.
    return render(request, 'login.html', {'form': form})
def logout(request):
    """Drop the logged-in user's id from the session, then go back home."""
    user_id = request.session.get('user')
    if user_id:
        del request.session['user']
    return redirect('/')
def register(request):
    """Create a new Impassionuser from the registration form.

    Renders register.html on GET; on POST validates the fields, stores the
    user with a hashed password, and renders home.html (with an 'error' key
    in the context when validation fails).
    """
    if request.method == 'GET':
        return render(request, 'register.html')
    elif request.method == 'POST':
        username = request.POST.get('username', None)
        useremail = request.POST.get('useremail', None)
        password = request.POST.get('password', None)
        re_password = request.POST.get('re-password', None)
        cardinal_number=request.POST.get('cardinal_number', None)
        res_data={}
        if not (username and useremail and password and re_password and cardinal_number):
            # Every field is required.
            res_data['error'] = '모든 값을 입력해야합니다.'
        elif password != re_password:
            # Password and confirmation must match.
            res_data['error'] = '비밀번호가 다릅니다.'
        else:
            impassionuser = Impassionuser(
                username=username,
                useremail=useremail,
                password=make_password(password),  # never store plain text
                cardinal_number=cardinal_number
            )
            impassionuser.save()
        # NOTE(review): non-GET/POST methods return None (Django would raise),
        # and failed validation re-renders home.html rather than the
        # registration form -- confirm this is intended.
        return render(request, 'home.html', res_data)
| 30.138462 | 89 | 0.640123 | from django.shortcuts import render, redirect
from .models import Impassionuser
from django.http import HttpResponse
from django.contrib.auth.hashers import make_password, check_password
from .forms import LoginForm
def home(request):
    """Render the landing page."""
    return render(request, 'home.html')
def about_us(request):
    """Render the about-us page."""
    return render(request, 'about_us.html')
def open_session(request):
    """Render the open-session page."""
    return render(request, 'open_session.html')
def login(request):
    """Store the matching Impassionuser's id in the session on a valid POST.

    NOTE(review): relies on ``form.useremail`` and performs no password check
    here -- confirm LoginForm provides both behaviours.
    """
    if request.method =='POST':
        form = LoginForm(request.POST)
        if form.is_valid():
            impassionuser = Impassionuser.objects.get(useremail=form.useremail)
            request.session['user']= impassionuser.id
            return redirect('/')
    else:
        form=LoginForm()
    return render(request, 'login.html', {'form': form})
def logout(request):
    """Remove the user id from the session and redirect to the home page."""
    if request.session.get('user'):
        del(request.session['user'])
    return redirect('/')
def register(request):
    """Render the sign-up form on GET; create the user on a valid POST."""
    if request.method == 'GET':
        return render(request, 'register.html')
    elif request.method == 'POST':
        username = request.POST.get('username', None)
        useremail = request.POST.get('useremail', None)
        password = request.POST.get('password', None)
        re_password = request.POST.get('re-password', None)
        cardinal_number=request.POST.get('cardinal_number', None)
        res_data={}
        if not (username and useremail and password and re_password and cardinal_number):
            res_data['error'] = '모든 값을 입력해야합니다.'
        elif password != re_password:
            res_data['error'] = '비밀번호가 다릅니다.'
        else:
            impassionuser = Impassionuser(
                username=username,
                useremail=useremail,
                password=make_password(password),
                cardinal_number=cardinal_number
            )
            impassionuser.save()
        return render(request, 'home.html', res_data)
| true | true |
f71db52a6273627b9fdb578a9b437983757a0692 | 8,369 | py | Python | src/sagemaker_tensorflow_container/training.py | Freakawho/sagemaker-tensorflow-training-toolkit-master | f37c7d85600beb5461788db8c471b66c25beff8f | [
"Apache-2.0"
] | 156 | 2018-07-10T13:37:16.000Z | 2020-06-04T13:40:17.000Z | src/sagemaker_tensorflow_container/training.py | Freakawho/sagemaker-tensorflow-training-toolkit-master | f37c7d85600beb5461788db8c471b66c25beff8f | [
"Apache-2.0"
] | 166 | 2018-07-09T09:03:26.000Z | 2020-06-10T23:27:52.000Z | src/sagemaker_tensorflow_container/training.py | Freakawho/sagemaker-tensorflow-training-toolkit-master | f37c7d85600beb5461788db8c471b66c25beff8f | [
"Apache-2.0"
] | 129 | 2018-07-04T20:00:29.000Z | 2020-06-10T02:47:54.000Z | # Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License'). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the 'license' file accompanying this file. This file is
# distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from __future__ import absolute_import
import json
import logging
import multiprocessing
import os
import subprocess
import time
from sagemaker_training import entry_point, environment, mapping, runner
import tensorflow as tf
from sagemaker_tensorflow_container import s3_utils
logger = logging.getLogger(__name__)
SAGEMAKER_PARAMETER_SERVER_ENABLED = "sagemaker_parameter_server_enabled"
MODEL_DIR = "/opt/ml/model"
def _is_host_master(hosts, current_host):
    """Return True when *current_host* is the first (master) host in *hosts*."""
    master = hosts[0]
    return master == current_host
def _build_tf_config(hosts, current_host, ps_task=False):
    """Describe the TF_CONFIG cluster layout for a SageMaker training cluster.

    The first host acts as the master, every other host as a worker; when the
    cluster has more than one host, every host additionally runs a parameter
    server (port 2223).

    Args:
        hosts (list[str]): all host names in the cluster
        current_host (str): name of the host this process runs on
        ps_task (bool): True when building the config for a parameter-server
            process (default: False)

    Returns:
        dict: TF_CONFIG-style dictionary with "cluster", "environment" and
        "task" entries.

    Raises:
        ValueError: if ps_task is True but the cluster has no parameter servers.
    """
    def addresses(names, port=2222):
        # Render "host:port" endpoint strings for every host in *names*.
        return ["{}:{}".format(name, port) for name in names]

    ps_hosts = hosts if len(hosts) > 1 else None
    worker_hosts = hosts[1:]

    config = {"cluster": {"master": addresses(hosts[:1])}, "environment": "cloud"}
    if ps_hosts:
        config["cluster"]["ps"] = addresses(ps_hosts, port="2223")
    if worker_hosts:
        config["cluster"]["worker"] = addresses(worker_hosts)

    if ps_task:
        if ps_hosts is None:
            raise ValueError(
                "Cannot have a ps task if there are no parameter servers in the cluster"
            )
        task_type = "ps"
        task_index = ps_hosts.index(current_host)
    elif _is_host_master(hosts, current_host):
        task_type = "master"
        task_index = 0
    else:
        task_type = "worker"
        task_index = worker_hosts.index(current_host)

    config["task"] = {"index": task_index, "type": task_type}
    return config
def _run_ps(env, cluster):
    """Start a TensorFlow parameter server for this host in a background process.

    Args:
        env: SageMaker training environment (provides hosts / current_host).
        cluster (dict): the "cluster" entry of the TF_CONFIG in use.
    """
    logger.info("Running distributed training job with parameter servers")
    cluster_spec = tf.train.ClusterSpec(cluster)
    # The ps task index of this host is its position in the host list.
    task_index = env.hosts.index(env.current_host)
    # Force parameter server to run on cpu. Running multiple TensorFlow processes on the same
    # GPU is not safe:
    # https://stackoverflow.com/questions/46145100/is-it-unsafe-to-run-multiple-tensorflow-processes-on-the-same-gpu
    no_gpu_config = tf.ConfigProto(device_count={"GPU": 0})
    server = tf.train.Server(
        cluster_spec, job_name="ps", task_index=task_index, config=no_gpu_config
    )
    # join() blocks forever, so the server is parked in its own process.
    multiprocessing.Process(target=lambda: server.join()).start()
def _run_worker(env, cmd_args, tf_config):
    """Run the user's training script with TF_CONFIG exported in its environment."""
    env_vars = env.to_env_vars()
    # TensorFlow reads the cluster/task description from this variable.
    env_vars["TF_CONFIG"] = json.dumps(tf_config)
    entry_point.run(
        uri=env.module_dir,
        user_entry_point=env.user_entry_point,
        args=cmd_args,
        env_vars=env_vars,
        capture_error=True,
    )
def _wait_until_master_is_down(master):
    """Block until the master host stops accepting connections on port 2222.

    Worker hosts use this so their parameter server keeps running for as long
    as the master is alive; the master is polled with curl every 10 seconds.
    """
    while True:
        try:
            subprocess.check_call(
                ["curl", "{}:2222".format(master)], stdout=subprocess.PIPE, stderr=subprocess.PIPE
            )
            logger.info("master {} is still up, waiting for it to exit".format(master))
            time.sleep(10)
        except subprocess.CalledProcessError:
            # curl could not connect: the master process has exited.
            logger.info("master {} is down, stopping parameter server".format(master))
            return
def train(env, cmd_args):
    """Get training job environment from env and run the training job.
    Args:
        env (sagemaker_training.env.TrainingEnv): Instance of TrainingEnv class
        cmd_args (list): command-line arguments forwarded to the entry point
    """
    parameter_server_enabled = env.additional_framework_parameters.get(
        SAGEMAKER_PARAMETER_SERVER_ENABLED, False
    )
    # Multi-host job with parameter servers: launch a ps process and a worker
    # process on every host.
    if len(env.hosts) > 1 and parameter_server_enabled:
        tf_config = _build_tf_config(hosts=env.hosts, current_host=env.current_host)
        logger.info("Running distributed training job with parameter servers")
        logger.info("Launching parameter server process")
        _run_ps(env, tf_config["cluster"])
        logger.info("Launching worker process")
        _run_worker(env, cmd_args, tf_config)
        # Non-master hosts keep their parameter server alive until the master
        # finishes, so they block here.
        if not _is_host_master(env.hosts, env.current_host):
            _wait_until_master_is_down(env.hosts[0])
    else:
        # Single-host (or non-ps) job: run directly, with MPI when requested.
        mpi_enabled = env.additional_framework_parameters.get("sagemaker_mpi_enabled")
        if mpi_enabled:
            runner_type = runner.MPIRunnerType
        else:
            runner_type = runner.ProcessRunnerType
        entry_point.run(
            uri=env.module_dir,
            user_entry_point=env.user_entry_point,
            args=cmd_args,
            env_vars=env.to_env_vars(),
            capture_error=True,
            runner_type=runner_type,
        )
def _log_model_missing_warning(model_dir):
    """Warn when *model_dir* holds no servable TensorFlow SavedModel.

    Walks the directory tree and logs a warning when it contains no files at
    all, no saved_model.pb/.pbtxt bundle, or a bundle whose direct parent
    directory is not a numeric version name (required by TF Serving).

    Fix: uses ``logger.warning`` (``logger.warn`` is a deprecated alias) and
    the bound ``str.isdigit`` idiom.
    """
    pb_file_exists = False
    file_exists = False
    for dirpath, dirnames, filenames in os.walk(model_dir):
        if filenames:
            file_exists = True
        for f in filenames:
            if "saved_model.pb" in f or "saved_model.pbtxt" in f:
                pb_file_exists = True
                _, direct_parent_dir = os.path.split(dirpath)
                # TF Serving expects the SavedModel under a numeric version dir.
                if not direct_parent_dir.isdigit():
                    logger.warning(
                        "Your model will NOT be servable with SageMaker TensorFlow Serving containers. "
                        'The SavedModel bundle is under directory "{}", not a numeric name.'.format(
                            direct_parent_dir
                        )
                    )
    if not file_exists:
        logger.warning(
            "No model artifact is saved under path {}."
            " Your training job will not save any model files to S3.\n"
            "For details of how to construct your training script see:\n"
            "https://sagemaker.readthedocs.io/en/stable/using_tf.html#adapting-your-local-tensorflow-script".format(
            model_dir
            )
        )
    elif not pb_file_exists:
        logger.warning(
            "Your model will NOT be servable with SageMaker TensorFlow Serving container. "
            "The model artifact was not saved in the TensorFlow SavedModel directory structure:\n"
            "https://www.tensorflow.org/guide/saved_model#structure_of_a_savedmodel_directory"
        )
def _model_dir_with_training_job(model_dir, job_name):
    """Return a model_dir made unique per training job.

    Local paths under /opt/ml are kept as-is; remote (e.g. S3) locations get
    "<job_name>/model" appended so concurrent tuning jobs do not collide.
    """
    if not (model_dir and model_dir.startswith("/opt/ml")):
        return "{}/{}/model".format(model_dir, job_name)
    return model_dir
def main():
    """Training entry point: read hyperparameters, configure S3, run training.
    """
    hyperparameters = environment.read_hyperparameters()
    env = environment.Environment(hyperparameters=hyperparameters)
    user_hyperparameters = env.hyperparameters
    # If the training job is part of the multiple training jobs for tuning, we need to append the training job name to
    # model_dir in case they read from/write to the same object
    if "_tuning_objective_metric" in hyperparameters:
        model_dir = _model_dir_with_training_job(hyperparameters.get("model_dir"), env.job_name)
        logger.info("Appending the training job name to model_dir: {}".format(model_dir))
        user_hyperparameters["model_dir"] = model_dir
    # Tell s3_utils which bucket/region the model artifacts are written to.
    s3_utils.configure(user_hyperparameters.get("model_dir"), os.environ.get("SAGEMAKER_REGION"))
    train(env, mapping.to_cmd_args(user_hyperparameters))
    # Warn (after training) if nothing servable was written to /opt/ml/model.
    _log_model_missing_warning(MODEL_DIR)
| 36.229437 | 118 | 0.674394 |
from __future__ import absolute_import
import json
import logging
import multiprocessing
import os
import subprocess
import time
from sagemaker_training import entry_point, environment, mapping, runner
import tensorflow as tf
from sagemaker_tensorflow_container import s3_utils
logger = logging.getLogger(__name__)
SAGEMAKER_PARAMETER_SERVER_ENABLED = "sagemaker_parameter_server_enabled"
MODEL_DIR = "/opt/ml/model"
def _is_host_master(hosts, current_host):
    """True iff *current_host* is the cluster's designated master (first host)."""
    return hosts[0] == current_host
def _build_tf_config(hosts, current_host, ps_task=False):
    """Assemble the TF_CONFIG dict for *current_host* in a SageMaker cluster.

    Host 0 is the master and the remaining hosts are workers; in multi-host
    clusters every host also runs a parameter server on port 2223.

    Raises:
        ValueError: when ps_task is requested without any parameter servers.
    """
    master_hosts, worker_hosts = hosts[:1], hosts[1:]
    ps_hosts = hosts if len(hosts) > 1 else None

    def endpoints(group, port=2222):
        return ["{}:{}".format(member, port) for member in group]

    cluster = {"master": endpoints(master_hosts)}
    if ps_hosts:
        cluster["ps"] = endpoints(ps_hosts, port="2223")
    if worker_hosts:
        cluster["worker"] = endpoints(worker_hosts)

    if ps_task:
        if ps_hosts is None:
            raise ValueError(
                "Cannot have a ps task if there are no parameter servers in the cluster"
            )
        task_type, task_index = "ps", ps_hosts.index(current_host)
    elif _is_host_master(hosts, current_host):
        task_type, task_index = "master", 0
    else:
        task_type, task_index = "worker", worker_hosts.index(current_host)

    return {
        "cluster": cluster,
        "environment": "cloud",
        "task": {"index": task_index, "type": task_type},
    }
def _run_ps(env, cluster):
    """Launch this host's parameter server in a detached background process."""
    logger.info("Running distributed training job with parameter servers")
    cluster_spec = tf.train.ClusterSpec(cluster)
    # The ps task index equals this host's position in the host list.
    task_index = env.hosts.index(env.current_host)
    # CPU-only config: sharing a GPU between TF processes is unsafe.
    no_gpu_config = tf.ConfigProto(device_count={"GPU": 0})
    server = tf.train.Server(
        cluster_spec, job_name="ps", task_index=task_index, config=no_gpu_config
    )
    # join() never returns, so it runs in its own process.
    multiprocessing.Process(target=lambda: server.join()).start()
def _run_worker(env, cmd_args, tf_config):
    """Execute the user entry point with TF_CONFIG set for this worker."""
    env_vars = env.to_env_vars()
    # TensorFlow discovers its cluster role through this environment variable.
    env_vars["TF_CONFIG"] = json.dumps(tf_config)
    entry_point.run(
        uri=env.module_dir,
        user_entry_point=env.user_entry_point,
        args=cmd_args,
        env_vars=env_vars,
        capture_error=True,
    )
def _wait_until_master_is_down(master):
    """Poll the master's port 2222 with curl every 10s; return once it is down."""
    while True:
        try:
            subprocess.check_call(
                ["curl", "{}:2222".format(master)], stdout=subprocess.PIPE, stderr=subprocess.PIPE
            )
            logger.info("master {} is still up, waiting for it to exit".format(master))
            time.sleep(10)
        except subprocess.CalledProcessError:
            # Connection refused: the master has exited.
            logger.info("master {} is down, stopping parameter server".format(master))
            return
def train(env, cmd_args):
    """Run the training job described by *env*, distributed or single-process.

    Args:
        env: SageMaker training environment object.
        cmd_args (list): command-line arguments for the user entry point.
    """
    parameter_server_enabled = env.additional_framework_parameters.get(
        SAGEMAKER_PARAMETER_SERVER_ENABLED, False
    )
    # Multi-host with parameter servers: every host runs a ps and a worker.
    if len(env.hosts) > 1 and parameter_server_enabled:
        tf_config = _build_tf_config(hosts=env.hosts, current_host=env.current_host)
        logger.info("Running distributed training job with parameter servers")
        logger.info("Launching parameter server process")
        _run_ps(env, tf_config["cluster"])
        logger.info("Launching worker process")
        _run_worker(env, cmd_args, tf_config)
        # Workers block so their ps stays alive until the master terminates.
        if not _is_host_master(env.hosts, env.current_host):
            _wait_until_master_is_down(env.hosts[0])
    else:
        # Plain job: choose the MPI runner only when explicitly requested.
        mpi_enabled = env.additional_framework_parameters.get("sagemaker_mpi_enabled")
        if mpi_enabled:
            runner_type = runner.MPIRunnerType
        else:
            runner_type = runner.ProcessRunnerType
        entry_point.run(
            uri=env.module_dir,
            user_entry_point=env.user_entry_point,
            args=cmd_args,
            env_vars=env.to_env_vars(),
            capture_error=True,
            runner_type=runner_type,
        )
def _log_model_missing_warning(model_dir):
    """Log a warning when *model_dir* contains no servable SavedModel bundle.

    Three cases are reported: no files at all, no saved_model.pb/.pbtxt, or a
    bundle not placed under a numeric version directory (TF Serving layout).

    Fix: ``logger.warn`` (deprecated alias) is replaced by ``logger.warning``.
    """
    pb_file_exists = False
    file_exists = False
    for dirpath, dirnames, filenames in os.walk(model_dir):
        if filenames:
            file_exists = True
        for f in filenames:
            if "saved_model.pb" in f or "saved_model.pbtxt" in f:
                pb_file_exists = True
                _, direct_parent_dir = os.path.split(dirpath)
                if not direct_parent_dir.isdigit():
                    logger.warning(
                        "Your model will NOT be servable with SageMaker TensorFlow Serving containers. "
                        'The SavedModel bundle is under directory "{}", not a numeric name.'.format(
                            direct_parent_dir
                        )
                    )
    if not file_exists:
        logger.warning(
            "No model artifact is saved under path {}."
            " Your training job will not save any model files to S3.\n"
            "For details of how to construct your training script see:\n"
            "https://sagemaker.readthedocs.io/en/stable/using_tf.html#adapting-your-local-tensorflow-script".format(
            model_dir
            )
        )
    elif not pb_file_exists:
        logger.warning(
            "Your model will NOT be servable with SageMaker TensorFlow Serving container. "
            "The model artifact was not saved in the TensorFlow SavedModel directory structure:\n"
            "https://www.tensorflow.org/guide/saved_model#structure_of_a_savedmodel_directory"
        )
def _model_dir_with_training_job(model_dir, job_name):
    """Append "<job_name>/model" to remote model_dirs; keep local /opt/ml paths."""
    is_local = bool(model_dir) and model_dir.startswith("/opt/ml")
    return model_dir if is_local else "{}/{}/model".format(model_dir, job_name)
def main():
    """Entry point: read hyperparameters, configure S3, train, then sanity-check."""
    hyperparameters = environment.read_hyperparameters()
    env = environment.Environment(hyperparameters=hyperparameters)
    user_hyperparameters = env.hyperparameters
    # Tuning jobs share a model_dir, so make it unique per training job.
    if "_tuning_objective_metric" in hyperparameters:
        model_dir = _model_dir_with_training_job(hyperparameters.get("model_dir"), env.job_name)
        logger.info("Appending the training job name to model_dir: {}".format(model_dir))
        user_hyperparameters["model_dir"] = model_dir
    s3_utils.configure(user_hyperparameters.get("model_dir"), os.environ.get("SAGEMAKER_REGION"))
    train(env, mapping.to_cmd_args(user_hyperparameters))
    # Warn if no servable model artifact was produced.
    _log_model_missing_warning(MODEL_DIR)
| true | true |
f71db54c69c8e0384954c6e22ac6249f1eba58c2 | 2,422 | py | Python | lsql/judge/forms.py | iburgoa13/lsql | d60007c915162c6c5c12168f6e2eebdcb9d10989 | [
"MIT"
] | null | null | null | lsql/judge/forms.py | iburgoa13/lsql | d60007c915162c6c5c12168f6e2eebdcb9d10989 | [
"MIT"
] | null | null | null | lsql/judge/forms.py | iburgoa13/lsql | d60007c915162c6c5c12168f6e2eebdcb9d10989 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Copyright Enrique Martín <emartinm@ucm.es> 2020
Forms used in LSQL
"""
from datetime import date
from django import forms
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
class FunctionProblemAdminForm(forms.ModelForm):
    """Customized admin form for FunctionProblem with a clearer label for 'calls'."""
    # One function invocation per line; executed against the submitted code.
    calls = forms.CharField(label='List of function calls to test (one per line)',
                            widget=forms.Textarea(attrs={'rows': 10, 'cols': 80}),
                            required=False)
class ProcProblemAdminForm(forms.ModelForm):
    """Customized admin form for ProcProblem with a clearer label for 'proc_call'."""
    proc_call = forms.CharField(label='Procedure call to test (only one)',
                                widget=forms.Textarea(attrs={'rows': 10, 'cols': 80}),
                                required=False)
class TriggerProblemAdminForm(forms.ModelForm):
    """Customized admin form for TriggerProblem with a clearer label for 'tests'."""
    tests = forms.CharField(label='SQL statements to test the trigger (separated by ";" as usual)',
                            widget=forms.Textarea(attrs={'rows': 10, 'cols': 80}),
                            required=False)
class LoginForm(forms.Form):
    """Form used to validate user login credentials."""
    username = forms.CharField(label='Nombre de usuario', max_length=100)
    password = forms.CharField(label='Contraseña', max_length=100, widget=forms.PasswordInput)
class ResultForm(forms.Form):
    """Form used to filter ranking results by group and date interval."""
    group = forms.IntegerField(label='Grupo', min_value=1)
    start = forms.DateField(label='Desde', input_formats=['%Y-%m-%d'])
    end = forms.DateField(label='Hasta', input_formats=['%Y-%m-%d'])
    def clean(self):
        """Cross-field validation: require start <= end <= today."""
        cleaned_data = super().clean()
        start = cleaned_data.get("start")
        end = cleaned_data.get("end")
        # Either bound may be missing (field-level errors); compare only when both exist.
        if end is not None and start is not None:
            if end < start:
                raise ValidationError(_("¡Error! La fecha inicial no puede ser mayor que la fecha final."))
            if end > date.today():
                raise ValidationError(_("¡Error! La fecha final no puede ser mayor que la fecha de hoy."))
class SubmitForm(forms.Form):
    """Form used to validate user submissions."""
    code = forms.CharField(min_length=10, strip=False)  # Keep spaces for error messages
| 40.366667 | 107 | 0.645747 |
from datetime import date
from django import forms
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
class FunctionProblemAdminForm(forms.ModelForm):
calls = forms.CharField(label='List of function calls to test (one per line)',
widget=forms.Textarea(attrs={'rows': 10, 'cols': 80}),
required=False)
class ProcProblemAdminForm(forms.ModelForm):
proc_call = forms.CharField(label='Procedure call to test (only one)',
widget=forms.Textarea(attrs={'rows': 10, 'cols': 80}),
required=False)
class TriggerProblemAdminForm(forms.ModelForm):
tests = forms.CharField(label='SQL statements to test the trigger (separated by ";" as usual)',
widget=forms.Textarea(attrs={'rows': 10, 'cols': 80}),
required=False)
class LoginForm(forms.Form):
username = forms.CharField(label='Nombre de usuario', max_length=100)
password = forms.CharField(label='Contraseña', max_length=100, widget=forms.PasswordInput)
class ResultForm(forms.Form):
    """Form used to query results for a group within a date range."""
    group = forms.IntegerField(label='Grupo', min_value=1)
    start = forms.DateField(label='Desde', input_formats=['%Y-%m-%d'])
    end = forms.DateField(label='Hasta', input_formats=['%Y-%m-%d'])

    def clean(self):
        """Validate that start <= end and that end is not in the future."""
        cleaned_data = super().clean()
        start = cleaned_data.get("start")
        end = cleaned_data.get("end")
        # Only cross-validate when both dates survived field-level validation.
        if end is not None and start is not None:
            if end < start:
                raise ValidationError(_("¡Error! La fecha inicial no puede ser mayor que la fecha final."))
            if end > date.today():
                raise ValidationError(_("¡Error! La fecha final no puede ser mayor que la fecha de hoy."))
class SubmitForm(forms.Form):
    """Form used to validate user submissions."""
    # strip=False keeps surrounding whitespace in the submitted code.
    code = forms.CharField(min_length=10, strip=False)
| true | true |
f71db6c703c175874f5cd66e3998079a192b818c | 581 | py | Python | src/python/120Triangle.py | witimlfl/leetcode-exercise | 9449c41fa03b996a37923f1dede0933753691282 | [
"MIT"
] | null | null | null | src/python/120Triangle.py | witimlfl/leetcode-exercise | 9449c41fa03b996a37923f1dede0933753691282 | [
"MIT"
] | null | null | null | src/python/120Triangle.py | witimlfl/leetcode-exercise | 9449c41fa03b996a37923f1dede0933753691282 | [
"MIT"
] | null | null | null | def minimumTotal(triangle):
if not triangle: return 0
res = triangle[-1]
for i in range(len(triangle) -2, -1, -1):
for j in range(len(triangle[i])):
res[j] = min(res[j], res[j+1]) + triangle[i][j]
return res[0]
def minimumTotal1(triangle):
    """In-place variant: fold each row of *triangle* into the row above it.

    Destructively rewrites the triangle so that cell [i][j] ends up holding
    the minimum path sum starting at that cell; the answer is left at the apex.
    """
    if not triangle:
        return 0
    for row in range(len(triangle) - 2, -1, -1):
        below = triangle[row + 1]
        current = triangle[row]
        for col, value in enumerate(current):
            current[col] = value + min(below[col], below[col + 1])
    return triangle[0][0]
# Smoke test: expected minimum path sum is 11 (path 2 -> 3 -> 5 -> 1).
result = minimumTotal1([[2],[3,4],[6,5,7],[4,1,8,3]])
print(result)
| 27.666667 | 71 | 0.555938 | def minimumTotal(triangle):
    """Bottom-up DP for LeetCode 120 (Triangle): minimum top-to-bottom path sum.

    NOTE(review): `res` aliases triangle[-1], so the caller's last row is
    overwritten in place as the DP folds upwards.
    """
    if not triangle: return 0
    res = triangle[-1]
    for i in range(len(triangle) -2, -1, -1):
        for j in range(len(triangle[i])):
            res[j] = min(res[j], res[j+1]) + triangle[i][j]
    return res[0]
def minimumTotal1(triangle):
    """In-place variant: accumulates row minima directly into *triangle*.

    Uses no extra memory but destroys the input; the answer ends at the apex.
    """
    if not triangle:
        return 0
    for i in range(len(triangle) -2, -1, -1):
        for j in range(len(triangle[i])):
            triangle[i][j] += min(triangle[i+1][j], triangle[i+1][j+1])
    return triangle[0][0]
# Expected output: 11 (path 2 -> 3 -> 5 -> 1).
result = minimumTotal1([[2],[3,4],[6,5,7],[4,1,8,3]])
print(result)
| true | true |
f71db8071548b827f1a29f3a5b9fb958d481c189 | 545 | py | Python | samples-python/datalayer.provider/setup.py | bracoe/ctrlx-automation-sdk | 6b2e61e146c557488125baf941e4d64c6fa6d0fb | [
"MIT"
] | 16 | 2021-08-23T13:07:12.000Z | 2022-02-21T13:29:21.000Z | samples-python/datalayer.provider/setup.py | bracoe/ctrlx-automation-sdk | 6b2e61e146c557488125baf941e4d64c6fa6d0fb | [
"MIT"
] | null | null | null | samples-python/datalayer.provider/setup.py | bracoe/ctrlx-automation-sdk | 6b2e61e146c557488125baf941e4d64c6fa6d0fb | [
"MIT"
] | 10 | 2021-09-29T09:58:33.000Z | 2022-01-13T07:20:00.000Z | from setuptools import setup
# Packaging metadata for the ctrlX Data Layer provider sample.
setup(
    name='sdk-py-datalayer-provider',
    version='2.0.0',
    description='This sample shows how to provide data to ctrlX Data Layer',
    author='SDK Team',
    install_requires = ['ctrlx-datalayer', 'ctrlx_fbs'],
    packages=['app', 'sample.schema'],
    # Ship the compiled FlatBuffers schema alongside the code; see
    # https://stackoverflow.com/questions/1612733/including-non-python-files-with-setup-py
    package_data={'./': ['sampleSchema.bfbs']},
    scripts=['main.py'],
    license='Copyright (c) 2020-2021 Bosch Rexroth AG, Licensed under MIT License'
)
| 36.333333 | 90 | 0.684404 | from setuptools import setup
# Packaging metadata for the ctrlX Data Layer provider sample.
setup(
    name='sdk-py-datalayer-provider',
    version='2.0.0',
    description='This sample shows how to provide data to ctrlX Data Layer',
    author='SDK Team',
    install_requires = ['ctrlx-datalayer', 'ctrlx_fbs'],
    packages=['app', 'sample.schema'],
    # Non-Python data file (compiled FlatBuffers schema) bundled with the package.
    package_data={'./': ['sampleSchema.bfbs']},
    scripts=['main.py'],
    license='Copyright (c) 2020-2021 Bosch Rexroth AG, Licensed under MIT License'
)
| true | true |
f71db83273809f77def4132e79fc25e819d9175d | 5,586 | py | Python | examples/decoding/plot_decoding_csp_eeg.py | TanayGahlot/mne-python | 857aa97c201451b82931c5eba50642975afc423d | [
"BSD-3-Clause"
] | null | null | null | examples/decoding/plot_decoding_csp_eeg.py | TanayGahlot/mne-python | 857aa97c201451b82931c5eba50642975afc423d | [
"BSD-3-Clause"
] | null | null | null | examples/decoding/plot_decoding_csp_eeg.py | TanayGahlot/mne-python | 857aa97c201451b82931c5eba50642975afc423d | [
"BSD-3-Clause"
] | null | null | null | """
===========================================================================
Motor imagery decoding from EEG data using the Common Spatial Pattern (CSP)
===========================================================================
Decoding of motor imagery applied to EEG data decomposed using CSP.
Here the classifier is applied to features extracted on CSP filtered signals.
See http://en.wikipedia.org/wiki/Common_spatial_pattern and [1]
The EEGBCI dataset is documented in [2]
The data set is available at PhysioNet [3]
[1] Zoltan J. Koles. The quantitative extraction and topographic mapping
of the abnormal components in the clinical EEG. Electroencephalography
and Clinical Neurophysiology, 79(6):440--447, December 1991.
[2] Schalk, G., McFarland, D.J., Hinterberger, T., Birbaumer, N.,
Wolpaw, J.R. (2004) BCI2000: A General-Purpose Brain-Computer Interface
(BCI) System. IEEE TBME 51(6):1034-1043
[3] Goldberger AL, Amaral LAN, Glass L, Hausdorff JM, Ivanov PCh, Mark RG,
Mietus JE, Moody GB, Peng C-K, Stanley HE. (2000) PhysioBank,
PhysioToolkit, and PhysioNet: Components of a New Research Resource for
Complex Physiologic Signals. Circulation 101(23):e215-e220
"""
# Authors: Martin Billinger <martin.billinger@tugraz.at>
#
# License: BSD (3-clause)
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from mne import Epochs, pick_types
from mne.io import concatenate_raws
from mne.io.edf import read_raw_edf
from mne.datasets import eegbci
from mne.event import find_events
from mne.decoding import CSP
from mne.layouts import read_layout
###############################################################################
# Set parameters and read data

# Avoid classification of evoked responses by using epochs that start 1 s
# after cue onset.
tmin, tmax = -1., 4.
event_id = dict(hands=2, feet=3)
subject = 1
runs = [6, 10, 14]  # motor imagery: hands vs feet

raw_fnames = eegbci.load_data(subject, runs)
raw_files = [read_raw_edf(f, tal_channel=-1, preload=True) for f in raw_fnames]
raw = concatenate_raws(raw_files)

# Strip the trailing '.' that the EEGBCI montage appends to channel names.
raw.info['ch_names'] = [chn.strip('.') for chn in raw.info['ch_names']]

# Band-pass filter to the mu/beta band where motor imagery is expressed.
raw.filter(7., 30., method='iir')

events = find_events(raw, shortest_event=0, stim_channel='STI 014')

picks = pick_types(raw.info, meg=False, eeg=True, stim=False, eog=False,
                   exclude='bads')

# Read epochs (training will use only the 1-2 s window);
# testing will be done with a running classifier.
epochs = Epochs(raw, events, event_id, tmin, tmax, proj=True, picks=picks,
                baseline=None, preload=True, add_eeg_ref=False)
epochs_train = epochs.crop(tmin=1., tmax=2., copy=True)
labels = epochs.events[:, -1] - 2  # map event codes {2, 3} -> labels {0, 1}
###############################################################################
# Classification with linear discriminant analysis
from sklearn.lda import LDA
from sklearn.cross_validation import ShuffleSplit

# Assemble a classifier (despite the name, `svc` holds an LDA instance).
svc = LDA()
csp = CSP(n_components=4, reg=None, log=True)

# Define a Monte-Carlo cross-validation generator (reduces variance):
cv = ShuffleSplit(len(labels), 10, test_size=0.2, random_state=42)
scores = []
epochs_data = epochs.get_data()
epochs_data_train = epochs_train.get_data()

# Use scikit-learn Pipeline with the cross_val_score function.
from sklearn.pipeline import Pipeline
from sklearn.cross_validation import cross_val_score
clf = Pipeline([('CSP', csp), ('SVC', svc)])
scores = cross_val_score(clf, epochs_data_train, labels, cv=cv, n_jobs=1)

# Print the results; chance level is the proportion of the majority class.
class_balance = np.mean(labels == labels[0])
class_balance = max(class_balance, 1. - class_balance)
print("Classification accuracy: %f / Chance level: %f" % (np.mean(scores),
                                                          class_balance))

# Plot CSP patterns estimated on the full data for visualization.
csp.fit_transform(epochs_data, labels)

# Reuse an Evoked container so patterns can be drawn with plot_topomap.
evoked = epochs.average()
evoked.data = csp.patterns_.T
evoked.times = np.arange(evoked.data.shape[0])

layout = read_layout('EEG1005')
evoked.plot_topomap(times=[0, 1, 2, 61, 62, 63], ch_type='eeg', layout=layout,
                    scale_time=1, time_format='%i', scale=1,
                    unit='Patterns (AU)', size=1.5)
###############################################################################
# Look at performance over time

sfreq = raw.info['sfreq']
w_length = int(sfreq * 0.5)   # running classifier: window length (0.5 s)
w_step = int(sfreq * 0.1)  # running classifier: window step size (0.1 s)
w_start = np.arange(0, epochs_data.shape[2] - w_length, w_step)

scores_windows = []

for train_idx, test_idx in cv:
    y_train, y_test = labels[train_idx], labels[test_idx]

    # Fit CSP on the training split only, to avoid leakage into the test fold.
    X_train = csp.fit_transform(epochs_data_train[train_idx], y_train)
    X_test = csp.transform(epochs_data_train[test_idx])

    # fit classifier
    svc.fit(X_train, y_train)

    # running classifier: test classifier on a sliding window
    score_this_window = []
    for n in w_start:
        X_test = csp.transform(epochs_data[test_idx][:, :, n:(n + w_length)])
        score_this_window.append(svc.score(X_test, y_test))
    scores_windows.append(score_this_window)

# Plot scores over time (x axis = window centres relative to epoch onset).
w_times = (w_start + w_length / 2.) / sfreq + epochs.tmin

plt.figure()
plt.plot(w_times, np.mean(scores_windows, 0), label='Score')
plt.axvline(0, linestyle='--', color='k', label='Onset')
plt.axhline(0.5, linestyle='-', color='k', label='Chance')
plt.xlabel('time (s)')
plt.ylabel('classification accuracy')
plt.title('Classification score over time')
plt.legend(loc='lower right')
plt.show()
| 36.509804 | 79 | 0.672037 |
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from mne import Epochs, pick_types
from mne.io import concatenate_raws
from mne.io.edf import read_raw_edf
from mne.datasets import eegbci
from mne.event import find_events
from mne.decoding import CSP
from mne.layouts import read_layout
| true | true |
f71dba22e36d7a5ff9ad3ce5dd64f729807fab5f | 9,895 | py | Python | lib/python3.8/site-packages/ansible_collections/fortinet/fortimanager/plugins/modules/fmgr_devprof_device_profile_fortianalyzer.py | cjsteel/python3-venv-ansible-2.10.5 | c95395c4cae844dc66fddde9b4343966f4b2ecd5 | [
"Apache-1.1"
] | null | null | null | lib/python3.8/site-packages/ansible_collections/fortinet/fortimanager/plugins/modules/fmgr_devprof_device_profile_fortianalyzer.py | cjsteel/python3-venv-ansible-2.10.5 | c95395c4cae844dc66fddde9b4343966f4b2ecd5 | [
"Apache-1.1"
] | null | null | null | lib/python3.8/site-packages/ansible_collections/fortinet/fortimanager/plugins/modules/fmgr_devprof_device_profile_fortianalyzer.py | cjsteel/python3-venv-ansible-2.10.5 | c95395c4cae844dc66fddde9b4343966f4b2ecd5 | [
"Apache-1.1"
] | null | null | null | #!/usr/bin/python
from __future__ import absolute_import, division, print_function
# Copyright 2019-2020 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fmgr_devprof_device_profile_fortianalyzer
short_description: no description
description:
- This module is able to configure a FortiManager device by allowing the
user to [ get set update ] the following apis.
- /pm/config/adom/{adom}/devprof/{devprof}/device/profile/fortianalyzer
- Examples include all parameters and values need to be adjusted to data sources before usage.
version_added: "2.10"
author:
- Frank Shen (@fshen01)
- Link Zheng (@zhengl)
notes:
- There are only three top-level parameters where 'method' is always required
while other two 'params' and 'url_params' can be optional
- Due to the complexity of fortimanager api schema, the validation is done
out of Ansible native parameter validation procedure.
- The syntax of OPTIONS doen not comply with the standard Ansible argument
specification, but with the structure of fortimanager API schema, we need
a trivial transformation when we are filling the ansible playbook
options:
loose_validation:
description:
- Do parameter validation in a loose way
type: bool
required: false
workspace_locking_adom:
description:
- the adom name to lock in case FortiManager running in workspace mode
- it can be global or any other custom adom names
required: false
type: str
workspace_locking_timeout:
description:
- the maximum time in seconds to wait for other user to release the workspace lock
required: false
type: int
default: 300
method:
description:
- The method in request
required: true
type: str
choices:
- get
- set
- update
params:
description:
- The parameters for each method
- See full parameters list in https://ansible-galaxy-fortimanager-docs.readthedocs.io/en/latest
type: list
required: false
url_params:
description:
- The parameters for each API request URL
- Also see full URL parameters in https://ansible-galaxy-fortimanager-docs.readthedocs.io/en/latest
required: false
type: dict
'''
EXAMPLES = '''
- hosts: fortimanager-inventory
collections:
- fortinet.fortimanager
connection: httpapi
vars:
ansible_httpapi_use_ssl: True
ansible_httpapi_validate_certs: False
ansible_httpapi_port: 443
tasks:
- name: REQUESTING /PM/CONFIG/DEVPROF/{DEVPROF}/DEVICE/PROFILE/FORTIANALYZER
fmgr_devprof_device_profile_fortianalyzer:
loose_validation: False
workspace_locking_adom: <value in [global, custom adom]>
workspace_locking_timeout: 300
method: <value in [get]>
url_params:
adom: <value in [none, global, custom dom]>
devprof: <value of string>
params:
-
option: <value in [object member, chksum, datasrc]>
- name: REQUESTING /PM/CONFIG/DEVPROF/{DEVPROF}/DEVICE/PROFILE/FORTIANALYZER
fmgr_devprof_device_profile_fortianalyzer:
loose_validation: False
workspace_locking_adom: <value in [global, custom adom]>
workspace_locking_timeout: 300
method: <value in [set, update]>
url_params:
adom: <value in [none, global, custom dom]>
devprof: <value of string>
params:
-
data:
managed-sn: <value of string>
target: <value in [none, this-fmg, managed, ...]>
target-ip: <value of string>
target-sn:
- <value of string>
'''
RETURN = '''
url:
description: The full url requested
returned: always
type: str
sample: /sys/login/user
status:
description: The status of api request
returned: always
type: dict
data:
description: The payload returned in the request
type: dict
returned: always
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible_collections.fortinet.fortimanager.plugins.module_utils.common import FAIL_SOCKET_MSG
from ansible_collections.fortinet.fortimanager.plugins.module_utils.common import DEFAULT_RESULT_OBJ
from ansible_collections.fortinet.fortimanager.plugins.module_utils.common import FMGRCommon
from ansible_collections.fortinet.fortimanager.plugins.module_utils.common import FMGBaseException
from ansible_collections.fortinet.fortimanager.plugins.module_utils.fortimanager import FortiManagerHandler
def main():
    """Entry point: validate module arguments, build and send the API request.

    Maps the module's ``method`` (get/set/update) onto the
    /pm/config/.../device/profile/fortianalyzer endpoint, optionally
    validating the payload against the schemas below, then reports the
    FortiManager response back to Ansible.
    """
    # URL template(s) this module can address; {adom} and {devprof} are
    # substituted from url_params.
    jrpc_urls = [
        '/pm/config/adom/{adom}/devprof/{devprof}/device/profile/fortianalyzer'
    ]

    # Schema describing the URL parameters above.
    url_schema = [
        {
            'name': 'adom',
            'type': 'string'
        },
        {
            'name': 'devprof',
            'type': 'string'
        }
    ]

    # Per-method body schemas: 'get' uses object0, 'set'/'update' use object1
    # (see method_mapping at the bottom of this dict).
    body_schema = {
        'schema_objects': {
            'object0': [
                {
                    'name': 'option',
                    'type': 'dict',
                    'dict': {
                        'type': 'string',
                        'enum': [
                            'object member',
                            'chksum',
                            'datasrc'
                        ]
                    },
                    'api_tag': 0
                },
                {
                    'type': 'string',
                    'name': 'url',
                    'api_tag': 0
                }
            ],
            'object1': [
                {
                    'name': 'data',
                    'type': 'dict',
                    'dict': {
                        'managed-sn': {
                            'type': 'string'
                        },
                        'target': {
                            'type': 'string',
                            'enum': [
                                'none',
                                'this-fmg',
                                'managed',
                                'others'
                            ]
                        },
                        'target-ip': {
                            'type': 'string'
                        },
                        'target-sn': {
                            'type': 'array',
                            'items': {
                                'type': 'string'
                            }
                        }
                    },
                    'api_tag': 0
                },
                {
                    'type': 'string',
                    'name': 'url',
                    'api_tag': 0
                }
            ]
        },
        'method_mapping': {
            'get': 'object0',
            'set': 'object1',
            'update': 'object1'
        }
    }

    # Ansible-visible argument specification for this module.
    module_arg_spec = {
        'loose_validation': {
            'type': 'bool',
            'required': False,
            'default': False
        },
        'workspace_locking_adom': {
            'type': 'str',
            'required': False
        },
        'workspace_locking_timeout': {
            'type': 'int',
            'required': False,
            'default': 300
        },
        'params': {
            'type': 'list',
            'required': False
        },
        'method': {
            'type': 'str',
            'required': True,
            'choices': [
                'get',
                'set',
                'update'
            ]
        },
        'url_params': {
            'type': 'dict',
            'required': False
        }
    }
    module = AnsibleModule(argument_spec=module_arg_spec,
                           supports_check_mode=False)
    method = module.params['method']
    loose_validation = module.params['loose_validation']

    fmgr = None
    payload = None
    response = DEFAULT_RESULT_OBJ

    # A persistent httpapi connection is required; fail fast without one.
    if module._socket_path:
        connection = Connection(module._socket_path)
        tools = FMGRCommon()
        # Body validation is skipped when the user opts into loose_validation.
        if loose_validation is False:
            tools.validate_module_params(module, body_schema)
        tools.validate_module_url_params(module, jrpc_urls, url_schema)
        full_url = tools.get_full_url_path(module, jrpc_urls)
        payload = tools.get_full_payload(module, full_url)
        fmgr = FortiManagerHandler(connection, module)
        fmgr.tools = tools
    else:
        module.fail_json(**FAIL_SOCKET_MSG)

    try:
        response = fmgr._conn.send_request(method, payload)
        fmgr.govern_response(module=module, results=response,
                             msg='Operation Finished',
                             ansible_facts=fmgr.construct_ansible_facts(response, module.params, module.params))
    except Exception as e:
        # Wrap any transport/validation failure in the FortiManager base exception.
        raise FMGBaseException(e)

    module.exit_json(meta=response[1])
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
| 32.336601 | 112 | 0.543103 |
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fmgr_devprof_device_profile_fortianalyzer
short_description: no description
description:
- This module is able to configure a FortiManager device by allowing the
user to [ get set update ] the following apis.
- /pm/config/adom/{adom}/devprof/{devprof}/device/profile/fortianalyzer
- Examples include all parameters and values need to be adjusted to data sources before usage.
version_added: "2.10"
author:
- Frank Shen (@fshen01)
- Link Zheng (@zhengl)
notes:
- There are only three top-level parameters where 'method' is always required
while other two 'params' and 'url_params' can be optional
- Due to the complexity of fortimanager api schema, the validation is done
out of Ansible native parameter validation procedure.
- The syntax of OPTIONS doen not comply with the standard Ansible argument
specification, but with the structure of fortimanager API schema, we need
a trivial transformation when we are filling the ansible playbook
options:
loose_validation:
description:
- Do parameter validation in a loose way
type: bool
required: false
workspace_locking_adom:
description:
- the adom name to lock in case FortiManager running in workspace mode
- it can be global or any other custom adom names
required: false
type: str
workspace_locking_timeout:
description:
- the maximum time in seconds to wait for other user to release the workspace lock
required: false
type: int
default: 300
method:
description:
- The method in request
required: true
type: str
choices:
- get
- set
- update
params:
description:
- The parameters for each method
- See full parameters list in https://ansible-galaxy-fortimanager-docs.readthedocs.io/en/latest
type: list
required: false
url_params:
description:
- The parameters for each API request URL
- Also see full URL parameters in https://ansible-galaxy-fortimanager-docs.readthedocs.io/en/latest
required: false
type: dict
'''
EXAMPLES = '''
- hosts: fortimanager-inventory
collections:
- fortinet.fortimanager
connection: httpapi
vars:
ansible_httpapi_use_ssl: True
ansible_httpapi_validate_certs: False
ansible_httpapi_port: 443
tasks:
- name: REQUESTING /PM/CONFIG/DEVPROF/{DEVPROF}/DEVICE/PROFILE/FORTIANALYZER
fmgr_devprof_device_profile_fortianalyzer:
loose_validation: False
workspace_locking_adom: <value in [global, custom adom]>
workspace_locking_timeout: 300
method: <value in [get]>
url_params:
adom: <value in [none, global, custom dom]>
devprof: <value of string>
params:
-
option: <value in [object member, chksum, datasrc]>
- name: REQUESTING /PM/CONFIG/DEVPROF/{DEVPROF}/DEVICE/PROFILE/FORTIANALYZER
fmgr_devprof_device_profile_fortianalyzer:
loose_validation: False
workspace_locking_adom: <value in [global, custom adom]>
workspace_locking_timeout: 300
method: <value in [set, update]>
url_params:
adom: <value in [none, global, custom dom]>
devprof: <value of string>
params:
-
data:
managed-sn: <value of string>
target: <value in [none, this-fmg, managed, ...]>
target-ip: <value of string>
target-sn:
- <value of string>
'''
RETURN = '''
url:
description: The full url requested
returned: always
type: str
sample: /sys/login/user
status:
description: The status of api request
returned: always
type: dict
data:
description: The payload returned in the request
type: dict
returned: always
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible_collections.fortinet.fortimanager.plugins.module_utils.common import FAIL_SOCKET_MSG
from ansible_collections.fortinet.fortimanager.plugins.module_utils.common import DEFAULT_RESULT_OBJ
from ansible_collections.fortinet.fortimanager.plugins.module_utils.common import FMGRCommon
from ansible_collections.fortinet.fortimanager.plugins.module_utils.common import FMGBaseException
from ansible_collections.fortinet.fortimanager.plugins.module_utils.fortimanager import FortiManagerHandler
def main():
    """Entry point: validate module arguments, build and send the API request.

    Maps the module's ``method`` (get/set/update) onto the
    /pm/config/.../device/profile/fortianalyzer endpoint, optionally
    validating the payload against the schemas below, then reports the
    FortiManager response back to Ansible.
    """
    # URL template(s) this module can address; {adom} and {devprof} are
    # substituted from url_params.
    jrpc_urls = [
        '/pm/config/adom/{adom}/devprof/{devprof}/device/profile/fortianalyzer'
    ]

    # Schema describing the URL parameters above.
    url_schema = [
        {
            'name': 'adom',
            'type': 'string'
        },
        {
            'name': 'devprof',
            'type': 'string'
        }
    ]

    # Per-method body schemas: 'get' uses object0, 'set'/'update' use object1
    # (see method_mapping at the bottom of this dict).
    body_schema = {
        'schema_objects': {
            'object0': [
                {
                    'name': 'option',
                    'type': 'dict',
                    'dict': {
                        'type': 'string',
                        'enum': [
                            'object member',
                            'chksum',
                            'datasrc'
                        ]
                    },
                    'api_tag': 0
                },
                {
                    'type': 'string',
                    'name': 'url',
                    'api_tag': 0
                }
            ],
            'object1': [
                {
                    'name': 'data',
                    'type': 'dict',
                    'dict': {
                        'managed-sn': {
                            'type': 'string'
                        },
                        'target': {
                            'type': 'string',
                            'enum': [
                                'none',
                                'this-fmg',
                                'managed',
                                'others'
                            ]
                        },
                        'target-ip': {
                            'type': 'string'
                        },
                        'target-sn': {
                            'type': 'array',
                            'items': {
                                'type': 'string'
                            }
                        }
                    },
                    'api_tag': 0
                },
                {
                    'type': 'string',
                    'name': 'url',
                    'api_tag': 0
                }
            ]
        },
        'method_mapping': {
            'get': 'object0',
            'set': 'object1',
            'update': 'object1'
        }
    }

    # Ansible-visible argument specification for this module.
    module_arg_spec = {
        'loose_validation': {
            'type': 'bool',
            'required': False,
            'default': False
        },
        'workspace_locking_adom': {
            'type': 'str',
            'required': False
        },
        'workspace_locking_timeout': {
            'type': 'int',
            'required': False,
            'default': 300
        },
        'params': {
            'type': 'list',
            'required': False
        },
        'method': {
            'type': 'str',
            'required': True,
            'choices': [
                'get',
                'set',
                'update'
            ]
        },
        'url_params': {
            'type': 'dict',
            'required': False
        }
    }
    module = AnsibleModule(argument_spec=module_arg_spec,
                           supports_check_mode=False)
    method = module.params['method']
    loose_validation = module.params['loose_validation']

    fmgr = None
    payload = None
    response = DEFAULT_RESULT_OBJ

    # A persistent httpapi connection is required; fail fast without one.
    if module._socket_path:
        connection = Connection(module._socket_path)
        tools = FMGRCommon()
        # Body validation is skipped when the user opts into loose_validation.
        if loose_validation is False:
            tools.validate_module_params(module, body_schema)
        tools.validate_module_url_params(module, jrpc_urls, url_schema)
        full_url = tools.get_full_url_path(module, jrpc_urls)
        payload = tools.get_full_payload(module, full_url)
        fmgr = FortiManagerHandler(connection, module)
        fmgr.tools = tools
    else:
        module.fail_json(**FAIL_SOCKET_MSG)

    try:
        response = fmgr._conn.send_request(method, payload)
        fmgr.govern_response(module=module, results=response,
                             msg='Operation Finished',
                             ansible_facts=fmgr.construct_ansible_facts(response, module.params, module.params))
    except Exception as e:
        # Wrap any transport/validation failure in the FortiManager base exception.
        raise FMGBaseException(e)

    module.exit_json(meta=response[1])
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
| true | true |
f71dba64825ff9d2aecf0ac1d5279cb56a1da34d | 14,532 | py | Python | src/lstm/lstm_wp.py | kafkasl/contextualLSTM | a4421d592c3960c79842b0f23de162e61fcab3dd | [
"Apache-2.0"
] | 31 | 2017-08-21T11:39:30.000Z | 2020-09-02T03:55:54.000Z | src/lstm/lstm_wp.py | kafkasl/contextualLSTM | a4421d592c3960c79842b0f23de162e61fcab3dd | [
"Apache-2.0"
] | 2 | 2018-03-27T08:57:04.000Z | 2018-05-14T09:39:11.000Z | src/lstm/lstm_wp.py | kafkasl/contextualLSTM | a4421d592c3960c79842b0f23de162e61fcab3dd | [
"Apache-2.0"
] | 9 | 2017-07-02T15:17:43.000Z | 2020-05-30T08:11:36.000Z | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Example / benchmark for building a PTB LSTM model.
Trains the model described in:
(Zaremba, et. al.) Recurrent Neural Network Regularization
http://arxiv.org/abs/1409.2329
There are 3 supported model configurations:
===========================================
| config | epochs | train | valid | test
===========================================
| small | 13 | 37.99 | 121.39 | 115.91
| medium | 39 | 48.45 | 86.16 | 82.07
| large | 55 | 37.87 | 82.62 | 78.29
The exact results may vary depending on the random initialization.
The hyperparameters used in the model:
- init_scale - the initial scale of the weights
- learning_rate - the initial value of the learning rate
- max_grad_norm - the maximum permissible norm of the gradient
- num_layers - the number of LSTM layers
- num_steps - the number of unrolled steps of LSTM
- hidden_size - the number of LSTM units
- max_epoch - the number of epochs trained with the initial learning rate
- max_max_epoch - the total number of epochs for training
- keep_prob - the probability of keeping weights in the dropout layer
- lr_decay - the decay of the learning rate for each epoch after "max_epoch"
- batch_size - the batch size
The data required for this example is in the data/ dir of the
PTB dataset from Tomas Mikolov's webpage:
$ wget http://www.fit.vutbr.cz/~imikolov/rnnlm/simple-examples.tgz
$ tar xvf simple-examples.tgz
To run:
$ python ptb_word_lm.py --data_path=simple-examples/data/
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
sys.path.insert(0, "../src/")
import inspect
import time
import numpy as np
import tensorflow as tf
import reader_wp as reader
flags = tf.flags
logging = tf.logging

# Command-line configuration for training/evaluation runs.
flags.DEFINE_string(
    "model", "small",
    "A type of model. Possible options are: small, medium, large.")
flags.DEFINE_string(
    "tasks", "all",
    "Tasks to be performed. Possible options are: all, train, test, valid")
# Fixed: help text previously duplicated the "model" flag's description.
flags.DEFINE_string(
    "word_to_id_path", "../models/eos/word2id_1000.pklz",
    "Path of the pickled word-to-id mapping used to derive the vocabulary size.")
flags.DEFINE_string("data_path", None,
                    "Where the training/test data is stored.")
flags.DEFINE_string("save_path", None,
                    "Model output directory.")
flags.DEFINE_bool("use_fp16", False,
                  "Train using 16-bit floats instead of 32bit floats")

FLAGS = flags.FLAGS
def data_type():
    """Return the TF float dtype selected by the --use_fp16 flag."""
    if FLAGS.use_fp16:
        return tf.float16
    return tf.float32
def get_vocab_size():
    """Return the vocabulary size read from the word-to-id mapping on disk.

    NOTE(review): ``VectorManager`` is not imported in this file's visible
    imports — confirm it is provided via the sys.path insertion at the top of
    the file, otherwise this call raises NameError.
    """
    word_to_id = VectorManager.read_vector(FLAGS.word_to_id_path)
    size = len(word_to_id)
    print("Vocabulary size: %s" % size)

    return size
class WPInput(object):
    """Batched input pipeline: (input, shifted-target) tensors for the model."""

    def __init__(self, config, data, name=None):
        self.batch_size = batch_size = config.batch_size
        self.num_steps = num_steps = config.num_steps
        # Number of full (batch_size, num_steps) windows available per epoch.
        self.epoch_size = ((len(data) // batch_size) - 1) // num_steps
        self.input_data, self.targets = reader.wiki_producer(
            data, batch_size, num_steps, name=name)
class WPModel(object):
"""Word Prediction model."""
def __init__(self, is_training, config, input_):
self._input = input_
batch_size = input_.batch_size
num_steps = input_.num_steps
size = config.hidden_size
vocab_size = config.vocab_size
# Slightly better results can be obtained with forget gate biases
# initialized to 1 but the hyperparameters of the model would need to be
# different than reported in the paper.
def lstm_cell():
# With the latest TensorFlow source code (as of Mar 27, 2017),
# the BasicLSTMCell will need a reuse parameter which is unfortunately not
# defined in TensorFlow 1.0. To maintain backwards compatibility, we add
# an argument check here:
# if 'reuse' in inspect.getargspec(
# tf.contrib.rnn.BasicLSTMCell.__init__).args:
# return tf.contrib.rnn.BasicLSTMCell(
# size, forget_bias=0.0, state_is_tuple=True,
# reuse=tf.get_variable_scope().reuse)
# else:
return tf.contrib.rnn.BasicLSTMCell(
size, forget_bias=0.0, state_is_tuple=True)
attn_cell = lstm_cell
if is_training and config.keep_prob < 1:
def attn_cell():
return tf.contrib.rnn.DropoutWrapper(
lstm_cell(), output_keep_prob=config.keep_prob)
cell = tf.contrib.rnn.MultiRNNCell(
[attn_cell() for _ in range(config.num_layers)], state_is_tuple=True)
# data_type() returns float32 or float16
self._initial_state = cell.zero_state(batch_size, data_type())
with tf.device("/cpu:0"):
# TODO: replace TF input with my embeddings
# TODO: implement PTB reader or something similar
embedding = tf.get_variable(
"embedding", [vocab_size, size], dtype=data_type())
inputs = tf.nn.embedding_lookup(embedding, input_.input_data)
if is_training and config.keep_prob < 1:
# Dropout allows to use the net for train and testing
# See: https://stackoverflow.com/questions/34597316/why-input-is-scaled-in-tf-nn-dropout-in-tensorflow
# and: http://www.cs.toronto.edu/~rsalakhu/papers/srivastava14a.pdf
inputs = tf.nn.dropout(inputs, config.keep_prob)
# Simplified version of models/tutorials/rnn/rnn.py's rnn().
# This builds an unrolled LSTM for tutorial purposes only.
# In general, use the rnn() or state_saving_rnn() from rnn.py.
#
# The alternative version of the code below is:
#
inputs = tf.unstack(inputs, num=num_steps, axis=1)
outputs, state = tf.contrib.rnn.static_rnn(
cell, inputs, initial_state=self._initial_state)
# TODO: passing the sequence_length argument will enable to input variable-length tensors
# outputs = []
# state = self._initial_state
# with tf.variable_scope("RNN"):
# for time_step in range(num_steps):
# if time_step > 0:
# tf.get_variable_scope().reuse_variables()
# (cell_output, state) = cell(inputs[:, time_step, :], state) # Call (inputs, state)
# outputs.append(cell_output)
# TODO: check why outputs are stacked and resized
output = tf.reshape(tf.stack(axis=1, values=outputs), [-1, size])
softmax_w = tf.get_variable(
"softmax_w", [size, vocab_size], dtype=data_type())
softmax_b = tf.get_variable("softmax_b", [vocab_size], dtype=data_type())
logits = tf.matmul(output, softmax_w) + softmax_b
loss = tf.contrib.legacy_seq2seq.sequence_loss_by_example(
[logits],
[tf.reshape(input_.targets, [-1])],
[tf.ones([batch_size * num_steps], dtype=data_type())])
self._cost = cost = tf.reduce_sum(loss) / batch_size
self._final_state = state
if not is_training:
return
self._lr = tf.Variable(0.0, trainable=False)
tvars = tf.trainable_variables()
grads, _ = tf.clip_by_global_norm(tf.gradients(cost, tvars),
config.max_grad_norm)
optimizer = tf.train.GradientDescentOptimizer(self._lr)
self._train_op = optimizer.apply_gradients(
zip(grads, tvars),
global_step=tf.contrib.framework.get_or_create_global_step())
self._new_lr = tf.placeholder(
tf.float32, shape=[], name="new_learning_rate")
self._lr_update = tf.assign(self._lr, self._new_lr)
def assign_lr(self, session, lr_value):
    """Set the model's learning-rate variable to `lr_value`.

    Runs the pre-built assign op (`self._lr_update`), feeding `lr_value`
    through the `new_learning_rate` placeholder.
    """
    session.run(self._lr_update, feed_dict={self._new_lr: lr_value})
@property
def input(self):
    """The WPInput pipeline feeding this model."""
    return self._input

@property
def initial_state(self):
    """Zero LSTM state tensor (from cell.zero_state) used at epoch start."""
    return self._initial_state

@property
def cost(self):
    """Scalar sequence loss summed over the batch, divided by batch_size."""
    return self._cost

@property
def final_state(self):
    """LSTM state after the last unrolled time step."""
    return self._final_state

@property
def lr(self):
    """Current learning-rate tf.Variable (non-trainable)."""
    return self._lr

@property
def train_op(self):
    """Clipped gradient-descent training op (set only when is_training)."""
    return self._train_op
class SmallConfig(object):
    """Small config."""
    init_scale = 0.1      # half-width of the uniform weight initializer
    learning_rate = 1.0   # initial SGD learning rate (decayed after max_epoch)
    max_grad_norm = 5     # global-norm threshold for gradient clipping
    num_layers = 2        # stacked LSTM layers in the MultiRNNCell
    num_steps = 20        # unrolled time steps per training batch
    hidden_size = 200     # LSTM units (also the embedding dimension)
    max_epoch = 4         # epochs at the full learning rate before decay
    max_max_epoch = 13    # total number of training epochs
    keep_prob = 1.0       # dropout keep probability (1.0 = dropout disabled)
    lr_decay = 0.5        # per-epoch multiplicative decay after max_epoch
    batch_size = 20       # sequences per batch
    vocab_size = 27942    # overwritten in main() with the actual corpus size
class MediumConfig(object):
    """Medium config: 650 hidden units, 35-step unroll, dropout 0.5."""
    init_scale = 0.05
    learning_rate = 1.0
    max_grad_norm = 5
    num_layers = 2
    num_steps = 35
    hidden_size = 650
    max_epoch = 6
    max_max_epoch = 39
    keep_prob = 0.5
    lr_decay = 0.8
    batch_size = 20
    vocab_size = 10000


class LargeConfig(object):
    """Large config: 1024 hidden units, slow decay (1/1.15), dropout 0.35."""
    init_scale = 0.04
    learning_rate = 1.0
    max_grad_norm = 10
    num_layers = 2
    num_steps = 35
    hidden_size = 1024
    max_epoch = 14
    max_max_epoch = 55
    keep_prob = 0.35
    lr_decay = 1 / 1.15
    batch_size = 20
    vocab_size = 10000


class TestConfig(object):
    """Tiny config, for testing: 1 layer, 2 units, single epoch."""
    init_scale = 0.1
    learning_rate = 1.0
    max_grad_norm = 1
    num_layers = 1
    num_steps = 2
    hidden_size = 2
    max_epoch = 1
    max_max_epoch = 1
    keep_prob = 1.0
    lr_decay = 0.5
    batch_size = 20
    vocab_size = 10000
def run_epoch(session, model, eval_op=None, verbose=False):
    """Run the model once over its whole input and return its perplexity.

    Args:
      session: active TensorFlow session.
      model: model object exposing `input`, `cost`, `initial_state`,
        and `final_state` (e.g. WPModel).
      eval_op: optional extra op to run each step (e.g. `model.train_op`);
        None means evaluation only.
      verbose: if True, print perplexity and word-per-second speed roughly
        ten times per epoch.

    Returns:
      exp(total cost / total iterations), the epoch perplexity.
    """
    start_time = time.time()
    costs = 0.0
    iters = 0
    state = session.run(model.initial_state)

    fetches = {
        "cost": model.cost,
        "final_state": model.final_state,
    }
    if eval_op is not None:
        fetches["eval_op"] = eval_op

    # Log roughly 10 times per epoch. Guard against epoch_size < 10, where
    # the original `epoch_size // 10` modulus would be zero and raise
    # ZeroDivisionError when verbose=True.
    log_interval = max(1, model.input.epoch_size // 10)

    for step in range(model.input.epoch_size):
        # Carry the LSTM state across batches: feed the previous step's
        # final (c, h) pairs back in as the initial state.
        feed_dict = {}
        for i, (c, h) in enumerate(model.initial_state):
            feed_dict[c] = state[i].c
            feed_dict[h] = state[i].h

        vals = session.run(fetches, feed_dict)
        cost = vals["cost"]
        state = vals["final_state"]

        costs += cost
        iters += model.input.num_steps

        if verbose and step % log_interval == 10:
            print("%.3f perplexity: %.3f speed: %.0f wps" %
                  (step * 1.0 / model.input.epoch_size, np.exp(costs / iters),
                   iters * model.input.batch_size / (time.time() - start_time)))

    return np.exp(costs / iters)
def get_config():
    """Return a fresh hyperparameter config selected by the --model flag.

    Returns:
      An instance of SmallConfig, MediumConfig, LargeConfig, or TestConfig.

    Raises:
      ValueError: if FLAGS.model is not one of small/medium/large/test.
    """
    configs = {
        "small": SmallConfig,
        "medium": MediumConfig,
        "large": LargeConfig,
        "test": TestConfig,
    }
    try:
        return configs[FLAGS.model]()
    except KeyError:
        # Bug fix: the original passed FLAGS.model as a second positional
        # argument to ValueError ("Invalid model: %s", FLAGS.model) instead
        # of %-formatting it into the message.
        raise ValueError("Invalid model: %s" % FLAGS.model)
def main(_):
    """Train, validate, and test the word-prediction LSTM.

    Builds three weight-sharing model instances (train/valid/test) in one
    graph, trains for config.max_max_epoch epochs with geometrically
    decayed SGD, prints per-epoch perplexities, and checkpoints to
    --save_path when given.
    """
    if not FLAGS.data_path:
        raise ValueError("Must set --data_path to wiki data directory")

    raw_data = reader.wiki_raw_data(FLAGS.data_path, FLAGS.word_to_id_path)
    train_data, valid_data, test_data = raw_data

    #vocab_size = get_vocab_size()
    # Hard-coded vocabulary size; presumably matches the word-to-id mapping
    # at FLAGS.word_to_id_path -- TODO confirm against get_vocab_size().
    vocab_size = 126930

    config = get_config()
    config.vocab_size = vocab_size
    # Evaluation processes one token at a time: batch of 1, single step.
    eval_config = get_config()
    eval_config.batch_size = 1
    eval_config.num_steps = 1
    eval_config.vocab_size = vocab_size

    with tf.Graph().as_default():
        # Args: [minval, maxval]
        initializer = tf.random_uniform_initializer(-config.init_scale,
                                                    config.init_scale)

        with tf.name_scope("Train"):
            train_input = WPInput(config=config, data=train_data, name="TrainInput")
            with tf.variable_scope("Model", reuse=None, initializer=initializer):
                m = WPModel(is_training=True, config=config, input_=train_input)
            tf.summary.scalar("Training Loss", m.cost)
            tf.summary.scalar("Learning Rate", m.lr)

        with tf.name_scope("Valid"):
            valid_input = WPInput(config=config, data=valid_data, name="ValidInput")
            # reuse=True shares the Train model's variables.
            with tf.variable_scope("Model", reuse=True, initializer=initializer):
                mvalid = WPModel(is_training=False, config=config, input_=valid_input)
            tf.summary.scalar("Validation Loss", mvalid.cost)

        with tf.name_scope("Test"):
            test_input = WPInput(config=eval_config, data=test_data, name="TestInput")
            with tf.variable_scope("Model", reuse=True, initializer=initializer):
                mtest = WPModel(is_training=False, config=eval_config,
                                input_=test_input)

        # Supervisor handles variable init, checkpointing, and recovery.
        sv = tf.train.Supervisor(logdir=FLAGS.save_path)
        with sv.managed_session() as session:
            for i in range(config.max_max_epoch):
                # Hold the base rate for max_epoch epochs, then decay it
                # geometrically by lr_decay each following epoch.
                lr_decay = config.lr_decay ** max(i + 1 - config.max_epoch, 0.0)
                m.assign_lr(session, config.learning_rate * lr_decay)

                print("Epoch: %d Learning rate: %.3f" % (i + 1, session.run(m.lr)))
                train_perplexity = run_epoch(session, m, eval_op=m.train_op,
                                             verbose=True)
                print("Epoch: %d Train Perplexity: %.3f" % (i + 1, train_perplexity))
                valid_perplexity = run_epoch(session, mvalid)
                print("Epoch: %d Valid Perplexity: %.3f" % (i + 1, valid_perplexity))

            test_perplexity = run_epoch(session, mtest)
            print("Test Perplexity: %.3f" % test_perplexity)

            if FLAGS.save_path:
                print("Saving model to %s." % FLAGS.save_path)
                sv.saver.save(session, FLAGS.save_path, global_step=sv.global_step)
if __name__ == "__main__":
    # Parse command-line flags and dispatch to main().
    tf.app.run()
| 34.273585 | 114 | 0.628475 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
sys.path.insert(0, "../src/")
import inspect
import time
import numpy as np
import tensorflow as tf
import reader_wp as reader
flags = tf.flags
logging = tf.logging
flags.DEFINE_string(
"model", "small",
"A type of model. Possible options are: small, medium, large.")
flags.DEFINE_string(
"tasks", "all",
"Tasks to be performed. Possible options are: all, train, test, valid")
flags.DEFINE_string(
"word_to_id_path", "../models/eos/word2id_1000.pklz",
"A type of model. Possible options are: small, medium, large.")
flags.DEFINE_string("data_path", None,
"Where the training/test data is stored.")
flags.DEFINE_string("save_path", None,
"Model output directory.")
flags.DEFINE_bool("use_fp16", False,
"Train using 16-bit floats instead of 32bit floats")
FLAGS = flags.FLAGS
def data_type():
return tf.float16 if FLAGS.use_fp16 else tf.float32
def get_vocab_size():
word_to_id = VectorManager.read_vector(FLAGS.word_to_id_path)
size = len(word_to_id)
print("Vocabulary size: %s" % size)
return size
class WPInput(object):
def __init__(self, config, data, name=None):
self.batch_size = batch_size = config.batch_size
self.num_steps = num_steps = config.num_steps
self.epoch_size = ((len(data) // batch_size) - 1) // num_steps
self.input_data, self.targets = reader.wiki_producer(
data, batch_size, num_steps, name=name)
class WPModel(object):
def __init__(self, is_training, config, input_):
self._input = input_
batch_size = input_.batch_size
num_steps = input_.num_steps
size = config.hidden_size
vocab_size = config.vocab_size
def lstm_cell():
return tf.contrib.rnn.BasicLSTMCell(
size, forget_bias=0.0, state_is_tuple=True)
attn_cell = lstm_cell
if is_training and config.keep_prob < 1:
def attn_cell():
return tf.contrib.rnn.DropoutWrapper(
lstm_cell(), output_keep_prob=config.keep_prob)
cell = tf.contrib.rnn.MultiRNNCell(
[attn_cell() for _ in range(config.num_layers)], state_is_tuple=True)
self._initial_state = cell.zero_state(batch_size, data_type())
with tf.device("/cpu:0"):
embedding = tf.get_variable(
"embedding", [vocab_size, size], dtype=data_type())
inputs = tf.nn.embedding_lookup(embedding, input_.input_data)
if is_training and config.keep_prob < 1:
inputs = tf.nn.dropout(inputs, config.keep_prob)
# This builds an unrolled LSTM for tutorial purposes only.
# In general, use the rnn() or state_saving_rnn() from rnn.py.
#
# The alternative version of the code below is:
#
inputs = tf.unstack(inputs, num=num_steps, axis=1)
outputs, state = tf.contrib.rnn.static_rnn(
cell, inputs, initial_state=self._initial_state)
# TODO: passing the sequence_length argument will enable to input variable-length tensors
# outputs = []
# state = self._initial_state
# with tf.variable_scope("RNN"):
# for time_step in range(num_steps):
# if time_step > 0:
# tf.get_variable_scope().reuse_variables()
# (cell_output, state) = cell(inputs[:, time_step, :], state) # Call (inputs, state)
# outputs.append(cell_output)
# TODO: check why outputs are stacked and resized
output = tf.reshape(tf.stack(axis=1, values=outputs), [-1, size])
softmax_w = tf.get_variable(
"softmax_w", [size, vocab_size], dtype=data_type())
softmax_b = tf.get_variable("softmax_b", [vocab_size], dtype=data_type())
logits = tf.matmul(output, softmax_w) + softmax_b
loss = tf.contrib.legacy_seq2seq.sequence_loss_by_example(
[logits],
[tf.reshape(input_.targets, [-1])],
[tf.ones([batch_size * num_steps], dtype=data_type())])
self._cost = cost = tf.reduce_sum(loss) / batch_size
self._final_state = state
if not is_training:
return
self._lr = tf.Variable(0.0, trainable=False)
tvars = tf.trainable_variables()
grads, _ = tf.clip_by_global_norm(tf.gradients(cost, tvars),
config.max_grad_norm)
optimizer = tf.train.GradientDescentOptimizer(self._lr)
self._train_op = optimizer.apply_gradients(
zip(grads, tvars),
global_step=tf.contrib.framework.get_or_create_global_step())
self._new_lr = tf.placeholder(
tf.float32, shape=[], name="new_learning_rate")
self._lr_update = tf.assign(self._lr, self._new_lr)
def assign_lr(self, session, lr_value):
session.run(self._lr_update, feed_dict={self._new_lr: lr_value})
@property
def input(self):
return self._input
@property
def initial_state(self):
return self._initial_state
@property
def cost(self):
return self._cost
@property
def final_state(self):
return self._final_state
@property
def lr(self):
return self._lr
@property
def train_op(self):
return self._train_op
class SmallConfig(object):
init_scale = 0.1
learning_rate = 1.0
max_grad_norm = 5
num_layers = 2
num_steps = 20
hidden_size = 200
max_epoch = 4
max_max_epoch = 13
keep_prob = 1.0
lr_decay = 0.5
batch_size = 20
vocab_size = 27942
class MediumConfig(object):
init_scale = 0.05
learning_rate = 1.0
max_grad_norm = 5
num_layers = 2
num_steps = 35
hidden_size = 650
max_epoch = 6
max_max_epoch = 39
keep_prob = 0.5
lr_decay = 0.8
batch_size = 20
vocab_size = 10000
class LargeConfig(object):
init_scale = 0.04
learning_rate = 1.0
max_grad_norm = 10
num_layers = 2
num_steps = 35
hidden_size = 1024
max_epoch = 14
max_max_epoch = 55
keep_prob = 0.35
lr_decay = 1 / 1.15
batch_size = 20
vocab_size = 10000
class TestConfig(object):
init_scale = 0.1
learning_rate = 1.0
max_grad_norm = 1
num_layers = 1
num_steps = 2
hidden_size = 2
max_epoch = 1
max_max_epoch = 1
keep_prob = 1.0
lr_decay = 0.5
batch_size = 20
vocab_size = 10000
def run_epoch(session, model, eval_op=None, verbose=False):
start_time = time.time()
costs = 0.0
iters = 0
state = session.run(model.initial_state)
fetches = {
"cost": model.cost,
"final_state": model.final_state,
}
if eval_op is not None:
fetches["eval_op"] = eval_op
for step in range(model.input.epoch_size):
feed_dict = {}
for i, (c, h) in enumerate(model.initial_state):
feed_dict[c] = state[i].c
feed_dict[h] = state[i].h
vals = session.run(fetches, feed_dict)
cost = vals["cost"]
state = vals["final_state"]
costs += cost
iters += model.input.num_steps
if verbose and step % (model.input.epoch_size // 10) == 10:
print("%.3f perplexity: %.3f speed: %.0f wps" %
(step * 1.0 / model.input.epoch_size, np.exp(costs / iters),
iters * model.input.batch_size / (time.time() - start_time)))
return np.exp(costs / iters)
def get_config():
if FLAGS.model == "small":
return SmallConfig()
elif FLAGS.model == "medium":
return MediumConfig()
elif FLAGS.model == "large":
return LargeConfig()
elif FLAGS.model == "test":
return TestConfig()
else:
raise ValueError("Invalid model: %s", FLAGS.model)
def main(_):
if not FLAGS.data_path:
raise ValueError("Must set --data_path to wiki data directory")
raw_data = reader.wiki_raw_data(FLAGS.data_path, FLAGS.word_to_id_path)
train_data, valid_data, test_data = raw_data
#vocab_size = get_vocab_size()
vocab_size = 126930
config = get_config()
config.vocab_size = vocab_size
eval_config = get_config()
eval_config.batch_size = 1
eval_config.num_steps = 1
eval_config.vocab_size = vocab_size
with tf.Graph().as_default():
# Args: [minval, maxval]
initializer = tf.random_uniform_initializer(-config.init_scale,
config.init_scale)
with tf.name_scope("Train"):
train_input = WPInput(config=config, data=train_data, name="TrainInput")
with tf.variable_scope("Model", reuse=None, initializer=initializer):
m = WPModel(is_training=True, config=config, input_=train_input)
tf.summary.scalar("Training Loss", m.cost)
tf.summary.scalar("Learning Rate", m.lr)
with tf.name_scope("Valid"):
valid_input = WPInput(config=config, data=valid_data, name="ValidInput")
with tf.variable_scope("Model", reuse=True, initializer=initializer):
mvalid = WPModel(is_training=False, config=config, input_=valid_input)
tf.summary.scalar("Validation Loss", mvalid.cost)
with tf.name_scope("Test"):
test_input = WPInput(config=eval_config, data=test_data, name="TestInput")
with tf.variable_scope("Model", reuse=True, initializer=initializer):
mtest = WPModel(is_training=False, config=eval_config,
input_=test_input)
sv = tf.train.Supervisor(logdir=FLAGS.save_path)
with sv.managed_session() as session:
for i in range(config.max_max_epoch):
lr_decay = config.lr_decay ** max(i + 1 - config.max_epoch, 0.0)
m.assign_lr(session, config.learning_rate * lr_decay)
print("Epoch: %d Learning rate: %.3f" % (i + 1, session.run(m.lr)))
train_perplexity = run_epoch(session, m, eval_op=m.train_op,
verbose=True)
print("Epoch: %d Train Perplexity: %.3f" % (i + 1, train_perplexity))
valid_perplexity = run_epoch(session, mvalid)
print("Epoch: %d Valid Perplexity: %.3f" % (i + 1, valid_perplexity))
test_perplexity = run_epoch(session, mtest)
print("Test Perplexity: %.3f" % test_perplexity)
if FLAGS.save_path:
print("Saving model to %s." % FLAGS.save_path)
sv.saver.save(session, FLAGS.save_path, global_step=sv.global_step)
if __name__ == "__main__":
tf.app.run()
| true | true |
f71dbae64f29e199ef282e3693547b0b41233811 | 2,952 | py | Python | pensetup.py | fazildgr8/virtual_pen_MNIST | 69055980ee0f0005766e62e3a1ca4e2a0259157c | [
"MIT"
] | 2 | 2020-07-03T23:52:45.000Z | 2021-03-10T07:49:08.000Z | pensetup.py | fazildgr8/virtual_pen_MNIST | 69055980ee0f0005766e62e3a1ca4e2a0259157c | [
"MIT"
] | null | null | null | pensetup.py | fazildgr8/virtual_pen_MNIST | 69055980ee0f0005766e62e3a1ca4e2a0259157c | [
"MIT"
] | null | null | null | import cv2
import numpy as np
import time
# A required callback method that goes into the trackbar function.
def nothing(x):
    """No-op callback required by cv2.createTrackbar; ignores the value."""
    pass
# Initializing the webcam feed.
cap = cv2.VideoCapture(0)
cap.set(3, 1280)  # property 3 = CAP_PROP_FRAME_WIDTH
cap.set(4, 720)   # property 4 = CAP_PROP_FRAME_HEIGHT

# Create a window named trackbars.
cv2.namedWindow("Trackbars")

# Now create 6 trackbars that will control the lower and upper range of
# H, S and V channels. The arguments are: name of trackbar, window name,
# initial value, max value, callback function. For Hue the range is 0-179
# and for S, V it is 0-255.
cv2.createTrackbar("L - H", "Trackbars", 0, 179, nothing)
cv2.createTrackbar("L - S", "Trackbars", 0, 255, nothing)
cv2.createTrackbar("L - V", "Trackbars", 0, 255, nothing)
cv2.createTrackbar("U - H", "Trackbars", 179, 179, nothing)
cv2.createTrackbar("U - S", "Trackbars", 255, 255, nothing)
cv2.createTrackbar("U - V", "Trackbars", 255, 255, nothing)

while True:

    # Start reading the webcam feed frame by frame.
    ret, frame = cap.read()
    if not ret:
        break
    # Flip the frame horizontally (not required).
    frame = cv2.flip(frame, 1)

    # Convert the BGR image to HSV image.
    hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)

    # Get the new values of the trackbars in real time as the user
    # changes them.
    l_h = cv2.getTrackbarPos("L - H", "Trackbars")
    l_s = cv2.getTrackbarPos("L - S", "Trackbars")
    l_v = cv2.getTrackbarPos("L - V", "Trackbars")
    u_h = cv2.getTrackbarPos("U - H", "Trackbars")
    u_s = cv2.getTrackbarPos("U - S", "Trackbars")
    u_v = cv2.getTrackbarPos("U - V", "Trackbars")

    # Set the lower and upper HSV range according to the values selected
    # on the trackbars.
    lower_range = np.array([l_h, l_s, l_v])
    upper_range = np.array([u_h, u_s, u_v])

    # Filter the image and get the binary mask, where white represents
    # the target color.
    mask = cv2.inRange(hsv, lower_range, upper_range)

    # You can also visualize the real part of the target color (optional).
    res = cv2.bitwise_and(frame, frame, mask=mask)

    # Convert the binary mask to a 3-channel image, just so it can be
    # stacked with the others.
    mask_3 = cv2.cvtColor(mask, cv2.COLOR_GRAY2BGR)

    # Stack the mask, original frame and the filtered result.
    stacked = np.hstack((mask_3, frame, res))

    # Show this stacked frame at 40% of the size.
    cv2.imshow('Trackbars', cv2.resize(stacked, None, fx=0.4, fy=0.4))

    # If the user presses ESC (key code 27) then exit the program.
    key = cv2.waitKey(1)
    if key == 27:
        break

    # If the user presses `s` then print the selected HSV range.
    if key == ord('s'):
        thearray = [[l_h, l_s, l_v], [u_h, u_s, u_v]]
        print(thearray)

        # Also save this array as penval.npy for later reuse.
        np.save('penval', thearray)
        break

# Release the camera & destroy the windows.
cap.release()
cv2.destroyAllWindows() | 32.8 | 74 | 0.647019 | import cv2
import numpy as np
import time
def nothing(x):
pass
cap = cv2.VideoCapture(0)
cap.set(3, 1280)
cap.set(4, 720)
cv2.namedWindow("Trackbars")
cv2.createTrackbar("L - H", "Trackbars", 0, 179, nothing)
cv2.createTrackbar("L - S", "Trackbars", 0, 255, nothing)
cv2.createTrackbar("L - V", "Trackbars", 0, 255, nothing)
cv2.createTrackbar("U - H", "Trackbars", 179, 179, nothing)
cv2.createTrackbar("U - S", "Trackbars", 255, 255, nothing)
cv2.createTrackbar("U - V", "Trackbars", 255, 255, nothing)
while True:
ret, frame = cap.read()
if not ret:
break
frame = cv2.flip(frame, 1)
hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
l_h = cv2.getTrackbarPos("L - H", "Trackbars")
l_s = cv2.getTrackbarPos("L - S", "Trackbars")
l_v = cv2.getTrackbarPos("L - V", "Trackbars")
u_h = cv2.getTrackbarPos("U - H", "Trackbars")
u_s = cv2.getTrackbarPos("U - S", "Trackbars")
u_v = cv2.getTrackbarPos("U - V", "Trackbars")
lower_range = np.array([l_h, l_s, l_v])
upper_range = np.array([u_h, u_s, u_v])
mask = cv2.inRange(hsv, lower_range, upper_range)
res = cv2.bitwise_and(frame, frame, mask=mask)
mask_3 = cv2.cvtColor(mask, cv2.COLOR_GRAY2BGR)
stacked = np.hstack((mask_3, frame, res))
cv2.imshow('Trackbars', cv2.resize(stacked, None, fx=0.4, fy=0.4))
key = cv2.waitKey(1)
if key == 27:
break
if key == ord('s'):
thearray = [[l_h, l_s, l_v], [u_h, u_s, u_v]]
print(thearray)
np.save('penval', thearray)
break
cap.release()
cv2.destroyAllWindows() | true | true |
f71dbbae930412e85855a613c4fed7593eeb6d4f | 344 | py | Python | Task1F.py | reib2/Lab-3-Flood-Warning | 9f86b4b8a7fa9508ddaa0e9754d64ff6c4e38f66 | [
"MIT"
] | null | null | null | Task1F.py | reib2/Lab-3-Flood-Warning | 9f86b4b8a7fa9508ddaa0e9754d64ff6c4e38f66 | [
"MIT"
] | null | null | null | Task1F.py | reib2/Lab-3-Flood-Warning | 9f86b4b8a7fa9508ddaa0e9754d64ff6c4e38f66 | [
"MIT"
] | 1 | 2022-02-01T23:24:15.000Z | 2022-02-01T23:24:15.000Z |
from floodsystem import datafetcher
from floodsystem.station import MonitoringStation, inconsistent_typical_range_stations
from floodsystem.stationdata import build_station_list
# Build the full list of monitoring stations, then report those whose
# typical low/high range data is absent or inconsistent.
all_stations = build_station_list()
inconsistent_stations = inconsistent_typical_range_stations(all_stations)
print(inconsistent_stations)
| 24.571429 | 86 | 0.866279 |
from floodsystem import datafetcher
from floodsystem.station import MonitoringStation, inconsistent_typical_range_stations
from floodsystem.stationdata import build_station_list
stations = build_station_list()
inconsistent_stations = inconsistent_typical_range_stations(stations)
print (inconsistent_stations)
| true | true |
f71dbbef82f1b7c0963a75022302469ae15db6e7 | 11,435 | py | Python | book/_build/jupyter_execute/notebooks/high_energy_protons.py | AvijeetPrasad/laputas-blog | 27d969e341b1d264ef4fe3a334c775ce631ba2f1 | [
"BSD-3-Clause"
] | null | null | null | book/_build/jupyter_execute/notebooks/high_energy_protons.py | AvijeetPrasad/laputas-blog | 27d969e341b1d264ef4fe3a334c775ce631ba2f1 | [
"BSD-3-Clause"
] | null | null | null | book/_build/jupyter_execute/notebooks/high_energy_protons.py | AvijeetPrasad/laputas-blog | 27d969e341b1d264ef4fe3a334c775ce631ba2f1 | [
"BSD-3-Clause"
] | null | null | null | <a href="https://colab.research.google.com/github/AvijeetPrasad/laputas/blob/main/notebooks/high_energy_protons.ipynb" target="_parent"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a>
# High energy protons
## Summary
Protons have a rest mass equivalent to an energy of around 938 MeV. As the proton is accelerated, due to special relativistic effects, their mass increases. In order to understand the physics at conditions close to the Big Bang, particle accelerators such as the Large Hadron Collider (LHC) accelerates protons and other charged particles close to the speed of light. For example in LHC, protons are accelerated to energies of 7 TeV, i.e. ~7000 times their rest mass. Due to this increase in their mass/energy their behaviour in a gravitational field will be modified. Here we study these effects as the proton's velocity approaches the speed of light. We extend this discussion to Planck energies and look at the feasibility of the universe having gone through a Planck epoch.
# Import relevant packages and constants
import math
import matplotlib.pyplot as plt
import numpy as np
from astropy import units as u
from astropy.constants.si import c, G, m_p, m_e, e, eps0, h, hbar
from astropy.cosmology import WMAP9 as cosmo
# c = speed of light
# e = charge of proton
# eps0= Vacuum electric permittivity
# G = universal gravatational constant
# h = Planck's constant
# hbar= reduced Planck's constant
# m_e = rest mass of electron
# m_p = rest mass of proton
## 1.Introduction
The gravitational force between two protons of mass $m_p$ separated by a distance $r$ is given by the Newton's law as:
$F_g = \frac{G m_p^2}{r^2} \quad (1)$
For instance, two protons separated by a distance of 1 fm (typical nuclear scale) will experience an attractive force of $1.87 \times 10^{-34}~N$.
# Let r be the separation of 1 fm
r = (1*u.fm).to(u.m)
# The Gravitational force Fg between two protons
Fg = (G * m_p * m_p) / (r * r)
print(f"Gravitational force between two protons of mass {m_p:.2e} at a distance {r} = {Fg:.2e}")
This is beyond the experimental limit of force that can be detected, which is $10^{-24}$ N [(Biercuk et al. 2010)](https://arxiv.org/abs/1004.0780v2).
For the gravitational force between two particles at a distance of 1 fm to be equal to this observable limit, their mass has to be $1.22 \times 10^{-22} kg$. For protons to have such a high mass they have to accelerated to higher speeds.
# Let F_el be the experimental limit of detectable force
F_el = (1e-24*u.N).decompose()
# The mass at the experimental limit m_el
m_el = ((F_el * r * r)/G)** 0.5
print(f"The mass of particles separated by a distance of {r} at the experimental detection limit = {m_el:.2e}")
Another fundamental parameter associated with the proton is its charge. The electrostatic repulsive (Coulomb) force between two protons of charge $e$ separated by a distance $r$ is given by:
$F_e = k_e \frac{e^2}{r^2} \quad (2)$
where $k_e = 9\times 10^{9}~N\,m^2/C^2$ is the Coulomb constant (built from the vacuum permittivity). In SI units, $F_e = 2.31 \times 10^{2}~N$ (the code prints the dimensionally equivalent units $C^2/(F\,m)$).
# Let ke be the Coulomb constant and e the charge
ke = 1/(4*np.pi*eps0)
# The Coulomb force Fe between two protons:
Fe = (ke * e.si * e.si) / (r * r)
print(f"Coulomb repulsive force between two protons of chrage {e.si:.2e} at a distance {r} = {Fe:.2e}")
The electrostatic force between two protons is about 36 orders of magnitude greater than the gravitational force.
For the Gravitation force to be equal to Coulomb force between two protons, i.e. $\frac{G m_p^{'2}}{r^2}=\frac{k_e e^2}{r^2}$, the mass of the proton should be
$ m'_p=\sqrt{\frac{k_e e^2}{G}} = 1.86 \times 10^{-9}~kg \quad(3)$
# Let mpp be the mass where Gravitational and elctrostatic force becomes equal
mpp = np.sqrt((ke * e.si * e.si) / G )
mpp = mpp.decompose()
print(f"Mass of proton when gravity balances electrostatic repulsion = {mpp:.2e}")
Due to special relativistic effects, as a particle of rest mass ($m_0$) travels with a velocity ($v$) close to the speed of light ($c$), its mass increases by a factor $\gamma$ given by:
$\gamma=\frac{1}{\sqrt(1-v^2/c^2)} \quad(4)$
The increased mass ($m$) is given by
$m= \gamma m_0 \quad (5)$
For instance at the limit of detection the mass of proton $10^{-22} kg$ corresponds to a $\gamma$ of 73000.
## 2. Relativistic effects
### Q 2.1
How does the mass of the proton increase as its velocity tends to $c$?
For protons of rest mass $m_p$, the mass is given as $m'_p=\gamma m_p$. From equations (4) & (5), we obtain the following expression for the velocity of high energy protons
$v = c\sqrt{1-(m_p/m_p')^2} \quad(6)$
# Calculating the mass (in GeV) as a function of velocity (in units of c)
# Set the range and stepsize of velocity in units of c
v = np.arange(start=0.99, stop=0.999, step=.0001)*c
# Calculate the gamma factor and mass
gamma = 1/(np.sqrt(1-(v*v)/(c*c)))
mpp = m_p * gamma
# Set the range and stepsize of mass in units of m_p
mpv = np.arange(start=1000, stop=11000, step=100)*m_p
# Calculate the velocity
vv = c * np.sqrt(1 - (m_p/mpv)**2)
# Make the plots
fig, (ax1, ax2) = plt.subplots(1, 2,figsize=(10,5))
#fig.suptitle('Variation with c')
# Variation of proton mass with velocity
ax1.plot(v/c, (mpp * c *c).to(u.GeV),lw=2,c='b')
ax1.set_xlabel('v/c ')
ax1.set_ylabel("$m_p'$ (GeV)")
ax1.grid(True)
# Difference in proton speed and light speed as a fucntion of proton mass
ax2.plot( (mpv * c *c).to(u.GeV),(c-vv),lw=2,c='red')
ax2.set_xlabel("$m'_p$ (GeV)")
ax2.set_ylabel("c-v (m/s)")
ax2.grid(True)
plt.show()
### Q 2.2
Given the above difference in the speeds of the proton and light, what will be the separation between these high energy proton and photon after:
1. one year?
2. Hubble time?
---
Hubble time is the age of the universe ~ 13.8 Billion years, inverse of which gives the Hubble's constant $H_0$.
# Table of c - v as a function of mass
cmv = c - v
# Calculate the separation over a year
dist = cmv*u.year
disty = dist.decompose()
# Calculate the Hubble time
t_h = 1/cosmo.H(0).decompose()
print(f"Hubble constant = {cosmo.H(0):.2f}")
print(f"Hubble time = {t_h:.2e}")
print("-"*60)
sep = cmv*t_h
seph = sep.decompose()
print(" mp \t\t c-v \t sep. (1yr) sep. (Hubble)")
print("-"*60)
for i in range(0,len(cmv),10):
mpgev = (mpv * c *c).to(u.GeV)[i]
print(f"{mpgev:8.2f} {cmv[i]:10.2} {disty[i]:10.2} {seph[i]:10.2}")
### Q 2.3
What will be gamma factor and mass of the proton when the separation after Hubble time is Compton length?
---
The Compton wavelength ($\lambda_c$) of a particle is same as the wavelength of a photon having the same energy as the mass (energy) of the particle and is given by:
$\lambda_c = \frac{h}{m_p c} \quad (7)$.
Now we have:
$(c-v)/H_0 = \lambda_c \quad (8)$,
so the expression for $\gamma$ then becomes:
$\gamma = \left[1-(1-(\lambda_c H_0/c)^2)^{-1/2}\right] \quad (9)$.
Since $(\lambda_c H_0/c)\sim 10^{-41} << 1$, we can expand eqaution (9) binomially and neglect the higher order terms to get:
$\gamma = (2 \lambda_c H_0 /c)^{-1/2} \quad (10)$.
# Calculate the Compton length of a proton
cl = h/(m_p * c)
print(f"Compton Length = {cl.decompose():.2e}")
H0 = cosmo.H(0).decompose()
#gamma at Compton Length
gcl = (2*cl * H0 /c ) ** (-0.5)
print(f"Gamma factor at Compton Length = {gcl:.2e}")
print(f"Proton mass at Compton Length = {gcl * m_p:.2e}")
# Calculate the Planck mass
mpl = (hbar * c / G) ** 0.5
print(f"Plank mass = {mpl.decompose():.2e}")
Planck mass is a unit of mass in natural units given by
$m_{pl} = \sqrt{\frac{\hbar c}{G}} \approx 2 \times 10^{-8} kg \quad (11)$.
The proton mass when the separation over Hubble time becomes Compton length approaches the Planck mass.
## 3. Accelerating high energy protons
To accelerate protons to such high energies, we need *particle accelerators* — devices that use electromagnetic fields to boost the speed of charged particles. The most powerful accelerator is the Large Hadron Collider (LHC), which is a circular accelerator. To reach the energies considered here, however, we need a linear accelerator, since in a circular accelerator there is energy loss due to synchrotron radiation.
The most intense laser we have so far has an intensity $I \sim 10^{26} W/m^2$. This intensity is related to the electric field $E$ as
$I = \frac{1}{2} c\epsilon_0E^2 \quad (12)$,
where $\epsilon_0$ is the permittivity of free space. This gives an electric field given by,
$E \sim 2.7 \times 10^{14} V/m$.
For a voltage of $\sim 10^{28} V$, the linear accelerator powered by this electric field should have an arm length $l$ given by
$l = V/E \sim 4 \times 10^{13} m$.
To reduce the required arm length of the linear accelerator, we need to increase the electric field strength.
The maximum possible electric field strength will be that around a fundamental charge ($e=1.6\times 10^{-19} C$) at a distance of 1 fm is given by
$E_{max} = \frac{ke}{(1 fm)^2} \sim 10^{21} V/m \quad (13)$
The energy density of the laser is given by
$\epsilon = \frac{e V}{l w^2} \quad (14)$,
where $w$ is the beam width of the laser.
### Q 3.1
Calculate the energy density of the beam of width $10^{-7}~m$ (wavelength of the beam).
# Width of the beam (wavelength of the laser): w = 10^-7 m.
# NOTE(review): `V` and `Emax` are only assigned in the Q3.2 cell below,
# so that cell must be executed before this one -- consider reordering.
w = 1.e-7 *u.m
# Volume swept by the beam: cross-section w^2 times arm length l = V/Emax.
vol = w * w * V/Emax
# Calculate the energy density (eq. 14), expressed in J/m^3
ed = (e * V /vol).to(unit=u.J/u.m**3)
print(f"Volume of the beam = {vol:.2e}")
print(f"Energy density of the beam = {ed:.2e}")
### Q 3.2
Calculate the arm length corresponding to this electric field.
# I is the intensity of the laser
I = 1.e26 *u.W/u.m**2 #W/m^2
# The corresoinding electric field E
E = (((2 * I ) / (c * eps0)) **.5).to(unit=u.V/u.m)
print(f"Electric field = {E:.2e}")
# The voltage V and arm length l
V = 1.e28 *u.V
l = V/E
print(f"Arm length = {l:.2e}")
Emax = ((1/(4 * math.pi * eps0)) * (e / cl ** 2)).to(unit=u.V/u.m)
print(f"Maximum electric field = {Emax:.2e}")
print(f"Arm length for max E = {V/Emax:.2e}")
The arm length $l\sim 10^{13} m$ is roughly 100 times the distance between the earth and the sun (100 Astronomical Units).
A particle of charge $e$ moving with velocity $v$ moving in a magnetic field $B$ gets deflected due to the Lorentz force, tracing a circular path of radius
$r = \gamma \frac{m v}{e B} \quad (15)$.
Since the arm length of these accelerators need to be quite large, the particles will be affected by the galactic magnetic field ($10^{-6} G$)
### Q 3.3
Calculate the radius of the high energy proton in the galactic magnetic field at energies:
a. at 7 TeV (LHC energies)
b. at Planck energy.
# From equation (6): speed for a given Lorentz factor.
# (PEP 8 E731: use `def` instead of binding a lambda to a name.)
def vg(g):
    """Velocity of a particle with Lorentz factor `g` (carries units of c)."""
    return c * np.sqrt(1 - (1 / g) ** 2)

# Galactic magnetic field (~1 microgauss) expressed in Tesla.
B = 10**(-10.)*u.T # in Tesla

def rb(g):
    """Gyroradius (eq. 15) of a proton with Lorentz factor `g` in field B."""
    return (g * m_p) * vg(g) / (e * B)

# Radius at LHC energy
g1 = 7000.
print(f"Radius at LHC energy = {rb(g1).decompose():.2e}")
# Radius at Planck energy
g2 = 10**18.
print(f"Radius at Planck energy = {rb(g2).decompose():.2e}")
Note: The radius at the LHC energy is about $10^6$ times smaller than that of the Milky Way Galaxy ($10^{20}~m$), while at the Planck energy, it is $10^8$ times larger.
Since the accerlerator required to produce Planck energies is untenable and not practical it is unlikely that we can test theories involving energy scales at Planck epoch. | 39.843206 | 777 | 0.696895 | <a href="https://colab.research.google.com/github/AvijeetPrasad/laputas/blob/main/notebooks/high_energy_protons.ipynb" target="_parent"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a>
have a rest mass equivalent to an energy of around 938 MeV. As the proton is accelerated, due to special relativistic effects, their mass increases. In order to understand the physics at conditions close to the Big Bang, particle accelerators such as the Large Hadron Collider (LHC) accelerates protons and other charged particles close to the speed of light. For example in LHC, protons are accelerated to energies of 7 TeV, i.e. ~7000 times their rest mass. Due to this increase in their mass/energy their behaviour in a gravitational field will be modified. Here we study these effects as the proton's velocity approaches the speed of light. We extend this discussion to Planck energies and look at the feasibility of the universe having gone through a Planck epoch.
# Import relevant packages and constants
import math
import matplotlib.pyplot as plt
import numpy as np
from astropy import units as u
from astropy.constants.si import c, G, m_p, m_e, e, eps0, h, hbar
from astropy.cosmology import WMAP9 as cosmo
# c = speed of light
# e = charge of proton
# eps0= Vacuum electric permittivity
# G = universal gravatational constant
# h = Planck's constant
# m_e = rest mass of electron
# m_p = rest mass of proton
## 1.Introduction
The gravitational force between two protons of mass $m_p$ separated by a distance $r$ is given by the Newton's law as:
$F_g = \frac{G m_p^2}{r^2} \quad (1)$
For instance, two protons separated by a distance of 1 fm (typical nuclear scale) will experience a attractive force of $1.87 \times 10^{-34} N$.
r = (1*u.fm).to(u.m)
Fg = (G * m_p * m_p) / (r * r)
print(f"Gravitational force between two protons of mass {m_p:.2e} at a distance {r} = {Fg:.2e}")
This is beyond the experimental limit of force that can be detected, which is $10^{-24}$ N [(Biercuk et al. 2010)](https://arxiv.org/abs/1004.0780v2).
For the gravitational force between two particles at a distance of 1 fm to be equal to this observable limit, their mass has to be $1.22 \times 10^{-22} kg$. For protons to have such a high mass they have to accelerated to higher speeds.
F_el = (1e-24*u.N).decompose()
m_el = ((F_el * r * r)/G)** 0.5
print(f"The mass of particles separated by a distance of {r} at the experimental detection limit = {m_el:.2e}")
Another fundamental parameter associated with the proton is its charge. The electrostatic repulsive (Coulomb) force between two protons of charge $e$ separated by a distance $r$ is given by:
$F_e = k_e \frac{e^2}{r^2} \quad (2)$
where $k_e = 9\times 10^{9} N m^2/C^2$ is the permitivity constant. In SI units, $F_e = 2.31 \times 10^{2} C^2/(F m)$.
ke = 1/(4*np.pi*eps0)
Fe = (ke * e.si * e.si) / (r * r)
print(f"Coulomb repulsive force between two protons of chrage {e.si:.2e} at a distance {r} = {Fe:.2e}")
The electrostatic force between two protons is 36 orders of magnitude greater than the gravitational force.
For the Gravitation force to be equal to Coulomb force between two protons, i.e. $\frac{G m_p^{'2}}{r^2}=\frac{k_e e^2}{r^2}$, the mass of the proton should be
$ m'_p=\sqrt{\frac{k_e e^2}{G}} = 1.86 \times 10^{-9}\quad(3)$
mpp = np.sqrt((ke * e.si * e.si) / G )
mpp = mpp.decompose()
print(f"Mass of proton when gravity balances electrostatic repulsion = {mpp:.2e}")
Due to special relativistic effcts, as a particle of rest mass ($m_0$) travels with a velocity ($v$) close to the speed of light ($c$), its mass increases by a factor $\gamma$ given by:
$\gamma=\frac{1}{\sqrt(1-v^2/c^2)} \quad(4)$
The increased mass ($m$) is given by
$m= \gamma m_0 \quad (5)$
For instance at the limit of detection the mass of proton $10^{-22} kg$ corresponds to a $\gamma$ of 73000.
e as its velocity tends to $c$?
For protons of rest mass $m_p$, the mass is given as $m'_p=\gamma m_p$. From equations (4) & (5), we obtain the following expression for the velocity of high energy protons
$v = c\sqrt{1-(m_p/m_p')^2} \quad(6)$
v = np.arange(start=0.99, stop=0.999, step=.0001)*c
gamma = 1/(np.sqrt(1-(v*v)/(c*c)))
mpp = m_p * gamma
mpv = np.arange(start=1000, stop=11000, step=100)*m_p
vv = c * np.sqrt(1 - (m_p/mpv)**2)
fig, (ax1, ax2) = plt.subplots(1, 2,figsize=(10,5))
ax1.plot(v/c, (mpp * c *c).to(u.GeV),lw=2,c='b')
ax1.set_xlabel('v/c ')
ax1.set_ylabel("$m_p'$ (GeV)")
ax1.grid(True)
# Difference in proton speed and light speed as a fucntion of proton mass
ax2.plot( (mpv * c *c).to(u.GeV),(c-vv),lw=2,c='red')
ax2.set_xlabel("$m'_p$ (GeV)")
ax2.set_ylabel("c-v (m/s)")
ax2.grid(True)
plt.show()
difference in the speeds of the proton and light, what will be the separation between these high energy proton and photon after:
1. one year?
2. Hubble time?
---
Hubble time is the age of the universe ~ 13.8 Billion years, inverse of which gives the Hubble's constant $H_0$.
# Table of c - v as a function of mass
cmv = c - v
# Calculate the separation over a year
dist = cmv*u.year
disty = dist.decompose()
# Calculate the Hubble time
t_h = 1/cosmo.H(0).decompose()
print(f"Hubble constant = {cosmo.H(0):.2f}")
print(f"Hubble time = {t_h:.2e}")
print("-"*60)
sep = cmv*t_h
seph = sep.decompose()
print(" mp \t\t c-v \t sep. (1yr) sep. (Hubble)")
print("-"*60)
for i in range(0,len(cmv),10):
mpgev = (mpv * c *c).to(u.GeV)[i]
print(f"{mpgev:8.2f} {cmv[i]:10.2} {disty[i]:10.2} {seph[i]:10.2}")
### Q 2.3
What will be gamma factor and mass of the proton when the separation after Hubble time is Compton length?
---
The Compton wavelength ($\lambda_c$) of a particle is same as the wavelength of a photon having the same energy as the mass (energy) of the particle and is given by:
$\lambda_c = \frac{h}{m_p c} \quad (7)$.
Now we have:
$(c-v)/H_0 = \lambda_c \quad (8)$,
so the expression for $\gamma$ then becomes:
$\gamma = \left[1-(1-(\lambda_c H_0/c)^2)^{-1/2}\right] \quad (9)$.
Since $(\lambda_c H_0/c) \sim 10^{-41} \ll 1$, we can expand equation (9) binomially and neglect the higher-order terms to get:
$\gamma = (2 \lambda_c H_0 /c)^{-1/2} \quad (10)$.
# Calculate the Compton length of a proton
cl = h/(m_p * c)
print(f"Compton Length = {cl.decompose():.2e}")
H0 = cosmo.H(0).decompose()
#gamma at Compton Length
gcl = (2*cl * H0 /c ) ** (-0.5)
print(f"Gamma factor at Compton Length = {gcl:.2e}")
print(f"Proton mass at Compton Length = {gcl * m_p:.2e}")
# Calculate the Planck mass
mpl = (hbar * c / G) ** 0.5
print(f"Plank mass = {mpl.decompose():.2e}")
Planck mass is a unit of mass in natural units given by
$m_{pl} = \sqrt{\frac{\hbar c}{G}} \approx 2 \times 10^{-8} kg \quad (11)$.
The proton mass when the separation over Hubble time becomes Compton length approaches the Planck mass.
## 3. Accelerating high energy protons
To accelerate these protons to such high energies,we need *Particle accelerators*, which are devices which use electromagnets to enhance their speeds. The most powerful accelerator is the Large Hadron Collider (LHC), which is a circular accelerator. To accelerate protons to such high energies, we need a linear accelerator, since in the case of a circular accelerator there is energy loss due to synchrotron radiation.
The most intense laser we have so far has an intensity $I \sim 10^{26} W/m^2$. This intensity is related to the electric field $E$ as
$I = \frac{1}{2} c\epsilon_0E^2 \quad (12)$,
where $\epsilon_0$ is the permittivity of free space. This gives an electric field given by,
$E \sim 2.7 \times 10^{14} V/m$.
For a voltage of $\sim 10^{28} V$, the linear accelerator powered by this electric field should have an arm length $l$ given by
$l = V/E \sim 4 \times 10^{13} m$.
To reduce the required arm length of the linear accelerator, we need to increase the electric field strength.
The maximum possible electric field strength will be that around a fundamental charge ($e=1.6\times 10^{-19} C$) at a distance of 1 fm is given by
$E_{max} = \frac{ke}{(1 fm)^2} \sim 10^{21} V/m \quad (13)$
The energy density of the laser is given by
$\epsilon = \frac{e V}{l w^2} \quad (14)$,
where $w$ beam-width of the laser.
### Q 3.1
Calculate the energy density of the beam of width $10^{-7}~m$ (wavelength of the beam).
# Width of the beam (wavelength of the laser) w = 10^-7 m
# Volume of the beam w^2 * l = 10^-7 m
w = 1.e-7 *u.m
vol = w * w * V/Emax
# Calculate the energy density
ed = (e * V /vol).to(unit=u.J/u.m**3)
print(f"Volume of the beam = {vol:.2e}")
print(f"Energy density of the beam = {ed:.2e}")
### Q 3.2
Calculate the arm length corresponding to this electric field.
# I is the intensity of the laser
I = 1.e26 *u.W/u.m**2 #W/m^2
# The corresoinding electric field E
E = (((2 * I ) / (c * eps0)) **.5).to(unit=u.V/u.m)
print(f"Electric field = {E:.2e}")
# The voltage V and arm length l
V = 1.e28 *u.V
l = V/E
print(f"Arm length = {l:.2e}")
Emax = ((1/(4 * math.pi * eps0)) * (e / cl ** 2)).to(unit=u.V/u.m)
print(f"Maximum electric field = {Emax:.2e}")
print(f"Arm length for max E = {V/Emax:.2e}")
The arm length $l\sim 10^{13} m$ is roughly 100 times the distance between the earth and the sun (100 Astronomical Units).
A particle of charge $e$ moving with velocity $v$ moving in a magnetic field $B$ gets deflected due to the Lorentz force, tracing a circular path of radius
$r = \gamma \frac{m v}{e B} \quad (15)$.
Since the arm length of these accelerators need to be quite large, the particles will be affected by the galactic magnetic field ($10^{-6} G$)
### Q 3.3
Calculate the radius of the high energy proton in the galactic magnetic field at energies:
a. at 7 TeV (LHC energies)
b. at Planck energy.
# From equation (6)
# vg: velocity as a function of gamma
vg = lambda g: c*np.sqrt(1 - (1/g)**2)
B = 10**(-10.)*u.T # in Tesla
# rb: radius of the particle as a function of gamma
rb = lambda g: (g * m_p) * vg(g) / (e * B )
# Radius at LHC energy
g1 = 7000.
print(f"Radius at LHC energy = {rb(g1).decompose():.2e}")
# Radius at Planck energy
g2 = 10**18.
print(f"Radius at Planck energy = {rb(g2).decompose():.2e}")
Note: The radius at the LHC energy is $10^6$ times smaller than that of the Milky Way Galaxy ($10^{20} m$), while at the Planck energy, it is $10^8$ times larger.
Since the accelerator required to produce Planck energies is untenable and impractical, it is unlikely that we can test theories involving energy scales at the Planck epoch.
f71dbcac2d479aab6a392d579c8e4f997407c26f | 685 | py | Python | apis/alembic/versions/eab8d977bfb9_add_exception_in_trace_result.py | iii-org/devops-system | 71f938c9e225ac24ab9102a8221dc5341a01889c | [
"Apache-2.0"
] | 4 | 2021-07-15T15:59:01.000Z | 2022-02-24T02:58:52.000Z | apis/alembic/versions/eab8d977bfb9_add_exception_in_trace_result.py | iii-org/devops-system | 71f938c9e225ac24ab9102a8221dc5341a01889c | [
"Apache-2.0"
] | 4 | 2020-06-12T04:05:46.000Z | 2021-11-09T03:53:13.000Z | apis/alembic/versions/eab8d977bfb9_add_exception_in_trace_result.py | iii-org/devops-system | 71f938c9e225ac24ab9102a8221dc5341a01889c | [
"Apache-2.0"
] | 2 | 2020-09-29T05:39:28.000Z | 2021-11-26T09:52:17.000Z | """add_exception_in_trace_result
Revision ID: eab8d977bfb9
Revises: 06302deefc58
Create Date: 2021-08-26 02:10:55.283203
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'eab8d977bfb9'
down_revision = '06302deefc58'
branch_labels = None
depends_on = None
def upgrade():
    """Apply this migration: add the nullable ``exception`` string column to ``trace_result``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('trace_result', sa.Column('exception', sa.String(), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Revert this migration: drop the ``exception`` column from ``trace_result``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('trace_result', 'exception')
    # ### end Alembic commands ###
| 23.62069 | 85 | 0.706569 | from alembic import op
import sqlalchemy as sa
revision = 'eab8d977bfb9'
down_revision = '06302deefc58'
branch_labels = None
depends_on = None
def upgrade():
| true | true |
f71dbd8e28eb5668c8b490e266f103d12edcb364 | 788 | py | Python | cursoemvideo/desafios/Desafio071.py | adinsankofa/python | 8f2f26c77015c0baaa76174e004406b4115272c7 | [
"MIT"
] | null | null | null | cursoemvideo/desafios/Desafio071.py | adinsankofa/python | 8f2f26c77015c0baaa76174e004406b4115272c7 | [
"MIT"
] | null | null | null | cursoemvideo/desafios/Desafio071.py | adinsankofa/python | 8f2f26c77015c0baaa76174e004406b4115272c7 | [
"MIT"
] | null | null | null | '''
Exercício Python 071: Crie um programa que simule o funcionamento de um caixa
eletrônico. No início, pergunte ao usuário qual será o valor a ser sacado
(número inteiro) e o programa vai informar quantas cédulas de cada valor
serão entregues.
'''
# ATM simulator: read a withdrawal amount and print how many notes of each
# denomination (R$ 50, 20, 10, 1) are dispensed, largest first.
print('=' * 30)
print('{:^30}'.format('BANCO CEV'))
print('=' * 30)
saque = int(input('Que valor você quer sacar? R$ '))
# Guard against negative amounts: the original repeated-subtraction loop
# never terminated when saque < 0.
total = max(saque, 0)
# Greedy note breakdown via divmod — same printed output as the original
# repeated-subtraction loop for every valid (non-negative) amount; a
# denomination is only reported when at least one note of it is dispensed.
for ced in (50, 20, 10, 1):
    totalCed, total = divmod(total, ced)
    if totalCed > 0:
        print('Total de {} cédulas de R$ {}'.format(totalCed, ced))
print('=' * 30)
print('{:^30}'.format('BANCO CEV'))
print('=' * 30)
saque = int(input('Que valor você quer sacar? R$ '))
total = saque
ced = 50
totalCed = 0
while True:
if total >= ced:
total -= ced
totalCed += 1
else:
if totalCed > 0:
print('Total de {} cédulas de R$ {}'.format(totalCed, ced))
if ced == 50:
ced = 20
elif ced == 20:
ced = 10
elif ced == 10:
ced = 1
totalCed = 0
if total == 0:
break
| true | true |
f71dbe0266711d6c70cc1edf4795530136c40f52 | 4,922 | py | Python | Lib/test/test_hexoct.py | jasonadu/Python-2.5 | 93e24b88564de120b1296165b5c55975fdcb8a3c | [
"PSF-2.0"
] | 49 | 2015-03-10T17:34:19.000Z | 2021-11-10T22:23:18.000Z | Lib/test/test_hexoct.py | jasonadu/Python-2.5 | 93e24b88564de120b1296165b5c55975fdcb8a3c | [
"PSF-2.0"
] | 1 | 2018-07-28T20:07:04.000Z | 2018-07-30T18:28:34.000Z | Lib/test/test_hexoct.py | jasonadu/Python-2.5 | 93e24b88564de120b1296165b5c55975fdcb8a3c | [
"PSF-2.0"
] | 32 | 2015-02-06T12:10:32.000Z | 2019-06-18T03:21:36.000Z | """Test correct treatment of hex/oct constants.
This is complex because of changes due to PEP 237.
"""
import sys
platform_long_is_32_bits = sys.maxint == 2147483647
import unittest
from test import test_support
import warnings
warnings.filterwarnings("ignore", "hex/oct constants", FutureWarning,
"<string>")
class TextHexOct(unittest.TestCase):
    """Check that hex/oct integer literals evaluate to the expected values.

    Each test exercises three spellings of the same constant: bare, with a
    minus sign and parentheses, and with a minus sign and NO parentheses
    (the last spelling regressed in Python 2.2-2.2.2 and 2.3a1, see the
    comments below).  Expected values depend on whether the platform's
    C long is 32 or 64 bits (``platform_long_is_32_bits``).
    """

    def test_hex_baseline(self):
        """Hex literals that fit in a signed C long stay plain ints."""
        # Baseline tests
        self.assertEqual(0x0, 0)
        self.assertEqual(0x10, 16)
        if platform_long_is_32_bits:
            self.assertEqual(0x7fffffff, 2147483647)
        else:
            self.assertEqual(0x7fffffffffffffff, 9223372036854775807)
        # Ditto with a minus sign and parentheses
        self.assertEqual(-(0x0), 0)
        self.assertEqual(-(0x10), -16)
        if platform_long_is_32_bits:
            self.assertEqual(-(0x7fffffff), -2147483647)
        else:
            self.assertEqual(-(0x7fffffffffffffff), -9223372036854775807)
        # Ditto with a minus sign and NO parentheses
        self.assertEqual(-0x0, 0)
        self.assertEqual(-0x10, -16)
        if platform_long_is_32_bits:
            self.assertEqual(-0x7fffffff, -2147483647)
        else:
            self.assertEqual(-0x7fffffffffffffff, -9223372036854775807)

    def test_hex_unsigned(self):
        """Hex literals just past sys.maxint become positive longs (PEP 237)."""
        if platform_long_is_32_bits:
            # Positive constants
            self.assertEqual(0x80000000, 2147483648L)
            self.assertEqual(0xffffffff, 4294967295L)
            # Ditto with a minus sign and parentheses
            self.assertEqual(-(0x80000000), -2147483648L)
            self.assertEqual(-(0xffffffff), -4294967295L)
            # Ditto with a minus sign and NO parentheses
            # This failed in Python 2.2 through 2.2.2 and in 2.3a1
            self.assertEqual(-0x80000000, -2147483648L)
            self.assertEqual(-0xffffffff, -4294967295L)
        else:
            # Positive constants
            self.assertEqual(0x8000000000000000, 9223372036854775808L)
            self.assertEqual(0xffffffffffffffff, 18446744073709551615L)
            # Ditto with a minus sign and parentheses
            self.assertEqual(-(0x8000000000000000), -9223372036854775808L)
            self.assertEqual(-(0xffffffffffffffff), -18446744073709551615L)
            # Ditto with a minus sign and NO parentheses
            # This failed in Python 2.2 through 2.2.2 and in 2.3a1
            self.assertEqual(-0x8000000000000000, -9223372036854775808L)
            self.assertEqual(-0xffffffffffffffff, -18446744073709551615L)

    def test_oct_baseline(self):
        """Octal literals that fit in a signed C long stay plain ints."""
        # Baseline tests
        self.assertEqual(00, 0)
        self.assertEqual(020, 16)
        if platform_long_is_32_bits:
            self.assertEqual(017777777777, 2147483647)
        else:
            self.assertEqual(0777777777777777777777, 9223372036854775807)
        # Ditto with a minus sign and parentheses
        self.assertEqual(-(00), 0)
        self.assertEqual(-(020), -16)
        if platform_long_is_32_bits:
            self.assertEqual(-(017777777777), -2147483647)
        else:
            self.assertEqual(-(0777777777777777777777), -9223372036854775807)
        # Ditto with a minus sign and NO parentheses
        self.assertEqual(-00, 0)
        self.assertEqual(-020, -16)
        if platform_long_is_32_bits:
            self.assertEqual(-017777777777, -2147483647)
        else:
            self.assertEqual(-0777777777777777777777, -9223372036854775807)

    def test_oct_unsigned(self):
        """Octal literals just past sys.maxint become positive longs (PEP 237)."""
        if platform_long_is_32_bits:
            # Positive constants
            self.assertEqual(020000000000, 2147483648L)
            self.assertEqual(037777777777, 4294967295L)
            # Ditto with a minus sign and parentheses
            self.assertEqual(-(020000000000), -2147483648L)
            self.assertEqual(-(037777777777), -4294967295L)
            # Ditto with a minus sign and NO parentheses
            # This failed in Python 2.2 through 2.2.2 and in 2.3a1
            self.assertEqual(-020000000000, -2147483648L)
            self.assertEqual(-037777777777, -4294967295L)
        else:
            # Positive constants
            self.assertEqual(01000000000000000000000, 9223372036854775808L)
            self.assertEqual(01777777777777777777777, 18446744073709551615L)
            # Ditto with a minus sign and parentheses
            self.assertEqual(-(01000000000000000000000), -9223372036854775808L)
            self.assertEqual(-(01777777777777777777777), -18446744073709551615L)
            # Ditto with a minus sign and NO parentheses
            # This failed in Python 2.2 through 2.2.2 and in 2.3a1
            self.assertEqual(-01000000000000000000000, -9223372036854775808L)
            self.assertEqual(-01777777777777777777777, -18446744073709551615L)
def test_main():
    """Entry point used by regrtest to run the whole suite."""
    test_support.run_unittest(TextHexOct)

if __name__ == "__main__":
    test_main()
| 42.068376 | 80 | 0.656847 | """Test correct treatment of hex/oct constants.
This is complex because of changes due to PEP 237.
"""
import sys
platform_long_is_32_bits = sys.maxint == 2147483647
import unittest
from test import test_support
import warnings
warnings.filterwarnings("ignore", "hex/oct constants", FutureWarning,
"<string>")
class TextHexOct(unittest.TestCase):
def test_hex_baseline(self):
self.assertEqual(0x0, 0)
self.assertEqual(0x10, 16)
if platform_long_is_32_bits:
self.assertEqual(0x7fffffff, 2147483647)
else:
self.assertEqual(0x7fffffffffffffff, 9223372036854775807)
self.assertEqual(-(0x0), 0)
self.assertEqual(-(0x10), -16)
if platform_long_is_32_bits:
self.assertEqual(-(0x7fffffff), -2147483647)
else:
self.assertEqual(-(0x7fffffffffffffff), -9223372036854775807)
self.assertEqual(-0x0, 0)
self.assertEqual(-0x10, -16)
if platform_long_is_32_bits:
self.assertEqual(-0x7fffffff, -2147483647)
else:
self.assertEqual(-0x7fffffffffffffff, -9223372036854775807)
def test_hex_unsigned(self):
if platform_long_is_32_bits:
self.assertEqual(0x80000000, 2147483648L)
self.assertEqual(0xffffffff, 4294967295L)
self.assertEqual(-(0x80000000), -2147483648L)
self.assertEqual(-(0xffffffff), -4294967295L)
self.assertEqual(-0x80000000, -2147483648L)
self.assertEqual(-0xffffffff, -4294967295L)
else:
self.assertEqual(0x8000000000000000, 9223372036854775808L)
self.assertEqual(0xffffffffffffffff, 18446744073709551615L)
self.assertEqual(-(0x8000000000000000), -9223372036854775808L)
self.assertEqual(-(0xffffffffffffffff), -18446744073709551615L)
self.assertEqual(-0x8000000000000000, -9223372036854775808L)
self.assertEqual(-0xffffffffffffffff, -18446744073709551615L)
def test_oct_baseline(self):
self.assertEqual(00, 0)
self.assertEqual(020, 16)
if platform_long_is_32_bits:
self.assertEqual(017777777777, 2147483647)
else:
self.assertEqual(0777777777777777777777, 9223372036854775807)
self.assertEqual(-(00), 0)
self.assertEqual(-(020), -16)
if platform_long_is_32_bits:
self.assertEqual(-(017777777777), -2147483647)
else:
self.assertEqual(-(0777777777777777777777), -9223372036854775807)
self.assertEqual(-00, 0)
self.assertEqual(-020, -16)
if platform_long_is_32_bits:
self.assertEqual(-017777777777, -2147483647)
else:
self.assertEqual(-0777777777777777777777, -9223372036854775807)
def test_oct_unsigned(self):
if platform_long_is_32_bits:
self.assertEqual(020000000000, 2147483648L)
self.assertEqual(037777777777, 4294967295L)
self.assertEqual(-(020000000000), -2147483648L)
self.assertEqual(-(037777777777), -4294967295L)
self.assertEqual(-020000000000, -2147483648L)
self.assertEqual(-037777777777, -4294967295L)
else:
self.assertEqual(01000000000000000000000, 9223372036854775808L)
self.assertEqual(01777777777777777777777, 18446744073709551615L)
self.assertEqual(-(01000000000000000000000), -9223372036854775808L)
self.assertEqual(-(01777777777777777777777), -18446744073709551615L)
self.assertEqual(-01000000000000000000000, -9223372036854775808L)
self.assertEqual(-01777777777777777777777, -18446744073709551615L)
def test_main():
test_support.run_unittest(TextHexOct)
if __name__ == "__main__":
test_main()
| false | true |
f71dbea28c6bb0f66e8170b73a2d179586fc3668 | 8,203 | py | Python | sdk/identity/azure-identity/azure/identity/aio/_credentials/default.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | 1 | 2022-03-09T08:59:13.000Z | 2022-03-09T08:59:13.000Z | sdk/identity/azure-identity/azure/identity/aio/_credentials/default.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | null | null | null | sdk/identity/azure-identity/azure/identity/aio/_credentials/default.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | null | null | null | # ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
import logging
import os
from typing import TYPE_CHECKING
from ..._constants import EnvironmentVariables
from ..._internal import get_default_authority, normalize_authority
from .azure_cli import AzureCliCredential
from .azure_powershell import AzurePowerShellCredential
from .chained import ChainedTokenCredential
from .environment import EnvironmentCredential
from .managed_identity import ManagedIdentityCredential
from .shared_cache import SharedTokenCacheCredential
from .vscode import VisualStudioCodeCredential
if TYPE_CHECKING:
from typing import Any, List
from azure.core.credentials import AccessToken
from azure.core.credentials_async import AsyncTokenCredential
_LOGGER = logging.getLogger(__name__)
class DefaultAzureCredential(ChainedTokenCredential):
    """A default credential capable of handling most Azure SDK authentication scenarios.

    The identity it uses depends on the environment. When an access token is needed, it requests one using these
    identities in turn, stopping when one provides a token:

    1. A service principal configured by environment variables. See :class:`~azure.identity.aio.EnvironmentCredential`
       for more details.
    2. An Azure managed identity. See :class:`~azure.identity.aio.ManagedIdentityCredential` for more details.
    3. On Windows only: a user who has signed in with a Microsoft application, such as Visual Studio. If multiple
       identities are in the cache, then the value of the environment variable ``AZURE_USERNAME`` is used to select
       which identity to use. See :class:`~azure.identity.aio.SharedTokenCacheCredential` for more details.
    4. The user currently signed in to Visual Studio Code.
    5. The identity currently logged in to the Azure CLI.
    6. The identity currently logged in to Azure PowerShell.

    This default behavior is configurable with keyword arguments.

    :keyword str authority: Authority of an Azure Active Directory endpoint, for example 'login.microsoftonline.com',
        the authority for Azure Public Cloud (which is the default). :class:`~azure.identity.AzureAuthorityHosts`
        defines authorities for other clouds. Managed identities ignore this because they reside in a single cloud.
    :keyword bool exclude_cli_credential: Whether to exclude the Azure CLI from the credential. Defaults to **False**.
    :keyword bool exclude_environment_credential: Whether to exclude a service principal configured by environment
        variables from the credential. Defaults to **False**.
    :keyword bool exclude_powershell_credential: Whether to exclude Azure PowerShell. Defaults to **False**.
    :keyword bool exclude_visual_studio_code_credential: Whether to exclude stored credential from VS Code.
        Defaults to **False**.
    :keyword bool exclude_managed_identity_credential: Whether to exclude managed identity from the credential.
        Defaults to **False**.
    :keyword bool exclude_shared_token_cache_credential: Whether to exclude the shared token cache. Defaults to
        **False**.
    :keyword str managed_identity_client_id: The client ID of a user-assigned managed identity. Defaults to the value
        of the environment variable AZURE_CLIENT_ID, if any. If not specified, a system-assigned identity will be used.
    :keyword str shared_cache_username: Preferred username for :class:`~azure.identity.aio.SharedTokenCacheCredential`.
        Defaults to the value of environment variable AZURE_USERNAME, if any.
    :keyword str shared_cache_tenant_id: Preferred tenant for :class:`~azure.identity.aio.SharedTokenCacheCredential`.
        Defaults to the value of environment variable AZURE_TENANT_ID, if any.
    :keyword str visual_studio_code_tenant_id: Tenant ID to use when authenticating with
        :class:`~azure.identity.aio.VisualStudioCodeCredential`. Defaults to the "Azure: Tenant" setting in VS Code's
        user settings or, when that setting has no value, the "organizations" tenant, which supports only Azure Active
        Directory work or school accounts.
    """

    def __init__(self, **kwargs: "Any") -> None:
        if "tenant_id" in kwargs:
            raise TypeError("'tenant_id' is not supported in DefaultAzureCredential.")

        authority = kwargs.pop("authority", None)

        # Pop the VS Code tenant once, up front; it is only consumed via
        # vscode_args below.  (A second, dead pop of the same key later in
        # this method has been removed — the key is already gone from kwargs
        # at that point and the reassigned value was never read.)
        vscode_tenant_id = kwargs.pop(
            "visual_studio_code_tenant_id", os.environ.get(EnvironmentVariables.AZURE_TENANT_ID)
        )
        # NOTE(review): vscode_args is copied before the remaining pops, so it
        # still carries any caller-supplied keys such as the exclude_* flags;
        # presumably VisualStudioCodeCredential tolerates extra kwargs — confirm.
        vscode_args = dict(kwargs)
        if authority:
            vscode_args["authority"] = authority
        if vscode_tenant_id:
            vscode_args["tenant_id"] = vscode_tenant_id

        authority = normalize_authority(authority) if authority else get_default_authority()

        shared_cache_username = kwargs.pop("shared_cache_username", os.environ.get(EnvironmentVariables.AZURE_USERNAME))
        shared_cache_tenant_id = kwargs.pop(
            "shared_cache_tenant_id", os.environ.get(EnvironmentVariables.AZURE_TENANT_ID)
        )

        managed_identity_client_id = kwargs.pop(
            "managed_identity_client_id", os.environ.get(EnvironmentVariables.AZURE_CLIENT_ID)
        )

        exclude_visual_studio_code_credential = kwargs.pop("exclude_visual_studio_code_credential", False)
        exclude_cli_credential = kwargs.pop("exclude_cli_credential", False)
        exclude_environment_credential = kwargs.pop("exclude_environment_credential", False)
        exclude_managed_identity_credential = kwargs.pop("exclude_managed_identity_credential", False)
        exclude_shared_token_cache_credential = kwargs.pop("exclude_shared_token_cache_credential", False)
        exclude_powershell_credential = kwargs.pop("exclude_powershell_credential", False)

        # Build the chain in documented priority order (see class docstring).
        credentials = []  # type: List[AsyncTokenCredential]
        if not exclude_environment_credential:
            credentials.append(EnvironmentCredential(authority=authority, **kwargs))
        if not exclude_managed_identity_credential:
            credentials.append(ManagedIdentityCredential(client_id=managed_identity_client_id, **kwargs))
        if not exclude_shared_token_cache_credential and SharedTokenCacheCredential.supported():
            try:
                # username and/or tenant_id are only required when the cache contains tokens for multiple identities
                shared_cache = SharedTokenCacheCredential(
                    username=shared_cache_username, tenant_id=shared_cache_tenant_id, authority=authority, **kwargs
                )
                credentials.append(shared_cache)
            except Exception as ex:  # pylint:disable=broad-except
                _LOGGER.info("Shared token cache is unavailable: '%s'", ex)
        if not exclude_visual_studio_code_credential:
            credentials.append(VisualStudioCodeCredential(**vscode_args))
        if not exclude_cli_credential:
            credentials.append(AzureCliCredential())
        if not exclude_powershell_credential:
            credentials.append(AzurePowerShellCredential())

        super().__init__(*credentials)

    async def get_token(self, *scopes: str, **kwargs: "Any") -> "AccessToken":
        """Asynchronously request an access token for `scopes`.

        This method is called automatically by Azure SDK clients.

        :param str scopes: desired scopes for the access token. This method requires at least one scope.
        :keyword str tenant_id: optional tenant to include in the token request.
        :rtype: :class:`azure.core.credentials.AccessToken`
        :raises ~azure.core.exceptions.ClientAuthenticationError: authentication failed. The exception has a
          `message` attribute listing each authentication attempt and its error message.
        """
        # Fast path: reuse the credential that succeeded last time instead of
        # walking the whole chain again.
        if self._successful_credential:
            return await self._successful_credential.get_token(*scopes, **kwargs)

        return await super().get_token(*scopes, **kwargs)
| 55.802721 | 120 | 0.737413 |
import logging
import os
from typing import TYPE_CHECKING
from ..._constants import EnvironmentVariables
from ..._internal import get_default_authority, normalize_authority
from .azure_cli import AzureCliCredential
from .azure_powershell import AzurePowerShellCredential
from .chained import ChainedTokenCredential
from .environment import EnvironmentCredential
from .managed_identity import ManagedIdentityCredential
from .shared_cache import SharedTokenCacheCredential
from .vscode import VisualStudioCodeCredential
if TYPE_CHECKING:
from typing import Any, List
from azure.core.credentials import AccessToken
from azure.core.credentials_async import AsyncTokenCredential
_LOGGER = logging.getLogger(__name__)
class DefaultAzureCredential(ChainedTokenCredential):
def __init__(self, **kwargs: "Any") -> None:
if "tenant_id" in kwargs:
raise TypeError("'tenant_id' is not supported in DefaultAzureCredential.")
authority = kwargs.pop("authority", None)
vscode_tenant_id = kwargs.pop(
"visual_studio_code_tenant_id", os.environ.get(EnvironmentVariables.AZURE_TENANT_ID)
)
vscode_args = dict(kwargs)
if authority:
vscode_args["authority"] = authority
if vscode_tenant_id:
vscode_args["tenant_id"] = vscode_tenant_id
authority = normalize_authority(authority) if authority else get_default_authority()
shared_cache_username = kwargs.pop("shared_cache_username", os.environ.get(EnvironmentVariables.AZURE_USERNAME))
shared_cache_tenant_id = kwargs.pop(
"shared_cache_tenant_id", os.environ.get(EnvironmentVariables.AZURE_TENANT_ID)
)
managed_identity_client_id = kwargs.pop(
"managed_identity_client_id", os.environ.get(EnvironmentVariables.AZURE_CLIENT_ID)
)
vscode_tenant_id = kwargs.pop(
"visual_studio_code_tenant_id", os.environ.get(EnvironmentVariables.AZURE_TENANT_ID)
)
exclude_visual_studio_code_credential = kwargs.pop("exclude_visual_studio_code_credential", False)
exclude_cli_credential = kwargs.pop("exclude_cli_credential", False)
exclude_environment_credential = kwargs.pop("exclude_environment_credential", False)
exclude_managed_identity_credential = kwargs.pop("exclude_managed_identity_credential", False)
exclude_shared_token_cache_credential = kwargs.pop("exclude_shared_token_cache_credential", False)
exclude_powershell_credential = kwargs.pop("exclude_powershell_credential", False)
credentials = []
if not exclude_environment_credential:
credentials.append(EnvironmentCredential(authority=authority, **kwargs))
if not exclude_managed_identity_credential:
credentials.append(ManagedIdentityCredential(client_id=managed_identity_client_id, **kwargs))
if not exclude_shared_token_cache_credential and SharedTokenCacheCredential.supported():
try:
shared_cache = SharedTokenCacheCredential(
username=shared_cache_username, tenant_id=shared_cache_tenant_id, authority=authority, **kwargs
)
credentials.append(shared_cache)
except Exception as ex:
_LOGGER.info("Shared token cache is unavailable: '%s'", ex)
if not exclude_visual_studio_code_credential:
credentials.append(VisualStudioCodeCredential(**vscode_args))
if not exclude_cli_credential:
credentials.append(AzureCliCredential())
if not exclude_powershell_credential:
credentials.append(AzurePowerShellCredential())
super().__init__(*credentials)
async def get_token(self, *scopes: str, **kwargs: "Any") -> "AccessToken":
if self._successful_credential:
return await self._successful_credential.get_token(*scopes, **kwargs)
return await super().get_token(*scopes, **kwargs)
| true | true |
f71dbeb61268f52210ee67c77b92ed7020b4b55f | 1,515 | py | Python | neodroidagent/utilities/exploration/sampling/random_process/ornstein_uhlenbeck.py | gitter-badger/agent | 3f53eaa7ebdee3ab423c7b58785d584fe1a6ae11 | [
"Apache-2.0"
] | 8 | 2017-09-13T08:28:44.000Z | 2022-01-21T15:59:19.000Z | neodroidagent/utilities/exploration/sampling/random_process/ornstein_uhlenbeck.py | gitter-badger/agent | 3f53eaa7ebdee3ab423c7b58785d584fe1a6ae11 | [
"Apache-2.0"
] | 4 | 2019-03-22T13:49:16.000Z | 2019-03-25T13:49:39.000Z | neodroidagent/utilities/exploration/sampling/random_process/ornstein_uhlenbeck.py | gitter-badger/agent | 3f53eaa7ebdee3ab423c7b58785d584fe1a6ae11 | [
"Apache-2.0"
] | 3 | 2017-09-13T08:31:38.000Z | 2021-11-09T11:22:27.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from .annealed_guassian import AnnealedGaussianProcess
__author__ = "Christian Heider Nielsen"
# Based on http://math.stackexchange.com/questions/1287634/implementing-ornstein-uhlenbeck-in-matlab
import numpy
__all__ = ["OrnsteinUhlenbeckProcess"]
class OrnsteinUhlenbeckProcess(AnnealedGaussianProcess):
    """Ornstein-Uhlenbeck noise process with an annealed sigma.

    Discretised as::

        x_{t+1} = x_t + theta * (mean - x_t) * dt
                  + sigma_t * sqrt(dt) * N(0, 1)

    where ``sigma_t`` (``current_sigma``) is supplied by the
    ``AnnealedGaussianProcess`` base class.  Commonly used as temporally
    correlated exploration noise for continuous-control agents.
    """

    def __init__(
        self,
        *,
        theta: float = 0.15,
        mean: float = 0.0,
        sigma: float = 1.0,
        dt: float = 1e-2,
        x_0=None,
        sigma_min: float = None,
        n_steps_annealing: int = 1000,
        **kwargs
    ):
        super().__init__(
            mean=mean,
            sigma=sigma,
            sigma_min=sigma_min,
            n_steps_annealing=n_steps_annealing,
            **kwargs
        )
        self.theta = theta  # mean-reversion rate
        self.mean = mean  # long-run mean the process reverts to
        self.dt = dt  # integration time step
        self.x_0 = x_0  # optional fixed restart state (None -> zeros)
        self.reset()

    def sample(self, size):
        """Advance the process one step and return noise of shape *size*."""
        x = (
            self.x_prev
            + self.theta * (self.mean - self.x_prev) * self.dt
            + self.current_sigma * numpy.sqrt(self.dt) * numpy.random.normal(size=size)
        )
        self.x_prev = x
        self.n_steps += 1
        return x

    def reset(self):
        """Restart the trajectory at ``x_0`` (or at zero when ``x_0`` is None)."""
        super().reset()
        # Bug fix: the original fell back to numpy.zeros_like(self.x_0),
        # which for x_0 = None produces a 0-d object-dtype array
        # (zeros_like(None)); start from zeros shaped like the mean instead.
        self.x_prev = self.x_0 if self.x_0 is not None else numpy.zeros_like(self.mean)
if __name__ == "__main__":
    # Visual smoke test: print 1000 successive (2, 1)-shaped noise samples
    # so the mean-reverting behaviour can be eyeballed.
    random_process = OrnsteinUhlenbeckProcess(theta=0.5)
    for i in range(1000):
        print(random_process.sample((2, 1)))
| 25.25 | 100 | 0.580858 |
from .annealed_guassian import AnnealedGaussianProcess
__author__ = "Christian Heider Nielsen"
import numpy
__all__ = ["OrnsteinUhlenbeckProcess"]
class OrnsteinUhlenbeckProcess(AnnealedGaussianProcess):
def __init__(
self,
*,
theta: float = 0.15,
mean: float = 0.0,
sigma: float = 1.0,
dt: float = 1e-2,
x_0=None,
sigma_min: float = None,
n_steps_annealing: int = 1000,
**kwargs
):
super().__init__(
mean=mean,
sigma=sigma,
sigma_min=sigma_min,
n_steps_annealing=n_steps_annealing,
**kwargs
)
self.theta = theta
self.mean = mean
self.dt = dt
self.x_0 = x_0
self.reset()
def sample(self, size):
x = (
self.x_prev
+ self.theta * (self.mean - self.x_prev) * self.dt
+ self.current_sigma * numpy.sqrt(self.dt) * numpy.random.normal(size=size)
)
self.x_prev = x
self.n_steps += 1
return x
def reset(self):
super().reset()
self.x_prev = self.x_0 if self.x_0 is not None else numpy.zeros_like(self.x_0)
if __name__ == "__main__":
random_process = OrnsteinUhlenbeckProcess(theta=0.5)
for i in range(1000):
print(random_process.sample((2, 1)))
| true | true |
f71dbec8bab51add607111fbfb0eae639d16b61c | 3,039 | py | Python | Common/DataModel/Testing/Python/TestGetBounds.py | txwhhny/vtk | 854d9aa87b944bc9079510515996406b98b86f7c | [
"BSD-3-Clause"
] | 2 | 2021-07-07T22:53:19.000Z | 2021-07-31T19:29:35.000Z | Common/DataModel/Testing/Python/TestGetBounds.py | txwhhny/vtk | 854d9aa87b944bc9079510515996406b98b86f7c | [
"BSD-3-Clause"
] | 2 | 2020-11-18T16:50:34.000Z | 2022-01-21T13:31:47.000Z | Common/DataModel/Testing/Python/TestGetBounds.py | txwhhny/vtk | 854d9aa87b944bc9079510515996406b98b86f7c | [
"BSD-3-Clause"
] | 5 | 2020-10-02T10:14:35.000Z | 2022-03-10T07:50:22.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import vtk
import sys
# Test speed of compute bounds in vtkPolyData, vtkPoints, and
# vtkBoundingBox.

# Control model size
res = 500

timer = vtk.vtkTimerLog()

# Expected bounds of the appended dataset: three unit planes side by side
# spanning x in [-1.5, 1.5], y in [-0.5, 0.5], all at z == 0.
EXPECTED_BOUNDS = (-1.5, 1.5, -0.5, 0.5, 0.0, 0.0)


def time_bounds(compute, box):
    """Time a single bounds computation writing into *box*; return seconds."""
    timer.StartTimer()
    compute(box)
    timer.StopTimer()
    return timer.GetElapsedTime()


def report(label, elapsed, box):
    """Print the timing/bounds lines for one method and verify the extent."""
    print(label + ":")
    print("\tTime: {0}".format(elapsed))
    print("\tBounds: {0}".format(box))
    for actual, expected in zip(box, EXPECTED_BOUNDS):
        assert actual == expected


# Data source. Note that different types of cells are created
# to exercise the vtkPolyData::GetBounds() properly.
plane = vtk.vtkPlaneSource()
plane.SetResolution(res, res)

edges = vtk.vtkFeatureEdges()
edges.SetInputConnection(plane.GetOutputPort())
edges.BoundaryEdgesOn()
edges.ManifoldEdgesOff()
edges.NonManifoldEdgesOff()
edges.FeatureEdgesOff()

t1 = vtk.vtkTransform()
t1.Translate(-1.0, 0, 0)
tf1 = vtk.vtkTransformPolyDataFilter()
tf1.SetInputConnection(edges.GetOutputPort())
tf1.SetTransform(t1)

t2 = vtk.vtkTransform()
t2.Translate(1.0, 0, 0)
tf2 = vtk.vtkTransformPolyDataFilter()
tf2.SetInputConnection(edges.GetOutputPort())
tf2.SetTransform(t2)

append = vtk.vtkAppendPolyData()
append.AddInputConnection(tf1.GetOutputPort())
append.AddInputConnection(plane.GetOutputPort())
append.AddInputConnection(tf2.GetOutputPort())
append.Update()
output = append.GetOutput()
points = output.GetPoints()
box = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]

print("Input data:")
print("\tNum Points: {0}".format(output.GetNumberOfPoints()))
print("\tNum Cells: {0}".format(output.GetNumberOfCells()))

# Currently vtkPolyData takes into account cells that are connected to
# points; hence only connected points (i.e., points used by cells) are
# considered.
points.Modified()
report("vtkPolyData::ComputeBounds()", time_bounds(output.GetBounds, box), box)

# Uses vtkPoints::ComputeBounds() which uses threaded vtkSMPTools and
# vtkArrayDispatch (see vtkDataArrayPrivate.txx). In other words, cell
# connectivity is not taken into account.
points.Modified()
report("vtkPoints::ComputeBounds()", time_bounds(points.GetBounds, box), box)

# Uses vtkBoundingBox with vtkSMPTools. This method takes into account
# an (optional) pointUses array to only consider selected points.
bbox = vtk.vtkBoundingBox()
report("vtkBoundingBox::ComputeBounds()",
       time_bounds(lambda b: bbox.ComputeBounds(points, b), box), box)
| 26.198276 | 70 | 0.726226 |
import vtk
import sys
res = 500
timer = vtk.vtkTimerLog()
plane = vtk.vtkPlaneSource()
plane.SetResolution(res,res)
edges = vtk.vtkFeatureEdges()
edges.SetInputConnection(plane.GetOutputPort())
edges.BoundaryEdgesOn()
edges.ManifoldEdgesOff()
edges.NonManifoldEdgesOff()
edges.FeatureEdgesOff()
t1 = vtk.vtkTransform()
t1.Translate(-1.0,0,0)
tf1 = vtk.vtkTransformPolyDataFilter()
tf1.SetInputConnection(edges.GetOutputPort())
tf1.SetTransform(t1)
t2 = vtk.vtkTransform()
t2.Translate(1.0,0,0)
tf2 = vtk.vtkTransformPolyDataFilter()
tf2.SetInputConnection(edges.GetOutputPort())
tf2.SetTransform(t2)
append = vtk.vtkAppendPolyData()
append.AddInputConnection(tf1.GetOutputPort())
append.AddInputConnection(plane.GetOutputPort())
append.AddInputConnection(tf2.GetOutputPort())
append.Update()
output = append.GetOutput()
points = output.GetPoints()
box = [0.0,0.0,0.0,0.0,0.0,0.0]
print("Input data:")
print("\tNum Points: {0}".format(output.GetNumberOfPoints()))
print("\tNum Cells: {0}".format(output.GetNumberOfCells()))
points.Modified()
timer.StartTimer()
output.GetBounds(box)
timer.StopTimer()
time = timer.GetElapsedTime()
print("vtkPolyData::ComputeBounds():")
print("\tTime: {0}".format(time))
print("\tBounds: {0}".format(box))
assert box[0] == -1.5
assert box[1] == 1.5
assert box[2] == -0.5
assert box[3] == 0.5
assert box[4] == 0.0
assert box[5] == 0.0
points.Modified()
timer.StartTimer()
points.GetBounds(box)
timer.StopTimer()
time = timer.GetElapsedTime()
print("vtkPoints::ComputeBounds():")
print("\tTime: {0}".format(time))
print("\tBounds: {0}".format(box))
assert box[0] == -1.5
assert box[1] == 1.5
assert box[2] == -0.5
assert box[3] == 0.5
assert box[4] == 0.0
assert box[5] == 0.0
bbox = vtk.vtkBoundingBox()
timer.StartTimer()
bbox.ComputeBounds(points,box)
timer.StopTimer()
time = timer.GetElapsedTime()
print("vtkBoundingBox::ComputeBounds():")
print("\tTime: {0}".format(time))
print("\tBounds: {0}".format(box))
assert box[0] == -1.5
assert box[1] == 1.5
assert box[2] == -0.5
assert box[3] == 0.5
assert box[4] == 0.0
assert box[5] == 0.0
| true | true |
f71dc033893fb25f5c43d5040820941c39dbf11b | 1,244 | py | Python | labs/9/zstudentDAO.py | G00364778/52957_dataRepresentation | de5127573a5b717aa67105c3dbe5e1d98f601fca | [
"MIT"
] | null | null | null | labs/9/zstudentDAO.py | G00364778/52957_dataRepresentation | de5127573a5b717aa67105c3dbe5e1d98f601fca | [
"MIT"
] | null | null | null | labs/9/zstudentDAO.py | G00364778/52957_dataRepresentation | de5127573a5b717aa67105c3dbe5e1d98f601fca | [
"MIT"
] | null | null | null | import mysql.connector
class StudentDAO:
    """Data-access object for the ``student`` table (MySQL backend).

    Each method opens a cursor, performs one statement, and now closes
    the cursor before returning (the original leaked cursors).
    """

    # Placeholder; replaced with a live connection in __init__.
    db = ""

    def __init__(self):
        # Connection parameters are hard-coded for the lab environment.
        self.db = mysql.connector.connect(
            host="localhost",
            user="root",
            password="root",
            database="datarep"
        )

    def create(self, values):
        """Insert a (name, age) tuple and return the new row id."""
        cursor = self.db.cursor()
        sql = "insert into student (name, age) values (%s,%s)"
        cursor.execute(sql, values)
        self.db.commit()
        new_id = cursor.lastrowid
        cursor.close()  # fix: release the server-side cursor
        return new_id

    def getAll(self):
        """Return every row of the student table as a list of tuples."""
        cursor = self.db.cursor()
        sql = "select * from student"
        cursor.execute(sql)
        result = cursor.fetchall()
        cursor.close()
        return result

    def findByID(self, id):
        """Return the single row with the given id, or None."""
        cursor = self.db.cursor()
        sql = "select * from student where id = %s"
        values = (id,)
        cursor.execute(sql, values)
        result = cursor.fetchone()
        cursor.close()
        return result

    def update(self, values):
        """Update name and age for a row; *values* is (name, age, id)."""
        cursor = self.db.cursor()
        sql = "update student set name= %s, age=%s where id = %s"
        cursor.execute(sql, values)
        self.db.commit()
        cursor.close()

    def delete(self, id):
        """Delete the row with the given id."""
        cursor = self.db.cursor()
        sql = "delete from student where id = %s"
        values = (id,)
        cursor.execute(sql, values)
        self.db.commit()
        cursor.close()
        print("delete done")
studentDAO = StudentDAO() | 24.392157 | 60 | 0.62701 | import mysql.connector
class StudentDAO:
db=""
def __init__(self):
self.db = mysql.connector.connect(
host="localhost",
user="root",
password="root",
te(self, values):
cursor = self.db.cursor()
sql="insert into student (name, age) values (%s,%s)"
cursor.execute(sql, values)
self.db.commit()
return cursor.lastrowid
def getAll(self):
cursor = self.db.cursor()
sql="select * from student"
cursor.execute(sql)
result = cursor.fetchall()
return result
def findByID(self, id):
cursor = self.db.cursor()
sql="select * from student where id = %s"
values = (id,)
cursor.execute(sql, values)
result = cursor.fetchone()
return result
def update(self, values):
cursor = self.db.cursor()
sql="update student set name= %s, age=%s where id = %s"
cursor.execute(sql, values)
self.db.commit()
def delete(self, id):
cursor = self.db.cursor()
sql="delete from student where id = %s"
values = (id,)
cursor.execute(sql, values)
self.db.commit()
print("delete done")
studentDAO = StudentDAO() | true | true |
f71dc07b7ca849d8293362b0077e6b64d15f8c1f | 4,544 | py | Python | samples/openapi3/client/petstore/python-experimental/petstore_api/models/file.py | MalcolmScoffable/openapi-generator | 73605a0c0e0c825286c95123c63678ba75b44d5c | [
"Apache-2.0"
] | 4 | 2020-07-24T07:02:57.000Z | 2022-01-08T17:37:38.000Z | samples/openapi3/client/petstore/python-experimental/petstore_api/models/file.py | MalcolmScoffable/openapi-generator | 73605a0c0e0c825286c95123c63678ba75b44d5c | [
"Apache-2.0"
] | 1 | 2020-05-13T10:37:01.000Z | 2020-05-14T16:30:33.000Z | samples/openapi3/client/petstore/python-experimental/petstore_api/models/file.py | MalcolmScoffable/openapi-generator | 73605a0c0e0c825286c95123c63678ba75b44d5c | [
"Apache-2.0"
] | 2 | 2020-04-24T15:18:41.000Z | 2021-12-07T09:39:40.000Z | # coding: utf-8
"""
OpenAPI Petstore
This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\ # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
import sys # noqa: F401
import six # noqa: F401
import nulltype # noqa: F401
from petstore_api.model_utils import ( # noqa: F401
ModelComposed,
ModelNormal,
ModelSimple,
date,
datetime,
file_type,
int,
none_type,
str,
validate_get_composed_info,
)
class File(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
additional_properties_type = None
@staticmethod
def openapi_types():
"""
This must be a class method so a model may have properties that are
of type self, this ensures that we don't create a cyclic import
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'source_uri': (str,), # noqa: E501
}
@staticmethod
def discriminator():
return None
attribute_map = {
'source_uri': 'sourceURI', # noqa: E501
}
@staticmethod
def _composed_schemas():
return None
required_properties = set([
'_data_store',
'_check_type',
'_from_server',
'_path_to_item',
'_configuration',
])
def __init__(self, _check_type=True, _from_server=False, _path_to_item=(), _configuration=None, **kwargs): # noqa: E501
"""file.File - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_from_server (bool): True if the data is from the server
False if the data is from the client (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
source_uri (str): Test capitalization. [optional] # noqa: E501
"""
self._data_store = {}
self._check_type = _check_type
self._from_server = _from_server
self._path_to_item = _path_to_item
self._configuration = _configuration
for var_name, var_value in six.iteritems(kwargs):
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
| 33.910448 | 174 | 0.610915 |
from __future__ import absolute_import
import re
import sys
import six
import nulltype
from petstore_api.model_utils import (
ModelComposed,
ModelNormal,
ModelSimple,
date,
datetime,
file_type,
int,
none_type,
str,
validate_get_composed_info,
)
class File(ModelNormal):
allowed_values = {
}
validations = {
}
additional_properties_type = None
@staticmethod
def openapi_types():
return {
'source_uri': (str,),
}
@staticmethod
def discriminator():
return None
attribute_map = {
'source_uri': 'sourceURI',
}
@staticmethod
def _composed_schemas():
return None
required_properties = set([
'_data_store',
'_check_type',
'_from_server',
'_path_to_item',
'_configuration',
])
def __init__(self, _check_type=True, _from_server=False, _path_to_item=(), _configuration=None, **kwargs):
self._data_store = {}
self._check_type = _check_type
self._from_server = _from_server
self._path_to_item = _path_to_item
self._configuration = _configuration
for var_name, var_value in six.iteritems(kwargs):
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
continue
setattr(self, var_name, var_value)
| true | true |
f71dc0aed8b56c5bb977a643acc0af8621adeeeb | 1,551 | py | Python | chain_of_logic_advanced.py | rdmunden/logic_chain | c065525c0fff2ec655f7c7c1921881745f1ddd70 | [
"MIT"
] | null | null | null | chain_of_logic_advanced.py | rdmunden/logic_chain | c065525c0fff2ec655f7c7c1921881745f1ddd70 | [
"MIT"
] | null | null | null | chain_of_logic_advanced.py | rdmunden/logic_chain | c065525c0fff2ec655f7c7c1921881745f1ddd70 | [
"MIT"
] | null | null | null | """ generate random strings of logic
v2 - randomly adds 'not' before values
TODO: 1. add random parentheses, 2. add expressions like 'i==1' or 'print()' for values
TODO: Make it more explicit as to which True or False value it is evaluating to.
The cycle:
1. Start with a True
a. keep doing 'and True' to the end
b. if you hit an 'or' before then, stop there (before the 'or', with the current True value)
c. if you hit 'and False' look for the next 'or'
i. if you find one, start the cycle again from there (after the 'or')
ii. if you don't then stop there (on that False value)
2. Start with a False
a. look for the next 'or'
i. if you find one, start the cycle again from there (after the 'or')
ii. if you don't then stop there (on that False value)
"""
import random
def r2():
    """Fair coin flip: return 0 or 1."""
    # randrange(2) is exactly what randint(0, 1) calls internally.
    return random.randrange(2)
def r7():
    """Uniform index into the seven-element value lists: 0..6."""
    # randrange(7) is exactly what randint(0, 6) calls internally.
    return random.randrange(7)
# Truthy operands, written as source text so they can be spliced into an
# expression string (e.g. "'a'" evaluates to the non-empty string 'a').
tv = true_values = ["'a'", "'b'", "'c'", "'d'", "'e'", "'f'", "'g'"]
# Falsy operands; "''" is the source text of an empty string literal.
fv = false_values = ["''", 0, (), [], {}, set(), None]
lv = logic_values = ['and', 'or']
nv = ['', 'not ']
vals = [tv, fv]
# Number of operator/operand pairs appended after the first operand.
n = 5
cont = ''
while cont == '':
    # Start with one (optionally negated) random operand ...
    expr = "{}{}".format(nv[r2()], vals[r2()][r7()])
    for i in range(n):
        # ... then append " and/or [not ]operand" n more times.
        item = " {} {}{}".format(lv[r2()], nv[r2()], vals[r2()][r7()])
        expr += item
    print('\n' + expr + '\n')
    resp = input("Enter for answer...")
    # eval is safe here: the expression was assembled above purely from
    # fixed, program-controlled fragments (no user input).
    ans = eval(expr)
    if isinstance(ans, str): ans = ans or "''"
    print(f"result: {ans}")
    cont = input("\nEnter to continue... ")
| 29.264151 | 98 | 0.563507 | import random
def r2():
return random.randint(0, 1)
def r7():
return random.randint(0,6)
tv = true_values = ["'a'", "'b'", "'c'", "'d'", "'e'", "'f'", "'g'"]
fv = false_values = ["''", 0, (), [], {}, set(), None]
lv = logic_values = ['and', 'or']
nv = ['', 'not ']
vals = [tv, fv]
n = 5
cont = ''
while cont == '':
expr = "{}{}".format(nv[r2()], vals[r2()][r7()])
for i in range(n):
item = " {} {}{}".format(lv[r2()], nv[r2()], vals[r2()][r7()])
expr += item
print('\n' + expr + '\n')
resp = input("Enter for answer...")
ans = eval(expr)
if isinstance(ans, str): ans = ans or "''"
print(f"result: {ans}")
cont = input("\nEnter to continue... ")
| true | true |
f71dc2088bad114f2855b10d70c2b7953a80f393 | 1,954 | py | Python | templatetags/distance_filters.py | redditnfl/draft-cards | 63779107a731ad741c8cf02b98a4b3d74cdcc3ac | [
"Apache-2.0",
"0BSD"
] | null | null | null | templatetags/distance_filters.py | redditnfl/draft-cards | 63779107a731ad741c8cf02b98a4b3d74cdcc3ac | [
"Apache-2.0",
"0BSD"
] | 10 | 2020-06-05T20:27:08.000Z | 2022-02-10T10:47:58.000Z | templatetags/distance_filters.py | redditnfl/draft-cards | 63779107a731ad741c8cf02b98a4b3d74cdcc3ac | [
"Apache-2.0",
"0BSD"
] | 1 | 2021-06-06T01:11:32.000Z | 2021-06-06T01:11:32.000Z | """
>>> set(filter(lambda t: t not in ('AFC', 'NFC'), nflteams.fullinfo.keys())) - set(team_locations.keys())
set()
>>> set(filter(lambda t: t not in ('AFC', 'NFC'), nflteams.fullinfo.keys())) == set(team_locations.keys())
True
"""
import geopy.distance
from django import template
from redditnfl.nfltools import nflteams, sites
register = template.Library()
team_locations = {team['short']: sites.by_team(team['short'])[1][3] for team in filter(lambda t: t['short'] not in ('AFC', 'NFC'), nflteams.fullinfo.values())}
def distance(a, b):
    """Return the geodesic distance between two (lat, lng) pairs.

    >>> round(distance((39.900833,-75.1675), (39.900833,-75.1675)).km, 2)
    0.0
    >>> round(distance((39.900833,-75.1675), (34.014167,-118.287778)).km, 2)
    3857.54
    """
    # Fix: removed an unreachable `return 0.0` that followed this return.
    return geopy.distance.distance(a, b)
@register.filter
def team_location(team):
    """Return the (lat, lng) tuple for a team dict, or None when unknown.

    >>> team_location({'short': 'PHI'})
    (39.900833, -75.1675)
    >>> team_location({'short': 'ABC'})
    >>> team_location({})
    """
    short_code = team.get('short', '')
    return team_locations.get(short_code, None)
@register.filter
def closest_team(data):
    """Return full team info for the stadium nearest to data's lat/lng.

    The returned dict gains a 'distance' entry holding the geopy distance.

    >>> closest_team({'lat':39.6094227, 'lng':-75.8395724})['short']
    'PHI'
    """
    if 'lat' not in data or 'lng' not in data:
        return None
    origin = (data['lat'], data['lng'])
    # min() with a km key replaces the original sort-then-take-first;
    # ties resolve to the first candidate either way.
    best_team, best_distance = min(
        ((team, distance(origin, loc)) for team, loc in team_locations.items()),
        key=lambda pair: pair[1].km,
    )
    info = nflteams.fullinfo[best_team]
    info['distance'] = best_distance
    return info
@register.filter('distance')
def distance_filter(fromlatlng, toplayer):
    """Template filter: distance from *fromlatlng* to a player's location.

    Returns None when the player is missing or carries no data.
    """
    if toplayer is None:
        return None
    if not hasattr(toplayer, 'data'):
        return None
    player_data = toplayer.data
    return distance(fromlatlng, (player_data['lat'], player_data['lng']))
if __name__ == "__main__":
    # Manual check: dump the site table, run the doctests, list team codes.
    from pprint import pprint
    pprint(team_locations)
    import doctest
    doctest.testmod()
    print(" ".join(nflteams.fullinfo.keys()))
| 27.914286 | 159 | 0.631525 | import geopy.distance
from django import template
from redditnfl.nfltools import nflteams, sites
register = template.Library()
team_locations = {team['short']: sites.by_team(team['short'])[1][3] for team in filter(lambda t: t['short'] not in ('AFC', 'NFC'), nflteams.fullinfo.values())}
def distance(a, b):
return geopy.distance.distance(a, b)
return 0.0
@register.filter
def team_location(team):
return team_locations.get(team.get('short', ''), None)
@register.filter
def closest_team(data):
if 'lat' not in data or 'lng' not in data:
return None
latlng = (data['lat'], data['lng'])
distances = [(team, distance(latlng, loc)) for team, loc in team_locations.items()]
closest = sorted(distances, key=lambda d: d[1].km)[0]
ct = nflteams.fullinfo[closest[0]]
ct['distance'] = closest[1]
return ct
@register.filter('distance')
def distance_filter(fromlatlng, toplayer):
if toplayer is None or not hasattr(toplayer, 'data'):
return None
todata = toplayer.data
tolatlng = (todata['lat'], todata['lng'])
return distance(fromlatlng, tolatlng)
if __name__ == "__main__":
from pprint import pprint
pprint(team_locations)
import doctest
doctest.testmod()
print(" ".join(nflteams.fullinfo.keys()))
| true | true |
f71dc2fc6add5b655d07736e36d9fcba57b81fe9 | 2,253 | py | Python | Tavan/kemija_SMyth.py | vedgar/ip | 5ed0773eea4243077f5defb77fb1839661308c83 | [
"Unlicense"
] | 5 | 2017-03-15T11:34:55.000Z | 2021-03-10T13:05:02.000Z | Tavan/kemija_SMyth.py | vedgar/ip | 5ed0773eea4243077f5defb77fb1839661308c83 | [
"Unlicense"
] | null | null | null | Tavan/kemija_SMyth.py | vedgar/ip | 5ed0773eea4243077f5defb77fb1839661308c83 | [
"Unlicense"
] | 14 | 2017-01-11T19:11:01.000Z | 2021-05-09T18:42:19.000Z | from pj import *
class KF(enum.Enum):
    # Token types for the chemical-formula language: '(' and ')'.
    OTV, ZATV = '()'
    class ATOM(Token):
        def Mr(self, **atomi):
            # Relative atomic mass: looked up by symbol in the **atomi map.
            return pogledaj(atomi,self)
    class BROJ(Token):
        def vrijednost(self,**_):
            # Numeric multiplier token; its value is its literal digits.
            return int(self.sadržaj)
    class N(Token):
        literal='n'
        def vrijednost(self, **atomi):
            # Symbolic repeat count 'n', supplied at evaluation time.
            return atomi['n']
def kf_lex(formula):
    """Tokenize a chemical formula into ATOM/BROJ/N/parenthesis tokens."""
    lex=Tokenizer(formula)
    for i, znak in enumerate(iter(lex.čitaj, '')):
        print(znak)  # NOTE(review): leftover debug output — consider removing
        if not i and znak=='n' or znak!=')' and lex.slijedi('n'):
            # 'n' is only legal immediately after a closing parenthesis
            raise lex.greška("nema ')' prije n!")
        elif znak.isdigit() and znak!='0':
            # multi-digit count (no leading zero allowed)
            lex.zvijezda(str.isdigit)
            yield lex.token(KF.BROJ)
        elif znak.isupper():
            # atom symbol: capital letter plus optional lowercase letter
            idući=lex.čitaj()
            print('"', idući)  # NOTE(review): leftover debug output
            if not idući.islower(): lex.vrati()
            yield lex.literal(KF.ATOM)
        else: yield lex.literal(KF)
### Beskontekstna gramatika
# formula -> formula skupina | skupina
# skupina -> ATOM BROJ? | OTV formula ZATV (N | BROJ)?
### Apstraktna sintaksna stabla
# Formula: skupine:[(Formula, broj|'n')]
# Shared singleton token used as the implicit multiplier 1.
jedan=Token(KF.BROJ,'1')
class KFParser(Parser):
    # formula -> skupina+ : a formula is a sequence of groups, ended by
    # end-of-input or a closing parenthesis.
    def formula(self):
        skupine=[self.skupina()]
        while not self>={E.KRAJ,KF.ZATV}:
            skupine.append(self.skupina())
        return Formula(skupine)
    # skupina -> ATOM BROJ? | OTV formula ZATV (N | BROJ)?
    def skupina(self):
        if self >> KF.ATOM:
            atom=self.zadnji
            if self >> KF.BROJ:
                broj=self.zadnji
            else:
                # no explicit count: implicit multiplier of 1
                broj=jedan
            return (atom,broj)
        else:
            self.pročitaj(KF.OTV)
            f=self.formula()
            self.pročitaj(KF.ZATV)
            if self >> {KF.N, KF.BROJ}:
                broj=self.zadnji
            else:
                broj=jedan
            return (f,broj)
    start = formula
class Formula(AST('skupine')):
    def Mr(self, **atomi):
        # Relative molecular mass: sum of (group mass * multiplier) over
        # all groups; multipliers may be numbers or the symbolic 'n'.
        suma=0
        for skupina, broj in self.skupine:
            suma += skupina.Mr(**atomi)*broj.vrijednost(**atomi)
        return suma
if __name__=='__main__':
    # Smoke test: lex and parse a sample formula, then evaluate its Mr.
    formula='CabH3(CabH2)nCabH3'
    formula = 'AbcdeF'  # NOTE(review): overrides the line above — only 'AbcdeF' is used
    tokeni=list(kf_lex(formula))
    p=KFParser.parsiraj(tokeni)
    print(tokeni,p,p.Mr(Cab=12.01,H=1.008,n=2),sep='\n\n')
| 27.814815 | 65 | 0.541056 | from pj import *
class KF(enum.Enum):
OTV, ZATV = '()'
class ATOM(Token):
def Mr(self, **atomi):
return pogledaj(atomi,self)
class BROJ(Token):
def vrijednost(self,**_):
return int(self.sadržaj)
class N(Token):
literal='n'
def vrijednost(self, **atomi):
return atomi['n']
def kf_lex(formula):
lex=Tokenizer(formula)
for i, znak in enumerate(iter(lex.čitaj, '')):
print(znak)
if not i and znak=='n' or znak!=')' and lex.slijedi('n'):
raise lex.greška("nema ')' prije n!")
elif znak.isdigit() and znak!='0':
lex.zvijezda(str.isdigit)
yield lex.token(KF.BROJ)
elif znak.isupper():
idući=lex.čitaj()
print('"', idući)
if not idući.islower(): lex.vrati()
yield lex.literal(KF.ATOM)
else: yield lex.literal(KF)
### Beskontekstna gramatika
# formula -> formula skupina | skupina
# skupina -> ATOM BROJ? | OTV formula ZATV (N | BROJ)?
### Apstraktna sintaksna stabla
# Formula: skupine:[(Formula, broj|'n')]
jedan=Token(KF.BROJ,'1')
class KFParser(Parser):
def formula(self):
skupine=[self.skupina()]
while not self>={E.KRAJ,KF.ZATV}:
skupine.append(self.skupina())
return Formula(skupine)
def skupina(self):
if self >> KF.ATOM:
atom=self.zadnji
if self >> KF.BROJ:
broj=self.zadnji
else:
broj=jedan
return (atom,broj)
else:
self.pročitaj(KF.OTV)
f=self.formula()
self.pročitaj(KF.ZATV)
if self >> {KF.N, KF.BROJ}:
broj=self.zadnji
else:
broj=jedan
return (f,broj)
start = formula
class Formula(AST('skupine')):
def Mr(self, **atomi):
suma=0
for skupina, broj in self.skupine:
suma += skupina.Mr(**atomi)*broj.vrijednost(**atomi)
return suma
if __name__=='__main__':
formula='CabH3(CabH2)nCabH3'
formula = 'AbcdeF'
tokeni=list(kf_lex(formula))
p=KFParser.parsiraj(tokeni)
print(tokeni,p,p.Mr(Cab=12.01,H=1.008,n=2),sep='\n\n')
| true | true |
f71dc35d8468a735ae390aa81a65c092ec43ba1e | 8,263 | py | Python | custom/icds_reports/utils/aggregation_helpers/monolith/awc_location.py | kkrampa/commcare-hq | d64d7cad98b240325ad669ccc7effb07721b4d44 | [
"BSD-3-Clause"
] | 1 | 2020-05-05T13:10:01.000Z | 2020-05-05T13:10:01.000Z | custom/icds_reports/utils/aggregation_helpers/monolith/awc_location.py | kkrampa/commcare-hq | d64d7cad98b240325ad669ccc7effb07721b4d44 | [
"BSD-3-Clause"
] | 1 | 2019-12-09T14:00:14.000Z | 2019-12-09T14:00:14.000Z | custom/icds_reports/utils/aggregation_helpers/monolith/awc_location.py | MaciejChoromanski/commcare-hq | fd7f65362d56d73b75a2c20d2afeabbc70876867 | [
"BSD-3-Clause"
] | 5 | 2015-11-30T13:12:45.000Z | 2019-07-01T19:27:07.000Z | from __future__ import absolute_import
from __future__ import unicode_literals
from six.moves import map
from corehq.apps.userreports.models import StaticDataSourceConfiguration, get_datasource_config
from corehq.apps.userreports.util import get_table_name
from custom.icds_reports.const import AWC_LOCATION_TABLE_ID, AWW_USER_TABLE_ID
from custom.icds_reports.utils.aggregation_helpers.monolith.base import BaseICDSAggregationHelper
from six.moves import range
class LocationAggregationHelper(BaseICDSAggregationHelper):
    """Rebuilds the denormalized ``awc_location`` table and its local mirror.

    ``awc_location`` carries one row per location at every aggregation
    level (5 = AWC, 4 = supervisor, 3 = block, 2 = district, 1 = state).
    AWC-level rows come from the location UCR; AWW name/phone are patched
    in from the AWW user UCR; coarser levels are produced by successive
    GROUP BY roll-ups; finally the result is copied into
    ``awc_location_local``.
    """
    helper_key = 'location'
    base_tablename = 'awc_location'
    ucr_location_table = AWC_LOCATION_TABLE_ID
    ucr_aww_table = AWW_USER_TABLE_ID
    local_tablename = 'awc_location_local'

    def __init__(self):
        # No per-run state; all inputs come from class attributes.
        pass

    def aggregate(self, cursor):
        """Run the full rebuild against *cursor* in one database session."""
        drop_table_query = self.drop_table_query()
        agg_query = self.aggregate_query()
        aww_query = self.aww_query()
        # Roll up supervisor (4) -> block (3) -> district (2) -> state (1).
        rollup_queries = [self.rollup_query(i) for i in range(4, 0, -1)]

        cursor.execute(drop_table_query)
        cursor.execute(agg_query)
        cursor.execute(aww_query)
        for rollup_query in rollup_queries:
            cursor.execute(rollup_query)
        cursor.execute(self.create_local_table())

    @property
    def ucr_location_tablename(self):
        """Physical table name of the AWC-location UCR data source."""
        doc_id = StaticDataSourceConfiguration.get_doc_id(self.domain, self.ucr_location_table)
        config, _ = get_datasource_config(doc_id, self.domain)
        return get_table_name(self.domain, config.table_id)

    @property
    def ucr_aww_tablename(self):
        """Physical table name of the AWW-user UCR data source."""
        doc_id = StaticDataSourceConfiguration.get_doc_id(self.domain, self.ucr_aww_table)
        config, _ = get_datasource_config(doc_id, self.domain)
        return get_table_name(self.domain, config.table_id)

    def drop_table_query(self):
        # Despite the name, this only clears the rows; the table itself
        # (with its indexes) is kept.
        return """
        DELETE FROM "{tablename}";
        """.format(tablename=self.base_tablename)

    def aggregate_query(self):
        """INSERT the AWC-level (aggregation_level = 5) rows from the UCR."""
        # (destination column, source expression) pairs.
        columns = (
            ('doc_id', 'doc_id'),
            ('awc_name', 'awc_name'),
            ('awc_site_code', 'awc_site_code'),
            ('supervisor_id', 'supervisor_id'),
            ('supervisor_name', 'supervisor_name'),
            ('supervisor_site_code', 'supervisor_site_code'),
            ('block_id', 'block_id'),
            ('block_name', 'block_name'),
            ('block_site_code', 'block_site_code'),
            ('district_id', 'district_id'),
            ('district_name', 'district_name'),
            ('district_site_code', 'district_site_code'),
            ('state_id', 'state_id'),
            ('state_name', 'state_name'),
            ('state_site_code', 'state_site_code'),
            ('aggregation_level', '5'),
            ('block_map_location_name', 'block_map_location_name'),
            ('district_map_location_name', 'district_map_location_name'),
            ('state_map_location_name', 'state_map_location_name'),
            ('aww_name', 'NULL'),
            ('contact_phone_number', 'NULL'),
            ('state_is_test', 'state_is_test'),
            ('district_is_test', 'district_is_test'),
            ('block_is_test', 'block_is_test'),
            ('supervisor_is_test', 'supervisor_is_test'),
            ('awc_is_test', 'awc_is_test')
        )
        return """
        INSERT INTO "{tablename}" (
          {columns}
        ) (
        SELECT
          {calculations}
          FROM "{ucr_location_tablename}"
        )
        """.format(
            tablename=self.base_tablename,
            columns=", ".join([col[0] for col in columns]),
            calculations=", ".join([col[1] for col in columns]),
            ucr_location_tablename=self.ucr_location_tablename
        )

    def aww_query(self):
        """Patch AWW name and phone number onto the AWC-level rows."""
        return """
        UPDATE "{tablename}" awc_loc SET
          aww_name = ut.aww_name,
          contact_phone_number = ut.contact_phone_number
        FROM (
          SELECT
            commcare_location_id,
            aww_name,
            contact_phone_number
          FROM "{ucr_aww_tablename}"
        ) ut
        WHERE ut.commcare_location_id = awc_loc.doc_id
        """.format(
            tablename=self.base_tablename,
            ucr_aww_tablename=self.ucr_aww_tablename
        )

    def rollup_query(self, aggregation_level):
        """INSERT rows for *aggregation_level* by grouping the finer rows.

        Columns below the requested level collapse to 'All'/NULL; is_test
        flags are OR-ed (via MAX) over the grouped rows.
        """
        columns = (
            ('doc_id', lambda col: col if aggregation_level > 4 else "'All'"),
            ('awc_name', lambda col: col if aggregation_level > 4 else "NULL"),
            ('awc_site_code', lambda col: col if aggregation_level > 4 else "'All'"),
            ('supervisor_id', lambda col: col if aggregation_level > 3 else "'All'"),
            ('supervisor_name', lambda col: col if aggregation_level > 3 else "NULL"),
            ('supervisor_site_code', lambda col: col if aggregation_level > 3 else "'All'"),
            ('block_id', lambda col: col if aggregation_level > 2 else "'All'"),
            ('block_name', lambda col: col if aggregation_level > 2 else "NULL"),
            ('block_site_code', lambda col: col if aggregation_level > 2 else "'All'"),
            ('district_id', lambda col: col if aggregation_level > 1 else "'All'"),
            ('district_name', lambda col: col if aggregation_level > 1 else "NULL"),
            ('district_site_code', lambda col: col if aggregation_level > 1 else "'All'"),
            ('state_id', 'state_id'),
            ('state_name', 'state_name'),
            ('state_site_code', 'state_site_code'),
            ('aggregation_level', '{}'.format(aggregation_level)),
            ('block_map_location_name', lambda col: col if aggregation_level > 2 else "'All'"),
            ('district_map_location_name', lambda col: col if aggregation_level > 1 else "'All'"),
            ('state_map_location_name', 'state_map_location_name'),
            ('aww_name', 'NULL'),
            ('contact_phone_number', 'NULL'),
            ('state_is_test', 'MAX(state_is_test)'),
            (
                'district_is_test',
                lambda col: 'MAX({column})'.format(column=col) if aggregation_level > 1 else "0"
            ),
            (
                'block_is_test',
                lambda col: 'MAX({column})'.format(column=col) if aggregation_level > 2 else "0"
            ),
            (
                'supervisor_is_test',
                lambda col: 'MAX({column})'.format(column=col) if aggregation_level > 3 else "0"
            ),
            (
                'awc_is_test',
                lambda col: 'MAX({column})'.format(column=col) if aggregation_level > 4 else "0"
            )
        )

        def _transform_column(column_tuple):
            # Resolve callable column specs into concrete SQL expressions.
            column = column_tuple[0]
            agg_col = column_tuple[1]
            if callable(agg_col):
                return (column, agg_col(column))
            return column_tuple

        columns = list(map(_transform_column, columns))

        end_text_column = ["id", "name", "site_code", "map_location_name"]

        group_by = ["state_{}".format(name) for name in end_text_column]
        if aggregation_level > 1:
            group_by.extend(["district_{}".format(name) for name in end_text_column])
        if aggregation_level > 2:
            group_by.extend(["block_{}".format(name) for name in end_text_column])
        if aggregation_level > 3:
            # Bug fix: the original used ``name is not "map_location_name"``
            # (identity comparison), which only worked by CPython string
            # interning and warns on Python 3.8+; equality is intended.
            # There is no supervisor_map_location_name column to group by.
            group_by.extend(
                ["supervisor_{}".format(name) for name in end_text_column if name != "map_location_name"]
            )

        return """
        INSERT INTO "{tablename}" (
          {columns}
        ) (
        SELECT
          {calculations}
          FROM "{tablename}"
          GROUP BY {group_by}
        )
        """.format(
            tablename=self.base_tablename,
            columns=", ".join([col[0] for col in columns]),
            calculations=", ".join([col[1] for col in columns]),
            group_by=", ".join(group_by)
        )

    def create_local_table(self):
        """Refresh the non-distributed mirror table from the main table."""
        return """
        DELETE FROM "{local_tablename}";
        INSERT INTO "{local_tablename}" SELECT * FROM "{tablename}";
        """.format(
            tablename=self.base_tablename,
            local_tablename=self.local_tablename
        )
| 40.11165 | 109 | 0.588527 | from __future__ import absolute_import
from __future__ import unicode_literals
from six.moves import map
from corehq.apps.userreports.models import StaticDataSourceConfiguration, get_datasource_config
from corehq.apps.userreports.util import get_table_name
from custom.icds_reports.const import AWC_LOCATION_TABLE_ID, AWW_USER_TABLE_ID
from custom.icds_reports.utils.aggregation_helpers.monolith.base import BaseICDSAggregationHelper
from six.moves import range
class LocationAggregationHelper(BaseICDSAggregationHelper):
helper_key = 'location'
base_tablename = 'awc_location'
ucr_location_table = AWC_LOCATION_TABLE_ID
ucr_aww_table = AWW_USER_TABLE_ID
local_tablename = 'awc_location_local'
def __init__(self):
pass
def aggregate(self, cursor):
drop_table_query = self.drop_table_query()
agg_query = self.aggregate_query()
aww_query = self.aww_query()
rollup_queries = [self.rollup_query(i) for i in range(4, 0, -1)]
cursor.execute(drop_table_query)
cursor.execute(agg_query)
cursor.execute(aww_query)
for rollup_query in rollup_queries:
cursor.execute(rollup_query)
cursor.execute(self.create_local_table())
@property
def ucr_location_tablename(self):
doc_id = StaticDataSourceConfiguration.get_doc_id(self.domain, self.ucr_location_table)
config, _ = get_datasource_config(doc_id, self.domain)
return get_table_name(self.domain, config.table_id)
@property
def ucr_aww_tablename(self):
doc_id = StaticDataSourceConfiguration.get_doc_id(self.domain, self.ucr_aww_table)
config, _ = get_datasource_config(doc_id, self.domain)
return get_table_name(self.domain, config.table_id)
def drop_table_query(self):
return """
DELETE FROM "{tablename}";
""".format(tablename=self.base_tablename)
def aggregate_query(self):
columns = (
('doc_id', 'doc_id'),
('awc_name', 'awc_name'),
('awc_site_code', 'awc_site_code'),
('supervisor_id', 'supervisor_id'),
('supervisor_name', 'supervisor_name'),
('supervisor_site_code', 'supervisor_site_code'),
('block_id', 'block_id'),
('block_name', 'block_name'),
('block_site_code', 'block_site_code'),
('district_id', 'district_id'),
('district_name', 'district_name'),
('district_site_code', 'district_site_code'),
('state_id', 'state_id'),
('state_name', 'state_name'),
('state_site_code', 'state_site_code'),
('aggregation_level', '5'),
('block_map_location_name', 'block_map_location_name'),
('district_map_location_name', 'district_map_location_name'),
('state_map_location_name', 'state_map_location_name'),
('aww_name', 'NULL'),
('contact_phone_number', 'NULL'),
('state_is_test', 'state_is_test'),
('district_is_test', 'district_is_test'),
('block_is_test', 'block_is_test'),
('supervisor_is_test', 'supervisor_is_test'),
('awc_is_test', 'awc_is_test')
)
return """
INSERT INTO "{tablename}" (
{columns}
) (
SELECT
{calculations}
FROM "{ucr_location_tablename}"
)
""".format(
tablename=self.base_tablename,
columns=", ".join([col[0] for col in columns]),
calculations=", ".join([col[1] for col in columns]),
ucr_location_tablename=self.ucr_location_tablename
)
def aww_query(self):
return """
UPDATE "{tablename}" awc_loc SET
aww_name = ut.aww_name,
contact_phone_number = ut.contact_phone_number
FROM (
SELECT
commcare_location_id,
aww_name,
contact_phone_number
FROM "{ucr_aww_tablename}"
) ut
WHERE ut.commcare_location_id = awc_loc.doc_id
""".format(
tablename=self.base_tablename,
ucr_aww_tablename=self.ucr_aww_tablename
)
def rollup_query(self, aggregation_level):
columns = (
('doc_id', lambda col: col if aggregation_level > 4 else "'All'"),
('awc_name', lambda col: col if aggregation_level > 4 else "NULL"),
('awc_site_code', lambda col: col if aggregation_level > 4 else "'All'"),
('supervisor_id', lambda col: col if aggregation_level > 3 else "'All'"),
('supervisor_name', lambda col: col if aggregation_level > 3 else "NULL"),
('supervisor_site_code', lambda col: col if aggregation_level > 3 else "'All'"),
('block_id', lambda col: col if aggregation_level > 2 else "'All'"),
('block_name', lambda col: col if aggregation_level > 2 else "NULL"),
('block_site_code', lambda col: col if aggregation_level > 2 else "'All'"),
('district_id', lambda col: col if aggregation_level > 1 else "'All'"),
('district_name', lambda col: col if aggregation_level > 1 else "NULL"),
('district_site_code', lambda col: col if aggregation_level > 1 else "'All'"),
('state_id', 'state_id'),
('state_name', 'state_name'),
('state_site_code', 'state_site_code'),
('aggregation_level', '{}'.format(aggregation_level)),
('block_map_location_name', lambda col: col if aggregation_level > 2 else "'All'"),
('district_map_location_name', lambda col: col if aggregation_level > 1 else "'All'"),
('state_map_location_name', 'state_map_location_name'),
('aww_name', 'NULL'),
('contact_phone_number', 'NULL'),
('state_is_test', 'MAX(state_is_test)'),
(
'district_is_test',
lambda col: 'MAX({column})'.format(column=col) if aggregation_level > 1 else "0"
),
(
'block_is_test',
lambda col: 'MAX({column})'.format(column=col) if aggregation_level > 2 else "0"
),
(
'supervisor_is_test',
lambda col: 'MAX({column})'.format(column=col) if aggregation_level > 3 else "0"
),
(
'awc_is_test',
lambda col: 'MAX({column})'.format(column=col) if aggregation_level > 4 else "0"
)
)
def _transform_column(column_tuple):
column = column_tuple[0]
agg_col = column_tuple[1]
if callable(agg_col):
return (column, agg_col(column))
return column_tuple
columns = list(map(_transform_column, columns))
end_text_column = ["id", "name", "site_code", "map_location_name"]
group_by = ["state_{}".format(name) for name in end_text_column]
if aggregation_level > 1:
group_by.extend(["district_{}".format(name) for name in end_text_column])
if aggregation_level > 2:
group_by.extend(["block_{}".format(name) for name in end_text_column])
if aggregation_level > 3:
group_by.extend(
["supervisor_{}".format(name) for name in end_text_column if name is not "map_location_name"]
)
return """
INSERT INTO "{tablename}" (
{columns}
) (
SELECT
{calculations}
FROM "{tablename}"
GROUP BY {group_by}
)
""".format(
tablename=self.base_tablename,
columns=", ".join([col[0] for col in columns]),
calculations=", ".join([col[1] for col in columns]),
group_by=", ".join(group_by)
)
def create_local_table(self):
return """
DELETE FROM "{local_tablename}";
INSERT INTO "{local_tablename}" SELECT * FROM "{tablename}";
""".format(
tablename=self.base_tablename,
local_tablename=self.local_tablename
)
| true | true |
f71dc4758ddd251e1c752ee55a5decd14ff8c7c2 | 37 | py | Python | Extra/lamda.py | tanvinaminul/Python | dcd9ba615d4f841c0732e3bf0443f14865d95993 | [
"MIT"
] | null | null | null | Extra/lamda.py | tanvinaminul/Python | dcd9ba615d4f841c0732e3bf0443f14865d95993 | [
"MIT"
] | 1 | 2019-12-18T09:38:42.000Z | 2019-12-18T09:38:42.000Z | Extra/lamda.py | tanvinaminul/Python | dcd9ba615d4f841c0732e3bf0443f14865d95993 | [
"MIT"
] | null | null | null | sum= lambda a,b: a+b
print(sum(5,6))
| 12.333333 | 20 | 0.621622 | sum= lambda a,b: a+b
print(sum(5,6))
| true | true |
f71dc4cf4a2ee89c783f91b8af82794cd3767b90 | 50 | py | Python | app/schemas/__init__.py | i-gulyaev/receipt-bot | 262bc64c305443e23183eb65c337097f03db6c90 | [
"MIT"
] | null | null | null | app/schemas/__init__.py | i-gulyaev/receipt-bot | 262bc64c305443e23183eb65c337097f03db6c90 | [
"MIT"
] | null | null | null | app/schemas/__init__.py | i-gulyaev/receipt-bot | 262bc64c305443e23183eb65c337097f03db6c90 | [
"MIT"
] | null | null | null | from .receipt import Receipt, ReceiptItem # noqa
| 25 | 49 | 0.78 | from .receipt import Receipt, ReceiptItem
| true | true |
f71dc58fb506be3ca346fa50bd945777190d999f | 650 | py | Python | label_traincatset.py | diegulio/Breed_Recognition-to-Buscomiperro | 040ee45b9b5c355c3ec2c7413cd89a623024ad4e | [
"MIT"
] | null | null | null | label_traincatset.py | diegulio/Breed_Recognition-to-Buscomiperro | 040ee45b9b5c355c3ec2c7413cd89a623024ad4e | [
"MIT"
] | null | null | null | label_traincatset.py | diegulio/Breed_Recognition-to-Buscomiperro | 040ee45b9b5c355c3ec2c7413cd89a623024ad4e | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""label_TrainCatSet.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1vDyBZ7Ql-8qQ3l7EWJB9TfnwGy66qGGn
"""
import pandas as pd
import os
import numpy as np
# Enlisto los nombres de las imagenes
imagenes = os.listdir('/content/drive/MyDrive/Colab Data/Proyecto buscomiperro/gatos')
imagenes[:5]
def extract_ext(filename):
    """Return *filename* with its extension stripped.

    Original (Spanish) comment: the extension is removed so the result
    matches the format of the breed labels.

    FIX: the parameter was named ``id``, shadowing the builtin; it is only
    ever passed positionally (via ``map``), so the rename is safe.
    """
    return os.path.splitext(filename)[0]
labels = list(map(extract_ext, imagenes))
df = pd.DataFrame()
df['id'] = labels
df['breed'] = 'gato'
df.to_csv('cat_labels.csv')
| 23.214286 | 88 | 0.74 |
import pandas as pd
import os
import numpy as np
imagenes = os.listdir('/content/drive/MyDrive/Colab Data/Proyecto buscomiperro/gatos')
imagenes[:5]
def extract_ext(id):
return os.path.splitext(id)[0]
labels = list(map(extract_ext, imagenes))
df = pd.DataFrame()
df['id'] = labels
df['breed'] = 'gato'
df.to_csv('cat_labels.csv')
| true | true |
f71dc67f643b30e6caced22a11e2cae608407e4e | 4,219 | py | Python | chainer/functions/activation/leaky_relu.py | higumachan/chainer | c9209a1099c9a2a5ecab2b28e1b008b19effa724 | [
"MIT"
] | null | null | null | chainer/functions/activation/leaky_relu.py | higumachan/chainer | c9209a1099c9a2a5ecab2b28e1b008b19effa724 | [
"MIT"
] | 2 | 2019-05-14T15:45:01.000Z | 2019-05-15T07:12:49.000Z | chainer/functions/activation/leaky_relu.py | higumachan/chainer | c9209a1099c9a2a5ecab2b28e1b008b19effa724 | [
"MIT"
] | null | null | null | from chainer.backends import cuda
from chainer.backends import intel64
from chainer import function_node
from chainer.utils import type_check
_kern = None
def _get_kern():
    """Return the CUDA elementwise LReLU kernel, compiling it on first use.

    The compiled kernel is cached in the module-level ``_kern`` so it is
    built at most once per process.
    """
    global _kern
    if _kern is not None:
        return _kern
    _kern = cuda.elementwise(
        'T cond, T x, T slope', 'T y',
        'y = cond >= 0 ? x : (T)(slope * x)', 'lrelu')
    return _kern
class LeakyReLU(function_node.FunctionNode):

    """Leaky rectifier unit.

    Keeps non-negative elements unchanged and scales negative elements by
    ``slope``, with separate forward paths for NumPy (CPU), iDeep and CUDA.
    """

    def __init__(self, slope=0.2):
        # Multiplier applied to negative inputs.
        self.slope = slope

    def check_type_forward(self, in_types):
        # Exactly one floating-point input.
        type_check._argname(in_types, ('x',))
        x_type, = in_types
        type_check.expect(x_type.dtype.kind == 'f')

    def forward_cpu(self, inputs):
        # Prefer the iDeep kernel when the backend and inputs allow it.
        if (intel64.should_use_ideep('>=auto')
                and intel64.inputs_all_ready(inputs)):
            return self.forward_ideep(inputs)

        x, = inputs
        y = x.copy()
        y[x < 0] *= self.slope
        if self.slope >= 0:
            # With slope >= 0 the output has the same sign pattern as the
            # input, so retaining only the output suffices for backward.
            self.retain_outputs((0,))
        else:
            # A negative slope flips signs, so backward needs the input.
            self.retain_inputs((0,))
        return y,

    def forward_ideep(self, inputs):
        x, = inputs
        y = intel64.ideep.relu.Forward(
            intel64.ideep.array(x), self.slope)
        # Same retain policy as forward_cpu.
        if self.slope >= 0:
            self.retain_outputs((0,))
        else:
            self.retain_inputs((0,))
        return y,

    def forward_gpu(self, inputs):
        x, = inputs
        # The kernel's first argument is used only as the sign condition;
        # here the condition and the value are the same array.
        y = _get_kern()(x, x, self.slope)
        if self.slope >= 0:
            self.retain_outputs((0,))
        else:
            self.retain_inputs((0,))
        return y,

    def backward(self, indexes, grad_outputs):
        # Hand whichever array was retained in forward (output for
        # slope >= 0, input otherwise) to the gradient node; the other
        # argument stays None.
        if self.slope >= 0:
            x = None
            y = self.get_retained_outputs()[0].data
        else:
            x = self.get_retained_inputs()[0].data
            y = None
        return _LeakyReLUGrad(x, y, self.slope).apply(grad_outputs)
class _LeakyReLUGrad(function_node.FunctionNode):
    """Gradient of :class:`LeakyReLU`.

    Exactly one of ``x`` (forward input) or ``y`` (forward output) is
    non-None, chosen by the sign of ``slope``; its sign mask decides where
    the incoming gradient is scaled by ``slope``.
    """

    def __init__(self, x, y, slope):
        self.slope = slope
        self.x = x
        self.y = y

    def forward_cpu(self, inputs):
        # Prefer the iDeep kernel when the backend and inputs allow it.
        if (intel64.should_use_ideep('>=auto')
                and intel64.inputs_all_ready(inputs)):
            return self.forward_ideep(inputs)

        gy, = inputs
        gy = gy.copy()
        if self.slope >= 0:
            # slope >= 0: the retained output's sign mirrors the input's,
            # so mask on y.
            gy[self.y < 0] *= self.slope
        else:
            gy[self.x < 0] *= self.slope
        return gy,

    def forward_ideep(self, inputs):
        gy, = inputs
        if self.slope >= 0:
            gy = intel64.ideep.relu.Backward(
                intel64.ideep.array(self.y),
                intel64.ideep.array(gy), self.slope)
        else:
            gy = intel64.ideep.relu.Backward(
                intel64.ideep.array(self.x),
                intel64.ideep.array(gy), self.slope)
        return gy,

    def forward_gpu(self, inputs):
        gy, = inputs
        if self.slope >= 0:
            gy = _get_kern()(self.y, gy, self.slope)
        else:
            gy = _get_kern()(self.x, gy, self.slope)
        return gy,

    def backward(self, indexes, grad_outputs):
        # Double-backward reuses the same masking rule with the same
        # retained arrays.
        return _LeakyReLUGrad(self.x, self.y, self.slope).apply(grad_outputs)
def leaky_relu(x, slope=0.2):
    """Leaky Rectified Linear Unit function.

    Computes, element-wise,

    .. math::
        f(x) = \\left \\{ \\begin{array}{ll}
        x  & {\\rm if}~ x \\ge 0 \\\\
        ax & {\\rm if}~ x < 0,
        \\end{array} \\right.

    where :math:`a` is the configurable ``slope``.

    Args:
        x (:class:`~chainer.Variable` or :ref:`ndarray`):
            Input variable. A :math:`(s_1, s_2, ..., s_N)`-shaped float array.
        slope (float): Slope value :math:`a` applied to negative elements.

    Returns:
        ~chainer.Variable: Output variable with the same
        :math:`(s_1, s_2, ..., s_N)` shape as the input.

    .. admonition:: Example

        >>> x = np.array([[-1, 0], [2, -3], [-2, 1]], np.float32)
        >>> F.leaky_relu(x, slope=0.2).array
        array([[-0.2,  0. ],
               [ 2. , -0.6],
               [-0.4,  1. ]], dtype=float32)

    """
    y, = LeakyReLU(slope).apply((x,))
    return y
| 26.872611 | 78 | 0.526428 | from chainer.backends import cuda
from chainer.backends import intel64
from chainer import function_node
from chainer.utils import type_check
_kern = None
def _get_kern():
global _kern
if _kern is None:
_kern = cuda.elementwise(
'T cond, T x, T slope', 'T y',
'y = cond >= 0 ? x : (T)(slope * x)', 'lrelu')
return _kern
class LeakyReLU(function_node.FunctionNode):
def __init__(self, slope=0.2):
self.slope = slope
def check_type_forward(self, in_types):
type_check._argname(in_types, ('x',))
x_type, = in_types
type_check.expect(x_type.dtype.kind == 'f')
def forward_cpu(self, inputs):
if (intel64.should_use_ideep('>=auto')
and intel64.inputs_all_ready(inputs)):
return self.forward_ideep(inputs)
x, = inputs
y = x.copy()
y[x < 0] *= self.slope
if self.slope >= 0:
self.retain_outputs((0,))
else:
self.retain_inputs((0,))
return y,
def forward_ideep(self, inputs):
x, = inputs
y = intel64.ideep.relu.Forward(
intel64.ideep.array(x), self.slope)
if self.slope >= 0:
self.retain_outputs((0,))
else:
self.retain_inputs((0,))
return y,
def forward_gpu(self, inputs):
x, = inputs
y = _get_kern()(x, x, self.slope)
if self.slope >= 0:
self.retain_outputs((0,))
else:
self.retain_inputs((0,))
return y,
def backward(self, indexes, grad_outputs):
if self.slope >= 0:
x = None
y = self.get_retained_outputs()[0].data
else:
x = self.get_retained_inputs()[0].data
y = None
return _LeakyReLUGrad(x, y, self.slope).apply(grad_outputs)
class _LeakyReLUGrad(function_node.FunctionNode):
def __init__(self, x, y, slope):
self.slope = slope
self.x = x
self.y = y
def forward_cpu(self, inputs):
if (intel64.should_use_ideep('>=auto')
and intel64.inputs_all_ready(inputs)):
return self.forward_ideep(inputs)
gy, = inputs
gy = gy.copy()
if self.slope >= 0:
gy[self.y < 0] *= self.slope
else:
gy[self.x < 0] *= self.slope
return gy,
def forward_ideep(self, inputs):
gy, = inputs
if self.slope >= 0:
gy = intel64.ideep.relu.Backward(
intel64.ideep.array(self.y),
intel64.ideep.array(gy), self.slope)
else:
gy = intel64.ideep.relu.Backward(
intel64.ideep.array(self.x),
intel64.ideep.array(gy), self.slope)
return gy,
def forward_gpu(self, inputs):
gy, = inputs
if self.slope >= 0:
gy = _get_kern()(self.y, gy, self.slope)
else:
gy = _get_kern()(self.x, gy, self.slope)
return gy,
def backward(self, indexes, grad_outputs):
return _LeakyReLUGrad(self.x, self.y, self.slope).apply(grad_outputs)
def leaky_relu(x, slope=0.2):
return LeakyReLU(slope).apply((x,))[0]
| true | true |
f71dc72de773fc61570b336f8a2d569a4007f69b | 6,784 | py | Python | robo/fmin/bayesian_optimization.py | lebrice/RoBO | 0cb58a1622d3a540f7714b239f0cedf048b6fd9f | [
"BSD-3-Clause"
] | null | null | null | robo/fmin/bayesian_optimization.py | lebrice/RoBO | 0cb58a1622d3a540f7714b239f0cedf048b6fd9f | [
"BSD-3-Clause"
] | null | null | null | robo/fmin/bayesian_optimization.py | lebrice/RoBO | 0cb58a1622d3a540f7714b239f0cedf048b6fd9f | [
"BSD-3-Clause"
] | null | null | null | import logging
import george
import numpy as np
import inspect
from pybnn import BaseModel
from pybnn.dngo import DNGO
from robo.priors.default_priors import DefaultPrior
from robo.models.base_model import BaseModel as BaseModel_
from robo.models.wrapper_bohamiann import WrapperBohamiann
from robo.models.gaussian_process import GaussianProcess
from robo.models.gaussian_process_mcmc import GaussianProcessMCMC
from robo.models.random_forest import RandomForest
from robo.maximizers.base_maximizer import BaseMaximizer
from robo.maximizers.scipy_optimizer import SciPyOptimizer
from robo.maximizers.random_sampling import RandomSampling
from robo.maximizers.differential_evolution import DifferentialEvolution
from robo.solver.bayesian_optimization import BayesianOptimization
from robo.acquisition_functions.base_acquisition import BaseAcquisitionFunction
from robo.acquisition_functions.ei import EI
from robo.acquisition_functions.pi import PI
from robo.acquisition_functions.log_ei import LogEI
from robo.acquisition_functions.lcb import LCB
from robo.acquisition_functions.marginalization import MarginalizationGPMCMC
from robo.initial_design import init_latin_hypercube_sampling
logger = logging.getLogger(__name__)
def bayesian_optimization(objective_function, lower, upper, num_iterations=30, X_init=None, Y_init=None,
                          maximizer="random", acquisition_func="log_ei", model_type="gp_mcmc",
                          n_init=3, rng=None, output_path=None):
    """
    General interface for Bayesian optimization for global black box
    optimization problems.

    Parameters
    ----------
    objective_function: function
        The objective function that is minimized. This function gets a numpy
        array (D,) as input and returns the function value (scalar)
    lower: np.ndarray (D,)
        The lower bound of the search space
    upper: np.ndarray (D,)
        The upper bound of the search space
    num_iterations: int
        The number of iterations (initial design + BO)
    X_init: np.ndarray(N,D)
        Initial points to warmstart BO
    Y_init: np.ndarray(N,1)
        Function values of the already initial points
    maximizer: {"random", "scipy", "differential_evolution"}
        The optimizer for the acquisition function. May also be a
        ``BaseMaximizer`` instance, or a callable
        ``f(acquisition_func, lower, upper, rng)`` returning one.
    acquisition_func: {"ei", "log_ei", "lcb", "pi"}
        The acquisition function. May also be a ``BaseAcquisitionFunction``
        instance, or a callable ``f(model)`` returning one.
    model_type: {"gp", "gp_mcmc", "rf", "bohamiann", "dngo"}
        The model for the objective function. May also be a model instance,
        or a zero-argument callable returning one.
    n_init: int
        Number of points for the initial design. Make sure that it
        is <= num_iterations.
    output_path: string
        Specifies the path where the intermediate output after each iteration will be saved.
        If None no output will be saved to disk.
    rng: numpy.random.RandomState
        Random number generator

    Returns
    -------
    dict with all results
    """
    assert upper.shape[0] == lower.shape[0], "Dimension miss match"
    assert np.all(lower < upper), "Lower bound >= upper bound"
    assert n_init <= num_iterations, "Number of initial design point has to be <= than the number of iterations"

    if rng is None:
        rng = np.random.RandomState(np.random.randint(0, 10000))

    # Matern-5/2 kernel with one lengthscale per input dimension.
    cov_amp = 2
    n_dims = lower.shape[0]

    initial_ls = np.ones([n_dims])
    exp_kernel = george.kernels.Matern52Kernel(initial_ls,
                                               ndim=n_dims)
    kernel = cov_amp * exp_kernel

    prior = DefaultPrior(len(kernel) + 1)

    # Keep the number of hyperparameter samples even.
    n_hypers = 3 * len(kernel)
    if n_hypers % 2 == 1:
        n_hypers += 1

    # --- Model selection: string key, ready-made instance, or factory. ---
    if model_type == "gp":
        model = GaussianProcess(kernel, prior=prior, rng=rng,
                                normalize_output=False, normalize_input=True,
                                lower=lower, upper=upper)
    elif model_type == "gp_mcmc":
        model = GaussianProcessMCMC(kernel, prior=prior,
                                    n_hypers=n_hypers,
                                    chain_length=200,
                                    burnin_steps=100,
                                    normalize_input=True,
                                    normalize_output=False,
                                    rng=rng, lower=lower, upper=upper)
    elif model_type == "rf":
        model = RandomForest(rng=rng)
    elif model_type == "bohamiann":
        model = WrapperBohamiann()
    elif model_type == "dngo":
        model = DNGO()
    elif isinstance(model_type, (BaseModel, BaseModel_)):
        model = model_type
    elif callable(model_type):
        model = model_type()
    else:
        raise ValueError("'{}' is not a valid model".format(model_type))

    # --- Acquisition function selection (same dispatch pattern). ---
    if acquisition_func == "ei":
        a = EI(model)
    elif acquisition_func == "log_ei":
        a = LogEI(model)
    elif acquisition_func == "pi":
        a = PI(model)
    elif acquisition_func == "lcb":
        a = LCB(model)
    elif isinstance(acquisition_func, BaseAcquisitionFunction):
        a = acquisition_func
    elif callable(acquisition_func):
        a = acquisition_func(model)
    else:
        raise ValueError("'{}' is not a valid acquisition function"
                         .format(acquisition_func))

    # MCMC models marginalize the acquisition value over hyperparameter
    # samples; other models use it directly.
    if model_type == "gp_mcmc":
        acquisition_func = MarginalizationGPMCMC(a)
    else:
        acquisition_func = a

    # --- Maximizer selection (same dispatch pattern). ---
    if maximizer == "random":
        max_func = RandomSampling(acquisition_func, lower, upper, rng=rng)
    elif maximizer == "scipy":
        max_func = SciPyOptimizer(acquisition_func, lower, upper, rng=rng)
    elif maximizer == "differential_evolution":
        max_func = DifferentialEvolution(acquisition_func, lower, upper, rng=rng)
    elif isinstance(maximizer, BaseMaximizer):
        max_func = maximizer
    elif callable(maximizer):
        max_func = maximizer(acquisition_func, lower, upper, rng=rng)
    else:
        raise ValueError("'{}' is not a valid function to maximize the "
                         "acquisition function".format(maximizer))

    bo = BayesianOptimization(objective_function, lower, upper,
                              acquisition_func, model, max_func,
                              initial_points=n_init, rng=rng,
                              initial_design=init_latin_hypercube_sampling,
                              output_path=output_path)

    x_best, f_min = bo.run(num_iterations, X=X_init, y=Y_init)

    results = dict()
    results["x_opt"] = x_best
    results["f_opt"] = f_min
    # list() copies (instead of the identity comprehensions the original
    # used) keep the result dict independent of the solver's state.
    results["incumbents"] = list(bo.incumbents)
    results["incumbent_values"] = list(bo.incumbents_values)
    results["runtime"] = bo.runtime
    results["overhead"] = bo.time_overhead
    results["X"] = [x.tolist() for x in bo.X]
    results["y"] = list(bo.y)
    return results
| 38.11236 | 112 | 0.654039 | import logging
import george
import numpy as np
import inspect
from pybnn import BaseModel
from pybnn.dngo import DNGO
from robo.priors.default_priors import DefaultPrior
from robo.models.base_model import BaseModel as BaseModel_
from robo.models.wrapper_bohamiann import WrapperBohamiann
from robo.models.gaussian_process import GaussianProcess
from robo.models.gaussian_process_mcmc import GaussianProcessMCMC
from robo.models.random_forest import RandomForest
from robo.maximizers.base_maximizer import BaseMaximizer
from robo.maximizers.scipy_optimizer import SciPyOptimizer
from robo.maximizers.random_sampling import RandomSampling
from robo.maximizers.differential_evolution import DifferentialEvolution
from robo.solver.bayesian_optimization import BayesianOptimization
from robo.acquisition_functions.base_acquisition import BaseAcquisitionFunction
from robo.acquisition_functions.ei import EI
from robo.acquisition_functions.pi import PI
from robo.acquisition_functions.log_ei import LogEI
from robo.acquisition_functions.lcb import LCB
from robo.acquisition_functions.marginalization import MarginalizationGPMCMC
from robo.initial_design import init_latin_hypercube_sampling
logger = logging.getLogger(__name__)
def bayesian_optimization(objective_function, lower, upper, num_iterations=30, X_init=None, Y_init=None,
maximizer="random", acquisition_func="log_ei", model_type="gp_mcmc",
n_init=3, rng=None, output_path=None):
assert upper.shape[0] == lower.shape[0], "Dimension miss match"
assert np.all(lower < upper), "Lower bound >= upper bound"
assert n_init <= num_iterations, "Number of initial design point has to be <= than the number of iterations"
if rng is None:
rng = np.random.RandomState(np.random.randint(0, 10000))
cov_amp = 2
n_dims = lower.shape[0]
initial_ls = np.ones([n_dims])
exp_kernel = george.kernels.Matern52Kernel(initial_ls,
ndim=n_dims)
kernel = cov_amp * exp_kernel
prior = DefaultPrior(len(kernel) + 1)
n_hypers = 3 * len(kernel)
if n_hypers % 2 == 1:
n_hypers += 1
if model_type == "gp":
model = GaussianProcess(kernel, prior=prior, rng=rng,
normalize_output=False, normalize_input=True,
lower=lower, upper=upper)
elif model_type == "gp_mcmc":
model = GaussianProcessMCMC(kernel, prior=prior,
n_hypers=n_hypers,
chain_length=200,
burnin_steps=100,
normalize_input=True,
normalize_output=False,
rng=rng, lower=lower, upper=upper)
elif model_type == "rf":
model = RandomForest(rng=rng)
elif model_type == "bohamiann":
model = WrapperBohamiann()
elif model_type == "dngo":
model = DNGO()
elif isinstance(model_type, (BaseModel, BaseModel_)):
model = model_type
elif callable(model_type):
model = model_type()
else:
raise ValueError("'{}' is not a valid model".format(model_type))
if acquisition_func == "ei":
a = EI(model)
elif acquisition_func == "log_ei":
a = LogEI(model)
elif acquisition_func == "pi":
a = PI(model)
elif acquisition_func == "lcb":
a = LCB(model)
elif isinstance(acquisition_func, BaseAcquisitionFunction):
a = acquisition_func
elif callable(acquisition_func):
a = acquisition_func(model)
else:
raise ValueError("'{}' is not a valid acquisition function"
.format(acquisition_func))
if model_type == "gp_mcmc":
acquisition_func = MarginalizationGPMCMC(a)
else:
acquisition_func = a
if maximizer == "random":
max_func = RandomSampling(acquisition_func, lower, upper, rng=rng)
elif maximizer == "scipy":
max_func = SciPyOptimizer(acquisition_func, lower, upper, rng=rng)
elif maximizer == "differential_evolution":
max_func = DifferentialEvolution(acquisition_func, lower, upper, rng=rng)
elif isinstance(maximizer, BaseMaximizer):
max_func = maximizer
elif callable(maximizer):
max_func = maximizer(acquisition_func, lower, upper, rng=rng)
else:
raise ValueError("'{}' is not a valid function to maximize the "
"acquisition function".format(maximizer))
bo = BayesianOptimization(objective_function, lower, upper,
acquisition_func, model, max_func,
initial_points=n_init, rng=rng,
initial_design=init_latin_hypercube_sampling,
output_path=output_path)
x_best, f_min = bo.run(num_iterations, X=X_init, y=Y_init)
results = dict()
results["x_opt"] = x_best
results["f_opt"] = f_min
results["incumbents"] = [inc for inc in bo.incumbents]
results["incumbent_values"] = [val for val in bo.incumbents_values]
results["runtime"] = bo.runtime
results["overhead"] = bo.time_overhead
results["X"] = [x.tolist() for x in bo.X]
results["y"] = [y for y in bo.y]
return results
| true | true |
f71dc8aeb93f7835f2caaf5b59252fc6ba16d798 | 135 | py | Python | tests/fixtures/unused_import_comment_6.py | cdce8p/python-typing-update | 2ad78b9ce4b5e3d8e8ff5dd35474c8e214d69983 | [
"MIT"
] | 5 | 2021-03-17T16:12:09.000Z | 2021-09-12T22:19:51.000Z | tests/fixtures/unused_import_comment_6.py | cdce8p/python-typing-update | 2ad78b9ce4b5e3d8e8ff5dd35474c8e214d69983 | [
"MIT"
] | 10 | 2021-03-23T18:14:24.000Z | 2022-03-28T03:05:18.000Z | tests/fixtures/unused_import_comment_6.py | cdce8p/python-typing-update | 2ad78b9ce4b5e3d8e8ff5dd35474c8e214d69983 | [
"MIT"
] | 2 | 2021-03-20T08:47:52.000Z | 2021-06-07T04:02:02.000Z | """Test unused import retention."""
from logging import DEBUG # unused-import
from typing import Any, List
var1: List[str]
var2: Any
| 19.285714 | 42 | 0.740741 | from logging import DEBUG
from typing import Any, List
var1: List[str]
var2: Any
| true | true |
f71dc8f1ad707249189bd1d16a568bd6a4c983c1 | 1,981 | py | Python | src/fetch/playstore.py | adityabharti/fawkes | c1b298ea1f4b96c208e12448ddefe44259bc2316 | [
"MIT"
] | null | null | null | src/fetch/playstore.py | adityabharti/fawkes | c1b298ea1f4b96c208e12448ddefe44259bc2316 | [
"MIT"
] | null | null | null | src/fetch/playstore.py | adityabharti/fawkes | c1b298ea1f4b96c208e12448ddefe44259bc2316 | [
"MIT"
] | null | null | null | import requests
import json
import sys
import os
from pprint import pprint
# This is so that below import works
sys.path.append(os.path.realpath("."))
import src.utils.utils as utils
import src.constants as constants
def fetch(review_channel):
    """Fetch Play Store reviews for an app via the SearchMan API.

    Pulls up to ``constants.PLAYSTORE_FETCH_PAGES`` pages of 100 reviews
    each. SearchMan grants limited monthly credits per API key, so
    ``review_channel.searchman_api_key`` is a list of keys; whenever a
    request fails or returns a malformed payload the next key is tried.

    Args:
        review_channel: Channel config exposing ``app_id``,
            ``channel_type`` and the list ``searchman_api_key``.

    Returns:
        list: Raw review dicts accumulated across the fetched pages
        (possibly fewer than requested if all API keys are exhausted).
    """
    # Index into the rotating list of SearchMan API keys.
    searchman_api_key_index = 0

    params = {
        "appId": review_channel.app_id,
        "apiKey": review_channel.searchman_api_key[searchman_api_key_index],
        "count": 100,
        "start": 0
    }

    reviews = []
    current_page = 0
    while current_page < constants.PLAYSTORE_FETCH_PAGES:
        # The try/except keeps a single bad response from aborting the whole
        # fetch; on any failure we rotate to the next API key.
        # TODO: Remove any such thing from when we directly fetch from play
        # store.
        try:
            params["start"] = current_page * 100
            response = requests.get(constants.SEARCHMAN_REVIEWS_ENDPOINT.format(
                platform=review_channel.channel_type),
                params=params)
            review_page = json.loads(response.text)
            if "data" in review_page:
                reviews += review_page["data"]
                current_page += 1
            else:
                print(
                    "[LOG][ERROR] Bad Response from fetch_app_reviews. Trying next API Key."
                )
                raise Exception("Bad Response from fetch_app_reviews")
        # BUG FIX: the original caught BaseException, which also swallows
        # KeyboardInterrupt and SystemExit; Exception covers the intended
        # network/JSON/lookup failures.
        except Exception:
            searchman_api_key_index += 1
            if searchman_api_key_index < len(review_channel.searchman_api_key):
                params["apiKey"] = review_channel.searchman_api_key[
                    searchman_api_key_index]
            else:
                print("[LOG][ERROR] Exhausted all API keys")
                break
    return reviews
| 34.754386 | 120 | 0.612317 | import requests
import json
import sys
import os
from pprint import pprint
sys.path.append(os.path.realpath("."))
import src.utils.utils as utils
import src.constants as constants
def fetch(review_channel):
searchman_api_key_index = 0
params = {
"appId": review_channel.app_id,
"apiKey": review_channel.searchman_api_key[searchman_api_key_index],
"count": 100,
"start": 0
}
reviews = []
current_page = 0
while current_page < constants.PLAYSTORE_FETCH_PAGES:
# TODO: Remove any such thing from when we directly fetch from play
# store.
try:
params["start"] = current_page * 100
response = requests.get(constants.SEARCHMAN_REVIEWS_ENDPOINT.format(
platform=review_channel.channel_type),
params=params)
review_page = json.loads(response.text)
if "data" in review_page:
review_page = review_page["data"]
reviews += review_page
current_page += 1
else:
print(
"[LOG][ERROR] Bad Response from fetch_app_reviews. Trying next API Key."
)
raise Exception("Bad Response from fetch_app_reviews")
except BaseException:
searchman_api_key_index += 1
if searchman_api_key_index < len(review_channel.searchman_api_key):
params["apiKey"] = review_channel.searchman_api_key[
searchman_api_key_index]
else:
print("[LOG][ERROR] Exhausted all API keys")
break
return reviews
| true | true |
f71dc914c70852e586f370a1875963cd13b5c4b7 | 393 | py | Python | project_plantware/warehouse/migrations/0009_auto_20200629_2114.py | naiem2525/plantware | 5d72989780ff39b59949dde649052d9d01729c86 | [
"bzip2-1.0.6"
] | null | null | null | project_plantware/warehouse/migrations/0009_auto_20200629_2114.py | naiem2525/plantware | 5d72989780ff39b59949dde649052d9d01729c86 | [
"bzip2-1.0.6"
] | null | null | null | project_plantware/warehouse/migrations/0009_auto_20200629_2114.py | naiem2525/plantware | 5d72989780ff39b59949dde649052d9d01729c86 | [
"bzip2-1.0.6"
] | null | null | null | # Generated by Django 3.0.7 on 2020-06-29 15:14
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: alter ``Order.date_ordered`` to a
    ``DateTimeField(null=True)``."""

    dependencies = [
        # Must run after the previous warehouse migration.
        ('warehouse', '0008_auto_20200629_2114'),
    ]

    operations = [
        migrations.AlterField(
            model_name='order',
            name='date_ordered',
            field=models.DateTimeField(null=True),
        ),
    ]
| 20.684211 | 50 | 0.605598 |
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: allow ``Order.date_ordered`` to be NULL."""
    # Must run after the immediately preceding warehouse migration.
    dependencies = [
        ('warehouse', '0008_auto_20200629_2114'),
    ]
    # AlterField rewrites the existing column definition in place;
    # null=True permits orders without a recorded ordering timestamp.
    operations = [
        migrations.AlterField(
            model_name='order',
            name='date_ordered',
            field=models.DateTimeField(null=True),
        ),
    ]
| true | true |
f71dc9e972b6d43146a206b01bacba1306097b59 | 81,265 | py | Python | discord/http.py | Icebluewolf/pycord | 91572a1440aecf0eb91b2249d960a9eba3f4ebec | [
"MIT"
] | null | null | null | discord/http.py | Icebluewolf/pycord | 91572a1440aecf0eb91b2249d960a9eba3f4ebec | [
"MIT"
] | null | null | null | discord/http.py | Icebluewolf/pycord | 91572a1440aecf0eb91b2249d960a9eba3f4ebec | [
"MIT"
] | null | null | null | """
The MIT License (MIT)
Copyright (c) 2015-2021 Rapptz
Copyright (c) 2021-present Pycord Development
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from __future__ import annotations
import asyncio
import logging
import sys
import weakref
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Coroutine,
Dict,
Iterable,
List,
Optional,
Sequence,
Tuple,
Type,
TypeVar,
Union,
)
from urllib.parse import quote as _uriquote
import aiohttp
from . import __version__, utils
from .errors import (
DiscordServerError,
Forbidden,
GatewayNotFound,
HTTPException,
InvalidArgument,
LoginFailure,
NotFound,
)
from .gateway import DiscordClientWebSocketResponse
from .utils import MISSING
_log = logging.getLogger(__name__)
if TYPE_CHECKING:
from types import TracebackType
from .enums import AuditLogAction, InteractionResponseType
from .file import File
from .types import (
appinfo,
audit_log,
channel,
components,
embed,
emoji,
guild,
integration,
interactions,
invite,
member,
message,
role,
scheduled_events,
sticker,
template,
threads,
user,
webhook,
welcome_screen,
widget,
)
from .types.snowflake import Snowflake, SnowflakeList
T = TypeVar("T")
BE = TypeVar("BE", bound=BaseException)  # exception type in MaybeUnlock.__exit__
MU = TypeVar("MU", bound="MaybeUnlock")  # self-type returned by MaybeUnlock.__enter__
# A coroutine resolving to T; every HTTP wrapper method below returns one.
Response = Coroutine[Any, Any, T]
# Discord REST API version baked into every Route URL (see Route.base).
API_VERSION: int = 10
async def json_or_text(response: aiohttp.ClientResponse) -> Union[Dict[str, Any], str]:
    """Decode *response* as JSON when its content-type says so, else return raw text."""
    body = await response.text(encoding="utf-8")
    # Some intermediaries (e.g. Cloudflare error pages) omit the header
    # entirely, hence .get() rather than indexing.
    content_type = response.headers.get("content-type")
    if content_type == "application/json":
        return utils._from_json(body)
    return body
class Route:
    """One REST call target: HTTP method plus a formatted URL, with the
    major rate-limit parameters pulled out of the path parameters."""

    def __init__(self, method: str, path: str, **parameters: Any) -> None:
        self.path: str = path
        self.method: str = method
        url = self.base + path
        if parameters:
            quoted = {k: _uriquote(v) if isinstance(v, str) else v for k, v in parameters.items()}
            url = url.format_map(quoted)
        self.url: str = url
        # Major parameters — the only path parameters used for bucketing.
        self.channel_id: Optional[Snowflake] = parameters.get("channel_id")
        self.guild_id: Optional[Snowflake] = parameters.get("guild_id")
        self.webhook_id: Optional[Snowflake] = parameters.get("webhook_id")
        self.webhook_token: Optional[str] = parameters.get("webhook_token")

    @property
    def base(self) -> str:
        """Root URL of the versioned Discord REST API."""
        return f"https://discord.com/api/v{API_VERSION}"

    @property
    def bucket(self) -> str:
        """Rate-limit bucket key: the major parameters plus the raw path."""
        return f"{self.channel_id}:{self.guild_id}:{self.path}"
class MaybeUnlock:
    """Context manager that releases *lock* on exit unless :meth:`defer`
    was called, in which case the caller takes over releasing it."""

    def __init__(self, lock: asyncio.Lock) -> None:
        self.lock: asyncio.Lock = lock
        self._unlock: bool = True

    def __enter__(self: MU) -> MU:
        return self

    def defer(self) -> None:
        """Keep the lock held after the ``with`` block exits."""
        self._unlock = False

    def __exit__(
        self,
        exc_type: Optional[Type[BE]],
        exc: Optional[BE],
        traceback: Optional[TracebackType],
    ) -> None:
        # Guard clause: a deferred unlock is someone else's responsibility.
        if not self._unlock:
            return
        self.lock.release()
# For some reason, the Discord voice websocket expects this header to be
# completely lowercase while aiohttp respects spec and does it as case-insensitive
# (header names are case-insensitive on the wire, so lowercasing the constant
# is harmless for every other consumer of aiohttp).
aiohttp.hdrs.WEBSOCKET = "websocket"  # type: ignore
class HTTPClient:
"""Represents an HTTP client sending HTTP requests to the Discord API."""
    def __init__(
        self,
        connector: Optional[aiohttp.BaseConnector] = None,
        *,
        proxy: Optional[str] = None,
        proxy_auth: Optional[aiohttp.BasicAuth] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        unsync_clock: bool = True,
    ) -> None:
        """Set up HTTP state; no network activity happens here.

        The aiohttp session itself is only created in :meth:`static_login`.
        ``unsync_clock=True`` means rate-limit waits trust Discord's
        ``Date`` header rather than the local clock (see ``use_clock``).
        """
        self.loop: asyncio.AbstractEventLoop = asyncio.get_event_loop() if loop is None else loop
        self.connector = connector
        self.__session: aiohttp.ClientSession = MISSING  # filled in static_login
        # One asyncio.Lock per rate-limit bucket; weak values so idle
        # buckets get garbage-collected.
        self._locks: weakref.WeakValueDictionary = weakref.WeakValueDictionary()
        # Set (i.e. "not rate limited") except while a global 429 is active.
        self._global_over: asyncio.Event = asyncio.Event()
        self._global_over.set()
        self.token: Optional[str] = None
        self.bot_token: bool = False
        self.proxy: Optional[str] = proxy
        self.proxy_auth: Optional[aiohttp.BasicAuth] = proxy_auth
        self.use_clock: bool = not unsync_clock
        user_agent = "DiscordBot (https://github.com/Pycord-Development/pycord {0}) Python/{1[0]}.{1[1]} aiohttp/{2}"
        self.user_agent: str = user_agent.format(__version__, sys.version_info, aiohttp.__version__)
def recreate(self) -> None:
if self.__session.closed:
self.__session = aiohttp.ClientSession(
connector=self.connector,
ws_response_class=DiscordClientWebSocketResponse,
)
async def ws_connect(self, url: str, *, compress: int = 0) -> Any:
kwargs = {
"proxy_auth": self.proxy_auth,
"proxy": self.proxy,
"max_msg_size": 0,
"timeout": 30.0,
"autoclose": False,
"headers": {
"User-Agent": self.user_agent,
},
"compress": compress,
}
return await self.__session.ws_connect(url, **kwargs)
    async def request(
        self,
        route: Route,
        *,
        files: Optional[Sequence[File]] = None,
        form: Optional[Iterable[Dict[str, Any]]] = None,
        **kwargs: Any,
    ) -> Any:
        """Perform one API request with rate-limit handling and retries.

        Serializes requests per rate-limit bucket, honors both per-bucket
        and global 429 responses, and retries transient failures (up to 5
        attempts).  ``files``/``form`` are re-prepared on every retry so
        multipart bodies survive a resend.  Raises the mapped HTTP error
        (Forbidden/NotFound/DiscordServerError/HTTPException) on failure.
        """
        bucket = route.bucket
        method = route.method
        url = route.url
        lock = self._locks.get(bucket)
        if lock is None:
            lock = asyncio.Lock()
            # NOTE(review): `bucket` is an f-string and can never actually be
            # None here; the guard is kept for historical parity.
            if bucket is not None:
                self._locks[bucket] = lock
        # header creation
        headers: Dict[str, str] = {
            "User-Agent": self.user_agent,
        }
        if self.token is not None:
            headers["Authorization"] = f"Bot {self.token}"
        # some checking if it's a JSON request
        if "json" in kwargs:
            headers["Content-Type"] = "application/json"
            kwargs["data"] = utils._to_json(kwargs.pop("json"))
        try:
            reason = kwargs.pop("reason")
        except KeyError:
            pass
        else:
            if reason:
                headers["X-Audit-Log-Reason"] = _uriquote(reason, safe="/ ")
        kwargs["headers"] = headers
        # Proxy support
        if self.proxy is not None:
            kwargs["proxy"] = self.proxy
        if self.proxy_auth is not None:
            kwargs["proxy_auth"] = self.proxy_auth
        if not self._global_over.is_set():
            # wait until the global lock is complete
            await self._global_over.wait()
        response: Optional[aiohttp.ClientResponse] = None
        data: Optional[Union[Dict[str, Any], str]] = None
        await lock.acquire()
        with MaybeUnlock(lock) as maybe_lock:
            for tries in range(5):
                # Rewind file objects so a retry re-uploads from the start.
                if files:
                    for f in files:
                        f.reset(seek=tries)
                if form:
                    form_data = aiohttp.FormData(quote_fields=False)
                    for params in form:
                        form_data.add_field(**params)
                    kwargs["data"] = form_data
                try:
                    async with self.__session.request(method, url, **kwargs) as response:
                        _log.debug(
                            "%s %s with %s has returned %s",
                            method,
                            url,
                            kwargs.get("data"),
                            response.status,
                        )
                        # even errors have text involved in them so this is safe to call
                        data = await json_or_text(response)
                        # check if we have rate limit header information
                        remaining = response.headers.get("X-Ratelimit-Remaining")
                        if remaining == "0" and response.status != 429:
                            # we've depleted our current bucket
                            delta = utils._parse_ratelimit_header(response, use_clock=self.use_clock)
                            _log.debug(
                                "A rate limit bucket has been exhausted (bucket: %s, retry: %s).",
                                bucket,
                                delta,
                            )
                            # Hold the bucket lock until the window resets.
                            maybe_lock.defer()
                            self.loop.call_later(delta, lock.release)
                        # the request was successful so just return the text/json
                        if 300 > response.status >= 200:
                            _log.debug("%s %s has received %s", method, url, data)
                            return data
                        # we are being rate limited
                        if response.status == 429:
                            if not response.headers.get("Via") or isinstance(data, str):
                                # Banned by Cloudflare more than likely.
                                raise HTTPException(response, data)
                            fmt = 'We are being rate limited. Retrying in %.2f seconds. Handled under the bucket "%s"'
                            # sleep a bit
                            retry_after: float = data["retry_after"]
                            _log.warning(fmt, retry_after, bucket)
                            # check if it's a global rate limit
                            is_global = data.get("global", False)
                            if is_global:
                                _log.warning(
                                    "Global rate limit has been hit. Retrying in %.2f seconds.",
                                    retry_after,
                                )
                                self._global_over.clear()
                            await asyncio.sleep(retry_after)
                            _log.debug("Done sleeping for the rate limit. Retrying...")
                            # release the global lock now that the
                            # global rate limit has passed
                            if is_global:
                                self._global_over.set()
                                _log.debug("Global rate limit is now over.")
                            continue
                        # we've received a 500, 502, or 504, unconditional retry
                        if response.status in {500, 502, 504}:
                            await asyncio.sleep(1 + tries * 2)
                            continue
                        # the usual error cases
                        if response.status == 403:
                            raise Forbidden(response, data)
                        elif response.status == 404:
                            raise NotFound(response, data)
                        elif response.status >= 500:
                            raise DiscordServerError(response, data)
                        else:
                            raise HTTPException(response, data)
                # This is handling exceptions from the request
                except OSError as e:
                    # Connection reset by peer
                    if tries < 4 and e.errno in (54, 10054):
                        await asyncio.sleep(1 + tries * 2)
                        continue
                    raise
            if response is not None:
                # We've run out of retries, raise.
                if response.status >= 500:
                    raise DiscordServerError(response, data)
                raise HTTPException(response, data)
            raise RuntimeError("Unreachable code in HTTP handling")
async def get_from_cdn(self, url: str) -> bytes:
async with self.__session.get(url) as resp:
if resp.status == 200:
return await resp.read()
elif resp.status == 404:
raise NotFound(resp, "asset not found")
elif resp.status == 403:
raise Forbidden(resp, "cannot retrieve asset")
else:
raise HTTPException(resp, "failed to get asset")
# state management
async def close(self) -> None:
if self.__session:
await self.__session.close()
# login management
    async def static_login(self, token: str) -> user.User:
        """Create the aiohttp session and validate *token* against ``/users/@me``.

        On a 401 the previous token is restored and :exc:`LoginFailure` is
        raised; any other HTTP error propagates unchanged.  Returns the raw
        user payload of the authenticated bot.
        """
        # Necessary to get aiohttp to stop complaining about session creation
        self.__session = aiohttp.ClientSession(
            connector=self.connector, ws_response_class=DiscordClientWebSocketResponse
        )
        old_token = self.token
        self.token = token
        try:
            data = await self.request(Route("GET", "/users/@me"))
        except HTTPException as exc:
            self.token = old_token
            if exc.status == 401:
                raise LoginFailure("Improper token has been passed.") from exc
            raise
        return data
def logout(self) -> Response[None]:
return self.request(Route("POST", "/auth/logout"))
# Group functionality
def start_group(self, user_id: Snowflake, recipients: List[int]) -> Response[channel.GroupDMChannel]:
payload = {
"recipients": recipients,
}
return self.request(Route("POST", "/users/{user_id}/channels", user_id=user_id), json=payload)
def leave_group(self, channel_id) -> Response[None]:
return self.request(Route("DELETE", "/channels/{channel_id}", channel_id=channel_id))
# Message management
def start_private_message(self, user_id: Snowflake) -> Response[channel.DMChannel]:
payload = {
"recipient_id": user_id,
}
return self.request(Route("POST", "/users/@me/channels"), json=payload)
def send_message(
self,
channel_id: Snowflake,
content: Optional[str],
*,
tts: bool = False,
embed: Optional[embed.Embed] = None,
embeds: Optional[List[embed.Embed]] = None,
nonce: Optional[str] = None,
allowed_mentions: Optional[message.AllowedMentions] = None,
message_reference: Optional[message.MessageReference] = None,
stickers: Optional[List[sticker.StickerItem]] = None,
components: Optional[List[components.Component]] = None,
) -> Response[message.Message]:
r = Route("POST", "/channels/{channel_id}/messages", channel_id=channel_id)
payload = {}
if content:
payload["content"] = content
if tts:
payload["tts"] = True
if embed:
payload["embeds"] = [embed]
if embeds:
payload["embeds"] = embeds
if nonce:
payload["nonce"] = nonce
if allowed_mentions:
payload["allowed_mentions"] = allowed_mentions
if message_reference:
payload["message_reference"] = message_reference
if components:
payload["components"] = components
if stickers:
payload["sticker_ids"] = stickers
return self.request(r, json=payload)
def send_typing(self, channel_id: Snowflake) -> Response[None]:
return self.request(Route("POST", "/channels/{channel_id}/typing", channel_id=channel_id))
    def send_multipart_helper(
        self,
        route: Route,
        *,
        files: Sequence[File],
        content: Optional[str] = None,
        tts: bool = False,
        embed: Optional[embed.Embed] = None,
        embeds: Optional[Iterable[Optional[embed.Embed]]] = None,
        nonce: Optional[str] = None,
        allowed_mentions: Optional[message.AllowedMentions] = None,
        message_reference: Optional[message.MessageReference] = None,
        stickers: Optional[List[sticker.StickerItem]] = None,
        components: Optional[List[components.Component]] = None,
    ) -> Response[message.Message]:
        """Build and send a multipart/form-data message with file attachments.

        The JSON body goes in the ``payload_json`` form field; each file is
        attached as ``files[i]`` and described in ``payload["attachments"]``
        by matching index.  Falsy optional fields are omitted.
        """
        form = []
        payload: Dict[str, Any] = {"tts": tts}
        if content:
            payload["content"] = content
        if embed:
            payload["embeds"] = [embed]
        if embeds:
            payload["embeds"] = embeds
        if nonce:
            payload["nonce"] = nonce
        if allowed_mentions:
            payload["allowed_mentions"] = allowed_mentions
        if message_reference:
            payload["message_reference"] = message_reference
        if components:
            payload["components"] = components
        if stickers:
            payload["sticker_ids"] = stickers
        attachments = []
        # payload_json must come first; its value is filled in below once the
        # attachments list is complete.
        form.append({"name": "payload_json"})
        for index, file in enumerate(files):
            attachments.append(
                {
                    "id": index,
                    "filename": file.filename,
                    "description": file.description,
                }
            )
            form.append(
                {
                    "name": f"files[{index}]",
                    "value": file.fp,
                    "filename": file.filename,
                    "content_type": "application/octet-stream",
                }
            )
        payload["attachments"] = attachments
        form[0]["value"] = utils._to_json(payload)
        return self.request(route, form=form, files=files)
def send_files(
self,
channel_id: Snowflake,
*,
files: Sequence[File],
content: Optional[str] = None,
tts: bool = False,
embed: Optional[embed.Embed] = None,
embeds: Optional[List[embed.Embed]] = None,
nonce: Optional[str] = None,
allowed_mentions: Optional[message.AllowedMentions] = None,
message_reference: Optional[message.MessageReference] = None,
stickers: Optional[List[sticker.StickerItem]] = None,
components: Optional[List[components.Component]] = None,
) -> Response[message.Message]:
r = Route("POST", "/channels/{channel_id}/messages", channel_id=channel_id)
return self.send_multipart_helper(
r,
files=files,
content=content,
tts=tts,
embed=embed,
embeds=embeds,
nonce=nonce,
allowed_mentions=allowed_mentions,
message_reference=message_reference,
stickers=stickers,
components=components,
)
    def edit_multipart_helper(
        self,
        route: Route,
        files: Sequence[File],
        **payload,
    ) -> Response[message.Message]:
        """Send a message-edit as multipart/form-data with new file uploads.

        Newly uploaded files are appended to any ``attachments`` already in
        *payload* (so existing attachments the caller wants to keep survive
        the edit).  Indexing of ``files[i]`` mirrors the attachment ids.
        """
        form = []
        attachments = []
        # payload_json must come first; value is filled in after attachments.
        form.append({"name": "payload_json"})
        for index, file in enumerate(files):
            attachments.append(
                {
                    "id": index,
                    "filename": file.filename,
                    "description": file.description,
                }
            )
            form.append(
                {
                    "name": f"files[{index}]",
                    "value": file.fp,
                    "filename": file.filename,
                    "content_type": "application/octet-stream",
                }
            )
        if "attachments" not in payload:
            payload["attachments"] = attachments
        else:
            payload["attachments"].extend(attachments)
        form[0]["value"] = utils._to_json(payload)
        return self.request(route, form=form, files=files)
def edit_files(
self,
channel_id: Snowflake,
message_id: Snowflake,
files: Sequence[File],
**fields,
) -> Response[message.Message]:
r = Route(
"PATCH",
f"/channels/{channel_id}/messages/{message_id}",
channel_id=channel_id,
message_id=message_id,
)
payload: Dict[str, Any] = {}
if "attachments" in fields:
payload["attachments"] = fields["attachments"]
if "flags" in fields:
payload["flags"] = fields["flags"]
if "content" in fields:
payload["content"] = fields["content"]
if "embeds" in fields:
payload["embeds"] = fields["embeds"]
if "allowed_mentions" in fields:
payload["allowed_mentions"] = fields["allowed_mentions"]
if "components" in fields:
payload["components"] = fields["components"]
return self.edit_multipart_helper(
r,
files=files,
**payload,
)
def delete_message(
self,
channel_id: Snowflake,
message_id: Snowflake,
*,
reason: Optional[str] = None,
) -> Response[None]:
r = Route(
"DELETE",
"/channels/{channel_id}/messages/{message_id}",
channel_id=channel_id,
message_id=message_id,
)
return self.request(r, reason=reason)
def delete_messages(
self,
channel_id: Snowflake,
message_ids: SnowflakeList,
*,
reason: Optional[str] = None,
) -> Response[None]:
r = Route("POST", "/channels/{channel_id}/messages/bulk-delete", channel_id=channel_id)
payload = {
"messages": message_ids,
}
return self.request(r, json=payload, reason=reason)
def edit_message(self, channel_id: Snowflake, message_id: Snowflake, **fields: Any) -> Response[message.Message]:
r = Route(
"PATCH",
"/channels/{channel_id}/messages/{message_id}",
channel_id=channel_id,
message_id=message_id,
)
return self.request(r, json=fields)
def add_reaction(self, channel_id: Snowflake, message_id: Snowflake, emoji: str) -> Response[None]:
r = Route(
"PUT",
"/channels/{channel_id}/messages/{message_id}/reactions/{emoji}/@me",
channel_id=channel_id,
message_id=message_id,
emoji=emoji,
)
return self.request(r)
def remove_reaction(
self,
channel_id: Snowflake,
message_id: Snowflake,
emoji: str,
member_id: Snowflake,
) -> Response[None]:
r = Route(
"DELETE",
"/channels/{channel_id}/messages/{message_id}/reactions/{emoji}/{member_id}",
channel_id=channel_id,
message_id=message_id,
member_id=member_id,
emoji=emoji,
)
return self.request(r)
def remove_own_reaction(self, channel_id: Snowflake, message_id: Snowflake, emoji: str) -> Response[None]:
r = Route(
"DELETE",
"/channels/{channel_id}/messages/{message_id}/reactions/{emoji}/@me",
channel_id=channel_id,
message_id=message_id,
emoji=emoji,
)
return self.request(r)
def get_reaction_users(
self,
channel_id: Snowflake,
message_id: Snowflake,
emoji: str,
limit: int,
after: Optional[Snowflake] = None,
) -> Response[List[user.User]]:
r = Route(
"GET",
"/channels/{channel_id}/messages/{message_id}/reactions/{emoji}",
channel_id=channel_id,
message_id=message_id,
emoji=emoji,
)
params: Dict[str, Any] = {
"limit": limit,
}
if after:
params["after"] = after
return self.request(r, params=params)
def clear_reactions(self, channel_id: Snowflake, message_id: Snowflake) -> Response[None]:
r = Route(
"DELETE",
"/channels/{channel_id}/messages/{message_id}/reactions",
channel_id=channel_id,
message_id=message_id,
)
return self.request(r)
def clear_single_reaction(self, channel_id: Snowflake, message_id: Snowflake, emoji: str) -> Response[None]:
r = Route(
"DELETE",
"/channels/{channel_id}/messages/{message_id}/reactions/{emoji}",
channel_id=channel_id,
message_id=message_id,
emoji=emoji,
)
return self.request(r)
def get_message(self, channel_id: Snowflake, message_id: Snowflake) -> Response[message.Message]:
r = Route(
"GET",
"/channels/{channel_id}/messages/{message_id}",
channel_id=channel_id,
message_id=message_id,
)
return self.request(r)
def get_channel(self, channel_id: Snowflake) -> Response[channel.Channel]:
r = Route("GET", "/channels/{channel_id}", channel_id=channel_id)
return self.request(r)
def logs_from(
self,
channel_id: Snowflake,
limit: int,
before: Optional[Snowflake] = None,
after: Optional[Snowflake] = None,
around: Optional[Snowflake] = None,
) -> Response[List[message.Message]]:
params: Dict[str, Any] = {
"limit": limit,
}
if before is not None:
params["before"] = before
if after is not None:
params["after"] = after
if around is not None:
params["around"] = around
return self.request(
Route("GET", "/channels/{channel_id}/messages", channel_id=channel_id),
params=params,
)
def publish_message(self, channel_id: Snowflake, message_id: Snowflake) -> Response[message.Message]:
return self.request(
Route(
"POST",
"/channels/{channel_id}/messages/{message_id}/crosspost",
channel_id=channel_id,
message_id=message_id,
)
)
def pin_message(self, channel_id: Snowflake, message_id: Snowflake, reason: Optional[str] = None) -> Response[None]:
r = Route(
"PUT",
"/channels/{channel_id}/pins/{message_id}",
channel_id=channel_id,
message_id=message_id,
)
return self.request(r, reason=reason)
def unpin_message(
self, channel_id: Snowflake, message_id: Snowflake, reason: Optional[str] = None
) -> Response[None]:
r = Route(
"DELETE",
"/channels/{channel_id}/pins/{message_id}",
channel_id=channel_id,
message_id=message_id,
)
return self.request(r, reason=reason)
def pins_from(self, channel_id: Snowflake) -> Response[List[message.Message]]:
return self.request(Route("GET", "/channels/{channel_id}/pins", channel_id=channel_id))
# Member management
def kick(self, user_id: Snowflake, guild_id: Snowflake, reason: Optional[str] = None) -> Response[None]:
r = Route(
"DELETE",
"/guilds/{guild_id}/members/{user_id}",
guild_id=guild_id,
user_id=user_id,
)
return self.request(r, reason=reason)
def ban(
self,
user_id: Snowflake,
guild_id: Snowflake,
delete_message_days: int = 1,
reason: Optional[str] = None,
) -> Response[None]:
r = Route(
"PUT",
"/guilds/{guild_id}/bans/{user_id}",
guild_id=guild_id,
user_id=user_id,
)
params = {
"delete_message_days": delete_message_days,
}
return self.request(r, params=params, reason=reason)
def unban(self, user_id: Snowflake, guild_id: Snowflake, *, reason: Optional[str] = None) -> Response[None]:
r = Route(
"DELETE",
"/guilds/{guild_id}/bans/{user_id}",
guild_id=guild_id,
user_id=user_id,
)
return self.request(r, reason=reason)
def guild_voice_state(
self,
user_id: Snowflake,
guild_id: Snowflake,
*,
mute: Optional[bool] = None,
deafen: Optional[bool] = None,
reason: Optional[str] = None,
) -> Response[member.Member]:
r = Route(
"PATCH",
"/guilds/{guild_id}/members/{user_id}",
guild_id=guild_id,
user_id=user_id,
)
payload = {}
if mute is not None:
payload["mute"] = mute
if deafen is not None:
payload["deaf"] = deafen
return self.request(r, json=payload, reason=reason)
def edit_profile(self, payload: Dict[str, Any]) -> Response[user.User]:
return self.request(Route("PATCH", "/users/@me"), json=payload)
def change_my_nickname(
self,
guild_id: Snowflake,
nickname: str,
*,
reason: Optional[str] = None,
) -> Response[member.Nickname]:
r = Route("PATCH", "/guilds/{guild_id}/members/@me/nick", guild_id=guild_id)
payload = {
"nick": nickname,
}
return self.request(r, json=payload, reason=reason)
def change_nickname(
self,
guild_id: Snowflake,
user_id: Snowflake,
nickname: str,
*,
reason: Optional[str] = None,
) -> Response[member.Member]:
r = Route(
"PATCH",
"/guilds/{guild_id}/members/{user_id}",
guild_id=guild_id,
user_id=user_id,
)
payload = {
"nick": nickname,
}
return self.request(r, json=payload, reason=reason)
def edit_my_voice_state(self, guild_id: Snowflake, payload: Dict[str, Any]) -> Response[None]:
r = Route("PATCH", "/guilds/{guild_id}/voice-states/@me", guild_id=guild_id)
return self.request(r, json=payload)
def edit_voice_state(self, guild_id: Snowflake, user_id: Snowflake, payload: Dict[str, Any]) -> Response[None]:
r = Route(
"PATCH",
"/guilds/{guild_id}/voice-states/{user_id}",
guild_id=guild_id,
user_id=user_id,
)
return self.request(r, json=payload)
def edit_member(
self,
guild_id: Snowflake,
user_id: Snowflake,
*,
reason: Optional[str] = None,
**fields: Any,
) -> Response[member.MemberWithUser]:
r = Route(
"PATCH",
"/guilds/{guild_id}/members/{user_id}",
guild_id=guild_id,
user_id=user_id,
)
return self.request(r, json=fields, reason=reason)
# Channel management
def edit_channel(
self,
channel_id: Snowflake,
*,
reason: Optional[str] = None,
**options: Any,
) -> Response[channel.Channel]:
r = Route("PATCH", "/channels/{channel_id}", channel_id=channel_id)
valid_keys = (
"name",
"parent_id",
"topic",
"bitrate",
"nsfw",
"user_limit",
"position",
"permission_overwrites",
"rate_limit_per_user",
"type",
"rtc_region",
"video_quality_mode",
"archived",
"auto_archive_duration",
"locked",
"invitable",
"default_auto_archive_duration",
)
payload = {k: v for k, v in options.items() if k in valid_keys}
return self.request(r, reason=reason, json=payload)
def bulk_channel_update(
self,
guild_id: Snowflake,
data: List[guild.ChannelPositionUpdate],
*,
reason: Optional[str] = None,
) -> Response[None]:
r = Route("PATCH", "/guilds/{guild_id}/channels", guild_id=guild_id)
return self.request(r, json=data, reason=reason)
def create_channel(
self,
guild_id: Snowflake,
channel_type: channel.ChannelType,
*,
reason: Optional[str] = None,
**options: Any,
) -> Response[channel.GuildChannel]:
payload = {
"type": channel_type,
}
valid_keys = (
"name",
"parent_id",
"topic",
"bitrate",
"nsfw",
"user_limit",
"position",
"permission_overwrites",
"rate_limit_per_user",
"rtc_region",
"video_quality_mode",
"auto_archive_duration",
)
payload.update({k: v for k, v in options.items() if k in valid_keys and v is not None})
return self.request(
Route("POST", "/guilds/{guild_id}/channels", guild_id=guild_id),
json=payload,
reason=reason,
)
def delete_channel(
self,
channel_id: Snowflake,
*,
reason: Optional[str] = None,
) -> Response[None]:
return self.request(
Route("DELETE", "/channels/{channel_id}", channel_id=channel_id),
reason=reason,
)
# Thread management
def start_thread_with_message(
self,
channel_id: Snowflake,
message_id: Snowflake,
*,
name: str,
auto_archive_duration: threads.ThreadArchiveDuration,
reason: Optional[str] = None,
) -> Response[threads.Thread]:
payload = {
"name": name,
"auto_archive_duration": auto_archive_duration,
}
route = Route(
"POST",
"/channels/{channel_id}/messages/{message_id}/threads",
channel_id=channel_id,
message_id=message_id,
)
return self.request(route, json=payload, reason=reason)
def start_thread_without_message(
self,
channel_id: Snowflake,
*,
name: str,
auto_archive_duration: threads.ThreadArchiveDuration,
type: threads.ThreadType,
invitable: bool = True,
reason: Optional[str] = None,
) -> Response[threads.Thread]:
payload = {
"name": name,
"auto_archive_duration": auto_archive_duration,
"type": type,
"invitable": invitable,
}
route = Route("POST", "/channels/{channel_id}/threads", channel_id=channel_id)
return self.request(route, json=payload, reason=reason)
def join_thread(self, channel_id: Snowflake) -> Response[None]:
return self.request(
Route(
"POST",
"/channels/{channel_id}/thread-members/@me",
channel_id=channel_id,
)
)
def add_user_to_thread(self, channel_id: Snowflake, user_id: Snowflake) -> Response[None]:
return self.request(
Route(
"PUT",
"/channels/{channel_id}/thread-members/{user_id}",
channel_id=channel_id,
user_id=user_id,
)
)
def leave_thread(self, channel_id: Snowflake) -> Response[None]:
return self.request(
Route(
"DELETE",
"/channels/{channel_id}/thread-members/@me",
channel_id=channel_id,
)
)
def remove_user_from_thread(self, channel_id: Snowflake, user_id: Snowflake) -> Response[None]:
route = Route(
"DELETE",
"/channels/{channel_id}/thread-members/{user_id}",
channel_id=channel_id,
user_id=user_id,
)
return self.request(route)
def get_public_archived_threads(
self, channel_id: Snowflake, before: Optional[Snowflake] = None, limit: int = 50
) -> Response[threads.ThreadPaginationPayload]:
route = Route(
"GET",
"/channels/{channel_id}/threads/archived/public",
channel_id=channel_id,
)
params = {}
if before:
params["before"] = before
params["limit"] = limit
return self.request(route, params=params)
def get_private_archived_threads(
self, channel_id: Snowflake, before: Optional[Snowflake] = None, limit: int = 50
) -> Response[threads.ThreadPaginationPayload]:
route = Route(
"GET",
"/channels/{channel_id}/threads/archived/private",
channel_id=channel_id,
)
params = {}
if before:
params["before"] = before
params["limit"] = limit
return self.request(route, params=params)
def get_joined_private_archived_threads(
self, channel_id: Snowflake, before: Optional[Snowflake] = None, limit: int = 50
) -> Response[threads.ThreadPaginationPayload]:
route = Route(
"GET",
"/channels/{channel_id}/users/@me/threads/archived/private",
channel_id=channel_id,
)
params = {}
if before:
params["before"] = before
params["limit"] = limit
return self.request(route, params=params)
def get_active_threads(self, guild_id: Snowflake) -> Response[threads.ThreadPaginationPayload]:
route = Route("GET", "/guilds/{guild_id}/threads/active", guild_id=guild_id)
return self.request(route)
def get_thread_members(self, channel_id: Snowflake) -> Response[List[threads.ThreadMember]]:
route = Route("GET", "/channels/{channel_id}/thread-members", channel_id=channel_id)
return self.request(route)
# Webhook management
def create_webhook(
self,
channel_id: Snowflake,
*,
name: str,
avatar: Optional[bytes] = None,
reason: Optional[str] = None,
) -> Response[webhook.Webhook]:
payload: Dict[str, Any] = {
"name": name,
}
if avatar is not None:
payload["avatar"] = avatar
r = Route("POST", "/channels/{channel_id}/webhooks", channel_id=channel_id)
return self.request(r, json=payload, reason=reason)
def channel_webhooks(self, channel_id: Snowflake) -> Response[List[webhook.Webhook]]:
return self.request(Route("GET", "/channels/{channel_id}/webhooks", channel_id=channel_id))
def guild_webhooks(self, guild_id: Snowflake) -> Response[List[webhook.Webhook]]:
return self.request(Route("GET", "/guilds/{guild_id}/webhooks", guild_id=guild_id))
def get_webhook(self, webhook_id: Snowflake) -> Response[webhook.Webhook]:
return self.request(Route("GET", "/webhooks/{webhook_id}", webhook_id=webhook_id))
def follow_webhook(
self,
channel_id: Snowflake,
webhook_channel_id: Snowflake,
reason: Optional[str] = None,
) -> Response[None]:
payload = {
"webhook_channel_id": str(webhook_channel_id),
}
return self.request(
Route("POST", "/channels/{channel_id}/followers", channel_id=channel_id),
json=payload,
reason=reason,
)
# Guild management
def get_guilds(
self,
limit: int,
before: Optional[Snowflake] = None,
after: Optional[Snowflake] = None,
) -> Response[List[guild.Guild]]:
params: Dict[str, Any] = {
"limit": limit,
}
if before:
params["before"] = before
if after:
params["after"] = after
return self.request(Route("GET", "/users/@me/guilds"), params=params)
def leave_guild(self, guild_id: Snowflake) -> Response[None]:
return self.request(Route("DELETE", "/users/@me/guilds/{guild_id}", guild_id=guild_id))
def get_guild(self, guild_id: Snowflake, *, with_counts=True) -> Response[guild.Guild]:
params = {"with_counts": int(with_counts)}
return self.request(Route("GET", "/guilds/{guild_id}", guild_id=guild_id), params=params)
def delete_guild(self, guild_id: Snowflake) -> Response[None]:
return self.request(Route("DELETE", "/guilds/{guild_id}", guild_id=guild_id))
def create_guild(self, name: str, region: str, icon: Optional[str]) -> Response[guild.Guild]:
payload = {
"name": name,
"region": region,
}
if icon:
payload["icon"] = icon
return self.request(Route("POST", "/guilds"), json=payload)
def edit_guild(self, guild_id: Snowflake, *, reason: Optional[str] = None, **fields: Any) -> Response[guild.Guild]:
valid_keys = (
"name",
"region",
"icon",
"afk_timeout",
"owner_id",
"afk_channel_id",
"splash",
"discovery_splash",
"features",
"verification_level",
"system_channel_id",
"default_message_notifications",
"description",
"explicit_content_filter",
"banner",
"system_channel_flags",
"rules_channel_id",
"public_updates_channel_id",
"preferred_locale",
"premium_progress_bar_enabled",
)
payload = {k: v for k, v in fields.items() if k in valid_keys}
return self.request(
Route("PATCH", "/guilds/{guild_id}", guild_id=guild_id),
json=payload,
reason=reason,
)
def get_template(self, code: str) -> Response[template.Template]:
return self.request(Route("GET", "/guilds/templates/{code}", code=code))
def guild_templates(self, guild_id: Snowflake) -> Response[List[template.Template]]:
return self.request(Route("GET", "/guilds/{guild_id}/templates", guild_id=guild_id))
def create_template(self, guild_id: Snowflake, payload: template.CreateTemplate) -> Response[template.Template]:
return self.request(
Route("POST", "/guilds/{guild_id}/templates", guild_id=guild_id),
json=payload,
)
def sync_template(self, guild_id: Snowflake, code: str) -> Response[template.Template]:
return self.request(
Route(
"PUT",
"/guilds/{guild_id}/templates/{code}",
guild_id=guild_id,
code=code,
)
)
def edit_template(self, guild_id: Snowflake, code: str, payload) -> Response[template.Template]:
valid_keys = (
"name",
"description",
)
payload = {k: v for k, v in payload.items() if k in valid_keys}
return self.request(
Route(
"PATCH",
"/guilds/{guild_id}/templates/{code}",
guild_id=guild_id,
code=code,
),
json=payload,
)
def delete_template(self, guild_id: Snowflake, code: str) -> Response[None]:
return self.request(
Route(
"DELETE",
"/guilds/{guild_id}/templates/{code}",
guild_id=guild_id,
code=code,
)
)
def create_from_template(self, code: str, name: str, region: str, icon: Optional[str]) -> Response[guild.Guild]:
payload = {
"name": name,
"region": region,
}
if icon:
payload["icon"] = icon
return self.request(Route("POST", "/guilds/templates/{code}", code=code), json=payload)
def get_bans(self, guild_id: Snowflake) -> Response[List[guild.Ban]]:
return self.request(Route("GET", "/guilds/{guild_id}/bans", guild_id=guild_id))
def get_ban(self, user_id: Snowflake, guild_id: Snowflake) -> Response[guild.Ban]:
return self.request(
Route(
"GET",
"/guilds/{guild_id}/bans/{user_id}",
guild_id=guild_id,
user_id=user_id,
)
)
def get_vanity_code(self, guild_id: Snowflake) -> Response[invite.VanityInvite]:
return self.request(Route("GET", "/guilds/{guild_id}/vanity-url", guild_id=guild_id))
def change_vanity_code(self, guild_id: Snowflake, code: str, *, reason: Optional[str] = None) -> Response[None]:
payload: Dict[str, Any] = {"code": code}
return self.request(
Route("PATCH", "/guilds/{guild_id}/vanity-url", guild_id=guild_id),
json=payload,
reason=reason,
)
def get_all_guild_channels(self, guild_id: Snowflake) -> Response[List[guild.GuildChannel]]:
return self.request(Route("GET", "/guilds/{guild_id}/channels", guild_id=guild_id))
def get_members(
self, guild_id: Snowflake, limit: int, after: Optional[Snowflake]
) -> Response[List[member.MemberWithUser]]:
params: Dict[str, Any] = {
"limit": limit,
}
if after:
params["after"] = after
r = Route("GET", "/guilds/{guild_id}/members", guild_id=guild_id)
return self.request(r, params=params)
def get_member(self, guild_id: Snowflake, member_id: Snowflake) -> Response[member.MemberWithUser]:
return self.request(
Route(
"GET",
"/guilds/{guild_id}/members/{member_id}",
guild_id=guild_id,
member_id=member_id,
)
)
def prune_members(
self,
guild_id: Snowflake,
days: int,
compute_prune_count: bool,
roles: List[str],
*,
reason: Optional[str] = None,
) -> Response[guild.GuildPrune]:
payload: Dict[str, Any] = {
"days": days,
"compute_prune_count": "true" if compute_prune_count else "false",
}
if roles:
payload["include_roles"] = ", ".join(roles)
return self.request(
Route("POST", "/guilds/{guild_id}/prune", guild_id=guild_id),
json=payload,
reason=reason,
)
def estimate_pruned_members(
self,
guild_id: Snowflake,
days: int,
roles: List[str],
) -> Response[guild.GuildPrune]:
params: Dict[str, Any] = {
"days": days,
}
if roles:
params["include_roles"] = ", ".join(roles)
return self.request(Route("GET", "/guilds/{guild_id}/prune", guild_id=guild_id), params=params)
def get_sticker(self, sticker_id: Snowflake) -> Response[sticker.Sticker]:
return self.request(Route("GET", "/stickers/{sticker_id}", sticker_id=sticker_id))
def list_premium_sticker_packs(self) -> Response[sticker.ListPremiumStickerPacks]:
return self.request(Route("GET", "/sticker-packs"))
def get_all_guild_stickers(self, guild_id: Snowflake) -> Response[List[sticker.GuildSticker]]:
return self.request(Route("GET", "/guilds/{guild_id}/stickers", guild_id=guild_id))
def get_guild_sticker(self, guild_id: Snowflake, sticker_id: Snowflake) -> Response[sticker.GuildSticker]:
return self.request(
Route(
"GET",
"/guilds/{guild_id}/stickers/{sticker_id}",
guild_id=guild_id,
sticker_id=sticker_id,
)
)
    def create_guild_sticker(
        self,
        guild_id: Snowflake,
        payload: sticker.CreateGuildSticker,
        file: File,
        reason: str,
    ) -> Response[sticker.GuildSticker]:
        """Upload a new guild sticker as a multipart form.

        Sniffs the sticker file's MIME type from its first 16 bytes so the
        ``file`` form part carries a correct ``content_type``; the remaining
        ``payload`` entries (name, tags, ...) become plain form fields.
        """
        # Peek at the header bytes to detect the image type; the cursor is
        # rewound in ``finally`` so the full file is still uploaded below.
        initial_bytes = file.fp.read(16)
        try:
            mime_type = utils._get_mime_type_for_image(initial_bytes)
        except InvalidArgument:
            # Not a recognized image: Lottie stickers are JSON documents
            # (start with "{"), anything else is sent as an opaque blob.
            if initial_bytes.startswith(b"{"):
                mime_type = "application/json"
            else:
                mime_type = "application/octet-stream"
        finally:
            file.reset()
        form: List[Dict[str, Any]] = [
            {
                "name": "file",
                "value": file.fp,
                "filename": file.filename,
                "content_type": mime_type,
            }
        ]
        # Each payload key/value pair becomes its own form field.
        for k, v in payload.items():
            form.append(
                {
                    "name": k,
                    "value": v,
                }
            )
        return self.request(
            Route("POST", "/guilds/{guild_id}/stickers", guild_id=guild_id),
            form=form,
            files=[file],
            reason=reason,
        )
def modify_guild_sticker(
self,
guild_id: Snowflake,
sticker_id: Snowflake,
payload: sticker.EditGuildSticker,
reason: Optional[str],
) -> Response[sticker.GuildSticker]:
return self.request(
Route(
"PATCH",
"/guilds/{guild_id}/stickers/{sticker_id}",
guild_id=guild_id,
sticker_id=sticker_id,
),
json=payload,
reason=reason,
)
def delete_guild_sticker(self, guild_id: Snowflake, sticker_id: Snowflake, reason: Optional[str]) -> Response[None]:
return self.request(
Route(
"DELETE",
"/guilds/{guild_id}/stickers/{sticker_id}",
guild_id=guild_id,
sticker_id=sticker_id,
),
reason=reason,
)
def get_all_custom_emojis(self, guild_id: Snowflake) -> Response[List[emoji.Emoji]]:
return self.request(Route("GET", "/guilds/{guild_id}/emojis", guild_id=guild_id))
def get_custom_emoji(self, guild_id: Snowflake, emoji_id: Snowflake) -> Response[emoji.Emoji]:
return self.request(
Route(
"GET",
"/guilds/{guild_id}/emojis/{emoji_id}",
guild_id=guild_id,
emoji_id=emoji_id,
)
)
def create_custom_emoji(
self,
guild_id: Snowflake,
name: str,
image: bytes,
*,
roles: Optional[SnowflakeList] = None,
reason: Optional[str] = None,
) -> Response[emoji.Emoji]:
payload = {
"name": name,
"image": image,
"roles": roles or [],
}
r = Route("POST", "/guilds/{guild_id}/emojis", guild_id=guild_id)
return self.request(r, json=payload, reason=reason)
def delete_custom_emoji(
self,
guild_id: Snowflake,
emoji_id: Snowflake,
*,
reason: Optional[str] = None,
) -> Response[None]:
r = Route(
"DELETE",
"/guilds/{guild_id}/emojis/{emoji_id}",
guild_id=guild_id,
emoji_id=emoji_id,
)
return self.request(r, reason=reason)
def edit_custom_emoji(
self,
guild_id: Snowflake,
emoji_id: Snowflake,
*,
payload: Dict[str, Any],
reason: Optional[str] = None,
) -> Response[emoji.Emoji]:
r = Route(
"PATCH",
"/guilds/{guild_id}/emojis/{emoji_id}",
guild_id=guild_id,
emoji_id=emoji_id,
)
return self.request(r, json=payload, reason=reason)
def get_all_integrations(self, guild_id: Snowflake) -> Response[List[integration.Integration]]:
r = Route("GET", "/guilds/{guild_id}/integrations", guild_id=guild_id)
return self.request(r)
def create_integration(self, guild_id: Snowflake, type: integration.IntegrationType, id: int) -> Response[None]:
payload = {
"type": type,
"id": id,
}
r = Route("POST", "/guilds/{guild_id}/integrations", guild_id=guild_id)
return self.request(r, json=payload)
def edit_integration(self, guild_id: Snowflake, integration_id: Snowflake, **payload: Any) -> Response[None]:
r = Route(
"PATCH",
"/guilds/{guild_id}/integrations/{integration_id}",
guild_id=guild_id,
integration_id=integration_id,
)
return self.request(r, json=payload)
def sync_integration(self, guild_id: Snowflake, integration_id: Snowflake) -> Response[None]:
r = Route(
"POST",
"/guilds/{guild_id}/integrations/{integration_id}/sync",
guild_id=guild_id,
integration_id=integration_id,
)
return self.request(r)
def delete_integration(
self,
guild_id: Snowflake,
integration_id: Snowflake,
*,
reason: Optional[str] = None,
) -> Response[None]:
r = Route(
"DELETE",
"/guilds/{guild_id}/integrations/{integration_id}",
guild_id=guild_id,
integration_id=integration_id,
)
return self.request(r, reason=reason)
def get_audit_logs(
self,
guild_id: Snowflake,
limit: int = 100,
before: Optional[Snowflake] = None,
after: Optional[Snowflake] = None,
user_id: Optional[Snowflake] = None,
action_type: Optional[AuditLogAction] = None,
) -> Response[audit_log.AuditLog]:
params: Dict[str, Any] = {"limit": limit}
if before:
params["before"] = before
if after:
params["after"] = after
if user_id:
params["user_id"] = user_id
if action_type:
params["action_type"] = action_type
r = Route("GET", "/guilds/{guild_id}/audit-logs", guild_id=guild_id)
return self.request(r, params=params)
def get_widget(self, guild_id: Snowflake) -> Response[widget.Widget]:
return self.request(Route("GET", "/guilds/{guild_id}/widget.json", guild_id=guild_id))
def edit_widget(self, guild_id: Snowflake, payload) -> Response[widget.WidgetSettings]:
return self.request(Route("PATCH", "/guilds/{guild_id}/widget", guild_id=guild_id), json=payload)
# Invite management
def create_invite(
self,
channel_id: Snowflake,
*,
reason: Optional[str] = None,
max_age: int = 0,
max_uses: int = 0,
temporary: bool = False,
unique: bool = True,
target_type: Optional[invite.InviteTargetType] = None,
target_user_id: Optional[Snowflake] = None,
target_application_id: Optional[Snowflake] = None,
) -> Response[invite.Invite]:
r = Route("POST", "/channels/{channel_id}/invites", channel_id=channel_id)
payload = {
"max_age": max_age,
"max_uses": max_uses,
"temporary": temporary,
"unique": unique,
}
if target_type:
payload["target_type"] = target_type
if target_user_id:
payload["target_user_id"] = target_user_id
if target_application_id:
payload["target_application_id"] = str(target_application_id)
return self.request(r, reason=reason, json=payload)
def get_invite(
self,
invite_id: str,
*,
with_counts: bool = True,
with_expiration: bool = True,
guild_scheduled_event_id: Optional[int] = None,
) -> Response[invite.Invite]:
params = {
"with_counts": int(with_counts),
"with_expiration": int(with_expiration),
}
if guild_scheduled_event_id is not None:
params["guild_scheduled_event_id"] = int(guild_scheduled_event_id)
return self.request(Route("GET", "/invites/{invite_id}", invite_id=invite_id), params=params)
def invites_from(self, guild_id: Snowflake) -> Response[List[invite.Invite]]:
return self.request(Route("GET", "/guilds/{guild_id}/invites", guild_id=guild_id))
def invites_from_channel(self, channel_id: Snowflake) -> Response[List[invite.Invite]]:
return self.request(Route("GET", "/channels/{channel_id}/invites", channel_id=channel_id))
def delete_invite(self, invite_id: str, *, reason: Optional[str] = None) -> Response[None]:
return self.request(Route("DELETE", "/invites/{invite_id}", invite_id=invite_id), reason=reason)
# Role management
def get_roles(self, guild_id: Snowflake) -> Response[List[role.Role]]:
return self.request(Route("GET", "/guilds/{guild_id}/roles", guild_id=guild_id))
def edit_role(
self,
guild_id: Snowflake,
role_id: Snowflake,
*,
reason: Optional[str] = None,
**fields: Any,
) -> Response[role.Role]:
r = Route(
"PATCH",
"/guilds/{guild_id}/roles/{role_id}",
guild_id=guild_id,
role_id=role_id,
)
valid_keys = (
"name",
"permissions",
"color",
"hoist",
"mentionable",
"icon",
"unicode_emoji",
)
payload = {k: v for k, v in fields.items() if k in valid_keys}
return self.request(r, json=payload, reason=reason)
def delete_role(self, guild_id: Snowflake, role_id: Snowflake, *, reason: Optional[str] = None) -> Response[None]:
r = Route(
"DELETE",
"/guilds/{guild_id}/roles/{role_id}",
guild_id=guild_id,
role_id=role_id,
)
return self.request(r, reason=reason)
def replace_roles(
self,
user_id: Snowflake,
guild_id: Snowflake,
role_ids: List[int],
*,
reason: Optional[str] = None,
) -> Response[member.MemberWithUser]:
return self.edit_member(guild_id=guild_id, user_id=user_id, roles=role_ids, reason=reason)
def create_role(self, guild_id: Snowflake, *, reason: Optional[str] = None, **fields: Any) -> Response[role.Role]:
r = Route("POST", "/guilds/{guild_id}/roles", guild_id=guild_id)
return self.request(r, json=fields, reason=reason)
def move_role_position(
self,
guild_id: Snowflake,
positions: List[guild.RolePositionUpdate],
*,
reason: Optional[str] = None,
) -> Response[List[role.Role]]:
r = Route("PATCH", "/guilds/{guild_id}/roles", guild_id=guild_id)
return self.request(r, json=positions, reason=reason)
def add_role(
self,
guild_id: Snowflake,
user_id: Snowflake,
role_id: Snowflake,
*,
reason: Optional[str] = None,
) -> Response[None]:
r = Route(
"PUT",
"/guilds/{guild_id}/members/{user_id}/roles/{role_id}",
guild_id=guild_id,
user_id=user_id,
role_id=role_id,
)
return self.request(r, reason=reason)
def remove_role(
self,
guild_id: Snowflake,
user_id: Snowflake,
role_id: Snowflake,
*,
reason: Optional[str] = None,
) -> Response[None]:
r = Route(
"DELETE",
"/guilds/{guild_id}/members/{user_id}/roles/{role_id}",
guild_id=guild_id,
user_id=user_id,
role_id=role_id,
)
return self.request(r, reason=reason)
def edit_channel_permissions(
self,
channel_id: Snowflake,
target: Snowflake,
allow: str,
deny: str,
type: channel.OverwriteType,
*,
reason: Optional[str] = None,
) -> Response[None]:
payload = {"id": target, "allow": allow, "deny": deny, "type": type}
r = Route(
"PUT",
"/channels/{channel_id}/permissions/{target}",
channel_id=channel_id,
target=target,
)
return self.request(r, json=payload, reason=reason)
    def delete_channel_permissions(
        self,
        channel_id: Snowflake,
        # NOTE(review): annotation corrected from ``channel.OverwriteType`` —
        # ``target`` is interpolated into the URL as the overwrite's id, just
        # like the ``target: Snowflake`` parameter of edit_channel_permissions.
        target: Snowflake,
        *,
        reason: Optional[str] = None,
    ) -> Response[None]:
        """Remove the permission overwrite for *target* (a role or member id)
        from a channel.
        """
        r = Route(
            "DELETE",
            "/channels/{channel_id}/permissions/{target}",
            channel_id=channel_id,
            target=target,
        )
        return self.request(r, reason=reason)
# Welcome Screen
def get_welcome_screen(self, guild_id: Snowflake) -> Response[welcome_screen.WelcomeScreen]:
return self.request(Route("GET", "/guilds/{guild_id}/welcome-screen", guild_id=guild_id))
def edit_welcome_screen(
self, guild_id: Snowflake, payload: Any, *, reason: Optional[str] = None
) -> Response[welcome_screen.WelcomeScreen]:
keys = (
"description",
"welcome_channels",
"enabled",
)
payload = {key: val for key, val in payload.items() if key in keys}
return self.request(
Route("PATCH", "/guilds/{guild_id}/welcome-screen", guild_id=guild_id),
json=payload,
reason=reason,
)
# Voice management
def move_member(
self,
user_id: Snowflake,
guild_id: Snowflake,
channel_id: Snowflake,
*,
reason: Optional[str] = None,
) -> Response[member.MemberWithUser]:
return self.edit_member(guild_id=guild_id, user_id=user_id, channel_id=channel_id, reason=reason)
# Stage instance management
def get_stage_instance(self, channel_id: Snowflake) -> Response[channel.StageInstance]:
return self.request(Route("GET", "/stage-instances/{channel_id}", channel_id=channel_id))
def create_stage_instance(self, *, reason: Optional[str], **payload: Any) -> Response[channel.StageInstance]:
valid_keys = (
"channel_id",
"topic",
"privacy_level",
)
payload = {k: v for k, v in payload.items() if k in valid_keys}
return self.request(Route("POST", "/stage-instances"), json=payload, reason=reason)
def edit_stage_instance(
self, channel_id: Snowflake, *, reason: Optional[str] = None, **payload: Any
) -> Response[None]:
valid_keys = (
"topic",
"privacy_level",
)
payload = {k: v for k, v in payload.items() if k in valid_keys}
return self.request(
Route("PATCH", "/stage-instances/{channel_id}", channel_id=channel_id),
json=payload,
reason=reason,
)
def delete_stage_instance(self, channel_id: Snowflake, *, reason: Optional[str] = None) -> Response[None]:
return self.request(
Route("DELETE", "/stage-instances/{channel_id}", channel_id=channel_id),
reason=reason,
)
# Guild scheduled events management
def get_scheduled_events(
self, guild_id: Snowflake, with_user_count: bool = True
) -> Response[List[scheduled_events.ScheduledEvent]]:
params = {
"with_user_count": int(with_user_count),
}
return self.request(
Route("GET", "/guilds/{guild_id}/scheduled-events", guild_id=guild_id),
params=params,
)
def get_scheduled_event(
self, guild_id: Snowflake, event_id: Snowflake, with_user_count: bool = True
) -> Response[scheduled_events.ScheduledEvent]:
params = {
"with_user_count": int(with_user_count),
}
return self.request(
Route(
"GET",
"/guilds/{guild_id}/scheduled-events/{event_id}",
guild_id=guild_id,
event_id=event_id,
),
params=params,
)
def create_scheduled_event(
self, guild_id: Snowflake, reason: Optional[str] = None, **payload: Any
) -> Response[scheduled_events.ScheduledEvent]:
valid_keys = (
"channel_id",
"name",
"privacy_level",
"scheduled_start_time",
"scheduled_end_time",
"description",
"entity_type",
"entity_metadata",
)
payload = {k: v for k, v in payload.items() if k in valid_keys}
return self.request(
Route("POST", "/guilds/{guild_id}/scheduled-events", guild_id=guild_id),
json=payload,
reason=reason,
)
def delete_scheduled_event(self, guild_id: Snowflake, event_id: Snowflake) -> Response[None]:
return self.request(
Route(
"DELETE",
"/guilds/{guild_id}/scheduled-events/{event_id}",
guild_id=guild_id,
event_id=event_id,
)
)
def edit_scheduled_event(
self,
guild_id: Snowflake,
event_id: Snowflake,
reason: Optional[str] = None,
**payload: Any,
) -> Response[scheduled_events.ScheduledEvent]:
valid_keys = (
"channel_id",
"name",
"privacy_level",
"scheduled_start_time",
"scheduled_end_time",
"description",
"entity_type",
"status",
"entity_metadata",
"image",
)
payload = {k: v for k, v in payload.items() if k in valid_keys}
return self.request(
Route(
"PATCH",
"/guilds/{guild_id}/scheduled-events/{event_id}",
guild_id=guild_id,
event_id=event_id,
),
json=payload,
reason=reason,
)
    def get_scheduled_event_users(
        self,
        guild_id: Snowflake,
        event_id: Snowflake,
        limit: int,
        with_member: bool = False,
        before: Optional[Snowflake] = None,  # annotation fixed: default is None
        after: Optional[Snowflake] = None,  # annotation fixed: default is None
    ) -> Response[List[scheduled_events.ScheduledEventSubscriber]]:
        """Fetch users subscribed to a guild scheduled event.

        ``before``/``after`` are snowflake pagination cursors; booleans are
        serialized as 0/1 for the query string.
        """
        params = {
            "limit": int(limit),
            "with_member": int(with_member),
        }
        if before is not None:
            params["before"] = int(before)
        if after is not None:
            params["after"] = int(after)
        return self.request(
            Route(
                "GET",
                "/guilds/{guild_id}/scheduled-events/{event_id}/users",
                guild_id=guild_id,
                event_id=event_id,
            ),
            params=params,
        )
# Application commands (global)
def get_global_commands(self, application_id: Snowflake) -> Response[List[interactions.ApplicationCommand]]:
return self.request(
Route(
"GET",
"/applications/{application_id}/commands",
application_id=application_id,
)
)
def get_global_command(
self, application_id: Snowflake, command_id: Snowflake
) -> Response[interactions.ApplicationCommand]:
r = Route(
"GET",
"/applications/{application_id}/commands/{command_id}",
application_id=application_id,
command_id=command_id,
)
return self.request(r)
def upsert_global_command(self, application_id: Snowflake, payload) -> Response[interactions.ApplicationCommand]:
r = Route(
"POST",
"/applications/{application_id}/commands",
application_id=application_id,
)
return self.request(r, json=payload)
def edit_global_command(
self,
application_id: Snowflake,
command_id: Snowflake,
payload: interactions.EditApplicationCommand,
) -> Response[interactions.ApplicationCommand]:
valid_keys = (
"name",
"description",
"options",
)
payload = {k: v for k, v in payload.items() if k in valid_keys} # type: ignore
r = Route(
"PATCH",
"/applications/{application_id}/commands/{command_id}",
application_id=application_id,
command_id=command_id,
)
return self.request(r, json=payload)
def delete_global_command(self, application_id: Snowflake, command_id: Snowflake) -> Response[None]:
r = Route(
"DELETE",
"/applications/{application_id}/commands/{command_id}",
application_id=application_id,
command_id=command_id,
)
return self.request(r)
def bulk_upsert_global_commands(
self, application_id: Snowflake, payload
) -> Response[List[interactions.ApplicationCommand]]:
r = Route(
"PUT",
"/applications/{application_id}/commands",
application_id=application_id,
)
return self.request(r, json=payload)
# Application commands (guild)
def get_guild_commands(
self, application_id: Snowflake, guild_id: Snowflake
) -> Response[List[interactions.ApplicationCommand]]:
r = Route(
"GET",
"/applications/{application_id}/guilds/{guild_id}/commands",
application_id=application_id,
guild_id=guild_id,
)
return self.request(r)
def get_guild_command(
self,
application_id: Snowflake,
guild_id: Snowflake,
command_id: Snowflake,
) -> Response[interactions.ApplicationCommand]:
r = Route(
"GET",
"/applications/{application_id}/guilds/{guild_id}/commands/{command_id}",
application_id=application_id,
guild_id=guild_id,
command_id=command_id,
)
return self.request(r)
def upsert_guild_command(
self,
application_id: Snowflake,
guild_id: Snowflake,
payload: interactions.EditApplicationCommand,
) -> Response[interactions.ApplicationCommand]:
r = Route(
"POST",
"/applications/{application_id}/guilds/{guild_id}/commands",
application_id=application_id,
guild_id=guild_id,
)
return self.request(r, json=payload)
def edit_guild_command(
self,
application_id: Snowflake,
guild_id: Snowflake,
command_id: Snowflake,
payload: interactions.EditApplicationCommand,
) -> Response[interactions.ApplicationCommand]:
valid_keys = (
"name",
"description",
"options",
)
payload = {k: v for k, v in payload.items() if k in valid_keys} # type: ignore
r = Route(
"PATCH",
"/applications/{application_id}/guilds/{guild_id}/commands/{command_id}",
application_id=application_id,
guild_id=guild_id,
command_id=command_id,
)
return self.request(r, json=payload)
def delete_guild_command(
self,
application_id: Snowflake,
guild_id: Snowflake,
command_id: Snowflake,
) -> Response[None]:
r = Route(
"DELETE",
"/applications/{application_id}/guilds/{guild_id}/commands/{command_id}",
application_id=application_id,
guild_id=guild_id,
command_id=command_id,
)
return self.request(r)
def bulk_upsert_guild_commands(
self,
application_id: Snowflake,
guild_id: Snowflake,
payload: List[interactions.EditApplicationCommand],
) -> Response[List[interactions.ApplicationCommand]]:
r = Route(
"PUT",
"/applications/{application_id}/guilds/{guild_id}/commands",
application_id=application_id,
guild_id=guild_id,
)
return self.request(r, json=payload)
def bulk_upsert_command_permissions(
self,
application_id: Snowflake,
guild_id: Snowflake,
payload: List[interactions.EditApplicationCommand],
) -> Response[List[interactions.ApplicationCommand]]:
r = Route(
"PUT",
"/applications/{application_id}/guilds/{guild_id}/commands/permissions",
application_id=application_id,
guild_id=guild_id,
)
return self.request(r, json=payload)
# Interaction responses
    def _edit_webhook_helper(
        self,
        route: Route,
        file: Optional[File] = None,
        content: Optional[str] = None,
        embeds: Optional[List[embed.Embed]] = None,
        allowed_mentions: Optional[message.AllowedMentions] = None,
    ) -> Response[message.Message]:  # annotation added: route returns the edited message
        """Shared helper for PATCHing a webhook-owned message.

        Builds a multipart form with the JSON payload in ``payload_json`` and
        an optional file part. Note the truthiness checks: an empty string or
        empty list is *not* sent, so this cannot be used to clear a field.
        """
        payload: Dict[str, Any] = {}
        if content:
            payload["content"] = content
        if embeds:
            payload["embeds"] = embeds
        if allowed_mentions:
            payload["allowed_mentions"] = allowed_mentions
        # Discord's multipart convention: the JSON body travels in a form
        # field named "payload_json" alongside any file parts.
        form: List[Dict[str, Any]] = [
            {
                "name": "payload_json",
                "value": utils._to_json(payload),
            }
        ]
        if file:
            form.append(
                {
                    "name": "file",
                    "value": file.fp,
                    "filename": file.filename,
                    "content_type": "application/octet-stream",
                }
            )
        return self.request(route, form=form, files=[file] if file else None)
def create_interaction_response(
self,
interaction_id: Snowflake,
token: str,
*,
type: InteractionResponseType,
data: Optional[interactions.InteractionApplicationCommandCallbackData] = None,
) -> Response[None]:
r = Route(
"POST",
"/interactions/{interaction_id}/{interaction_token}/callback",
interaction_id=interaction_id,
interaction_token=token,
)
payload: Dict[str, Any] = {
"type": type,
}
if data is not None:
payload["data"] = data
return self.request(r, json=payload)
def get_original_interaction_response(
self,
application_id: Snowflake,
token: str,
) -> Response[message.Message]:
r = Route(
"GET",
"/webhooks/{application_id}/{interaction_token}/messages/@original",
application_id=application_id,
interaction_token=token,
)
return self.request(r)
def edit_original_interaction_response(
self,
application_id: Snowflake,
token: str,
file: Optional[File] = None,
content: Optional[str] = None,
embeds: Optional[List[embed.Embed]] = None,
allowed_mentions: Optional[message.AllowedMentions] = None,
) -> Response[message.Message]:
r = Route(
"PATCH",
"/webhooks/{application_id}/{interaction_token}/messages/@original",
application_id=application_id,
interaction_token=token,
)
return self._edit_webhook_helper(
r,
file=file,
content=content,
embeds=embeds,
allowed_mentions=allowed_mentions,
)
def delete_original_interaction_response(self, application_id: Snowflake, token: str) -> Response[None]:
r = Route(
"DELETE",
"/webhooks/{application_id}/{interaction_token}/messages/@original",
application_id=application_id,
interaction_token=token,
)
return self.request(r)
def create_followup_message(
self,
application_id: Snowflake,
token: str,
files: List[File] = [],
content: Optional[str] = None,
tts: bool = False,
embeds: Optional[List[embed.Embed]] = None,
allowed_mentions: Optional[message.AllowedMentions] = None,
) -> Response[message.Message]:
r = Route(
"POST",
"/webhooks/{application_id}/{interaction_token}",
application_id=application_id,
interaction_token=token,
)
return self.send_multipart_helper(
r,
content=content,
files=files,
tts=tts,
embeds=embeds,
allowed_mentions=allowed_mentions,
)
def edit_followup_message(
self,
application_id: Snowflake,
token: str,
message_id: Snowflake,
file: Optional[File] = None,
content: Optional[str] = None,
embeds: Optional[List[embed.Embed]] = None,
allowed_mentions: Optional[message.AllowedMentions] = None,
) -> Response[message.Message]:
r = Route(
"PATCH",
"/webhooks/{application_id}/{interaction_token}/messages/{message_id}",
application_id=application_id,
interaction_token=token,
message_id=message_id,
)
return self._edit_webhook_helper(
r,
file=file,
content=content,
embeds=embeds,
allowed_mentions=allowed_mentions,
)
def delete_followup_message(self, application_id: Snowflake, token: str, message_id: Snowflake) -> Response[None]:
r = Route(
"DELETE",
"/webhooks/{application_id}/{interaction_token}/messages/{message_id}",
application_id=application_id,
interaction_token=token,
message_id=message_id,
)
return self.request(r)
def get_guild_application_command_permissions(
self,
application_id: Snowflake,
guild_id: Snowflake,
) -> Response[List[interactions.GuildApplicationCommandPermissions]]:
r = Route(
"GET",
"/applications/{application_id}/guilds/{guild_id}/commands/permissions",
application_id=application_id,
guild_id=guild_id,
)
return self.request(r)
def get_application_command_permissions(
self,
application_id: Snowflake,
guild_id: Snowflake,
command_id: Snowflake,
) -> Response[interactions.GuildApplicationCommandPermissions]:
r = Route(
"GET",
"/applications/{application_id}/guilds/{guild_id}/commands/{command_id}/permissions",
application_id=application_id,
guild_id=guild_id,
command_id=command_id,
)
return self.request(r)
def edit_application_command_permissions(
self,
application_id: Snowflake,
guild_id: Snowflake,
command_id: Snowflake,
payload: interactions.BaseGuildApplicationCommandPermissions,
) -> Response[None]:
r = Route(
"PUT",
"/applications/{application_id}/guilds/{guild_id}/commands/{command_id}/permissions",
application_id=application_id,
guild_id=guild_id,
command_id=command_id,
)
return self.request(r, json=payload)
def bulk_edit_guild_application_command_permissions(
self,
application_id: Snowflake,
guild_id: Snowflake,
payload: List[interactions.PartialGuildApplicationCommandPermissions],
) -> Response[None]:
r = Route(
"PUT",
"/applications/{application_id}/guilds/{guild_id}/commands/permissions",
application_id=application_id,
guild_id=guild_id,
)
return self.request(r, json=payload)
# Misc
def application_info(self) -> Response[appinfo.AppInfo]:
return self.request(Route("GET", "/oauth2/applications/@me"))
async def get_gateway(self, *, encoding: str = "json", zlib: bool = True) -> str:
try:
data = await self.request(Route("GET", "/gateway"))
except HTTPException as exc:
raise GatewayNotFound() from exc
if zlib:
value = "{0}?encoding={1}&v={2}&compress=zlib-stream"
else:
value = "{0}?encoding={1}&v={2}"
return value.format(data["url"], encoding, API_VERSION)
async def get_bot_gateway(self, *, encoding: str = "json", zlib: bool = True) -> Tuple[int, str]:
try:
data = await self.request(Route("GET", "/gateway/bot"))
except HTTPException as exc:
raise GatewayNotFound() from exc
if zlib:
value = "{0}?encoding={1}&v={2}&compress=zlib-stream"
else:
value = "{0}?encoding={1}&v={2}"
return data["shards"], value.format(data["url"], encoding, API_VERSION)
def get_user(self, user_id: Snowflake) -> Response[user.User]:
return self.request(Route("GET", "/users/{user_id}", user_id=user_id))
| 32.821082 | 120 | 0.559897 |
from __future__ import annotations
import asyncio
import logging
import sys
import weakref
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Coroutine,
Dict,
Iterable,
List,
Optional,
Sequence,
Tuple,
Type,
TypeVar,
Union,
)
from urllib.parse import quote as _uriquote
import aiohttp
from . import __version__, utils
from .errors import (
DiscordServerError,
Forbidden,
GatewayNotFound,
HTTPException,
InvalidArgument,
LoginFailure,
NotFound,
)
from .gateway import DiscordClientWebSocketResponse
from .utils import MISSING
_log = logging.getLogger(__name__)
if TYPE_CHECKING:
from types import TracebackType
from .enums import AuditLogAction, InteractionResponseType
from .file import File
from .types import (
appinfo,
audit_log,
channel,
components,
embed,
emoji,
guild,
integration,
interactions,
invite,
member,
message,
role,
scheduled_events,
sticker,
template,
threads,
user,
webhook,
welcome_screen,
widget,
)
from .types.snowflake import Snowflake, SnowflakeList
T = TypeVar("T")
BE = TypeVar("BE", bound=BaseException)
MU = TypeVar("MU", bound="MaybeUnlock")
Response = Coroutine[Any, Any, T]
API_VERSION: int = 10
async def json_or_text(response: aiohttp.ClientResponse) -> Union[Dict[str, Any], str]:
    """Decode *response* as JSON when the server declared a JSON body,
    otherwise return the raw text.

    The body is always read as UTF-8 text first; only an exact
    ``application/json`` content type triggers JSON parsing.
    """
    body = await response.text(encoding="utf-8")
    content_type = response.headers.get("content-type")
    if content_type == "application/json":
        return utils._from_json(body)
    return body
class Route:
    """A single REST endpoint: HTTP method plus a formatted URL.

    The *major* parameters (channel/guild/webhook ids) are kept on the
    instance because they feed into the rate-limit :attr:`bucket` key.
    """

    def __init__(self, method: str, path: str, **parameters: Any) -> None:
        self.method: str = method
        self.path: str = path
        formatted = self.base + path
        if parameters:
            # URL-escape string parameters; leave ints (snowflakes) as-is.
            escaped = {
                key: _uriquote(value) if isinstance(value, str) else value
                for key, value in parameters.items()
            }
            formatted = formatted.format_map(escaped)
        self.url: str = formatted
        self.channel_id: Optional[Snowflake] = parameters.get("channel_id")
        self.guild_id: Optional[Snowflake] = parameters.get("guild_id")
        self.webhook_id: Optional[Snowflake] = parameters.get("webhook_id")
        self.webhook_token: Optional[str] = parameters.get("webhook_token")

    @property
    def base(self) -> str:
        """Base URL of the versioned Discord REST API."""
        return f"https://discord.com/api/v{API_VERSION}"

    @property
    def bucket(self) -> str:
        """Rate-limit bucket key derived from the major parameters + path."""
        return f"{self.channel_id}:{self.guild_id}:{self.path}"
class MaybeUnlock:
    """Context manager that releases *lock* on exit — unless :meth:`defer`
    was called inside the ``with`` block, in which case the lock stays held
    (used to keep a rate-limit bucket locked past the block).
    """

    def __init__(self, lock: asyncio.Lock) -> None:
        self.lock: asyncio.Lock = lock
        self._unlock: bool = True

    def __enter__(self: MU) -> MU:
        return self

    def defer(self) -> None:
        """Keep the lock held after the ``with`` block exits."""
        self._unlock = False

    def __exit__(
        self,
        exc_type: Optional[Type[BE]],
        exc: Optional[BE],
        traceback: Optional[TracebackType],
    ) -> None:
        if not self._unlock:
            return
        self.lock.release()
# HACK(review): module-level monkeypatch that (re)defines the WEBSOCKET
# constant on ``aiohttp.hdrs`` — presumably something in aiohttp or the
# gateway code expects it to exist; confirm which aiohttp versions need this.
aiohttp.hdrs.WEBSOCKET = "websocket"
class HTTPClient:
    def __init__(
        self,
        connector: Optional[aiohttp.BaseConnector] = None,
        *,
        proxy: Optional[str] = None,
        proxy_auth: Optional[aiohttp.BasicAuth] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        unsync_clock: bool = True,
    ) -> None:
        """Set up the HTTP client state; no network activity happens here.

        The aiohttp session starts as the ``MISSING`` sentinel and is created
        elsewhere (see :meth:`recreate`). ``unsync_clock=True`` (the default)
        means the local clock is *not* trusted for rate-limit timing.
        """
        self.loop: asyncio.AbstractEventLoop = asyncio.get_event_loop() if loop is None else loop
        self.connector = connector
        # Sentinel until a real ClientSession is created.
        self.__session: aiohttp.ClientSession = MISSING
        # Per-bucket rate-limit locks; weak values so idle buckets are GC'd.
        self._locks: weakref.WeakValueDictionary = weakref.WeakValueDictionary()
        # Set (i.e. "not globally rate limited") by default; cleared while a
        # global rate limit is in effect.
        self._global_over: asyncio.Event = asyncio.Event()
        self._global_over.set()
        self.token: Optional[str] = None
        self.bot_token: bool = False
        self.proxy: Optional[str] = proxy
        self.proxy_auth: Optional[aiohttp.BasicAuth] = proxy_auth
        self.use_clock: bool = not unsync_clock
        user_agent = "DiscordBot (https://github.com/Pycord-Development/pycord {0}) Python/{1[0]}.{1[1]} aiohttp/{2}"
        self.user_agent: str = user_agent.format(__version__, sys.version_info, aiohttp.__version__)
def recreate(self) -> None:
if self.__session.closed:
self.__session = aiohttp.ClientSession(
connector=self.connector,
ws_response_class=DiscordClientWebSocketResponse,
)
async def ws_connect(self, url: str, *, compress: int = 0) -> Any:
kwargs = {
"proxy_auth": self.proxy_auth,
"proxy": self.proxy,
"max_msg_size": 0,
"timeout": 30.0,
"autoclose": False,
"headers": {
"User-Agent": self.user_agent,
},
"compress": compress,
}
return await self.__session.ws_connect(url, **kwargs)
    async def request(
        self,
        route: Route,
        *,
        files: Optional[Sequence[File]] = None,
        form: Optional[Iterable[Dict[str, Any]]] = None,
        **kwargs: Any,
    ) -> Any:
        """Perform a Discord API request with rate-limit handling and retries.

        Serializes requests per rate-limit bucket, honors both bucket and
        global 429 responses, and retries up to five times on rate limits,
        transient server errors (500/502/504) and connection resets.

        Returns the decoded JSON body (or raw text) on success.

        Raises ``Forbidden`` (403), ``NotFound`` (404),
        ``DiscordServerError`` (>= 500) or ``HTTPException`` otherwise.
        """
        bucket = route.bucket
        method = route.method
        url = route.url
        # One lock per bucket so concurrent calls to the same bucket queue up.
        lock = self._locks.get(bucket)
        if lock is None:
            lock = asyncio.Lock()
            if bucket is not None:
                self._locks[bucket] = lock
        headers: Dict[str, str] = {
            "User-Agent": self.user_agent,
        }
        if self.token is not None:
            headers["Authorization"] = f"Bot {self.token}"
        # JSON payloads are serialized manually so the Content-Type is explicit.
        if "json" in kwargs:
            headers["Content-Type"] = "application/json"
            kwargs["data"] = utils._to_json(kwargs.pop("json"))
        try:
            reason = kwargs.pop("reason")
        except KeyError:
            pass
        else:
            if reason:
                headers["X-Audit-Log-Reason"] = _uriquote(reason, safe="/ ")
        kwargs["headers"] = headers
        # Proxy support
        if self.proxy is not None:
            kwargs["proxy"] = self.proxy
        if self.proxy_auth is not None:
            kwargs["proxy_auth"] = self.proxy_auth
        if not self._global_over.is_set():
            # wait until the global lock is complete
            await self._global_over.wait()
        response: Optional[aiohttp.ClientResponse] = None
        data: Optional[Union[Dict[str, Any], str]] = None
        await lock.acquire()
        with MaybeUnlock(lock) as maybe_lock:
            for tries in range(5):
                if files:
                    # Rewind file streams so retries re-upload from the start.
                    for f in files:
                        f.reset(seek=tries)
                if form:
                    # FormData must be rebuilt per attempt; it is consumed on send.
                    form_data = aiohttp.FormData(quote_fields=False)
                    for params in form:
                        form_data.add_field(**params)
                    kwargs["data"] = form_data
                try:
                    async with self.__session.request(method, url, **kwargs) as response:
                        _log.debug(
                            "%s %s with %s has returned %s",
                            method,
                            url,
                            kwargs.get("data"),
                            response.status,
                        )
                        # even errors have text involved in them so this is safe to call
                        data = await json_or_text(response)
                        # check if we have rate limit header information
                        remaining = response.headers.get("X-Ratelimit-Remaining")
                        if remaining == "0" and response.status != 429:
                            # we've depleted our current bucket
                            delta = utils._parse_ratelimit_header(response, use_clock=self.use_clock)
                            _log.debug(
                                "A rate limit bucket has been exhausted (bucket: %s, retry: %s).",
                                bucket,
                                delta,
                            )
                            # Keep the bucket lock held until the window resets.
                            maybe_lock.defer()
                            self.loop.call_later(delta, lock.release)
                        if 300 > response.status >= 200:
                            _log.debug("%s %s has received %s", method, url, data)
                            return data
                        if response.status == 429:
                            # A 429 without a "Via" header (or non-JSON body) is
                            # not from Discord's gateway — treat as a hard error.
                            if not response.headers.get("Via") or isinstance(data, str):
                                raise HTTPException(response, data)
                            fmt = 'We are being rate limited. Retrying in %.2f seconds. Handled under the bucket "%s"'
                            retry_after: float = data["retry_after"]
                            _log.warning(fmt, retry_after, bucket)
                            is_global = data.get("global", False)
                            if is_global:
                                _log.warning(
                                    "Global rate limit has been hit. Retrying in %.2f seconds.",
                                    retry_after,
                                )
                                self._global_over.clear()
                            await asyncio.sleep(retry_after)
                            _log.debug("Done sleeping for the rate limit. Retrying...")
                            # release the global lock now that the
                            # global rate limit has passed
                            if is_global:
                                self._global_over.set()
                                _log.debug("Global rate limit is now over.")
                            continue
                        # we've received a 500, 502, or 504, unconditional retry
                        if response.status in {500, 502, 504}:
                            await asyncio.sleep(1 + tries * 2)
                            continue
                        if response.status == 403:
                            raise Forbidden(response, data)
                        elif response.status == 404:
                            raise NotFound(response, data)
                        elif response.status >= 500:
                            raise DiscordServerError(response, data)
                        else:
                            raise HTTPException(response, data)
                except OSError as e:
                    # Connection reset by peer (POSIX 54 / Windows 10054): retry.
                    if tries < 4 and e.errno in (54, 10054):
                        await asyncio.sleep(1 + tries * 2)
                        continue
                    raise
            # Out of retries: surface the last response as an error.
            if response is not None:
                if response.status >= 500:
                    raise DiscordServerError(response, data)
                raise HTTPException(response, data)
            raise RuntimeError("Unreachable code in HTTP handling")
async def get_from_cdn(self, url: str) -> bytes:
async with self.__session.get(url) as resp:
if resp.status == 200:
return await resp.read()
elif resp.status == 404:
raise NotFound(resp, "asset not found")
elif resp.status == 403:
raise Forbidden(resp, "cannot retrieve asset")
else:
raise HTTPException(resp, "failed to get asset")
# state management
async def close(self) -> None:
if self.__session:
await self.__session.close()
# login management
async def static_login(self, token: str) -> user.User:
# Necessary to get aiohttp to stop complaining about session creation
self.__session = aiohttp.ClientSession(
connector=self.connector, ws_response_class=DiscordClientWebSocketResponse
)
old_token = self.token
self.token = token
try:
data = await self.request(Route("GET", "/users/@me"))
except HTTPException as exc:
self.token = old_token
if exc.status == 401:
raise LoginFailure("Improper token has been passed.") from exc
raise
return data
def logout(self) -> Response[None]:
return self.request(Route("POST", "/auth/logout"))
# Group functionality
def start_group(self, user_id: Snowflake, recipients: List[int]) -> Response[channel.GroupDMChannel]:
payload = {
"recipients": recipients,
}
return self.request(Route("POST", "/users/{user_id}/channels", user_id=user_id), json=payload)
    def leave_group(self, channel_id: Snowflake) -> Response[None]:
        """Leave (delete membership of) a group DM channel."""
        return self.request(Route("DELETE", "/channels/{channel_id}", channel_id=channel_id))
# Message management
def start_private_message(self, user_id: Snowflake) -> Response[channel.DMChannel]:
payload = {
"recipient_id": user_id,
}
return self.request(Route("POST", "/users/@me/channels"), json=payload)
def send_message(
self,
channel_id: Snowflake,
content: Optional[str],
*,
tts: bool = False,
embed: Optional[embed.Embed] = None,
embeds: Optional[List[embed.Embed]] = None,
nonce: Optional[str] = None,
allowed_mentions: Optional[message.AllowedMentions] = None,
message_reference: Optional[message.MessageReference] = None,
stickers: Optional[List[sticker.StickerItem]] = None,
components: Optional[List[components.Component]] = None,
) -> Response[message.Message]:
r = Route("POST", "/channels/{channel_id}/messages", channel_id=channel_id)
payload = {}
if content:
payload["content"] = content
if tts:
payload["tts"] = True
if embed:
payload["embeds"] = [embed]
if embeds:
payload["embeds"] = embeds
if nonce:
payload["nonce"] = nonce
if allowed_mentions:
payload["allowed_mentions"] = allowed_mentions
if message_reference:
payload["message_reference"] = message_reference
if components:
payload["components"] = components
if stickers:
payload["sticker_ids"] = stickers
return self.request(r, json=payload)
def send_typing(self, channel_id: Snowflake) -> Response[None]:
return self.request(Route("POST", "/channels/{channel_id}/typing", channel_id=channel_id))
    def send_multipart_helper(
        self,
        route: Route,
        *,
        files: Sequence[File],
        content: Optional[str] = None,
        tts: bool = False,
        embed: Optional[embed.Embed] = None,
        embeds: Optional[Iterable[Optional[embed.Embed]]] = None,
        nonce: Optional[str] = None,
        allowed_mentions: Optional[message.AllowedMentions] = None,
        message_reference: Optional[message.MessageReference] = None,
        stickers: Optional[List[sticker.StickerItem]] = None,
        components: Optional[List[components.Component]] = None,
    ) -> Response[message.Message]:
        """Send a message with file attachments as a multipart form request.

        The JSON body goes in a ``payload_json`` form field (form[0]) and
        each file is appended as a ``files[n]`` field; ``payload_json`` is
        filled in last once the attachment metadata is known.
        """
        form = []
        payload: Dict[str, Any] = {"tts": tts}
        if content:
            payload["content"] = content
        if embed:
            payload["embeds"] = [embed]
        if embeds:
            # Note: a truthy ``embeds`` overwrites the single-``embed`` entry.
            payload["embeds"] = embeds
        if nonce:
            payload["nonce"] = nonce
        if allowed_mentions:
            payload["allowed_mentions"] = allowed_mentions
        if message_reference:
            payload["message_reference"] = message_reference
        if components:
            payload["components"] = components
        if stickers:
            payload["sticker_ids"] = stickers
        attachments = []
        # Reserve slot 0 for payload_json; its value is assigned after the loop.
        form.append({"name": "payload_json"})
        for index, file in enumerate(files):
            attachments.append(
                {
                    "id": index,
                    "filename": file.filename,
                    "description": file.description,
                }
            )
            form.append(
                {
                    "name": f"files[{index}]",
                    "value": file.fp,
                    "filename": file.filename,
                    "content_type": "application/octet-stream",
                }
            )
        payload["attachments"] = attachments
        form[0]["value"] = utils._to_json(payload)
        return self.request(route, form=form, files=files)
def send_files(
self,
channel_id: Snowflake,
*,
files: Sequence[File],
content: Optional[str] = None,
tts: bool = False,
embed: Optional[embed.Embed] = None,
embeds: Optional[List[embed.Embed]] = None,
nonce: Optional[str] = None,
allowed_mentions: Optional[message.AllowedMentions] = None,
message_reference: Optional[message.MessageReference] = None,
stickers: Optional[List[sticker.StickerItem]] = None,
components: Optional[List[components.Component]] = None,
) -> Response[message.Message]:
r = Route("POST", "/channels/{channel_id}/messages", channel_id=channel_id)
return self.send_multipart_helper(
r,
files=files,
content=content,
tts=tts,
embed=embed,
embeds=embeds,
nonce=nonce,
allowed_mentions=allowed_mentions,
message_reference=message_reference,
stickers=stickers,
components=components,
)
    def edit_multipart_helper(
        self,
        route: Route,
        files: Sequence[File],
        **payload,
    ) -> Response[message.Message]:
        """Edit a message while uploading new files as a multipart form request.

        Mirrors :meth:`send_multipart_helper`: ``payload_json`` occupies
        form slot 0 and each file becomes a ``files[n]`` field. New
        attachment metadata is appended to any caller-supplied
        ``attachments`` list so existing attachments are kept.
        """
        form = []
        attachments = []
        # Reserve slot 0 for payload_json; filled in after attachments are known.
        form.append({"name": "payload_json"})
        for index, file in enumerate(files):
            attachments.append(
                {
                    "id": index,
                    "filename": file.filename,
                    "description": file.description,
                }
            )
            form.append(
                {
                    "name": f"files[{index}]",
                    "value": file.fp,
                    "filename": file.filename,
                    "content_type": "application/octet-stream",
                }
            )
        if "attachments" not in payload:
            payload["attachments"] = attachments
        else:
            payload["attachments"].extend(attachments)
        form[0]["value"] = utils._to_json(payload)
        return self.request(route, form=form, files=files)
def edit_files(
self,
channel_id: Snowflake,
message_id: Snowflake,
files: Sequence[File],
**fields,
) -> Response[message.Message]:
r = Route(
"PATCH",
f"/channels/{channel_id}/messages/{message_id}",
channel_id=channel_id,
message_id=message_id,
)
payload: Dict[str, Any] = {}
if "attachments" in fields:
payload["attachments"] = fields["attachments"]
if "flags" in fields:
payload["flags"] = fields["flags"]
if "content" in fields:
payload["content"] = fields["content"]
if "embeds" in fields:
payload["embeds"] = fields["embeds"]
if "allowed_mentions" in fields:
payload["allowed_mentions"] = fields["allowed_mentions"]
if "components" in fields:
payload["components"] = fields["components"]
return self.edit_multipart_helper(
r,
files=files,
**payload,
)
def delete_message(
self,
channel_id: Snowflake,
message_id: Snowflake,
*,
reason: Optional[str] = None,
) -> Response[None]:
r = Route(
"DELETE",
"/channels/{channel_id}/messages/{message_id}",
channel_id=channel_id,
message_id=message_id,
)
return self.request(r, reason=reason)
def delete_messages(
self,
channel_id: Snowflake,
message_ids: SnowflakeList,
*,
reason: Optional[str] = None,
) -> Response[None]:
r = Route("POST", "/channels/{channel_id}/messages/bulk-delete", channel_id=channel_id)
payload = {
"messages": message_ids,
}
return self.request(r, json=payload, reason=reason)
def edit_message(self, channel_id: Snowflake, message_id: Snowflake, **fields: Any) -> Response[message.Message]:
r = Route(
"PATCH",
"/channels/{channel_id}/messages/{message_id}",
channel_id=channel_id,
message_id=message_id,
)
return self.request(r, json=fields)
def add_reaction(self, channel_id: Snowflake, message_id: Snowflake, emoji: str) -> Response[None]:
r = Route(
"PUT",
"/channels/{channel_id}/messages/{message_id}/reactions/{emoji}/@me",
channel_id=channel_id,
message_id=message_id,
emoji=emoji,
)
return self.request(r)
def remove_reaction(
self,
channel_id: Snowflake,
message_id: Snowflake,
emoji: str,
member_id: Snowflake,
) -> Response[None]:
r = Route(
"DELETE",
"/channels/{channel_id}/messages/{message_id}/reactions/{emoji}/{member_id}",
channel_id=channel_id,
message_id=message_id,
member_id=member_id,
emoji=emoji,
)
return self.request(r)
def remove_own_reaction(self, channel_id: Snowflake, message_id: Snowflake, emoji: str) -> Response[None]:
r = Route(
"DELETE",
"/channels/{channel_id}/messages/{message_id}/reactions/{emoji}/@me",
channel_id=channel_id,
message_id=message_id,
emoji=emoji,
)
return self.request(r)
def get_reaction_users(
self,
channel_id: Snowflake,
message_id: Snowflake,
emoji: str,
limit: int,
after: Optional[Snowflake] = None,
) -> Response[List[user.User]]:
r = Route(
"GET",
"/channels/{channel_id}/messages/{message_id}/reactions/{emoji}",
channel_id=channel_id,
message_id=message_id,
emoji=emoji,
)
params: Dict[str, Any] = {
"limit": limit,
}
if after:
params["after"] = after
return self.request(r, params=params)
def clear_reactions(self, channel_id: Snowflake, message_id: Snowflake) -> Response[None]:
r = Route(
"DELETE",
"/channels/{channel_id}/messages/{message_id}/reactions",
channel_id=channel_id,
message_id=message_id,
)
return self.request(r)
def clear_single_reaction(self, channel_id: Snowflake, message_id: Snowflake, emoji: str) -> Response[None]:
r = Route(
"DELETE",
"/channels/{channel_id}/messages/{message_id}/reactions/{emoji}",
channel_id=channel_id,
message_id=message_id,
emoji=emoji,
)
return self.request(r)
def get_message(self, channel_id: Snowflake, message_id: Snowflake) -> Response[message.Message]:
r = Route(
"GET",
"/channels/{channel_id}/messages/{message_id}",
channel_id=channel_id,
message_id=message_id,
)
return self.request(r)
def get_channel(self, channel_id: Snowflake) -> Response[channel.Channel]:
r = Route("GET", "/channels/{channel_id}", channel_id=channel_id)
return self.request(r)
def logs_from(
self,
channel_id: Snowflake,
limit: int,
before: Optional[Snowflake] = None,
after: Optional[Snowflake] = None,
around: Optional[Snowflake] = None,
) -> Response[List[message.Message]]:
params: Dict[str, Any] = {
"limit": limit,
}
if before is not None:
params["before"] = before
if after is not None:
params["after"] = after
if around is not None:
params["around"] = around
return self.request(
Route("GET", "/channels/{channel_id}/messages", channel_id=channel_id),
params=params,
)
def publish_message(self, channel_id: Snowflake, message_id: Snowflake) -> Response[message.Message]:
return self.request(
Route(
"POST",
"/channels/{channel_id}/messages/{message_id}/crosspost",
channel_id=channel_id,
message_id=message_id,
)
)
def pin_message(self, channel_id: Snowflake, message_id: Snowflake, reason: Optional[str] = None) -> Response[None]:
r = Route(
"PUT",
"/channels/{channel_id}/pins/{message_id}",
channel_id=channel_id,
message_id=message_id,
)
return self.request(r, reason=reason)
def unpin_message(
self, channel_id: Snowflake, message_id: Snowflake, reason: Optional[str] = None
) -> Response[None]:
r = Route(
"DELETE",
"/channels/{channel_id}/pins/{message_id}",
channel_id=channel_id,
message_id=message_id,
)
return self.request(r, reason=reason)
def pins_from(self, channel_id: Snowflake) -> Response[List[message.Message]]:
return self.request(Route("GET", "/channels/{channel_id}/pins", channel_id=channel_id))
# Member management
def kick(self, user_id: Snowflake, guild_id: Snowflake, reason: Optional[str] = None) -> Response[None]:
r = Route(
"DELETE",
"/guilds/{guild_id}/members/{user_id}",
guild_id=guild_id,
user_id=user_id,
)
return self.request(r, reason=reason)
def ban(
self,
user_id: Snowflake,
guild_id: Snowflake,
delete_message_days: int = 1,
reason: Optional[str] = None,
) -> Response[None]:
r = Route(
"PUT",
"/guilds/{guild_id}/bans/{user_id}",
guild_id=guild_id,
user_id=user_id,
)
params = {
"delete_message_days": delete_message_days,
}
return self.request(r, params=params, reason=reason)
def unban(self, user_id: Snowflake, guild_id: Snowflake, *, reason: Optional[str] = None) -> Response[None]:
r = Route(
"DELETE",
"/guilds/{guild_id}/bans/{user_id}",
guild_id=guild_id,
user_id=user_id,
)
return self.request(r, reason=reason)
def guild_voice_state(
self,
user_id: Snowflake,
guild_id: Snowflake,
*,
mute: Optional[bool] = None,
deafen: Optional[bool] = None,
reason: Optional[str] = None,
) -> Response[member.Member]:
r = Route(
"PATCH",
"/guilds/{guild_id}/members/{user_id}",
guild_id=guild_id,
user_id=user_id,
)
payload = {}
if mute is not None:
payload["mute"] = mute
if deafen is not None:
payload["deaf"] = deafen
return self.request(r, json=payload, reason=reason)
def edit_profile(self, payload: Dict[str, Any]) -> Response[user.User]:
return self.request(Route("PATCH", "/users/@me"), json=payload)
def change_my_nickname(
self,
guild_id: Snowflake,
nickname: str,
*,
reason: Optional[str] = None,
) -> Response[member.Nickname]:
r = Route("PATCH", "/guilds/{guild_id}/members/@me/nick", guild_id=guild_id)
payload = {
"nick": nickname,
}
return self.request(r, json=payload, reason=reason)
def change_nickname(
self,
guild_id: Snowflake,
user_id: Snowflake,
nickname: str,
*,
reason: Optional[str] = None,
) -> Response[member.Member]:
r = Route(
"PATCH",
"/guilds/{guild_id}/members/{user_id}",
guild_id=guild_id,
user_id=user_id,
)
payload = {
"nick": nickname,
}
return self.request(r, json=payload, reason=reason)
def edit_my_voice_state(self, guild_id: Snowflake, payload: Dict[str, Any]) -> Response[None]:
r = Route("PATCH", "/guilds/{guild_id}/voice-states/@me", guild_id=guild_id)
return self.request(r, json=payload)
def edit_voice_state(self, guild_id: Snowflake, user_id: Snowflake, payload: Dict[str, Any]) -> Response[None]:
r = Route(
"PATCH",
"/guilds/{guild_id}/voice-states/{user_id}",
guild_id=guild_id,
user_id=user_id,
)
return self.request(r, json=payload)
def edit_member(
self,
guild_id: Snowflake,
user_id: Snowflake,
*,
reason: Optional[str] = None,
**fields: Any,
) -> Response[member.MemberWithUser]:
r = Route(
"PATCH",
"/guilds/{guild_id}/members/{user_id}",
guild_id=guild_id,
user_id=user_id,
)
return self.request(r, json=fields, reason=reason)
# Channel management
def edit_channel(
self,
channel_id: Snowflake,
*,
reason: Optional[str] = None,
**options: Any,
) -> Response[channel.Channel]:
r = Route("PATCH", "/channels/{channel_id}", channel_id=channel_id)
valid_keys = (
"name",
"parent_id",
"topic",
"bitrate",
"nsfw",
"user_limit",
"position",
"permission_overwrites",
"rate_limit_per_user",
"type",
"rtc_region",
"video_quality_mode",
"archived",
"auto_archive_duration",
"locked",
"invitable",
"default_auto_archive_duration",
)
payload = {k: v for k, v in options.items() if k in valid_keys}
return self.request(r, reason=reason, json=payload)
def bulk_channel_update(
self,
guild_id: Snowflake,
data: List[guild.ChannelPositionUpdate],
*,
reason: Optional[str] = None,
) -> Response[None]:
r = Route("PATCH", "/guilds/{guild_id}/channels", guild_id=guild_id)
return self.request(r, json=data, reason=reason)
def create_channel(
self,
guild_id: Snowflake,
channel_type: channel.ChannelType,
*,
reason: Optional[str] = None,
**options: Any,
) -> Response[channel.GuildChannel]:
payload = {
"type": channel_type,
}
valid_keys = (
"name",
"parent_id",
"topic",
"bitrate",
"nsfw",
"user_limit",
"position",
"permission_overwrites",
"rate_limit_per_user",
"rtc_region",
"video_quality_mode",
"auto_archive_duration",
)
payload.update({k: v for k, v in options.items() if k in valid_keys and v is not None})
return self.request(
Route("POST", "/guilds/{guild_id}/channels", guild_id=guild_id),
json=payload,
reason=reason,
)
def delete_channel(
self,
channel_id: Snowflake,
*,
reason: Optional[str] = None,
) -> Response[None]:
return self.request(
Route("DELETE", "/channels/{channel_id}", channel_id=channel_id),
reason=reason,
)
# Thread management
def start_thread_with_message(
self,
channel_id: Snowflake,
message_id: Snowflake,
*,
name: str,
auto_archive_duration: threads.ThreadArchiveDuration,
reason: Optional[str] = None,
) -> Response[threads.Thread]:
payload = {
"name": name,
"auto_archive_duration": auto_archive_duration,
}
route = Route(
"POST",
"/channels/{channel_id}/messages/{message_id}/threads",
channel_id=channel_id,
message_id=message_id,
)
return self.request(route, json=payload, reason=reason)
def start_thread_without_message(
self,
channel_id: Snowflake,
*,
name: str,
auto_archive_duration: threads.ThreadArchiveDuration,
type: threads.ThreadType,
invitable: bool = True,
reason: Optional[str] = None,
) -> Response[threads.Thread]:
payload = {
"name": name,
"auto_archive_duration": auto_archive_duration,
"type": type,
"invitable": invitable,
}
route = Route("POST", "/channels/{channel_id}/threads", channel_id=channel_id)
return self.request(route, json=payload, reason=reason)
def join_thread(self, channel_id: Snowflake) -> Response[None]:
return self.request(
Route(
"POST",
"/channels/{channel_id}/thread-members/@me",
channel_id=channel_id,
)
)
def add_user_to_thread(self, channel_id: Snowflake, user_id: Snowflake) -> Response[None]:
return self.request(
Route(
"PUT",
"/channels/{channel_id}/thread-members/{user_id}",
channel_id=channel_id,
user_id=user_id,
)
)
def leave_thread(self, channel_id: Snowflake) -> Response[None]:
return self.request(
Route(
"DELETE",
"/channels/{channel_id}/thread-members/@me",
channel_id=channel_id,
)
)
def remove_user_from_thread(self, channel_id: Snowflake, user_id: Snowflake) -> Response[None]:
route = Route(
"DELETE",
"/channels/{channel_id}/thread-members/{user_id}",
channel_id=channel_id,
user_id=user_id,
)
return self.request(route)
def get_public_archived_threads(
self, channel_id: Snowflake, before: Optional[Snowflake] = None, limit: int = 50
) -> Response[threads.ThreadPaginationPayload]:
route = Route(
"GET",
"/channels/{channel_id}/threads/archived/public",
channel_id=channel_id,
)
params = {}
if before:
params["before"] = before
params["limit"] = limit
return self.request(route, params=params)
def get_private_archived_threads(
self, channel_id: Snowflake, before: Optional[Snowflake] = None, limit: int = 50
) -> Response[threads.ThreadPaginationPayload]:
route = Route(
"GET",
"/channels/{channel_id}/threads/archived/private",
channel_id=channel_id,
)
params = {}
if before:
params["before"] = before
params["limit"] = limit
return self.request(route, params=params)
def get_joined_private_archived_threads(
self, channel_id: Snowflake, before: Optional[Snowflake] = None, limit: int = 50
) -> Response[threads.ThreadPaginationPayload]:
route = Route(
"GET",
"/channels/{channel_id}/users/@me/threads/archived/private",
channel_id=channel_id,
)
params = {}
if before:
params["before"] = before
params["limit"] = limit
return self.request(route, params=params)
def get_active_threads(self, guild_id: Snowflake) -> Response[threads.ThreadPaginationPayload]:
route = Route("GET", "/guilds/{guild_id}/threads/active", guild_id=guild_id)
return self.request(route)
def get_thread_members(self, channel_id: Snowflake) -> Response[List[threads.ThreadMember]]:
route = Route("GET", "/channels/{channel_id}/thread-members", channel_id=channel_id)
return self.request(route)
# Webhook management
def create_webhook(
self,
channel_id: Snowflake,
*,
name: str,
avatar: Optional[bytes] = None,
reason: Optional[str] = None,
) -> Response[webhook.Webhook]:
payload: Dict[str, Any] = {
"name": name,
}
if avatar is not None:
payload["avatar"] = avatar
r = Route("POST", "/channels/{channel_id}/webhooks", channel_id=channel_id)
return self.request(r, json=payload, reason=reason)
def channel_webhooks(self, channel_id: Snowflake) -> Response[List[webhook.Webhook]]:
return self.request(Route("GET", "/channels/{channel_id}/webhooks", channel_id=channel_id))
def guild_webhooks(self, guild_id: Snowflake) -> Response[List[webhook.Webhook]]:
return self.request(Route("GET", "/guilds/{guild_id}/webhooks", guild_id=guild_id))
def get_webhook(self, webhook_id: Snowflake) -> Response[webhook.Webhook]:
return self.request(Route("GET", "/webhooks/{webhook_id}", webhook_id=webhook_id))
def follow_webhook(
self,
channel_id: Snowflake,
webhook_channel_id: Snowflake,
reason: Optional[str] = None,
) -> Response[None]:
payload = {
"webhook_channel_id": str(webhook_channel_id),
}
return self.request(
Route("POST", "/channels/{channel_id}/followers", channel_id=channel_id),
json=payload,
reason=reason,
)
# Guild management
def get_guilds(
self,
limit: int,
before: Optional[Snowflake] = None,
after: Optional[Snowflake] = None,
) -> Response[List[guild.Guild]]:
params: Dict[str, Any] = {
"limit": limit,
}
if before:
params["before"] = before
if after:
params["after"] = after
return self.request(Route("GET", "/users/@me/guilds"), params=params)
def leave_guild(self, guild_id: Snowflake) -> Response[None]:
return self.request(Route("DELETE", "/users/@me/guilds/{guild_id}", guild_id=guild_id))
def get_guild(self, guild_id: Snowflake, *, with_counts=True) -> Response[guild.Guild]:
params = {"with_counts": int(with_counts)}
return self.request(Route("GET", "/guilds/{guild_id}", guild_id=guild_id), params=params)
def delete_guild(self, guild_id: Snowflake) -> Response[None]:
return self.request(Route("DELETE", "/guilds/{guild_id}", guild_id=guild_id))
def create_guild(self, name: str, region: str, icon: Optional[str]) -> Response[guild.Guild]:
payload = {
"name": name,
"region": region,
}
if icon:
payload["icon"] = icon
return self.request(Route("POST", "/guilds"), json=payload)
def edit_guild(self, guild_id: Snowflake, *, reason: Optional[str] = None, **fields: Any) -> Response[guild.Guild]:
valid_keys = (
"name",
"region",
"icon",
"afk_timeout",
"owner_id",
"afk_channel_id",
"splash",
"discovery_splash",
"features",
"verification_level",
"system_channel_id",
"default_message_notifications",
"description",
"explicit_content_filter",
"banner",
"system_channel_flags",
"rules_channel_id",
"public_updates_channel_id",
"preferred_locale",
"premium_progress_bar_enabled",
)
payload = {k: v for k, v in fields.items() if k in valid_keys}
return self.request(
Route("PATCH", "/guilds/{guild_id}", guild_id=guild_id),
json=payload,
reason=reason,
)
def get_template(self, code: str) -> Response[template.Template]:
return self.request(Route("GET", "/guilds/templates/{code}", code=code))
def guild_templates(self, guild_id: Snowflake) -> Response[List[template.Template]]:
return self.request(Route("GET", "/guilds/{guild_id}/templates", guild_id=guild_id))
def create_template(self, guild_id: Snowflake, payload: template.CreateTemplate) -> Response[template.Template]:
return self.request(
Route("POST", "/guilds/{guild_id}/templates", guild_id=guild_id),
json=payload,
)
def sync_template(self, guild_id: Snowflake, code: str) -> Response[template.Template]:
return self.request(
Route(
"PUT",
"/guilds/{guild_id}/templates/{code}",
guild_id=guild_id,
code=code,
)
)
def edit_template(self, guild_id: Snowflake, code: str, payload) -> Response[template.Template]:
valid_keys = (
"name",
"description",
)
payload = {k: v for k, v in payload.items() if k in valid_keys}
return self.request(
Route(
"PATCH",
"/guilds/{guild_id}/templates/{code}",
guild_id=guild_id,
code=code,
),
json=payload,
)
def delete_template(self, guild_id: Snowflake, code: str) -> Response[None]:
return self.request(
Route(
"DELETE",
"/guilds/{guild_id}/templates/{code}",
guild_id=guild_id,
code=code,
)
)
def create_from_template(self, code: str, name: str, region: str, icon: Optional[str]) -> Response[guild.Guild]:
payload = {
"name": name,
"region": region,
}
if icon:
payload["icon"] = icon
return self.request(Route("POST", "/guilds/templates/{code}", code=code), json=payload)
def get_bans(self, guild_id: Snowflake) -> Response[List[guild.Ban]]:
return self.request(Route("GET", "/guilds/{guild_id}/bans", guild_id=guild_id))
def get_ban(self, user_id: Snowflake, guild_id: Snowflake) -> Response[guild.Ban]:
return self.request(
Route(
"GET",
"/guilds/{guild_id}/bans/{user_id}",
guild_id=guild_id,
user_id=user_id,
)
)
def get_vanity_code(self, guild_id: Snowflake) -> Response[invite.VanityInvite]:
return self.request(Route("GET", "/guilds/{guild_id}/vanity-url", guild_id=guild_id))
def change_vanity_code(self, guild_id: Snowflake, code: str, *, reason: Optional[str] = None) -> Response[None]:
payload: Dict[str, Any] = {"code": code}
return self.request(
Route("PATCH", "/guilds/{guild_id}/vanity-url", guild_id=guild_id),
json=payload,
reason=reason,
)
def get_all_guild_channels(self, guild_id: Snowflake) -> Response[List[guild.GuildChannel]]:
return self.request(Route("GET", "/guilds/{guild_id}/channels", guild_id=guild_id))
def get_members(
self, guild_id: Snowflake, limit: int, after: Optional[Snowflake]
) -> Response[List[member.MemberWithUser]]:
params: Dict[str, Any] = {
"limit": limit,
}
if after:
params["after"] = after
r = Route("GET", "/guilds/{guild_id}/members", guild_id=guild_id)
return self.request(r, params=params)
def get_member(self, guild_id: Snowflake, member_id: Snowflake) -> Response[member.MemberWithUser]:
return self.request(
Route(
"GET",
"/guilds/{guild_id}/members/{member_id}",
guild_id=guild_id,
member_id=member_id,
)
)
def prune_members(
self,
guild_id: Snowflake,
days: int,
compute_prune_count: bool,
roles: List[str],
*,
reason: Optional[str] = None,
) -> Response[guild.GuildPrune]:
payload: Dict[str, Any] = {
"days": days,
"compute_prune_count": "true" if compute_prune_count else "false",
}
if roles:
payload["include_roles"] = ", ".join(roles)
return self.request(
Route("POST", "/guilds/{guild_id}/prune", guild_id=guild_id),
json=payload,
reason=reason,
)
def estimate_pruned_members(
self,
guild_id: Snowflake,
days: int,
roles: List[str],
) -> Response[guild.GuildPrune]:
params: Dict[str, Any] = {
"days": days,
}
if roles:
params["include_roles"] = ", ".join(roles)
return self.request(Route("GET", "/guilds/{guild_id}/prune", guild_id=guild_id), params=params)
def get_sticker(self, sticker_id: Snowflake) -> Response[sticker.Sticker]:
return self.request(Route("GET", "/stickers/{sticker_id}", sticker_id=sticker_id))
def list_premium_sticker_packs(self) -> Response[sticker.ListPremiumStickerPacks]:
return self.request(Route("GET", "/sticker-packs"))
def get_all_guild_stickers(self, guild_id: Snowflake) -> Response[List[sticker.GuildSticker]]:
return self.request(Route("GET", "/guilds/{guild_id}/stickers", guild_id=guild_id))
def get_guild_sticker(self, guild_id: Snowflake, sticker_id: Snowflake) -> Response[sticker.GuildSticker]:
return self.request(
Route(
"GET",
"/guilds/{guild_id}/stickers/{sticker_id}",
guild_id=guild_id,
sticker_id=sticker_id,
)
)
    def create_guild_sticker(
        self,
        guild_id: Snowflake,
        payload: sticker.CreateGuildSticker,
        file: File,
        reason: str,
    ) -> Response[sticker.GuildSticker]:
        """Upload a new sticker to a guild via a multipart form request.

        The sticker file's MIME type is sniffed from its first bytes; the
        remaining metadata in ``payload`` is sent as individual form fields.
        """
        # Peek at the first bytes to detect the content type.
        initial_bytes = file.fp.read(16)
        try:
            mime_type = utils._get_mime_type_for_image(initial_bytes)
        except InvalidArgument:
            # Not a recognized image; a leading "{" suggests a JSON document.
            if initial_bytes.startswith(b"{"):
                mime_type = "application/json"
            else:
                mime_type = "application/octet-stream"
        finally:
            # Rewind so the upload sends the full file content.
            file.reset()
        form: List[Dict[str, Any]] = [
            {
                "name": "file",
                "value": file.fp,
                "filename": file.filename,
                "content_type": mime_type,
            }
        ]
        for k, v in payload.items():
            form.append(
                {
                    "name": k,
                    "value": v,
                }
            )
        return self.request(
            Route("POST", "/guilds/{guild_id}/stickers", guild_id=guild_id),
            form=form,
            files=[file],
            reason=reason,
        )
def modify_guild_sticker(
self,
guild_id: Snowflake,
sticker_id: Snowflake,
payload: sticker.EditGuildSticker,
reason: Optional[str],
) -> Response[sticker.GuildSticker]:
return self.request(
Route(
"PATCH",
"/guilds/{guild_id}/stickers/{sticker_id}",
guild_id=guild_id,
sticker_id=sticker_id,
),
json=payload,
reason=reason,
)
def delete_guild_sticker(self, guild_id: Snowflake, sticker_id: Snowflake, reason: Optional[str]) -> Response[None]:
return self.request(
Route(
"DELETE",
"/guilds/{guild_id}/stickers/{sticker_id}",
guild_id=guild_id,
sticker_id=sticker_id,
),
reason=reason,
)
def get_all_custom_emojis(self, guild_id: Snowflake) -> Response[List[emoji.Emoji]]:
return self.request(Route("GET", "/guilds/{guild_id}/emojis", guild_id=guild_id))
def get_custom_emoji(self, guild_id: Snowflake, emoji_id: Snowflake) -> Response[emoji.Emoji]:
return self.request(
Route(
"GET",
"/guilds/{guild_id}/emojis/{emoji_id}",
guild_id=guild_id,
emoji_id=emoji_id,
)
)
def create_custom_emoji(
self,
guild_id: Snowflake,
name: str,
image: bytes,
*,
roles: Optional[SnowflakeList] = None,
reason: Optional[str] = None,
) -> Response[emoji.Emoji]:
payload = {
"name": name,
"image": image,
"roles": roles or [],
}
r = Route("POST", "/guilds/{guild_id}/emojis", guild_id=guild_id)
return self.request(r, json=payload, reason=reason)
    def delete_custom_emoji(
        self,
        guild_id: Snowflake,
        emoji_id: Snowflake,
        *,
        reason: Optional[str] = None,
    ) -> Response[None]:
        """Delete a custom emoji; ``reason`` goes to the audit log."""
        r = Route(
            "DELETE",
            "/guilds/{guild_id}/emojis/{emoji_id}",
            guild_id=guild_id,
            emoji_id=emoji_id,
        )
        return self.request(r, reason=reason)
    def edit_custom_emoji(
        self,
        guild_id: Snowflake,
        emoji_id: Snowflake,
        *,
        payload: Dict[str, Any],
        reason: Optional[str] = None,
    ) -> Response[emoji.Emoji]:
        """Edit a custom emoji; ``payload`` is passed through unfiltered."""
        r = Route(
            "PATCH",
            "/guilds/{guild_id}/emojis/{emoji_id}",
            guild_id=guild_id,
            emoji_id=emoji_id,
        )
        return self.request(r, json=payload, reason=reason)
    def get_all_integrations(self, guild_id: Snowflake) -> Response[List[integration.Integration]]:
        """List a guild's integrations."""
        r = Route("GET", "/guilds/{guild_id}/integrations", guild_id=guild_id)
        return self.request(r)
    def create_integration(self, guild_id: Snowflake, type: integration.IntegrationType, id: int) -> Response[None]:
        """Attach an integration of the given type/id to a guild."""
        payload = {
            "type": type,
            "id": id,
        }
        r = Route("POST", "/guilds/{guild_id}/integrations", guild_id=guild_id)
        return self.request(r, json=payload)
    def edit_integration(self, guild_id: Snowflake, integration_id: Snowflake, **payload: Any) -> Response[None]:
        """Edit an integration; keyword arguments form the JSON body verbatim."""
        r = Route(
            "PATCH",
            "/guilds/{guild_id}/integrations/{integration_id}",
            guild_id=guild_id,
            integration_id=integration_id,
        )
        return self.request(r, json=payload)
    def sync_integration(self, guild_id: Snowflake, integration_id: Snowflake) -> Response[None]:
        """Trigger a sync of the given integration."""
        r = Route(
            "POST",
            "/guilds/{guild_id}/integrations/{integration_id}/sync",
            guild_id=guild_id,
            integration_id=integration_id,
        )
        return self.request(r)
    def delete_integration(
        self,
        guild_id: Snowflake,
        integration_id: Snowflake,
        *,
        reason: Optional[str] = None,
    ) -> Response[None]:
        """Detach an integration from a guild; ``reason`` goes to the audit log."""
        r = Route(
            "DELETE",
            "/guilds/{guild_id}/integrations/{integration_id}",
            guild_id=guild_id,
            integration_id=integration_id,
        )
        return self.request(r, reason=reason)
def get_audit_logs(
self,
guild_id: Snowflake,
limit: int = 100,
before: Optional[Snowflake] = None,
after: Optional[Snowflake] = None,
user_id: Optional[Snowflake] = None,
action_type: Optional[AuditLogAction] = None,
) -> Response[audit_log.AuditLog]:
params: Dict[str, Any] = {"limit": limit}
if before:
params["before"] = before
if after:
params["after"] = after
if user_id:
params["user_id"] = user_id
if action_type:
params["action_type"] = action_type
r = Route("GET", "/guilds/{guild_id}/audit-logs", guild_id=guild_id)
return self.request(r, params=params)
    def get_widget(self, guild_id: Snowflake) -> Response[widget.Widget]:
        """Fetch a guild's public widget JSON."""
        return self.request(Route("GET", "/guilds/{guild_id}/widget.json", guild_id=guild_id))
    def edit_widget(self, guild_id: Snowflake, payload) -> Response[widget.WidgetSettings]:
        """Update a guild's widget settings; ``payload`` is sent unfiltered."""
        return self.request(Route("PATCH", "/guilds/{guild_id}/widget", guild_id=guild_id), json=payload)
# Invite management
def create_invite(
self,
channel_id: Snowflake,
*,
reason: Optional[str] = None,
max_age: int = 0,
max_uses: int = 0,
temporary: bool = False,
unique: bool = True,
target_type: Optional[invite.InviteTargetType] = None,
target_user_id: Optional[Snowflake] = None,
target_application_id: Optional[Snowflake] = None,
) -> Response[invite.Invite]:
r = Route("POST", "/channels/{channel_id}/invites", channel_id=channel_id)
payload = {
"max_age": max_age,
"max_uses": max_uses,
"temporary": temporary,
"unique": unique,
}
if target_type:
payload["target_type"] = target_type
if target_user_id:
payload["target_user_id"] = target_user_id
if target_application_id:
payload["target_application_id"] = str(target_application_id)
return self.request(r, reason=reason, json=payload)
    def get_invite(
        self,
        invite_id: str,
        *,
        with_counts: bool = True,
        with_expiration: bool = True,
        guild_scheduled_event_id: Optional[int] = None,
    ) -> Response[invite.Invite]:
        """Resolve an invite code; booleans are sent as 0/1 query flags."""
        params = {
            "with_counts": int(with_counts),
            "with_expiration": int(with_expiration),
        }
        if guild_scheduled_event_id is not None:
            params["guild_scheduled_event_id"] = int(guild_scheduled_event_id)
        return self.request(Route("GET", "/invites/{invite_id}", invite_id=invite_id), params=params)
    def invites_from(self, guild_id: Snowflake) -> Response[List[invite.Invite]]:
        """List all invites of a guild."""
        return self.request(Route("GET", "/guilds/{guild_id}/invites", guild_id=guild_id))
    def invites_from_channel(self, channel_id: Snowflake) -> Response[List[invite.Invite]]:
        """List all invites of a single channel."""
        return self.request(Route("GET", "/channels/{channel_id}/invites", channel_id=channel_id))
    def delete_invite(self, invite_id: str, *, reason: Optional[str] = None) -> Response[None]:
        """Revoke an invite; ``reason`` goes to the audit log."""
        return self.request(Route("DELETE", "/invites/{invite_id}", invite_id=invite_id), reason=reason)
# Role management
    def get_roles(self, guild_id: Snowflake) -> Response[List[role.Role]]:
        """List all roles of a guild."""
        return self.request(Route("GET", "/guilds/{guild_id}/roles", guild_id=guild_id))
    def edit_role(
        self,
        guild_id: Snowflake,
        role_id: Snowflake,
        *,
        reason: Optional[str] = None,
        **fields: Any,
    ) -> Response[role.Role]:
        """Edit a role, forwarding only the API-recognised fields."""
        r = Route(
            "PATCH",
            "/guilds/{guild_id}/roles/{role_id}",
            guild_id=guild_id,
            role_id=role_id,
        )
        # Whitelist of editable role fields; anything else is silently dropped.
        valid_keys = (
            "name",
            "permissions",
            "color",
            "hoist",
            "mentionable",
            "icon",
            "unicode_emoji",
        )
        payload = {k: v for k, v in fields.items() if k in valid_keys}
        return self.request(r, json=payload, reason=reason)
    def delete_role(self, guild_id: Snowflake, role_id: Snowflake, *, reason: Optional[str] = None) -> Response[None]:
        """Delete a role; ``reason`` goes to the audit log."""
        r = Route(
            "DELETE",
            "/guilds/{guild_id}/roles/{role_id}",
            guild_id=guild_id,
            role_id=role_id,
        )
        return self.request(r, reason=reason)
    def replace_roles(
        self,
        user_id: Snowflake,
        guild_id: Snowflake,
        role_ids: List[int],
        *,
        reason: Optional[str] = None,
    ) -> Response[member.MemberWithUser]:
        """Replace a member's entire role list via the member-edit endpoint."""
        return self.edit_member(guild_id=guild_id, user_id=user_id, roles=role_ids, reason=reason)
    def create_role(self, guild_id: Snowflake, *, reason: Optional[str] = None, **fields: Any) -> Response[role.Role]:
        """Create a role; keyword arguments form the JSON body verbatim."""
        r = Route("POST", "/guilds/{guild_id}/roles", guild_id=guild_id)
        return self.request(r, json=fields, reason=reason)
    def move_role_position(
        self,
        guild_id: Snowflake,
        positions: List[guild.RolePositionUpdate],
        *,
        reason: Optional[str] = None,
    ) -> Response[List[role.Role]]:
        """Bulk-update role positions in a guild."""
        r = Route("PATCH", "/guilds/{guild_id}/roles", guild_id=guild_id)
        return self.request(r, json=positions, reason=reason)
    def add_role(
        self,
        guild_id: Snowflake,
        user_id: Snowflake,
        role_id: Snowflake,
        *,
        reason: Optional[str] = None,
    ) -> Response[None]:
        """Grant a single role to a member."""
        r = Route(
            "PUT",
            "/guilds/{guild_id}/members/{user_id}/roles/{role_id}",
            guild_id=guild_id,
            user_id=user_id,
            role_id=role_id,
        )
        return self.request(r, reason=reason)
    def remove_role(
        self,
        guild_id: Snowflake,
        user_id: Snowflake,
        role_id: Snowflake,
        *,
        reason: Optional[str] = None,
    ) -> Response[None]:
        """Revoke a single role from a member."""
        r = Route(
            "DELETE",
            "/guilds/{guild_id}/members/{user_id}/roles/{role_id}",
            guild_id=guild_id,
            user_id=user_id,
            role_id=role_id,
        )
        return self.request(r, reason=reason)
    def edit_channel_permissions(
        self,
        channel_id: Snowflake,
        target: Snowflake,
        allow: str,
        deny: str,
        type: channel.OverwriteType,
        *,
        reason: Optional[str] = None,
    ) -> Response[None]:
        """Create/replace a permission overwrite for a role or member on a channel.

        ``allow``/``deny`` are stringified permission bitfields; ``type``
        distinguishes role vs member targets.
        """
        payload = {"id": target, "allow": allow, "deny": deny, "type": type}
        r = Route(
            "PUT",
            "/channels/{channel_id}/permissions/{target}",
            channel_id=channel_id,
            target=target,
        )
        return self.request(r, json=payload, reason=reason)
    def delete_channel_permissions(
        self,
        channel_id: Snowflake,
        target: channel.OverwriteType,
        *,
        reason: Optional[str] = None,
    ) -> Response[None]:
        """Remove a permission overwrite from a channel.

        NOTE(review): ``target`` is annotated as OverwriteType but is used as
        the overwrite's snowflake id in the URL — the annotation looks wrong;
        confirm against callers before changing it.
        """
        r = Route(
            "DELETE",
            "/channels/{channel_id}/permissions/{target}",
            channel_id=channel_id,
            target=target,
        )
        return self.request(r, reason=reason)
# Welcome Screen
    def get_welcome_screen(self, guild_id: Snowflake) -> Response[welcome_screen.WelcomeScreen]:
        """Fetch a guild's welcome screen configuration."""
        return self.request(Route("GET", "/guilds/{guild_id}/welcome-screen", guild_id=guild_id))
    def edit_welcome_screen(
        self, guild_id: Snowflake, payload: Any, *, reason: Optional[str] = None
    ) -> Response[welcome_screen.WelcomeScreen]:
        """Edit the welcome screen, forwarding only the API-recognised fields."""
        keys = (
            "description",
            "welcome_channels",
            "enabled",
        )
        # Drop any unsupported keys before sending the PATCH body.
        payload = {key: val for key, val in payload.items() if key in keys}
        return self.request(
            Route("PATCH", "/guilds/{guild_id}/welcome-screen", guild_id=guild_id),
            json=payload,
            reason=reason,
        )
# Voice management
    def move_member(
        self,
        user_id: Snowflake,
        guild_id: Snowflake,
        channel_id: Snowflake,
        *,
        reason: Optional[str] = None,
    ) -> Response[member.MemberWithUser]:
        """Move a member to another voice channel via the member-edit endpoint."""
        return self.edit_member(guild_id=guild_id, user_id=user_id, channel_id=channel_id, reason=reason)
# Stage instance management
    def get_stage_instance(self, channel_id: Snowflake) -> Response[channel.StageInstance]:
        """Fetch the stage instance running in a stage channel."""
        return self.request(Route("GET", "/stage-instances/{channel_id}", channel_id=channel_id))
    def create_stage_instance(self, *, reason: Optional[str], **payload: Any) -> Response[channel.StageInstance]:
        """Start a stage instance, forwarding only the API-recognised fields."""
        valid_keys = (
            "channel_id",
            "topic",
            "privacy_level",
        )
        payload = {k: v for k, v in payload.items() if k in valid_keys}
        return self.request(Route("POST", "/stage-instances"), json=payload, reason=reason)
    def edit_stage_instance(
        self, channel_id: Snowflake, *, reason: Optional[str] = None, **payload: Any
    ) -> Response[None]:
        """Edit a running stage instance (topic / privacy level only)."""
        valid_keys = (
            "topic",
            "privacy_level",
        )
        payload = {k: v for k, v in payload.items() if k in valid_keys}
        return self.request(
            Route("PATCH", "/stage-instances/{channel_id}", channel_id=channel_id),
            json=payload,
            reason=reason,
        )
    def delete_stage_instance(self, channel_id: Snowflake, *, reason: Optional[str] = None) -> Response[None]:
        """End a stage instance; ``reason`` goes to the audit log."""
        return self.request(
            Route("DELETE", "/stage-instances/{channel_id}", channel_id=channel_id),
            reason=reason,
        )
# Guild scheduled events management
    def get_scheduled_events(
        self, guild_id: Snowflake, with_user_count: bool = True
    ) -> Response[List[scheduled_events.ScheduledEvent]]:
        """List a guild's scheduled events; the flag is sent as a 0/1 query param."""
        params = {
            "with_user_count": int(with_user_count),
        }
        return self.request(
            Route("GET", "/guilds/{guild_id}/scheduled-events", guild_id=guild_id),
            params=params,
        )
    def get_scheduled_event(
        self, guild_id: Snowflake, event_id: Snowflake, with_user_count: bool = True
    ) -> Response[scheduled_events.ScheduledEvent]:
        """Fetch a single scheduled event; the flag is sent as a 0/1 query param."""
        params = {
            "with_user_count": int(with_user_count),
        }
        return self.request(
            Route(
                "GET",
                "/guilds/{guild_id}/scheduled-events/{event_id}",
                guild_id=guild_id,
                event_id=event_id,
            ),
            params=params,
        )
    def create_scheduled_event(
        self, guild_id: Snowflake, reason: Optional[str] = None, **payload: Any
    ) -> Response[scheduled_events.ScheduledEvent]:
        """Create a scheduled event, forwarding only the API-recognised fields."""
        valid_keys = (
            "channel_id",
            "name",
            "privacy_level",
            "scheduled_start_time",
            "scheduled_end_time",
            "description",
            "entity_type",
            "entity_metadata",
        )
        payload = {k: v for k, v in payload.items() if k in valid_keys}
        return self.request(
            Route("POST", "/guilds/{guild_id}/scheduled-events", guild_id=guild_id),
            json=payload,
            reason=reason,
        )
    def delete_scheduled_event(self, guild_id: Snowflake, event_id: Snowflake) -> Response[None]:
        """Delete a scheduled event."""
        return self.request(
            Route(
                "DELETE",
                "/guilds/{guild_id}/scheduled-events/{event_id}",
                guild_id=guild_id,
                event_id=event_id,
            )
        )
    def edit_scheduled_event(
        self,
        guild_id: Snowflake,
        event_id: Snowflake,
        reason: Optional[str] = None,
        **payload: Any,
    ) -> Response[scheduled_events.ScheduledEvent]:
        """Edit a scheduled event, forwarding only the API-recognised fields
        (a superset of the create fields: also ``status`` and ``image``)."""
        valid_keys = (
            "channel_id",
            "name",
            "privacy_level",
            "scheduled_start_time",
            "scheduled_end_time",
            "description",
            "entity_type",
            "status",
            "entity_metadata",
            "image",
        )
        payload = {k: v for k, v in payload.items() if k in valid_keys}
        return self.request(
            Route(
                "PATCH",
                "/guilds/{guild_id}/scheduled-events/{event_id}",
                guild_id=guild_id,
                event_id=event_id,
            ),
            json=payload,
            reason=reason,
        )
    def get_scheduled_event_users(
        self,
        guild_id: Snowflake,
        event_id: Snowflake,
        limit: int,
        with_member: bool = False,
        before: Optional[Snowflake] = None,
        after: Optional[Snowflake] = None,
    ) -> Response[List[scheduled_events.ScheduledEventSubscriber]]:
        """Page through the subscribers of a scheduled event.

        ``before``/``after`` are snowflake pagination cursors (the annotations
        were tightened to Optional to match their ``None`` defaults).
        """
        params = {
            "limit": int(limit),
            "with_member": int(with_member),
        }
        if before is not None:
            params["before"] = int(before)
        if after is not None:
            params["after"] = int(after)
        return self.request(
            Route(
                "GET",
                "/guilds/{guild_id}/scheduled-events/{event_id}/users",
                guild_id=guild_id,
                event_id=event_id,
            ),
            params=params,
        )
# Application commands (global)
    def get_global_commands(self, application_id: Snowflake) -> Response[List[interactions.ApplicationCommand]]:
        """List an application's global slash commands."""
        return self.request(
            Route(
                "GET",
                "/applications/{application_id}/commands",
                application_id=application_id,
            )
        )
    def get_global_command(
        self, application_id: Snowflake, command_id: Snowflake
    ) -> Response[interactions.ApplicationCommand]:
        """Fetch a single global command."""
        r = Route(
            "GET",
            "/applications/{application_id}/commands/{command_id}",
            application_id=application_id,
            command_id=command_id,
        )
        return self.request(r)
    def upsert_global_command(self, application_id: Snowflake, payload) -> Response[interactions.ApplicationCommand]:
        """Create-or-update a single global command (POST is an upsert here)."""
        r = Route(
            "POST",
            "/applications/{application_id}/commands",
            application_id=application_id,
        )
        return self.request(r, json=payload)
    def edit_global_command(
        self,
        application_id: Snowflake,
        command_id: Snowflake,
        payload: interactions.EditApplicationCommand,
    ) -> Response[interactions.ApplicationCommand]:
        """Edit a global command, forwarding only name/description/options."""
        valid_keys = (
            "name",
            "description",
            "options",
        )
        payload = {k: v for k, v in payload.items() if k in valid_keys}  # type: ignore
        r = Route(
            "PATCH",
            "/applications/{application_id}/commands/{command_id}",
            application_id=application_id,
            command_id=command_id,
        )
        return self.request(r, json=payload)
    def delete_global_command(self, application_id: Snowflake, command_id: Snowflake) -> Response[None]:
        """Delete a global command."""
        r = Route(
            "DELETE",
            "/applications/{application_id}/commands/{command_id}",
            application_id=application_id,
            command_id=command_id,
        )
        return self.request(r)
    def bulk_upsert_global_commands(
        self, application_id: Snowflake, payload
    ) -> Response[List[interactions.ApplicationCommand]]:
        """Replace the full set of global commands in one PUT."""
        r = Route(
            "PUT",
            "/applications/{application_id}/commands",
            application_id=application_id,
        )
        return self.request(r, json=payload)
# Application commands (guild)
    def get_guild_commands(
        self, application_id: Snowflake, guild_id: Snowflake
    ) -> Response[List[interactions.ApplicationCommand]]:
        """List an application's commands scoped to one guild."""
        r = Route(
            "GET",
            "/applications/{application_id}/guilds/{guild_id}/commands",
            application_id=application_id,
            guild_id=guild_id,
        )
        return self.request(r)
    def get_guild_command(
        self,
        application_id: Snowflake,
        guild_id: Snowflake,
        command_id: Snowflake,
    ) -> Response[interactions.ApplicationCommand]:
        """Fetch a single guild-scoped command."""
        r = Route(
            "GET",
            "/applications/{application_id}/guilds/{guild_id}/commands/{command_id}",
            application_id=application_id,
            guild_id=guild_id,
            command_id=command_id,
        )
        return self.request(r)
    def upsert_guild_command(
        self,
        application_id: Snowflake,
        guild_id: Snowflake,
        payload: interactions.EditApplicationCommand,
    ) -> Response[interactions.ApplicationCommand]:
        """Create-or-update a single guild-scoped command."""
        r = Route(
            "POST",
            "/applications/{application_id}/guilds/{guild_id}/commands",
            application_id=application_id,
            guild_id=guild_id,
        )
        return self.request(r, json=payload)
    def edit_guild_command(
        self,
        application_id: Snowflake,
        guild_id: Snowflake,
        command_id: Snowflake,
        payload: interactions.EditApplicationCommand,
    ) -> Response[interactions.ApplicationCommand]:
        """Edit a guild-scoped command, forwarding only name/description/options."""
        valid_keys = (
            "name",
            "description",
            "options",
        )
        payload = {k: v for k, v in payload.items() if k in valid_keys}  # type: ignore
        r = Route(
            "PATCH",
            "/applications/{application_id}/guilds/{guild_id}/commands/{command_id}",
            application_id=application_id,
            guild_id=guild_id,
            command_id=command_id,
        )
        return self.request(r, json=payload)
    def delete_guild_command(
        self,
        application_id: Snowflake,
        guild_id: Snowflake,
        command_id: Snowflake,
    ) -> Response[None]:
        """Delete a guild-scoped command."""
        r = Route(
            "DELETE",
            "/applications/{application_id}/guilds/{guild_id}/commands/{command_id}",
            application_id=application_id,
            guild_id=guild_id,
            command_id=command_id,
        )
        return self.request(r)
    def bulk_upsert_guild_commands(
        self,
        application_id: Snowflake,
        guild_id: Snowflake,
        payload: List[interactions.EditApplicationCommand],
    ) -> Response[List[interactions.ApplicationCommand]]:
        """Replace the full set of guild-scoped commands in one PUT."""
        r = Route(
            "PUT",
            "/applications/{application_id}/guilds/{guild_id}/commands",
            application_id=application_id,
            guild_id=guild_id,
        )
        return self.request(r, json=payload)
    def bulk_upsert_command_permissions(
        self,
        application_id: Snowflake,
        guild_id: Snowflake,
        payload: List[interactions.EditApplicationCommand],
    ) -> Response[List[interactions.ApplicationCommand]]:
        """Bulk-set command permissions for a guild.

        NOTE(review): the payload/return annotations reuse the command types,
        but this endpoint deals in permission objects — likely a copy-paste;
        confirm against callers before correcting.
        """
        r = Route(
            "PUT",
            "/applications/{application_id}/guilds/{guild_id}/commands/permissions",
            application_id=application_id,
            guild_id=guild_id,
        )
        return self.request(r, json=payload)
# Interaction responses
    def _edit_webhook_helper(
        self,
        route: Route,
        file: Optional[File] = None,
        content: Optional[str] = None,
        embeds: Optional[List[embed.Embed]] = None,
        allowed_mentions: Optional[message.AllowedMentions] = None,
    ):
        """Shared body builder for webhook-message edits (original + followups).

        Builds a multipart form with the JSON payload as the first part and an
        optional single file part. Note: falsy ``content``/``embeds`` (e.g. "")
        are omitted from the payload rather than sent as clears.
        """
        payload: Dict[str, Any] = {}
        if content:
            payload["content"] = content
        if embeds:
            payload["embeds"] = embeds
        if allowed_mentions:
            payload["allowed_mentions"] = allowed_mentions
        form: List[Dict[str, Any]] = [
            {
                "name": "payload_json",
                "value": utils._to_json(payload),
            }
        ]
        if file:
            form.append(
                {
                    "name": "file",
                    "value": file.fp,
                    "filename": file.filename,
                    "content_type": "application/octet-stream",
                }
            )
        return self.request(route, form=form, files=[file] if file else None)
    def create_interaction_response(
        self,
        interaction_id: Snowflake,
        token: str,
        *,
        type: InteractionResponseType,
        data: Optional[interactions.InteractionApplicationCommandCallbackData] = None,
    ) -> Response[None]:
        """Send the initial callback for an interaction (ack/message/modal)."""
        r = Route(
            "POST",
            "/interactions/{interaction_id}/{interaction_token}/callback",
            interaction_id=interaction_id,
            interaction_token=token,
        )
        payload: Dict[str, Any] = {
            "type": type,
        }
        if data is not None:
            payload["data"] = data
        return self.request(r, json=payload)
    def get_original_interaction_response(
        self,
        application_id: Snowflake,
        token: str,
    ) -> Response[message.Message]:
        """Fetch the message created by the initial interaction response."""
        r = Route(
            "GET",
            "/webhooks/{application_id}/{interaction_token}/messages/@original",
            application_id=application_id,
            interaction_token=token,
        )
        return self.request(r)
    def edit_original_interaction_response(
        self,
        application_id: Snowflake,
        token: str,
        file: Optional[File] = None,
        content: Optional[str] = None,
        embeds: Optional[List[embed.Embed]] = None,
        allowed_mentions: Optional[message.AllowedMentions] = None,
    ) -> Response[message.Message]:
        """Edit the original interaction response via the webhook helper."""
        r = Route(
            "PATCH",
            "/webhooks/{application_id}/{interaction_token}/messages/@original",
            application_id=application_id,
            interaction_token=token,
        )
        return self._edit_webhook_helper(
            r,
            file=file,
            content=content,
            embeds=embeds,
            allowed_mentions=allowed_mentions,
        )
    def delete_original_interaction_response(self, application_id: Snowflake, token: str) -> Response[None]:
        """Delete the original interaction response."""
        r = Route(
            "DELETE",
            "/webhooks/{application_id}/{interaction_token}/messages/@original",
            application_id=application_id,
            interaction_token=token,
        )
        return self.request(r)
def create_followup_message(
self,
application_id: Snowflake,
token: str,
files: List[File] = [],
content: Optional[str] = None,
tts: bool = False,
embeds: Optional[List[embed.Embed]] = None,
allowed_mentions: Optional[message.AllowedMentions] = None,
) -> Response[message.Message]:
r = Route(
"POST",
"/webhooks/{application_id}/{interaction_token}",
application_id=application_id,
interaction_token=token,
)
return self.send_multipart_helper(
r,
content=content,
files=files,
tts=tts,
embeds=embeds,
allowed_mentions=allowed_mentions,
)
    def edit_followup_message(
        self,
        application_id: Snowflake,
        token: str,
        message_id: Snowflake,
        file: Optional[File] = None,
        content: Optional[str] = None,
        embeds: Optional[List[embed.Embed]] = None,
        allowed_mentions: Optional[message.AllowedMentions] = None,
    ) -> Response[message.Message]:
        """Edit a previously-sent followup message via the webhook helper."""
        r = Route(
            "PATCH",
            "/webhooks/{application_id}/{interaction_token}/messages/{message_id}",
            application_id=application_id,
            interaction_token=token,
            message_id=message_id,
        )
        return self._edit_webhook_helper(
            r,
            file=file,
            content=content,
            embeds=embeds,
            allowed_mentions=allowed_mentions,
        )
    def delete_followup_message(self, application_id: Snowflake, token: str, message_id: Snowflake) -> Response[None]:
        """Delete a previously-sent followup message."""
        r = Route(
            "DELETE",
            "/webhooks/{application_id}/{interaction_token}/messages/{message_id}",
            application_id=application_id,
            interaction_token=token,
            message_id=message_id,
        )
        return self.request(r)
    def get_guild_application_command_permissions(
        self,
        application_id: Snowflake,
        guild_id: Snowflake,
    ) -> Response[List[interactions.GuildApplicationCommandPermissions]]:
        """Fetch permission overrides for every command in a guild."""
        r = Route(
            "GET",
            "/applications/{application_id}/guilds/{guild_id}/commands/permissions",
            application_id=application_id,
            guild_id=guild_id,
        )
        return self.request(r)
    def get_application_command_permissions(
        self,
        application_id: Snowflake,
        guild_id: Snowflake,
        command_id: Snowflake,
    ) -> Response[interactions.GuildApplicationCommandPermissions]:
        """Fetch permission overrides for a single command in a guild."""
        r = Route(
            "GET",
            "/applications/{application_id}/guilds/{guild_id}/commands/{command_id}/permissions",
            application_id=application_id,
            guild_id=guild_id,
            command_id=command_id,
        )
        return self.request(r)
    def edit_application_command_permissions(
        self,
        application_id: Snowflake,
        guild_id: Snowflake,
        command_id: Snowflake,
        payload: interactions.BaseGuildApplicationCommandPermissions,
    ) -> Response[None]:
        """Replace permission overrides for a single command in a guild."""
        r = Route(
            "PUT",
            "/applications/{application_id}/guilds/{guild_id}/commands/{command_id}/permissions",
            application_id=application_id,
            guild_id=guild_id,
            command_id=command_id,
        )
        return self.request(r, json=payload)
    def bulk_edit_guild_application_command_permissions(
        self,
        application_id: Snowflake,
        guild_id: Snowflake,
        payload: List[interactions.PartialGuildApplicationCommandPermissions],
    ) -> Response[None]:
        """Replace permission overrides for many commands in one PUT."""
        r = Route(
            "PUT",
            "/applications/{application_id}/guilds/{guild_id}/commands/permissions",
            application_id=application_id,
            guild_id=guild_id,
        )
        return self.request(r, json=payload)
# Misc
    def application_info(self) -> Response[appinfo.AppInfo]:
        """Fetch the current bot application's info from the OAuth2 endpoint."""
        return self.request(Route("GET", "/oauth2/applications/@me"))
    async def get_gateway(self, *, encoding: str = "json", zlib: bool = True) -> str:
        """Return the websocket gateway URL with encoding/version/compression
        query parameters appended; raises GatewayNotFound on HTTP failure."""
        try:
            data = await self.request(Route("GET", "/gateway"))
        except HTTPException as exc:
            raise GatewayNotFound() from exc
        if zlib:
            value = "{0}?encoding={1}&v={2}&compress=zlib-stream"
        else:
            value = "{0}?encoding={1}&v={2}"
        return value.format(data["url"], encoding, API_VERSION)
    async def get_bot_gateway(self, *, encoding: str = "json", zlib: bool = True) -> Tuple[int, str]:
        """Like get_gateway, but uses the bot endpoint and also returns the
        recommended shard count as the first tuple element."""
        try:
            data = await self.request(Route("GET", "/gateway/bot"))
        except HTTPException as exc:
            raise GatewayNotFound() from exc
        if zlib:
            value = "{0}?encoding={1}&v={2}&compress=zlib-stream"
        else:
            value = "{0}?encoding={1}&v={2}"
        return data["shards"], value.format(data["url"], encoding, API_VERSION)
    def get_user(self, user_id: Snowflake) -> Response[user.User]:
        """Fetch a user object by id."""
        return self.request(Route("GET", "/users/{user_id}", user_id=user_id))
| true | true |
f71dc9f71695063532e9550c8b6ba76b48535559 | 698 | py | Python | alshamelah_api/apps/categories/migrations/0003_auto_20200622_1614.py | devna-dev/durar-backend | 36ea29bafd4cb95098e4057eb71df211dc923008 | [
"MIT"
] | null | null | null | alshamelah_api/apps/categories/migrations/0003_auto_20200622_1614.py | devna-dev/durar-backend | 36ea29bafd4cb95098e4057eb71df211dc923008 | [
"MIT"
] | null | null | null | alshamelah_api/apps/categories/migrations/0003_auto_20200622_1614.py | devna-dev/durar-backend | 36ea29bafd4cb95098e4057eb71df211dc923008 | [
"MIT"
] | null | null | null | # Generated by Django 3.0.7 on 2020-06-22 16:14
import apps.categories.models
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated Django migration: adds an optional ``image`` field to
    both ``Category`` and ``SubCategory``, with upload paths supplied by each
    model's ``get_path`` helper."""
    dependencies = [
        ('categories', '0002_auto_20200616_1853'),
    ]
    operations = [
        migrations.AddField(
            model_name='category',
            name='image',
            field=models.ImageField(blank=True, null=True, upload_to=apps.categories.models.Category.get_path),
        ),
        migrations.AddField(
            model_name='subcategory',
            name='image',
            field=models.ImageField(blank=True, null=True, upload_to=apps.categories.models.SubCategory.get_path),
        ),
    ]
| 27.92 | 114 | 0.636103 |
import apps.categories.models
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('categories', '0002_auto_20200616_1853'),
]
operations = [
migrations.AddField(
model_name='category',
name='image',
field=models.ImageField(blank=True, null=True, upload_to=apps.categories.models.Category.get_path),
),
migrations.AddField(
model_name='subcategory',
name='image',
field=models.ImageField(blank=True, null=True, upload_to=apps.categories.models.SubCategory.get_path),
),
]
| true | true |
f71dca35de7fa623ae7ad9dcf06fb60056718130 | 181 | py | Python | Baekjoon/Python/1110.py | KHJcode/Algorithm-study | fa08d3c752fcb3557fd45fb394157926afc0de4a | [
"MIT"
] | 2 | 2020-05-23T01:55:38.000Z | 2020-07-07T15:59:00.000Z | Baekjoon/Python/1110.py | KHJcode/Algorithm-study | fa08d3c752fcb3557fd45fb394157926afc0de4a | [
"MIT"
] | null | null | null | Baekjoon/Python/1110.py | KHJcode/Algorithm-study | fa08d3c752fcb3557fd45fb394157926afc0de4a | [
"MIT"
] | null | null | null | n = int(input())
# Baekjoon 1110: repeatedly form the next number from the last digit of n and
# the last digit of its digit sum, and count how many steps the cycle takes to
# return to the original n (n is read from stdin above).
count = 1
current = n % 10 * 10 + (n // 10 + n % 10) % 10
while current != n:
    count += 1
    current = current % 10 * 10 + (current // 10 + current % 10) % 10
print(count)
count = 1
_n = int(str(n % 10) + str((n // 10 + n % 10) % 10))
while _n != n:
count += 1
_n = int(str(_n % 10) + str((_n // 10 + _n % 10) % 10))
print(count)
| true | true |
f71dca38621e636c85ed737b16d0993b1b7ba0a7 | 1,592 | py | Python | session_server/servers/application.py | w359405949/browserquest_py | 20c2569431db9dca74a986efa9bc0ce69ed5a8fc | [
"WTFPL"
] | 1 | 2019-03-27T07:46:15.000Z | 2019-03-27T07:46:15.000Z | session_server/servers/application.py | w359405949/browserquest_py | 20c2569431db9dca74a986efa9bc0ce69ed5a8fc | [
"WTFPL"
] | null | null | null | session_server/servers/application.py | w359405949/browserquest_py | 20c2569431db9dca74a986efa9bc0ce69ed5a8fc | [
"WTFPL"
] | null | null | null | import json
from geventwebsocket import WebSocketApplication
from controllers.controller import Controller
from services.browserquest import BrowserQuestImpl
class BrowserQuestApplication(WebSocketApplication):
    """Gevent websocket endpoint bridging BrowserQuest clients to the protobuf
    service implementation. NOTE: this module is Python 2 (``print`` statements,
    ``xrange``)."""
    # Single shared service instance for all connections.
    browserquest = BrowserQuestImpl()
    def __init__(self, *args, **kwargs):
        super(BrowserQuestApplication, self).__init__(*args, **kwargs)
        self.connection = None
        self.environ = {}
    def on_open(self):
        # Handshake: the client waits for "go" before sending its first frame.
        self.ws.send("go")
        self.connection = self.ws
    def on_message(self, message):
        """Decode a JSON array of ``[method_index, arg1, arg2, ...]`` into a
        protobuf request and dispatch it to the service.

        Positional args map onto request fields by field number (1-based).
        A repeated field consumes all remaining args — the comment below
        suggests only one message type ("WHO") uses this path; confirm.
        """
        if message is None:
            return
        print "data:", message
        request_data = json.loads(message)
        method_descriptor = self.browserquest.DESCRIPTOR.methods[request_data[0]]
        request_class = self.browserquest.GetRequestClass(method_descriptor)
        request = request_class()
        for index in xrange(1, len(request_data)):
            field_descriptor = request_class.DESCRIPTOR.fields_by_number[index]
            if field_descriptor.label == 3: # repeated TODO: only WHO enter this
                field = getattr(request, field_descriptor.name)
                field.extend(request_data[index:])
                break
            else:
                setattr(request, field_descriptor.name, request_data[index])
        controller = Controller()
        controller.connection = self.connection
        controller.environ = self.environ
        self.browserquest.CallMethod(method_descriptor, controller, request, None)
    def on_close(self, reason):
        self.connection = None
        print reason
| 37.023256 | 82 | 0.670854 | import json
from geventwebsocket import WebSocketApplication
from controllers.controller import Controller
from services.browserquest import BrowserQuestImpl
class BrowserQuestApplication(WebSocketApplication):
browserquest = BrowserQuestImpl()
def __init__(self, *args, **kwargs):
super(BrowserQuestApplication, self).__init__(*args, **kwargs)
self.connection = None
self.environ = {}
def on_open(self):
self.ws.send("go")
self.connection = self.ws
def on_message(self, message):
if message is None:
return
print "data:", message
request_data = json.loads(message)
method_descriptor = self.browserquest.DESCRIPTOR.methods[request_data[0]]
request_class = self.browserquest.GetRequestClass(method_descriptor)
request = request_class()
for index in xrange(1, len(request_data)):
field_descriptor = request_class.DESCRIPTOR.fields_by_number[index]
if field_descriptor.label == 3:
field = getattr(request, field_descriptor.name)
field.extend(request_data[index:])
break
else:
setattr(request, field_descriptor.name, request_data[index])
controller = Controller()
controller.connection = self.connection
controller.environ = self.environ
self.browserquest.CallMethod(method_descriptor, controller, request, None)
def on_close(self, reason):
self.connection = None
print reason
| false | true |
f71dca3afc1566dc07ee04e455859bef0fa42f69 | 1,796 | py | Python | lwar_aws.py | mallarme/ArtWithDataCodes | bbda69f81385404fe838fdacb8730940b9318460 | [
"CC0-1.0"
] | 1 | 2021-06-24T22:18:27.000Z | 2021-06-24T22:18:27.000Z | lwar_aws.py | mallarme/ArtWithDataCodes | bbda69f81385404fe838fdacb8730940b9318460 | [
"CC0-1.0"
] | null | null | null | lwar_aws.py | mallarme/ArtWithDataCodes | bbda69f81385404fe838fdacb8730940b9318460 | [
"CC0-1.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# lwar_aws.py
#
# Copyright 2013 Leandro <Leandro@leandrowar>
# Fontes:
# http://aws.amazon.com/articles/Amazon-S3/3998
# http://boto.s3.amazonaws.com/s3_tut.html
#Imports
import boto.s3
from boto.s3.connection import S3Connection #para estabelecer a conexão
import sys
#from boto.s3.key import key #para armazenar dados
# Criando uma conexão com o serviço S3
# Establish a connection to the S3 service.
# NOTE(review): credentials are hard-coded placeholders — move them to
# environment variables or an AWS credentials file before use.
try:
    conn = S3Connection('aws access key','aws secret key') # (<aws access key>, <aws secret key>)
    print 'Conexao AWS estabelecida'
except:
    print 'Erro na conexão AWS'
# Create (or get) the target bucket. NOTE: this is Python 2 code and the
# bare except blocks swallow the real boto error — at minimum log it.
try:
    bucket = conn.create_bucket('mi4i.files')
    print
    print 'Bucket mi4i.files criado com sucesso'
except:
    print
    print 'Erro ao criar o bucket'
# Progress callback used by boto during uploads/downloads: prints one dot
# per chunk transferred.
def percent_cb(complete, total):
    sys.stdout.write('.')
    sys.stdout.flush()
# Create the storage key and upload the local file to S3.
try:
    key = bucket.new_key('feedsSecure') # creates the key object; nothing is stored yet
    key.set_contents_from_filename('C:/Users/Leandro/dump/fia/feedsSecure_15042014.metadata.bson',cb = percent_cb, num_cb = 10)
    # opens a handle to the local file and writes it into the key created above
    key.set_acl('public-read') # sets the access-control policy
    print
    print 'Arquivo transferido com sucesso'
except:
    print
    print 'Falha na transferencia do arquvio'
#~ # Fazendo o download dos dados
#~ try:
#~ key = conn.get_bucket('lwar.invest').get_key('certificado')
#~ key.get_contents_to_filename('C:\mongo_files\download\certificado.pdf',cb = percent_cb, num_cb = 10)
#~ print
#~ print 'Download concluido'
#~ print
#~ print
#~
#~ except:
#~ print
#~ print 'Falha no download'
| 27.630769 | 125 | 0.729399 |
import boto.s3
from boto.s3.connection import S3Connection
import sys
nection('aws access key','aws secret key')
print 'Conexao AWS estabelecida'
except:
print 'Erro na conexão AWS'
try:
bucket = conn.create_bucket('mi4i.files')
print
print 'Bucket mi4i.files criado com sucesso'
except:
print
print 'Erro ao criar o bucket'
def percent_cb(complete, total):
sys.stdout.write('.')
sys.stdout.flush()
try:
key = bucket.new_key('feedsSecure')
key.set_contents_from_filename('C:/Users/Leandro/dump/fia/feedsSecure_15042014.metadata.bson',cb = percent_cb, num_cb = 10)
key.set_acl('public-read')
print
print 'Arquivo transferido com sucesso'
except:
print
print 'Falha na transferencia do arquvio'
| false | true |
f71dccac86bbe48329c6774ca48a06fd949d57f9 | 800 | py | Python | T1/code/visualizar_reta.py | andersonmanhaes/ml_mestrado | d737d80e07d9392895e4455e49a33b8700080cf1 | [
"MIT"
] | null | null | null | T1/code/visualizar_reta.py | andersonmanhaes/ml_mestrado | d737d80e07d9392895e4455e49a33b8700080cf1 | [
"MIT"
] | null | null | null | T1/code/visualizar_reta.py | andersonmanhaes/ml_mestrado | d737d80e07d9392895e4455e49a33b8700080cf1 | [
"MIT"
] | null | null | null | import os
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
def plot(filepath, theta):
    """Plot the training data and the fitted line theta[0] + theta[1] * x.

    The figure is saved to ``target/plot1.2.png`` and then shown.

    Args:
        filepath: CSV path appended verbatim to os.getcwd(), so it is
            expected to start with a path separator.
        theta: Sequence with the two fitted parameters (intercept, slope).
    """
    path = os.getcwd() + filepath
    dataset = pd.read_csv(path, header=None)
    X = dataset.iloc[:, 0:-1].values  # feature column(s): city population
    y = dataset.iloc[:, -1:].values   # last column: branch profit
    t = np.arange(0, 25, 1)           # x range for the regression line
    plt.scatter(X, y, color='red', marker='x', label='Training Data')
    plt.plot(t, theta[0] + (theta[1]*t), color='blue', label='Linear Regression')
    plt.axis([4, 25, -5, 25])
    plt.title('Populacao da cidade x Lucro da filial')
    plt.xlabel('Populacao da cidade (10k)')
    plt.ylabel('Lucro (10k)')
    plt.legend()
    filename = 'target/plot1.2.png'
    os.makedirs(os.path.dirname(filename), exist_ok=True)
    # BUGFIX: save *before* show(). After the interactive window is closed,
    # the current figure may be empty, so savefig() after show() could
    # produce a blank PNG.
    plt.savefig(filename)
    plt.show()
| 27.586207 | 81 | 0.635 | import os
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
def plot(filepath, theta):
    # Plot the training data with the fitted regression line
    # theta[0] + theta[1] * x, then save the figure to target/plot1.2.png.
    # NOTE(review): filepath is concatenated to os.getcwd() verbatim, so it
    # is expected to start with a path separator -- confirm against callers.
    path = os.getcwd() + filepath
    dataset = pd.read_csv(path, header=None)
    X = dataset.iloc[:, 0:-1].values
    y = dataset.iloc[:, -1:].values
    # x range for drawing the regression line.
    t = np.arange(0, 25, 1)
    plt.scatter(X, y, color='red', marker='x', label='Training Data')
    plt.plot(t, theta[0] + (theta[1]*t), color='blue', label='Linear Regression')
    plt.axis([4, 25, -5, 25])
    plt.title('Populacao da cidade x Lucro da filial')
    plt.xlabel('Populacao da cidade (10k)')
    plt.ylabel('Lucro (10k)')
    plt.legend()
    plt.show()
    filename = 'target/plot1.2.png'
    # Create the output directory on first use.
    if not os.path.exists(os.path.dirname(filename)):
        os.makedirs(os.path.dirname(filename))
    # NOTE(review): savefig() after show() may write an empty image once the
    # interactive window has been closed -- consider saving before showing.
    plt.savefig(filename)
| true | true |
f71dce4d66492238ecef8f12ab4e49ea5128a5b0 | 42 | py | Python | app.py | abdala9512/gpt2-multip-app | fb827137dacfc065f8787592f699b056d50dac81 | [
"MIT"
] | null | null | null | app.py | abdala9512/gpt2-multip-app | fb827137dacfc065f8787592f699b056d50dac81 | [
"MIT"
] | null | null | null | app.py | abdala9512/gpt2-multip-app | fb827137dacfc065f8787592f699b056d50dac81 | [
"MIT"
] | null | null | null | import streamlit as st
st.title("GPT2 ") | 10.5 | 22 | 0.714286 | import streamlit as st
st.title("GPT2 ") | true | true |
f71dcf4492576871fadaaed3c2a40b7423af2970 | 2,393 | py | Python | txstatsd/stats/uniformsample.py | drawks/txstatsd | da674d7a86e0e5ec40eaa53fe81310ef19d1ed9e | [
"MIT"
] | null | null | null | txstatsd/stats/uniformsample.py | drawks/txstatsd | da674d7a86e0e5ec40eaa53fe81310ef19d1ed9e | [
"MIT"
] | 1 | 2020-07-10T23:35:49.000Z | 2020-07-10T23:35:49.000Z | txstatsd/stats/uniformsample.py | drawks/txstatsd | da674d7a86e0e5ec40eaa53fe81310ef19d1ed9e | [
"MIT"
] | 1 | 2020-07-13T05:31:58.000Z | 2020-07-13T05:31:58.000Z | # Copyright (C) 2011-2012 Canonical Services Ltd
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import random
import sys
class UniformSample(object):
    """
    A fixed-size random sample of a stream of values. Uses Vitter's
    Algorithm R to keep a statistically representative reservoir.

    See:
    - U{Random Sampling with a Reservoir
        <http://www.cs.umd.edu/~samir/498/vitter.pdf>}
    """

    def __init__(self, reservoir_size):
        """Creates a new C{UniformSample}.

        @param reservoir_size: The number of samples to keep in the sampling
            reservoir.
        """
        self._values = [0] * reservoir_size
        self._count = 0
        self.clear()
        # Python 3 dropped sys.maxint; fall back to sys.maxsize there.
        self.maxint = getattr(sys, 'maxint', sys.maxsize)

    def clear(self):
        """Reset the reservoir slots and the stream counter."""
        self._values = [0] * len(self._values)
        self._count = 0

    def size(self):
        """Return how many reservoir slots currently hold stream values."""
        return min(self._count, len(self._values))

    def update(self, value):
        """Offer ``value`` to the reservoir (one Algorithm R step)."""
        self._count += 1
        capacity = len(self._values)
        if self._count <= capacity:
            # Reservoir not yet full: store unconditionally.
            self._values[self._count - 1] = value
        else:
            # Replace a pseudo-random slot; value survives only if the
            # chosen slot index falls inside the reservoir.
            slot = random.randint(1, self.maxint) % self._count
            if slot < capacity:
                self._values[slot] = value

    def get_values(self):
        """Return a copy of the populated portion of the reservoir."""
        return list(self._values[:self.size()])
| 35.716418 | 76 | 0.674885 |
import random
import sys
class UniformSample(object):
    """A fixed-size uniform random sample (reservoir) of a stream of values.

    Implements Vitter's Algorithm R ("Random Sampling with a Reservoir").
    """
    def __init__(self, reservoir_size):
        """Create a sample keeping at most ``reservoir_size`` values."""
        self._values = [0 for i in range(reservoir_size)]
        self._count = 0
        self.clear()
        # sys.maxint exists only on Python 2; fall back to sys.maxsize.
        self.maxint = getattr(sys, 'maxint', sys.maxsize)
    def clear(self):
        """Reset the reservoir slots and the stream counter."""
        self._values = [0 for i in range(len(self._values))]
        self._count = 0
    def size(self):
        """Number of reservoir slots that currently hold stream values."""
        c = self._count
        return len(self._values) if c > len(self._values) else c
    def update(self, value):
        """Offer ``value`` to the reservoir (one Algorithm R step)."""
        self._count += 1
        if self._count <= len(self._values):
            # Reservoir not yet full: store unconditionally.
            self._values[self._count - 1] = value
        else:
            # Replace a pseudo-random slot; the value survives only if the
            # chosen index falls inside the reservoir.
            r = random.randint(1, self.maxint) % self._count
            if r < len(self._values):
                self._values[r] = value
    def get_values(self):
        """Return the populated portion of the reservoir as a new list."""
        s = self.size()
        return [self._values[i] for i in range(0, s)]
| true | true |
f71dcf5945851eb64bb226ac287f229197ca66fc | 1,067 | py | Python | setup.py | sayaHub/track-web | c7695978392a11e5fdbca15d2cafd493a5e7c2e9 | [
"CC0-1.0"
] | null | null | null | setup.py | sayaHub/track-web | c7695978392a11e5fdbca15d2cafd493a5e7c2e9 | [
"CC0-1.0"
] | null | null | null | setup.py | sayaHub/track-web | c7695978392a11e5fdbca15d2cafd493a5e7c2e9 | [
"CC0-1.0"
] | null | null | null | import setuptools
# Pinned runtime dependencies for the track-web package.
_INSTALL_REQUIRES = [
    'flask==0.12.4',
    'gunicorn==19.6.0',
    'pyyaml==3.13',
    'python-slugify==1.2.1',
    'pymongo==3.7.2',
    'Flask-PyMongo==2.2.0',
    'flask-compress==1.4.0',
    'click==6.7',
    'Babel==2.6.0',
    'Flask-Caching==1.4.0',
    'azure-keyvault==1.1.0',
    'msrestazure==0.5.1',
]

# Extra tooling installed with `pip install .[development]`.
_DEV_REQUIRES = [
    'mypy==0.590',
    'pylint==1.8.4',
    'pytest==3.5.0',
    'pytest-cov==2.5.1',
]

setuptools.setup(
    name='track-web',
    version='0.0.1',
    long_description='',
    author='GSA 18F, CDS-SNC',
    author_email='pulse@cio.gov, cds-snc@tbs-sct.gc.ca',
    url='https://github.com/cds-snc/track-web',
    include_package_data=True,
    packages=['track'],
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'Programming Language :: Python :: 3',
    ],
    install_requires=_INSTALL_REQUIRES,
    extras_require={'development': _DEV_REQUIRES},
)
| 24.813953 | 56 | 0.497657 | import setuptools
# Package definition for the track-web application (appears to be a Flask
# app served by gunicorn with MongoDB and Azure Key Vault clients, judging
# from the pinned dependencies below).
setuptools.setup(
    name='track-web',
    version='0.0.1',
    long_description='',
    author='GSA 18F, CDS-SNC',
    author_email='pulse@cio.gov, cds-snc@tbs-sct.gc.ca',
    url='https://github.com/cds-snc/track-web',
    include_package_data=True,
    packages=[
        'track',
    ],
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'Programming Language :: Python :: 3',
    ],
    # Pinned runtime dependencies.
    install_requires=[
        'flask==0.12.4',
        'gunicorn==19.6.0',
        'pyyaml==3.13',
        'python-slugify==1.2.1',
        'pymongo==3.7.2',
        'Flask-PyMongo==2.2.0',
        'flask-compress==1.4.0',
        'click==6.7',
        'Babel==2.6.0',
        'Flask-Caching==1.4.0',
        'azure-keyvault==1.1.0',
        'msrestazure==0.5.1'
    ],
    # Extra tooling installed with `pip install .[development]`.
    extras_require={
        'development': [
            'mypy==0.590',
            'pylint==1.8.4',
            'pytest==3.5.0',
            'pytest-cov==2.5.1',
        ],
    },
)
| true | true |
f71dcf7616818d83e932104ad923982909610c0a | 8,300 | py | Python | simbad/rotsearch/phaser_rotation_search.py | hlasimpk/SIMBAD | 684de027f25fe63e8d973e494b0adf74db08cd89 | [
"BSD-3-Clause"
] | null | null | null | simbad/rotsearch/phaser_rotation_search.py | hlasimpk/SIMBAD | 684de027f25fe63e8d973e494b0adf74db08cd89 | [
"BSD-3-Clause"
] | null | null | null | simbad/rotsearch/phaser_rotation_search.py | hlasimpk/SIMBAD | 684de027f25fe63e8d973e494b0adf74db08cd89 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env ccp4-python
"""Module to run phaser rotation search on a model"""
__author__ = "Adam Simpkin"
__date__ = "12 April 2018"
__version__ = "1.0"
import os
from phaser import InputMR_DAT, runMR_DAT, InputMR_FRF, runMR_FRF
class Phaser(object):
    """Class to run the PHASER fast rotation-function search.

    All constructor arguments are stored as plain public attributes; the
    previous trivial property/setter pairs added no behaviour and have been
    removed (``phaser.f``, ``phaser.hires`` etc. still read and write
    exactly as before).

    Attributes
    ----------
    hklin : str
        Path to the input hkl file
    f : str
        The column label for F ("None" when absent)
    i : str
        The column label for I ("None" when absent)
    logfile : str
        Path to the output log file
    nmol : int
        The number of molecules to search for
    pdbin : str
        Path to the input pdb file
    sigf : str
        The column label for SIGF ("None" when absent)
    sigi : str
        The column label for SIGI ("None" when absent)
    solvent : int or float
        The estimated solvent content of the crystal
    timeout : int
        Time in minutes before PHASER is killed (0 disables the kill timer)
    work_dir : str
        Path to the working directory where PHASER runs
    hires : float
        The high resolution limit of data used to find/refine this solution
    eid : str or int
        The estimated sequence identity

    Examples
    --------
    >>> from simbad.rotsearch.phaser_rotation_search import Phaser
    >>> phaser = Phaser('<hklin>', '<f>', '<i>', '<logfile>', '<nmol>',
    ...                 '<pdbin>', '<sigf>', '<sigi>', '<solvent>',
    ...                 '<timeout>', '<workdir>', '<hires>', '<eid>')
    >>> phaser.run()

    Files relating to the PHASER run are written inside ``work_dir``; the
    location of the output logfile can be specified.
    """

    def __init__(self, hklin, f, i, logfile, nmol, pdbin, sigf, sigi,
                 solvent, timeout, work_dir, hires, eid):
        self.hklin = hklin
        self.f = f
        self.i = i
        self.logfile = logfile
        self.nmol = nmol
        self.pdbin = pdbin
        self.sigf = sigf
        self.sigi = sigi
        self.solvent = solvent
        self.timeout = timeout
        self.work_dir = work_dir
        self.hires = hires
        self.eid = eid

    def run(self):
        """Run the rotation search using PHASER.

        Changes into ``work_dir`` (creating it if necessary), feeds the
        reflection data to PHASER, runs the fast rotation function and
        writes its summary to ``logfile``, then restores the original
        working directory.

        Raises
        ------
        RuntimeError
            If neither intensity (I/SIGI) nor amplitude (F/SIGF) column
            labels were provided.
        """
        current_work_dir = os.getcwd()
        if not os.path.exists(self.work_dir):
            os.makedirs(self.work_dir)
        os.chdir(self.work_dir)

        # Load the reflection data.
        i = InputMR_DAT()
        i.setHKLI(self.hklin)
        if self.hires:
            i.setHIRES(self.hires)
        # Prefer intensities over amplitudes when both are present.
        # NOTE: the CLI passes the literal string "None" for absent labels.
        if self.i != "None" and self.sigi != "None":
            i.setLABI_I_SIGI(self.i, self.sigi)
        elif self.f != "None" and self.sigf != "None":
            i.setLABI_F_SIGF(self.f, self.sigf)
        else:
            msg = "No flags for intensities or amplitudes have been provided"
            raise RuntimeError(msg)
        i.setMUTE(True)
        run_mr_data = runMR_DAT(i)

        if run_mr_data.Success():
            # Configure and run the fast rotation function (FRF).
            i = InputMR_FRF()
            i.setJOBS(1)
            i.setREFL_DATA(run_mr_data.getREFL_DATA())
            i.setSPAC_HALL(run_mr_data.getSpaceGroupHall())
            i.setCELL6(run_mr_data.getUnitCell())
            i.setROOT("phaser_mr_output")
            i.addENSE_PDB_ID("PDB", self.pdbin, float(self.eid))
            i.setENSE_DISA_CHEC('PDB', True)
            i.setCOMP_BY("SOLVENT")
            i.setCOMP_PERC(self.solvent)
            i.addSEAR_ENSE_NUM('PDB', self.nmol)
            i.setRFAC_USE(False)
            if self.timeout != 0:
                i.setKILL_TIME(self.timeout)
            run_mr_rot = runMR_FRF(i)
            with open(self.logfile, 'w') as f:
                f.write(run_mr_rot.summary())
        os.chdir(current_work_dir)
if __name__ == "__main__":
    # Command-line entry point: build a Phaser instance from the CLI flags
    # below and run the rotation search.
    import argparse
    parser = argparse.ArgumentParser(description='Runs rotation search using PHASER', prefix_chars="-")
    group = parser.add_argument_group()
    group.add_argument('-eid', type=str,
                       help="The estimated sequence identity")
    group.add_argument('-f', type=str,
                       help="The column label for F")
    group.add_argument('-hires', type=float, default=None,
                       help="The high resolution limit of data used to find/refine this solution")
    group.add_argument('-hklin', type=str,
                       help="Path the input hkl file")
    group.add_argument('-i', type=str,
                       help="The column label for I")
    group.add_argument('-logfile', type=str,
                       help="Path to the ouput log file")
    group.add_argument('-nmol', type=int,
                       help="The predicted number of molecules to build")
    group.add_argument('-pdbin', type=str,
                       help="Path to the input pdb file")
    group.add_argument('-sigf', type=str,
                       help="The column label for SIGF")
    group.add_argument('-sigi', type=str,
                       help="The column label for SIGI")
    group.add_argument('-solvent', type=float,
                       help="The estimated solvent content of the crystal")
    group.add_argument('-timeout', type=int, default=0,
                       help="The time in mins before phaser will kill a job")
    group.add_argument('-work_dir', type=str,
                       help="Path to the working directory")
    args = parser.parse_args()
    # NOTE(review): absent string flags default to None here, while
    # Phaser.run() tests against the literal string "None" -- confirm the
    # callers always pass explicit values.
    phaser = Phaser(args.hklin, args.f, args.i, args.logfile, args.nmol, args.pdbin, args.sigf, args.sigi, args.solvent,
                    args.timeout, args.work_dir, args.hires, args.eid)
phaser.run() | 30.514706 | 120 | 0.577952 |
__author__ = "Adam Simpkin"
__date__ = "12 April 2018"
__version__ = "1.0"
import os
from phaser import InputMR_DAT, runMR_DAT, InputMR_FRF, runMR_FRF
class Phaser(object):
def __init__(self, hklin, f, i, logfile, nmol, pdbin, sigf, sigi, solvent, timeout, work_dir, hires, eid):
self._f = None
self._hires = None
self._hklin = None
self._i = None
self._logfile = None
self._nmol = None
self._pdbin = None
self._sigf = None
self._sigi = None
self._solvent = None
self._timeout = None
self._work_dir = None
self.eid = eid
self.f = f
self.hires = hires
self.hklin = hklin
self.i = i
self.logfile = logfile
self.nmol = nmol
self.pdbin = pdbin
self.sigf = sigf
self.sigi = sigi
self.solvent = solvent
self.timeout = timeout
self.work_dir = work_dir
@property
def f(self):
return self._f
@f.setter
def f(self, f):
self._f = f
@property
def hires(self):
return self._hires
@hires.setter
def hires(self, hires):
self._hires = hires
@property
def hklin(self):
return self._hklin
@hklin.setter
def hklin(self, hklin):
self._hklin = hklin
@property
def i(self):
return self._i
@i.setter
def i(self, i):
self._i = i
@property
def logfile(self):
return self._logfile
@logfile.setter
def logfile(self, logfile):
self._logfile = logfile
@property
def nmol(self):
return self._nmol
@nmol.setter
def nmol(self, nmol):
self._nmol = nmol
@property
def pdbin(self):
return self._pdbin
@pdbin.setter
def pdbin(self, pdbin):
self._pdbin = pdbin
@property
def sigf(self):
return self._sigf
@sigf.setter
def sigf(self, sigf):
self._sigf = sigf
@property
def sigi(self):
return self._sigi
@sigi.setter
def sigi(self, sigi):
self._sigi = sigi
@property
def solvent(self):
return self._solvent
@solvent.setter
def solvent(self, solvent):
self._solvent = solvent
@property
def timeout(self):
return self._timeout
@timeout.setter
def timeout(self, timeout):
self._timeout = timeout
def run(self):
current_work_dir = os.getcwd()
if os.path.exists(self.work_dir):
os.chdir(self.work_dir)
else:
os.makedirs(self.work_dir)
os.chdir(self.work_dir)
i = InputMR_DAT()
i.setHKLI(self.hklin)
if self.hires:
i.setHIRES(self.hires)
if self.i != "None" and self.sigi != "None":
i.setLABI_I_SIGI(self.i, self.sigi)
elif self.f != "None" and self.sigf != "None":
i.setLABI_F_SIGF(self.f, self.sigf)
else:
msg = "No flags for intensities or amplitudes have been provided"
raise RuntimeError(msg)
i.setMUTE(True)
run_mr_data = runMR_DAT(i)
if run_mr_data.Success():
i = InputMR_FRF()
i.setJOBS(1)
i.setREFL_DATA(run_mr_data.getREFL_DATA())
i.setSPAC_HALL(run_mr_data.getSpaceGroupHall())
i.setCELL6(run_mr_data.getUnitCell())
i.setROOT("phaser_mr_output")
i.addENSE_PDB_ID("PDB", self.pdbin, float(self.eid))
i.setENSE_DISA_CHEC('PDB', True)
i.setCOMP_BY("SOLVENT")
i.setCOMP_PERC(self.solvent)
i.addSEAR_ENSE_NUM('PDB', self.nmol)
i.setRFAC_USE(False)
if self.timeout != 0:
i.setKILL_TIME(self.timeout)
run_mr_rot = runMR_FRF(i)
with open(self.logfile, 'w') as f:
f.write(run_mr_rot.summary())
os.chdir(current_work_dir)
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description='Runs rotation search using PHASER', prefix_chars="-")
group = parser.add_argument_group()
group.add_argument('-eid', type=str,
help="The estimated sequence identity")
group.add_argument('-f', type=str,
help="The column label for F")
group.add_argument('-hires', type=float, default=None,
help="The high resolution limit of data used to find/refine this solution")
group.add_argument('-hklin', type=str,
help="Path the input hkl file")
group.add_argument('-i', type=str,
help="The column label for I")
group.add_argument('-logfile', type=str,
help="Path to the ouput log file")
group.add_argument('-nmol', type=int,
help="The predicted number of molecules to build")
group.add_argument('-pdbin', type=str,
help="Path to the input pdb file")
group.add_argument('-sigf', type=str,
help="The column label for SIGF")
group.add_argument('-sigi', type=str,
help="The column label for SIGI")
group.add_argument('-solvent', type=float,
help="The estimated solvent content of the crystal")
group.add_argument('-timeout', type=int, default=0,
help="The time in mins before phaser will kill a job")
group.add_argument('-work_dir', type=str,
help="Path to the working directory")
args = parser.parse_args()
phaser = Phaser(args.hklin, args.f, args.i, args.logfile, args.nmol, args.pdbin, args.sigf, args.sigi, args.solvent,
args.timeout, args.work_dir, args.hires, args.eid)
phaser.run() | true | true |
f71dcf8a96a9d2305374385e55f241406bbc3021 | 16,043 | py | Python | main_imp_visda.py | eyov7/CV_LTH_Pre-training-LLNL | bb18ba2093328aeb4e5ab3929f2749264ef3c981 | [
"MIT"
] | 47 | 2020-12-15T03:40:50.000Z | 2022-03-30T03:38:29.000Z | main_imp_visda.py | eyov7/CV_LTH_Pre-training-LLNL | bb18ba2093328aeb4e5ab3929f2749264ef3c981 | [
"MIT"
] | null | null | null | main_imp_visda.py | eyov7/CV_LTH_Pre-training-LLNL | bb18ba2093328aeb4e5ab3929f2749264ef3c981 | [
"MIT"
] | 10 | 2021-03-17T01:28:57.000Z | 2022-02-24T20:23:57.000Z | import argparse
import os
import random
import shutil
import time
import warnings
import copy
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim
import torch.utils.data
import torchvision.transforms as transforms
import torchvision.datasets as datasets
import torchvision.models as models
from pruning_utils import *
from visda2017 import VisDA17
# All lowercase, callable entries in torchvision.models are valid --arch
# choices.
model_names = sorted(name for name in models.__dict__
    if name.islower() and not name.startswith("__")
    and callable(models.__dict__[name]))
parser = argparse.ArgumentParser(description='PyTorch Visda Training')
################################ required settings ################################
parser.add_argument('data', metavar='DIR',
                    help='path to dataset')
parser.add_argument('-a', '--arch', metavar='ARCH', default='resnet50',
                    choices=model_names,
                    help='model architecture: ' +
                        ' | '.join(model_names) +
                        ' (default: resnet18)')
parser.add_argument('--epochs', default=20, type=int, metavar='N',
                    help='number of total epochs to run')
parser.add_argument('-b', '--batch-size', default=128, type=int,
                    metavar='N',
                    help='mini-batch size (default: 256), this is the total '
                         'batch size of all GPUs on the current node when '
                         'using Data Parallel or Distributed Data Parallel')
parser.add_argument('--lr', '--learning-rate', default=0.001, type=float,
                    metavar='LR', help='initial learning rate', dest='lr')
parser.add_argument('--prune_type', default=None, type=str, help='prune type [lt, pt_trans]')
parser.add_argument('--pre_weight', default=None, type=str)
parser.add_argument('--dataset', default='visda17', type=str)
parser.add_argument('--save_dir', default='results/', type=str)
parser.add_argument('--percent', default=0.2, type=float, help='pruning rate for each iteration')
parser.add_argument('--states', default=19, type=int, help='number of iterative pruning states')
parser.add_argument('--start_state', default=0, type=int, help='number of iterative pruning states')
################################ other settings ################################
parser.add_argument('-j', '--workers', default=4, type=int, metavar='N',
                    help='number of data loading workers (default: 4)')
parser.add_argument('--start-epoch', default=0, type=int, metavar='N',
                    help='manual epoch number (useful on restarts)')
parser.add_argument('--momentum', default=0.9, type=float, metavar='M',
                    help='momentum')
parser.add_argument('--wd', '--weight-decay', default=5e-4, type=float,
                    metavar='W', help='weight decay (default: 1e-4)',
                    dest='weight_decay')
parser.add_argument('-p', '--print-freq', default=50, type=int,
                    metavar='N', help='print frequency (default: 10)')
parser.add_argument('--resume', default='', type=str, metavar='PATH',
                    help='path to latest checkpoint (default: none)')
parser.add_argument('-e', '--evaluate', dest='evaluate', action='store_true',
                    help='evaluate model on validation set')
parser.add_argument('--pretrained', dest='pretrained', action='store_true',
                    help='use pre-trained model')
parser.add_argument('--seed', default=None, type=int,
                    help='seed for initializing training. ')
parser.add_argument('--gpu', default=None, type=int,
                    help='GPU id to use.')
# Best validation top-1 accuracy / epoch seen so far; mutated from
# main_worker() via ``global`` and reset after each pruning state.
best_acc1 = 0
best_epoch = 0
def main():
    """Entry point: parse the CLI arguments and launch the training worker."""
    cli_args = parser.parse_args()
    os.makedirs(cli_args.save_dir, exist_ok=True)

    if cli_args.seed is not None:
        # Deterministic runs: seed Python + torch and force deterministic cuDNN.
        random.seed(cli_args.seed)
        torch.manual_seed(cli_args.seed)
        cudnn.deterministic = True
        warnings.warn(
            'You have chosen to seed training. '
            'This will turn on the CUDNN deterministic setting, '
            'which can slow down your training considerably! '
            'You may see unexpected behavior when restarting '
            'from checkpoints.'
        )

    if cli_args.gpu is not None:
        warnings.warn(
            'You have chosen a specific GPU. This will completely '
            'disable data parallelism.'
        )

    main_worker(cli_args.gpu, cli_args)
def main_worker(gpu, args):
    """Run the full iterative-magnitude-pruning (IMP) experiment.

    Builds the model, installs the rewinding initialization ('lt' = random
    lottery-ticket init, 'pt_trans' = pre-trained transfer init), then for
    each pruning state: trains/evaluates, checkpoints the best model, prunes
    ``args.percent`` of the remaining weights and rewinds the surviving
    weights to the stored initialization.

    Args:
        gpu: GPU id to pin to, or None to use DataParallel over all GPUs.
        args: Parsed command-line arguments (see the module-level parser).
    """
    global best_acc1, best_epoch
    args.gpu = gpu

    if args.gpu is not None:
        print("Use GPU: {} for training".format(args.gpu))

    # create model
    print("=> using model '{}'".format(args.arch))
    model = models.__dict__[args.arch](pretrained=False)
    if_pruned = False

    assert args.dataset == 'visda17'
    # Replace the ImageNet classifier head with a 12-way VisDA-17 head.
    ch = model.fc.in_features
    model.fc = nn.Linear(ch, 12)

    if args.prune_type == 'lt':
        print('using Lottery Tickets setting ')
        initalization = copy.deepcopy(model.state_dict())
        torch.save({'state_dict': initalization}, os.path.join(args.save_dir, 'random_init.pt'))
    elif args.prune_type == 'pt_trans':
        print('using Pretrain Tickets setting')
        ticket_init_weight = torch.load(args.pre_weight)
        if 'state_dict' in ticket_init_weight.keys():
            ticket_init_weight = ticket_init_weight['state_dict']
        # Drop the classifier weights: the pre-trained head does not match
        # the 12-class VisDA head created above.
        all_keys = list(ticket_init_weight.keys())
        for key in all_keys:
            if 'fc.' in key:
                del ticket_init_weight[key]
        print('layer number', len(ticket_init_weight.keys()))
        for key in ticket_init_weight.keys():
            assert key in model.state_dict().keys()
        model.load_state_dict(ticket_init_weight, strict=False)
        initalization = copy.deepcopy(model.state_dict())
    else:
        raise ValueError("Unknown Pruning Type")

    print('Mode: Dataparallel')
    model = torch.nn.DataParallel(model).cuda()

    # BUGFIX: criterion/optimizer must exist *before* the resume branch
    # below, which previously called optimizer.load_state_dict() on a name
    # that was only assigned further down (UnboundLocalError on --resume).
    criterion = nn.CrossEntropyLoss().cuda(args.gpu)
    optimizer = torch.optim.SGD(model.parameters(), args.lr,
                                momentum=args.momentum,
                                weight_decay=args.weight_decay)

    # optionally resume from a checkpoint
    if args.resume:
        if os.path.isfile(args.resume):
            print("=> loading checkpoint '{}'".format(args.resume))
            if args.gpu is None:
                checkpoint = torch.load(args.resume)
            else:
                # Map model to be loaded to specified single gpu.
                loc = 'cuda:{}'.format(args.gpu)
                checkpoint = torch.load(args.resume, map_location=loc)
            args.start_epoch = checkpoint['epoch']
            args.start_state = checkpoint['state']
            best_acc1 = checkpoint['best_acc1']
            if_pruned = checkpoint['if_pruned']
            initalization = checkpoint['init_weight']

            # Re-install the pruning masks before loading a pruned state dict.
            if if_pruned:
                prune_model_custom(model.module, checkpoint['mask'], False)

            model.module.load_state_dict(checkpoint['state_dict'])
            optimizer.load_state_dict(checkpoint['optimizer'])
            print("=> loaded checkpoint '{}' (epoch {})"
                  .format(args.resume, checkpoint['epoch']))
        else:
            print("=> no checkpoint found at '{}'".format(args.resume))

    cudnn.benchmark = True

    # Data loading code
    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.225])
    train_trans = transforms.Compose([
        transforms.RandomResizedCrop(size=224, scale=(0.75, 1.33)),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        normalize,
    ])
    val_trans = transforms.Compose([
        transforms.Resize((224, 224)),
        transforms.ToTensor(),
        normalize,
    ])

    train_dataset = VisDA17(txt_file=os.path.join(args.data, "train/image_list.txt"),
                            root_dir=os.path.join(args.data, "train"), transform=train_trans)
    val_dataset = VisDA17(txt_file=os.path.join(args.data, "validation/image_list.txt"),
                          root_dir=os.path.join(args.data, "validation"), transform=val_trans)

    train_loader = torch.utils.data.DataLoader(
        train_dataset, batch_size=args.batch_size, shuffle=True,
        num_workers=args.workers, pin_memory=True)
    val_loader = torch.utils.data.DataLoader(
        val_dataset,
        batch_size=args.batch_size, shuffle=False,
        num_workers=args.workers, pin_memory=True)

    if args.evaluate:
        validate(val_loader, model, criterion, args)
        return

    for prun_iter in range(args.start_state, args.states):
        check_sparsity(model.module, False)

        for epoch in range(args.start_epoch, args.epochs):
            print(optimizer.state_dict()['param_groups'][0]['lr'])
            # train for one epoch
            train(train_loader, model, criterion, optimizer, epoch, args)

            # evaluate on validation set
            acc1 = validate(val_loader, model, criterion, args)

            # remember best acc@1 and save checkpoint
            is_best = acc1 > best_acc1
            best_acc1 = max(acc1, best_acc1)
            if is_best:
                best_epoch = epoch + 1

            if if_pruned:
                mask_dict = extract_mask(model.state_dict())
            else:
                mask_dict = None

            save_checkpoint({
                'epoch': epoch + 1,
                'state': prun_iter,
                'arch': args.arch,
                'state_dict': model.module.state_dict(),
                'mask': mask_dict,
                'best_acc1': best_acc1,
                'optimizer': optimizer.state_dict(),
                'if_pruned': if_pruned,
                'init_weight': initalization
            }, is_best, checkpoint=args.save_dir, best_name=str(prun_iter) + 'model_best.pth.tar')

        check_sparsity(model.module, False)
        print('**best TA = ', best_acc1, 'best epoch = ', best_epoch)

        # start pruning: remove args.percent of the remaining weights, then
        # rewind the surviving weights to the stored initialization.
        print('start pruning model')
        pruning_model(model.module, args.percent, False)
        if_pruned = True

        current_mask = extract_mask(model.state_dict())
        remove_prune(model.module, False)

        model.module.load_state_dict(initalization)
        best_acc1 = 0
        best_epoch = 0
        prune_model_custom(model.module, current_mask, False)
        validate(val_loader, model, criterion, args)

        # Fresh optimizer state for the rewound network.
        optimizer = torch.optim.SGD(model.parameters(), args.lr,
                                    momentum=args.momentum,
                                    weight_decay=args.weight_decay)
def train(train_loader, model, criterion, optimizer, epoch, args):
    """Train ``model`` for one epoch over ``train_loader``.

    Updates the optimizer in place and prints progress every
    ``args.print_freq`` batches; the learning rate is (re)set per batch via
    ``adjust_learning_rate``.
    """
    batch_time = AverageMeter('Time', ':6.3f')
    data_time = AverageMeter('Data', ':6.3f')
    losses = AverageMeter('Loss', ':.4e')
    top1 = AverageMeter('Acc@1', ':6.2f')
    top5 = AverageMeter('Acc@5', ':6.2f')
    progress = ProgressMeter(
        len(train_loader),
        [batch_time, data_time, losses, top1, top5],
        prefix="Epoch: [{}]".format(epoch))
    # switch to train mode
    model.train()
    wp_steps = len(train_loader)
    end = time.time()
    for i, (images, target) in enumerate(train_loader):
        # measure data loading time
        data_time.update(time.time() - end)
        adjust_learning_rate(optimizer, epoch, args, i+1, steps_for_one_epoch=wp_steps)
        if args.gpu is not None:
            images = images.cuda(args.gpu, non_blocking=True)
        # NOTE(review): target is moved to GPU unconditionally -- this path
        # assumes CUDA is available (DataParallel setup in main_worker).
        target = target.cuda(args.gpu, non_blocking=True)
        # compute output
        output = model(images)
        loss = criterion(output, target)
        # measure accuracy and record loss
        acc1, acc5 = accuracy(output, target, topk=(1, 5))
        losses.update(loss.item(), images.size(0))
        top1.update(acc1[0], images.size(0))
        top5.update(acc5[0], images.size(0))
        # compute gradient and do SGD step
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        # measure elapsed time
        batch_time.update(time.time() - end)
        end = time.time()
        if i % args.print_freq == 0:
            progress.display(i)
def validate(val_loader, model, criterion, args):
    """Evaluate ``model`` over ``val_loader`` and return the top-1 accuracy.

    Runs under ``torch.no_grad()`` with the model in eval mode; prints
    progress every ``args.print_freq`` batches and a final summary line.
    """
    batch_time = AverageMeter('Time', ':6.3f')
    losses = AverageMeter('Loss', ':.4e')
    top1 = AverageMeter('Acc@1', ':6.2f')
    top5 = AverageMeter('Acc@5', ':6.2f')
    progress = ProgressMeter(
        len(val_loader),
        [batch_time, losses, top1, top5],
        prefix='Test: ')
    # switch to evaluate mode
    model.eval()
    with torch.no_grad():
        end = time.time()
        for i, (images, target) in enumerate(val_loader):
            if args.gpu is not None:
                images = images.cuda(args.gpu, non_blocking=True)
            # NOTE(review): target is moved to GPU unconditionally -- this
            # path assumes CUDA is available.
            target = target.cuda(args.gpu, non_blocking=True)
            # compute output
            output = model(images)
            loss = criterion(output, target)
            # measure accuracy and record loss
            acc1, acc5 = accuracy(output, target, topk=(1, 5))
            losses.update(loss.item(), images.size(0))
            top1.update(acc1[0], images.size(0))
            top5.update(acc5[0], images.size(0))
            # measure elapsed time
            batch_time.update(time.time() - end)
            end = time.time()
            if i % args.print_freq == 0:
                progress.display(i)
        # TODO: this should also be done with the ProgressMeter
        print(' * Acc@1 {top1.avg:.3f} Acc@5 {top5.avg:.3f}'
              .format(top1=top1, top5=top5))
    return top1.avg
def save_checkpoint(state, is_best, checkpoint, filename='checkpoint.pth.tar', best_name='model_best.pth.tar'):
    """Serialize ``state`` to ``checkpoint``/``filename``; when ``is_best``
    is true, also duplicate it as ``checkpoint``/``best_name``."""
    target_path = os.path.join(checkpoint, filename)
    torch.save(state, target_path)
    if not is_best:
        return
    shutil.copyfile(target_path, os.path.join(checkpoint, best_name))
def adjust_learning_rate(optimizer, epoch, args, iterations, steps_for_one_epoch):
    """Step schedule: full ``args.lr`` for the first 10 epochs, then a single
    10x decay. ``iterations`` and ``steps_for_one_epoch`` are accepted for
    interface compatibility but unused by this schedule."""
    new_lr = args.lr if epoch < 10 else args.lr * 0.1
    for group in optimizer.param_groups:
        group['lr'] = new_lr
class AverageMeter(object):
    """Tracks the latest value of a metric together with its running sum,
    count and mean."""
    def __init__(self, name, fmt=':f'):
        self.name = name
        self.fmt = fmt  # format spec used by __str__, e.g. ':6.2f'
        self.reset()
    def reset(self):
        """Zero out all statistics."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0
    def update(self, val, n=1):
        """Record ``val`` observed ``n`` times and refresh the mean."""
        self.val = val
        self.count = self.count + n
        self.sum = self.sum + val * n
        self.avg = self.sum / self.count
    def __str__(self):
        template = '{name} {val%s} ({avg%s})' % (self.fmt, self.fmt)
        return template.format(**self.__dict__)
class ProgressMeter(object):
    """Pretty-prints a '[batch/total]' counter followed by a set of
    AverageMeters, tab-separated."""
    def __init__(self, num_batches, meters, prefix=""):
        self.batch_fmtstr = self._get_batch_fmtstr(num_batches)
        self.meters = meters
        self.prefix = prefix
    def display(self, batch):
        """Print one progress line for batch index ``batch``."""
        parts = [self.prefix + self.batch_fmtstr.format(batch)]
        parts.extend(str(meter) for meter in self.meters)
        print('\t'.join(parts))
    def _get_batch_fmtstr(self, num_batches):
        # Width the counter to the number of digits in the batch total,
        # e.g. 391 -> '[{:3d}/391]'.
        width = len(str(num_batches // 1))
        fmt = '{:' + str(width) + 'd}'
        return '[' + fmt + '/' + fmt.format(num_batches) + ']'
def accuracy(output, target, topk=(1,)):
    """Computes the accuracy over the k top predictions for the specified values of k.

    Args:
        output: (batch, num_classes) tensor of class scores/logits.
        target: (batch,) tensor of ground-truth class indices.
        topk: tuple of k values for which to report top-k accuracy.

    Returns:
        List of 1-element tensors, one per k, holding accuracy in percent.
    """
    with torch.no_grad():
        maxk = max(topk)
        batch_size = target.size(0)

        _, pred = output.topk(maxk, 1, True, True)
        pred = pred.t()
        correct = pred.eq(target.view(1, -1).expand_as(pred))

        res = []
        for k in topk:
            # reshape (not view): `correct` inherits a non-contiguous layout
            # from the transposed/expanded operands, so .view(-1) raises a
            # RuntimeError on modern PyTorch; .reshape(-1) handles both cases.
            correct_k = correct[:k].reshape(-1).float().sum(0, keepdim=True)
            res.append(correct_k.mul_(100.0 / batch_size))
        return res
if __name__ == '__main__':
main() | 36.627854 | 111 | 0.59808 | import argparse
import os
import random
import shutil
import time
import warnings
import copy
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim
import torch.utils.data
import torchvision.transforms as transforms
import torchvision.datasets as datasets
import torchvision.models as models
from pruning_utils import *
from visda2017 import VisDA17
model_names = sorted(name for name in models.__dict__
if name.islower() and not name.startswith("__")
and callable(models.__dict__[name]))
parser = argparse.ArgumentParser(description='PyTorch Visda Training')
if args.resume:
if os.path.isfile(args.resume):
print("=> loading checkpoint '{}'".format(args.resume))
if args.gpu is None:
checkpoint = torch.load(args.resume)
else:
loc = 'cuda:{}'.format(args.gpu)
checkpoint = torch.load(args.resume, map_location=loc)
args.start_epoch = checkpoint['epoch']
args.start_state = checkpoint['state']
best_acc1 = checkpoint['best_acc1']
if_pruned = checkpoint['if_pruned']
initalization = checkpoint['init_weight']
if if_pruned:
prune_model_custom(model.module, checkpoint['mask'], False)
model.module.load_state_dict(checkpoint['state_dict'])
optimizer.load_state_dict(checkpoint['optimizer'])
print("=> loaded checkpoint '{}' (epoch {})"
.format(args.resume, checkpoint['epoch']))
else:
print("=> no checkpoint found at '{}'".format(args.resume))
criterion = nn.CrossEntropyLoss().cuda(args.gpu)
optimizer = torch.optim.SGD(model.parameters(), args.lr,
momentum=args.momentum,
weight_decay=args.weight_decay)
cudnn.benchmark = True
normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])
train_trans = transforms.Compose([
transforms.RandomResizedCrop(size=224, scale=(0.75, 1.33)),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
normalize,
])
val_trans = transforms.Compose([
transforms.Resize((224, 224)),
transforms.ToTensor(),
normalize,
])
train_dataset = VisDA17(txt_file=os.path.join(args.data, "train/image_list.txt"),
root_dir=os.path.join(args.data, "train"), transform=train_trans)
val_dataset = VisDA17(txt_file=os.path.join(args.data, "validation/image_list.txt"),
root_dir=os.path.join(args.data, "validation"), transform=val_trans)
train_loader = torch.utils.data.DataLoader(
train_dataset, batch_size=args.batch_size, shuffle=True,
num_workers=args.workers, pin_memory=True)
val_loader = torch.utils.data.DataLoader(
val_dataset,
batch_size=args.batch_size, shuffle=False,
num_workers=args.workers, pin_memory=True)
if args.evaluate:
validate(val_loader, model, criterion, args)
return
for prun_iter in range(args.start_state, args.states):
check_sparsity(model.module, False)
for epoch in range(args.start_epoch, args.epochs):
print(optimizer.state_dict()['param_groups'][0]['lr'])
train(train_loader, model, criterion, optimizer, epoch, args)
acc1 = validate(val_loader, model, criterion, args)
is_best = acc1 > best_acc1
best_acc1 = max(acc1, best_acc1)
if is_best:
best_epoch = epoch+1
if if_pruned:
mask_dict = extract_mask(model.state_dict())
else:
mask_dict = None
save_checkpoint({
'epoch': epoch + 1,
'state': prun_iter,
'arch': args.arch,
'state_dict': model.module.state_dict(),
'mask': mask_dict,
'best_acc1': best_acc1,
'optimizer' : optimizer.state_dict(),
'if_pruned': if_pruned,
'init_weight':initalization
}, is_best, checkpoint=args.save_dir, best_name=str(prun_iter)+'model_best.pth.tar')
check_sparsity(model.module, False)
print('**best TA = ', best_acc1, 'best epoch = ', best_epoch)
print('start pruning model')
pruning_model(model.module, args.percent, False)
if_pruned = True
current_mask = extract_mask(model.state_dict())
remove_prune(model.module, False)
model.module.load_state_dict(initalization)
best_acc1 = 0
best_epoch = 0
prune_model_custom(model.module, current_mask, False)
validate(val_loader, model, criterion, args)
optimizer = torch.optim.SGD(model.parameters(), args.lr,
momentum=args.momentum,
weight_decay=args.weight_decay)
def train(train_loader, model, criterion, optimizer, epoch, args):
    """Run one training epoch: forward, loss, backward and optimizer step per batch.

    Timing, loss and top-1/top-5 accuracy are tracked with AverageMeters and
    printed every ``args.print_freq`` batches via ProgressMeter.
    """
    batch_time = AverageMeter('Time', ':6.3f')
    data_time = AverageMeter('Data', ':6.3f')
    losses = AverageMeter('Loss', ':.4e')
    top1 = AverageMeter('Acc@1', ':6.2f')
    top5 = AverageMeter('Acc@5', ':6.2f')
    progress = ProgressMeter(
        len(train_loader),
        [batch_time, data_time, losses, top1, top5],
        prefix="Epoch: [{}]".format(epoch))
    # switch to train mode
    model.train()
    wp_steps = len(train_loader)
    end = time.time()
    for i, (images, target) in enumerate(train_loader):
        # measure data loading time
        data_time.update(time.time() - end)
        # per-iteration LR update (the schedule itself is keyed on epoch only)
        adjust_learning_rate(optimizer, epoch, args, i+1, steps_for_one_epoch=wp_steps)
        if args.gpu is not None:
            images = images.cuda(args.gpu, non_blocking=True)
            target = target.cuda(args.gpu, non_blocking=True)
        # compute output and loss
        output = model(images)
        loss = criterion(output, target)
        # measure accuracy and record loss
        acc1, acc5 = accuracy(output, target, topk=(1, 5))
        losses.update(loss.item(), images.size(0))
        top1.update(acc1[0], images.size(0))
        top5.update(acc5[0], images.size(0))
        # compute gradient and do SGD step
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        # measure elapsed time
        batch_time.update(time.time() - end)
        end = time.time()
        if i % args.print_freq == 0:
            progress.display(i)
def validate(val_loader, model, criterion, args):
    """Evaluate the model on the validation set and return the top-1 accuracy.

    Runs under ``torch.no_grad()``; per-batch metrics are printed every
    ``args.print_freq`` batches, plus a final summary line.
    """
    batch_time = AverageMeter('Time', ':6.3f')
    losses = AverageMeter('Loss', ':.4e')
    top1 = AverageMeter('Acc@1', ':6.2f')
    top5 = AverageMeter('Acc@5', ':6.2f')
    progress = ProgressMeter(
        len(val_loader),
        [batch_time, losses, top1, top5],
        prefix='Test: ')
    # switch to evaluate mode
    model.eval()
    with torch.no_grad():
        end = time.time()
        for i, (images, target) in enumerate(val_loader):
            if args.gpu is not None:
                images = images.cuda(args.gpu, non_blocking=True)
                target = target.cuda(args.gpu, non_blocking=True)
            # compute output and loss
            output = model(images)
            loss = criterion(output, target)
            # measure accuracy and record loss
            acc1, acc5 = accuracy(output, target, topk=(1, 5))
            losses.update(loss.item(), images.size(0))
            top1.update(acc1[0], images.size(0))
            top5.update(acc5[0], images.size(0))
            # measure elapsed time
            batch_time.update(time.time() - end)
            end = time.time()
            if i % args.print_freq == 0:
                progress.display(i)
    # final summary line (not routed through ProgressMeter)
    print(' * Acc@1 {top1.avg:.3f} Acc@5 {top5.avg:.3f}'
          .format(top1=top1, top5=top5))
    return top1.avg
def save_checkpoint(state, is_best, checkpoint, filename='checkpoint.pth.tar', best_name='model_best.pth.tar'):
    """Write *state* under *checkpoint* and mirror it to *best_name* when it is the best so far."""
    dst = os.path.join(checkpoint, filename)
    torch.save(state, dst)
    if is_best:
        best_path = os.path.join(checkpoint, best_name)
        shutil.copyfile(dst, best_path)
def adjust_learning_rate(optimizer, epoch, args, iterations, steps_for_one_epoch):
    """Two-phase step schedule: ``args.lr`` before epoch 10, one tenth of it after.

    ``iterations`` and ``steps_for_one_epoch`` are part of the signature used
    by the training loop but are unused by this epoch-keyed schedule.
    """
    if epoch >= 10:
        new_lr = args.lr * 0.1
    else:
        new_lr = args.lr
    for param_group in optimizer.param_groups:
        param_group['lr'] = new_lr
class AverageMeter(object):
    """Running-average tracker for a scalar metric.

    Keeps the latest value, the weighted sum, the sample count and the mean
    of all samples recorded since construction or the last reset().
    """

    def __init__(self, name, fmt=':f'):
        self.name = name
        self.fmt = fmt
        self.reset()

    def reset(self):
        """Zero out every statistic."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        """Record *val* with multiplicity *n* and recompute the mean."""
        self.sum += val * n
        self.count += n
        self.val = val
        self.avg = self.sum / self.count

    def __str__(self):
        pattern = ''.join(('{name} {val', self.fmt, '} ({avg', self.fmt, '})'))
        return pattern.format(**self.__dict__)
class ProgressMeter(object):
    """Formats and prints per-batch progress lines for a set of meters."""

    def __init__(self, num_batches, meters, prefix=""):
        self.batch_fmtstr = self._get_batch_fmtstr(num_batches)
        self.meters = meters
        self.prefix = prefix

    def display(self, batch):
        """Emit ``prefix[batch/total]`` followed by each meter, tab separated."""
        line = [self.prefix + self.batch_fmtstr.format(batch)]
        for meter in self.meters:
            line.append(str(meter))
        print('\t'.join(line))

    def _get_batch_fmtstr(self, num_batches):
        # Field width equals the number of digits in the batch total.
        digits = len(str(num_batches // 1))
        field = '{:%dd}' % digits
        return '[%s/%s]' % (field, field.format(num_batches))
def accuracy(output, target, topk=(1,)):
    """Return top-k accuracies (in percent) for each requested k.

    Args:
        output: (batch, num_classes) tensor of class scores/logits.
        target: (batch,) tensor of ground-truth class indices.
        topk: tuple of k values to evaluate.

    Returns:
        List of 1-element tensors, one per k in *topk*.
    """
    with torch.no_grad():
        maxk = max(topk)
        batch_size = target.size(0)
        _, pred = output.topk(maxk, 1, True, True)
        pred = pred.t()
        correct = pred.eq(target.view(1, -1).expand_as(pred))
        res = []
        for k in topk:
            # Use reshape instead of view: `correct` is laid out
            # non-contiguously (transposed/expanded operands), and
            # Tensor.view requires stride-compatible input on modern PyTorch.
            correct_k = correct[:k].reshape(-1).float().sum(0, keepdim=True)
            res.append(correct_k.mul_(100.0 / batch_size))
        return res
if __name__ == '__main__':
    # Script entry point: run the training/pruning pipeline only when
    # executed directly, not when this module is imported.
    main()
f71dd03a0673407f061dc6e0310f017e116e67c7 | 22,135 | py | Python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_02_01/aio/operations/_express_route_circuit_authorizations_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 3 | 2020-06-23T02:25:27.000Z | 2021-09-07T18:48:11.000Z | sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_02_01/aio/operations/_express_route_circuit_authorizations_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 510 | 2019-07-17T16:11:19.000Z | 2021-08-02T08:38:32.000Z | sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_02_01/aio/operations/_express_route_circuit_authorizations_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 5 | 2019-09-04T12:51:37.000Z | 2020-09-16T07:28:40.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ExpressRouteCircuitAuthorizationsOperations:
    """ExpressRouteCircuitAuthorizationsOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2019_02_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    async def _delete_initial(
        self,
        resource_group_name: str,
        circuit_name: str,
        authorization_name: str,
        **kwargs
    ) -> None:
        # Initial DELETE request of the long-running operation;
        # begin_delete drives the subsequent polling to completion.
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-02-01"

        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
            'authorizationName': self._serialize.url("authorization_name", authorization_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # Any status other than 200/202/204 is surfaced as an ARM error.
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})
    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/authorizations/{authorizationName}'}  # type: ignore

    async def begin_delete(
        self,
        resource_group_name: str,
        circuit_name: str,
        authorization_name: str,
        **kwargs
    ) -> AsyncLROPoller[None]:
        """Deletes the specified authorization from the specified express route circuit.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param circuit_name: The name of the express route circuit.
        :type circuit_name: str
        :param authorization_name: The name of the authorization.
        :type authorization_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
         False for no polling, or your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # When resuming from a continuation token the initial request is skipped.
        if cont_token is None:
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                circuit_name=circuit_name,
                authorization_name=authorization_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
            'authorizationName': self._serialize.url("authorization_name", authorization_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        # The delete LRO reports its final state via the Location header.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/authorizations/{authorizationName}'}  # type: ignore

    async def get(
        self,
        resource_group_name: str,
        circuit_name: str,
        authorization_name: str,
        **kwargs
    ) -> "_models.ExpressRouteCircuitAuthorization":
        """Gets the specified authorization from the specified express route circuit.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param circuit_name: The name of the express route circuit.
        :type circuit_name: str
        :param authorization_name: The name of the authorization.
        :type authorization_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ExpressRouteCircuitAuthorization, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2019_02_01.models.ExpressRouteCircuitAuthorization
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCircuitAuthorization"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-02-01"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
            'authorizationName': self._serialize.url("authorization_name", authorization_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ExpressRouteCircuitAuthorization', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/authorizations/{authorizationName}'}  # type: ignore

    async def _create_or_update_initial(
        self,
        resource_group_name: str,
        circuit_name: str,
        authorization_name: str,
        authorization_parameters: "_models.ExpressRouteCircuitAuthorization",
        **kwargs
    ) -> "_models.ExpressRouteCircuitAuthorization":
        # Initial PUT request of the long-running create-or-update operation;
        # begin_create_or_update drives the subsequent polling.
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCircuitAuthorization"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-02-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
            'authorizationName': self._serialize.url("authorization_name", authorization_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(authorization_parameters, 'ExpressRouteCircuitAuthorization')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        # Both 200 (updated) and 201 (created) carry the authorization payload.
        if response.status_code == 200:
            deserialized = self._deserialize('ExpressRouteCircuitAuthorization', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('ExpressRouteCircuitAuthorization', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/authorizations/{authorizationName}'}  # type: ignore

    async def begin_create_or_update(
        self,
        resource_group_name: str,
        circuit_name: str,
        authorization_name: str,
        authorization_parameters: "_models.ExpressRouteCircuitAuthorization",
        **kwargs
    ) -> AsyncLROPoller["_models.ExpressRouteCircuitAuthorization"]:
        """Creates or updates an authorization in the specified express route circuit.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param circuit_name: The name of the express route circuit.
        :type circuit_name: str
        :param authorization_name: The name of the authorization.
        :type authorization_name: str
        :param authorization_parameters: Parameters supplied to the create or update express route
         circuit authorization operation.
        :type authorization_parameters: ~azure.mgmt.network.v2019_02_01.models.ExpressRouteCircuitAuthorization
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
         False for no polling, or your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either ExpressRouteCircuitAuthorization or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2019_02_01.models.ExpressRouteCircuitAuthorization]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCircuitAuthorization"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # When resuming from a continuation token the initial request is skipped.
        if cont_token is None:
            raw_result = await self._create_or_update_initial(
                resource_group_name=resource_group_name,
                circuit_name=circuit_name,
                authorization_name=authorization_name,
                authorization_parameters=authorization_parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            deserialized = self._deserialize('ExpressRouteCircuitAuthorization', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
            'authorizationName': self._serialize.url("authorization_name", authorization_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        # The PUT LRO reports its final state via the Azure-AsyncOperation header.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/authorizations/{authorizationName}'}  # type: ignore

    def list(
        self,
        resource_group_name: str,
        circuit_name: str,
        **kwargs
    ) -> AsyncIterable["_models.AuthorizationListResult"]:
        """Gets all authorizations in an express route circuit.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param circuit_name: The name of the circuit.
        :type circuit_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either AuthorizationListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_02_01.models.AuthorizationListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.AuthorizationListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-02-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Subsequent pages are fetched via the service-provided nextLink.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            deserialized = self._deserialize('AuthorizationListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/authorizations'}  # type: ignore
| 51.476744 | 232 | 0.680416 |
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ExpressRouteCircuitAuthorizationsOperations:
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
circuit_name: str,
authorization_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None)
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-02-01"
url = self._delete_initial.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'authorizationName': self._serialize.url("authorization_name", authorization_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
header_parameters = {}
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/authorizations/{authorizationName}'}
    async def begin_delete(
        self,
        resource_group_name: str,
        circuit_name: str,
        authorization_name: str,
        **kwargs
    ) -> AsyncLROPoller[None]:
        """Deletes the specified authorization from the specified express route circuit.

        This is a long-running operation: the returned poller completes when the
        service has finished the deletion.

        :param resource_group_name: The name of the resource group.
        :param circuit_name: The name of the express route circuit.
        :param authorization_name: The name of the authorization.
        :keyword polling: True (default ARM polling), False (no polling), or a custom polling object.
        :keyword continuation_token: A continuation token to restart a poller from a saved state.
        :return: An instance of AsyncLROPoller that returns None on completion.
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)
        cls = kwargs.pop('cls', None)
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)
        if cont_token is None:
            # No saved state: issue the initial DELETE request now.
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                circuit_name=circuit_name,
                authorization_name=authorization_name,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # These kwargs were consumed by the initial call; they must not reach the poller.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # DELETE returns no body; only invoke the optional custom deserializer.
            if cls:
                return cls(pipeline_response, None, {})
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
            'authorizationName': self._serialize.url("authorization_name", authorization_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        # For delete, the final LRO state is observed via the 'location' header.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started operation from its continuation token.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/authorizations/{authorizationName}'}
    async def get(
        self,
        resource_group_name: str,
        circuit_name: str,
        authorization_name: str,
        **kwargs
    ) -> "_models.ExpressRouteCircuitAuthorization":
        """Gets the specified authorization from the specified express route circuit.

        :param resource_group_name: The name of the resource group.
        :param circuit_name: The name of the express route circuit.
        :param authorization_name: The name of the authorization.
        :return: ExpressRouteCircuitAuthorization, or the result of cls(response) if a
         custom ``cls`` callback was passed.
        :raises ~azure.core.exceptions.HttpResponseError: if the service returns a non-200 status.
        """
        cls = kwargs.pop('cls', None)
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-02-01"
        accept = "application/json"
        # Construct URL from the method's metadata template.
        url = self.get.metadata['url']
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
            'authorizationName': self._serialize.url("authorization_name", authorization_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct query parameters.
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers.
        header_parameters = {}
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('ExpressRouteCircuitAuthorization', pipeline_response)
        if cls:
            # Allow the caller to post-process/replace the deserialized result.
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/authorizations/{authorizationName}'}
    async def _create_or_update_initial(
        self,
        resource_group_name: str,
        circuit_name: str,
        authorization_name: str,
        authorization_parameters: "_models.ExpressRouteCircuitAuthorization",
        **kwargs
    ) -> "_models.ExpressRouteCircuitAuthorization":
        """Initial PUT request of the create-or-update long-running operation.

        Serializes ``authorization_parameters`` as the request body and returns the
        deserialized authorization from a 200 or 201 response.

        :raises ~azure.core.exceptions.HttpResponseError: for any other status code.
        """
        cls = kwargs.pop('cls', None)
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-02-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL from the method's metadata template.
        url = self._create_or_update_initial.metadata['url']
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
            'authorizationName': self._serialize.url("authorization_name", authorization_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct query parameters.
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers.
        header_parameters = {}
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        # Serialize the request body.
        body_content_kwargs = {}
        body_content = self._serialize.body(authorization_parameters, 'ExpressRouteCircuitAuthorization')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        # 200 (updated) and 201 (created) both carry the same payload shape.
        if response.status_code == 200:
            deserialized = self._deserialize('ExpressRouteCircuitAuthorization', pipeline_response)
        if response.status_code == 201:
            deserialized = self._deserialize('ExpressRouteCircuitAuthorization', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/authorizations/{authorizationName}'}
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        circuit_name: str,
        authorization_name: str,
        authorization_parameters: "_models.ExpressRouteCircuitAuthorization",
        **kwargs
    ) -> AsyncLROPoller["_models.ExpressRouteCircuitAuthorization"]:
        """Creates or updates an authorization in the specified express route circuit.

        This is a long-running operation; the poller's final result is the
        created or updated ExpressRouteCircuitAuthorization.

        :param resource_group_name: The name of the resource group.
        :param circuit_name: The name of the express route circuit.
        :param authorization_name: The name of the authorization.
        :param authorization_parameters: Parameters supplied to the create-or-update operation.
        :keyword polling: True (default ARM polling), False (no polling), or a custom polling object.
        :keyword continuation_token: A continuation token to restart a poller from a saved state.
        :return: An AsyncLROPoller yielding ExpressRouteCircuitAuthorization.
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)
        cls = kwargs.pop('cls', None)
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)
        if cont_token is None:
            # No saved state: issue the initial PUT request now.
            raw_result = await self._create_or_update_initial(
                resource_group_name=resource_group_name,
                circuit_name=circuit_name,
                authorization_name=authorization_name,
                authorization_parameters=authorization_parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # Consumed by the initial call; must not reach the poller.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the final response body into the model type.
            deserialized = self._deserialize('ExpressRouteCircuitAuthorization', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
            'authorizationName': self._serialize.url("authorization_name", authorization_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        # Final state for create/update is reported via the Azure-AsyncOperation header.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started operation from its continuation token.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/authorizations/{authorizationName}'}
    def list(
        self,
        resource_group_name: str,
        circuit_name: str,
        **kwargs
    ) -> AsyncIterable["_models.AuthorizationListResult"]:
        """Gets all authorizations in an express route circuit.

        :param resource_group_name: The name of the resource group.
        :param circuit_name: The name of the circuit.
        :return: An iterator-like instance of AuthorizationListResult pages.
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        cls = kwargs.pop('cls', None)
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-02-01"
        accept = "application/json"
        def prepare_request(next_link=None):
            # First page is built from the URL template; subsequent pages
            # follow the service-provided next_link verbatim.
            header_parameters = {}
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                url = self.list.metadata['url']
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        async def extract_data(pipeline_response):
            # Split one page into (link to the next page, async iterable of items).
            deserialized = self._deserialize('AuthorizationListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            # Fetch a single page and validate the HTTP status.
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/authorizations'}
| true | true |
f71dd3ed81987c22b8b1889c1956daf31f9b46b1 | 275 | py | Python | electrum_blk/plugins/labels/cmdline.py | nedcloud-blackchain/electrum-blk | bf1992ecac9fffcc52e229e249da400b8751324e | [
"MIT"
] | 2 | 2022-03-09T18:21:02.000Z | 2022-03-13T13:27:07.000Z | electrum_blk/plugins/labels/cmdline.py | nedcloud-blackchain/electrum-blk | bf1992ecac9fffcc52e229e249da400b8751324e | [
"MIT"
] | null | null | null | electrum_blk/plugins/labels/cmdline.py | nedcloud-blackchain/electrum-blk | bf1992ecac9fffcc52e229e249da400b8751324e | [
"MIT"
] | 1 | 2022-02-21T07:38:29.000Z | 2022-02-21T07:38:29.000Z | from .labels import LabelsPlugin
from electrum_blk.plugin import hook
class Plugin(LabelsPlugin):
    """Labels plugin entry point for the command-line client."""
    @hook
    def load_wallet(self, wallet, window):
        # Registered via @hook: start syncing labels as soon as a wallet is
        # loaded. `window` is unused in this CLI variant.
        self.start_wallet(wallet)
    def on_pulled(self, wallet):
        # Callback invoked after labels have been fetched from the server.
        self.logger.info('labels pulled from server')
| 22.916667 | 53 | 0.716364 | from .labels import LabelsPlugin
from electrum_blk.plugin import hook
class Plugin(LabelsPlugin):
@hook
def load_wallet(self, wallet, window):
self.start_wallet(wallet)
def on_pulled(self, wallet):
self.logger.info('labels pulled from server')
| true | true |
f71dd4587a751d52ef0eae8febb345a0a7c738b5 | 2,851 | py | Python | test/test_series_actors_data.py | h3llrais3r/tvdbapi-v2-client | 1210df9dd5869ccc5b63149b1b80630310a14f40 | [
"MIT"
] | 2 | 2021-01-24T07:45:22.000Z | 2021-11-15T11:29:25.000Z | test/test_series_actors_data.py | h3llrais3r/tvdb_api_v2 | 1210df9dd5869ccc5b63149b1b80630310a14f40 | [
"MIT"
] | null | null | null | test/test_series_actors_data.py | h3llrais3r/tvdb_api_v2 | 1210df9dd5869ccc5b63149b1b80630310a14f40 | [
"MIT"
] | 1 | 2020-05-07T10:16:15.000Z | 2020-05-07T10:16:15.000Z | # coding: utf-8
"""
TheTVDB API v2
API v3 targets v2 functionality with a few minor additions. The API is accessible via https://api.thetvdb.com and provides the following REST endpoints in JSON format. How to use this API documentation ---------------- You may browse the API routes without authentication, but if you wish to send requests to the API and see response data, then you must authenticate. 1. Obtain a JWT token by `POST`ing to the `/login` route in the `Authentication` section with your API key and credentials. 1. Paste the JWT token from the response into the \"JWT Token\" field at the top of the page and click the 'Add Token' button. You will now be able to use the remaining routes to send requests to the API and get a response. Language Selection ---------------- Language selection is done via the `Accept-Language` header. At the moment, you may only pass one language abbreviation in the header at a time. Valid language abbreviations can be found at the `/languages` route.. Authentication ---------------- Authentication to use the API is similar to the How-to section above. Users must `POST` to the `/login` route with their API key and credentials in the following format in order to obtain a JWT token. `{\"apikey\":\"APIKEY\",\"username\":\"USERNAME\",\"userkey\":\"USERKEY\"}` Note that the username and key are ONLY required for the `/user` routes. The user's key is labled `Account Identifier` in the account section of the main site. The token is then used in all subsequent requests by providing it in the `Authorization` header. The header will look like: `Authorization: Bearer <yourJWTtoken>`. Currently, the token expires after 24 hours. You can `GET` the `/refresh_token` route to extend that expiration date. Versioning ---------------- You may request a different version of the API by including an `Accept` header in your request with the following format: `Accept:application/vnd.thetvdb.v$VERSION`. 
This documentation automatically uses the version seen at the top and bottom of the page. # noqa: E501
OpenAPI spec version: 3.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import tvdb_api
from tvdb_api.models.series_actors_data import SeriesActorsData # noqa: E501
from tvdb_api.rest import ApiException
class TestSeriesActorsData(unittest.TestCase):
    """SeriesActorsData unit test stubs"""
    def setUp(self):
        # No fixtures required for these generated stub tests.
        pass
    def tearDown(self):
        # Nothing to clean up.
        pass
    def testSeriesActorsData(self):
        """Test SeriesActorsData"""
        # FIXME: construct object with mandatory attributes with example values
        # model = tvdb_api.models.series_actors_data.SeriesActorsData()  # noqa: E501
        pass
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| 69.536585 | 2,040 | 0.730972 |
from __future__ import absolute_import
import unittest
import tvdb_api
from tvdb_api.models.series_actors_data import SeriesActorsData
from tvdb_api.rest import ApiException
class TestSeriesActorsData(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testSeriesActorsData(self):
s
if __name__ == '__main__':
unittest.main()
| true | true |
f71dd4f671262d90856ccf1d5b3556ce316e02a9 | 4,928 | py | Python | eda.py | Lim-Guowei/RUL | e23e97a373df73abc2fde14ce070dcb5230a79c2 | [
"MIT"
] | null | null | null | eda.py | Lim-Guowei/RUL | e23e97a373df73abc2fde14ce070dcb5230a79c2 | [
"MIT"
] | null | null | null | eda.py | Lim-Guowei/RUL | e23e97a373df73abc2fde14ce070dcb5230a79c2 | [
"MIT"
] | null | null | null | import os
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from dataloader import dataloader
import seaborn as sns
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split
# Display floats with six decimal places in all pandas output.
pd.set_option('display.float_format', '{:.6f}'.format)
def countNullPercent(dataframe):
    """Print the percentage of null values per column, highest first.

    :param dataframe: pandas DataFrame to inspect
    :return: None (output goes to stdout as "<column>: <percent>")
    """
    null_pct = {
        col: round(dataframe[col].isnull().sum() / len(dataframe[col]) * 100, 2)
        for col in dataframe
    }
    # Stable sort: columns with equal percentages keep their original order.
    for col, pct in sorted(null_pct.items(), key=lambda kv: kv[1], reverse=True):
        print("{}: {}".format(col, pct))
    return
def countUniqueVal(dataframe, column):
    """Print the value distribution for each of the given columns.

    :param dataframe: pandas DataFrame to inspect
    :param column: iterable of column names to report on
    :return: None (output goes to stdout)
    """
    position = 0
    for col_name in column:
        print("#{} - {}".format(position, col_name))
        print(dataframe[col_name].value_counts())
        print("\n")
        position += 1
    return
def plot_by_unit(dataframe, unit):
    """Save a correlation-matrix heatmap for one fleet unit.

    Unit numbers can be obtained by inspecting the "unit" column of the
    dataframe. The figure is written to "corr_plot_unit_<unit>.png".

    :param dataframe: development dataframe containing a "unit" column
    :param unit: unit identifier to filter on (e.g. 1.0)
    :return: None
    """
    df_unit = dataframe[dataframe["unit"] == unit]
    print(df_unit)
    ### Correlation plot
    fig, ax = plt.subplots(figsize=(20, 15))
    color = plt.get_cmap('inferno')  # default colormap
    color.set_bad('lightblue')       # color used for NaN cells
    sns.heatmap(data=df_unit.corr(), annot=False, cmap=color, ax=ax)
    ax.set_title("Correlation matrix for unit {}".format(unit), fontdict={'fontsize': 16})
    fig.savefig("corr_plot_unit_{}.png".format(unit))
    # Close the figure: eda() calls this once per unit, and leaving figures
    # open both leaks memory and leaves a stale "current" figure for later
    # pyplot-state calls (e.g. rank_feature_importance).
    plt.close(fig)
    return
def rank_feature_importance(dataframe):
    """Rank features by random-forest importance and save a horizontal bar chart.

    Writes "feature_importance.png" to the working directory.

    NOTE(review): a *classifier* is fit on the continuous "RUL" target; a
    regressor may be more appropriate -- confirm intent.

    :param dataframe: dataframe with feature columns plus a "RUL" target column
    :return: None
    """
    Y = dataframe["RUL"]
    X = dataframe.drop(["RUL"], axis=1)
    # Label the bars with the *feature* columns only. The original used
    # dataframe.columns (which still contains "RUL"), misaligning every label
    # from the target column's position onward.
    feat_labels = X.columns.values
    X_train, X_test, Y_train, Y_test = train_test_split(X, Y, random_state=42, shuffle=True, test_size=0.2)
    # Create and train a random forest classifier.
    clf = RandomForestClassifier(n_estimators=100, random_state=0, n_jobs=-1)
    clf.fit(X_train, Y_train)
    # Plot feature importance on a dedicated figure so we do not draw onto
    # whatever figure happens to be current (e.g. one left over from plot_by_unit).
    importances = clf.feature_importances_
    indices = np.argsort(importances)
    fig, ax = plt.subplots(figsize=(20, 15))
    ax.set_title('Feature Importances', fontdict={'fontsize': 16})
    ax.barh(range(len(indices)), importances[indices], color='b', align='center')
    ax.set_yticks(range(len(indices)))
    ax.set_yticklabels([feat_labels[i] for i in indices])
    ax.set_xlabel('Relative Importance')
    fig.savefig("feature_importance.png")
    plt.close(fig)
    return
def add_lag_features(dataframe):
    """Add 1/3/5-step lag columns of "RUL" and save a line plot of them.

    The lag columns are added in place on the caller's dataframe; the first
    five rows (whose lags are NaN) are dropped only from the local view used
    for plotting. Writes "lag_on_RUL.png" to the working directory.

    :param dataframe: dataframe containing a "RUL" column
    :return: None
    """
    dataframe["RUL_lag1"] = dataframe["RUL"].shift(1)
    dataframe["RUL_lag3"] = dataframe["RUL"].shift(3)
    dataframe["RUL_lag5"] = dataframe["RUL"].shift(5)
    dataframe = dataframe.iloc[5::]  # Discard rows whose lag values are NaN
    # The original listed "RUL_lag1" twice in the plotted columns; plot each
    # lag series exactly once.
    fig = dataframe.plot(y=["RUL", "RUL_lag1", "RUL_lag3", "RUL_lag5"],
                         kind="line",
                         title="Lag on RUL variable",
                         xlabel="index",
                         use_index=True,
                         linewidth=1.0,
                         alpha=0.7,
                         xlim=(0, dataframe.index.max()),
                         figsize=(20, 15)
                         ).get_figure()
    fig.savefig("lag_on_RUL.png")
    plt.close(fig)  # release the figure after saving
    return
def eda(filename):
    """Run exploratory data analysis on one N-CMAPSS dataset file.

    Loads the dev/test splits, reports null percentages and summary
    statistics, drops constant/redundant columns, and writes several
    diagnostic plots and CSV summaries to the working directory.

    :param filename: name of the .h5 dataset file understood by dataloader()
    """
    df_dev, df_test = dataloader(filename)
    column_name = df_dev.columns.tolist()  # NOTE(review): unused; kept for reference
    ### Check for null or zeroes
    countNullPercent(df_dev) # No null values in dataframe
    countNullPercent(df_test) # No null values in dataframe
    df_dev.describe().to_csv("df_dev_description.csv")
    df_test.describe().to_csv("df_test_description.csv")
    # Remove columns containing all zeroes
    # Remove "cycle" as "RUL" is sufficient as target variable
    df_dev = df_dev.drop(columns=["fan_eff_mod", "fan_flow_mod", "LPC_eff_mod", "LPC_flow_mod", "HPC_eff_mod", "HPC_flow_mod", "HPT_flow_mod", "LPT_eff_mod", "LPT_flow_mod", "cycle"])
    df_test = df_test.drop(columns=["fan_eff_mod", "fan_flow_mod", "LPC_eff_mod", "LPC_flow_mod", "HPC_eff_mod", "HPC_flow_mod", "HPT_flow_mod", "LPT_eff_mod", "LPT_flow_mod", "cycle"])
    ### Identify categorical features as "unit", "Fc", "hs"
    countUniqueVal(df_dev, ["unit", "Fc", "hs"])
    ### Generate correlation matrix plot for each unit in fleet
    plot_by_unit(df_dev, 1.0)
    plot_by_unit(df_dev, 2.0)
    plot_by_unit(df_dev, 3.0)
    plot_by_unit(df_dev, 4.0)
    plot_by_unit(df_dev, 5.0)
    plot_by_unit(df_dev, 6.0)
    # Rank feature importance using random forest classifier
    rank_feature_importance(df_dev)
    add_lag_features(df_dev)
    return
# Script entry point: run the EDA pipeline on the default dataset file.
if __name__ == "__main__":
    eda("N-CMAPSS_DS01-005.h5")
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from dataloader import dataloader
import seaborn as sns
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split
pd.set_option('display.float_format', '{:.6f}'.format)
def countNullPercent(dataframe):
nullCollect = {}
for column in dataframe:
rowCount = len(dataframe[column])
nullCount = dataframe[column].isnull().sum()
percentNull = round((nullCount/rowCount)*100, 2)
nullCollect.update({column: percentNull})
for key, value in sorted(nullCollect.items(), key=lambda item: item[1], reverse=True):
print("{}: {}".format(key, value))
return
def countUniqueVal(dataframe, column):
for count, name in enumerate(column):
print("#{} - {}".format(count, name))
print(dataframe[name].value_counts())
print("\n")
return
def plot_by_unit(dataframe, unit):
df_unit = dataframe[dataframe["unit"] == unit]
print(df_unit)
color = plt.get_cmap('inferno')
color.set_bad('lightblue')
corr_plot = sns.heatmap(data=df_unit.corr(), annot=False, cmap=color)
plt.title("Correlation matrix for unit {}".format(unit), fontdict={'fontsize': 16})
plt.savefig("corr_plot_unit_{}.png".format(unit))
return
def rank_feature_importance(dataframe):
feat_labels = dataframe.columns.values
Y = dataframe["RUL"]
X = dataframe.drop(["RUL"], axis=1)
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, random_state=42, shuffle=True, test_size=0.2)
clf = RandomForestClassifier(n_estimators=100, random_state=0, n_jobs=-1)
clf.fit(X_train, Y_train)
importances = clf.feature_importances_
indices = np.argsort(importances)
plt.title('Feature Importances', fontdict={'fontsize': 16})
plt.barh(range(len(indices)), importances[indices], color='b', align='center')
plt.yticks(range(len(indices)), [feat_labels[i] for i in indices])
plt.xlabel('Relative Importance')
plt.savefig("feature_importance.png")
return
def add_lag_features(dataframe):
dataframe["RUL_lag1"] = dataframe["RUL"].shift(1)
dataframe["RUL_lag3"] = dataframe["RUL"].shift(3)
dataframe["RUL_lag5"] = dataframe["RUL"].shift(5)
dataframe = dataframe.iloc[5::]
fig = dataframe.plot(y=["RUL", "RUL_lag1", "RUL_lag1", "RUL_lag3", "RUL_lag5"],
kind="line",
title="Lag on RUL variable",
xlabel="index",
use_index=True,
linewidth=1.0,
alpha=0.7,
xlim=(0, dataframe.index.max()),
figsize=(20, 15)
).get_figure()
fig.savefig("lag_on_RUL.png")
return
def eda(filename):
df_dev, df_test = dataloader(filename)
column_name = df_dev.columns.tolist()
t(df_test)
df_dev.describe().to_csv("df_dev_description.csv")
df_test.describe().to_csv("df_test_description.csv")
df_dev = df_dev.drop(columns=["fan_eff_mod", "fan_flow_mod", "LPC_eff_mod", "LPC_flow_mod", "HPC_eff_mod", "HPC_flow_mod", "HPT_flow_mod", "LPT_eff_mod", "LPT_flow_mod", "cycle"])
df_test = df_test.drop(columns=["fan_eff_mod", "fan_flow_mod", "LPC_eff_mod", "LPC_flow_mod", "HPC_eff_mod", "HPC_flow_mod", "HPT_flow_mod", "LPT_eff_mod", "LPT_flow_mod", "cycle"])
f_dev, 6.0)
rank_feature_importance(df_dev)
add_lag_features(df_dev)
return
if __name__ == "__main__":
eda("N-CMAPSS_DS01-005.h5") | true | true |
f71dd531b8bf168b0db051cb85560196dc6c3184 | 749 | py | Python | jakso_ml/training_data/rotator.py | JaksoSoftware/jakso-ml | 5720ea557ca2fcf9ae16e329c198acd8e31258c4 | [
"MIT"
] | null | null | null | jakso_ml/training_data/rotator.py | JaksoSoftware/jakso-ml | 5720ea557ca2fcf9ae16e329c198acd8e31258c4 | [
"MIT"
] | 3 | 2020-09-25T18:40:52.000Z | 2021-08-25T14:44:30.000Z | jakso_ml/training_data/rotator.py | JaksoSoftware/jakso-ml | 5720ea557ca2fcf9ae16e329c198acd8e31258c4 | [
"MIT"
] | null | null | null | import random, copy
import cv2 as cv
from .augmenter import Augmenter
class Rotator(Augmenter):
'''
Augmenter that rotates the SampleImages randomly based on
the min_angle and max_angle parameters.
'''
def __init__(
self,
min_angle,
max_angle,
**kwargs
):
super().__init__(**kwargs)
self.min_angle = min_angle
self.max_angle = max_angle
def augment(self, sample):
im_h, im_w, _ = sample.image.shape
angle = random.uniform(self.min_angle, self.max_angle)
rotation_matrix = cv.getRotationMatrix2D(sample.roi_center, angle, 1)
rotated = cv.warpAffine(sample.image, rotation_matrix, (im_w, im_h))
sample_copy = copy.copy(sample)
sample_copy.image = rotated
return sample_copy
| 23.40625 | 73 | 0.706275 | import random, copy
import cv2 as cv
from .augmenter import Augmenter
class Rotator(Augmenter):
def __init__(
self,
min_angle,
max_angle,
**kwargs
):
super().__init__(**kwargs)
self.min_angle = min_angle
self.max_angle = max_angle
def augment(self, sample):
im_h, im_w, _ = sample.image.shape
angle = random.uniform(self.min_angle, self.max_angle)
rotation_matrix = cv.getRotationMatrix2D(sample.roi_center, angle, 1)
rotated = cv.warpAffine(sample.image, rotation_matrix, (im_w, im_h))
sample_copy = copy.copy(sample)
sample_copy.image = rotated
return sample_copy
| true | true |
f71dd53fcf4343cdc5ff6b6b84e0462547c02cfd | 3,842 | py | Python | openGaussBase/testcase/TOOLS/SERVER_TOOLS/gs_dump/Opengauss_Function_Tools_gs_dump_Case0043.py | opengauss-mirror/Yat | aef107a8304b94e5d99b4f1f36eb46755eb8919e | [
"MulanPSL-1.0"
] | null | null | null | openGaussBase/testcase/TOOLS/SERVER_TOOLS/gs_dump/Opengauss_Function_Tools_gs_dump_Case0043.py | opengauss-mirror/Yat | aef107a8304b94e5d99b4f1f36eb46755eb8919e | [
"MulanPSL-1.0"
] | null | null | null | openGaussBase/testcase/TOOLS/SERVER_TOOLS/gs_dump/Opengauss_Function_Tools_gs_dump_Case0043.py | opengauss-mirror/Yat | aef107a8304b94e5d99b4f1f36eb46755eb8919e | [
"MulanPSL-1.0"
] | null | null | null | """
Copyright (c) 2022 Huawei Technologies Co.,Ltd.
openGauss is licensed under Mulan PSL v2.
You can use this software according to the terms and conditions of the Mulan PSL v2.
You may obtain a copy of Mulan PSL v2 at:
http://license.coscl.org.cn/MulanPSL2
THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
"""
"""
Case Type : 服务端工具
Case Name : 导出一个压缩比级别不在范围内自定义格式的文件
Description :
1.连接数据库:
2.创建数据库
3.切换到数据库test
4.创建表并插入数据
5.退出数据库
6.source环境变量
7.导出一个压缩比级别不在范围内自定义格式的文件
8.连接数据库,清理环境
Expect :
1.数据库连接成功
2.创建数据库test成功
3.切换到数据库test
4.创建表并插入数据成功
5.退出数据库
6.source环境变量
7.导出失败
8.清理环境成功
History :
"""
import unittest
from yat.test import Node
from yat.test import macro
from testcase.utils.Constant import Constant
from testcase.utils.Logger import Logger
LOG = Logger()
class Tools(unittest.TestCase):
    """gs_dump server-tool test: exporting a custom-format dump with an
    out-of-range compression level (-Z 10; valid range is 0-9) must fail."""
    def setUp(self):
        # Log the test-case start marker and prepare the database-user node.
        LOG.info('-----Opengauss_Function_Tools_gs_dump_Case0043start-----')
        self.dbuser_node = Node('dbuser')
        self.constant = Constant()
    def test_server_tools(self):
        # Step 1-2: connect and (re)create the "test" database.
        LOG.info('------------------连接数据库并创建数据库-----------------')
        sql_cmd1 = ''' drop database if exists test;
            create database test;
            '''
        excute_cmd1 = f''' source {macro.DB_ENV_PATH} ;
            gsql -d {self.dbuser_node.db_name}\
            -p {self.dbuser_node.db_port} -c "{sql_cmd1}"
            '''
        LOG.info(excute_cmd1)
        msg1 = self.dbuser_node.sh(excute_cmd1).result()
        LOG.info(msg1)
        self.assertIn(self.constant.CREATE_DATABASE_SUCCESS, msg1)
        # Step 3-4: create three tables in "test" and insert sample rows.
        LOG.info('--------在创建好的数据库中创建表并插入数据--------')
        sql_cmd2 = '''
            drop table if exists t1;
            drop table if exists t2;
            drop table if exists t3;
            create table t1 (id int);
            insert into t1 values(1),(2),(3);
            create table t2 (id int);
            insert into t2 values(8),(2),(5);
            create table t3 (id int);
            insert into t3 values(9),(6),(3);
            '''
        excute_cmd2 = f'''source {macro.DB_ENV_PATH} ;
            gsql -d test -p {self.dbuser_node.db_port} -c "{sql_cmd2}"
            '''
        LOG.info(excute_cmd2)
        msg2 = self.dbuser_node.sh(excute_cmd2).result()
        LOG.info(msg2)
        self.assertIn(self.constant.INSERT_SUCCESS_MSG, msg2)
        # Step 7: gs_dump with -Z 10 must be rejected with a range error.
        LOG.info('-------导出一个压缩比级别不在范围内自定义格式的文件------')
        excute_cmd3 = f'''source {macro.DB_ENV_PATH} ;
            gs_dump -p {self.dbuser_node.db_port} test -F c\
            -f {macro.DB_INSTANCE_PATH}/dump_qm -Z 10;
            '''
        LOG.info(excute_cmd3)
        msg3 = self.dbuser_node.sh(excute_cmd3).result()
        LOG.info(msg3)
        self.assertIn('gs_dump: options -Z/--compress should be set between \
0 and 9', msg3)
    def tearDown(self):
        # Cleanup: drop the test database and remove any dump file produced.
        LOG.info('-----------------清理环境:删除数据库-----------------')
        sql_cmd5 = ''' drop database if exists test; '''
        excute_cmd5 = f'''source {macro.DB_ENV_PATH} ;
            gsql -d {self.dbuser_node.db_name}\
            -p {self.dbuser_node.db_port} -c "{sql_cmd5}";
            rm -rf {macro.DB_INSTANCE_PATH}/dump_qm;
            '''
        LOG.info(excute_cmd5)
        msg5 = self.dbuser_node.sh(excute_cmd5).result()
        LOG.info(msg5)
        LOG.info('----Opengauss_Function_Tools_gs_dump_Case0043finish----')
| 35.247706 | 84 | 0.560906 |
import unittest
from yat.test import Node
from yat.test import macro
from testcase.utils.Constant import Constant
from testcase.utils.Logger import Logger
LOG = Logger()
class Tools(unittest.TestCase):
def setUp(self):
LOG.info('-----Opengauss_Function_Tools_gs_dump_Case0043start-----')
self.dbuser_node = Node('dbuser')
self.constant = Constant()
def test_server_tools(self):
LOG.info('------------------连接数据库并创建数据库-----------------')
sql_cmd1 = ''' drop database if exists test;
create database test;
'''
excute_cmd1 = f''' source {macro.DB_ENV_PATH} ;
gsql -d {self.dbuser_node.db_name}\
-p {self.dbuser_node.db_port} -c "{sql_cmd1}"
'''
LOG.info(excute_cmd1)
msg1 = self.dbuser_node.sh(excute_cmd1).result()
LOG.info(msg1)
self.assertIn(self.constant.CREATE_DATABASE_SUCCESS, msg1)
LOG.info('--------在创建好的数据库中创建表并插入数据--------')
sql_cmd2 = '''
drop table if exists t1;
drop table if exists t2;
drop table if exists t3;
create table t1 (id int);
insert into t1 values(1),(2),(3);
create table t2 (id int);
insert into t2 values(8),(2),(5);
create table t3 (id int);
insert into t3 values(9),(6),(3);
'''
excute_cmd2 = f'''source {macro.DB_ENV_PATH} ;
gsql -d test -p {self.dbuser_node.db_port} -c "{sql_cmd2}"
'''
LOG.info(excute_cmd2)
msg2 = self.dbuser_node.sh(excute_cmd2).result()
LOG.info(msg2)
self.assertIn(self.constant.INSERT_SUCCESS_MSG, msg2)
LOG.info('-------导出一个压缩比级别不在范围内自定义格式的文件------')
excute_cmd3 = f'''source {macro.DB_ENV_PATH} ;
gs_dump -p {self.dbuser_node.db_port} test -F c\
-f {macro.DB_INSTANCE_PATH}/dump_qm -Z 10;
'''
LOG.info(excute_cmd3)
msg3 = self.dbuser_node.sh(excute_cmd3).result()
LOG.info(msg3)
self.assertIn('gs_dump: options -Z/--compress should be set between \
0 and 9', msg3)
def tearDown(self):
LOG.info('-----------------清理环境:删除数据库-----------------')
sql_cmd5 = ''' drop database if exists test; '''
excute_cmd5 = f'''source {macro.DB_ENV_PATH} ;
gsql -d {self.dbuser_node.db_name}\
-p {self.dbuser_node.db_port} -c "{sql_cmd5}";
rm -rf {macro.DB_INSTANCE_PATH}/dump_qm;
'''
LOG.info(excute_cmd5)
msg5 = self.dbuser_node.sh(excute_cmd5).result()
LOG.info(msg5)
LOG.info('----Opengauss_Function_Tools_gs_dump_Case0043finish----')
| true | true |
f71dd542810c26c05012c9e34bb38d5dd0fcedfb | 1,639 | py | Python | pytopojson/bbox.py | fferrin/pytopojson | 5128136c9502f4e29330b6cc7e524641bff5f95e | [
"0BSD"
] | 11 | 2019-11-15T23:22:52.000Z | 2022-01-22T20:46:30.000Z | pytopojson/bbox.py | fferrin/topojson | 7f90e497d2b54798f51480181c81c330770cb401 | [
"0BSD"
] | 8 | 2019-11-08T03:03:29.000Z | 2022-02-28T09:52:09.000Z | pytopojson/bbox.py | fferrin/topojson | 7f90e497d2b54798f51480181c81c330770cb401 | [
"0BSD"
] | 2 | 2020-07-09T06:45:31.000Z | 2021-03-22T13:38:35.000Z | import math
from pytopojson import transform
class BBox(object):
    """Compute the bounding box ``[x_min, y_min, x_max, y_max]`` of a topology."""

    def __init__(self):
        self.transform = transform.Transform()
        self._reset()

    def _reset(self):
        # Start from opposite infinities so any real coordinate replaces them.
        self.x_0 = math.inf
        self.y_0 = self.x_0
        self.x_1 = -self.x_0
        self.y_1 = -self.x_0
        self.t = None

    def __call__(self, topology, *args, **kwargs):
        """Return the bounding box of *topology* as ``[x_0, y_0, x_1, y_1]``.

        :param topology: topology dict with "arcs", "objects" and an optional
            "transform" entry.
        """
        # Reset the accumulated extremes so the same instance can be reused;
        # previously a second call kept the first call's extremes and returned
        # a combined bounding box.
        self._reset()
        self.t = self.transform(topology.get("transform", None))
        for arc in topology["arcs"]:
            i = 0
            n = len(arc)
            while i < n:
                # Each arc point is passed through the transform together with
                # its index (presumably so delta-encoded arcs are decoded
                # cumulatively by Transform -- see that class).
                p = self.t(arc[i], i)
                if p[0] < self.x_0:
                    self.x_0 = p[0]
                if self.x_1 < p[0]:
                    self.x_1 = p[0]
                if p[1] < self.y_0:
                    self.y_0 = p[1]
                if self.y_1 < p[1]:
                    self.y_1 = p[1]
                i += 1
        for k in topology["objects"]:
            self.bbox_geometry(topology["objects"][k])
        return [self.x_0, self.y_0, self.x_1, self.y_1]

    def bbox_point(self, p):
        # Expand the current extremes to include the (transformed) point *p*.
        p = self.t(p)
        if p[0] < self.x_0:
            self.x_0 = p[0]
        if self.x_1 < p[0]:
            self.x_1 = p[0]
        if p[1] < self.y_0:
            self.y_0 = p[1]
        if self.y_1 < p[1]:
            self.y_1 = p[1]

    def bbox_geometry(self, o):
        # Point coordinates are stored inline (not in "arcs"), so only
        # Point/MultiPoint -- possibly nested in GeometryCollection --
        # contribute here; arc-based geometries were handled in __call__.
        if o["type"] == "GeometryCollection":
            for geom in o["geometries"]:
                self.bbox_geometry(geom)
        elif o["type"] == "Point":
            self.bbox_point(o["coordinates"])
        elif o["type"] == "MultiPoint":
            for coord in o["coordinates"]:
                self.bbox_point(coord)
| 27.779661 | 64 | 0.456986 | import math
from pytopojson import transform
class BBox(object):
    """Compute the bounding box ``[x_min, y_min, x_max, y_max]`` of a
    TopoJSON topology, mirroring topojson's ``bbox`` function.
    """
    def __init__(self):
        self.transform = transform.Transform()
        # Bounds start inverted (min = +inf, max = -inf) so any real
        # coordinate expands them.
        self.x_0 = math.inf
        self.y_0 = self.x_0
        self.x_1 = -self.x_0
        self.y_1 = -self.x_0
        # Per-topology transform callable, created in __call__.
        self.t = None
    def __call__(self, topology, *args, **kwargs):
        """Return ``[x_min, y_min, x_max, y_max]`` for ``topology``."""
        self.t = self.transform(topology.get("transform", None))
        for arc in topology["arcs"]:
            i = 0
            n = len(arc)
            while i < n:
                # The point index is forwarded so quantized
                # (delta-encoded) arcs are decoded cumulatively.
                p = self.t(arc[i], i)
                if p[0] < self.x_0:
                    self.x_0 = p[0]
                if self.x_1 < p[0]:
                    self.x_1 = p[0]
                if p[1] < self.y_0:
                    self.y_0 = p[1]
                if self.y_1 < p[1]:
                    self.y_1 = p[1]
                i += 1
        for k in topology["objects"]:
            self.bbox_geometry(topology["objects"][k])
        return [self.x_0, self.y_0, self.x_1, self.y_1]
    def bbox_point(self, p):
        """Fold a single point coordinate into the running bounds."""
        p = self.t(p)
        if p[0] < self.x_0:
            self.x_0 = p[0]
        if self.x_1 < p[0]:
            self.x_1 = p[0]
        if p[1] < self.y_0:
            self.y_0 = p[1]
        if self.y_1 < p[1]:
            self.y_1 = p[1]
    def bbox_geometry(self, o):
        """Recursively fold a geometry object's points into the bounds."""
        if o["type"] == "GeometryCollection":
            for geom in o["geometries"]:
                self.bbox_geometry(geom)
        elif o["type"] == "Point":
            self.bbox_point(o["coordinates"])
        elif o["type"] == "MultiPoint":
            for coord in o["coordinates"]:
                self.bbox_point(coord)
| true | true |
f71dd7ce45e25814001cfdf02ef0e387adca4efa | 3,542 | py | Python | Tools/fastlane-templates.py | fredyshox/AppVideoFramer | 0e43f2828d2e3737451a0cf1ec81e6840796ac30 | [
"MIT"
] | 12 | 2020-08-18T16:47:35.000Z | 2021-07-26T20:05:30.000Z | Tools/fastlane-templates.py | fredyshox/ScreenFramer | 0e43f2828d2e3737451a0cf1ec81e6840796ac30 | [
"MIT"
] | 5 | 2020-08-18T13:50:39.000Z | 2020-08-31T12:41:34.000Z | Tools/fastlane-templates.py | fredyshox/AppVideoFramer | 0e43f2828d2e3737451a0cf1ec81e6840796ac30 | [
"MIT"
] | 1 | 2021-05-30T23:28:04.000Z | 2021-05-30T23:28:04.000Z | #!/usr/bin/env python3
#
# Retrieve templates from fastlane/frameit
#
import sys
import os
from os import path
from shutil import copyfile
from tempfile import gettempdir
import re
import json
import cv2
import numpy as np
from common import sanitize_color, sanitize_device_name, sanitize_device_key, apply_default_color
# URL to frameit-frames repository
FRAMEIT_URL = "https://github.com/fastlane/frameit-frames/archive/gh-pages.zip"
def main():
    """Download fastlane's frameit device frames and index them.

    Usage: ``fastlane-templates.py <resource_dir> <contents_file>``.
    Downloads the frameit-frames repository, measures each frame's
    transparent screen bounds, copies the PNGs into ``resource_dir``
    and records their metadata in the JSON contents file.
    """
    if len(sys.argv) < 3:
        print(f"Usage: {sys.argv[0]} resource_dir contents_file")
        exit(1)
    resource_dir = sys.argv[1]
    contents_path = sys.argv[2]
    zip_path = path.join(resource_dir, "gh-pages.zip")
    repo_dir = path.join(resource_dir, "frameit-frames-gh-pages")
    print("Downloading frameit frames...")
    # NOTE(review): shells out to wget/unzip; both must be on PATH.
    status_code = os.system(f"wget -q --show-progress -O \"{zip_path}\" \"{FRAMEIT_URL}\" && unzip -d \"{resource_dir}\" \"{zip_path}\"")
    print(f"Status code: {status_code}")
    # path to latest frames
    frameit_dir = path.join(repo_dir, "latest")
    with open(contents_path, "r") as cf:
        contents = json.load(cf)
    for frame_path in os.listdir(frameit_dir):
        frame_path = path.join(frameit_dir, frame_path)
        filename = path.basename(frame_path)
        # Only iPhone/iPad PNG templates are indexed.
        if not path.isfile(frame_path) or not filename_valid(filename):
            continue
        device_name = sanitize_device_name(filename)
        device_key = sanitize_device_key(device_name)
        device_color = sanitize_color(filename)
        print(f"Found template: {frame_path}")
        print(f"Template {device_name} - {device_color}")
        image = cv2.imread(frame_path, cv2.IMREAD_UNCHANGED)  # read preserving alpha
        frame_height, frame_width = image.shape[:2]
        ox, oy, width, height = measure_screen_bounds(image)
        print(f"==> +{ox}+{oy}, {width}x{height}")
        if device_key in contents:
            device_info = contents[device_key]
        else:
            # First frame seen for this device model: record the screen
            # rectangle and the template's pixel resolution.
            device_info = {
                "images": {},
                "left": ox,
                "top": oy,
                "right": ox + width,
                "bottom": oy + height,
                "res_height": frame_height,
                "res_width": frame_width
            }
        device_info["images"][device_color] = filename
        contents[device_key] = device_info
        copyfile(frame_path, path.join(resource_dir, filename))
    # default colors - first model color which is available in DEFAULT_COLOR array
    for key in contents.keys():
        apply_default_color(contents, key)
    with open(contents_path, "w") as cf:
        json.dump(contents, cf, sort_keys=True, indent=4)
    print("Cleaning up...")
    os.system(f"rm {zip_path} && rm -r {repo_dir}")
def measure_screen_bounds(image):
    """Locate the transparent screen cut-out of a device frame image.

    ``image`` is a BGRA template; its screen area is (near-)fully
    transparent. Returns ``(x, y, width, height)`` of that region as ints.
    """
    # Binarize the alpha channel: alpha <= 252 (any transparency) -> 255.
    alpha_channel = image[:, :, 3]
    _, mask = cv2.threshold(alpha_channel, 252, 255, cv2.THRESH_BINARY_INV)
    # Label the 8-connected transparent regions.
    _, label_map, component_stats, _ = cv2.connectedComponentsWithStats(
        mask, connectivity=8
    )
    # The component covering the image centre is assumed to be the screen.
    center_row = mask.shape[0] // 2
    center_col = mask.shape[1] // 2
    screen_stats = component_stats[label_map[center_row, center_col]]
    left, top, width, height = (int(v) for v in screen_stats[:4])
    return left, top, width, height
def filename_valid(filename):
    """Return True for Apple iPhone/iPad frame images ("Apple iP*.png")."""
    # Raw string literal: "\." inside a plain string is an invalid escape
    # sequence and raises a DeprecationWarning on modern Python.
    return re.search(r"^Apple iP.*\.png$", filename) is not None
if __name__ == "__main__":
main()
| 35.069307 | 137 | 0.647374 |
import sys
import os
from os import path
from shutil import copyfile
from tempfile import gettempdir
import re
import json
import cv2
import numpy as np
from common import sanitize_color, sanitize_device_name, sanitize_device_key, apply_default_color
FRAMEIT_URL = "https://github.com/fastlane/frameit-frames/archive/gh-pages.zip"
def main():
    """Download fastlane's frameit device frames and index them.

    Usage: ``fastlane-templates.py <resource_dir> <contents_file>``.
    Downloads the frameit-frames repository, measures each frame's
    transparent screen bounds, copies the PNGs into ``resource_dir``
    and records their metadata in the JSON contents file.
    """
    if len(sys.argv) < 3:
        print(f"Usage: {sys.argv[0]} resource_dir contents_file")
        exit(1)
    resource_dir = sys.argv[1]
    contents_path = sys.argv[2]
    zip_path = path.join(resource_dir, "gh-pages.zip")
    repo_dir = path.join(resource_dir, "frameit-frames-gh-pages")
    print("Downloading frameit frames...")
    # NOTE(review): shells out to wget/unzip; both must be on PATH.
    status_code = os.system(f"wget -q --show-progress -O \"{zip_path}\" \"{FRAMEIT_URL}\" && unzip -d \"{resource_dir}\" \"{zip_path}\"")
    print(f"Status code: {status_code}")
    frameit_dir = path.join(repo_dir, "latest")
    with open(contents_path, "r") as cf:
        contents = json.load(cf)
    for frame_path in os.listdir(frameit_dir):
        frame_path = path.join(frameit_dir, frame_path)
        filename = path.basename(frame_path)
        if not path.isfile(frame_path) or not filename_valid(filename):
            continue
        device_name = sanitize_device_name(filename)
        device_key = sanitize_device_key(device_name)
        device_color = sanitize_color(filename)
        print(f"Found template: {frame_path}")
        print(f"Template {device_name} - {device_color}")
        # IMREAD_UNCHANGED keeps the alpha channel needed for measuring.
        image = cv2.imread(frame_path, cv2.IMREAD_UNCHANGED)
        frame_height, frame_width = image.shape[:2]
        ox, oy, width, height = measure_screen_bounds(image)
        print(f"==> +{ox}+{oy}, {width}x{height}")
        if device_key in contents:
            device_info = contents[device_key]
        else:
            # First frame seen for this device model.
            device_info = {
                "images": {},
                "left": ox,
                "top": oy,
                "right": ox + width,
                "bottom": oy + height,
                "res_height": frame_height,
                "res_width": frame_width
            }
        device_info["images"][device_color] = filename
        contents[device_key] = device_info
        copyfile(frame_path, path.join(resource_dir, filename))
    for key in contents.keys():
        apply_default_color(contents, key)
    with open(contents_path, "w") as cf:
        json.dump(contents, cf, sort_keys=True, indent=4)
    print("Cleaning up...")
    os.system(f"rm {zip_path} && rm -r {repo_dir}")
def measure_screen_bounds(image):
    """Locate the transparent screen region of a BGRA frame template.

    Returns ``(x, y, width, height)`` of the connected transparent
    component that covers the image centre.
    """
    alpha = image[:, :, 3]
    # Binarize: any pixel with alpha <= 252 (i.e. some transparency) -> 255.
    alpha = cv2.threshold(alpha, 252, 255, cv2.THRESH_BINARY_INV)[1]
    n, labels, stats, centroids = cv2.connectedComponentsWithStats(alpha, connectivity=8)
    img_center = np.array([alpha.shape[0] // 2, alpha.shape[1] // 2])
    # The component containing the image centre is assumed to be the screen.
    screen_label = labels[img_center[0], img_center[1]]
    x, y, width, height = stats[screen_label][:4]
    return int(x), int(y), int(width), int(height)
def filename_valid(filename):
    """True for fastlane frame images named "Apple iP*.png" (iPhone/iPad)."""
    pattern = "^Apple iP.*\.png$"
    return re.search(pattern, filename) is not None
if __name__ == "__main__":
main()
| true | true |
f71dd880479fc3a2e03b6a863aeaab5e3797cfb7 | 3,752 | py | Python | function/python/brightics/function/transform/sql/functions.py | data-weirdo/studio | 48852c4f097f773ce3d408b59f79fda2e2d60470 | [
"Apache-2.0"
] | 1 | 2020-02-08T10:56:29.000Z | 2020-02-08T10:56:29.000Z | function/python/brightics/function/transform/sql/functions.py | data-weirdo/studio | 48852c4f097f773ce3d408b59f79fda2e2d60470 | [
"Apache-2.0"
] | null | null | null | function/python/brightics/function/transform/sql/functions.py | data-weirdo/studio | 48852c4f097f773ce3d408b59f79fda2e2d60470 | [
"Apache-2.0"
] | null | null | null | """
Copyright 2019 Samsung SDS
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# -*- coding: utf-8 -*-
import dateutil.parser
import numpy as np
from .serializer import _serialize
from .serializer import _deserialize
import re
"""
constants
"""
import math


def e():
    """Euler's number e (base of the natural logarithm)."""
    # math.e rather than np.math.e: ``np.math`` was an undocumented alias
    # for the stdlib ``math`` module and was removed from modern NumPy.
    return math.e


def pi():
    """The circle constant pi."""
    return math.pi
"""
lambda functions
"""
import math


# SQL-style scalar functions, formerly one-line lambdas bound to names
# (PEP 8 E731). Backed by the stdlib ``math`` module: ``np.math`` was an
# undocumented alias for ``math`` that modern NumPy versions removed.
def log(value):
    """Natural log; a None (SQL NULL) input yields NaN instead of raising."""
    return math.log(value) if value is not None else math.nan


def ln(value):
    """Natural logarithm."""
    return math.log(value)


def log10(value):
    """Base-10 logarithm."""
    return math.log10(value)


def log2(value):
    """Base-2 logarithm."""
    return math.log2(value)


def exp(value):
    """e raised to ``value``."""
    return math.exp(value)


def exp2(value):
    """2 raised to ``value`` (float result, like math.pow)."""
    return math.pow(2, value)


def sqrt(value):
    """Square root."""
    return math.sqrt(value)


def ceil(value):
    """Smallest integer >= value (int result)."""
    return math.ceil(value)


def floor(value):
    """Largest integer <= value (int result)."""
    return math.floor(value)


def sign(value):
    """-1, 0 or 1 according to the sign of ``value``."""
    return int(np.sign(value))


def factorial(value):
    """value! for a non-negative integer."""
    return math.factorial(value)


def pow(a, b):
    """a raised to b as a float (intentionally shadows the builtin pow)."""
    return math.pow(a, b)


def ljust(item, length, lpad_str):
    """Stringify ``item`` and left-justify it to ``length`` with padding."""
    return str(item).ljust(length, lpad_str)


def rjust(item, length, rpad_str):
    """Stringify ``item`` and right-justify it to ``length`` with padding."""
    return str(item).rjust(length, rpad_str)


def is_null(value):
    """SQL IS NULL: 1 if ``value`` is None else 0."""
    return 1 if value is None else 0
"""
regular expression related functions
"""
# Regular-expression helpers; plain ``def``s instead of lambda
# assignments (PEP 8 E731), behaviour unchanged.
def regexp(exp, str_):
    """SQL REGEXP: True if ``exp`` matches anywhere in ``str_``."""
    return re.search(exp, str_) is not None


def regexp_replace(initial_str, pattern, replacement):
    """Replace every match of ``pattern`` in ``initial_str``."""
    return re.sub(pattern, replacement, initial_str)
def regexp_extract(subject, pattern, *index):
    """Return a capture group of the first ``pattern`` match in ``subject``.

    With no extra argument group 1 is returned, otherwise group
    ``index[0]``. A non-matching pattern raises AttributeError, exactly
    as before.
    """
    group_number = index[0] if index else 1
    return re.search(pattern, subject).group(group_number)
"""
datetime related functions
"""
# todo weekofmonth, datediff, timediff
def datediff(end_isotime, start_isotime):
    """Whole-day difference (end - start) between two ISO-format timestamps."""
    end_datetime = dateutil.parser.parse(end_isotime)
    start_datetime = dateutil.parser.parse(start_isotime)
    diff_datetime = end_datetime - start_datetime
    return diff_datetime.days
def strftime_a(isotime):
    """Abbreviated English weekday name for an ISO timestamp, e.g. 'Mon'."""
    return dateutil.parser.parse(isotime).strftime('%a')
def strftime_aa(isotime):
    """Full English weekday name for an ISO timestamp, e.g. 'Monday'."""
    return dateutil.parser.parse(isotime).strftime('%A')
def strftime_aak(isotime):
    """Full Korean weekday name for an ISO timestamp, e.g. '월요일'."""
    w_dict = {'Monday':'월요일',
              'Tuesday':'화요일',
              'Wednesday':'수요일',
              'Thursday':'목요일',
              'Friday':'금요일',
              'Saturday':'토요일',
              'Sunday':'일요일',
              }
    return w_dict[dateutil.parser.parse(isotime).strftime('%A')]
def strftime_ak(isotime):
    """Abbreviated Korean weekday name for an ISO timestamp, e.g. '월'."""
    w_dict = {'Monday':'월',
              'Tuesday':'화',
              'Wednesday':'수',
              'Thursday':'목',
              'Friday':'금',
              'Saturday':'토',
              'Sunday':'일',
              }
    return w_dict[dateutil.parser.parse(isotime).strftime('%A')]
"""
array related functions
"""
# Array helpers: arrays travel between SQL expressions as serialized
# blobs (see the serializer module's _serialize/_deserialize).
def array(*args):
    """Build a serialized array from the given scalar arguments."""
    return _serialize(np.array(list(args)))
def get_array_element(serialized_list, index):
    """Return element ``index`` of a serialized array."""
    return _deserialize(serialized_list)[index]
def concat_ws(sep, serialized_list):
    """Join the elements of a serialized array into one string with ``sep``."""
    arr = _deserialize(serialized_list)
    return sep.join([str(item) for item in arr])
def split(str_, *sep):
    """Split on whitespace (no ``sep``) or on ``sep[0]``; serialized result."""
    nargs = len(sep)
    if nargs == 0:
        return _serialize(str_.split())
    else:  # todo elif nargs == 1:
        return _serialize(str_.split(sep[0]))
def size(serialized_list):
    """Number of elements in a serialized array."""
    arr = _deserialize(serialized_list)
    return len(arr)
| 24.847682 | 100 | 0.647122 |
import dateutil.parser
import numpy as np
from .serializer import _serialize
from .serializer import _deserialize
import re
def e():
    """Euler's number e (SQL constant)."""
    return np.math.e
def pi():
    """The circle constant pi (SQL constant)."""
    return np.math.pi
# SQL-style scalar helpers bound as lambdas. NOTE(review): ``np.math``
# is the stdlib ``math`` module via an alias removed in modern NumPy.
log = lambda _: np.math.log(_) if _ is not None else np.math.nan  # NULL -> NaN
ln = lambda _: np.math.log(_)
log10 = lambda _: np.math.log10(_)
log2 = lambda _: np.math.log2(_)
exp = lambda _: np.math.exp(_)
exp2 = lambda _: np.math.pow(2, _)
sqrt = lambda _: np.math.sqrt(_)
ceil = lambda _: np.math.ceil(_)
floor = lambda _: np.math.floor(_)
sign = lambda _: int(np.sign(_))
factorial = lambda _: np.math.factorial(_)
pow = lambda a, b: np.math.pow(a, b)  # NOTE: shadows the builtin pow
ljust = lambda item, length, lpad_str: str(item).ljust(length, lpad_str)
rjust = lambda item, length, rpad_str: str(item).rjust(length, rpad_str)
is_null = lambda _: 1 if _ is None else 0  # SQL IS NULL -> 1/0
# Regular-expression helpers.
regexp = lambda exp, str_: False if re.search(exp, str_) is None else True
regexp_replace = lambda initial_str, pattern, replacement: re.sub(pattern, replacement, initial_str)
def regexp_extract(subject, pattern, *index):
    """Return capture group ``index[0]`` (default group 1) of the first match."""
    def _is_empty(tup):
        return not tup
    if _is_empty(index):
        return re.search(pattern, subject).group(1)
    else:
        return re.search(pattern, subject).group(index[0])
def datediff(end_isotime, start_isotime):
    """Whole-day difference (end - start) between two ISO timestamps."""
    end_datetime = dateutil.parser.parse(end_isotime)
    start_datetime = dateutil.parser.parse(start_isotime)
    diff_datetime = end_datetime - start_datetime
    return diff_datetime.days
def strftime_a(isotime):
    """Abbreviated English weekday name, e.g. 'Mon'."""
    return dateutil.parser.parse(isotime).strftime('%a')
def strftime_aa(isotime):
    """Full English weekday name, e.g. 'Monday'."""
    return dateutil.parser.parse(isotime).strftime('%A')
def strftime_aak(isotime):
    """Full Korean weekday name, e.g. '월요일'."""
    w_dict = {'Monday':'월요일',
              'Tuesday':'화요일',
              'Wednesday':'수요일',
              'Thursday':'목요일',
              'Friday':'금요일',
              'Saturday':'토요일',
              'Sunday':'일요일',
              }
    return w_dict[dateutil.parser.parse(isotime).strftime('%A')]
def strftime_ak(isotime):
    """Abbreviated Korean weekday name, e.g. '월'."""
    w_dict = {'Monday':'월',
              'Tuesday':'화',
              'Wednesday':'수',
              'Thursday':'목',
              'Friday':'금',
              'Saturday':'토',
              'Sunday':'일',
              }
    return w_dict[dateutil.parser.parse(isotime).strftime('%A')]
# Array helpers: arrays travel as serialized blobs (serializer module).
def array(*args):
    """Build a serialized array from the given scalar arguments."""
    return _serialize(np.array(list(args)))
def get_array_element(serialized_list, index):
    """Return element ``index`` of a serialized array."""
    return _deserialize(serialized_list)[index]
def concat_ws(sep, serialized_list):
    """Join a serialized array's elements into one string with ``sep``."""
    arr = _deserialize(serialized_list)
    return sep.join([str(item) for item in arr])
def split(str_, *sep):
    """Split on whitespace (no ``sep``) or on ``sep[0]``; serialized result."""
    nargs = len(sep)
    if nargs == 0:
        return _serialize(str_.split())
    else:
        return _serialize(str_.split(sep[0]))
def size(serialized_list):
    """Number of elements in a serialized array."""
    arr = _deserialize(serialized_list)
    return len(arr)
| true | true |
f71dd987760395589e47df40ff7fd75a85d357db | 1,193 | py | Python | google/ads/googleads/v4/enums/types/conversion_action_status.py | batardo/google-ads-python | a39748521847e85138fca593f3be2681352ad024 | [
"Apache-2.0"
] | null | null | null | google/ads/googleads/v4/enums/types/conversion_action_status.py | batardo/google-ads-python | a39748521847e85138fca593f3be2681352ad024 | [
"Apache-2.0"
] | null | null | null | google/ads/googleads/v4/enums/types/conversion_action_status.py | batardo/google-ads-python | a39748521847e85138fca593f3be2681352ad024 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
# Register this module's messages with the proto-plus marshal.
__protobuf__ = proto.module(
    package="google.ads.googleads.v4.enums",
    marshal="google.ads.googleads.v4",
    manifest={"ConversionActionStatusEnum",},
)
class ConversionActionStatusEnum(proto.Message):
    r"""Container for enum describing possible statuses of a
    conversion action.
    """
    class ConversionActionStatus(proto.Enum):
        r"""Possible statuses of a conversion action."""
        UNSPECIFIED = 0  # Not specified.
        UNKNOWN = 1  # Value unknown in this API version.
        ENABLED = 2
        REMOVED = 3
        HIDDEN = 4
__all__ = tuple(sorted(__protobuf__.manifest))
| 27.744186 | 74 | 0.706622 |
import proto
# Register this module's messages with the proto-plus marshal.
__protobuf__ = proto.module(
    package="google.ads.googleads.v4.enums",
    marshal="google.ads.googleads.v4",
    manifest={"ConversionActionStatusEnum",},
)
class ConversionActionStatusEnum(proto.Message):
    """Container for the ConversionActionStatus enum."""
    class ConversionActionStatus(proto.Enum):
        """Possible statuses of a conversion action."""
        UNSPECIFIED = 0  # Not specified.
        UNKNOWN = 1  # Value unknown in this API version.
        ENABLED = 2
        REMOVED = 3
        HIDDEN = 4
__all__ = tuple(sorted(__protobuf__.manifest))
| true | true |
f71ddabf07aa34298f199ac2facae47343cbce6a | 345 | py | Python | build_pipeline/helper/deploy_package/prod.py | jakob-bagterp/timer-for-python | a48b60c8782bbf6d368d6ca2be249054c3b66c21 | [
"MIT"
] | 2 | 2022-03-22T11:14:37.000Z | 2022-03-24T14:27:13.000Z | build_pipeline/helper/deploy_package/prod.py | jakob-bagterp/timer-for-python | a48b60c8782bbf6d368d6ca2be249054c3b66c21 | [
"MIT"
] | null | null | null | build_pipeline/helper/deploy_package/prod.py | jakob-bagterp/timer-for-python | a48b60c8782bbf6d368d6ca2be249054c3b66c21 | [
"MIT"
] | null | null | null | import subprocess
from config.directory import temp_builds
from .. import directory, output_release_file_checksum
def deploy_to_pypi() -> None:
    """Upload every artifact in the temp build directory to PyPI via twine."""
    directory.working.set_as_project_base_path()
    # NOTE(review): str.split() on the command breaks if the build path
    # contains spaces; the "*" is passed to twine literally (no shell
    # here) — presumably twine expands the glob itself; verify.
    subprocess.call(f"twine upload {temp_builds()}/*".split())
# Allow running this module directly as a deployment script.
if __name__ == "__main__":
    output_release_file_checksum()
    deploy_to_pypi()
| 21.5625 | 62 | 0.75942 | import subprocess
from config.directory import temp_builds
from .. import directory, output_release_file_checksum
def deploy_to_pypi() -> None:
    """Upload every artifact in the temp build directory to PyPI via twine."""
    directory.working.set_as_project_base_path()
    # NOTE(review): fragile if the build path contains spaces (str.split).
    subprocess.call(f"twine upload {temp_builds()}/*".split())
if __name__ == "__main__":
    output_release_file_checksum()
    deploy_to_pypi()
| true | true |
f71ddae2c09cec47581d29a64a4e332b9bc0aebc | 2,584 | py | Python | tensorboard/context_test.py | karthikv2k/tensorboard | b39f7bbe6e85e543703e7901914ae51ab4cd51a6 | [
"Apache-2.0"
] | null | null | null | tensorboard/context_test.py | karthikv2k/tensorboard | b39f7bbe6e85e543703e7901914ae51ab4cd51a6 | [
"Apache-2.0"
] | null | null | null | tensorboard/context_test.py | karthikv2k/tensorboard | b39f7bbe6e85e543703e7901914ae51ab4cd51a6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorboard.context."""
import ipaddress
from tensorboard import auth as auth_lib
from tensorboard import context
from tensorboard import test as tb_test
REMOTE_IP = ipaddress.ip_address("192.168.0.1")
X_FORWARDED_FOR_IPS = (ipaddress.ip_address("2001:db8::"), REMOTE_IP)
class RequestContextTest(tb_test.TestCase):
    """Tests for ``tensorboard.context.RequestContext``."""
    def test_defaults(self):
        """A default-constructed context has an auth object and empty XFF."""
        ctx = context.RequestContext()
        self.assertIsInstance(ctx.auth, auth_lib.AuthContext)
        self.assertEqual(ctx.x_forwarded_for, ())
    def test_args(self):
        """Explicit constructor arguments are exposed unchanged."""
        auth = auth_lib.AuthContext({}, {"REQUEST_METHOD": "GET"})
        ctx = context.RequestContext(
            auth=auth, remote_ip=REMOTE_IP, x_forwarded_for=X_FORWARDED_FOR_IPS
        )
        self.assertEqual(ctx.auth, auth)
        self.assertEqual(ctx.remote_ip, REMOTE_IP)
        self.assertEqual(ctx.x_forwarded_for, X_FORWARDED_FOR_IPS)
    def test_environ(self):
        """set_in_environ round-trips a replaced context through an environ."""
        environ = {"one": "two", "three": "four"}
        auth = auth_lib.AuthContext({}, environ)
        req_context = context.from_environ(environ)
        # A fresh environ carries none of our custom values yet.
        self.assertNotEqual(req_context.auth, auth)
        self.assertNotEqual(req_context.remote_ip, REMOTE_IP)
        self.assertNotEqual(req_context.x_forwarded_for, X_FORWARDED_FOR_IPS)
        context.set_in_environ(
            environ,
            context.from_environ(environ).replace(
                auth=auth,
                remote_ip=REMOTE_IP,
                x_forwarded_for=X_FORWARDED_FOR_IPS,
            ),
        )
        # Unrelated environ keys are left untouched.
        self.assertEqual(environ["one"], "two")
        self.assertEqual(environ["three"], "four")
        req_context = context.from_environ(environ)
        self.assertEqual(req_context.auth, auth)
        self.assertEqual(req_context.remote_ip, REMOTE_IP)
        self.assertEqual(req_context.x_forwarded_for, X_FORWARDED_FOR_IPS)
if __name__ == "__main__":
tb_test.main()
| 38 | 80 | 0.676471 |
import ipaddress
from tensorboard import auth as auth_lib
from tensorboard import context
from tensorboard import test as tb_test
REMOTE_IP = ipaddress.ip_address("192.168.0.1")
X_FORWARDED_FOR_IPS = (ipaddress.ip_address("2001:db8::"), REMOTE_IP)
class RequestContextTest(tb_test.TestCase):
    """Tests for ``tensorboard.context.RequestContext``."""
    def test_defaults(self):
        """Default context: AuthContext present, empty x_forwarded_for."""
        ctx = context.RequestContext()
        self.assertIsInstance(ctx.auth, auth_lib.AuthContext)
        self.assertEqual(ctx.x_forwarded_for, ())
    def test_args(self):
        """Constructor arguments are exposed unchanged."""
        auth = auth_lib.AuthContext({}, {"REQUEST_METHOD": "GET"})
        ctx = context.RequestContext(
            auth=auth, remote_ip=REMOTE_IP, x_forwarded_for=X_FORWARDED_FOR_IPS
        )
        self.assertEqual(ctx.auth, auth)
        self.assertEqual(ctx.remote_ip, REMOTE_IP)
        self.assertEqual(ctx.x_forwarded_for, X_FORWARDED_FOR_IPS)
    def test_environ(self):
        """set_in_environ round-trips a replaced context through an environ."""
        environ = {"one": "two", "three": "four"}
        auth = auth_lib.AuthContext({}, environ)
        req_context = context.from_environ(environ)
        self.assertNotEqual(req_context.auth, auth)
        self.assertNotEqual(req_context.remote_ip, REMOTE_IP)
        self.assertNotEqual(req_context.x_forwarded_for, X_FORWARDED_FOR_IPS)
        context.set_in_environ(
            environ,
            context.from_environ(environ).replace(
                auth=auth,
                remote_ip=REMOTE_IP,
                x_forwarded_for=X_FORWARDED_FOR_IPS,
            ),
        )
        # Unrelated environ keys are left untouched.
        self.assertEqual(environ["one"], "two")
        self.assertEqual(environ["three"], "four")
        req_context = context.from_environ(environ)
        self.assertEqual(req_context.auth, auth)
        self.assertEqual(req_context.remote_ip, REMOTE_IP)
        self.assertEqual(req_context.x_forwarded_for, X_FORWARDED_FOR_IPS)
if __name__ == "__main__":
tb_test.main()
| true | true |
f71ddae2ee593ca6c94ea0a73bcdf57cb6e92fad | 10,955 | py | Python | docs/source/conf.py | florianeinfalt/nodegraph | fa117f069bd618d5aa98dfc62f3ce88acc5c77b2 | [
"Apache-2.0"
] | 1 | 2018-07-10T09:29:04.000Z | 2018-07-10T09:29:04.000Z | docs/source/conf.py | florianeinfalt/nodegraph | fa117f069bd618d5aa98dfc62f3ce88acc5c77b2 | [
"Apache-2.0"
] | null | null | null | docs/source/conf.py | florianeinfalt/nodegraph | fa117f069bd618d5aa98dfc62f3ce88acc5c77b2 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
#
# nodegraph documentation build configuration file, created by
# sphinx-quickstart on Thu Feb 9 15:53:41 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
import os
import sys
import sphinx_rtd_theme
import nodegraph
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'sphinx.ext.todo'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'nodegraph'
copyright = u"2018, Florian Einfalt"
author = u'Florian Einfalt'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = nodegraph.__version__
# The full version, including alpha/beta/rc tags.
release = nodegraph.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
#html_theme = 'alabaster'
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'nodegraphdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'nodegraph.tex', u'nodegraph Documentation',
u'Florian Einfalt', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# It false, will not define \strong, \code, itleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
# packages.
#
# latex_keep_old_macro_names = True
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'nodegraph',
u'nodegraph Documentation',
[u'Florian Einfalt'], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'nodegraph',
u'nodegraph Documentation',
u'Florian Einfalt',
'nodegraph',
'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
# Todo config
todo_include_todos = True
# Autodoc config
autodoc_default_flags = ['members', 'private-members', 'special-members',
'undoc-members', 'show-inheritance']
def autodoc_skip_member(app, what, name, obj, skip, options):
exclusions = (
'__weakref__',
'__doc__',
'__module__',
'__dict__',
'__repr__',
'__str__',
'__getnewargs__',
'__getstate__',
'__new__',
'__slots__',
'_asdict',
'_fields',
'_make',
'_replace'
)
exclude = name in exclusions
return skip or exclude
def setup(app):
app.connect('autodoc-skip-member', autodoc_skip_member)
| 28.089744 | 80 | 0.695938 |
import os
import sys
import sphinx_rtd_theme
import nodegraph
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'sphinx.ext.todo'
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'nodegraph'
copyright = u"2018, Florian Einfalt"
author = u'Florian Einfalt'
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = nodegraph.__version__
# The full version, including alpha/beta/rc tags.
release = nodegraph.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
#html_theme = 'alabaster'
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'nodegraphdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'nodegraph.tex', u'nodegraph Documentation',
u'Florian Einfalt', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# It false, will not define \strong, \code, itleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
# packages.
#
# latex_keep_old_macro_names = True
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'nodegraph',
u'nodegraph Documentation',
[u'Florian Einfalt'], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'nodegraph',
u'nodegraph Documentation',
u'Florian Einfalt',
'nodegraph',
'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
intersphinx_mapping = {'https://docs.python.org/': None}
todo_include_todos = True
autodoc_default_flags = ['members', 'private-members', 'special-members',
'undoc-members', 'show-inheritance']
def autodoc_skip_member(app, what, name, obj, skip, options):
exclusions = (
'__weakref__',
'__doc__',
'__module__',
'__dict__',
'__repr__',
'__str__',
'__getnewargs__',
'__getstate__',
'__new__',
'__slots__',
'_asdict',
'_fields',
'_make',
'_replace'
)
exclude = name in exclusions
return skip or exclude
def setup(app):
app.connect('autodoc-skip-member', autodoc_skip_member)
| true | true |
f71ddbd8e3d55f90309e11e110629a667ccd0405 | 2,563 | py | Python | src/compas_rhino/artists/frameartist.py | Sam-Bouten/compas | 011c7779ded9b69bb602568b470bb0443e336f62 | [
"MIT"
] | null | null | null | src/compas_rhino/artists/frameartist.py | Sam-Bouten/compas | 011c7779ded9b69bb602568b470bb0443e336f62 | [
"MIT"
] | null | null | null | src/compas_rhino/artists/frameartist.py | Sam-Bouten/compas | 011c7779ded9b69bb602568b470bb0443e336f62 | [
"MIT"
] | null | null | null | from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
import compas_rhino
from compas.artists import PrimitiveArtist
from .artist import RhinoArtist
class FrameArtist(RhinoArtist, PrimitiveArtist):
"""Artist for drawing frames.
Parameters
----------
frame: :class:`compas.geometry.Frame`
A COMPAS frame.
scale: float, optional
Scale factor that controls the length of the axes.
layer : str, optional
The layer that should contain the drawing.
**kwargs : dict, optional
Additional keyword arguments.
For more info, see :class:`RhinoArtist` and :class:`PrimitiveArtist`.
Attributes
----------
scale : float
Scale factor that controls the length of the axes.
Default is ``1.0``.
color_origin : tuple of 3 int between 0 and 255
Default is ``(0, 0, 0)``.
color_xaxis : tuple of 3 int between 0 and 255
Default is ``(255, 0, 0)``.
color_yaxis : tuple of 3 int between 0 and 255
Default is ``(0, 255, 0)``.
color_zaxis : tuple of 3 int between 0 and 255
Default is ``(0, 0, 255)``.
"""
def __init__(self, frame, layer=None, scale=1.0, **kwargs):
super(FrameArtist, self).__init__(primitive=frame, layer=layer, **kwargs)
self.scale = scale or 1.0
self.color_origin = (0, 0, 0)
self.color_xaxis = (255, 0, 0)
self.color_yaxis = (0, 255, 0)
self.color_zaxis = (0, 0, 255)
def draw(self):
"""Draw the frame.
Returns
-------
list[System.Guid]
The GUIDs of the created Rhino objects.
"""
points = []
lines = []
origin = list(self.primitive.point)
X = list(self.primitive.point + self.primitive.xaxis.scaled(self.scale))
Y = list(self.primitive.point + self.primitive.yaxis.scaled(self.scale))
Z = list(self.primitive.point + self.primitive.zaxis.scaled(self.scale))
points = [{'pos': origin, 'color': self.color_origin}]
lines = [
{'start': origin, 'end': X, 'color': self.color_xaxis, 'arrow': 'end'},
{'start': origin, 'end': Y, 'color': self.color_yaxis, 'arrow': 'end'},
{'start': origin, 'end': Z, 'color': self.color_zaxis, 'arrow': 'end'}]
guids = compas_rhino.draw_points(points, layer=self.layer, clear=False, redraw=False)
guids += compas_rhino.draw_lines(lines, layer=self.layer, clear=False, redraw=False)
return guids
| 35.597222 | 93 | 0.612563 | from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
import compas_rhino
from compas.artists import PrimitiveArtist
from .artist import RhinoArtist
class FrameArtist(RhinoArtist, PrimitiveArtist):
def __init__(self, frame, layer=None, scale=1.0, **kwargs):
super(FrameArtist, self).__init__(primitive=frame, layer=layer, **kwargs)
self.scale = scale or 1.0
self.color_origin = (0, 0, 0)
self.color_xaxis = (255, 0, 0)
self.color_yaxis = (0, 255, 0)
self.color_zaxis = (0, 0, 255)
def draw(self):
points = []
lines = []
origin = list(self.primitive.point)
X = list(self.primitive.point + self.primitive.xaxis.scaled(self.scale))
Y = list(self.primitive.point + self.primitive.yaxis.scaled(self.scale))
Z = list(self.primitive.point + self.primitive.zaxis.scaled(self.scale))
points = [{'pos': origin, 'color': self.color_origin}]
lines = [
{'start': origin, 'end': X, 'color': self.color_xaxis, 'arrow': 'end'},
{'start': origin, 'end': Y, 'color': self.color_yaxis, 'arrow': 'end'},
{'start': origin, 'end': Z, 'color': self.color_zaxis, 'arrow': 'end'}]
guids = compas_rhino.draw_points(points, layer=self.layer, clear=False, redraw=False)
guids += compas_rhino.draw_lines(lines, layer=self.layer, clear=False, redraw=False)
return guids
| true | true |
f71ddc0e02a6d23f3d920561d4cf0c1eb6183b58 | 512 | py | Python | satchmo/apps/shipping/fields.py | jtslade/satchmo-svn | a9d791342ac6c5712de55c26ea4780057e27d452 | [
"BSD-3-Clause"
] | 1 | 2016-05-09T08:15:33.000Z | 2016-05-09T08:15:33.000Z | satchmo/apps/shipping/fields.py | jtslade/satchmo-svn | a9d791342ac6c5712de55c26ea4780057e27d452 | [
"BSD-3-Clause"
] | null | null | null | satchmo/apps/shipping/fields.py | jtslade/satchmo-svn | a9d791342ac6c5712de55c26ea4780057e27d452 | [
"BSD-3-Clause"
] | null | null | null | from django.db import models
from livesettings import config_value_safe
def shipping_choices():
try:
return config_choice_values('SHIPPING','MODULES')
except SettingNotSet:
return ()
class ShippingChoiceCharField(models.CharField):
def __init__(self, choices="__DYNAMIC__", *args, **kwargs):
if choices == "__DYNAMIC__":
kwargs['choices'] = shipping_choices()
super(ShippingChoiceCharField, self).__init__(*args, **kwargs)
| 28.444444 | 70 | 0.65625 | from django.db import models
from livesettings import config_value_safe
def shipping_choices():
try:
return config_choice_values('SHIPPING','MODULES')
except SettingNotSet:
return ()
class ShippingChoiceCharField(models.CharField):
def __init__(self, choices="__DYNAMIC__", *args, **kwargs):
if choices == "__DYNAMIC__":
kwargs['choices'] = shipping_choices()
super(ShippingChoiceCharField, self).__init__(*args, **kwargs)
| true | true |
f71ddc2bacfa536b1875d263aee5e37ce9195687 | 513 | py | Python | setup.py | IamGianluca/algorithms_collection | 59fd2052ecdcb687a61bcdf71d571624adc7b6a2 | [
"MIT"
] | 1 | 2019-09-11T03:22:55.000Z | 2019-09-11T03:22:55.000Z | setup.py | IamGianluca/algorithms | 59fd2052ecdcb687a61bcdf71d571624adc7b6a2 | [
"MIT"
] | null | null | null | setup.py | IamGianluca/algorithms | 59fd2052ecdcb687a61bcdf71d571624adc7b6a2 | [
"MIT"
] | null | null | null | from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='ml',
version='0.0.1',
description='Collection of Computer Science and Machine Learning algorithms implemented in Python',
long_description=long_description,
packages=find_packages(exclude=['tests']),
)
| 30.176471 | 103 | 0.74269 | from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='ml',
version='0.0.1',
description='Collection of Computer Science and Machine Learning algorithms implemented in Python',
long_description=long_description,
packages=find_packages(exclude=['tests']),
)
| true | true |
f71ddc7950645157e3e41984310acbbb20ea68f5 | 211 | py | Python | 3_1_default_param.py | TechOpsX/python-function | 801009a34e1e82d683d81dcab8c824d6b476ac78 | [
"Apache-2.0"
] | null | null | null | 3_1_default_param.py | TechOpsX/python-function | 801009a34e1e82d683d81dcab8c824d6b476ac78 | [
"Apache-2.0"
] | null | null | null | 3_1_default_param.py | TechOpsX/python-function | 801009a34e1e82d683d81dcab8c824d6b476ac78 | [
"Apache-2.0"
] | null | null | null | def default_param_func(a, b=1):
"""
默认参数必须在参数后面,如default_param_func(a=1, b)是错误的
"""
return a + b
if __name__ == '__main__':
print(default_param_func(1))
print(default_param_func(1, 2))
| 19.181818 | 47 | 0.64455 | def default_param_func(a, b=1):
return a + b
if __name__ == '__main__':
print(default_param_func(1))
print(default_param_func(1, 2))
| true | true |
f71de02ccb3a1c60d7dca33608c48ee41bdab885 | 6,802 | py | Python | UnitTest/lib/googletest/test/googletest-list-tests-unittest.py | SFCMM/LBM | 99bf39e177cb0af94d4073ee9f9aef2e52ba7851 | [
"BSD-3-Clause"
] | 8 | 2020-09-29T06:12:44.000Z | 2021-11-15T08:02:14.000Z | UnitTest/lib/googletest/test/googletest-list-tests-unittest.py | SFCMM/LBM | 99bf39e177cb0af94d4073ee9f9aef2e52ba7851 | [
"BSD-3-Clause"
] | 2 | 2020-10-14T21:49:46.000Z | 2020-10-21T17:12:37.000Z | UnitTest/lib/googletest/test/googletest-list-tests-unittest.py | SFCMM/LBM | 99bf39e177cb0af94d4073ee9f9aef2e52ba7851 | [
"BSD-3-Clause"
] | 2 | 2020-10-14T20:19:11.000Z | 2021-11-15T08:02:14.000Z | #!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for Google Test's --gtest_list_tests flag.
A user can ask Google Test to list all tests by specifying the
--gtest_list_tests flag. This script tests such functionality
by invoking googletest-list-tests-unittest_ (a program written with
Google Test) the command line flags.
"""
import re
import gtest_test_utils
# Constants.
# The command line flag for enabling/disabling listing all tests.
LIST_TESTS_FLAG = 'gtest_list_tests'
# Path to the googletest-list-tests-unittest_ program.
EXE_PATH = gtest_test_utils.GetTestExecutablePath('googletest-list-tests-unittest_')
# The expected output when running googletest-list-tests-unittest_ with
# --gtest_list_tests
EXPECTED_OUTPUT_NO_FILTER_RE = re.compile(r"""FooDeathTest\.
Test1
Foo\.
Bar1
Bar2
DISABLED_Bar3
Abc\.
Xyz
Def
FooBar\.
Baz
FooTest\.
Test1
DISABLED_Test2
Test3
TypedTest/0\. # TypeParam = (VeryLo{245}|class VeryLo{239})\.\.\.
TestA
TestB
TypedTest/1\. # TypeParam = int\s*\*( __ptr64)?
TestA
TestB
TypedTest/2\. # TypeParam = .*MyArray<bool,\s*42>
TestA
TestB
My/TypeParamTest/0\. # TypeParam = (VeryLo{245}|class VeryLo{239})\.\.\.
TestA
TestB
My/TypeParamTest/1\. # TypeParam = int\s*\*( __ptr64)?
TestA
TestB
My/TypeParamTest/2\. # TypeParam = .*MyArray<bool,\s*42>
TestA
TestB
MyInstantiation/ValueParamTest\.
TestA/0 # GetParam\(\) = one line
TestA/1 # GetParam\(\) = two\\nlines
TestA/2 # GetParam\(\) = a very\\nlo{241}\.\.\.
TestB/0 # GetParam\(\) = one line
TestB/1 # GetParam\(\) = two\\nlines
TestB/2 # GetParam\(\) = a very\\nlo{241}\.\.\.
""")
# The expected output when running googletest-list-tests-unittest_ with
# --gtest_list_tests and --gtest_filter=Foo*.
EXPECTED_OUTPUT_FILTER_FOO_RE = re.compile(r"""FooDeathTest\.
Test1
Foo\.
Bar1
Bar2
DISABLED_Bar3
FooBar\.
Baz
FooTest\.
Test1
DISABLED_Test2
Test3
""")
# Utilities.
def Run(args):
"""Runs googletest-list-tests-unittest_ and returns the list of tests printed."""
return gtest_test_utils.Subprocess([EXE_PATH] + args,
capture_stderr=False).output
# The unit test.
class GTestListTestsUnitTest(gtest_test_utils.TestCase):
"""Tests using the --gtest_list_tests flag to list all tests."""
def RunAndVerify(self, flag_value, expected_output_re, other_flag):
"""Runs googletest-list-tests-unittest_ and verifies that it prints
the correct tests.
Args:
flag_value: value of the --gtest_list_tests flag;
None if the flag should not be present.
expected_output_re: regular expression that matches the expected
output after running command;
other_flag: a different flag to be passed to command
along with gtest_list_tests;
None if the flag should not be present.
"""
if flag_value is None:
flag = ''
flag_expression = 'not set'
elif flag_value == '0':
flag = '--%s=0' % LIST_TESTS_FLAG
flag_expression = '0'
else:
flag = '--%s' % LIST_TESTS_FLAG
flag_expression = '1'
args = [flag]
if other_flag is not None:
args += [other_flag]
output = Run(args)
if expected_output_re:
self.assert_(
expected_output_re.match(output),
('when %s is %s, the output of "%s" is "%s",\n'
'which does not match regex "%s"' %
(LIST_TESTS_FLAG, flag_expression, ' '.join(args), output,
expected_output_re.pattern)))
else:
self.assert_(
not EXPECTED_OUTPUT_NO_FILTER_RE.match(output),
('when %s is %s, the output of "%s" is "%s"' %
(LIST_TESTS_FLAG, flag_expression, ' '.join(args), output)))
def testDefaultBehavior(self):
"""Tests the behavior of the default mode."""
self.RunAndVerify(flag_value=None,
expected_output_re=None,
other_flag=None)
def testFlag(self):
"""Tests using the --gtest_list_tests flag."""
self.RunAndVerify(flag_value='0',
expected_output_re=None,
other_flag=None)
self.RunAndVerify(flag_value='1',
expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE,
other_flag=None)
def testOverrideNonFilterFlags(self):
"""Tests that --gtest_list_tests overrides the non-filter flags."""
self.RunAndVerify(flag_value='1',
expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE,
other_flag='--gtest_break_on_failure')
def testWithFilterFlags(self):
"""Tests that --gtest_list_tests takes into account the
--gtest_filter flag."""
self.RunAndVerify(flag_value='1',
expected_output_re=EXPECTED_OUTPUT_FILTER_FOO_RE,
other_flag='--gtest_filter=Foo*')
if __name__ == '__main__':
gtest_test_utils.Main()
| 32.859903 | 85 | 0.650103 |
import re
import gtest_test_utils
LIST_TESTS_FLAG = 'gtest_list_tests'
EXE_PATH = gtest_test_utils.GetTestExecutablePath('googletest-list-tests-unittest_')
EXPECTED_OUTPUT_NO_FILTER_RE = re.compile(r"""FooDeathTest\.
Test1
Foo\.
Bar1
Bar2
DISABLED_Bar3
Abc\.
Xyz
Def
FooBar\.
Baz
FooTest\.
Test1
DISABLED_Test2
Test3
TypedTest/0\. # TypeParam = (VeryLo{245}|class VeryLo{239})\.\.\.
TestA
TestB
TypedTest/1\. # TypeParam = int\s*\*( __ptr64)?
TestA
TestB
TypedTest/2\. # TypeParam = .*MyArray<bool,\s*42>
TestA
TestB
My/TypeParamTest/0\. # TypeParam = (VeryLo{245}|class VeryLo{239})\.\.\.
TestA
TestB
My/TypeParamTest/1\. # TypeParam = int\s*\*( __ptr64)?
TestA
TestB
My/TypeParamTest/2\. # TypeParam = .*MyArray<bool,\s*42>
TestA
TestB
MyInstantiation/ValueParamTest\.
TestA/0 # GetParam\(\) = one line
TestA/1 # GetParam\(\) = two\\nlines
TestA/2 # GetParam\(\) = a very\\nlo{241}\.\.\.
TestB/0 # GetParam\(\) = one line
TestB/1 # GetParam\(\) = two\\nlines
TestB/2 # GetParam\(\) = a very\\nlo{241}\.\.\.
""")
EXPECTED_OUTPUT_FILTER_FOO_RE = re.compile(r"""FooDeathTest\.
Test1
Foo\.
Bar1
Bar2
DISABLED_Bar3
FooBar\.
Baz
FooTest\.
Test1
DISABLED_Test2
Test3
""")
def Run(args):
return gtest_test_utils.Subprocess([EXE_PATH] + args,
capture_stderr=False).output
class GTestListTestsUnitTest(gtest_test_utils.TestCase):
def RunAndVerify(self, flag_value, expected_output_re, other_flag):
if flag_value is None:
flag = ''
flag_expression = 'not set'
elif flag_value == '0':
flag = '--%s=0' % LIST_TESTS_FLAG
flag_expression = '0'
else:
flag = '--%s' % LIST_TESTS_FLAG
flag_expression = '1'
args = [flag]
if other_flag is not None:
args += [other_flag]
output = Run(args)
if expected_output_re:
self.assert_(
expected_output_re.match(output),
('when %s is %s, the output of "%s" is "%s",\n'
'which does not match regex "%s"' %
(LIST_TESTS_FLAG, flag_expression, ' '.join(args), output,
expected_output_re.pattern)))
else:
self.assert_(
not EXPECTED_OUTPUT_NO_FILTER_RE.match(output),
('when %s is %s, the output of "%s" is "%s"' %
(LIST_TESTS_FLAG, flag_expression, ' '.join(args), output)))
def testDefaultBehavior(self):
self.RunAndVerify(flag_value=None,
expected_output_re=None,
other_flag=None)
def testFlag(self):
self.RunAndVerify(flag_value='0',
expected_output_re=None,
other_flag=None)
self.RunAndVerify(flag_value='1',
expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE,
other_flag=None)
def testOverrideNonFilterFlags(self):
self.RunAndVerify(flag_value='1',
expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE,
other_flag='--gtest_break_on_failure')
def testWithFilterFlags(self):
self.RunAndVerify(flag_value='1',
expected_output_re=EXPECTED_OUTPUT_FILTER_FOO_RE,
other_flag='--gtest_filter=Foo*')
if __name__ == '__main__':
gtest_test_utils.Main()
| true | true |
f71de04a638f2ef61da4c6446ecc8a7c3dc8fac9 | 20,164 | py | Python | qtgui/adapter.py | Petr-By/qtpyvis | 0b9a151ee6b9a56b486c2bece9c1f03414629efc | [
"MIT"
] | 3 | 2017-10-04T14:51:26.000Z | 2017-10-22T09:35:50.000Z | qtgui/adapter.py | CogSciUOS/DeepLearningToolbox | bf07578b9486d8c48e25df357bc4b9963b513b46 | [
"MIT"
] | 13 | 2017-09-05T12:56:11.000Z | 2017-11-22T10:38:27.000Z | qtgui/adapter.py | CogSciUOS/DeepLearningToolbox | bf07578b9486d8c48e25df357bc4b9963b513b46 | [
"MIT"
] | 2 | 2017-09-24T21:39:42.000Z | 2017-10-04T15:29:54.000Z | """This module provides different adapter classes that allow
for a smoother combination of Qt and the Deep Learning ToolBox.
"""
# standard imports
from typing import Iterator, Iterable, Any, Callable
import logging
# Qt imports
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QKeyEvent
from PyQt5.QtWidgets import QComboBox, QListWidget, QListWidgetItem
# GUI imports
from .utils import qtName, protect, QDebug
# logging
LOG = logging.getLogger(__name__)
class ItemAdapter(QDebug):
"""This class provides functionality that can be used by QWidgets that
allow to choose from lists of items, like `QComboBox` and
`QListWidget`. It acts as a translator mapping between the data
structures used in the Deep Learning ToolBox and the Qt widgets.
The QWidgets allow to store items and associated data in different
ways:
* The `QListWidget` uses `QListWidgetItem`s to represent the list items.
Such an item is not a QWidget, but holds some information specifying
display properties (like foreground and background color or icons),
the text value of the item and it allows to store additional
associated user date by introducing specific roles.
* The `QComboBox` does not use an explict class to represent
list items, but it also allows to set display properties and
to store associated information for each item using roles.
Both Widgets have the following comonalities:
* New items can be registered with
`QComboBox.addItem(text, [icon], [userData])` and
`QListWidget.addItem(label=text)`
* Items can be accessed by index:
`QComboBox.itemText(index)` and `QListWidget.item(row).text()`
* Items can be accessed by text:
`QComboBox.findText(text)` gives a single index while
`QList.findItems(text)` returns a list of item objects.
* Items can be removed:
`QComboBox.removeItem(index)` and
`QListWidget.takeItem(QListWidget.item(index))
* There may be a current item (selected item). The numerical index
can be obtained by
`QComboBox.currentIndex()` and `QListWidget.currentRow()`
* The text of the current item can be obtained by
`QComboBox.currentText()` and `QListWidget.currentItem().text()`
* data associated with the current item can be obtained by
`QComboBox.currentData(role)` and `QListWidget.currentItem().data(role)`
"""
_itemToText: Callable[[Any], str] = str
def __init_subclass__(cls, itemType: type = None,
itemToText: Callable[[Any], str] = None,
**kwargs) -> None:
super().__init_subclass__(**kwargs)
if itemType is not None:
setattr(cls, qtName(itemType.__name__), cls._currentItem)
setattr(cls, qtName('set_' + itemType.__name__), cls._currentItem)
if itemToText is not None:
cls._itemToText = staticmethod(itemToText)
print(f"DEBUG1[{cls.__name__}]: itemToText:",
itemToText, cls._itemToText)
    def __init__(self, itemToText: Callable[[Any], str] = None,
                 **kwargs) -> None:
        """Create a new :py:class:`ItemAdapter`.

        Arguments
        ---------
        itemToText:
            Optional function mapping an item to its display text.
            Handed on to ``setItemToText`` (``None`` keeps the
            current class-level default).
        """
        super().__init__(**kwargs)
        self.setItemToText(itemToText)
#
# methods to be implemented by subclasses
#
def _items(self) -> Iterator[Any]:
"""An iterator for the items in this
:py:class:`ItemAdapter`.
"""
raise NotImplementedError("A 'ItemAdapter' has to implement "
"the _items() method")
def _addItem(self, item: Any) -> None:
"""Add an item to this :py:class:`ItemAdapter`.
It is assumed that the item is not yet contained in this
:py:class:`ItemAdapter`.
"""
raise NotImplementedError("A 'ItemAdapter' has to implement "
"the _addItem() method")
def _removeItem(self, item: Any) -> None:
"""Remove an item from this :py:class:`ItemAdapter`. It is
assumed that the item is contained in this
:py:class:`ItemAdapter`, otherwise a
:py:class:`ValueError` is raised.
"""
raise NotImplementedError("A 'ItemAdapter' has to implement "
"the _removeElement() method")
def _currentItem(self) -> Any:
"""Get the currently selected item.
This may be `None` if no itm is selected.
"""
raise NotImplementedError("A 'ItemAdapter' has to implement "
"the _currentItem() method")
def _setCurrentItem(self, item: Any) -> None:
"""Select the given entry in this :py:class:`ItemAdapter`.
Arguments
---------
item: Any
The item to become the current item. If the item is not
contained in this :py:class:`ItemAdapter` (e.g. if
`item` is `None`), the current will be set to `None`.
"""
raise NotImplementedError("A 'ItemAdapter' has to implement "
"the _setCurrentItem() method")
#
# Implemented methods
#
def _countItems(self) -> int:
"""Get the number of items in this :py:class:`ItemAdapter`.
"""
return sum(1 for _ in self._items())
def _textForItem(self, item: Any) -> str:
"""Get the text to be display from a given item.
"""
return self._itemToText(item)
    def _formatItem(self, item: Any) -> None:
        """May be implemented by a subclass to format an item.

        This method is only called if the item is currently displayed
        by this :py:class:`ItemAdapter` (has been added and was not
        removed), but it may be called several times for the same item
        (to trigger an update of this item).

        The base implementation does nothing, but derived classes may
        overwrite this method to allow for fancy formating (e.g.,
        colors or icons on the Qt item).
        """
def _getItemAt(self, index: int) -> Any:
"""
Raises
------
IndexError:
The index provided is invalid.
"""
try:
return next((x for i, x in enumerate(self._items()) if i == index))
except StopIteration:
raise IndexError(f"Index {index} beyond end of items.")
    def _getTextAt(self, index: int) -> str:
        """Get the display text of the item at position `index`.

        Raises
        ------
        IndexError:
            The index provided is invalid.
        """
        return self._textForItem(self._getItemAt(index))
def _indexOfItem(self, item: Any) -> int:
"""
Raises
------
LookupError:
The given item is not found in this :py:class:`ItemAdapter`.
"""
try:
return next(i for i, x in enumerate(self._items()) if x == item)
except StopIteration:
raise LookupError(f"Item {item} not found.")
    def _indexOfText(self, text: str) -> int:
        """Get the index of the first item whose display text
        equals `text`.

        Raises
        ------
        LookupError:
            The given text is not found in this :py:class:`ItemAdapter`.
        """
        try:
            return next(i for i, t in enumerate(self._texts()) if t == text)
        except StopIteration:
            raise LookupError(f"Item with text '{text}' not found")
def _findItem(self, text: str) -> Any:
"""
Raises
------
LookupError:
The given text is not found in this :py:class:`ItemAdapter`.
"""
try:
return next(item for item in self._items()
if self._textForItem(item) == text)
except StopIteration:
raise LookupError(f"Item with text '{text}' not found.")
    def _setCurrentText(self, text: str) -> None:
        """Make the item with the given display text the current item.

        Raises
        ------
        LookupError:
            No item with the given text is contained in this
            :py:class:`ItemAdapter` (propagated from `_findItem`).
        """
        self._setCurrentItem(self._findItem(text))
def _texts(self) -> Iterator[str]:
"""An iterator for the texts presented by this
:py:class:`ItemAdapter`.
"""
for item in self._items():
yield self._textForItem(item)
    def _removeText(self, text: str) -> None:
        """Remove the item with the given text. This may be
        overwritten by subclasses when a more efficient implementation
        is possible.

        Raises
        ------
        LookupError:
            No item with the given text exists (from `_findItem`).
        """
        self._removeItem(self._findItem(text))
    def _removeItemAt(self, index: int) -> None:
        """Remove the item at the given index.

        Raises
        ------
        IndexError:
            The index provided is invalid (from `_getItemAt`).
        """
        self._removeItem(self._getItemAt(index))
def _removeAllItems(self) -> None:
"""Remove all items in this :py:class:`ItemAdapter`.
"""
try:
self._removeItemAt(0)
except IndexError:
pass # no item left to remove
    def _formatAllItems(self) -> None:
        """Apply :py:meth:`_formatItem` to every item currently held
        by this :py:class:`ItemAdapter`.
        """
        for item in self._items():
            self._formatItem(item)
    def _updateAllItems(self) -> None:
        """Update the display of the list elements. This may be implemented by
        subclasses that would like to adapt the style of display
        depending on the state of the element.

        This method will be called when the list has been updated
        (e.g. by directly adding or removing elements, or by filling
        the list from some iterable), but subclasses may also call this
        method proactively in repsonse to notifications.

        The base implementation intentionally does nothing.
        """
#
# public interface
#
def setFromIterable(self, iterable: Iterable) -> None:
"""Set the items in this :py:class:`ItemAdapter` from an
iterable. This will first remove the old items and then
add the new items.
"""
self._removeAllItems()
for item in iterable:
self._addItem(item)
def updateFromIterable(self, iterable: Iterable) -> None:
"""Update the items in this :py:class:`ItemAdapter` from an iterable.
Items from the iterable, that are not yet contained in the
list are added, while items originally contained in this
:py:class:`ItemAdapter`, that are not iterated by the
iterable, are removed.
"""
# 1. Create a set containing the texts for items already contained
# in this list (this is used for bookkeeping).
bookkeeping = set(self._texts())
# 2. Iterate over entries from the iterable and add entries
# missing in this list.
for item in iterable:
text = self._textForItem(item)
if text in bookkeeping:
bookkeeping.remove(text)
else:
self._addItem(item)
# 3. Remove items from this list that are no longer present
for text in bookkeeping:
self._removeText(text)
    def setItemToText(self, itemToText: Callable[[Any], str]) -> None:
        """Set the function to be used when converting items
        to their textual presentation.

        Arguments
        ---------
        itemToText:
            The conversion function, or `None` to revert to the
            class-level default.
        """
        if itemToText is None:
            # Drop any instance-level override so that attribute lookup
            # falls back to the class attribute `_itemToText`.
            self.__dict__.pop('_itemToText', None)
        else:
            self._itemToText = itemToText
        # Re-render all items with the new conversion function.
        self._formatAllItems()
    @protect
    def keyPressEvent(self, event: QKeyEvent) -> None:
        """Process key events. The :py:class:`ItemAdapter` supports
        the following keys:

        C: clear the currently selected entry
        Y: revert to the class default item-to-text function
        Z: use plain `str` as item-to-text function (debugging aid)

        Unhandled keys are forwarded to the superclass (if it handles
        key events) or ignored so Qt can propagate them.

        Note: in a :py:class:`QComboBox` this event is only received
        if the combobox is closed (not while currently selecting an entry).
        """
        key = event.key()
        LOG.debug("ItemAdapter[%s].keyPressEvent: key=%d",
                  type(self).__name__, key)
        if key == Qt.Key_C:  # clear
            self._setCurrentItem(None)
        elif key == Qt.Key_Y:  # no itemToText function (inherit from super)
            self.setItemToText(None)
        elif key == Qt.Key_Z:  # simple str() as itemToText function (debug)
            self.setItemToText(str)
        elif hasattr(super(), 'keyPressEvent'):
            super().keyPressEvent(event)
        else:
            event.ignore()
    def debug(self) -> None:
        """Output debug information for this :py:class:`ItemAdapter`:
        the number of entries and, per entry, its index, text and
        `repr`, with the current item marked by '**'.
        """
        if hasattr(super(), 'debug'):
            super().debug()
        print(f"debug: ItemAdapter[{type(self).__name__}]: "
              f"with {self._countItems()} entries:")
        for index, item in enumerate(self._items()):
            print(f"debug:{'**' if item is self._currentItem() else '  '}"
                  f"({index+1}) {self._textForItem(item)} "
                  f"[{repr(item)}]")
class QAdaptedComboBox(ItemAdapter, QComboBox):
    """A :py:class:`QComboBox` implementing the
    :py:class:`ItemAdapter` interface.

    Each item is stored in the combo box itself: the item object is
    kept as the entry's user data and its text as the display text.
    """
    #
    # implementation of the ItemAdapter interface
    # (the original section comment said "methods to be implemented by
    # subclasses", but these ARE the concrete implementations)
    #
    def _countItems(self) -> int:
        """Get the number of items in this :py:class:`QAdaptedComboBox`.
        """
        return self.count()
    def _items(self) -> Iterator[Any]:
        """An iterator for the items in this
        :py:class:`QAdaptedComboBox` (the per-entry user data).
        """
        for index in range(self.count()):
            yield self.itemData(index)
    def _texts(self) -> Iterator[str]:
        """An iterator for the texts presented by this
        :py:class:`QAdaptedComboBox`.
        """
        for index in range(self.count()):
            yield self.itemText(index)
    def _addItem(self, item: Any) -> None:
        """Add an item to this :py:class:`QAdaptedComboBox`.
        It is assumed that the item is not yet contained in this
        :py:class:`QAdaptedComboBox`.
        """
        # display text first, item stored as the entry's user data
        self.addItem(self._textForItem(item), item)
        self._formatItem(item)
    def _removeItem(self, item: Any) -> None:
        """Remove an item from this :py:class:`QAdaptedComboBox`.
        It is assumed that the item is contained in this
        :py:class:`QAdaptedComboBox`, otherwise a
        :py:class:`ValueError` is raised.
        """
        self._removeItemAt(self._indexOfItem(item))
    def _removeItemAt(self, index: int) -> None:
        """Remove the item at the given index.
        """
        self.removeItem(index)
    def _removeText(self, text: str) -> None:
        """Remove the item with the given text. This may be
        overwritten by subclasses when a more efficient implementation
        is possible.
        """
        self._removeItemAt(self._indexOfText(text))
    def _formatItemAt(self, index: int) -> None:
        """Format the item at the given index to reflect
        the state of the underlying item.
        This method may be extended by subclasses.
        """
        self.setItemText(index, self._textForItem(self.itemData(index)))
    def _formatItem(self, item: Any) -> None:
        """Update the format of the item's presentation
        in this :py:class:`QAdaptedComboBox`
        to reflect its state.
        """
        self._formatItemAt(self._indexOfItem(item))
    def _formatAllItems(self) -> None:
        """Format all items in this :py:class:`QAdaptedComboBox`.
        """
        for index in range(self.count()):
            self._formatItemAt(index)
    def _currentItem(self) -> Any:
        """Get the currently selected item.
        This may be `None` if no item is selected.
        """
        return self.currentData()
    def _setCurrentItem(self, item: Any) -> None:
        """Select the given entry in this :py:class:`QAdaptedComboBox`.

        Arguments
        ---------
        item: Any
            The item to become the current item. If the item is not
            contained in this :py:class:`QAdaptedComboBox` (e.g. if
            `item` is `None`), the current will be set to `None`.
        """
        try:
            self.setCurrentIndex(self._indexOfItem(item))
        except LookupError:
            # For an empty QComboBox or a QComboBox in which no
            # current entry is set, the index is -1 (which is also
            # returned by QComboBox.findText if the entry is not found).
            self.setCurrentIndex(-1)
class QAdaptedListWidget(ItemAdapter, QListWidget):
    """A :py:class:`QListWidget` implementing the
    :py:class:`ItemAdapter` interface.

    Each item is wrapped in a :py:class:`QListWidgetItem`: the item
    object is stored as `Qt.UserRole` data, its text as the widget
    item's text. An optional formater callable can post-process each
    `QListWidgetItem` (e.g. for colors or icons).
    """
    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        # optional Callable[[QListWidgetItem], None] applied by
        # _formatQItem after the text has been set
        self._formater = None
    def setListWidgetItemFormater(self, formater:
                                  Callable[[QListWidgetItem], None]) -> None:
        """Set a formater for the list items and immediately re-format
        all items with it.
        """
        self._formater = formater
        self._formatAllItems()
    def updateFormat(self) -> None:
        """Update the format of all items in this
        :py:class:`QAdaptedListWidget`.
        """
        self._formatAllItems()
    #
    # implementation of the ItemAdapter interface
    # (the original section comment said "methods to be implemented by
    # subclasses", but these ARE the concrete implementations)
    #
    def _countItems(self) -> int:
        """Get the number of items in this :py:class:`QAdaptedListWidget`.
        """
        return self.count()
    def _qitem(self, item: Any) -> QListWidgetItem:
        """Get the :py:class:`QListWidgetItem` that holds the given
        item, or `None` if the item is not contained (identity
        comparison via `is`).
        """
        return next((qitem for qitem in self._qitems()
                     if qitem.data(Qt.UserRole) is item), None)
    def _qitems(self) -> Iterator[QListWidgetItem]:
        """An :py:class:`Iterator` for the :py:class:`QListWidgetItem`
        in this :py:class:`QAdaptedListWidget`.
        """
        for index in range(self.count()):
            yield self.item(index)
    def _formatQItem(self, qitem: QListWidgetItem) -> None:
        """Format the given :py:class:`QListWidgetItem` to reflect
        the state of the underlying item.
        This method may be extended by subclasses.
        """
        qitem.setText(self._textForItem(qitem.data(Qt.UserRole)))
        if self._formater is not None:
            self._formater(qitem)
    def _items(self) -> Iterator[Any]:
        """An iterator for the items in this
        :py:class:`QAdaptedListWidget` (the `Qt.UserRole` data).
        """
        for qitem in self._qitems():
            yield qitem.data(Qt.UserRole)
    def _texts(self) -> Iterator[str]:
        """An iterator for the texts presented by this
        :py:class:`QAdaptedListWidget`.
        """
        for qitem in self._qitems():
            yield qitem.text()
    def _addItem(self, item: Any) -> None:
        """Add an item to this :py:class:`QAdaptedListWidget`.
        It is assumed that the item is not yet contained in this
        :py:class:`QAdaptedListWidget`.
        """
        qitem = QListWidgetItem(self._textForItem(item))
        qitem.setData(Qt.UserRole, item)
        self.addItem(qitem)
        self._formatQItem(qitem)
    def _formatItem(self, item: Any) -> None:
        """Update the format of the item's presentation
        in this :py:class:`QAdaptedListWidget`
        to reflect its state.

        NOTE(review): `_qitem` returns `None` for an unknown item,
        which would make `_formatQItem` fail — presumably callers only
        pass contained items; confirm.
        """
        self._formatQItem(self._qitem(item))
    def _formatAllItems(self) -> None:
        """Format all items in this :py:class:`QAdaptedListWidget`.
        """
        for qitem in self._qitems():
            self._formatQItem(qitem)
    def _removeItem(self, item: Any) -> None:
        """Remove an item from this :py:class:`QAdaptedListWidget`.
        It is assumed that the item is contained in this
        :py:class:`QAdaptedListWidget`, otherwise a
        :py:class:`ValueError` is raised.
        """
        # takeItem detaches the QListWidgetItem from the widget; the
        # `del` merely drops our last Python reference to it
        qitem = self.takeItem(self._indexOfItem(item))
        del qitem
    def _currentItem(self) -> Any:
        """Get the currently selected item.
        This may be `None` if no item is selected.
        """
        qitem = self.currentItem()
        return None if qitem is None else qitem.data(Qt.UserRole)
    def _setCurrentItem(self, item: Any) -> None:
        """Select the given entry in this :py:class:`QAdaptedListWidget`.

        Arguments
        ---------
        item: Any
            The item to become the current item. If the item is not
            contained in this :py:class:`QAdaptedListWidget` (e.g. if
            `item` is `None`), the current will be set to `None`.
        """
        try:
            self.setCurrentRow(self._indexOfItem(item))
        except LookupError:
            # row -1 means "no current row" in a QListWidget
            self.setCurrentRow(-1)
| 34.586621 | 79 | 0.598592 |
from typing import Iterator, Iterable, Any, Callable
import logging
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QKeyEvent
from PyQt5.QtWidgets import QComboBox, QListWidget, QListWidgetItem
from .utils import qtName, protect, QDebug
LOG = logging.getLogger(__name__)
class ItemAdapter(QDebug):
_itemToText: Callable[[Any], str] = str
def __init_subclass__(cls, itemType: type = None,
itemToText: Callable[[Any], str] = None,
**kwargs) -> None:
super().__init_subclass__(**kwargs)
if itemType is not None:
setattr(cls, qtName(itemType.__name__), cls._currentItem)
setattr(cls, qtName('set_' + itemType.__name__), cls._currentItem)
if itemToText is not None:
cls._itemToText = staticmethod(itemToText)
print(f"DEBUG1[{cls.__name__}]: itemToText:",
itemToText, cls._itemToText)
def __init__(self, itemToText: Callable[[Any], str] = None,
**kwargs) -> None:
super().__init__(**kwargs)
self.setItemToText(itemToText)
def _items(self) -> Iterator[Any]:
raise NotImplementedError("A 'ItemAdapter' has to implement "
"the _items() method")
def _addItem(self, item: Any) -> None:
raise NotImplementedError("A 'ItemAdapter' has to implement "
"the _addItem() method")
def _removeItem(self, item: Any) -> None:
raise NotImplementedError("A 'ItemAdapter' has to implement "
"the _removeElement() method")
def _currentItem(self) -> Any:
raise NotImplementedError("A 'ItemAdapter' has to implement "
"the _currentItem() method")
def _setCurrentItem(self, item: Any) -> None:
raise NotImplementedError("A 'ItemAdapter' has to implement "
"the _setCurrentItem() method")
def _countItems(self) -> int:
return sum(1 for _ in self._items())
def _textForItem(self, item: Any) -> str:
return self._itemToText(item)
def _formatItem(self, item: Any) -> None:
def _getItemAt(self, index: int) -> Any:
try:
return next((x for i, x in enumerate(self._items()) if i == index))
except StopIteration:
raise IndexError(f"Index {index} beyond end of items.")
def _getTextAt(self, index: int) -> str:
return self._textForItem(self._getItemAt(index))
def _indexOfItem(self, item: Any) -> int:
try:
return next(i for i, x in enumerate(self._items()) if x == item)
except StopIteration:
raise LookupError(f"Item {item} not found.")
def _indexOfText(self, text: str) -> int:
try:
return next(i for i, t in enumerate(self._texts()) if t == text)
except StopIteration:
raise LookupError(f"Item with text '{text}' not found")
def _findItem(self, text: str) -> Any:
try:
return next(item for item in self._items()
if self._textForItem(item) == text)
except StopIteration:
raise LookupError(f"Item with text '{text}' not found.")
def _setCurrentText(self, text: str) -> None:
self._setCurrentItem(self._findItem(text))
def _texts(self) -> Iterator[str]:
for item in self._items():
yield self._textForItem(item)
def _removeText(self, text: str) -> None:
self._removeItem(self._findItem(text))
def _removeItemAt(self, index: int) -> None:
self._removeItem(self._getItemAt(index))
def _removeAllItems(self) -> None:
try:
self._removeItemAt(0)
except IndexError:
pass
def _formatAllItems(self) -> None:
for item in self._items():
self._formatItem(item)
def _updateAllItems(self) -> None:
def setFromIterable(self, iterable: Iterable) -> None:
self._removeAllItems()
for item in iterable:
self._addItem(item)
def updateFromIterable(self, iterable: Iterable) -> None:
bookkeeping = set(self._texts())
for item in iterable:
text = self._textForItem(item)
if text in bookkeeping:
bookkeeping.remove(text)
else:
self._addItem(item)
for text in bookkeeping:
self._removeText(text)
def setItemToText(self, itemToText: Callable[[Any], str]) -> None:
if itemToText is None:
self.__dict__.pop('_itemToText', None)
else:
self._itemToText = itemToText
self._formatAllItems()
@protect
def keyPressEvent(self, event: QKeyEvent) -> None:
key = event.key()
LOG.debug("ItemAdapter[%s].keyPressEvent: key=%d",
type(self).__name__, key)
if key == Qt.Key_C:
self._setCurrentItem(None)
elif key == Qt.Key_Y:
self.setItemToText(None)
elif key == Qt.Key_Z:
self.setItemToText(str)
elif hasattr(super(), 'keyPressEvent'):
super().keyPressEvent(event)
else:
event.ignore()
def debug(self) -> None:
if hasattr(super(), 'debug'):
super().debug()
print(f"debug: ItemAdapter[{type(self).__name__}]: "
f"with {self._countItems()} entries:")
for index, item in enumerate(self._items()):
print(f"debug:{'**' if item is self._currentItem() else ' '}"
f"({index+1}) {self._textForItem(item)} "
f"[{repr(item)}]")
class QAdaptedComboBox(ItemAdapter, QComboBox):
def _countItems(self) -> int:
return self.count()
def _items(self) -> Iterator[Any]:
for index in range(self.count()):
yield self.itemData(index)
def _texts(self) -> Iterator[str]:
for index in range(self.count()):
yield self.itemText(index)
def _addItem(self, item: Any) -> None:
self.addItem(self._textForItem(item), item)
self._formatItem(item)
def _removeItem(self, item: Any) -> None:
self._removeItemAt(self._indexOfItem(item))
def _removeItemAt(self, index: int) -> None:
self.removeItem(index)
def _removeText(self, text: str) -> None:
self._removeItemAt(self._indexOfText(text))
def _formatItemAt(self, index: int) -> None:
self.setItemText(index, self._textForItem(self.itemData(index)))
def _formatItem(self, item: Any) -> None:
self._formatItemAt(self._indexOfItem(item))
def _formatAllItems(self) -> None:
for index in range(self.count()):
self._formatItemAt(index)
def _currentItem(self) -> Any:
return self.currentData()
def _setCurrentItem(self, item: Any) -> None:
try:
self.setCurrentIndex(self._indexOfItem(item))
except LookupError:
self.setCurrentIndex(-1)
class QAdaptedListWidget(ItemAdapter, QListWidget):
def __init__(self, **kwargs) -> None:
super().__init__(**kwargs)
self._formater = None
def setListWidgetItemFormater(self, formater:
Callable[[QListWidgetItem], None]) -> None:
self._formater = formater
self._formatAllItems()
def updateFormat(self) -> None:
self._formatAllItems()
def _countItems(self) -> int:
return self.count()
def _qitem(self, item: Any) -> QListWidgetItem:
return next((qitem for qitem in self._qitems()
if qitem.data(Qt.UserRole) is item), None)
def _qitems(self) -> Iterator[QListWidgetItem]:
for index in range(self.count()):
yield self.item(index)
def _formatQItem(self, qitem: QListWidgetItem) -> None:
qitem.setText(self._textForItem(qitem.data(Qt.UserRole)))
if self._formater is not None:
self._formater(qitem)
def _items(self) -> Iterator[Any]:
for qitem in self._qitems():
yield qitem.data(Qt.UserRole)
def _texts(self) -> Iterator[str]:
for qitem in self._qitems():
yield qitem.text()
def _addItem(self, item: Any) -> None:
qitem = QListWidgetItem(self._textForItem(item))
qitem.setData(Qt.UserRole, item)
self.addItem(qitem)
self._formatQItem(qitem)
def _formatItem(self, item: Any) -> None:
self._formatQItem(self._qitem(item))
def _formatAllItems(self) -> None:
for qitem in self._qitems():
self._formatQItem(qitem)
def _removeItem(self, item: Any) -> None:
qitem = self.takeItem(self._indexOfItem(item))
del qitem
def _currentItem(self) -> Any:
qitem = self.currentItem()
return None if qitem is None else qitem.data(Qt.UserRole)
def _setCurrentItem(self, item: Any) -> None:
try:
self.setCurrentRow(self._indexOfItem(item))
except LookupError:
self.setCurrentRow(-1)
| true | true |
f71de07e71e77d9a9f6bff724a046d4846818c02 | 335 | py | Python | exercise040.py | AlissonRaphael/python_exercises | 3f1185c4f2fff24c9fa2ffd6b60f90599044c985 | [
"MIT"
] | null | null | null | exercise040.py | AlissonRaphael/python_exercises | 3f1185c4f2fff24c9fa2ffd6b60f90599044c985 | [
"MIT"
] | null | null | null | exercise040.py | AlissonRaphael/python_exercises | 3f1185c4f2fff24c9fa2ffd6b60f90599044c985 | [
"MIT"
] | null | null | null | nota1 = float(input('Digite a primeira nota: '))
nota2 = float(input('Digite a segunda nota: '))
media = (nota1+nota2)/2
if media >= 7:
print('Aprovado! Com média {:.2f}'.format(media))
elif media >= 5 and media < 7:
print('Recuperação! Com média {:.2f}'.format(media))
else:
print('Reprovado! Com média {:.2f}'.format(media))
| 27.916667 | 54 | 0.656716 | nota1 = float(input('Digite a primeira nota: '))
nota2 = float(input('Digite a segunda nota: '))
media = (nota1+nota2)/2
if media >= 7:
print('Aprovado! Com média {:.2f}'.format(media))
elif media >= 5 and media < 7:
print('Recuperação! Com média {:.2f}'.format(media))
else:
print('Reprovado! Com média {:.2f}'.format(media))
| true | true |
f71de104313a62da31007f789cf4632dbe18de9f | 2,592 | py | Python | bookscrape/crawl/exporters.py | clemfromspace/pybook | ed16c24a3d1caeab07b5111812c8eb07ba598b8a | [
"WTFPL"
] | 12 | 2018-01-20T06:17:46.000Z | 2022-02-01T02:04:07.000Z | bookscrape/crawl/exporters.py | clemfromspace/pybook | ed16c24a3d1caeab07b5111812c8eb07ba598b8a | [
"WTFPL"
] | 6 | 2021-03-18T20:40:35.000Z | 2022-03-11T23:26:11.000Z | bookscrape/crawl/exporters.py | clemfromspace/pybook | ed16c24a3d1caeab07b5111812c8eb07ba598b8a | [
"WTFPL"
] | 1 | 2020-06-02T18:16:12.000Z | 2020-06-02T18:16:12.000Z | """This module contains the exporters for the ``pybook`` package"""
import os
from operator import itemgetter
from scrapy.exporters import BaseItemExporter
from reportlab.lib.pagesizes import letter
from reportlab.platypus import SimpleDocTemplate, Image, PageBreak
from reportlab.lib.units import inch
from bookscrape.exceptions import BookScrapeException
from bookscrape.loggers import logger
class PdfExporter(BaseItemExporter):
    """Exporter to export the crawled items images as a pdf file.

    Collected entries are `(volume_index, page_index, image_path)`
    tuples (see `export_item`); on `finish_exporting` they are sorted
    by volume then page and rendered one image per pdf page.
    """
    # page image dimensions used for every rendered image
    IMAGE_WIDTH = 7 * inch
    IMAGE_HEIGHT = 9.5 * inch
    # instance attributes, initialized in __init__
    output_dir = None
    images_path = None
    file_name = None
    images = None
    def __init__(self, output_dir, images_path, file_name, **kwargs):
        """Store output locations and forward remaining keyword
        arguments to the scrapy exporter configuration.

        Arguments:
            output_dir: directory in which the pdf file is created.
            images_path: base directory of the downloaded images.
            file_name: name of the pdf file to produce.
        """
        self._configure(kwargs, dont_fail=True)
        self.output_dir = output_dir
        self.images_path = images_path
        self.file_name = file_name
        self.images = list()
    def _clean_images(self):
        """Delete the downloaded image files; already-missing files
        are silently tolerated."""
        # Remove the downloaded images
        for image in self.images:
            try:
                os.remove(image[2])  # image_path component of the tuple
            except FileNotFoundError:
                pass
    def _get_document(self) -> SimpleDocTemplate:
        """Create the reportlab document template for the output pdf
        (letter page size, standard margins)."""
        return SimpleDocTemplate(
            os.path.join(
                self.output_dir,
                self.file_name
            ),
            pagesize=letter,
            rightMargin=72,
            leftMargin=72,
            topMargin=72,
            bottomMargin=18
        )
    def finish_exporting(self):
        """Build the document and clean the downloaded images.

        Raises:
            BookScrapeException: if no images were collected.
        """
        if not self.images:  # No images were found :(
            raise BookScrapeException('Found no images to export :(')
        document = self._get_document()
        story = list()
        # sort by (volume_index, page_index) so pages appear in order
        for image in sorted(self.images, key=itemgetter(0, 1)):
            story.append(
                Image(
                    image[2],
                    self.IMAGE_WIDTH,
                    self.IMAGE_HEIGHT
                )
            )
            story.append(PageBreak())
        document.build(story)
        self._clean_images()
        logger.info(
            'Pdf document for the book slug "%s" (%d pages) available: %s' % (
                self.file_name.split('_')[0],
                len(self.images),
                document.filename
            )
        )
    def export_item(self, item):
        """Record one crawled page: resolve the image path and queue a
        (volume_index, page_index, image_path) tuple for the pdf."""
        image_path = os.path.join(
            self.images_path,
            item['images'][0]['path']
        )
        self.images.append(
            (item['volume_index'], item['page_index'], image_path,)
        )
| 27.574468 | 78 | 0.576389 |
import os
from operator import itemgetter
from scrapy.exporters import BaseItemExporter
from reportlab.lib.pagesizes import letter
from reportlab.platypus import SimpleDocTemplate, Image, PageBreak
from reportlab.lib.units import inch
from bookscrape.exceptions import BookScrapeException
from bookscrape.loggers import logger
class PdfExporter(BaseItemExporter):
IMAGE_WIDTH = 7 * inch
IMAGE_HEIGHT = 9.5 * inch
output_dir = None
images_path = None
file_name = None
images = None
def __init__(self, output_dir, images_path, file_name, **kwargs):
self._configure(kwargs, dont_fail=True)
self.output_dir = output_dir
self.images_path = images_path
self.file_name = file_name
self.images = list()
def _clean_images(self):
for image in self.images:
try:
os.remove(image[2])
except FileNotFoundError:
pass
def _get_document(self) -> SimpleDocTemplate:
return SimpleDocTemplate(
os.path.join(
self.output_dir,
self.file_name
),
pagesize=letter,
rightMargin=72,
leftMargin=72,
topMargin=72,
bottomMargin=18
)
def finish_exporting(self):
if not self.images:
raise BookScrapeException('Found no images to export :(')
document = self._get_document()
story = list()
for image in sorted(self.images, key=itemgetter(0, 1)):
story.append(
Image(
image[2],
self.IMAGE_WIDTH,
self.IMAGE_HEIGHT
)
)
story.append(PageBreak())
document.build(story)
self._clean_images()
logger.info(
'Pdf document for the book slug "%s" (%d pages) available: %s' % (
self.file_name.split('_')[0],
len(self.images),
document.filename
)
)
def export_item(self, item):
image_path = os.path.join(
self.images_path,
item['images'][0]['path']
)
self.images.append(
(item['volume_index'], item['page_index'], image_path,)
)
| true | true |
f71de157a79c0a64ecb3c949cd0249af54a78055 | 975 | py | Python | lib/surface/compute/disks/list.py | bopopescu/SDK | e6d9aaee2456f706d1d86e8ec2a41d146e33550d | [
"Apache-2.0"
] | null | null | null | lib/surface/compute/disks/list.py | bopopescu/SDK | e6d9aaee2456f706d1d86e8ec2a41d146e33550d | [
"Apache-2.0"
] | null | null | null | lib/surface/compute/disks/list.py | bopopescu/SDK | e6d9aaee2456f706d1d86e8ec2a41d146e33550d | [
"Apache-2.0"
] | 2 | 2020-11-04T03:08:21.000Z | 2020-11-05T08:14:41.000Z | # Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command for listing persistent disks."""
from googlecloudsdk.api_lib.compute import base_classes
class List(base_classes.ZonalLister):
  """List Google Compute Engine persistent disks."""

  @property
  def resource_type(self):
    """The Compute API collection name listed by this command."""
    return 'disks'

  @property
  def service(self):
    """The Compute API service endpoint used to list disks."""
    return self.compute.disks


List.detailed_help = base_classes.GetZonalListerHelp('disks')
| 31.451613 | 74 | 0.761026 |
from googlecloudsdk.api_lib.compute import base_classes
class List(base_classes.ZonalLister):
@property
def service(self):
return self.compute.disks
@property
def resource_type(self):
return 'disks'
List.detailed_help = base_classes.GetZonalListerHelp('disks')
| true | true |
f71de17ca6b42fb38fd37ba479f167cdd9b0ea24 | 491 | py | Python | src/utils/plot.py | FedericoBottoni/household-poverty-classifier | 7357cc6a6c08e9cf76cdd79a04cce32a5982fa85 | [
"MIT"
] | null | null | null | src/utils/plot.py | FedericoBottoni/household-poverty-classifier | 7357cc6a6c08e9cf76cdd79a04cce32a5982fa85 | [
"MIT"
] | null | null | null | src/utils/plot.py | FedericoBottoni/household-poverty-classifier | 7357cc6a6c08e9cf76cdd79a04cce32a5982fa85 | [
"MIT"
] | null | null | null | #import numpy as np
#import pandas as pd
#import seaborn as sns
#import matplotlib
#%matplotlib inline
def plot_history(network_history, n_epochs):
    """Placeholder for plotting the training history.

    The intended loss-curve plot (train vs. validation loss over
    `n_epochs`) has not been implemented yet; for now this only
    reports that plotting is unavailable.
    """
    print('Plot is not implemented yet')
def plot_history(network_history, n_epochs):
print('Plot is not implemented yet')
| true | true |
f71de1c85888c25420aa564e28401bc39d740e2c | 2,650 | py | Python | easy_db/util.py | kpence/easy_db | fbe4c22a79336ec08980221405aca5c65bf02caf | [
"MIT"
] | null | null | null | easy_db/util.py | kpence/easy_db | fbe4c22a79336ec08980221405aca5c65bf02caf | [
"MIT"
] | null | null | null | easy_db/util.py | kpence/easy_db | fbe4c22a79336ec08980221405aca5c65bf02caf | [
"MIT"
] | null | null | null | '''
Utility functions for easy_db.
'''
import os
import sqlite3, pyodbc
from typing import List, Dict, Any
def check_if_file_is_sqlite(filename: str) -> bool:
    '''
    Check if file is a sqlite database.

    A SQLite 3 database file starts with the 16-byte magic header
    b'SQLite format 3\\x00' and is at least 100 bytes long (the size
    of the database file header), so anything smaller or missing is
    rejected without being opened.

    See: https://stackoverflow.com/questions/12932607/how-to-check-if-a-sqlite3-database-exists-in-python
    '''
    if not os.path.isfile(filename):
        return False
    if os.path.getsize(filename) < 100:  # SQLite db file header is 100 bytes (minimum file size)
        return False
    with open(filename, 'rb') as possible_db_file:
        header = possible_db_file.read(100)
    # Idiom: return the comparison directly instead of an
    # if/else returning True/False.
    return header[:16] == b'SQLite format 3\x00'
def list_of_dicts_from_query(cursor, sql: str, tablename: str, db_type: str, parameters: list = None) -> List[Dict[str, Any]]:
    '''
    Query db using cursor, supplied sql, and tablename.
    Return list of dicts for query result.

    `parameters` defaults to no query parameters (a mutable default
    list was replaced by the None-sentinel idiom).

    Returns None if the query fails, and [{}] if the column names of a
    non-sqlite/non-SQL-Server connection cannot be decoded.
    '''
    if parameters is None:
        parameters = []
    try:
        data = cursor.execute(sql, parameters).fetchall()
    except (sqlite3.OperationalError, pyodbc.ProgrammingError) as error:
        print(f'ERROR querying table {tablename}! Error below:')
        print(error)
        print(f'SQL: {sql}')
        return None

    if db_type == 'SQLITE3':
        # sqlite3 cursor.description rows: (name, None, None, ...)
        columns = [description[0] for description in cursor.description]
    elif db_type == 'SQL SERVER':
        columns = [column[0] for column in cursor.description]
    else:
        # e.g. Access via pyodbc: ask the driver for the table's columns
        try:
            columns = [row.column_name for row in cursor.columns(table=tablename)]
        except UnicodeDecodeError:
            print('\nERROR - Unable to read column names.')
            print('This may occur if using Access database with column descriptions populated.')
            print('Try deleting the column descriptions.\n')
            return [{}]
    table_data = [dict(zip(columns, row)) for row in data]
    return table_data
# set for quickly checking possibly malicious characters
unallowed_characters = {';', '(', ')', '=', '+', "'", '"', '.', '[', ']', ',',
                        '{', '}', '\\', '/', '`', '~', '!', '@', '#', '$', '%', '^', '&', '*'}


def name_clean(name: str) -> bool:
    '''
    Check name and return True if it looks clean (not malicious).
    Return False if it name could be attempting sql injection.

    Used for table names and column names (as these can't be parameterized).
    '''
    contains_bad_character = any(character in unallowed_characters
                                 for character in name)
    if contains_bad_character or 'DROP' in name.upper():
        print(f'ERROR!!! Prohibited characters detected in:\n  {name}')
        return False
    return True
| 33.974359 | 122 | 0.619623 | import os
import sqlite3, pyodbc
from typing import List, Dict, Any
def check_if_file_is_sqlite(filename: str) -> bool:
if not os.path.isfile(filename):
return False
if os.path.getsize(filename) < 100:
return False
with open(filename, 'rb') as possible_db_file:
header = possible_db_file.read(100)
if header[:16] == b'SQLite format 3\x00':
return True
else:
return False
def list_of_dicts_from_query(cursor, sql: str, tablename: str, db_type: str, parameters: list=[]) -> List[Dict[str, Any]]:
try:
data = cursor.execute(sql, parameters).fetchall()
except (sqlite3.OperationalError, pyodbc.ProgrammingError) as error:
print(f'ERROR querying table {tablename}! Error below:')
print(error)
print(f'SQL: {sql}')
return
if db_type == 'SQLITE3':
columns = [description[0] for description in cursor.description]
elif db_type == 'SQL SERVER':
columns = [column[0] for column in cursor.description]
else:
try:
columns = [row.column_name for row in cursor.columns(table=tablename)]
except UnicodeDecodeError:
print('\nERROR - Unable to read column names.')
print('This may occur if using Access database with column descriptions populated.')
print('Try deleting the column descriptions.\n')
return [{}]
table_data = [dict(zip(columns, row)) for row in data]
return table_data
unallowed_characters = {';', '(', ')', '=', '+', "'", '"', '.', '[', ']', ',',
'{', '}', '\\', '/', '`', '~', '!', '@', '#', '$', '%', '^', '&', '*'}
def name_clean(name: str) -> bool:
for char in name:
if char in unallowed_characters:
print(f'ERROR!!! Prohibited characters detected in:\n {name}')
return False
if 'DROP' in name.upper():
print(f'ERROR!!! Prohibited characters detected in:\n {name}')
return False
return True
| true | true |
f71de2d3873a87c6a6788a7e0c239ea5018a0dee | 63 | py | Python | tests/__init__.py | andrewm4894/am4894pd | 7397abe0e1a0c1dee049c63c6d987eb62cf01e31 | [
"MIT"
] | null | null | null | tests/__init__.py | andrewm4894/am4894pd | 7397abe0e1a0c1dee049c63c6d987eb62cf01e31 | [
"MIT"
] | 215 | 2019-11-24T09:41:01.000Z | 2022-03-31T15:26:02.000Z | tests/__init__.py | andrewm4894/am4894pd | 7397abe0e1a0c1dee049c63c6d987eb62cf01e31 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""Unit test package for am4894pd."""
| 15.75 | 37 | 0.571429 | true | true | |
f71de2d79d4e6a915befd82063e26f672dc6f81a | 2,056 | py | Python | autovideo/augmentation/color/UniformColorQuantizationToNBits_primitive.py | wanghaisheng/autovideo | ca6c05e522f6ea8cb2043a60195769f3906a3a19 | [
"MIT"
] | 4 | 2021-11-01T15:33:03.000Z | 2022-02-10T10:37:56.000Z | autovideo/augmentation/color/UniformColorQuantizationToNBits_primitive.py | wanghaisheng/autovideo | ca6c05e522f6ea8cb2043a60195769f3906a3a19 | [
"MIT"
] | 2 | 2021-11-08T05:09:00.000Z | 2022-03-08T20:42:02.000Z | autovideo/augmentation/color/UniformColorQuantizationToNBits_primitive.py | wanghaisheng/autovideo | ca6c05e522f6ea8cb2043a60195769f3906a3a19 | [
"MIT"
] | 2 | 2022-02-28T10:03:14.000Z | 2022-03-23T09:00:06.000Z | '''
Copyright 2021 D3M Team
Copyright (c) 2021 DATA Lab at Texas A&M University
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
from d3m import container
from d3m.metadata import hyperparams
import imgaug.augmenters as iaa
import typing
from autovideo.utils import construct_primitive_metadata
from autovideo.base.augmentation_base import AugmentationPrimitiveBase
__all__ = ('UniformColorQuantizationToNBitsPrimitive',)
Inputs = container.DataFrame
class Hyperparams(hyperparams.Hyperparams):
    # Number of low-order bits kept per image channel; a (low, high) tuple means
    # the augmenter samples a value from that range per image.
    nb_bits = hyperparams.Hyperparameter[typing.Union[float,tuple,list]](
        default=(1, 8),
        description="Number of bits to keep in each image’s array component.",
        semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'],
    )
    # FIX: the previous description ("Minimum workers to extract frames
    # simultaneously") was copy-pasted from an unrelated parameter; this is
    # the RNG seed passed to the imgaug augmenter.
    seed = hyperparams.Constant[int](
        default=0,
        description='Seed for the random number generator of the augmenter',
        semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'],
    )
class UniformColorQuantizationToNBitsPrimitive(AugmentationPrimitiveBase[Inputs, Hyperparams]):
    """
    Augmentation primitive that quantizes images by zeroing the lowest
    8-B bits of every array component (uniform color quantization).
    """

    metadata = construct_primitive_metadata("augmentation", "color_UniformColorQuantizationToNBits")

    def _get_function(self):
        """
        Build the imgaug augmenter configured from this primitive's hyperparams.
        """
        return iaa.UniformColorQuantizationToNBits(
            nb_bits=self.hyperparams["nb_bits"],
            seed=self.hyperparams["seed"],
        )
| 34.266667 | 100 | 0.754377 |
from d3m import container
from d3m.metadata import hyperparams
import imgaug.augmenters as iaa
import typing
from autovideo.utils import construct_primitive_metadata
from autovideo.base.augmentation_base import AugmentationPrimitiveBase
__all__ = ('UniformColorQuantizationToNBitsPrimitive',)
Inputs = container.DataFrame
class Hyperparams(hyperparams.Hyperparams):
nb_bits = hyperparams.Hyperparameter[typing.Union[float,tuple,list]](
default=(1, 8),
description="Number of bits to keep in each image’s array component.",
semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'],
)
seed = hyperparams.Constant[int](
default=0,
description='Minimum workers to extract frames simultaneously',
semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'],
)
class UniformColorQuantizationToNBitsPrimitive(AugmentationPrimitiveBase[Inputs, Hyperparams]):
metadata = construct_primitive_metadata("augmentation", "color_UniformColorQuantizationToNBits")
def _get_function(self):
nb_bits = self.hyperparams["nb_bits"]
seed = self.hyperparams["seed"]
return iaa.UniformColorQuantizationToNBits(nb_bits=nb_bits, seed=seed)
| true | true |
f71de36f31d52da0ff58916c32eb7b71a25b256b | 1,805 | py | Python | remote_works/checkout/views/delivery.py | tetyanaloskutova/saleor | b3bb51e9c0c4c2febf4aa1e2a7d893e77c331e89 | [
"BSD-3-Clause"
] | 7 | 2019-05-17T14:27:13.000Z | 2021-12-17T22:52:40.000Z | remote_works/checkout/views/delivery.py | tetyanaloskutova/saleor | b3bb51e9c0c4c2febf4aa1e2a7d893e77c331e89 | [
"BSD-3-Clause"
] | 9 | 2019-04-13T09:24:28.000Z | 2019-09-09T15:35:05.000Z | remote_works/checkout/views/delivery.py | tetyanaloskutova/remote-works | b3bb51e9c0c4c2febf4aa1e2a7d893e77c331e89 | [
"BSD-3-Clause"
] | null | null | null | from django.shortcuts import redirect
from django.template.response import TemplateResponse
from ..utils import (
get_cart_data_for_checkout, get_taxes_for_cart,
update_delivery_address_in_anonymous_cart, update_delivery_address_in_cart)
def anonymous_user_delivery_address_view(request, cart):
    """Display the delivery step for a user who is not logged in."""
    user_form, address_form, updated = (
        update_delivery_address_in_anonymous_cart(
            cart, request.POST or None, request.country))
    # A successful update advances the checkout to the delivery-method step.
    if updated:
        return redirect('checkout:delivery-method')
    taxes = get_taxes_for_cart(cart, request.taxes)
    context = get_cart_data_for_checkout(cart, request.discounts, taxes)
    context['address_form'] = address_form
    context['user_form'] = user_form
    return TemplateResponse(request, 'checkout/delivery_address.html', context)
def user_delivery_address_view(request, cart):
    """Display the delivery step for a logged in user.

    Besides entering a new address the user may select one of the
    existing entries from their address book.
    """
    cart.email = request.user.email
    cart.save(update_fields=['email'])
    saved_addresses = cart.user.addresses.all()
    addresses_form, address_form, updated = update_delivery_address_in_cart(
        cart, saved_addresses, request.POST or None, request.country)
    # A successful update advances the checkout to the delivery-method step.
    if updated:
        return redirect('checkout:delivery-method')
    taxes = get_taxes_for_cart(cart, request.taxes)
    context = get_cart_data_for_checkout(cart, request.discounts, taxes)
    context['additional_addresses'] = saved_addresses
    context['address_form'] = address_form
    context['user_form'] = addresses_form
    return TemplateResponse(request, 'checkout/delivery_address.html', context)
| 36.836735 | 79 | 0.735734 | from django.shortcuts import redirect
from django.template.response import TemplateResponse
from ..utils import (
get_cart_data_for_checkout, get_taxes_for_cart,
update_delivery_address_in_anonymous_cart, update_delivery_address_in_cart)
def anonymous_user_delivery_address_view(request, cart):
user_form, address_form, updated = (
update_delivery_address_in_anonymous_cart(
cart, request.POST or None, request.country))
if updated:
return redirect('checkout:delivery-method')
taxes = get_taxes_for_cart(cart, request.taxes)
ctx = get_cart_data_for_checkout(cart, request.discounts, taxes)
ctx.update({
'address_form': address_form,
'user_form': user_form})
return TemplateResponse(request, 'checkout/delivery_address.html', ctx)
def user_delivery_address_view(request, cart):
cart.email = request.user.email
cart.save(update_fields=['email'])
user_addresses = cart.user.addresses.all()
addresses_form, address_form, updated = update_delivery_address_in_cart(
cart, user_addresses, request.POST or None, request.country)
if updated:
return redirect('checkout:delivery-method')
taxes = get_taxes_for_cart(cart, request.taxes)
ctx = get_cart_data_for_checkout(cart, request.discounts, taxes)
ctx.update({
'additional_addresses': user_addresses,
'address_form': address_form,
'user_form': addresses_form})
return TemplateResponse(request, 'checkout/delivery_address.html', ctx)
| true | true |
f71de438177d496f549f66bef245160a2ec87256 | 5,619 | py | Python | utils/performMatch.py | secondfry/school21-randomcoffee | 261b8d562d02b5a79b12603e0b74c90289523408 | [
"MIT"
] | 3 | 2021-02-28T12:00:26.000Z | 2021-03-14T03:00:42.000Z | utils/performMatch.py | secondfry/school21-randomcoffee | 261b8d562d02b5a79b12603e0b74c90289523408 | [
"MIT"
] | null | null | null | utils/performMatch.py | secondfry/school21-randomcoffee | 261b8d562d02b5a79b12603e0b74c90289523408 | [
"MIT"
] | null | null | null | import random
import secrets
from collections import deque
from typing import Deque, Dict, Optional
from config.constants import (CALLBACK_ACTIVE_NO, CALLBACK_ACTIVE_YES,
CALLBACK_CAMPUS_KAZAN, CALLBACK_CAMPUS_MOSCOW,
USER_DATA_V1_AUTHORIZED,
USER_DATA_V1_INTRA_LOGIN,
USER_DATA_V1_MATCH_ACCEPTED,
USER_DATA_V1_MATCH_NOTIFIED,
USER_DATA_V1_MATCH_WITH,
USER_DATA_V1_SETTINGS_ACTIVE,
USER_DATA_V1_SETTINGS_CAMPUS,
USER_DATA_V1_TELEGRAM_USERNAME)
from config.env import ADMIN_IDS
from handlers.commandDump import perform_dump
from handlers.error import handle_common_block_errors, send_error
from telegram import InlineKeyboardButton, InlineKeyboardMarkup, TelegramError
from telegram.ext import CallbackContext
from utils.getters import get_bucket
def send_match_message(ctx: CallbackContext, fromid: int, tologin: str, tohandle: str) -> None:
    """Notify user ``fromid`` about the peer they were matched with.

    Best-effort: on delivery failure the flag USER_DATA_V1_MATCH_NOTIFIED
    simply stays False for that user.
    """
    kbd = [
        [
            InlineKeyboardButton('Подтвердить встречу', callback_data='match-accept')
        ]
    ]
    try:
        ctx.bot.send_message(
            fromid,
            text='Твой случайный кофе на этой неделе...\nC пиром {} [tg: @{}]!\n\nПодтверди получение сообщения:'.format(
                tologin,
                tohandle
            ),
            reply_markup=InlineKeyboardMarkup(kbd)
        )
        # Mark as notified only after the message went out successfully.
        ctx.dispatcher.user_data[fromid][USER_DATA_V1_MATCH_NOTIFIED] = True
    except Exception:
        # FIX: a bare `except:` would also swallow KeyboardInterrupt and
        # SystemExit; catch only Exception and keep the best-effort behavior.
        # TODO: log the failure (cf. handle_common_block_errors usage in
        # perform_match).
        pass
def match(ctx: CallbackContext, aid: int, bid: int, alogin: str, blogin: str, ahandle: str, bhandle: str) -> None:
    """Record a mutual pairing between two users and notify both sides."""
    user_data = ctx.dispatcher.user_data
    user_data[aid][USER_DATA_V1_MATCH_WITH] = bid
    user_data[bid][USER_DATA_V1_MATCH_WITH] = aid
    # Each side is told the other side's intra login and telegram handle.
    for recipient, login, handle in ((aid, blogin, bhandle), (bid, alogin, ahandle)):
        send_match_message(ctx, recipient, login, handle)
def find_peer_from_campus(
    uids: Deque[int],
    user_campuses: Dict[int, str],
    campus: str
) -> Optional[int]:
    """Return the first queued user id whose campus equals ``campus``, or None."""
    return next((uid for uid in uids if user_campuses[uid] == campus), None)
def perform_match(ctx: CallbackContext) -> None:
    """Weekly matching round: pair active users within each campus bucket.

    Users are grouped by bucket (campus / online / unknown), shuffled with a
    cryptographically secure RNG and paired off; a leftover campus user is
    matched with an online user from the same campus when possible.
    """
    perform_dump(ctx, ADMIN_IDS[0])

    buckets: Dict[str, Deque[int]] = {
        CALLBACK_CAMPUS_KAZAN: deque(),
        CALLBACK_CAMPUS_MOSCOW: deque(),
        'online': deque(),
        '???': deque(),
    }
    user_campuses = {}
    user_handles = {}
    user_logins = {}
    for uid, udata in ctx.dispatcher.user_data.items():
        # Reset last round's match state for everyone, including inactive users.
        udata[USER_DATA_V1_MATCH_ACCEPTED] = False
        udata[USER_DATA_V1_MATCH_NOTIFIED] = False
        udata[USER_DATA_V1_MATCH_WITH] = None

        if not udata.get(USER_DATA_V1_AUTHORIZED, False):
            udata[USER_DATA_V1_AUTHORIZED] = False
            continue

        if udata.get(USER_DATA_V1_SETTINGS_ACTIVE, CALLBACK_ACTIVE_NO) != CALLBACK_ACTIVE_YES:
            # Opted out this week: normalize the flag and send a reminder.
            udata[USER_DATA_V1_SETTINGS_ACTIVE] = CALLBACK_ACTIVE_NO
            try:
                ctx.bot.send_message(uid, text='На этой неделе ты выбрал не идти на случайный кофе.\n'
                                               'Если передумаешь и изменишь настройки, '
                                               'то завтра тебе должно будет подобрать пару.')
            except TelegramError as ex:
                if not handle_common_block_errors(ctx, uid, ex):
                    send_error(ctx, uid, udata[USER_DATA_V1_TELEGRAM_USERNAME], udata[USER_DATA_V1_INTRA_LOGIN],
                               'Can\'t send inactivity notice.', ex)
            except Exception as ex:
                send_error(ctx, uid, udata[USER_DATA_V1_TELEGRAM_USERNAME], udata[USER_DATA_V1_INTRA_LOGIN],
                           'Can\'t send inactivity notice.', ex)
            continue

        bucket = get_bucket(udata)
        user_handles[uid] = udata.get(USER_DATA_V1_TELEGRAM_USERNAME, '???')
        user_logins[uid] = udata.get(USER_DATA_V1_INTRA_LOGIN, '???')
        user_campuses[uid] = udata.get(USER_DATA_V1_SETTINGS_CAMPUS, '???')
        buckets[bucket].append(uid)

    # FIX: random.shuffle() lost its `random` parameter in Python 3.11; use a
    # secrets-backed SystemRandom instance instead of the old lambda (which
    # also only had a granularity of 1/100).
    rng = secrets.SystemRandom()
    for uids in buckets.values():
        rng.shuffle(uids)

    for bucket, uids in buckets.items():
        if bucket == '???':
            # Nobody should end up here; alert the admin if somebody did.
            if uids:
                ctx.bot.send_message(
                    ADMIN_IDS[0],
                    text='For some reason ??? bucket has #{} accounts in it'.format(len(uids))
                )
            continue

        # Pair users off within the bucket.
        while len(uids) > 1:
            a = uids.pop()
            b = uids.pop()
            match(ctx, a, b, user_logins.get(a), user_logins.get(b), user_handles.get(a), user_handles.get(b))

        if not uids:
            continue

        if bucket != 'online':
            # One campus user left over: pair with an online user of the same
            # campus when available.
            a = uids.pop()
            b = find_peer_from_campus(buckets['online'], user_campuses, user_campuses[a])
            if not b:
                continue
            # FIX: remove the chosen online peer from its bucket so it cannot
            # be matched a second time when the 'online' bucket is processed.
            buckets['online'].remove(b)
            match(ctx, a, b, user_logins.get(a), user_logins.get(b), user_handles.get(a), user_handles.get(b))

    # TODO reimplement saviour mechanic (previously paired the last leftover
    # online user with a dedicated saviour account).

    buckets.clear()
    user_campuses.clear()
    user_handles.clear()
    user_logins.clear()

    perform_dump(ctx, ADMIN_IDS[0])
| 36.019231 | 121 | 0.596903 | import random
import secrets
from collections import deque
from typing import Deque, Dict, Optional
from config.constants import (CALLBACK_ACTIVE_NO, CALLBACK_ACTIVE_YES,
CALLBACK_CAMPUS_KAZAN, CALLBACK_CAMPUS_MOSCOW,
USER_DATA_V1_AUTHORIZED,
USER_DATA_V1_INTRA_LOGIN,
USER_DATA_V1_MATCH_ACCEPTED,
USER_DATA_V1_MATCH_NOTIFIED,
USER_DATA_V1_MATCH_WITH,
USER_DATA_V1_SETTINGS_ACTIVE,
USER_DATA_V1_SETTINGS_CAMPUS,
USER_DATA_V1_TELEGRAM_USERNAME)
from config.env import ADMIN_IDS
from handlers.commandDump import perform_dump
from handlers.error import handle_common_block_errors, send_error
from telegram import InlineKeyboardButton, InlineKeyboardMarkup, TelegramError
from telegram.ext import CallbackContext
from utils.getters import get_bucket
def send_match_message(ctx: CallbackContext, fromid: int, tologin: str, tohandle: str) -> None:
kbd = [
[
InlineKeyboardButton('Подтвердить встречу', callback_data='match-accept')
]
]
try:
ctx.bot.send_message(
fromid,
text='Твой случайный кофе на этой неделе...\nC пиром {} [tg: @{}]!\n\nПодтверди получение сообщения:'.format(
tologin,
tohandle
),
reply_markup=InlineKeyboardMarkup(kbd)
)
ctx.dispatcher.user_data[fromid][USER_DATA_V1_MATCH_NOTIFIED] = True
except:
pass
def match(ctx: CallbackContext, aid: int, bid: int, alogin: str, blogin: str, ahandle: str, bhandle: str) -> None:
ctx.dispatcher.user_data[aid][USER_DATA_V1_MATCH_WITH] = bid
ctx.dispatcher.user_data[bid][USER_DATA_V1_MATCH_WITH] = aid
send_match_message(ctx, aid, blogin, bhandle)
send_match_message(ctx, bid, alogin, ahandle)
def find_peer_from_campus(
uids: Deque[int],
user_campuses: Dict[int, str],
campus: str
) -> Optional[int]:
for uid in uids:
if user_campuses[uid] == campus:
return uid
return None
def perform_match(ctx: CallbackContext) -> None:
perform_dump(ctx, ADMIN_IDS[0])
buckets: Dict[str, Deque[int]] = {
CALLBACK_CAMPUS_KAZAN: deque(),
CALLBACK_CAMPUS_MOSCOW: deque(),
'online': deque(),
'???': deque(),
}
user_campuses = {}
user_handles = {}
user_logins = {}
for uid, udata in ctx.dispatcher.user_data.items():
udata[USER_DATA_V1_MATCH_ACCEPTED] = False
udata[USER_DATA_V1_MATCH_NOTIFIED] = False
udata[USER_DATA_V1_MATCH_WITH] = None
if not udata.get(USER_DATA_V1_AUTHORIZED, False):
udata[USER_DATA_V1_AUTHORIZED] = False
continue
if udata.get(USER_DATA_V1_SETTINGS_ACTIVE, CALLBACK_ACTIVE_NO) != CALLBACK_ACTIVE_YES:
udata[USER_DATA_V1_SETTINGS_ACTIVE] = CALLBACK_ACTIVE_NO
try:
ctx.bot.send_message(uid, text='На этой неделе ты выбрал не идти на случайный кофе.\n'
'Если передумаешь и изменишь настройки, '
'то завтра тебе должно будет подобрать пару.')
except TelegramError as ex:
if not handle_common_block_errors(ctx, uid, ex):
send_error(ctx, uid, udata[USER_DATA_V1_TELEGRAM_USERNAME], udata[USER_DATA_V1_INTRA_LOGIN],
'Can\'t send inactivity notice.', ex)
except Exception as ex:
send_error(ctx, uid, udata[USER_DATA_V1_TELEGRAM_USERNAME], udata[USER_DATA_V1_INTRA_LOGIN],
'Can\'t send inactivity notice.', ex)
continue
bucket = get_bucket(udata)
handle = udata.get(USER_DATA_V1_TELEGRAM_USERNAME, '???')
user_handles[uid] = handle
login = udata.get(USER_DATA_V1_INTRA_LOGIN, '???')
user_logins[uid] = login
campus = udata.get(USER_DATA_V1_SETTINGS_CAMPUS, '???')
user_campuses[uid] = campus
buckets[bucket].append(uid)
for bucket, uids in buckets.items():
random.shuffle(uids, random=lambda: secrets.randbelow(100) / 100.0)
for bucket, uids in buckets.items():
if bucket == '???':
if uids:
ctx.bot.send_message(
ADMIN_IDS[0],
text='For some reason ??? bucket has #{} accounts in it'.format(len(uids))
)
continue
while len(uids) > 1:
a = uids.pop()
b = uids.pop()
match(ctx, a, b, user_logins.get(a), user_logins.get(b), user_handles.get(a), user_handles.get(b))
if not uids:
continue
if bucket != 'online':
a = uids.pop()
b = find_peer_from_campus(buckets['online'], user_campuses, user_campuses[a])
if not b:
continue
match(ctx, a, b, user_logins.get(a), user_logins.get(b), user_handles.get(a), user_handles.get(b))
buckets.clear()
user_campuses.clear()
user_handles.clear()
user_logins.clear()
perform_dump(ctx, ADMIN_IDS[0])
| true | true |
f71de50fee8f2c059e569bd9f0e30902c82d985b | 1,598 | py | Python | portfolio/tests.py | tiagocordeiro/mulhergorila-website | 2ea6232415a152e51324c2b3b4f337039e88d710 | [
"MIT"
] | null | null | null | portfolio/tests.py | tiagocordeiro/mulhergorila-website | 2ea6232415a152e51324c2b3b4f337039e88d710 | [
"MIT"
] | 309 | 2019-03-04T04:49:16.000Z | 2022-03-18T16:11:38.000Z | portfolio/tests.py | vitorpvcampos/mulhergorila-website | 906b68f6e34b7bcb9811b451ee923ccf73e6eb5b | [
"MIT"
] | 2 | 2020-08-28T17:31:43.000Z | 2020-08-28T18:33:15.000Z | from django.test import RequestFactory, TestCase, Client
from .models import Project, Category
from .views import portfolio, portfolio_detail
class PortfolioViewTests(TestCase):
    """View and model tests for the portfolio app."""

    def setUp(self):
        # Every test needs access to the request factory.
        self.factory = RequestFactory()
        self.client = Client()

        # Test category and one portfolio project assigned to it.
        self.category_sample = Category.objects.create(name='Sample')
        self.portfolio_web_01 = Project.objects.create(title='Projeto Web 01',
                                                       description='Projeto web Teste',
                                                       category=self.category_sample)

    def test_portfolio_view_status_code_is_ok(self):
        request = self.factory.get('/portfolio/')
        response = portfolio(request)
        self.assertEqual(response.status_code, 200)

    def test_portfolio_detail_view_status_code_is_ok(self):
        request = self.factory.get('/portfolio/projeto/projeto-web-01')
        response = portfolio_detail(request, slug=self.portfolio_web_01.slug)
        self.assertEqual(response.status_code, 200)

    def test_project_title_returns(self):
        projeto = self.portfolio_web_01
        # FIX: assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual throughout.
        self.assertEqual('Projeto Web 01', projeto.title)

    def test_project_str_returns(self):
        projeto_str = self.portfolio_web_01
        # str() is the idiomatic way to exercise Project.__str__.
        self.assertEqual('Projeto Web 01', str(projeto_str))

    def test_category_name_returns(self):
        categoria = self.category_sample
        self.assertEqual('Sample', categoria.name)
| 36.318182 | 87 | 0.667084 | from django.test import RequestFactory, TestCase, Client
from .models import Project, Category
from .views import portfolio, portfolio_detail
class PortfolioViewTests(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.client = Client()
self.category_sample = Category.objects.create(name='Sample')
self.portfolio_web_01 = Project.objects.create(title='Projeto Web 01',
description='Projeto web Teste',
category=self.category_sample)
def test_portfolio_view_status_code_is_ok(self):
request = self.factory.get('/portfolio/')
response = portfolio(request)
self.assertEqual(response.status_code, 200)
def test_portfolio_detail_view_status_code_is_ok(self):
request = self.factory.get('/portfolio/projeto/projeto-web-01')
response = portfolio_detail(request, slug=self.portfolio_web_01.slug)
self.assertEqual(response.status_code, 200)
def test_project_title_returns(self):
projeto = self.portfolio_web_01
self.assertEquals('Projeto Web 01', projeto.title)
def test_project_str_returns(self):
projeto_str = self.portfolio_web_01
self.assertEquals('Projeto Web 01', projeto_str.__str__())
def test_category_name_returns(self):
categoria = self.category_sample
self.assertEquals('Sample', categoria.name)
| true | true |
f71de58ddbe3fce5ab2f8b4b930eedcc41b00c52 | 2,122 | py | Python | Application/modules/modbus/scanner/uid.py | gennaromellone/smod3 | 98e370aad65067862c93b55415cc00db2f24f330 | [
"Apache-2.0"
] | 1 | 2022-02-28T09:16:19.000Z | 2022-02-28T09:16:19.000Z | Application/modules/modbus/scanner/uid.py | gennaromellone/smod3 | 98e370aad65067862c93b55415cc00db2f24f330 | [
"Apache-2.0"
] | null | null | null | Application/modules/modbus/scanner/uid.py | gennaromellone/smod3 | 98e370aad65067862c93b55415cc00db2f24f330 | [
"Apache-2.0"
] | null | null | null | import os
import threading
from System.Core.Global import *
from System.Core.Colors import *
from System.Core.Modbus import *
import ipcalc
class Module:
    """smod scanner module: brute force the Modbus unit identifier (UID)."""

    info = {
        'Name': 'Brute Force UID',
        'Author': ['@enddo'],
        'Description': ("Brute Force UID"),
    }
    options = {
        'RHOSTS': ['', True, 'The target address range or CIDR identifier'],
        'RPORT': [502, False, 'The port number for modbus protocol'],
        # FIX: corrected "Defualt" typo in the help text.
        'Function': [1, False, 'Function code, Default:Read Coils.'],
        'Threads': [1, False, 'The number of concurrent threads'],
        'Output': [True, False, 'The stdout save in output directory'],
    }
    output = ''

    def exploit(self):
        """Expand RHOSTS into IPs and scan each on a bounded pool of threads."""
        moduleName = self.info['Name']
        print(bcolors.OKBLUE + '[+]' + bcolors.ENDC + ' Module ' + moduleName + ' Start')
        ips = list()
        for ip in ipcalc.Network(self.options['RHOSTS'][0]):
            ips.append(str(ip))
        while ips:
            # Launch up to Threads workers, then wait for all of them.
            for i in range(int(self.options['Threads'][0])):
                if(len(ips) > 0):
                    thread = threading.Thread(target=self.do, args=(ips.pop(0),))
                    thread.start()
                    THREADS.append(thread)
                else:
                    break
            for thread in THREADS:
                thread.join()
        if(self.options['Output'][0]):
            # FIX: use a context manager so the report file is flushed and
            # closed (the old open(...).write(...) leaked the file handle).
            report_path = (mainPath + '/Output/' + moduleName + '_'
                           + self.options['RHOSTS'][0].replace('/', '_') + '.txt')
            with open(report_path, 'a') as report:
                report.write('=' * 30 + '\n' + self.output + '\n\n')
        self.output = ''

    def printLine(self, line, color):
        """Append a line to the report buffer and echo it colorized to stdout."""
        # FIX: parameter renamed from `str`, which shadowed the builtin.
        self.output += line + '\n'
        if(line.find('[+]') != -1):
            print(line.replace('[+]', color + '[+]' + bcolors.ENDC))
        elif(line.find('[-]') != -1):
            print(line.replace('[-]', color + '[+]' + bcolors.ENDC))
        else:
            print(line)

    def do(self, ip):
        """Probe candidate unit ids on one target host."""
        self.printLine('[+] Start Brute Force UID on : ' + ip, bcolors.OKGREEN)
        # NOTE(review): the original comment claimed "255 (legal) uid" but only
        # uid 10 is actually probed by range(10, 11) — confirm intended range.
        for i in range(10, 11):
            c = connectToTarget(ip, self.options['RPORT'][0])
            if c is None:
                break
            try:
                c.sr1(ModbusADU(transId=getTransId(), unitId=i)/ModbusPDU_Read_Generic(funcCode=1),
                      timeout=timeout, verbose=0)
                self.printLine('[+] UID on ' + ip + ' is : ' + str(i), bcolors.OKGREEN)
                closeConnectionToTarget(c)
            except Exception as e:
                print(e)
                closeConnectionToTarget(c)
closeConnectionToTarget(c) | 30.314286 | 151 | 0.615928 | import os
import threading
from System.Core.Global import *
from System.Core.Colors import *
from System.Core.Modbus import *
import ipcalc
class Module:
info = {
'Name': 'Brute Force UID',
'Author': ['@enddo'],
'Description': ("Brute Force UID"),
}
options = {
'RHOSTS' :['' ,True ,'The target address range or CIDR identifier'],
'RPORT' :[502 ,False ,'The port number for modbus protocol'],
'Function' :[1 ,False ,'Function code, Defualt:Read Coils.'],
'Threads' :[1 ,False ,'The number of concurrent threads'],
'Output' :[True ,False ,'The stdout save in output directory']
}
output = ''
def exploit(self):
moduleName = self.info['Name']
print(bcolors.OKBLUE + '[+]' + bcolors.ENDC + ' Module ' + moduleName + ' Start')
ips = list()
for ip in ipcalc.Network(self.options['RHOSTS'][0]):
ips.append(str(ip))
while ips:
for i in range(int(self.options['Threads'][0])):
if(len(ips) > 0):
thread = threading.Thread(target=self.do,args=(ips.pop(0),))
thread.start()
THREADS.append(thread)
else:
break
for thread in THREADS:
thread.join()
if(self.options['Output'][0]):
open(mainPath + '/Output/' + moduleName + '_' + self.options['RHOSTS'][0].replace('/','_') + '.txt','a').write('='*30 + '\n' + self.output + '\n\n')
self.output = ''
def printLine(self,str,color):
self.output += str + '\n'
if(str.find('[+]') != -1):
print(str.replace('[+]', color + '[+]' + bcolors.ENDC))
elif(str.find('[-]') != -1):
print(str.replace('[-]', color + '[+]' + bcolors.ENDC))
else:
print(str)
def do(self,ip):
self.printLine('[+] Start Brute Force UID on : ' + ip,bcolors.OKGREEN)
for i in range(10,11):
c = connectToTarget(ip,self.options['RPORT'][0])
if c is None:
break
try:
c.sr1(ModbusADU(transId=getTransId(),unitId=i)/ModbusPDU_Read_Generic(funcCode=1),timeout=timeout, verbose=0)
self.printLine('[+] UID on ' + ip + ' is : ' + str(i),bcolors.OKGREEN)
closeConnectionToTarget(c)
except Exception as e:
print(e)
closeConnectionToTarget(c) | true | true |
f71de830c973483fd3dea6a6825236f67aadd8ee | 12,401 | py | Python | tests/test_cli.py | grassking100/optuna | 3075a1cf6648b3a8f061f904177734a08bb3a3c3 | [
"MIT"
] | null | null | null | tests/test_cli.py | grassking100/optuna | 3075a1cf6648b3a8f061f904177734a08bb3a3c3 | [
"MIT"
] | null | null | null | tests/test_cli.py | grassking100/optuna | 3075a1cf6648b3a8f061f904177734a08bb3a3c3 | [
"MIT"
] | null | null | null | import re
import subprocess
from subprocess import CalledProcessError
import tempfile
import pytest
import optuna
from optuna.cli import _Studies
from optuna.exceptions import CLIUsageError
from optuna.storages.base import DEFAULT_STUDY_NAME_PREFIX
from optuna.storages import RDBStorage
from optuna.testing.storage import StorageSupplier
from optuna import type_checking
if type_checking.TYPE_CHECKING:
from typing import List # NOQA
from optuna.trial import Trial # NOQA
def test_create_study_command():
    # type: () -> None

    with StorageSupplier("new") as storage:
        assert isinstance(storage, RDBStorage)
        storage_url = str(storage.engine.url)

        # First invocation only has to succeed; the second one is used to
        # inspect the printed study name.
        command = ["optuna", "create-study", "--storage", storage_url]
        subprocess.check_call(command)

        # The printed name follows the "no-name-<UUID4>" pattern.
        study_name = subprocess.check_output(command).decode().strip()
        name_re = r"^no-name-[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$"
        assert re.match(name_re, study_name) is not None

        # The second created study must be retrievable by its printed name.
        assert storage.get_study_id_from_name(study_name) == 2
def test_create_study_command_with_study_name():
    # type: () -> None

    with StorageSupplier("new") as storage:
        assert isinstance(storage, RDBStorage)
        storage_url = str(storage.engine.url)

        # Create a study under an explicit name and read back the echoed name.
        output = subprocess.check_output(
            ["optuna", "create-study", "--storage", storage_url, "--study-name", "test_study"])
        study_name = output.decode().strip()

        # The storage must resolve the printed name back to itself.
        study_id = storage.get_study_id_from_name(study_name)
        assert storage.get_study_name_from_id(study_id) == study_name
def test_create_study_command_without_storage_url():
    # type: () -> None

    # Omitting --storage must fail and print the usage text.
    with pytest.raises(subprocess.CalledProcessError) as err:
        subprocess.check_output(["optuna", "create-study"])
    assert err.value.output.decode().startswith("usage:")
def test_create_study_command_with_direction():
    # type: () -> None

    with StorageSupplier("new") as storage:
        assert isinstance(storage, RDBStorage)
        storage_url = str(storage.engine.url)

        # Each accepted --direction value must be reflected in the storage.
        expected = [
            ("minimize", optuna.structs.StudyDirection.MINIMIZE),
            ("maximize", optuna.structs.StudyDirection.MAXIMIZE),
        ]
        for direction, direction_enum in expected:
            command = ["optuna", "create-study", "--storage", storage_url,
                       "--direction", direction]
            study_name = subprocess.check_output(command).decode().strip()
            study_id = storage.get_study_id_from_name(study_name)
            assert storage.get_study_direction(study_id) == direction_enum

        # Any other value for --direction is rejected.
        command = ["optuna", "create-study", "--storage", storage_url, "--direction", "test"]
        with pytest.raises(subprocess.CalledProcessError):
            subprocess.check_call(command)
def test_delete_study_command():
    # type: () -> None

    with StorageSupplier("new") as storage:
        assert isinstance(storage, RDBStorage)
        storage_url = str(storage.engine.url)
        study_name = "delete-study-test"

        def stored_names():
            return [s.study_name for s in storage.get_all_study_summaries()]

        # Create the study via the CLI and verify it is persisted.
        subprocess.check_call(
            ["optuna", "create-study", "--storage", storage_url, "--study-name", study_name])
        assert study_name in stored_names()

        # Delete it again and verify it is gone.
        subprocess.check_call(
            ["optuna", "delete-study", "--storage", storage_url, "--study-name", study_name])
        assert study_name not in stored_names()
def test_delete_study_command_without_storage_url():
    # type: () -> None

    # Without --storage the delete command must exit with a non-zero status.
    command = ["optuna", "delete-study", "--study-name", "dummy_study"]
    with pytest.raises(subprocess.CalledProcessError):
        subprocess.check_output(command)
def test_study_set_user_attr_command():
    # type: () -> None

    with StorageSupplier("new") as storage:
        assert isinstance(storage, RDBStorage)
        storage_url = str(storage.engine.url)

        # Create a study and push two user attributes onto it via the CLI.
        study_name = storage.get_study_name_from_id(storage.create_new_study())
        example_attrs = {"architecture": "ResNet", "baselen_score": "0.002"}
        for key, value in example_attrs.items():
            subprocess.check_call([
                "optuna",
                "study",
                "set-user-attr",
                "--study",
                study_name,
                "--storage",
                storage_url,
                "--key", key,
                "--value", value,
            ])

        # Both attributes must come back unchanged from the storage.
        study_id = storage.get_study_id_from_name(study_name)
        study_user_attrs = storage.get_study_user_attrs(study_id)
        assert len(study_user_attrs) == 2
        assert all(study_user_attrs[k] == v for k, v in example_attrs.items())
def test_studies_command():
    # type: () -> None
    """``optuna studies`` prints an ASCII table listing every study in storage."""

    with StorageSupplier("new") as storage:
        assert isinstance(storage, RDBStorage)
        storage_url = str(storage.engine.url)

        # First study.
        study_1 = optuna.create_study(storage)

        # Second study.
        study_2 = optuna.create_study(storage, study_name="study_2")
        study_2.optimize(objective_func, n_trials=10)

        # Run command.
        command = ["optuna", "studies", "--storage", storage_url]
        output = str(subprocess.check_output(command).decode().strip())
        rows = output.split("\n")

        def get_row_elements(row_index):
            # type: (int) -> List[str]
            # Split one table row on "|" and drop the empty fragments produced
            # by the leading/trailing border characters.

            return [r.strip() for r in rows[row_index].split("|")[1:-1]]

        # 6 lines in total: the header sits on row 1 and the two data rows on
        # rows 3 and 4 (the remaining rows are presumably table borders —
        # confirmed only indirectly by the assertions below).
        assert len(rows) == 6
        assert tuple(get_row_elements(1)) == _Studies._study_list_header

        # Check study_name and n_trials for the first study.
        elms = get_row_elements(3)
        assert elms[0] == study_1.study_name
        assert elms[2] == "0"

        # Check study_name and n_trials for the second study.
        elms = get_row_elements(4)
        assert elms[0] == study_2.study_name
        assert elms[2] == "10"
def test_create_study_command_with_skip_if_exists():
    # type: () -> None

    with StorageSupplier("new") as storage:
        assert isinstance(storage, RDBStorage)
        storage_url = str(storage.engine.url)
        study_name = "test_study"
        base_command = [
            "optuna", "create-study", "--storage", storage_url, "--study-name", study_name]

        # First creation succeeds and the name is persisted.
        study_name = subprocess.check_output(base_command).decode().strip()
        study_id = storage.get_study_id_from_name(study_name)
        assert storage.get_study_name_from_id(study_id) == study_name

        # Re-creating the same study without --skip-if-exists is an error.
        with pytest.raises(subprocess.CalledProcessError):
            subprocess.check_output(base_command)

        # With --skip-if-exists the existing study instance is reused.
        study_name = subprocess.check_output(
            base_command + ["--skip-if-exists"]).decode().strip()
        assert storage.get_study_id_from_name(study_name) == study_id
def test_dashboard_command():
    # type: () -> None

    with StorageSupplier("new") as storage, tempfile.NamedTemporaryFile("r") as tf_report:
        assert isinstance(storage, RDBStorage)
        storage_url = str(storage.engine.url)

        study_name = storage.get_study_name_from_id(storage.create_new_study())
        subprocess.check_call([
            "optuna",
            "dashboard",
            "--study",
            study_name,
            "--out",
            tf_report.name,
            "--storage",
            storage_url,
        ])

        # The generated report should be a Bokeh HTML document.
        html = tf_report.read()
        assert "<body>" in html
        assert "bokeh" in html
@pytest.mark.parametrize(
    "origins", [["192.168.111.1:5006"], ["192.168.111.1:5006", "192.168.111.2:5006"]]
)
def test_dashboard_command_with_allow_websocket_origin(origins):
    # type: (List[str]) -> None

    with StorageSupplier("new") as storage, tempfile.NamedTemporaryFile("r") as tf_report:
        assert isinstance(storage, RDBStorage)
        storage_url = str(storage.engine.url)

        study_name = storage.get_study_name_from_id(storage.create_new_study())
        command = [
            "optuna",
            "dashboard",
            "--study",
            study_name,
            "--out",
            tf_report.name,
            "--storage",
            storage_url,
        ]
        # One --allow-websocket-origin flag is appended per requested origin.
        for origin in origins:
            command += ["--allow-websocket-origin", origin]
        subprocess.check_call(command)

        # The generated report should be a Bokeh HTML document.
        html = tf_report.read()
        assert "<body>" in html
        assert "bokeh" in html
# An example of objective functions for testing study optimize command
def objective_func(trial):
    # type: (Trial) -> float

    # Quadratic bowl with its minimum at x == -5.
    suggested = trial.suggest_uniform("x", -10, 10)
    return (suggested + 5) ** 2
def test_study_optimize_command():
    # type: () -> None

    with StorageSupplier("new") as storage:
        assert isinstance(storage, RDBStorage)
        storage_url = str(storage.engine.url)

        study_name = storage.get_study_name_from_id(storage.create_new_study())
        subprocess.check_call([
            "optuna",
            "study",
            "optimize",
            "--study",
            study_name,
            "--n-trials",
            "10",
            __file__,
            "objective_func",
            "--storage",
            storage_url,
        ])

        # All requested trials ran and the parameter was suggested.
        study = optuna.load_study(storage=storage_url, study_name=study_name)
        assert len(study.trials) == 10
        assert "x" in study.best_params

        # Check if a default value of study_name is stored in the storage.
        assert storage.get_study_name_from_id(study._study_id).startswith(
            DEFAULT_STUDY_NAME_PREFIX
        )
def test_study_optimize_command_inconsistent_args():
# type: () -> None
with tempfile.NamedTemporaryFile() as tf:
db_url = "sqlite:///{}".format(tf.name)
# --study argument is missing.
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call(
[
"optuna",
"study",
"optimize",
"--storage",
db_url,
"--n-trials",
"10",
__file__,
"objective_func",
]
)
def test_empty_argv():
# type: () -> None
command_empty = ["optuna"]
command_empty_output = str(subprocess.check_output(command_empty))
command_help = ["optuna", "help"]
command_help_output = str(subprocess.check_output(command_help))
assert command_empty_output == command_help_output
def test_check_storage_url():
# type: () -> None
storage_in_args = "sqlite:///args.db"
assert storage_in_args == optuna.cli._check_storage_url(storage_in_args)
with pytest.raises(CLIUsageError):
optuna.cli._check_storage_url(None)
def test_storage_upgrade_command():
# type: () -> None
with StorageSupplier("new") as storage:
assert isinstance(storage, RDBStorage)
storage_url = str(storage.engine.url)
command = ["optuna", "storage", "upgrade"]
with pytest.raises(CalledProcessError):
subprocess.check_call(command)
command.extend(["--storage", storage_url])
subprocess.check_call(command)
| 32.634211 | 98 | 0.625917 | import re
import subprocess
from subprocess import CalledProcessError
import tempfile
import pytest
import optuna
from optuna.cli import _Studies
from optuna.exceptions import CLIUsageError
from optuna.storages.base import DEFAULT_STUDY_NAME_PREFIX
from optuna.storages import RDBStorage
from optuna.testing.storage import StorageSupplier
from optuna import type_checking
if type_checking.TYPE_CHECKING:
from typing import List
from optuna.trial import Trial
def test_create_study_command():
with StorageSupplier("new") as storage:
assert isinstance(storage, RDBStorage)
storage_url = str(storage.engine.url)
command = ["optuna", "create-study", "--storage", storage_url]
subprocess.check_call(command)
study_name = str(subprocess.check_output(command).decode().strip())
name_re = r"^no-name-[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$"
assert re.match(name_re, study_name) is not None
study_id = storage.get_study_id_from_name(study_name)
assert study_id == 2
def test_create_study_command_with_study_name():
with StorageSupplier("new") as storage:
assert isinstance(storage, RDBStorage)
storage_url = str(storage.engine.url)
study_name = "test_study"
command = ["optuna", "create-study", "--storage", storage_url, "--study-name", study_name]
study_name = str(subprocess.check_output(command).decode().strip())
study_id = storage.get_study_id_from_name(study_name)
assert storage.get_study_name_from_id(study_id) == study_name
def test_create_study_command_without_storage_url():
with pytest.raises(subprocess.CalledProcessError) as err:
subprocess.check_output(["optuna", "create-study"])
usage = err.value.output.decode()
assert usage.startswith("usage:")
def test_create_study_command_with_direction():
with StorageSupplier("new") as storage:
assert isinstance(storage, RDBStorage)
storage_url = str(storage.engine.url)
command = ["optuna", "create-study", "--storage", storage_url, "--direction", "minimize"]
study_name = str(subprocess.check_output(command).decode().strip())
study_id = storage.get_study_id_from_name(study_name)
assert storage.get_study_direction(study_id) == optuna.structs.StudyDirection.MINIMIZE
command = ["optuna", "create-study", "--storage", storage_url, "--direction", "maximize"]
study_name = str(subprocess.check_output(command).decode().strip())
study_id = storage.get_study_id_from_name(study_name)
assert storage.get_study_direction(study_id) == optuna.structs.StudyDirection.MAXIMIZE
command = ["optuna", "create-study", "--storage", storage_url, "--direction", "test"]
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call(command)
def test_delete_study_command():
with StorageSupplier("new") as storage:
assert isinstance(storage, RDBStorage)
storage_url = str(storage.engine.url)
study_name = "delete-study-test"
command = ["optuna", "create-study", "--storage", storage_url, "--study-name", study_name]
subprocess.check_call(command)
assert study_name in {s.study_name: s for s in storage.get_all_study_summaries()}
command = ["optuna", "delete-study", "--storage", storage_url, "--study-name", study_name]
subprocess.check_call(command)
assert study_name not in {s.study_name: s for s in storage.get_all_study_summaries()}
def test_delete_study_command_without_storage_url():
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["optuna", "delete-study", "--study-name", "dummy_study"])
def test_study_set_user_attr_command():
with StorageSupplier("new") as storage:
assert isinstance(storage, RDBStorage)
storage_url = str(storage.engine.url)
study_name = storage.get_study_name_from_id(storage.create_new_study())
base_command = [
"optuna",
"study",
"set-user-attr",
"--study",
study_name,
"--storage",
storage_url,
]
example_attrs = {"architecture": "ResNet", "baselen_score": "0.002"}
for key, value in example_attrs.items():
subprocess.check_call(base_command + ["--key", key, "--value", value])
study_id = storage.get_study_id_from_name(study_name)
study_user_attrs = storage.get_study_user_attrs(study_id)
assert len(study_user_attrs) == 2
assert all([study_user_attrs[k] == v for k, v in example_attrs.items()])
def test_studies_command():
with StorageSupplier("new") as storage:
assert isinstance(storage, RDBStorage)
storage_url = str(storage.engine.url)
study_1 = optuna.create_study(storage)
study_2 = optuna.create_study(storage, study_name="study_2")
study_2.optimize(objective_func, n_trials=10)
command = ["optuna", "studies", "--storage", storage_url]
output = str(subprocess.check_output(command).decode().strip())
rows = output.split("\n")
def get_row_elements(row_index):
return [r.strip() for r in rows[row_index].split("|")[1:-1]]
assert len(rows) == 6
assert tuple(get_row_elements(1)) == _Studies._study_list_header
elms = get_row_elements(3)
assert elms[0] == study_1.study_name
assert elms[2] == "0"
elms = get_row_elements(4)
assert elms[0] == study_2.study_name
assert elms[2] == "10"
def test_create_study_command_with_skip_if_exists():
with StorageSupplier("new") as storage:
assert isinstance(storage, RDBStorage)
storage_url = str(storage.engine.url)
study_name = "test_study"
command = ["optuna", "create-study", "--storage", storage_url, "--study-name", study_name]
study_name = str(subprocess.check_output(command).decode().strip())
study_id = storage.get_study_id_from_name(study_name)
assert storage.get_study_name_from_id(study_id) == study_name
command = ["optuna", "create-study", "--storage", storage_url, "--study-name", study_name]
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(command)
command = [
"optuna",
"create-study",
"--storage",
storage_url,
"--study-name",
study_name,
"--skip-if-exists",
]
study_name = str(subprocess.check_output(command).decode().strip())
new_study_id = storage.get_study_id_from_name(study_name)
assert study_id == new_study_id
def test_dashboard_command():
with StorageSupplier("new") as storage, tempfile.NamedTemporaryFile("r") as tf_report:
assert isinstance(storage, RDBStorage)
storage_url = str(storage.engine.url)
study_name = storage.get_study_name_from_id(storage.create_new_study())
command = [
"optuna",
"dashboard",
"--study",
study_name,
"--out",
tf_report.name,
"--storage",
storage_url,
]
subprocess.check_call(command)
html = tf_report.read()
assert "<body>" in html
assert "bokeh" in html
@pytest.mark.parametrize(
"origins", [["192.168.111.1:5006"], ["192.168.111.1:5006", "192.168.111.2:5006"]]
)
def test_dashboard_command_with_allow_websocket_origin(origins):
with StorageSupplier("new") as storage, tempfile.NamedTemporaryFile("r") as tf_report:
assert isinstance(storage, RDBStorage)
storage_url = str(storage.engine.url)
study_name = storage.get_study_name_from_id(storage.create_new_study())
command = [
"optuna",
"dashboard",
"--study",
study_name,
"--out",
tf_report.name,
"--storage",
storage_url,
]
for origin in origins:
command.extend(["--allow-websocket-origin", origin])
subprocess.check_call(command)
html = tf_report.read()
assert "<body>" in html
assert "bokeh" in html
def objective_func(trial):
x = trial.suggest_uniform("x", -10, 10)
return (x + 5) ** 2
def test_study_optimize_command():
with StorageSupplier("new") as storage:
assert isinstance(storage, RDBStorage)
storage_url = str(storage.engine.url)
study_name = storage.get_study_name_from_id(storage.create_new_study())
command = [
"optuna",
"study",
"optimize",
"--study",
study_name,
"--n-trials",
"10",
__file__,
"objective_func",
"--storage",
storage_url,
]
subprocess.check_call(command)
study = optuna.load_study(storage=storage_url, study_name=study_name)
assert len(study.trials) == 10
assert "x" in study.best_params
assert storage.get_study_name_from_id(study._study_id).startswith(
DEFAULT_STUDY_NAME_PREFIX
)
def test_study_optimize_command_inconsistent_args():
with tempfile.NamedTemporaryFile() as tf:
db_url = "sqlite:///{}".format(tf.name)
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call(
[
"optuna",
"study",
"optimize",
"--storage",
db_url,
"--n-trials",
"10",
__file__,
"objective_func",
]
)
def test_empty_argv():
command_empty = ["optuna"]
command_empty_output = str(subprocess.check_output(command_empty))
command_help = ["optuna", "help"]
command_help_output = str(subprocess.check_output(command_help))
assert command_empty_output == command_help_output
def test_check_storage_url():
storage_in_args = "sqlite:///args.db"
assert storage_in_args == optuna.cli._check_storage_url(storage_in_args)
with pytest.raises(CLIUsageError):
optuna.cli._check_storage_url(None)
def test_storage_upgrade_command():
with StorageSupplier("new") as storage:
assert isinstance(storage, RDBStorage)
storage_url = str(storage.engine.url)
command = ["optuna", "storage", "upgrade"]
with pytest.raises(CalledProcessError):
subprocess.check_call(command)
command.extend(["--storage", storage_url])
subprocess.check_call(command)
| true | true |
f71de8677f972f2c21bacb4a237b9b624aa913e9 | 9,889 | py | Python | docs/conf.py | cclauss/ThreatPrep | b1881be239e7b86d86acc70a207989d459bd9d79 | [
"MIT"
] | 50 | 2016-08-05T03:33:00.000Z | 2022-02-16T13:52:15.000Z | docs/conf.py | cclauss/ThreatPrep | b1881be239e7b86d86acc70a207989d459bd9d79 | [
"MIT"
] | null | null | null | docs/conf.py | cclauss/ThreatPrep | b1881be239e7b86d86acc70a207989d459bd9d79 | [
"MIT"
] | 14 | 2017-06-26T02:54:43.000Z | 2021-11-17T07:38:52.000Z | # -*- coding: utf-8 -*-
#
# threat_prep documentation build configuration file, created by
# sphinx-quickstart on Thu Sep 15 11:37:04 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
import sphinx_rtd_theme
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'ThreatPrep'
copyright = u'2016, Alex McCormack'
author = u'Alex McCormack'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0.1.1'
# The full version, including alpha/beta/rc tags.
release = u'0.1.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = u'threat_prep v0.1'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'threat_prepdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'threat_prep.tex', u'threat\\_prep Documentation',
u'Alex McCormack', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# It false, will not define \strong, \code, itleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
# packages.
#
# latex_keep_old_macro_names = True
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'threat_prep', u'threat_prep Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'threat_prep', u'threat_prep Documentation',
author, 'threat_prep', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
| 29 | 80 | 0.705329 |
import sphinx_rtd_theme
extensions = []
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'ThreatPrep'
copyright = u'2016, Alex McCormack'
author = u'Alex McCormack'
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0.1.1'
# The full version, including alpha/beta/rc tags.
release = u'0.1.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = u'threat_prep v0.1'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'threat_prepdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'threat_prep.tex', u'threat\\_prep Documentation',
u'Alex McCormack', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# It false, will not define \strong, \code, itleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
# packages.
#
# latex_keep_old_macro_names = True
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'threat_prep', u'threat_prep Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'threat_prep', u'threat_prep Documentation',
author, 'threat_prep', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
| true | true |
f71dea523dad478e9fc4cb8ac07d9b39f159e61e | 73,462 | py | Python | test/integration/component/test_accounts.py | redbridge/cloudstack | 2218053fb11d501950e4beb80e9bee4ae472b5b4 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | test/integration/component/test_accounts.py | redbridge/cloudstack | 2218053fb11d501950e4beb80e9bee4ae472b5b4 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | test/integration/component/test_accounts.py | redbridge/cloudstack | 2218053fb11d501950e4beb80e9bee4ae472b5b4 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" P1 tests for Account
"""
#Import Local Modules
from marvin.cloudstackTestCase import cloudstackTestCase
#from marvin.cloudstackAPI import *
from marvin.lib.utils import (random_gen,
cleanup_resources)
from marvin.lib.base import (Domain,
Account,
ServiceOffering,
VirtualMachine,
Network,
User,
NATRule,
Template,
PublicIPAddress)
from marvin.lib.common import (get_domain,
get_zone,
get_template,
list_accounts,
list_virtual_machines,
list_service_offering,
list_templates,
list_users,
get_builtin_template_info,
wait_for_cleanup)
from nose.plugins.attrib import attr
from marvin.cloudstackException import CloudstackAPIException
import time
class Services:
"""Test Account Services
"""
def __init__(self):
self.services = {
"domain": {
"name": "Domain",
},
"account": {
"email": "test@test.com",
"firstname": "Test",
"lastname": "User",
"username": "test",
# Random characters are appended for unique
# username
"password": "fr3sca",
},
"user": {
"email": "user@test.com",
"firstname": "User",
"lastname": "User",
"username": "User",
# Random characters are appended for unique
# username
"password": "fr3sca",
},
"service_offering": {
"name": "Tiny Instance",
"displaytext": "Tiny Instance",
"cpunumber": 1,
"cpuspeed": 100,
# in MHz
"memory": 128,
# In MBs
},
"virtual_machine": {
"displayname": "Test VM",
"username": "root",
"password": "password",
"ssh_port": 22,
"hypervisor": 'XenServer',
# Hypervisor type should be same as
# hypervisor type of cluster
"privateport": 22,
"publicport": 22,
"protocol": 'TCP',
},
"template": {
"displaytext": "Public Template",
"name": "Public template",
"ostype": 'CentOS 5.3 (64-bit)',
"url": "",
"hypervisor": '',
"format": '',
"isfeatured": True,
"ispublic": True,
"isextractable": True,
"templatefilter": "self"
},
"natrule": {
"publicport": 22,
"privateport": 22,
"protocol": 'TCP',
},
"ostype": 'CentOS 5.3 (64-bit)',
# Cent OS 5.3 (64 bit)
"sleep": 60,
"timeout": 10,
}
class TestAccounts(cloudstackTestCase):
    """P1: create an account and a user inside it, and verify both are
    reported correctly by the list APIs."""

    @classmethod
    def setUpClass(cls):
        # Class-wide fixtures: zone, default template for the configured
        # OS type and a tiny service offering (removed in tearDownClass).
        cls.testClient = super(TestAccounts, cls).getClsTestClient()
        cls.api_client = cls.testClient.getApiClient()
        cls.services = Services().services
        cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
        cls.services['mode'] = cls.zone.networktype
        cls.template = get_template(
                            cls.api_client,
                            cls.zone.id,
                            cls.services["ostype"]
                            )
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.services["virtual_machine"]["template"] = cls.template.id
        cls.service_offering = ServiceOffering.create(
                                            cls.api_client,
                                            cls.services["service_offering"]
                                            )
        cls._cleanup = [cls.service_offering]
        return

    @classmethod
    def tearDownClass(cls):
        try:
            # Cleanup class-level resources used
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        # Per-test API/DB clients and a fresh cleanup list
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        try:
            # Clean up, terminate the created accounts, domains etc
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["advanced", "basic", "eip", "advancedns", "sg", "selfservice"])
    def test_01_create_account(self):
        """Test Create Account and user for that account
        """

        # Validate the following
        # 1. Create an Account. Verify the account is created.
        # 2. Create User associated with that account. Verify the created user

        # Create an account
        account = Account.create(
                            self.apiclient,
                            self.services["account"]
                            )
        self.debug("Created account: %s" % account.name)
        self.cleanup.append(account)
        list_accounts_response = list_accounts(
                                               self.apiclient,
                                               id=account.id
                                               )
        self.assertEqual(
                         isinstance(list_accounts_response, list),
                         True,
                         "Check list accounts for valid data"
                         )
        self.assertNotEqual(
                            len(list_accounts_response),
                            0,
                            "Check List Account response"
                            )
        account_response = list_accounts_response[0]
        # The listed account must match the one just created
        self.assertEqual(
                         account.accounttype,
                         account_response.accounttype,
                         "Check Account Type of Created account"
                         )
        self.assertEqual(
                         account.name,
                         account_response.name,
                         "Check Account Name of Created account"
                         )
        # Create an User associated with account
        user = User.create(
                           self.apiclient,
                           self.services["user"],
                           account=account.name,
                           domainid=account.domainid
                           )
        self.debug("Created user: %s" % user.id)
        list_users_response = list_users(
                                         self.apiclient,
                                         id=user.id
                                         )
        self.assertEqual(
                         isinstance(list_users_response, list),
                         True,
                         "Check list users for valid data"
                         )
        self.assertNotEqual(
                            len(list_users_response),
                            0,
                            "Check List User response"
                            )
        user_response = list_users_response[0]
        # The listed user must match the one just created
        self.assertEqual(
                         user.username,
                         user_response.username,
                         "Check username of Created user"
                         )
        self.assertEqual(
                         user.state,
                         user_response.state,
                         "Check state of created user"
                         )
        return
class TestRemoveUserFromAccount(cloudstackTestCase):
    """P1: removing one user from a multi-user account must leave the
    account itself and its running virtual machines intact."""

    @classmethod
    def setUpClass(cls):
        # Class-wide fixtures: zone, template, tiny service offering and
        # the shared account in which the per-test users are created.
        cls.testClient = super(TestRemoveUserFromAccount, cls).getClsTestClient()
        cls.api_client = cls.testClient.getApiClient()
        cls.services = Services().services
        cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
        cls.services['mode'] = cls.zone.networktype
        cls.template = get_template(
                            cls.api_client,
                            cls.zone.id,
                            cls.services["ostype"]
                            )
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.services["virtual_machine"]["template"] = cls.template.id
        cls.service_offering = ServiceOffering.create(
                                            cls.api_client,
                                            cls.services["service_offering"]
                                            )
        # Create an account
        cls.account = Account.create(
                            cls.api_client,
                            cls.services["account"]
                            )
        # BUGFIX: cls.account was created above but never registered for
        # cleanup, leaking the account (and anything left inside it) after
        # the run.  Delete the account first, then the service offering.
        cls._cleanup = [
                        cls.account,
                        cls.service_offering,
                        ]
        return

    @classmethod
    def tearDownClass(cls):
        try:
            # Cleanup class-level resources used
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        # Per-test API/DB clients and a fresh cleanup list
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        try:
            # Clean up, terminate the created instance, users etc
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["advanced", "basic", "eip", "advancedns", "sg", "selfservice"])
    def test_01_user_remove_VM_running(self):
        """Test Remove one user from the account
        """

        # Validate the following
        # 1. Create an account with 2 users.
        # 2. Start 2 VMs; one for each user of the account
        # 3. Remove one user from the account. Verify that account still exists.
        # 4. Verify that VM started by the removed user are still running

        # Create an User associated with account and VMs
        user_1 = User.create(
                             self.apiclient,
                             self.services["user"],
                             account=self.account.name,
                             domainid=self.account.domainid
                             )
        self.debug("Created user: %s" % user_1.id)
        user_2 = User.create(
                             self.apiclient,
                             self.services["user"],
                             account=self.account.name,
                             domainid=self.account.domainid
                             )
        self.debug("Created user: %s" % user_2.id)
        # user_1 is deleted by the test itself; only user_2 needs cleanup
        self.cleanup.append(user_2)
        vm_1 = VirtualMachine.create(
                                     self.apiclient,
                                     self.services["virtual_machine"],
                                     accountid=self.account.name,
                                     domainid=self.account.domainid,
                                     serviceofferingid=self.service_offering.id
                                     )
        self.debug("Deployed VM in account: %s, ID: %s" % (
                                                           self.account.name,
                                                           vm_1.id
                                                           ))
        self.cleanup.append(vm_1)
        vm_2 = VirtualMachine.create(
                                     self.apiclient,
                                     self.services["virtual_machine"],
                                     accountid=self.account.name,
                                     domainid=self.account.domainid,
                                     serviceofferingid=self.service_offering.id
                                     )
        self.debug("Deployed VM in account: %s, ID: %s" % (
                                                           self.account.name,
                                                           vm_2.id
                                                           ))
        self.cleanup.append(vm_2)

        # Remove one of the user
        self.debug("Deleting user: %s" % user_1.id)
        user_1.delete(self.apiclient)

        # Account should exist after deleting user
        accounts_response = list_accounts(
                                          self.apiclient,
                                          id=self.account.id
                                          )
        self.assertEqual(
                         isinstance(accounts_response, list),
                         True,
                         "Check for valid list accounts response"
                         )
        self.assertNotEqual(
                            len(accounts_response),
                            0,
                            "Check List Account response"
                            )
        vm_response = list_virtual_machines(
                                            self.apiclient,
                                            account=self.account.name,
                                            domainid=self.account.domainid
                                            )
        self.assertEqual(
                         isinstance(vm_response, list),
                         True,
                         "Check for valid list VM response"
                         )
        self.assertNotEqual(
                            len(vm_response),
                            0,
                            "Check List VM response"
                            )
        # VMs associated with that account should be running
        for vm in vm_response:
            self.assertEqual(
                             vm.state,
                             'Running',
                             "Check state of VMs associated with account"
                             )
        return
class TestNonRootAdminsPrivileges(cloudstackTestCase):
    """P1: a domain admin must only see accounts that belong to its own
    domain when listing accounts."""

    @classmethod
    def setUpClass(cls):
        # Class-wide fixtures: a fresh domain with one admin account in it
        cls.testClient = super(TestNonRootAdminsPrivileges, cls).getClsTestClient()
        cls.api_client = cls.testClient.getApiClient()
        cls.services = Services().services
        cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
        cls.services['mode'] = cls.zone.networktype
        # Create an account, domain etc
        cls.domain = Domain.create(
                                   cls.api_client,
                                   cls.services["domain"],
                                   )
        cls.account = Account.create(
                            cls.api_client,
                            cls.services["account"],
                            admin=True,
                            domainid=cls.domain.id
                            )
        # Account first, then its domain
        cls._cleanup = [
                        cls.account,
                        cls.domain
                        ]
        return

    @classmethod
    def tearDownClass(cls):
        try:
            # Cleanup class-level resources used
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        # Per-test API/DB clients and a fresh cleanup list
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        try:
            # Clean up, terminate the created accounts
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["advanced", "basic", "eip", "advancedns", "sg", "selfservice"])
    def test_01_non_root_admin_Privileges(self):
        """Test to verify Non Root admin privileges"""

        # Validate the following
        # 1. Create few accounts/users in ROOT domain
        # 2. Verify listAccounts API gives only accounts associated with new
        #    domain.

        # Create accounts for ROOT domain (no domainid -> ROOT)
        account_1 = Account.create(
                            self.apiclient,
                            self.services["account"]
                            )
        self.debug("Created account: %s" % account_1.name)
        self.cleanup.append(account_1)
        account_2 = Account.create(
                            self.apiclient,
                            self.services["account"]
                            )
        self.debug("Created account: %s" % account_2.name)
        self.cleanup.append(account_2)
        # Listing scoped to the new domain must return only the single
        # domain-admin account created in setUpClass.
        accounts_response = list_accounts(
                                          self.apiclient,
                                          domainid=self.domain.id,
                                          listall=True
                                          )
        self.assertEqual(
                         isinstance(accounts_response, list),
                         True,
                         "Check list accounts response for valid data"
                         )
        self.assertEqual(
                         len(accounts_response),
                         1,
                         "Check List accounts response"
                         )
        # Verify only account associated with domain is listed
        for account in accounts_response:
            self.assertEqual(
                             account.domainid,
                             self.domain.id,
                             "Check domain ID of account"
                             )
        return
class TestServiceOfferingSiblings(cloudstackTestCase):
    """P1: a domain-scoped service offering must be visible in its own
    domain but not in a sibling domain."""

    @classmethod
    def setUpClass(cls):
        cls.api_client = super(
                               TestServiceOfferingSiblings,
                               cls
                               ).getClsTestClient().getApiClient()
        cls.services = Services().services
        # Create Domains, accounts etc: two sibling domains and an
        # offering scoped to domain_1 only
        cls.domain_1 = Domain.create(
                                     cls.api_client,
                                     cls.services["domain"]
                                     )
        cls.domain_2 = Domain.create(
                                     cls.api_client,
                                     cls.services["domain"]
                                     )
        cls.service_offering = ServiceOffering.create(
                                            cls.api_client,
                                            cls.services["service_offering"],
                                            domainid=cls.domain_1.id
                                            )
        # Create account for domain_1
        cls.account_1 = Account.create(
                                       cls.api_client,
                                       cls.services["account"],
                                       admin=True,
                                       domainid=cls.domain_1.id
                                       )
        # Create an account for domain_2
        cls.account_2 = Account.create(
                                       cls.api_client,
                                       cls.services["account"],
                                       admin=True,
                                       domainid=cls.domain_2.id
                                       )
        # Accounts first, then the offering, then the domains
        cls._cleanup = [
                        cls.account_1,
                        cls.account_2,
                        cls.service_offering,
                        cls.domain_1,
                        cls.domain_2,
                        ]
        return

    @classmethod
    def tearDownClass(cls):
        try:
            # Cleanup class-level resources used
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        # NOTE(review): self.testClient is assumed to be provided by the
        # cloudstackTestCase base class, since setUpClass does not set it
        # here — confirm against the marvin base class.
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        try:
            # Clean up, terminate the created domains, accounts
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["advanced", "basic", "eip", "advancedns", "sg", "selfservice"])
    def test_01_service_offering_siblings(self):
        """Test to verify service offerings at same level in hierarchy"""

        # Validate the following
        # 1. Verify service offering is visible for domain_1
        # 2. Verify service offering is not visible for domain_2
        service_offerings = list_service_offering(
                                                  self.apiclient,
                                                  domainid=self.domain_1.id
                                                  )
        self.assertEqual(
                         isinstance(service_offerings, list),
                         True,
                         "Check if valid list service offerings response"
                         )
        self.assertNotEqual(
                            len(service_offerings),
                            0,
                            "Check List Service Offerings response"
                            )
        for service_offering in service_offerings:
            self.debug("Validating service offering: %s" % service_offering.id)
            self.assertEqual(
               service_offering.id,
               self.service_offering.id,
               "Check Service offering ID for domain" + str(self.domain_1.name)
            )
        # Verify private service offering is not visible to other domain
        service_offerings = list_service_offering(
                                                  self.apiclient,
                                                  domainid=self.domain_2.id
                                                  )
        self.assertEqual(
                         service_offerings,
                         None,
                         "Check List Service Offerings response for other domain"
                         )
        return
class TestServiceOfferingHierarchy(cloudstackTestCase):
    """P1: visibility of a domain-scoped service offering across a
    parent/child domain hierarchy."""

    @classmethod
    def setUpClass(cls):
        cls.api_client = super(
                               TestServiceOfferingHierarchy,
                               cls
                               ).getClsTestClient().getApiClient()
        cls.services = Services().services
        # Create domain, service offerings etc: domain_2 is a child of
        # domain_1, and the offering is scoped to domain_1
        cls.domain_1 = Domain.create(
                                     cls.api_client,
                                     cls.services["domain"]
                                     )
        cls.domain_2 = Domain.create(
                                     cls.api_client,
                                     cls.services["domain"],
                                     parentdomainid=cls.domain_1.id
                                     )
        cls.service_offering = ServiceOffering.create(
                                            cls.api_client,
                                            cls.services["service_offering"],
                                            domainid=cls.domain_1.id
                                            )
        # Create account for domain_1
        cls.account_1 = Account.create(
                                       cls.api_client,
                                       cls.services["account"],
                                       admin=True,
                                       domainid=cls.domain_1.id
                                       )
        # Create an account for domain_2
        cls.account_2 = Account.create(
                                       cls.api_client,
                                       cls.services["account"],
                                       admin=True,
                                       domainid=cls.domain_2.id
                                       )
        # Child resources first, then parent: account_2/domain_2 before
        # the offering, account_1 and domain_1
        cls._cleanup = [
                        cls.account_2,
                        cls.domain_2,
                        cls.service_offering,
                        cls.account_1,
                        cls.domain_1,
                        ]
        return

    @classmethod
    def tearDownClass(cls):
        try:
            # Cleanup class-level resources used
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        # NOTE(review): self.testClient is assumed to come from the
        # cloudstackTestCase base class — setUpClass does not set it here.
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        try:
            # Clean up, terminate the created instance, volumes and snapshots
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["advanced", "basic", "eip", "advancedns", "sg", "selfservice"])
    def test_01_service_offering_hierarchy(self):
        """Test to verify service offerings in a domain hierarchy"""

        # Validate the following
        # 1. Verify service offering is visible for domain_1
        # 2. Verify service offering is NOT listed for child domain_2
        # NOTE(review): the original step comment claimed the offering is
        # "also visible" for domain_2, which contradicts the assertion
        # below (expects None); the code is kept as-is and the comment
        # corrected to match it — confirm intended visibility semantics.
        service_offerings = list_service_offering(
                                                  self.apiclient,
                                                  domainid=self.domain_1.id
                                                  )
        self.assertEqual(
                         isinstance(service_offerings, list),
                         True,
                         "Check List Service Offerings for a valid response"
                         )
        self.assertNotEqual(
                            len(service_offerings),
                            0,
                            "Check List Service Offerings response"
                            )
        for service_offering in service_offerings:
            self.assertEqual(
               service_offering.id,
               self.service_offering.id,
               "Check Service offering ID for domain" + str(self.domain_1.name)
            )
        # Verify private service offering is not visible to other domain
        service_offerings = list_service_offering(
                                                  self.apiclient,
                                                  domainid=self.domain_2.id
                                                  )
        self.assertEqual(
                         service_offerings,
                         None,
                         "Check List Service Offerings for a valid response"
                         )
        return
class TestTemplateHierarchy(cloudstackTestCase):
    """P1: a template registered in a parent domain must be visible both
    to the owning account and to an account in a child domain."""

    @classmethod
    def setUpClass(cls):
        cls.testClient = super(TestTemplateHierarchy, cls).getClsTestClient()
        cls.api_client = cls.testClient.getApiClient()
        cls.hypervisor = cls.testClient.getHypervisorInfo()
        cls.services = Services().services
        cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
        cls.services['mode'] = cls.zone.networktype
        # Create domains, accounts and template: domain_2 is a child of
        # domain_1
        cls.domain_1 = Domain.create(
                                     cls.api_client,
                                     cls.services["domain"]
                                     )
        cls.domain_2 = Domain.create(
                                     cls.api_client,
                                     cls.services["domain"],
                                     parentdomainid=cls.domain_1.id
                                     )
        # Create account for domain_1
        cls.account_1 = Account.create(
                                       cls.api_client,
                                       cls.services["account"],
                                       admin=True,
                                       domainid=cls.domain_1.id
                                       )
        # Create an account for domain_2
        cls.account_2 = Account.create(
                                       cls.api_client,
                                       cls.services["account"],
                                       admin=True,
                                       domainid=cls.domain_2.id
                                       )
        # Fill in the template URL/hypervisor/format from the zone's
        # built-in template before registering our own copy
        builtin_info = get_builtin_template_info(cls.api_client, cls.zone.id)
        cls.services["template"]["url"] = builtin_info[0]
        cls.services["template"]["hypervisor"] = builtin_info[1]
        cls.services["template"]["format"] = builtin_info[2]
        # Register new template owned by account_1 in domain_1
        cls.template = Template.register(
                                         cls.api_client,
                                         cls.services["template"],
                                         zoneid=cls.zone.id,
                                         account=cls.account_1.name,
                                         domainid=cls.domain_1.id,
                                         hypervisor=cls.hypervisor
                                         )
        # Wait for template to download
        cls.template.download(cls.api_client)
        # Wait for template status to be changed across
        time.sleep(60)
        # Child resources first, then the template and parent resources
        cls._cleanup = [
                        cls.account_2,
                        cls.domain_2,
                        cls.template,
                        cls.account_1,
                        cls.domain_1,
                        ]
        return

    @classmethod
    def tearDownClass(cls):
        try:
            # Cleanup class-level resources used
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        # Per-test API/DB clients and a fresh cleanup list
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        try:
            # Clean up, terminate the created instance, volumes and snapshots
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["advanced", "basic", "eip", "advancedns", "sg"])
    def test_01_template_hierarchy(self):
        """Test to verify template at same level in hierarchy"""

        # Validate the following
        # 1. Verify template is visible for domain_1
        # 2. Verify template is also visible for domain_2

        # Sleep to ensure that template state is reflected across
        templates = list_templates(
                                   self.apiclient,
                                   templatefilter='self',
                                   account=self.account_1.name,
                                   domainid=self.domain_1.id
                                   )
        self.assertEqual(
                         isinstance(templates, list),
                         True,
                         "Template response %s is not a list" % templates
                         )
        self.assertNotEqual(
                            len(templates),
                            0,
                            "No templates found"
                            )
        for template in templates:
            self.assertEqual(
                             template.id,
                             self.template.id,
                             "Check Template ID for domain" + str(self.domain_1.name)
                             )
        # Verify the registered template is visible to the account in the
        # child domain as well
        templates = list_templates(
                                   self.apiclient,
                                   id=self.template.id,
                                   templatefilter='all',
                                   account=self.account_2.name,
                                   domainid=self.domain_2.id
                                   )
        self.assertEqual(
                         isinstance(templates, list),
                         True,
                         "Template response %s is not a list" % templates
                         )
        self.assertNotEqual(
                            len(templates),
                            0,
                            "No templates found"
                            )
        for template in templates:
            self.assertEqual(
                             template.id,
                             self.template.id,
                             "Check Template ID for domain" + str(self.domain_2.name)
                             )
        return
class TestAddVmToSubDomain(cloudstackTestCase):
    """P1: accounts in both a domain and its sub-domain must be able to
    deploy VMs using a domain-scoped service offering."""

    @classmethod
    def setUpClass(cls):
        # Class-wide fixtures: a sub-domain under the default domain, one
        # admin account in each, a domain-scoped offering and one VM per
        # account
        cls.testClient = super(TestAddVmToSubDomain, cls).getClsTestClient()
        cls.api_client = cls.testClient.getApiClient()
        cls.services = Services().services
        cls.domain = get_domain(cls.api_client)
        cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
        cls.services['mode'] = cls.zone.networktype
        cls.sub_domain = Domain.create(
                                       cls.api_client,
                                       cls.services["domain"],
                                       parentdomainid=cls.domain.id
                                       )
        # Create account for the parent domain
        cls.account_1 = Account.create(
                                       cls.api_client,
                                       cls.services["account"],
                                       admin=True,
                                       domainid=cls.domain.id
                                       )
        # Create an account for the sub-domain
        cls.account_2 = Account.create(
                                       cls.api_client,
                                       cls.services["account"],
                                       admin=True,
                                       domainid=cls.sub_domain.id
                                       )
        cls.service_offering = ServiceOffering.create(
                                            cls.api_client,
                                            cls.services["service_offering"],
                                            domainid=cls.domain.id
                                            )
        cls.template = get_template(
                            cls.api_client,
                            cls.zone.id,
                            cls.services["ostype"]
                            )
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.vm_1 = VirtualMachine.create(
                                         cls.api_client,
                                         cls.services["virtual_machine"],
                                         templateid=cls.template.id,
                                         accountid=cls.account_1.name,
                                         domainid=cls.account_1.domainid,
                                         serviceofferingid=cls.service_offering.id
                                         )
        cls.vm_2 = VirtualMachine.create(
                                         cls.api_client,
                                         cls.services["virtual_machine"],
                                         templateid=cls.template.id,
                                         accountid=cls.account_2.name,
                                         domainid=cls.account_2.domainid,
                                         serviceofferingid=cls.service_offering.id
                                         )
        # VMs are removed implicitly when their accounts are deleted
        cls._cleanup = [
                        cls.account_2,
                        cls.account_1,
                        cls.sub_domain,
                        cls.service_offering
                        ]
        return

    @classmethod
    def tearDownClass(cls):
        try:
            # Clean up, terminate the created resources
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        # Per-test API/DB clients and a fresh cleanup list
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        try:
            # Clean up, terminate the created resources
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["advanced", "basic", "eip", "advancedns", "sg", "selfservice"])
    def test_01_add_vm_to_subdomain(self):
        """ Test Sub domain allowed to launch VM when a Domain level zone is created"""

        # Validate the following
        # 1. Verify VM created by Account_1 is in Running state
        # 2. Verify VM created by Account_2 is in Running state
        vm_response = list_virtual_machines(
                                            self.apiclient,
                                            id=self.vm_1.id
                                            )
        self.assertEqual(
                         isinstance(vm_response, list),
                         True,
                         "Check List VM for a valid response"
                         )
        self.assertNotEqual(
                            len(vm_response),
                            0,
                            "Check List Template response"
                            )
        for vm in vm_response:
            self.debug("VM ID: %s and state: %s" % (vm.id, vm.state))
            self.assertEqual(
                             vm.state,
                             'Running',
                             "Check State of Virtual machine"
                             )
        vm_response = list_virtual_machines(
                                            self.apiclient,
                                            id=self.vm_2.id
                                            )
        self.assertNotEqual(
                            len(vm_response),
                            0,
                            "Check List Template response"
                            )
        for vm in vm_response:
            self.debug("VM ID: %s and state: %s" % (vm.id, vm.state))
            self.assertEqual(
                             vm.state,
                             'Running',
                             "Check State of Virtual machine"
                             )
        return
class TestUserDetails(cloudstackTestCase):
    """P1: verify the updateUser API updates first/last name for regular,
    ROOT-admin and domain-admin users.

    The three tests differed only in how the account is created; the
    shared update-and-verify flow is factored into
    _update_and_verify_user_names()."""

    @classmethod
    def setUpClass(cls):
        cls.testClient = super(TestUserDetails, cls).getClsTestClient()
        cls.api_client = cls.testClient.getApiClient()
        cls.services = Services().services
        cls.domain = get_domain(cls.api_client)
        cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
        cls.services['mode'] = cls.zone.networktype
        cls._cleanup = []
        return

    @classmethod
    def tearDownClass(cls):
        try:
            # Cleanup class-level resources used
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        # Per-test API/DB clients and a fresh cleanup list
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        try:
            # Clean up, terminate the created accounts
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def _update_and_verify_user_names(self):
        """Shared body of the three update tests.

        Fetches the first user of self.account, updates its firstname and
        lastname through updateUser, then verifies via listUsers that the
        new names were stored.  Expects self.account to be created (and
        queued for cleanup) by the calling test."""
        # Fetching the user details of account
        self.debug(
            "Fetching user details for account: %s" %
            self.account.name)
        users = User.list(
                          self.apiclient,
                          account=self.account.name,
                          domainid=self.account.domainid
                          )
        self.assertEqual(
                         isinstance(users, list),
                         True,
                         "List users should return a valid list for account"
                         )
        user_1 = users[0]
        self.debug("Updating the details of user: %s" % user_1.name)
        firstname = random_gen()
        lastname = random_gen()
        self.debug("New firstname: %s, lastname: %s" % (firstname, lastname))
        User.update(
                    self.apiclient,
                    user_1.id,
                    firstname=firstname,
                    lastname=lastname
                    )
        # Re-fetch the user and confirm the update took effect
        self.debug(
            "Fetching user details for user: %s" % user_1.name)
        users = User.list(
                          self.apiclient,
                          id=user_1.id,
                          listall=True
                          )
        self.assertEqual(
                         isinstance(users, list),
                         True,
                         "List users should return a valid list for account"
                         )
        user_1 = users[0]
        self.assertEqual(
                         user_1.firstname,
                         firstname,
                         "User's first name should be updated with new one"
                         )
        self.assertEqual(
                         user_1.lastname,
                         lastname,
                         "User's last name should be updated with new one"
                         )
        return

    @attr(tags=[
        "role",
        "accounts",
        "simulator",
        "advanced",
        "advancedns",
        "basic",
        "eip",
        "sg"
    ])
    def test_updateUserDetails(self):
        """Test user update API
        """
        # 1. create a regular user account in the default domain
        # 2. update its user's names with updateUser and verify via
        #    listUsers (shared helper)
        self.debug("Creating an user account..")
        self.account = Account.create(
                                      self.apiclient,
                                      self.services["account"],
                                      domainid=self.domain.id
                                      )
        self.cleanup.append(self.account)
        self._update_and_verify_user_names()
        return

    @attr(tags=[
        "role",
        "accounts",
        "simulator",
        "advanced",
        "advancedns",
        "basic",
        "eip",
        "sg"
    ])
    def test_updateAdminDetails(self):
        """Test update admin details
        """
        # Same flow as test_updateUserDetails but for a ROOT admin account
        self.debug("Creating a ROOT admin account")
        self.account = Account.create(
                                      self.apiclient,
                                      self.services["account"],
                                      admin=True,
                                      )
        self.cleanup.append(self.account)
        self._update_and_verify_user_names()
        return

    @attr(tags=[
        "role",
        "accounts",
        "simulator",
        "advanced",
        "advancedns",
        "basic",
        "eip",
        "sg"
    ])
    def test_updateDomainAdminDetails(self):
        """Test update domain admin details
        """
        # Same flow but for a domain admin account in the default domain
        self.debug("Creating a domain admin account")
        self.account = Account.create(
                                      self.apiclient,
                                      self.services["account"],
                                      admin=True,
                                      domainid=self.domain.id
                                      )
        self.cleanup.append(self.account)
        self._update_and_verify_user_names()
        return
class TestUserLogin(cloudstackTestCase):
    """Exercise the login API: logging in with a plain account and with
    an account addressed through a new sub-domain must both succeed and
    return a non-null session key."""

    @classmethod
    def setUpClass(cls):
        cls.testClient = super(TestUserLogin, cls).getClsTestClient()
        cls.api_client = cls.testClient.getApiClient()
        cls.services = Services().services
        cls.domain = get_domain(cls.api_client)
        cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
        cls.services['mode'] = cls.zone.networktype
        cls._cleanup = []
        return

    @classmethod
    def tearDownClass(cls):
        try:
            # Remove any class-level resources
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as err:
            raise Exception("Warning: Exception during cleanup : %s" % err)
        return

    def setUp(self):
        # Fresh clients and cleanup queue for each test
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        try:
            # Remove everything the test queued for deletion
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as err:
            raise Exception("Warning: Exception during cleanup : %s" % err)
        return

    @attr(tags=["login", "accounts", "simulator", "advanced",
                "advancedns", "basic", "eip", "sg"])
    def test_LoginApiUuidResponse(self):
        """A freshly created account must be able to log in; the login
        response must carry a non-null session key."""
        self.debug("Creating an user account..")
        self.account = Account.create(self.apiclient,
                                      self.services["account"],
                                      domainid=self.domain.id)
        self.cleanup.append(self.account)

        self.debug("Logging into the cloudstack with login API")
        login_response = User.login(
            self.apiclient,
            username=self.account.name,
            password=self.services["account"]["password"])
        self.debug("Login API response: %s" % login_response)

        self.assertNotEqual(login_response.sessionkey,
                            None,
                            "Login to the CloudStack should be successful" +
                            "response shall have non Null key")
        return

    @attr(tags=["login", "accounts", "simulator", "advanced",
                "advancedns", "basic", "eip", "sg"])
    def test_LoginApiDomain(self):
        """Logging in with a user created inside a new sub-domain, using
        the domain id, must succeed with a non-null session key."""
        self.debug("Creating a domain for login with API domain test")
        login_domain = Domain.create(self.apiclient,
                                     self.services["domain"],
                                     parentdomainid=self.domain.id)
        self.debug("Domain: %s is created succesfully." % login_domain.name)

        self.debug(
            "Checking if the created domain is listed in list domains API")
        domain_list = Domain.list(self.apiclient, id=login_domain.id,
                                  listall=True)
        self.assertEqual(isinstance(domain_list, list),
                         True,
                         "List domains shall return a valid response")

        self.debug("Creating an user account in domain: %s" % login_domain.name)
        self.account = Account.create(self.apiclient,
                                      self.services["account"],
                                      domainid=login_domain.id)
        self.cleanup.append(self.account)

        account_list = Account.list(self.apiclient,
                                    name=self.account.name,
                                    domainid=self.account.domainid,
                                    listall=True)
        self.assertEqual(isinstance(account_list, list),
                         True,
                         "List accounts should return a valid response")

        self.debug("Logging into the cloudstack with login API")
        login_response = User.login(
            self.apiclient,
            username=self.account.name,
            password=self.services["account"]["password"],
            domainid=login_domain.id)
        self.debug("Login API response: %s" % login_response)

        self.assertNotEqual(login_response.sessionkey,
                            None,
                            "Login to the CloudStack should be successful" +
                            "response shall have non Null key")
        return
class TestDomainForceRemove(cloudstackTestCase):
@classmethod
    def setUpClass(cls):
        # Class-level fixtures shared by the force-delete tests: zone and
        # the default template for the configured OS type; the tests
        # create their own domains/accounts/VMs.
        cls.testClient = super(TestDomainForceRemove, cls).getClsTestClient()
        cls.api_client = cls.testClient.getApiClient()
        cls.services = Services().services
        cls.domain = get_domain(cls.api_client)
        cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
        cls.services['mode'] = cls.zone.networktype
        cls.template = get_template(
                            cls.api_client,
                            cls.zone.id,
                            cls.services["ostype"]
                            )
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls._cleanup = []
        return
@classmethod
    def tearDownClass(cls):
        try:
            # Clean up, terminate the created resources
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return
    def setUp(self):
        # Per-test API/DB clients and a fresh cleanup list
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return
    def tearDown(self):
        try:
            # Clean up, terminate the created resources
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return
@attr(tags=["domains", "advanced", "advancedns", "simulator", "selfservice"])
def test_forceDeleteDomain(self):
""" Test delete domain with force option"""
# Steps for validations
# 1. create a domain DOM
# 2. create 2 users under this domain
# 3. deploy 1 VM into each of these user accounts
# 4. create PF / FW rules for port 22 on these VMs for their
# respective accounts
# 5. delete the domain with force=true option
# Validate the following
# 1. listDomains should list the created domain
# 2. listAccounts should list the created accounts
# 3. listvirtualmachines should show the Running VMs
# 4. PF and FW rules should be shown in listFirewallRules
# 5. domain should delete successfully and above three list calls
# should show all the resources now deleted. listRouters should
# not return any routers in the deleted accounts/domains
self.debug("Creating a domain for login with API domain test")
domain = Domain.create(
self.apiclient,
self.services["domain"],
parentdomainid=self.domain.id
)
self.debug("Domain is created succesfully.")
self.debug(
"Checking if the created domain is listed in list domains API")
domains = Domain.list(self.apiclient, id=domain.id, listall=True)
self.assertEqual(
isinstance(domains, list),
True,
"List domains shall return a valid response"
)
self.debug("Creating 2 user accounts in domain: %s" % domain.name)
self.account_1 = Account.create(
self.apiclient,
self.services["account"],
domainid=domain.id
)
self.account_2 = Account.create(
self.apiclient,
self.services["account"],
domainid=domain.id
)
self.debug("Creating a tiny service offering for VM deployment")
self.service_offering = ServiceOffering.create(
self.apiclient,
self.services["service_offering"],
domainid=self.domain.id
)
self.debug("Deploying virtual machine in account 1: %s" %
self.account_1.name)
vm_1 = VirtualMachine.create(
self.apiclient,
self.services["virtual_machine"],
templateid=self.template.id,
accountid=self.account_1.name,
domainid=self.account_1.domainid,
serviceofferingid=self.service_offering.id
)
self.debug("Deploying virtual machine in account 2: %s" %
self.account_2.name)
vm_2 = VirtualMachine.create(
self.apiclient,
self.services["virtual_machine"],
templateid=self.template.id,
accountid=self.account_2.name,
domainid=self.account_2.domainid,
serviceofferingid=self.service_offering.id
)
networks = Network.list(
self.apiclient,
account=self.account_1.name,
domainid=self.account_1.domainid,
listall=True
)
self.assertEqual(
isinstance(networks, list),
True,
"List networks should return a valid response"
)
network_1 = networks[0]
self.debug("Default network in account 1: %s is %s" % (
self.account_1.name,
network_1.name))
src_nat_list = PublicIPAddress.list(
self.apiclient,
associatednetworkid=network_1.id,
account=self.account_1.name,
domainid=self.account_1.domainid,
listall=True,
issourcenat=True,
)
self.assertEqual(
isinstance(src_nat_list, list),
True,
"List Public IP should return a valid source NAT"
)
self.assertNotEqual(
len(src_nat_list),
0,
"Length of response from listPublicIp should not be 0"
)
src_nat = src_nat_list[0]
self.debug(
"Trying to create a port forwarding rule in source NAT: %s" %
src_nat.ipaddress)
#Create NAT rule
nat_rule = NATRule.create(
self.apiclient,
vm_1,
self.services["natrule"],
ipaddressid=src_nat.id
)
self.debug("Created PF rule on source NAT: %s" % src_nat.ipaddress)
nat_rules = NATRule.list(self.apiclient, id=nat_rule.id)
self.assertEqual(
isinstance(nat_rules, list),
True,
"List NAT should return a valid port forwarding rules"
)
self.assertNotEqual(
len(nat_rules),
0,
"Length of response from listLbRules should not be 0"
)
self.debug("Deleting domain with force option")
try:
domain.delete(self.apiclient, cleanup=True)
except Exception as e:
self.debug("Waiting for account.cleanup.interval" +
" to cleanup any remaining resouces")
# Sleep 3*account.gc to ensure that all resources are deleted
wait_for_cleanup(self.apiclient, ["account.cleanup.interval"]*3)
with self.assertRaises(CloudstackAPIException):
Domain.list(
self.apiclient,
id=domain.id,
listall=True
)
self.debug("Checking if the resources in domain are deleted")
with self.assertRaises(CloudstackAPIException):
Account.list(
self.apiclient,
name=self.account_1.name,
domainid=self.account_1.domainid,
listall=True
)
return
@attr(tags=["domains", "advanced", "advancedns", "simulator", "selfservice"])
def test_DeleteDomain(self):
""" Test delete domain without force option"""
# Steps for validations
# 1. create a domain DOM
# 2. create 2 users under this domain
# 3. deploy 1 VM into each of these user accounts
# 4. create PF / FW rules for port 22 on these VMs for their
# respective accounts
# 5. delete the domain with force=false option
# Validate the following
# 1. listDomains should list the created domain
# 2. listAccounts should list the created accounts
# 3. listvirtualmachines should show the Running VMs
# 4. PF and FW rules should be shown in listFirewallRules
# 5. domain deletion should fail saying there are resources under use
self.debug("Creating a domain for login with API domain test")
domain = Domain.create(
self.apiclient,
self.services["domain"],
parentdomainid=self.domain.id
)
self.debug("Domain: %s is created successfully." % domain.name)
self.debug(
"Checking if the created domain is listed in list domains API")
domains = Domain.list(self.apiclient, id=domain.id, listall=True)
self.assertEqual(
isinstance(domains, list),
True,
"List domains shall return a valid response"
)
self.debug("Creating 2 user accounts in domain: %s" % domain.name)
self.account_1 = Account.create(
self.apiclient,
self.services["account"],
domainid=domain.id
)
self.cleanup.append(self.account_1)
self.account_2 = Account.create(
self.apiclient,
self.services["account"],
domainid=domain.id
)
self.cleanup.append(self.account_2)
self.debug("Creating a tiny service offering for VM deployment")
self.service_offering = ServiceOffering.create(
self.apiclient,
self.services["service_offering"],
domainid=self.domain.id
)
self.cleanup.append(self.service_offering)
self.debug("Deploying virtual machine in account 1: %s" %
self.account_1.name)
vm_1 = VirtualMachine.create(
self.apiclient,
self.services["virtual_machine"],
templateid=self.template.id,
accountid=self.account_1.name,
domainid=self.account_1.domainid,
serviceofferingid=self.service_offering.id
)
self.debug("Deploying virtual machine in account 2: %s" %
self.account_2.name)
vm_2 = VirtualMachine.create(
self.apiclient,
self.services["virtual_machine"],
templateid=self.template.id,
accountid=self.account_2.name,
domainid=self.account_2.domainid,
serviceofferingid=self.service_offering.id
)
networks = Network.list(
self.apiclient,
account=self.account_1.name,
domainid=self.account_1.domainid,
listall=True
)
self.assertEqual(
isinstance(networks, list),
True,
"List networks should return a valid response"
)
network_1 = networks[0]
self.debug("Default network in account 1: %s is %s" % (
self.account_1.name,
network_1.name))
src_nat_list = PublicIPAddress.list(
self.apiclient,
associatednetworkid=network_1.id,
account=self.account_1.name,
domainid=self.account_1.domainid,
listall=True,
issourcenat=True,
)
self.assertEqual(
isinstance(src_nat_list, list),
True,
"List Public IP should return a valid source NAT"
)
self.assertNotEqual(
len(src_nat_list),
0,
"Length of response from listPublicIp should not be 0"
)
src_nat = src_nat_list[0]
self.debug(
"Trying to create a port forwarding rule in source NAT: %s" %
src_nat.ipaddress)
#Create NAT rule
nat_rule = NATRule.create(
self.apiclient,
vm_1,
self.services["natrule"],
ipaddressid=src_nat.id
)
self.debug("Created PF rule on source NAT: %s" % src_nat.ipaddress)
nat_rules = NATRule.list(self.apiclient, id=nat_rule.id)
self.assertEqual(
isinstance(nat_rules, list),
True,
"List NAT should return a valid port forwarding rules"
)
self.assertNotEqual(
len(nat_rules),
0,
"Length of response from listLbRules should not be 0"
)
self.debug("Deleting domain without force option")
with self.assertRaises(Exception):
domain.delete(self.apiclient, cleanup=False)
return
# NOTE(review): a stray non-Python metadata row was found here and has been
# commented out so the module parses.
from marvin.cloudstackTestCase import cloudstackTestCase
from marvin.lib.utils import (random_gen,
cleanup_resources)
from marvin.lib.base import (Domain,
Account,
ServiceOffering,
VirtualMachine,
Network,
User,
NATRule,
Template,
PublicIPAddress)
from marvin.lib.common import (get_domain,
get_zone,
get_template,
list_accounts,
list_virtual_machines,
list_service_offering,
list_templates,
list_users,
get_builtin_template_info,
wait_for_cleanup)
from nose.plugins.attrib import attr
from marvin.cloudstackException import CloudstackAPIException
import time
class Services:
    """Static test data consumed by the account/domain test cases below."""

    def __init__(self):
        self.services = {
            "domain": {
                "name": "Domain",
            },
            # Credentials for a regular (non-admin) account
            "account": {
                "email": "test@test.com",
                "firstname": "Test",
                "lastname": "User",
                "username": "test",
                "password": "fr3sca",
            },
            # Credentials for an extra user added to an existing account
            "user": {
                "email": "user@test.com",
                "firstname": "User",
                "lastname": "User",
                "username": "User",
                "password": "fr3sca",
            },
            # Minimal compute offering so VM deployments are cheap
            "service_offering": {
                "name": "Tiny Instance",
                "displaytext": "Tiny Instance",
                "cpunumber": 1,
                "cpuspeed": 100,  # MHz
                "memory": 128,  # MB
            },
            "virtual_machine": {
                "displayname": "Test VM",
                "username": "root",
                "password": "password",
                "ssh_port": 22,
                "hypervisor": 'XenServer',
                "privateport": 22,
                "publicport": 22,
                "protocol": 'TCP',
            },
            # Template registration data; url/hypervisor/format are filled
            # in at runtime from the built-in template (see TestTemplateHierarchy)
            "template": {
                "displaytext": "Public Template",
                "name": "Public template",
                "ostype": 'CentOS 5.3 (64-bit)',
                "url": "",
                "hypervisor": '',
                "format": '',
                "isfeatured": True,
                "ispublic": True,
                "isextractable": True,
                "templatefilter": "self"
            },
            # Port forwarding rule for SSH
            "natrule": {
                "publicport": 22,
                "privateport": 22,
                "protocol": 'TCP',
            },
            "ostype": 'CentOS 5.3 (64-bit)',
            "sleep": 60,  # seconds between retries
            "timeout": 10,  # number of retries
        }
class TestAccounts(cloudstackTestCase):
    """Basic account and user CRUD coverage."""

    @classmethod
    def setUpClass(cls):
        """Set up zone/template data and one shared service offering."""
        cls.testClient = super(TestAccounts, cls).getClsTestClient()
        cls.api_client = cls.testClient.getApiClient()
        cls.services = Services().services
        cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
        cls.services['mode'] = cls.zone.networktype
        cls.template = get_template(
            cls.api_client,
            cls.zone.id,
            cls.services["ostype"]
        )
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.services["virtual_machine"]["template"] = cls.template.id
        cls.service_offering = ServiceOffering.create(
            cls.api_client,
            cls.services["service_offering"]
        )
        cls._cleanup = [cls.service_offering]
        return

    @classmethod
    def tearDownClass(cls):
        """Remove class-level resources (the service offering)."""
        try:
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        """Remove per-test resources registered in self.cleanup."""
        try:
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["advanced", "basic", "eip", "advancedns", "sg", "selfservice"])
    def test_01_create_account(self):
        """Create an account, verify it lists correctly, then add a user
        to it and verify the user lists correctly too."""
        account = Account.create(
            self.apiclient,
            self.services["account"]
        )
        self.debug("Created account: %s" % account.name)
        self.cleanup.append(account)
        list_accounts_response = list_accounts(
            self.apiclient,
            id=account.id
        )
        self.assertEqual(
            isinstance(list_accounts_response, list),
            True,
            "Check list accounts for valid data"
        )
        self.assertNotEqual(
            len(list_accounts_response),
            0,
            "Check List Account response"
        )
        account_response = list_accounts_response[0]
        self.assertEqual(
            account.accounttype,
            account_response.accounttype,
            "Check Account Type of Created account"
        )
        self.assertEqual(
            account.name,
            account_response.name,
            "Check Account Name of Created account"
        )
        # The user is owned by the account, so deleting the account in
        # tearDown removes it as well — no separate cleanup registration.
        user = User.create(
            self.apiclient,
            self.services["user"],
            account=account.name,
            domainid=account.domainid
        )
        self.debug("Created user: %s" % user.id)
        list_users_response = list_users(
            self.apiclient,
            id=user.id
        )
        self.assertEqual(
            isinstance(list_users_response, list),
            True,
            "Check list users for valid data"
        )
        self.assertNotEqual(
            len(list_users_response),
            0,
            "Check List User response"
        )
        user_response = list_users_response[0]
        self.assertEqual(
            user.username,
            user_response.username,
            "Check username of Created user"
        )
        self.assertEqual(
            user.state,
            user_response.state,
            "Check state of created user"
        )
        return
class TestRemoveUserFromAccount(cloudstackTestCase):
    """Verify that deleting one user of an account leaves the account and
    its running VMs intact."""

    @classmethod
    def setUpClass(cls):
        """Set up zone/template data, a service offering and a shared account."""
        cls.testClient = super(TestRemoveUserFromAccount, cls).getClsTestClient()
        cls.api_client = cls.testClient.getApiClient()
        cls.services = Services().services
        cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
        cls.services['mode'] = cls.zone.networktype
        cls.template = get_template(
            cls.api_client,
            cls.zone.id,
            cls.services["ostype"]
        )
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.services["virtual_machine"]["template"] = cls.template.id
        cls.service_offering = ServiceOffering.create(
            cls.api_client,
            cls.services["service_offering"]
        )
        cls.account = Account.create(
            cls.api_client,
            cls.services["account"]
        )
        # BUGFIX: cls.account was created above but never registered for
        # cleanup, leaking the account (and anything left in it) after the
        # test run. Register it after the offering, matching the original
        # deletion order for the offering.
        cls._cleanup = [
            cls.service_offering,
            cls.account,
        ]
        return

    @classmethod
    def tearDownClass(cls):
        """Remove class-level resources (offering and account)."""
        try:
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        """Remove per-test resources registered in self.cleanup."""
        try:
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["advanced", "basic", "eip", "advancedns", "sg", "selfservice"])
    def test_01_user_remove_VM_running(self):
        """Delete one of two users in an account and verify the account and
        its two running VMs are unaffected."""
        # user_1 is deleted mid-test, so it is intentionally not registered
        # for cleanup; user_2 is.
        user_1 = User.create(
            self.apiclient,
            self.services["user"],
            account=self.account.name,
            domainid=self.account.domainid
        )
        self.debug("Created user: %s" % user_1.id)
        user_2 = User.create(
            self.apiclient,
            self.services["user"],
            account=self.account.name,
            domainid=self.account.domainid
        )
        self.debug("Created user: %s" % user_2.id)
        self.cleanup.append(user_2)
        vm_1 = VirtualMachine.create(
            self.apiclient,
            self.services["virtual_machine"],
            accountid=self.account.name,
            domainid=self.account.domainid,
            serviceofferingid=self.service_offering.id
        )
        self.debug("Deployed VM in account: %s, ID: %s" % (
            self.account.name,
            vm_1.id
        ))
        self.cleanup.append(vm_1)
        vm_2 = VirtualMachine.create(
            self.apiclient,
            self.services["virtual_machine"],
            accountid=self.account.name,
            domainid=self.account.domainid,
            serviceofferingid=self.service_offering.id
        )
        self.debug("Deployed VM in account: %s, ID: %s" % (
            self.account.name,
            vm_2.id
        ))
        self.cleanup.append(vm_2)
        self.debug("Deleting user: %s" % user_1.id)
        user_1.delete(self.apiclient)
        # The account must survive the user deletion...
        accounts_response = list_accounts(
            self.apiclient,
            id=self.account.id
        )
        self.assertEqual(
            isinstance(accounts_response, list),
            True,
            "Check for valid list accounts response"
        )
        self.assertNotEqual(
            len(accounts_response),
            0,
            "Check List Account response"
        )
        # ...and so must its VMs, still in Running state.
        vm_response = list_virtual_machines(
            self.apiclient,
            account=self.account.name,
            domainid=self.account.domainid
        )
        self.assertEqual(
            isinstance(vm_response, list),
            True,
            "Check for valid list VM response"
        )
        self.assertNotEqual(
            len(vm_response),
            0,
            "Check List VM response"
        )
        for vm in vm_response:
            self.assertEqual(
                vm.state,
                'Running',
                "Check state of VMs associated with account"
            )
        return
class TestNonRootAdminsPrivileges(cloudstackTestCase):
    """Check that a non-ROOT domain admin only sees accounts of its own domain."""

    @classmethod
    def setUpClass(cls):
        """Create a dedicated domain with a single admin account inside it."""
        cls.testClient = super(
            TestNonRootAdminsPrivileges, cls).getClsTestClient()
        cls.api_client = cls.testClient.getApiClient()
        cls.services = Services().services
        cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
        cls.services['mode'] = cls.zone.networktype
        cls.domain = Domain.create(
            cls.api_client,
            cls.services["domain"],
        )
        cls.account = Account.create(
            cls.api_client,
            cls.services["account"],
            admin=True,
            domainid=cls.domain.id
        )
        # Account must be removed before its domain can be.
        cls._cleanup = [cls.account, cls.domain]
        return

    @classmethod
    def tearDownClass(cls):
        """Delete the admin account and its domain."""
        try:
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        """Remove per-test resources registered in self.cleanup."""
        try:
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["advanced", "basic", "eip", "advancedns", "sg", "selfservice"])
    def test_01_non_root_admin_Privileges(self):
        """Create two accounts outside the test domain, then list accounts
        scoped to the test domain and verify only its own account shows up."""
        # These two accounts land in the ROOT domain (no domainid given),
        # so they must not appear in the domain-scoped listing below.
        for _ in range(2):
            outside_account = Account.create(
                self.apiclient,
                self.services["account"]
            )
            self.debug("Created account: %s" % outside_account.name)
            self.cleanup.append(outside_account)
        domain_accounts = list_accounts(
            self.apiclient,
            domainid=self.domain.id,
            listall=True
        )
        self.assertEqual(
            isinstance(domain_accounts, list),
            True,
            "Check list accounts response for valid data"
        )
        # Only the admin account created in setUpClass lives in this domain.
        self.assertEqual(
            len(domain_accounts),
            1,
            "Check List accounts response"
        )
        for listed in domain_accounts:
            self.assertEqual(
                listed.domainid,
                self.domain.id,
                "Check domain ID of account"
            )
        return
class TestServiceOfferingSiblings(cloudstackTestCase):
    """A domain-scoped service offering must not be visible to a sibling domain."""

    @classmethod
    def setUpClass(cls):
        """Create two sibling domains, an offering scoped to domain_1 and
        one admin account per domain."""
        cls.api_client = super(
            TestServiceOfferingSiblings,
            cls
        ).getClsTestClient().getApiClient()
        cls.services = Services().services
        cls.domain_1 = Domain.create(
            cls.api_client,
            cls.services["domain"]
        )
        cls.domain_2 = Domain.create(
            cls.api_client,
            cls.services["domain"]
        )
        # Offering restricted to domain_1 only
        cls.service_offering = ServiceOffering.create(
            cls.api_client,
            cls.services["service_offering"],
            domainid=cls.domain_1.id
        )
        cls.account_1 = Account.create(
            cls.api_client,
            cls.services["account"],
            admin=True,
            domainid=cls.domain_1.id
        )
        cls.account_2 = Account.create(
            cls.api_client,
            cls.services["account"],
            admin=True,
            domainid=cls.domain_2.id
        )
        # Accounts and offering must go before their domains.
        cls._cleanup = [
            cls.account_1,
            cls.account_2,
            cls.service_offering,
            cls.domain_1,
            cls.domain_2,
        ]
        return

    @classmethod
    def tearDownClass(cls):
        """Delete accounts, offering and both domains."""
        try:
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        # NOTE(review): self.testClient is presumably provided by the
        # cloudstackTestCase base class; setUpClass here does not assign it.
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        """Remove per-test resources registered in self.cleanup."""
        try:
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["advanced", "basic", "eip", "advancedns", "sg", "selfservice"])
    def test_01_service_offering_siblings(self):
        """Offering is listed for its own domain and absent for the sibling."""
        service_offerings = list_service_offering(
            self.apiclient,
            domainid=self.domain_1.id
        )
        self.assertEqual(
            isinstance(service_offerings, list),
            True,
            "Check if valid list service offerings response"
        )
        self.assertNotEqual(
            len(service_offerings),
            0,
            "Check List Service Offerings response"
        )
        for service_offering in service_offerings:
            self.debug("Validating service offering: %s" % service_offering.id)
            self.assertEqual(
                service_offering.id,
                self.service_offering.id,
                "Check Service offering ID for domain" + str(self.domain_1.name)
            )
        # Sibling domain must not see the offering at all.
        service_offerings = list_service_offering(
            self.apiclient,
            domainid=self.domain_2.id
        )
        self.assertEqual(
            service_offerings,
            None,
            "Check List Service Offerings response for other domain"
        )
        return
class TestServiceOfferingHierarchy(cloudstackTestCase):
    """Visibility of a domain-scoped service offering across a parent/child
    domain hierarchy."""

    @classmethod
    def setUpClass(cls):
        """Create a parent domain with a child domain, an offering scoped to
        the parent and one admin account per domain."""
        cls.api_client = super(
            TestServiceOfferingHierarchy,
            cls
        ).getClsTestClient().getApiClient()
        cls.services = Services().services
        cls.domain_1 = Domain.create(
            cls.api_client,
            cls.services["domain"]
        )
        # domain_2 is a child of domain_1
        cls.domain_2 = Domain.create(
            cls.api_client,
            cls.services["domain"],
            parentdomainid=cls.domain_1.id
        )
        cls.service_offering = ServiceOffering.create(
            cls.api_client,
            cls.services["service_offering"],
            domainid=cls.domain_1.id
        )
        cls.account_1 = Account.create(
            cls.api_client,
            cls.services["account"],
            admin=True,
            domainid=cls.domain_1.id
        )
        cls.account_2 = Account.create(
            cls.api_client,
            cls.services["account"],
            admin=True,
            domainid=cls.domain_2.id
        )
        # Child-domain resources first, then parent-domain resources.
        cls._cleanup = [
            cls.account_2,
            cls.domain_2,
            cls.service_offering,
            cls.account_1,
            cls.domain_1,
        ]
        return

    @classmethod
    def tearDownClass(cls):
        """Delete accounts, offering and both domains."""
        try:
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        # NOTE(review): self.testClient is presumably provided by the
        # cloudstackTestCase base class; setUpClass here does not assign it.
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        """Remove per-test resources registered in self.cleanup."""
        try:
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["advanced", "basic", "eip", "advancedns", "sg", "selfservice"])
    def test_01_service_offering_hierarchy(self):
        """Offering is listed for the parent domain and — per this test's
        expectation — not listed for the child domain."""
        service_offerings = list_service_offering(
            self.apiclient,
            domainid=self.domain_1.id
        )
        self.assertEqual(
            isinstance(service_offerings, list),
            True,
            "Check List Service Offerings for a valid response"
        )
        self.assertNotEqual(
            len(service_offerings),
            0,
            "Check List Service Offerings response"
        )
        for service_offering in service_offerings:
            self.assertEqual(
                service_offering.id,
                self.service_offering.id,
                "Check Service offering ID for domain" + str(self.domain_1.name)
            )
        service_offerings = list_service_offering(
            self.apiclient,
            domainid=self.domain_2.id
        )
        self.assertEqual(
            service_offerings,
            None,
            "Check List Service Offerings for a valid response"
        )
        return
class TestTemplateHierarchy(cloudstackTestCase):
    """A template registered in a parent domain should be visible from its
    child domain."""

    @classmethod
    def setUpClass(cls):
        """Create a parent/child domain pair with one admin account each and
        register a public template under the parent domain's account."""
        cls.testClient = super(TestTemplateHierarchy, cls).getClsTestClient()
        cls.api_client = cls.testClient.getApiClient()
        cls.hypervisor = cls.testClient.getHypervisorInfo()
        cls.services = Services().services
        cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
        cls.services['mode'] = cls.zone.networktype
        cls.domain_1 = Domain.create(
            cls.api_client,
            cls.services["domain"]
        )
        # domain_2 is a child of domain_1
        cls.domain_2 = Domain.create(
            cls.api_client,
            cls.services["domain"],
            parentdomainid=cls.domain_1.id
        )
        cls.account_1 = Account.create(
            cls.api_client,
            cls.services["account"],
            admin=True,
            domainid=cls.domain_1.id
        )
        cls.account_2 = Account.create(
            cls.api_client,
            cls.services["account"],
            admin=True,
            domainid=cls.domain_2.id
        )
        # Fill in the template url/hypervisor/format from the zone's
        # built-in template so registration points at real bits.
        builtin_info = get_builtin_template_info(cls.api_client, cls.zone.id)
        cls.services["template"]["url"] = builtin_info[0]
        cls.services["template"]["hypervisor"] = builtin_info[1]
        cls.services["template"]["format"] = builtin_info[2]
        cls.template = Template.register(
            cls.api_client,
            cls.services["template"],
            zoneid=cls.zone.id,
            account=cls.account_1.name,
            domainid=cls.domain_1.id,
            hypervisor=cls.hypervisor
        )
        cls.template.download(cls.api_client)
        # Fixed wait after download() — presumably to let the template
        # reach a ready state before the tests list it; TODO confirm.
        time.sleep(60)
        # Child-domain resources first, then the template, then the parent.
        cls._cleanup = [
            cls.account_2,
            cls.domain_2,
            cls.template,
            cls.account_1,
            cls.domain_1,
        ]
        return

    @classmethod
    def tearDownClass(cls):
        """Delete accounts, template and both domains."""
        try:
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        """Remove per-test resources registered in self.cleanup."""
        try:
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["advanced", "basic", "eip", "advancedns", "sg"])
    def test_01_template_hierarchy(self):
        """Template is listed for the owner account in the parent domain and
        for the account in the child domain."""
        templates = list_templates(
            self.apiclient,
            templatefilter='self',
            account=self.account_1.name,
            domainid=self.domain_1.id
        )
        self.assertEqual(
            isinstance(templates, list),
            True,
            "Template response %s is not a list" % templates
        )
        self.assertNotEqual(
            len(templates),
            0,
            "No templates found"
        )
        for template in templates:
            self.assertEqual(
                template.id,
                self.template.id,
                "Check Template ID for domain" + str(self.domain_1.name)
            )
        # Same template should be visible from the child-domain account
        # (templatefilter='all' since account_2 is not the owner).
        templates = list_templates(
            self.apiclient,
            id=self.template.id,
            templatefilter='all',
            account=self.account_2.name,
            domainid=self.domain_2.id
        )
        self.assertEqual(
            isinstance(templates, list),
            True,
            "Template response %s is not a list" % templates
        )
        self.assertNotEqual(
            len(templates),
            0,
            "No templates found"
        )
        for template in templates:
            self.assertEqual(
                template.id,
                self.template.id,
                "Check Template ID for domain" + str(self.domain_2.name)
            )
        return
class TestAddVmToSubDomain(cloudstackTestCase):
    """Deploy VMs in a parent domain and in its sub-domain and verify both run."""

    @classmethod
    def setUpClass(cls):
        """Create a sub-domain under the ROOT test domain, one admin account
        in each, a domain-scoped offering and one VM per account."""
        cls.testClient = super(TestAddVmToSubDomain, cls).getClsTestClient()
        cls.api_client = cls.testClient.getApiClient()
        cls.services = Services().services
        cls.domain = get_domain(cls.api_client)
        cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
        cls.services['mode'] = cls.zone.networktype
        cls.sub_domain = Domain.create(
            cls.api_client,
            cls.services["domain"],
            parentdomainid=cls.domain.id
        )
        cls.account_1 = Account.create(
            cls.api_client,
            cls.services["account"],
            admin=True,
            domainid=cls.domain.id
        )
        cls.account_2 = Account.create(
            cls.api_client,
            cls.services["account"],
            admin=True,
            domainid=cls.sub_domain.id
        )
        cls.service_offering = ServiceOffering.create(
            cls.api_client,
            cls.services["service_offering"],
            domainid=cls.domain.id
        )
        cls.template = get_template(
            cls.api_client,
            cls.zone.id,
            cls.services["ostype"]
        )
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        # One VM per account; the VMs are removed implicitly when their
        # accounts are deleted, so they are not registered in _cleanup.
        cls.vm_1 = VirtualMachine.create(
            cls.api_client,
            cls.services["virtual_machine"],
            templateid=cls.template.id,
            accountid=cls.account_1.name,
            domainid=cls.account_1.domainid,
            serviceofferingid=cls.service_offering.id
        )
        cls.vm_2 = VirtualMachine.create(
            cls.api_client,
            cls.services["virtual_machine"],
            templateid=cls.template.id,
            accountid=cls.account_2.name,
            domainid=cls.account_2.domainid,
            serviceofferingid=cls.service_offering.id
        )
        cls._cleanup = [
            cls.account_2,
            cls.account_1,
            cls.sub_domain,
            cls.service_offering
        ]
        return

    @classmethod
    def tearDownClass(cls):
        """Delete both accounts, the sub-domain and the offering."""
        try:
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        """Remove per-test resources registered in self.cleanup."""
        try:
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["advanced", "basic", "eip", "advancedns", "sg", "selfservice"])
    def test_01_add_vm_to_subdomain(self):
        """Both the parent-domain VM and the sub-domain VM list as Running."""
        vm_response = list_virtual_machines(
            self.apiclient,
            id=self.vm_1.id
        )
        self.assertEqual(
            isinstance(vm_response, list),
            True,
            "Check List VM for a valid response"
        )
        self.assertNotEqual(
            len(vm_response),
            0,
            "Check List Template response"
        )
        for vm in vm_response:
            self.debug("VM ID: %s and state: %s" % (vm.id, vm.state))
            self.assertEqual(
                vm.state,
                'Running',
                "Check State of Virtual machine"
            )
        vm_response = list_virtual_machines(
            self.apiclient,
            id=self.vm_2.id
        )
        self.assertNotEqual(
            len(vm_response),
            0,
            "Check List Template response"
        )
        for vm in vm_response:
            self.debug("VM ID: %s and state: %s" % (vm.id, vm.state))
            self.assertEqual(
                vm.state,
                'Running',
                "Check State of Virtual machine"
            )
        return
class TestUserDetails(cloudstackTestCase):
    """Update first/last name of a user for regular, ROOT-admin and
    domain-admin accounts and verify the change sticks."""

    @classmethod
    def setUpClass(cls):
        """Fetch zone and ROOT domain details shared by all tests."""
        cls.testClient = super(TestUserDetails, cls).getClsTestClient()
        cls.api_client = cls.testClient.getApiClient()
        cls.services = Services().services
        cls.domain = get_domain(cls.api_client)
        cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
        cls.services['mode'] = cls.zone.networktype
        cls._cleanup = []
        return

    @classmethod
    def tearDownClass(cls):
        """Remove any class-level resources."""
        try:
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        """Remove per-test resources registered in self.cleanup."""
        try:
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def _update_and_verify_first_user(self):
        """Update first/last name of self.account's first user and verify.

        Lists the users of self.account, updates the first user's
        firstname/lastname to random values via User.update, re-fetches the
        user and asserts both fields now carry the new values.
        """
        self.debug(
            "Fetching user details for account: %s" %
            self.account.name)
        users = User.list(
            self.apiclient,
            account=self.account.name,
            domainid=self.account.domainid
        )
        self.assertEqual(
            isinstance(users, list),
            True,
            "List users should return a valid list for account"
        )
        user_1 = users[0]
        self.debug("Updating the details of user: %s" % user_1.name)
        firstname = random_gen()
        lastname = random_gen()
        self.debug("New firstname: %s, lastname: %s" % (firstname, lastname))
        User.update(
            self.apiclient,
            user_1.id,
            firstname=firstname,
            lastname=lastname
        )
        self.debug(
            "Fetching user details for user: %s" % user_1.name)
        users = User.list(
            self.apiclient,
            id=user_1.id,
            listall=True
        )
        self.assertEqual(
            isinstance(users, list),
            True,
            "List users should return a valid list for account"
        )
        user_1 = users[0]
        self.assertEqual(
            user_1.firstname,
            firstname,
            "User's first name should be updated with new one"
        )
        self.assertEqual(
            user_1.lastname,
            lastname,
            "User's last name should be updated with new one"
        )

    @attr(tags=[
        "role",
        "accounts",
        "simulator",
        "advanced",
        "advancedns",
        "basic",
        "eip",
        "sg"
    ])
    def test_updateUserDetails(self):
        """Update user details of a regular user account."""
        self.debug("Creating an user account..")
        self.account = Account.create(
            self.apiclient,
            self.services["account"],
            domainid=self.domain.id
        )
        self.cleanup.append(self.account)
        self._update_and_verify_first_user()
        return

    @attr(tags=[
        "role",
        "accounts",
        "simulator",
        "advanced",
        "advancedns",
        "basic",
        "eip",
        "sg"
    ])
    def test_updateAdminDetails(self):
        """Update user details of a ROOT admin account."""
        self.debug("Creating a ROOT admin account")
        self.account = Account.create(
            self.apiclient,
            self.services["account"],
            admin=True,
        )
        self.cleanup.append(self.account)
        self._update_and_verify_first_user()
        return

    @attr(tags=[
        "role",
        "accounts",
        "simulator",
        "advanced",
        "advancedns",
        "basic",
        "eip",
        "sg"
    ])
    def test_updateDomainAdminDetails(self):
        """Update user details of a domain admin account."""
        self.debug("Creating a domain admin account")
        self.account = Account.create(
            self.apiclient,
            self.services["account"],
            admin=True,
            domainid=self.domain.id
        )
        self.cleanup.append(self.account)
        self._update_and_verify_first_user()
        return
class TestUserLogin(cloudstackTestCase):
    @classmethod
    def setUpClass(cls):
        """Fetch zone and ROOT domain details shared by the login tests."""
        cls.testClient = super(TestUserLogin, cls).getClsTestClient()
        cls.api_client = cls.testClient.getApiClient()
        cls.services = Services().services
        cls.domain = get_domain(cls.api_client)
        cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
        cls.services['mode'] = cls.zone.networktype
        cls._cleanup = []
        return
@classmethod
def tearDownClass(cls):
try:
cleanup_resources(cls.api_client, cls._cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
def setUp(self):
self.apiclient = self.testClient.getApiClient()
self.dbclient = self.testClient.getDbConnection()
self.cleanup = []
return
def tearDown(self):
try:
cleanup_resources(self.apiclient, self.cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
@attr(tags=["login", "accounts", "simulator", "advanced",
"advancedns", "basic", "eip", "sg"])
def test_LoginApiUuidResponse(self):
self.debug("Creating an user account..")
self.account = Account.create(
self.apiclient,
self.services["account"],
domainid=self.domain.id
)
self.cleanup.append(self.account)
self.debug("Logging into the cloudstack with login API")
respose = User.login(
self.apiclient,
username=self.account.name,
password=self.services["account"]["password"]
)
self.debug("Login API response: %s" % respose)
self.assertNotEqual(
respose.sessionkey,
None,
"Login to the CloudStack should be successful" +
"response shall have non Null key"
)
return
@attr(tags=["login", "accounts", "simulator", "advanced",
"advancedns", "basic", "eip", "sg"])
def test_LoginApiDomain(self):
self.debug("Creating a domain for login with API domain test")
domain = Domain.create(
self.apiclient,
self.services["domain"],
parentdomainid=self.domain.id
)
self.debug("Domain: %s is created succesfully." % domain.name)
self.debug(
"Checking if the created domain is listed in list domains API")
domains = Domain.list(self.apiclient, id=domain.id, listall=True)
self.assertEqual(
isinstance(domains, list),
True,
"List domains shall return a valid response"
)
self.debug("Creating an user account in domain: %s" % domain.name)
self.account = Account.create(
self.apiclient,
self.services["account"],
domainid=domain.id
)
self.cleanup.append(self.account)
accounts = Account.list(
self.apiclient,
name=self.account.name,
domainid=self.account.domainid,
listall=True
)
self.assertEqual(
isinstance(accounts, list),
True,
"List accounts should return a valid response"
)
self.debug("Logging into the cloudstack with login API")
respose = User.login(
self.apiclient,
username=self.account.name,
password=self.services["account"]["password"],
domainid=domain.id)
self.debug("Login API response: %s" % respose)
self.assertNotEqual(
respose.sessionkey,
None,
"Login to the CloudStack should be successful" +
"response shall have non Null key"
)
return
class TestDomainForceRemove(cloudstackTestCase):
    """Tests for deleting a non-empty domain with and without the force
    (cleanup) option."""

    @classmethod
    def setUpClass(cls):
        # Shared API client, test data, zone and template for all tests.
        cls.testClient = super(TestDomainForceRemove, cls).getClsTestClient()
        cls.api_client = cls.testClient.getApiClient()
        cls.services = Services().services
        cls.domain = get_domain(cls.api_client)
        cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
        cls.services['mode'] = cls.zone.networktype
        cls.template = get_template(
            cls.api_client,
            cls.zone.id,
            cls.services["ostype"]
        )
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls._cleanup = []
        return

    @classmethod
    def tearDownClass(cls):
        try:
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        try:
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["domains", "advanced", "advancedns", "simulator", "selfservice"])
    def test_forceDeleteDomain(self):
        """Force-deleting a domain must also remove its accounts, VMs and
        network resources.

        Steps:
          1. create a child domain with two accounts, each running a VM
          2. create a PF rule on account 1's source-NAT IP
          3. delete the domain with cleanup=True and verify the domain and
             its accounts are gone afterwards
        """
        self.debug("Creating a domain for login with API domain test")
        domain = Domain.create(
            self.apiclient,
            self.services["domain"],
            parentdomainid=self.domain.id
        )
        self.debug("Domain is created succesfully.")
        self.debug(
            "Checking if the created domain is listed in list domains API")
        domains = Domain.list(self.apiclient, id=domain.id, listall=True)
        self.assertEqual(
            isinstance(domains, list),
            True,
            "List domains shall return a valid response"
        )
        self.debug("Creating 2 user accounts in domain: %s" % domain.name)
        # NOTE: these accounts are intentionally NOT appended to self.cleanup -
        # the force-delete of the domain is expected to remove them.
        self.account_1 = Account.create(
            self.apiclient,
            self.services["account"],
            domainid=domain.id
        )
        self.account_2 = Account.create(
            self.apiclient,
            self.services["account"],
            domainid=domain.id
        )
        self.debug("Creating a tiny service offering for VM deployment")
        self.service_offering = ServiceOffering.create(
            self.apiclient,
            self.services["service_offering"],
            domainid=self.domain.id
        )
        self.debug("Deploying virtual machine in account 1: %s" %
                   self.account_1.name)
        vm_1 = VirtualMachine.create(
            self.apiclient,
            self.services["virtual_machine"],
            templateid=self.template.id,
            accountid=self.account_1.name,
            domainid=self.account_1.domainid,
            serviceofferingid=self.service_offering.id
        )
        self.debug("Deploying virtual machine in account 2: %s" %
                   self.account_2.name)
        # vm_2 is deployed only to populate account 2 with a resource.
        vm_2 = VirtualMachine.create(
            self.apiclient,
            self.services["virtual_machine"],
            templateid=self.template.id,
            accountid=self.account_2.name,
            domainid=self.account_2.domainid,
            serviceofferingid=self.service_offering.id
        )
        networks = Network.list(
            self.apiclient,
            account=self.account_1.name,
            domainid=self.account_1.domainid,
            listall=True
        )
        self.assertEqual(
            isinstance(networks, list),
            True,
            "List networks should return a valid response"
        )
        network_1 = networks[0]
        self.debug("Default network in account 1: %s is %s" % (
            self.account_1.name,
            network_1.name))
        # The source NAT IP is needed to create a port forwarding rule on it.
        src_nat_list = PublicIPAddress.list(
            self.apiclient,
            associatednetworkid=network_1.id,
            account=self.account_1.name,
            domainid=self.account_1.domainid,
            listall=True,
            issourcenat=True,
        )
        self.assertEqual(
            isinstance(src_nat_list, list),
            True,
            "List Public IP should return a valid source NAT"
        )
        self.assertNotEqual(
            len(src_nat_list),
            0,
            "Length of response from listPublicIp should not be 0"
        )
        src_nat = src_nat_list[0]
        self.debug(
            "Trying to create a port forwarding rule in source NAT: %s" %
            src_nat.ipaddress)
        nat_rule = NATRule.create(
            self.apiclient,
            vm_1,
            self.services["natrule"],
            ipaddressid=src_nat.id
        )
        self.debug("Created PF rule on source NAT: %s" % src_nat.ipaddress)
        nat_rules = NATRule.list(self.apiclient, id=nat_rule.id)
        self.assertEqual(
            isinstance(nat_rules, list),
            True,
            "List NAT should return a valid port forwarding rules"
        )
        self.assertNotEqual(
            len(nat_rules),
            0,
            "Length of response from listLbRules should not be 0"
        )
        self.debug("Deleting domain with force option")
        try:
            domain.delete(self.apiclient, cleanup=True)
        except Exception as e:
            self.debug("Waiting for account.cleanup.interval" +
                       " to cleanup any remaining resouces")
            # Cleanup is asynchronous: wait 3 * account.cleanup.interval
            # before checking that the domain is really gone.
            wait_for_cleanup(self.apiclient, ["account.cleanup.interval"]*3)
            with self.assertRaises(CloudstackAPIException):
                Domain.list(
                    self.apiclient,
                    id=domain.id,
                    listall=True
                )
        self.debug("Checking if the resources in domain are deleted")
        with self.assertRaises(CloudstackAPIException):
            Account.list(
                self.apiclient,
                name=self.account_1.name,
                domainid=self.account_1.domainid,
                listall=True
            )
        return

    @attr(tags=["domains", "advanced", "advancedns", "simulator", "selfservice"])
    def test_DeleteDomain(self):
        """Deleting a non-empty domain WITHOUT the force option must fail."""
        self.debug("Creating a domain for login with API domain test")
        domain = Domain.create(
            self.apiclient,
            self.services["domain"],
            parentdomainid=self.domain.id
        )
        self.debug("Domain: %s is created successfully." % domain.name)
        self.debug(
            "Checking if the created domain is listed in list domains API")
        domains = Domain.list(self.apiclient, id=domain.id, listall=True)
        self.assertEqual(
            isinstance(domains, list),
            True,
            "List domains shall return a valid response"
        )
        self.debug("Creating 2 user accounts in domain: %s" % domain.name)
        # Unlike the force-delete test, resources here ARE queued for
        # cleanup, because the delete below is expected to be refused.
        self.account_1 = Account.create(
            self.apiclient,
            self.services["account"],
            domainid=domain.id
        )
        self.cleanup.append(self.account_1)
        self.account_2 = Account.create(
            self.apiclient,
            self.services["account"],
            domainid=domain.id
        )
        self.cleanup.append(self.account_2)
        self.debug("Creating a tiny service offering for VM deployment")
        self.service_offering = ServiceOffering.create(
            self.apiclient,
            self.services["service_offering"],
            domainid=self.domain.id
        )
        self.cleanup.append(self.service_offering)
        self.debug("Deploying virtual machine in account 1: %s" %
                   self.account_1.name)
        vm_1 = VirtualMachine.create(
            self.apiclient,
            self.services["virtual_machine"],
            templateid=self.template.id,
            accountid=self.account_1.name,
            domainid=self.account_1.domainid,
            serviceofferingid=self.service_offering.id
        )
        self.debug("Deploying virtual machine in account 2: %s" %
                   self.account_2.name)
        vm_2 = VirtualMachine.create(
            self.apiclient,
            self.services["virtual_machine"],
            templateid=self.template.id,
            accountid=self.account_2.name,
            domainid=self.account_2.domainid,
            serviceofferingid=self.service_offering.id
        )
        networks = Network.list(
            self.apiclient,
            account=self.account_1.name,
            domainid=self.account_1.domainid,
            listall=True
        )
        self.assertEqual(
            isinstance(networks, list),
            True,
            "List networks should return a valid response"
        )
        network_1 = networks[0]
        self.debug("Default network in account 1: %s is %s" % (
            self.account_1.name,
            network_1.name))
        src_nat_list = PublicIPAddress.list(
            self.apiclient,
            associatednetworkid=network_1.id,
            account=self.account_1.name,
            domainid=self.account_1.domainid,
            listall=True,
            issourcenat=True,
        )
        self.assertEqual(
            isinstance(src_nat_list, list),
            True,
            "List Public IP should return a valid source NAT"
        )
        self.assertNotEqual(
            len(src_nat_list),
            0,
            "Length of response from listPublicIp should not be 0"
        )
        src_nat = src_nat_list[0]
        self.debug(
            "Trying to create a port forwarding rule in source NAT: %s" %
            src_nat.ipaddress)
        nat_rule = NATRule.create(
            self.apiclient,
            vm_1,
            self.services["natrule"],
            ipaddressid=src_nat.id
        )
        self.debug("Created PF rule on source NAT: %s" % src_nat.ipaddress)
        nat_rules = NATRule.list(self.apiclient, id=nat_rule.id)
        self.assertEqual(
            isinstance(nat_rules, list),
            True,
            "List NAT should return a valid port forwarding rules"
        )
        self.assertNotEqual(
            len(nat_rules),
            0,
            "Length of response from listLbRules should not be 0"
        )
        self.debug("Deleting domain without force option")
        # Without cleanup=True the API must refuse to delete a domain that
        # still contains resources.
        with self.assertRaises(Exception):
            domain.delete(self.apiclient, cleanup=False)
        return
| true | true |
f71ded7209537b4f3b656163107b6d7a91292890 | 4,399 | py | Python | git/refs/reference.py | ifwe/GitPython | 2752f7068fb6fc160f63eabec3964263171593e3 | [
"BSD-3-Clause"
] | null | null | null | git/refs/reference.py | ifwe/GitPython | 2752f7068fb6fc160f63eabec3964263171593e3 | [
"BSD-3-Clause"
] | null | null | null | git/refs/reference.py | ifwe/GitPython | 2752f7068fb6fc160f63eabec3964263171593e3 | [
"BSD-3-Clause"
] | null | null | null | from symbolic import SymbolicReference
from git.util import (
LazyMixin,
Iterable,
)
from gitdb.util import (
isfile,
hex_to_bin
)
__all__ = ["Reference"]
#{ Utilities
def require_remote_ref_path(func):
    """A decorator raising a ValueError if we are not a valid remote, based on the path.

    NOTE: the docstring previously claimed a TypeError was raised, but the
    implementation raises (and callers rely on) ValueError.  Only ``__name__``
    is copied onto the wrapper, which matches the historical behaviour."""
    def wrapper(self, *args):
        if not self.is_remote():
            raise ValueError("ref path does not point to a remote reference: %s" % self.path)
        return func(self, *args)
    # END wrapper
    wrapper.__name__ = func.__name__
    return wrapper
#}END utilities
class Reference(SymbolicReference, LazyMixin, Iterable):
    """A named reference to an arbitrary object.

    Subclasses may restrict what the reference is allowed to point at -
    Heads, for instance, may only point to commits."""
    __slots__ = tuple()
    _points_to_commits_only = False
    _resolve_ref_on_create = True
    _common_path_default = "refs"

    def __init__(self, repo, path, check_path=True):
        """Initialize this instance.

        :param repo: Our parent repository
        :param path:
            Path relative to the .git/ directory pointing to the ref in
            question, i.e. refs/heads/master
        :param check_path: if False, you can provide any path. Otherwise the
            path must start with the default path prefix of this type."""
        if check_path and not path.startswith(self._common_path_default + '/'):
            raise ValueError("Cannot instantiate %r from path %s" % (self.__class__.__name__, path))
        super(Reference, self).__init__(repo, path)

    def __str__(self):
        return self.name

    #{ Interface

    def set_object(self, object, logmsg=None):
        """Special version which checks if the head-log needs an update as well"""
        previous_binsha = None
        if logmsg is not None:
            head = self.repo.head
            # Remember the old commit only when HEAD is attached to us;
            # in that case HEAD's reflog needs an entry too.
            if not head.is_detached and head.ref == self:
                previous_binsha = self.commit.binsha

        super(Reference, self).set_object(object, logmsg)

        if previous_binsha is not None:
            # Mirrors the hack in git's refs.c: when a branch is updated
            # directly while HEAD points to it (as may happen on the remote
            # side of a push), the HEAD reflog should logically be updated
            # as well.  Resolving every symref pointing at this branch would
            # be costly for such a rare event, so - like git - we cheat and
            # check HEAD only, which covers practically all usage scenarios.
            self.repo.head.log_append(previous_binsha, logmsg)

    # NOTE: no need to override further properties; they only operate
    # without touching the log.

    @property
    def name(self):
        """:return: (shortest) Name of this reference - it may contain path components"""
        # Drop the first two path tokens (refs/heads, refs/tags,
        # refs/remotes, ...); shorter paths such as refs/HEAD stay as-is.
        parts = self.path.split('/', 2)
        if len(parts) < 3:
            return self.path
        return parts[2]

    @classmethod
    def iter_items(cls, repo, common_path=None):
        """Equivalent to SymbolicReference.iter_items, but will return non-detached
        references as well."""
        return cls._iter_items(repo, common_path)

    #}END interface

    #{ Remote Interface

    @property
    @require_remote_ref_path
    def remote_name(self):
        """
        :return:
            Name of the remote we are a reference of, such as 'origin' for a reference
            named 'origin/master'"""
        # Path layout: refs/remotes/<remote name>/<branch_name>
        return self.path.split('/')[2]

    @property
    @require_remote_ref_path
    def remote_head(self):
        """:return: Name of the remote head itself, i.e. master.
        :note: The returned name is usually not qualified enough to uniquely identify
            a branch"""
        return '/'.join(self.path.split('/')[3:])

    #} END remote interface
| 34.367188 | 101 | 0.625369 | from symbolic import SymbolicReference
from git.util import (
LazyMixin,
Iterable,
)
from gitdb.util import (
isfile,
hex_to_bin
)
__all__ = ["Reference"]
def require_remote_ref_path(func):
def wrapper(self, *args):
if not self.is_remote():
raise ValueError("ref path does not point to a remote reference: %s" % self.path)
return func(self, *args)
wrapper.__name__ = func.__name__
return wrapper
class Reference(SymbolicReference, LazyMixin, Iterable):
__slots__ = tuple()
_points_to_commits_only = False
_resolve_ref_on_create = True
_common_path_default = "refs"
def __init__(self, repo, path, check_path=True):
if check_path and not path.startswith(self._common_path_default + '/'):
raise ValueError("Cannot instantiate %r from path %s" % (self.__class__.__name__, path))
super(Reference, self).__init__(repo, path)
def __str__(self):
return self.name
def set_object(self, object, logmsg=None):
oldbinsha = None
if logmsg is not None:
head = self.repo.head
if not head.is_detached and head.ref == self:
oldbinsha = self.commit.binsha
super(Reference, self).set_object(object, logmsg)
if oldbinsha is not None:
# * check with HEAD only which should cover 99% of all usage
# * scenarios (even 100% of the default ones).
# */
self.repo.head.log_append(oldbinsha, logmsg)
# END check if the head
# NOTE: Don't have to overwrite properties as the will only work without a the log
@property
def name(self):
tokens = self.path.split('/')
if len(tokens) < 3:
return self.path
return '/'.join(tokens[2:])
@classmethod
def iter_items(cls, repo, common_path=None):
return cls._iter_items(repo, common_path)
@property
@require_remote_ref_path
def remote_name(self):
tokens = self.path.split('/')
return tokens[2]
@property
@require_remote_ref_path
def remote_head(self):
tokens = self.path.split('/')
return '/'.join(tokens[3:])
| true | true |
f71deef83c9778cff0948c2c78a2a0544ca4476f | 6,517 | py | Python | autoencoder_program_synthesis/model_utils/modules.py | sander102907/autoencoder_program_synthesis | 752954f9ef268908553189a1c3323bad15b39f04 | [
"Apache-2.0"
] | 4 | 2021-08-14T17:38:37.000Z | 2022-02-03T20:47:54.000Z | autoencoder_program_synthesis/model_utils/modules.py | sander102907/autoencoder_program_synthesis | 752954f9ef268908553189a1c3323bad15b39f04 | [
"Apache-2.0"
] | 2 | 2021-04-28T10:41:30.000Z | 2022-02-02T14:30:58.000Z | autoencoder_program_synthesis/model_utils/modules.py | sander102907/autoencoder_program_synthesis | 752954f9ef268908553189a1c3323bad15b39f04 | [
"Apache-2.0"
] | 1 | 2021-08-14T17:38:39.000Z | 2021-08-14T17:38:39.000Z | import torch
import torch.nn as nn
import torch.nn.functional as F
class AddGate(nn.Module):
    """LSTM-style additive gate:

        y = sigmoid(W_mul @ inp + b_mul) * tanh(W_add @ inp + b_add)

    The sigmoid factor learns *whether and how much* of the tanh candidate
    should be contributed to some external state.
    """

    def __init__(self, dim):
        super().__init__()
        self.W_mul = nn.Linear(dim, dim, bias=True)
        self.W_add = nn.Linear(dim, dim, bias=True)
        self.sigmoid = nn.Sigmoid()

    def forward(self, inp):
        gate = self.sigmoid(self.W_mul(inp))
        candidate = torch.tanh(self.W_add(inp))
        return gate * candidate
class PredictiveHidden(nn.Module):
    """Fuse two hidden states into one predictive hidden state:

        h_pred = tanh(W1 @ inp1 + W2 @ inp2)
    """

    def __init__(self, dim):
        super().__init__()
        self.W1 = nn.Linear(dim, dim, bias=True)
        self.W2 = nn.Linear(dim, dim, bias=True)

    def forward(self, inp1, inp2):
        # Project both inputs, sum, then squash into (-1, 1).
        combined = self.W1(inp1) + self.W2(inp2)
        return torch.tanh(combined)
class TreeTopologyPred(nn.Module):
    """Three scalar logit heads over a node state (dim -> 1 each):

    - depth: does the node have children?
    - width: does the node have successor siblings?
    - res:   is the token a reserved C++ keyword?
    """

    def __init__(self, dim):
        super().__init__()
        self.depth_pred = nn.Linear(dim, 1)
        self.width_pred = nn.Linear(dim, 1)
        self.res_pred = nn.Linear(dim, 1)

    def forward(self, inp):
        heads = (self.depth_pred, self.width_pred, self.res_pred)
        # Returns (depth_logits, width_logits, res_logits).
        return tuple(head(inp) for head in heads)
class LstmAttention(nn.Module):
    """Attention layer after:

    ATTENTION-BASED LSTM FOR PSYCHOLOGICAL STRESS DETECTION FROM SPOKEN
    LANGUAGE USING DISTANT SUPERVISION
    https://arxiv.org/abs/1805.12307

    Output: ``tanh(W @ inp) * inp`` (element-wise).

    NOTE(review): the original implementation additionally computed the
    softmax attention weights ``a = softmax(u)`` and the weighted sum
    ``v = sum(a * inp, dim=-1)`` but never used either - the returned value
    was ``u * inp``.  The dead computation is removed here without changing
    the returned value.  If ``v`` was the intended output per the paper,
    that would be a separate behavioural fix - TODO confirm against callers.
    """

    def __init__(self, dim):
        super().__init__()
        self.attention_weights = nn.Linear(dim, dim)
        # Unused in forward(); retained so existing code touching
        # ``self.softmax`` keeps working (it holds no parameters).
        self.softmax = nn.Softmax(dim=-1)

    def forward(self, inp):
        u = torch.tanh(self.attention_weights(inp))
        return u * inp
class MultiLayerLSTMCell(nn.Module):
    """A long short-term memory (LSTM) cell stacked over multiple layers.

    input_size: number of expected features in the input
    hidden_size: number of features in the hidden state
    num_layers: how many LSTM cells to stack; each layer consumes the
        hidden output of the layer below it. Default: 1
    recurrent_dropout: dropout probability applied to the hidden output of
        every layer except the last (the dropped value is both passed to
        the next layer and stored in the returned states).
    """

    def __init__(self, input_size, hidden_size, num_layers = 1, recurrent_dropout=0):
        super().__init__()
        self.num_layers = num_layers
        self.rnns = nn.ModuleList()
        self.dropout = nn.Dropout(recurrent_dropout)

        # Layer 0 maps input_size -> hidden_size; the rest are
        # hidden_size -> hidden_size.
        for layer_idx in range(num_layers):
            in_dim = input_size if layer_idx == 0 else hidden_size
            self.rnns.append(nn.LSTMCell(in_dim, hidden_size))

    def forward(self, input, hidden_states):
        """Run one time step through all layers.

        hidden_states: list of (h, c) tuples, one per layer.
        Returns the updated list of (h, c) tuples.
        """
        new_hidden_states = []
        layer_input = input
        for layer_idx, cell in enumerate(self.rnns):
            h, c = cell(layer_input, hidden_states[layer_idx])
            # Recurrent dropout between layers (not after the last one).
            if layer_idx < self.num_layers - 1:
                h = self.dropout(h)
            new_hidden_states.append((h, c))
            layer_input = h
        return new_hidden_states
class Highway(nn.Module):
    """Highway network layer stack.

    Code adapted from:
    https://github.com/kefirski/pytorch_RVAE/blob/19103d1298d7d77423c6e7d76dcc190400d7256e/selfModules/highway.py#L5

    Highway networks use learned gating mechanisms to regulate information
    flow, inspired by LSTM gates.  The gates give the network paths for
    information to flow unchanged across layers ("information highways").
    http://papers.nips.cc/paper/5850-training-very-deep-networks

    :param size: feature dimension of the input (kept through all layers)
    :param num_layers: number of highway layers applied in sequence
    :param f: non-linearity applied to the transform branch (e.g. torch.relu)
    """

    def __init__(self, size, num_layers, f):
        super(Highway, self).__init__()

        self.num_layers = num_layers

        # NOTE: the layers live in plain Python lists (not nn.ModuleList) and
        # only their parameters are registered, via _add_to_parameters below.
        # This preserves the original checkpoint key layout
        # ('nonlinear_module_{i}-{j}', ...); do not convert to ModuleList
        # without a state_dict migration.
        self.nonlinear = [nn.Linear(size, size) for _ in range(num_layers)]
        for i, module in enumerate(self.nonlinear):
            self._add_to_parameters(module.parameters(), 'nonlinear_module_{}'.format(i))

        self.linear = [nn.Linear(size, size) for _ in range(num_layers)]
        for i, module in enumerate(self.linear):
            self._add_to_parameters(module.parameters(), 'linear_module_{}'.format(i))

        self.gate = [nn.Linear(size, size) for _ in range(num_layers)]
        for i, module in enumerate(self.gate):
            self._add_to_parameters(module.parameters(), 'gate_module_{}'.format(i))

        self.f = f

    def forward(self, x):
        """
        :param x: tensor with shape of [batch_size, size]
        :return: tensor with shape of [batch_size, size]

        Applies sigma(x) * f(G(x)) + (1 - sigma(x)) * Q(x) per layer, where G
        and Q are affine transformations, f is the configured non-linearity,
        sigma is an affine transformation followed by a sigmoid, and * is
        element-wise multiplication.
        """
        for layer in range(self.num_layers):
            # torch.sigmoid replaces the deprecated F.sigmoid (identical output).
            gate = torch.sigmoid(self.gate[layer](x))

            nonlinear = self.f(self.nonlinear[layer](x))
            linear = self.linear[layer](x)

            x = gate * nonlinear + (1 - gate) * linear

        return x

    def _add_to_parameters(self, parameters, name):
        # Manually register each tensor so the optimizer and .to(device)
        # see the parameters of the list-held Linear modules.
        for i, parameter in enumerate(parameters):
            self.register_parameter(name='{}-{}'.format(name, i), param=parameter)
| 31.946078 | 150 | 0.622833 | import torch
import torch.nn as nn
import torch.nn.functional as F
class AddGate(nn.Module):
def __init__(self, dim):
super().__init__()
self.W_mul = nn.Linear(dim, dim, bias=True)
self.W_add = nn.Linear(dim, dim, bias=True)
self.sigmoid = nn.Sigmoid()
def forward(self, inp):
out_mul = self.sigmoid(self.W_mul(inp))
out_add = torch.tanh(self.W_add(inp))
return out_mul * out_add
class PredictiveHidden(nn.Module):
def __init__(self, dim):
super().__init__()
self.W1 = nn.Linear(dim, dim, bias=True)
self.W2 = nn.Linear(dim, dim, bias=True)
def forward(self, inp1, inp2):
h_pred = torch.tanh(self.W1(inp1) + self.W2(inp2))
return h_pred
class TreeTopologyPred(nn.Module):
def __init__(self, dim):
super().__init__()
self.depth_pred = nn.Linear(dim, 1)
self.width_pred = nn.Linear(dim, 1)
self.res_pred = nn.Linear(dim, 1)
def forward(self, inp):
depth_pred = self.depth_pred(inp)
width_pred = self.width_pred(inp)
res_pred = self.res_pred(inp)
return depth_pred, width_pred, res_pred
class LstmAttention(nn.Module):
def __init__(self, dim):
super().__init__()
self.attention_weights = nn.Linear(dim, dim)
self.softmax = nn.Softmax(dim=-1)
def forward(self, inp):
u = torch.tanh(self.attention_weights(inp))
a = self.softmax(u)
v = torch.sum(a * inp, dim=-1)
return u * inp
class MultiLayerLSTMCell(nn.Module):
def __init__(self, input_size, hidden_size, num_layers = 1, recurrent_dropout=0):
super().__init__()
self.num_layers = num_layers
self.rnns = nn.ModuleList([])
self.dropout = nn.Dropout(recurrent_dropout)
for i in range(num_layers):
if i == 0:
self.rnns.append(nn.LSTMCell(input_size, hidden_size))
else:
self.rnns.append(nn.LSTMCell(hidden_size, hidden_size))
def forward(self, input, hidden_states):
new_hidden_states = []
for i in range(self.num_layers):
if i == 0:
h, c = self.rnns[i](input, hidden_states[i])
else:
h, c = self.rnns[i](h, hidden_states[i])
if i < self.num_layers - 1:
h = self.dropout(h)
new_hidden_states.append((h, c))
return new_hidden_states
class Highway(nn.Module):
def __init__(self, size, num_layers, f):
super(Highway, self).__init__()
self.num_layers = num_layers
self.nonlinear = [nn.Linear(size, size) for _ in range(num_layers)]
for i, module in enumerate(self.nonlinear):
self._add_to_parameters(module.parameters(), 'nonlinear_module_{}'.format(i))
self.linear = [nn.Linear(size, size) for _ in range(num_layers)]
for i, module in enumerate(self.linear):
self._add_to_parameters(module.parameters(), 'linear_module_{}'.format(i))
self.gate = [nn.Linear(size, size) for _ in range(num_layers)]
for i, module in enumerate(self.gate):
self._add_to_parameters(module.parameters(), 'gate_module_{}'.format(i))
self.f = f
def forward(self, x):
for layer in range(self.num_layers):
gate = F.sigmoid(self.gate[layer](x))
nonlinear = self.f(self.nonlinear[layer](x))
linear = self.linear[layer](x)
x = gate * nonlinear + (1 - gate) * linear
return x
def _add_to_parameters(self, parameters, name):
for i, parameter in enumerate(parameters):
self.register_parameter(name='{}-{}'.format(name, i), param=parameter)
| true | true |
f71def27dfce1001fd68a9493b3a1cf29ffe8982 | 13,148 | py | Python | sdk/python/pulumi_azure_nextgen/apimanagement/v20200601preview/subscription.py | test-wiz-sec/pulumi-azure-nextgen | 20a695af0d020b34b0f1c336e1b69702755174cc | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_nextgen/apimanagement/v20200601preview/subscription.py | test-wiz-sec/pulumi-azure-nextgen | 20a695af0d020b34b0f1c336e1b69702755174cc | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_nextgen/apimanagement/v20200601preview/subscription.py | test-wiz-sec/pulumi-azure-nextgen | 20a695af0d020b34b0f1c336e1b69702755174cc | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = ['Subscription']
class Subscription(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
allow_tracing: Optional[pulumi.Input[bool]] = None,
app_type: Optional[pulumi.Input[str]] = None,
display_name: Optional[pulumi.Input[str]] = None,
notify: Optional[pulumi.Input[bool]] = None,
owner_id: Optional[pulumi.Input[str]] = None,
primary_key: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
scope: Optional[pulumi.Input[str]] = None,
secondary_key: Optional[pulumi.Input[str]] = None,
service_name: Optional[pulumi.Input[str]] = None,
sid: Optional[pulumi.Input[str]] = None,
state: Optional[pulumi.Input[str]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
Subscription details.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[bool] allow_tracing: Determines whether tracing can be enabled
:param pulumi.Input[str] app_type: Determines the type of application which send the create user request. Default is legacy publisher portal.
:param pulumi.Input[str] display_name: Subscription name.
:param pulumi.Input[bool] notify: Notify change in Subscription State.
- If false, do not send any email notification for change of state of subscription
- If true, send email notification of change of state of subscription
:param pulumi.Input[str] owner_id: User (user id path) for whom subscription is being created in form /users/{userId}
:param pulumi.Input[str] primary_key: Primary subscription key. If not specified during request key will be generated automatically.
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[str] scope: Scope like /products/{productId} or /apis or /apis/{apiId}.
:param pulumi.Input[str] secondary_key: Secondary subscription key. If not specified during request key will be generated automatically.
:param pulumi.Input[str] service_name: The name of the API Management service.
:param pulumi.Input[str] sid: Subscription entity Identifier. The entity represents the association between a user and a product in API Management.
:param pulumi.Input[str] state: Initial subscription state. If no value is specified, subscription is created with Submitted state. Possible states are * active – the subscription is active, * suspended – the subscription is blocked, and the subscriber cannot call any APIs of the product, * submitted – the subscription request has been made by the developer, but has not yet been approved or rejected, * rejected – the subscription request has been denied by an administrator, * cancelled – the subscription has been cancelled by the developer or administrator, * expired – the subscription reached its expiration date and was deactivated.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['allow_tracing'] = allow_tracing
__props__['app_type'] = app_type
if display_name is None:
raise TypeError("Missing required property 'display_name'")
__props__['display_name'] = display_name
__props__['notify'] = notify
__props__['owner_id'] = owner_id
__props__['primary_key'] = primary_key
if resource_group_name is None:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
if scope is None:
raise TypeError("Missing required property 'scope'")
__props__['scope'] = scope
__props__['secondary_key'] = secondary_key
if service_name is None:
raise TypeError("Missing required property 'service_name'")
__props__['service_name'] = service_name
if sid is None:
raise TypeError("Missing required property 'sid'")
__props__['sid'] = sid
__props__['state'] = state
__props__['created_date'] = None
__props__['end_date'] = None
__props__['expiration_date'] = None
__props__['name'] = None
__props__['notification_date'] = None
__props__['start_date'] = None
__props__['state_comment'] = None
__props__['type'] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:apimanagement/latest:Subscription"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20160707:Subscription"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20161010:Subscription"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20170301:Subscription"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20180101:Subscription"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20180601preview:Subscription"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20190101:Subscription"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20191201:Subscription"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20191201preview:Subscription")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(Subscription, __self__).__init__(
'azure-nextgen:apimanagement/v20200601preview:Subscription',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'Subscription':
"""
Get an existing Subscription resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
return Subscription(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="allowTracing")
def allow_tracing(self) -> pulumi.Output[Optional[bool]]:
"""
Determines whether tracing is enabled
"""
return pulumi.get(self, "allow_tracing")
@property
@pulumi.getter(name="createdDate")
def created_date(self) -> pulumi.Output[str]:
"""
Subscription creation date. The date conforms to the following format: `yyyy-MM-ddTHH:mm:ssZ` as specified by the ISO 8601 standard.
"""
return pulumi.get(self, "created_date")
@property
@pulumi.getter(name="displayName")
def display_name(self) -> pulumi.Output[Optional[str]]:
"""
The name of the subscription, or null if the subscription has no name.
"""
return pulumi.get(self, "display_name")
@property
@pulumi.getter(name="endDate")
def end_date(self) -> pulumi.Output[Optional[str]]:
"""
Date when subscription was cancelled or expired. The setting is for audit purposes only and the subscription is not automatically cancelled. The subscription lifecycle can be managed by using the `state` property. The date conforms to the following format: `yyyy-MM-ddTHH:mm:ssZ` as specified by the ISO 8601 standard.
"""
return pulumi.get(self, "end_date")
@property
@pulumi.getter(name="expirationDate")
def expiration_date(self) -> pulumi.Output[Optional[str]]:
"""
Subscription expiration date. The setting is for audit purposes only and the subscription is not automatically expired. The subscription lifecycle can be managed by using the `state` property. The date conforms to the following format: `yyyy-MM-ddTHH:mm:ssZ` as specified by the ISO 8601 standard.
"""
return pulumi.get(self, "expiration_date")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="notificationDate")
def notification_date(self) -> pulumi.Output[Optional[str]]:
"""
Upcoming subscription expiration notification date. The date conforms to the following format: `yyyy-MM-ddTHH:mm:ssZ` as specified by the ISO 8601 standard.
"""
return pulumi.get(self, "notification_date")
@property
@pulumi.getter(name="ownerId")
def owner_id(self) -> pulumi.Output[Optional[str]]:
"""
The user resource identifier of the subscription owner. The value is a valid relative URL in the format of /users/{userId} where {userId} is a user identifier.
"""
return pulumi.get(self, "owner_id")
@property
@pulumi.getter(name="primaryKey")
def primary_key(self) -> pulumi.Output[Optional[str]]:
"""
Subscription primary key. This property will not be filled on 'GET' operations! Use '/listSecrets' POST request to get the value.
"""
return pulumi.get(self, "primary_key")
@property
@pulumi.getter
def scope(self) -> pulumi.Output[str]:
"""
Scope like /products/{productId} or /apis or /apis/{apiId}.
"""
return pulumi.get(self, "scope")
@property
@pulumi.getter(name="secondaryKey")
def secondary_key(self) -> pulumi.Output[Optional[str]]:
"""
Subscription secondary key. This property will not be filled on 'GET' operations! Use '/listSecrets' POST request to get the value.
"""
return pulumi.get(self, "secondary_key")
@property
@pulumi.getter(name="startDate")
def start_date(self) -> pulumi.Output[Optional[str]]:
"""
Subscription activation date. The setting is for audit purposes only and the subscription is not automatically activated. The subscription lifecycle can be managed by using the `state` property. The date conforms to the following format: `yyyy-MM-ddTHH:mm:ssZ` as specified by the ISO 8601 standard.
"""
return pulumi.get(self, "start_date")
@property
@pulumi.getter
def state(self) -> pulumi.Output[str]:
"""
Subscription state. Possible states are * active – the subscription is active, * suspended – the subscription is blocked, and the subscriber cannot call any APIs of the product, * submitted – the subscription request has been made by the developer, but has not yet been approved or rejected, * rejected – the subscription request has been denied by an administrator, * cancelled – the subscription has been cancelled by the developer or administrator, * expired – the subscription reached its expiration date and was deactivated.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="stateComment")
def state_comment(self) -> pulumi.Output[Optional[str]]:
"""
Optional subscription comment added by an administrator when the state is changed to the 'rejected'.
"""
return pulumi.get(self, "state_comment")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type for API Management resource.
"""
return pulumi.get(self, "type")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| 52.174603 | 730 | 0.668239 |
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = ['Subscription']
class Subscription(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
allow_tracing: Optional[pulumi.Input[bool]] = None,
app_type: Optional[pulumi.Input[str]] = None,
display_name: Optional[pulumi.Input[str]] = None,
notify: Optional[pulumi.Input[bool]] = None,
owner_id: Optional[pulumi.Input[str]] = None,
primary_key: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
scope: Optional[pulumi.Input[str]] = None,
secondary_key: Optional[pulumi.Input[str]] = None,
service_name: Optional[pulumi.Input[str]] = None,
sid: Optional[pulumi.Input[str]] = None,
state: Optional[pulumi.Input[str]] = None,
__props__=None,
__name__=None,
__opts__=None):
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['allow_tracing'] = allow_tracing
__props__['app_type'] = app_type
if display_name is None:
raise TypeError("Missing required property 'display_name'")
__props__['display_name'] = display_name
__props__['notify'] = notify
__props__['owner_id'] = owner_id
__props__['primary_key'] = primary_key
if resource_group_name is None:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
if scope is None:
raise TypeError("Missing required property 'scope'")
__props__['scope'] = scope
__props__['secondary_key'] = secondary_key
if service_name is None:
raise TypeError("Missing required property 'service_name'")
__props__['service_name'] = service_name
if sid is None:
raise TypeError("Missing required property 'sid'")
__props__['sid'] = sid
__props__['state'] = state
__props__['created_date'] = None
__props__['end_date'] = None
__props__['expiration_date'] = None
__props__['name'] = None
__props__['notification_date'] = None
__props__['start_date'] = None
__props__['state_comment'] = None
__props__['type'] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:apimanagement/latest:Subscription"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20160707:Subscription"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20161010:Subscription"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20170301:Subscription"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20180101:Subscription"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20180601preview:Subscription"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20190101:Subscription"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20191201:Subscription"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20191201preview:Subscription")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(Subscription, __self__).__init__(
'azure-nextgen:apimanagement/v20200601preview:Subscription',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'Subscription':
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
return Subscription(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="allowTracing")
def allow_tracing(self) -> pulumi.Output[Optional[bool]]:
return pulumi.get(self, "allow_tracing")
@property
@pulumi.getter(name="createdDate")
def created_date(self) -> pulumi.Output[str]:
return pulumi.get(self, "created_date")
@property
@pulumi.getter(name="displayName")
def display_name(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "display_name")
@property
@pulumi.getter(name="endDate")
def end_date(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "end_date")
@property
@pulumi.getter(name="expirationDate")
def expiration_date(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "expiration_date")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
return pulumi.get(self, "name")
@property
@pulumi.getter(name="notificationDate")
def notification_date(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "notification_date")
@property
@pulumi.getter(name="ownerId")
def owner_id(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "owner_id")
@property
@pulumi.getter(name="primaryKey")
def primary_key(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "primary_key")
@property
@pulumi.getter
def scope(self) -> pulumi.Output[str]:
return pulumi.get(self, "scope")
@property
@pulumi.getter(name="secondaryKey")
def secondary_key(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "secondary_key")
@property
@pulumi.getter(name="startDate")
def start_date(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "start_date")
@property
@pulumi.getter
def state(self) -> pulumi.Output[str]:
return pulumi.get(self, "state")
@property
@pulumi.getter(name="stateComment")
def state_comment(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "state_comment")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
return pulumi.get(self, "type")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| true | true |
f71defd528de6547eb1596d3cd76b72ff5dc824b | 1,262 | py | Python | EKMRC/src/test_gnn.py | yyHaker/EKMRC-is-your-need | 483e2d9d822907ef36a39333933fd939dac1cea0 | [
"Apache-2.0"
] | 4 | 2020-09-21T01:50:21.000Z | 2021-03-23T10:19:09.000Z | EKMRC/src/test_gnn.py | yyHaker/EKMRC-is-your-need | 483e2d9d822907ef36a39333933fd939dac1cea0 | [
"Apache-2.0"
] | null | null | null | EKMRC/src/test_gnn.py | yyHaker/EKMRC-is-your-need | 483e2d9d822907ef36a39333933fd939dac1cea0 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
'''
@File : test_gnn.py
@Author : yyhaker
@Contact : 572176750@qq.com
@Time : 2020/04/22 15:19:24
'''
# here put the import lib
import torch
from torch_geometric.data import Data
import torch.nn.functional as F
from torch_geometric.nn import GCNConv
edge_index = torch.tensor([[0, 2],
[2, 0],
[3, 2],
[2, 3]], dtype=torch.long)
x = torch.tensor([[-1], [0], [1]], dtype=torch.float)
data = Data(x=x, edge_index=edge_index.t().contiguous())
device = torch.device('cuda')
data = data.to(device)
class Net(torch.nn.Module):
def __init__(self):
super(Net, self).__init__()
self.conv1 = GCNConv(1, 16)
self.conv2 = GCNConv(16, 2)
def forward(self, data):
x, edge_index = data.x, data.edge_index
x = self.conv1(x, edge_index)
x = F.relu(x)
x = F.dropout(x, training=self.training)
x = self.conv2(x, edge_index)
return F.log_softmax(x, dim=1)
model = Net().to(device)
optimizer = torch.optim.Adam(model.parameters(), lr=0.01, weight_decay=5e-4)
model.train()
for epoch in range(200):
# optimizer.zero_grad()
out = model(data) | 24.745098 | 76 | 0.588748 |
import torch
from torch_geometric.data import Data
import torch.nn.functional as F
from torch_geometric.nn import GCNConv
edge_index = torch.tensor([[0, 2],
[2, 0],
[3, 2],
[2, 3]], dtype=torch.long)
x = torch.tensor([[-1], [0], [1]], dtype=torch.float)
data = Data(x=x, edge_index=edge_index.t().contiguous())
device = torch.device('cuda')
data = data.to(device)
class Net(torch.nn.Module):
def __init__(self):
super(Net, self).__init__()
self.conv1 = GCNConv(1, 16)
self.conv2 = GCNConv(16, 2)
def forward(self, data):
x, edge_index = data.x, data.edge_index
x = self.conv1(x, edge_index)
x = F.relu(x)
x = F.dropout(x, training=self.training)
x = self.conv2(x, edge_index)
return F.log_softmax(x, dim=1)
model = Net().to(device)
optimizer = torch.optim.Adam(model.parameters(), lr=0.01, weight_decay=5e-4)
model.train()
for epoch in range(200):
out = model(data) | true | true |
f71df01b8099b2c1ebabc7c547e4cedc327ddd71 | 1,835 | py | Python | aliyun-python-sdk-vs/aliyunsdkvs/request/v20181212/ModifyDeviceCaptureRequest.py | jia-jerry/aliyun-openapi-python-sdk | e90f3683a250cfec5b681b5f1d73a68f0dc9970d | [
"Apache-2.0"
] | 1 | 2021-03-08T02:59:17.000Z | 2021-03-08T02:59:17.000Z | aliyun-python-sdk-vs/aliyunsdkvs/request/v20181212/ModifyDeviceCaptureRequest.py | jia-jerry/aliyun-openapi-python-sdk | e90f3683a250cfec5b681b5f1d73a68f0dc9970d | [
"Apache-2.0"
] | 1 | 2020-05-31T14:51:47.000Z | 2020-05-31T14:51:47.000Z | aliyun-python-sdk-vs/aliyunsdkvs/request/v20181212/ModifyDeviceCaptureRequest.py | jia-jerry/aliyun-openapi-python-sdk | e90f3683a250cfec5b681b5f1d73a68f0dc9970d | [
"Apache-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkvs.endpoint import endpoint_data
class ModifyDeviceCaptureRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'vs', '2018-12-12', 'ModifyDeviceCapture','vs')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_Video(self):
return self.get_query_params().get('Video')
def set_Video(self,Video):
self.add_query_param('Video',Video)
def get_Id(self):
return self.get_query_params().get('Id')
def set_Id(self,Id):
self.add_query_param('Id',Id)
def get_Image(self):
return self.get_query_params().get('Image')
def set_Image(self,Image):
self.add_query_param('Image',Image)
def get_OwnerId(self):
return self.get_query_params().get('OwnerId')
def set_OwnerId(self,OwnerId):
self.add_query_param('OwnerId',OwnerId) | 32.767857 | 76 | 0.749319 |
from aliyunsdkcore.request import RpcRequest
from aliyunsdkvs.endpoint import endpoint_data
class ModifyDeviceCaptureRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'vs', '2018-12-12', 'ModifyDeviceCapture','vs')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_Video(self):
return self.get_query_params().get('Video')
def set_Video(self,Video):
self.add_query_param('Video',Video)
def get_Id(self):
return self.get_query_params().get('Id')
def set_Id(self,Id):
self.add_query_param('Id',Id)
def get_Image(self):
return self.get_query_params().get('Image')
def set_Image(self,Image):
self.add_query_param('Image',Image)
def get_OwnerId(self):
return self.get_query_params().get('OwnerId')
def set_OwnerId(self,OwnerId):
self.add_query_param('OwnerId',OwnerId) | true | true |
f71df0441d2b046fea41993a5a9dd7faa1f1b11c | 2,639 | py | Python | examples/ex_graph.py | MicrohexHQ/src | c079873c182067002b6a7a5564094ea0a4fe0aef | [
"BSD-3-Clause"
] | 2 | 2019-07-08T11:58:27.000Z | 2019-07-08T13:23:57.000Z | examples/ex_graph.py | Bia10/src | 15b9ab2535222e492cd21b8528c27f763fb799d6 | [
"BSD-3-Clause"
] | null | null | null | examples/ex_graph.py | Bia10/src | 15b9ab2535222e492cd21b8528c27f763fb799d6 | [
"BSD-3-Clause"
] | null | null | null | from __future__ import print_function
# -----------------------------------------------------------------------
# This is an example illustrating how to use the user graphing functionality
# in Python
# (c) Hex-Rays
#
from idaapi import *
class GraphCloser(action_handler_t):
def __init__(self, graph):
action_handler_t.__init__(self)
self.graph = graph
def activate(self, ctx):
self.graph.Close()
def update(self, ctx):
return AST_ENABLE_ALWAYS
class ColorChanger(action_handler_t):
def __init__(self, graph):
action_handler_t.__init__(self)
self.graph = graph
def activate(self, ctx):
self.graph.color = self.graph.color ^ 0xffffff
self.graph.Refresh()
return 1
def update(self, ctx):
return AST_ENABLE_ALWAYS
class MyGraph(GraphViewer):
def __init__(self, funcname, result):
self.title = "call graph of " + funcname
GraphViewer.__init__(self, self.title)
self.funcname = funcname
self.result = result
self.color = 0xff00ff
def OnRefresh(self):
self.Clear()
id = self.AddNode((self.funcname, self.color))
for x in self.result.keys():
callee = self.AddNode((x, self.color))
self.AddEdge(id, callee)
return True
def OnGetText(self, node_id):
return self[node_id]
def OnPopup(self, form, popup_handle):
# graph closer
actname = "graph_closer:%s" % self.title
desc = action_desc_t(actname, "Close: %s" % self.title, GraphCloser(self))
attach_dynamic_action_to_popup(form, popup_handle, desc)
# color changer
actname = "color_changer:%s" % self.title
desc = action_desc_t(actname, "Change colors: %s" % self.title, ColorChanger(self))
attach_dynamic_action_to_popup(form, popup_handle, desc)
def show_graph():
f = idaapi.get_func(here())
if not f:
print("Must be in a function")
return
# Iterate through all function instructions and take only call instructions
result = {}
tmp = idaapi.insn_t()
for x in [x for x in FuncItems(f.start_ea) if (idaapi.decode_insn(tmp, x) and idaapi.is_call_insn(tmp))]:
for xref in XrefsFrom(x, idaapi.XREF_FAR):
if not xref.iscode: continue
t = get_func_name(xref.to)
if not t:
t = hex(xref.to)
result[t] = True
g = MyGraph(get_func_name(f.start_ea), result)
if g.Show():
return g
else:
return None
g = show_graph()
if g:
print("Graph created and displayed!")
| 29 | 109 | 0.610837 | from __future__ import print_function
from idaapi import *
class GraphCloser(action_handler_t):
def __init__(self, graph):
action_handler_t.__init__(self)
self.graph = graph
def activate(self, ctx):
self.graph.Close()
def update(self, ctx):
return AST_ENABLE_ALWAYS
class ColorChanger(action_handler_t):
def __init__(self, graph):
action_handler_t.__init__(self)
self.graph = graph
def activate(self, ctx):
self.graph.color = self.graph.color ^ 0xffffff
self.graph.Refresh()
return 1
def update(self, ctx):
return AST_ENABLE_ALWAYS
class MyGraph(GraphViewer):
def __init__(self, funcname, result):
self.title = "call graph of " + funcname
GraphViewer.__init__(self, self.title)
self.funcname = funcname
self.result = result
self.color = 0xff00ff
def OnRefresh(self):
self.Clear()
id = self.AddNode((self.funcname, self.color))
for x in self.result.keys():
callee = self.AddNode((x, self.color))
self.AddEdge(id, callee)
return True
def OnGetText(self, node_id):
return self[node_id]
def OnPopup(self, form, popup_handle):
actname = "graph_closer:%s" % self.title
desc = action_desc_t(actname, "Close: %s" % self.title, GraphCloser(self))
attach_dynamic_action_to_popup(form, popup_handle, desc)
actname = "color_changer:%s" % self.title
desc = action_desc_t(actname, "Change colors: %s" % self.title, ColorChanger(self))
attach_dynamic_action_to_popup(form, popup_handle, desc)
def show_graph():
f = idaapi.get_func(here())
if not f:
print("Must be in a function")
return
result = {}
tmp = idaapi.insn_t()
for x in [x for x in FuncItems(f.start_ea) if (idaapi.decode_insn(tmp, x) and idaapi.is_call_insn(tmp))]:
for xref in XrefsFrom(x, idaapi.XREF_FAR):
if not xref.iscode: continue
t = get_func_name(xref.to)
if not t:
t = hex(xref.to)
result[t] = True
g = MyGraph(get_func_name(f.start_ea), result)
if g.Show():
return g
else:
return None
g = show_graph()
if g:
print("Graph created and displayed!")
| true | true |
f71df0d4285088125c53c87edad42557c8ce5e8c | 542 | py | Python | alembic/versions/2016111514_add_primary_key_to_worker__54725ffc62f3.py | millerjohnp/codalab-worksheets | d6fc37864e7a8966380fc9d73865b10e434d6678 | [
"Apache-2.0"
] | 1 | 2021-01-02T03:33:58.000Z | 2021-01-02T03:33:58.000Z | alembic/versions/2016111514_add_primary_key_to_worker__54725ffc62f3.py | millerjohnp/codalab-worksheets | d6fc37864e7a8966380fc9d73865b10e434d6678 | [
"Apache-2.0"
] | null | null | null | alembic/versions/2016111514_add_primary_key_to_worker__54725ffc62f3.py | millerjohnp/codalab-worksheets | d6fc37864e7a8966380fc9d73865b10e434d6678 | [
"Apache-2.0"
] | 1 | 2020-03-13T08:16:17.000Z | 2020-03-13T08:16:17.000Z | """Add primary key to worker_dependency
Revision ID: 54725ffc62f3
Revises: 730e212b938
Create Date: 2016-11-15 14:02:41.621934
"""
# revision identifiers, used by Alembic.
revision = '54725ffc62f3'
down_revision = '730e212b938'
from alembic import op
def upgrade():
# Cannot add primary key with auto-increment natively in alembic
# Note that this is MySQL-specific
op.execute("ALTER TABLE `worker_dependency` ADD `id` INT PRIMARY KEY AUTO_INCREMENT FIRST;")
def downgrade():
op.drop_column('worker_dependency', 'id')
| 22.583333 | 96 | 0.745387 |
# Revision identifiers used by Alembic to order migrations.
revision = '54725ffc62f3'
down_revision = '730e212b938'
from alembic import op
def upgrade():
    # Raw SQL: Alembic cannot add an auto-increment primary key natively
    # (statement is MySQL-specific).
    op.execute("ALTER TABLE `worker_dependency` ADD `id` INT PRIMARY KEY AUTO_INCREMENT FIRST;")
def downgrade():
    # Reverse of upgrade(): drop the synthetic id column.
    op.drop_column('worker_dependency', 'id')
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.