hexsha stringlengths 40 40 | size int64 3 1.03M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 972 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 972 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 972 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 3 1.03M | avg_line_length float64 1.13 941k | max_line_length int64 2 941k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
cc943e2b995b45726437e5ee883f6be80fde8633 | 4,730 | py | Python | fifth_semester/Visualizer/visualizer.py | MajronMan/agh_stuff | d045e3bd47ac17880526203d9993d9b2389a9ffe | [
"Unlicense"
] | 2 | 2019-03-02T19:31:57.000Z | 2019-04-03T19:54:39.000Z | fifth_semester/Visualizer/visualizer.py | MajronMan/agh_stuff | d045e3bd47ac17880526203d9993d9b2389a9ffe | [
"Unlicense"
] | null | null | null | fifth_semester/Visualizer/visualizer.py | MajronMan/agh_stuff | d045e3bd47ac17880526203d9993d9b2389a9ffe | [
"Unlicense"
] | 1 | 2019-04-03T18:26:50.000Z | 2019-04-03T18:26:50.000Z | import struct
import numpy as np
import matplotlib.pyplot as plt
import pyaudio
import threading
from pyqtgraph.Qt import QtGui, QtCore
import pyqtgraph as pg
class AudioRecorder():
    """Capture mono 16-bit audio from the default input device via PyAudio."""

    FORMAT = pyaudio.paInt16  # 16-bit signed integer samples
    RATE = 44100              # sample rate in Hz
    CHUNK = 1024              # frames read per call
    CHANNELS = 1              # mono input

    def __init__(self):
        # Open the input stream once; it stays open for the object's lifetime.
        self.stream = self.init_pyaudio()

    def init_pyaudio(self):
        """Open and return a PyAudio input stream using the class settings."""
        p = pyaudio.PyAudio()
        return p.open(format=self.FORMAT,
                      channels=self.CHANNELS,
                      rate=self.RATE,
                      input=True,
                      frames_per_buffer=self.CHUNK)

    def parameters(self):
        """Return (time_axis, fft_frequencies) for one CHUNK of samples."""
        x = np.linspace(0, self.CHUNK / self.RATE, self.CHUNK)
        frequencies = np.fft.fftfreq(self.CHUNK, x[1] - x[0])
        return x, frequencies

    def read(self):
        """Read one CHUNK of frames and return them as a numpy int array."""
        # BUG FIX: pyaudio's Stream.read() requires the number of frames to
        # read; the original called it with no argument, raising TypeError.
        raw = self.stream.read(self.CHUNK)
        return np.array(struct.unpack("%dh" % self.CHUNK, raw))
class AudioTransformer():
    """Maintain an exponentially smoothed time signal and FFT of live audio."""

    SMOOTHING = 0.75  # weight of the previous frame in the running average
    WIDTH = 1200
    HEIGHT = 800

    def __init__(self, recorder=None):
        """:param recorder: audio source exposing parameters() and read();
        a fresh AudioRecorder is created when omitted."""
        # BUG FIX: the original default `recorder=AudioRecorder()` was
        # evaluated once at definition time, opening an audio stream on
        # import and sharing one recorder across every instance.
        self.recorder = recorder if recorder is not None else AudioRecorder()
        self.x, self.frequencies = self.recorder.parameters()
        # BUG FIX: `x` and `y` were undefined names here (NameError);
        # the instance attributes were intended.
        self.y = np.zeros(self.x.shape)
        self.FFT = np.abs(np.fft.fft(self.y)).clip(min=1)

    def transform_next(self):
        """Read the next chunk, update the smoothed signal and FFT, and
        return (x, y, frequencies, FFT)."""
        new_data = self.recorder.read()
        self.y = self.y * self.SMOOTHING
        self.y += (1 - self.SMOOTHING) * new_data
        # BUG FIX: `y` was an undefined name here as well; windowed FFT of
        # the smoothed signal, floored at 1 to keep log-style plots sane.
        transformed = (np.abs(np.fft.fft(self.y))
                       * np.blackman(len(self.y))).clip(min=1)
        self.FFT = self.FFT * self.SMOOTHING
        self.FFT += (1 - self.SMOOTHING) * transformed
        # Suppress low-magnitude noise bins.
        self.FFT = np.where(self.FFT > 11, self.FFT, 1)
        return (self.x, self.y, self.frequencies, self.FFT)
class Plotter():
    """Qt/pyqtgraph window that polls an AudioTransformer and live-plots the
    raw signal, its FFT, and/or a visualization until stopped."""

    WIDTH = 800
    HEIGHT = 600

    def __init__(self, transformer=None, visualization=None, show_signal=False,
                 show_transform=False, show_visualization=True):
        # BUG FIX: the original default `transformer=AudioTransformer()` was
        # evaluated once at definition time, which opened an audio stream on
        # import and shared a single transformer between Plotter instances.
        self.running = True
        self.transformer = (transformer if transformer is not None
                            else AudioTransformer())
        self.visualizer = Visualizer(visualization)
        self.show_visualization = show_visualization
        self.show_transform = show_transform
        self.show_signal = show_signal

    def stop(self):
        """Stop the plotting loop; connected to Qt's aboutToQuit signal."""
        self.running = False

    def init_widget(self):
        """Create the Qt application and main window; return the layout
        widget that the individual plots are added to."""
        app = QtGui.QApplication([])
        app.aboutToQuit.connect(self.stop)
        w = QtGui.QMainWindow()
        widget = pg.GraphicsLayoutWidget()
        w.show()
        w.resize(self.WIDTH, self.HEIGHT)
        w.setCentralWidget(widget)
        w.setWindowTitle('VISUALIZER')
        return widget

    def plot(self):
        """Run the main loop: pull transformed audio and refresh whichever
        plots are enabled until stop() is called."""
        widget = self.init_widget()
        if self.show_signal:
            signal_plot = widget.addPlot(row=0, col=0)
            # Full 16-bit sample range.
            signal_plot.setRange(yRange=[-2 ** 15, 2 ** 15])
        if self.show_transform:
            transform_plot = widget.addPlot(row=1, col=0)
            transform_plot.setRange(yRange=[0, 6])
        if self.show_visualization:
            visualization_plot = widget.addPlot(row=2, col=0)
            visualization_plot.setRange(yRange=[0, 10000])
        while self.running:
            x, y, frequencies, FFT = self.transformer.transform_next()
            if self.show_signal:
                signal_plot.plot(x, y, clear=True)
            if self.show_transform:
                transform_plot.plot(frequencies, FFT, clear=True)
            if self.show_visualization:
                self.visualizer.visualize(x, y, frequencies, FFT,
                                          visualization_plot)
            # Keep the Qt event loop responsive between frames.
            pg.QtGui.QApplication.processEvents()
class Visualizer():
    """Render a configurable visualization of the audio signal/FFT."""

    # BUG FIX: prepareFFT/signal_power read self.CHUNK, which was never
    # defined anywhere (AttributeError). Default it to AudioRecorder.CHUNK
    # so the two classes stay consistent; instances may still override it.
    CHUNK = 1024

    def __init__(self, visualization):
        # NOTE(review): `visualizations` is expected to be a module-level
        # registry mapping names to drawing callables; it is not defined in
        # this file -- TODO confirm where it should come from.
        # BUG FIX: guard the `None` default that Plotter passes so that
        # constructing a Plotter without a visualization does not crash.
        if visualization is None:
            self.visualization = None
        else:
            self.visualization = visualizations[visualization]

    def prepareFFT(self, xRange, yRange, binNumber, lines):
        """Allocate the arrays used to draw an FFT-based visualization."""
        FFT = np.zeros(self.CHUNK)
        mem = np.zeros((self.CHUNK, self.CHUNK))
        vx = np.arange(binNumber)
        vy = np.arange(0, yRange)
        q = np.zeros((lines, binNumber))
        return FFT, mem, vx, vy, q

    def signal_power(self, fft):
        """Return the normalized total power of an FFT magnitude array."""
        p = np.sum(fft)
        return p / (self.CHUNK * 20000)

    def calculate_mean(self, binNumber, fft):
        """Average the upper half of `fft` into `binNumber` equal bins."""
        mean = np.zeros(binNumber)
        middle = fft.shape[0] // 2
        delta = middle // binNumber
        for i in range(binNumber):
            mean[i] = np.mean(fft[middle + delta * i: middle + delta * (i + 1)])
        return mean

    def visualize(self, x, y, frequencies, FFT, plot):
        """Draw one frame onto `plot` (no-op placeholder).

        BUG FIX: `self` was missing from the signature, so the bound call in
        Plotter.plot passed six arguments to a five-parameter function.
        """
        pass
if __name__ == "__main__":
    try:
        # BUG FIX: the old code instantiated Visualizer() without its
        # required argument and called methods (initialize_vars, plot) that
        # do not exist on Visualizer.  The plotting loop lives on Plotter,
        # so drive the pipeline through it instead, stopping the loop after
        # the join times out.
        plotter = Plotter(show_signal=False, show_transform=False,
                          show_visualization=True)
        plotting = threading.Thread(target=plotter.plot)
        plotting.start()
        plotting.join(1000)
        plotter.stop()
    except ValueError:
        pass
| 31.118421 | 143 | 0.596406 |
8ed00a1fffdff79f5790a641332f6eae2ed82d4c | 2,658 | py | Python | trace_feature/tests/models/test_method.py | vitorbribas/trace_feature | 7bfc79d01a6a28418d966d7a7044ad3868c6e203 | [
"MIT"
] | null | null | null | trace_feature/tests/models/test_method.py | vitorbribas/trace_feature | 7bfc79d01a6a28418d966d7a7044ad3868c6e203 | [
"MIT"
] | null | null | null | trace_feature/tests/models/test_method.py | vitorbribas/trace_feature | 7bfc79d01a6a28418d966d7a7044ad3868c6e203 | [
"MIT"
] | null | null | null | import pytest
import json
import os
from trace_feature.core.models import Method
class TestMethodInstance:
    """Unit tests for construction and attribute handling of Method."""

    @pytest.fixture
    def method_attributes(self):
        '''Returns a dictionary of attributes for a method'''
        rb_path = '/home/user/documents/example-project/app/models/file_example.rb'
        return {
            'line': 2,
            'method_id': rb_path + 'example_method' + '2',
            'method_name': 'example_method',
            'class_name': 'Example',
            'class_path': rb_path,
            'abc_score': 3,
            'complexity': 1,
            'number_of_lines': 5,
            'content': 'def example_method\n end\n\n',
        }

    @pytest.fixture
    def empty_method(self):
        '''Returns a Method with empty or blank attributes'''
        return Method()

    @pytest.fixture
    def method(self, method_attributes):
        '''Returns a Method whose attributes are set from the fixture dict'''
        return Method(**method_attributes)

    def test_default_initial_attributes(self, empty_method):
        # A bare Method starts with a None line, zeroed metrics and
        # empty-string identifiers.
        assert empty_method.line is None
        assert empty_method.method_id == ""
        assert empty_method.method_name == ""
        assert empty_method.class_name == ""
        assert empty_method.class_path == ""
        assert empty_method.abc_score == 0
        assert empty_method.complexity == 0
        assert empty_method.number_of_lines == 0
        assert empty_method.content == ""

    def test_setting_attributes(self, method, method_attributes):
        # Every constructor keyword must land on the attribute of the
        # same name, unchanged.
        for attr, expected in method_attributes.items():
            assert getattr(method, attr) == expected

    def test_str_print(self, method):
        assert method.__str__() == ''

    def test_obj_dict(self, method):
        assert method.obj_dict() == method.__dict__
| 38.521739 | 110 | 0.692626 |
95dd59409dea232b2262fa300988a7aa6f73fe73 | 625 | py | Python | Source/FaceRecognition/Utils/ResourceLocalizer.py | robertkarol/ReDe-Multiagent-Face-Recognition-System | df17cebecc51b2fafb01e07a9bb68e9e4e04163a | [
"MIT"
] | null | null | null | Source/FaceRecognition/Utils/ResourceLocalizer.py | robertkarol/ReDe-Multiagent-Face-Recognition-System | df17cebecc51b2fafb01e07a9bb68e9e4e04163a | [
"MIT"
] | 7 | 2020-04-24T08:22:20.000Z | 2021-05-21T16:11:52.000Z | Source/FaceRecognition/Utils/ResourceLocalizer.py | robertkarol/ReDe-Multiagent-Face-Recognition-System | df17cebecc51b2fafb01e07a9bb68e9e4e04163a | [
"MIT"
] | 1 | 2020-04-26T15:05:07.000Z | 2020-04-26T15:05:07.000Z | from Utils.Singleton import SingletonMeta
import configparser
class ResourceLocalizer(metaclass=SingletonMeta):
    """Singleton that resolves resource locations from an INI file."""

    def __init__(self, resource_file):
        # Parse the resource configuration once; the read-only properties
        # below only look values up in the parsed result.
        parser = configparser.ConfigParser()
        parser.read(resource_file)
        self.__parser = parser

    @property
    def facenet_model(self):
        """Location of the FaceNet model."""
        return self.__parser['MODELS']['FACENET_MODEL']

    @property
    def recognition_system_configuration_file(self):
        """Configuration file for the recognition system."""
        return self.__parser['RECOGNITION_SYSTEM']['CONFIGURATION_FILE']

    @property
    def detection_system_configuration_file(self):
        """Configuration file for the detection system."""
        return self.__parser['DETECTION_SYSTEM']['CONFIGURATION_FILE']
c4e36f65bfd14081a18e6f719378abebdacf0587 | 1,240 | py | Python | 12_challenge/12_challenge.py | monoranjanqxf2/wtfiswronghere | a99f1a9b9920bec86e39e7efd6e8dba5506d94d1 | [
"MIT"
] | null | null | null | 12_challenge/12_challenge.py | monoranjanqxf2/wtfiswronghere | a99f1a9b9920bec86e39e7efd6e8dba5506d94d1 | [
"MIT"
] | null | null | null | 12_challenge/12_challenge.py | monoranjanqxf2/wtfiswronghere | a99f1a9b9920bec86e39e7efd6e8dba5506d94d1 | [
"MIT"
] | 1 | 2020-01-06T13:10:39.000Z | 2020-01-06T13:10:39.000Z | """
We will use this script to teach Python to absolute beginners
The script is an example of Fizz-Buzz implemented in Python
The FizzBuzz problem:
For all integers between 1 and 99 (include both):
# print fizz for multiples of 3
# print buzz for multiples of 5
	# print fizzbuzz for multiples of 3 and 5
"""
class FizzBuzz():
    """Classic FizzBuzz, written as a teaching example."""

    def __init__(self):
        "Initializer"
        # These values are deliberately stored as attributes (the original
        # exercise calls them redundant); a later lesson tightens this up.
        self.num1 = 3
        self.num2 = 5
        self.three_mul = 'fizz'
        self.five_mul = 'buzz'

    def fizzbuzz(self, max_num):
        "This method implements FizzBuzz"
        # Walk every integer from 1 through max_num inclusive and print a
        # line only for multiples of 3 and/or 5.
        for number in range(1, max_num + 1):
            by_three = number % self.num1 == 0
            by_five = number % self.num2 == 0
            if by_three and by_five:
                print(number, self.three_mul + self.five_mul)
            elif by_three:
                print(number, self.three_mul)
            elif by_five:
                print(number, self.five_mul)
#----START OF SCRIPT
if __name__ == '__main__':
    # Exercise the FizzBuzz class over 1..100.
    demo = FizzBuzz()
    demo.fizzbuzz(100)
2e09cd46e58420b5abe701a733abcbe75e0d8c43 | 4,316 | py | Python | generators/ahoughton.py | juangallostra/moonboard | d4a35857d480ee4bed06faee44e0347e1070b6b8 | [
"MIT"
] | null | null | null | generators/ahoughton.py | juangallostra/moonboard | d4a35857d480ee4bed06faee44e0347e1070b6b8 | [
"MIT"
] | null | null | null | generators/ahoughton.py | juangallostra/moonboard | d4a35857d480ee4bed06faee44e0347e1070b6b8 | [
"MIT"
] | null | null | null | from typing import Any, List
from selenium import webdriver
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.chrome.options import Options
from generators.base_generator import BaseGenerator
from selenium.webdriver.common.by import By
import os
class AhoughtonGenerator(BaseGenerator):
    """
    Generate problems from the Ahoughton website.
    Inherits from BaseGenerator, which assumes the class
    implements a generate() method.

    :param year: Moonboard layout year for which we want to generate problems. Defaults to 2016.
    :type year: int, optional
    :param driver_path: path to the chromedriver binary; when empty or
        missing, the `chromedriver_binary` package is used instead.
    :type driver_path: str, optional
    """

    def __init__(self, year: int = 2016, driver_path: str = "") -> None:
        # CSS selectors used to identify the elements on the page.
        # Directly extracted from the website.
        self.MOVE_CSS_CLASS = 'm-fadeIn'
        self.GENERATE_BUTTON_CSS_CLASS = 'red'
        self.CSS_MOONBOARD_LAYOUT_SELECTORS = {
            2016: 'div.option:nth-child(1) > label:nth-child(2) > input:nth-child(1)',
            2017: 'div.option:nth-child(1) > label:nth-child(3) > input:nth-child(1)'
        }
        self.year = year
        self.driver_path = driver_path
        super().__init__()

    def _configure_chrome_driver(self, *args, headless: bool = True) -> Options:
        """
        Set and return the configuration of the driver used to load the page
        and generate a new problem. This is a Chrome-specific configuration.

        :param *args: command-line arguments to pass to Chrome
        :param headless: whether to launch the browser in headless mode. Defaults to True.
        :type headless: bool, optional
        :return: driver configuration options
        :rtype: Options
        """
        chrome_options = Options()
        chrome_options.headless = headless
        for arg in args:
            chrome_options.add_argument(arg)
        return chrome_options

    def _get_chrome_driver(self) -> webdriver:
        """
        Get the configured driver used to load the page and generate a new problem.

        :return: the configured driver
        :rtype: webdriver
        """
        options = self._configure_chrome_driver(
            "--disable-extensions",
            "--disable-gpu",
            headless=True
        )
        if not os.path.isfile(self.driver_path):
            # Importing chromedriver_binary adds a bundled chromedriver to
            # PATH as a side effect, so no explicit Service is needed.
            import chromedriver_binary
            return webdriver.Chrome(options=options)
        return webdriver.Chrome(
            service=Service(self.driver_path),
            options=options
        )

    def _parse_moves(self, moves: List[Any]) -> List[str]:
        """
        Given the list of move elements extracted from the website, parse them
        into a list of strings where each element is a hold coordinate.

        :param moves: List of move elements extracted from the website html
        :type moves: List[Any]
        :return: Processed list of moves as moonboard coordinates (['D18', 'G13', ...])
        :rtype: List[str]
        """
        parsed_moves = []
        for move in moves:
            # The element id encodes the hold coordinate, e.g. 'D18'.
            move_coords = move.get_attribute('id')
            parsed_moves.append(move_coords)
        return parsed_moves

    def generate(self) -> List[str]:
        """
        Generate a new problem from the Ahoughton website.

        :return: The list of hold coordinates generated by the website
        :rtype: List[str]
        """
        # BUG FIX: the return annotation claimed `str` but the method has
        # always returned a list of coordinate strings.
        driver = self._get_chrome_driver()
        try:
            driver.get("https://ahoughton.com/moon")
            # Select moonboard layout
            driver.find_element(
                By.CSS_SELECTOR,
                self.CSS_MOONBOARD_LAYOUT_SELECTORS[self.year]).click()
            # Generate a new climb
            driver.find_elements(By.CLASS_NAME, self.GENERATE_BUTTON_CSS_CLASS)[
                0].click()
            # Get moves and parse them while the elements are still attached
            # to the live DOM.
            moves = driver.find_elements(By.CLASS_NAME, self.MOVE_CSS_CLASS)
            return self._parse_moves(moves)
        finally:
            # BUG FIX: the driver was never shut down (the close() call was
            # commented out), leaking one browser process per call.  quit()
            # tears down the whole browser session, not just one window.
            driver.quit()
| 37.530435 | 97 | 0.628823 |
e97623ca7719e4c225b678984733c06330fb8b6f | 2,896 | py | Python | Clustering/code.py | debapriyaroy95/ga-learner-dsmp-repo | a69b9ce5ea77b030f77fd41a81eae1ec791fdf65 | [
"MIT"
] | null | null | null | Clustering/code.py | debapriyaroy95/ga-learner-dsmp-repo | a69b9ce5ea77b030f77fd41a81eae1ec791fdf65 | [
"MIT"
] | null | null | null | Clustering/code.py | debapriyaroy95/ga-learner-dsmp-repo | a69b9ce5ea77b030f77fd41a81eae1ec791fdf65 | [
"MIT"
] | null | null | null | # --------------
# import packages
import pandas as pd
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt

# NOTE(review): `path` is supplied externally by the exercise platform (it is
# not defined in this file) and must point to the Excel workbook that holds
# the offers and transactions sheets -- confirm before running standalone.

# Load Offers (first sheet of the workbook)
offers = pd.read_excel(path, sheet_name=0)
# Load Transactions (second sheet)
transactions = pd.read_excel(path, sheet_name=1)
# Marker column: each transaction row counts as one purchase, so that the
# pivot below can aggregate purchases per customer/offer pair.
transactions['n'] = 1
# Merge dataframes on their shared column(s) (default inner join on 'Offer #')
df = pd.merge(transactions, offers)
# Look at the first 5 rows
print(df.head())


# --------------
# Code starts here

# create pivot table: one row per customer, one column per offer, cell = n
matrix = df.pivot_table(index='Customer Last Name', columns='Offer #' , values='n')
# replace missing values with 0 (customer did not take that offer)
matrix.fillna(0, inplace=True)
# reindex pivot table so 'Customer Last Name' becomes a regular column
matrix.reset_index(inplace=True)
# display first 5 rows
print(matrix.head())
# Code ends here


# --------------
# import packages
from sklearn.cluster import KMeans

# Code starts here

# initialize KMeans object (5 clusters, deterministic via random_state)
cluster = KMeans(n_clusters=5,init='k-means++', max_iter=300, n_init=10, random_state=0)
# create 'cluster' column from all offer columns (everything after the name)
matrix['cluster'] = cluster.fit_predict(matrix[matrix.columns[1:]])
print(matrix.head())
# Code ends here


# --------------
# import packages
from sklearn.decomposition import PCA

# Code starts here

# initialize pca object with 2 components
pca = PCA(n_components=2, random_state=0)
# create 'x' and 'y' columns denoting observation locations in decomposed form
# NOTE(review): fit_transform is called twice on the same data (the second
# call refits), and by this point matrix.columns[1:] also includes the
# 'cluster' column -- both look unintended but are kept as-is here.
matrix['x'] = pca.fit_transform(matrix[matrix.columns[1:]])[:,0]
matrix['y'] = pca.fit_transform(matrix[matrix.columns[1:]])[:,1]
# dataframe to visualize clusters by customer names
# NOTE(review): iloc positions [0,33,34,35] select name/cluster/x/y only for
# the expected 32-offer dataset -- verify if the input changes.
clusters = matrix.iloc[:,[0,33,34,35]]
# visualize clusters in PCA space, colored by cluster id
clusters.plot.scatter(x='x', y='y', c='cluster', colormap='viridis')
# Code ends here


# --------------
# Code starts here

# merge 'clusters' and 'transactions' to attach cluster ids to purchases
data = pd.merge(clusters, transactions)
print(data.head())
print('='*25)
# merge `data` and `offers` to also attach the offer details
data = pd.merge(offers, data)
print(data.head())
print('='*25)

# initialize empty dictionary: cluster id -> count of top-selling Champagne
champagne = {}
# iterate over every cluster
for val in data.cluster.unique():
    # observation falls in that cluster
    new_df = data[data.cluster == val]
    # sort cluster according to type of 'Varietal' (most purchased first)
    counts = new_df['Varietal'].value_counts(ascending=False)
    # check if 'Champagne' is ordered mostly
    if counts.index[0] == 'Champagne':
        # add it to 'champagne'
        champagne[val] = (counts[0])

# get cluster with maximum orders of 'Champagne'
cluster_champagne = max(champagne, key=champagne.get)
# print out cluster number
print(cluster_champagne)
# print('='*50)


# --------------
# Code starts here

# empty dictionary: cluster id -> average discount of its transactions
discount = {}
# iterate over cluster numbers
for val in data.cluster.unique():
    new_df = data[data.cluster == val]
    # mean discount for this cluster (sum over row count)
    counts = new_df['Discount (%)'].sum()/len(new_df)
    discount[val] = (counts)

# cluster with maximum average discount
cluster_discount = max(discount, key=discount.get)
# Code ends here
| 21.451852 | 88 | 0.690608 |
aaf53e38bea5ab8afcf628d29690ab10531201f4 | 3,512 | py | Python | src/db.py | brauliobarahona/RAPT-dataset | ec842544fe8af39d2f44604c06784b4dd6e24108 | [
"MIT"
] | 2 | 2020-06-15T09:26:46.000Z | 2020-06-15T14:39:48.000Z | src/db.py | brauliobarahona/RAPT-dataset | ec842544fe8af39d2f44604c06784b4dd6e24108 | [
"MIT"
] | null | null | null | src/db.py | brauliobarahona/RAPT-dataset | ec842544fe8af39d2f44604c06784b4dd6e24108 | [
"MIT"
] | 1 | 2021-01-23T15:22:29.000Z | 2021-01-23T15:22:29.000Z | from mysql.connector import MySQLConnection, Error
from configparser import ConfigParser
import pandas as pd
import numpy as np
from src.utils import *
def read_db_config(filename='program_config.ini', section='mysql_shiftable'):
    """
    Read one section of an INI file into a dict of connection kwargs.

    :param filename: path to program_config.ini
    :param section: where to look for db info in .ini file
    :returns: kwargs for MySQLConnection

    Example::
        config = read_db_config('program_config.ini', section='mysql_nonshiftable')
        connection = MySQLConnection(**config)
    """
    # Parse the configuration file, then guard against a missing section
    # before building the keyword dictionary.
    parser = ConfigParser()
    parser.read(filename)

    if not parser.has_section(section):
        raise Exception('{0} not found in the {1} file'.format(section, filename))

    return {option: value for option, value in parser.items(section)}
def getSensorNamesForHouse(cursor, abbreviation):
    """
    List the names of all sensors belonging to one house.

    :param cursor: db cursor
    :param abbreviation: abbreviation for house (sensor-name prefix)
    :returns: array of all sensors as string
    """
    # NOTE(review): the prefix is interpolated directly into the SQL text;
    # safe only because abbreviations come from internal configuration.
    sql = """ SELECT NAME
            FROM
                Sensors
            WHERE
                NAME LIKE '{0}%'""".format(abbreviation)
    cursor.execute(sql)
    # Each row is a 1-tuple; unpack the single NAME column.
    return [row[0] for row in cursor.fetchall()]
def getDFForQuery(cursor, query):
    """
    Run a query and wrap the result set in a DataFrame.

    :param cursor: db connection cursor
    :param query: database query
    :returns: (not Empty: Bool, pandas dataframe of result)
    """
    cursor.execute(query)
    records = cursor.fetchall()
    # Empty result set: signal False with an empty frame.
    if not records:
        return (False, pd.DataFrame())
    frame = pd.DataFrame(records)
    # Name the columns after the cursor's reported column names.
    frame.columns = cursor.column_names
    return (True, frame)
def getDFForQueryAnon(cursor, query, key):
    """
    Run a query and wrap the result set in an anonymised DataFrame.

    :param cursor: db connection cursor
    :param query: database query
    :param key: key to anonymise data
    :returns: (not Empty: Bool, pandas dataframe of result which is anonymized in addition)
    """
    cursor.execute(query)
    records = cursor.fetchall()
    # Empty result set: signal False with an empty frame.
    if not records:
        return (False, pd.DataFrame())
    frame = pd.DataFrame(records)
    frame.columns = cursor.column_names
    # Anonymise in place with the caller-provided key (from src.utils).
    anonymise(frame, key)
    return (True, frame)
def queryForSensor(name, startD, endD):
    """
    Build the SQL that selects one sensor's values in a time window;
    often used in combination with getDFForQuery::

        success, result = getDFForQuery(queryForSensor("name", "2018-12-12 12:12:12", "2019-12-12 12:12:12"))

    :param name: name of sensor
    :param startD: startdate as string
    :param endD: endDate as string
    :returns: query string to select all values between startdate and enddate
    """
    # f-string interpolation produces exactly the same SQL text as the
    # previous str.format version.
    return (f'SELECT DateTime, Value as {name} FROM SensorValues as sv '
            f'WHERE SensorName = "{name}" AND DateTime >= "{startD}" '
            f'AND DateTime < "{endD}" ORDER BY DateTime')
# Return the capture period recorded for a specific sensor name.
def getCapturePeriodForSensorName(cursor, name):
    """
    Assumes `name` exists in the db.

    :param cursor: db connection cursor
    :param name: name of sensor
    :returns: capture period of (sensor)name
    """
    sql = """ SELECT CapturePeriod
            FROM
                SensorValues
            WHERE
                SensorName = '{0}' LIMIT 1"""
    cursor.execute(sql.format(name))
    # LIMIT 1 guarantees at most one row; take its single column.
    return cursor.fetchall()[0][0]
| 25.449275 | 177 | 0.626708 |
e3e955201f266286b4e82ee78e15f1c9eef4375c | 13,908 | py | Python | system_tests/storage.py | Ofekmeister/google-cloud-python | 07dd51bc447beca67b8da1c66f1dfb944ef70418 | [
"Apache-2.0"
] | null | null | null | system_tests/storage.py | Ofekmeister/google-cloud-python | 07dd51bc447beca67b8da1c66f1dfb944ef70418 | [
"Apache-2.0"
] | null | null | null | system_tests/storage.py | Ofekmeister/google-cloud-python | 07dd51bc447beca67b8da1c66f1dfb944ef70418 | [
"Apache-2.0"
] | null | null | null | # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import tempfile
import time
import unittest
import httplib2
import six
from google.cloud import _helpers
from google.cloud.environment_vars import TESTS_PROJECT
from google.cloud import exceptions
from google.cloud import storage
from google.cloud.storage._helpers import _base64_md5hash
from system_test_utils import unique_resource_id
from retry import RetryErrors
from retry import RetryResult
# Retry wrapper used around bucket create/delete calls, which can hit
# per-project rate limits (HTTP 429 Too Many Requests) on the live service.
retry_429 = RetryErrors(exceptions.TooManyRequests)
# Plain HTTP client used to exercise signed URLs without GCS credentials.
HTTP = httplib2.Http()
# Point the library helpers at the project configured for system tests.
_helpers.PROJECT = TESTS_PROJECT
class Config(object):
    """Run-time configuration to be modified at set-up.

    This is a mutable stand-in to allow test set-up to modify
    global state.
    """
    # Shared storage.Client, populated in setUpModule().
    CLIENT = None
    # Scratch bucket shared by the file-based cases, populated in setUpModule().
    TEST_BUCKET = None
def setUpModule():
    """Create the shared client and a uniquely named scratch bucket."""
    Config.CLIENT = storage.Client()
    bucket_name = 'new' + unique_resource_id()
    # In the **very** rare case the bucket name is reserved, this
    # fails with a ConnectionError.
    Config.TEST_BUCKET = Config.CLIENT.bucket(bucket_name)
    retry_429(Config.TEST_BUCKET.create)()
def tearDownModule():
    """Delete the scratch bucket and its contents, retrying on 409 Conflict."""
    retry = RetryErrors(exceptions.Conflict)
    # force=True deletes any blobs still left in the bucket first.
    retry(Config.TEST_BUCKET.delete)(force=True)
class TestStorageBuckets(unittest.TestCase):
    """System tests for bucket creation and listing against live GCS."""

    def setUp(self):
        # Buckets created by a test are registered here for cleanup.
        self.case_buckets_to_delete = []

    def tearDown(self):
        # Batch the deletions into a single request; retry each on 429.
        with Config.CLIENT.batch():
            for bucket_name in self.case_buckets_to_delete:
                bucket = Config.CLIENT.bucket(bucket_name)
                retry_429(bucket.delete)()

    def test_create_bucket(self):
        new_bucket_name = 'a-new-bucket' + unique_resource_id('-')
        # The bucket must not exist before we create it.
        self.assertRaises(exceptions.NotFound,
                          Config.CLIENT.get_bucket, new_bucket_name)
        created = Config.CLIENT.create_bucket(new_bucket_name)
        self.case_buckets_to_delete.append(new_bucket_name)
        self.assertEqual(created.name, new_bucket_name)

    def test_list_buckets(self):
        buckets_to_create = [
            'new' + unique_resource_id(),
            'newer' + unique_resource_id(),
            'newest' + unique_resource_id(),
        ]
        created_buckets = []
        for bucket_name in buckets_to_create:
            bucket = Config.CLIENT.bucket(bucket_name)
            retry_429(bucket.create)()
            self.case_buckets_to_delete.append(bucket_name)
        # Retrieve the buckets.
        all_buckets = Config.CLIENT.list_buckets()
        created_buckets = [bucket for bucket in all_buckets
                           if bucket.name in buckets_to_create]
        self.assertEqual(len(created_buckets), len(buckets_to_create))
class TestStorageFiles(unittest.TestCase):
    """Base class for file-oriented system tests: shared fixture files,
    their md5 hashes, and per-test blob cleanup."""

    # Local fixture files uploaded by the subclasses; setUpClass adds a
    # 'hash' entry (base64 md5) to each inner dict.
    FILES = {
        'logo': {
            'path': 'system_tests/data/CloudPlatform_128px_Retina.png',
        },
        'big': {
            'path': 'system_tests/data/five-point-one-mb-file.zip',
        },
        'simple': {
            'path': 'system_tests/data/simple.txt',
        }
    }

    @classmethod
    def setUpClass(cls):
        super(TestStorageFiles, cls).setUpClass()
        # Precompute the md5 of every fixture so upload tests can compare
        # against the hash GCS reports.
        for file_data in cls.FILES.values():
            with open(file_data['path'], 'rb') as file_obj:
                file_data['hash'] = _base64_md5hash(file_obj)
        cls.bucket = Config.TEST_BUCKET

    def setUp(self):
        # Blobs created by a test are registered here for cleanup.
        self.case_blobs_to_delete = []

    def tearDown(self):
        for blob in self.case_blobs_to_delete:
            blob.delete()
class TestStorageWriteFiles(TestStorageFiles):
    """Upload/download round-trip tests against the live bucket."""

    def test_large_file_write_from_stream(self):
        blob = self.bucket.blob('LargeFile')
        file_data = self.FILES['big']
        with open(file_data['path'], 'rb') as file_obj:
            blob.upload_from_file(file_obj)
            self.case_blobs_to_delete.append(blob)
        # GCS may report the md5 as bytes or text depending on the transport;
        # normalize to bytes before comparing against the local hash.
        md5_hash = blob.md5_hash
        if not isinstance(md5_hash, six.binary_type):
            md5_hash = md5_hash.encode('utf-8')
        self.assertEqual(md5_hash, file_data['hash'])

    def test_small_file_write_from_filename(self):
        blob = self.bucket.blob('SmallFile')
        file_data = self.FILES['simple']
        blob.upload_from_filename(file_data['path'])
        self.case_blobs_to_delete.append(blob)
        # Same bytes/text normalization as above.
        md5_hash = blob.md5_hash
        if not isinstance(md5_hash, six.binary_type):
            md5_hash = md5_hash.encode('utf-8')
        self.assertEqual(md5_hash, file_data['hash'])

    def test_write_metadata(self):
        filename = self.FILES['logo']['path']
        blob_name = os.path.basename(filename)
        blob = storage.Blob(blob_name, bucket=self.bucket)
        blob.upload_from_filename(filename)
        self.case_blobs_to_delete.append(blob)
        # NOTE: This should not be necessary. We should be able to pass
        # it in to upload_file and also to upload_from_string.
        blob.content_type = 'image/png'
        self.assertEqual(blob.content_type, 'image/png')

    def test_direct_write_and_read_into_file(self):
        blob = self.bucket.blob('MyBuffer')
        file_contents = b'Hello World'
        blob.upload_from_string(file_contents)
        self.case_blobs_to_delete.append(blob)
        # Re-fetch via a second handle to prove the write is visible.
        same_blob = self.bucket.blob('MyBuffer')
        same_blob.reload() # Initialize properties.
        temp_filename = tempfile.mktemp()
        with open(temp_filename, 'wb') as file_obj:
            same_blob.download_to_file(file_obj)
        with open(temp_filename, 'rb') as file_obj:
            stored_contents = file_obj.read()
        self.assertEqual(file_contents, stored_contents)

    def test_copy_existing_file(self):
        filename = self.FILES['logo']['path']
        blob = storage.Blob('CloudLogo', bucket=self.bucket)
        blob.upload_from_filename(filename)
        self.case_blobs_to_delete.append(blob)
        # Server-side copy within the same bucket.
        new_blob = self.bucket.copy_blob(blob, self.bucket, 'CloudLogoCopy')
        self.case_blobs_to_delete.append(new_blob)
        base_contents = blob.download_as_string()
        copied_contents = new_blob.download_as_string()
        self.assertEqual(base_contents, copied_contents)
class TestStorageListFiles(TestStorageFiles):
    """Listing and pagination tests over a fixed set of copied blobs."""

    FILENAMES = ('CloudLogo1', 'CloudLogo2', 'CloudLogo3')

    @classmethod
    def setUpClass(cls):
        super(TestStorageListFiles, cls).setUpClass()
        # Make sure bucket empty before beginning.
        for blob in cls.bucket.list_blobs():
            blob.delete()
        # Upload one real file, then clone it server-side under the
        # remaining names.
        logo_path = cls.FILES['logo']['path']
        blob = storage.Blob(cls.FILENAMES[0], bucket=cls.bucket)
        blob.upload_from_filename(logo_path)
        cls.suite_blobs_to_delete = [blob]
        # Copy main blob onto remaining in FILENAMES.
        for filename in cls.FILENAMES[1:]:
            new_blob = cls.bucket.copy_blob(blob, cls.bucket, filename)
            cls.suite_blobs_to_delete.append(new_blob)

    @classmethod
    def tearDownClass(cls):
        for blob in cls.suite_blobs_to_delete:
            blob.delete()

    def test_list_files(self):
        # Listing is eventually consistent, so retry until all three
        # blobs appear.
        def _all_in_list(blobs):
            return len(blobs) == len(self.FILENAMES)

        def _all_blobs():
            return list(self.bucket.list_blobs())

        retry = RetryResult(_all_in_list)
        all_blobs = retry(_all_blobs)()
        self.assertEqual(sorted(blob.name for blob in all_blobs),
                         sorted(self.FILENAMES))

    def test_paginate_files(self):
        # Request one fewer than the total so a second page exists.
        truncation_size = 1
        count = len(self.FILENAMES) - truncation_size
        iterator = self.bucket.list_blobs(max_results=count)
        response = iterator.get_next_page_response()
        blobs = list(iterator.get_items_from_response(response))
        self.assertEqual(len(blobs), count)
        self.assertEqual(iterator.page_number, 1)
        self.assertTrue(iterator.next_page_token is not None)
        # The second page holds the remainder.
        response = iterator.get_next_page_response()
        last_blobs = list(iterator.get_items_from_response(response))
        self.assertEqual(len(last_blobs), truncation_size)
class TestStoragePseudoHierarchy(TestStorageFiles):
    """Delimiter/prefix listing tests over a directory-like blob layout."""

    # Names chosen to mimic a three-level directory tree.
    FILENAMES = (
        'file01.txt',
        'parent/file11.txt',
        'parent/child/file21.txt',
        'parent/child/file22.txt',
        'parent/child/grand/file31.txt',
        'parent/child/other/file32.txt',
    )

    @classmethod
    def setUpClass(cls):
        super(TestStoragePseudoHierarchy, cls).setUpClass()
        # Make sure bucket empty before beginning.
        for blob in cls.bucket.list_blobs():
            try:
                blob.delete()
            except exceptions.NotFound: # eventual consistency
                pass
        # Upload one real file and clone it server-side under every
        # hierarchical name.
        simple_path = cls.FILES['simple']['path']
        blob = storage.Blob(cls.FILENAMES[0], bucket=cls.bucket)
        blob.upload_from_filename(simple_path)
        cls.suite_blobs_to_delete = [blob]
        for filename in cls.FILENAMES[1:]:
            new_blob = cls.bucket.copy_blob(blob, cls.bucket, filename)
            cls.suite_blobs_to_delete.append(new_blob)

    @classmethod
    def tearDownClass(cls):
        for blob in cls.suite_blobs_to_delete:
            blob.delete()

    def test_root_level_w_delimiter(self):
        # Only the top-level file is an item; 'parent/' shows as a prefix.
        iterator = self.bucket.list_blobs(delimiter='/')
        response = iterator.get_next_page_response()
        blobs = list(iterator.get_items_from_response(response))
        self.assertEqual([blob.name for blob in blobs], ['file01.txt'])
        self.assertEqual(iterator.page_number, 1)
        self.assertTrue(iterator.next_page_token is None)
        self.assertEqual(iterator.prefixes, set(['parent/']))

    def test_first_level(self):
        iterator = self.bucket.list_blobs(delimiter='/', prefix='parent/')
        response = iterator.get_next_page_response()
        blobs = list(iterator.get_items_from_response(response))
        self.assertEqual([blob.name for blob in blobs], ['parent/file11.txt'])
        self.assertEqual(iterator.page_number, 1)
        self.assertTrue(iterator.next_page_token is None)
        self.assertEqual(iterator.prefixes, set(['parent/child/']))

    def test_second_level(self):
        expected_names = [
            'parent/child/file21.txt',
            'parent/child/file22.txt',
        ]

        # Listing is eventually consistent; retry until both files appear.
        def _all_in_list(pair):
            _, blobs = pair
            return [blob.name for blob in blobs] == expected_names

        def _all_blobs():
            iterator = self.bucket.list_blobs(delimiter='/',
                                              prefix='parent/child/')
            response = iterator.get_next_page_response()
            blobs = list(iterator.get_items_from_response(response))
            return iterator, blobs

        retry = RetryResult(_all_in_list)
        iterator, _ = retry(_all_blobs)()
        self.assertEqual(iterator.page_number, 1)
        self.assertTrue(iterator.next_page_token is None)
        self.assertEqual(iterator.prefixes,
                         set(['parent/child/grand/', 'parent/child/other/']))

    def test_third_level(self):
        # Pseudo-hierarchy can be arbitrarily deep, subject to the limit
        # of 1024 characters in the UTF-8 encoded name:
        # https://cloud.google.com/storage/docs/bucketnaming#objectnames
        # Exercise a layer deeper to illustrate this.
        iterator = self.bucket.list_blobs(delimiter='/',
                                          prefix='parent/child/grand/')
        response = iterator.get_next_page_response()
        blobs = list(iterator.get_items_from_response(response))
        self.assertEqual([blob.name for blob in blobs],
                         ['parent/child/grand/file31.txt'])
        self.assertEqual(iterator.page_number, 1)
        self.assertTrue(iterator.next_page_token is None)
        self.assertEqual(iterator.prefixes, set())
class TestStorageSignURLs(TestStorageFiles):
    """System tests for signed-URL generation (GET and DELETE methods)."""
    def setUp(self):
        # Upload a known payload so signed-URL responses can be compared
        # byte-for-byte against the local file.
        super(TestStorageSignURLs, self).setUp()
        logo_path = self.FILES['logo']['path']
        with open(logo_path, 'rb') as file_obj:
            self.LOCAL_FILE = file_obj.read()
        blob = self.bucket.blob('LogoToSign.jpg')
        blob.upload_from_string(self.LOCAL_FILE)
        self.case_blobs_to_delete.append(blob)
    def tearDown(self):
        # Delete only blobs that still exist; the signed-DELETE test may
        # already have removed its blob.
        for blob in self.case_blobs_to_delete:
            if blob.exists():
                blob.delete()
    def test_create_signed_read_url(self):
        # A short expiration suffices: the URL only needs to outlive the request.
        blob = self.bucket.blob('LogoToSign.jpg')
        expiration = int(time.time() + 5)
        signed_url = blob.generate_signed_url(expiration, method='GET',
                                              client=Config.CLIENT)
        response, content = HTTP.request(signed_url, method='GET')
        self.assertEqual(response.status, 200)
        self.assertEqual(content, self.LOCAL_FILE)
    def test_create_signed_delete_url(self):
        blob = self.bucket.blob('LogoToSign.jpg')
        expiration = int(time.time() + 283473274)
        signed_delete_url = blob.generate_signed_url(expiration,
                                                     method='DELETE',
                                                     client=Config.CLIENT)
        response, content = HTTP.request(signed_delete_url, method='DELETE')
        self.assertEqual(response.status, 204)
        self.assertEqual(content, b'')
        # Check that the blob has actually been deleted.
        self.assertFalse(blob.exists())
| 35.845361 | 78 | 0.651424 |
2447616e9d28c01dcfe971a82cb7f62d3103ef10 | 10,363 | py | Python | src/compas/datastructures/mesh/operations/collapse.py | mpopescu/compas | 55f259607deea501f862cbaea79bd97d7e56ead6 | [
"MIT"
] | null | null | null | src/compas/datastructures/mesh/operations/collapse.py | mpopescu/compas | 55f259607deea501f862cbaea79bd97d7e56ead6 | [
"MIT"
] | 9 | 2019-09-11T08:53:19.000Z | 2019-09-16T08:35:39.000Z | src/compas/datastructures/mesh/core/operations/collapse.py | Licini/compas | 34f65adb3d0abc3f403312ffba62aa76f3376292 | [
"MIT"
] | null | null | null | from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
__all__ = [
'mesh_collapse_edge',
'trimesh_collapse_edge',
]
def is_collapse_legal(mesh, u, v, allow_boundary=False):
    """Verify if the requested collapse is legal for a triangle mesh.
    Parameters
    ----------
    mesh : compas.datastructures.Mesh
        The mesh.
    u : str
        The vertex to collapse towards.
    v : str
        The vertex to collapse.
    allow_boundary : bool (False)
        If ``True``, collapses involving boundary vertices are also allowed.
    Returns
    -------
    bool
        `True` if the collapse is legal.
        `False` otherwise.
    """
    u_on = mesh.is_vertex_on_boundary(u)
    v_on = mesh.is_vertex_on_boundary(v)
    # Collapsing a boundary vertex into an interior one would distort the
    # boundary, so that direction is always rejected.
    if v_on and not u_on:
        return False
    # collapsing of boundary vertices is currently not supported
    # change this to `and` to support collapsing to or from the boundary
    if not allow_boundary:
        if u_on or v_on:
            return False
    fkey_uv = mesh.halfedge[u][v]
    fkey_vu = mesh.halfedge[v][u]
    # check for contained faces
    for nbr in mesh.halfedge[u]:
        if nbr in mesh.halfedge[v]:
            # `nbr` is a common neighbor of u and v.
            fkey_nbr_v = mesh.halfedge[nbr][v]
            fkey_u_nbr = mesh.halfedge[u][nbr]
            if fkey_nbr_v is None and fkey_u_nbr is None:
                return False
            # in a trimesh
            # u and v should have one neighbor in common
            # and uv-nbr or vu-nbr
            # should define a face
            # check if UV > NBR is a face
            if mesh.halfedge[v][nbr] == fkey_uv and mesh.halfedge[nbr][u] != fkey_uv:
                return False
            # check if VU > NBR is a face
            if mesh.halfedge[u][nbr] == fkey_vu and mesh.halfedge[nbr][v] != fkey_vu:
                return False
    for nbr in mesh.halfedge[v]:
        if nbr in mesh.halfedge[u]:
            # check if UV > NBR is a face
            if mesh.halfedge[v][nbr] == fkey_uv and mesh.halfedge[nbr][u] != fkey_uv:
                return False
            # check if V > U > NBR is a face
            if mesh.halfedge[u][nbr] == fkey_vu and mesh.halfedge[nbr][v] != fkey_vu:
                return False
    return True
def mesh_collapse_edge(mesh, u, v, t=0.5, allow_boundary=False, fixed=None):
    """Collapse an edge to its first or second vertex, or to an intermediate point.
    Parameters
    ----------
    mesh : compas.datastructures.Mesh
        Instance of a mesh.
    u : str
        The first vertex of the (half-) edge.
    v : str
        The second vertex of the (half-) edge.
    t : float (0.5)
        Determines where to collapse to.
        If `t == 0.0` collapse to `u`.
        If `t == 1.0` collapse to `v`.
        If `0.0 < t < 1.0`, collapse to a point between `u` and `v`.
    allow_boundary : bool (False)
        Allow collapses involving boundary vertices.
    fixed : list (None)
        A list of identifiers of vertices that should stay fixed.
    Returns
    -------
    bool or None
        ``False`` if the collapse is illegal or involves a fixed vertex;
        ``None`` after a successful collapse.
    Raises
    ------
    ValueError
        If `u` and `v` are not neighbors.
    """
    if t < 0.0:
        raise ValueError('Parameter t should be greater than or equal to 0.')
    if t > 1.0:
        raise ValueError('Parameter t should be smaller than or equal to 1.')
    # check collapse conditions
    if not is_collapse_legal(mesh, u, v, allow_boundary=allow_boundary):
        return False
    # compare to fixed
    fixed = fixed or []
    if v in fixed or u in fixed:
        return False
    # move U
    x, y, z = mesh.edge_point(u, v, t)
    mesh.vertex[u]['x'] = x
    mesh.vertex[u]['y'] = y
    mesh.vertex[u]['z'] = z
    # UV face
    fkey = mesh.halfedge[u][v]
    if fkey is None:
        del mesh.halfedge[u][v]
    else:
        face = mesh.face_vertices(fkey)
        f = len(face)
        # switch between UV face sizes
        # note: in a trimesh this is not necessary!
        if f < 3:
            raise Exception("Invalid mesh face: {}".format(fkey))
        if f == 3:
            # delete UV
            o = face[face.index(u) - 1]
            del mesh.halfedge[u][v]
            del mesh.halfedge[v][o]
            del mesh.halfedge[o][u]
            del mesh.face[fkey]
        else:
            # u > v > d => u > d
            d = mesh.face_vertex_descendant(fkey, v)
            face.remove(v)
            del mesh.halfedge[u][v]
            del mesh.halfedge[v][d]
            mesh.halfedge[u][d] = fkey
    # VU face
    fkey = mesh.halfedge[v][u]
    if fkey is None:
        del mesh.halfedge[v][u]
    else:
        face = mesh.face_vertices(fkey)
        f = len(face)
        # switch between VU face sizes
        # note: in a trimesh this is not necessary!
        if f < 3:
            raise Exception("Invalid mesh face: {}".format(fkey))
        if f == 3:
            # delete UV
            o = face[face.index(v) - 1]
            del mesh.halfedge[v][u]  # the collapsing halfedge
            del mesh.halfedge[u][o]
            del mesh.halfedge[o][v]
            del mesh.face[fkey]
        else:
            # a > v > u => a > u
            a = mesh.face_vertex_ancestor(fkey, v)
            face.remove(v)
            del mesh.halfedge[a][v]
            del mesh.halfedge[v][u]
            mesh.halfedge[a][u] = fkey
    # V neighbors and halfedges coming into V
    for nbr, fkey in list(mesh.halfedge[v].items()):
        if fkey is None:
            mesh.halfedge[u][nbr] = None
            del mesh.halfedge[v][nbr]
        else:
            # a > v > nbr => a > u > nbr
            face = mesh.face[fkey]
            a = mesh.face_vertex_ancestor(fkey, v)
            face[face.index(v)] = u
            if v in mesh.halfedge[a]:
                del mesh.halfedge[a][v]
            del mesh.halfedge[v][nbr]
            mesh.halfedge[a][u] = fkey
            mesh.halfedge[u][nbr] = fkey
            # only update what will not be updated in the previous part
            # verify what this is exactly
            # nbr > v > d => nbr > u > d
            if v in mesh.halfedge[nbr]:
                fkey = mesh.halfedge[nbr][v]
                del mesh.halfedge[nbr][v]
                mesh.halfedge[nbr][u] = fkey
    # delete V
    del mesh.halfedge[v]
    del mesh.vertex[v]
# split this up into more efficient cases
# - both not on boundary
# - u on boundary
# - v on boundary
# - u and v on boundary
def trimesh_collapse_edge(mesh, u, v, t=0.5, allow_boundary=False, fixed=None):
    """Collapse an edge to its first or second vertex, or to an intermediate
    point.
    Notes
    -----
    An edge can only be collapsed if the collapse is `legal`. A collapse is
    legal if it meets the following requirements:
    * any vertex `w` that is a neighbor of both `u` and `v` is a face of the mesh
    * `u` and `v` are not on the boundary
    * ...
    See [] for a detailed explanation of these requirements.
    Parameters
    ----------
    mesh : compas.datastructures.Mesh
        Instance of a mesh.
    u : str
        The first vertex of the (half-) edge.
    v : str
        The second vertex of the (half-) edge.
    t : float (0.5)
        Determines where to collapse to.
        If `t == 0.0` collapse to `u`.
        If `t == 1.0` collapse to `v`.
        If `0.0 < t < 1.0`, collapse to a point between `u` and `v`.
    allow_boundary : bool (False)
        Allow collapses involving vertices on the boundary.
    fixed : list (None)
        Identifiers of the vertices that should stay fixed.
    Returns
    -------
    bool
        ``True`` if the edge was collapsed, ``False`` if the collapse was
        refused (illegal collapse or fixed vertex involved).
    Raises
    ------
    ValueError
        If `u` and `v` are not neighbors.
    Examples
    --------
    >>>
    """
    if t < 0.0:
        raise ValueError('Parameter t should be greater than or equal to 0.')
    if t > 1.0:
        raise ValueError('Parameter t should be smaller than or equal to 1.')
    # check collapse conditions
    if not is_collapse_legal(mesh, u, v, allow_boundary=allow_boundary):
        return False
    # When u lies on the boundary, collapse onto u so the boundary keeps
    # its geometry.
    if mesh.is_vertex_on_boundary(u):
        t = 0.0
    # compare to fixed
    fixed = fixed or []
    if v in fixed or u in fixed:
        return False
    # move U
    x, y, z = mesh.edge_point(u, v, t)
    mesh.vertex[u]['x'] = x
    mesh.vertex[u]['y'] = y
    mesh.vertex[u]['z'] = z
    # UV face
    fkey = mesh.halfedge[u][v]
    if fkey is None:
        del mesh.halfedge[u][v]
    else:
        face = mesh.face[fkey]
        o = face[face.index(u) - 1]
        del mesh.halfedge[u][v]
        del mesh.halfedge[v][o]
        del mesh.halfedge[o][u]
        del mesh.face[fkey]
        # An opposite vertex left with fewer than two neighbors has
        # degenerated; remove it entirely.
        if len(mesh.halfedge[o]) < 2:
            del mesh.halfedge[o]
            del mesh.vertex[o]
            del mesh.halfedge[u][o]
    # VU face
    fkey = mesh.halfedge[v][u]
    if fkey is None:
        del mesh.halfedge[v][u]
    else:
        face = mesh.face[fkey]
        o = face[face.index(v) - 1]
        del mesh.halfedge[v][u]
        del mesh.halfedge[u][o]
        del mesh.halfedge[o][v]
        del mesh.face[fkey]
        if len(mesh.halfedge[o]) < 2:
            del mesh.halfedge[o]
            del mesh.vertex[o]
            del mesh.halfedge[v][o]
    # neighborhood of V
    for nbr, fkey in list(mesh.halfedge[v].items()):
        if fkey is None:
            mesh.halfedge[u][nbr] = None
            del mesh.halfedge[v][nbr]
        else:
            # a > v > nbr => a > u > nbr
            face = mesh.face[fkey]
            a = face[face.index(v) - 1]
            mesh.face[fkey] = [a, u, nbr]
            if v in mesh.halfedge[a]:
                del mesh.halfedge[a][v]
            del mesh.halfedge[v][nbr]
            mesh.halfedge[a][u] = fkey
            mesh.halfedge[u][nbr] = fkey
            mesh.halfedge[nbr][a] = fkey
            # nbr > v > d => nbr > u > d
            if v in mesh.halfedge[nbr]:
                mesh.halfedge[nbr][u] = mesh.halfedge[nbr][v]
                del mesh.halfedge[nbr][v]
    # delete V
    del mesh.halfedge[v]
    del mesh.vertex[v]
    # clean up
    for nu in mesh.halfedge[u]:
        for nbr in mesh.halfedge[nu]:
            if nbr == v:
                # remaining halfedges pointing at the removed vertex are
                # redirected to u
                mesh.halfedge[nu][u] = mesh.halfedge[nu][v]
                del mesh.halfedge[nu][v]
    return True
# ==============================================================================
# Main
# ==============================================================================
if __name__ == "__main__":
    # No demo/self-test; this module is import-only.
    pass
| 27.128272 | 85 | 0.535173 |
9d249e4b0f4fa58c9a5267f06911981ff1ef6028 | 350 | py | Python | config/settings/heroku.py | chaudbak/instanotifier | d29bc6bd9b7a003403886bfff1376b2c1925cc74 | [
"MIT"
] | null | null | null | config/settings/heroku.py | chaudbak/instanotifier | d29bc6bd9b7a003403886bfff1376b2c1925cc74 | [
"MIT"
] | 6 | 2020-06-06T01:27:17.000Z | 2022-02-10T11:20:17.000Z | config/settings/heroku.py | chaudbak/instanotifier | d29bc6bd9b7a003403886bfff1376b2c1925cc74 | [
"MIT"
] | null | null | null | from .production import * # noqa
# Skip e-mail verification and log outgoing mail to the console while no
# mail provider (Mailgun) is attached to the Heroku app.
ACCOUNT_EMAIL_VERIFICATION = 'none'
EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND', default='django.core.mail.backends.console.EmailBackend')
# Route request-error logs to the console instead of the mail_admins handler.
LOGGING['loggers']['django.request']['handlers'] = ['console']
| 35 | 101 | 0.771429 |
bdc0e8d24776ddda253409b4b18c19c9d1ab0811 | 1,336 | py | Python | puzzle/day15.py | robhansen/advent2021 | 390d49dd237200a8939b0d5486bbeb37c079807c | [
"MIT"
] | null | null | null | puzzle/day15.py | robhansen/advent2021 | 390d49dd237200a8939b0d5486bbeb37c079807c | [
"MIT"
] | null | null | null | puzzle/day15.py | robhansen/advent2021 | 390d49dd237200a8939b0d5486bbeb37c079807c | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import sys
import networkx as nx
if len(sys.argv) != 2:
    # Exactly one CLI argument (the puzzle-input filename) is required.
    print("Help: {} <filename>".format(sys.argv[0]))
    sys.exit(0)
def solve_graph(filename, expansion):
    """Compute and print the lowest-risk path length (AoC 2021, day 15).

    Parameters
    ----------
    filename : str
        Puzzle-input file: one row of single-digit risk levels per line.
    expansion : int
        Tiling factor (1 for part one, 5 for part two). Each tile step
        increments the risks, wrapping from 9 back to 1.
    """
    graph = nx.DiGraph()
    risks = []
    # BUG FIX: previously opened sys.argv[1] directly, silently ignoring
    # the `filename` parameter.
    with open(filename) as file:
        lines = file.readlines()
        for i in range(expansion):
            for line in lines:
                new_row = []
                for j in range(expansion):
                    # Risk r shifted by (i + j), wrapped into the range 1..9.
                    new_row.extend([((int(x)+i+j-1)%9)+1 for x in list(line.rstrip())])
                risks.append(new_row)
    for y in range(len(risks)):
        for x in range(len(risks[0])):
            # add right and down edges in each direction; the edge weight is
            # the risk of the cell being entered.
            if x < len(risks[0])-1:
                graph.add_edge((x,y), (x+1,y), weight=risks[y][x+1])
                graph.add_edge((x+1,y), (x,y), weight=risks[y][x])
            if y < len(risks)-1:
                graph.add_edge((x,y), (x,y+1), weight=risks[y+1][x])
                graph.add_edge((x,y+1), (x,y), weight=risks[y][x])
    length = nx.shortest_path_length(graph, source=(0,0), target=(len(risks[0])-1,len(risks)-1),weight='weight',method='bellman-ford')
    print("Shortest path with {} expansions = {}".format(expansion, length))
solve_graph(sys.argv[1], 1)
solve_graph(sys.argv[1], 5) | 37.111111 | 135 | 0.537425 |
55837a7305e5060124fc4c0f8246558e1ea88fac | 5,994 | py | Python | blousebrothers/confs/migrations/0001_initial.py | sladinji/blousebrothers | 461de3ba011c0aaed3f0014136c4497b6890d086 | [
"MIT"
] | 1 | 2022-01-27T11:58:10.000Z | 2022-01-27T11:58:10.000Z | blousebrothers/confs/migrations/0001_initial.py | sladinji/blousebrothers | 461de3ba011c0aaed3f0014136c4497b6890d086 | [
"MIT"
] | 5 | 2021-03-19T00:01:54.000Z | 2022-03-11T23:46:21.000Z | blousebrothers/confs/migrations/0001_initial.py | sladinji/blousebrothers | 461de3ba011c0aaed3f0014136c4497b6890d086 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-09-19 11:58
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import oscar.models.fields.autoslugfield
class Migration(migrations.Migration):
    """Auto-generated initial schema for the `confs` app (do not hand-edit).

    Creates Answer, Conference, ConferenceImage, Item, Question,
    QuestionImage and Speciality, then wires the cross-model relations.
    """
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Answer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('answer', models.CharField(blank=True, max_length=256, null=True, verbose_name='Proposition')),
                ('explaination', models.CharField(blank=True, max_length=256, null=True, verbose_name='Explication')),
                ('explaination_image', models.ImageField(blank=True, max_length=255, null=True, upload_to='images/products/%Y/%m/', verbose_name='Image')),
                ('correct', models.BooleanField(default=False, verbose_name='Correct')),
                ('index', models.PositiveIntegerField(default=0, verbose_name='Ordre')),
            ],
        ),
        migrations.CreateModel(
            name='Conference',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date_created', models.DateTimeField(auto_now_add=True, verbose_name='Date created')),
                ('title', models.CharField(max_length=64, verbose_name='Titre')),
                ('type', models.CharField(choices=[('DCP', 'DCP'), ('QI', 'QI')], default='DP', max_length=10, verbose_name='Type')),
                ('slug', oscar.models.fields.autoslugfield.AutoSlugField(blank=True, editable=False, max_length=128, populate_from='title', unique=True, verbose_name='Slug')),
                ('summary', models.CharField(help_text='Ce résumé doit décrire le contenu de la conférence en moins de 140 caractères.', max_length=140, verbose_name='Résumé')),
                ('statement', models.TextField(verbose_name='Énoncé')),
                ('edition_progress', models.PositiveIntegerField(default=0, verbose_name='Progression')),
            ],
        ),
        migrations.CreateModel(
            name='ConferenceImage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('image', models.ImageField(max_length=255, upload_to='images/products/%Y/%m/', verbose_name='Image')),
                ('date_created', models.DateTimeField(auto_now_add=True, verbose_name='Date created')),
                ('caption', models.CharField(blank=True, max_length=200, verbose_name='Légende')),
                ('index', models.PositiveIntegerField(default=0, verbose_name='Ordre')),
                ('conf', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='images', to='confs.Conference')),
            ],
        ),
        migrations.CreateModel(
            name='Item',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=128, verbose_name='Item')),
                ('number', models.IntegerField(verbose_name='Numéro')),
            ],
        ),
        migrations.CreateModel(
            name='Question',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('question', models.TextField(max_length=64, verbose_name='Enoncé')),
                ('index', models.PositiveIntegerField(default=0, verbose_name='Ordre')),
                ('conf', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='questions', to='confs.Conference', verbose_name='Conference')),
            ],
        ),
        migrations.CreateModel(
            name='QuestionImage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('image', models.ImageField(max_length=255, upload_to='images/products/%Y/%m/', verbose_name='Image')),
                ('date_created', models.DateTimeField(auto_now_add=True, verbose_name='Date created')),
                ('caption', models.CharField(blank=True, max_length=200, verbose_name='Libellé')),
                ('index', models.PositiveIntegerField(default=0, verbose_name='Ordre')),
                ('question', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='images', to='confs.Question')),
            ],
        ),
        migrations.CreateModel(
            name='Speciality',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=128, verbose_name='Matière')),
            ],
        ),
        migrations.AddField(
            model_name='conference',
            name='items',
            field=models.ManyToManyField(related_name='conferences', to='confs.Item', verbose_name='Items'),
        ),
        migrations.AddField(
            model_name='conference',
            name='owner',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='conference',
            name='specialities',
            field=models.ManyToManyField(related_name='conferences', to='confs.Speciality', verbose_name='Spécialités'),
        ),
        migrations.AddField(
            model_name='answer',
            name='question',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='answers', to='confs.Question'),
        ),
    ]
| 54 | 177 | 0.616283 |
3105cb8d06bef224891d687714cc5977d1b4f26e | 14,816 | py | Python | networks/datagen.py | GuangmingZhu/ContinuousGR | 2b35fe673405744a694b90f1e89943846b3e1de1 | [
"MIT"
] | 18 | 2018-08-23T13:38:29.000Z | 2022-02-24T09:19:14.000Z | networks/datagen.py | NIDONGDEA/ContinuousGR | 2b35fe673405744a694b90f1e89943846b3e1de1 | [
"MIT"
] | 1 | 2021-09-16T10:20:18.000Z | 2021-09-16T10:20:18.000Z | networks/datagen.py | NIDONGDEA/ContinuousGR | 2b35fe673405744a694b90f1e89943846b3e1de1 | [
"MIT"
] | 2 | 2019-10-21T23:12:28.000Z | 2022-02-28T01:53:48.000Z | import numpy as np
import tensorflow as tf
keras=tf.contrib.keras
import inputs as data
import threading
## Iteration
def minibatches(inputs=None, targets=None, batch_size=None, shuffle=False):
    """Yield (inputs, targets) mini-batches; any ragged tail is dropped.

    With ``shuffle=True`` the batches index through a random permutation,
    otherwise contiguous slices are yielded.
    """
    assert len(inputs) == len(targets)
    n_samples = len(inputs)
    if shuffle:
        order = np.arange(n_samples)
        np.random.shuffle(order)
    for begin in range(0, n_samples - batch_size + 1, batch_size):
        if shuffle:
            picked = order[begin:begin + batch_size]
        else:
            picked = slice(begin, begin + batch_size)
        yield inputs[picked], targets[picked]
## Threading
def threading_data(data=None, fn=None, **kwargs):
    """Apply ``fn`` to every element of ``data`` concurrently.

    One thread per element is spawned; results keep the input order and
    are returned as a numpy array.
    """
    def worker(out, idx, item, kw):
        out[idx] = fn(item, **kw)
    results = [None] * len(data)
    workers = []
    for idx, item in enumerate(data):
        th = threading.Thread(
            name='threading_and_return',
            target=worker,
            args=(results, idx, item, kwargs)
        )
        th.start()
        workers.append(th)
    for th in workers:
        th.join()
    return np.asarray(results)
## isoTrainImageGenerator
def isoTrainImageGenerator(filepath, batch_size, depth, num_classes, modality):
    """Endless generator of shuffled training batches for isolated gestures.

    Yields (clip_batch, one_hot_labels) pairs; ``modality`` selects the
    loader: 0 = RGB, 1 = depth, 2 = optical flow.
    """
    videos, labels = data.load_iso_video_list(filepath)
    sample_ids = np.asarray(np.arange(0, len(labels)), dtype=np.int32)
    labels = np.asarray(labels, dtype=np.int32)
    while True:
        for batch_ids, batch_labels in minibatches(sample_ids, labels,
                                                   batch_size, shuffle=True):
            # Gather per-sample loader arguments for this batch.
            paths, fcnts, olens, starts, train_flags = [], [], [], [], []
            for sample_id in batch_ids:
                key = '%06d' % sample_id
                paths.append(videos[key]['videopath'])
                fcnts.append(videos[key]['framecnt'])
                olens.append(depth)
                starts.append(1)
                train_flags.append(True)  # training mode
            batch_info = zip(paths, fcnts, olens, starts, train_flags)
            if modality == 0:    # RGB
                clips = threading_data([_ for _ in batch_info],
                                       data.prepare_iso_rgb_data)
            elif modality == 1:  # Depth
                clips = threading_data([_ for _ in batch_info],
                                       data.prepare_iso_depth_data)
            elif modality == 2:  # Flow
                clips = threading_data([_ for _ in batch_info],
                                       data.prepare_iso_flow_data)
            one_hot = keras.utils.to_categorical(batch_labels, num_classes=num_classes)
            yield (clips, one_hot)
## isoTestImageGenerator
def isoTestImageGenerator(filepath, batch_size, depth, num_classes, modality):
    """Endless generator of evaluation batches for isolated gestures.

    Same contract as isoTrainImageGenerator but without shuffling and with
    augmentation disabled (is_training=False).
    """
    videos, labels = data.load_iso_video_list(filepath)
    sample_ids = np.asarray(np.arange(0, len(labels)), dtype=np.int32)
    labels = np.asarray(labels, dtype=np.int32)
    while True:
        for batch_ids, batch_labels in minibatches(sample_ids, labels,
                                                   batch_size, shuffle=False):
            # Gather per-sample loader arguments for this batch.
            paths, fcnts, olens, starts, train_flags = [], [], [], [], []
            for sample_id in batch_ids:
                key = '%06d' % sample_id
                paths.append(videos[key]['videopath'])
                fcnts.append(videos[key]['framecnt'])
                olens.append(depth)
                starts.append(1)
                train_flags.append(False)  # evaluation mode
            batch_info = zip(paths, fcnts, olens, starts, train_flags)
            if modality == 0:    # RGB
                clips = threading_data([_ for _ in batch_info],
                                       data.prepare_iso_rgb_data)
            elif modality == 1:  # Depth
                clips = threading_data([_ for _ in batch_info],
                                       data.prepare_iso_depth_data)
            elif modality == 2:  # Flow
                clips = threading_data([_ for _ in batch_info],
                                       data.prepare_iso_flow_data)
            one_hot = keras.utils.to_categorical(batch_labels, num_classes=num_classes)
            yield (clips, one_hot)
## conTrainImageGenerator
def conTrainImageGenerator(filepath, batch_size, depth, num_classes, modality):
    """Endless generator of shuffled training batches over continuous-gesture
    segments.

    Parameters
    ----------
    filepath : str
        List file; each line is '<videopath> <start>,<end>:<label> ...'.
    batch_size : int
        Number of segments per yielded batch.
    depth : int
        Temporal length each clip is sampled to.
    num_classes : int
        Number of gesture classes for the one-hot encoding.
    modality : int
        0 = RGB, 1 = depth, 2 = optical flow.

    Yields
    ------
    (clip_batch, one_hot_labels)
    """
    X_train = data.load_con_video_list(filepath)
    X_tridx = np.asarray(np.arange(0, len(X_train)), dtype=np.int32)
    while True:
        for X_indices, _ in minibatches(X_tridx, X_tridx,
                                        batch_size, shuffle=True):
            image_path = []
            image_fcnt = []
            image_olen = []
            image_start = []
            is_training = []
            y_label_t = []
            for data_a in range(batch_size):
                # Parse '<path> <start>,<end>:<label>' for this segment.
                # (Removed a dead `X_index_a = X_indices[data_a]` assignment.)
                idx = X_indices[data_a]
                video_path = X_train[idx].split(' ')[0]
                starti = int(X_train[idx].split(' ')[1].split(',')[0])
                endi = int(X_train[idx].split(' ')[1].split(',')[1].split(':')[0])
                label = int(X_train[idx].split(' ')[1].split(',')[1].split(':')[1])-1
                image_path.append(video_path)
                image_fcnt.append(endi-starti+1)
                image_olen.append(depth)
                image_start.append(starti)
                is_training.append(True)  # Training: augmentation enabled
                y_label_t.append(label)
            image_info = zip(image_path, image_fcnt, image_olen, image_start, is_training)
            if modality == 0:    # RGB
                X_data_t = threading_data([_ for _ in image_info],
                                          data.prepare_iso_rgb_data)
            elif modality == 1:  # Depth
                X_data_t = threading_data([_ for _ in image_info],
                                          data.prepare_iso_depth_data)
            elif modality == 2:  # Flow
                X_data_t = threading_data([_ for _ in image_info],
                                          data.prepare_iso_flow_data)
            y_hot_label_t = keras.utils.to_categorical(y_label_t, num_classes=num_classes)
            yield (X_data_t, y_hot_label_t)
## conTestImageGenerator
def conTestImageGenerator(filepath, batch_size, depth, num_classes, modality):
    """Endless generator of evaluation batches over continuous-gesture
    segments.

    Same contract as conTrainImageGenerator but without shuffling and with
    augmentation disabled (is_training=False).
    """
    X_test = data.load_con_video_list(filepath)
    X_teidx = np.asarray(np.arange(0, len(X_test)), dtype=np.int32)
    while True:
        for X_indices, _ in minibatches(X_teidx, X_teidx,
                                        batch_size, shuffle=False):
            image_path = []
            image_fcnt = []
            image_olen = []
            image_start = []
            is_training = []
            y_label_t = []
            for data_a in range(batch_size):
                # Parse '<path> <start>,<end>:<label>' for this segment.
                # (Removed a dead `X_index_a = X_indices[data_a]` assignment.)
                idx = X_indices[data_a]
                video_path = X_test[idx].split(' ')[0]
                starti = int(X_test[idx].split(' ')[1].split(',')[0])
                endi = int(X_test[idx].split(' ')[1].split(',')[1].split(':')[0])
                label = int(X_test[idx].split(' ')[1].split(',')[1].split(':')[1])-1
                image_path.append(video_path)
                image_fcnt.append(endi-starti+1)
                image_olen.append(depth)
                image_start.append(starti)
                is_training.append(False)  # Testing: augmentation disabled
                y_label_t.append(label)
            image_info = zip(image_path, image_fcnt, image_olen, image_start, is_training)
            if modality == 0:    # RGB
                X_data_t = threading_data([_ for _ in image_info],
                                          data.prepare_iso_rgb_data)
            elif modality == 1:  # Depth
                X_data_t = threading_data([_ for _ in image_info],
                                          data.prepare_iso_depth_data)
            elif modality == 2:  # Flow
                X_data_t = threading_data([_ for _ in image_info],
                                          data.prepare_iso_flow_data)
            y_hot_label_t = keras.utils.to_categorical(y_label_t, num_classes=num_classes)
            yield (X_data_t, y_hot_label_t)
## conTrainImageBoundaryGenerator
def conTrainImageBoundaryGenerator(filepath, batch_size, depth, num_classes, modality):
X_train = data.load_con_video_list(filepath)
X_tridx = np.asarray(np.arange(0, len(X_train)), dtype=np.int32)
while 1:
for X_indices,_ in minibatches(X_tridx, X_tridx,
batch_size, shuffle=True):
# Read data for each batch
video_label = []
idx = X_indices[0]
video_path = X_train[idx].split(' ')[0]
segcnt = len(X_train[idx].split(' '))
starti = endi = 0
for i in range(1, segcnt):
seginfo = X_train[idx].split(' ')[i]
starti = int(seginfo.split(',')[0])
if starti <= endi:
starti = endi + 1
endi = int(seginfo.split(',')[1].split(':')[0])
label = int(seginfo.split(',')[1].split(':')[1])-1
for j in range(starti, endi+1):
video_label.append(label)
if endi != len(video_label):
print 'invalid: endi - %d, len(video_label) - %d'%(endi, len(video_label))
video_fcnt = len(video_label)
if len(video_label)<=depth:
video_olen = len(video_label)
else:
video_olen = depth
is_training = True # Training
if modality==0: #RGB
X_data_t,y_label = data.prepare_con_rgb_data(video_path, video_fcnt, video_olen, video_label, is_training)
if modality==1: #Depth
X_data_t,y_label = data.prepare_con_depth_data(video_path, video_fcnt, video_olen, video_label, is_training)
if modality==2: #Flow
X_data_t,y_label = data.prepare_con_flow_data(video_path, video_fcnt, video_olen, video_label, is_training)
y_bound = np.zeros((len(y_label),), dtype=np.int32)
for idx in range(2,len(y_label)-2):
if y_label[idx-1]==y_label[idx] and y_label[idx+1]==y_label[idx+2] and y_label[idx]!=y_label[idx+1]:
y_bound[idx-1]=1
y_bound[idx]=1
y_bound[idx+1]=1
y_bound[idx+2]=1
y_bound[0]=y_bound[1]=1
y_bound[len(y_label)-1]=y_bound[len(y_label)-2]=1
yield (np.reshape(X_data_t,(1,video_olen,112,112,3)), y_bound)
## conTestImageBoundaryGenerator
def conTestImageBoundaryGenerator(filepath, batch_size, depth, num_classes, modality):
X_test = data.load_con_video_list(filepath)
X_teidx = np.asarray(np.arange(0, len(X_test)), dtype=np.int32)
while 1:
for X_indices,_ in minibatches(X_teidx, X_teidx,
batch_size, shuffle=False):
# Read data for each batch
video_label = []
idx = X_indices[0]
video_path = X_test[idx].split(' ')[0]
segcnt = len(X_test[idx].split(' '))
starti = endi = 0
for i in range(1, segcnt):
seginfo = X_test[idx].split(' ')[i]
starti = int(seginfo.split(',')[0])
if starti <= endi:
starti = endi + 1
endi = int(seginfo.split(',')[1].split(':')[0])
label = int(seginfo.split(',')[1].split(':')[1])-1
for j in range(starti, endi+1):
video_label.append(label)
if endi != len(video_label):
print 'invalid: endi - %d, len(video_label) - %d'%(endi, len(video_label))
video_fcnt = len(video_label)
if len(video_label)<=depth:
video_olen = len(video_label)
else:
video_olen = depth
is_training = False # Testing
if modality==0: #RGB
X_data_t,y_label = data.prepare_con_rgb_data(video_path, video_fcnt, video_olen, video_label, is_training)
if modality==1: #Depth
X_data_t,y_label = data.prepare_con_depth_data(video_path, video_fcnt, video_olen, video_label, is_training)
if modality==2: #Flow
X_data_t,y_label = data.prepare_con_flow_data(video_path, video_fcnt, video_olen, video_label, is_training)
y_bound = np.zeros((len(y_label),), dtype=np.int32)
for idx in range(2,len(y_label)-2):
if y_label[idx-1]==y_label[idx] and y_label[idx+1]==y_label[idx+2] and y_label[idx]!=y_label[idx+1]:
y_bound[idx-1]=1
y_bound[idx]=1
y_bound[idx+1]=1
y_bound[idx+2]=1
y_bound[0]=y_bound[1]=1
y_bound[len(y_label)-1]=y_bound[len(y_label)-2]=1
yield (np.reshape(X_data_t,(1,video_olen,112,112,3)), y_bound)
## jesterTrainImageGenerator
def jesterTrainImageGenerator(filepath, batch_size, depth, num_classes, modality):
    """Endless shuffled training-batch generator for the Jester dataset.

    modality 0 selects RGB loading, modality 2 optical flow.
    """
    videos, labels = data.load_iso_video_list(filepath)
    sample_ids = np.asarray(np.arange(0, len(labels)), dtype=np.int32)
    labels = np.asarray(labels, dtype=np.int32)
    while True:
        for batch_ids, batch_labels in minibatches(sample_ids, labels,
                                                   batch_size, shuffle=True):
            # Gather per-sample loader arguments for this batch.
            paths, fcnts, olens, train_flags = [], [], [], []
            for sample_id in batch_ids:
                key = '%06d' % sample_id
                paths.append(videos[key]['videopath'])
                fcnts.append(videos[key]['framecnt'])
                olens.append(depth)
                train_flags.append(True)  # training mode
            batch_info = zip(paths, fcnts, olens, train_flags)
            if modality == 0:  # RGB
                clips = threading_data([_ for _ in batch_info], data.prepare_jester_rgb_data)
            if modality == 2:  # Flow
                clips = threading_data([_ for _ in batch_info], data.prepare_jester_flow_data)
            one_hot = keras.utils.to_categorical(batch_labels, num_classes=num_classes)
            yield (clips, one_hot)
## jesterTestImageGenerator
def jesterTestImageGenerator(filepath, batch_size, depth, num_classes, modality):
    """Endless evaluation-batch generator for the Jester dataset.

    Same contract as jesterTrainImageGenerator but without shuffling and
    with augmentation disabled (is_training=False).
    """
    videos, labels = data.load_iso_video_list(filepath)
    sample_ids = np.asarray(np.arange(0, len(labels)), dtype=np.int32)
    labels = np.asarray(labels, dtype=np.int32)
    while True:
        for batch_ids, batch_labels in minibatches(sample_ids, labels,
                                                   batch_size, shuffle=False):
            # Gather per-sample loader arguments for this batch.
            paths, fcnts, olens, train_flags = [], [], [], []
            for sample_id in batch_ids:
                key = '%06d' % sample_id
                paths.append(videos[key]['videopath'])
                fcnts.append(videos[key]['framecnt'])
                olens.append(depth)
                train_flags.append(False)  # evaluation mode
            batch_info = zip(paths, fcnts, olens, train_flags)
            if modality == 0:  # RGB
                clips = threading_data([_ for _ in batch_info], data.prepare_jester_rgb_data)
            if modality == 2:  # Flow
                clips = threading_data([_ for _ in batch_info], data.prepare_jester_flow_data)
            one_hot = keras.utils.to_categorical(batch_labels, num_classes=num_classes)
            yield (clips, one_hot)
| 42.820809 | 116 | 0.625742 |
fa1e033b58ed18536792af9773e6b16680da2d6e | 15,011 | py | Python | reid/scripts/triplet_reid/datasets/lip.py | VisualComputingInstitute/CROWDBOT_perception | df98f3f658c39fb3fa4ac0456f1214f7918009f6 | [
"MIT"
] | 1 | 2022-03-07T06:24:27.000Z | 2022-03-07T06:24:27.000Z | reid/scripts/triplet_reid/datasets/lip.py | VisualComputingInstitute/CROWDBOT_perception | df98f3f658c39fb3fa4ac0456f1214f7918009f6 | [
"MIT"
] | null | null | null | reid/scripts/triplet_reid/datasets/lip.py | VisualComputingInstitute/CROWDBOT_perception | df98f3f658c39fb3fa4ac0456f1214f7918009f6 | [
"MIT"
] | null | null | null | """
There are multiple folders. One for LIP (What we want), one for Fashion Design (Ac), and one for multiple people (CIHP)
Folder Structure
Testing_images/Testing_images/testing_images: Test images
TrainVal_images/TrainVal_images/train_images: train images, ignore text files
TrainVal_images/TrainVal_images/val_images: train images, ignore text files
TrainVal_parsing_annotations/TrainVal_images/train_images: Train segmetation map
TrainVal_parsing_annotations/TrainVal_images/val_images: Val segmentation map
TrainVal_pose_annotations: json files of pose annotation
from source with caching.
"""
import pandas as pd
from logger import get_logger
import os
from .pose_dataset import JointInfo
from datasets import register_dataset
from datasets.utils import HeaderItem
from datasets.pose_dataset import PoseDataset
from builders import transform_builder
import numpy as np
from settings import Config
from evaluation import Evaluation
import torch
from writers.dummy import DummyWriter
from writers.memory import MemoryWriter
from utils import cache_result_on_disk
from metrics import calculate_pckh
from metrics import calc_seg_score
from transforms.flip_lr_with_pairs import FliplrWithPairs
import imgaug as ia
from metrics import fast_hist
def make_joint_info():
    """Build the 16-joint skeleton description used by the LIP pose labels.

    Joint order matches the column order of the LIP pose CSV (see ``COLS``).
    """
    abbrevs = [
        'r_ank', 'r_kne', 'r_hip', 'l_hip', 'l_kne', 'l_ank', 'b_pelv', 'b_spine',
        'b_neck', 'b_head', 'r_wri', 'r_elb', 'r_sho', 'l_sho', 'l_elb', 'l_wri']
    labels = [
        'right ankle', 'right knee', 'right hip', 'left hip', 'left knee',
        'left ankle', 'pelvis', 'spine', 'neck', 'head', 'right wrist',
        'right elbow', 'right shoulder', 'left shoulder', 'left elbow',
        'left wrist']
    info = JointInfo(abbrevs, labels)
    ids = info.ids
    # Limb connectivity used for drawing stick figures.
    info.stick_figure_edges = [
        (ids.l_sho, ids.l_elb), (ids.r_sho, ids.r_elb), (ids.l_elb, ids.l_wri),
        (ids.r_elb, ids.r_wri), (ids.l_hip, ids.l_kne), (ids.r_hip, ids.r_kne),
        (ids.l_kne, ids.l_ank), (ids.r_kne, ids.r_ank), (ids.b_neck, ids.b_head),
        (ids.b_pelv, ids.b_spine)]
    return info
# LIP human-parsing segmentation classes: class id -> label.
# Several classes are lateralized (Left-*/Right-*); see make_seg_info(),
# which pairs them up so labels can be swapped when an image is mirrored.
CLASSES = {
    0: "Background",
    1: "Hat",
    2: "Hair",
    3: "Glove",
    4: "Sunglasses",
    5: "UpperClothes",
    6: "Dress",
    7: "Coat",
    8: "Socks",
    9: "Pants",
    10: "Jumpsuits",
    11: "Scarf",
    12: "Skirt",
    13: "Face",
    14: "Left-arm",
    15: "Right-arm",
    16: "Left-leg",
    17: "Right-leg",
    18: "Left-shoe",
    19: "Right-shoe"
}
class SegInfo(object):
    """Plain container for segmentation metadata.

    A simple class is used instead of a namedtuple because instances
    must survive pickling (the original note: "pickle does not like
    namedtuple").
    """

    def __init__(self, id_to_label, pairs):
        # id_to_label: class id -> human-readable label.
        # pairs: lateralized class id -> id of its mirrored counterpart.
        self.id_to_label, self.pairs = id_to_label, pairs
def make_seg_info():
    """Build the SegInfo for the LIP parsing labels.

    The returned ``pairs`` maps every lateralized class id (``Left-*`` /
    ``Right-*``) to the id of its mirrored counterpart, in both
    directions; non-lateralized classes are omitted.
    """
    id_to_label = CLASSES
    label_to_id = {label: cid for cid, label in id_to_label.items()}
    pairs = {}
    for label, cid in label_to_id.items():
        if label.startswith('Left'):
            mirrored = 'Right' + label[len('Left'):]
        elif label.startswith('Right'):
            mirrored = 'Left' + label[len('Right'):]
        else:
            continue
        pairs[cid] = label_to_id[mirrored]
    return SegInfo(id_to_label, pairs)
# Column names for the LIP pose-annotation CSV: the image id followed by
# (x, y, visibility) triples, one per joint, in the joint order defined by
# make_joint_info().
COLS = ["image_id",
        "r_ank_x", "r_ank_y", "r_ank_v",
        "r_kne_x", "r_kne_y", "r_kne_v",
        "r_hip_x", "r_hip_y", "r_hip_v",
        "l_hip_x", "l_hip_y", "l_hip_v",
        "l_kne_x", "l_kne_y", "l_kne_v",
        "l_ank_x", "l_ank_y", "l_ank_v",
        "b_pel_x", "b_pel_y", "b_pel_v",
        "b_spi_x", "b_spi_y", "b_spi_v",
        "b_nec_x", "b_nec_y", "b_nec_v",
        "b_hea_x", "b_hea_y", "b_hea_v",
        "r_wri_x", "r_wri_y", "r_wri_v",
        "r_elb_x", "r_elb_y", "r_elb_v",
        "r_sho_x", "r_sho_y", "r_sho_v",
        "l_sho_x", "l_sho_y", "l_sho_v",
        "l_elb_x", "l_elb_y", "l_elb_v",
        "l_wri_x", "l_wri_y", "l_wri_v"]
@cache_result_on_disk('cached/lip', [0, 1], forced=False)
def make_dataset(data_path, split="train"):
    """Assemble the LIP dataset records for one split.

    Parameters
    ----------
    data_path : str
        Root directory of the extracted LIP data.
    split : str
        One of ``"train"``, ``"val"`` or ``"test"``.  The test split is
        not implemented and raises ``NotImplementedError``.

    Returns
    -------
    (data, header, info) : tuple
        ``data`` is a list of dicts with keys ``path``, ``coords``,
        ``seg_path`` and ``head_size``; ``header`` and ``info`` carry
        dataset-level metadata.  Results are cached on disk keyed on the
        first two arguments (the decorator above).
    """
    # load images
    logger = get_logger()
    if split == "train":
        img_data_path = os.path.join(data_path, 'train_images')
        seg_data_path = os.path.join(data_path, 'TrainVal_parsing_annotations', 'train_segmentations')
        pose_anno_path = os.path.join(data_path, 'TrainVal_pose_annotations', 'lip_train_set.csv')
    elif split == "val":
        img_data_path = os.path.join(data_path, 'val_images')
        seg_data_path = os.path.join(data_path, 'TrainVal_parsing_annotations', 'val_segmentations')
        pose_anno_path = os.path.join(data_path, 'TrainVal_pose_annotations', 'lip_val_set.csv')
    elif split == "test":
        # TODO: the test split has no segmentation/pose annotations.
        img_data_path = os.path.join(data_path, 'test_images')
        seg_data_path = None
        pose_anno_path = None
        raise NotImplementedError
    pose_anno = pd.read_csv(pose_anno_path, header=0, names=COLS)
    joint_info = make_joint_info()
    data = []
    for index, datum in pose_anno.iterrows():
        # Strip the '.jpg' suffix stored in the CSV to get the bare id.
        image_id = datum['image_id'][:-len('.jpg')]
        img_path = os.path.join(img_data_path, image_id + '.jpg')
        if not os.path.isfile(img_path):
            logger.warning('File %s was not found', img_path)
            continue
        seg_path = os.path.join(seg_data_path, image_id + '.png')
        if not os.path.isfile(seg_path):
            logger.warning('File %s was not found', seg_path)
            continue
        # All columns after image_id are (x, y, visibility) triples.
        # NOTE(review): Series.reshape relies on an old pandas API
        # (removed in modern pandas) — confirm the pinned pandas version.
        coords = datum[1:]
        coords = coords.reshape(-1, 3)
        # drop visual column
        coords = coords[:, [0, 1]]
        head_size = None
        # TODO Is this correct — head size taken as the head-to-neck
        # distance; the assignment above is immediately overwritten.
        head_size = np.linalg.norm(coords[joint_info.ids.b_head] - coords[joint_info.ids.b_neck])
        d = {
            'path': img_path,
            'coords': coords,
            'seg_path': seg_path,
            'head_size': head_size
        }
        data.append(d)
    header = {
        'path': HeaderItem((), ""),
        'coords': HeaderItem((), ""),
        'seg': HeaderItem((), "")
    }
    seg_info = make_seg_info()
    info = {
        'joint_info': joint_info,
        'num_joints': joint_info.n_joints,
        'seg_info': seg_info,
        'num_seg_classes': len(CLASSES)
    }
    return data, header, info
@register_dataset('lip')
class Lip(PoseDataset):
    """
    Look Into Person (LIP) dataset: joint pose estimation and human
    parsing (segmentation).  Registered under the name ``'lip'``.
    """
    def __init__(self, data, header, info, flip_prob, *args, **kwargs):
        """``flip_prob`` is the probability of a label-aware horizontal flip."""
        super().__init__("lip", data, header, info, *args, **kwargs)
        seg_info = info['seg_info']
        joint_info = info['joint_info']
        self.flip_prob = flip_prob
        # Flip augmenter that also swaps Left/Right keypoints and
        # segmentation classes so labels stay consistent after mirroring.
        self.flip_transform = FliplrWithPairs(p=flip_prob,
                                              keypoint_pairs=joint_info.mirror_mapping_pairs,
                                              segmentation_pairs=seg_info.pairs)
    def __getitem__(self, index):
        """Load, augment and normalize one sample (image, seg map, keypoints)."""
        datum = self.data[index]
        datum = datum.copy()
        img = self.loader_fn(datum['path'])
        shape = img.shape
        coords = datum['coords']
        # image is a 3 channel png with identical channels
        seg = np.array(self.loader_fn(datum['seg_path']))[:, :, 0]
        if self.transform is not None:
            # flip transform is outside the pipeline:
            # segmentation label flipping is not yet supported there,
            # so flip before any possible normalization
            num_seg_classes = self.info['num_seg_classes']
            if self.flip_prob > 0:
                # only execute if the probability is greater 0;
                # whether the image is actually flipped is decided by the augmenter
                det_flip = self.flip_transform.to_deterministic()
                #det_flip = self.flip_transform
                img = det_flip.augment_image(img)
                seg = ia.SegmentationMapOnImage(seg, shape=seg.shape, nb_classes=num_seg_classes)
                seg = det_flip.augment_segmentation_maps(seg).get_arr_int()
                keypoints_on_image = ia.KeypointsOnImage.from_coords_array(coords, shape=shape)
                keypoints_on_image = det_flip.augment_keypoints([keypoints_on_image])
                coords = keypoints_on_image[0].get_coords_array()
            # Fix the random state so image/seg/keypoints see the same transform.
            self.transform.to_deterministic()
            img = self.transform.augment_image(img)
            seg = self.transform.augment_segmentation(seg, num_seg_classes)
            # keypoints are augmented relative to the shape of the original image
            coords = self.transform.augment_keypoint(coords, shape)
            # ... then normalized relative to the shape of the augmented image
            coords = self.normalize_pose_keypoints(coords, img.shape)
        # we need to save the shape to restore the original coordinates
        datum['height'] = shape[0]
        datum['width'] = shape[1]
        datum['coords'] = coords
        datum['img'] = img
        # TODO why long?? Otherwise error in loss
        datum['seg'] = np.array(seg, dtype=np.int64)
        return datum
    def __len__(self):
        return len(self.data)
    @staticmethod
    def build(cfg, *args, **kwargs):
        """Construct a Lip dataset from a config dict (factory for the registry)."""
        split = cfg['split']
        evaluate = cfg.get('evaluate', 'both')
        # default to zero to avoid messing up validation
        flip_prob = cfg.get('flip_prob', 0.0)
        data_dir = Config.LIP_DATA
        data, header, info = make_dataset(data_dir, split)
        transform = transform_builder.build(cfg['transform'], info)
        dataset = Lip(data, header, info, flip_prob, transform, *args, **kwargs)
        # TODO very temporary solution:
        # looking for a better way to build the evaluation
        # without passing too many parameters.
        dataset.evaluate_mode = evaluate
        return dataset
    def get_evaluation(self, model):
        """Return the evaluation object matching ``self.evaluate_mode``."""
        pose = segmentation = False
        if self.evaluate_mode == 'pose':
            pose = True
        elif self.evaluate_mode == 'segmentation':
            segmentation = True
        else:
            pose = segmentation = True
        joint_info = self.info['joint_info']
        num_seg_classes = self.info['num_seg_classes']
        if pose and segmentation:
            print("LIP: Pose and Segmentation Evaluation started")
            return LipPoseSegmentationEvaluation(model, joint_info, num_seg_classes)
        elif pose:
            print("LIP: Pose Evaluation started")
            joint_info = self.info['joint_info']
            return LipPoseEvaluation(model, joint_info)
        elif segmentation:
            print("LIP: Segmentation Evaluation started")
            return LipSegmentationEvaluation(model, num_seg_classes)
        raise RuntimeError("Not the expected outputs available")
class LipSegmentationEvaluation(Evaluation):
    """Accumulates a pixel confusion matrix over LIP parsing predictions."""

    def __init__(self, model, num_classes):
        super().__init__("Lip")
        self.num_classes = num_classes
        # Running num_classes x num_classes histogram of (gt, prediction) pixels.
        self.hist = np.zeros((num_classes, num_classes))

    def get_writer(self, output_path):
        # Nothing is persisted; scoring works off the in-memory histogram.
        self.writer = DummyWriter()
        return self.writer

    def before_saving(self, endpoints, data):
        """Fold one batch (batch size must be 1) into the histogram."""
        # Change to Update and remove get_writer function?
        class_map = torch.argmax(endpoints['sem-logits'], dim=1).detach().cpu().numpy()
        # Only a batch size of one is supported here.
        assert class_map.shape[0] == 1
        predicted = class_map[0]
        reference = data['seg'].detach().cpu().numpy()[0]
        self.hist += fast_hist(reference.flatten(), predicted.flatten(), self.num_classes)
        return {}

    def score(self):
        return calc_seg_score(self.hist)
class LipPoseEvaluation(Evaluation):
    """PCKh evaluation of pose predictions on LIP."""
    def __init__(self, model, joint_info):
        super().__init__("Lip")
        self.joint_info = joint_info
    def get_writer(self, output_path):
        # for now do everything in memory
        self.writer = MemoryWriter()
        return self.writer
    def before_saving(self, endpoints, data):
        """Collect prediction and ground truth for later scoring."""
        data_to_write = {
            "pose": endpoints['pose'].cpu(),
            "coords": data['coords'],
            "head_size": data['head_size'],
            "height": data['height'],
            "width": data['width']
        }
        return data_to_write
    @staticmethod
    def _score(pose, coords, height, width, head_size, ids):
        """Compute PCKh scores from stacked predictions and ground truth.

        ``pose`` and ``coords`` are (num_samples, num_joints, 2) arrays of
        normalized coordinates; ``height``/``width``/``head_size`` are
        per-sample 1-D arrays; ``ids`` maps joint short names to indices.
        """
        # no inplace modification of the caller's arrays
        pose = pose.copy()
        coords = coords.copy()
        # coords are between 0 and 1, rescale for correct error;
        # broadcast the per-sample size to all joints
        pose[:, :, 0] *= width[:, None]
        pose[:, :, 1] *= height[:, None]
        coords[:, :, 0] *= width[:, None]
        coords[:, :, 1] *= height[:, None]
        def calc_dist(array1, array2):
            # Per-joint Euclidean distance between the two coordinate sets.
            return np.linalg.norm(array1 - array2, axis=2)
        # TODO ignore head not visible in evaluation
        dist = calc_dist(pose, coords)
        pck_all, pck_joint = calculate_pckh(dist, head_size)
        score = {}
        sn = "PCKh {} @ {}"
        # threshold -> per-joint values; average left/right counterparts.
        for t, v in pck_joint.items():
            score[sn.format(t, "Head")] = (v[ids['b_head']] + v[ids['b_neck']]) / 2
            score[sn.format(t, "Shoulder")] = (v[ids['l_sho']] + v[ids['r_sho']]) / 2
            score[sn.format(t, "Elbow")] = (v[ids['l_elb']] + v[ids['r_elb']]) / 2
            score[sn.format(t, "Wrist")] = (v[ids['l_wri']] + v[ids['r_wri']]) / 2
            score[sn.format(t, "Hip")] = (v[ids['l_hip']] + v[ids['r_hip']]) / 2
            score[sn.format(t, "Knee")] = (v[ids['l_kne']] + v[ids['r_kne']]) / 2
            score[sn.format(t, "Ankle")] = (v[ids['l_ank']] + v[ids['r_ank']]) / 2
        for t, v in pck_all.items():
            score[sn.format(t, "All")] = v
        return score
    def score(self):
        """Concatenate everything collected by the writer and score it."""
        data = self.writer.data
        height = np.concatenate(data['height'])
        width = np.concatenate(data['width'])
        head_size = np.concatenate(data['head_size'])
        pose = np.concatenate(data['pose']) # prediction
        coords = np.concatenate(data['coords']) # gt
        return self._score(pose, coords, height, width, head_size, self.joint_info.ids)
class LipPoseSegmentationEvaluation(Evaluation):
    """Composite evaluation that runs pose and segmentation scoring together."""

    def __init__(self, model, joint_info, num_seg_classes):
        super().__init__("Lip")
        # All real work is delegated to the two single-task evaluations.
        self.pose = LipPoseEvaluation(model, joint_info)
        self.seg = LipSegmentationEvaluation(model, num_seg_classes)

    def get_writer(self, output_path):
        # A single shared in-memory writer serves both sub-evaluations.
        self.writer = MemoryWriter()
        self.seg.writer = self.writer
        self.pose.writer = self.writer
        return self.writer

    def before_saving(self, endpoints, data):
        merged = dict(self.pose.before_saving(endpoints, data))
        merged.update(self.seg.before_saving(endpoints, data))
        return merged

    def score(self):
        combined = dict(self.pose.score())
        combined.update(self.seg.score())
        return combined
| 35.655582 | 119 | 0.609087 |
6288332de5a16f96095efc62d701c7223fdf22e0 | 1,209 | py | Python | dashboard/dashboard/urls.py | kurganITteacher/python-adv | 3ebc048598eea00f12ebdb6a502b2455465a04c0 | [
"Apache-2.0"
] | 1 | 2021-09-02T10:21:35.000Z | 2021-09-02T10:21:35.000Z | dashboard/dashboard/urls.py | kurganITteacher/python-adv | 3ebc048598eea00f12ebdb6a502b2455465a04c0 | [
"Apache-2.0"
] | null | null | null | dashboard/dashboard/urls.py | kurganITteacher/python-adv | 3ebc048598eea00f12ebdb6a502b2455465a04c0 | [
"Apache-2.0"
] | 2 | 2021-03-14T07:44:18.000Z | 2021-04-25T18:18:02.000Z | """dashboard URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from rest_framework.routers import DefaultRouter
import authapp.views as authapp
import mainapp.views as mainapp
# DRF router exposing the REST API endpoints (served under /api/ below).
router = DefaultRouter()
router.register('users', authapp.UserViewSet)
router.register('projects', mainapp.ProjectViewSet)
router.register('project-tasks', mainapp.ProjectTaskViewSet)
urlpatterns = [
    # List views served directly (see mainapp.views for their rendering).
    path('', mainapp.ProjectList.as_view()),
    path('project/tasks/', mainapp.ProjectTaskList.as_view()),
    # REST API: users, projects and project-tasks via the router above.
    path('api/', include(router.urls)),
    path('admin/', admin.site.urls),
]
| 33.583333 | 77 | 0.732837 |
8ac13af62c48b075a1f05ffe5711a71daa42d97b | 2,403 | py | Python | apps/purchases/admin.py | jorgesaw/kstore | 4ec6612eeeb96edb7b7bd374fd0520733c58451c | [
"MIT"
] | null | null | null | apps/purchases/admin.py | jorgesaw/kstore | 4ec6612eeeb96edb7b7bd374fd0520733c58451c | [
"MIT"
] | 5 | 2021-03-19T10:16:00.000Z | 2022-02-10T09:16:32.000Z | apps/purchases/admin.py | jorgesaw/kstore | 4ec6612eeeb96edb7b7bd374fd0520733c58451c | [
"MIT"
] | null | null | null | """Purchases admin."""
# Django
from django.contrib import admin
# Actions Mixin
from apps.utils.admin.actions import (
ActionDownloadData,
ActionFiscalStatus
)
# Admin
from apps.persons.admin import AddressAdmin
# Models
from apps.purchases.models import (
Supplier,
Purchase,
ItemPurchase
)
@admin.register(Supplier)
class SupplierAdmin(admin.ModelAdmin):
    """Supplier admin."""
    # Grouped rows on the change form; field names come from the Supplier
    # model ('movile' is the model's field name, not a typo introduced here).
    fields = ( ('id_card', 'fiscal_id_card', 'last_name'), ('first_name', 'birth_date'), ('movile', 'telephone'), )
    readonly_fields = ('created', 'modified')
    list_display = ('id_card', 'fiscal_id_card', 'full_name', 'movile')
    ordering = ('last_name', 'first_name', 'id_card')
    search_fields = ('id_card', 'fiscal_id_card', 'last_name', 'first_name')
    date_hierarchy = 'modified'
    list_filter = ('last_name',)
    # Supplier addresses are edited inline on the same page.
    inlines = [AddressAdmin,]
class ItemPurchaseAdmin(admin.TabularInline):
    """Inline editor for the line items of a purchase."""
    model = ItemPurchase
    autocomplete_fields = ('product',)
    fields = ('product', 'price', 'quantity', 'subtotal', 'discount', 'total')
    # Computed amounts are shown but not editable.
    readonly_fields = ('subtotal', 'total')
    # NOTE(review): list_display/list_editable are change-list options of
    # ModelAdmin and appear to have no effect on a TabularInline — confirm.
    list_display = ('product', 'price', 'quantity', 'subtotal', 'discount', 'total')
    ordering = ('id',)
    list_editable = ('price', 'quantity', 'discount')
    list_select_related = ('product',)
    # Show one empty extra row for adding a new item.
    extra = 1
@admin.register(Purchase)
class PurchaseAdmin(ActionFiscalStatus, admin.ModelAdmin):
    """Purchase admin (ActionFiscalStatus mixes in the fiscal-status actions)."""
    autocomplete_fields = ('supplier',)
    # Change form: main purchase data plus a collapsible extras section
    # (section titles are user-facing Spanish strings).
    fieldsets = (
        ('Datos compra', {
            'fields': ( ('number_purchase', 'date_purchase',
            'invoice_num', 'invoice_date', 'is_fiscal'), ('supplier', 'subtotal', 'total') )
        }),
        ('Datos complementarios', {
            'classes': ('collapse',),
            'fields': ('discount', 'tax', 'observations'),
        }),
    )
    readonly_fields = ('created', 'modified', 'subtotal', 'total')
    list_display = ('number_purchase', 'date_purchase', 'supplier', 'is_fiscal', 'total')
    ordering = ('date_purchase',)
    search_fields = ('number_purchase', 'supplier__id_card', 'supplier__last_name')
    date_hierarchy = 'date_purchase'
    list_filter = ('date_purchase', 'supplier__id_card', 'supplier__last_name')
    list_select_related = ('supplier',)
    # Bulk actions provided by the ActionFiscalStatus mixin.
    actions = ['fiscal_emited', 'fiscal_not_emited']
    inlines = [ItemPurchaseAdmin,]
| 32.04 | 115 | 0.644611 |
3bf05a75d06b31621d6077b188d997fb5339c675 | 746 | py | Python | horizon/test/test_dashboards/cats/tigers/urls.py | stackhpc/horizon | 0899f67657e0be62dd9e6be327c63bccb4607dc6 | [
"Apache-2.0"
] | 930 | 2015-01-04T08:06:03.000Z | 2022-03-13T18:47:13.000Z | horizon/test/test_dashboards/cats/tigers/urls.py | nyzsirt/horizon | 53dd2dbd39c50b665ebe2d2a877496169f01a13f | [
"Apache-2.0"
] | 106 | 2019-01-18T03:06:55.000Z | 2019-11-29T05:06:18.000Z | horizon/test/test_dashboards/cats/tigers/urls.py | nyzsirt/horizon | 53dd2dbd39c50b665ebe2d2a877496169f01a13f | [
"Apache-2.0"
] | 1,040 | 2015-01-01T18:48:28.000Z | 2022-03-19T08:35:18.000Z | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import url
from horizon.test.test_dashboards.cats.tigers.views import IndexView
# Single route: the tigers panel index page.
urlpatterns = [
    url(r'^$', IndexView.as_view(), name='index'),
]
| 37.3 | 78 | 0.727882 |
03e9dc5ac2ed701156161de3fee9443ae4b47c5d | 1,510 | py | Python | tests/test_integration/test_init_db.py | timohaas/postgraas_server | 2d94e0262414cf860f5fb2d974e05de5249f8b87 | [
"Apache-2.0"
] | 31 | 2016-07-13T20:08:24.000Z | 2022-03-13T04:53:45.000Z | tests/test_integration/test_init_db.py | timohaas/postgraas_server | 2d94e0262414cf860f5fb2d974e05de5249f8b87 | [
"Apache-2.0"
] | 35 | 2016-09-05T12:08:28.000Z | 2020-04-03T17:46:31.000Z | tests/test_integration/test_init_db.py | timohaas/postgraas_server | 2d94e0262414cf860f5fb2d974e05de5249f8b87 | [
"Apache-2.0"
] | 11 | 2016-09-08T20:59:51.000Z | 2021-03-19T16:06:45.000Z | # coding=utf-8
import contextlib
import json
import os
import psycopg2
from psycopg2.sql import SQL
from postgraas_server.configuration import get_user, get_password
# Connection settings for the metadata database used by these tests.
# Values come from the standard libpq environment variables, defaulting
# to a stock local PostgreSQL instance.
CLUSTER_CONFIG = {
    "metadb": {
        "db_name": os.environ.get('PGDATABASE', 'postgres'),
        "db_username": os.environ.get('PGUSER', 'postgres'),
        "db_pwd": os.environ.get('PGPASSWORD', 'postgres'),
        "host": os.environ.get('PGHOST', 'localhost'),
        "port": os.environ.get('PGPORT', '5432'),
    }
}
@contextlib.contextmanager
def _get_db_con():
    """Yield a psycopg2 connection to the metadata DB from CLUSTER_CONFIG.

    Note: psycopg2's ``with connection`` commits/rolls back the enclosed
    transaction on exit but does not close the connection.
    """
    metadb = CLUSTER_CONFIG['metadb']
    connection = psycopg2.connect(
        database=metadb['db_name'],
        user=get_user(CLUSTER_CONFIG),
        host=metadb['host'],
        port=metadb['port'],
        password=get_password(CLUSTER_CONFIG),
    )
    with connection as con:
        yield con
def test_smoke(tmpdir):
    """End-to-end check: init_db.main() creates an empty db_instance table."""
    # Write the cluster config; init_db presumably reads application.cfg
    # from the current working directory — confirmed by the as_cwd() below.
    cfg = tmpdir.join('application.cfg')
    with open(cfg.strpath, "w") as fp:
        json.dump(CLUSTER_CONFIG, fp)
    # Start from a clean slate.
    with _get_db_con() as con:
        with con.cursor() as cur:
            cur.execute(SQL("DROP TABLE IF EXISTS db_instance"))
            con.commit()
    # Import inside the test so module-level setup runs with tmpdir as cwd.
    with tmpdir.as_cwd():
        import postgraas_server.init_db
        postgraas_server.init_db.main()
    # The table must now exist and be empty; drop it again to clean up.
    with _get_db_con() as con:
        with con.cursor() as cur:
            cur.execute(SQL("SELECT * FROM db_instance"))
            assert 0 == len(cur.fetchall())
            cur.execute(SQL("DROP TABLE db_instance"))
            con.commit()
| 26.491228 | 65 | 0.62053 |
68e74e46270c9c28eb7700d5b9d266818f4c5d86 | 4,300 | py | Python | tests/peering/test_id_generation.py | tavaresrodrigo/kopf | 97e1c7a926705a79dabce2931e96a924252b61df | [
"MIT"
] | 855 | 2020-08-19T09:40:38.000Z | 2022-03-31T19:13:29.000Z | tests/peering/test_id_generation.py | tavaresrodrigo/kopf | 97e1c7a926705a79dabce2931e96a924252b61df | [
"MIT"
] | 715 | 2019-12-23T14:17:35.000Z | 2022-03-30T20:54:45.000Z | tests/peering/test_id_generation.py | tavaresrodrigo/kopf | 97e1c7a926705a79dabce2931e96a924252b61df | [
"MIT"
] | 97 | 2019-04-25T09:32:54.000Z | 2022-03-30T10:15:30.000Z | import os
import freezegun
import pytest
from kopf._core.engines.peering import detect_own_id
# Pairs of equally-acceptable identities (hostname-like names); tests use
# them symmetrically, so neither side should win over the other by kind.
SAME_GOOD = [
    ('some-host.example.com', 'other-host.example.com'),
    ('some-host', 'other-host'),
]
# Priorities: A (left) should be selected over B (right).
# NOTE(review): a few pairs are duplicated (e.g. 'some-host.example.com' /
# '1.2.3.4' appears twice) — harmless for parametrize, but possibly
# unintended.
GOOD_BAD = [
    ('some-host.example.com', 'some-host'),
    ('some-host.example.com', '1.2.3.4'),
    ('some-host.example.com', '::1'),
    ('some-host.example.com', '1.0...0.ip6.arpa'),
    ('some-host.example.com', '4.3.2.1.in-addr.arpa'),
    ('some-host.example.com', '1.2.3.4'),
    ('some-host', '1.2.3.4'),
    ('some-host', '::1'),
    ('some-host', '1.0...0.ip6.arpa'),
    ('some-host', '4.3.2.1.in-addr.arpa'),
    ('some-host', '1.2.3.4'),
]
@pytest.fixture(autouse=True)
def _intercept_os_calls(mocker):
    """Stub out user and hostname lookups so every test runs hermetically."""
    for target in ('socket.gethostname', 'socket.gethostbyaddr'):
        mocker.patch(target)
    mocker.patch('getpass.getuser', return_value='some-user')
@pytest.mark.parametrize('manual', [True, False])
def test_from_a_pod_id(mocker, manual):
    """POD_ID from the environment wins over any host-derived identity."""
    mocker.patch.dict(os.environ, POD_ID='some-pod-1')
    mocker.patch('socket.gethostname', return_value='some-host')
    mocker.patch('socket.gethostbyaddr',
                 side_effect=lambda fqdn: (fqdn, [], []))
    assert detect_own_id(manual=manual) == 'some-pod-1'
def test_suffixes_appended(mocker):
    """Non-manual ids get timestamp and random suffixes appended."""
    mocker.patch('socket.gethostname', return_value='some-host')
    mocker.patch('socket.gethostbyaddr',
                 side_effect=lambda fqdn: (fqdn, [], []))
    mocker.patch('random.choices', return_value='random-str')
    with freezegun.freeze_time('2020-12-31T23:59:59.123456'):
        generated = detect_own_id(manual=False)
    assert generated == 'some-user@some-host/20201231235959/random-str'
def test_suffixes_ignored(mocker):
    """Manual ids contain only user@host, with no extra suffixes."""
    mocker.patch('socket.gethostname', return_value='some-host')
    mocker.patch('socket.gethostbyaddr',
                 side_effect=lambda fqdn: (fqdn, [], []))
    assert detect_own_id(manual=True) == 'some-user@some-host'
@pytest.mark.parametrize('good1, good2', SAME_GOOD)
def test_good_hostnames_over_good_aliases__symmetric(mocker, good1, good2):
    """When hostname and alias are both good, the hostname is preferred."""
    for hostname, alias in ((good1, good2), (good2, good1)):
        mocker.patch('socket.gethostname', return_value=hostname)
        # Bind `alias` as a default arg so each lambda keeps its own value.
        mocker.patch('socket.gethostbyaddr',
                     side_effect=lambda fqdn, alias=alias: (fqdn, [alias], []))
        assert detect_own_id(manual=True) == f'some-user@{hostname}'
@pytest.mark.parametrize('good1, good2', SAME_GOOD)
def test_good_aliases_over_good_addresses__symmetric(mocker, good1, good2):
    """With a useless hostname, a good alias beats a good address."""
    for alias, address in ((good1, good2), (good2, good1)):
        mocker.patch('socket.gethostname', return_value='localhost')
        # Default-arg binding keeps each iteration's values out of the closure.
        mocker.patch('socket.gethostbyaddr',
                     side_effect=lambda fqdn, a=alias, ip=address: (fqdn, [a], [ip]))
        assert detect_own_id(manual=True) == f'some-user@{alias}'
@pytest.mark.parametrize('good, bad', GOOD_BAD)
def test_good_aliases_over_bad_hostnames(mocker, good, bad):
    """A good alias is preferred over a bad hostname."""
    mocker.patch('socket.gethostname', return_value=bad)
    mocker.patch('socket.gethostbyaddr',
                 side_effect=lambda fqdn: (fqdn, [good], []))
    assert detect_own_id(manual=True) == f'some-user@{good}'
@pytest.mark.parametrize('good, bad', GOOD_BAD)
def test_good_addresses_over_bad_aliases(mocker, good, bad):
    """A good address is preferred over a bad alias."""
    mocker.patch('socket.gethostname', return_value='localhost')
    mocker.patch('socket.gethostbyaddr',
                 side_effect=lambda fqdn: (fqdn, [bad], [good]))
    assert detect_own_id(manual=True) == f'some-user@{good}'
@pytest.mark.parametrize('fqdn', [
    'my-host',
    'my-host.local',
    'my-host.localdomain',
    'my-host.local.localdomain',
    'my-host.localdomain.local',
])
def test_useless_suffixes_removed(mocker, fqdn):
    """Uninformative .local/.localdomain suffixes are stripped from the host."""
    mocker.patch('socket.gethostname', return_value=fqdn)
    mocker.patch('socket.gethostbyaddr',
                 side_effect=lambda name: (name, [], []))
    assert detect_own_id(manual=True) == 'some-user@my-host'
| 37.391304 | 91 | 0.696279 |
d9fb186af22e8036d6f1eaf15e0a15da2a2e516d | 15,172 | py | Python | numpydoc/docscrape_sphinx.py | rsumner33/numpydoc | 7c8125430af3e6511d67d10d3913dba05d95f4fd | [
"BSD-2-Clause"
] | null | null | null | numpydoc/docscrape_sphinx.py | rsumner33/numpydoc | 7c8125430af3e6511d67d10d3913dba05d95f4fd | [
"BSD-2-Clause"
] | null | null | null | numpydoc/docscrape_sphinx.py | rsumner33/numpydoc | 7c8125430af3e6511d67d10d3913dba05d95f4fd | [
"BSD-2-Clause"
] | null | null | null | from __future__ import division, absolute_import, print_function
import sys
import re
import inspect
import textwrap
import pydoc
import collections
import os
from jinja2 import FileSystemLoader
from jinja2.sandbox import SandboxedEnvironment
import sphinx
from sphinx.jinja2glue import BuiltinTemplateLoader
from .docscrape import NumpyDocString, FunctionDoc, ClassDoc
# Python 2/3 compatibility shim: sixu() is the identity on Python 3 and
# decodes a byte string to unicode (with escape handling) on Python 2.
if sys.version_info[0] >= 3:
    sixu = lambda s: s
else:
    sixu = lambda s: unicode(s, 'unicode_escape')
# Matches example code that imports matplotlib; used by _str_examples to
# decide whether an Examples section should be wrapped in a plot:: directive.
IMPORT_MATPLOTLIB_RE = r'\b(import +matplotlib|from +matplotlib +import)\b'
class SphinxDocString(NumpyDocString):
    def __init__(self, docstring, config={}):
        """Parse *docstring* and store the numpydoc rendering options.

        NOTE(review): the mutable default ``config={}`` is safe here
        because the dict is only read (via ``.get``), never mutated.
        ``NumpyDocString.__init__`` is called explicitly rather than via
        ``super()`` — presumably so subclasses that also mix in
        FunctionDoc/ClassDoc do not dispatch there; confirm before
        changing.
        """
        NumpyDocString.__init__(self, docstring, config=config)
        self.load_config(config)
    def load_config(self, config):
        """Read rendering options from *config*, applying defaults.

        If no Jinja template is supplied, the bundled
        ``templates/numpydoc_docstring.rst`` next to this module is
        loaded in a sandboxed Jinja environment.
        """
        self.use_plots = config.get('use_plots', False)
        self.use_blockquotes = config.get('use_blockquotes', False)
        self.class_members_toctree = config.get('class_members_toctree', True)
        self.template = config.get('template', None)
        if self.template is None:
            template_dirs = [os.path.join(os.path.dirname(__file__), 'templates')]
            template_loader = FileSystemLoader(template_dirs)
            template_env = SandboxedEnvironment(loader=template_loader)
            self.template = template_env.get_template('numpydoc_docstring.rst')
    # string conversion routines
    def _str_header(self, name, symbol='`'):
        """Render a section heading as an RST ``.. rubric::`` directive.

        ``symbol`` is unused here; presumably kept for call-compatibility
        with the plain-text renderer's ``_str_header`` — TODO confirm.
        """
        return ['.. rubric:: ' + name, '']
    def _str_field_list(self, name):
        """Open an RST field list for *name* (e.g. ``:Parameters:``)."""
        return [':' + name + ':']
def _str_indent(self, doc, indent=4):
out = []
for line in doc:
out += [' '*indent + line]
return out
def _str_signature(self):
return ['']
if self['Signature']:
return ['``%s``' % self['Signature']] + ['']
else:
return ['']
    def _str_summary(self):
        """Return the one-line summary followed by a blank line."""
        return self['Summary'] + ['']
    def _str_extended_summary(self):
        """Return the extended summary followed by a blank line."""
        return self['Extended Summary'] + ['']
    def _str_returns(self, name='Returns'):
        """Render a Returns-style section (also used for Yields).

        Names are bold; descriptions are indented (blockquoted when
        ``use_blockquotes`` is set).  Empty descriptions become an RST
        comment (``..``) so the definition list stays well-formed.
        """
        typed_fmt = '**%s** : %s'
        untyped_fmt = '**%s**'
        out = []
        if self[name]:
            out += self._str_field_list(name)
            out += ['']
            for param, param_type, desc in self[name]:
                if param_type:
                    out += self._str_indent([typed_fmt % (param.strip(),
                                                          param_type)])
                else:
                    out += self._str_indent([untyped_fmt % param.strip()])
                if desc and self.use_blockquotes:
                    # Blank line turns the following indent into a blockquote.
                    out += ['']
                elif not desc:
                    desc = ['..']
                out += self._str_indent(desc, 8)
                out += ['']
        return out
def _escape_args_and_kwargs(self, name):
if name[:2] == '**':
return r'\*\*' + name[2:]
elif name[:1] == '*':
return r'\*' + name[1:]
else:
return name
    def _process_param(self, param, desc, fake_autosummary):
        """Determine how to display a parameter

        Emulates autosummary behavior if fake_autosummary

        Parameters
        ----------
        param : str
            The name of the parameter
        desc : list of str
            The parameter description as given in the docstring. This is
            ignored when autosummary logic applies.
        fake_autosummary : bool
            If True, autosummary-style behaviour will apply for params
            that are attributes of the class and have a docstring.

        Returns
        -------
        display_param : str
            The marked up parameter name for display. This may include a link
            to the corresponding attribute's own documentation.
        desc : list of str
            A list of description lines. This may be identical to the input
            ``desc``, if ``autosum is None`` or ``param`` is not a class
            attribute, or it will be a summary of the class attribute's
            docstring.

        Notes
        -----
        This does not have the autosummary functionality to display a method's
        signature, and hence is not used to format methods.  It may be
        complicated to incorporate autosummary's signature mangling, as it
        relies on Sphinx's plugin mechanism.
        """
        display_param = '**%s**' % param
        if not fake_autosummary:
            return display_param, desc
        # Only attributes that can carry their own docstring qualify for
        # autosummary-style linking.
        param_obj = getattr(self._obj, param, None)
        if not (callable(param_obj)
                or isinstance(param_obj, property)
                or inspect.isgetsetdescriptor(param_obj)):
            param_obj = None
        obj_doc = pydoc.getdoc(param_obj)
        if not (param_obj and obj_doc):
            return display_param, desc
        prefix = getattr(self, '_name', '')
        if prefix:
            # NOTE(review): autosum_prefix is computed but never used
            # below — apparently a leftover; confirm before removing.
            autosum_prefix = '~%s.' % prefix
            link_prefix = '%s.' % prefix
        else:
            autosum_prefix = ''
            link_prefix = ''
        # Referenced object has a docstring: link the name to it.
        display_param = ':obj:`%s <%s%s>`' % (param,
                                              link_prefix,
                                              param)
        if obj_doc:
            # Overwrite desc. Take summary logic of autosummary:
            # first paragraph, then first sentence if one is found.
            desc = re.split('\n\s*\n', obj_doc.strip(), 1)[0]
            # XXX: Should this have DOTALL?
            # It does not in autosummary
            m = re.search(r"^([A-Z].*?\.)(?:\s|$)",
                          ' '.join(desc.split()))
            if m:
                desc = m.group(1).strip()
            else:
                desc = desc.partition('\n')[0]
            desc = desc.split('\n')
        return display_param, desc
    def _str_param_list(self, name, fake_autosummary=False):
        """Generate RST for a listing of parameters or similar

        Parameter names are displayed as bold text, and descriptions
        are in blockquotes. Descriptions may therefore contain block
        markup as well.

        Parameters
        ----------
        name : str
            Section name (e.g. Parameters)
        fake_autosummary : bool
            When True, the parameter names may correspond to attributes of the
            object being documented, usually ``property`` instances on a class.
            In this case, names will be linked to fuller descriptions.

        Returns
        -------
        rst : list of str
        """
        out = []
        if self[name]:
            out += self._str_field_list(name)
            out += ['']
            for param, param_type, desc in self[name]:
                # Delegates name markup/linking and summary extraction.
                display_param, desc = self._process_param(param, desc,
                                                          fake_autosummary)
                if param_type:
                    out += self._str_indent(['%s : %s' % (display_param,
                                                          param_type)])
                else:
                    out += self._str_indent([display_param])
                if desc and self.use_blockquotes:
                    out += ['']
                elif not desc:
                    # empty definition
                    desc = ['..']
                out += self._str_indent(desc, 8)
                out += ['']
        return out
@property
def _obj(self):
if hasattr(self, '_cls'):
return self._cls
elif hasattr(self, '_f'):
return self._f
return None
    def _str_member_list(self, name):
        """
        Generate a member listing, autosummary:: table where possible,
        and a table where not.

        Members with their own docstring go into an ``autosummary::``
        block (so Sphinx links them); the rest are rendered as a simple
        two-column RST table of name and description.
        """
        out = []
        if self[name]:
            out += ['.. rubric:: %s' % name, '']
            prefix = getattr(self, '_name', '')
            if prefix:
                prefix = '~%s.' % prefix
            autosum = []
            others = []
            for param, param_type, desc in self[name]:
                param = param.strip()
                # Check if the referenced member can have a docstring or not
                param_obj = getattr(self._obj, param, None)
                if not (callable(param_obj)
                        or isinstance(param_obj, property)
                        or inspect.isdatadescriptor(param_obj)):
                    param_obj = None
                if param_obj and pydoc.getdoc(param_obj):
                    # Referenced object has a docstring
                    autosum += ["   %s%s" % (prefix, param)]
                else:
                    others.append((param, param_type, desc))
            if autosum:
                out += ['.. autosummary::']
                if self.class_members_toctree:
                    out += ['   :toctree:']
                out += [''] + autosum
            if others:
                # Simple RST table: column 0 sized to the longest bolded name.
                maxlen_0 = max(3, max([len(x[0]) + 4 for x in others]))
                hdr = sixu("=") * maxlen_0 + sixu("  ") + sixu("=") * 10
                fmt = sixu('%%%ds  %%s  ') % (maxlen_0,)
                out += ['', '', hdr]
                for param, param_type, desc in others:
                    desc = sixu(" ").join(x.strip() for x in desc).strip()
                    if param_type:
                        desc = "(%s) %s" % (param_type, desc)
                    out += [fmt % ("**" + param.strip() + "**", desc)]
                out += [hdr]
            out += ['']
        return out
def _str_section(self, name):
out = []
if self[name]:
out += self._str_header(name)
content = textwrap.dedent("\n".join(self[name])).split("\n")
out += content
out += ['']
return out
def _str_see_also(self, func_role):
out = []
if self['See Also']:
see_also = super(SphinxDocString, self)._str_see_also(func_role)
out = ['.. seealso::', '']
out += self._str_indent(see_also[2:])
return out
def _str_warnings(self):
out = []
if self['Warnings']:
out = ['.. warning::', '']
out += self._str_indent(self['Warnings'])
out += ['']
return out
def _str_index(self):
idx = self['index']
out = []
if len(idx) == 0:
return out
out += ['.. index:: %s' % idx.get('default', '')]
for section, references in idx.items():
if section == 'default':
continue
elif section == 'refguide':
out += [' single: %s' % (', '.join(references))]
else:
out += [' %s: %s' % (section, ','.join(references))]
out += ['']
return out
def _str_references(self):
out = []
if self['References']:
out += self._str_header('References')
if isinstance(self['References'], str):
self['References'] = [self['References']]
out.extend(self['References'])
out += ['']
# Latex collects all references to a separate bibliography,
# so we need to insert links to it
if sphinx.__version__ >= "0.6":
out += ['.. only:: latex', '']
else:
out += ['.. latexonly::', '']
items = []
for line in self['References']:
m = re.match(r'.. \[([a-z0-9._-]+)\]', line, re.I)
if m:
items.append(m.group(1))
out += [' ' + ", ".join(["[%s]_" % item for item in items]), '']
return out
def _str_examples(self):
examples_str = "\n".join(self['Examples'])
if (self.use_plots and re.search(IMPORT_MATPLOTLIB_RE, examples_str)
and 'plot::' not in examples_str):
out = []
out += self._str_header('Examples')
out += ['.. plot::', '']
out += self._str_indent(self['Examples'])
out += ['']
return out
else:
return self._str_section('Examples')
def __str__(self, indent=0, func_role="obj"):
ns = {
'signature': self._str_signature(),
'index': self._str_index(),
'summary': self._str_summary(),
'extended_summary': self._str_extended_summary(),
'parameters': self._str_param_list('Parameters'),
'returns': self._str_returns('Returns'),
'yields': self._str_returns('Yields'),
'other_parameters': self._str_param_list('Other Parameters'),
'raises': self._str_param_list('Raises'),
'warns': self._str_param_list('Warns'),
'warnings': self._str_warnings(),
'see_also': self._str_see_also(func_role),
'notes': self._str_section('Notes'),
'references': self._str_references(),
'examples': self._str_examples(),
'attributes': self._str_param_list('Attributes',
fake_autosummary=True),
'methods': self._str_member_list('Methods'),
}
ns = dict((k, '\n'.join(v)) for k, v in ns.items())
rendered = self.template.render(**ns)
return '\n'.join(self._str_indent(rendered.split('\n'), indent))
class SphinxFunctionDoc(SphinxDocString, FunctionDoc):
    """Sphinx-flavoured docstring wrapper for a function or method."""

    def __init__(self, obj, doc=None, config=None):
        # None instead of a mutable {} default: all calls would otherwise
        # share (and could see mutations of) one module-level dict.
        config = {} if config is None else config
        self.load_config(config)
        FunctionDoc.__init__(self, obj, doc=doc, config=config)
class SphinxClassDoc(SphinxDocString, ClassDoc):
    """Sphinx-flavoured docstring wrapper for a class."""

    def __init__(self, obj, doc=None, func_doc=None, config=None):
        # None instead of a mutable {} default (shared-dict pitfall).
        config = {} if config is None else config
        self.load_config(config)
        # NOTE(review): func_doc is accepted but deliberately not forwarded
        # (ClassDoc always gets func_doc=None) — confirm before changing.
        ClassDoc.__init__(self, obj, doc=doc, func_doc=None, config=config)
class SphinxObjDoc(SphinxDocString):
    """Sphinx-flavoured docstring wrapper for an arbitrary object whose
    docstring text is supplied explicitly."""

    def __init__(self, obj, doc=None, config=None):
        self._f = obj
        # None instead of a mutable {} default (shared-dict pitfall).
        config = {} if config is None else config
        self.load_config(config)
        SphinxDocString.__init__(self, doc, config=config)
def get_doc_object(obj, what=None, doc=None, config={}, builder=None):
    """Return the appropriate Sphinx docstring wrapper for *obj*.

    *what* ('class' / 'module' / 'function' / 'object') is inferred from
    *obj* when not given.  *config* is copied before use so that neither
    the caller's dict nor the shared default is mutated.
    """
    if what is None:
        if inspect.isclass(obj):
            what = 'class'
        elif inspect.ismodule(obj):
            what = 'module'
        elif callable(obj):
            # callable(obj) replaces isinstance(obj, collections.Callable),
            # which breaks on Python 3.10+ (the ABC moved to collections.abc).
            what = 'function'
        else:
            what = 'object'
    # Work on a copy: the 'template' insertion below previously leaked into
    # the caller's dict and into the shared {} default argument.
    config = dict(config)
    template_dirs = [os.path.join(os.path.dirname(__file__), 'templates')]
    if builder is not None:
        template_loader = BuiltinTemplateLoader()
        template_loader.init(builder, dirs=template_dirs)
    else:
        template_loader = FileSystemLoader(template_dirs)
    template_env = SandboxedEnvironment(loader=template_loader)
    config['template'] = template_env.get_template('numpydoc_docstring.rst')
    if what == 'class':
        return SphinxClassDoc(obj, func_doc=SphinxFunctionDoc, doc=doc,
                              config=config)
    elif what in ('function', 'method'):
        return SphinxFunctionDoc(obj, doc=doc, config=config)
    else:
        if doc is None:
            doc = pydoc.getdoc(obj)
        return SphinxObjDoc(obj, doc, config=config)
| 35.283721 | 82 | 0.522937 |
40a9bb904b0862f6df3efa388a014a8e0a218a9f | 2,765 | py | Python | blocks.py | innova-foundation/snakes-on-a-chain | 243a1e5739f61ee5f72b13dcdee2ca8ede11a776 | [
"MIT"
] | null | null | null | blocks.py | innova-foundation/snakes-on-a-chain | 243a1e5739f61ee5f72b13dcdee2ca8ede11a776 | [
"MIT"
] | null | null | null | blocks.py | innova-foundation/snakes-on-a-chain | 243a1e5739f61ee5f72b13dcdee2ca8ede11a776 | [
"MIT"
] | null | null | null | from innovarpc.authproxy import AuthServiceProxy, JSONRPCException
import time
import sys
import datetime
import urllib
import json
from influxdb import InfluxDBClient
# rpc_user and rpc_password are set in the innova.conf file
rpc_connection = AuthServiceProxy("http://%s:%s@127.0.0.1:14531"%("rpcuser", "rpcpassword"))
# Smoke-test the RPC connection: fail fast here if innovad is unreachable.
blocktest = rpc_connection.getblockcount()
print(blocktest)
#for i in range(3):
#    print(i)
#    block = rpc_connection.getblockbynumber(i)
#    print(block)
# Configure InfluxDB connection variables
host = "127.0.0.1" # My Ubuntu NUC
port = 8086 # default port
user = "admin" # the user/password with write access
password = "admin"
dbname = "blocks" # the database we created earlier
interval = 60 # Sample period in seconds (currently unused below)
# Create the InfluxDB client object
client = InfluxDBClient(host, port, user, password, dbname)
# think of measurement as a SQL table, it's not...but...
measurement = "measurement"
# location will be used as a grouping tag later
blockchain = "innova"
# Run until you get a ctrl^c
# One-shot exporter: read the current chain tip over JSON-RPC and write a
# single InfluxDB measurement point.  (The commented-out range loop from
# earlier revisions has been dropped, and the duplicate 'import time' was
# removed — 'time' is already imported at the top of the file.)

def _block_fields(block):
    """Map an innovad block dict onto the InfluxDB field set.

    'mint' and 'difficulty' are coerced to int/float; every other field is
    stored exactly as returned by the RPC call.  'nextblockhash' is omitted
    (the chain tip has none), matching the previous behaviour.
    """
    fields = {
        name: block[name]
        for name in (
            'hash', 'size', 'height', 'version', 'merkleroot', 'time',
            'nonce', 'bits', 'blocktrust', 'chaintrust', 'chainwork',
            'flags', 'proofhash', 'entropybit', 'modifier',
            'modifierchecksum',
        )
    }
    fields['mint'] = int(block['mint'])
    fields['difficulty'] = float(block['difficulty'])
    return fields


blockcount = rpc_connection.getblockcount()
block = rpc_connection.getblockbynumber(blockcount)
fields = _block_fields(block)
data = [
    {
        "measurement": measurement,
        "tags": {
            "blockchain": blockchain,
        },
        # InfluxDB expects nanoseconds; the block timestamp is in seconds.
        "time": block['time'] * 1000000000,
        "fields": fields,
    }
]
# Send the JSON data to InfluxDB
print(fields['difficulty'])
client.write_points(data)
| 27.107843 | 92 | 0.657866 |
327342a200bf219c37182dcc40240c6253e46107 | 14,014 | py | Python | onegreek/old.comments/migrations/0001_initial.py | goldhand/onegreek | 1ad105f15608284a9e80802734f0c6222413a4a0 | [
"BSD-3-Clause"
] | 1 | 2019-06-13T11:46:08.000Z | 2019-06-13T11:46:08.000Z | onegreek/old.comments/migrations/0001_initial.py | goldhand/onegreek | 1ad105f15608284a9e80802734f0c6222413a4a0 | [
"BSD-3-Clause"
] | null | null | null | onegreek/old.comments/migrations/0001_initial.py | goldhand/onegreek | 1ad105f15608284a9e80802734f0c6222413a4a0 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South migration 0001: create the Comment and CommentFlag tables."""
    def forwards(self, orm):
        """Create 'comments' and 'django_comment_flags' plus the
        (user, comment, flag) unique constraint."""
        # Adding model 'Comment'
        db.create_table('comments', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('content_type', self.gf('django.db.models.fields.related.ForeignKey')(related_name='content_type_set_for_comment', to=orm['contenttypes.ContentType'])),
            ('object_pk', self.gf('django.db.models.fields.TextField')()),
            ('site', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['sites.Site'])),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='comment_comments', null=True, to=orm['users.User'])),
            ('user_name', self.gf('django.db.models.fields.CharField')(max_length=50, blank=True)),
            ('user_email', self.gf('django.db.models.fields.EmailField')(max_length=75, blank=True)),
            ('user_url', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
            ('comment', self.gf('django.db.models.fields.TextField')(max_length=3000)),
            ('submit_date', self.gf('django.db.models.fields.DateTimeField')(default=None)),
            ('ip_address', self.gf('django.db.models.fields.GenericIPAddressField')(max_length=39, null=True, blank=True)),
            ('is_public', self.gf('django.db.models.fields.BooleanField')(default=True)),
            ('is_removed', self.gf('django.db.models.fields.BooleanField')(default=False)),
        ))
        db.send_create_signal(u'comments', ['Comment'])
        # Adding model 'CommentFlag'
        db.create_table('django_comment_flags', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(related_name='comment_flags', to=orm['users.User'])),
            ('comment', self.gf('django.db.models.fields.related.ForeignKey')(related_name='flags', to=orm['comments.Comment'])),
            ('flag', self.gf('django.db.models.fields.CharField')(max_length=30, db_index=True)),
            ('flag_date', self.gf('django.db.models.fields.DateTimeField')(default=None)),
        ))
        db.send_create_signal(u'comments', ['CommentFlag'])
        # Adding unique constraint on 'CommentFlag', fields ['user', 'comment', 'flag']
        db.create_unique('django_comment_flags', ['user_id', 'comment_id', 'flag'])
    def backwards(self, orm):
        """Reverse of forwards(): drop the constraint first, then both tables."""
        # Removing unique constraint on 'CommentFlag', fields ['user', 'comment', 'flag']
        db.delete_unique('django_comment_flags', ['user_id', 'comment_id', 'flag'])
        # Deleting model 'Comment'
        db.delete_table('comments')
        # Deleting model 'CommentFlag'
        db.delete_table('django_comment_flags')
    # Frozen ORM state (auto-generated by South): a snapshot of every model
    # this migration may reference, keyed by 'app.modelname'.  Do not edit
    # by hand.
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'chapters.chapter': {
            'Meta': {'object_name': 'Chapter'},
            u'_awards_excerpt': ('django.db.models.fields.TextField', [], {}),
            u'_description_excerpt': ('django.db.models.fields.TextField', [], {}),
            u'_philanthropy_excerpt': ('django.db.models.fields.TextField', [], {}),
            u'_potential_new_members_excerpt': ('django.db.models.fields.TextField', [], {}),
            'awards': ('model_utils.fields.SplitField', [], {u'no_excerpt_field': 'True'}),
            'cost': ('django.db.models.fields.IntegerField', [], {}),
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
            'description': ('model_utils.fields.SplitField', [], {u'no_excerpt_field': 'True'}),
            'facebook': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            'fb_status': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'fraternity': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['fraternities.Fraternity']", 'null': 'True', 'blank': 'True'}),
            'gpa': ('django.db.models.fields.FloatField', [], {}),
            'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
            'philanthropy': ('model_utils.fields.SplitField', [], {u'no_excerpt_field': 'True'}),
            'potential_new_members': ('model_utils.fields.SplitField', [], {u'no_excerpt_field': 'True'}),
            'slug': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True', 'blank': 'True'}),
            'status': ('model_utils.fields.StatusField', [], {'default': "'excellence'", 'max_length': '100', u'no_check_for_status': 'True'}),
            'status_changed': ('model_utils.fields.MonitorField', [], {'default': 'datetime.datetime.now', u'monitor': "u'status'"}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
            'university': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['universities.University']", 'null': 'True', 'blank': 'True'})
        },
        u'comments.comment': {
            'Meta': {'ordering': "('submit_date',)", 'object_name': 'Comment', 'db_table': "'comments'"},
            'comment': ('django.db.models.fields.TextField', [], {'max_length': '3000'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'content_type_set_for_comment'", 'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True', 'blank': 'True'}),
            'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_removed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'object_pk': ('django.db.models.fields.TextField', [], {}),
            'site': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sites.Site']"}),
            'submit_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'comment_comments'", 'null': 'True', 'to': u"orm['users.User']"}),
            'user_email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'user_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'user_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
        },
        u'comments.commentflag': {
            'Meta': {'unique_together': "[('user', 'comment', 'flag')]", 'object_name': 'CommentFlag', 'db_table': "'django_comment_flags'"},
            'comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'flags'", 'to': u"orm['comments.Comment']"}),
            'flag': ('django.db.models.fields.CharField', [], {'max_length': '30', 'db_index': 'True'}),
            'flag_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'comment_flags'", 'to': u"orm['users.User']"})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'fraternities.fraternity': {
            'Meta': {'object_name': 'Fraternity'},
            u'_description_excerpt': ('django.db.models.fields.TextField', [], {}),
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
            'description': ('model_utils.fields.SplitField', [], {u'no_excerpt_field': 'True'}),
            'facebook': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            'fb_status': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'gpa': ('django.db.models.fields.FloatField', [], {}),
            'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
            'slug': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '500'})
        },
        u'sites.site': {
            'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
            'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'universities.university': {
            'Meta': {'object_name': 'University'},
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
            'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
            'slug': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '500'})
        },
        u'users.user': {
            'Meta': {'object_name': 'User'},
            'chapter': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['chapters.Chapter']", 'null': 'True', 'blank': 'True'}),
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'gpa': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'highschool_gpa': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'hometown': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'major': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'phone': ('django.db.models.fields.BigIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'university_email': ('django.db.models.fields.EmailField', [], {'max_length': '255', 'blank': 'True'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
            'year': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        }
    }
complete_apps = ['comments'] | 78.290503 | 187 | 0.582489 |
057802d483edd0ae074d6f518e212449f90cd92d | 7,047 | py | Python | venv/lib/python2.7/site-packages/ansible/module_utils/network/common/network.py | haind27/test01 | 7f86c0a33eb0874a6c3f5ff9a923fd0cfc8ef852 | [
"MIT"
] | 37 | 2017-08-15T15:02:43.000Z | 2021-07-23T03:44:31.000Z | ansible/ansible/module_utils/network/common/network.py | SergeyCherepanov/ansible | 875711cd2fd6b783c812241c2ed7a954bf6f670f | [
"MIT"
] | 12 | 2018-01-10T05:25:25.000Z | 2021-11-28T06:55:48.000Z | ansible/ansible/module_utils/network/common/network.py | SergeyCherepanov/ansible | 875711cd2fd6b783c812241c2ed7a954bf6f670f | [
"MIT"
] | 49 | 2017-08-15T09:52:13.000Z | 2022-03-21T17:11:54.000Z | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c) 2015 Peter Sprygada, <psprygada@ansible.com>
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
from ansible.module_utils.network.common.parsing import Cli
from ansible.module_utils._text import to_native
from ansible.module_utils.six import iteritems
# Transport/authentication options shared by every network module.
# Credential options fall back to ANSIBLE_NET_* environment variables.
NET_TRANSPORT_ARGS = dict(
    host=dict(required=True),
    port=dict(type='int'),
    username=dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
    password=dict(no_log=True, fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD'])),
    ssh_keyfile=dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE']), type='path'),
    authorize=dict(default=False, fallback=(env_fallback, ['ANSIBLE_NET_AUTHORIZE']), type='bool'),
    auth_pass=dict(no_log=True, fallback=(env_fallback, ['ANSIBLE_NET_AUTH_PASS'])),
    provider=dict(type='dict', no_log=True),
    transport=dict(choices=list()),
    timeout=dict(default=10, type='int')
)
# Extra per-connection arguments, registered via add_argument() below.
NET_CONNECTION_ARGS = dict()
# transport name -> connection class, populated by @register_transport.
NET_CONNECTIONS = dict()
def _transitional_argument_spec():
    """Build a copy of NET_TRANSPORT_ARGS with every option made optional.

    The previous implementation flipped ``required`` on the shared
    NET_TRANSPORT_ARGS value dicts in place, silently corrupting the
    module-level table (e.g. 'host' lost its required=True) for every
    later consumer.  Copying each spec keeps the table pristine.
    """
    argument_spec = {}
    for key, value in iteritems(NET_TRANSPORT_ARGS):
        spec = dict(value)  # shallow copy: never mutate the shared table
        spec['required'] = False
        argument_spec[key] = spec
    return argument_spec
def to_list(val):
    """Coerce *val* to a list: sequences are copied, None becomes an empty
    list, and any other value is wrapped in a single-element list."""
    if val is None:
        return []
    if isinstance(val, (list, tuple)):
        return list(val)
    return [val]
class ModuleStub(object):
    """Minimal stand-in for AnsibleModule: exposes default parameter values
    and a caller-provided fail_json hook."""

    def __init__(self, argument_spec, fail_json):
        # Seed every parameter with its declared default (None when absent).
        self.params = {key: spec.get('default')
                       for key, spec in argument_spec.items()}
        self.fail_json = fail_json
class NetworkError(Exception):
    """Transport-level failure; arbitrary context is kept in ``kwargs``."""

    def __init__(self, msg, **kwargs):
        super(NetworkError, self).__init__(msg)
        self.kwargs = kwargs
class Config(object):
    """Thin facade over a connection's configuration operations."""

    def __init__(self, connection):
        self.connection = connection

    def __call__(self, commands, **kwargs):
        # Normalise to a list of lines before delegating.
        return self.connection.configure(to_list(commands), **kwargs)

    def load_config(self, commands, **kwargs):
        return self.connection.load_config(to_list(commands), **kwargs)

    def get_config(self, **kwargs):
        return self.connection.get_config(**kwargs)

    def save_config(self):
        return self.connection.save_config()
class NetworkModule(AnsibleModule):
    """AnsibleModule variant that owns a transport connection to a device.

    It merges the shared transport/connection argument tables into the
    caller-supplied argument_spec, instantiates the connection class
    registered for the requested transport, and (by default) connects
    immediately.
    """
    def __init__(self, *args, **kwargs):
        # Pop our private flag so AnsibleModule never sees it.
        connect_on_load = kwargs.pop('connect_on_load', True)
        argument_spec = NET_TRANSPORT_ARGS.copy()
        argument_spec['transport']['choices'] = NET_CONNECTIONS.keys()
        argument_spec.update(NET_CONNECTION_ARGS.copy())
        if kwargs.get('argument_spec'):
            argument_spec.update(kwargs['argument_spec'])
        kwargs['argument_spec'] = argument_spec
        super(NetworkModule, self).__init__(*args, **kwargs)
        self.connection = None
        self._cli = None
        self._config = None
        try:
            # Fall back to the connection registered as '__default__' when
            # no explicit transport was requested.
            transport = self.params['transport'] or '__default__'
            cls = NET_CONNECTIONS[transport]
            self.connection = cls()
        except KeyError:
            self.fail_json(msg='Unknown transport or no default transport specified')
        except (TypeError, NetworkError) as exc:
            self.fail_json(msg=to_native(exc), exception=traceback.format_exc())
        if connect_on_load:
            self.connect()
    @property
    def cli(self):
        # Lazily-created Cli helper bound to the live connection.
        if not self.connected:
            self.connect()
        if self._cli:
            return self._cli
        self._cli = Cli(self.connection)
        return self._cli
    @property
    def config(self):
        # Lazily-created Config helper bound to the live connection.
        if not self.connected:
            self.connect()
        if self._config:
            return self._config
        self._config = Config(self.connection)
        return self._config
    @property
    def connected(self):
        # NOTE(review): assumes every registered connection class exposes a
        # private _connected flag — confirm against the transport classes.
        return self.connection._connected
    def _load_params(self):
        """Flatten 'provider' sub-options into the main parameter dict.

        Explicit task-level values win; provider values only fill gaps
        (params that are currently None).
        """
        super(NetworkModule, self)._load_params()
        provider = self.params.get('provider') or dict()
        for key, value in provider.items():
            for args in [NET_TRANSPORT_ARGS, NET_CONNECTION_ARGS]:
                if key in args:
                    if self.params.get(key) is None and value is not None:
                        self.params[key] = value
    def connect(self):
        """Open the transport connection, authorizing if requested."""
        try:
            if not self.connected:
                self.connection.connect(self.params)
                if self.params['authorize']:
                    self.connection.authorize(self.params)
                self.log('connected to %s:%s using %s' % (self.params['host'],
                         self.params['port'], self.params['transport']))
        except NetworkError as exc:
            self.fail_json(msg=to_native(exc), exception=traceback.format_exc())
    def disconnect(self):
        """Close the transport connection if it is open."""
        try:
            if self.connected:
                self.connection.disconnect()
                self.log('disconnected from %s' % self.params['host'])
        except NetworkError as exc:
            self.fail_json(msg=to_native(exc), exception=traceback.format_exc())
def register_transport(transport, default=False):
    """Return a class decorator that records the decorated connection class
    in NET_CONNECTIONS under *transport* (and as '__default__' too when
    *default* is true)."""
    def register(cls):
        keys = [transport] + (['__default__'] if default else [])
        for key in keys:
            NET_CONNECTIONS[key] = cls
        return cls
    return register
def add_argument(key, value):
    # Extend the shared connection argument spec consumed by NetworkModule.
    NET_CONNECTION_ARGS[key] = value
| 34.544118 | 99 | 0.672769 |
d48ece7f814fe24a81b5cb2307677382e2da7cf7 | 1,665 | py | Python | examples/tiger/tiger_observation.py | wecacuee/POMDPy | c23ebf90f5815db4326564110487779961736b60 | [
"MIT"
] | 210 | 2015-04-23T17:05:50.000Z | 2022-03-14T08:00:00.000Z | examples/tiger/tiger_observation.py | wecacuee/POMDPy | c23ebf90f5815db4326564110487779961736b60 | [
"MIT"
] | 15 | 2015-04-13T05:36:14.000Z | 2019-05-06T19:14:50.000Z | examples/tiger/tiger_observation.py | wecacuee/POMDPy | c23ebf90f5815db4326564110487779961736b60 | [
"MIT"
] | 76 | 2016-08-18T03:54:08.000Z | 2022-01-26T09:08:23.000Z | from __future__ import print_function
from pomdpy.discrete_pomdp import DiscreteObservation
class TigerObservation(DiscreteObservation):
    """
    Observation of which door the tiger's roar appears to come from.

    For num_doors = 2, there is an 85 % chance of hearing the roaring coming
    from the tiger door and a 15 % chance of hearing it come from the reward
    door.

    source_of_roar is a one-hot pair:
    source_of_roar[0] = 0 (door 1)
    source_of_roar[1] = 1 (door 2)
    or vice versa
    """
    def __init__(self, source_of_roar):
        # The bin number encodes the roar source; -1 marks the terminal
        # "no observation" case.
        if source_of_roar is not None:
            super(TigerObservation, self).__init__((1, 0)[source_of_roar[0]])
        else:
            super(TigerObservation, self).__init__(-1)
        self.source_of_roar = source_of_roar
    def copy(self):
        """Return a new observation with the same roar source."""
        return TigerObservation(self.source_of_roar)
    def equals(self, other_observation):
        """True when both observations heard the roar from the same door."""
        # Bug fix: the attribute was misspelled 'source_or_roar', so every
        # call raised AttributeError.
        return self.source_of_roar == other_observation.source_of_roar
    def distance_to(self, other_observation):
        """0 when the roar sources match, 1 otherwise."""
        return (1, 0)[self.source_of_roar == other_observation.source_of_roar]
    def hash(self):
        return self.bin_number
    def print_observation(self):
        """Print a human-readable description of the observation."""
        # Delegate to to_string() so the wording lives in one place.
        print(self.to_string())
    def to_string(self):
        """Return a human-readable description of the observation."""
        if self.source_of_roar is None:
            obs = "No observation from entering a terminal state"
        elif self.source_of_roar[0]:
            obs = "Roaring is heard coming from door 1"
        else:
            obs = "Roaring is heard coming from door 2"
        return obs
| 31.415094 | 90 | 0.658859 |
7ee988bb73271d26e1ed0ae96fe34dcf33681e03 | 2,069 | py | Python | src/the_tale/the_tale/game/cards/tt_services.py | devapromix/the-tale | 2a10efd3270734f8cf482b4cfbc5353ef8f0494c | [
"BSD-3-Clause"
] | 1 | 2020-04-02T11:51:20.000Z | 2020-04-02T11:51:20.000Z | src/the_tale/the_tale/game/cards/tt_services.py | devapromix/the-tale | 2a10efd3270734f8cf482b4cfbc5353ef8f0494c | [
"BSD-3-Clause"
] | null | null | null | src/the_tale/the_tale/game/cards/tt_services.py | devapromix/the-tale | 2a10efd3270734f8cf482b4cfbc5353ef8f0494c | [
"BSD-3-Clause"
] | null | null | null |
import smart_imports
smart_imports.all()
class StorageClient(tt_api_storage.Client):
    """Storage-service client specialised for card items.

    Each method builds (or parses) a protobuf message for the tt storage
    protocol; the base class (tt_api_storage.Client) presumably handles
    the actual transport — confirm against its implementation.
    """
    def protobuf_to_item(self, pb_item):
        """Deserialize a storage protobuf item into a (uuid, Card) pair."""
        id = uuid.UUID(pb_item.id)
        card = objects.Card.deserialize(uid=id,
                                        data=s11n.from_json(pb_item.data),
                                        storage=relations.STORAGE(pb_item.storage_id))
        return id, card
    def Create(self, owner_id, card, storage):
        """Build the operation that stores a new card for *owner_id*."""
        return tt_protocol_storage_pb2.OperationCreate(item_id=card.uid.hex,
                                                       owner_id=owner_id,
                                                       storage_id=storage.value,
                                                       base_type=card.item_base_type,
                                                       full_type=card.item_full_type,
                                                       data=s11n.to_json(card.serialize()))
    def Destroy(self, owner_id, card):
        """Build the operation that removes *card* from *owner_id*."""
        return tt_protocol_storage_pb2.OperationDestroy(item_id=card.uid.hex,
                                                        owner_id=owner_id)
    def ChangeOwner(self, old_owner_id, new_owner_id, card_id, new_storage):
        """Build the operation that transfers a card between owners."""
        return tt_protocol_storage_pb2.OperationChangeOwner(item_id=card_id.hex,
                                                            old_owner_id=old_owner_id,
                                                            new_owner_id=new_owner_id,
                                                            new_storage_id=new_storage.value)
    def ChangeStorage(self, owner_id, card, old_storage, new_storage):
        """Build the operation that moves a card between storages."""
        return tt_protocol_storage_pb2.OperationChangeStorage(item_id=card.uid.hex,
                                                              owner_id=owner_id,
                                                              old_storage_id=old_storage.value,
                                                              new_storage_id=new_storage.value)
# Module-level singleton client, configured from project settings.
storage = StorageClient(entry_point=conf.settings.TT_STORAGE_ENTRY_POINT)
| 49.261905 | 95 | 0.491542 |
91f85ad5f9cedb56fd469780e6d4e97cd9f58f23 | 304 | py | Python | api/urls_api.py | flaiming/bookfinder | 59154d106b62680668087e46eca9c0bf9cdaf336 | [
"MIT"
] | null | null | null | api/urls_api.py | flaiming/bookfinder | 59154d106b62680668087e46eca9c0bf9cdaf336 | [
"MIT"
] | null | null | null | api/urls_api.py | flaiming/bookfinder | 59154d106b62680668087e46eca9c0bf9cdaf336 | [
"MIT"
] | null | null | null | from django.urls import path, include
from rest_framework import routers
from .views_api import BookViewSet
from .views import book_by_isbn
# URL namespace for reversing, e.g. reverse('api:...').
app_name = "api"
# DRF router generating the standard CRUD routes for books.
router = routers.SimpleRouter()
router.register(r'books', BookViewSet)
# One extra function-based endpoint, plus everything the router produced.
urlpatterns = [
    path('book-by-isbn/', book_by_isbn),
] + router.urls
| 21.714286 | 40 | 0.766447 |
835f785ae7f9be53ddbb984ebd0d9a5b3e5b5f06 | 1,488 | py | Python | proxies/proxies.py | ivelinahristova/webanalysis | ff73b65799dc1465b9138a8742ea74b9da171c8d | [
"MIT"
] | null | null | null | proxies/proxies.py | ivelinahristova/webanalysis | ff73b65799dc1465b9138a8742ea74b9da171c8d | [
"MIT"
] | 8 | 2021-04-10T17:55:31.000Z | 2021-04-19T14:45:14.000Z | proxies/proxies.py | ivelinahristova/webanalysis | ff73b65799dc1465b9138a8742ea74b9da171c8d | [
"MIT"
] | null | null | null | import random
import re
import requests
from bs4 import BeautifulSoup
from typing import Text
class Proxies:
    """Rotating pool of free HTTP proxies scraped from free-proxy-list.net."""

    # Position of the next proxy to hand out.  Class-level default; an
    # instance attribute shadows it after the first next_proxy() call.
    __proxy_pointer = 0

    def __init__(self) -> None:
        self.__proxies = self.get_proxies()
        # Fixed: the old backslash-continued literals embedded long runs of
        # indentation spaces inside the User-Agent strings; implicit string
        # concatenation keeps them well-formed.
        self.__user_agents = [
            'Mozilla/5.0 CK={} (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) '
            'like Gecko',
            'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 '
            '(KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36',
            'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 '
            '(KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36',
        ]

    def get_proxies(self) -> list:
        """Scrape the plain-text proxy list; return [] on a non-OK response."""
        response = requests.get('https://free-proxy-list.net/')
        if response.status_code != requests.codes.ok:
            return []
        page_soup = BeautifulSoup(response.text, 'html.parser')
        textarea = page_soup.find('textarea').text
        # Raw string: '\d' in a plain literal is an invalid escape sequence
        # (DeprecationWarning on modern Python); ':' needs no escaping.
        return re.findall(r'\d+\.\d+\.\d+\.\d+:\d+', textarea)

    def get_proxy(self) -> Text:
        """Return the current proxy URL, or False when the pool is exhausted."""
        if self.__proxy_pointer == len(self.__proxies):
            return False
        return 'http://' + self.__proxies[self.__proxy_pointer]

    def next_proxy(self) -> None:
        """Advance to the next proxy in the pool."""
        self.__proxy_pointer += 1

    def get_user_agent(self) -> Text:
        """Return a random User-Agent string."""
        return random.choice(self.__user_agents)
| 31.659574 | 78 | 0.589382 |
d296e1fbededbe9f972a3ac3ae3f122063c25ff8 | 419 | py | Python | Chapter09/adapter_1.py | TranQuangDuc/Clean-Code-in-Python | 3c4b4a2fde2ccf28d2e0ec5002b2e1921704164e | [
"MIT"
] | 402 | 2018-08-19T03:09:40.000Z | 2022-03-30T08:10:26.000Z | Chapter09/adapter_1.py | TranQuangDuc/Clean-Code-in-Python | 3c4b4a2fde2ccf28d2e0ec5002b2e1921704164e | [
"MIT"
] | 137 | 2021-01-05T11:21:04.000Z | 2022-03-31T11:10:11.000Z | Chapter09/adapter_1.py | TranQuangDuc/Clean-Code-in-Python | 3c4b4a2fde2ccf28d2e0ec5002b2e1921704164e | [
"MIT"
] | 140 | 2018-09-16T05:47:46.000Z | 2022-03-31T03:20:30.000Z | """Clean Code in Python - Chapter 9: Common Design Patterns
> Adapter (Inheritance)
"""
from _adapter_base import UsernameLookup
class UserSource(UsernameLookup):
    """Adapter by inheritance: presents a (user_id, username) fetch API on
    top of the single-key ``search`` interface of ``UsernameLookup``."""

    def fetch(self, user_id, username):
        # Collapse the two identifiers into the one lookup key search() expects.
        return self.search(self._adapt_arguments(user_id, username))

    @staticmethod
    def _adapt_arguments(user_id, username):
        """Build the "<id>:<username>" key used by the legacy lookup."""
        return f"{user_id}:{username}"
| 24.647059 | 65 | 0.72315 |
1f2c859ea4000e31bdb13da01de7837399015b04 | 4,516 | py | Python | afs2datasource/postgresHelper.py | stacy0416/afs2-datasource | 25d498fc56eb142f2e97ea2d274d534245a4c301 | [
"Apache-2.0"
] | 4 | 2019-07-19T05:40:37.000Z | 2021-03-31T05:49:30.000Z | afs2datasource/postgresHelper.py | stacy0416/afs2-datasource | 25d498fc56eb142f2e97ea2d274d534245a4c301 | [
"Apache-2.0"
] | 1 | 2019-08-13T18:27:40.000Z | 2019-08-13T18:27:40.000Z | afs2datasource/postgresHelper.py | stacy0416/afs2-datasource | 25d498fc56eb142f2e97ea2d274d534245a4c301 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 WISE-PaaS/AFS
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import afs2datasource.constant as const
import afs2datasource.utils as utils
import psycopg2
from psycopg2.extras import execute_values
import pandas as pd
from functools import wraps
class PostgresHelper():
    """Thin wrapper around a psycopg2 connection used by AFS data sources.

    Credentials are read from the AFS ``dataDir`` blob; the connection is
    opened lazily by ``connect`` and reused until ``disconnect``.
    """

    def __init__(self, dataDir):
        # Connection is opened lazily; only credentials are resolved here.
        self._connection = None
        data = utils.get_data_from_dataDir(dataDir)
        self.username, self.password, self.host, self.port, self.database = utils.get_credential_from_dataDir(data)

    async def connect(self):
        """Open the connection once; repeated calls are no-ops."""
        if self._connection is None:
            self._connection = psycopg2.connect(database=self.database, user=self.username,
                                                password=self.password, host=self.host, port=self.port)

    def disconnect(self):
        """Close and forget the connection if one is open."""
        if self._connection:
            self._connection.close()
            self._connection = None

    async def execute_query(self, querySql):
        """Run ``querySql`` and return the whole result set as a DataFrame
        whose columns are named after the cursor description."""
        cursor = self._connection.cursor()
        cursor.execute(querySql)
        columns = [desc[0] for desc in cursor.description]
        data = list(cursor.fetchall())
        data = pd.DataFrame(data=data, columns=columns)
        return data

    def check_query(self, querySql):
        """Validate that the query is a string; returns it unchanged."""
        if not isinstance(querySql, str):
            raise ValueError('querySql is invalid')
        return querySql

    def check_table_name(func):
        """Decorator: validate the ``table_name`` argument of instance methods.

        Bug fix: the original only looked in ``kwargs``, so every positional
        call (``helper.insert('schema.table', ...)``) was wrongly rejected
        with 'table_name is necessary'.  The first positional argument is
        now used when the keyword form is absent.
        """
        @wraps(func)
        def wrapper(self, *args, **kwargs):
            table_name = kwargs.get('table_name')
            if table_name is None and args:
                table_name = args[0]
            if not table_name:
                raise ValueError('table_name is necessary')
            # Require the fully-qualified "{schema}.{table}" form.
            if len(table_name.split('.')) < 2:
                raise ValueError('table_name is invalid. ex.{schema}.{table}')
            return func(self, *args, **kwargs)
        return wrapper

    @check_table_name
    def is_table_exist(self, table_name):
        """Return True when ``{schema}.{table}`` appears in information_schema."""
        cursor = self._connection.cursor()
        table_name = table_name.split('.')
        schema = table_name[0]
        table = table_name[1]
        command = "select * from information_schema.tables"
        cursor.execute(command)
        # Row layout: (catalog, schema, name, ...).
        for d in cursor.fetchall():
            if d[1] == schema and d[2] == table:
                return True
        return False

    def is_file_exist(self, table_name, file_name):
        """File storage is not a Postgres concept; kept for interface parity."""
        raise NotImplementedError('Postgres not implement.')

    @check_table_name
    def create_table(self, table_name, columns):
        """Create schema (if needed) and table from column descriptors.

        Each ``columns`` entry is a dict with keys name, type, is_primary,
        is_not_null.  NOTE: identifiers are interpolated directly into SQL;
        callers must not pass untrusted names.
        """
        table_name = table_name.split('.')
        schema = table_name[0]
        table = table_name[1]
        cursor = self._connection.cursor()
        command = 'CREATE SCHEMA IF NOT EXISTS {schema} AUTHORIZATION "{username}"'.format(schema=schema, username=self.username)
        cursor.execute(command)
        command = 'CREATE TABLE {schema}.{table} ('.format(schema=schema, table=table)
        fields = []
        for col in columns:
            field = '{name} {type}'.format(name=col['name'], type=col['type'])
            if col['is_primary']:
                field += ' PRIMARY KEY'
            if col['is_not_null']:
                field += ' NOT NULL'
            fields.append(field)
        command += ','.join(fields) + ')'
        cursor.execute(command)
        self._connection.commit()

    @check_table_name
    def insert(self, table_name, columns, records):
        """Bulk-insert ``records`` (list of row sequences) into ``table_name``.

        Raises IndexError when any record's length differs from ``columns``.
        """
        cursor = self._connection.cursor()
        for record in records:
            if len(record) != len(columns):
                raise IndexError('record {} and columns do not match'.format(record))
        records = [tuple(record) for record in records]
        command = 'INSERT INTO {table_name}('.format(table_name=table_name)
        command += ','.join(columns) + ') VALUES %s'
        # execute_values expands the single VALUES %s placeholder efficiently.
        execute_values(cursor, command, (records))
        self._connection.commit()

    @check_table_name
    async def delete_table(self, table_name):
        """Drop ``table_name`` if it exists."""
        cursor = self._connection.cursor()
        command = 'DROP TABLE IF EXISTS {table_name}'.format(table_name=table_name)
        cursor.execute(command)
        self._connection.commit()

    @check_table_name
    def delete_record(self, table_name, condition):
        """Delete the rows of ``table_name`` matching the raw SQL ``condition``."""
        cursor = self._connection.cursor()
        command = 'DELETE FROM {table_name} WHERE {condition}'.format(table_name=table_name, condition=condition)
        cursor.execute(command)
        self._connection.commit()
| 35.84127 | 141 | 0.697298 |
3fdd21d6800c0d1b6769f36086a6058fbfd4f921 | 2,141 | py | Python | src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_client_factory.py | henrypan/azure-cli | 8de0ab5216ed3dc700546ae9a3c485710322376b | [
"MIT"
] | null | null | null | src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_client_factory.py | henrypan/azure-cli | 8de0ab5216ed3dc700546ae9a3c485710322376b | [
"MIT"
] | null | null | null | src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_client_factory.py | henrypan/azure-cli | 8de0ab5216ed3dc700546ae9a3c485710322376b | [
"MIT"
] | null | null | null | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
def _compute_client_factory(**_):
    """Build the ARM compute management client; extra kwargs are ignored."""
    from azure.cli.core.commands.client_factory import get_mgmt_service_client
    from azure.cli.core.profiles import ResourceType

    return get_mgmt_service_client(ResourceType.MGMT_COMPUTE)
def _subscription_client_factory(**_):
    """Build the subscription management client; extra kwargs are ignored."""
    from azure.cli.core.commands.client_factory import get_subscription_service_client
    from azure.mgmt.resource import SubscriptionClient

    client = get_subscription_service_client(SubscriptionClient)
    return client
def cf_ni(_):
    """Network-interfaces operations client, pinned to an explicit
    api-version (the CLI-provided argument is unused)."""
    from azure.cli.core.profiles import ResourceType
    from azure.cli.core.commands.client_factory import get_mgmt_service_client
    # TODO: Remove hard coded api-version once
    # https://github.com/Azure/azure-rest-api-specs/issues/570
    # is fixed.
    ni = get_mgmt_service_client(ResourceType.MGMT_NETWORK).network_interfaces
    ni.api_version = '2016-03-30'
    return ni
def cf_avail_set(_):
    """Operations client for availability sets (CLI arg unused)."""
    compute = _compute_client_factory()
    return compute.availability_sets
def cf_vm(_):
    """Operations client for virtual machines (CLI arg unused)."""
    compute = _compute_client_factory()
    return compute.virtual_machines
def cf_vm_ext(_):
    """Operations client for VM extensions (CLI arg unused)."""
    compute = _compute_client_factory()
    return compute.virtual_machine_extensions
def cf_vm_ext_image(_):
    """Operations client for VM extension images (CLI arg unused)."""
    compute = _compute_client_factory()
    return compute.virtual_machine_extension_images
def cf_vm_image(_):
    """Operations client for VM images (CLI arg unused)."""
    compute = _compute_client_factory()
    return compute.virtual_machine_images
def cf_usage(_):
    """Operations client for compute usage/quota data (CLI arg unused)."""
    compute = _compute_client_factory()
    return compute.usage
def cf_vmss(_):
    """Operations client for VM scale sets (CLI arg unused)."""
    compute = _compute_client_factory()
    return compute.virtual_machine_scale_sets
def cf_vmss_vm(_):
    """Operations client for scale-set VM instances (CLI arg unused)."""
    compute = _compute_client_factory()
    return compute.virtual_machine_scale_set_vms
def cf_vm_sizes(_):
    """Operations client for available VM sizes (CLI arg unused)."""
    compute = _compute_client_factory()
    return compute.virtual_machine_sizes
def cf_disks(_):
    """Operations client for managed disks (CLI arg unused)."""
    compute = _compute_client_factory()
    return compute.disks
def cf_snapshots(_):
    """Operations client for disk snapshots (CLI arg unused)."""
    compute = _compute_client_factory()
    return compute.snapshots
def cf_images(_):
    """Operations client for custom VM images (CLI arg unused)."""
    compute = _compute_client_factory()
    return compute.images
| 28.171053 | 94 | 0.721625 |
d2e6bd3b83dbc79afd351d606177265e8e9e82af | 2,455 | py | Python | src/main.py | jkuharev/groot | 61ac86ef05d5b90a3c277a95d096e77558b21101 | [
"BSD-2-Clause"
] | null | null | null | src/main.py | jkuharev/groot | 61ac86ef05d5b90a3c277a95d096e77558b21101 | [
"BSD-2-Clause"
] | null | null | null | src/main.py | jkuharev/groot | 61ac86ef05d5b90a3c277a95d096e77558b21101 | [
"BSD-2-Clause"
] | null | null | null | from cfg import cfg
from umqttsimple import MQTTClient
import ubinascii
import machine
from machine import Pin
from machine import ADC
import utime
import time
# Seconds to wait between sensor-read iterations.
delayPerIteration = 3
# Minutes to deep-sleep after a run; may be overridden via MQTT (onMessage).
deepSleepMinutes = cfg['defaultSleepMinutes']
# Unique client id derived from the chip id, e.g. b"esp8266_<hex>".
clientID = b"esp8266_" + ubinascii.hexlify( machine.unique_id() )
client = MQTTClient(clientID, cfg['mqttServer'], user=cfg['mqttUser'], password=cfg['mqttPass'], port=cfg['mqttPort'])
# create ADC object on ADC pin
adc = ADC(0)
# D5:14,D6:12,D7:13
# A=12, B=14, C=13 # because I messed when soldering
# One output pin per sensor; driving a pin high powers that sensor.
pins = [Pin(i, Pin.OUT) for i in cfg['sensorPins']]
#
def readSensors() :
    """Power each moisture sensor in turn, read the shared ADC, publish the
    readings over MQTT, and return the list of raw ADC values."""
    t = utime.localtime()
    # t[3]+2 — presumably a fixed UTC+2 local-time offset; TODO confirm.
    stmp = "{:04d}.{:02d}.{:02d} {:02d}:{:02d}:{:02d}".format( t[0],t[1],t[2],t[3]+2,t[4],t[5] )
    print(" current time stamp: " + stmp)
    res = []
    print(" switching off all sensors ... ")
    for p in pins :
        p.off()
    # Baseline reading with every sensor unpowered.
    valueOff = str( adc.read() )
    print(" sensor off value: " + valueOff)
    client.publish(cfg['mqttPath'] + "/valueOff", valueOff )
    for i in range(0, len(pins)) :
        p = pins[i]
        p.on()
        v = adc.read()
        print(" sensor " + str(i+1) + " value: " + str( v ) )
        # Topics are suffixed A, B, C, ... (chr(65+i)) per sensor.
        client.publish(cfg['mqttPath'] + "/moisture" + chr(65+i), str( v ))
        res.append(v)
        p.off()
    client.publish(cfg['mqttPath'] + "/lastUpdate", stmp)
    return res
#
def onMessage(topic, msg):
    """MQTT callback: update the global deep-sleep interval from a message.

    topic: MQTT topic the message arrived on (logged only).
    msg:   payload expected to contain an integer number of minutes.

    Bad payloads are logged and ignored, leaving the previous interval.
    """
    global deepSleepMinutes
    print("Topic: %s, Message: %s" % (topic, msg))
    try:
        deepSleepMinutes = int(msg)
        print("deepSleepMinutes from MQTT = " + str(deepSleepMinutes) )
    except (TypeError, ValueError):
        # Narrowed from a bare except; the message previously said "float"
        # although the conversion is int().
        print("failed to convert MQTT message to int: " + str(msg))
#
# Main run: connect to the broker, take three measurement rounds while
# polling for config messages, then deep-sleep for the configured time.
try :
    client.connect()
    client.set_callback(onMessage)
    client.subscribe(bytes(cfg['mqttPath'] + "/deepSleepMinutes","ascii"))
    # we make 3 iterations to read sensors and publish results
    for i in range(1, 4) :
        # read mqqt input
        client.check_msg()
        print("------------------------------------------------------------")
        print("iteration {:g}".format(i))
        readSensors()
        time.sleep(delayPerIteration)
    print("------------------------------------------------------------")
    # set default value for deep sleep delay
    if deepSleepMinutes < 1 :
        client.publish(cfg['mqttPath'] + "/deepSleepMinutes", str(cfg['defaultSleepMinutes']) )
    client.disconnect()
except Exception as e:
    # Never crash the device loop; log and proceed to deep sleep anyway.
    print(e)
deepSleepMS= deepSleepMinutes * 60 * 1000
# deep sleep on
if deepSleepMinutes > 0 :
    print("going to deep sleep for {:g} minutes.".format(deepSleepMinutes))
    time.sleep(1)
    # Device resets on wake, so execution restarts from the top of the file.
    machine.deepsleep( deepSleepMS )
# | 29.22619 | 118 | 0.643585 |
37b990d9b27c0b21f4e173e5843afa76b2781a2b | 123 | py | Python | seconds2years.py | chapman-cpsc-230/hw1-ochoa117 | 3e65fffebe5afea88297d51886094ef6d9728da7 | [
"MIT"
] | null | null | null | seconds2years.py | chapman-cpsc-230/hw1-ochoa117 | 3e65fffebe5afea88297d51886094ef6d9728da7 | [
"MIT"
] | 1 | 2016-02-25T06:02:26.000Z | 2016-02-25T06:02:26.000Z | seconds2years.py | chapman-cpsc-230/hw1-ochoa117 | 3e65fffebe5afea88297d51886094ef6d9728da7 | [
"MIT"
# How long is 10**9 seconds?  (Classic intro exercise.)
s = 10**9                      # seconds
m = s / 60                     # minutes
h = m / 60                     # hours
d = h / 24                     # days
y = s / (365 * 24 * 60 * 60)   # years, ignoring leap years (~31.71)
# Fixed: the original used the Python 2 print statement, a SyntaxError
# under Python 3.
print("A newborn baby in Norway can expect to live", y, "years.")
| 17.571429 | 64 | 0.593496 |
199b8fb3aee8cc10cdf4d7229fe6c8fb16136776 | 9,906 | py | Python | rest_framework_simplejwt/tokens.py | rj76/django-rest-framework-simplejwt | b057218d65f8f6bf9e347b63526c77af3849f245 | [
"BSD-2-Clause",
"MIT"
] | null | null | null | rest_framework_simplejwt/tokens.py | rj76/django-rest-framework-simplejwt | b057218d65f8f6bf9e347b63526c77af3849f245 | [
"BSD-2-Clause",
"MIT"
] | null | null | null | rest_framework_simplejwt/tokens.py | rj76/django-rest-framework-simplejwt | b057218d65f8f6bf9e347b63526c77af3849f245 | [
"BSD-2-Clause",
"MIT"
] | null | null | null | from datetime import timedelta
from uuid import uuid4
from django.conf import settings
from django.utils.translation import gettext_lazy as _
from .exceptions import TokenBackendError, TokenError
from .settings import api_settings
from .token_blacklist.models import BlacklistedToken, OutstandingToken
from .utils import (
aware_utcnow, datetime_from_epoch, datetime_to_epoch, format_lazy,
)
class Token:
    """
    A class which validates and wraps an existing JWT or can be used to build a
    new JWT.
    """
    # Subclasses must set both; __init__ refuses to run otherwise.
    token_type = None
    lifetime = None

    def __init__(self, token=None, verify=True):
        """
        !!!! IMPORTANT !!!! MUST raise a TokenError with a user-facing error
        message if the given token is invalid, expired, or otherwise not safe
        to use.
        """
        if self.token_type is None or self.lifetime is None:
            raise TokenError(_('Cannot create token with no type or lifetime'))

        self.token = token
        self.current_time = aware_utcnow()

        # Set up token
        if token is not None:
            # An encoded token was provided
            from .state import get_token_backend
            token_backend = get_token_backend()

            # Decode token
            try:
                self.payload = token_backend.decode(token, verify=verify)
            except TokenBackendError:
                raise TokenError(_('Token is invalid or expired'))

            if verify:
                self.verify()
        else:
            # New token. Skip all the verification steps.
            self.payload = {api_settings.TOKEN_TYPE_CLAIM: self.token_type}

            # Set "exp" claim with default value
            self.set_exp(from_time=self.current_time, lifetime=self.lifetime)

            # Set "jti" claim
            self.set_jti()

    def __repr__(self):
        return repr(self.payload)

    # dict-like access to the claims payload.
    def __getitem__(self, key):
        return self.payload[key]

    def __setitem__(self, key, value):
        self.payload[key] = value

    def __delitem__(self, key):
        del self.payload[key]

    def __contains__(self, key):
        return key in self.payload

    def get(self, key, default=None):
        # Mirrors dict.get for the claims payload.
        return self.payload.get(key, default)

    def __str__(self):
        """
        Signs and returns a token as a base64 encoded string.
        """
        from .state import get_token_backend
        token_backend = get_token_backend()
        return token_backend.encode(self.payload)

    def verify(self):
        """
        Performs additional validation steps which were not performed when this
        token was decoded. This method is part of the "public" API to indicate
        the intention that it may be overridden in subclasses.
        """
        # According to RFC 7519, the "exp" claim is OPTIONAL
        # (https://tools.ietf.org/html/rfc7519#section-4.1.4). As a more
        # correct behavior for authorization tokens, we require an "exp"
        # claim. We don't want any zombie tokens walking around.
        self.check_exp()

        # Ensure token id is present
        if api_settings.JTI_CLAIM not in self.payload:
            raise TokenError(_('Token has no id'))

        self.verify_token_type()

    def verify_token_type(self):
        """
        Ensures that the token type claim is present and has the correct value.
        """
        try:
            token_type = self.payload[api_settings.TOKEN_TYPE_CLAIM]
        except KeyError:
            raise TokenError(_('Token has no type'))

        if self.token_type != token_type:
            raise TokenError(_('Token has wrong type'))

    def set_jti(self):
        """
        Populates the configured jti claim of a token with a string where there
        is a negligible probability that the same string will be chosen at a
        later time.

        See here:
        https://tools.ietf.org/html/rfc7519#section-4.1.7
        """
        self.payload[api_settings.JTI_CLAIM] = uuid4().hex

    def set_exp(self, claim='exp', from_time=None, lifetime=None):
        """
        Updates the expiration time of a token.
        """
        if from_time is None:
            from_time = self.current_time

        if lifetime is None:
            lifetime = self.lifetime

        self.payload[claim] = datetime_to_epoch(from_time + lifetime)

    def check_exp(self, claim='exp', current_time=None):
        """
        Checks whether a timestamp value in the given claim has passed (since
        the given datetime value in `current_time`). Raises a TokenError with
        a user-facing error message if so.
        """
        if current_time is None:
            current_time = self.current_time

        try:
            claim_value = self.payload[claim]
        except KeyError:
            raise TokenError(format_lazy(_("Token has no '{}' claim"), claim))

        claim_time = datetime_from_epoch(claim_value)
        if claim_time <= current_time:
            raise TokenError(format_lazy(_("Token '{}' claim has expired"), claim))

    @classmethod
    def for_user(cls, user):
        """
        Returns an authorization token for the given user that will be provided
        after authenticating the user's credentials.
        """
        user_id = getattr(user, api_settings.USER_ID_FIELD)
        # Non-int ids (e.g. UUIDs) are stringified so the claim is serializable.
        if not isinstance(user_id, int):
            user_id = str(user_id)

        token = cls()
        token[api_settings.USER_ID_CLAIM] = user_id

        return token
class BlacklistMixin:
    """
    If the `rest_framework_simplejwt.token_blacklist` app was configured to be
    used, tokens created from `BlacklistMixin` subclasses will insert
    themselves into an outstanding token list and also check for their
    membership in a token blacklist.
    """
    # The blacklist behaviour is compiled in only when the blacklist app is
    # installed; otherwise this mixin contributes nothing.
    if 'rest_framework_simplejwt.token_blacklist' in settings.INSTALLED_APPS:
        def verify(self, *args, **kwargs):
            # Reject blacklisted tokens before running the normal checks.
            self.check_blacklist()
            super().verify(*args, **kwargs)

        def check_blacklist(self):
            """
            Checks if this token is present in the token blacklist. Raises
            `TokenError` if so.
            """
            jti = self.payload[api_settings.JTI_CLAIM]
            if BlacklistedToken.objects.filter(token__jti=jti).exists():
                raise TokenError(_('Token is blacklisted'))

        def blacklist(self):
            """
            Ensures this token is included in the outstanding token list and
            adds it to the blacklist.
            """
            jti = self.payload[api_settings.JTI_CLAIM]
            exp = self.payload['exp']

            # Ensure outstanding token exists with given jti
            token, _ = OutstandingToken.objects.get_or_create(
                jti=jti,
                defaults={
                    'token': str(self),
                    'expires_at': datetime_from_epoch(exp),
                },
            )

            return BlacklistedToken.objects.get_or_create(token=token)

        @classmethod
        def for_user(cls, user):
            """
            Adds this token to the outstanding token list.
            """
            token = super().for_user(user)

            jti = token[api_settings.JTI_CLAIM]
            exp = token['exp']

            OutstandingToken.objects.create(
                user=user,
                jti=jti,
                token=str(token),
                created_at=token.current_time,
                expires_at=datetime_from_epoch(exp),
            )

            return token
class SlidingToken(BlacklistMixin, Token):
    """Single-token scheme: one token acts as both credential and refresh key."""

    token_type = 'sliding'
    lifetime = api_settings.SLIDING_TOKEN_LIFETIME

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if self.token is not None:
            # Decoded an existing token; its refresh-exp claim already exists.
            return
        # Freshly minted token: stamp the sliding refresh expiration claim.
        self.set_exp(
            api_settings.SLIDING_TOKEN_REFRESH_EXP_CLAIM,
            from_time=self.current_time,
            lifetime=api_settings.SLIDING_TOKEN_REFRESH_LIFETIME,
        )
class RefreshToken(BlacklistMixin, Token):
    """Long-lived token whose only job is to mint short-lived access tokens."""

    token_type = 'refresh'
    lifetime = api_settings.REFRESH_TOKEN_LIFETIME

    # Claims that must never be copied into a derived access token.
    no_copy_claims = (
        api_settings.TOKEN_TYPE_CLAIM,
        'exp',
        # A third-party token might carry a custom/namespaced JTI claim in
        # addition to the default "jti" claim; neither should be copied.
        api_settings.JTI_CLAIM,
        'jti',
    )

    @property
    def access_token(self):
        """Mint a new access token carrying this token's copyable claims."""
        access = AccessToken()

        # Anchor the access token's "exp" to this refresh token's creation
        # instant so a refresh/access pair expires relative to the same time.
        access.set_exp(from_time=self.current_time)

        skip = self.no_copy_claims
        for claim, value in self.payload.items():
            if claim not in skip:
                access[claim] = value

        return access
class AccessToken(Token):
    """Short-lived credential presented by clients on authenticated requests."""
    token_type = 'access'
    lifetime = api_settings.ACCESS_TOKEN_LIFETIME
class UntypedToken(Token):
    """Token used only for validation, never issued (hence the zero lifetime)."""
    token_type = 'untyped'
    lifetime = timedelta(seconds=0)

    def verify_token_type(self):
        """
        Untyped tokens do not verify the "token_type" claim. This is useful
        when performing general validation of a token's signature and other
        properties which do not relate to the token's intended use.
        """
        pass
| 32.267101 | 83 | 0.615385 |
8d3ce8498a96a2095a2b73d8addd3e02e06059c5 | 84 | py | Python | devel/lib/python2.7/dist-packages/ferns_detector/msg/__init__.py | gentaiscool/ros-vrep-slam | afae1b35eb2538fcc9ff68952b85c5e4791c46d2 | [
"MIT"
] | 39 | 2018-08-28T21:28:07.000Z | 2022-03-12T10:30:40.000Z | devel/lib/python2.7/dist-packages/ferns_detector/msg/__init__.py | zliucr/ros-vrep-slam | afae1b35eb2538fcc9ff68952b85c5e4791c46d2 | [
"MIT"
] | 1 | 2019-03-14T09:23:56.000Z | 2019-04-27T15:54:53.000Z | devel/lib/python2.7/dist-packages/ferns_detector/msg/__init__.py | zliucr/ros-vrep-slam | afae1b35eb2538fcc9ff68952b85c5e4791c46d2 | [
"MIT"
] | 14 | 2018-07-12T06:59:48.000Z | 2021-03-31T08:27:39.000Z | from ._DetectedPoint import *
from ._Detection import *
from ._ModelStatus import *
| 21 | 29 | 0.785714 |
d50355bde051443290f85de0039c0c2d4f67a366 | 7,024 | py | Python | silk/profiling/profiler.py | idonethis/silk | 1975b74019e4ba67238e14d395d91f0eb5e8dce2 | [
"MIT"
] | 1 | 2016-02-16T09:24:26.000Z | 2016-02-16T09:24:26.000Z | silk/profiling/profiler.py | JoshData/silk | f025dff691d8ce11a17e4a9499084e1c05598450 | [
"MIT"
] | null | null | null | silk/profiling/profiler.py | JoshData/silk | f025dff691d8ce11a17e4a9499084e1c05598450 | [
"MIT"
] | null | null | null | import inspect
import logging
import time
import traceback
from django.conf import settings
from django.utils import timezone
import six
from silk.collector import DataCollector
from silk.config import SilkyConfig
from silk.models import _time_taken
Logger = logging.getLogger('silk')
class silk_meta_profiler(object):
    """Used in the profiling of Silk itself."""

    def __init__(self):
        super(silk_meta_profiler, self).__init__()
        self.start_time = None

    @property
    def _should_meta_profile(self):
        # Meta profiling is opt-in via the SILKY_META setting.
        return SilkyConfig().SILKY_META

    def __enter__(self):
        if self._should_meta_profile:
            self.start_time = timezone.now()

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Accumulates the elapsed time onto the current request's meta_time.
        # Exceptions are logged but deliberately not suppressed (returns None).
        if self._should_meta_profile:
            end_time = timezone.now()
            exception_raised = exc_type is not None
            if exception_raised:
                Logger.error('Exception when performing meta profiling, dumping trace below')
                traceback.print_exception(exc_type, exc_val, exc_tb)
            request = DataCollector().request
            if request:
                curr = request.meta_time or 0
                request.meta_time = curr + _time_taken(self.start_time, end_time)
            else:
                Logger.error('Cant perform meta profile due to no request model in DataCollector. '
                             'Has Silk middleware been installed properly?')

    def __call__(self, target):
        # Decorator form: wrap `target` so its run time is added to the
        # request's meta_time; a no-op passthrough when meta profiling is off.
        if self._should_meta_profile:
            def wrapped_target(*args, **kwargs):
                request = DataCollector().request
                if request:
                    start_time = timezone.now()
                    result = target(*args, **kwargs)
                    end_time = timezone.now()
                    curr = request.meta_time or 0
                    request.meta_time = curr + _time_taken(start_time, end_time)
                else:
                    Logger.error('Cant perform meta profile due to no request model in DataCollector. '
                                 'Has Silk middleware been installed properly?')
                    # Still run the target so behaviour is unchanged.
                    result = target(*args, **kwargs)
                return result
            return wrapped_target
        return target
# noinspection PyPep8Naming
class silk_profile(object):
    """Profile a block of code, either as a context manager (requires a name)
    or as a decorator, recording timings and the SQL queries executed."""

    def __init__(self, name=None, _dynamic=False):
        super(silk_profile, self).__init__()
        self.name = name
        self.profile = None
        self._queries_before = None
        self._queries_after = None
        self._dynamic = _dynamic

    def _query_identifiers_from_collector(self):
        # Snapshot of the query identifiers currently held by the collector.
        return [x for x in DataCollector().queries]

    def _start_queries(self):
        """record queries that have been executed before profiling began"""
        self._queries_before = self._query_identifiers_from_collector()

    def _end_queries(self):
        """record queries that have been executed after profiling has finished"""
        self._queries_after = self._query_identifiers_from_collector()

    def __enter__(self):
        if self._silk_installed() and self._should_profile():
            with silk_meta_profiler():
                self._start_queries()
                if not self.name:
                    raise ValueError('silk_profile used as a context manager must have a name')
                # Record where in the source the `with` block lives.
                frame = inspect.currentframe()
                frames = inspect.getouterframes(frame)
                outer_frame = frames[1]
                path = outer_frame[1]
                line_num = outer_frame[2]
                request = DataCollector().request
                self.profile = {
                    'name': self.name,
                    'file_path': path,
                    'line_num': line_num,
                    'dynamic': self._dynamic,
                    'request': request,
                    'start_time': timezone.now(),
                }
        else:
            # Logger.warning: `warn` is a deprecated alias in the logging module.
            Logger.warning('Cannot execute silk_profile as silk is not installed correctly.')

    def _finalise_queries(self):
        # Attribute to this profile only the queries executed inside the block.
        collector = DataCollector()
        self._end_queries()
        assert self.profile, 'no profile was created'
        diff = set(self._queries_after).difference(set(self._queries_before))
        self.profile['queries'] = diff
        collector.register_profile(self.profile)

    # noinspection PyUnusedLocal
    def __exit__(self, exc_type, exc_val, exc_tb):
        if self._silk_installed() and self._should_profile():
            with silk_meta_profiler():
                # (Removed a dead `start_time = None` local from the original.)
                exception_raised = exc_type is not None
                self.profile['exception_raised'] = exception_raised
                self.profile['end_time'] = timezone.now()
                self._finalise_queries()

    def _silk_installed(self):
        app_installed = 'silk' in settings.INSTALLED_APPS
        middleware_installed = 'silk.middleware.SilkyMiddleware' in settings.MIDDLEWARE_CLASSES
        return app_installed and middleware_installed

    def _should_profile(self):
        # Only profile while a request is being collected.
        return DataCollector().request is not None

    def __call__(self, target):
        """Decorator form; falls back to the target's own name when unnamed."""
        if self._silk_installed():
            def wrapped_target(*args, **kwargs):
                with silk_meta_profiler():
                    try:
                        func_code = six.get_function_code(target)
                    except AttributeError:
                        raise NotImplementedError('Profile not implemented to decorate type %s' % target.__class__.__name__)
                    line_num = func_code.co_firstlineno
                    file_path = func_code.co_filename
                    func_name = target.__name__
                    if not self.name:
                        self.name = func_name
                    self.profile = {
                        'func_name': func_name,
                        'name': self.name,
                        'file_path': file_path,
                        'line_num': line_num,
                        'dynamic': self._dynamic,
                        'start_time': timezone.now(),
                        'request': DataCollector().request
                    }
                    self._start_queries()
                try:
                    result = target(*args, **kwargs)
                except Exception:
                    self.profile['exception_raised'] = True
                    raise
                finally:
                    # Always close out the profile, success or failure.
                    with silk_meta_profiler():
                        self.profile['end_time'] = timezone.now()
                        self._finalise_queries()
                return result

            return wrapped_target
        else:
            Logger.warning('Cannot execute silk_profile as silk is not installed correctly.')
            return target

    def distinct_queries(self):
        """Queries seen after the block that were not present before it."""
        queries = [x for x in self._queries_after if x not in self._queries_before]
        return queries
# Demo target: a one-second sleep whose wall time silk_profile should record.
@silk_profile()
def blah():
    time.sleep(1)
if __name__ == '__main__':
blah() | 37.164021 | 124 | 0.575028 |
6fa2eb1d3bca8d31c0a2113581334ec616cd49d9 | 1,918 | py | Python | scripts/yaml2rdf_module.py | ASKtraining/tools | c28b8c59f69dcbc28d5cf8b400a6395930d5bef4 | [
"CC0-1.0"
] | null | null | null | scripts/yaml2rdf_module.py | ASKtraining/tools | c28b8c59f69dcbc28d5cf8b400a6395930d5bef4 | [
"CC0-1.0"
] | null | null | null | scripts/yaml2rdf_module.py | ASKtraining/tools | c28b8c59f69dcbc28d5cf8b400a6395930d5bef4 | [
"CC0-1.0"
] | null | null | null | '''
Converts ASKotec training module meta-data
(from module.yaml) into an RDF/Turtle.
'''
import glob
import os
from rdflib.namespace import DC, DCTERMS, DOAP, FOAF, SKOS, OWL, RDF, RDFS, VOID, XMLNS, XSD
import wget
from yaml2rdf_shared import *
def convert_module_yaml_to_rdf(yaml_cont, g):
    '''
    Converts ASKotec training module meta-data content
    into an RDF/Turtle string.

    yaml_cont: parsed module.yaml content (dict with 'version' and 'module')
    g:         rdflib Graph that the module triples are added to

    Raises ValueError when the content version is older than supported.
    (The original `raise '<string>'` was itself broken: Python 3 only
    allows raising BaseException instances.)
    '''
    supported_version = "1.0"
    if version_compare(yaml_cont['version'], supported_version) < 0:
        raise ValueError(
            'The content version is not supported by this converter. Please get a newer version!')
    y = yaml_cont['module']
    pre_path = 'module'
    m_s = ASKM[str2id(y['name'])]
    # Make sure every resource has a Turtle file, then merge them all in.
    ensure_resource_turtles(y, g)
    for res_ttl in glob.glob('resource_*.ttl'):
        g.parse(res_ttl, format='ttl')
    # Link every known resource to this module.
    for res_s, _, _ in g.triples((None, RDF['type'], ASK['Resource'])):
        g.add((m_s, ASK.resource, res_s))
    g.add((m_s, RDF.type, ASK.Module))
    g.add((m_s, RDFS.label, rdf_str(y['name'])))
    # Manual: an explicit entry wins, otherwise fall back to ./manual.md.
    if 'manual' in y:
        g.add((m_s, ASK.manual, rdf_path(y['manual'])))
    elif os.path.exists('manual.md'):
        g.add((m_s, ASK.manual, rdf_path('manual.md')))
    else:
        conv_fail('Entry not found "%s", and default path "%s" does not exist'
                  % (pre_path + '.' + 'manual', os.path.curdir + '/manual.md'))
    g.add((m_s, ASK.release, rdf_str(y['release'])))
    g.add((m_s, SCHEMA.duration, rdf_duration(y['duration'])))
    g.add((m_s, ASK.maxParticipants, rdf_int(int(y['max-participants']))))
    g.add((m_s, ASK.compatibility, rdf_str(y['compatibility'])))
    # Optional link fields.
    if 'blog' in y:
        g.add((m_s, ASK.blog, rdf_url(y['blog'])))
    if 'issues' in y:
        g.add((m_s, ASK.issues, rdf_url(y['issues'])))
    if 'new-issue' in y:
        g.add((m_s, ASK.newIssue, rdf_url(y['new-issue'])))
    conv_authors(y, g, m_s)
    conv_licenses(y, g, m_s)
| 34.872727 | 99 | 0.620438 |
cad55570a0c95225d853efb8a46d067098eee2ae | 6,970 | py | Python | src/cascade/input_data/db/configuration.py | adolgert/cascade | 2084e07c9ee5e901dd407b817220de882c7246a3 | [
"MIT"
] | null | null | null | src/cascade/input_data/db/configuration.py | adolgert/cascade | 2084e07c9ee5e901dd407b817220de882c7246a3 | [
"MIT"
] | null | null | null | src/cascade/input_data/db/configuration.py | adolgert/cascade | 2084e07c9ee5e901dd407b817220de882c7246a3 | [
"MIT"
] | null | null | null | import json
from cascade.core.db import cursor
from cascade.core.log import getLoggers
from cascade.input_data.configuration import SettingsError
from cascade.input_data.configuration.form import Configuration
CODELOG, MATHLOG = getLoggers(__name__)
def load_settings(ec, meid=None, mvid=None, settings_file=None):
    """Load EpiViz settings from exactly one of a meid, an mvid, or a file.

    Stores the resolved model version id on the execution context and
    returns the validated Configuration object.
    """
    CODELOG.debug(f"meid {meid} mvid {mvid} settings {settings_file}")
    sources = [meid, mvid, settings_file]
    if sum(src is not None for src in sources) != 1:
        raise ValueError(
            "Must supply exactly one of mvid, meid or settings_file")
    # Dispatch deliberately uses truthiness, matching the original behavior
    # (a falsy-but-not-None value falls through to the RuntimeError).
    if meid:
        raw_settings, found_mvid = load_raw_settings_meid(ec, meid)
    elif mvid:
        raw_settings, found_mvid = load_raw_settings_mvid(ec, mvid)
    elif settings_file:
        raw_settings, found_mvid = load_raw_settings_file(ec, settings_file)
    else:
        raise RuntimeError(f"Either meid, mvid, or file must be specified.")
    ec.parameters.model_version_id = found_mvid
    return json_settings_to_frozen_settings(raw_settings, found_mvid)
def load_raw_settings_meid(ec, modelable_entity_id):
    """
    Given a meid, get settings for the latest corresponding mvid.

    Args:
        modelable_entity_id (int,str): The MEID.

    Returns:
        dict: Settings as a JSON dictionary of dictionaries.
        int: Model version ID that is the latest one associated with this meid.
    """
    ec.parameters.modelable_entity_id = modelable_entity_id
    latest_mvid = latest_model_version(ec)
    MATHLOG.info(
        f"No model version specified so using the latest version for "
        f"model {modelable_entity_id} which is {latest_mvid}")
    return settings_json_from_epiviz(ec, latest_mvid), latest_mvid
def load_raw_settings_mvid(ec, mvid):
    """Given an mvid, get its settings.

    Args:
        mvid (int,str): Model version ID.

    Returns:
        dict: Settings as a JSON dictionary of dictionaries.
        int: The same model version ID that was passed in.
    """
    return settings_json_from_epiviz(ec, mvid), mvid
def load_raw_settings_file(ec, settings_file):
"""Given a settings file, get the latest mvid for its meid.
Args:
settings_file (str): Model version ID.
Returns:
dict: Settings as a JSON dictionary of dictionaries.
int: Model version ID, either found in file or latest from meid.
"""
with open(str(settings_file), "r") as f:
try:
raw_settings = json.load(f)
except json.decoder.JSONDecodeError as jde:
MATHLOG.error(
f"The format of the JSON in {settings_file} has an error. {jde}")
raise
if "model" in raw_settings and "modelable_entity_id" in raw_settings["model"]:
ec.parameters.modelable_entity_id = raw_settings["model"]["modelable_entity_id"]
else:
raise SettingsError(
f"The settings file should have a modelable_entity_id in it. "
f"It would be under model and then modelable_entity_id.")
if "model_version_id" in raw_settings["model"]:
mvid = raw_settings["model"]["model_version_id"]
MATHLOG.info(f"Using mvid {mvid} from the settings file.")
else:
mvid = latest_model_version(ec)
MATHLOG.info(f"Using mvid {mvid} from latest model version.")
return raw_settings, mvid
def json_settings_to_frozen_settings(raw_settings, mvid=None):
"""Converts a settings file in the form of a dict (from JSON usually)
into a Configuration object. If that conversion fails, report errors
with an exception.
Args:
raw_settings (dict): Dict of dicts, representing the JSON settings.
mvid (int,optional): Model version ID to put into the settings.
Returns:
Configuration: Represents validated settings.
"""
if "model_version_id" not in raw_settings["model"] or not raw_settings["model"]["model_version_id"]:
raw_settings["model"]["model_version_id"] = mvid
settings = Configuration(raw_settings)
errors = settings.validate_and_normalize()
if errors:
print(f"Configuration does not validate {errors}")
raise SettingsError("Configuration does not validate", errors,
raw_settings)
return settings
def settings_json_from_epiviz(execution_context, model_version_id):
query = """select parameter_json from at_model_parameter where model_version_id = %(model_version_id)s"""
with cursor(execution_context) as c:
c.execute(query, args={"model_version_id": model_version_id})
raw_data = c.fetchall()
if len(raw_data) == 0:
raise ValueError(f"No parameters for model version {model_version_id}")
if len(raw_data) > 1:
raise ValueError(f"Multiple parameter entries for model version {model_version_id}")
config_data = json.loads(raw_data[0][0])
# Fix bugs in epiviz
# TODO: remove this once EPI-999 is resolved
config_data = trim_config(config_data)
if config_data is DO_REMOVE:
config_data = {}
return config_data
DO_REMOVE = object()
def trim_config(source):
""" This function represents the approach to missing data which the viz
team says it will enforce in the front end, though that hasn't happened
yet.
"""
trimmed = None
remove = True
if isinstance(source, dict):
trimmed, remove = _trim_dict(source)
elif isinstance(source, list):
trimmed, remove = _trim_list(source)
else:
if source is not None and source != "":
trimmed = source
remove = False
if remove:
return DO_REMOVE
else:
return trimmed
def _trim_dict(source_dict):
trimmed = {}
remove = True
for k, v in source_dict.items():
# Removing keys prefixed by "__" because they represent garbage that
# the GUI framework sticks in sometimes but isn't useful to us.
if not k.startswith("__"):
tv = trim_config(v)
if tv is not DO_REMOVE:
trimmed[k] = tv
remove = False
return trimmed, remove
def _trim_list(source_list):
trimmed = []
remove = True
for v in source_list:
tv = trim_config(v)
if tv is not DO_REMOVE:
trimmed.append(tv)
remove = False
return trimmed, remove
def latest_model_version(execution_context):
model_id = execution_context.parameters.modelable_entity_id
query = """
select model_version_id from epi.model_version
where modelable_entity_id = %(modelable_entity_id)s
order by last_updated desc
limit 1
"""
with cursor(execution_context) as c:
c.execute(query, args={"modelable_entity_id": model_id})
result = c.fetchone()
if result is not None:
return result[0]
else:
raise RuntimeError(
f"No model version for modelable entity id {model_id} in database.")
| 33.671498 | 109 | 0.672597 |
631bfbca73b3356ec820b27ff03b383ece2afac9 | 9,042 | py | Python | ansible/venv/lib/python2.7/site-packages/ansible/modules/network/fortios/fortios_waf_signature.py | gvashchenkolineate/gvashchenkolineate_infra_trytravis | 0fb18850afe0d8609693ba4b23f29c7cda17d97f | [
"MIT"
] | 17 | 2017-06-07T23:15:01.000Z | 2021-08-30T14:32:36.000Z | ansible/venv/lib/python2.7/site-packages/ansible/modules/network/fortios/fortios_waf_signature.py | gvashchenkolineate/gvashchenkolineate_infra_trytravis | 0fb18850afe0d8609693ba4b23f29c7cda17d97f | [
"MIT"
] | 32 | 2018-10-09T04:13:42.000Z | 2020-05-11T07:20:28.000Z | ansible/venv/lib/python2.7/site-packages/ansible/modules/network/fortios/fortios_waf_signature.py | gvashchenkolineate/gvashchenkolineate_infra_trytravis | 0fb18850afe0d8609693ba4b23f29c7cda17d97f | [
"MIT"
] | 11 | 2018-10-09T00:14:53.000Z | 2021-11-03T10:54:09.000Z | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_waf_signature
short_description: Hidden table for datasource in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify waf feature and signature category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.9"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
state:
description:
- Indicates whether to create or remove the object.
type: str
required: true
choices:
- present
- absent
waf_signature:
description:
- Hidden table for datasource.
default: null
type: dict
suboptions:
desc:
description:
- Signature description.
type: str
id:
description:
- Signature ID.
required: true
type: int
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Hidden table for datasource.
fortios_waf_signature:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
state: "present"
waf_signature:
desc: "<your_own_value>"
id: "4"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
host = data['host']
username = data['username']
password = data['password']
ssl_verify = data['ssl_verify']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password, verify=ssl_verify)
def filter_waf_signature_data(json):
option_list = ['desc', 'id']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
def underscore_to_hyphen(data):
if isinstance(data, list):
for elem in data:
elem = underscore_to_hyphen(elem)
elif isinstance(data, dict):
new_data = {}
for k, v in data.items():
new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
data = new_data
return data
def waf_signature(data, fos):
vdom = data['vdom']
state = data['state']
waf_signature_data = data['waf_signature']
filtered_data = underscore_to_hyphen(filter_waf_signature_data(waf_signature_data))
if state == "present":
return fos.set('waf',
'signature',
data=filtered_data,
vdom=vdom)
elif state == "absent":
return fos.delete('waf',
'signature',
mkey=filtered_data['id'],
vdom=vdom)
def is_successful_status(status):
return status['status'] == "success" or \
status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_waf(data, fos):
if data['waf_signature']:
resp = waf_signature(data, fos)
return not is_successful_status(resp), \
resp['status'] == "success", \
resp
def main():
fields = {
"host": {"required": False, "type": "str"},
"username": {"required": False, "type": "str"},
"password": {"required": False, "type": "str", "default": "", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": True},
"ssl_verify": {"required": False, "type": "bool", "default": True},
"state": {"required": True, "type": "str",
"choices": ["present", "absent"]},
"waf_signature": {
"required": False, "type": "dict", "default": None,
"options": {
"desc": {"required": False, "type": "str"},
"id": {"required": True, "type": "int"}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
# legacy_mode refers to using fortiosapi instead of HTTPAPI
legacy_mode = 'host' in module.params and module.params['host'] is not None and \
'username' in module.params and module.params['username'] is not None and \
'password' in module.params and module.params['password'] is not None
if not legacy_mode:
if module._socket_path:
connection = Connection(module._socket_path)
fos = FortiOSHandler(connection)
is_error, has_changed, result = fortios_waf(module.params, fos)
else:
module.fail_json(**FAIL_SOCKET_MSG)
else:
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
fos = FortiOSAPI()
login(module.params, fos)
is_error, has_changed, result = fortios_waf(module.params, fos)
fos.logout()
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
| 28.613924 | 97 | 0.616788 |
9b3e80a0b4d944cf75e64d17d138408a682e96f0 | 8,630 | py | Python | datadotworld/client/_swagger/models/file_create_or_update_request.py | DanialBetres/data.world-py | 0e3acf2be9a07c5ab62ecac9289eb662088d54c7 | [
"Apache-2.0"
] | 99 | 2017-01-23T16:24:18.000Z | 2022-03-30T22:51:58.000Z | datadotworld/client/_swagger/models/file_create_or_update_request.py | DanialBetres/data.world-py | 0e3acf2be9a07c5ab62ecac9289eb662088d54c7 | [
"Apache-2.0"
] | 77 | 2017-01-26T04:33:06.000Z | 2022-03-11T09:39:50.000Z | datadotworld/client/_swagger/models/file_create_or_update_request.py | DanialBetres/data.world-py | 0e3acf2be9a07c5ab62ecac9289eb662088d54c7 | [
"Apache-2.0"
] | 29 | 2017-01-25T16:55:23.000Z | 2022-01-31T01:44:15.000Z | # coding: utf-8
"""
data.world API
# data.world in a nutshell data.world is a productive, secure platform for modern data teamwork. We bring together your data practitioners, subject matter experts, and other stakeholders by removing costly barriers to data discovery, comprehension, integration, and sharing. Everything your team needs to quickly understand and use data stays with it. Social features and integrations encourage collaborators to ask and answer questions, share discoveries, and coordinate closely while still using their preferred tools. Our focus on interoperability helps you enhance your own data with data from any source, including our vast and growing library of free public datasets. Sophisticated permissions, auditing features, and more make it easy to manage who views your data and what they do with it. # Conventions ## Authentication All data.world API calls require an API token. OAuth2 is the preferred and most secure method for authenticating users of your data.world applications. Visit our [oauth documentation](https://apidocs.data.world/toolkit/oauth) for additional information. Alternatively, you can obtain a token for _personal use or testing_ by navigating to your profile settings, under the Advanced tab ([https://data.world/settings/advanced](https://data.world/settings/advanced)). Authentication must be provided in API requests via the `Authorization` header. For example, for a user whose API token is `my_api_token`, the request header should be `Authorization: Bearer my_api_token` (note the `Bearer` prefix). ## Content type By default, `application/json` is the content type used in request and response bodies. Exceptions are noted in respective endpoint documentation. ## HTTPS only Our APIs can only be accessed via HTTPS. # Interested in building data.world apps? Check out our [developer portal](https://apidocs.data.world) for tips on how to get started, tutorials, and to interact with the API endpoints right within your browser.
OpenAPI spec version: 0.21.0
Contact: help@data.world
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class FileCreateOrUpdateRequest(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'description': 'str',
'labels': 'list[str]',
'name': 'str',
'source': 'FileSourceCreateOrUpdateRequest'
}
attribute_map = {
'description': 'description',
'labels': 'labels',
'name': 'name',
'source': 'source'
}
def __init__(self, description=None, labels=None, name=None, source=None):
"""
FileCreateOrUpdateRequest - a model defined in Swagger
"""
self._description = None
self._labels = None
self._name = None
self._source = None
if description is not None:
self.description = description
if labels is not None:
self.labels = labels
self.name = name
if source is not None:
self.source = source
@property
def description(self):
"""
Gets the description of this FileCreateOrUpdateRequest.
File description.
:return: The description of this FileCreateOrUpdateRequest.
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""
Sets the description of this FileCreateOrUpdateRequest.
File description.
:param description: The description of this FileCreateOrUpdateRequest.
:type: str
"""
if description is not None and len(description) > 240:
raise ValueError("Invalid value for `description`, length must be less than or equal to `240`")
if description is not None and len(description) < 1:
raise ValueError("Invalid value for `description`, length must be greater than or equal to `1`")
self._description = description
@property
def labels(self):
"""
Gets the labels of this FileCreateOrUpdateRequest.
File labels.
:return: The labels of this FileCreateOrUpdateRequest.
:rtype: list[str]
"""
return self._labels
@labels.setter
def labels(self, labels):
"""
Sets the labels of this FileCreateOrUpdateRequest.
File labels.
:param labels: The labels of this FileCreateOrUpdateRequest.
:type: list[str]
"""
allowed_values = ["raw data", "documentation", "visualization", "clean data", "script", "report"]
if not set(labels).issubset(set(allowed_values)):
raise ValueError(
"Invalid values for `labels` [{0}], must be a subset of [{1}]"
.format(", ".join(map(str, set(labels)-set(allowed_values))),
", ".join(map(str, allowed_values)))
)
self._labels = labels
@property
def name(self):
"""
Gets the name of this FileCreateOrUpdateRequest.
File name. Should include type extension always when possible. Must not include slashes.
:return: The name of this FileCreateOrUpdateRequest.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""
Sets the name of this FileCreateOrUpdateRequest.
File name. Should include type extension always when possible. Must not include slashes.
:param name: The name of this FileCreateOrUpdateRequest.
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`")
if name is not None and len(name) > 128:
raise ValueError("Invalid value for `name`, length must be less than or equal to `128`")
if name is not None and len(name) < 1:
raise ValueError("Invalid value for `name`, length must be greater than or equal to `1`")
if name is not None and not re.search('^[^\/]+$', name):
raise ValueError("Invalid value for `name`, must be a follow pattern or equal to `/^[^\/]+$/`")
self._name = name
@property
def source(self):
"""
Gets the source of this FileCreateOrUpdateRequest.
:return: The source of this FileCreateOrUpdateRequest.
:rtype: FileSourceCreateOrUpdateRequest
"""
return self._source
@source.setter
def source(self, source):
"""
Sets the source of this FileCreateOrUpdateRequest.
:param source: The source of this FileCreateOrUpdateRequest.
:type: FileSourceCreateOrUpdateRequest
"""
self._source = source
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, FileCreateOrUpdateRequest):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| 38.185841 | 1,984 | 0.626304 |
536f838f99622a3a6e8b02c5725003c7cd089bc0 | 96 | py | Python | venv/lib/python3.8/site-packages/numpy/polynomial/tests/test_legendre.py | Retraces/UkraineBot | 3d5d7f8aaa58fa0cb8b98733b8808e5dfbdb8b71 | [
"MIT"
] | 2 | 2022-03-13T01:58:52.000Z | 2022-03-31T06:07:54.000Z | venv/lib/python3.8/site-packages/numpy/polynomial/tests/test_legendre.py | DesmoSearch/Desmobot | b70b45df3485351f471080deb5c785c4bc5c4beb | [
"MIT"
] | 19 | 2021-11-20T04:09:18.000Z | 2022-03-23T15:05:55.000Z | venv/lib/python3.8/site-packages/numpy/polynomial/tests/test_legendre.py | DesmoSearch/Desmobot | b70b45df3485351f471080deb5c785c4bc5c4beb | [
"MIT"
] | null | null | null | /home/runner/.cache/pip/pool/6f/f6/db/947b3417f056c3d112b92ab9d99b0b29c40a151e5ea8f7ff4a9a5616aa | 96 | 96 | 0.895833 |
2bae9047f4eb4cca85b7fde3374520976737ba7e | 10,790 | py | Python | venv/lib/python3.9/site-packages/urllib3/_collections.py | qarik-hanrattyjen/apache-airflow-backport-providers-google-2021.3.3 | 630dcef73e6a258b6e9a52f934e2dd912ce741f8 | [
"Apache-2.0"
] | 20 | 2021-03-11T11:32:02.000Z | 2021-12-20T15:15:20.000Z | virtual/lib/python3.8/site-packages/urllib3/_collections.py | AokoMercyline/E-commerce | 833ad9fd5777e35146dfccf809e78e3c94d997a8 | [
"MIT"
] | 10 | 2021-02-03T00:49:56.000Z | 2021-04-16T20:57:58.000Z | virtual/lib/python3.8/site-packages/urllib3/_collections.py | AokoMercyline/E-commerce | 833ad9fd5777e35146dfccf809e78e3c94d997a8 | [
"MIT"
] | 6 | 2021-04-15T05:55:01.000Z | 2021-11-15T16:16:51.000Z | from __future__ import absolute_import
try:
from collections.abc import Mapping, MutableMapping
except ImportError:
from collections import Mapping, MutableMapping
try:
from threading import RLock
except ImportError: # Platform-specific: No threads available
class RLock:
def __enter__(self):
pass
def __exit__(self, exc_type, exc_value, traceback):
pass
from collections import OrderedDict
from .exceptions import InvalidHeader
from .packages.six import iterkeys, itervalues, PY3
__all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"]
_Null = object()
class RecentlyUsedContainer(MutableMapping):
"""
Provides a thread-safe dict-like container which maintains up to
``maxsize`` keys while throwing away the least-recently-used keys beyond
``maxsize``.
:param maxsize:
Maximum number of recent elements to retain.
:param dispose_func:
Every time an item is evicted from the container,
``dispose_func(value)`` is called. Callback which will get called
"""
ContainerCls = OrderedDict
def __init__(self, maxsize=10, dispose_func=None):
self._maxsize = maxsize
self.dispose_func = dispose_func
self._container = self.ContainerCls()
self.lock = RLock()
def __getitem__(self, key):
# Re-insert the item, moving it to the end of the eviction line.
with self.lock:
item = self._container.pop(key)
self._container[key] = item
return item
def __setitem__(self, key, value):
evicted_value = _Null
with self.lock:
# Possibly evict the existing value of 'key'
evicted_value = self._container.get(key, _Null)
self._container[key] = value
# If we didn't evict an existing value, we might have to evict the
# least recently used item from the beginning of the container.
if len(self._container) > self._maxsize:
_key, evicted_value = self._container.popitem(last=False)
if self.dispose_func and evicted_value is not _Null:
self.dispose_func(evicted_value)
def __delitem__(self, key):
with self.lock:
value = self._container.pop(key)
if self.dispose_func:
self.dispose_func(value)
def __len__(self):
with self.lock:
return len(self._container)
def __iter__(self):
raise NotImplementedError(
"Iteration over this class is unlikely to be threadsafe."
)
def clear(self):
with self.lock:
# Copy pointers to all values, then wipe the mapping
values = list(itervalues(self._container))
self._container.clear()
if self.dispose_func:
for value in values:
self.dispose_func(value)
def keys(self):
with self.lock:
return list(iterkeys(self._container))
class HTTPHeaderDict(MutableMapping):
"""
:param headers:
An iterable of field-value pairs. Must not contain multiple field names
when compared case-insensitively.
:param kwargs:
Additional field-value pairs to pass in to ``dict.update``.
A ``dict`` like container for storing HTTP Headers.
Field names are stored and compared case-insensitively in compliance with
RFC 7230. Iteration provides the first case-sensitive key seen for each
case-insensitive pair.
Using ``__setitem__`` syntax overwrites fields that compare equal
case-insensitively in order to maintain ``dict``'s api. For fields that
compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add``
in a loop.
If multiple fields that are equal case-insensitively are passed to the
constructor or ``.update``, the behavior is undefined and some will be
lost.
>>> headers = HTTPHeaderDict()
>>> headers.add('Set-Cookie', 'foo=bar')
>>> headers.add('set-cookie', 'baz=quxx')
>>> headers['content-length'] = '7'
>>> headers['SET-cookie']
'foo=bar, baz=quxx'
>>> headers['Content-Length']
'7'
"""
def __init__(self, headers=None, **kwargs):
super(HTTPHeaderDict, self).__init__()
self._container = OrderedDict()
if headers is not None:
if isinstance(headers, HTTPHeaderDict):
self._copy_from(headers)
else:
self.extend(headers)
if kwargs:
self.extend(kwargs)
def __setitem__(self, key, val):
self._container[key.lower()] = [key, val]
return self._container[key.lower()]
def __getitem__(self, key):
val = self._container[key.lower()]
return ", ".join(val[1:])
def __delitem__(self, key):
del self._container[key.lower()]
def __contains__(self, key):
return key.lower() in self._container
def __eq__(self, other):
if not isinstance(other, Mapping) and not hasattr(other, "keys"):
return False
if not isinstance(other, type(self)):
other = type(self)(other)
return dict((k.lower(), v) for k, v in self.itermerged()) == dict(
(k.lower(), v) for k, v in other.itermerged()
)
def __ne__(self, other):
return not self.__eq__(other)
if not PY3: # Python 2
iterkeys = MutableMapping.iterkeys
itervalues = MutableMapping.itervalues
__marker = object()
def __len__(self):
return len(self._container)
def __iter__(self):
# Only provide the originally cased names
for vals in self._container.values():
yield vals[0]
def pop(self, key, default=__marker):
"""D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
If key is not found, d is returned if given, otherwise KeyError is raised.
"""
# Using the MutableMapping function directly fails due to the private marker.
# Using ordinary dict.pop would expose the internal structures.
# So let's reinvent the wheel.
try:
value = self[key]
except KeyError:
if default is self.__marker:
raise
return default
else:
del self[key]
return value
def discard(self, key):
try:
del self[key]
except KeyError:
pass
def add(self, key, val):
"""Adds a (name, value) pair, doesn't overwrite the value if it already
exists.
>>> headers = HTTPHeaderDict(foo='bar')
>>> headers.add('Foo', 'baz')
>>> headers['foo']
'bar, baz'
"""
key_lower = key.lower()
new_vals = [key, val]
# Keep the common case aka no item present as fast as possible
vals = self._container.setdefault(key_lower, new_vals)
if new_vals is not vals:
vals.append(val)
def extend(self, *args, **kwargs):
"""Generic import function for any type of header-like object.
Adapted version of MutableMapping.update in order to insert items
with self.add instead of self.__setitem__
"""
if len(args) > 1:
raise TypeError(
"extend() takes at most 1 positional "
"arguments ({0} given)".format(len(args))
)
other = args[0] if len(args) >= 1 else ()
if isinstance(other, HTTPHeaderDict):
for key, val in other.iteritems():
self.add(key, val)
elif isinstance(other, Mapping):
for key in other:
self.add(key, other[key])
elif hasattr(other, "keys"):
for key in other.keys():
self.add(key, other[key])
else:
for key, value in other:
self.add(key, value)
for key, value in kwargs.items():
self.add(key, value)
def getlist(self, key, default=__marker):
"""Returns a list of all the values for the named field. Returns an
empty list if the key doesn't exist."""
try:
vals = self._container[key.lower()]
except KeyError:
if default is self.__marker:
return []
return default
else:
return vals[1:]
# Backwards compatibility for httplib
getheaders = getlist
getallmatchingheaders = getlist
iget = getlist
# Backwards compatibility for http.cookiejar
get_all = getlist
def __repr__(self):
return "%s(%s)" % (type(self).__name__, dict(self.itermerged()))
def _copy_from(self, other):
for key in other:
val = other.getlist(key)
if isinstance(val, list):
# Don't need to convert tuples
val = list(val)
self._container[key.lower()] = [key] + val
def copy(self):
clone = type(self)()
clone._copy_from(self)
return clone
def iteritems(self):
"""Iterate over all header lines, including duplicate ones."""
for key in self:
vals = self._container[key.lower()]
for val in vals[1:]:
yield vals[0], val
def itermerged(self):
"""Iterate over all headers, merging duplicate ones together."""
for key in self:
val = self._container[key.lower()]
yield val[0], ", ".join(val[1:])
def items(self):
return list(self.iteritems())
@classmethod
def from_httplib(cls, message): # Python 2
"""Read headers from a Python 2 httplib message object."""
# python2.7 does not expose a proper API for exporting multiheaders
# efficiently. This function re-reads raw lines from the message
# object and extracts the multiheaders properly.
obs_fold_continued_leaders = (" ", "\t")
headers = []
for line in message.headers:
if line.startswith(obs_fold_continued_leaders):
if not headers:
# We received a header line that starts with OWS as described
# in RFC-7230 S3.2.4. This indicates a multiline header, but
# there exists no previous header to which we can attach it.
raise InvalidHeader(
"Header continuation with no previous header: %s" % line
)
else:
key, value = headers[-1]
headers[-1] = (key, value + " " + line.strip())
continue
key, value = line.split(":", 1)
headers.append((key, value.strip()))
return cls(headers)
| 32.017804 | 86 | 0.59203 |
0433a7c6714cb0ac6c7bae4742c23acf3e4eb4cd | 3,807 | py | Python | tests/conftest.py | s-m-i-t-a/flask-cdn-ng | d27626a3f2a39828bed63859d206e1bf9d8e3aec | [
"MIT"
] | 1 | 2017-09-02T07:39:17.000Z | 2017-09-02T07:39:17.000Z | tests/conftest.py | s-m-i-t-a/flask-cdn-ng | d27626a3f2a39828bed63859d206e1bf9d8e3aec | [
"MIT"
] | null | null | null | tests/conftest.py | s-m-i-t-a/flask-cdn-ng | d27626a3f2a39828bed63859d206e1bf9d8e3aec | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import os
import socket
import sys
import time
import six
import pytest
from itertools import chain
from multiprocessing import Process
from flask import Flask
from flask.ext.cdn import CDN
from six.moves.urllib.request import urlopen
from six.moves.urllib.parse import urljoin
def pytest_generate_tests(metafunc):
if hasattr(metafunc.function, 'cdn_manifest'):
metafunc.parametrize('app', ['cdn_manifest'], indirect=True)
def config():
return {
'TESTING': True,
'SECRET_KEY': 'secret',
'CDN_DOMAIN': 'mycdnname.cloudfront.net',
}
def cdnmanifest(config):
cdn_manifest = {
'CDN_MANIFEST': True,
'CDN_TIMESTAMP': False,
'CDN_MANIFEST_URL': 'http://localhost:5555/MANIFEST',
}
return {key: value for (key, value) in chain(config.items(), cdn_manifest.items())}
def create_app(config):
app = Flask(__name__)
# config
for key, value in config.items():
app.config[key] = value
CDN(app)
@app.route('/')
def index():
return 'INDEX'
return app
@pytest.fixture
def app(request):
if getattr(request, 'param', '') == 'cdn_manifest':
app = create_app(cdnmanifest(config()))
else:
app = create_app(config())
# Establish an application context before running the tests.
ctx = app.app_context()
ctx.push()
def teardown():
ctx.pop()
request.addfinalizer(teardown)
return app
@pytest.fixture
def client(app):
    """Flask test client bound to the fixture app."""
    return app.test_client()
@pytest.fixture
def url_for(app):
    """The app's Jinja ``url_for`` global, for building URLs inside tests."""
    return app.jinja_env.globals['url_for']
@six.python_2_unicode_compatible
class Server(object):
    """Runs the WSGI application under test in a child process.

    Binds the first free port at or above 5555, starts the app there, and
    waits until it answers HTTP before returning from the constructor.
    """

    def __init__(self, application):
        self.application = application
        self.schema = 'http'
        self.host = 'localhost'
        self.port = self._get_free_port()
        self.start()

    def _run(self, host, port):
        # Child-process entry point: detach from the parent's stdio so the
        # dev server's logging does not pollute test output.
        sys.stdout.close()
        # Bug fix: devnull must be opened for *writing*. The original used the
        # default read mode, so any write to stdout/stderr in the child would
        # have raised IOError.
        sys.stdout = open(os.devnull, 'w')
        sys.stderr.close()
        sys.stderr = sys.stdout
        self.application.run(host=host, port=port)

    def _get_free_port(self, base_port=5555):
        """Returns the first port >= base_port that can be bound locally."""
        port = base_port
        for i in range(50000):
            port = base_port + i
            try:
                test_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                test_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
                test_socket.bind((self.host, port))
                test_socket.close()
                break
            except IOError:
                pass
        return port

    def is_alive(self, max_retries=5):
        '''
        Return True if server in child process respond.
        max_retries -- number of tries
        '''
        for i in range(max_retries):
            try:
                # Close the response so the probe connection is not leaked.
                urlopen(self.url).close()
                return True
            except IOError:
                # Exponential backoff while the child process boots.
                time.sleep(2 ** i)
        return False

    def start(self):
        """Spawns the server process and waits until it responds over HTTP."""
        self.p = Process(target=self._run,
                         kwargs={'host': self.host, 'port': self.port})
        self.p.start()
        if not self.is_alive():
            # TODO: raise exception or log some message
            self.stop()

    def stop(self):
        """Terminates the server process and waits for it to exit."""
        self.p.terminate()
        self.p.join()

    def restart(self):
        self.stop()
        self.start()

    @property
    def url(self):
        return '%s://%s:%d' % (self.schema, self.host, self.port)

    def __add__(self, other):
        # Allows `server + '/path'` to build an absolute URL.
        return urljoin(str(self), other)

    def __str__(self):
        return self.url

    def __repr__(self):
        return '<LiveServer listening at %s>' % self.url
@pytest.fixture
def server(request, app):
    """A live Server wrapping the fixture app; stopped automatically at teardown."""
    server = Server(application=app)
    request.addfinalizer(server.stop)
    return server
| 21.630682 | 87 | 0.595482 |
a2ba8456039d4584e5998d619f36747d58018418 | 5,220 | py | Python | train_tf2/exporter_main_v2.py | alfanyRizaMahendra/tf2-object-detection-api | 79a56f591744ae795b37b8c9c3ade212c37dba5f | [
"MIT"
] | 549 | 2020-01-02T05:14:57.000Z | 2022-03-29T18:34:12.000Z | train_tf2/exporter_main_v2.py | alfanyRizaMahendra/tf2-object-detection-api | 79a56f591744ae795b37b8c9c3ade212c37dba5f | [
"MIT"
] | 98 | 2020-01-21T09:41:30.000Z | 2022-03-12T00:53:06.000Z | train_tf2/exporter_main_v2.py | alfanyRizaMahendra/tf2-object-detection-api | 79a56f591744ae795b37b8c9c3ade212c37dba5f | [
"MIT"
] | 233 | 2020-01-18T03:46:27.000Z | 2022-03-19T03:17:47.000Z | # Lint as: python2, python3
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Tool to export an object detection model for inference.
Prepares an object detection tensorflow graph for inference using model
configuration and a trained checkpoint. Outputs associated checkpoint files,
a SavedModel, and a copy of the model config.
The inference graph contains one of three input nodes depending on the user
specified option.
* `image_tensor`: Accepts a uint8 4-D tensor of shape [1, None, None, 3]
* `float_image_tensor`: Accepts a float32 4-D tensor of shape
[1, None, None, 3]
* `encoded_image_string_tensor`: Accepts a 1-D string tensor of shape [None]
containing encoded PNG or JPEG images. Image resolutions are expected to be
the same if more than 1 image is provided.
* `tf_example`: Accepts a 1-D string tensor of shape [None] containing
serialized TFExample protos. Image resolutions are expected to be the same
if more than 1 image is provided.
and the following output nodes returned by the model.postprocess(..):
* `num_detections`: Outputs float32 tensors of the form [batch]
that specifies the number of valid boxes per image in the batch.
* `detection_boxes`: Outputs float32 tensors of the form
[batch, num_boxes, 4] containing detected boxes.
* `detection_scores`: Outputs float32 tensors of the form
[batch, num_boxes] containing class scores for the detections.
* `detection_classes`: Outputs float32 tensors of the form
[batch, num_boxes] containing classes for the detections.
Example Usage:
--------------
python exporter_main_v2.py \
--input_type image_tensor \
--pipeline_config_path path/to/ssd_inception_v2.config \
--trained_checkpoint_dir path/to/checkpoint \
--output_directory path/to/exported_model_directory
The expected output would be in the directory
path/to/exported_model_directory (which is created if it does not exist)
holding two subdirectories (corresponding to checkpoint and SavedModel,
respectively) and a copy of the pipeline config.
Config overrides (see the `config_override` flag) are text protobufs
(also of type pipeline_pb2.TrainEvalPipelineConfig) which are used to override
certain fields in the provided pipeline_config_path. These are useful for
making small changes to the inference graph that differ from the training or
eval config.
Example Usage (in which we change the second stage post-processing score
threshold to be 0.5):
python exporter_main_v2.py \
--input_type image_tensor \
--pipeline_config_path path/to/ssd_inception_v2.config \
--trained_checkpoint_dir path/to/checkpoint \
--output_directory path/to/exported_model_directory \
--config_override " \
model{ \
faster_rcnn { \
second_stage_post_processing { \
batch_non_max_suppression { \
score_threshold: 0.5 \
} \
} \
} \
}"
"""
from absl import app
from absl import flags
import tensorflow.compat.v2 as tf
from google.protobuf import text_format
from object_detection import exporter_lib_v2
from object_detection.protos import pipeline_pb2
tf.enable_v2_behavior()
FLAGS = flags.FLAGS
# Command-line flags for the export tool; the last three are marked required
# below, so absl aborts with a usage error if any is missing.
flags.DEFINE_string('input_type', 'image_tensor', 'Type of input node. Can be '
                    'one of [`image_tensor`, `encoded_image_string_tensor`, '
                    '`tf_example`, `float_image_tensor`]')
flags.DEFINE_string('pipeline_config_path', None,
                    'Path to a pipeline_pb2.TrainEvalPipelineConfig config '
                    'file.')
flags.DEFINE_string('trained_checkpoint_dir', None,
                    'Path to trained checkpoint directory')
flags.DEFINE_string('output_directory', None, 'Path to write outputs.')
flags.DEFINE_string('config_override', '',
                    'pipeline_pb2.TrainEvalPipelineConfig '
                    'text proto to override pipeline_config_path.')
flags.mark_flag_as_required('pipeline_config_path')
flags.mark_flag_as_required('trained_checkpoint_dir')
flags.mark_flag_as_required('output_directory')
def main(_):
  """Exports the trained checkpoint as an inference graph.

  Reads the pipeline config from --pipeline_config_path, merges any
  --config_override text proto on top of it, then writes the exported model
  for --trained_checkpoint_dir into --output_directory.
  """
  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  with tf.io.gfile.GFile(FLAGS.pipeline_config_path, 'r') as f:
    text_format.Merge(f.read(), pipeline_config)
  # Overrides are merged after the file, so they take precedence.
  text_format.Merge(FLAGS.config_override, pipeline_config)
  exporter_lib_v2.export_inference_graph(
      FLAGS.input_type, pipeline_config, FLAGS.trained_checkpoint_dir,
      FLAGS.output_directory)
if __name__ == '__main__':
  app.run(main)
| 41.102362 | 80 | 0.721839 |
9fafc976cef1625e91791bd460c516874f2ca9d2 | 1,241 | py | Python | group/migrations/0001_initial.py | amanpandey-crypto/synergee | bf24cd08ec417eda84ffc5ad373a220e763a79eb | [
"MIT"
] | null | null | null | group/migrations/0001_initial.py | amanpandey-crypto/synergee | bf24cd08ec417eda84ffc5ad373a220e763a79eb | [
"MIT"
] | 3 | 2021-06-10T20:30:44.000Z | 2021-10-02T08:23:25.000Z | group/migrations/0001_initial.py | amanpandey-crypto/synergee | bf24cd08ec417eda84ffc5ad373a220e763a79eb | [
"MIT"
] | 6 | 2021-01-24T08:21:59.000Z | 2021-10-03T11:33:02.000Z | # Generated by Django 3.1.5 on 2021-01-09 21:01
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration for the group app: creates Interest and Member."""
    initial = True
    dependencies = [
    ]
    operations = [
        # Interest: a tag with a 0+ familiarity score (defaults to 10).
        migrations.CreateModel(
            name='Interest',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('type', models.CharField(max_length=100)),
                ('familiarity', models.PositiveIntegerField(default=10)),
            ],
        ),
        # Member: a person profile with optional social links and a
        # many-to-many relation to Interest.
        migrations.CreateModel(
            name='Member',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=56)),
                ('last_name', models.CharField(max_length=56)),
                ('image', models.URLField()),
                ('email', models.EmailField(max_length=56)),
                ('github_url', models.URLField(blank=True, null=True)),
                ('linkedin_url', models.URLField(blank=True, null=True)),
                ('interests', models.ManyToManyField(to='group.Interest')),
            ],
        ),
    ]
| 34.472222 | 114 | 0.559226 |
763b3d1330ab9c536e2b511e992fde7857542177 | 36,267 | py | Python | fire/core.py | DaniloZZZ/python-fire | fb7ee3a716020f6a04c0e55967612f9322d16893 | [
"Apache-2.0"
] | 10 | 2021-05-31T07:18:08.000Z | 2022-03-19T09:20:11.000Z | fire/core.py | DaniloZZZ/python-fire | fb7ee3a716020f6a04c0e55967612f9322d16893 | [
"Apache-2.0"
] | 1 | 2021-08-03T12:23:01.000Z | 2021-08-10T08:35:22.000Z | fire/core.py | DaniloZZZ/python-fire | fb7ee3a716020f6a04c0e55967612f9322d16893 | [
"Apache-2.0"
] | 2 | 2021-12-09T07:23:21.000Z | 2022-03-31T06:13:10.000Z | # Copyright (C) 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Python Fire is a library for creating CLIs from absolutely any Python object.
You can call Fire on any Python object:
functions, classes, modules, objects, dictionaries, lists, tuples, etc.
They all work!
Python Fire turns any Python object into a command line interface.
Simply call the Fire function as your main method to create a CLI.
When using Fire to build a CLI, your main method includes a call to Fire. Eg:
def main(argv):
fire.Fire(Component)
A Fire CLI command is run by consuming the arguments in the command in order to
access a member of current component, call the current component (if it's a
function), or instantiate the current component (if it's a class). The target
component begins as Component, and at each operation the component becomes the
result of the preceding operation.
For example "command fn arg1 arg2" might access the "fn" property of the initial
target component, and then call that function with arguments 'arg1' and 'arg2'.
Additional examples are available in the examples directory.
Fire Flags, common to all Fire CLIs, must go after a separating "--". For
example, to get help for a command you might run: `command -- --help`.
The available flags for all Fire CLIs are:
-v --verbose: Include private members in help and usage information.
-h --help: Provide help and usage information for the command.
-i --interactive: Drop into a Python REPL after running the command.
--completion: Write the Bash completion script for the tool to stdout.
--completion fish: Write the Fish completion script for the tool to stdout.
--separator SEPARATOR: Use SEPARATOR in place of the default separator, '-'.
--trace: Get the Fire Trace for the command.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import inspect
import json
import os
import pipes
import re
import shlex
import sys
import types
from fire import completion
from fire import decorators
from fire import formatting
from fire import helptext
from fire import inspectutils
from fire import interact
from fire import parser
from fire import trace
from fire import value_types
from fire.console import console_io
import six
def Fire(component=None, command=None, name=None):
  """This function, Fire, is the main entrypoint for Python Fire.
  Executes a command either from the `command` argument or from sys.argv by
  recursively traversing the target object `component`'s members consuming
  arguments, evaluating functions, and instantiating classes as it goes.
  When building a CLI with Fire, your main method should call this function.
  Args:
    component: The initial target component.
    command: Optional. If supplied, this is the command executed. If not
        supplied, then the command is taken from sys.argv instead. This can be
        a string or a list of strings; a list of strings is preferred.
    name: Optional. The name of the command as entered at the command line.
        Used in interactive mode and for generating the completion script.
  Returns:
    The result of executing the Fire command. Execution begins with the initial
    target component. The component is updated by using the command arguments
    to either access a member of the current component, call the current
    component (if it's a function), or instantiate the current component (if
    it's a class). When all arguments are consumed and there's no function left
    to call or class left to instantiate, the resulting current component is
    the final result.
  Raises:
    ValueError: If the command argument is supplied, but not a string or a
        sequence of arguments.
    FireExit: When Fire encounters a FireError, Fire will raise a FireExit with
        code 2. When used with the help or trace flags, Fire will raise a
        FireExit with code 0 if successful.
  """
  name = name or os.path.basename(sys.argv[0])
  # Get args as a list.
  if isinstance(command, six.string_types):
    args = shlex.split(command)
  elif isinstance(command, (list, tuple)):
    args = command
  elif command is None:
    # Use the command line args by default if no command is specified.
    args = sys.argv[1:]
  else:
    raise ValueError('The command argument must be a string or a sequence of '
                     'arguments.')
  # Fire flags (e.g. --verbose, --trace) come after a final isolated '--'.
  args, flag_args = parser.SeparateFlagArgs(args)
  argparser = parser.CreateParser()
  parsed_flag_args, unused_args = argparser.parse_known_args(flag_args)
  context = {}
  if parsed_flag_args.interactive or component is None:
    # Determine the calling context.
    caller = inspect.stack()[1]
    caller_frame = caller[0]
    caller_globals = caller_frame.f_globals
    caller_locals = caller_frame.f_locals
    context.update(caller_globals)
    context.update(caller_locals)
  component_trace = _Fire(component, args, parsed_flag_args, context, name)
  # An error anywhere in the trace means the command failed: show it, exit 2.
  if component_trace.HasError():
    _DisplayError(component_trace)
    raise FireExit(2, component_trace)
  # With --trace and/or --help, show diagnostics instead of the result and
  # exit 0 without printing the command's value.
  if component_trace.show_trace and component_trace.show_help:
    output = ['Fire trace:\n{trace}\n'.format(trace=component_trace)]
    result = component_trace.GetResult()
    help_text = helptext.HelpText(
        result, trace=component_trace, verbose=component_trace.verbose)
    output.append(help_text)
    Display(output, out=sys.stderr)
    raise FireExit(0, component_trace)
  if component_trace.show_trace:
    output = ['Fire trace:\n{trace}'.format(trace=component_trace)]
    Display(output, out=sys.stderr)
    raise FireExit(0, component_trace)
  if component_trace.show_help:
    result = component_trace.GetResult()
    help_text = helptext.HelpText(
        result, trace=component_trace, verbose=component_trace.verbose)
    output = [help_text]
    Display(output, out=sys.stderr)
    raise FireExit(0, component_trace)
  # The command succeeded normally; print the result.
  _PrintResult(component_trace, verbose=component_trace.verbose)
  result = component_trace.GetResult()
  return result
def Display(lines, out):
  """Joins lines with newlines and writes them to out via the console pager."""
  console_io.More('\n'.join(lines) + '\n', out=out)
def CompletionScript(name, component, shell):
  """Returns the text of the completion script for a Fire CLI.

  Args:
    name: The command name to embed in the generated script.
    component: The root component the CLI exposes.
    shell: The target shell for the script (see the --completion flag).
  """
  return completion.Script(name, component, shell=shell)
class FireError(Exception):
  """Exception used by Fire when a Fire command cannot be executed.
  These exceptions are not raised by the Fire function, but rather are caught
  and added to the FireTrace. A trace containing a FireError makes Fire()
  display the error and raise FireExit with code 2.
  """
class FireExit(SystemExit):  # pylint: disable=g-bad-exception-name
  """An exception raised by Fire to the client in the case of a FireError.
  The trace of the Fire program is available on the `trace` property.
  This exception inherits from SystemExit, so clients may explicitly catch it
  with `except SystemExit` or `except FireExit`. If not caught, this exception
  will cause the client program to exit without a stacktrace.
  """
  def __init__(self, code, component_trace):
    """Constructs a FireExit exception.
    Args:
      code: (int) Exit code for the Fire CLI.
      component_trace: (FireTrace) The trace for the Fire command.
    """
    super(FireExit, self).__init__(code)
    # Kept so callers that catch FireExit can inspect what Fire executed.
    self.trace = component_trace
def _IsHelpShortcut(component_trace, remaining_args):
  """Determines if the user is trying to access help without '--' separator.
  For example, mycmd.py --help instead of mycmd.py -- --help.
  Args:
    component_trace: (FireTrace) The trace for the Fire command.
    remaining_args: List of remaining args that haven't been consumed yet.
  Returns:
    True if help is requested, False otherwise.
  """
  show_help = False
  if remaining_args:
    target = remaining_args[0]
    if target in ('-h', '--help'):
      # Check if --help would be consumed as a keyword argument, or is a member.
      component = component_trace.GetResult()
      if inspect.isclass(component) or inspect.isroutine(component):
        # For callables: only treat --help as a shortcut if it would NOT bind
        # to a parameter named `help` (i.e. it is left over after parsing).
        fn_spec = inspectutils.GetFullArgSpec(component)
        _, remaining_kwargs, _ = _ParseKeywordArgs(remaining_args, fn_spec)
        show_help = target in remaining_kwargs
      else:
        # For other objects: only a shortcut if there is no member named
        # '-h'/'--help' that the arg could be accessing.
        members = dict(inspect.getmembers(component))
        show_help = target not in members
  if show_help:
    component_trace.show_help = True
    # Echo the equivalent explicit command so users learn the '--' form.
    command = '{cmd} -- --help'.format(cmd=component_trace.GetCommand())
    print('INFO: Showing help with the command {cmd}.\n'.format(
        cmd=pipes.quote(command)), file=sys.stderr)
  return show_help
def _PrintResult(component_trace, verbose=False):
  """Prints the result of the Fire call to stdout in a human readable way."""
  # TODO(dbieber): Design human readable deserializable serialization method
  # and move serialization to its own module.
  result = component_trace.GetResult()
  if value_types.HasCustomStr(result):
    # If the object has a custom __str__ method, rather than one inherited from
    # object, then we use that to serialize the object.
    print(str(result))
    return
  if isinstance(result, (list, set, frozenset, types.GeneratorType)):
    # Iterables: one serialized element per line.
    for i in result:
      print(_OneLineResult(i))
  elif inspect.isgeneratorfunction(result):
    # Printing a generator function itself is unsupported.
    raise NotImplementedError
  elif isinstance(result, dict) and value_types.IsSimpleGroup(result):
    print(_DictAsString(result, verbose))
  elif isinstance(result, tuple):
    print(_OneLineResult(result))
  elif isinstance(result, value_types.VALUE_TYPES):
    # Simple values print directly; None prints nothing at all.
    if result is not None:
      print(result)
  else:
    # Anything else is treated as a group: show its help text instead.
    help_text = helptext.HelpText(
        result, trace=component_trace, verbose=verbose)
    output = [help_text]
    Display(output, out=sys.stdout)
def _DisplayError(component_trace):
  """Prints the Fire trace and the error to stdout."""
  result = component_trace.GetResult()
  output = []
  show_help = False
  # If the failing element's args included a help flag, show help, not usage.
  for help_flag in ('-h', '--help'):
    if help_flag in component_trace.elements[-1].args:
      show_help = True
  if show_help:
    command = '{cmd} -- --help'.format(cmd=component_trace.GetCommand())
    print('INFO: Showing help with the command {cmd}.\n'.format(
        cmd=pipes.quote(command)), file=sys.stderr)
    help_text = helptext.HelpText(result, trace=component_trace,
                                  verbose=component_trace.verbose)
    output.append(help_text)
    Display(output, out=sys.stderr)
  else:
    # Otherwise print the error itself followed by short usage text.
    print(formatting.Error('ERROR: ')
          + component_trace.elements[-1].ErrorAsStr(),
          file=sys.stderr)
    error_text = helptext.UsageText(result, trace=component_trace,
                                    verbose=component_trace.verbose)
    print(error_text, file=sys.stderr)
def _DictAsString(result, verbose=False):
  """Returns a dict as a string.
  Args:
    result: The dict to convert to a string
    verbose: Whether to include 'hidden' members, those keys starting with _.
  Returns:
    A string representing the dict
  """
  # We need to do 2 iterations over the items in the result dict
  # 1) Getting visible items and the longest key for output formatting
  # 2) Actually construct the output lines
  class_attrs = inspectutils.GetClassAttrsDict(result)
  result_visible = {
      key: value for key, value in result.items()
      if completion.MemberVisible(result, key, value,
                                  class_attrs=class_attrs, verbose=verbose)
  }
  if not result_visible:
    return '{}'
  # Pad every key to the widest visible key so values line up in a column.
  longest_key = max(len(str(key)) for key in result_visible.keys())
  format_string = '{{key:{padding}s}} {{value}}'.format(padding=longest_key + 1)
  lines = []
  for key, value in result.items():
    if completion.MemberVisible(result, key, value, class_attrs=class_attrs,
                                verbose=verbose):
      line = format_string.format(key=str(key) + ':',
                                  value=_OneLineResult(value))
      lines.append(line)
  return '\n'.join(lines)
def _OneLineResult(result):
  """Returns result serialized to a single line string."""
  # TODO(dbieber): Ensure line is fewer than eg 120 characters.
  def _flatten(value):
    # Fallback rendering: str() with newlines collapsed to spaces.
    return str(value).replace('\n', ' ')
  # Strings are shown bare (not JSON-quoted), newlines collapsed.
  if isinstance(result, six.string_types):
    return _flatten(result)
  # TODO(dbieber): Show a small amount of usage information about the function
  # or module if it fits cleanly on the line.
  if inspect.isfunction(result):
    return '<function {name}>'.format(name=result.__name__)
  if inspect.ismodule(result):
    return '<module {name}>'.format(name=result.__name__)
  try:
    # Don't force conversion to ascii.
    return json.dumps(result, ensure_ascii=False)
  except (TypeError, ValueError):
    return _flatten(result)
def _Fire(component, args, parsed_flag_args, context, name=None):
  """Execute a Fire command on a target component using the args supplied.
  Arguments that come after a final isolated '--' are treated as Flags, eg for
  interactive mode or completion script generation.
  Other arguments are consumed by the execution of the Fire command, eg in the
  traversal of the members of the component, or in calling a function or
  instantiating a class found during the traversal.
  The steps performed by this method are:
  1. Parse any Flag args (the args after the final --)
  2. Start with component as the current component.
  2a. If the current component is a class, instantiate it using args from args.
  2b. If the component is a routine, call it using args from args.
  2c. If the component is a sequence, index into it using an arg from
      args.
  2d. If possible, access a member from the component using an arg from args.
  2e. If the component is a callable object, call it using args from args.
  2f. Repeat 2a-2e until no args remain.
  Note: Only the first applicable rule from 2a-2e is applied in each iteration.
  After each iteration of step 2a-2e, the current component is updated to be the
  result of the applied rule.
  3a. Embed into ipython REPL if interactive mode is selected.
  3b. Generate a completion script if that flag is provided.
  In step 2, arguments will only ever be consumed up to a separator; a single
  step will never consume arguments from both sides of a separator.
  The separator defaults to a hyphen (-), and can be overwritten with the
  --separator Fire argument.
  Args:
    component: The target component for Fire.
    args: A list of args to consume in Firing on the component, usually from
        the command line.
    parsed_flag_args: The values of the flag args (e.g. --verbose, --separator)
        that are part of every Fire CLI.
    context: A dict with the local and global variables available at the call
        to Fire.
    name: Optional. The name of the command. Used in interactive mode and in
        the tab completion script.
  Returns:
    FireTrace of components starting with component, tracing Fire's execution
    path as it consumes args.
  Raises:
    ValueError: If there are arguments that cannot be consumed.
    ValueError: If --completion is specified but no name available.
  """
  verbose = parsed_flag_args.verbose
  interactive = parsed_flag_args.interactive
  separator = parsed_flag_args.separator
  show_completion = parsed_flag_args.completion
  show_help = parsed_flag_args.help
  show_trace = parsed_flag_args.trace
  # component can be a module, class, routine, object, etc.
  if component is None:
    component = context
  initial_component = component
  component_trace = trace.FireTrace(
      initial_component=initial_component, name=name, separator=separator,
      verbose=verbose, show_help=show_help, show_trace=show_trace)
  instance = None
  remaining_args = args
  # Each iteration advances the current component by one step (rules 2a-2e),
  # consuming args as it goes; the loop exits when no progress can be made.
  while True:
    last_component = component
    initial_args = remaining_args
    if not remaining_args and (show_help or interactive or show_trace
                               or show_completion is not None):
      # Don't initialize the final class or call the final function unless
      # there's a separator after it, and instead process the current component.
      break
    if _IsHelpShortcut(component_trace, remaining_args):
      remaining_args = []
      break
    saved_args = []
    used_separator = False
    if separator in remaining_args:
      # For the current component, only use arguments up to the separator.
      separator_index = remaining_args.index(separator)
      saved_args = remaining_args[separator_index + 1:]
      remaining_args = remaining_args[:separator_index]
      used_separator = True
    assert separator not in remaining_args
    handled = False
    candidate_errors = []
    is_callable = inspect.isclass(component) or inspect.isroutine(component)
    is_callable_object = callable(component) and not is_callable
    is_sequence = isinstance(component, (list, tuple))
    is_map = isinstance(component, dict) or inspectutils.IsNamedTuple(component)
    if not handled and is_callable:
      # The component is a class or a routine; we'll try to initialize it or
      # call it.
      is_class = inspect.isclass(component)
      try:
        component, remaining_args = _CallAndUpdateTrace(
            component,
            remaining_args,
            component_trace,
            treatment='class' if is_class else 'routine',
            target=component.__name__)
        handled = True
      except FireError as error:
        candidate_errors.append((error, initial_args))
      if handled and last_component is initial_component:
        # If the initial component is a class, keep an instance for use with -i.
        instance = component
    if not handled and is_sequence and remaining_args:
      # The component is a tuple or list; we'll try to access a member.
      arg = remaining_args[0]
      try:
        index = int(arg)
        component = component[index]
        handled = True
      except (ValueError, IndexError):
        error = FireError(
            'Unable to index into component with argument:', arg)
        candidate_errors.append((error, initial_args))
      if handled:
        remaining_args = remaining_args[1:]
        filename = None
        lineno = None
        component_trace.AddAccessedProperty(
            component, index, [arg], filename, lineno)
    if not handled and is_map and remaining_args:
      # The component is a dict or other key-value map; try to access a member.
      target = remaining_args[0]
      # Treat namedtuples as dicts when handling them as a map.
      if inspectutils.IsNamedTuple(component):
        component_dict = component._asdict()  # pytype: disable=attribute-error
      else:
        component_dict = component
      if target in component_dict:
        component = component_dict[target]
        handled = True
      elif target.replace('-', '_') in component_dict:
        component = component_dict[target.replace('-', '_')]
        handled = True
      else:
        # The target isn't present in the dict as a string key, but maybe it is
        # a key as another type.
        # TODO(dbieber): Consider alternatives for accessing non-string keys.
        for key, value in component_dict.items():
          if target == str(key):
            component = value
            handled = True
            break
      if handled:
        remaining_args = remaining_args[1:]
        filename = None
        lineno = None
        component_trace.AddAccessedProperty(
            component, target, [target], filename, lineno)
      else:
        error = FireError('Cannot find key:', target)
        candidate_errors.append((error, initial_args))
    if not handled and remaining_args:
      # Object handler. We'll try to access a member of the component.
      try:
        target = remaining_args[0]
        component, consumed_args, remaining_args = _GetMember(
            component, remaining_args)
        handled = True
        filename, lineno = inspectutils.GetFileAndLine(component)
        component_trace.AddAccessedProperty(
            component, target, consumed_args, filename, lineno)
      except FireError as error:
        # Couldn't access member.
        candidate_errors.append((error, initial_args))
    if not handled and is_callable_object:
      # The component is a callable object; we'll try to call it.
      try:
        component, remaining_args = _CallAndUpdateTrace(
            component,
            remaining_args,
            component_trace,
            treatment='callable')
        handled = True
      except FireError as error:
        candidate_errors.append((error, initial_args))
    if not handled and candidate_errors:
      # No rule applied; report the first failure encountered this iteration.
      error, initial_args = candidate_errors[0]
      component_trace.AddError(error, initial_args)
      return component_trace
    if used_separator:
      # Add back in the arguments from after the separator.
      if remaining_args:
        remaining_args = remaining_args + [separator] + saved_args
      elif (inspect.isclass(last_component)
            or inspect.isroutine(last_component)):
        remaining_args = saved_args
        component_trace.AddSeparator()
      elif component is not last_component:
        remaining_args = [separator] + saved_args
      else:
        # It was an unnecessary separator.
        remaining_args = saved_args
    if component is last_component and remaining_args == initial_args:
      # We're making no progress.
      break
  if remaining_args:
    component_trace.AddError(
        FireError('Could not consume arguments:', remaining_args),
        initial_args)
    return component_trace
  if show_completion is not None:
    if name is None:
      raise ValueError('Cannot make completion script without command name')
    script = CompletionScript(name, initial_component, shell=show_completion)
    component_trace.AddCompletionScript(script)
  if interactive:
    # Expose the calling context plus Fire's own state in the REPL namespace.
    variables = context.copy()
    if name is not None:
      variables[name] = initial_component
    variables['component'] = initial_component
    variables['result'] = component
    variables['trace'] = component_trace
    if instance is not None:
      variables['self'] = instance
    interact.Embed(variables, verbose)
    component_trace.AddInteractiveMode()
  return component_trace
def _GetMember(component, args):
"""Returns a subcomponent of component by consuming an arg from args.
Given a starting component and args, this function gets a member from that
component, consuming one arg in the process.
Args:
component: The component from which to get a member.
args: Args from which to consume in the search for the next component.
Returns:
component: The component that was found by consuming an arg.
consumed_args: The args that were consumed by getting this member.
remaining_args: The remaining args that haven't been consumed yet.
Raises:
FireError: If we cannot consume an argument to get a member.
"""
members = dir(component)
arg = args[0]
arg_names = [
arg,
arg.replace('-', '_'), # treat '-' as '_'.
]
for arg_name in arg_names:
if arg_name in members:
return getattr(component, arg_name), [arg], args[1:]
raise FireError('Could not consume arg:', arg)
def _CallAndUpdateTrace(component, args, component_trace, treatment='class',
                        target=None):
  """Call the component by consuming args from args, and update the FireTrace.
  The component could be a class, a routine, or a callable object. This function
  calls the component and adds the appropriate action to component_trace.
  Args:
    component: The component to call
    args: Args for calling the component
    component_trace: FireTrace object that contains action trace
    treatment: Type of treatment used. Indicating whether we treat the component
        as a class, a routine, or a callable.
    target: Target in FireTrace element, default is None. If the value is None,
        the component itself will be used as target.
  Returns:
    component: The object that is the result of the callable call.
    remaining_args: The remaining args that haven't been consumed yet.
  """
  if not target:
    target = component
  filename, lineno = inspectutils.GetFileAndLine(component)
  metadata = decorators.GetMetadata(component)
  # For callable objects, parse against __call__'s signature, not the object.
  fn = component.__call__ if treatment == 'callable' else component
  parse = _MakeParseFn(fn, metadata)
  (varargs, kwargs), consumed_args, remaining_args, capacity = parse(args)
  component = fn(*varargs, **kwargs)
  if treatment == 'class':
    action = trace.INSTANTIATED_CLASS
  elif treatment == 'routine':
    action = trace.CALLED_ROUTINE
  else:
    action = trace.CALLED_CALLABLE
  component_trace.AddCalledComponent(
      component, target, consumed_args, filename, lineno, capacity,
      action=action)
  return component, remaining_args
def _MakeParseFn(fn, metadata):
  """Creates a parse function for fn.
  Args:
    fn: The function or class to create the parse function for.
    metadata: Additional metadata about the component the parse function is for.
  Returns:
    A parse function for fn. The parse function accepts a list of arguments
    and returns ((varargs, kwargs), consumed_args, remaining_args, capacity).
    The original function fn can then be called with fn(*varargs, **kwargs).
    consumed_args are the args used by the call, remaining_args are the
    leftover args, and capacity indicates whether the call could have
    accepted more args in place of defaults.
  """
  fn_spec = inspectutils.GetFullArgSpec(fn)
  # Note: num_required_args is the number of positional arguments without
  # default values. All of these arguments are required.
  num_required_args = len(fn_spec.args) - len(fn_spec.defaults)
  # Keyword-only args with no entry in kwonlydefaults have no default value,
  # so they are required.
  required_kwonly = set(fn_spec.kwonlyargs) - set(fn_spec.kwonlydefaults)
  def _ParseFn(args):
    """Parses the list of `args` into (varargs, kwargs), remaining_args."""
    # First split off --name / --name=value style keyword arguments.
    kwargs, remaining_kwargs, remaining_args = _ParseKeywordArgs(args, fn_spec)
    # Note: _ParseArgs modifies kwargs.
    parsed_args, kwargs, remaining_args, capacity = _ParseArgs(
        fn_spec.args, fn_spec.defaults, num_required_args, kwargs,
        remaining_args, metadata)
    if fn_spec.varargs or fn_spec.varkw:
      # If we're allowed *varargs or **kwargs, there's always capacity.
      capacity = True
    extra_kw = set(kwargs) - set(fn_spec.kwonlyargs)
    if fn_spec.varkw is None and extra_kw:
      raise FireError('Unexpected kwargs present:', extra_kw)
    missing_kwonly = set(required_kwonly) - set(kwargs)
    if missing_kwonly:
      raise FireError('Missing required flags:', missing_kwonly)
    # If we accept *varargs, then use all remaining arguments for *varargs.
    if fn_spec.varargs is not None:
      varargs, remaining_args = remaining_args, []
    else:
      varargs = []
    for index, value in enumerate(varargs):
      varargs[index] = _ParseValue(value, None, None, metadata)
    varargs = parsed_args + varargs
    remaining_args += remaining_kwargs
    # Everything that is not left over was consumed by this call.
    consumed_args = args[:len(args) - len(remaining_args)]
    return (varargs, kwargs), consumed_args, remaining_args, capacity
  return _ParseFn
def _ParseArgs(fn_args, fn_defaults, num_required_args, kwargs,
               remaining_args, metadata):
  """Parses the positional and named arguments from the available supplied args.
  Modifies kwargs, removing args as they are used.
  Args:
    fn_args: A list of argument names that the target function accepts,
      including positional and named arguments, but not the varargs or kwargs
      names.
    fn_defaults: A list of the default values in the function argspec.
    num_required_args: The number of required arguments from the function's
      argspec. This is the number of arguments without a default value.
    kwargs: Dict with named command line arguments and their values.
    remaining_args: The remaining command line arguments, which may still be
      used as positional arguments.
    metadata: Metadata about the function, typically from Fire decorators.
  Returns:
    parsed_args: A list of values to be used as positional arguments for calling
      the target function.
    kwargs: The input dict kwargs modified with the used kwargs removed.
    remaining_args: A list of the supplied args that have not been used yet.
    capacity: Whether the call could have taken args in place of defaults.
  Raises:
    FireError: If additional positional arguments are expected, but none are
      available.
  """
  accepts_positional_args = metadata.get(decorators.ACCEPTS_POSITIONAL_ARGS)
  capacity = False  # If we see a default get used, we'll set capacity to True
  # Select unnamed args.
  parsed_args = []
  for index, arg in enumerate(fn_args):
    # kwargs holds command line strings, so a missing arg pops as None.
    value = kwargs.pop(arg, None)
    if value is not None:  # A value is specified at the command line.
      value = _ParseValue(value, index, arg, metadata)
      parsed_args.append(value)
    else:  # No value has been explicitly specified.
      if remaining_args and accepts_positional_args:
        # Use a positional arg.
        value = remaining_args.pop(0)
        value = _ParseValue(value, index, arg, metadata)
        parsed_args.append(value)
      elif index < num_required_args:
        raise FireError(
            'The function received no value for the required argument:', arg)
      else:
        # We're past the args for which there's no default value.
        # There's a default value for this arg.
        capacity = True
        default_index = index - num_required_args  # index into the defaults.
        parsed_args.append(fn_defaults[default_index])
  # Any kwargs left over belong to **kwargs or keyword-only args; parse
  # their string values in place.
  for key, value in kwargs.items():
    kwargs[key] = _ParseValue(value, None, key, metadata)
  return parsed_args, kwargs, remaining_args, capacity
def _ParseKeywordArgs(args, fn_spec):
  """Parses the supplied arguments for keyword arguments.
  Given a list of arguments, finds occurrences of --name value, and uses 'name'
  as the keyword and 'value' as the value. Constructs and returns a dictionary
  of these keyword arguments, and returns a list of the remaining arguments.
  Only if fn_keywords is None, this only finds argument names used by the
  function, specified through fn_args.
  This returns the values of the args as strings. They are later processed by
  _ParseArgs, which converts them to the appropriate type.
  Args:
    args: A list of arguments.
    fn_spec: The inspectutils.FullArgSpec describing the given callable.
  Returns:
    kwargs: A dictionary mapping keywords to values.
    remaining_kwargs: A list of the unused kwargs from the original args.
    remaining_args: A list of the unused arguments from the original args.
  Raises:
    FireError: If a single-character flag is passed that could refer to multiple
      possible args.
  """
  kwargs = {}
  remaining_kwargs = []
  remaining_args = []
  fn_keywords = fn_spec.varkw
  fn_args = fn_spec.args + fn_spec.kwonlyargs
  if not args:
    return kwargs, remaining_kwargs, remaining_args
  # Set when a flag takes its value from the NEXT token, so that token is
  # not processed again on the following iteration.
  skip_argument = False
  for index, argument in enumerate(args):
    if skip_argument:
      skip_argument = False
      continue
    if _IsFlag(argument):
      # This is a named argument. We get its value from this arg or the next.
      # Terminology:
      # argument: A full token from the command line, e.g. '--alpha=10'
      # stripped_argument: An argument without leading hyphens.
      # key: The contents of the stripped argument up to the first equal sign.
      # "shortcut flag": refers to an argument where the key is just the first
      #   letter of a longer keyword.
      # keyword: The Python function argument being set by this argument.
      # value: The unparsed value for that Python function argument.
      contains_equals = '=' in argument
      stripped_argument = argument.lstrip('-')
      if contains_equals:
        key, value = stripped_argument.split('=', 1)
      else:
        key = stripped_argument
      key = key.replace('-', '_')
      # Bool syntax: a flag with no '=value' whose next token is absent or is
      # itself a flag, e.g. '--verbose' or '--noverbose'.
      is_bool_syntax = (not contains_equals and
                        (index + 1 == len(args) or _IsFlag(args[index + 1])))
      # Determine the keyword.
      keyword = ''  # Indicates no valid keyword has been found yet.
      if (key in fn_args
          or (is_bool_syntax and key.startswith('no') and key[2:] in fn_args)
          or fn_keywords):
        keyword = key
      elif len(key) == 1:
        # This may be a shortcut flag.
        matching_fn_args = [arg for arg in fn_args if arg[0] == key]
        if len(matching_fn_args) == 1:
          keyword = matching_fn_args[0]
        elif len(matching_fn_args) > 1:
          raise FireError("The argument '{}' is ambiguous as it could "
                          "refer to any of the following arguments: {}".format(
                              argument, matching_fn_args))
      # Determine the value.
      if not keyword:
        got_argument = False
      elif contains_equals:
        # Already got the value above.
        got_argument = True
      elif is_bool_syntax:
        # There's no next arg or the next arg is a Flag, so we consider this
        # flag to be a boolean.
        got_argument = True
        if keyword in fn_args:
          value = 'True'
        elif keyword.startswith('no'):
          # '--noflag' sets 'flag' to False.
          keyword = keyword[2:]
          value = 'False'
        else:
          value = 'True'
      else:
        # The assert should pass. Otherwise either contains_equals or
        # is_bool_syntax would have been True.
        assert index + 1 < len(args)
        value = args[index + 1]
        got_argument = True
      # In order for us to consume the argument as a keyword arg, we either:
      # Need to be explicitly expecting the keyword, or we need to be
      # accepting **kwargs.
      skip_argument = not contains_equals and not is_bool_syntax
      if got_argument:
        kwargs[keyword] = value
      else:
        # Not usable as a keyword here; keep the flag (and its value token,
        # if any) for a later component to consume.
        remaining_kwargs.append(argument)
        if skip_argument:
          remaining_kwargs.append(args[index + 1])
    else:  # not _IsFlag(argument)
      remaining_args.append(argument)
  return kwargs, remaining_kwargs, remaining_args
def _IsFlag(argument):
  """Determines if the argument is a flag argument.
  If it starts with a hyphen and isn't a negative number, it's a flag.
  Args:
    argument: A command line argument that may or may not be a flag.
  Returns:
    A truthy value when the argument is a flag, falsy otherwise.
  """
  single = _IsSingleCharFlag(argument)
  if single:
    return single
  return _IsMultiCharFlag(argument)
def _IsSingleCharFlag(argument):
"""Determines if the argument is a single char flag (e.g. '-a')."""
return re.match('^-[a-zA-Z]$', argument) or re.match('^-[a-zA-Z]=', argument)
def _IsMultiCharFlag(argument):
"""Determines if the argument is a multi char flag (e.g. '--alpha')."""
return argument.startswith('--') or re.match('^-[a-zA-Z]', argument)
def _ParseValue(value, index, arg, metadata):
  """Parses value, a string, into the appropriate type.
  The parse function is chosen from the component's Fire metadata when one is
  registered for this positional index or argument name; otherwise the default
  parser is used.
  Args:
    value: The string value to be parsed, typically a command line argument.
    index: The index of the value in the function's argspec.
    arg: The name of the argument the value is being parsed for.
    metadata: Metadata about the function, typically from Fire decorators.
  Returns:
    value, parsed into the appropriate type for calling a function.
  """
  parse_fns = metadata.get(decorators.FIRE_PARSE_FNS)
  parse_fn = None
  if parse_fns:
    positional = parse_fns['positional']
    named = parse_fns['named']
    if index is not None and 0 <= index < len(positional):
      parse_fn = positional[index]
    elif arg in named:
      parse_fn = named[arg]
    else:
      # May still be None, in which case we fall back to the default parser.
      parse_fn = parse_fns['default']
  if parse_fn is None:
    parse_fn = parser.DefaultParseValue
  return parse_fn(value)
| 37.196923 | 80 | 0.703367 |
c6be62c29e03d824fad4faec3dfc71486b07457d | 78 | py | Python | front/server.py | rivo2302/ikulture | d8220e299beac23ab362ab4b2748eda64a41a28e | [
"MIT"
] | null | null | null | front/server.py | rivo2302/ikulture | d8220e299beac23ab362ab4b2748eda64a41a28e | [
"MIT"
] | null | null | null | front/server.py | rivo2302/ikulture | d8220e299beac23ab362ab4b2748eda64a41a28e | [
"MIT"
] | null | null | null | from webservice import app
# Start the web app's built-in development server (debug enabled) when this
# module is executed directly.
if __name__ == '__main__':
    app.run(debug=True)
679fee711a4752d16305068a5582e6c944521713 | 9,847 | py | Python | mqtty/view/topic_list.py | masayukig/mqtty | 7b2439959bb1d308e0cb4f0e98316e8ee8df6aa2 | [
"Apache-2.0"
] | null | null | null | mqtty/view/topic_list.py | masayukig/mqtty | 7b2439959bb1d308e0cb4f0e98316e8ee8df6aa2 | [
"Apache-2.0"
] | 9 | 2017-08-23T08:34:55.000Z | 2017-12-16T13:39:50.000Z | mqtty/view/topic_list.py | masayukig/mqtty | 7b2439959bb1d308e0cb4f0e98316e8ee8df6aa2 | [
"Apache-2.0"
] | 1 | 2019-06-04T17:48:15.000Z | 2019-06-04T17:48:15.000Z | # Copyright 2014 OpenStack Foundation
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import urwid
from mqtty import keymap
from mqtty import mywid
from mqtty.view import message_list as view_message_list
from mqtty.view import mouse_scroll_decorator
class ColumnInfo(object):
    """Describes one column of the topic table.
    Attributes:
        name: Column heading text.
        packing: urwid packing mode, 'given' (fixed width) or a weight mode.
        value: The fixed width (for 'given') or the weight factor.
        options: The (packing, value) tuple passed to urwid column options.
        spacing: Estimated on-screen width including one separator cell.
    """
    def __init__(self, name, packing, value):
        self.name = name
        self.packing = packing
        self.value = value
        self.options = (packing, value)
        # 'given' columns occupy exactly `value` cells; weighted columns are
        # approximated at 8 cells per weight unit.  +1 for the separator.
        base_width = value if packing == 'given' else value * 8
        self.spacing = base_width + 1
# Column layout of the topic table; widths/weights here should stay in sync
# with the widgets built by TopicListHeader and TopicRow.
COLUMNS = [
    ColumnInfo('No.', 'given', 5),
    ColumnInfo('Topic', 'weight', 1),
    ColumnInfo('# of MSG', 'given', 9),
]
class TopicListHeader(urwid.WidgetWrap):
    """Fixed header row shown above the topic list table."""
    def __init__(self):
        # One cell per column: fixed-width number, weighted topic name,
        # fixed-width message count.
        header_columns = urwid.Columns([
            (5, urwid.Text(u' No.')),
            urwid.Text(u' Topic'),
            (9, urwid.Text(u'# of MSG')),
        ])
        super(TopicListHeader, self).__init__(header_columns)
@mouse_scroll_decorator.ScrollByWheel
class TopicListView(urwid.WidgetWrap, mywid.Searchable):
    """Scrollable, searchable list of topics backed by the app database."""
    title = "Topics"
    def getCommands(self):
        """Return (keymap command, help text) pairs supported by this view."""
        return [
            (keymap.TOGGLE_LIST_SUBSCRIBED,
             "Toggle whether only subscribed projects or all projects are"
             " listed"),
            (keymap.TOGGLE_LIST_REVIEWED,
             "Toggle listing of projects with unreviewed changes"),
            (keymap.TOGGLE_SUBSCRIBED,
             "Toggle the subscription flag for the selected project"),
            (keymap.REFRESH,
             "Sync subscribed projects"),
            (keymap.TOGGLE_MARK,
             "Toggle the process mark for the selected project"),
            (keymap.NEW_PROJECT_TOPIC,
             "Create project topic"),
            (keymap.DELETE_PROJECT_TOPIC,
             "Delete selected project topic"),
            (keymap.MOVE_PROJECT_TOPIC,
             "Move selected project to topic"),
            (keymap.COPY_PROJECT_TOPIC,
             "Copy selected project to topic"),
            (keymap.REMOVE_PROJECT_TOPIC,
             "Remove selected project from topic"),
            (keymap.RENAME_PROJECT_TOPIC,
             "Rename selected project topic"),
            (keymap.INTERACTIVE_SEARCH,
             "Interactive search"),
        ]
    def help(self):
        """Return (command, formatted key, help text) triples for the help UI."""
        key = self.app.config.keymap.formatKeys
        commands = self.getCommands()
        return [(c[0], key(c[0]), c[1]) for c in commands]
    def __init__(self, app):
        super(TopicListView, self).__init__(urwid.Pile([]))
        self.log = logging.getLogger('mqtty.view.topic_list')
        self.searchInit()
        self.app = app
        self.unreviewed = True
        self.subscribed = True
        self.reverse = False
        self.project_rows = {}
        # Maps topic.key -> TopicRow so refresh() can update rows in place.
        self.topic_rows = {}
        self.open_topics = set()
        self.sort_by = 'name'
        self.listbox = urwid.ListBox(urwid.SimpleFocusListWalker([]))
        self.refresh()
        self.header = TopicListHeader()
        # Layout: app header, divider, table header, then the list body.
        self._w.contents.append((app.header, ('pack', 1)))
        self._w.contents.append((urwid.Divider(), ('pack', 1)))
        self._w.contents.append(
            (urwid.AttrWrap(self.header, 'table-header'), ('pack', 1)))
        self._w.contents.append((self.listbox, ('weight', 1)))
        self._w.set_focus(3)
    def selectable(self):
        # The view accepts keyboard focus.
        return True
    def sizing(self):
        return frozenset([urwid.FIXED])
    def refresh(self):
        """Reload topics from the database, creating or updating rows."""
        self.log.debug('topic_list refresh called ===============')
        # NOTE(review): this bare len() call has no effect.
        len(self.listbox.body)
        # for row in self.listbox.body:
        #     self.listbox.body.remove(row)
        i = 0
        with self.app.db.getSession() as session:
            topic_list = session.getTopics(sort_by=self.sort_by)
            if self.reverse:
                topic_list.reverse()
            for topic in topic_list:
                num_msg = len(session.getMessagesByTopic(topic))
                key = topic.key
                row = self.topic_rows.get(key)
                if not row:
                    # First time we see this topic: create and cache its row.
                    row = TopicRow(topic, num_msg, self.onSelect)
                    self.listbox.body.append(row)
                    self.topic_rows[key] = row
                else:
                    row.update(topic, num_msg)
                i = i + 1
            self.title = "Topics: " + str(i)
            self.app.status.update(title=self.title)
        # if i > 0:
        #     self.listbox.body.pop()
        # for key in self.app.db.topics:
        #     self.log.debug(key)
        #     self.listbox.body.append(TopicRow(Topic(key, key + "_name")))
    def clearTopicList(self):
        """Remove all cached rows from the list widget."""
        for key, value in self.topic_rows.items():
            self.listbox.body.remove(value)
        self.topic_rows = {}
    def keypress(self, size, key):
        """Route key presses through search, urwid, then keymap commands."""
        if self.searchKeypress(size, key):
            return None
        if not self.app.input_buffer:
            key = super(TopicListView, self).keypress(size, key)
        keys = self.app.input_buffer + [key]
        commands = self.app.config.keymap.getCommands(keys)
        ret = self.handleCommands(commands)
        if ret is True:
            if keymap.FURTHER_INPUT not in commands:
                self.app.clearInputBuffer()
            return None
        return key
    def handleCommands(self, commands):
        """Execute the first matching command; return True when handled."""
        self.log.debug('handleCommands called')
        if keymap.REFRESH in commands:
            self.refresh()
            self.app.status.update()
            return True
        if keymap.SORT_BY_NUMBER in commands:
            if not len(self.listbox.body):
                return True
            self.sort_by = 'key'
            self.clearTopicList()
            self.refresh()
            return True
        if keymap.SORT_BY_TOPIC in commands:
            if not len(self.listbox.body):
                return True
            self.sort_by = 'name'
            self.clearTopicList()
            self.refresh()
            return True
        # NOTE(review): the block below is not guarded by any command check,
        # so EVERY command that reaches this point toggles the sort direction
        # and returns True.  It looks like a guard line (e.g.
        # `if keymap.SORT_BY_REVERSE in commands:`) was lost from this copy
        # of the file -- confirm against upstream history.
        if not len(self.listbox.body):
            return True
        if self.reverse:
            self.reverse = False
        else:
            self.reverse = True
        self.clearTopicList()
        self.refresh()
        return True
        # NOTE(review): unreachable -- located after an unconditional
        # `return True`; see the note above.
        if keymap.INTERACTIVE_SEARCH in commands:
            self.searchStart()
            return True
    def onSelect(self, button, data):
        """Open the message list for the selected topic."""
        topic = data
        self.app.changeScreen(view_message_list.MessageListView(
            self.app, topic))
class TopicListColumns(object):
    """Mixin that rebuilds self.columns from the module-level COLUMNS table."""
    def updateColumns(self):
        """Repopulate the columns widget with enabled and category columns."""
        contents = self.columns.contents
        options = self.columns.options
        del contents[:]
        for colinfo in COLUMNS:
            if colinfo.name not in self.enabled_columns:
                continue
            # Widget attribute name is derived from the column heading,
            # e.g. '# of MSG' -> '#_of_msg'.
            attr = colinfo.name.lower().replace(' ', '_')
            contents.append((getattr(self, attr), options(*colinfo.options)))
        for extra in self.category_columns:
            contents.append(extra)
class TopicRow(urwid.Button, TopicListColumns):
    """One selectable row of the topic list: number, name, message count."""
    # Attribute map applied when the row has focus.
    topic_focus_map = {None: 'focused',
                       # 'subscribed-project': 'focused-subscribed-project',
                       # 'marked-project': 'focused-marked-project',
                       }
    def selectable(self):
        return True
    def _setName(self, name):
        # Prefix the name with '%' when marked, a space otherwise, so marked
        # rows stand out while unmarked rows stay column-aligned.
        self.topic_name = name
        name = name
        if self.mark:
            name = '%' + name
        else:
            name = ' ' + name
        self.name.set_text(name)
    def __init__(self, topic, num_msg, callback=None):
        # `callback` receives (button, topic) when the row is activated.
        super(TopicRow, self).__init__('', on_press=callback,
                                       user_data=(topic))
        self.mark = False
        self._style = None
        # self.topic_key = topic.key
        self.name = mywid.SearchableText('')
        self._setName(topic.name)
        # FIXME: showing 'topic_key' is just for debugging. This should be
        # removed.
        self.topic_key = urwid.Text(u'', align=urwid.RIGHT)
        self.name.set_wrap_mode('clip')
        self.num_msg = urwid.Text(u'', align=urwid.RIGHT)
        col = urwid.Columns([
            ('fixed', 5, self.topic_key),
            self.name,
            ('fixed', 9, self.num_msg),
        ])
        self.row_style = urwid.AttrMap(col, '')
        self._w = urwid.AttrMap(self.row_style, None,
                                focus_map=self.topic_focus_map)
        self._style = None  # 'subscribed-project'
        self.row_style.set_attr_map({None: self._style})
        # self.num_msg = num_msg
        self.update(topic, num_msg)
    def search(self, search, attribute):
        # Delegate interactive search to the searchable name widget.
        return self.name.search(search, attribute)
    def update(self, topic, num_msg):
        """Refresh the displayed key and message count from the topic."""
        # FIXME: showing 'topic_key' is just for debugging. This should be
        # removed.
        self.topic_key.set_text('%i ' % topic.key)
        self.num_msg.set_text('%i ' % num_msg)
        # self._setName(str(topic.key) + " " + topic.name + " " + str(num_msg))
    def toggleMark(self):
        """Flip the process mark and restyle/rename the row accordingly."""
        self.mark = not self.mark
        if self.mark:
            style = 'marked-topic'
        else:
            style = self._style
        self.row_style.set_attr_map({None: style})
        self._setName(self.topic_name)
| 33.838488 | 79 | 0.573271 |
44a5cd3f4d5c9290f317faf0b8a35ec358dfda55 | 2,499 | py | Python | sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_patch.py | dicolanl/azure-sdk-for-python | 6207bfc24973f60dc779a939c23123a4fcb4f598 | [
"MIT"
] | null | null | null | sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_patch.py | dicolanl/azure-sdk-for-python | 6207bfc24973f60dc779a939c23123a4fcb4f598 | [
"MIT"
] | null | null | null | sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_patch.py | dicolanl/azure-sdk-for-python | 6207bfc24973f60dc779a939c23123a4fcb4f598 | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------
import six
from .models import TextRecord
def _validate_text_records(records):
    """Validate a batch of text records and normalize it for the service.
    Accepts either a homogeneous list of strings (each becomes a record whose
    id is its list position) or a homogeneous list of dict/TextRecord objects,
    which are passed through unchanged.
    Raises:
        ValueError: If records is empty or None.
        TypeError: If records is a string, a dict, or mixes strings with
            dict/TextRecord objects.
    """
    if not records:
        raise ValueError("Input records can not be empty or None")
    if isinstance(records, six.string_types):
        raise TypeError("Input records cannot be a string.")
    if isinstance(records, dict):
        raise TypeError("Input records cannot be a dict")
    all_strings = all(isinstance(rec, six.string_types) for rec in records)
    if not all_strings:
        all_objects = all(
            isinstance(rec, (dict, TextRecord)) for rec in records
        )
        if not all_objects:
            raise TypeError(
                "Mixing string and dictionary/object record input unsupported."
            )
    request_batch = []
    for position, record in enumerate(records):
        if isinstance(record, six.string_types):
            request_batch.append({"id": str(position), "text": record})
        else:
            request_batch.append(record)
    return request_batch
def _get_positional_body(*args, **kwargs):
"""Verify args and kwargs are valid, and then return the positional body, if users passed it in."""
if len(args) > 1:
raise TypeError("There can only be one positional argument, which is the POST body of this request.")
if args and "options" in kwargs:
raise TypeError(
"You have already supplied the request body as a positional parameter, "
"you can not supply it as a keyword argument as well."
)
return args[0] if args else None
def _verify_qna_id_and_question(query_knowledgebase_options):
"""For query_knowledge_base we require either `question` or `qna_id`."""
try:
qna_id = query_knowledgebase_options.qna_id
question = query_knowledgebase_options.question
except AttributeError:
qna_id = query_knowledgebase_options.get("qna_id") or query_knowledgebase_options.get("qnaId")
question = query_knowledgebase_options.get("question")
if not (qna_id or question):
raise TypeError(
"You need to pass in either `qna_id` or `question`."
)
if qna_id and question:
raise TypeError("You can not specify both `qna_id` and `question`.")
| 38.446154 | 109 | 0.623449 |
6ded8d45258c816324d27f66fc182561e37ad3e1 | 1,695 | py | Python | third_party/upb/python/pb_unit_tests/well_known_types_test_wrapper.py | echo80313/grpc | 93cdc8b77e7b3fe4a3afec1c9c7e29b3f02ec3cf | [
"Apache-2.0"
] | 1 | 2021-12-01T03:10:14.000Z | 2021-12-01T03:10:14.000Z | third_party/upb/python/pb_unit_tests/well_known_types_test_wrapper.py | echo80313/grpc | 93cdc8b77e7b3fe4a3afec1c9c7e29b3f02ec3cf | [
"Apache-2.0"
] | 4 | 2022-02-27T18:59:37.000Z | 2022-02-27T18:59:53.000Z | third_party/upb/python/pb_unit_tests/well_known_types_test_wrapper.py | echo80313/grpc | 93cdc8b77e7b3fe4a3afec1c9c7e29b3f02ec3cf | [
"Apache-2.0"
] | 1 | 2020-12-13T22:14:29.000Z | 2020-12-13T22:14:29.000Z | # Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Google LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Google LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from google.protobuf.internal import well_known_types_test
import unittest
# Re-run the upstream protobuf well-known-types test suite when this wrapper
# module is executed directly.
if __name__ == '__main__':
    unittest.main(module=well_known_types_test, verbosity=2)
| 54.677419 | 81 | 0.774631 |
962f25dda50afc3044a68a3ee4c67f7860a81382 | 3,487 | py | Python | st2common/tests/unit/test_rbac_utils.py | ekhavana/st2 | 2b47b0e317a2dfd7d92d63ec6dcf706493148890 | [
"Apache-2.0"
] | null | null | null | st2common/tests/unit/test_rbac_utils.py | ekhavana/st2 | 2b47b0e317a2dfd7d92d63ec6dcf706493148890 | [
"Apache-2.0"
] | null | null | null | st2common/tests/unit/test_rbac_utils.py | ekhavana/st2 | 2b47b0e317a2dfd7d92d63ec6dcf706493148890 | [
"Apache-2.0"
] | null | null | null | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from st2tests.base import DbTestCase
from st2tests.config import parse_args
from st2common.models.db.auth import UserDB
from st2common.models.db.rbac import UserRoleAssignmentDB
from st2common.rbac.types import SystemRole
from st2common.rbac.utils import user_is_system_admin
from st2common.rbac.utils import user_is_admin
from st2common.rbac.utils import user_has_role
from st2common.rbac.migrations import insert_system_roles
# Public API of this test module.
__all__ = [
    'RBACUtilsTestCase'
]
class RBACUtilsTestCase(DbTestCase):
    """DB-backed tests for the RBAC utility predicates."""
    @classmethod
    def setUpClass(cls):
        """Seed the test DB with system roles, three users, and assignments."""
        super(RBACUtilsTestCase, cls).setUpClass()
        # TODO: Put in the base rbac db test case
        insert_system_roles()
        # Add mock users - system admin, admin, non-admin
        cls.system_admin_user = UserDB(name='system_admin_user')
        cls.system_admin_user.save()
        cls.admin_user = UserDB(name='admin_user')
        cls.admin_user.save()
        # regular_user deliberately gets no role assignment below.
        cls.regular_user = UserDB(name='regular_user')
        cls.regular_user.save()
        # Add system admin role assignment
        role_assignment_1 = UserRoleAssignmentDB(user=cls.system_admin_user.name,
                                                 role=SystemRole.SYSTEM_ADMIN)
        role_assignment_1.save()
        # Add admin role assignment
        role_assignment_2 = UserRoleAssignmentDB(user=cls.admin_user.name,
                                                 role=SystemRole.ADMIN)
        role_assignment_2.save()
    def setUp(self):
        # Load config so cfg.CONF overrides below have a registered group.
        parse_args()
    def test_is_system_admin(self):
        # Make sure RBAC is enabled for the tests
        cfg.CONF.set_override(name='enable', override=True, group='rbac')
        # System Admin user
        self.assertTrue(user_is_system_admin(user_db=self.system_admin_user))
        # Admin user
        self.assertFalse(user_is_system_admin(user_db=self.admin_user))
        # Regular user
        self.assertFalse(user_is_system_admin(user_db=self.regular_user))
    def test_is_admin(self):
        # Make sure RBAC is enabled for the tests
        cfg.CONF.set_override(name='enable', override=True, group='rbac')
        # Admin user
        self.assertTrue(user_is_admin(user_db=self.admin_user))
        # Regular user
        self.assertFalse(user_is_admin(user_db=self.regular_user))
    def test_has_role(self):
        # Make sure RBAC is enabled for the tests
        cfg.CONF.set_override(name='enable', override=True, group='rbac')
        # Admin user
        self.assertTrue(user_has_role(user_db=self.admin_user, role=SystemRole.ADMIN))
        # Regular user
        self.assertFalse(user_has_role(user_db=self.regular_user, role=SystemRole.ADMIN))
| 35.948454 | 89 | 0.706338 |
92d426722a81b89a6e6842f4e4d8a0a8dbedcd9c | 1,788 | py | Python | examples/test.py | Skyscanner/pyfailsafe | 84c88e8ededefb1c0bc3e3ec9b5e8207aafe0812 | [
"Apache-2.0"
] | 75 | 2016-09-21T09:50:55.000Z | 2022-03-31T13:17:46.000Z | examples/test.py | rmariano/pyfailsafe | 179df4c9d162f265f5c9aa49350b595e01e5ab68 | [
"Apache-2.0"
] | 20 | 2016-09-26T20:37:00.000Z | 2021-09-17T00:32:34.000Z | examples/test.py | rmariano/pyfailsafe | 179df4c9d162f265f5c9aa49350b595e01e5ab68 | [
"Apache-2.0"
] | 9 | 2016-09-25T15:17:15.000Z | 2020-04-07T17:01:28.000Z | # Copyright 2016 Skyscanner Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import pytest
# Shared event loop used to drive the async example clients synchronously.
loop = asyncio.get_event_loop()
def test_simple_failsafe_should_return_when_user_exists():
    """A known GitHub user yields a non-None repository listing."""
    from examples.simple_failsafe import GitHubClient
    client = GitHubClient()
    repos = loop.run_until_complete(
        client.get_repositories_by_user('skyscanner'))
    assert repos is not None
def test_simple_failsafe_should_raise_when_user_not_exists():
    """Looking up a missing GitHub user surfaces UserNotFoundError."""
    from examples.simple_failsafe import GitHubClient, UserNotFoundError
    client = GitHubClient()
    with pytest.raises(UserNotFoundError):
        loop.run_until_complete(
            client.get_repositories_by_user('not-existing-user'))
def test_failsafe_with_fallback():
    """The fallback-enabled client still returns data for a known user."""
    from examples.failsafe_with_fallback import GitHubClient
    client = GitHubClient()
    repos = loop.run_until_complete(
        client.get_repositories_by_user('skyscanner'))
    assert repos is not None
def test_fallback_failsafe():
    """The FallbackFailsafe-based client returns data for a known user."""
    from examples.fallback_failsafe import GitHubClient
    client = GitHubClient()
    repos = loop.run_until_complete(
        client.get_repositories_by_user('skyscanner'))
    assert repos is not None
| 28.83871 | 84 | 0.762304 |
e9e4cc5431419356b3f2398b61fe498cc9310e43 | 10,768 | py | Python | redash/query_runner/pg.py | techscience9/redash | 32669b148ccba47b118c8d390031903a18b0253d | [
"BSD-2-Clause"
] | 3 | 2018-06-01T00:05:18.000Z | 2019-03-07T14:03:10.000Z | redash/query_runner/pg.py | techscience9/redash | 32669b148ccba47b118c8d390031903a18b0253d | [
"BSD-2-Clause"
] | 6 | 2021-01-21T16:43:27.000Z | 2022-02-27T09:18:00.000Z | redash/query_runner/pg.py | tradingfoe/redash-clone | 94065b8dce0e27f6f40a7adc2b99e078b03115b3 | [
"BSD-2-Clause"
] | 5 | 2018-06-02T11:12:44.000Z | 2020-05-13T18:34:33.000Z | import os
import logging
import select
import psycopg2
from psycopg2.extras import Range
from redash.query_runner import *
from redash.utils import JSONEncoder, json_dumps, json_loads
logger = logging.getLogger(__name__)
# Maps PostgreSQL type OIDs (as reported in cursor.description) to redash
# column type constants; OIDs not listed here fall through to no type.
types_map = {
    20: TYPE_INTEGER,
    21: TYPE_INTEGER,
    23: TYPE_INTEGER,
    700: TYPE_FLOAT,
    1700: TYPE_FLOAT,
    701: TYPE_FLOAT,
    16: TYPE_BOOLEAN,
    1082: TYPE_DATE,
    1114: TYPE_DATETIME,
    1184: TYPE_DATETIME,
    1014: TYPE_STRING,
    1015: TYPE_STRING,
    1008: TYPE_STRING,
    1009: TYPE_STRING,
    2951: TYPE_STRING
}
class PostgreSQLJSONEncoder(JSONEncoder):
    """JSON encoder that also serializes psycopg2 Range values as strings."""
    def default(self, o):
        if not isinstance(o, Range):
            return super(PostgreSQLJSONEncoder, self).default(o)
        # From: https://github.com/psycopg/psycopg2/pull/779
        if o._bounds is None:
            # An empty range renders as the empty string.
            return ''
        # e.g. '[1, 10)' -- opening bound, lower, upper, closing bound.
        return ''.join([
            o._bounds[0],
            str(o._lower), ', ',
            str(o._upper), o._bounds[1]
        ])
def _wait(conn, timeout=None):
    """Block until the asynchronous psycopg2 connection finishes its work.
    Polls the connection and select()s on its socket until poll() reports
    POLL_OK.  Raises psycopg2.OperationalError on an unexpected poll state
    or a select error.
    """
    while True:
        try:
            state = conn.poll()
            if state == psycopg2.extensions.POLL_OK:
                return
            if state == psycopg2.extensions.POLL_WRITE:
                select.select([], [conn.fileno()], [], timeout)
            elif state == psycopg2.extensions.POLL_READ:
                select.select([conn.fileno()], [], [], timeout)
            else:
                raise psycopg2.OperationalError("poll() returned %s" % state)
        except select.error:
            raise psycopg2.OperationalError("select.error received")
class PostgreSQL(BaseSQLQueryRunner):
noop_query = "SELECT 1"
@classmethod
def configuration_schema(cls):
return {
"type": "object",
"properties": {
"user": {
"type": "string"
},
"password": {
"type": "string"
},
"host": {
"type": "string",
"default": "127.0.0.1"
},
"port": {
"type": "number",
"default": 5432
},
"dbname": {
"type": "string",
"title": "Database Name"
},
"sslmode": {
"type": "string",
"title": "SSL Mode",
"default": "prefer"
}
},
"order": ['host', 'port', 'user', 'password'],
"required": ["dbname"],
"secret": ["password"]
}
    @classmethod
    def type(cls):
        # Unique identifier under which this query runner is registered.
        return "pg"
def _get_definitions(self, schema, query):
results, error = self.run_query(query, None)
if error is not None:
raise Exception("Failed getting schema.")
results = json_loads(results)
for row in results['rows']:
if row['table_schema'] != 'public':
table_name = u'{}.{}'.format(row['table_schema'],
row['table_name'])
else:
table_name = row['table_name']
if table_name not in schema:
schema[table_name] = {'name': table_name, 'columns': []}
schema[table_name]['columns'].append(row['column_name'])
def _get_tables(self, schema):
'''
relkind constants per https://www.postgresql.org/docs/10/static/catalog-pg-class.html
r = regular table
v = view
m = materialized view
f = foreign table
p = partitioned table (new in 10)
---
i = index
S = sequence
t = TOAST table
c = composite type
'''
query = """
SELECT s.nspname as table_schema,
c.relname as table_name,
a.attname as column_name
FROM pg_class c
JOIN pg_namespace s
ON c.relnamespace = s.oid
AND s.nspname NOT IN ('pg_catalog', 'information_schema')
JOIN pg_attribute a
ON a.attrelid = c.oid
AND a.attnum > 0
AND NOT a.attisdropped
WHERE c.relkind IN ('m', 'f', 'p')
UNION
SELECT table_schema,
table_name,
column_name
FROM information_schema.columns
WHERE table_schema NOT IN ('pg_catalog', 'information_schema')
"""
self._get_definitions(schema, query)
return schema.values()
def _get_connection(self):
connection = psycopg2.connect(
user=self.configuration.get('user'),
password=self.configuration.get('password'),
host=self.configuration.get('host'),
port=self.configuration.get('port'),
dbname=self.configuration.get('dbname'),
sslmode=self.configuration.get('sslmode'),
async_=True)
return connection
def run_query(self, query, user):
connection = self._get_connection()
_wait(connection, timeout=10)
cursor = connection.cursor()
try:
cursor.execute(query)
_wait(connection)
if cursor.description is not None:
columns = self.fetch_columns([(i[0], types_map.get(i[1], None))
for i in cursor.description])
rows = [
dict(zip((c['name'] for c in columns), row))
for row in cursor
]
data = {'columns': columns, 'rows': rows}
error = None
json_data = json_dumps(data,
ignore_nan=True,
cls=PostgreSQLJSONEncoder)
else:
error = 'Query completed but it returned no data.'
json_data = None
except (select.error, OSError) as e:
error = "Query interrupted. Please retry."
json_data = None
except psycopg2.DatabaseError as e:
error = e.message
json_data = None
except (KeyboardInterrupt, InterruptException):
connection.cancel()
error = "Query cancelled by user."
json_data = None
finally:
connection.close()
return json_data, error
class Redshift(PostgreSQL):
    """Amazon Redshift query runner.

    Reuses the PostgreSQL wire protocol but pins the bundled AWS root CA
    for SSL verification and supports WLM query groups for routing adhoc
    vs. scheduled queries.
    """

    @classmethod
    def type(cls):
        return "redshift"

    def _get_connection(self):
        # Verify the server certificate against the AWS CA bundle shipped
        # alongside this module.
        sslrootcert_path = os.path.join(os.path.dirname(__file__),
                                        './files/redshift-ca-bundle.crt')

        connection = psycopg2.connect(
            user=self.configuration.get('user'),
            password=self.configuration.get('password'),
            host=self.configuration.get('host'),
            port=self.configuration.get('port'),
            dbname=self.configuration.get('dbname'),
            sslmode=self.configuration.get('sslmode', 'prefer'),
            sslrootcert=sslrootcert_path,
            async_=True)

        return connection

    @classmethod
    def configuration_schema(cls):
        """Connection settings; unlike plain PostgreSQL, host/port/user/
        password are mandatory and WLM query groups can be configured."""
        return {
            "type": "object",
            "properties": {
                "user": {
                    "type": "string"
                },
                "password": {
                    "type": "string"
                },
                "host": {
                    "type": "string"
                },
                "port": {
                    "type": "number"
                },
                "dbname": {
                    "type": "string",
                    "title": "Database Name"
                },
                "sslmode": {
                    "type": "string",
                    "title": "SSL Mode",
                    "default": "prefer"
                },
                "adhoc_query_group": {
                    "type": "string",
                    "title": "Query Group for Adhoc Queries",
                    "default": "default"
                },
                "scheduled_query_group": {
                    "type": "string",
                    "title": "Query Group for Scheduled Queries",
                    "default": "default"
                },
            },
            "order": ['host', 'port', 'user', 'password', 'dbname', 'sslmode', 'adhoc_query_group', 'scheduled_query_group'],
            "required": ["dbname", "user", "password", "host", "port"],
            "secret": ["password"]
        }

    def annotate_query(self, query, metadata):
        """Prefix the query with ``set query_group to ...;`` so Redshift
        workload management can route scheduled and adhoc queries to
        different queues."""
        annotated = super(Redshift, self).annotate_query(query, metadata)

        if metadata.get('Scheduled', False):
            query_group = self.configuration.get('scheduled_query_group')
        else:
            query_group = self.configuration.get('adhoc_query_group')

        if query_group:
            set_query_group = 'set query_group to {};'.format(query_group)
            annotated = '{}\n{}'.format(set_query_group, annotated)

        return annotated

    def _get_tables(self, schema):
        # Use svv_columns to include internal & external (Spectrum) tables and views data for Redshift
        # https://docs.aws.amazon.com/redshift/latest/dg/r_SVV_COLUMNS.html
        # Use HAS_SCHEMA_PRIVILEGE(), SVV_EXTERNAL_SCHEMAS and HAS_TABLE_PRIVILEGE() to filter
        # out tables the current user cannot access.
        # https://docs.aws.amazon.com/redshift/latest/dg/r_HAS_SCHEMA_PRIVILEGE.html
        # https://docs.aws.amazon.com/redshift/latest/dg/r_SVV_EXTERNAL_SCHEMAS.html
        # https://docs.aws.amazon.com/redshift/latest/dg/r_HAS_TABLE_PRIVILEGE.html
        query = """
        WITH tables AS (
            SELECT DISTINCT table_name,
                            table_schema,
                            column_name,
                            ordinal_position AS pos
            FROM svv_columns
            WHERE table_schema NOT IN ('pg_internal','pg_catalog','information_schema')
        )
        SELECT table_name, table_schema, column_name
        FROM tables
        WHERE
            HAS_SCHEMA_PRIVILEGE(table_schema, 'USAGE') AND
            (
                table_schema IN (SELECT schemaname FROM SVV_EXTERNAL_SCHEMAS) OR
                HAS_TABLE_PRIVILEGE('"' || table_schema || '"."' || table_name || '"', 'SELECT')
            )
        ORDER BY table_name, pos
        """

        self._get_definitions(schema, query)

        return schema.values()
class CockroachDB(PostgreSQL):
    """CockroachDB speaks the PostgreSQL wire protocol, so only the
    query-runner type name differs from the base class."""

    @classmethod
    def type(cls):
        return "cockroach"


# Make all three runners available to Redash's query-runner registry.
register(PostgreSQL)
register(Redshift)
register(CockroachDB)
| 31.48538 | 125 | 0.509287 |
66d6d6c877760e04e97dbec214952957a37e3f6c | 1,901 | py | Python | mara_cron/config.py | mara/mara-cron | c98d7029267e85ad045910ff7f16bae7b7d5fdda | [
"MIT"
] | null | null | null | mara_cron/config.py | mara/mara-cron | c98d7029267e85ad045910ff7f16bae7b7d5fdda | [
"MIT"
] | null | null | null | mara_cron/config.py | mara/mara-cron | c98d7029267e85ad045910ff7f16bae7b7d5fdda | [
"MIT"
] | 1 | 2022-02-22T17:16:46.000Z | 2022-02-22T17:16:46.000Z | import typing as t
def enabled() -> bool:
    """Global switch activating or deactivating all cron jobs.

    Defaults to False (all cron jobs disabled).
    """
    return False
"""
Lets you define a name of your mara instance.
Cron jobs will be executed per instance.
"""
return None
def user() -> t.Union[bool, str]:
"""
The crontab user:
- you can return the name of crontab user
- you can enter 'true' in case you want to get the crontab of the current user
By default the current user is taken
"""
return True
def tabfile() -> t.Optional[str]:
"""
The crontab file. Use '/etc/crontab' in order to use
By default not set. This will override the user() config.
"""
return None
def log_path(): # -> t.Optional[t.Union[str, pathlib.Path]]:
"""
The log path to which to log the command output.
When the sign '$' is used in the path, shell quotation is disabled, otherwise the path name
will be quoted.
Note: You will have to ensure that the folder exist, otherwise logging will be skipped.
It is recommended to use a different log path per instance e.g. '/var/log/mara/my_instance_name/'
Possible options:
Use a path to a folder e.g. '/var/log/mara'. You will have to make sure that the folder exists.
Mara will automatically generate a log file named '$(date +%Y%m%d_%H%M%S).log',
e.g. mara-cron_20211124_152800.log
Alternatively you can define the name of the file as well by adding it to the path. You can add
e.g. the date to a log file by using the following path: '/var/log/mara/mara_$(date +%Y%m%d).log'
Each execution will append to the file. NOTE: When several jobs run at the same time, it will be
maybe hard to determine to which execution the log line belongs.
"""
return None
| 29.246154 | 105 | 0.659127 |
3eb210babb06a068fa90bb5eab82f5b6a4cd7139 | 1,596 | py | Python | manila/scheduler/filters/retry.py | nidhimittalhada/access_group_repo | 62f3365bc5fb728fcca692a9b3977690fabcd78f | [
"Apache-2.0"
] | 1 | 2015-05-28T22:28:08.000Z | 2015-05-28T22:28:08.000Z | manila/scheduler/filters/retry.py | nidhimittalhada/access_group_repo | 62f3365bc5fb728fcca692a9b3977690fabcd78f | [
"Apache-2.0"
] | 5 | 2015-08-13T15:17:28.000Z | 2016-08-02T02:55:01.000Z | manila/scheduler/filters/retry.py | nidhimittalhada/access_group_repo | 62f3365bc5fb728fcca692a9b3977690fabcd78f | [
"Apache-2.0"
] | 2 | 2015-08-29T08:19:58.000Z | 2016-08-02T02:46:10.000Z | # Copyright (c) 2012 OpenStack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log
from manila.scheduler.filters import base_host
LOG = log.getLogger(__name__)
class RetryFilter(base_host.BaseHostFilter):
    """Filter out already tried nodes for scheduling purposes."""

    def host_passes(self, host_state, filter_properties):
        """Reject hosts that a previous scheduling attempt already tried."""
        retry = filter_properties.get('retry', None)
        if not retry:
            # Re-scheduling is disabled
            LOG.debug("Re-scheduling is disabled")
            return True

        attempted = retry.get('hosts', [])
        candidate = host_state.host
        passes = candidate not in attempted

        LOG.debug("Host %(host)s %(pass_msg)s. Previously tried hosts: "
                  "%(hosts)s",
                  {"host": candidate,
                   "pass_msg": "passes" if passes else "fails",
                   "hosts": attempted})

        # Host passes if it's not in the list of previously attempted hosts:
        return passes
| 34.695652 | 78 | 0.661654 |
f54ac3ba72986ebedd13b3295fe5f01f054d19d1 | 5,296 | py | Python | server/organisation/apps.py | coll-gate/collgate | 8c2ff1c59adda2bf318040f588c05263317a2812 | [
"MIT"
] | 2 | 2017-07-04T16:19:09.000Z | 2019-08-16T04:54:47.000Z | server/organisation/apps.py | coll-gate/collgate | 8c2ff1c59adda2bf318040f588c05263317a2812 | [
"MIT"
] | null | null | null | server/organisation/apps.py | coll-gate/collgate | 8c2ff1c59adda2bf318040f588c05263317a2812 | [
"MIT"
] | 1 | 2018-04-13T08:28:09.000Z | 2018-04-13T08:28:09.000Z | # -*- coding: utf-8; -*-
#
# @file apps.py
# @brief coll-gate organisation module main
# @author Frédéric SCHERMA (INRA UMR1095)
# @date 2017-01-03
# @copyright Copyright (c) 2017 INRA/CIRAD
# @license MIT (see LICENSE file)
# @details
from django.utils.translation import ugettext_lazy as _
from igdectk.common.apphelpers import ApplicationMain
from igdectk.module import AUTH_STAFF
from igdectk.module.manager import module_manager
from igdectk.module.menu import MenuEntry
from igdectk.module.module import Module, ModuleMenu
from igdectk.bootstrap.customglyph import FaGlyph
from main.config import configuration
class CollGateOrganisation(ApplicationMain):
    """Django application entry point for the coll-gate organisation
    module (GRCs, organisations, establishments, persons,
    conservatories)."""

    # Dotted package name of this module (the package containing apps.py).
    name = '.'.join(__name__.split('.')[0:-1])

    def __init__(self, app_name, app_module):
        super(CollGateOrganisation, self).__init__(app_name, app_module)

        # different types of format for type of descriptors for this module
        self.format_types = []

    def ready(self):
        """Wire the module into the application: caches, model
        registries, URLs, describable entities and the admin menu.

        Imports are kept local because Django forbids model imports
        before the app registry is ready.
        """
        super().ready()

        # register descriptor cache category
        from main.cache import cache_manager
        cache_manager.register('organisation')

        from messenger.cache import client_cache_manager
        client_cache_manager.register('organisation')

        from main.models import main_register_models
        main_register_models(CollGateOrganisation.name)

        from audit.models import audit_register_models
        audit_register_models(CollGateOrganisation.name)

        # create a module organisation
        organisation_module = Module('organisation', base_url='coll-gate')
        organisation_module.include_urls((
            'base',
            'organisationtype',
            'grc',
            'organisation',
            'establishment',
            'person',
            'conservatory'
            )
        )

        # add the describable entities models
        from .models import Organisation, Establishment, Person, Conservatory

        # descriptor_module
        from django.apps import apps
        descriptor_app = apps.get_app_config('descriptor')
        descriptor_app.describable_entities += [
            Organisation,
            Establishment,
            Person,
            Conservatory
        ]

        # organisation menu (staff-only entries under "Administration")
        menu_organisation = ModuleMenu('administration', _('Administration'), order=999, auth=AUTH_STAFF)

        menu_organisation.add_entry(
            MenuEntry('grc-details', _('Manage GRC'), "#organisation/grc/", icon=FaGlyph('cloud'), order=1))
        menu_organisation.add_entry(
            MenuEntry('organisation',
                      _('Manage organisations'),
                      "#organisation/organisation/",
                      icon=FaGlyph('map-marker'),
                      order=2))
        menu_organisation.add_entry(
            MenuEntry('create-organisation',
                      _('Create an organisation or a partner'),
                      "~organisation/organisation/create/",
                      icon=FaGlyph('plus'),
                      order=34))
        organisation_module.add_menu(menu_organisation)

        module_manager.register_module(organisation_module)

        if self.is_run_mode():
            self.post_ready()

    def post_ready(self):
        """Validate runtime prerequisites: a unique GRC row (created if
        absent) and the organisation/establishment descriptor layouts.

        Results are reported through the main.config configuration
        checker; only runs when the backing tables exist (i.e. after
        migrations).
        """
        from organisation.models import GRC
        if self.is_table_exists(GRC):
            # check if there is a unique GRC model instance
            num_grcs = len(GRC.objects.all())

            if num_grcs == 0:
                self.logger.info("Missing GRC configuration. Create a unique GRC model instance. Need configuration.")
                grc = GRC()
                grc.save()
                configuration.partial("organisation", "GRC instance", "GRC instance created. Need configuration.")
            elif num_grcs > 1:
                configuration.wrong(
                    "organisation",
                    "GRC instance",
                    "Invalid GRC configuration. Only a unique GRC could be configured.")
            else:
                configuration.validate("organisation", "GRC instance", "GRC instance detected.")

        from descriptor.models import Layout
        if self.is_table_exists(Layout):
            # keep descriptor layout for organisation and establishment.
            if not Layout.objects.filter(name="organisation").exists():
                configuration.wrong(
                    "organisation",
                    "Organisation descriptor layout",
                    "Missing organisation descriptor layout. Be sure to have installed fixtures.")
            else:
                configuration.validate(
                    "organisation",
                    "Organisation descriptor layout",
                    "Organisation descriptor layout detected.")

            if not Layout.objects.filter(name="establishment").exists():
                configuration.wrong(
                    "organisation",
                    "Establishment descriptor layout",
                    "Missing establishment descriptor layout. Be sure to have installed fixtures.")
            else:
                configuration.validate(
                    "organisation",
                    "Establishment descriptor layout",
                    "Establishment descriptor layout detected.")
| 37.560284 | 118 | 0.608195 |
f688ea61bf2d3dd6b5db8d0812600e4925c04437 | 14,492 | py | Python | kicad_schlib.py | cpavlin1/kicad-pcbtool | ebe6d1891ddd45e0c8da8ccc9db13f32881aaaea | [
"CC0-1.0"
] | null | null | null | kicad_schlib.py | cpavlin1/kicad-pcbtool | ebe6d1891ddd45e0c8da8ccc9db13f32881aaaea | [
"CC0-1.0"
] | null | null | null | kicad_schlib.py | cpavlin1/kicad-pcbtool | ebe6d1891ddd45e0c8da8ccc9db13f32881aaaea | [
"CC0-1.0"
] | null | null | null | """KiCad schematic symbol library
This can load a KiCad schematic library so that it can be manipulated as a
set of objects and re-exported.
"""
import re
import shlex
# Fill modes for graphic items (Arc, Circle, Polyline, Rectangle).
FILL_FG = 1    # filled with the foreground (line) colour
FILL_BG = -1   # filled with the background colour
FILL_NONE = 0  # outline only

# Mappings between the one-letter fill codes used in .lib files and the
# FILL_* constants above.
KICAD_TO_FILL = {"F": FILL_FG, "f": FILL_BG, "N": FILL_NONE}
FILL_TO_KICAD = {FILL_FG: "F", FILL_BG: "f", FILL_NONE: "N"}

# Pin electrical types
PIN_INPUT = "I"
PIN_OUTPUT = "O"
PIN_BIDI = "B"
PIN_TRISTATE = "T"
PIN_PASSIVE = "P"
PIN_UNSPECIFIED = "U"
PIN_POWER_IN = "W"
PIN_POWER_OUT = "w"
PIN_OPEN_COLL = "C"
PIN_OPEN_EMIT = "E"
PIN_NC = "N"

# Pin orientations
PIN_RIGHT = "R"
PIN_LEFT = "L"
PIN_UP = "U"
PIN_DOWN = "D"

# Pin styles (graphical decoration of the pin)
PIN_HIDDEN = "N"
PIN_ACTIVELOW = "I"
PIN_CLOCK = "C"
PIN_LOWCLOCK = "IC"
PIN_LOWIN = "L"
PIN_CLOCKLOW = "CL"
PIN_LOWOUT = "V"
PIN_FALLING = "F"
PIN_NONLOGIC = "NX"
def readfile(f):
    """Parse a whole library file, returning its symbols as a list."""
    symbols = []
    while True:
        symbol = KicadSchSymbol.createFromLibFile(f)
        if symbol is None:  # EOF reached
            return symbols
        symbols.append(symbol)
def writefile(f, objects):
    """Serialize a list of symbol objects as a KiCad schematic library."""
    f.writelines(["EESchema-LIBRARY Version 2.3\n",
                  "#encoding utf-8\n"])
    for symbol in objects:
        symbol.writeOut(f)
    f.write("#\n#End Library\n")
class KicadSchSymbol (object):
    """This represents a full schematic symbol. It contains a set of elements
    which can be manipulated.

    There is nothing else in a library file, so a file is just a list of these.
    """

    def __init__ (self):
        # DEF record (a Definition) and the three mandatory F0-F2 fields.
        self.definition = None
        self.referenceField = None
        self.valueField = None
        self.footprintField = None
        # Optional F3+ fields, footprint filter patterns, alias names.
        self.otherFields = []
        self.footprintFilters = []
        self.aliases = []
        # Graphic items (Arc/Circle/Polyline/Rectangle/Text) and pins.
        self.graphics = []
        self.pins = []

    def writeOut (self, f):
        """Write the symbol into a file"""
        # Comment banner with the symbol name, then DEF, fields,
        # aliases, footprint filters and the DRAW section.
        f.write ("#\n")
        f.write ("# %s\n" % self.definition.name)
        f.write ("#\n")
        self.definition.writeOut (f)
        self.referenceField.writeOut (f)
        self.valueField.writeOut (f)
        self.footprintField.writeOut (f)
        for i in self.otherFields:
            i.writeOut (f)
        if self.aliases:
            f.write ("ALIAS ")
            f.write (" ".join (self.aliases))
            f.write ("\n")
        if self.footprintFilters:
            f.write ("$FPLIST\n ")
            f.write (" ".join (self.footprintFilters))
            f.write ("\n$ENDFPLIST\n")
        f.write ("DRAW\n")
        for i in self.graphics:
            i.writeOut (f)
        for i in self.pins:
            i.writeOut (f)
        f.write ("ENDDRAW\n")
        f.write ("ENDDEF\n")

    @classmethod
    def createFromLibFile (cls, f):
        """Create a KicadSchSymbol from a library file. Creates just one;
        returns None at EOF.
        """
        # Simple line-oriented state machine: "root" for the header,
        # "fplist" inside $FPLIST/$ENDFPLIST, "draw" inside DRAW/ENDDRAW.
        newobj = cls ()
        state = "root"
        for line in f:
            # Strip trailing '#' comments and surrounding whitespace;
            # skip blank/comment-only lines.
            line = line.partition ("#")[0].strip ()
            if not line:
                continue

            if state == "root":
                if line.startswith ("DEF "):
                    newobj.definition = Definition (line)
                elif line.startswith ("F0 "):
                    newobj.referenceField = Field (line)
                elif line.startswith ("F1 "):
                    newobj.valueField = Field (line)
                elif line.startswith ("F2 "):
                    newobj.footprintField = Field (line)
                elif re.match (r"F\d+ ", line):
                    newobj.otherFields.append (Field (line))
                elif line.startswith ("ALIAS "):
                    newobj.aliases.extend (line.split ()[1:])
                elif line == "$FPLIST":
                    state = "fplist"
                elif line == "DRAW":
                    state = "draw"
                elif line == "ENDDEF":
                    # ENDDEF terminates exactly one symbol.
                    return newobj
                elif line.startswith ("EESchema-LIBRARY"):
                    # File header: ignore.
                    continue
                else:
                    raise ValueError ("cannot interpret line: " + line)

            elif state == "fplist":
                if line == "$ENDFPLIST":
                    state = "root"
                else:
                    newobj.footprintFilters.extend (i.strip () for i in line.split ())

            elif state == "draw":
                # Each record type is identified by its first letter.
                if line.startswith ("A "):
                    newobj.graphics.append (Arc (line))
                elif line.startswith ("C "):
                    newobj.graphics.append (Circle (line))
                elif line.startswith ("P "):
                    newobj.graphics.append (Polyline (line))
                elif line.startswith ("S "):
                    newobj.graphics.append (Rectangle (line))
                elif line.startswith ("T "):
                    newobj.graphics.append (Text (line))
                elif line.startswith ("X "):
                    newobj.pins.append (Pin (line))
                elif line == "ENDDRAW":
                    state = "root"
                else:
                    raise ValueError ("cannot interpret line: " + line)
        # Falls through (returning None) when EOF is hit before ENDDEF.

    # KiCad has some horrid data duplication that means a few things must be
    # edited in multiple places. Use these properties whenever you can to fix
    # that.
    @property
    def name (self):
        return self.definition.name

    @name.setter
    def name (self, v):
        # The symbol name is duplicated in the DEF record and the value
        # field (F1); keep both in sync.
        self.definition.name = v
        self.valueField.text = v

    @property
    def reference (self):
        return self.definition.reference

    @reference.setter
    def reference (self, v):
        # The reference prefix is duplicated in the DEF record and the
        # reference field (F0); keep both in sync.
        self.definition.reference = v
        self.referenceField.text = v
class Definition (object):
    """The DEF record of a symbol: name, reference prefix and flags."""

    def __init__(self, line):
        fields = line.split()
        self.name = fields[1]
        self.reference = fields[2]
        # fields[3] is always 0 and is not stored.
        self.text_offset = int(fields[4])
        self.draw_numbers = (fields[5] == "Y")
        self.draw_names = (fields[6] == "Y")
        self.unit_count = int(fields[7])
        self.units_locked = (fields[8] == "L")
        self.is_power = (fields[9] == "P")

    def writeOut(self, f):
        """Emit the DEF record line."""
        fields = ("DEF", self.name, self.reference, "0",
                  str(self.text_offset),
                  "Y" if self.draw_numbers else "N",
                  "Y" if self.draw_names else "N",
                  str(self.unit_count),
                  "L" if self.units_locked else "F",
                  "P" if self.is_power else "N")
        f.write(" ".join(fields) + "\n")
class Field (object):
    """One F<n> record: a text field attached to the symbol."""

    def __init__(self, line):
        tokens = shlex.split(line)
        self.num = int(tokens[0][1:])  # strip the leading 'F'
        self.text = tokens[1]
        self.posx = int(tokens[2])
        self.posy = int(tokens[3])
        self.size = int(tokens[4])
        self.vertical = (tokens[5] == "V")
        self.visible = (tokens[6] == "V")
        self.horiz_just = tokens[7]  # L, R or C
        self.vert_just = tokens[8]   # L, R or C

    def writeOut(self, f):
        """Emit the field record line (text is quoted)."""
        f.write('F%d "%s" %d %d %d %s %s %s %s\n' % (
            self.num, self.text, self.posx, self.posy, self.size,
            "V" if self.vertical else "H",
            "V" if self.visible else "I",
            self.horiz_just, self.vert_just))
class Arc (object):
    """An 'A' record: an arc graphic item."""

    def __init__(self, line):
        tok = line.split()
        (self.posx, self.posy, self.radius, self.start_angle,
         self.end_angle, self.unit, self.convert,
         self.thickness) = (int(t) for t in tok[1:9])
        self.fill = KICAD_TO_FILL[tok[9]]
        # Explicit start/end points of the arc.
        self.startx, self.starty = int(tok[10]), int(tok[11])
        self.endx, self.endy = int(tok[12]), int(tok[13])

    def writeOut(self, f):
        """Emit the arc record line."""
        f.write("A %d %d %d %d %d %d %d %d %s %d %d %d %d\n" % (
            self.posx, self.posy, self.radius,
            self.start_angle, self.end_angle,
            self.unit, self.convert, self.thickness,
            FILL_TO_KICAD[self.fill],
            self.startx, self.starty, self.endx, self.endy))
class Circle (object):
    """A 'C' record: a circle graphic item."""

    def __init__(self, line):
        tok = line.split()
        (self.posx, self.posy, self.radius, self.unit,
         self.convert, self.thickness) = (int(t) for t in tok[1:7])
        self.fill = KICAD_TO_FILL[tok[7]]

    def writeOut(self, f):
        """Emit the circle record line."""
        f.write("C %d %d %d %d %d %d %s\n" % (
            self.posx, self.posy, self.radius, self.unit,
            self.convert, self.thickness, FILL_TO_KICAD[self.fill]))
class Polyline (object):
    """A 'P' record: a polyline graphic item."""

    def __init__(self, line):
        tok = line.split()
        # tok[1] (point count) is implied by the coordinate list.
        self.unit = int(tok[2])
        self.convert = int(tok[3])
        self.thickness = int(tok[4])
        self.fill = KICAD_TO_FILL[tok[-1]]
        coords = iter(int(t) for t in tok[5:-1])
        # Fold the flat "x y x y ..." list into (x, y) pairs.
        self.points = list(zip(coords, coords))

    def writeOut(self, f):
        """Emit the polyline record line."""
        pts = " ".join(" %d %d" % pair for pair in self.points)
        f.write("P {npoints} {unit} {convert} {thickness} {points} {fill}\n".format(
            npoints=len(self.points),
            unit=self.unit,
            convert=self.convert,
            thickness=self.thickness,
            points=pts,
            fill=FILL_TO_KICAD[self.fill]))
class Rectangle (object):
    """An 'S' record: a rectangle graphic item given by two corners."""

    def __init__ (self, line):
        line = line.split ()
        self.startx = int (line[1])
        self.starty = int (line[2])
        self.endx = int (line[3])
        self.endy = int (line[4])
        self.unit = int (line[5])
        self.convert = int (line[6])
        self.thickness = int (line[7])
        self.fill = KICAD_TO_FILL[line[8]]

    def writeOut (self, f):
        """Emit the rectangle record line."""
        line = "S {startx} {starty} {endx} {endy} {unit} {convert} {thickness} {fill}\n"
        f.write (line.format (
            startx = self.startx,
            starty = self.starty,
            endx = self.endx,
            endy = self.endy,
            unit = self.unit,
            convert = self.convert,
            thickness = self.thickness,
            # BUG FIX: previously wrote the internal numeric fill constant
            # (1/-1/0) instead of the KiCad letter code (F/f/N), unlike
            # every other record class, producing files KiCad cannot read
            # back correctly.
            fill = FILL_TO_KICAD[self.fill]))
class Text (object):
    """A 'T' record: free text drawn on the symbol body."""

    def __init__(self, line):
        tok = line.split()
        self.vertical = (int(tok[1]) != 0)
        self.posx = int(tok[2])
        self.posy = int(tok[3])
        self.size = int(tok[4])
        # tok[5] is "text_type", undocumented; always written back as 0.
        self.unit = int(tok[6])
        self.convert = int(tok[7])
        # Spaces are stored as '~' in the file format.
        self.text = tok[8].replace("~", " ")
        self.italic = (tok[9] == "Italic")
        self.bold = bool(int(tok[10]))
        self.horiz_just = tok[11]
        self.vert_just = tok[12]

    def writeOut(self, f):
        """Emit the text record line."""
        parts = ["T",
                 "900" if self.vertical else "0",
                 str(self.posx), str(self.posy), str(self.size),
                 "0",
                 str(self.unit), str(self.convert),
                 self.text.replace(" ", "~"),
                 "Italic" if self.italic else "Normal",
                 "1" if self.bold else "0",
                 self.horiz_just, self.vert_just]
        f.write(" ".join(parts) + "\n")
class Pin (object):
    """An 'X' record: one pin of the symbol."""

    def __init__(self, line):
        tok = line.split()
        self.name = tok[1]
        self.num = tok[2]  # kept as a string: pin "numbers" may be alphanumeric
        self.posx = int(tok[3])
        self.posy = int(tok[4])
        self.length = int(tok[5])
        self.direction = tok[6]
        self.name_size = int(tok[7])
        self.num_size = int(tok[8])
        self.unit = int(tok[9])
        self.convert = int(tok[10])
        self.elec_type = tok[11]
        # The graphical style token is optional.
        self.style = tok[12] if len(tok) > 12 else None

    def writeOut(self, f):
        """Emit the pin record line."""
        suffix = "" if self.style is None else (" " + self.style)
        f.write("X %s %s %d %d %d %s %d %d %d %d %s%s\n" % (
            self.name, self.num, self.posx, self.posy, self.length,
            self.direction, self.name_size, self.num_size,
            self.unit, self.convert, self.elec_type, suffix))
def script1():
    # open conn-100mil.lib.old and split the CONN-100MIL-M-* into shrouded and
    # unshrouded versions
    #
    # One-off migration script: reads the old library, gives every symbol a
    # concrete (hidden) footprint field, and duplicates each male connector
    # as a "-SHROUD" variant before writing the new library.
    import copy
    with open ("conn-100mil.lib.old") as f:
        symbs = readfile (f)
    newsymbs = []
    for i in symbs:
        if i.definition.name.startswith ("CONN-100MIL-M"):
            # Drop the shrouded filter from the plain variant and pin its
            # footprint to the remaining (unshrouded) filter.
            i.footprintFilters.remove (i.definition.name + "-SHROUD")
            i.footprintField.text = "conn-100mil:" + i.footprintFilters[0]
            i.footprintField.visible = False
            # Clone as the shrouded variant with its own name/filter.
            shrouded = copy.deepcopy (i)
            shrouded.name = shrouded.name + "-SHROUD"
            shrouded.footprintFilters[0] += "-SHROUD"
            shrouded.footprintField.text = "conn-100mil:" + shrouded.footprintFilters[0]
            newsymbs.append (shrouded)
        else:
            i.footprintField.text = "conn-100mil:" + i.footprintFilters[0]
            i.footprintField.visible = False
    symbs.extend (newsymbs)
    with open ("conn-100mil.lib", "w") as f:
        writefile (f, symbs)

if __name__ == '__main__':
    script1 ()
| 32.420582 | 127 | 0.522633 |
17d4ab3ac8d2d22da30d7211371b2a7fa82a4337 | 1,500 | py | Python | problems/19/Solver.py | tmct/adventOfCode2016 | bd5699ca179b873f9da01514903b1dd493a46b7b | [
"MIT"
] | null | null | null | problems/19/Solver.py | tmct/adventOfCode2016 | bd5699ca179b873f9da01514903b1dd493a46b7b | [
"MIT"
] | null | null | null | problems/19/Solver.py | tmct/adventOfCode2016 | bd5699ca179b873f9da01514903b1dd493a46b7b | [
"MIT"
] | null | null | null | class Solver:
def __init__(self):
self.adjacent_elves = {}
def solve(self, start_number):
recipient = 1
remaining = start_number
power = 1
while remaining > 1:
new_remaining = remaining // 2
remaining_was_odd = remaining % 2
power *= 2
remaining = new_remaining
if remaining_was_odd:
recipient += power
return recipient
def solve_b(self, start_number):
self.adjacent_elves = {index: (index - 1, index + 1) for index in range(start_number)}
self.adjacent_elves[0] = (start_number - 1, 1)
self.adjacent_elves[start_number - 1] = (start_number - 2, 0)
first_elf_to_delete = start_number // 2
current_elf = first_elf_to_delete
elves_remaining = start_number
while elves_remaining > 1:
elf_to_delete = current_elf
current_elf = self.adjacent_elves[current_elf][1]
if elves_remaining % 2:
current_elf = self.adjacent_elves[current_elf][1]
self.delete_elf(elf_to_delete)
elves_remaining -= 1
return current_elf + 1
def delete_elf(self, target_elf):
before, after = self.adjacent_elves[target_elf]
two_before = self.adjacent_elves[before][0]
self.adjacent_elves[before] = (two_before, after)
two_after = self.adjacent_elves[after][1]
self.adjacent_elves[after] = (before, two_after)
| 35.714286 | 94 | 0.612 |
05c5e562c5b3c11eebf317a83d7294f8f62a7f7f | 14,911 | py | Python | sympy/polys/multivariate_resultants.py | iamabhishek0/sympy | c461bd1ff9d178d1012b04fd0bf37ee3abb02cdd | [
"BSD-3-Clause"
] | 2 | 2019-12-16T16:02:58.000Z | 2020-01-20T04:07:18.000Z | sympy/polys/multivariate_resultants.py | iamabhishek0/sympy | c461bd1ff9d178d1012b04fd0bf37ee3abb02cdd | [
"BSD-3-Clause"
] | null | null | null | sympy/polys/multivariate_resultants.py | iamabhishek0/sympy | c461bd1ff9d178d1012b04fd0bf37ee3abb02cdd | [
"BSD-3-Clause"
] | 1 | 2020-02-06T17:54:20.000Z | 2020-02-06T17:54:20.000Z | """
This module contains functions for two multivariate resultants. These
are:
- Dixon's resultant.
- Macaulay's resultant.
Multivariate resultants are used to identify whether a multivariate
system has common roots. That is when the resultant is equal to zero.
"""
from sympy import IndexedBase, Matrix, Mul, Poly
from sympy import rem, prod, degree_list, diag, simplify
from sympy.core.compatibility import range
from sympy.polys.monomials import itermonomials, monomial_deg
from sympy.polys.orderings import monomial_key
from sympy.polys.polytools import poly_from_expr, total_degree
from sympy.functions.combinatorial.factorials import binomial
from itertools import combinations_with_replacement
from sympy.utilities.exceptions import SymPyDeprecationWarning
class DixonResultant():
"""
A class for retrieving the Dixon's resultant of a multivariate
system.
Examples
========
>>> from sympy.core import symbols
>>> from sympy.polys.multivariate_resultants import DixonResultant
>>> x, y = symbols('x, y')
>>> p = x + y
>>> q = x ** 2 + y ** 3
>>> h = x ** 2 + y
>>> dixon = DixonResultant(variables=[x, y], polynomials=[p, q, h])
>>> poly = dixon.get_dixon_polynomial()
>>> matrix = dixon.get_dixon_matrix(polynomial=poly)
>>> matrix
Matrix([
[ 0, 0, -1, 0, -1],
[ 0, -1, 0, -1, 0],
[-1, 0, 1, 0, 0],
[ 0, -1, 0, 0, 1],
[-1, 0, 0, 1, 0]])
>>> matrix.det()
0
See Also
========
Notebook in examples: sympy/example/notebooks.
References
==========
.. [1] [Kapur1994]_
.. [2] [Palancz08]_
"""
    def __init__(self, polynomials, variables):
        """
        A class that takes two lists, a list of polynomials and list of
        variables. Returns the Dixon matrix of the multivariate system.

        Parameters
        ----------
        polynomials : list of polynomials
            A list of m n-degree polynomials
        variables: list
            A list of all n variables
        """
        self.polynomials = polynomials
        self.variables = variables

        self.n = len(self.variables)
        self.m = len(self.polynomials)

        a = IndexedBase("alpha")
        # A list of n alpha variables (the replacing variables)
        self.dummy_variables = [a[i] for i in range(self.n)]

        # A list of the d_max of each variable.
        self._max_degrees = [max(degree_list(poly)[i] for poly in self.polynomials)
                             for i in range(self.n)]
    @property
    def max_degrees(self):
        # Deprecated accessor kept for backward compatibility; emits a
        # deprecation warning and returns the precomputed per-variable
        # maximum degrees.
        SymPyDeprecationWarning(feature="max_degrees",
                                issue=17763,
                                deprecated_since_version="1.5").warn()
        return self._max_degrees
    def get_dixon_polynomial(self):
        r"""
        Returns
        =======

        dixon_polynomial: polynomial
            Dixon's polynomial is calculated as:

            delta = Delta(A) / ((x_1 - a_1) ... (x_n - a_n)) where,

            A =  |p_1(x_1,... x_n), ..., p_n(x_1,... x_n)|
                 |p_1(a_1,... x_n), ..., p_n(a_1,... x_n)|
                 |...             , ...,              ...|
                 |p_1(a_1,... a_n), ..., p_n(a_1,... a_n)|
        """
        # Dixon's construction needs exactly n + 1 polynomials in n
        # variables.
        if self.m != (self.n + 1):
            raise ValueError('Method invalid for given combination.')

        # First row
        rows = [self.polynomials]

        # Each subsequent row replaces one more variable x_i by alpha_i.
        temp = list(self.variables)

        for idx in range(self.n):
            temp[idx] = self.dummy_variables[idx]
            substitution = {var: t for var, t in zip(self.variables, temp)}
            rows.append([f.subs(substitution) for f in self.polynomials])

        A = Matrix(rows)

        # The determinant is divisible by prod(x_i - alpha_i); the
        # quotient is the Dixon polynomial.
        terms = zip(self.variables, self.dummy_variables)
        product_of_differences = Mul(*[a - b for a, b in terms])
        dixon_polynomial = (A.det() / product_of_differences).factor()

        return poly_from_expr(dixon_polynomial, self.dummy_variables)[0]
    def get_upper_degree(self):
        # Deprecated: total degree of the monomial
        # prod(x_i ** max_degree_i); superseded by get_max_degrees.
        SymPyDeprecationWarning(feature="get_upper_degree",
                                useinstead="get_max_degrees",
                                issue=17763,
                                deprecated_since_version="1.5").warn()
        list_of_products = [self.variables[i] ** self._max_degrees[i]
                            for i in range(self.n)]
        product = prod(list_of_products)
        product = Poly(product).monoms()

        return monomial_deg(*product)
def get_max_degrees(self, polynomial):
r"""
Returns a list of the maximum degree of each variable appearing
in the coefficients of the Dixon polynomial. The coefficients are
viewed as polys in x_1, ... , x_n.
"""
deg_lists = [degree_list(Poly(poly, self.variables))
for poly in polynomial.coeffs()]
max_degrees = [max(degs) for degs in zip(*deg_lists)]
return max_degrees
def get_dixon_matrix(self, polynomial):
r"""
Construct the Dixon matrix from the coefficients of polynomial
\alpha. Each coefficient is viewed as a polynomial of x_1, ...,
x_n.
"""
max_degrees = self.get_max_degrees(polynomial)
# list of column headers of the Dixon matrix.
monomials = itermonomials(self.variables, max_degrees)
monomials = sorted(monomials, reverse=True,
key=monomial_key('lex', self.variables))
dixon_matrix = Matrix([[Poly(c, *self.variables).coeff_monomial(m)
for m in monomials]
for c in polynomial.coeffs()])
# remove columns if needed
if dixon_matrix.shape[0] != dixon_matrix.shape[1]:
keep = [column for column in range(dixon_matrix.shape[-1])
if any([element != 0 for element
in dixon_matrix[:, column]])]
dixon_matrix = dixon_matrix[:, keep]
return dixon_matrix
def KSY_precondition(self, matrix):
"""
Test for the validity of the Kapur-Saxena-Yang precondition.
The precondition requires that the column corresponding to the
monomial 1 = x_1 ^ 0 * x_2 ^ 0 * ... * x_n ^ 0 is not a linear
combination of the remaining ones. In sympy notation this is
the last column. For the precondition to hold the last non-zero
row of the rref matrix should be of the form [0, 0, ..., 1].
"""
if matrix.is_zero:
return False
m, n = matrix.shape
# simplify the matrix and keep only its non-zero rows
matrix = simplify(matrix.rref()[0])
rows = [i for i in range(m) if any(matrix[i, j] != 0 for j in range(n))]
matrix = matrix[rows,:]
condition = Matrix([[0]*(n-1) + [1]])
if matrix[-1,:] == condition:
return True
else:
return False
def delete_zero_rows_and_columns(self, matrix):
"""Remove the zero rows and columns of the matrix."""
rows = [i for i in range(matrix.rows) if not matrix.row(i).is_zero]
cols = [j for j in range(matrix.cols) if not matrix.col(j).is_zero]
return matrix[rows, cols]
def product_leading_entries(self, matrix):
"""Calculate the product of the leading entries of the matrix."""
res = 1
for row in range(matrix.rows):
for el in matrix.row(row):
if el != 0:
res = res * el
break
return res
def get_KSY_Dixon_resultant(self, matrix):
"""Calculate the Kapur-Saxena-Yang approach to the Dixon Resultant."""
matrix = self.delete_zero_rows_and_columns(matrix)
_, U, _ = matrix.LUdecomposition()
matrix = self.delete_zero_rows_and_columns(simplify(U))
return self.product_leading_entries(matrix)
class MacaulayResultant():
    """
    A class for calculating the Macaulay resultant. Note that the
    polynomials must be homogenized and their coefficients must be
    given as symbols.

    Examples
    ========

    >>> from sympy.core import symbols
    >>> from sympy.polys.multivariate_resultants import MacaulayResultant
    >>> x, y, z = symbols('x, y, z')

    >>> a_0, a_1, a_2 = symbols('a_0, a_1, a_2')
    >>> b_0, b_1, b_2 = symbols('b_0, b_1, b_2')
    >>> c_0, c_1, c_2,c_3, c_4 = symbols('c_0, c_1, c_2, c_3, c_4')

    >>> f = a_0 * y -  a_1 * x + a_2 * z
    >>> g = b_1 * x ** 2 + b_0 * y ** 2 - b_2 * z ** 2
    >>> h = c_0 * y * z ** 2 - c_1 * x ** 3 + c_2 * x ** 2 * z - c_3 * x * z ** 2 + c_4 * z ** 3

    >>> mac = MacaulayResultant(polynomials=[f, g, h], variables=[x, y, z])
    >>> mac.monomial_set
    [x**4, x**3*y, x**3*z, x**2*y**2, x**2*y*z, x**2*z**2, x*y**3,
    x*y**2*z, x*y*z**2, x*z**3, y**4, y**3*z, y**2*z**2, y*z**3, z**4]
    >>> matrix = mac.get_matrix()
    >>> submatrix = mac.get_submatrix(matrix)
    >>> submatrix
    Matrix([
    [-a_1,  a_0,  a_2,    0],
    [   0, -a_1,    0,    0],
    [   0,    0, -a_1,    0],
    [   0,    0,    0, -a_1]])

    See Also
    ========

    Notebook in examples: sympy/example/notebooks.

    References
    ==========

    .. [1] [Bruce97]_
    .. [2] [Stiller96]_

    """
    def __init__(self, polynomials, variables):
        """
        Parameters
        ==========

        variables: list
            A list of all n variables
        polynomials : list of sympy polynomials
            A list of m n-degree polynomials
        """
        self.polynomials = polynomials
        self.variables = variables
        self.n = len(variables)

        # A list of the d_max of each polynomial.
        self.degrees = [total_degree(poly, *self.variables) for poly
                        in self.polynomials]

        self.degree_m = self._get_degree_m()
        self.monomials_size = self.get_size()

        # The set T of all possible monomials of degree degree_m
        self.monomial_set = self.get_monomials_of_certain_degree(self.degree_m)

    def _get_degree_m(self):
        r"""
        Returns
        =======

        degree_m: int
            The degree_m is calculated as  1 + \sum_1 ^ n (d_i - 1),
            where d_i is the degree of the i polynomial
        """
        return 1 + sum(d - 1 for d in self.degrees)

    def get_size(self):
        r"""
        Returns
        =======

        size: int
            The size of set T. Set T is the set of all possible
            monomials of the n variables for degree equal to the
            degree_m
        """
        # Number of monomials of total degree degree_m in n variables:
        # C(degree_m + n - 1, n - 1) (stars and bars).
        return binomial(self.degree_m + self.n - 1, self.n - 1)

    def get_monomials_of_certain_degree(self, degree):
        """
        Returns
        =======

        monomials: list
            A list of monomials of a certain degree, sorted in
            decreasing lex order.
        """
        monomials = [Mul(*monomial) for monomial
                     in combinations_with_replacement(self.variables,
                                                      degree)]

        return sorted(monomials, reverse=True,
                      key=monomial_key('lex', self.variables))

    def get_row_coefficients(self):
        """
        Returns
        =======

        row_coefficients: list
            The row coefficients of Macaulay's matrix
        """
        row_coefficients = []
        divisible = []
        for i in range(self.n):
            if i == 0:
                # First polynomial: all monomials of the complementary
                # degree are used as multipliers.
                degree = self.degree_m - self.degrees[i]
                monomial = self.get_monomials_of_certain_degree(degree)
                row_coefficients.append(monomial)
            else:
                # Later polynomials: drop the multipliers divisible by
                # x_j ** d_j for any previous variable x_j, to avoid
                # duplicated rows.
                divisible.append(self.variables[i - 1] **
                                 self.degrees[i - 1])
                degree = self.degree_m - self.degrees[i]
                poss_rows = self.get_monomials_of_certain_degree(degree)
                for div in divisible:
                    for p in poss_rows:
                        if rem(p, div) == 0:
                            poss_rows = [item for item in poss_rows
                                         if item != p]
                row_coefficients.append(poss_rows)
        return row_coefficients

    def get_matrix(self):
        """
        Returns
        =======

        macaulay_matrix: Matrix
            The Macaulay numerator matrix
        """
        rows = []
        row_coefficients = self.get_row_coefficients()
        for i in range(self.n):
            for multiplier in row_coefficients[i]:
                coefficients = []
                poly = Poly(self.polynomials[i] * multiplier,
                            *self.variables)

                # One matrix row per (polynomial, multiplier) pair;
                # columns follow self.monomial_set.
                for mono in self.monomial_set:
                    coefficients.append(poly.coeff_monomial(mono))
                rows.append(coefficients)

        macaulay_matrix = Matrix(rows)
        return macaulay_matrix

    def get_reduced_nonreduced(self):
        r"""
        Returns
        =======

        reduced: list
            A list of the reduced monomials
        non_reduced: list
            A list of the monomials that are not reduced

        Definition
        ==========

        A polynomial is said to be reduced in x_i, if its degree (the
        maximum degree of its monomials) in x_i is less than d_i. A
        polynomial that is reduced in all variables but one is said
        simply to be reduced.
        """
        # divisible[k][i] is True if monomial k has degree >= d_i in x_i.
        divisible = []
        for m in self.monomial_set:
            temp = []
            for i, v in enumerate(self.variables):
                temp.append(bool(total_degree(m, v) >= self.degrees[i]))
            divisible.append(temp)
        reduced = [i for i, r in enumerate(divisible)
                   if sum(r) < self.n - 1]
        non_reduced = [i for i, r in enumerate(divisible)
                       if sum(r) >= self.n - 1]

        return reduced, non_reduced

    def get_submatrix(self, matrix):
        r"""
        Returns
        =======

        macaulay_submatrix: Matrix
            The Macaulay denominator matrix. Columns that are non reduced are kept.
            The row which contains one of the a_{i}s is dropped. a_{i}s
            are the coefficients of x_i ^ {d_i}.
        """
        reduced, non_reduced = self.get_reduced_nonreduced()

        # if reduced == [], then det(matrix) should be 1
        if reduced == []:
            return diag([1])

        # reduced != []
        reduction_set = [v ** self.degrees[i] for i, v
                         in enumerate(self.variables)]

        # a_i is the coefficient of x_i ** d_i in the i-th polynomial.
        ais = list([self.polynomials[i].coeff(reduction_set[i])
                    for i in range(self.n)])

        reduced_matrix = matrix[:, reduced]
        keep = []
        for row in range(reduced_matrix.rows):
            check = [ai in reduced_matrix[row, :] for ai in ais]
            if True not in check:
                keep.append(row)

        return matrix[keep, non_reduced]
| 32.274892 | 96 | 0.553551 |
4aeaf175504deb2432eb92c1a08e0317d63e3763 | 3,299 | py | Python | huaweicloud-sdk-bss/huaweicloudsdkbss/v2/model/list_service_types_response.py | huaweicloud/huaweicloud-sdk-python-v3 | 7a6270390fcbf192b3882bf763e7016e6026ef78 | [
"Apache-2.0"
] | 64 | 2020-06-12T07:05:07.000Z | 2022-03-30T03:32:50.000Z | huaweicloud-sdk-bss/huaweicloudsdkbss/v2/model/list_service_types_response.py | huaweicloud/huaweicloud-sdk-python-v3 | 7a6270390fcbf192b3882bf763e7016e6026ef78 | [
"Apache-2.0"
] | 11 | 2020-07-06T07:56:54.000Z | 2022-01-11T11:14:40.000Z | huaweicloud-sdk-bss/huaweicloudsdkbss/v2/model/list_service_types_response.py | huaweicloud/huaweicloud-sdk-python-v3 | 7a6270390fcbf192b3882bf763e7016e6026ef78 | [
"Apache-2.0"
] | 24 | 2020-06-08T11:42:13.000Z | 2022-03-04T06:44:08.000Z | # coding: utf-8
import re
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ListServiceTypesResponse(SdkResponse):
    """Response model for listing cloud service types.

    Generated SDK model; keep structure in sync with the OpenAPI spec.

    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.

      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Attributes whose values must be masked in to_dict() output.
    sensitive_list = []

    openapi_types = {
        'service_types': 'list[ServiceType]'
    }

    attribute_map = {
        'service_types': 'service_types'
    }

    def __init__(self, service_types=None):
        """ListServiceTypesResponse - a model defined in huaweicloud sdk"""
        super(ListServiceTypesResponse, self).__init__()

        self._service_types = None
        self.discriminator = None

        if service_types is not None:
            self.service_types = service_types

    @property
    def service_types(self):
        """Gets the service_types of this ListServiceTypesResponse.

        The returned cloud service type information; see Table 3 for details.

        :return: The service_types of this ListServiceTypesResponse.
        :rtype: list[ServiceType]
        """
        return self._service_types

    @service_types.setter
    def service_types(self, service_types):
        """Sets the service_types of this ListServiceTypesResponse.

        The returned cloud service type information; see Table 3 for details.

        :param service_types: The service_types of this ListServiceTypesResponse.
        :type: list[ServiceType]
        """
        self._service_types = service_types

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                # Mask sensitive attributes instead of exposing them.
                if attr in self.sensitive_list:
                    result[attr] = "****"
                else:
                    result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        import simplejson as json
        if six.PY2:
            # Python 2 only: force UTF-8 default encoding for serialization.
            import sys
            reload(sys)
            sys.setdefaultencoding("utf-8")
        return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)

    def __repr__(self):
        """For `print`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, ListServiceTypesResponse):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| 28.686957 | 81 | 0.581085 |
50e0e5a22159288e161921eb1fd25e46702ad613 | 99 | py | Python | 2.py | chauhanmahavir/Python-Basics | c250a9eee203e1188a968ba2c60262442719fa49 | [
"MIT"
] | 1 | 2020-08-05T05:38:44.000Z | 2020-08-05T05:38:44.000Z | 2.py | chauhanmahavir/Python-Basics | c250a9eee203e1188a968ba2c60262442719fa49 | [
"MIT"
] | null | null | null | 2.py | chauhanmahavir/Python-Basics | c250a9eee203e1188a968ba2c60262442719fa49 | [
"MIT"
] | null | null | null | print('hii'+str(5))
# Demonstrate numeric casts in mixed arithmetic (trailing semicolons removed
# -- they are not idiomatic Python).
print(int(8) + 5)        # 13
print(float(8.5) + 5)    # 13.5
print(int(8.5) + 5)      # int() truncates 8.5 -> 8, so this prints 13
# print(int('C'))  # would raise ValueError: invalid literal for int()
| 16.5 | 21 | 0.555556 |
964515eee09db29ac6709cdab4664e9d8b6074f1 | 45,886 | py | Python | certbot/certbot/_internal/storage.py | radek-sprta/certbot | a6772043d6631341b525c4d69b47d6ef2d8b5d02 | [
"Apache-2.0"
] | 1 | 2020-01-23T05:57:42.000Z | 2020-01-23T05:57:42.000Z | certbot/certbot/_internal/storage.py | radek-sprta/certbot | a6772043d6631341b525c4d69b47d6ef2d8b5d02 | [
"Apache-2.0"
] | null | null | null | certbot/certbot/_internal/storage.py | radek-sprta/certbot | a6772043d6631341b525c4d69b47d6ef2d8b5d02 | [
"Apache-2.0"
] | null | null | null | """Renewable certificates storage."""
import datetime
import glob
import logging
import re
import shutil
import stat
import configobj
import parsedatetime
import pytz
import six
import certbot
from certbot import crypto_util
from certbot import errors
from certbot import interfaces
from certbot import util
from certbot._internal import cli
from certbot._internal import constants
from certbot._internal import error_handler
from certbot._internal.plugins import disco as plugins_disco
from certbot.compat import filesystem
from certbot.compat import os
from certbot.plugins import common as plugins_common
logger = logging.getLogger(__name__)
ALL_FOUR = ("cert", "privkey", "chain", "fullchain")
README = "README"
CURRENT_VERSION = util.get_strict_version(certbot.__version__)
BASE_PRIVKEY_MODE = 0o600
def renewal_conf_files(config):
    """Build a list of all renewal configuration files.

    :param certbot.interfaces.IConfig config: Configuration object

    :returns: list of renewal configuration files
    :rtype: `list` of `str`

    """
    pattern = os.path.join(config.renewal_configs_dir, "*.conf")
    return sorted(glob.glob(pattern))
def renewal_file_for_certname(config, certname):
    """Return /path/to/certname.conf in the renewal conf directory"""
    conf_path = os.path.join(config.renewal_configs_dir,
                             "{0}.conf".format(certname))
    if os.path.exists(conf_path):
        return conf_path
    raise errors.CertStorageError("No certificate found with name {0} (expected "
                                  "{1}).".format(certname, conf_path))
def cert_path_for_cert_name(config, cert_name):
    """ If `--cert-name` was specified, but you need a value for `--cert-path`.

    :param `configuration.NamespaceConfig` config: parsed command line arguments
    :param str cert_name: cert name.

    """
    conf_file = renewal_file_for_certname(config, cert_name)
    fullchain = configobj.ConfigObj(conf_file)["fullchain"]
    with open(fullchain) as fh:
        contents = fh.read()
    # NOTE: despite the name, this is a (path, contents) pair.
    return (fullchain, contents)
def config_with_defaults(config=None):
    """Merge supplied config, if provided, on top of builtin defaults."""
    merged = configobj.ConfigObj(constants.RENEWER_DEFAULTS)
    overrides = configobj.ConfigObj() if config is None else config
    merged.merge(overrides)
    return merged
def add_time_interval(base_time, interval, textparser=parsedatetime.Calendar()):
    """Parse the time specified time interval, and add it to the base_time

    The interval can be in the English-language format understood by
    parsedatetime, e.g., '10 days', '3 weeks', '6 months', '9 hours', or
    a sequence of such intervals like '6 months 1 week' or '3 days 12
    hours'. If an integer is found with no associated unit, it is
    interpreted by default as a number of days.

    :param datetime.datetime base_time: The time to be added with the interval.
    :param str interval: The time interval to parse.

    :returns: The base_time plus the interpretation of the time interval.
    :rtype: :class:`datetime.datetime`"""
    # NOTE(review): the shared default Calendar() instance is reused across
    # calls; presumably parseDT treats it as a stateless parser -- confirm
    # against parsedatetime documentation.
    if interval.strip().isdigit():
        # A bare number means "that many days".
        interval += " days"

    # try to use the same timezone, but fallback to UTC
    tzinfo = base_time.tzinfo or pytz.UTC

    return textparser.parseDT(interval, base_time, tzinfo=tzinfo)[0]
def write_renewal_config(o_filename, n_filename, archive_dir, target, relevant_data):
    """Writes a renewal config file with the specified name and values.

    :param str o_filename: Absolute path to the previous version of config file
    :param str n_filename: Absolute path to the new destination of config file
    :param str archive_dir: Absolute path to the archive directory
    :param dict target: Maps ALL_FOUR to their symlink paths
    :param dict relevant_data: Renewal configuration options to save

    :returns: Configuration object for the new config file
    :rtype: configobj.ConfigObj

    """
    # Start from the old config so unrelated entries/comments are kept.
    config = configobj.ConfigObj(o_filename)
    config["version"] = certbot.__version__
    config["archive_dir"] = archive_dir
    for kind in ALL_FOUR:
        config[kind] = target[kind]

    if "renewalparams" not in config:
        config["renewalparams"] = {}
        config.comments["renewalparams"] = ["",
                                            "Options used in "
                                            "the renewal process"]

    config["renewalparams"].update(relevant_data)

    # Drop stale renewal parameters that are no longer relevant.
    for k in config["renewalparams"].keys():
        if k not in relevant_data:
            del config["renewalparams"][k]

    if "renew_before_expiry" not in config:
        default_interval = constants.RENEWER_DEFAULTS["renew_before_expiry"]
        config.initial_comment = ["renew_before_expiry = " + default_interval]

    # TODO: add human-readable comments explaining other available
    # parameters
    logger.debug("Writing new config %s.", n_filename)

    # Ensure that the file exists
    open(n_filename, 'a').close()

    # Copy permissions from the old version of the file, if it exists.
    if os.path.exists(o_filename):
        current_permissions = stat.S_IMODE(os.lstat(o_filename).st_mode)
        filesystem.chmod(n_filename, current_permissions)

    with open(n_filename, "wb") as f:
        config.write(outfile=f)
    return config
def rename_renewal_config(prev_name, new_name, cli_config):
    """Renames cli_config.certname's config to cli_config.new_certname.

    :param .NamespaceConfig cli_config: parsed command line
        arguments

    """
    source = renewal_filename_for_lineagename(cli_config, prev_name)
    destination = renewal_filename_for_lineagename(cli_config, new_name)

    # Refuse to clobber an existing lineage's configuration.
    if os.path.exists(destination):
        raise errors.ConfigurationError("The new certificate name "
                                        "is already in use.")

    try:
        filesystem.replace(source, destination)
    except OSError:
        raise errors.ConfigurationError("Please specify a valid filename "
                                        "for the new certificate name.")
def update_configuration(lineagename, archive_dir, target, cli_config):
    """Modifies lineagename's config to contain the specified values.

    :param str lineagename: Name of the lineage being modified
    :param str archive_dir: Absolute path to the archive directory
    :param dict target: Maps ALL_FOUR to their symlink paths
    :param .NamespaceConfig cli_config: parsed command line
        arguments

    :returns: Configuration object for the updated config file
    :rtype: configobj.ConfigObj

    """
    config_filename = renewal_filename_for_lineagename(cli_config, lineagename)
    temp_filename = config_filename + ".new"

    # If an existing tempfile exists, delete it
    if os.path.exists(temp_filename):
        os.unlink(temp_filename)

    # Save only the config items that are relevant to renewal
    values = relevant_values(vars(cli_config.namespace))
    write_renewal_config(config_filename, temp_filename, archive_dir, target, values)
    # Replace the old config with the fully-written new one so a partially
    # written file is never left in place.
    filesystem.replace(temp_filename, config_filename)

    return configobj.ConfigObj(config_filename)
def get_link_target(link):
    """Get an absolute path to the target of link.

    :param str link: Path to a symbolic link

    :returns: Absolute path to the target of link
    :rtype: str

    :raises .CertStorageError: If link does not exists.

    """
    try:
        raw_target = os.readlink(link)
    except OSError:
        raise errors.CertStorageError(
            "Expected {0} to be a symlink".format(link))

    # Relative targets are resolved against the link's own directory.
    if os.path.isabs(raw_target):
        return os.path.abspath(raw_target)
    return os.path.abspath(os.path.join(os.path.dirname(link), raw_target))
def _write_live_readme_to(readme_path, is_base_dir=False):
    """Write the standard live-directory README file.

    :param str readme_path: Destination path for the README
    :param bool is_base_dir: If True, the README describes the live base
        directory, so file names are shown with a ``[cert name]/`` prefix.
    """
    prefix = ""
    if is_base_dir:
        prefix = "[cert name]/"
    with open(readme_path, "w") as f:
        logger.debug("Writing README to %s.", readme_path)
        f.write("This directory contains your keys and certificates.\n\n"
                "`{prefix}privkey.pem` : the private key for your certificate.\n"
                "`{prefix}fullchain.pem`: the certificate file used in most server software.\n"
                "`{prefix}chain.pem` : used for OCSP stapling in Nginx >=1.3.7.\n"
                "`{prefix}cert.pem` : will break many server configurations, and "
                "should not be used\n"
                " without reading further documentation (see link below).\n\n"
                "WARNING: DO NOT MOVE OR RENAME THESE FILES!\n"
                " Certbot expects these files to remain in this location in order\n"
                " to function properly!\n\n"
                "We recommend not moving these files. For more information, see the Certbot\n"
                "User Guide at https://certbot.eff.org/docs/using.html#where-are-my-"
                "certificates.\n".format(prefix=prefix))
def _relevant(namespaces, option):
    """
    Is this option one that could be restored for future renewal purposes?

    :param namespaces: plugin namespaces for configuration options
    :type namespaces: `list` of `str`
    :param str option: the name of the option

    :rtype: bool
    """
    # Imported here to avoid a circular import at module load time.
    from certbot._internal import renewal

    if option in renewal.CONFIG_ITEMS:
        return True
    return any(option.startswith(namespace) for namespace in namespaces)
def relevant_values(all_values):
    """Return a new dict containing only items relevant for renewal.

    :param dict all_values: The original values.

    :returns: A new dictionary containing items that can be used in renewal.
    :rtype dict:

    """
    plugins = plugins_disco.PluginsRegistry.find_all()
    namespaces = [plugins_common.dest_namespace(plugin) for plugin in plugins]
    # Keep only options that are renewal-relevant AND were explicitly set
    # on the command line.
    rv = dict(
        (option, value)
        for option, value in six.iteritems(all_values)
        if _relevant(namespaces, option) and cli.option_was_set(option, value))
    # We always save the server value to help with forward compatibility
    # and behavioral consistency when versions of Certbot with different
    # server defaults are used.
    rv["server"] = all_values["server"]
    return rv
def lineagename_for_filename(config_filename):
    """Returns the lineagename for a configuration filename.
    """
    suffix = ".conf"
    if not config_filename.endswith(suffix):
        raise errors.CertStorageError(
            "renewal config file name must end in .conf")
    return os.path.basename(config_filename[:-len(suffix)])
def renewal_filename_for_lineagename(config, lineagename):
    """Returns the lineagename for a configuration filename.
    """
    base = os.path.join(config.renewal_configs_dir, lineagename)
    return base + ".conf"
def _relpath_from_file(archive_dir, from_file):
    """Path to a directory from a file"""
    containing_dir = os.path.dirname(from_file)
    return os.path.relpath(archive_dir, containing_dir)
def full_archive_path(config_obj, cli_config, lineagename):
    """Returns the full archive path for a lineagename

    Uses cli_config to determine archive path if not available from config_obj.

    :param configobj.ConfigObj config_obj: Renewal conf file contents (can be None)
    :param configuration.NamespaceConfig cli_config: Main config file
    :param str lineagename: Certificate name
    """
    # Prefer an explicit archive_dir from the renewal configuration.
    if config_obj:
        if "archive_dir" in config_obj:
            return config_obj["archive_dir"]
    return os.path.join(cli_config.default_archive_dir, lineagename)
def _full_live_path(cli_config, lineagename):
    """Returns the full default live path for a lineagename"""
    live_root = cli_config.live_dir
    return os.path.join(live_root, lineagename)
def delete_files(config, certname):
    """Delete all files related to the certificate.

    If some files are not found, ignore them and continue.

    :param certbot.interfaces.IConfig config: Configuration object
    :param str certname: Name of the certificate lineage to delete
    """
    renewal_filename = renewal_file_for_certname(config, certname)
    # file exists

    full_default_archive_dir = full_archive_path(None, config, certname)
    full_default_live_dir = _full_live_path(config, certname)
    try:
        renewal_config = configobj.ConfigObj(renewal_filename)
    except configobj.ConfigObjError:
        # config is corrupted
        logger.warning("Could not parse %s. You may wish to manually "
                       "delete the contents of %s and %s.", renewal_filename,
                       full_default_live_dir, full_default_archive_dir)
        raise errors.CertStorageError(
            "error parsing {0}".format(renewal_filename))
    finally:
        # we couldn't read it, but let's at least delete it
        # if this was going to fail, it already would have.
        os.remove(renewal_filename)
        logger.debug("Removed %s", renewal_filename)

    # cert files and (hopefully) live directory
    # it's not guaranteed that the files are in our default storage
    # structure. so, first delete the cert files.
    directory_names = set()
    for kind in ALL_FOUR:
        link = renewal_config.get(kind)
        try:
            os.remove(link)
            logger.debug("Removed %s", link)
        except OSError:
            logger.debug("Unable to delete %s", link)
        directory = os.path.dirname(link)
        directory_names.add(directory)

    # if all four were in the same directory, and the only thing left
    # is the README file (or nothing), delete that directory.
    # this will be wrong in very few but some cases.
    if len(directory_names) == 1:
        # delete the README file
        directory = directory_names.pop()
        readme_path = os.path.join(directory, README)
        try:
            os.remove(readme_path)
            logger.debug("Removed %s", readme_path)
        except OSError:
            logger.debug("Unable to delete %s", readme_path)
        # if it's now empty, delete the directory
        try:
            os.rmdir(directory)  # only removes empty directories
            logger.debug("Removed %s", directory)
        except OSError:
            logger.debug("Unable to remove %s; may not be empty.", directory)

    # archive directory
    try:
        archive_path = full_archive_path(renewal_config, config, certname)
        shutil.rmtree(archive_path)
        logger.debug("Removed %s", archive_path)
    except OSError:
        logger.debug("Unable to remove %s", archive_path)
class RenewableCert(interfaces.RenewableCert):
"""Renewable certificate.
Represents a lineage of certificates that is under the management of
Certbot, indicated by the existence of an associated renewal
configuration file.
Note that the notion of "current version" for a lineage is
maintained on disk in the structure of symbolic links, and is not
explicitly stored in any instance variable in this object. The
RenewableCert object is able to determine information about the
current (or other) version by accessing data on disk, but does not
inherently know any of this information except by examining the
symbolic links as needed. The instance variables mentioned below
point to symlinks that reflect the notion of "current version" of
each managed object, and it is these paths that should be used when
configuring servers to use the certificate managed in a lineage.
These paths are normally within the "live" directory, and their
symlink targets -- the actual cert files -- are normally found
within the "archive" directory.
:ivar str cert: The path to the symlink representing the current
version of the certificate managed by this lineage.
:ivar str privkey: The path to the symlink representing the current
version of the private key managed by this lineage.
:ivar str chain: The path to the symlink representing the current version
of the chain managed by this lineage.
:ivar str fullchain: The path to the symlink representing the
current version of the fullchain (combined chain and cert)
managed by this lineage.
:ivar configobj.ConfigObj configuration: The renewal configuration
options associated with this lineage, obtained from parsing the
renewal configuration file and/or systemwide defaults.
"""
    def __init__(self, config_filename, cli_config, update_symlinks=False):
        """Instantiate a RenewableCert object from an existing lineage.

        :param str config_filename: the path to the renewal config file
            that defines this lineage.

        :param .NamespaceConfig: parsed command line arguments

        :raises .CertStorageError: if the configuration file's name didn't end
            in ".conf", or the file is missing or broken.

        """
        self.cli_config = cli_config
        self._lineagename = lineagename_for_filename(config_filename)

        # self.configuration should be used to read parameters that
        # may have been chosen based on default values from the
        # systemwide renewal configuration; self.configfile should be
        # used to make and save changes.
        try:
            self.configfile = configobj.ConfigObj(config_filename)
        except configobj.ConfigObjError:
            raise errors.CertStorageError(
                "error parsing {0}".format(config_filename))
        # TODO: Do we actually use anything from defaults and do we want to
        #       read further defaults from the systemwide renewal
        #       configuration file at this stage?
        self.configuration = config_with_defaults(self.configfile)

        if not all(x in self.configuration for x in ALL_FOUR):
            raise errors.CertStorageError(
                "renewal config file {0} is missing a required "
                "file reference".format(self.configfile))

        # Warn (but continue) when the config was written by a newer Certbot.
        conf_version = self.configuration.get("version")
        if (conf_version is not None and
                util.get_strict_version(conf_version) > CURRENT_VERSION):
            logger.info(
                "Attempting to parse the version %s renewal configuration "
                "file found at %s with version %s of Certbot. This might not "
                "work.", conf_version, config_filename, certbot.__version__)

        self.cert = self.configuration["cert"]
        self.privkey = self.configuration["privkey"]
        self.chain = self.configuration["chain"]
        self.fullchain = self.configuration["fullchain"]
        self.live_dir = os.path.dirname(self.cert)

        # Recover from any interrupted symlink update, then validate.
        self._fix_symlinks()
        if update_symlinks:
            self._update_symlinks()
        self._check_symlinks()
    @property
    def key_path(self):
        """Path to the private-key symlink (duck type for ``self.privkey``)."""
        return self.privkey
    @property
    def cert_path(self):
        """Path to the certificate symlink (duck type for ``self.cert``)."""
        return self.cert
    @property
    def chain_path(self):
        """Path to the chain symlink (duck type for ``self.chain``)."""
        return self.chain
    @property
    def fullchain_path(self):
        """Path to the fullchain symlink (duck type for ``self.fullchain``)."""
        return self.fullchain
    @property
    def lineagename(self):
        """Name given to the certificate lineage.

        :rtype: str

        """
        return self._lineagename
    @property
    def target_expiry(self):
        """The current target certificate's expiration datetime

        :returns: Expiration datetime of the current target certificate
        :rtype: :class:`datetime.datetime`
        """
        return crypto_util.notAfter(self.current_target("cert"))
    @property
    def archive_dir(self):
        """Returns the default or specified archive directory"""
        return full_archive_path(self.configuration,
                                 self.cli_config, self.lineagename)
    def relative_archive_dir(self, from_file):
        """Returns the default or specified archive directory as a relative path

        Used for creating symbolic links.

        :param str from_file: path the relative path will be computed from
        """
        return _relpath_from_file(self.archive_dir, from_file)
@property
def is_test_cert(self):
"""Returns true if this is a test cert from a staging server."""
server = self.configuration["renewalparams"].get("server", None)
if server:
return util.is_staging(server)
return False
def _check_symlinks(self):
"""Raises an exception if a symlink doesn't exist"""
for kind in ALL_FOUR:
link = getattr(self, kind)
if not os.path.islink(link):
raise errors.CertStorageError(
"expected {0} to be a symlink".format(link))
target = get_link_target(link)
if not os.path.exists(target):
raise errors.CertStorageError("target {0} of symlink {1} does "
"not exist".format(target, link))
    def _update_symlinks(self):
        """Updates symlinks to use archive_dir"""
        for kind in ALL_FOUR:
            link = getattr(self, kind)
            previous_link = get_link_target(link)
            # Recreate each link relative to the configured archive dir,
            # keeping the same target file name.
            new_link = os.path.join(self.relative_archive_dir(link),
                                    os.path.basename(previous_link))

            os.unlink(link)
            os.symlink(new_link, link)
    def _consistent(self):
        """Are the files associated with this lineage self-consistent?

        :returns: Whether the files stored in connection with this
            lineage appear to be correct and consistent with one
            another.
        :rtype: bool

        """
        # Each element must be referenced with an absolute path
        for x in (self.cert, self.privkey, self.chain, self.fullchain):
            if not os.path.isabs(x):
                logger.debug("Element %s is not referenced with an "
                             "absolute path.", x)
                return False

        # Each element must exist and be a symbolic link
        for x in (self.cert, self.privkey, self.chain, self.fullchain):
            if not os.path.islink(x):
                logger.debug("Element %s is not a symbolic link.", x)
                return False
        for kind in ALL_FOUR:
            link = getattr(self, kind)
            target = get_link_target(link)

            # Each element's link must point within the cert lineage's
            # directory within the official archive directory
            if not os.path.samefile(os.path.dirname(target), self.archive_dir):
                logger.debug("Element's link does not point within the "
                             "cert lineage's directory within the "
                             "official archive directory. Link: %s, "
                             "target directory: %s, "
                             "archive directory: %s. If you've specified "
                             "the archive directory in the renewal configuration "
                             "file, you may need to update links by running "
                             "certbot update_symlinks.",
                             link, os.path.dirname(target), self.archive_dir)
                return False

            # The link must point to a file that exists
            if not os.path.exists(target):
                logger.debug("Link %s points to file %s that does not exist.",
                             link, target)
                return False

            # The link must point to a file that follows the archive
            # naming convention
            pattern = re.compile(r"^{0}([0-9]+)\.pem$".format(kind))
            if not pattern.match(os.path.basename(target)):
                logger.debug("%s does not follow the archive naming "
                             "convention.", target)
                return False

            # It is NOT required that the link's target be a regular
            # file (it may itself be a symlink). But we should probably
            # do a recursive check that ultimately the target does
            # exist?
        # XXX: Additional possible consistency checks (e.g.
        #      cryptographic validation of the chain being a chain,
        #      the chain matching the cert, and the cert matching
        #      the subject key)
        # XXX: All four of the targets are in the same directory
        #      (This check is redundant with the check that they
        #      are all in the desired directory!)
        #      len(set(os.path.basename(self.current_target(x)
        #      for x in ALL_FOUR))) == 1
        return True
    def _fix(self):
        """Attempt to fix defects or inconsistencies in this lineage.
        .. todo:: Currently unimplemented.
        """
        # Intentionally a no-op for now; see the TODO below for ideas.
        # TODO: Figure out what kinds of fixes are possible. For
        #       example, checking if there is a valid version that
        #       we can update the symlinks to. (Maybe involve
        #       parsing keys and certs to see if they exist and
        #       if a key corresponds to the subject key of a cert?)
    # TODO: In general, the symlink-reading functions below are not
    #       cautious enough about the possibility that links or their
    #       targets may not exist. (This shouldn't happen, but might
    #       happen as a result of random tampering by a sysadmin, or
    #       filesystem errors, or crashes.)
def _previous_symlinks(self):
"""Returns the kind and path of all symlinks used in recovery.
:returns: list of (kind, symlink) tuples
:rtype: list
"""
previous_symlinks = []
for kind in ALL_FOUR:
link_dir = os.path.dirname(getattr(self, kind))
link_base = "previous_{0}.pem".format(kind)
previous_symlinks.append((kind, os.path.join(link_dir, link_base)))
return previous_symlinks
    def _fix_symlinks(self):
        """Fixes symlinks in the event of an incomplete version update.
        If there is no problem with the current symlinks, this function
        has no effect.
        """
        previous_symlinks = self._previous_symlinks()
        # Only attempt recovery when ALL backup links exist: a partial
        # set means the interrupted update either finished or never
        # got far enough to matter.
        if all(os.path.exists(link[1]) for link in previous_symlinks):
            for kind, previous_link in previous_symlinks:
                current_link = getattr(self, kind)
                if os.path.lexists(current_link):
                    os.unlink(current_link)
                # Point the live link back at the backed-up target.
                os.symlink(os.readlink(previous_link), current_link)
        # Remove the backup links whether or not a restore happened.
        for _, link in previous_symlinks:
            if os.path.exists(link):
                os.unlink(link)
def current_target(self, kind):
"""Returns full path to which the specified item currently points.
:param str kind: the lineage member item ("cert", "privkey",
"chain", or "fullchain")
:returns: The path to the current version of the specified
member.
:rtype: str or None
"""
if kind not in ALL_FOUR:
raise errors.CertStorageError("unknown kind of item")
link = getattr(self, kind)
if not os.path.exists(link):
logger.debug("Expected symlink %s for %s does not exist.",
link, kind)
return None
return get_link_target(link)
def current_version(self, kind):
"""Returns numerical version of the specified item.
For example, if kind is "chain" and the current chain link
points to a file named "chain7.pem", returns the integer 7.
:param str kind: the lineage member item ("cert", "privkey",
"chain", or "fullchain")
:returns: the current version of the specified member.
:rtype: int
"""
if kind not in ALL_FOUR:
raise errors.CertStorageError("unknown kind of item")
pattern = re.compile(r"^{0}([0-9]+)\.pem$".format(kind))
target = self.current_target(kind)
if target is None or not os.path.exists(target):
logger.debug("Current-version target for %s "
"does not exist at %s.", kind, target)
target = ""
matches = pattern.match(os.path.basename(target))
if matches:
return int(matches.groups()[0])
logger.debug("No matches for target %s.", kind)
return None
def version(self, kind, version):
"""The filename that corresponds to the specified version and kind.
.. warning:: The specified version may not exist in this
lineage. There is no guarantee that the file path returned
by this method actually exists.
:param str kind: the lineage member item ("cert", "privkey",
"chain", or "fullchain")
:param int version: the desired version
:returns: The path to the specified version of the specified member.
:rtype: str
"""
if kind not in ALL_FOUR:
raise errors.CertStorageError("unknown kind of item")
where = os.path.dirname(self.current_target(kind))
return os.path.join(where, "{0}{1}.pem".format(kind, version))
def available_versions(self, kind):
"""Which alternative versions of the specified kind of item exist?
The archive directory where the current version is stored is
consulted to obtain the list of alternatives.
:param str kind: the lineage member item (
``cert``, ``privkey``, ``chain``, or ``fullchain``)
:returns: all of the version numbers that currently exist
:rtype: `list` of `int`
"""
if kind not in ALL_FOUR:
raise errors.CertStorageError("unknown kind of item")
where = os.path.dirname(self.current_target(kind))
files = os.listdir(where)
pattern = re.compile(r"^{0}([0-9]+)\.pem$".format(kind))
matches = [pattern.match(f) for f in files]
return sorted([int(m.groups()[0]) for m in matches if m])
def newest_available_version(self, kind):
"""Newest available version of the specified kind of item?
:param str kind: the lineage member item (``cert``,
``privkey``, ``chain``, or ``fullchain``)
:returns: the newest available version of this member
:rtype: int
"""
return max(self.available_versions(kind))
def latest_common_version(self):
"""Newest version for which all items are available?
:returns: the newest available version for which all members
(``cert, ``privkey``, ``chain``, and ``fullchain``) exist
:rtype: int
"""
# TODO: this can raise CertStorageError if there is no version overlap
# (it should probably return None instead)
# TODO: this can raise a spurious AttributeError if the current
# link for any kind is missing (it should probably return None)
versions = [self.available_versions(x) for x in ALL_FOUR]
return max(n for n in versions[0] if all(n in v for v in versions[1:]))
def next_free_version(self):
"""Smallest version newer than all full or partial versions?
:returns: the smallest version number that is larger than any
version of any item currently stored in this lineage
:rtype: int
"""
# TODO: consider locking/mutual exclusion between updating processes
# This isn't self.latest_common_version() + 1 because we don't want
# collide with a version that might exist for one file type but not
# for the others.
return max(self.newest_available_version(x) for x in ALL_FOUR) + 1
    def ensure_deployed(self):
        """Make sure we've deployed the latest version.
        :returns: False if a change was needed, True otherwise
        :rtype: bool
        May need to recover from rare interrupted / crashed states."""
        if self.has_pending_deployment():
            # A newer complete version exists in /archive/ but /live/
            # still points at an older one; repoint all four links.
            logger.warning("Found a new cert /archive/ that was not linked to in /live/; "
                           "fixing...")
            self.update_all_links_to(self.latest_common_version())
            return False
        return True
def has_pending_deployment(self):
"""Is there a later version of all of the managed items?
:returns: ``True`` if there is a complete version of this
lineage with a larger version number than the current
version, and ``False`` otherwise
:rtype: bool
"""
# TODO: consider whether to assume consistency or treat
# inconsistent/consistent versions differently
smallest_current = min(self.current_version(x) for x in ALL_FOUR)
return smallest_current < self.latest_common_version()
    def _update_link_to(self, kind, version):
        """Make the specified item point at the specified version.
        (Note that this method doesn't verify that the specified version
        exists.)
        :param str kind: the lineage member item ("cert", "privkey",
            "chain", or "fullchain")
        :param int version: the desired version
        """
        if kind not in ALL_FOUR:
            raise errors.CertStorageError("unknown kind of item")
        link = getattr(self, kind)
        filename = "{0}{1}.pem".format(kind, version)
        # Relative rather than absolute target directory: reuse whatever
        # directory the existing link points into.
        target_directory = os.path.dirname(os.readlink(link))
        # TODO: it could be safer to make the link first under a temporary
        #       filename, then unlink the old link, then rename the new link
        #       to the old link; this ensures that this process is able to
        #       create symlinks.
        # TODO: we might also want to check consistency of related links
        #       for the other corresponding items
        os.unlink(link)
        os.symlink(os.path.join(target_directory, filename), link)
    def update_all_links_to(self, version):
        """Change all member objects to point to the specified version.
        :param int version: the desired version
        """
        # If anything fails mid-update, the ErrorHandler invokes
        # _fix_symlinks, which restores the links from the
        # "previous_*" backups created below.
        with error_handler.ErrorHandler(self._fix_symlinks):
            previous_links = self._previous_symlinks()
            for kind, link in previous_links:
                # Back up every current target before touching anything.
                os.symlink(self.current_target(kind), link)
            for kind in ALL_FOUR:
                self._update_link_to(kind, version)
            # Success: the backup links are no longer needed.
            for _, link in previous_links:
                os.unlink(link)
def names(self):
"""What are the subject names of this certificate?
:returns: the subject names
:rtype: `list` of `str`
:raises .CertStorageError: if could not find cert file.
"""
target = self.current_target("cert")
if target is None:
raise errors.CertStorageError("could not find cert file")
with open(target) as f:
return crypto_util.get_names_from_cert(f.read())
def ocsp_revoked(self, version=None):
# pylint: disable=no-self-use,unused-argument
"""Is the specified cert version revoked according to OCSP?
Also returns True if the cert version is declared as intended
to be revoked according to Let's Encrypt OCSP extensions.
(If no version is specified, uses the current version.)
This method is not yet implemented and currently always returns
False.
:param int version: the desired version number
:returns: whether the certificate is or will be revoked
:rtype: bool
"""
# XXX: This query and its associated network service aren't
# implemented yet, so we currently return False (indicating that the
# certificate is not revoked).
return False
def autorenewal_is_enabled(self):
"""Is automatic renewal enabled for this cert?
If autorenew is not specified, defaults to True.
:returns: True if automatic renewal is enabled
:rtype: bool
"""
return ("autorenew" not in self.configuration["renewalparams"] or
self.configuration["renewalparams"].as_bool("autorenew"))
    def should_autorenew(self):
        """Should we now try to autorenew the most recent cert version?
        This is a policy question and does not only depend on whether
        the cert is expired. (This considers whether autorenewal is
        enabled, whether the cert is revoked, and whether the time
        interval for autorenewal has been reached.)
        Note that this examines the numerically most recent cert version,
        not the currently deployed version.
        :returns: whether an attempt should now be made to autorenew the
            most current cert version in this lineage
        :rtype: bool
        """
        if self.autorenewal_is_enabled():
            # Consider whether to attempt to autorenew this cert now
            # Renewals on the basis of revocation
            if self.ocsp_revoked(self.latest_common_version()):
                logger.debug("Should renew, certificate is revoked.")
                return True
            # Renews some period before expiry time; the interval comes
            # from the renewal config, falling back to the global default.
            default_interval = constants.RENEWER_DEFAULTS["renew_before_expiry"]
            interval = self.configuration.get("renew_before_expiry", default_interval)
            expiry = crypto_util.notAfter(self.version(
                "cert", self.latest_common_version()))
            now = pytz.UTC.fromutc(datetime.datetime.utcnow())
            if expiry < add_time_interval(now, interval):
                logger.debug("Should renew, less than %s before certificate "
                             "expiry %s.", interval,
                             expiry.strftime("%Y-%m-%d %H:%M:%S %Z"))
                return True
        return False
    @classmethod
    def new_lineage(cls, lineagename, cert, privkey, chain, cli_config):
        """Create a new certificate lineage.
        Attempts to create a certificate lineage -- enrolled for
        potential future renewal -- with the (suggested) lineage name
        lineagename, and the associated cert, privkey, and chain (the
        associated fullchain will be created automatically). Optional
        configurator and renewalparams record the configuration that was
        originally used to obtain this cert, so that it can be reused
        later during automated renewal.
        Returns a new RenewableCert object referring to the created
        lineage. (The actual lineage name, as well as all the relevant
        file paths, will be available within this object.)
        :param str lineagename: the suggested name for this lineage
            (normally the current cert's first subject DNS name)
        :param str cert: the initial certificate version in PEM format
        :param str privkey: the private key in PEM format
        :param str chain: the certificate chain in PEM format
        :param .NamespaceConfig cli_config: parsed command line
            arguments
        :returns: the newly-created RenewalCert object
        :rtype: :class:`storage.RenewableCert`
        """
        # Examine the configuration and find the new lineage's name
        for i in (cli_config.renewal_configs_dir, cli_config.default_archive_dir,
                  cli_config.live_dir):
            if not os.path.exists(i):
                filesystem.makedirs(i, 0o700)
                logger.debug("Creating directory %s.", i)
        config_file, config_filename = util.unique_lineage_name(
            cli_config.renewal_configs_dir, lineagename)
        base_readme_path = os.path.join(cli_config.live_dir, README)
        if not os.path.exists(base_readme_path):
            _write_live_readme_to(base_readme_path, is_base_dir=True)
        # Determine where on disk everything will go
        # lineagename will now potentially be modified based on which
        # renewal configuration file could actually be created
        lineagename = lineagename_for_filename(config_filename)
        archive = full_archive_path(None, cli_config, lineagename)
        live_dir = _full_live_path(cli_config, lineagename)
        # Refuse to clobber an existing lineage's directories; close the
        # config file handle before raising so it is not leaked.
        if os.path.exists(archive):
            config_file.close()
            raise errors.CertStorageError(
                "archive directory exists for " + lineagename)
        if os.path.exists(live_dir):
            config_file.close()
            raise errors.CertStorageError(
                "live directory exists for " + lineagename)
        filesystem.mkdir(archive)
        filesystem.mkdir(live_dir)
        logger.debug("Archive directory %s and live "
                     "directory %s created.", archive, live_dir)
        # Put the data into the appropriate files on disk.
        # live/<kind>.pem is a symlink to archive/<kind>1.pem.
        target = {kind: os.path.join(live_dir, kind + ".pem") for kind in ALL_FOUR}
        archive_target = {kind: os.path.join(archive, kind + "1.pem") for kind in ALL_FOUR}
        for kind in ALL_FOUR:
            os.symlink(_relpath_from_file(archive_target[kind], target[kind]), target[kind])
        # NOTE(review): cert/chain/fullchain are written through the live
        # symlinks while privkey is written to the archive path directly
        # (safe_open would refuse to follow an existing link) -- confirm.
        with open(target["cert"], "wb") as f:
            logger.debug("Writing certificate to %s.", target["cert"])
            f.write(cert)
        with util.safe_open(archive_target["privkey"], "wb", chmod=BASE_PRIVKEY_MODE) as f:
            logger.debug("Writing private key to %s.", target["privkey"])
            f.write(privkey)
        # XXX: Let's make sure to get the file permissions right here
        with open(target["chain"], "wb") as f:
            logger.debug("Writing chain to %s.", target["chain"])
            f.write(chain)
        with open(target["fullchain"], "wb") as f:
            # assumes that OpenSSL.crypto.dump_certificate includes
            # ending newline character
            logger.debug("Writing full chain to %s.", target["fullchain"])
            f.write(cert + chain)
        # Write a README file to the live directory
        readme_path = os.path.join(live_dir, README)
        _write_live_readme_to(readme_path)
        # Document what we've done in a new renewal config file
        config_file.close()
        # Save only the config items that are relevant to renewal
        values = relevant_values(vars(cli_config.namespace))
        new_config = write_renewal_config(config_filename, config_filename, archive,
                                          target, values)
        return cls(new_config.filename, cli_config)
    def save_successor(self, prior_version, new_cert,
                       new_privkey, new_chain, cli_config):
        """Save new cert and chain as a successor of a prior version.
        Returns the new version number that was created.
        .. note:: this function does NOT update links to deploy this
            version
        :param int prior_version: the old version to which this version
            is regarded as a successor (used to choose a privkey, if the
            key has not changed, but otherwise this information is not
            permanently recorded anywhere)
        :param bytes new_cert: the new certificate, in PEM format
        :param bytes new_privkey: the new private key, in PEM format,
            or ``None``, if the private key has not changed
        :param bytes new_chain: the new chain, in PEM format
        :param .NamespaceConfig cli_config: parsed command line
            arguments
        :returns: the new version number that was created
        :rtype: int
        """
        # XXX: assumes official archive location rather than examining links
        # XXX: consider using os.open for availability of os.O_EXCL
        # XXX: ensure file permissions are correct; also create directories
        #      if needed (ensuring their permissions are correct)
        # Figure out what the new version is and hence where to save things
        self.cli_config = cli_config
        target_version = self.next_free_version()
        target = {kind: os.path.join(self.archive_dir, "{0}{1}.pem".format(kind, target_version))
                  for kind in ALL_FOUR}
        old_privkey = os.path.join(
            self.archive_dir, "privkey{0}.pem".format(prior_version))
        # Distinguish the cases where the privkey has changed and where it
        # has not changed (in the latter case, making an appropriate symlink
        # to an earlier privkey version)
        if new_privkey is None:
            # The behavior below keeps the prior key by creating a new
            # symlink to the old key or the target of the old key symlink.
            if os.path.islink(old_privkey):
                old_privkey = os.readlink(old_privkey)
            else:
                old_privkey = "privkey{0}.pem".format(prior_version)
            logger.debug("Writing symlink to old private key, %s.", old_privkey)
            os.symlink(old_privkey, target["privkey"])
        else:
            with util.safe_open(target["privkey"], "wb", chmod=BASE_PRIVKEY_MODE) as f:
                logger.debug("Writing new private key to %s.", target["privkey"])
                f.write(new_privkey)
            # Preserve gid and (mode & MASK_FOR_PRIVATE_KEY_PERMISSIONS)
            # from previous privkey in this lineage.
            mode = filesystem.compute_private_key_mode(old_privkey, BASE_PRIVKEY_MODE)
            filesystem.copy_ownership_and_apply_mode(
                old_privkey, target["privkey"], mode, copy_user=False, copy_group=True)
        # Save everything else
        with open(target["cert"], "wb") as f:
            logger.debug("Writing certificate to %s.", target["cert"])
            f.write(new_cert)
        with open(target["chain"], "wb") as f:
            logger.debug("Writing chain to %s.", target["chain"])
            f.write(new_chain)
        with open(target["fullchain"], "wb") as f:
            logger.debug("Writing full chain to %s.", target["fullchain"])
            f.write(new_cert + new_chain)
        # The live symlinks recorded in the renewal config stay as-is;
        # deployment to this new version happens separately.
        symlinks = dict((kind, self.configuration[kind]) for kind in ALL_FOUR)
        # Update renewal config file
        self.configfile = update_configuration(
            self.lineagename, self.archive_dir, symlinks, cli_config)
        self.configuration = config_with_defaults(self.configfile)
        return target_version
| 40.643047 | 97 | 0.646668 |
aa4a7df725cc81bc8522f51a5bce49d50b132123 | 420 | py | Python | tests/test_pandas.py | ehw-fit/py-paretoarchive | 4c061435ddc1c573faf705e1111d012378c5dc4b | [
"BSD-3-Clause"
] | 1 | 2020-05-05T10:18:36.000Z | 2020-05-05T10:18:36.000Z | tests/test_pandas.py | ehw-fit/py-paretoarchive | 4c061435ddc1c573faf705e1111d012378c5dc4b | [
"BSD-3-Clause"
] | 1 | 2020-05-05T10:21:22.000Z | 2020-05-13T06:53:58.000Z | tests/test_pandas.py | ehw-fit/py-paretoarchive | 4c061435ddc1c573faf705e1111d012378c5dc4b | [
"BSD-3-Clause"
] | 1 | 2020-05-05T10:23:00.000Z | 2020-05-05T10:23:00.000Z | from paretoarchive.pandas import pareto
import pandas as pd
def test_df():
    """Pareto front extraction over a small DataFrame."""
    frame = pd.DataFrame(
        [[1, 3, 3], [1, 2, 3], [1, 1, 2]], columns=["a", "b", "c"]
    )
    # Row 2 dominates on the minimized objectives, so it alone survives.
    two_objectives = pareto(frame, ["a", "b"])
    assert (two_objectives.index == [2]).all()
    three_objectives = pareto(frame, ["a", "b", "c"])
    assert (three_objectives.index == [2]).all()
    # Maximizing the second objective keeps rows 0 and 2 on the front.
    mixed = pareto(frame, ["a", "b", "c"], minimizeObjective2=False)
    assert (mixed.index == [0, 2]).all()
if __name__ == "__main__":
test_df() | 26.25 | 88 | 0.521429 |
46ef2ca6adc6cb9e19be7abf08115f986dbd2973 | 9,943 | py | Python | datacraft/preprocessor.py | bbux-dev/datagen | 14b1fb7906fcc0991a6bfb40c4fb37fb346a7a09 | [
"MIT"
] | null | null | null | datacraft/preprocessor.py | bbux-dev/datagen | 14b1fb7906fcc0991a6bfb40c4fb37fb346a7a09 | [
"MIT"
] | 5 | 2021-10-17T04:48:19.000Z | 2021-12-08T19:53:28.000Z | datacraft/preprocessor.py | bbux-dev/datagen | 14b1fb7906fcc0991a6bfb40c4fb37fb346a7a09 | [
"MIT"
] | null | null | null | """
Module for preprocessing spec before generating values. Exists to handle shorthand notation and
pushing params from URL form of field?param=value in to config object.
"""
import re
import json
import logging
from urllib.parse import parse_qs
from .exceptions import SpecException
from . import registries
# Module-level logger for preprocessing diagnostics.
_log = logging.getLogger(__name__)
@registries.Registry.preprocessors('default')
def _preprocess_spec(raw_spec: dict, is_refs: bool = False) -> dict:
    """
    Normalizes a raw spec into the canonical full-spec format.
    URL-style params embedded in field keys are pushed into each field's
    config object, and shorthand specs are expanded into full specs.
    Args:
        raw_spec: to preprocess
        is_refs: is this the refs section of the spec
    Returns:
        the reformatted spec
    """
    reformatted = {}
    for field_key, field_spec in raw_spec.items():
        if field_key == 'refs':
            # refs get the same treatment, flagged as the refs section
            reformatted[field_key] = _preprocess_spec(field_spec, True)
        elif field_key == 'field_groups':
            # field groups are passed through untouched
            reformatted[field_key] = field_spec
        elif '?' in field_key:
            _update_with_params(field_key, field_spec, reformatted)
        else:
            _update_no_params(field_key, field_spec, reformatted)
    return reformatted
@registries.Registry.preprocessors('csv-select')
def _preprocess_csv_select(raw_spec: dict, is_refs: bool = False) -> dict:
    """
    Converts any csv_select elements into standard csv specs.
    Each entry under a csv_select field's ``data`` becomes its own csv
    field, all sharing a single config_ref that carries the file config.
    Args:
        raw_spec: to process
        is_refs: is this the refs section of the spec
    Returns:
        converted spec
    Raises:
        SpecException: if a csv_select field has no usable config
    """
    updated_specs = {}
    for key, spec in raw_spec.items():
        if key == 'refs':
            # run preprocessors on refs too; continue so the generic
            # branch below does not overwrite the processed refs with
            # the raw ones (matches _preprocess_spec/_preprocess_nested)
            updated_specs['refs'] = _preprocess_csv_select(spec, True)
            continue
        if 'type' in spec and spec['type'] == 'csv_select':
            # convention is that refs have upper case names
            config_ref_name = _add_config_ref_if_needed(key, is_refs, raw_spec, spec, updated_specs)
            if config_ref_name is None:
                raise SpecException(f'field {key} in csv_select has invalid configuration for csv type data: {spec}')
            for name, column in spec.get('data', {}).items():
                cast = None
                if isinstance(column, dict):
                    column_number = column.get('col')
                    cast = column.get('cast', None)
                else:
                    column_number = column
                if ':' in name:
                    # "<field>:<cast>" shorthand; split only on the first
                    # colon -- maxsplit=2 would yield three parts and raise
                    # ValueError for names containing two colons
                    name, cast = name.split(':', 1)
                spec_for_column = {
                    'type': 'csv',
                    'config': {
                        'column': column_number,
                        'config_ref': config_ref_name
                    }
                }
                if cast:
                    spec_for_column['config']['cast'] = cast  # type: ignore
                if name not in raw_spec:
                    updated_specs[name] = spec_for_column
                else:
                    # avoid clobbering an explicitly defined field
                    alt_name = f'{name}-{column_number}'
                    updated_specs[alt_name] = spec_for_column
        else:
            updated_specs[key] = spec
    return updated_specs
def _add_config_ref_if_needed(key, is_refs, raw_spec, spec, updated_specs):
""" adds in the config ref element to appropriate location if it is required.
If required return name of config ref, if config is empty returns None for name """
config_ref_name = f'{key}_config_ref'
if is_refs:
config_ref_name = config_ref_name.upper()
config = spec.get('config')
if config is None or len(config) == 0:
return None
config_ref = {
'type': 'config_ref',
'config': config
}
if is_refs:
updated_specs[config_ref_name] = config_ref
elif 'refs' not in raw_spec:
updated_specs['refs'] = {config_ref_name: config_ref}
else:
updated_specs['refs'][config_ref_name] = config_ref
return config_ref_name
@registries.Registry.preprocessors('nested')
def _preprocess_nested(raw_spec: dict, is_refs: bool = False) -> dict:
    """
    Converts all nested elements
    Args:
        raw_spec: to process
        is_refs: is this the refs section of the spec
    Returns:
        converted spec
    """
    updated_specs = {}  # type: ignore
    if 'refs' in raw_spec:
        # NOTE(review): updated_specs was created empty just above, so the
        # first branch is never taken, and 'refs' is reprocessed (and this
        # value overwritten) in the loop below -- confirm intent.
        if 'refs' in updated_specs:
            updated_specs['refs'].update(_preprocess_spec(raw_spec['refs'], True))
        else:
            updated_specs['refs'] = _preprocess_spec(raw_spec['refs'], True)
    for key, spec in raw_spec.items():
        if key == 'refs':
            # run preprocessors on refs too
            updated_refs = _preprocess_spec(spec, True)
            updated_refs = _preprocess_csv_select(updated_refs, True)
            # in case we have nested nested elements
            updated_refs = _preprocess_nested(updated_refs, True)
            updated_specs['refs'] = updated_refs
            continue
        if 'type' in spec and spec['type'] == 'nested':
            if 'fields' not in spec:
                raise SpecException('Missing fields key for nested spec: ' + json.dumps(spec))
            fields = spec['fields']
            # run the full preprocessor chain over the inner fields
            updated = _preprocess_spec(fields)
            updated = _preprocess_csv_select(updated)
            # in case we have nested nested elements
            updated = _preprocess_nested(updated)
            # this may have created a refs element, need to move this to the root
            _update_root_refs(updated_specs, updated)
            spec['fields'] = updated
            updated_specs[key] = spec
        else:
            updated_specs[key] = spec
    return updated_specs
@registries.Registry.preprocessors('type_check')
def _preprocess_verify_types(raw_spec: dict, is_refs: bool = False) -> dict:
    """Log-only validation pass: warns about unregistered type names.
    The spec is returned unmodified.
    """
    for field_name, field_spec in raw_spec.items():
        if field_name == 'refs':
            # recurse into the refs section
            _preprocess_verify_types(field_spec)
            continue
        spec_type = field_spec.get('type')
        if registries.lookup_type(spec_type) is None:
            _log.warning('Unknown type key: %s for spec %s, known types are %s',
                         spec_type, field_spec, registries.registered_types())
    return raw_spec
def _update_root_refs(updated_specs, updated):
"""
Updates to root refs if needed by popping the refs from the updated and merging with existing refs or creating
a new refs element
Args:
updated_specs: specs being updated
updated: current updated spec that may have refs injected into it
"""
if 'refs' in updated:
refs = updated.pop('refs')
if 'refs' in updated_specs:
updated_specs.get('refs').update(refs)
else:
updated_specs['refs'] = refs
def _update_with_params(key, spec, updated_specs):
    """
    Handles keys of the form name[:type]?param=value&...
    The query-string portion is stripped from the key and merged into the
    field spec's config object.
    Raises:
        SpecException: if the resulting field name was already defined
    """
    newkey, spectype, params = _parse_key(key)
    if newkey in updated_specs:
        raise SpecException(f'Field {key} defined multiple times: ' + json.dumps(spec))
    # expand shorthand data into a full values spec if necessary
    updated = _convert_to_values_if_needed(spec, spectype)
    merged_config = updated.get('config', {})
    # params from the key take precedence over existing config entries
    merged_config.update(params)
    updated['config'] = merged_config
    if spectype:
        updated['type'] = spectype
    updated_specs[newkey] = updated
def _update_no_params(key, spec, updated_specs):
    """
    Handles the case when there are no ?param=value portions in the key.
    The key may still use name:type shorthand, which is expanded here.
    Args:
        key: field name, possibly in "name:type" form
        spec: the field spec or raw data for the field
        updated_specs: accumulator dict receiving the normalized spec
    Raises:
        SpecException: if the resulting field name was already defined
    """
    if ':' in key:
        # split once: "name:type" -> (name, type); maxsplit=1 tolerates
        # stray extra colons instead of raising ValueError on unpacking
        newkey, spectype = key.split(':', 1)
        if not _is_spec_data(spec, spectype):
            spec['type'] = spectype
        else:
            spec = {
                'type': spectype,
                'data': spec
            }
    else:
        newkey = key
    # check for conflicts against the normalized name (not the raw key) so
    # collisions between "name" and "name:type" are detected -- consistent
    # with _update_with_params
    if newkey in updated_specs:
        raise SpecException(f'Field {key} defined multiple times: ' + json.dumps(spec))
    spectype = spec.get('type') if isinstance(spec, dict) else None
    updated_specs[newkey] = _convert_to_values_if_needed(spec, spectype)
def _convert_to_values_if_needed(spec, spectype):
    """Wrap bare data in a values spec; pass real specs through unchanged."""
    if not _is_spec_data(spec, spectype):
        return spec
    return {
        'type': 'values',
        'data': spec
    }
def _parse_key(field_name):
"""
Expected key to have URL format. Two main forms:
1. field:field_type?param1=val¶m2=val...
2. field?param1=val...
"""
parts = re.split(r'\?', field_name)
key_type = parts[0].split(':')
parsed_query = parse_qs(parts[1])
if len(key_type) > 1:
newkey = key_type[0]
spectype = key_type[1]
else:
newkey = parts[0]
spectype = None
config = {}
for key, value in parsed_query.items():
if len(value) == 1:
config[key.strip()] = value[0]
else:
config[key.strip()] = value
return newkey, spectype, config
def _is_spec_data(spec, spectype):
"""
Checks to see if the spec is data only
Args:
spec: to check
spectype: if any available
Returns:
true if only data, false if it is a spec
"""
if spec == 'nested' or spectype == 'nested':
return False
# if it is not a dictionary, then it is definitely not a spec
if not isinstance(spec, dict):
return True
for core_field in ['type', 'data', 'config', 'ref', 'refs', 'fields']:
if core_field in spec:
return False
# if empty, then may be using abbreviated notation i.e. field:type?param=value...
if len(spec) == 0:
return False
# didn't find any core fields, and spec is not empty, so this must be data
return True
| 33.033223 | 117 | 0.606557 |
def99d557cc72dd596ac9d00ab8e56f1047e22a9 | 47 | py | Python | server/websockets/signals/__init__.py | nking1232/html5-msoy | 6e026f1989b15310ad67c050beb69a168c3bdd5f | [
"MIT"
] | null | null | null | server/websockets/signals/__init__.py | nking1232/html5-msoy | 6e026f1989b15310ad67c050beb69a168c3bdd5f | [
"MIT"
] | null | null | null | server/websockets/signals/__init__.py | nking1232/html5-msoy | 6e026f1989b15310ad67c050beb69a168c3bdd5f | [
"MIT"
] | 2 | 2020-12-18T19:19:38.000Z | 2020-12-18T19:53:56.000Z | from .world_signals import participants_changed | 47 | 47 | 0.914894 |
2748826f9b83a8bb961d8ebfc1ce9613d166eaca | 1,957 | py | Python | hyperlpr_py3/e2e.py | kitten23/HyperLPR | a0061b188324e001ce153b3274e5ff1348d8066d | [
"Apache-2.0"
] | 1,104 | 2016-07-11T11:20:32.000Z | 2022-03-31T08:44:48.000Z | hyperlpr_py3/e2e.py | kitten23/HyperLPR | a0061b188324e001ce153b3274e5ff1348d8066d | [
"Apache-2.0"
] | 48 | 2017-04-12T06:01:07.000Z | 2022-03-15T05:49:45.000Z | hyperlpr_py3/e2e.py | kitten23/HyperLPR | a0061b188324e001ce153b3274e5ff1348d8066d | [
"Apache-2.0"
] | 360 | 2016-07-11T11:20:34.000Z | 2022-03-29T06:31:38.000Z | #coding=utf-8
from keras import backend as K
from keras.models import load_model
from keras.layers import *
import numpy as np
import random
import string
import cv2
from . import e2emodel as model
# Output alphabet of the plate OCR model, in label-index order:
# 31 province/region abbreviations, digits 0-9, letters A-Z without
# I and O, then special plate markers.
chars = (list("京沪津渝冀晋蒙辽吉黑苏浙皖闽赣鲁豫鄂湘粤桂琼川贵云藏陕甘青宁新")
         + list("0123456789")
         + list("ABCDEFGHJKLMNPQRSTUVWXYZ")
         + list("港学使警澳挂军北南广沈兰成济海民航空"))
# Load the pretrained end-to-end OCR network once at import time.
pred_model = model.construct_model("./model/ocr_plate_all_w_rnn_2.h5")
import time
def fastdecode(y_pred):
    """Greedy CTC decode of the model output.
    Collapses repeated labels and drops the blank class (index
    len(chars)), returning the decoded plate string and the mean
    per-character confidence.
    """
    table = y_pred.reshape(-1, len(chars) + 1)
    best = table.argmax(axis=1)
    decoded = ""
    total_conf = 0.0
    for step, label in enumerate(best):
        # keep a label only if it is not blank and differs from the
        # previous time step (standard CTC collapse rule)
        if label < len(chars) and (step == 0 or label != best[step - 1]):
            decoded += chars[label]
            total_conf += table[step][label]
    total_conf /= len(decoded)
    return decoded, total_conf
def recognizeOne(src):
    """Run the end-to-end OCR model on a cropped plate image.
    :param src: plate-region image array (as produced by cv2)
    :returns: (plate_string, mean_confidence) tuple from fastdecode
    """
    # The network expects a 160x40 input, transposed so the width axis
    # becomes the time dimension for the recurrent layers.
    resized = cv2.resize(src, (160, 40))
    resized = resized.transpose(1, 0, 2)
    y_pred = pred_model.predict(np.array([resized]))
    # Drop the first two time steps (presumably CTC warm-up outputs of
    # this architecture -- confirm against the training code).
    y_pred = y_pred[:, 2:, :]
    return fastdecode(y_pred)
#
#
# import os
#
# path = "/Users/yujinke/PycharmProjects/HyperLPR_Python_web/cache/finemapping"
# for filename in os.listdir(path):
# if filename.endswith(".png") or filename.endswith(".jpg") or filename.endswith(".bmp"):
# x = os.path.join(path,filename)
# recognizeOne(x)
# # print time.time() - t0
#
# # cv2.imshow("x",x)
# # cv2.waitKey()
| 30.578125 | 117 | 0.536536 |
28efdeced28e7834b1a33d5a3a00bfd8fe6ae201 | 1,015 | py | Python | awesomeplace/contrib/sites/migrations/0003_set_site_domain_and_name.py | junngo/AwesomePlace | d029fed8e3daa66fb30fc531c1f9c7c62cecf3ac | [
"MIT"
] | null | null | null | awesomeplace/contrib/sites/migrations/0003_set_site_domain_and_name.py | junngo/AwesomePlace | d029fed8e3daa66fb30fc531c1f9c7c62cecf3ac | [
"MIT"
] | null | null | null | awesomeplace/contrib/sites/migrations/0003_set_site_domain_and_name.py | junngo/AwesomePlace | d029fed8e3daa66fb30fc531c1f9c7c62cecf3ac | [
"MIT"
] | null | null | null | """
To understand why this file is here, please read:
http://cookiecutter-django.readthedocs.io/en/latest/faq.html#why-is-there-a-django-contrib-sites-directory-in-cookiecutter-django
"""
from django.conf import settings
from django.db import migrations
def update_site_forward(apps, schema_editor):
    """Set site domain and name."""
    # Use the historical model so this migration stays valid even if the
    # Site model changes later.
    site_model = apps.get_model("sites", "Site")
    site_model.objects.update_or_create(
        id=settings.SITE_ID,
        defaults={"domain": "awesomeplace.com", "name": "awesomeplace"},
    )
def update_site_backward(apps, schema_editor):
    """Revert site domain and name to default."""
    site_model = apps.get_model("sites", "Site")
    site_model.objects.update_or_create(
        id=settings.SITE_ID,
        defaults={"domain": "example.com", "name": "example.com"},
    )
class Migration(migrations.Migration):
    # Runs after sites.0002_alter_domain_unique so the unique-domain
    # constraint exists before update_or_create is called.
    dependencies = [("sites", "0002_alter_domain_unique")]
    operations = [migrations.RunPython(update_site_forward, update_site_backward)]
| 29 | 129 | 0.68867 |
519d77cf0ae64ace52c3b20f552eed57d246221a | 3,502 | py | Python | algo/rnd.py | pcchenxi/skew-explore | fee27419a6168711eae23bc803875e36afe56438 | [
"MIT"
] | 3 | 2020-08-17T20:20:08.000Z | 2021-05-25T07:44:35.000Z | algo/rnd.py | yqj13777866390/skew-explore | fee27419a6168711eae23bc803875e36afe56438 | [
"MIT"
] | null | null | null | algo/rnd.py | yqj13777866390/skew-explore | fee27419a6168711eae23bc803875e36afe56438 | [
"MIT"
] | 1 | 2021-01-10T02:07:27.000Z | 2021-01-10T02:07:27.000Z | """
This code is inspired by the implementation of "random-network-distillation"
from OpenAI (https://github.com/openai/random-network-distillation)
"""
import time
import numpy as np
import tensorflow as tf
import logging
class RandomNetworkDistilation(object):
    """Random Network Distillation (RND) intrinsic-reward module.

    A fixed, randomly initialized target network embeds observations and a
    predictor network is trained to reproduce that embedding; the prediction
    error serves as an exploration signal (see the module header for the
    OpenAI reference implementation).  Built on TensorFlow 1.x graph APIs.
    """
    def __init__(self, input_dim = 2, learning_rate=1e-4):
        self.learning_rate = learning_rate
        self.input_dim = input_dim
        # Width of the embedding produced by both target and predictor nets.
        self.encoded_f_size = 1024
        # Fraction of each batch used for the predictor update (1 = use all).
        self.proportion_of_exp_used_for_predictor_update = 1
        tf.reset_default_graph()
        self.build()
        self.sess = tf.InteractiveSession()
        self.sess.run(tf.global_variables_initializer())
        logging.info('init rnd network with input_dim %f' % (input_dim))
    # Build the network and the loss functions
    def build(self):
        self.obs = tf.placeholder(name='obs', dtype=tf.float32, shape=[None, self.input_dim])
        # Random target network (never trained; gradients stopped below).
        logging.info("MlpTarget: using '%s' shape %s as input" % (self.obs.name, str(self.obs.shape)))
        xr = tf.cast(self.obs, tf.float32)
        xr = tf.layers.dense(inputs=xr, units=32 * 1, activation=tf.nn.leaky_relu)
        xr = tf.layers.dense(inputs=xr, units=32 * 2 * 1, activation=tf.nn.leaky_relu)
        xr = tf.layers.dense(inputs=xr, units=32 * 2 * 1, activation=tf.nn.leaky_relu)
        X_r = tf.layers.dense(inputs=xr, units=self.encoded_f_size, activation=None)
        # Predictor network.
        xrp = tf.cast(self.obs, tf.float32)
        xrp = tf.layers.dense(inputs=xrp, units=32 * 1, activation=tf.nn.leaky_relu)
        xrp = tf.layers.dense(inputs=xrp, units=32 * 2 * 1, activation=tf.nn.leaky_relu)
        xrp = tf.layers.dense(inputs=xrp, units=32 * 2 * 1, activation=tf.nn.leaky_relu)
        X_r_hat = tf.layers.dense(inputs=xrp, units=128, activation=tf.nn.relu)
        X_r_hat = tf.layers.dense(inputs=X_r_hat, units=self.encoded_f_size, activation=None)
        # Per-sample intrinsic reward: mean squared embedding error, with the
        # target held fixed via stop_gradient.
        self.int_rew = tf.reduce_mean(tf.square(tf.stop_gradient(X_r) - X_r_hat), axis=-1, keep_dims=True)
        targets = tf.stop_gradient(X_r)
        # self.aux_loss = tf.reduce_mean(tf.square(noisy_targets-X_r_hat))
        self.aux_loss = tf.reduce_mean(tf.square(targets - X_r_hat), -1)
        # Random per-sample mask keeping roughly `proportion_...` of the batch
        # for the predictor update; the denominator guards against an all-zero mask.
        mask = tf.random_uniform(shape=tf.shape(self.aux_loss), minval=0., maxval=1., dtype=tf.float32)
        mask = tf.cast(mask < self.proportion_of_exp_used_for_predictor_update, tf.float32)
        self.aux_loss = tf.reduce_sum(mask * self.aux_loss) / tf.maximum(tf.reduce_sum(mask), 1.)
        self.train_op = tf.train.AdamOptimizer(learning_rate=self.learning_rate).minimize(self.aux_loss)
    def get_intrinsic_reward(self, x):
        # Evaluate the per-sample prediction error and scale it by a fixed weight.
        reward = self.sess.run(self.int_rew,feed_dict={self.obs: x})
        weight = 1000
        return reward * weight
    def train_single_step(self, x):
        # One optimizer step on a batch of observations; returns the scalar loss.
        _, losses = self.sess.run(
            [self.train_op, self.aux_loss],
            feed_dict={self.obs: x}
        )
        return losses
def rnd_trainer(model, data, num_epoch=15, batch_size=500):
    """Train ``model`` on shuffled mini-batches of ``data``.

    For every epoch the sample order is reshuffled and each mini-batch is fed
    to ``model.train_single_step``; the loss of the final batch of the epoch
    is logged.  ``data`` must support integer-array indexing (e.g. a NumPy array).
    """
    n_samples = len(data)
    order = np.arange(n_samples)
    for epoch in range(num_epoch):
        np.random.shuffle(order)
        for start in range(0, n_samples, batch_size):
            # Slice out one shuffled mini-batch.
            minibatch = data[order[start:start + batch_size]]
            losses = model.train_single_step(minibatch)
        logging.info('epoch: ' + str(epoch) + ' loss: ' + str(losses))
| 41.2 | 106 | 0.658481 |
e8b93707e1b289e015f868c2ad52ecdc69b35687 | 1,452 | py | Python | tests/test_data.py | abandonsea/DeepTreeAttention | 2ac8a3c42948d8efe157c8f38c059473067eed08 | [
"MIT"
] | 1 | 2022-03-29T05:40:56.000Z | 2022-03-29T05:40:56.000Z | tests/test_data.py | abandonsea/DeepTreeAttention | 2ac8a3c42948d8efe157c8f38c059473067eed08 | [
"MIT"
] | null | null | null | tests/test_data.py | abandonsea/DeepTreeAttention | 2ac8a3c42948d8efe157c8f38c059473067eed08 | [
"MIT"
] | null | null | null | #Test data module
from src import data
import pandas as pd
def test_TreeData_setup(config, ROOT):
    #One site's worth of data
    # Force regeneration so setup() rebuilds the processed train/test CSVs
    # from the raw sample file instead of reusing cached outputs.
    config["regenerate"] = True
    csv_file = "{}/tests/data/sample_neon.csv".format(ROOT)
    dm = data.TreeData(config=config, csv_file=csv_file, data_dir="{}/tests/data".format(ROOT), debug=True)
    dm.setup()
    test = pd.read_csv("{}/tests/data/processed/test.csv".format(ROOT))
    train = pd.read_csv("{}/tests/data/processed/train.csv".format(ROOT))
    assert not test.empty
    assert not train.empty
    # Splits must not share any image paths (no train/test leakage).
    assert not any([x in train.image_path.unique() for x in test.image_path.unique()])
    assert all([x in ["image_path","label","site","taxonID","siteID","plotID","individualID","point_id","box_id","RGB_tile"] for x in train.columns])
def test_TreeDataset(dm, config,tmpdir, ROOT):
    #Train loader
    data_loader = data.TreeDataset(csv_file="{}/tests/data/processed/train.csv".format(ROOT), config=config, image_size=config["image_size"])
    individuals, inputs, label = data_loader[0]
    image = inputs["HSI"]
    # The HSI crop comes back as a 3-channel, channels-first square image.
    assert image.shape == (3, config["image_size"], config["image_size"])
    #Test loader
    data_loader = data.TreeDataset(csv_file="{}/tests/data/processed/test.csv".format(ROOT), train=False, config=config)
    annotations = pd.read_csv("{}/tests/data/processed/test.csv".format(ROOT))
    # In evaluation mode the dataset yields one item per annotation row.
    assert len(data_loader) == annotations.shape[0]
| 45.375 | 149 | 0.683196 |
69b5d108dbdb4ef2aca643f3a00e1d19be67c822 | 1,073 | py | Python | numexpr/__init__.py | erdc/numexpr | ed3ca13b58548d68fbc8b9a380990aac1e4897c9 | [
"MIT"
] | null | null | null | numexpr/__init__.py | erdc/numexpr | ed3ca13b58548d68fbc8b9a380990aac1e4897c9 | [
"MIT"
] | null | null | null | numexpr/__init__.py | erdc/numexpr | ed3ca13b58548d68fbc8b9a380990aac1e4897c9 | [
"MIT"
] | null | null | null | """
Numexpr is a fast numerical expression evaluator for NumPy. With it,
expressions that operate on arrays (like "3*a+4*b") are accelerated
and use less memory than doing the same calculation in Python.
See:
http://code.google.com/p/numexpr/
for more info about it.
"""
from __config__ import show as show_config, get_info
# use_vml: True when numexpr was built against Intel MKL, whose Vector Math
# Library (VML) accelerates transcendental functions.
if get_info('mkl'):
    use_vml = True
else:
    use_vml = False
from cpuinfo import cpu
# is_cpu_amd_intel: flag for x86 vendors where the optimized paths apply.
if cpu.is_AMD() or cpu.is_Intel():
    is_cpu_amd_intel = True
else:
    is_cpu_amd_intel = False
import os.path
from numexpr.expressions import E
from numexpr.necompiler import NumExpr, disassemble, evaluate
from numexpr.tests import test, print_versions
from numexpr.utils import (
    get_vml_version, set_vml_accuracy_mode, set_vml_num_threads,
    set_num_threads, detect_number_of_cores)
# Initialize the number of threads to be used
ncores = detect_number_of_cores()
set_num_threads(ncores)
# The default for VML is 1 thread (see #39)
set_vml_num_threads(1)
import version
dirname = os.path.dirname(__file__)
__version__ = version.version
| 22.829787 | 69 | 0.771668 |
dfb4bb67e1211a76d51c9e3c0219868abc9f5a12 | 15,376 | py | Python | pycalib/scoring.py | JonathanWenger/pycalib | 0c023465c5a73d5b90b27e9ff6848a649f45c834 | [
"MIT"
] | 14 | 2019-06-18T17:38:30.000Z | 2022-02-18T09:52:44.000Z | pycalib/scoring.py | JonathanWenger/pycalib | 0c023465c5a73d5b90b27e9ff6848a649f45c834 | [
"MIT"
] | 3 | 2019-10-25T03:26:35.000Z | 2022-03-15T18:34:32.000Z | pycalib/scoring.py | JonathanWenger/pycalib | 0c023465c5a73d5b90b27e9ff6848a649f45c834 | [
"MIT"
] | 8 | 2019-10-12T02:02:46.000Z | 2022-02-03T13:16:32.000Z | """Scoring functions and metrics for classification models."""
import time
import numpy as np
import copy
import matplotlib.pyplot as plt
import scipy.stats
import sklearn.metrics
import sklearn.utils.validation
def accuracy(y, p_pred):
    """
    Computes the accuracy.

    Parameters
    ----------
    y : array-like
        Ground truth labels.
    p_pred : array-like
        Array of confidence estimates.

    Returns
    -------
    accuracy : float
    """
    # The predicted class is the one with the highest confidence.
    predicted_labels = np.argmax(p_pred, axis=1)
    return sklearn.metrics.accuracy_score(y_true=y, y_pred=predicted_labels)
def error(y, p_pred):
    """
    Computes the classification error.

    Parameters
    ----------
    y : array-like
        Ground truth labels.
    p_pred : array-like
        Array of confidence estimates.

    Returns
    -------
    error : float
    """
    # The error rate is simply the complement of the accuracy.
    acc = accuracy(y=y, p_pred=p_pred)
    return 1 - acc
def odds_correctness(y, p_pred):
    """
    Computes the odds of making a correct prediction.

    Parameters
    ----------
    y : array-like
        Ground truth labels.
    p_pred : array-like
        Array of confidence estimates.

    Returns
    -------
    odds : float
    """
    # Odds = P(correct) / P(incorrect) on the given sample.
    acc = accuracy(y=y, p_pred=p_pred)
    err = error(y=y, p_pred=p_pred)
    return acc / err
def expected_calibration_error(y, p_pred, n_bins=100, n_classes=None, p=1):
    """
    Computes the expected calibration error ECE_p.
    Computes the empirical p-expected calibration error for a vector of confidence
    estimates by binning.
    Parameters
    ----------
    y : array-like
        Ground truth labels.
    p_pred : array-like
        Array of confidence estimates.
    n_bins : int, default=100
        Number of bins of :math:`[\\frac{1}{n_{\\text{classes}},1]` for the confidence estimates.
    n_classes : int default=None
        Number of classes. Estimated from `y` and `y_pred` if not given.
    p : int, default=1
        Power of the calibration error, :math:`1 \\leq p \\leq \\infty`.
    Returns
    -------
    float
        Expected calibration error
    """
    # Check input: flatten labels and derive hard predictions from confidences.
    y = sklearn.utils.validation.column_or_1d(y)
    y_pred = np.argmax(p_pred, axis=1)
    y_pred = sklearn.utils.validation.column_or_1d(y_pred)
    if n_classes is None:
        n_classes = np.unique(np.concatenate([y, y_pred])).shape[0]
    # Bin edges span [1/n_classes, 1], the attainable range of the top confidence.
    bin_range = [1 / n_classes, 1]
    bins = np.linspace(bin_range[0], bin_range[1], n_bins + 1)
    # Find prediction confidence (top class probability per sample).
    p_max = np.max(p_pred, axis=1)
    # Empirical accuracy per confidence bin; bins with no samples yield NaN.
    empirical_acc = scipy.stats.binned_statistic(p_max, (y_pred == y).astype(int),
                                                 bins=n_bins, range=bin_range)[0]
    nanindices = np.where(np.logical_not(np.isnan(empirical_acc)))[0]
    # Perfect calibration: accuracy equal to the bin-center confidence.
    calibrated_acc = np.linspace(bin_range[0] + bin_range[1] / (2 * n_bins), bin_range[1] - bin_range[1] / (2 * n_bins),
                                 n_bins)
    # Expected calibration error: bin counts weight the per-bin gaps.
    weights_ece = np.histogram(p_max, bins)[0][nanindices]
    if p < np.inf:
        ece = np.average(abs(empirical_acc[nanindices] - calibrated_acc[nanindices]) ** p,
                         weights=weights_ece)
    elif np.isinf(p):
        # p = infinity: maximum calibration error over the populated bins.
        ece = np.max(abs(empirical_acc[nanindices] - calibrated_acc[nanindices]))
    return ece
def sharpness(y, p_pred, ddof=1):
    """
    Computes the empirical sharpness of a classifier.

    Computes the empirical sharpness of a classifier by computing the sample
    variance of a vector of confidence estimates.

    Parameters
    ----------
    y : array-like
        Ground truth labels. Dummy argument for consistent cross validation.
    p_pred : array-like
        Array of confidence estimates
    ddof : int, optional, default=1
        Degrees of freedom for the variance estimator.

    Returns
    -------
    float
        Sharpness
    """
    # Confidence in the predicted class is the row-wise maximum.
    confidence = np.max(p_pred, axis=1)
    # Rescale so values are comparable across different class counts: the
    # attainable confidence range is [1/n_classes, 1].
    n_classes = np.shape(p_pred)[1]
    return np.var(confidence, ddof=ddof) * 4 * n_classes ** 2 / (n_classes - 1) ** 2
def overconfidence(y, p_pred):
    """
    Computes the overconfidence of a classifier.

    Computes the empirical overconfidence of a classifier on a test sample by
    evaluating the average confidence on the false predictions.

    Parameters
    ----------
    y : array-like
        Ground truth labels
    p_pred : array-like
        Array of confidence estimates

    Returns
    -------
    float
        Overconfidence
    """
    predictions = np.argmax(p_pred, axis=1)
    confidences = np.max(p_pred, axis=1)
    # Mean confidence over the misclassified samples only.
    misclassified = predictions != y
    return np.average(confidences[misclassified])
def underconfidence(y, p_pred):
    """
    Computes the underconfidence of a classifier.

    Computes the empirical underconfidence of a classifier on a test sample by
    evaluating the average uncertainty on the correct predictions.

    Parameters
    ----------
    y : array-like
        Ground truth labels
    p_pred : array-like
        Array of confidence estimates

    Returns
    -------
    float
        Underconfidence
    """
    predictions = np.argmax(p_pred, axis=1)
    confidences = np.max(p_pred, axis=1)
    # Mean residual uncertainty (1 - confidence) over correct predictions.
    correct = predictions == y
    return np.average(1 - confidences[correct])
def ratio_over_underconfidence(y, p_pred):
    """
    Computes the ratio of over- and underconfidence of a classifier.

    Computes the empirical ratio of over- and underconfidence of a classifier
    on a test sample.

    Parameters
    ----------
    y : array-like
        Ground truth labels
    p_pred : array-like
        Array of confidence estimates

    Returns
    -------
    float
        Ratio of over- and underconfidence
    """
    over = overconfidence(y=y, p_pred=p_pred)
    under = underconfidence(y=y, p_pred=p_pred)
    return over / under
def average_confidence(y, p_pred):
    """
    Computes the average confidence in the prediction.

    Parameters
    ----------
    y : array-like
        Ground truth labels. Here a dummy variable for cross validation.
    p_pred : array-like
        Array of confidence estimates.

    Returns
    -------
    avg_conf : float
        Average confidence in prediction.
    """
    # y is unused; it is kept so the signature matches the other scorers.
    confidences = np.max(p_pred, axis=1)
    return np.mean(confidences)
def weighted_abs_conf_difference(y, p_pred):
    """
    Computes the weighted absolute difference between over- and underconfidence.

    Parameters
    ----------
    y : array-like
        Ground truth labels. Here a dummy variable for cross validation.
    p_pred : array-like
        Array of confidence estimates.

    Returns
    -------
    weighted_abs_diff : float
        Accuracy-weighted absolute difference between over- and underconfidence.
    """
    y_pred = np.argmax(p_pred, axis=1)
    # Weight overconfidence by the error rate and underconfidence by accuracy.
    acc = np.average(y == y_pred)
    over = overconfidence(y, p_pred)
    under = underconfidence(y, p_pred)
    return abs((1 - acc) * over - acc * under)
def brier_score(y, p_pred):
    """
    Compute the Brier score.

    The Brier score is the mean squared difference between the predicted
    probability of the positive class and the actual binary outcome; lower is
    better. It always lies in [0, 1] and decomposes into refinement and
    calibration loss.

    Note: this simply interfaces `sklearn.metrics.brier_score_loss` to provide
    a method signature consistent with the other scorers in this module.

    Parameters
    ----------
    y : array-like
        Ground truth labels.
    p_pred : array-like
        Array of confidence estimates (column 1 is the positive class).

    Returns
    -------
    score : float
        Brier score
    """
    # Clip to [0, 1] to guard against small numerical excursions.
    p_pos = np.clip(p_pred[:, 1], a_min=0, a_max=1)
    return sklearn.metrics.brier_score_loss(y, p_pos)
def precision(y, p_pred, **kwargs):
    """
    Computes the precision.

    Parameters
    ----------
    y : array-like
        Ground truth labels.
    p_pred : array-like
        Array of confidence estimates.
    **kwargs
        Passed through to `sklearn.metrics.precision_score`.

    Returns
    -------
    float
    """
    predicted_labels = np.argmax(p_pred, axis=1)
    return sklearn.metrics.precision_score(y_true=y, y_pred=predicted_labels, **kwargs)
def recall(y, p_pred, **kwargs):
    """
    Computes the recall.

    Parameters
    ----------
    y : array-like
        Ground truth labels.
    p_pred : array-like
        Array of confidence estimates.
    **kwargs
        Passed through to `sklearn.metrics.recall_score`.

    Returns
    -------
    float
    """
    predicted_labels = np.argmax(p_pred, axis=1)
    return sklearn.metrics.recall_score(y_true=y, y_pred=predicted_labels, **kwargs)
class MultiScorer:
    """
    Use this class to encapsulate and/or aggregate multiple scoring functions so that it can be passed as an argument
    for scoring in scikit's cross_val_score function. Instances of this class are also callables, with signature as
    needed by `cross_val_score`. Evaluating multiple scoring function in this way versus scikit learns native way in the
    `cross_validate` function avoids the unnecessary overhead of predicting anew for each scorer. This class is slightly
    adapted from Kyriakos Stylianopoulos's implementation [1]_.
    .. [1] https://github.com/StKyr/multiscorer
    """
    def __init__(self, metrics, plots):
        """
        Create a new instance of MultiScorer.
        Parameters
        ----------
        metrics: dict
            The metrics to be used by the scorer.
            The dictionary must have as key a name (str) for the metric and as value a tuple containing the metric
            function itself and a dict literal of the additional named arguments to be passed to the function. The
            metric function should be one of the `sklearn.metrics` function or any other callable with the same
            signature: `metric(y_true, p_pred, **kwargs)`.
        plots: dict
            Plots to be generated for each CV run.
        """
        self.metrics = metrics
        self.plots = plots
        # results maps metric name -> list of per-fold values; a synthetic
        # "cal_time" entry records the prediction latency of each fold.
        self.results = {}
        self._called = False
        self.n_folds = 0
        for metric in metrics.keys():
            self.results[metric] = []
        self.results["cal_time"] = []
    def __call__(self, estimator, X, y):
        """
        To be called by for evaluation from sklearn's GridSearchCV or cross_val_score. Parameters are as they are
        defined in the respective documentation.
        Returns
        -------
        dummy: float
            A dummy value of 0.5 just for compatibility reasons.
        """
        self.n_folds += 1
        # Predict probabilities once; all metrics and plots reuse this result.
        start_time = time.time()
        p_pred = estimator.predict_proba(X)
        cal_time = time.time() - start_time
        # Compute metrics
        for key in self.metrics.keys():
            # Evaluate metric and save
            metric, kwargs = self.metrics[key]
            self.results[key].append(metric(y, p_pred, **kwargs))
        self.results["cal_time"].append(cal_time)
        # Generate plots
        for key in self.plots.keys():
            # Evaluate plots and save
            plot_fun, kwargs = self.plots[key]
            # Plots in CV runs: suffix the filename with the fold number so
            # successive folds do not overwrite each other.
            # TODO: make this safe for no filename argument
            kwargs_copy = copy.deepcopy(kwargs)
            kwargs_copy["filename"] = kwargs.get("filename", "") + "_" + str(self.n_folds)
            plot_fun(y=y, p_pred=p_pred, **kwargs_copy)
            plt.close("all")
        # Set evaluation to true
        self._called = True
        # Return dummy value (cross_val_score requires a numeric score).
        return 0.5
    def get_metric_names(self):
        """
        Get all the metric names as given when initialized.
        Returns
        -------
        metric_names: list
            A list containing the given names (str) of the metrics
        """
        return self.metrics.keys()
    def get_results(self, metric=None, fold='all'):
        """
        Get the results of a specific or all the metrics.
        This method should be called after the object itself has been called so that the metrics are applied.
        Parameters
        ----------
        metric: str or None (default)
            The given name of a metric to return its result(s). If omitted the results of all metrics will be returned.
        fold: int in range [1, number_of_folds] or 'all' (Default)
            Get the metric(s) results for the specific fold.
            The number of folds corresponds to the number of times the instance is called.
            If its value is a number, either the score of a single metric for that fold or a dictionary of the (single)
            scores for that fold will be returned, depending on the value of `metric` parameter. If its value is 'all',
            either a list of a single metric or a dictionary containing the lists of scores for all folds will be
            returned, depending on the value of `metric` parameter.
        Returns
        -------
        metric_result_for_one_fold
            The result of the designated metric function for the specific fold, if `metric` parameter was not omitted
            and an integer value was given to `fold` parameter. If the value of `metric` does not correspond to a
            metric name, `None` will be returned.
        all_metric_results_for_one_fold: dict
            A dict having as keys the names of the metrics and as values their results for the specific fold.
            This will be returned only if `metric` parameter was omitted and an integer value was given to `fold`
            parameter.
        metric_results_for_all_folds: list
            A list of length number_of_folds containing the results of all folds for the specific metric, if `metric`
            parameter was not omitted and value 'all' was given to `fold`. If the value of `metric` does not correspond
            to a metric name, `None` will be returned.
        all_metric_results_for_all_folds: dict of lists
            A dict having as keys the names of the metrics and as values lists (of length number_of_folds) of their
            results for all folds. This will be returned only if `metric` parameter was omitted and 'all' value was
            given to `fold` parameter.
        Raises
        ------
        UserWarning
            If this method is called before the instance is called for evaluation.
        ValueError
            If the value for `fold` parameter is not appropriate.
        """
        # NOTE(review): raising UserWarning (rather than warnings.warn) is
        # unusual but intentional here — results simply do not exist yet.
        if not self._called:
            raise UserWarning('Evaluation has not been performed yet.')
        if isinstance(fold, str) and fold == 'all':
            if metric is None:
                return self.results
            else:
                return self.results[metric]
        elif isinstance(fold, int):
            # Folds are 1-indexed externally; internal lists are 0-indexed.
            if fold not in range(1, self.n_folds + 1):
                raise ValueError('Invalid fold index: ' + str(fold))
            if metric is None:
                res = dict()
                for key in self.results.keys():
                    res[key] = self.results[key][fold - 1]
                return res
            else:
                return self.results[metric][fold - 1]
        else:
            raise ValueError('Unexpected fold value: %s' % (str(fold)))
| 31.252033 | 120 | 0.628382 |
035b12313110325137341581db802ea2b7346fa2 | 6,627 | py | Python | oauth_provider/views.py | Riidr/django-oauth-plus | 013052fdaf1b80a1611c288daa5fecbcdcaffcd4 | [
"BSD-3-Clause"
] | null | null | null | oauth_provider/views.py | Riidr/django-oauth-plus | 013052fdaf1b80a1611c288daa5fecbcdcaffcd4 | [
"BSD-3-Clause"
] | null | null | null | oauth_provider/views.py | Riidr/django-oauth-plus | 013052fdaf1b80a1611c288daa5fecbcdcaffcd4 | [
"BSD-3-Clause"
] | null | null | null | from urllib import urlencode
import oauth2 as oauth
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseRedirect
from django.views.decorators.csrf import csrf_exempt
from django.utils.translation import ugettext as _
from django.core.urlresolvers import get_callable
from decorators import oauth_required
from forms import AuthorizeRequestTokenForm
from store import store, InvalidConsumerError, InvalidTokenError
from utils import verify_oauth_request, get_oauth_request, require_params, send_oauth_error
from consts import OUT_OF_BAND
# Names of optional settings entries pointing at custom authorize/callback views.
OAUTH_AUTHORIZE_VIEW = 'OAUTH_AUTHORIZE_VIEW'
OAUTH_CALLBACK_VIEW = 'OAUTH_CALLBACK_VIEW'
# Pre-built error response reused whenever the OAuth request cannot be parsed.
INVALID_PARAMS_RESPONSE = send_oauth_error(oauth.Error(
    _('Invalid request parameters.')))
@csrf_exempt
def request_token(request):
    """
    OAuth 1.0a temporary-credentials (request token) endpoint.
    Validates the signed consumer request — including the mandatory
    oauth_callback parameter — and returns form-encoded token credentials.
    """
    oauth_request = get_oauth_request(request)
    if oauth_request is None:
        return INVALID_PARAMS_RESPONSE
    # oauth_callback is required by OAuth 1.0a (callback confirmation).
    missing_params = require_params(oauth_request, ('oauth_callback',))
    if missing_params is not None:
        return missing_params
    try:
        consumer = store.get_consumer(request, oauth_request, oauth_request['oauth_consumer_key'])
    except InvalidConsumerError:
        return HttpResponseBadRequest('Invalid Consumer.')
    if not verify_oauth_request(request, oauth_request, consumer):
        return HttpResponseBadRequest('Could not verify OAuth request.')
    try:
        request_token = store.create_request_token(request, oauth_request, consumer, oauth_request['oauth_callback'])
    except oauth.Error, err:
        return send_oauth_error(err)
    ret = urlencode({
        'oauth_token': request_token.key,
        'oauth_token_secret': request_token.secret,
        'oauth_callback_confirmed': 'true'
    })
    return HttpResponse(ret, content_type='application/x-www-form-urlencoded')
@login_required
def user_authorization(request, form_class=AuthorizeRequestTokenForm):
    """
    OAuth 1.0a user-authorization endpoint.
    GET renders the (possibly custom) authorize view for a request token;
    POST records the user's decision and either redirects to the consumer's
    callback or invokes a configured callback view for out-of-band tokens.
    """
    if 'oauth_token' not in request.REQUEST:
        return HttpResponseBadRequest('No request token specified.')
    oauth_request = get_oauth_request(request)
    try:
        request_token = store.get_request_token(request, oauth_request, request.REQUEST['oauth_token'])
    except InvalidTokenError:
        return HttpResponseBadRequest('Invalid request token.')
    consumer = store.get_consumer_for_request_token(request, oauth_request, request_token)
    if request.method == 'POST':
        form = form_class(request.POST)
        # The token stashed in the session on GET must match the one posted
        # back — a simple guard against forged authorization posts.
        if request.session.get('oauth', '') == request_token.key and form.is_valid():
            request.session['oauth'] = ''
            if form.cleaned_data['authorize_access']:
                request_token = store.authorize_request_token(request, oauth_request, request_token)
                args = { 'oauth_token': request_token.key }
            else:
                args = { 'error': _('Access not granted by user.') }
            if request_token.callback is not None and request_token.callback != OUT_OF_BAND:
                response = HttpResponseRedirect('%s&%s' % (request_token.get_callback_url(), urlencode(args)))
            else:
                # try to get custom callback view
                callback_view_str = getattr(settings, OAUTH_CALLBACK_VIEW,
                                            'oauth_provider.views.fake_callback_view')
                try:
                    callback_view = get_callable(callback_view_str)
                except AttributeError:
                    raise Exception, "%s view doesn't exist." % callback_view_str
                response = callback_view(request, **args)
        else:
            response = send_oauth_error(oauth.Error(_('Action not allowed.')))
    else:
        # try to get custom authorize view
        authorize_view_str = getattr(settings, OAUTH_AUTHORIZE_VIEW,
                                     'oauth_provider.views.fake_authorize_view')
        try:
            authorize_view = get_callable(authorize_view_str)
        except AttributeError:
            raise Exception, "%s view doesn't exist." % authorize_view_str
        params = oauth_request.get_normalized_parameters()
        # set the oauth flag so the subsequent POST can be validated
        request.session['oauth'] = request_token.key
        response = authorize_view(request, request_token, request_token.get_callback_url(), params)
    return response
@csrf_exempt
def access_token(request):
    """
    OAuth 1.0a token-credentials (access token) endpoint.
    Verifies the signed request, the oauth_verifier and user approval before
    exchanging the request token for access credentials.
    """
    oauth_request = get_oauth_request(request)
    if oauth_request is None:
        return INVALID_PARAMS_RESPONSE
    missing_params = require_params(oauth_request, ('oauth_token', 'oauth_verifier'))
    if missing_params is not None:
        return missing_params
    try:
        request_token = store.get_request_token(request, oauth_request, oauth_request['oauth_token'])
    except InvalidTokenError:
        return HttpResponseBadRequest('Invalid request token.')
    try:
        consumer = store.get_consumer(request, oauth_request, oauth_request['oauth_consumer_key'])
    except InvalidConsumerError:
        return HttpResponseBadRequest('Invalid consumer.')
    if not verify_oauth_request(request, oauth_request, consumer, request_token):
        return HttpResponseBadRequest('Could not verify OAuth request.')
    # The verifier proves the user completed the authorization step (1.0a).
    if oauth_request.get('oauth_verifier', None) != request_token.verifier:
        return HttpResponseBadRequest('Invalid OAuth verifier.')
    if not request_token.is_approved:
        return HttpResponseBadRequest('Request Token not approved by the user.')
    access_token = store.create_access_token(request, oauth_request, consumer, request_token)
    ret = urlencode({
        'oauth_token': access_token.key,
        'oauth_token_secret': access_token.secret
    })
    return HttpResponse(ret, content_type='application/x-www-form-urlencoded')
# @oauth_required rejects the request unless it carries a valid signed access token.
@oauth_required
def protected_resource_example(request):
    """
    Test view for accessing a Protected Resource.
    """
    return HttpResponse('Protected Resource access!')
@login_required
def fake_authorize_view(request, token, callback, params):
    """
    Fake view for tests. It must return an ``HttpResponse``.
    You need to define your own in ``settings.OAUTH_AUTHORIZE_VIEW``.
    """
    return HttpResponse('Fake authorize view for %s with params: %s.' % (token.consumer.name, params))
def fake_callback_view(request, **args):
    """
    Fake view for tests. It must return an ``HttpResponse``.
    You can define your own in ``settings.OAUTH_CALLBACK_VIEW``.
    """
    return HttpResponse('Fake callback view.')
| 40.408537 | 117 | 0.705146 |
04d7f2976a862223101654a23779f1b67feac378 | 553 | py | Python | processed_dataset/mat2obj.py | xiangruhuang/Learning2Sync | 7d2bda047079547d3bb7c69a1c84bbc6301ab044 | [
"BSD-3-Clause"
] | 43 | 2019-01-22T23:05:47.000Z | 2021-09-21T07:24:22.000Z | processed_dataset/mat2obj.py | xiangruhuang/Learning2Sync | 7d2bda047079547d3bb7c69a1c84bbc6301ab044 | [
"BSD-3-Clause"
] | 1 | 2020-10-20T05:47:40.000Z | 2020-11-12T23:02:05.000Z | processed_dataset/mat2obj.py | xiangruhuang/Learning2Sync | 7d2bda047079547d3bb7c69a1c84bbc6301ab044 | [
"BSD-3-Clause"
] | 6 | 2019-03-02T03:59:46.000Z | 2021-10-07T15:58:08.000Z | import sys
import scipy.io as sio
import glob
## OBJ file
#v -0.3925 -0.8111 2.0260
# The scene to convert is chosen by index (argv[1]) into the 'scenes' list file.
s = int(sys.argv[1])
with open('scenes', 'r') as fin:
    scene_id = fin.readlines()[s].strip()
# Convert every .mat point file of the scene into a matching .obj file.
mats = glob.glob('scannet/%s/*.mat' % scene_id)
for mat_f in mats:
    obj = mat_f.replace('.mat', '.obj')
    mat = sio.loadmat(mat_f)
    #print(mat.keys())
    with open(obj, 'w') as fout:
        fout.write('# OBJ file\n')
        v = mat['vertex']
        # 'vertex' is expected to be a 3 x N array of XYZ coordinates.
        assert v.shape[0] == 3
        for i in range(v.shape[1]):
            fout.write('v %.4f %.4f %.4f\n' % (v[0, i], v[1, i], v[2, i]))
| 21.269231 | 68 | 0.573237 |
3b8b353ae6381ed6dddccd2884abe5c9fd992001 | 443 | py | Python | django_blog/comment/admin.py | csyu12/Django-blog | e1f2ed886ede84c5baa518b7fbdd2900fb52f534 | [
"MIT"
] | null | null | null | django_blog/comment/admin.py | csyu12/Django-blog | e1f2ed886ede84c5baa518b7fbdd2900fb52f534 | [
"MIT"
] | null | null | null | django_blog/comment/admin.py | csyu12/Django-blog | e1f2ed886ede84c5baa518b7fbdd2900fb52f534 | [
"MIT"
] | null | null | null | from django.contrib import admin
from .adminforms import CommentAdminForm
from .models import Comment
from django_blog.custom_site import custom_site
# Registered on the project's custom admin site rather than the default one.
@admin.register(Comment, site=custom_site)
class CommentAdmin(admin.ModelAdmin):
    # Custom form for creating/editing comments in the admin.
    form = CommentAdminForm
    list_display = ('target', 'nickname', 'website',
                    'email', 'status', 'created_time')
    list_display_links = ('nickname', )
    list_filter = ('target', )
| 26.058824 | 54 | 0.713318 |
04b890b08b595d1086de81d19c952e378700013c | 5,858 | py | Python | tests/playground.py | marza-animation-planet/das | 1c7460dfdd5f138d8317c72900e90b23c0c28c7b | [
"MIT"
] | 4 | 2018-11-19T01:36:01.000Z | 2022-02-28T03:41:12.000Z | tests/playground.py | marza-animation-planet/das | 1c7460dfdd5f138d8317c72900e90b23c0c28c7b | [
"MIT"
] | 1 | 2021-12-26T11:57:07.000Z | 2022-03-16T07:18:01.000Z | tests/playground.py | marza-animation-planet/das | 1c7460dfdd5f138d8317c72900e90b23c0c28c7b | [
"MIT"
] | 2 | 2019-03-30T10:28:12.000Z | 2022-03-04T17:58:39.000Z | import os
import re
import sys
import glob
# Make the in-tree das package importable and expose the test<N> schema
# directories through DAS_SCHEMA_PATH (presumably read by das on import —
# confirm against the das package).
thisdir = os.path.abspath(os.path.dirname(__file__))
sys.path.append(os.path.join(thisdir, "..", "python"))
dirs = map(lambda y: thisdir + "/" + y, filter(lambda x: re.match(r"test\d+", x), os.listdir(thisdir)))
os.environ["DAS_SCHEMA_PATH"] = os.pathsep.join(dirs)
import das # pylint: disable=import-error
def print_types():
    """Print the default value (and schema type, if any) for every 'hud' schema type."""
    print("=== Print default for all 'hud' schema types")
    stl = das.list_schema_types("hud")
    for st in stl:
        print("=== %s" % st)
        v = das.make_default(st)
        print(type(v).__name__)
        das.pprint(v)
        if hasattr(v, "_schema_type"):
            print(v._schema_type)
def test_mixin1():
    """Register Range/ClipSource mixins and round-trip a clip through disk."""
    print("=== Mixin tests using timeline.ClipSource schema type ===")
    class Range(das.Mixin):
        @classmethod
        def get_schema_type(klass):
            return "timeline.Range"
        def __init__(self, *args, **kwargs):
            super(Range, self).__init__(*args, **kwargs)
        def expand(self, start, end):
            # Grow the (start, end) pair in place so it covers the given range.
            cs, ce = self[0], self[1]
            if start < cs:
                cs = start
            if end > ce:
                ce = end
            self[0], self[1] = cs, ce
    class ClipSource(das.Mixin):
        @classmethod
        def get_schema_type(klass):
            return "timeline.ClipSource"
        def __init__(self, *args, **kwargs):
            super(ClipSource, self).__init__(*args, **kwargs)
        def set_media(self, path):
            _, ext = map(lambda x: x.lower(), os.path.splitext(path))
            if ext == ".fbx":
                print("Get range from FBX file")
            elif ext == ".abc":
                print("Get range from Alembic file")
            elif ext == ".mov":
                print("Get range from Movie file")
            # Store a normalized absolute path using forward slashes.
            self.media = os.path.abspath(path).replace("\\", "/")
        def set_clip_offsets(self, start, end):
            # Clamp the clip window inside dataRange; None means "full range".
            data_start, data_end = self.dataRange
            clip_start = min(data_end, data_start + max(0, start))
            clip_end = max(data_start, data_end + min(end, 0))
            if clip_start == data_start and clip_end == data_end:
                self.clipRange = None
            else:
                self.clipRange = (clip_start, clip_end)
    das.register_mixins(Range, ClipSource)
    print("-- make def (1)")
    dv = das.make_default("timeline.ClipSource")
    print("-- write (1)")
    das.write(dv, "./out.tl")
    print("-- make def (2)")
    cs = das.make_default("timeline.ClipSource")
    print("-- read (1)")
    cs = das.read("./out.tl")
    das.pprint(cs)
    cs.dataRange = (100, 146)
    cs.dataRange.expand(102, 150)
    cs.set_media("./source.mov")
    cs.set_clip_offsets(1, -1)
    das.pprint(cs)
    print("-- write (2)")
    das.write(cs, "./out.tl")
    c = das.copy(cs)
    das.pprint(c)
    for k, v in c.iteritems():
        print("%s = %s" % (k, v))
    os.remove("./out.tl")
def test_mixin2():
    """Exercise das.mixin.bind: stacking, type conflicts and reset behaviour."""
    class Fn(das.mixin.Mixin):
        @classmethod
        def get_schema_type(klass):
            return "timeline.ClipSource"
        def __init__(self, *args, **kwargs):
            super(Fn, self).__init__()
        def _copy(self):
            print("Fn._copy")
            return self
        def pprint(self):
            das.pprint(self)
    class Fn2(das.mixin.Mixin):
        @classmethod
        def get_schema_type(klass):
            return "timeline.ClipSource"
        def __init__(self, *args, **kwargs):
            super(Fn2, self).__init__()
        def echo(self):
            print("From Fn2 Mixin")
    # Fn3 targets a different schema type and is not expected to bind to
    # ClipSource data below.
    class Fn3(das.mixin.Mixin):
        @classmethod
        def get_schema_type(klass):
            return "timeline.Range"
        def __init__(self, *args, **kwargs):
            super(Fn3, self).__init__()
        def echo(self):
            print("From Fn3 Mixin")
    data = das.make_default("timeline.ClipSource")
    # No mixin bound yet: pprint() is expected to fail here.
    try:
        data.pprint()
    except Exception, e:
        print(str(e))
    das.mixin.bind([Fn, Fn2], data)
    das.mixin.bind(Fn2, data)
    das.mixin.bind(Fn, data)
    # Binding a mixin declared for another schema type should fail.
    try:
        das.mixin.bind(Fn3, data)
    except Exception, e:
        print(str(e))
    data.pprint()
    c = data._copy()
    c = das.copy(data)
    # reset=True replaces the bound mixins, dropping Fn's pprint()/_copy().
    das.mixin.bind(Fn2, c, reset=True)
    c.echo()
    try:
        c.pprint()
    except Exception, e:
        print(str(e))
def name_conflicts():
    # Demonstrate how das resolves collisions between schema field names
    # and dict method names: as used below, the fields 'keys'/'values'
    # shadow the dict methods, which stay reachable with a '_' prefix.
    print("=== Name conflict resolution ===")
    d = das.make_default("conflicts.DictMethod")
    das.pprint(d)
    # Field access vs. the renamed dict methods.
    print("keys = %s" % d.keys)
    print("_keys() -> %s" % d._keys())
    print("values = %s" % d.values)
    print("_values() -> %s" % d._values())
    # 'items' apparently does not collide here -- it is still callable.
    print("items() -> %s" % d.items())
    for k, v in d.items():
        print("%s = %s" % (k, v))
    das.pprint(d)
    # dict.clear is likewise reachable as _clear().
    d._clear()
    das.pprint(d)
def do_shopping():
    # Build a basket with two items and print its total value converted
    # to several currencies via the schema-provided value_in() helper.
    b = das.make_default("shopping.basket")
    b.items.append(das.make("shopping.item", name="carottes", value=110))
    b.items.append(das.make("shopping.item", name="meat", value=320))
    das.pprint(b)
    for c in ["yen", "euro", "dollar"]:
        print("%f %s(s)" % (b.value_in(c), c))
def do_multior():
    # Exercise a multi-type ("or") schema: the fields accept several
    # scalar types (int, float, bool, str) but a list is rejected.
    b = das.make_default("multior.Parameter")
    das.pprint(b)
    b.min = 1
    b.max = 10.0
    b.softMin = False
    b.softMax = "hello"
    try:
        b.min = [0]
    except:
        # Invalid value type: the assignment is expected to fail and is
        # deliberately ignored here.
        pass
    das.pprint(b)
    print(b._get_schema_type())
    print(das.get_schema_type("multior.Value"))
if __name__ == "__main__":
    # Dispatch table: command-line arguments name the demo functions to
    # run; "all" runs every one of them.
    selected = sys.argv[1:]
    funcs = {"print_types": print_types,
             "test_mixin1": test_mixin1,
             "name_conflicts": name_conflicts,
             "test_mixin2": test_mixin2,
             "do_shopping": do_shopping,
             "do_multior": do_multior}
    if not selected:
        print("Please specify function(s) to run (%s or all)" % ", ".join(funcs.keys()))
        sys.exit(0)
    if "all" in selected:
        for fn in funcs.values():
            fn()
    else:
        for name in selected:
            fn = funcs.get(name)
            if fn is None:
                print("Ignore non-existing function '%s'" % name)
            else:
                fn()
| 26.151786 | 103 | 0.573916 |
944f9fee7822bbbfe2a0192af5c60352898be749 | 3,726 | py | Python | tests/test_exceptional.py | tdhopper/exception | d6e857fb2f9ccb225fe6ae572723a74eded6fa8d | [
"MIT"
] | 2 | 2016-06-17T03:51:17.000Z | 2017-02-02T18:36:19.000Z | tests/test_exceptional.py | tdhopper/exceptional | d6e857fb2f9ccb225fe6ae572723a74eded6fa8d | [
"MIT"
] | null | null | null | tests/test_exceptional.py | tdhopper/exceptional | d6e857fb2f9ccb225fe6ae572723a74eded6fa8d | [
"MIT"
] | 1 | 2017-08-12T06:11:43.000Z | 2017-08-12T06:11:43.000Z |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function
import six
import os
from subprocess import check_output
"""
test_exception
----------------------------------
Tests for `exception` module.
"""
import pytest
from exception import exception
@pytest.fixture
def simple_traceback():
    """A minimal ZeroDivisionError traceback as one multi-line string.

    NOTE(review): indentation inside this literal may have been lost in
    extraction -- real tracebacks indent the ``File`` line; verify
    against the original source.
    """
    return """Traceback (most recent call last):
File "<stdin>", line 1, in <module>
ZeroDivisionError: integer division or modulo by zero"""


@pytest.fixture
def simple_traceback_buffer():
    """The same traceback wrapped in a file-like ``StringIO`` buffer."""
    return six.StringIO("""Traceback (most recent call last):
File "<stdin>", line 1, in <module>
ZeroDivisionError: integer division or modulo by zero""")


@pytest.fixture
def mock_filename(mocker):
    """Patch fileinput.filename so extract_errors reports filename "X"."""
    mocker.patch('fileinput.filename', spec=True)
    exception.fileinput.filename.return_value = "X"
def test_trivial(mock_filename, simple_traceback):
    """Check that a simple example is fine"""
    errors = exception.extract_errors(simple_traceback.split("\n"))
    out = [error for filename, error in errors]
    assert len(out) == 1
    # extract_errors apparently yields the traceback with newlines removed.
    assert "".join(simple_traceback.split('\n')) == out[0]
    # Add some trash before and after the traceback; it must still be
    # extracted intact, tagged with the patched filename "X".
    trace2 = "a\n{}\na\n\n\n".format(simple_traceback)
    errors = list(exception.extract_errors(trace2.split("\n")))
    out = [error for filename, error in errors]
    assert len(out) == 1
    assert "".join(simple_traceback.split('\n')) == out[0]
    assert errors[0][0] == "X"


def test_file(mock_filename, simple_traceback, simple_traceback_buffer):
    """Correctly parses an input buffer"""
    errors = exception.extract_errors(simple_traceback_buffer.readlines())
    out = [error for filename, error in errors]
    assert len(out) == 1
    # NOTE(review): "".join(<str>) is a no-op; test_trivial joins
    # simple_traceback.split('\n') instead -- confirm which is intended.
    assert "".join(simple_traceback) == out[0]


def test_multiple_exceptions(mock_filename, simple_traceback):
    """Extract two exceptions from a string"""
    trace1 = simple_traceback
    # A second, distinct traceback (different exception type).
    trace2 = simple_traceback.replace("ZeroDivisionError", "ValueError")
    traceback = "{}\n{}".format(trace1, trace2)
    errors = exception.extract_errors(traceback.split("\n"))
    out = [error for filename, error in errors]
    assert len(out) == 2
    assert "".join(trace1.split('\n')) == out[0]
    assert "".join(trace2.split('\n')) == out[1]


def test_deduplicate(mock_filename, simple_traceback):
    """Duplicate exceptions in sequence are ignored"""
    traceback = "{}\n{}".format(simple_traceback, simple_traceback)
    errors = exception.extract_errors(traceback.split("\n"))
    out = [error for filename, error in errors]
    # Only one copy of the repeated traceback should come back.
    assert len(out) == 1
    assert "".join(simple_traceback.split('\n')) == out[0]
@pytest.mark.skipif('/home/travis/' in os.environ.get('PATH', ''),
                    reason="Can't get path to work on Travis")
def test_cli():
    """Run the installed ``exception`` CLI against a fixture log file.

    NOTE(review): ``check_output`` returns bytes on Python 3, so the
    final ``result == true`` comparison against a str is always False
    there; this only works on Python 2 unless the output is decoded.
    NOTE(review): indentation inside the expected-output literal may
    have been lost in extraction -- verify against the fixture log.
    """
    print(os.environ)
    root = os.path.dirname(__file__)
    result = check_output(["exception", "-f", "{}/fixtures/real_log.log".format(root)])
    true = """### {}/fixtures/real_log.log ###
Traceback (most recent call last):
File "local/lib/python2.7/site-packages/pykafka/cluster.py", line 242, in _request_metadata
ssl_config=self._ssl_config)
File "local/lib/python2.7/site-packages/pykafka/broker.py", line 97, in __init__
self.connect()
File "local/lib/python2.7/site-packages/pykafka/broker.py", line 211, in connect
self._connection.connect(self._socket_timeout_ms)
File "local/lib/python2.7/site-packages/pykafka/connection.py", line 169, in connect
(self.source_host, self.source_port)
File "/usr/lib/python2.7/socket.py", line 571, in create_connection
raise err
error: [Errno 111] Connection refused
----------------------------------------
""".format(root)
    assert result == true
| 32.12069 | 93 | 0.684648 |
4215f026f36cb6dea964f8bc0417ac5449e5e258 | 4,165 | py | Python | engine/songslover.py | Iamdavidonuh/getmusic | 28ad107ff6c3539687d677533696d7bacadeabb9 | [
"MIT"
] | null | null | null | engine/songslover.py | Iamdavidonuh/getmusic | 28ad107ff6c3539687d677533696d7bacadeabb9 | [
"MIT"
] | null | null | null | engine/songslover.py | Iamdavidonuh/getmusic | 28ad107ff6c3539687d677533696d7bacadeabb9 | [
"MIT"
] | null | null | null | import re
from engine.root import BaseEngine
# from lxml import etree
"""Figure out a way to combine both fetch and search"""
"""Also find a way to differentiate between album and track children when using Search not Fetch"""
class Songslover(BaseEngine):
engine_name = "songslover"
page_path = "page"
tracks_category = "category"
def __init__(self):
super().__init__()
self.site_uri = "https://songslover.vip/"
self.request_method = self.GET
def search(self, query=None, page=None, category=None, **kwargs):
soup = self.get_soup(url=self.get_formated_url(category="albums", page=2))
response = self.parse_parent_object(soup)
return response
"""Implement to """
def get_url_path(self, page=None, category=None):
if page <= 0 or page is None:
page = 1
if page >= 251:
page = 250
return (
(category, self.page_path, str(page))
if category == self.ALBUM
else (self.tracks_category, category, self.page_path, str(page))
)
def parse_parent_object(self, soup, **kwargs):
return list(
self.parse_single_object(self.get_soup(elem["href"]))
for elem in soup.select("article h2 a")
)
def parse_single_object(self, soup, category="album", **kwargs):
try:
artist, title = soup.select(
'div[class="post-inner"] h1 span[itemprop="name"]'
)[0].text.split(" –")
artist, title = artist.strip(), title.strip()
except Exception:
artist = title = soup.select(
'div[class="post-inner"] h1 span[itemprop="name"]'
)[0].text
try:
art_link = soup.select('div[class="entry"] img[src]')[0]["src"]
except Exception:
art_link = None
if category == self.TRACK:
regex_group = [
soup.find(text=re.compile(".*(Save).*(Link)$")),
soup.find(text=re.compile(".*(Save).*(Link).*(Server){1}.*(2){1}$")),
soup.find(text=re.compile(".*(Download)$")),
soup.find(text=re.compile(".*(Download).*(This){1}.*(Track){1}$")),
soup.find(text=re.compile(".*(Save).*(File)$")),
]
valid_group = list(i for i in regex_group if i != None)
if len(valid_group) >= 1:
download_link = valid_group[0].find_previous("a")["href"]
download_link = None
return download_link
try:
download_link = soup.find(
text=re.compile(".*(All).*(in).*(One).*(Server).*(2).*")
).find_previous("a")["href"]
except Exception:
download_link = None
response_group = [
soup.select("li strong a"),
soup.select("p span strong a"),
soup.select('tr td div[class="wpmd"] a'),
soup.select('span[style="color: #99cc00;"] a'),
soup.select('span[style="color: #ff99cc;"] a'),
]
valid_group = list(i for i in response_group if i != [])
if len(valid_group) <= 0:
return None
response_elements = valid_group[0]
for element in response_elements:
try:
song_title = element.text
if song_title is None:
continue
song_link = element["href"]
keywords = [
"Server",
"Youtube",
"Apple Store",
"Apple Music",
"ITunes",
"Amazon Music",
"Amazon Store",
"Buy Album",
"Download Album",
]
keyword = [i for i in keywords if i in song_title]
if any(keyword):
continue
elif song_title.startswith("Download"):
song_title = song_title[8:]
except Exception:
pass
return dict(download_link=download_link, art_link=art_link)
| 36.535088 | 99 | 0.515246 |
2e2e11d5ffd1c011b5dce25c38929e0e737ce843 | 36,854 | py | Python | openstack_dashboard/api/nova.py | HoonMinJeongUm/Hoonmin-Horizon | 5c9569ff9a60e81e4acc6f5216ee239653152ea4 | [
"Apache-2.0"
] | null | null | null | openstack_dashboard/api/nova.py | HoonMinJeongUm/Hoonmin-Horizon | 5c9569ff9a60e81e4acc6f5216ee239653152ea4 | [
"Apache-2.0"
] | 3 | 2021-01-21T14:27:55.000Z | 2021-06-10T23:08:49.000Z | openstack_dashboard/api/nova.py | Surfndez/horizon | a56765b6b3dbc09fd467b83a57bea2433ae3909e | [
"Apache-2.0"
] | null | null | null | # Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 OpenStack Foundation
# Copyright 2012 Nebula, Inc.
# Copyright (c) 2012 X.commerce, a business unit of eBay Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
import collections
import logging
from operator import attrgetter
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from novaclient import api_versions
from novaclient import client as nova_client
from novaclient import exceptions as nova_exceptions
from novaclient.v2 import instance_action as nova_instance_action
from novaclient.v2 import list_extensions as nova_list_extensions
from novaclient.v2 import servers as nova_servers
from horizon import exceptions as horizon_exceptions
from horizon.utils import functions as utils
from horizon.utils.memoized import memoized
from horizon.utils.memoized import memoized_with_request
from openstack_dashboard.api import base
from openstack_dashboard.api import microversions
from openstack_dashboard.contrib.developer.profiler import api as profiler
LOG = logging.getLogger(__name__)
# Supported compute versions
VERSIONS = base.APIVersionManager("compute", preferred_version=2)
VERSIONS.load_supported_version(1.1, {"client": nova_client, "version": 1.1})
VERSIONS.load_supported_version(2, {"client": nova_client, "version": 2})
# API static values
INSTANCE_ACTIVE_STATE = 'ACTIVE'
VOLUME_STATE_AVAILABLE = "available"
DEFAULT_QUOTA_NAME = 'default'
INSECURE = getattr(settings, 'OPENSTACK_SSL_NO_VERIFY', False)
CACERT = getattr(settings, 'OPENSTACK_SSL_CACERT', None)
@memoized
def get_microversion(request, features):
    """Return a nova API microversion supporting ``features``, or None.

    The requested features are matched against the server's supported
    microversion range; results are memoized per request.
    """
    client = novaclient(request)
    min_ver, max_ver = api_versions._get_server_version_range(client)
    return (microversions.get_microversion_for_features(
        'nova', features, api_versions.APIVersion, min_ver, max_ver))


def is_feature_available(request, features):
    """True when some deployed microversion supports ``features``."""
    return bool(get_microversion(request, features))
class VolumeMultiattachNotSupported(horizon_exceptions.HorizonException):
    """Raised (as HTTP 400) when attaching a multiattach-capable volume
    but no nova microversion supporting multiattach is available."""
    status_code = 400
class VNCConsole(base.APIDictWrapper):
    """Wrapper for the "console" dictionary.

    Returned by the novaclient.servers.get_vnc_console method.
    """
    # Exposed keys: connection URL and console type.
    _attrs = ['url', 'type']


class SPICEConsole(base.APIDictWrapper):
    """Wrapper for the "console" dictionary.

    Returned by the novaclient.servers.get_spice_console method.
    """
    _attrs = ['url', 'type']


class RDPConsole(base.APIDictWrapper):
    """Wrapper for the "console" dictionary.

    Returned by the novaclient.servers.get_rdp_console method.
    """
    _attrs = ['url', 'type']


class SerialConsole(base.APIDictWrapper):
    """Wrapper for the "console" dictionary.

    Returned by the novaclient.servers.get_serial_console method.
    """
    _attrs = ['url', 'type']


class MKSConsole(base.APIDictWrapper):
    """Wrapper for the "console" dictionary.

    Returned by the novaclient.servers.get_mks_console method.
    """
    _attrs = ['url', 'type']
class Server(base.APIResourceWrapper):
    """Simple wrapper around novaclient.server.Server.

    Preserves the request info so image name can later be retrieved.
    """
    _attrs = ['addresses', 'attrs', 'id', 'image', 'links', 'description',
              'metadata', 'name', 'private_ip', 'public_ip', 'status', 'uuid',
              'image_name', 'VirtualInterfaces', 'flavor', 'key_name', 'fault',
              'tenant_id', 'user_id', 'created', 'locked',
              'OS-EXT-STS:power_state', 'OS-EXT-STS:task_state',
              'OS-EXT-SRV-ATTR:instance_name', 'OS-EXT-SRV-ATTR:host',
              'OS-EXT-AZ:availability_zone', 'OS-DCF:diskConfig']

    def __init__(self, apiresource, request):
        super(Server, self).__init__(apiresource)
        # Keep the originating request so properties can call other APIs.
        self.request = request

    # TODO(gabriel): deprecate making a call to Glance as a fallback.
    @property
    def image_name(self):
        """Image name from the image dict, with a Glance lookup fallback.

        Returns None for volume-backed servers (no image) or when the
        image cannot be resolved in Glance.
        """
        import glanceclient.exc as glance_exceptions
        from openstack_dashboard.api import glance
        if not self.image:
            return None
        elif hasattr(self.image, 'name'):
            return self.image.name
        elif 'name' in self.image:
            return self.image['name']
        else:
            try:
                image = glance.image_get(self.request, self.image['id'])
                # Cache the resolved name for subsequent access.
                self.image['name'] = image.name
                return image.name
            except (glance_exceptions.ClientException,
                    horizon_exceptions.ServiceCatalogException):
                self.image['name'] = None
                return None

    @property
    def internal_name(self):
        # Hypervisor-side instance name, e.g. "instance-0000002a".
        return getattr(self, 'OS-EXT-SRV-ATTR:instance_name', "")

    @property
    def availability_zone(self):
        return getattr(self, 'OS-EXT-AZ:availability_zone', "")

    @property
    def host_server(self):
        # Compute host this instance runs on (admin-only attribute).
        return getattr(self, 'OS-EXT-SRV-ATTR:host', '')
class Hypervisor(base.APIDictWrapper):
    """Simple wrapper around novaclient.hypervisors.Hypervisor."""
    _attrs = ['manager', '_loaded', '_info', 'hypervisor_hostname', 'id',
              'servers']

    @property
    def servers(self):
        """Instances hosted on this hypervisor, or [] when not loaded.

        The wrapped object only carries a ``servers`` attribute when the
        API call asked for them, so treat its absence as "no servers".
        """
        # Only an absent attribute is expected here; narrowed from a
        # bare ``except Exception`` so real errors are not swallowed.
        try:
            return self._apidict.servers
        except AttributeError:
            return []
class NovaUsage(base.APIResourceWrapper):
    """Simple wrapper around contrib/simple_usage.py."""
    _attrs = ['start', 'server_usages', 'stop', 'tenant_id',
              'total_local_gb_usage', 'total_memory_mb_usage',
              'total_vcpus_usage', 'total_hours']

    @property
    def _active_usages(self):
        """Server-usage records for instances that are still running."""
        return [u for u in self.server_usages if u['ended_at'] is None]

    def get_summary(self):
        """Summarize active resources and cumulative usage hours."""
        return {
            'instances': self.total_active_instances,
            'memory_mb': self.memory_mb,
            'vcpus': self.vcpus,
            'vcpu_hours': self.vcpu_hours,
            'local_gb': self.local_gb,
            'disk_gb_hours': self.disk_gb_hours,
            'memory_mb_hours': self.memory_mb_hours,
        }

    @property
    def total_active_instances(self):
        return len(self._active_usages)

    @property
    def vcpus(self):
        return sum(u['vcpus'] for u in self._active_usages)

    @property
    def vcpu_hours(self):
        return getattr(self, "total_vcpus_usage", 0)

    @property
    def local_gb(self):
        return sum(u['local_gb'] for u in self._active_usages)

    @property
    def memory_mb(self):
        return sum(u['memory_mb'] for u in self._active_usages)

    @property
    def disk_gb_hours(self):
        return getattr(self, "total_local_gb_usage", 0)

    @property
    def memory_mb_hours(self):
        return getattr(self, "total_memory_mb_usage", 0)
class FlavorExtraSpec(object):
    """A single extra-spec (key/value) entry attached to a flavor."""

    def __init__(self, flavor_id, key, val):
        self.flavor_id = flavor_id
        self.key = key
        # Horizon tables address rows by ``id``; reuse the spec key.
        self.id = key
        self.value = val
class QuotaSet(base.QuotaSet):
    # We don't support nova-network, so we exclude nova-network related
    # quota fields from the response.
    ignore_quotas = {
        "floating_ips",
        "fixed_ips",
        "security_groups",
        "security_group_rules",
    }
def get_auth_params_from_request(request):
    """Extracts properties needed by novaclient call from the request object.

    These will be used to memoize the calls to novaclient.
    """
    user = request.user
    return (
        user.username,
        user.token.id,
        user.tenant_id,
        user.token.project.get('domain_id'),
        base.url_for(request, 'compute'),
        base.url_for(request, 'identity'),
    )
@memoized_with_request(get_auth_params_from_request)
def novaclient(request_auth_params, version=None):
    """Build (and memoize per credentials) a nova API client."""
    (
        username,
        token_id,
        project_id,
        project_domain_id,
        nova_url,
        auth_url
    ) = request_auth_params
    if version is None:
        # Fall back to the dashboard's configured compute API version.
        version = VERSIONS.get_active_version()['version']
    c = nova_client.Client(version,
                           username,
                           token_id,
                           project_id=project_id,
                           project_domain_id=project_domain_id,
                           auth_url=auth_url,
                           insecure=INSECURE,
                           cacert=CACERT,
                           http_log_debug=settings.DEBUG,
                           auth_token=token_id,
                           endpoint_override=nova_url)
    return c


def upgrade_api(request, client, version):
    """Upgrade the nova API to the specified version if possible."""
    min_ver, max_ver = api_versions._get_server_version_range(client)
    # Only rebuild the client when the server actually supports `version`.
    if min_ver <= api_versions.APIVersion(version) <= max_ver:
        client = novaclient(request, version)
    return client
@profiler.trace
def server_vnc_console(request, instance_id, console_type='novnc'):
    """Get a VNC console connection for an instance."""
    return VNCConsole(novaclient(request).servers.get_vnc_console(
        instance_id, console_type)['console'])


@profiler.trace
def server_spice_console(request, instance_id, console_type='spice-html5'):
    """Get a SPICE console connection for an instance."""
    return SPICEConsole(novaclient(request).servers.get_spice_console(
        instance_id, console_type)['console'])


@profiler.trace
def server_rdp_console(request, instance_id, console_type='rdp-html5'):
    """Get an RDP console connection for an instance."""
    return RDPConsole(novaclient(request).servers.get_rdp_console(
        instance_id, console_type)['console'])


@profiler.trace
def server_serial_console(request, instance_id, console_type='serial'):
    """Get a serial console connection for an instance."""
    return SerialConsole(novaclient(request).servers.get_serial_console(
        instance_id, console_type)['console'])


@profiler.trace
def server_mks_console(request, instance_id, console_type='mks'):
    """Get an MKS console; needs a remote_console_mks microversion."""
    microver = get_microversion(request, "remote_console_mks")
    return MKSConsole(novaclient(request, microver).servers.get_mks_console(
        instance_id, console_type)['remote_console'])
@profiler.trace
def flavor_create(request, name, memory, vcpu, disk, flavorid='auto',
                  ephemeral=0, swap=0, metadata=None, is_public=True,
                  rxtx_factor=1):
    """Create a flavor, optionally attaching extra-spec metadata."""
    flavor = novaclient(request).flavors.create(name, memory, vcpu, disk,
                                                flavorid=flavorid,
                                                ephemeral=ephemeral,
                                                swap=swap, is_public=is_public,
                                                rxtx_factor=rxtx_factor)
    if (metadata):
        flavor_extra_set(request, flavor.id, metadata)
    return flavor


@profiler.trace
def flavor_delete(request, flavor_id):
    """Delete the flavor with the given id."""
    novaclient(request).flavors.delete(flavor_id)


@profiler.trace
def flavor_get(request, flavor_id, get_extras=False):
    """Fetch one flavor; optionally attach its extra specs."""
    flavor = novaclient(request).flavors.get(flavor_id)
    if get_extras:
        flavor.extras = flavor_get_extras(request, flavor.id, True, flavor)
    return flavor


@profiler.trace
@memoized
def flavor_list(request, is_public=True, get_extras=False):
    """Get the list of available instance sizes (flavors)."""
    flavors = novaclient(request).flavors.list(is_public=is_public)
    if get_extras:
        for flavor in flavors:
            flavor.extras = flavor_get_extras(request, flavor.id, True, flavor)
    return flavors
@profiler.trace
def update_pagination(entities, page_size, marker, sort_dir, sort_key,
                      reversed_order):
    """Trim a page-plus-sentinel listing and compute paging flags.

    ``entities`` was requested with ``limit=page_size + 1``; an extra
    row signals that a further page exists.  When ``reversed_order`` is
    set the listing was fetched backwards (prev navigation) and is
    re-sorted here into the caller's original ordering.

    Returns ``(entities, has_more_data, has_prev_data)``.
    """
    has_more_data = False
    has_prev_data = False
    if len(entities) > page_size:
        # A sentinel row came back: drop it, there is a next page.
        entities.pop()
        has_more_data = True
        has_prev_data = marker is not None
    elif reversed_order and marker is not None:
        # First page reached by paging backwards.
        has_more_data = True
    elif marker is not None:
        # Last page.
        has_prev_data = True

    if reversed_order:
        # Restore the original ordering here.
        def _sort_value(entity):
            return (getattr(entity, sort_key) or '').lower()

        entities = sorted(entities, key=_sort_value,
                          reverse=(sort_dir == 'asc'))
    return entities, has_more_data, has_prev_data
@profiler.trace
@memoized
def flavor_list_paged(request, is_public=True, get_extras=False, marker=None,
                      paginate=False, sort_key="name", sort_dir="desc",
                      reversed_order=False):
    """Get the list of available instance sizes (flavors).

    With ``paginate`` the flavors are fetched one page (plus a sentinel
    row) at a time; ``reversed_order`` marks a backwards "prev" step,
    handled by flipping the sort direction and letting
    update_pagination() restore the original ordering.
    """
    has_more_data = False
    has_prev_data = False
    if paginate:
        if reversed_order:
            sort_dir = 'desc' if sort_dir == 'asc' else 'asc'
        page_size = utils.get_page_size(request)
        # Request one extra row to detect whether a next page exists.
        flavors = novaclient(request).flavors.list(is_public=is_public,
                                                   marker=marker,
                                                   limit=page_size + 1,
                                                   sort_key=sort_key,
                                                   sort_dir=sort_dir)
        flavors, has_more_data, has_prev_data = update_pagination(
            flavors, page_size, marker, sort_dir, sort_key, reversed_order)
    else:
        flavors = novaclient(request).flavors.list(is_public=is_public)

    if get_extras:
        for flavor in flavors:
            flavor.extras = flavor_get_extras(request, flavor.id, True, flavor)

    return (flavors, has_more_data, has_prev_data)
@profiler.trace
@memoized
def flavor_access_list(request, flavor=None):
    """Get the list of access instance sizes (flavors)."""
    return novaclient(request).flavor_access.list(flavor=flavor)


@profiler.trace
def add_tenant_to_flavor(request, flavor, tenant):
    """Add a tenant to the given flavor access list."""
    return novaclient(request).flavor_access.add_tenant_access(
        flavor=flavor, tenant=tenant)


@profiler.trace
def remove_tenant_from_flavor(request, flavor, tenant):
    """Remove a tenant from the given flavor access list."""
    return novaclient(request).flavor_access.remove_tenant_access(
        flavor=flavor, tenant=tenant)


@profiler.trace
def flavor_get_extras(request, flavor_id, raw=False, flavor=None):
    """Get flavor extra specs.

    With ``raw`` return the plain key/value dict from the API;
    otherwise wrap each pair in a FlavorExtraSpec.
    """
    if flavor is None:
        flavor = novaclient(request).flavors.get(flavor_id)
    extras = flavor.get_keys()
    if raw:
        return extras
    return [FlavorExtraSpec(flavor_id, key, value) for
            key, value in extras.items()]


@profiler.trace
def flavor_extra_delete(request, flavor_id, keys):
    """Unset the flavor extra spec keys."""
    flavor = novaclient(request).flavors.get(flavor_id)
    return flavor.unset_keys(keys)


@profiler.trace
def flavor_extra_set(request, flavor_id, metadata):
    """Set the flavor extra spec keys."""
    flavor = novaclient(request).flavors.get(flavor_id)
    if (not metadata):  # not a way to delete keys
        return None
    return flavor.set_keys(metadata)
@profiler.trace
def snapshot_create(request, instance_id, name):
    """Create a snapshot image from a server."""
    return novaclient(request).servers.create_image(instance_id, name)


@profiler.trace
def keypair_create(request, name):
    """Create a new keypair (the response carries the private key)."""
    return novaclient(request).keypairs.create(name)


@profiler.trace
def keypair_import(request, name, public_key):
    """Import an existing public key as a named keypair."""
    return novaclient(request).keypairs.create(name, public_key)


@profiler.trace
def keypair_delete(request, name):
    """Delete the named keypair."""
    novaclient(request).keypairs.delete(name)


@profiler.trace
def keypair_list(request):
    """List the current user's keypairs."""
    return novaclient(request).keypairs.list()


@profiler.trace
def keypair_get(request, name):
    """Fetch a single keypair by name."""
    return novaclient(request).keypairs.get(name)
@profiler.trace
def server_create(request, name, image, flavor, key_name, user_data,
                  security_groups, block_device_mapping=None,
                  block_device_mapping_v2=None, nics=None,
                  availability_zone=None, instance_count=1, admin_pass=None,
                  disk_config=None, config_drive=None, meta=None,
                  scheduler_hints=None, description=None):
    """Boot server(s) and return the created server wrapped in Server."""
    # Prefer a microversion supporting instance descriptions and
    # auto-allocated networks when the deployment offers one.
    microversion = get_microversion(request, ("instance_description",
                                              "auto_allocated_network"))
    nova_client = novaclient(request, version=microversion)

    # NOTE(amotoki): Handling auto allocated network
    # Nova API 2.37 or later, it accepts a special string 'auto' for nics
    # which means nova uses a network that is available for a current project
    # if one exists and otherwise it creates a network automatically.
    # This special handling is processed here as JS side assumes 'nics'
    # is a list and it is easiest to handle it here.
    if nics:
        is_auto_allocate = any(nic.get('net-id') == '__auto_allocate__'
                               for nic in nics)
        if is_auto_allocate:
            nics = 'auto'

    kwargs = {}
    if description is not None:
        kwargs['description'] = description

    return Server(nova_client.servers.create(
        name.strip(), image, flavor, userdata=user_data,
        security_groups=security_groups,
        key_name=key_name, block_device_mapping=block_device_mapping,
        block_device_mapping_v2=block_device_mapping_v2,
        nics=nics, availability_zone=availability_zone,
        min_count=instance_count, admin_pass=admin_pass,
        disk_config=disk_config, config_drive=config_drive,
        meta=meta, scheduler_hints=scheduler_hints, **kwargs), request)
@profiler.trace
def server_delete(request, instance_id):
    """Delete the given server."""
    novaclient(request).servers.delete(instance_id)


def get_novaclient_with_locked_status(request):
    """Return a client whose microversion exposes the 'locked' field."""
    microversion = get_microversion(request, "locked_attribute")
    return novaclient(request, version=microversion)


def get_novaclient_with_instance_desc(request):
    """Return a client whose microversion supports instance descriptions."""
    microversion = get_microversion(request, "instance_description")
    return novaclient(request, version=microversion)


@profiler.trace
def server_get(request, instance_id):
    """Fetch one server, wrapped in :class:`Server`."""
    return Server(get_novaclient_with_instance_desc(request).servers.get(
        instance_id), request)
@profiler.trace
def server_list(request, search_opts=None, detailed=True):
    """List servers visible to the request.

    Pagination is driven by ``search_opts['paginate']``; returns the
    tuple ``(servers, has_more_data)``.
    """
    nova_client = get_novaclient_with_locked_status(request)
    page_size = utils.get_page_size(request)
    paginate = False
    if search_opts is None:
        search_opts = {}
    elif 'paginate' in search_opts:
        paginate = search_opts.pop('paginate')
        if paginate:
            # Ask for one extra row as a "next page exists" sentinel.
            search_opts['limit'] = page_size + 1

    all_tenants = search_opts.get('all_tenants', False)
    if all_tenants:
        search_opts['all_tenants'] = True
    else:
        # Scope to the current project unless an admin asked for all.
        search_opts['project_id'] = request.user.tenant_id
    servers = [Server(s, request)
               for s in nova_client.servers.list(detailed, search_opts)]

    has_more_data = False
    if paginate and len(servers) > page_size:
        servers.pop(-1)
        has_more_data = True
    elif paginate and len(servers) == getattr(settings, 'API_RESULT_LIMIT',
                                              1000):
        has_more_data = True
    return (servers, has_more_data)
@profiler.trace
def server_console_output(request, instance_id, tail_length=None):
    """Gets console output of an instance."""
    return novaclient(request).servers.get_console_output(instance_id,
                                                          length=tail_length)


@profiler.trace
def server_pause(request, instance_id):
    """Pause a running instance."""
    novaclient(request).servers.pause(instance_id)


@profiler.trace
def server_unpause(request, instance_id):
    """Unpause a paused instance."""
    novaclient(request).servers.unpause(instance_id)


@profiler.trace
def server_suspend(request, instance_id):
    """Suspend an instance to disk."""
    novaclient(request).servers.suspend(instance_id)


@profiler.trace
def server_resume(request, instance_id):
    """Resume a suspended instance."""
    novaclient(request).servers.resume(instance_id)


@profiler.trace
def server_shelve(request, instance_id):
    """Shelve an instance, freeing its hypervisor resources."""
    novaclient(request).servers.shelve(instance_id)


@profiler.trace
def server_unshelve(request, instance_id):
    """Unshelve a shelved instance."""
    novaclient(request).servers.unshelve(instance_id)


@profiler.trace
def server_reboot(request, instance_id, soft_reboot=False):
    """Reboot an instance; hard reboot unless soft_reboot is requested."""
    hardness = nova_servers.REBOOT_HARD
    if soft_reboot:
        hardness = nova_servers.REBOOT_SOFT
    novaclient(request).servers.reboot(instance_id, hardness)


@profiler.trace
def server_rebuild(request, instance_id, image_id, password=None,
                   disk_config=None, description=None):
    """Rebuild an instance from an image, optionally with a description."""
    kwargs = {}
    if description:
        kwargs['description'] = description
    return get_novaclient_with_instance_desc(request).servers.rebuild(
        instance_id, image_id, password, disk_config, **kwargs)


@profiler.trace
def server_update(request, instance_id, name, description=None):
    """Update an instance's display name and description."""
    return get_novaclient_with_instance_desc(request).servers.update(
        instance_id, name=name.strip(), description=description)


@profiler.trace
def server_migrate(request, instance_id):
    """Cold-migrate an instance to another host."""
    novaclient(request).servers.migrate(instance_id)


@profiler.trace
def server_live_migrate(request, instance_id, host, block_migration=False,
                        disk_over_commit=False):
    """Live-migrate an instance to ``host``."""
    novaclient(request).servers.live_migrate(instance_id, host,
                                             block_migration,
                                             disk_over_commit)


@profiler.trace
def server_resize(request, instance_id, flavor, disk_config=None, **kwargs):
    """Start resizing an instance to a new flavor."""
    novaclient(request).servers.resize(instance_id, flavor,
                                       disk_config, **kwargs)


@profiler.trace
def server_confirm_resize(request, instance_id):
    """Confirm (finalize) a pending resize."""
    novaclient(request).servers.confirm_resize(instance_id)


@profiler.trace
def server_revert_resize(request, instance_id):
    """Revert a pending resize back to the original flavor."""
    novaclient(request).servers.revert_resize(instance_id)


@profiler.trace
def server_start(request, instance_id):
    """Start a stopped instance."""
    novaclient(request).servers.start(instance_id)


@profiler.trace
def server_stop(request, instance_id):
    """Stop a running instance."""
    novaclient(request).servers.stop(instance_id)


@profiler.trace
def server_lock(request, instance_id):
    """Lock an instance against non-admin modification."""
    # Locking needs a microversion exposing the 'locked' attribute.
    microversion = get_microversion(request, "locked_attribute")
    novaclient(request, version=microversion).servers.lock(instance_id)


@profiler.trace
def server_unlock(request, instance_id):
    """Unlock a locked instance."""
    microversion = get_microversion(request, "locked_attribute")
    novaclient(request, version=microversion).servers.unlock(instance_id)


@profiler.trace
def server_metadata_update(request, instance_id, metadata):
    """Set (merge) metadata key/value pairs on an instance."""
    novaclient(request).servers.set_meta(instance_id, metadata)


@profiler.trace
def server_metadata_delete(request, instance_id, keys):
    """Delete the given metadata keys from an instance."""
    novaclient(request).servers.delete_meta(instance_id, keys)
@profiler.trace
def tenant_quota_get(request, tenant_id):
    """Get a tenant's compute quotas (nova-network fields filtered out)."""
    return QuotaSet(novaclient(request).quotas.get(tenant_id))


@profiler.trace
def tenant_quota_update(request, tenant_id, **kwargs):
    """Update a tenant's compute quotas; no-op when nothing is given."""
    if kwargs:
        novaclient(request).quotas.update(tenant_id, **kwargs)


@profiler.trace
def default_quota_get(request, tenant_id):
    """Get the default compute quotas."""
    return QuotaSet(novaclient(request).quotas.defaults(tenant_id))


@profiler.trace
def default_quota_update(request, **kwargs):
    """Update the default compute quota class."""
    novaclient(request).quota_classes.update(DEFAULT_QUOTA_NAME, **kwargs)
def _get_usage_marker(usage):
marker = None
if hasattr(usage, 'server_usages') and usage.server_usages:
marker = usage.server_usages[-1].get('instance_id')
return marker
def _get_usage_list_marker(usage_list):
marker = None
if usage_list:
marker = _get_usage_marker(usage_list[-1])
return marker
def _merge_usage(usage, next_usage):
usage.server_usages.extend(next_usage.server_usages)
usage.total_hours += next_usage.total_hours
usage.total_memory_mb_usage += next_usage.total_memory_mb_usage
usage.total_vcpus_usage += next_usage.total_vcpus_usage
usage.total_local_gb_usage += next_usage.total_local_gb_usage
def _merge_usage_list(usages, next_usage_list):
for next_usage in next_usage_list:
if next_usage.tenant_id in usages:
_merge_usage(usages[next_usage.tenant_id], next_usage)
else:
usages[next_usage.tenant_id] = next_usage
@profiler.trace
def usage_get(request, tenant_id, start, end):
    """Return aggregated NovaUsage for one tenant over [start, end]."""
    client = upgrade_api(request, novaclient(request), '2.40')
    usage = client.usage.get(tenant_id, start, end)
    if client.api_version >= api_versions.APIVersion('2.40'):
        # If the number of instances used to calculate the usage is greater
        # than max_limit, the usage will be split across multiple requests
        # and the responses will need to be merged back together.
        marker = _get_usage_marker(usage)
        while marker:
            next_usage = client.usage.get(tenant_id, start, end, marker=marker)
            marker = _get_usage_marker(next_usage)
            if marker:
                _merge_usage(usage, next_usage)
    return NovaUsage(usage)


@profiler.trace
def usage_list(request, start, end):
    """Return per-tenant NovaUsage records over [start, end]."""
    client = upgrade_api(request, novaclient(request), '2.40')
    usage_list = client.usage.list(start, end, True)
    if client.api_version >= api_versions.APIVersion('2.40'):
        # If the number of instances used to calculate the usage is greater
        # than max_limit, the usage will be split across multiple requests
        # and the responses will need to be merged back together.
        usages = collections.OrderedDict()
        _merge_usage_list(usages, usage_list)
        marker = _get_usage_list_marker(usage_list)
        while marker:
            next_usage_list = client.usage.list(start, end, True,
                                                marker=marker)
            marker = _get_usage_list_marker(next_usage_list)
            if marker:
                _merge_usage_list(usages, next_usage_list)
        usage_list = usages.values()
    return [NovaUsage(u) for u in usage_list]
@profiler.trace
def get_password(request, instance_id, private_key=None):
    """Fetch an instance's password via the compute API."""
    client = novaclient(request)
    return client.servers.get_password(instance_id, private_key)
@profiler.trace
def instance_volume_attach(request, volume_id, instance_id, device):
    """Attach a Cinder volume to an instance at the given device name.

    Raises ``VolumeMultiattachNotSupported`` when the volume is
    multiattach but no suitable compute microversion is available.
    """
    from openstack_dashboard.api import cinder
    # If we have a multiattach volume, we need to use microversion>=2.60.
    volume = cinder.volume_get(request, volume_id)
    if volume.multiattach:
        version = get_microversion(request, 'multiattach')
        if version:
            client = novaclient(request, version)
        else:
            raise VolumeMultiattachNotSupported(
                _('Multiattach volumes are not yet supported.'))
    else:
        # Plain volumes attach fine with the default client version.
        client = novaclient(request)
    return client.volumes.create_server_volume(
        instance_id, volume_id, device)
@profiler.trace
def instance_volume_detach(request, instance_id, att_id):
    """Detach the volume attachment *att_id* from an instance."""
    client = novaclient(request)
    return client.volumes.delete_server_volume(instance_id, att_id)
@profiler.trace
def instance_volumes_list(request, instance_id):
    """List the volumes attached to *instance_id*.

    Each returned attachment record gains a ``name`` attribute resolved
    through Cinder.
    """
    from openstack_dashboard.api import cinder
    # Hoist client construction out of the loop: the previous version
    # rebuilt the cinder client once per attached volume.
    cinder_client = cinder.cinderclient(request)
    volumes = novaclient(request).volumes.get_server_volumes(instance_id)
    for volume in volumes:
        volume_data = cinder_client.volumes.get(volume.id)
        volume.name = cinder.Volume(volume_data).name
    return volumes
@profiler.trace
def hypervisor_list(request):
    """Return all hypervisors known to nova."""
    client = novaclient(request)
    return client.hypervisors.list()
@profiler.trace
def hypervisor_stats(request):
    """Return aggregate statistics over all hypervisors."""
    client = novaclient(request)
    return client.hypervisors.statistics()
@profiler.trace
def hypervisor_search(request, query, servers=True):
    """Search hypervisors matching *query*, optionally including their
    servers in the result.
    """
    client = novaclient(request)
    return client.hypervisors.search(query, servers)
@profiler.trace
def evacuate_host(request, host, target=None, on_shared_storage=False):
    """Evacuate every server off *host* toward *target*.

    All servers are attempted; failures are collected and reported in a
    single ClientException (carrying the last error code seen) after
    the loop completes.  Returns True on full success.
    """
    # TODO(jmolle) This should be change for nova atomic api host_evacuate
    hypervisors = novaclient(request).hypervisors.search(host, True)
    response = []
    err_code = None
    for hypervisor in hypervisors:
        hyper = Hypervisor(hypervisor)
        # if hypervisor doesn't have servers, the attribute is not present
        # NOTE(review): relies on the Hypervisor wrapper defaulting
        # ``servers`` to an empty sequence in that case -- confirm.
        for server in hyper.servers:
            try:
                novaclient(request).servers.evacuate(server['uuid'],
                                                     target,
                                                     on_shared_storage)
            except nova_exceptions.ClientException as err:
                # Keep going; remember this failure for the summary error.
                err_code = err.code
                msg = _("Name: %(name)s ID: %(uuid)s")
                msg = msg % {'name': server['name'], 'uuid': server['uuid']}
                response.append(msg)
    if err_code:
        msg = _('Failed to evacuate instances: %s') % ', '.join(response)
        raise nova_exceptions.ClientException(err_code, msg)
    return True
@profiler.trace
def migrate_host(request, host, live_migrate=False, disk_over_commit=False,
                 block_migration=False):
    """Migrate every instance off *host*.

    All servers are attempted; failures are collected and reported in a
    single ClientException carrying the last error code seen.  Returns
    True on full success.
    """
    hypervisors = novaclient(request).hypervisors.search(host, True)
    failures = []
    err_code = None
    for hyper in hypervisors:
        for server in getattr(hyper, "servers", []):
            try:
                use_live = False
                if live_migrate:
                    instance = server_get(request, server['uuid'])
                    # Only ACTIVE or PAUSED instances can be live-migrated;
                    # anything else falls back to a cold migration.
                    use_live = instance.status in ["ACTIVE", "PAUSED"]
                if use_live:
                    novaclient(request).servers.live_migrate(
                        server['uuid'],
                        None,
                        block_migration,
                        disk_over_commit
                    )
                else:
                    novaclient(request).servers.migrate(server['uuid'])
            except nova_exceptions.ClientException as err:
                err_code = err.code
                failures.append(
                    _("Name: %(name)s ID: %(uuid)s")
                    % {'name': server['name'], 'uuid': server['uuid']})
    if err_code:
        msg = _('Failed to migrate instances: %s') % ', '.join(failures)
        raise nova_exceptions.ClientException(err_code, msg)
    return True
@profiler.trace
def tenant_absolute_limits(request, reserved=False, tenant_id=None):
    """Return the project's absolute quota limits as a name->value dict."""
    # Nova does not allow to specify tenant_id for non-admin users
    # even if tenant_id matches a tenant_id of the user.
    if tenant_id == request.user.tenant_id:
        tenant_id = None
    limits = novaclient(request).limits.get(
        reserved=reserved, tenant_id=tenant_id).absolute
    limits_dict = {}
    for limit in limits:
        value = limit.value
        if value >= 0:
            limits_dict[limit.name] = value
        elif limit.name.startswith('total') and limit.name.endswith('Used'):
            # Workaround for nova bug 1370867 that absolute_limits
            # returns negative value for total.*Used instead of 0.
            # For such case, replace negative values with 0.
            limits_dict[limit.name] = 0
        else:
            # -1 is used to represent unlimited quotas
            limits_dict[limit.name] = float("inf")
    return limits_dict
@profiler.trace
def availability_zone_list(request, detailed=False):
    """Return availability zones sorted by zone name."""
    zone_manager = novaclient(request).availability_zones
    zones = zone_manager.list(detailed=detailed)
    zones.sort(key=attrgetter('zoneName'))
    return zones
@profiler.trace
def server_group_list(request):
    """List the server groups visible to the request's project."""
    client = novaclient(request)
    return client.server_groups.list()
@profiler.trace
def server_group_create(request, **kwargs):
    """Create a server group, negotiating the soft-policy microversion."""
    version = get_microversion(request, "servergroup_soft_policies")
    client = novaclient(request, version=version)
    return client.server_groups.create(**kwargs)
@profiler.trace
def server_group_delete(request, servergroup_id):
    """Delete the given server group."""
    client = novaclient(request)
    client.server_groups.delete(servergroup_id)
@profiler.trace
def server_group_get(request, servergroup_id):
    """Fetch one server group, negotiating the user-info microversion."""
    version = get_microversion(request, "servergroup_user_info")
    client = novaclient(request, version=version)
    return client.server_groups.get(servergroup_id)
@profiler.trace
def service_list(request, binary=None):
    """List compute services, optionally filtered by binary name."""
    client = novaclient(request)
    return client.services.list(binary=binary)
@profiler.trace
def service_enable(request, host, binary):
    """Enable the named compute service on *host*."""
    client = novaclient(request)
    return client.services.enable(host, binary)
@profiler.trace
def service_disable(request, host, binary, reason=None):
    """Disable the named compute service on *host*, logging *reason*
    when one is supplied.
    """
    client = novaclient(request)
    if reason:
        return client.services.disable_log_reason(host, binary, reason)
    return client.services.disable(host, binary)
@profiler.trace
def aggregate_details_list(request):
    """Return every host aggregate with its full detail record."""
    client = novaclient(request)
    return [client.aggregates.get_details(aggregate.id)
            for aggregate in client.aggregates.list()]
@profiler.trace
def aggregate_create(request, name, availability_zone=None):
    """Create a host aggregate, optionally bound to an availability zone."""
    client = novaclient(request)
    return client.aggregates.create(name, availability_zone)
@profiler.trace
def aggregate_delete(request, aggregate_id):
    """Delete the given host aggregate."""
    client = novaclient(request)
    return client.aggregates.delete(aggregate_id)
@profiler.trace
def aggregate_get(request, aggregate_id):
    """Fetch one host aggregate by id."""
    client = novaclient(request)
    return client.aggregates.get(aggregate_id)
@profiler.trace
def aggregate_update(request, aggregate_id, values):
    """Update a host aggregate's editable attributes."""
    client = novaclient(request)
    client.aggregates.update(aggregate_id, values)
@profiler.trace
def aggregate_set_metadata(request, aggregate_id, metadata):
    """Set metadata key/value pairs on a host aggregate."""
    client = novaclient(request)
    return client.aggregates.set_metadata(aggregate_id, metadata)
@profiler.trace
def add_host_to_aggregate(request, aggregate_id, host):
    """Add a compute host to a host aggregate."""
    client = novaclient(request)
    client.aggregates.add_host(aggregate_id, host)
@profiler.trace
def remove_host_from_aggregate(request, aggregate_id, host):
    """Remove a compute host from a host aggregate."""
    client = novaclient(request)
    client.aggregates.remove_host(aggregate_id, host)
@profiler.trace
def interface_attach(request,
                     server, port_id=None, net_id=None, fixed_ip=None):
    """Attach a network interface to *server*."""
    client = novaclient(request)
    return client.servers.interface_attach(server, port_id, net_id, fixed_ip)
@profiler.trace
def interface_detach(request, server, port_id):
    """Detach the interface bound to *port_id* from *server*."""
    client = novaclient(request)
    return client.servers.interface_detach(server, port_id)
@profiler.trace
@memoized_with_request(novaclient)
def list_extensions(nova_api):
    """List all nova extensions, except the ones in the blacklist."""
    hidden = set(getattr(settings,
                         'OPENSTACK_NOVA_EXTENSIONS_BLACKLIST', []))
    all_extensions = nova_list_extensions.ListExtManager(nova_api).show_all()
    return tuple(ext for ext in all_extensions if ext.name not in hidden)
@profiler.trace
@memoized_with_request(list_extensions, 1)
def extension_supported(extension_name, extensions):
    """Determine if nova supports a given extension name.

    Example values for the extension_name include AdminActions,
    ConsoleOutput, etc.

    :param extension_name: name of the extension to look for.
    :param extensions: iterable of extension objects, supplied by the
        memoization decorator from ``list_extensions``.
    :returns: True if an extension with that name is present.
    """
    # any() expresses the membership test more idiomatically than the
    # previous manual loop-and-return.
    return any(
        extension.name == extension_name for extension in extensions
    )
@profiler.trace
def can_set_server_password():
    """Return the 'can_set_password' hypervisor feature flag."""
    hypervisor_features = getattr(
        settings, 'OPENSTACK_HYPERVISOR_FEATURES', {})
    return hypervisor_features.get('can_set_password', False)
@profiler.trace
def instance_action_list(request, instance_id):
    """List the recorded actions for an instance."""
    manager = nova_instance_action.InstanceActionManager(novaclient(request))
    return manager.list(instance_id)
@profiler.trace
def can_set_mount_point():
    """Return the Hypervisor's capability of setting mount points."""
    features = getattr(settings, "OPENSTACK_HYPERVISOR_FEATURES", {})
    return features.get("can_set_mount_point", False)
@profiler.trace
def requires_keypair():
    """Return the 'requires_keypair' hypervisor feature flag."""
    hypervisor_features = getattr(
        settings, 'OPENSTACK_HYPERVISOR_FEATURES', {})
    return hypervisor_features.get('requires_keypair', False)
def can_set_quotas():
    """Return the 'enable_quotas' hypervisor feature flag (default True)."""
    hypervisor_features = getattr(
        settings, 'OPENSTACK_HYPERVISOR_FEATURES', {})
    return hypervisor_features.get('enable_quotas', True)
| 32.964222 | 79 | 0.677593 |
9a6bdd7f36617635c9fe2ab4f9290b7b4b1bfac7 | 108,076 | py | Python | lib/sqlalchemy/engine/base.py | karlicoss/sqlalchemy | 402cca8f2ac42a08fba7a200c4e1e086e2081aad | [
"MIT"
] | 1 | 2021-04-04T10:13:08.000Z | 2021-04-04T10:13:08.000Z | lib/sqlalchemy/engine/base.py | karlicoss/sqlalchemy | 402cca8f2ac42a08fba7a200c4e1e086e2081aad | [
"MIT"
] | null | null | null | lib/sqlalchemy/engine/base.py | karlicoss/sqlalchemy | 402cca8f2ac42a08fba7a200c4e1e086e2081aad | [
"MIT"
] | null | null | null | # engine/base.py
# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from __future__ import with_statement
import contextlib
import sys
from .interfaces import Connectable
from .interfaces import ExceptionContext
from .util import _distill_params
from .util import _distill_params_20
from .. import exc
from .. import inspection
from .. import log
from .. import util
from ..sql import compiler
from ..sql import util as sql_util
"""Defines :class:`_engine.Connection` and :class:`_engine.Engine`.
"""
_EMPTY_EXECUTION_OPTS = util.immutabledict()
class Connection(Connectable):
"""Provides high-level functionality for a wrapped DB-API connection.
Provides execution support for string-based SQL statements as well as
:class:`_expression.ClauseElement`, :class:`.Compiled` and
:class:`.DefaultGenerator`
objects. Provides a :meth:`begin` method to return :class:`.Transaction`
objects.
The Connection object is **not** thread-safe. While a Connection can be
shared among threads using properly synchronized access, it is still
possible that the underlying DBAPI connection may not support shared
access between threads. Check the DBAPI documentation for details.
The Connection object represents a single DBAPI connection checked out
from the connection pool. In this state, the connection pool has no affect
upon the connection, including its expiration or timeout state. For the
connection pool to properly manage connections, connections should be
returned to the connection pool (i.e. ``connection.close()``) whenever the
connection is not in use.
.. index::
single: thread safety; Connection
"""
_is_future = False
_sqla_logger_namespace = "sqlalchemy.engine.Connection"
    def __init__(
        self,
        engine,
        connection=None,
        close_with_result=False,
        _branch_from=None,
        _execution_options=None,
        _dispatch=None,
        _has_events=None,
    ):
        """Construct a new Connection.

        Public construction goes through :meth:`_engine.Engine.connect`;
        the underscore-prefixed parameters are internal and carry state
        when a "branch" of an existing connection is created.
        """
        self.engine = engine
        self.dialect = engine.dialect
        self.__branch_from = _branch_from
        if _branch_from:
            # branching is always "from" the root connection
            assert _branch_from.__branch_from is None
            # A branch shares the root's DBAPI connection, options,
            # echo flag and dispatch, and never closes with its result.
            self._dbapi_connection = connection
            self._execution_options = _execution_options
            self._echo = _branch_from._echo
            self.should_close_with_result = False
            self.dispatch = _dispatch
            self._has_events = _branch_from._has_events
        else:
            self._dbapi_connection = (
                connection
                if connection is not None
                else engine.raw_connection()
            )
            self._transaction = self._nested_transaction = None
            self.__savepoint_seq = 0
            self.__in_begin = False
            self.should_close_with_result = close_with_result
            self.__can_reconnect = True
            self._echo = self.engine._should_log_info()
            if _has_events is None:
                # if _has_events is sent explicitly as False,
                # then don't join the dispatch of the engine; we don't
                # want to handle any of the engine's events in that case.
                self.dispatch = self.dispatch._join(engine.dispatch)
            self._has_events = _has_events or (
                _has_events is None and engine._has_events
            )
            assert not _execution_options
            self._execution_options = engine._execution_options
        if self._has_events or self.engine._has_events:
            self.dispatch.engine_connect(self, _branch_from is not None)
@property
def _schema_translate_map(self):
return self._execution_options.get("schema_translate_map", None)
def schema_for_object(self, obj):
"""Return the schema name for the given schema item taking into
account current schema translate map.
"""
name = obj.schema
schema_translate_map = self._execution_options.get(
"schema_translate_map", None
)
if (
schema_translate_map
and name in schema_translate_map
and obj._use_schema_map
):
return schema_translate_map[name]
else:
return name
    def _branch(self):
        """Return a new Connection which references this Connection's
        engine and connection; but does not have close_with_result enabled,
        and also whose close() method does nothing.

        .. deprecated:: 1.4 the "branching" concept will be removed in
           SQLAlchemy 2.0 as well as the "Connection.connect()" method which
           is the only consumer for this.

        The Core uses this very sparingly, only in the case of
        custom SQL default functions that are to be INSERTed as the
        primary key of a row where we need to get the value back, so we have
        to invoke it distinctly - this is a very uncommon case.

        Userland code accesses _branch() when the connect()
        method is called.  The branched connection
        acts as much as possible like the parent, except that it stays
        connected when a close() event occurs.

        """
        # Branches always chain back to the root connection.
        return self.engine._connection_cls(
            self.engine,
            self._dbapi_connection,
            _branch_from=self.__branch_from if self.__branch_from else self,
            _execution_options=self._execution_options,
            _has_events=self._has_events,
            _dispatch=self.dispatch,
        )
def _generate_for_options(self):
"""define connection method chaining behavior for execution_options"""
if self._is_future:
return self
else:
c = self.__class__.__new__(self.__class__)
c.__dict__ = self.__dict__.copy()
return c
    def __enter__(self):
        # Context-manager entry: the connection itself is the resource.
        return self
    def __exit__(self, type_, value, traceback):
        # Context-manager exit: always release the connection.
        self.close()
    def execution_options(self, **opt):
        r""" Set non-SQL options for the connection which take effect
        during execution.

        The method returns a copy of this :class:`_engine.Connection`
        which references
        the same underlying DBAPI connection, but also defines the given
        execution options which will take effect for a call to
        :meth:`execute`. As the new :class:`_engine.Connection`
        references the same
        underlying resource, it's usually a good idea to ensure that the copies
        will be discarded immediately, which is implicit if used as in::

            result = connection.execution_options(stream_results=True).\
                execute(stmt)

        Note that any key/value can be passed to
        :meth:`_engine.Connection.execution_options`,
        and it will be stored in the
        ``_execution_options`` dictionary of the :class:`_engine.Connection`.
        It
        is suitable for usage by end-user schemes to communicate with
        event listeners, for example.

        The keywords that are currently recognized by SQLAlchemy itself
        include all those listed under :meth:`.Executable.execution_options`,
        as well as others that are specific to :class:`_engine.Connection`.

        :param autocommit: Available on: Connection, statement.
          When True, a COMMIT will be invoked after execution
          when executed in 'autocommit' mode, i.e. when an explicit
          transaction is not begun on the connection.   Note that this
          is **library level, not DBAPI level autocommit**.  The DBAPI
          connection will remain in a real transaction unless the
          "AUTOCOMMIT" isolation level is used.

          .. deprecated:: 1.4  The "autocommit" execution option is deprecated
             and will be removed in SQLAlchemy 2.0.  See
             :ref:`migration_20_autocommit` for discussion.

        :param compiled_cache: Available on: Connection.
          A dictionary where :class:`.Compiled` objects
          will be cached when the :class:`_engine.Connection`
          compiles a clause
          expression into a :class:`.Compiled` object.   This dictionary will
          supersede the statement cache that may be configured on the
          :class:`_engine.Engine` itself.   If set to None, caching
          is disabled, even if the engine has a configured cache size.

          Note that the ORM makes use of its own "compiled" caches for
          some operations, including flush operations.  The caching
          used by the ORM internally supersedes a cache dictionary
          specified here.

        :param isolation_level: Available on: :class:`_engine.Connection`.

          Set the transaction isolation level for the lifespan of this
          :class:`_engine.Connection` object.
          Valid values include those string
          values accepted by the :paramref:`_sa.create_engine.isolation_level`
          parameter passed to :func:`_sa.create_engine`.  These levels are
          semi-database specific; see individual dialect documentation for
          valid levels.

          The isolation level option applies the isolation level by emitting
          statements on the DBAPI connection, and **necessarily affects the
          original Connection object overall**, not just the copy that is
          returned by the call to :meth:`_engine.Connection.execution_options`
          method.  The isolation level will remain at the given setting until
          the DBAPI connection itself is returned to the connection pool, i.e.
          the :meth:`_engine.Connection.close` method on the original
          :class:`_engine.Connection` is called,
          where an event handler will emit
          additional statements on the DBAPI connection in order to revert the
          isolation level change.

          .. warning::  The ``isolation_level`` execution option should
             **not** be used when a transaction is already established, that
             is, the :meth:`_engine.Connection.begin`
             method or similar has been
             called.  A database cannot change the isolation level on a
             transaction in progress, and different DBAPIs and/or
             SQLAlchemy dialects may implicitly roll back or commit
             the transaction, or not affect the connection at all.

          .. note:: The ``isolation_level`` execution option is implicitly
             reset if the :class:`_engine.Connection` is invalidated, e.g. via
             the :meth:`_engine.Connection.invalidate` method, or if a
             disconnection error occurs.  The new connection produced after
             the invalidation will not have the isolation level re-applied
             to it automatically.

          .. seealso::

                :paramref:`_sa.create_engine.isolation_level`
                - set per :class:`_engine.Engine` isolation level

                :meth:`_engine.Connection.get_isolation_level`
                - view current level

                :ref:`SQLite Transaction Isolation <sqlite_isolation_level>`

                :ref:`PostgreSQL Transaction Isolation <postgresql_isolation_level>`

                :ref:`MySQL Transaction Isolation <mysql_isolation_level>`

                :ref:`SQL Server Transaction Isolation <mssql_isolation_level>`

                :ref:`session_transaction_isolation` - for the ORM

        :param no_parameters: When ``True``, if the final parameter
          list or dictionary is totally empty, will invoke the
          statement on the cursor as ``cursor.execute(statement)``,
          not passing the parameter collection at all.
          Some DBAPIs such as psycopg2 and mysql-python consider
          percent signs as significant only when parameters are
          present; this option allows code to generate SQL
          containing percent signs (and possibly other characters)
          that is neutral regarding whether it's executed by the DBAPI
          or piped into a script that's later invoked by
          command line tools.

        :param stream_results: Available on: Connection, statement.
          Indicate to the dialect that results should be
          "streamed" and not pre-buffered, if possible.  This is a limitation
          of many DBAPIs.  The flag is currently understood within a subset
          of dialects within the PostgreSQL and MySQL categories, and
          may be supported by other third party dialects as well.

          .. seealso::

            :ref:`engine_stream_results`

        :param schema_translate_map: Available on: Connection, Engine.
          A dictionary mapping schema names to schema names, that will be
          applied to the :paramref:`_schema.Table.schema` element of each
          :class:`_schema.Table`
          encountered when SQL or DDL expression elements
          are compiled into strings; the resulting schema name will be
          converted based on presence in the map of the original name.

          .. versionadded:: 1.1

          .. seealso::

            :ref:`schema_translating`

        .. seealso::

            :meth:`_engine.Engine.execution_options`

            :meth:`.Executable.execution_options`

            :meth:`_engine.Connection.get_execution_options`


        """  # noqa
        c = self._generate_for_options()
        c._execution_options = c._execution_options.union(opt)
        # Notify event listeners and give the dialect a chance to act on
        # options it understands (e.g. isolation_level).
        if self._has_events or self.engine._has_events:
            self.dispatch.set_connection_execution_options(c, opt)
        self.dialect.set_connection_execution_options(c, opt)
        return c
    def get_execution_options(self):
        """Get the non-SQL options which will take effect during execution.

        :returns: an immutable dictionary of the current execution options.

        .. versionadded:: 1.3

        .. seealso::

            :meth:`_engine.Connection.execution_options`
        """
        return self._execution_options
@property
def closed(self):
"""Return True if this connection is closed."""
# note this is independent for a "branched" connection vs.
# the base
return self._dbapi_connection is None and not self.__can_reconnect
    @property
    def invalidated(self):
        """Return True if this connection was invalidated."""
        # prior to 1.4, "invalid" was stored as a state independent of
        # "closed", meaning an invalidated connection could be "closed",
        # the _dbapi_connection would be None and closed=True, yet the
        # "invalid" flag would stay True.  This meant that there were
        # three separate states (open/valid, closed/valid, closed/invalid)
        # when there is really no reason for that; a connection that's
        # "closed" does not need to be "invalid".  So the state is now
        # represented by the two facts alone.
        # A branch reflects the root connection's invalidation state.
        if self.__branch_from:
            return self.__branch_from.invalidated
        return self._dbapi_connection is None and not self.closed
    @property
    def connection(self):
        """The underlying DB-API connection managed by this Connection.

        .. seealso::

            :ref:`dbapi_connections`

        """
        if self._dbapi_connection is None:
            try:
                # Attempt a transparent reconnect if the connection was
                # previously invalidated.
                return self._revalidate_connection()
            except (exc.PendingRollbackError, exc.ResourceClosedError):
                # These already carry the correct context for the caller.
                raise
            except BaseException as e:
                self._handle_dbapi_exception(e, None, None, None, None)
        else:
            return self._dbapi_connection
    def get_isolation_level(self):
        """Return the current isolation level assigned to this
        :class:`_engine.Connection`.

        This will typically be the default isolation level as determined
        by the dialect, unless if the
        :paramref:`.Connection.execution_options.isolation_level`
        feature has been used to alter the isolation level on a
        per-:class:`_engine.Connection` basis.

        This attribute will typically perform a live SQL operation in order
        to procure the current isolation level, so the value returned is the
        actual level on the underlying DBAPI connection regardless of how
        this state was set.  Compare to the
        :attr:`_engine.Connection.default_isolation_level` accessor
        which returns the dialect-level setting without performing a SQL
        query.

        .. versionadded:: 0.9.9

        .. seealso::

            :attr:`_engine.Connection.default_isolation_level`
            - view default level

            :paramref:`_sa.create_engine.isolation_level`
            - set per :class:`_engine.Engine` isolation level

            :paramref:`.Connection.execution_options.isolation_level`
            - set per :class:`_engine.Connection` isolation level

        """
        try:
            return self.dialect.get_isolation_level(self.connection)
        except BaseException as e:
            # Normalize DBAPI errors into SQLAlchemy exceptions.
            self._handle_dbapi_exception(e, None, None, None, None)
    @property
    def default_isolation_level(self):
        """The default isolation level assigned to this
        :class:`_engine.Connection`.

        This is the isolation level setting that the
        :class:`_engine.Connection`
        has when first procured via the :meth:`_engine.Engine.connect` method.
        This level stays in place until the
        :paramref:`.Connection.execution_options.isolation_level` is used
        to change the setting on a per-:class:`_engine.Connection` basis.

        Unlike :meth:`_engine.Connection.get_isolation_level`,
        this attribute is set
        ahead of time from the first connection procured by the dialect,
        so SQL query is not invoked when this accessor is called.

        .. versionadded:: 0.9.9

        .. seealso::

            :meth:`_engine.Connection.get_isolation_level`
            - view current level

            :paramref:`_sa.create_engine.isolation_level`
            - set per :class:`_engine.Engine` isolation level

            :paramref:`.Connection.execution_options.isolation_level`
            - set per :class:`_engine.Connection` isolation level

        """
        return self.dialect.default_isolation_level
def _invalid_transaction(self):
if self.invalidated:
raise exc.PendingRollbackError(
"Can't reconnect until invalid %stransaction is rolled "
"back."
% (
"savepoint "
if self._nested_transaction is not None
else ""
),
code="8s2b",
)
else:
raise exc.PendingRollbackError(
"This connection is on an inactive %stransaction. "
"Please rollback() fully before proceeding."
% (
"savepoint "
if self._nested_transaction is not None
else ""
),
code="8s2a",
)
    def _revalidate_connection(self):
        """Procure a fresh DBAPI connection after invalidation, or raise
        if the connection cannot be transparently re-established.
        """
        # Branches delegate to the root connection.
        if self.__branch_from:
            return self.__branch_from._revalidate_connection()
        if self.__can_reconnect and self.invalidated:
            # Can't transparently reconnect while a transaction is still
            # pending rollback.
            if self._transaction is not None:
                self._invalid_transaction()
            self._dbapi_connection = self.engine.raw_connection(
                _connection=self
            )
            return self._dbapi_connection
        raise exc.ResourceClosedError("This Connection is closed")
@property
def _still_open_and_dbapi_connection_is_valid(self):
return self._dbapi_connection is not None and getattr(
self._dbapi_connection, "is_valid", False
)
    @property
    def info(self):
        """Info dictionary associated with the underlying DBAPI connection
        referred to by this :class:`_engine.Connection`, allowing user-defined
        data to be associated with the connection.

        The data here will follow along with the DBAPI connection including
        after it is returned to the connection pool and used again
        in subsequent instances of :class:`_engine.Connection`.

        """
        # Accessing via .connection (not ._dbapi_connection) so that a
        # revalidation is attempted first if needed.
        return self.connection.info
    @util.deprecated_20(":meth:`.Connection.connect`")
    def connect(self, close_with_result=False):
        """Returns a branched version of this :class:`_engine.Connection`.

        The :meth:`_engine.Connection.close` method on the returned
        :class:`_engine.Connection` can be called and this
        :class:`_engine.Connection` will remain open.

        This method provides usage symmetry with
        :meth:`_engine.Engine.connect`, including for usage
        with context managers.

        """
        return self._branch()
    def invalidate(self, exception=None):
        """Invalidate the underlying DBAPI connection associated with
        this :class:`_engine.Connection`.

        The underlying DBAPI connection is literally closed (if
        possible), and is discarded.  Its source connection pool will
        typically lazily create a new connection to replace it.

        Upon the next use (where "use" typically means using the
        :meth:`_engine.Connection.execute` method or similar),
        this :class:`_engine.Connection` will attempt to
        procure a new DBAPI connection using the services of the
        :class:`_pool.Pool` as a source of connectivity (e.g.
        a "reconnection").

        If a transaction was in progress (e.g. the
        :meth:`_engine.Connection.begin` method has been called) when
        :meth:`_engine.Connection.invalidate` method is called, at the DBAPI
        level all state associated with this transaction is lost, as
        the DBAPI connection is closed.  The :class:`_engine.Connection`
        will not allow a reconnection to proceed until the
        :class:`.Transaction` object is ended, by calling the
        :meth:`.Transaction.rollback` method; until that point, any attempt at
        continuing to use the :class:`_engine.Connection` will raise an
        :class:`~sqlalchemy.exc.InvalidRequestError`.
        This is to prevent applications from accidentally
        continuing an ongoing transactional operations despite the
        fact that the transaction has been lost due to an
        invalidation.

        The :meth:`_engine.Connection.invalidate` method,
        just like auto-invalidation,
        will at the connection pool level invoke the
        :meth:`_events.PoolEvents.invalidate` event.

        :param exception: an optional ``Exception`` instance that's the
         reason for the invalidation, passed along to pool event listeners.

        .. seealso::

            :ref:`pool_connection_invalidation`

        """
        # Invalidation always acts on the root connection of a branch.
        if self.__branch_from:
            return self.__branch_from.invalidate(exception=exception)
        if self.invalidated:
            return
        if self.closed:
            raise exc.ResourceClosedError("This Connection is closed")
        if self._still_open_and_dbapi_connection_is_valid:
            self._dbapi_connection.invalidate(exception)
        self._dbapi_connection = None
    def detach(self):
        """Detach the underlying DB-API connection from its connection pool.

        E.g.::

            with engine.connect() as conn:
                conn.detach()
                conn.execute(text("SET search_path TO schema1, schema2"))

                # work with connection

            # connection is fully closed (since we used "with:", can
            # also call .close())

        This :class:`_engine.Connection` instance will remain usable.
        When closed
        (or exited from a context manager context as above),
        the DB-API connection will be literally closed and not
        returned to its originating pool.

        This method can be used to insulate the rest of an application
        from a modified state on a connection (such as a transaction
        isolation level or similar).

        """
        self._dbapi_connection.detach()
    def _autobegin(self):
        # Implicitly begin a transaction; see begin() for semantics.
        self.begin()
    def begin(self):
        """Begin a transaction and return a transaction handle.

        The returned object is an instance of :class:`.Transaction`.
        This object represents the "scope" of the transaction,
        which completes when either the :meth:`.Transaction.rollback`
        or :meth:`.Transaction.commit` method is called.

        Nested calls to :meth:`.begin` on the same :class:`_engine.Connection`
        will return new :class:`.Transaction` objects that represent
        an emulated transaction within the scope of the enclosing
        transaction, that is::

            trans = conn.begin()   # outermost transaction
            trans2 = conn.begin()  # "nested"
            trans2.commit()        # does nothing
            trans.commit()         # actually commits

        Calls to :meth:`.Transaction.commit` only have an effect
        when invoked via the outermost :class:`.Transaction` object, though the
        :meth:`.Transaction.rollback` method of any of the
        :class:`.Transaction` objects will roll back the
        transaction.

        .. seealso::

            :meth:`_engine.Connection.begin_nested` - use a SAVEPOINT

            :meth:`_engine.Connection.begin_twophase` -
            use a two phase /XID transaction

            :meth:`_engine.Engine.begin` - context manager available from
            :class:`_engine.Engine`

        """
        if self._is_future:
            assert not self.__branch_from
        elif self.__branch_from:
            # Legacy branches delegate to the root connection.
            return self.__branch_from.begin()
        if self.__in_begin:
            # for dialects that emit SQL within the process of
            # dialect.do_begin() or dialect.do_begin_twophase(), this
            # flag prevents "autobegin" from being emitted within that
            # process, while allowing self._transaction to remain at None
            # until it's complete.
            return
        elif self._transaction is None:
            self._transaction = RootTransaction(self)
            return self._transaction
        else:
            if self._is_future:
                raise exc.InvalidRequestError(
                    "a transaction is already begun for this connection"
                )
            else:
                # Legacy mode: emulate nesting with a no-op marker.
                return MarkerTransaction(self)
    def begin_nested(self):
        """Begin a nested transaction and return a transaction handle.

        The returned object is an instance of :class:`.NestedTransaction`.

        Nested transactions require SAVEPOINT support in the
        underlying database.  Any transaction in the hierarchy may
        ``commit`` and ``rollback``, however the outermost transaction
        still controls the overall ``commit`` or ``rollback`` of the
        transaction of a whole.

        .. seealso::

            :meth:`_engine.Connection.begin`

            :meth:`_engine.Connection.begin_twophase`

        """
        if self._is_future:
            assert not self.__branch_from
        elif self.__branch_from:
            return self.__branch_from.begin_nested()
        # Ensure an enclosing (root) transaction exists first.
        if self._transaction is None:
            self.begin()
        return NestedTransaction(self)
    def begin_twophase(self, xid=None):
        """Begin a two-phase or XA transaction and return a transaction
        handle.

        The returned object is an instance of :class:`.TwoPhaseTransaction`,
        which in addition to the methods provided by
        :class:`.Transaction`, also provides a
        :meth:`~.TwoPhaseTransaction.prepare` method.

        :param xid: the two phase transaction id.  If not supplied, a
          random id will be generated.

        .. seealso::

            :meth:`_engine.Connection.begin`

            :meth:`_engine.Connection.begin_twophase`

        """
        if self.__branch_from:
            return self.__branch_from.begin_twophase(xid=xid)
        if self._transaction is not None:
            raise exc.InvalidRequestError(
                "Cannot start a two phase transaction when a transaction "
                "is already in progress."
            )
        if xid is None:
            # Let the dialect generate a transaction id.
            xid = self.engine.dialect.create_xid()
        return TwoPhaseTransaction(self, xid)
    def recover_twophase(self):
        # Delegate recovery of prepared two-phase transactions to the
        # dialect.
        return self.engine.dialect.do_recover_twophase(self)
    def rollback_prepared(self, xid, recover=False):
        # Roll back a prepared two-phase transaction via the dialect.
        self.engine.dialect.do_rollback_twophase(self, xid, recover=recover)
def commit_prepared(self, xid, recover=False):
    """Commit the prepared two-phase transaction *xid* via the
    dialect, optionally in recovery mode."""
    dialect = self.engine.dialect
    dialect.do_commit_twophase(self, xid, recover=recover)
def in_transaction(self):
    """Return True if a transaction is in progress."""
    # branched connections keep their transaction state on the root
    root = self.__branch_from
    if root is not None:
        return root.in_transaction()
    txn = self._transaction
    return txn is not None and txn.is_active
def in_nested_transaction(self):
    """Return True if a nested (i.e. SAVEPOINT) transaction is in
    progress.

    (The previous docstring was copy-pasted from
    :meth:`.Connection.in_transaction`; this method reports on the
    *nested* transaction only.)
    """
    # branched connections keep their transaction state on the root
    if self.__branch_from is not None:
        return self.__branch_from.in_nested_transaction()
    return (
        self._nested_transaction is not None
        and self._nested_transaction.is_active
    )
def get_transaction(self):
    """Return the current root transaction in progress, if any.

    .. versionadded:: 1.4

    """
    root = self.__branch_from
    if root is not None:
        # delegate to the root of a branched connection
        return root.get_transaction()
    return self._transaction
def get_nested_transaction(self):
    """Return the current nested transaction in progress, if any.

    .. versionadded:: 1.4

    """
    root = self.__branch_from
    if root is not None:
        # delegate to the root of a branched connection
        return root.get_nested_transaction()
    return self._nested_transaction
def _begin_impl(self, transaction):
    """Emit the dialect-level BEGIN for *transaction*.

    Fires the ``begin`` connection event before the DBAPI-level begin.
    """
    # branched connections delegate before ever reaching an _impl method
    assert not self.__branch_from
    if self._echo:
        self.engine.logger.info("BEGIN (implicit)")
    if self._has_events or self.engine._has_events:
        self.dispatch.begin(self)
    # __in_begin flags that a BEGIN is currently executing
    self.__in_begin = True
    try:
        self.engine.dialect.do_begin(self.connection)
    except BaseException as e:
        # route through the common DBAPI error translation machinery
        self._handle_dbapi_exception(e, None, None, None, None)
    finally:
        self.__in_begin = False
def _rollback_impl(self):
    """Emit the dialect-level ROLLBACK.

    Fires the ``rollback`` event unconditionally, but only touches the
    DBAPI connection when it is still open and valid.
    """
    assert not self.__branch_from
    if self._has_events or self.engine._has_events:
        self.dispatch.rollback(self)
    # skip the actual ROLLBACK if the DBAPI connection was already
    # closed or invalidated
    if self._still_open_and_dbapi_connection_is_valid:
        if self._echo:
            self.engine.logger.info("ROLLBACK")
        try:
            self.engine.dialect.do_rollback(self.connection)
        except BaseException as e:
            self._handle_dbapi_exception(e, None, None, None, None)
def _commit_impl(self, autocommit=False):
    """Emit the dialect-level COMMIT.

    :param autocommit: True when invoked from the legacy implicit
        autocommit path; emits a 2.0 deprecation warning.
    """
    assert not self.__branch_from
    if autocommit:
        util.warn_deprecated_20(
            "The current statement is being autocommitted using "
            "implicit autocommit, which will be removed in "
            "SQLAlchemy 2.0. "
            "Use the .begin() method of Engine or Connection in order to "
            "use an explicit transaction for DML and DDL statements."
        )
    if self._has_events or self.engine._has_events:
        self.dispatch.commit(self)
    if self._echo:
        self.engine.logger.info("COMMIT")
    try:
        self.engine.dialect.do_commit(self.connection)
    except BaseException as e:
        self._handle_dbapi_exception(e, None, None, None, None)
def _savepoint_impl(self, name=None):
    """Emit a SAVEPOINT, generating a unique name if not supplied.

    Returns the savepoint name actually used.
    """
    assert not self.__branch_from
    if self._has_events or self.engine._has_events:
        self.dispatch.savepoint(self, name)
    if name is None:
        # per-connection counter keeps generated names unique
        self.__savepoint_seq += 1
        name = "sa_savepoint_%s" % self.__savepoint_seq
    if self._still_open_and_dbapi_connection_is_valid:
        self.engine.dialect.do_savepoint(self, name)
        return name
def _rollback_to_savepoint_impl(self, name):
    """Emit ROLLBACK TO SAVEPOINT for the savepoint *name*."""
    assert not self.__branch_from
    if self._has_events or self.engine._has_events:
        self.dispatch.rollback_savepoint(self, name, None)
    # no-op if the DBAPI connection is closed or invalidated
    if self._still_open_and_dbapi_connection_is_valid:
        self.engine.dialect.do_rollback_to_savepoint(self, name)
def _release_savepoint_impl(self, name):
    """Emit RELEASE SAVEPOINT for the savepoint *name*."""
    assert not self.__branch_from
    if self._has_events or self.engine._has_events:
        self.dispatch.release_savepoint(self, name, None)
    # no-op if the DBAPI connection is closed or invalidated
    if self._still_open_and_dbapi_connection_is_valid:
        self.engine.dialect.do_release_savepoint(self, name)
def _begin_twophase_impl(self, transaction):
    """Emit the dialect-level begin for a two-phase *transaction*."""
    assert not self.__branch_from
    if self._echo:
        self.engine.logger.info("BEGIN TWOPHASE (implicit)")
    if self._has_events or self.engine._has_events:
        self.dispatch.begin_twophase(self, transaction.xid)
    if self._still_open_and_dbapi_connection_is_valid:
        # flag re-entrancy while the DBAPI-level begin executes
        self.__in_begin = True
        try:
            self.engine.dialect.do_begin_twophase(self, transaction.xid)
        except BaseException as e:
            self._handle_dbapi_exception(e, None, None, None, None)
        finally:
            self.__in_begin = False
def _prepare_twophase_impl(self, xid):
    """Emit PREPARE for the two-phase transaction *xid*."""
    assert not self.__branch_from
    if self._has_events or self.engine._has_events:
        self.dispatch.prepare_twophase(self, xid)
    if self._still_open_and_dbapi_connection_is_valid:
        # prepare only makes sense while a TwoPhaseTransaction is current
        assert isinstance(self._transaction, TwoPhaseTransaction)
        try:
            self.engine.dialect.do_prepare_twophase(self, xid)
        except BaseException as e:
            self._handle_dbapi_exception(e, None, None, None, None)
def _rollback_twophase_impl(self, xid, is_prepared):
    """Roll back the two-phase transaction *xid*.

    :param is_prepared: whether PREPARE was already emitted for *xid*.
    """
    assert not self.__branch_from
    if self._has_events or self.engine._has_events:
        self.dispatch.rollback_twophase(self, xid, is_prepared)
    if self._still_open_and_dbapi_connection_is_valid:
        assert isinstance(self._transaction, TwoPhaseTransaction)
        try:
            self.engine.dialect.do_rollback_twophase(
                self, xid, is_prepared
            )
        except BaseException as e:
            self._handle_dbapi_exception(e, None, None, None, None)
def _commit_twophase_impl(self, xid, is_prepared):
    """Commit the two-phase transaction *xid*.

    :param is_prepared: whether PREPARE was already emitted for *xid*.
    """
    assert not self.__branch_from
    if self._has_events or self.engine._has_events:
        self.dispatch.commit_twophase(self, xid, is_prepared)
    if self._still_open_and_dbapi_connection_is_valid:
        assert isinstance(self._transaction, TwoPhaseTransaction)
        try:
            self.engine.dialect.do_commit_twophase(self, xid, is_prepared)
        except BaseException as e:
            self._handle_dbapi_exception(e, None, None, None, None)
def _autorollback(self):
    """Roll back the DBAPI-level transaction if no explicit
    :class:`.Transaction` is in progress (legacy autocommit path)."""
    if self.__branch_from:
        # All transaction state lives on the root connection; delegate
        # and stop.  Without this return, the branched connection would
        # fall through to self._rollback_impl(), whose
        # ``assert not self.__branch_from`` fails for branched
        # connections whenever no transaction is active.
        self.__branch_from._autorollback()
        return

    if not self.in_transaction():
        self._rollback_impl()
def _warn_for_legacy_exec_format(self):
    """Emit the 2.0 deprecation warning for legacy execute() parameter
    styles (keyword args, tuples, positional dictionaries)."""
    util.warn_deprecated_20(
        "The connection.execute() method in "
        "SQLAlchemy 2.0 will accept parameters as a single "
        "dictionary or a "
        "single sequence of dictionaries only. "
        "Parameters passed as keyword arguments, tuples or positionally "
        # fixed typo: "oriened" -> "oriented"
        "oriented dictionaries and/or tuples "
        "will no longer be accepted."
    )
def close(self):
    """Close this :class:`_engine.Connection`.

    This results in a release of the underlying database
    resources, that is, the DBAPI connection referenced
    internally. The DBAPI connection is typically restored
    back to the connection-holding :class:`_pool.Pool` referenced
    by the :class:`_engine.Engine` that produced this
    :class:`_engine.Connection`. Any transactional state present on
    the DBAPI connection is also unconditionally released via
    the DBAPI connection's ``rollback()`` method, regardless
    of any :class:`.Transaction` object that may be
    outstanding with regards to this :class:`_engine.Connection`.

    After :meth:`_engine.Connection.close` is called, the
    :class:`_engine.Connection` is permanently in a closed state,
    and will allow no further operations.

    """
    if self.__branch_from:
        # branched connections never exist in future mode
        assert not self._is_future
        util.warn_deprecated_20(
            "The .close() method on a so-called 'branched' connection is "
            "deprecated as of 1.4, as are 'branched' connections overall, "
            "and will be removed in a future release. If this is a "
            "default-handling function, don't close the connection."
        )
        # a branch only detaches itself; the root connection and its
        # DBAPI connection remain untouched
        self._dbapi_connection = None
        self.__can_reconnect = False
        return
    if self._transaction:
        self._transaction.close()
    if self._dbapi_connection is not None:
        conn = self._dbapi_connection
        conn.close()
        # detach the pooled connection's reset agent if it points at
        # our (now-closed) transaction
        if conn._reset_agent is self._transaction:
            conn._reset_agent = None
        # There is a slight chance that conn.close() may have
        # triggered an invalidation here in which case
        # _dbapi_connection would already be None, however usually
        # it will be non-None here and in a "closed" state.
        self._dbapi_connection = None
    self.__can_reconnect = False
def scalar(self, object_, *multiparams, **params):
    """Executes and returns the first column of the first row.

    The underlying result/cursor is closed after execution.
    """
    result = self.execute(object_, *multiparams, **params)
    return result.scalar()
def execute(self, statement, *multiparams, **params):
    r"""Executes a SQL statement construct and returns a
    :class:`_engine.CursorResult`.

    :param statement: The statement to be executed.  May be
     one of:

     * a plain string (deprecated)
     * any :class:`_expression.ClauseElement` construct that is also
       a subclass of :class:`.Executable`, such as a
       :func:`_expression.select` construct
     * a :class:`.FunctionElement`, such as that generated
       by :data:`.func`, will be automatically wrapped in
       a SELECT statement, which is then executed.
     * a :class:`.DDLElement` object
     * a :class:`.DefaultGenerator` object
     * a :class:`.Compiled` object

     .. deprecated:: 2.0 passing a string to
        :meth:`_engine.Connection.execute` is
        deprecated and will be removed in version 2.0.  Use the
        :func:`_expression.text` construct with
        :meth:`_engine.Connection.execute`, or the
        :meth:`_engine.Connection.exec_driver_sql`
        method to invoke a driver-level
        SQL string.

    :param \*multiparams/\**params: represent bound parameter
     values to be used in the execution.   Typically,
     the format is either a collection of one or more
     dictionaries passed to \*multiparams::

         conn.execute(
             table.insert(),
             {"id":1, "value":"v1"},
             {"id":2, "value":"v2"}
         )

     ...or individual key/values interpreted by \**params::

         conn.execute(
             table.insert(), id=1, value="v1"
         )

     In the case that a plain SQL string is passed, and the underlying
     DBAPI accepts positional bind parameters, a collection of tuples
     or individual values in \*multiparams may be passed::

         conn.execute(
             "INSERT INTO table (id, value) VALUES (?, ?)",
             (1, "v1"), (2, "v2")
         )

         conn.execute(
             "INSERT INTO table (id, value) VALUES (?, ?)",
             1, "v1"
         )

     Note above, the usage of a question mark "?" or other
     symbol is contingent upon the "paramstyle" accepted by the DBAPI
     in use, which may be any of "qmark", "named", "pyformat", "format",
     "numeric".   See `pep-249 <http://www.python.org/dev/peps/pep-0249/>`_
     for details on paramstyle.

     To execute a textual SQL statement which uses bound parameters in a
     DBAPI-agnostic way, use the :func:`_expression.text` construct.

     .. deprecated:: 2.0 use of tuple or scalar positional parameters
        is deprecated. All params should be dicts or sequences of dicts.
        Use :meth:`.exec_driver_sql` to execute a plain string with
        tuple or scalar positional parameters.

    """
    if isinstance(statement, util.string_types):
        # legacy plain-string path; deprecated in favor of text() /
        # exec_driver_sql()
        util.warn_deprecated_20(
            "Passing a string to Connection.execute() is "
            "deprecated and will be removed in version 2.0. Use the "
            "text() construct, "
            "or the Connection.exec_driver_sql() method to invoke a "
            "driver-level SQL string."
        )
        return self._exec_driver_sql(
            statement,
            multiparams,
            params,
            _EMPTY_EXECUTION_OPTS,
            future=False,
        )
    try:
        # each executable object knows how to execute itself against a
        # connection (visitor-style dispatch)
        meth = statement._execute_on_connection
    except AttributeError as err:
        util.raise_(
            exc.ObjectNotExecutableError(statement), replace_context=err
        )
    else:
        return meth(self, multiparams, params, _EMPTY_EXECUTION_OPTS)
def _execute_function(self, func, multiparams, params, execution_options):
    """Execute a sql.FunctionElement object."""
    # a bare SQL function is executed by wrapping it in a SELECT
    wrapped = func.select()
    return self._execute_clauseelement(
        wrapped, multiparams, params, execution_options
    )
def _execute_default(
    self,
    default,
    multiparams,
    params,
    # migrate is calling this directly :(
    execution_options=_EMPTY_EXECUTION_OPTS,
):
    """Execute a schema.ColumnDefault object."""
    execution_options = self._execution_options.merge_with(
        execution_options
    )
    distilled_parameters = _distill_params(self, multiparams, params)
    if self._has_events or self.engine._has_events:
        # NOTE(review): ``distilled_params`` is bound here but never
        # used below; only event_multiparams/event_params are consumed,
        # and only inside this same events branch.
        (
            distilled_params,
            event_multiparams,
            event_params,
        ) = self._invoke_before_exec_event(
            default, distilled_parameters, execution_options
        )
    try:
        conn = self._dbapi_connection
        if conn is None:
            # transparently re-acquire a DBAPI connection if invalidated
            conn = self._revalidate_connection()
        dialect = self.dialect
        ctx = dialect.execution_ctx_cls._init_default(
            dialect, self, conn, execution_options
        )
    except (exc.PendingRollbackError, exc.ResourceClosedError):
        raise
    except BaseException as e:
        self._handle_dbapi_exception(e, None, None, None, None)
    ret = ctx._exec_default(None, default, None)
    if self.should_close_with_result:
        # legacy "connectionless" execution closes the connection here
        self.close()
    if self._has_events or self.engine._has_events:
        self.dispatch.after_execute(
            self,
            default,
            event_multiparams,
            event_params,
            execution_options,
            ret,
        )
    return ret
def _execute_ddl(self, ddl, multiparams, params, execution_options):
    """Execute a schema.DDL object."""
    # per-statement options layer over connection-level options
    execution_options = ddl._execution_options.merge_with(
        self._execution_options, execution_options
    )
    distilled_parameters = _distill_params(self, multiparams, params)
    if self._has_events or self.engine._has_events:
        (
            distilled_params,
            event_multiparams,
            event_params,
        ) = self._invoke_before_exec_event(
            ddl, distilled_parameters, execution_options
        )
    # NOTE(review): exec_opts re-merges connection options without the
    # ddl-level options merged above; used only for schema_translate_map
    exec_opts = self._execution_options.merge_with(execution_options)
    schema_translate_map = exec_opts.get("schema_translate_map", None)
    dialect = self.dialect
    compiled = ddl.compile(
        dialect=dialect, schema_translate_map=schema_translate_map
    )
    ret = self._execute_context(
        dialect,
        dialect.execution_ctx_cls._init_ddl,
        compiled,
        None,
        execution_options,
        compiled,
    )
    if self._has_events or self.engine._has_events:
        self.dispatch.after_execute(
            self,
            ddl,
            event_multiparams,
            event_params,
            execution_options,
            ret,
        )
    return ret
def _invoke_before_exec_event(
    self, elem, distilled_params, execution_options
):
    """Run ``before_execute`` event handlers, giving each the chance to
    replace the statement and parameters, then re-distill the result.

    Returns ``(distilled_params, event_multiparams, event_params)``.
    """
    # present a single parameter set to handlers as "params" and
    # multiple sets as "multiparams", mirroring the legacy execute() API
    if len(distilled_params) == 1:
        multi, single = [], distilled_params[0]
    else:
        multi, single = distilled_params, {}

    for handler in self.dispatch.before_execute:
        elem, multi, single = handler(
            self,
            elem,
            multi,
            single,
            execution_options,
        )

    # fold whatever the handlers returned back into distilled form
    if multi:
        distilled_params = list(multi)
        if single:
            raise exc.InvalidRequestError(
                "Event handler can't return non-empty multiparams "
                "and params at the same time"
            )
    elif single:
        distilled_params = [single]
    else:
        distilled_params = []

    return distilled_params, multi, single
def _execute_clauseelement(
    self, elem, multiparams, params, execution_options
):
    """Execute a sql.ClauseElement object."""
    execution_options = elem._execution_options.merge_with(
        self._execution_options, execution_options
    )
    distilled_params = _distill_params(self, multiparams, params)
    has_events = self._has_events or self.engine._has_events
    if has_events:
        (
            distilled_params,
            event_multiparams,
            event_params,
        ) = self._invoke_before_exec_event(
            elem, distilled_params, execution_options
        )
    if distilled_params:
        # ensure we don't retain a link to the view object for keys()
        # which links to the values, which we don't want to cache
        keys = sorted(distilled_params[0])
        for_executemany = len(distilled_params) > 1
    else:
        keys = []
        for_executemany = False
    dialect = self.dialect
    schema_translate_map = execution_options.get(
        "schema_translate_map", None
    )
    compiled_cache = execution_options.get(
        "compiled_cache", self.engine._compiled_cache
    )
    # compile with caching; cache_hit records hit/miss for logging stats
    compiled_sql, extracted_params, cache_hit = elem._compile_w_cache(
        dialect=dialect,
        compiled_cache=compiled_cache,
        column_keys=keys,
        for_executemany=for_executemany,
        schema_translate_map=schema_translate_map,
        linting=self.dialect.compiler_linting | compiler.WARN_LINTING,
    )
    ret = self._execute_context(
        dialect,
        dialect.execution_ctx_cls._init_compiled,
        compiled_sql,
        distilled_params,
        execution_options,
        compiled_sql,
        distilled_params,
        elem,
        extracted_params,
        cache_hit=cache_hit,
    )
    if has_events:
        self.dispatch.after_execute(
            self,
            elem,
            event_multiparams,
            event_params,
            execution_options,
            ret,
        )
    return ret
def _execute_compiled(
    self,
    compiled,
    multiparams,
    params,
    execution_options=_EMPTY_EXECUTION_OPTS,
):
    """Execute a sql.Compiled object.

    TODO: why do we have this? likely deprecate or remove
    """
    execution_options = compiled.execution_options.merge_with(
        self._execution_options, execution_options
    )
    distilled_parameters = _distill_params(self, multiparams, params)
    if self._has_events or self.engine._has_events:
        # NOTE(review): ``distilled_params`` is bound here but unused;
        # the pre-distilled ``distilled_parameters`` is what executes
        (
            distilled_params,
            event_multiparams,
            event_params,
        ) = self._invoke_before_exec_event(
            compiled, distilled_parameters, execution_options
        )
    dialect = self.dialect
    ret = self._execute_context(
        dialect,
        dialect.execution_ctx_cls._init_compiled,
        compiled,
        distilled_parameters,
        execution_options,
        compiled,
        distilled_parameters,
        None,
        None,
    )
    if self._has_events or self.engine._has_events:
        self.dispatch.after_execute(
            self,
            compiled,
            event_multiparams,
            event_params,
            execution_options,
            ret,
        )
    return ret
def _exec_driver_sql(
    self, statement, multiparams, params, execution_options, future
):
    """Execute a driver-level SQL string.

    :param future: True for the 2.0-style exec_driver_sql() path, which
        skips the legacy before/after execute events.
    """
    execution_options = self._execution_options.merge_with(
        execution_options
    )
    distilled_parameters = _distill_params(self, multiparams, params)
    if not future:
        # legacy path fires the connection-level execute events
        if self._has_events or self.engine._has_events:
            (
                distilled_params,
                event_multiparams,
                event_params,
            ) = self._invoke_before_exec_event(
                statement, distilled_parameters, execution_options
            )
    dialect = self.dialect
    ret = self._execute_context(
        dialect,
        dialect.execution_ctx_cls._init_statement,
        statement,
        distilled_parameters,
        execution_options,
        statement,
        distilled_parameters,
    )
    if not future:
        if self._has_events or self.engine._has_events:
            self.dispatch.after_execute(
                self,
                statement,
                event_multiparams,
                event_params,
                execution_options,
                ret,
            )
    return ret
def _execute_20(
    self,
    statement,
    parameters=None,
    execution_options=_EMPTY_EXECUTION_OPTS,
):
    """2.0-style execution entry point: distill *parameters* and
    dispatch *statement* to its own ``_execute_on_connection``."""
    distilled_args, distilled_kwargs = _distill_params_20(parameters)
    try:
        dispatch_meth = statement._execute_on_connection
    except AttributeError as err:
        # objects lacking the hook are not executable at all
        util.raise_(
            exc.ObjectNotExecutableError(statement), replace_context=err
        )
    else:
        return dispatch_meth(
            self, distilled_args, distilled_kwargs, execution_options
        )
def exec_driver_sql(
    self, statement, parameters=None, execution_options=None
):
    r"""Executes a SQL statement construct and returns a
    :class:`_engine.CursorResult`.

    :param statement: The statement str to be executed.   Bound parameters
     must use the underlying DBAPI's paramstyle, such as "qmark",
     "pyformat", "format", etc.

    :param parameters: represent bound parameter values to be used in the
     execution.  The format is one of:   a dictionary of named parameters,
     a tuple of positional parameters, or a list containing either
     dictionaries or tuples for multiple-execute support.

     E.g. multiple dictionaries::

         conn.exec_driver_sql(
             "INSERT INTO table (id, value) VALUES (%(id)s, %(value)s)",
             [{"id":1, "value":"v1"}, {"id":2, "value":"v2"}]
         )

     Single dictionary::

         conn.exec_driver_sql(
             "INSERT INTO table (id, value) VALUES (%(id)s, %(value)s)",
             dict(id=1, value="v1")
         )

     Single tuple::

         conn.exec_driver_sql(
             "INSERT INTO table (id, value) VALUES (?, ?)",
             (1, 'v1')
         )

    .. note:: The :meth:`_engine.Connection.exec_driver_sql` method does
        not participate in the
        :meth:`_events.ConnectionEvents.before_execute` and
        :meth:`_events.ConnectionEvents.after_execute` events.   To
        intercept calls to :meth:`_engine.Connection.exec_driver_sql`, use
        :meth:`_events.ConnectionEvents.before_cursor_execute` and
        :meth:`_events.ConnectionEvents.after_cursor_execute`.

    .. seealso::

        :pep:`249`

    """
    args_10style, kwargs_10style = _distill_params_20(parameters)
    # future=True: skip the legacy before/after execute events
    return self._exec_driver_sql(
        statement,
        args_10style,
        kwargs_10style,
        execution_options,
        future=True,
    )
def _execute_context(
    self,
    dialect,
    constructor,
    statement,
    parameters,
    execution_options,
    *args,
    **kw
):
    """Create an :class:`.ExecutionContext` and execute, returning
    a :class:`_engine.CursorResult`."""
    branched = self
    if self.__branch_from:
        # if this is a "branched" connection, do everything in terms
        # of the "root" connection, *except* for .close(), which is
        # the only feature that branching provides
        self = self.__branch_from
    try:
        conn = self._dbapi_connection
        if conn is None:
            # transparently re-acquire after invalidation
            conn = self._revalidate_connection()
        context = constructor(
            dialect, self, conn, execution_options, *args, **kw
        )
    except (exc.PendingRollbackError, exc.ResourceClosedError):
        raise
    except BaseException as e:
        self._handle_dbapi_exception(
            e, util.text_type(statement), parameters, None, None
        )
    # refuse to execute on a deactivated (e.g. rolled-back) transaction
    if (
        self._transaction
        and not self._transaction.is_active
        or (
            self._nested_transaction
            and not self._nested_transaction.is_active
        )
    ):
        self._invalid_transaction()
    if self._is_future and self._transaction is None:
        # 2.0-style "begin once" behavior
        self._autobegin()
    context.pre_exec()
    # the context may have rewritten both statement and parameters
    cursor, statement, parameters = (
        context.cursor,
        context.statement,
        context.parameters,
    )
    if not context.executemany:
        # single execution uses the sole parameter set directly
        parameters = parameters[0]
    if self._has_events or self.engine._has_events:
        for fn in self.dispatch.before_cursor_execute:
            statement, parameters = fn(
                self,
                cursor,
                statement,
                parameters,
                context,
                context.executemany,
            )
    if self._echo:
        self.engine.logger.info(statement)
        stats = context._get_cache_stats()
        if not self.engine.hide_parameters:
            self.engine.logger.info(
                "[%s] %r",
                stats,
                sql_util._repr_params(
                    parameters, batches=10, ismulti=context.executemany
                ),
            )
        else:
            self.engine.logger.info(
                "[%s] [SQL parameters hidden due to hide_parameters=True]"
                % (stats,)
            )
    evt_handled = False
    try:
        # three execution paths: executemany, no-params, single; each
        # allows dialect-level event hooks to take over entirely
        if context.executemany:
            if self.dialect._has_events:
                for fn in self.dialect.dispatch.do_executemany:
                    if fn(cursor, statement, parameters, context):
                        evt_handled = True
                        break
            if not evt_handled:
                self.dialect.do_executemany(
                    cursor, statement, parameters, context
                )
        elif not parameters and context.no_parameters:
            if self.dialect._has_events:
                for fn in self.dialect.dispatch.do_execute_no_params:
                    if fn(cursor, statement, context):
                        evt_handled = True
                        break
            if not evt_handled:
                self.dialect.do_execute_no_params(
                    cursor, statement, context
                )
        else:
            if self.dialect._has_events:
                for fn in self.dialect.dispatch.do_execute:
                    if fn(cursor, statement, parameters, context):
                        evt_handled = True
                        break
            if not evt_handled:
                self.dialect.do_execute(
                    cursor, statement, parameters, context
                )
        if self._has_events or self.engine._has_events:
            self.dispatch.after_cursor_execute(
                self,
                cursor,
                statement,
                parameters,
                context,
                context.executemany,
            )
        context.post_exec()
        result = context._setup_result_proxy()
        if not self._is_future:
            # legacy autocommit / autoclose handling
            should_close_with_result = branched.should_close_with_result
            if not result._soft_closed and should_close_with_result:
                result._autoclose_connection = True
            if (
                # usually we're in a transaction so avoid relatively
                # expensive / legacy should_autocommit call
                self._transaction is None
                and context.should_autocommit
            ):
                self._commit_impl(autocommit=True)
            # for "connectionless" execution, we have to close this
            # Connection after the statement is complete.
            # legacy stuff.
            if should_close_with_result and context._soft_closed:
                assert not self._is_future
                # CursorResult already exhausted rows / has no rows.
                # close us now
                branched.close()
    except BaseException as e:
        self._handle_dbapi_exception(
            e, statement, parameters, cursor, context
        )
    return result
def _cursor_execute(self, cursor, statement, parameters, context=None):
    """Execute a statement + params on the given cursor.

    Adds appropriate logging and exception handling.

    This method is used by DefaultDialect for special-case
    executions, such as for sequences and column defaults.
    The path of statement execution in the majority of cases
    terminates at _execute_context().

    """
    if self._has_events or self.engine._has_events:
        for fn in self.dispatch.before_cursor_execute:
            statement, parameters = fn(
                self, cursor, statement, parameters, context, False
            )
    if self._echo:
        self.engine.logger.info(statement)
        self.engine.logger.info("%r", parameters)
    try:
        # dialect-level do_execute hooks may take over; the for/else
        # falls through to the dialect's own do_execute when none do
        for fn in (
            ()
            if not self.dialect._has_events
            else self.dialect.dispatch.do_execute
        ):
            if fn(cursor, statement, parameters, context):
                break
        else:
            self.dialect.do_execute(cursor, statement, parameters, context)
    except BaseException as e:
        self._handle_dbapi_exception(
            e, statement, parameters, cursor, context
        )
    if self._has_events or self.engine._has_events:
        self.dispatch.after_cursor_execute(
            self, cursor, statement, parameters, context, False
        )
def _safe_close_cursor(self, cursor):
    """Close the given cursor, catching exceptions
    and turning into log warnings.

    """
    try:
        cursor.close()
    except Exception:
        # log the error through the connection pool's logger.
        self.engine.pool.logger.error(
            "Error closing cursor", exc_info=True
        )
# guards against recursive invocation while an exception is being handled
_reentrant_error = False
# set True when the current exception was classified as a disconnect
_is_disconnect = False

def _handle_dbapi_exception(
    self, e, statement, parameters, cursor, context
):
    """Translate, event-dispatch, and re-raise a DBAPI-level exception.

    Wraps DBAPI errors in :class:`.DBAPIError` subclasses, runs the
    ``handle_error`` event chain, performs disconnect invalidation and
    autorollback, then re-raises.
    """
    exc_info = sys.exc_info()
    # e.g. KeyboardInterrupt / SystemExit / GreenletExit
    is_exit_exception = not isinstance(e, Exception)
    if not self._is_disconnect:
        self._is_disconnect = (
            isinstance(e, self.dialect.dbapi.Error)
            and not self.closed
            and self.dialect.is_disconnect(
                e,
                self._dbapi_connection if not self.invalidated else None,
                cursor,
            )
        ) or (is_exit_exception and not self.closed)
    invalidate_pool_on_disconnect = not is_exit_exception
    if self._reentrant_error:
        # already inside this handler: wrap and re-raise immediately to
        # avoid infinite recursion
        util.raise_(
            exc.DBAPIError.instance(
                statement,
                parameters,
                e,
                self.dialect.dbapi.Error,
                hide_parameters=self.engine.hide_parameters,
                dialect=self.dialect,
                ismulti=context.executemany
                if context is not None
                else None,
            ),
            with_traceback=exc_info[2],
            from_=e,
        )
    self._reentrant_error = True
    try:
        # non-DBAPI error - if we already got a context,
        # or there's no string statement, don't wrap it
        should_wrap = isinstance(e, self.dialect.dbapi.Error) or (
            statement is not None
            and context is None
            and not is_exit_exception
        )
        if should_wrap:
            sqlalchemy_exception = exc.DBAPIError.instance(
                statement,
                parameters,
                e,
                self.dialect.dbapi.Error,
                hide_parameters=self.engine.hide_parameters,
                connection_invalidated=self._is_disconnect,
                dialect=self.dialect,
                ismulti=context.executemany
                if context is not None
                else None,
            )
        else:
            sqlalchemy_exception = None
        newraise = None
        if (
            self._has_events or self.engine._has_events
        ) and not self._execution_options.get(
            "skip_user_error_events", False
        ):
            ctx = ExceptionContextImpl(
                e,
                sqlalchemy_exception,
                self.engine,
                self,
                cursor,
                statement,
                parameters,
                context,
                self._is_disconnect,
                invalidate_pool_on_disconnect,
            )
            for fn in self.dispatch.handle_error:
                try:
                    # handler returns an exception;
                    # call next handler in a chain
                    per_fn = fn(ctx)
                    if per_fn is not None:
                        ctx.chained_exception = newraise = per_fn
                except Exception as _raised:
                    # handler raises an exception - stop processing
                    newraise = _raised
                    break
            # handlers may override the disconnect classification
            if self._is_disconnect != ctx.is_disconnect:
                self._is_disconnect = ctx.is_disconnect
                if sqlalchemy_exception:
                    sqlalchemy_exception.connection_invalidated = (
                        ctx.is_disconnect
                    )
            # set up potentially user-defined value for
            # invalidate pool.
            invalidate_pool_on_disconnect = (
                ctx.invalidate_pool_on_disconnect
            )
        if should_wrap and context:
            context.handle_dbapi_exception(e)
        if not self._is_disconnect:
            if cursor:
                self._safe_close_cursor(cursor)
            with util.safe_reraise(warn_only=True):
                self._autorollback()
        if newraise:
            util.raise_(newraise, with_traceback=exc_info[2], from_=e)
        elif should_wrap:
            util.raise_(
                sqlalchemy_exception, with_traceback=exc_info[2], from_=e
            )
        else:
            util.raise_(exc_info[1], with_traceback=exc_info[2])
    finally:
        # restore class-level defaults for the reentrancy/disconnect flags
        del self._reentrant_error
        if self._is_disconnect:
            del self._is_disconnect
            if not self.invalidated:
                dbapi_conn_wrapper = self._dbapi_connection
                if invalidate_pool_on_disconnect:
                    self.engine.pool._invalidate(dbapi_conn_wrapper, e)
                self.invalidate(e)
        if self.should_close_with_result:
            assert not self._is_future
            self.close()
@classmethod
def _handle_dbapi_exception_noconnection(cls, e, dialect, engine):
    """Translate and re-raise a DBAPI exception raised while no
    :class:`_engine.Connection` exists yet (e.g. during connect)."""
    exc_info = sys.exc_info()
    is_disconnect = dialect.is_disconnect(e, None, None)
    should_wrap = isinstance(e, dialect.dbapi.Error)
    if should_wrap:
        sqlalchemy_exception = exc.DBAPIError.instance(
            None,
            None,
            e,
            dialect.dbapi.Error,
            hide_parameters=engine.hide_parameters,
            connection_invalidated=is_disconnect,
        )
    else:
        sqlalchemy_exception = None
    newraise = None
    if engine._has_events:
        # connection/cursor/statement/parameters are all None here
        ctx = ExceptionContextImpl(
            e,
            sqlalchemy_exception,
            engine,
            None,
            None,
            None,
            None,
            None,
            is_disconnect,
            True,
        )
        for fn in engine.dispatch.handle_error:
            try:
                # handler returns an exception;
                # call next handler in a chain
                per_fn = fn(ctx)
                if per_fn is not None:
                    ctx.chained_exception = newraise = per_fn
            except Exception as _raised:
                # handler raises an exception - stop processing
                newraise = _raised
                break
        # handlers may override the disconnect classification
        if sqlalchemy_exception and is_disconnect != ctx.is_disconnect:
            sqlalchemy_exception.connection_invalidated = (
                is_disconnect
            ) = ctx.is_disconnect
    if newraise:
        util.raise_(newraise, with_traceback=exc_info[2], from_=e)
    elif should_wrap:
        util.raise_(
            sqlalchemy_exception, with_traceback=exc_info[2], from_=e
        )
    else:
        util.raise_(exc_info[1], with_traceback=exc_info[2])
def _run_ddl_visitor(self, visitorcallable, element, **kwargs):
    """run a DDL visitor.

    This method is only here so that the MockConnection can change the
    options given to the visitor so that "checkfirst" is skipped.

    """
    visitor = visitorcallable(self.dialect, self, **kwargs)
    visitor.traverse_single(element)
@util.deprecated(
    "1.4",
    "The :meth:`_engine.Connection.transaction` "
    "method is deprecated and will be "
    "removed in a future release.  Use the :meth:`_engine.Engine.begin` "
    "context manager instead.",
)
def transaction(self, callable_, *args, **kwargs):
    r"""Execute the given function within a transaction boundary.

    The function is passed this :class:`_engine.Connection`
    as the first argument, followed by the given \*args and \**kwargs,
    e.g.::

        def do_something(conn, x, y):
            conn.execute(text("some statement"), {'x':x, 'y':y})

        conn.transaction(do_something, 5, 10)

    The operations inside the function are all invoked within the
    context of a single :class:`.Transaction`.
    Upon success, the transaction is committed.  If an
    exception is raised, the transaction is rolled back
    before propagating the exception.

    .. note::

       The :meth:`.transaction` method is superseded by
       the usage of the Python ``with:`` statement, which can
       be used with :meth:`_engine.Connection.begin`::

           with conn.begin():
               conn.execute(text("some statement"), {'x':5, 'y':10})

       As well as with :meth:`_engine.Engine.begin`::

           with engine.begin() as conn:
               conn.execute(text("some statement"), {'x':5, 'y':10})

    .. seealso::

        :meth:`_engine.Engine.begin` - engine-level transactional
        context

        :meth:`_engine.Engine.transaction` - engine-level version of
        :meth:`_engine.Connection.transaction`

    """
    kwargs["_sa_skip_warning"] = True
    trans = self.begin()
    try:
        ret = self.run_callable(callable_, *args, **kwargs)
        trans.commit()
        return ret
    # was a bare "except:" (PEP 8 E722); BaseException is the exact
    # equivalent and makes the intent explicit.  safe_reraise()
    # re-raises the original exception after the rollback.
    except BaseException:
        with util.safe_reraise():
            trans.rollback()
@util.deprecated(
    "1.4",
    "The :meth:`_engine.Connection.run_callable` "
    "method is deprecated and will "
    "be removed in a future release.  Use a context manager instead.",
)
def run_callable(self, callable_, *args, **kwargs):
    r"""Given a callable object or function, execute it, passing
    a :class:`_engine.Connection` as the first argument.

    The given \*args and \**kwargs are passed subsequent
    to the :class:`_engine.Connection` argument.

    This function, along with :meth:`_engine.Engine.run_callable`,
    allows a function to be run with a :class:`_engine.Connection`
    or :class:`_engine.Engine` object without the need to know
    which one is being dealt with.

    """
    return callable_(self, *args, **kwargs)
class ExceptionContextImpl(ExceptionContext):
    """Implement the :class:`.ExceptionContext` interface."""

    def __init__(
        self,
        exception,
        sqlalchemy_exception,
        engine,
        connection,
        cursor,
        statement,
        parameters,
        context,
        is_disconnect,
        invalidate_pool_on_disconnect,
    ):
        # NOTE(review): ``cursor`` is accepted but not stored as an
        # attribute here -- confirm against the ExceptionContext
        # interface if cursor access is expected by handlers.
        self.engine = engine
        self.connection = connection
        self.sqlalchemy_exception = sqlalchemy_exception
        self.original_exception = exception
        self.execution_context = context
        self.statement = statement
        self.parameters = parameters
        # may be mutated by handle_error listeners to reclassify the error
        self.is_disconnect = is_disconnect
        self.invalidate_pool_on_disconnect = invalidate_pool_on_disconnect
class Transaction(object):
    """Represent a database transaction in progress.

    The :class:`.Transaction` object is procured by
    calling the :meth:`_engine.Connection.begin` method of
    :class:`_engine.Connection`::

        from sqlalchemy import create_engine
        engine = create_engine("postgresql://scott:tiger@localhost/test")
        connection = engine.connect()
        trans = connection.begin()
        connection.execute(text("insert into x (a, b) values (1, 2)"))
        trans.commit()

    The object provides :meth:`.rollback` and :meth:`.commit`
    methods in order to control transaction boundaries.  It
    also implements a context manager interface so that
    the Python ``with`` statement can be used with the
    :meth:`_engine.Connection.begin` method::

        with connection.begin():
            connection.execute(text("insert into x (a, b) values (1, 2)"))

    The Transaction object is **not** threadsafe.

    .. seealso::

        :meth:`_engine.Connection.begin`

        :meth:`_engine.Connection.begin_twophase`

        :meth:`_engine.Connection.begin_nested`

    .. index::
      single: thread safety; Transaction

    """

    __slots__ = ()

    _is_root = False

    def __init__(self, connection):
        raise NotImplementedError()

    def _do_deactivate(self):
        """do whatever steps are necessary to set this transaction as
        "deactive", however leave this transaction object in place as far
        as the connection's state.

        for a "real" transaction this should roll back the transaction
        and ensure this transaction is no longer a reset agent.

        this is used for nesting of marker transactions where the marker
        can set the "real" transaction as rolled back, however it stays
        in place.

        for 2.0 we hope to remove this nesting feature.

        """
        raise NotImplementedError()

    def _do_close(self):
        raise NotImplementedError()

    def _do_rollback(self):
        raise NotImplementedError()

    def _do_commit(self):
        raise NotImplementedError()

    @property
    def is_valid(self):
        return self.is_active and not self.connection.invalidated

    def close(self):
        """Close this :class:`.Transaction`.

        If this transaction is the base transaction in a begin/commit
        nesting, the transaction will rollback().  Otherwise, the
        method returns.

        This is used to cancel a Transaction without affecting the scope of
        an enclosing transaction.

        """
        try:
            self._do_close()
        finally:
            assert not self.is_active

    def rollback(self):
        """Roll back this :class:`.Transaction`."""
        try:
            self._do_rollback()
        finally:
            assert not self.is_active

    def commit(self):
        """Commit this :class:`.Transaction`."""
        try:
            self._do_commit()
        finally:
            assert not self.is_active

    def __enter__(self):
        return self

    def __exit__(self, type_, value, traceback):
        if type_ is None and self.is_active:
            try:
                self.commit()
            # was a bare "except:" (PEP 8 E722); BaseException is the
            # exact equivalent -- safe_reraise() re-raises after rollback
            except BaseException:
                with util.safe_reraise():
                    self.rollback()
        else:
            self.rollback()
class MarkerTransaction(Transaction):
    """A placeholder transaction produced by nested ``begin()`` calls,
    delegating its active state to the "real" transaction it marks.
    .. deprecated:: 1.4 future connection for 2.0 won't support this pattern.
    """

    __slots__ = ("connection", "_is_active", "_transaction")

    def __init__(self, connection):
        current = connection._transaction
        assert current is not None
        if not current.is_active:
            raise exc.InvalidRequestError(
                "the current transaction on this connection is inactive. "
                "Please issue a rollback first."
            )
        self.connection = connection
        # attach to the innermost SAVEPOINT if one is in progress,
        # otherwise to the outermost (root) transaction
        nested = connection._nested_transaction
        self._transaction = nested if nested is not None else current
        self._is_active = True

    @property
    def is_active(self):
        # active only while both this marker and its target are active
        return self._is_active and self._transaction.is_active

    def _deactivate(self):
        self._is_active = False

    def _do_close(self):
        # closing a marker never rolls back the real transaction
        self._deactivate()

    def _do_rollback(self):
        # rolling back a marker cancels the real transaction as well
        if self._is_active:
            try:
                self._transaction._do_deactivate()
            finally:
                self._deactivate()

    def _do_commit(self):
        # committing a marker is a no-op on the real transaction
        self._deactivate()
class RootTransaction(Transaction):
    """The outermost ("real") transaction on a :class:`_engine.Connection`.
    Emits the actual BEGIN / ROLLBACK / COMMIT through the connection's
    ``_*_impl`` hooks, publishes itself as ``connection._transaction`` and,
    where possible, registers as the pooled DBAPI connection's reset agent.
    """

    _is_root = True
    __slots__ = ("connection", "is_active")

    def __init__(self, connection):
        assert connection._transaction is None
        self.connection = connection
        # emit BEGIN before publishing ourselves on the connection
        self._connection_begin_impl()
        connection._transaction = self
        self.is_active = True
        # the SingletonThreadPool used with sqlite memory can share the same
        # DBAPI connection / fairy among multiple Connection objects. while
        # this is not ideal, it is a still-supported use case which at the
        # moment occurs in the test suite due to how some of pytest fixtures
        # work out
        if connection._dbapi_connection._reset_agent is None:
            connection._dbapi_connection._reset_agent = self

    def _deactivate_from_connection(self):
        # Mark inactive and release the reset-agent slot on the DBAPI
        # connection; warn if we were already detached.
        if self.is_active:
            assert self.connection._transaction is self
            self.is_active = False
            if (
                self.connection._dbapi_connection is not None
                and self.connection._dbapi_connection._reset_agent is self
            ):
                self.connection._dbapi_connection._reset_agent = None
        elif self.connection._transaction is not self:
            util.warn("transaction already deassociated from connection")
        # we have tests that want to make sure the pool handles this
        # correctly. TODO: how to disable internal assertions cleanly?
        # else:
        #    if self.connection._dbapi_connection is not None:
        #        assert (
        #            self.connection._dbapi_connection._reset_agent is not self
        #        )

    def _do_deactivate(self):
        # called from a MarkerTransaction to cancel this root transaction.
        # the transaction stays in place as connection._transaction, but
        # is no longer active and is no longer the reset agent for the
        # pooled connection. the connection won't support a new begin()
        # until this transaction is explicitly closed, rolled back,
        # or committed.
        assert self.connection._transaction is self
        if self.is_active:
            self._connection_rollback_impl()
        # handle case where a savepoint was created inside of a marker
        # transaction that refers to a root. nested has to be cancelled
        # also.
        if self.connection._nested_transaction:
            self.connection._nested_transaction._cancel()
        self._deactivate_from_connection()

    # the three _connection_*_impl hooks are overridden by
    # TwoPhaseTransaction to route through the two-phase variants
    def _connection_begin_impl(self):
        self.connection._begin_impl(self)

    def _connection_rollback_impl(self):
        self.connection._rollback_impl()

    def _connection_commit_impl(self):
        self.connection._commit_impl()

    def _close_impl(self, try_deactivate=False):
        try:
            if self.is_active:
                self._connection_rollback_impl()
            if self.connection._nested_transaction:
                self.connection._nested_transaction._cancel()
        finally:
            # always detach from the connection, even if the rollback
            # raised, so the Transaction invariants hold
            if self.is_active or try_deactivate:
                self._deactivate_from_connection()
            if self.connection._transaction is self:
                self.connection._transaction = None
        assert not self.is_active
        assert self.connection._transaction is not self

    def _do_close(self):
        self._close_impl()

    def _do_rollback(self):
        self._close_impl(try_deactivate=True)

    def _do_commit(self):
        if self.is_active:
            assert self.connection._transaction is self
            try:
                self._connection_commit_impl()
            finally:
                # whether or not commit succeeds, cancel any
                # nested transactions, make this transaction "inactive"
                # and remove it as a reset agent
                if self.connection._nested_transaction:
                    self.connection._nested_transaction._cancel()
                self._deactivate_from_connection()
            # ...however only remove as the connection's current transaction
            # if commit succeeded. otherwise it stays on so that a rollback
            # needs to occur.
            self.connection._transaction = None
        else:
            if self.connection._transaction is self:
                self.connection._invalid_transaction()
            else:
                raise exc.InvalidRequestError("This transaction is inactive")
        assert not self.is_active
        assert self.connection._transaction is not self
class NestedTransaction(Transaction):
    """Represent a 'nested', or SAVEPOINT transaction.
    A new :class:`.NestedTransaction` object may be procured
    using the :meth:`_engine.Connection.begin_nested` method.
    The interface is the same as that of :class:`.Transaction`.
    """

    __slots__ = ("connection", "is_active", "_savepoint", "_previous_nested")

    def __init__(self, connection):
        # a SAVEPOINT requires an enclosing (root) transaction
        assert connection._transaction is not None
        self.connection = connection
        self._savepoint = self.connection._savepoint_impl()
        self.is_active = True
        # savepoints form a linked stack via _previous_nested
        self._previous_nested = connection._nested_transaction
        connection._nested_transaction = self

    def _deactivate_from_connection(self):
        # pop ourselves off the connection's savepoint stack
        if self.connection._nested_transaction is self:
            self.connection._nested_transaction = self._previous_nested
        else:
            util.warn(
                "nested transaction already deassociated from connection"
            )

    def _cancel(self):
        # called by RootTransaction when the outer transaction is
        # committed, rolled back, or closed to cancel all savepoints
        # without any action being taken
        self.is_active = False
        self._deactivate_from_connection()
        if self._previous_nested:
            self._previous_nested._cancel()

    def _close_impl(self, deactivate_from_connection):
        try:
            # only emit ROLLBACK TO SAVEPOINT while the enclosing
            # transaction is still viable
            if self.is_active and self.connection._transaction.is_active:
                self.connection._rollback_to_savepoint_impl(self._savepoint)
        finally:
            self.is_active = False
            if deactivate_from_connection:
                self._deactivate_from_connection()

    def _do_deactivate(self):
        self._close_impl(False)

    def _do_close(self):
        self._close_impl(True)

    def _do_rollback(self):
        self._close_impl(True)

    def _do_commit(self):
        if self.is_active:
            try:
                self.connection._release_savepoint_impl(self._savepoint)
            finally:
                # nested trans becomes inactive on failed release
                # unconditionally. this prevents it from trying to
                # emit SQL when it rolls back.
                self.is_active = False
            # but only de-associate from connection if it succeeded
            self._deactivate_from_connection()
        else:
            if self.connection._nested_transaction is self:
                self.connection._invalid_transaction()
            else:
                raise exc.InvalidRequestError(
                    "This nested transaction is inactive"
                )
class TwoPhaseTransaction(RootTransaction):
    """Represent a two-phase transaction.
    A new :class:`.TwoPhaseTransaction` object may be procured
    using the :meth:`_engine.Connection.begin_twophase` method.
    The interface is the same as that of :class:`.Transaction`
    with the addition of the :meth:`prepare` method.
    """

    __slots__ = ("connection", "is_active", "xid", "_is_prepared")

    def __init__(self, connection, xid):
        # xid/_is_prepared are set before the base __init__, which emits
        # the two-phase BEGIN via the overridden _connection_begin_impl()
        # (which receives ``self`` and presumably reads xid -- the dialect
        # side is not visible here).
        self._is_prepared = False
        self.xid = xid
        super(TwoPhaseTransaction, self).__init__(connection)

    def prepare(self):
        """Prepare this :class:`.TwoPhaseTransaction`.
        After a PREPARE, the transaction can be committed.
        """
        if not self.is_active:
            raise exc.InvalidRequestError("This transaction is inactive")
        self.connection._prepare_twophase_impl(self.xid)
        self._is_prepared = True

    # route begin/rollback/commit through the two-phase variants, which
    # carry the xid and prepared state
    def _connection_begin_impl(self):
        self.connection._begin_twophase_impl(self)

    def _connection_rollback_impl(self):
        self.connection._rollback_twophase_impl(self.xid, self._is_prepared)

    def _connection_commit_impl(self):
        self.connection._commit_twophase_impl(self.xid, self._is_prepared)
class Engine(Connectable, log.Identified):
    """
    Connects a :class:`~sqlalchemy.pool.Pool` and
    :class:`~sqlalchemy.engine.interfaces.Dialect` together to provide a
    source of database connectivity and behavior.
    An :class:`_engine.Engine` object is instantiated publicly using the
    :func:`~sqlalchemy.create_engine` function.
    .. seealso::
    :doc:`/core/engines`
    :ref:`connections_toplevel`
    """

    # class-level defaults; instances/subclasses override as needed
    _execution_options = _EMPTY_EXECUTION_OPTS
    _has_events = False
    _connection_cls = Connection
    _sqla_logger_namespace = "sqlalchemy.engine.Engine"
    _is_future = False
    _schema_translate_map = None

    def __init__(
        self,
        pool,
        dialect,
        url,
        logging_name=None,
        echo=None,
        query_cache_size=500,
        execution_options=None,
        hide_parameters=False,
    ):
        self.pool = pool
        self.url = url
        self.dialect = dialect
        if logging_name:
            self.logging_name = logging_name
        self.echo = echo
        self.hide_parameters = hide_parameters
        # query_cache_size == 0 disables the compiled-statement cache
        if query_cache_size != 0:
            self._compiled_cache = util.LRUCache(
                query_cache_size, size_alert=self._lru_size_alert
            )
        else:
            self._compiled_cache = None
        log.instance_logger(self, echoflag=echo)
        if execution_options:
            self.update_execution_options(**execution_options)

    def _lru_size_alert(self, cache):
        # callback invoked by the LRUCache when it prunes; informational only
        if self._should_log_info:
            self.logger.info(
                "Compiled cache size pruning from %d items to %d. "
                "Increase cache size to reduce the frequency of pruning.",
                len(cache),
                cache.capacity,
            )

    @property
    def engine(self):
        # an Engine is its own .engine, mirroring Connection.engine
        return self

    def clear_compiled_cache(self):
        """Clear the compiled cache associated with the dialect.
        This applies **only** to the built-in cache that is established
        via the :paramref:`_engine.create_engine.query_cache_size` parameter.
        It will not impact any dictionary caches that were passed via the
        :paramref:`.Connection.execution_options.query_cache` parameter.
        .. versionadded:: 1.4
        """
        if self._compiled_cache:
            self._compiled_cache.clear()

    def update_execution_options(self, **opt):
        r"""Update the default execution_options dictionary
        of this :class:`_engine.Engine`.
        The given keys/values in \**opt are added to the
        default execution options that will be used for
        all connections. The initial contents of this dictionary
        can be sent via the ``execution_options`` parameter
        to :func:`_sa.create_engine`.
        .. seealso::
        :meth:`_engine.Connection.execution_options`
        :meth:`_engine.Engine.execution_options`
        """
        # dispatch/dialect hooks are notified so event listeners and the
        # dialect can react to option changes
        self._execution_options = self._execution_options.union(opt)
        self.dispatch.set_engine_execution_options(self, opt)
        self.dialect.set_engine_execution_options(self, opt)

    def execution_options(self, **opt):
        """Return a new :class:`_engine.Engine` that will provide
        :class:`_engine.Connection` objects with the given execution options.
        The returned :class:`_engine.Engine` remains related to the original
        :class:`_engine.Engine` in that it shares the same connection pool and
        other state:
        * The :class:`_pool.Pool` used by the new :class:`_engine.Engine`
        is the
        same instance. The :meth:`_engine.Engine.dispose`
        method will replace
        the connection pool instance for the parent engine as well
        as this one.
        * Event listeners are "cascaded" - meaning, the new
        :class:`_engine.Engine`
        inherits the events of the parent, and new events can be associated
        with the new :class:`_engine.Engine` individually.
        * The logging configuration and logging_name is copied from the parent
        :class:`_engine.Engine`.
        The intent of the :meth:`_engine.Engine.execution_options` method is
        to implement "sharding" schemes where multiple :class:`_engine.Engine`
        objects refer to the same connection pool, but are differentiated
        by options that would be consumed by a custom event::
        primary_engine = create_engine("mysql://")
        shard1 = primary_engine.execution_options(shard_id="shard1")
        shard2 = primary_engine.execution_options(shard_id="shard2")
        Above, the ``shard1`` engine serves as a factory for
        :class:`_engine.Connection`
        objects that will contain the execution option
        ``shard_id=shard1``, and ``shard2`` will produce
        :class:`_engine.Connection`
        objects that contain the execution option ``shard_id=shard2``.
        An event handler can consume the above execution option to perform
        a schema switch or other operation, given a connection. Below
        we emit a MySQL ``use`` statement to switch databases, at the same
        time keeping track of which database we've established using the
        :attr:`_engine.Connection.info` dictionary,
        which gives us a persistent
        storage space that follows the DBAPI connection::
        from sqlalchemy import event
        from sqlalchemy.engine import Engine
        shards = {"default": "base", shard_1: "db1", "shard_2": "db2"}
        @event.listens_for(Engine, "before_cursor_execute")
        def _switch_shard(conn, cursor, stmt,
                params, context, executemany):
            shard_id = conn._execution_options.get('shard_id', "default")
            current_shard = conn.info.get("current_shard", None)
            if current_shard != shard_id:
                cursor.execute("use %s" % shards[shard_id])
                conn.info["current_shard"] = shard_id
        .. seealso::
        :meth:`_engine.Connection.execution_options`
        - update execution options
        on a :class:`_engine.Connection` object.
        :meth:`_engine.Engine.update_execution_options`
        - update the execution
        options for a given :class:`_engine.Engine` in place.
        :meth:`_engine.Engine.get_execution_options`
        """
        # _option_cls is OptionEngine, assigned at module bottom
        return self._option_cls(self, opt)

    def get_execution_options(self):
        """Get the non-SQL options which will take effect during execution.
        .. versionadded: 1.3
        .. seealso::
        :meth:`_engine.Engine.execution_options`
        """
        return self._execution_options

    @property
    def name(self):
        """String name of the :class:`~sqlalchemy.engine.interfaces.Dialect`
        in use by this :class:`Engine`."""
        return self.dialect.name

    @property
    def driver(self):
        """Driver name of the :class:`~sqlalchemy.engine.interfaces.Dialect`
        in use by this :class:`Engine`."""
        return self.dialect.driver

    echo = log.echo_property()

    def __repr__(self):
        return "Engine(%r)" % (self.url,)

    def dispose(self):
        """Dispose of the connection pool used by this
        :class:`_engine.Engine`.
        This has the effect of fully closing all **currently checked in**
        database connections. Connections that are still checked out
        will **not** be closed, however they will no longer be associated
        with this :class:`_engine.Engine`,
        so when they are closed individually,
        eventually the :class:`_pool.Pool` which they are associated with will
        be garbage collected and they will be closed out fully, if
        not already closed on checkin.
        A new connection pool is created immediately after the old one has
        been disposed. This new pool, like all SQLAlchemy connection pools,
        does not make any actual connections to the database until one is
        first requested, so as long as the :class:`_engine.Engine`
        isn't used again,
        no new connections will be made.
        .. seealso::
        :ref:`engine_disposal`
        """
        self.pool.dispose()
        self.pool = self.pool.recreate()
        self.dispatch.engine_disposed(self)

    def _execute_default(self, default):
        # execute a column default (e.g. a Sequence) on a throwaway connection
        with self.connect() as conn:
            return conn._execute_default(default, (), {})

    @contextlib.contextmanager
    def _optional_conn_ctx_manager(self, connection=None):
        # yield the given connection unmanaged, or a fresh one that is
        # closed on exit
        if connection is None:
            with self.connect() as conn:
                yield conn
        else:
            yield connection

    class _trans_ctx(object):
        # context manager returned by Engine.begin(): commits on clean
        # exit, rolls back on exception
        def __init__(self, conn, transaction, close_with_result):
            self.conn = conn
            self.transaction = transaction
            self.close_with_result = close_with_result

        def __enter__(self):
            return self.conn

        def __exit__(self, type_, value, traceback):
            if type_ is not None:
                self.transaction.rollback()
            else:
                if self.transaction.is_active:
                    self.transaction.commit()
            if not self.close_with_result:
                self.conn.close()

    def begin(self, close_with_result=False):
        """Return a context manager delivering a :class:`_engine.Connection`
        with a :class:`.Transaction` established.
        E.g.::
        with engine.begin() as conn:
            conn.execute(
                text("insert into table (x, y, z) values (1, 2, 3)")
            )
            conn.execute(text("my_special_procedure(5)"))
        Upon successful operation, the :class:`.Transaction`
        is committed. If an error is raised, the :class:`.Transaction`
        is rolled back.
        The ``close_with_result`` flag is normally ``False``, and indicates
        that the :class:`_engine.Connection` will be closed when the operation
        is complete. When set to ``True``, it indicates the
        :class:`_engine.Connection` is in "single use" mode, where the
        :class:`_engine.CursorResult` returned by the first call to
        :meth:`_engine.Connection.execute` will close the
        :class:`_engine.Connection` when
        that :class:`_engine.CursorResult` has exhausted all result rows.
        .. seealso::
        :meth:`_engine.Engine.connect` - procure a
        :class:`_engine.Connection` from
        an :class:`_engine.Engine`.
        :meth:`_engine.Connection.begin` - start a :class:`.Transaction`
        for a particular :class:`_engine.Connection`.
        """
        # future (2.0-style) connections do not accept close_with_result
        if self._connection_cls._is_future:
            conn = self.connect()
        else:
            conn = self.connect(close_with_result=close_with_result)
        try:
            trans = conn.begin()
        except:
            # begin() failed: close the connection, then re-raise
            with util.safe_reraise():
                conn.close()
        return Engine._trans_ctx(conn, trans, close_with_result)

    @util.deprecated(
        "1.4",
        "The :meth:`_engine.Engine.transaction` "
        "method is deprecated and will be "
        "removed in a future release. Use the :meth:`_engine.Engine.begin` "
        "context "
        "manager instead.",
    )
    def transaction(self, callable_, *args, **kwargs):
        r"""Execute the given function within a transaction boundary.
        The function is passed a :class:`_engine.Connection` newly procured
        from :meth:`_engine.Engine.connect` as the first argument,
        followed by the given \*args and \**kwargs.
        e.g.::
        def do_something(conn, x, y):
            conn.execute(text("some statement"), {'x':x, 'y':y})
        engine.transaction(do_something, 5, 10)
        The operations inside the function are all invoked within the
        context of a single :class:`.Transaction`.
        Upon success, the transaction is committed. If an
        exception is raised, the transaction is rolled back
        before propagating the exception.
        .. note::
        The :meth:`.transaction` method is superseded by
        the usage of the Python ``with:`` statement, which can
        be used with :meth:`_engine.Engine.begin`::
        with engine.begin() as conn:
            conn.execute(text("some statement"), {'x':5, 'y':10})
        .. seealso::
        :meth:`_engine.Engine.begin` - engine-level transactional
        context
        :meth:`_engine.Connection.transaction`
        - connection-level version of
        :meth:`_engine.Engine.transaction`
        """
        # suppress the nested deprecation warning from Connection.transaction
        kwargs["_sa_skip_warning"] = True
        with self.connect() as conn:
            return conn.transaction(callable_, *args, **kwargs)

    @util.deprecated(
        "1.4",
        "The :meth:`_engine.Engine.run_callable` "
        "method is deprecated and will be "
        "removed in a future release. Use the :meth:`_engine.Engine.connect` "
        "context manager instead.",
    )
    def run_callable(self, callable_, *args, **kwargs):
        r"""Given a callable object or function, execute it, passing
        a :class:`_engine.Connection` as the first argument.
        The given \*args and \**kwargs are passed subsequent
        to the :class:`_engine.Connection` argument.
        This function, along with :meth:`_engine.Connection.run_callable`,
        allows a function to be run with a :class:`_engine.Connection`
        or :class:`_engine.Engine` object without the need to know
        which one is being dealt with.
        """
        # suppress the nested deprecation warning from Connection.run_callable
        kwargs["_sa_skip_warning"] = True
        with self.connect() as conn:
            return conn.run_callable(callable_, *args, **kwargs)

    def _run_ddl_visitor(self, visitorcallable, element, **kwargs):
        # used by MetaData.create_all/drop_all; runs inside a transaction
        with self.begin() as conn:
            conn._run_ddl_visitor(visitorcallable, element, **kwargs)

    @util.deprecated_20(
        ":meth:`_engine.Engine.execute`",
        alternative="All statement execution in SQLAlchemy 2.0 is performed "
        "by the :meth:`_engine.Connection.execute` method of "
        ":class:`_engine.Connection`, "
        "or in the ORM by the :meth:`.Session.execute` method of "
        ":class:`.Session`.",
    )
    def execute(self, statement, *multiparams, **params):
        """Executes the given construct and returns a
        :class:`_engine.CursorResult`.
        The arguments are the same as those used by
        :meth:`_engine.Connection.execute`.
        Here, a :class:`_engine.Connection` is acquired using the
        :meth:`_engine.Engine.connect` method, and the statement executed
        with that connection. The returned :class:`_engine.CursorResult`
        is flagged
        such that when the :class:`_engine.CursorResult` is exhausted and its
        underlying cursor is closed, the :class:`_engine.Connection`
        created here
        will also be closed, which allows its associated DBAPI connection
        resource to be returned to the connection pool.
        """
        connection = self.connect(close_with_result=True)
        return connection.execute(statement, *multiparams, **params)

    @util.deprecated_20(
        ":meth:`_engine.Engine.scalar`",
        alternative="All statement execution in SQLAlchemy 2.0 is performed "
        "by the :meth:`_engine.Connection.execute` method of "
        ":class:`_engine.Connection`, "
        "or in the ORM by the :meth:`.Session.execute` method of "
        ":class:`.Session`; the :meth:`_future.Result.scalar` "
        "method can then be "
        "used to return a scalar result.",
    )
    def scalar(self, statement, *multiparams, **params):
        """Executes and returns the first column of the first row.
        The underlying result/cursor is closed after execution.
        """
        return self.execute(statement, *multiparams, **params).scalar()

    def _execute_clauseelement(
        self,
        elem,
        multiparams=None,
        params=None,
        execution_options=_EMPTY_EXECUTION_OPTS,
    ):
        # internal: Connectable interface, single-use connection
        connection = self.connect(close_with_result=True)
        return connection._execute_clauseelement(
            elem, multiparams, params, execution_options
        )

    def _execute_compiled(
        self,
        compiled,
        multiparams,
        params,
        execution_options=_EMPTY_EXECUTION_OPTS,
    ):
        # internal: Connectable interface, single-use connection
        connection = self.connect(close_with_result=True)
        return connection._execute_compiled(
            compiled, multiparams, params, execution_options
        )

    def connect(self, close_with_result=False):
        """Return a new :class:`_engine.Connection` object.
        The :class:`_engine.Connection` object is a facade that uses a DBAPI
        connection internally in order to communicate with the database. This
        connection is procured from the connection-holding :class:`_pool.Pool`
        referenced by this :class:`_engine.Engine`. When the
        :meth:`_engine.Connection.close` method of the
        :class:`_engine.Connection` object
        is called, the underlying DBAPI connection is then returned to the
        connection pool, where it may be used again in a subsequent call to
        :meth:`_engine.Engine.connect`.
        """
        return self._connection_cls(self, close_with_result=close_with_result)

    @util.deprecated(
        "1.4",
        "The :meth:`_engine.Engine.table_names` "
        "method is deprecated and will be "
        "removed in a future release. Please refer to "
        ":meth:`_reflection.Inspector.get_table_names`.",
    )
    def table_names(self, schema=None, connection=None):
        """Return a list of all table names available in the database.
        :param schema: Optional, retrieve names from a non-default schema.
        :param connection: Optional, use a specified connection.
        """
        with self._optional_conn_ctx_manager(connection) as conn:
            insp = inspection.inspect(conn)
            return insp.get_table_names(schema)

    @util.deprecated(
        "1.4",
        "The :meth:`_engine.Engine.has_table` "
        "method is deprecated and will be "
        "removed in a future release. Please refer to "
        ":meth:`_reflection.Inspector.has_table`.",
    )
    def has_table(self, table_name, schema=None):
        """Return True if the given backend has a table of the given name.
        .. seealso::
        :ref:`metadata_reflection_inspector` - detailed schema inspection
        using the :class:`_reflection.Inspector` interface.
        :class:`.quoted_name` - used to pass quoting information along
        with a schema identifier.
        """
        with self._optional_conn_ctx_manager(None) as conn:
            insp = inspection.inspect(conn)
            return insp.has_table(table_name, schema=schema)

    def _wrap_pool_connect(self, fn, connection):
        # invoke a pool connect function, translating DBAPI errors into
        # SQLAlchemy exceptions when no Connection is present to do so
        dialect = self.dialect
        try:
            return fn()
        except dialect.dbapi.Error as e:
            if connection is None:
                Connection._handle_dbapi_exception_noconnection(
                    e, dialect, self
                )
            else:
                # a Connection exists; let its own error handling apply,
                # re-raise with the original traceback
                util.raise_(
                    sys.exc_info()[1], with_traceback=sys.exc_info()[2]
                )

    def raw_connection(self, _connection=None):
        """Return a "raw" DBAPI connection from the connection pool.
        The returned object is a proxied version of the DBAPI
        connection object used by the underlying driver in use.
        The object will have all the same behavior as the real DBAPI
        connection, except that its ``close()`` method will result in the
        connection being returned to the pool, rather than being closed
        for real.
        This method provides direct DBAPI connection access for
        special situations when the API provided by
        :class:`_engine.Connection`
        is not needed. When a :class:`_engine.Connection` object is already
        present, the DBAPI connection is available using
        the :attr:`_engine.Connection.connection` accessor.
        .. seealso::
        :ref:`dbapi_connections`
        """
        return self._wrap_pool_connect(self.pool.connect, _connection)
class OptionEngineMixin(object):
    """Mixin implementing the engine returned by
    :meth:`_engine.Engine.execution_options`: shares the pool, event
    dispatch and most state with a parent ("proxied") engine while
    carrying its own execution options.
    """

    _sa_propagate_class_events = False

    def __init__(self, proxied, execution_options):
        self._proxied = proxied
        self.url = proxied.url
        self.dialect = proxied.dialect
        self.logging_name = proxied.logging_name
        self.echo = proxied.echo
        self._compiled_cache = proxied._compiled_cache
        self.hide_parameters = proxied.hide_parameters
        log.instance_logger(self, echoflag=self.echo)
        # note: this will propagate events that are assigned to the parent
        # engine after this OptionEngine is created. Since we share
        # the events of the parent we also disallow class-level events
        # to apply to the OptionEngine class directly.
        #
        # the other way this can work would be to transfer existing
        # events only, using:
        # self.dispatch._update(proxied.dispatch)
        #
        # that might be more appropriate however it would be a behavioral
        # change for logic that assigns events to the parent engine and
        # would like it to take effect for the already-created sub-engine.
        self.dispatch = self.dispatch._join(proxied.dispatch)
        self._execution_options = proxied._execution_options
        self.update_execution_options(**execution_options)

    def _get_pool(self):
        # the pool is always the parent's
        return self._proxied.pool

    def _set_pool(self, pool):
        # writes through to the parent engine
        self._proxied.pool = pool

    pool = property(_get_pool, _set_pool)

    def _get_has_events(self):
        # events may live on the parent or have been set on this instance
        return self._proxied._has_events or self.__dict__.get(
            "_has_events", False
        )

    def _set_has_events(self, value):
        self.__dict__["_has_events"] = value

    _has_events = property(_get_has_events, _set_has_events)
class OptionEngine(OptionEngineMixin, Engine):
    """An :class:`_engine.Engine` proxy produced by
    :meth:`_engine.Engine.execution_options`."""

    pass


# install as the class that Engine.execution_options() instantiates
Engine._option_cls = OptionEngine
| 35.365183 | 84 | 0.60736 |
a0a5d18479c4ba6d05db1f13153db096392f04d5 | 16,202 | py | Python | toontown/racing/RaceGlobals.py | philicheese2003/ToontownProjectAltisServer | cfa225d1bdddacdbd29b621382347fce17e1dc66 | [
"Apache-2.0"
] | 3 | 2020-01-02T08:43:36.000Z | 2020-07-05T08:59:02.000Z | toontown/racing/RaceGlobals.py | AnythingTechPro/Project-Altis | 7ead614abdb5072ca06323982de461f4e775d1b3 | [
"Apache-2.0"
] | null | null | null | toontown/racing/RaceGlobals.py | AnythingTechPro/Project-Altis | 7ead614abdb5072ca06323982de461f4e775d1b3 | [
"Apache-2.0"
] | 2 | 2021-02-25T06:02:05.000Z | 2021-06-19T03:11:22.000Z | TrackSignDuration = 15
RaceCountdown = 3
MaxRacers = 4
MaxTickets = 99999
Practice = 0
ToonBattle = 1
Circuit = 2
Speedway = 0
Rural = 1
Urban = 2
RT_Speedway_1 = 0
RT_Speedway_1_rev = 1
RT_Rural_1 = 20
RT_Rural_1_rev = 21
RT_Urban_1 = 40
RT_Urban_1_rev = 41
RT_Speedway_2 = 60
RT_Speedway_2_rev = 61
RT_Rural_2 = 62
RT_Rural_2_rev = 63
RT_Urban_2 = 64
RT_Urban_2_rev = 65
KARTING_TICKETS_HOLIDAY_MULTIPLIER = 2
def getTrackGenre(trackId):
    """Map a race track id onto its genre id (Speedway, Rural, or Urban)."""
    speedwayIds = (
        RT_Speedway_1,
        RT_Speedway_1_rev,
        RT_Speedway_2,
        RT_Speedway_2_rev,
    )
    ruralIds = (RT_Rural_1, RT_Rural_1_rev, RT_Rural_2, RT_Rural_2_rev)
    if trackId in speedwayIds:
        return Speedway
    if trackId in ruralIds:
        return Rural
    # everything else is a city track
    return Urban
RT_Speedway_1_Gags = ((923.052, -1177.431, 0.024),
(926.099, -1187.345, 0.024),
(925.68, -1197.327, 0.024),
(925.169, -1209.502, 0.024),
(394.009, 209.219, 0.025),
(279.109, 279.744, 0.025),
(204.366, 316.238, 0.025),
(118.646, 358.009, 0.025),
(-1462.098, 791.722, 0.025),
(-1459.446, 730.064, 0.025),
(-1450.731, 666.811, 0.025),
(-1438.388, 615.1, 0.025))
RT_Speedway_2_Gags = ((-355.18, -2430.1, -0.126728),
(-343.456, -2421.43, -0.0116951),
(-329.644, -2411.06, -0.0169053),
(-315.054, -2402.91, -0.0800667),
(243.293, -906.412, 0.021832),
(216.555, -910.885, -0.146125),
(192.16, -915.93, -0.242366),
(165.941, -922.381, -0.247588),
(-840.626, 2405.96, 58.4195),
(-868.154, 2370.54, 56.7396),
(-896.126, 2332.55, 53.8607),
(-921.952, 2291.16, 49.8209))
RT_Speedway_1_rev_Gags = ((1364.601, -664.452, 0.025),
(1312.491, -588.218, 0.025),
(1251.775, -509.556, 0.025),
(1214.052, -461.743, 0.025),
(-976.044, 995.072, 0.025),
(-1043.917, 1018.78, 0.025),
(-1124.555, 1038.362, 0.025),
(-1187.95, 1047.006, 0.025),
(-1174.542, -208.968, 0.025),
(-1149.34, -270.698, 0.025),
(-1121.2, -334.367, 0.025),
(-1090.627, -392.662, 0.026))
RT_Rural_1_Gags = ((814.276, -552.928, 2.107),
(847.738, -551.97, 2.106),
(889.265, -549.569, 2.107),
(922.022, -554.813, 2.106),
(1791.42, 2523.91, 2.106),
(1754.14, 2540.25, 2.107),
(1689.66, 2557.28, 2.107),
(1614.01, 2577.16, 2.106),
(-1839.0, 654.477, 86.83),
(-1894.33, 640.125, 80.39),
(-1955.3, 625.09, 73.07),
(-2016.99, 611.746, 65.86))
RT_Rural_2_Gags = ((2001.53, 560.532, 198.912),
(2002.45, 574.292, 198.912),
(2003.42, 588.612, 198.912),
(2004, 602.849, 198.912),
(-2107.4, 2209.67, 198.913),
(-2086.13, 2224.31, 198.913),
(-2058.11, 2244.31, 198.912),
(-2023.85, 2268.77, 198.912),
(-331.746, -1010.57, 222.332),
(-358.595, -1007.68, 225.129),
(-388.556, -1004.87, 228.239),
(-410.122, -1003.03, 230.482),
(69.763, -2324.5, 198.912),
(63.5314, -2334.02, 198.913),
(57.9662, -2349.14, 198.913),
(51.8838, -2363.87, 198.913))
RT_Urban_1_Gags = ((51.9952, 2431.62, 55.7053),
(39.5407, 2421.64, 65.7053),
(27.7504, 2411.67, 55.7053),
(15.55, 2401.65, 65.7053),
(-1008.36, 2116.41, 0.0246798),
(-1050.31, 2099.78, 0.025),
(-1092.26, 2083.15, 0.0253202),
(-1134.21, 2066.52, 0.0256404),
(-1966.68, 1139.32, 1.76981),
(-1970.46, 1120.57, 1.76981),
(-1974.18, 1101.82, 1.76981),
(-1977.93, 1084.07, 1.76981),
(1419.05, -2987.18, 0.025),
(1411.09, -3004.09, 0.025),
(1403.13, -3021.01, 0.025),
(1395.17, -3037.92, 0.025),
(948.131, -1216.77, 0.025),
(935.545, -1204.09, 0.025),
(922.959, -1191.41, 0.025),
(909.959, -1177.41, 0.025))
RT_Urban_2_Gags = ((-2761.49, -3070.97, -0.255122),
(-2730.18, -3084.09, -0.255153),
(-2701.45, -3096.26, -0.255669),
(-2669.81, -3108.9, -0.255252),
(735.479, -423.828, 23.7334),
(759.026, -427.198, 23.0068),
(783.232, -430.659, 22.2569),
(809.914, -434.476, 21.4326),
(3100.09, 240.411, 23.4672),
(3089.09, 242.019, 23.5251),
(3077.68, 243.688, 23.6857),
(3064.82, 245.567, 23.8771),
(-10.7389, 2980.48, -0.255609),
(-41.2644, 2974.53, -0.255122),
(-69.8423, 2989.98, -0.255682),
(-102.331, 2986.1, -0.255637),
(-1978.67, 588.981, -0.255685),
(-1977.07, 560.797, -0.255415),
(-1948.58, 544.782, -0.255122),
(-1943.42, 510.262, -0.255866))
RT_Urban_1_rev_Gags = ((1034.43, -366.371, 0.025),
(1051.84, -360.473, 0.025),
(1069.25, -354.575, 0.025),
(1086.66, -348.677, 0.025),
(1849.66, -2807.21, 0.0246158),
(1858.55, -2795.99, 0.0246158),
(1867.44, -2784.76, 0.0246158),
(1876.33, -2773.53, 0.0246158),
(316.342, -44.9529, 0.025),
(305.173, -63.4405, 0.025),
(294.004, -81.9281, 0.025),
(282.835, -100.416, 0.025),
(-762.377, 2979.25, 0.025),
(-753.029, 2995.69, 0.025),
(-743.681, 3012.14, 0.025),
(-734.333, 3028.58, 0.025),
(470.628, 1828.32, 55.0),
(481.284, 1836.89, 55.0),
(491.941, 1845.47, 55.0),
(502.597, 1854.04, 55.0))
Speedway_1_Boosts = (((-320, 685, 1), (415, 0, 0)),)
Speedway_1_Rev_Boosts = (((-320, 685, 0.1), (235, 0, 0)),)
Speedway_2_Boosts = (((-120, 430, 1.0), (-50, 0, 0)),)
Speedway_2_Rev_Boosts = (((176, 625, 1.0), (130, 0, 0)),)
Rural_1_Boosts = (((3132.64, 859.56, 5.0), (384.44, 363.5, 0)), ((-3050.33, -1804.97, 207.7), (229.4, 353.25, 342.9)))
Rural_1_Rev_Boosts = (((3132.64, 859.56, 5.0), (197.1, -2.25, 0)), ((-3151.34, -1569.56, 200.621), (189.46, 182.75, 195.255)))
Rural_2_Boosts = (((873.255, -593.664, 199.5), (87.715, 0, 0)), ((-1747.62, 801.56, 199.5), (-126.516, 0, 0)))
Rural_2_Rev_Boosts = (((-428.004, -243.692, 324.516), (51.428, 6, 1)), ((-384.043, 211.62, 193.5), (-127.859, 1, 0)))
Urban_1_Boosts = (((677.057, 1618.24, 0.025), (35.9995, 0, 0)), ((-2250.35, 1618.1, 0.0241526), (-154.8, 0, 0)), ((400.13, -1090.26, 0.025), (-175.204, 0, 0)))
Urban_1_Rev_Boosts = (((488.739, -2055.07, 0.025), (3.59753, 0, 0)), ((-1737.29, 588.138, 0.025), (26.3975, 0, 0)), ((-212.314, 2638.34, 0.025), (-128.404, 0, 0)))
Urban_2_Boosts = (((358.134, -1655.42, 0.3), (-4.95, 1, 0)), ((2058.77, 2560.03, 0.3), (77.31, 0, 0)), ((-3081.33, -1037.55, 0.25), (177.359, 0, 0)))
Urban_2_Rev_Boosts = (((-2007.38, 484.878, 0.25), (30.9102, 0, 0)), ((2646.51, 1455.15, 0.25), (-120.172, 0, 0)), ((-472.215, -2048.21, 0.25), (136.192, 0, 0)))
def RaceInfo2RacePadId(trackId, trackType):
    """Map a (trackId, trackType) pair to its race pad id (0-3).

    Even track ids use pads 0/2, odd (reversed) track ids use pads 1/3;
    Practice races take the lower pad of each pair.
    """
    isReversed = trackId % 2
    if trackType == Practice:
        return 1 if isReversed else 0
    return 3 if isReversed else 2
def getTrackGenreString(genreId):
    """Return the lower-case display string for a track genre id (0-2)."""
    return ('Speedway', 'Country', 'City')[genreId].lower()
def getTunnelSignName(genreId, padId):
    """Return the model name of the tunnel sign for a genre/pad combo.

    Two combinations use irregular legacy asset names. NOTE(review):
    'tunne1l_citysign' (with a digit 1) looks like a typo, but it is a
    runtime asset name and is preserved byte-for-byte -- do not "fix" it
    without renaming the corresponding model.
    """
    if (genreId, padId) == (2, 0):
        return 'tunne1l_citysign'
    if (genreId, padId) == (1, 0):
        return 'tunnel_countrysign1'
    return 'tunnel%s_%ssign' % (padId + 1, getTrackGenreString(genreId))
# Inverse of RaceInfo2RacePadId: pad id -> (reversed flag, trackType, x).
# Index 0 selects forward (0) vs reversed (1) track lists in
# getTrackListByType; index 1 is the race type. The third value (always 3)
# is passed through by getNextRaceInfo -- its meaning is not evident from
# this file; TODO confirm (possibly a lap or racer count).
RacePadId2RaceInfo = {0: (0, Practice, 3),
 1: (1, Practice, 3),
 2: (0, ToonBattle, 3),
 3: (1, ToonBattle, 3)}
def getGenreFromString(string):
    """Translate a playground/zone name into a track genre constant.

    Unrecognized strings fall back to Rural.
    """
    special = {'town': Urban, 'stadium': Speedway}
    return special.get(string, Rural)
def getTrackListByType(genre, type):
    """Return the track id list for *genre*.

    :param genre: one of the Urban/Rural/Speedway genre constants
    :param type: 0 for the forward tracks, 1 for the reversed variants
    """
    # NOTE: the original source contained a second, dead definition of this
    # function (a bare `return Rural`) that was immediately shadowed by this
    # one; the dead duplicate has been removed with no behavior change.
    genreDict = {Urban: [[RT_Urban_1, RT_Urban_2], [RT_Urban_1_rev, RT_Urban_2_rev]],
                 Rural: [[RT_Rural_1, RT_Rural_2], [RT_Rural_1_rev, RT_Rural_2_rev]],
                 Speedway: [[RT_Speedway_1, RT_Speedway_2], [RT_Speedway_1_rev, RT_Speedway_2_rev]]}
    trackIdList = genreDict.get(genre)
    return trackIdList[type]
def getCanonicalPadId(padId):
    """Fold any pad id onto the canonical 0-3 range."""
    canonical = padId % 4
    return canonical
def getNextRaceInfo(prevTrackId, genreString, padId):
    """Pick the next race for a pad, cycling tracks of the pad's genre/type.

    Returns (trackId, trackType, <third RacePadId2RaceInfo field>).
    """
    raceInfo = RacePadId2RaceInfo.get(getCanonicalPadId(padId))
    trackList = getTrackListByType(getGenreFromString(genreString), raceInfo[0])
    if prevTrackId in trackList:
        # Advance to the next track, wrapping around the list.
        nextIndex = (trackList.index(prevTrackId) + 1) % len(trackList)
        trackId = trackList[nextIndex]
    else:
        trackId = trackList[1]
    return (trackId, raceInfo[1], raceInfo[2])
# Root path of the karting track models.
TrackPath = 'phase_6/models/karting/'
# Per-track data keyed by track id. Tuple field meanings, where grounded in
# the accessors below:
#   [0] model path
#   [1] qualifying time (getQualifyingTime) -- presumably seconds
#   [2] default record time (getDefaultRecordTime)
#   [3] (battle fee, circuit fee) entry fees (getEntryFee)
#   [4] gag spawn points      [5] boost pad placements
#   [6]-[8] not referenced in this chunk: a scale/speed factor?, the race
#           music filename, and a pair of small floats -- TODO confirm.
# Note: several reversed tracks reuse the forward track's gag list (only
# Speedway 1 and Urban 1 have dedicated *_rev_Gags).
TrackDict = {RT_Speedway_1: (TrackPath + 'RT_SpeedwayA',
 240.0,
 115.0,
 (50, 500),
 RT_Speedway_1_Gags,
 Speedway_1_Boosts,
 1.0,
 'GS_Race_SS.ogg',
 (0.01, 0.015)),
 RT_Speedway_1_rev: (TrackPath + 'RT_SpeedwayA',
 240.0,
 115.0,
 (50, 500),
 RT_Speedway_1_rev_Gags,
 Speedway_1_Rev_Boosts,
 1.0,
 'GS_Race_SS.ogg',
 (0.01, 0.015)),
 RT_Speedway_2: (TrackPath + 'RT_SpeedwayB',
 335.0,
 210.0,
 (75, 1000),
 RT_Speedway_2_Gags,
 Speedway_2_Boosts,
 1.0,
 'GS_Race_SS.ogg',
 (0.01, 0.015)),
 RT_Speedway_2_rev: (TrackPath + 'RT_SpeedwayB',
 335.0,
 210.0,
 (75, 1000),
 RT_Speedway_2_Gags,
 Speedway_2_Rev_Boosts,
 1.0,
 'GS_Race_SS.ogg',
 (0.01, 0.015)),
 RT_Rural_1: (TrackPath + 'RT_RuralB',
 360.0,
 230.0,
 (100, 500),
 RT_Rural_1_Gags,
 Rural_1_Boosts,
 0.75,
 'GS_Race_RR.ogg',
 (0.003, 0.004)),
 RT_Rural_1_rev: (TrackPath + 'RT_RuralB',
 360.0,
 230.0,
 (100, 500),
 RT_Rural_1_Gags,
 Rural_1_Rev_Boosts,
 0.75,
 'GS_Race_RR.ogg',
 (0.003, 0.004)),
 RT_Rural_2: (TrackPath + 'RT_RuralB2',
 480.0,
 360.0,
 (150, 1000),
 RT_Rural_2_Gags,
 Rural_2_Boosts,
 0.75,
 'GS_Race_RR.ogg',
 (0.003, 0.004)),
 RT_Rural_2_rev: (TrackPath + 'RT_RuralB2',
 480.0,
 360.0,
 (150, 1000),
 RT_Rural_2_Gags,
 Rural_2_Rev_Boosts,
 0.75,
 'GS_Race_RR.ogg',
 (0.003, 0.004)),
 RT_Urban_1: (TrackPath + 'RT_UrbanA',
 480.0,
 305.0,
 (300, 500),
 RT_Urban_1_Gags,
 Urban_1_Boosts,
 1.0,
 'GS_Race_CC.ogg',
 (0.002, 0.003)),
 RT_Urban_1_rev: (TrackPath + 'RT_UrbanA',
 480.0,
 305.0,
 (300, 500),
 RT_Urban_1_rev_Gags,
 Urban_1_Rev_Boosts,
 1.0,
 'GS_Race_CC.ogg',
 (0.002, 0.003)),
 RT_Urban_2: (TrackPath + 'RT_UrbanB',
 480.0,
 280.0,
 (400, 1000),
 RT_Urban_2_Gags,
 Urban_2_Boosts,
 1.0,
 'GS_Race_CC.ogg',
 (0.002, 0.003)),
 RT_Urban_2_rev: (TrackPath + 'RT_UrbanB',
 480.0,
 280.0,
 (400, 1000),
 RT_Urban_2_Gags,
 Urban_2_Rev_Boosts,
 1.0,
 'GS_Race_CC.ogg',
 (0.002, 0.003))}
# Sorted list of all known track ids. sorted() behaves identically on
# Python 2 and returns a list on Python 3 as well; the original called
# .sort() on dict.keys(), which fails on Python 3 key views.
TrackIds = sorted(TrackDict)
def getEntryFee(trackId, raceType):
    """Return the ticket price to enter *trackId* for the given race type.

    Practice (or any unrecognized race type) is free; TrackDict is only
    consulted for paid race types.
    """
    if raceType == ToonBattle:
        return TrackDict[trackId][3][0]
    if raceType == Circuit:
        return TrackDict[trackId][3][1]
    return 0
def getQualifyingTime(trackId):
    """Return the time a racer must beat to qualify -- presumably seconds."""
    return TrackDict[trackId][1]
def getDefaultRecordTime(trackId):
    """Return the initial leaderboard record time for a fresh track."""
    return TrackDict[trackId][2]
# Leaderboard record periods.
Daily = 0
Weekly = 1
AllTime = 2
# Period -> numeric weight/duration; the meaning of 10/100/1000 is not
# evident from this chunk -- TODO confirm against the leaderboard code.
PeriodDict = {Daily: 10,
 Weekly: 100,
 AllTime: 1000}
PeriodIds = PeriodDict.keys()
NumRecordPeriods = len(PeriodIds)
# Number of entries kept per leaderboard period.
NumRecordsPerPeriod = 10
# Per-place values for race winnings (1st-4th); presumably payout
# multipliers -- TODO confirm.
Winnings = [4.0,
 2.0,
 1.5,
 1.15]
PracticeWinnings = 20
# Statistic indices (quals/wins tallies per genre and mode).
SpeedwayQuals = 0
RuralQuals = 1
UrbanQuals = 2
SpeedwayWins = 3
RuralWins = 4
UrbanWins = 5
CircuitWins = 6
TwoPlayerWins = 7
ThreePlayerWins = 8
FourPlayerWins = 9
CircuitSweeps = 10
CircuitQuals = 11
QualsList = [SpeedwayQuals, RuralQuals, UrbanQuals]
WinsList = [SpeedwayWins, RuralWins, UrbanWins]
# Trophy indices: three tiers per statistic, plus totals and circuit awards.
SpeedwayQuals1 = 0
SpeedwayQuals2 = 1
SpeedwayQuals3 = 2
RuralQuals1 = 3
RuralQuals2 = 4
RuralQuals3 = 5
UrbanQuals1 = 6
UrbanQuals2 = 7
UrbanQuals3 = 8
TotalQuals = 9
SpeedwayWins1 = 10
SpeedwayWins2 = 11
SpeedwayWins3 = 12
RuralWins1 = 13
RuralWins2 = 14
RuralWins3 = 15
UrbanWins1 = 16
UrbanWins2 = 17
UrbanWins3 = 18
TotalWins = 19
CircuitQuals1 = 20
CircuitQuals2 = 21
CircuitQuals3 = 22
CircuitWins1 = 23
CircuitWins2 = 24
CircuitWins3 = 25
CircuitSweeps1 = 26
CircuitSweeps2 = 27
CircuitSweeps3 = 28
GrandTouring = 29
NumTrophies = 30
# Cup ids continue directly after the trophy ids.
TenTrophyCup = 30
TwentyTrophyCup = 31
ThirtyTrophyCup = 32
TrophyCups = [TenTrophyCup, TwentyTrophyCup, ThirtyTrophyCup]
NumCups = 3
SpeedwayQualsList = [SpeedwayQuals1, SpeedwayQuals2, SpeedwayQuals3]
RuralQualsList = [RuralQuals1, RuralQuals2, RuralQuals3]
UrbanQualsList = [UrbanQuals1, UrbanQuals2, UrbanQuals3]
SpeedwayWinsList = [SpeedwayWins1, SpeedwayWins2, SpeedwayWins3]
RuralWinsList = [RuralWins1, RuralWins2, RuralWins3]
UrbanWinsList = [UrbanWins1, UrbanWins2, UrbanWins3]
CircuitWinsList = [CircuitWins1, CircuitWins2, CircuitWins3]
CircuitSweepsList = [CircuitSweeps1, CircuitSweeps2, CircuitSweeps3]
CircuitQualList = [CircuitQuals1, CircuitQuals2, CircuitQuals3]
AllQualsList = [SpeedwayQualsList, RuralQualsList, UrbanQualsList]
AllWinsList = [SpeedwayWinsList, RuralWinsList, UrbanWinsList]
# NOTE: integer division (30 / 3 == 10) under Python 2, which this file
# targets (see the `print` statement in getCircuitLoop); under Python 3
# this expression would yield a float.
TrophiesPerCup = NumTrophies / NumCups
# Thresholds for earning each trophy tier.
QualifiedRaces = [1, 10, 100]
TotalQualifiedRaces = 100
WonRaces = [1, 10, 100]
TotalWonRaces = 100
WonCircuitRaces = [1, 5, 25]
SweptCircuitRaces = [1, 5, 25]
QualifiedCircuitRaces = [1, 5, 25]
# Leaderboard subscriptions per playground: for each area key, the list of
# (track id, record period) boards -- presumably the boards displayed in
# that playground; confirm against the leaderboard display code.
LBSubscription = {'stadium': [(RT_Speedway_1, Daily),
 (RT_Speedway_1, Weekly),
 (RT_Speedway_1, AllTime),
 (RT_Speedway_1_rev, Daily),
 (RT_Speedway_1_rev, Weekly),
 (RT_Speedway_1_rev, AllTime),
 (RT_Speedway_2, Daily),
 (RT_Speedway_2, Weekly),
 (RT_Speedway_2, AllTime),
 (RT_Speedway_2_rev, Daily),
 (RT_Speedway_2_rev, Weekly),
 (RT_Speedway_2_rev, AllTime)],
 'country': [(RT_Rural_1, Daily),
 (RT_Rural_1, Weekly),
 (RT_Rural_1, AllTime),
 (RT_Rural_1_rev, Daily),
 (RT_Rural_1_rev, Weekly),
 (RT_Rural_1_rev, AllTime),
 (RT_Rural_2, Daily),
 (RT_Rural_2, Weekly),
 (RT_Rural_2, AllTime),
 (RT_Rural_2_rev, Daily),
 (RT_Rural_2_rev, Weekly),
 (RT_Rural_2_rev, AllTime)],
 'city': [(RT_Urban_1, Daily),
 (RT_Urban_1, Weekly),
 (RT_Urban_1, AllTime),
 (RT_Urban_1_rev, Daily),
 (RT_Urban_1_rev, Weekly),
 (RT_Urban_1_rev, AllTime),
 (RT_Urban_2, Daily),
 (RT_Urban_2, Weekly),
 (RT_Urban_2, AllTime),
 (RT_Urban_2_rev, Daily),
 (RT_Urban_2_rev, Weekly),
 (RT_Urban_2_rev, AllTime)]}
# Race gag (item) ids.
BANANA = 1
TURBO = 2
ANVIL = 3
PIE = 4
# Gag pools: four lists of six gag ids each. The outer index is presumably
# keyed by race standing (trailing racers draw from the turbo/anvil-heavy
# pools) -- TODO confirm against the gag-granting code.
GagFreq = [[PIE,
  BANANA,
  BANANA,
  BANANA,
  TURBO,
  PIE],
 [PIE,
  BANANA,
  BANANA,
  TURBO,
  ANVIL,
  PIE],
 [PIE,
  BANANA,
  TURBO,
  TURBO,
  ANVIL,
  PIE],
 [BANANA,
  TURBO,
  TURBO,
  TURBO,
  ANVIL,
  PIE]]
# Duration (presumably seconds) a kart stays squished after an anvil hit.
AnvilSquishDuration = 3
# The four 3-track circuit rotations (see getCircuitLoop).
CircuitLoops = [[RT_Speedway_1, RT_Rural_1, RT_Urban_1],
 [RT_Speedway_1_rev, RT_Rural_1_rev, RT_Urban_1_rev],
 [RT_Speedway_2, RT_Rural_2, RT_Urban_2],
 [RT_Speedway_2_rev, RT_Rural_2_rev, RT_Urban_2_rev]]
# Points awarded per circuit race by finishing place (1st-4th).
CircuitPoints = [10,
 8,
 6,
 4]
def getCircuitLoop(startingTrack):
    """Return the circuit loop containing *startingTrack*, rotated so it
    begins at *startingTrack*.

    Falls back to a single-element list when the track belongs to no
    known loop.
    """
    # Fix: removed a leftover debug `print loop` statement (which was also
    # Python-2-only syntax).
    for loop in CircuitLoops:
        if startingTrack in loop:
            # Rotate by doubling the loop and slicing from the start track.
            numTracks = len(loop)
            tempLoop = loop * 2
            startingIndex = tempLoop.index(startingTrack)
            return tempLoop[startingIndex:startingIndex + numTracks]
    return [startingTrack]
# Status codes describing why a racer left a race. Names suggest: a user
# request, hitting a barrier (with or without a refund), and being too
# slow -- exact semantics are defined by the consuming race code.
Exit_UserReq = 0
Exit_Barrier = 1
Exit_Slow = 2
Exit_BarrierNoRefund = 3
| 29.140288 | 163 | 0.575917 |
8d8846a821a417a67cb9d7cef3e3eae3d756a2d6 | 3,465 | py | Python | examples/pip_parse/pip_parse_test.py | axivion/rules_python | 7740b22d0bae942af0797967f2617daa19834cb3 | [
"Apache-2.0"
] | null | null | null | examples/pip_parse/pip_parse_test.py | axivion/rules_python | 7740b22d0bae942af0797967f2617daa19834cb3 | [
"Apache-2.0"
] | null | null | null | examples/pip_parse/pip_parse_test.py | axivion/rules_python | 7740b22d0bae942af0797967f2617daa19834cb3 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
import os
import subprocess
import unittest
from pathlib import Path
class PipInstallTest(unittest.TestCase):
    """End-to-end checks on artifacts produced by the pip_parse rules.

    All inputs (entry-point paths, wheel content listings) are supplied by
    the build system through environment variables.
    """
    maxDiff = None
    def test_entry_point_void_return(self):
        """An entry point whose underlying main returns None runs and
        propagates a non-zero exit status on bad flags."""
        env = os.environ.get("YAMLLINT_ENTRY_POINT")
        self.assertIsNotNone(env)
        entry_point = Path(env)
        self.assertTrue(entry_point.exists())
        proc = subprocess.run(
            [entry_point, "--version"],
            check=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        self.assertEqual(proc.stdout.decode("utf-8").strip(), "yamllint 1.26.3")
        # yamllint entry_point is of the form `def run(argv=None):`
        with self.assertRaises(subprocess.CalledProcessError) as context:
            subprocess.run(
                [entry_point, "--option-does-not-exist"],
                check=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
            )
        self.assertIn("returned non-zero exit status 2", str(context.exception))
    def test_entry_point_int_return(self):
        """An entry point whose underlying main returns an int propagates
        that int as the process exit code."""
        env = os.environ.get("SPHINX_BUILD_ENTRY_POINT")
        self.assertIsNotNone(env)
        entry_point = Path(env)
        self.assertTrue(entry_point.exists())
        proc = subprocess.run(
            [entry_point, "--version"],
            check=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        # sphinx-build uses args[0] for its name, only assert the version here
        self.assertTrue(proc.stdout.decode("utf-8").strip().endswith("4.2.0"))
        # sphinx-build entry_point is of the form `def main(argv: List[str] = sys.argv[1:]) -> int:`
        with self.assertRaises(subprocess.CalledProcessError) as context:
            subprocess.run(
                [entry_point, "--option-does-not-exist"],
                check=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
            )
        self.assertIn("returned non-zero exit status 2", str(context.exception))
    def test_data(self):
        """The wheel's .data directory contents are exposed as expected."""
        env = os.environ.get("WHEEL_DATA_CONTENTS")
        self.assertIsNotNone(env)
        self.assertListEqual(
            env.split(" "),
            [
                "external/pypi_s3cmd/s3cmd-2.1.0.data/data/share/doc/packages/s3cmd/INSTALL.md",
                "external/pypi_s3cmd/s3cmd-2.1.0.data/data/share/doc/packages/s3cmd/LICENSE",
                "external/pypi_s3cmd/s3cmd-2.1.0.data/data/share/doc/packages/s3cmd/NEWS",
                "external/pypi_s3cmd/s3cmd-2.1.0.data/data/share/doc/packages/s3cmd/README.md",
                "external/pypi_s3cmd/s3cmd-2.1.0.data/data/share/man/man1/s3cmd.1",
                "external/pypi_s3cmd/s3cmd-2.1.0.data/scripts/s3cmd",
            ],
        )
    def test_dist_info(self):
        """The wheel's .dist-info metadata files are exposed as expected."""
        env = os.environ.get("WHEEL_DIST_INFO_CONTENTS")
        self.assertIsNotNone(env)
        self.assertListEqual(
            env.split(" "),
            [
                "external/pypi_requests/requests-2.25.1.dist-info/LICENSE",
                "external/pypi_requests/requests-2.25.1.dist-info/METADATA",
                "external/pypi_requests/requests-2.25.1.dist-info/RECORD",
                "external/pypi_requests/requests-2.25.1.dist-info/WHEEL",
                "external/pypi_requests/requests-2.25.1.dist-info/top_level.txt",
            ],
        )
# Allow running this test module directly as a script.
if __name__ == "__main__":
    unittest.main()
| 36.473684 | 100 | 0.597114 |
a3e4f7f23afd0b9584b9203fd26c43e4ba7acef1 | 997 | py | Python | azure-mgmt-network/azure/mgmt/network/v2017_10_01/models/endpoint_service_result_paged.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 4 | 2016-06-17T23:25:29.000Z | 2022-03-30T22:37:45.000Z | azure/mgmt/network/v2017_10_01/models/endpoint_service_result_paged.py | EnjoyLifeFund/Debian_py36_packages | 1985d4c73fabd5f08f54b922e73a9306e09c77a5 | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | 54 | 2016-03-25T17:25:01.000Z | 2018-10-22T17:27:54.000Z | azure/mgmt/network/v2017_10_01/models/endpoint_service_result_paged.py | EnjoyLifeFund/Debian_py36_packages | 1985d4c73fabd5f08f54b922e73a9306e09c77a5 | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | 3 | 2016-05-03T20:49:46.000Z | 2017-10-05T21:05:27.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.paging import Paged
class EndpointServiceResultPaged(Paged):
    """Paging container for iterating over a list of
    :class:`EndpointServiceResult
    <azure.mgmt.network.v2017_10_01.models.EndpointServiceResult>` objects.

    Autogenerated by AutoRest (see the file header); edits here will be
    lost on regeneration.
    """
    # Deserialization map consumed by msrest: wire field -> attribute/type.
    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'current_page': {'key': 'value', 'type': '[EndpointServiceResult]'}
    }
    def __init__(self, *args, **kwargs):
        super(EndpointServiceResultPaged, self).__init__(*args, **kwargs)
| 35.607143 | 151 | 0.599799 |
6f7fa28028ba37c131152170dae52e91aa2ae713 | 333 | py | Python | lab/downscaling/worker/DummyWorker.py | voschezang/distributed-systems | 6132dc33414d942378cd2b835408701c31075c91 | [
"MIT"
] | null | null | null | lab/downscaling/worker/DummyWorker.py | voschezang/distributed-systems | 6132dc33414d942378cd2b835408701c31075c91 | [
"MIT"
] | null | null | null | lab/downscaling/worker/DummyWorker.py | voschezang/distributed-systems | 6132dc33414d942378cd2b835408701c31075c91 | [
"MIT"
] | 1 | 2020-02-16T15:16:45.000Z | 2020-02-16T15:16:45.000Z | from lab.util import message
import time
from lab.master.WorkerInterface import WorkerInterface
class DummyWorker(WorkerInterface):
    """Minimal worker implementation used to exercise the master/worker
    plumbing."""
    def run(self):
        """Run the worker: send a debug message once per second, forever.

        Never returns; the worker process is expected to be terminated
        externally.
        """
        while True:
            time.sleep(1)
            self.send_debug_message("Debugging successful!")
| 22.2 | 64 | 0.591592 |
544b53adcafa05f0da50d090375e2dd015db5561 | 13,441 | py | Python | studio/parsers/loader.py | ObaraEmmanuel/Formation | 31244cbceb1bb405007f5f051ae2102ab021e779 | [
"MIT"
] | 71 | 2020-06-19T11:17:35.000Z | 2022-03-24T02:51:54.000Z | studio/parsers/loader.py | ObaraEmmanuel/Formation | 31244cbceb1bb405007f5f051ae2102ab021e779 | [
"MIT"
] | 11 | 2020-06-20T19:04:02.000Z | 2022-03-01T04:42:36.000Z | studio/parsers/loader.py | ObaraEmmanuel/Formation | 31244cbceb1bb405007f5f051ae2102ab021e779 | [
"MIT"
] | 6 | 2020-11-15T01:38:53.000Z | 2021-12-25T07:14:23.000Z | """
Conversions of design to xml and back
"""
# ======================================================================= #
# Copyright (c) 2020 Hoverset Group. #
# ======================================================================= #
import tkinter as tk
from formation.formats import infer_format, BaseAdapter, Node
from studio.feature.variablepane import VariablePane
from studio.lib.variables import VariableItem
from studio.lib import legacy, native
from studio.lib.menu import menu_config, MENU_ITEM_TYPES
from studio.lib.pseudo import Container, PseudoWidget
from studio.lib.events import make_event
from studio.lib.layouts import GridLayoutStrategy
from studio.preferences import Preferences
# Shared studio preference store, resolved once at import time (consulted
# when writing design files).
pref = Preferences.acquire()
def get_widget_impl(widget):
    """Return the dotted ``module.Class`` path of a widget's tk class.

    Pseudo widgets expose the real tk class through their ``impl``
    attribute; anything else falls back to its own class.
    """
    target = widget.impl if hasattr(widget, 'impl') else widget.__class__
    return target.__module__ + "." + target.__name__
class BaseStudioAdapter(BaseAdapter):
    """Default adapter that (de)serializes designer widgets to/from Nodes."""
    # Widget modules as written in saved files mapped onto the designer's
    # own implementation modules.
    _designer_alternates = {
        'tkinter': legacy,
        'tkinter.ttk': native,
        'Tkinter': legacy,
        'ttk': native
    }
    @classmethod
    def _get_class(cls, node):
        """Resolve a node's (module, class) pair to a designer widget class.

        :raises ModuleNotFoundError: the module has no designer alternate
        :raises NotImplementedError: the class is unknown to the designer
        """
        module, impl = node.get_mod_impl()
        if module in cls._designer_alternates:
            module = cls._designer_alternates.get(module)
        else:
            raise ModuleNotFoundError("module {} not implemented by designer".format(module))
        if hasattr(module, impl):
            return getattr(module, impl)
        elif impl == 'Panedwindow' and module == native:
            # ttk Panedwindow is split into orientation-specific designer
            # classes; choose based on the saved 'orient' attribute.
            orient = node.attrib.get("attr", {}).get("orient")
            if orient == tk.HORIZONTAL:
                return native.HorizontalPanedWindow
            else:
                return native.VerticalPanedWindow
        raise NotImplementedError("class {} does not have a designer implementation variant in {}".format(impl, module))
    @classmethod
    def generate(cls, widget: PseudoWidget, parent=None):
        """Serialize *widget* (attributes, layout, event bindings and grid
        row/column config) into a new Node under *parent*."""
        attr = widget.get_altered_options()
        node = Node(parent, get_widget_impl(widget))
        node.attrib['name'] = widget.id
        node["attr"] = attr
        layout_options = widget.layout.get_altered_options_for(widget)
        node["layout"] = layout_options
        if hasattr(widget, "_event_map_"):
            for binding in widget._event_map_.values():
                bind_dict = binding._asdict()
                # id is not needed and will be recreated on loading
                bind_dict.pop("id")
                # convert field values to string
                bind_dict = {k: str(bind_dict[k]) for k in bind_dict}
                event_node = Node(node, "event")
                event_node.attrib.update(bind_dict)
        if isinstance(widget, Container) and widget.layout_strategy.__class__ == GridLayoutStrategy:
            layout = widget.layout_strategy
            # Persist only row/column settings that differ from defaults.
            if hasattr(widget, "_row_conf"):
                for row in widget._row_conf:
                    r_info = layout.get_row_def(None, row)
                    modified = {i: str(r_info[i]["value"]) for i in r_info if r_info[i]["value"] != r_info[i]["default"]}
                    row_node = Node(node, "grid")
                    row_node.attrib["row"] = str(row)
                    row_node.attrib.update(modified)
            if hasattr(widget, "_column_conf"):
                for column in widget._column_conf:
                    c_info = layout.get_column_def(None, column)
                    modified = {i: str(c_info[i]["value"]) for i in c_info if c_info[i]["value"] != c_info[i]["default"]}
                    column_node = Node(node, "grid")
                    column_node.attrib["column"] = str(column)
                    column_node.attrib.update(modified)
        return node
    @classmethod
    def load(cls, node, designer, parent, bounds=None):
        """Recreate the widget described by *node* in *designer* under
        *parent*, restoring event bindings and grid configuration."""
        obj_class = cls._get_class(node)
        attrib = node.attrib
        styles = attrib.get("attr", {})
        if obj_class in (native.VerticalPanedWindow, native.HorizontalPanedWindow):
            # use copy to maintain integrity of XMLForm on pop
            styles = dict(styles)
            if 'orient' in styles:
                styles.pop('orient')
        layout = attrib.get("layout", {})
        obj = designer.load(obj_class, attrib["name"], parent, styles, layout, bounds)
        for sub_node in node:
            if sub_node.type == "event":
                binding = make_event(**sub_node.attrib)
                if not hasattr(obj, "_event_map_"):
                    obj._event_map_ = {}
                obj._event_map_[binding.id] = binding
            elif sub_node.type == "grid":
                # we may pop stuff so use a copy
                sub_attrib = dict(sub_node.attrib)
                if sub_attrib.get("column"):
                    column = sub_attrib.pop("column")
                    obj.columnconfigure(column, sub_attrib)
                    if not hasattr(obj, "_column_conf"):
                        obj._column_conf = set()
                    obj._column_conf.add(int(column))
                elif sub_attrib.get("row"):
                    row = sub_attrib.pop("row")
                    obj.rowconfigure(row, sub_attrib)
                    if not hasattr(obj, "_row_conf"):
                        obj._row_conf = set()
                    obj._row_conf.add(int(row))
        return obj
    @staticmethod
    def get_altered_options(widget):
        """Return the widget's configure() options that differ from their
        defaults, skipping pure alias entries."""
        keys = widget.configure()
        # items with a length of two or less are just alias definitions such as 'bd' and 'borderwidth' so we ignore them
        # compare the last and 2nd last item to see whether options have been altered
        return {key: keys[key][-1] for key in keys if keys[key][-1] != keys[key][-2] and len(keys[key]) > 2}
class MenuStudioAdapter(BaseStudioAdapter):
    """Adapter for widgets carrying a menu; also (de)serializes the menu
    tree itself, including cascades, recursively."""
    # Menu item kinds serialized as plain item nodes (cascades recurse).
    _types = [tk.COMMAND, tk.CHECKBUTTON, tk.RADIOBUTTON, tk.SEPARATOR, tk.CASCADE]
    @staticmethod
    def get_item_options(menu, index):
        """Return the non-default config options of the menu item at
        *index*, excluding the 'menu' option (handled via cascades)."""
        keys = menu_config(menu, index)
        if 'menu' in keys:
            keys.pop('menu')
        return {key: keys[key][-1] for key in keys if keys[key][-1] != keys[key][-2]}
    @classmethod
    def generate(cls, widget: PseudoWidget, parent=None):
        """Serialize the widget as usual, then append its attached menu
        (if any) as a child subtree."""
        node = BaseStudioAdapter.generate(widget, parent)
        node.remove_attrib('menu', 'attr')
        if widget.configure().get('menu')[-1]:
            menu = widget.nametowidget(widget['menu'])
            cls._menu_to_xml(node, menu)
        return node
    @classmethod
    def load(cls, node, designer, parent, bounds=None):
        """Recreate the widget, then rebuild its menu tree from *node*."""
        widget = BaseStudioAdapter.load(node, designer, parent, bounds)
        cls._menu_from_xml(node, None, widget)
        return widget
    @classmethod
    def _menu_from_xml(cls, node, menu=None, widget=None):
        """Recursively rebuild menus/items from *node*.

        Item nodes are added to *menu*; Menu nodes become a new tk Menu
        attached either to *widget* (top level) or as a cascade of *menu*.
        """
        for sub_node in node:
            if sub_node.type == "event":
                continue
            attrib = sub_node.attrib
            if sub_node.type in MenuStudioAdapter._types and menu is not None:
                menu.add(sub_node.type)
                menu_config(menu, menu.index(tk.END), **attrib.get("menu", {}))
                continue
            obj_class = cls._get_class(sub_node)
            if obj_class == legacy.Menu:
                menu_obj = obj_class(widget, **attrib.get("attr", {}))
                if widget:
                    widget.configure(menu=menu_obj)
                elif menu:
                    menu.add(tk.CASCADE, menu=menu_obj)
                    menu_config(menu, menu.index(tk.END), **attrib.get("menu", {}))
                cls._menu_from_xml(sub_node, menu_obj)
    @classmethod
    def _menu_to_xml(cls, node, menu: legacy.Menu, **item_opt):
        """Recursively serialize *menu* (and its cascades) as a child of
        *node*; *item_opt* carries the options of the item linking to it."""
        if not menu:
            return
        size = menu.index(tk.END)
        if size is None:
            # menu is empty
            size = -1
        menu_node = Node(node, get_widget_impl(menu))
        menu_node["attr"] = cls.get_altered_options(menu)
        menu_node["menu"] = item_opt
        for i in range(size + 1):
            if menu.type(i) == tk.CASCADE:
                cls._menu_to_xml(menu_node,
                                 menu.nametowidget(menu.entrycget(i, 'menu')), **cls.get_item_options(menu, i))
            elif menu.type(i) != 'tearoff':
                sub_node = Node(menu_node, menu.type(i))
                sub_node["menu"] = cls.get_item_options(menu, i)
        return menu_node
class VariableStudioAdapter(BaseStudioAdapter):
    """Adapter that (de)serializes tk variables to and from Nodes."""
    @classmethod
    def generate(cls, variable: VariableItem, parent=None):
        """Serialize *variable* into a new Node under *parent*."""
        attributes = {"attr": {'name': variable.name, 'value': variable.value}}
        return Node(parent, get_widget_impl(variable.var), attributes)
    @classmethod
    def load(cls, node, *_):
        """Recreate a variable in the variable pane from *node*.

        Only the node argument is needed; the remaining adapter-signature
        arguments are accepted and ignored.
        """
        attributes = node.attrib.get("attr", {})
        var_type = VariableItem.supported_types.get(node.type, tk.StringVar)
        VariablePane.get_instance().add_var(var_type, **attributes)
class DesignBuilder:
    """Converts the designer's widget tree to a serializable Node tree and
    back, delegating per-widget work to studio adapters."""
    # Widget classes needing a specialized adapter; everything else uses
    # BaseStudioAdapter (see get_adapter).
    _adapter_map = {
        legacy.Menubutton: MenuStudioAdapter,
        native.Menubutton: MenuStudioAdapter,
    }
    # Child node types that are not widgets and must be skipped when
    # loading a widget's children.
    _ignore_tags = (
        *MENU_ITEM_TYPES,
        "event",
        "grid"
    )
    def __init__(self, designer):
        self.designer = designer
        # Root Node of the most recently generated/loaded tree.
        self.root = None
    @classmethod
    def add_adapter(cls, adapter, *obj_classes):
        """
        Connect an external adapter for a specific set of object types to the builder.
        """
        for obj_class in obj_classes:
            cls._adapter_map[obj_class] = adapter
    def generate(self):
        """
        Convert the current contents of the designer to xml. Note that only
        the root widget and its child widgets are converted to xml
        :return:
        """
        self.root = self.to_tree(self.designer.root_obj)
        self._variables_to_tree(self.root)
    def get_adapter(self, widget_class):
        """Return the adapter registered for *widget_class*, defaulting to
        BaseStudioAdapter."""
        return self._adapter_map.get(widget_class, BaseStudioAdapter)
    def load(self, path, designer):
        """Load a design file at *path* into *designer*; returns the root
        widget."""
        self.root = infer_format(path)(path=path).load()
        self._load_variables(self.root)
        return self._load_widgets(self.root, designer, designer)
    def _load_variables(self, node):
        # Variables appear as direct children of the root node.
        for sub_node in node:
            if sub_node.is_var():
                VariableStudioAdapter.load(sub_node)
    def load_section(self, node, parent, bounds=None):
        """
        Load lxml node as a widget/group of widgets in the designer under a specific container
        :param parent: Container widget to contain new widget group/section
        :param node: lxml node to be loaded as a widget/group
        :param bounds: tuple of 4 elements describing the intended location of
        the new loaded widget. If left as None, node layout attributes will
        be used instead
        :return:
        """
        return self._load_widgets(node, self.designer, parent, bounds)
    def _load_widgets(self, node, designer, parent, bounds=None):
        """Recursively load *node* and its widget children; re-raises load
        errors with the source line prepended."""
        line_info = node.get_source_line_info()
        try:
            adapter = self.get_adapter(BaseStudioAdapter._get_class(node))
            widget = adapter.load(node, designer, parent, bounds)
        except Exception as e:
            # Append line number causing error before re-raising for easier debugging by user
            raise e.__class__("{}{}".format(line_info, e)) from e
        if not isinstance(widget, Container):
            # We dont need to load child tags of non-container widgets
            return widget
        for sub_node in node:
            if sub_node.is_var() or sub_node.type in self._ignore_tags:
                # ignore variables and non widget nodes
                continue
            self._load_widgets(sub_node, designer, widget)
        return widget
    def to_tree(self, widget, parent=None):
        """
        Convert a PseudoWidget widget and its children to a node
        :param widget: widget to be converted to an xml node
        :param parent: The intended xml node to act as parent to the created
        xml node
        :return: the widget converted to a :class:Node instance.
        """
        adapter = self.get_adapter(widget.__class__)
        node = adapter.generate(widget, parent)
        if isinstance(widget, Container):
            for child in widget._children:
                self.to_tree(child, node)
        return node
    def _variables_to_tree(self, parent):
        # Append every variable in the variable pane under *parent*.
        variables = VariablePane.get_instance().variables
        for var_item in variables:
            VariableStudioAdapter.generate(var_item, parent)
    def write(self, path):
        """
        Writes contents of the designer to a file specified by path
        :param path: Path to file to be written to
        :return: String
        """
        file_loader = infer_format(path)
        pref_path = f"designer::{file_loader.name.lower()}"
        pref.set_default(pref_path, {})
        with open(path, 'w') as dump:
            # generate an upto-date tree first
            self.generate()
            dump.write(file_loader(node=self.root).generate(**pref.get(pref_path)))
    def __eq__(self, other):
        if isinstance(other, DesignBuilder):
            return self.root == other.root
        return False
    def __ne__(self, other):
        return not (self == other)
| 39.648968 | 121 | 0.59445 |
eb3fc21c64b442801446899451b15a879392bc1c | 2,313 | py | Python | resources/produto.py | rykehg/produtosPyFlaskJWTTests | a3728b353a6855e3b3bc6e5c5171549f1181e140 | [
"MIT"
] | null | null | null | resources/produto.py | rykehg/produtosPyFlaskJWTTests | a3728b353a6855e3b3bc6e5c5171549f1181e140 | [
"MIT"
] | null | null | null | resources/produto.py | rykehg/produtosPyFlaskJWTTests | a3728b353a6855e3b3bc6e5c5171549f1181e140 | [
"MIT"
] | null | null | null | from flask_jwt_extended import jwt_required
from flask_restful import Resource, reqparse
from models.produtos import ProdutoModel
class Produtos(Resource):
    """Collection endpoint: list every product."""
    @jwt_required
    def get(self):
        """Return all products as JSON (requires a valid JWT)."""
        # SELECT * FROM produtos
        all_products = ProdutoModel.query.all()
        serialized = [item.json() for item in all_products]
        return {'Produtos': serialized}
class Produto(Resource):
    """Single-product CRUD endpoint keyed by produto_id; all methods
    require a valid JWT."""
    # Shared request parser for POST/PUT bodies.
    attributes = reqparse.RequestParser()
    attributes.add_argument('nome', type=str, required=True,
                            help="The field 'nome' cannot be left blank.")
    attributes.add_argument('quantidade')
    attributes.add_argument('valor', type=str, required=True,
                            help="The field 'valor' cannot be left blank.")
    attributes.add_argument('descricao')
    @jwt_required
    def get(self, produto_id):
        """Return one product, or 404 if it does not exist."""
        produto = ProdutoModel.find_produto(produto_id)
        if produto:
            return produto.json()
        return {'message': 'Produto not found.'}, 404
    @jwt_required
    def post(self, produto_id):
        """Create a product with the given id; 400 if it already exists
        or required fields are missing/empty."""
        if ProdutoModel.find_produto(produto_id):
            return {"message": "Produto id '{}' already exists."
                    .format(produto_id)}, 400  # Bad Request
        data = Produto.attributes.parse_args()
        # Guards against empty-string values, which reqparse's
        # required=True alone would accept.
        if not (data['nome'] and data['valor']):
            return {'message': 'Request is missing required fields.'}, 400
        produto = ProdutoModel(produto_id, **data)
        produto.save_produto()
        return produto.json(), 201
    @jwt_required
    def put(self, produto_id):
        """Update an existing product; 404 if absent, 400 on empty
        required fields."""
        data = Produto.attributes.parse_args()
        if not (data['nome'] and data['valor']):
            return {'message': 'Request is missing required fields.'}, 400
        produto_encontrado = ProdutoModel.find_produto(produto_id)
        if produto_encontrado:
            produto_encontrado.update_produto(**data)
            produto_encontrado.save_produto()
            return produto_encontrado.json(), 200
        return {'message': 'Produto not found.'}, 404
    @jwt_required
    def delete(self, produto_id):
        """Delete a product, or 404 if it does not exist."""
        produto = ProdutoModel.find_produto(produto_id)
        if produto:
            produto.delete_produto()
            return {'message': 'Produto deleted.'}
        return {'message': 'Produto not found.'}, 404
| 34.014706 | 75 | 0.629486 |
fbb2f9252b5e68fc10e2a58b88491f23ef743285 | 2,373 | bzl | Python | test/com/facebook/buck/features/dotnet/testdata/csharp_udr/build_rules/csharp_binary.bzl | jasonnam/buck | 1ddbbf986312b30413aa36cac337267536a11f04 | [
"Apache-2.0"
] | null | null | null | test/com/facebook/buck/features/dotnet/testdata/csharp_udr/build_rules/csharp_binary.bzl | jasonnam/buck | 1ddbbf986312b30413aa36cac337267536a11f04 | [
"Apache-2.0"
] | null | null | null | test/com/facebook/buck/features/dotnet/testdata/csharp_udr/build_rules/csharp_binary.bzl | jasonnam/buck | 1ddbbf986312b30413aa36cac337267536a11f04 | [
"Apache-2.0"
] | null | null | null | load("//build_rules:csharp_binary.bzl", "csharp_compile")
"""
Implementation of a simple C# binary that can use built in csharp_library() and
prebuilt_dotnet_library() rules.
Note that by default the stdlib is not linked in.
Add 'mscorlib.dll' to `system_assemblies` to get this behavior.
"""
def _csharp_binary_impl(ctx):
    """Compile `srcs` against `deps` into a runnable .exe.

    Returns DefaultInfo (the exe as default output, copied dependency dlls
    under the "dlls" named output group) and RunInfo so the target can be
    executed directly.
    """

    # Bug fix: the original referenced an undefined `out_name`. Honor the
    # `out` attribute, defaulting to "<name>.exe" as its doc promises.
    out_name = ctx.attr.out or ctx.attr.name + ".exe"
    copied_artifacts, output_exe = csharp_compile(
        ctx,
        ctx.attr._toolchain[DotnetLegacyToolchainInfo],
        ctx.attr.deps,
        ctx.attr.system_assemblies,
        ctx.attr.srcs,
        ctx.attr.optimize,
        "exe",
        out_name,
        ctx.attr.main,
    )
    return [
        DefaultInfo(named_outputs = {"dlls": copied_artifacts}, default_outputs = [output_exe]),
        RunInfo(env = {}, args = [output_exe]),
    ]
csharp_binary = rule(
    implementation = _csharp_binary_impl,
    attrs = {
        "deps": attr.dep_list(
            doc = (
                "A list of C# dependencies. These will be copied to the destination " +
                "directory and linked by csc"
            ),
            providers = [DotnetLibraryProviderInfo],
        ),
        "main": attr.string(doc = (
            "The main entry point to the program. This should be the fully " +
            "qualified class name that contains Main. If not provided, C# will " +
            "attempt to divine this automatically. " +
            "See https://docs.microsoft.com/en-us/dotnet/csharp/language-reference/compiler-options/main-compiler-option"
        )),
        "optimize": attr.bool(doc = "Whether to pass the -optimize flag to csc"),
        # Fix: removed a stray trailing comma that turned this doc string
        # into a 1-tuple.
        "out": attr.string(doc = (
            "The name of the output file. If not provided, `name` will be used " +
            "and suffixed with .exe"
        )),
        "srcs": attr.source_list(
            doc = "List of sources that should be compiled in the resulting .exe",
            allow_empty = False,
        ),
        "system_assemblies": attr.string_list(doc = (
            "A list of system assemblies that are required and should be linked. " +
            "e.g. \"mscorlib.dll\""
        )),
        "_toolchain": attr.dep(
            default = "//toolchains:dotnet",
            providers = [DotnetLegacyToolchainInfo],
            # Fix: keyword was misspelled `docs`.
            doc = "The legacy_toolchain() that points to the built in dotnet toolchain",
        ),
    },
    executable = True,
)
| 35.954545 | 121 | 0.596713 |
0180ae07d1af93e6ea0588f0e7de8e525b4c6bb6 | 1,105 | py | Python | tests/test_todos.py | ejmoyer/flask-todo | 52e749a9bc93852fd1d634ca300d9213b3394f14 | [
"MIT"
] | null | null | null | tests/test_todos.py | ejmoyer/flask-todo | 52e749a9bc93852fd1d634ca300d9213b3394f14 | [
"MIT"
] | null | null | null | tests/test_todos.py | ejmoyer/flask-todo | 52e749a9bc93852fd1d634ca300d9213b3394f14 | [
"MIT"
] | null | null | null | import pytest
def test_todo_list(client):
    """Index page renders the seeded to-do items with completion state."""
    # View the home page and check to see the header and a to-do item
    response = client.get('/')
    assert b'clean room' in response.data
    # Mock data should show three to-do items, one of which is complete
    assert response.data.count(b'<li class="">') == 2
    assert response.data.count(b'<li class="completed">') == 1
def test_delete(client):
    """Deleting the completed task removes it from the rendered list."""
    response = client.post('/deletetask', data={'task_to_delete': 'do homework'})
    # The only completed item was deleted, so no 'completed' <li> remains.
    assert response.data.count(b'<li class="completed">') == 0
def test_mark_complete(client):
    """POST /done marks a task as complete."""
    response = client.post('/done', data={'done': 'do homework'})
    # NOTE(review): test-client response objects are generally truthy
    # regardless of status, so this assertion may never fail -- consider
    # asserting on response.status_code or the rendered body instead.
    assert response
def test_edit_task(client):
    """Editing task 1 via /1/edittask changes its description."""
    exist_response = client.get('/')  # Check if Edit link exists in Index
    assert b'Edit</a>' in exist_response.data
    # NOTE(review): the two bare `assert <response>` checks below are likely
    # always-true (response objects are truthy); prefer status-code asserts.
    page_response = client.get('/1/edittask')  # Check if the Edit Page exists
    assert page_response
    change_response = client.post('/1/edittask', data={'newdesc': 'test change'})  # Test editting a todo
    assert change_response
    # The edited description must now appear on the index page.
    assert b'test change' in client.get('/').data
| 34.53125 | 104 | 0.686878 |
c0b27f671fa14dbf1675ccff6887c7cfad8383b2 | 1,227 | py | Python | renku/core/management/migrations/m_0008__dataset_metadata.py | TaoSunVoyage/renku-python | 858fe84ce2925a49d9b62638dc601f581e24353e | [
"Apache-2.0"
] | null | null | null | renku/core/management/migrations/m_0008__dataset_metadata.py | TaoSunVoyage/renku-python | 858fe84ce2925a49d9b62638dc601f581e24353e | [
"Apache-2.0"
] | null | null | null | renku/core/management/migrations/m_0008__dataset_metadata.py | TaoSunVoyage/renku-python | 858fe84ce2925a49d9b62638dc601f581e24353e | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Copyright 2020 - Swiss Data Science Center (SDSC)
# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
# Eidgenössische Technische Hochschule Zürich (ETHZ).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dataset metadata migrations."""
from renku.core.management.migrations.models.v8 import get_client_datasets
def migrate(client):
    """Migration function: de-duplicate dataset file entries by path."""
    _fix_dataset_metadata(client)
def _fix_dataset_metadata(client):
    # Rewrite each dataset so its file list contains at most one entry per
    # path, then persist the cleaned metadata back to its YAML file.
    for dataset in get_client_datasets(client):
        dataset.files = _get_unique_files(dataset.files)
        dataset.to_yaml()
def _get_unique_files(files):
mapping = {f.path: f for f in files}
return list(mapping.values())
| 33.162162 | 75 | 0.746536 |
4d43877e0e1f9b3f5fcb2b6c9d1adb47ff9a5eae | 29,158 | py | Python | specutils/fitting/fitmodels.py | hamogu/specutils | b873f2ac9b3c207c9e670246d102f46a9606d6ed | [
"BSD-3-Clause"
] | null | null | null | specutils/fitting/fitmodels.py | hamogu/specutils | b873f2ac9b3c207c9e670246d102f46a9606d6ed | [
"BSD-3-Clause"
] | null | null | null | specutils/fitting/fitmodels.py | hamogu/specutils | b873f2ac9b3c207c9e670246d102f46a9606d6ed | [
"BSD-3-Clause"
] | null | null | null | import operator
import itertools
import numpy as np
from scipy.signal import convolve
import astropy.units as u
from astropy.stats import sigma_clipped_stats
from ..manipulation.utils import excise_regions
from ..analysis import fwhm, centroid
from ..utils import QuantityModel
from ..manipulation import extract_region, noise_region_uncertainty
from ..spectra.spectral_region import SpectralRegion
from ..spectra.spectrum1d import Spectrum1D
from astropy.modeling import fitting, Model, models
from astropy.table import QTable
__all__ = ['find_lines_threshold', 'find_lines_derivative', 'fit_lines',
'estimate_line_parameters']
# Define the initial estimators
# This is the default methods to use to estimate astropy model
# parameters. This is based on only a small subset of the astropy
# models but it was determined that this is a decent start as most
# fitting will probably use one of these.
#
# Each method list must take a Spectrum1D object and should return
# a Quantity.
# Maps an astropy model class name to a dict of per-parameter estimator
# callables. Each callable takes a Spectrum1D and returns the initial value
# for that parameter.
_parameter_estimators = {
    'Gaussian1D': {
        'amplitude': lambda s: max(s.flux),
        'mean': lambda s: centroid(s, region=None),
        'stddev': lambda s: fwhm(s)
    },
    'Lorentz1D': {
        'amplitude': lambda s: max(s.flux),
        'x_0': lambda s: centroid(s, region=None),
        'fwhm': lambda s: fwhm(s)
    },
    'Voigt1D': {
        'x_0': lambda s: centroid(s, region=None),
        'amplitude_L': lambda s: max(s.flux),
        # The measured FWHM is split between the Lorentzian and Gaussian
        # components -- NOTE(review): confirm the 1/sqrt(2) split factor.
        'fwhm_L': lambda s: fwhm(s) / np.sqrt(2),
        'fwhm_G': lambda s: fwhm(s) / np.sqrt(2)
    }
}
def _set_parameter_estimators(model):
    """
    Attach the default parameter estimators for the model's class (if any
    are registered in ``_parameter_estimators``) and return the model.
    """
    estimators = _parameter_estimators.get(type(model).__name__)
    if estimators is not None:
        model._constraints['parameter_estimator'] = estimators
    return model
def estimate_line_parameters(spectrum, model):
    """
    Estimate the input ``model`` parameters from the input ``spectrum``. The
    ``model`` can be specified with default parameters, for example
    ``Gaussian1D()``.

    Parameters
    ----------
    spectrum : `~specutils.Spectrum1D`
        The spectrum object from which we will estimate the model parameters.
    model : `~astropy.modeling.Model`
        Model for which we want to estimate parameters from the spectrum.

    Returns
    -------
    model : `~astropy.modeling.Model`
        Model with parameters estimated.

    Raises
    ------
    ValueError
        If no parameter estimators are registered for the model's type.
    """
    # Attach the default estimators if the model does not carry any yet.
    if 'parameter_estimator' not in model._constraints:
        model = _set_parameter_estimators(model)

    estimators = model._constraints.get('parameter_estimator')
    if estimators is None:
        # Unknown model type: nothing registered in _parameter_estimators.
        raise ValueError('No method to estimate parameters for model type '
                         '{}'.format(type(model).__name__))

    # Evaluate each estimator against the spectrum and set the parameter.
    for param, estimator in estimators.items():
        setattr(model, param, estimator(spectrum))

    return model
def _consecutive(data, stepsize=1):
return np.split(data, np.where(np.diff(data) != stepsize)[0]+1)
def find_lines_threshold(spectrum, noise_factor=1):
    """
    Find the emission and absorption lines in a spectrum. The method
    here is based on deviations larger than the spectrum's uncertainty by the
    ``noise_factor``.
    This method only works with continuum-subtracted spectra and the uncertainty
    must be defined on the spectrum. To add the uncertainty, one could use
    `~specutils.manipulation.noise_region_uncertainty` to add the uncertainty.
    Parameters
    ----------
    spectrum : `~specutils.Spectrum1D`
        The spectrum object in which the lines will be found.
    noise_factor : float
        ``noise_factor`` multiplied by the spectrum's ``uncertainty``, used for
        thresholding.
    Returns
    -------
    qtable: `~astropy.table.QTable`
        Table of emission and absorption lines. Line center (``line_center``),
        line type (``line_type``) and index of line center (``line_center_index``)
        are stored for each line.
    """
    # Indices of all pixels whose |flux| exceeds noise_factor * uncertainty.
    uncertainty = spectrum.uncertainty
    inds = np.where(np.abs(spectrum.flux) > (noise_factor*uncertainty.array)*spectrum.flux.unit)[0]
    # Positive-flux candidates are potential emission lines.
    pos_inds = inds[spectrum.flux.value[inds] > 0]
    line_inds_grouped = _consecutive(pos_inds, stepsize=1)
    if len(line_inds_grouped[0]) > 0:
        # One line per contiguous group: keep the index of the peak pixel.
        # (The comprehension variable ``inds`` shadows the outer ``inds``.)
        emission_inds = [inds[np.argmax(spectrum.flux.value[inds])] for inds in line_inds_grouped]
    else:
        emission_inds = []
    #
    # Find the absorption lines (negative-flux candidates)
    #
    neg_inds = inds[spectrum.flux.value[inds] < 0]
    line_inds_grouped = _consecutive(neg_inds, stepsize=1)
    if len(line_inds_grouped[0]) > 0:
        # One line per contiguous group: keep the index of the minimum pixel.
        absorption_inds = [inds[np.argmin(spectrum.flux.value[inds])] for inds in line_inds_grouped]
    else:
        absorption_inds = []
    #
    # Create the QTable to return the lines (emission rows first)
    #
    qtable = QTable()
    qtable['line_center'] = list(itertools.chain(*[spectrum.spectral_axis.value[emission_inds],
                                                   spectrum.spectral_axis.value[absorption_inds]]))*spectrum.spectral_axis.unit
    qtable['line_type'] = ['emission']*len(emission_inds) + ['absorption']*len(absorption_inds)
    qtable['line_center_index'] = list(itertools.chain(*[emission_inds, absorption_inds]))
    return qtable
def find_lines_derivative(spectrum, flux_threshold=None):
    """
    Find the emission and absorption lines in a spectrum. The method
    here is based on finding the zero crossings in the derivative
    of the spectrum.
    Parameters
    ----------
    spectrum : Spectrum1D
        The spectrum object over which the equivalent width will be calculated.
    flux_threshold : float, `~astropy.units.Quantity` or None
        The threshold a pixel must be above to be considered part of a line. If
        a float, will assume the same units as ``spectrum.flux``. This threshold
        is above and beyond the derivative searching step. Default is None so no
        thresholding. The threshold is positive for emission lines and negative
        for absorption lines.
    Returns
    -------
    qtable: `~astropy.table.QTable`
        Table of emission and absorption lines. Line center (``line_center``),
        line type (``line_type``) and index of line center (``line_center_index``)
        are stored for each line.
    """
    # Take the derivative to find the zero crossings which correspond to
    # the peaks (positive or negative)
    kernel = [1, 0, -1]
    dY = convolve(spectrum.flux, kernel, 'valid')
    # Use sign flipping to determine direction of change
    S = np.sign(dY)
    ddS = convolve(S, kernel, 'valid')
    # Add units to a plain-number threshold so the comparison below works.
    if flux_threshold is not None and isinstance(flux_threshold, (int, float)):
        flux_threshold = float(flux_threshold) * spectrum.flux.unit
    #
    # Emission lines
    #
    # Find all the indices that appear to be part of a +ve peak;
    # offsets compensate for the 'valid' convolutions shrinking the arrays.
    candidates = np.where(dY > 0)[0] + (len(kernel) - 1)
    line_inds = sorted(set(candidates).intersection(np.where(ddS == -2)[0] + 1))
    if flux_threshold is not None:
        line_inds = np.array(line_inds)[spectrum.flux[line_inds] > flux_threshold]
    # Now group contiguous candidates and keep the peak pixel of each group.
    line_inds_grouped = _consecutive(line_inds, stepsize=1)
    if len(line_inds_grouped[0]) > 0:
        emission_inds = [inds[np.argmax(spectrum.flux[inds])] for inds in line_inds_grouped]
    else:
        emission_inds = []
    #
    # Absorption lines
    #
    # Find all the indices that appear to be part of a -ve peak
    candidates = np.where(dY < 0)[0] + (len(kernel) - 1)
    line_inds = sorted(set(candidates).intersection(np.where(ddS == 2)[0] + 1))
    if flux_threshold is not None:
        line_inds = np.array(line_inds)[spectrum.flux[line_inds] < -flux_threshold]
    # Now group contiguous candidates and keep the minimum pixel of each group.
    line_inds_grouped = _consecutive(line_inds, stepsize=1)
    if len(line_inds_grouped[0]) > 0:
        absorption_inds = [inds[np.argmin(spectrum.flux[inds])] for inds in line_inds_grouped]
    else:
        absorption_inds = []
    #
    # Create the QTable to return the lines (emission rows first)
    #
    qtable = QTable()
    qtable['line_center'] = list(itertools.chain(*[spectrum.spectral_axis.value[emission_inds],
                                                   spectrum.spectral_axis.value[absorption_inds]]))*spectrum.spectral_axis.unit
    qtable['line_type'] = ['emission']*len(emission_inds) + ['absorption']*len(absorption_inds)
    qtable['line_center_index'] = list(itertools.chain(*[emission_inds, absorption_inds]))
    return qtable
def fit_lines(spectrum, model, fitter=None,
              exclude_regions=None, weights=None, window=None,
              **kwargs):
    """
    Fit the input models to the spectrum. The parameter values of the
    input models will be used as the initial conditions for the fit.

    Parameters
    ----------
    spectrum : Spectrum1D
        The spectrum object over which the equivalent width will be calculated.
    model: `~astropy.modeling.Model` or list of `~astropy.modeling.Model`
        The model or list of models that contain the initial guess.
    fitter : `~astropy.modeling.fitting.Fitter`, optional
        The fitter to use. Defaults to a fresh
        `~astropy.modeling.fitting.LevMarLSQFitter` per call.
    exclude_regions : list of `~specutils.SpectralRegion`
        List of regions to exclude in the fitting.
    weights : list (NOT IMPLEMENTED YET)
        List of weights to define importance of fitting regions.
    window : `~specutils.SpectralRegion` or list of `~specutils.SpectralRegion`
        Regions of the spectrum to use in the fitting. If None, then the
        whole spectrum will be used in the fitting. A list is matched to the
        models positionally; a single window applies to every model.

    Additional keyword arguments are passed directly into the call to the
    ``fitter``.

    Returns
    -------
    models : Compound model of `~astropy.modeling.Model`
        A compound model of models with fitted parameters.

    Notes
    -----
    * Could add functionality to set the bounds in
      ``model`` if they are not set.
    * The models in the list of ``model`` are added
      together and passed as a compound model to the
      `~astropy.modeling.fitting.Fitter` class instance.
    """
    # A shared default fitter instance would carry state (e.g. ``fit_info``)
    # across unrelated calls, so create a fresh one per call instead of
    # using a mutable default argument.
    if fitter is None:
        fitter = fitting.LevMarLSQFitter()

    # If we are to exclude certain regions, then remove them up front.
    if exclude_regions is not None:
        spectrum = excise_regions(spectrum, exclude_regions)

    # Normalize to a list so single models and lists share one code path;
    # remember the input shape so the return value matches it.
    single_model_in = not isinstance(model, list)
    if single_model_in:
        model = [model]

    fitted_models = []

    for modeli, model_guess in enumerate(model):
        # A list of windows is matched to models positionally; any other
        # non-None window (Quantity, tuple, SpectralRegion) applies to all.
        if window is None:
            model_window = None
        elif isinstance(window, list):
            model_window = window[modeli]
        else:
            model_window = window

        # If the model parameters carry no units, fitting ignores units
        # entirely (model and spectrum are assumed to share units).
        ignore_units = getattr(model_guess, model_guess.param_names[0]).unit is None

        fit_model = _fit_lines(spectrum, model_guess, fitter,
                               exclude_regions, weights, model_window,
                               ignore_units, **kwargs)

        fitted_models.append(fit_model)

    # Mirror the input: single model in, single model out.
    return fitted_models[0] if single_model_in else fitted_models
def _fit_lines(spectrum, model, fitter=fitting.LevMarLSQFitter(),
               exclude_regions=None, weights=None, window=None, ignore_units=False,
               **kwargs):
    """
    Fit the input model (initial conditions) to the spectrum. Output will be
    the same model with the parameters set based on the fitting.
    spectrum, model -> model
    Parameters
    ----------
    spectrum : Spectrum1D
        The spectrum object over which the equivalent width will be calculated.
    model: `~astropy.modeling.Model`
        The model or that contain the initial guess.
    exclude_regions : list of `~specutils.SpectralRegion`
        List of regions to exclude in the fitting.
    weights : list (NOT IMPLEMENTED YET)
        List of weights to define importance of fitting regions.
    window : `~specutils.SpectralRegion` or list of `~specutils.SpectralRegion`
        Regions of the spectrum to use in the fitting. If None, then the
        whole spectrum will be used in the fitting.
    ignore_units : bool
        If True, then ignore any units on the input model parameters.
        (This would effectively be assuming the model and spectrum have the same units.)
    Returns
    -------
    model : Compound model of `~astropy.modeling.Model`
        A compound model of models with fitted parameters. May be ``None``
        if a ``ValueError`` occurred while extracting a SpectralRegion
        window (see NOTE below).
    Notes
    -----
    * Could add functionality to set the bounds in ``model`` if they are not set.
    * Additional keyword arguments are passed directly into
      the call to the ``fitter``.
    """
    if weights is not None:
        raise NotImplementedError('Weights are not yet implemented.')
    #
    # If we are to exclude certain regions, then remove them.
    #
    if exclude_regions is not None:
        spectrum = excise_regions(spectrum, exclude_regions)
    dispersion = spectrum.spectral_axis
    dispersion_unit = spectrum.spectral_axis.unit
    flux = spectrum.flux
    flux_unit = spectrum.flux.unit
    #
    # Determine the window if it is not None. There
    # are several options here:
    #   window = 4 * u.Angstrom -> Quantity
    #   window = (4*u.Angstrom, 6*u.Angstrom) -> tuple
    #   window = (4, 6)*u.Angstrom -> Quantity
    #
    #
    # Determine the window if there is one
    #
    # In this case the window defines the area around the center of each model
    # NOTE(review): this branch assumes the model exposes a ``mean``
    # parameter (e.g. Gaussian1D) -- confirm for other model types.
    if window is not None and isinstance(window, (float, int)):
        center = model.mean
        indices = np.nonzero((spectrum.spectral_axis >= center-window) & (spectrum.spectral_axis < center+window))
        dispersion = dispersion[indices]
        flux = flux[indices]
    # In this case the window is the start and end points of where we should fit
    elif window is not None and isinstance(window, tuple):
        indices = np.nonzero((dispersion >= window[0]) & (dispersion < window[1]))
        dispersion = dispersion[indices]
        flux = flux[indices]
    elif window is not None and isinstance(window, SpectralRegion):
        try:
            idx1, idx2 = window.bounds
            if idx1 == idx2:
                raise Exception("Bad selected region.")
            extracted_regions = extract_region(spectrum, window)
            dispersion, flux = _combined_region_data(extracted_regions)
            dispersion = dispersion * dispersion_unit
            flux = flux * flux_unit
        # NOTE(review): a ValueError here is silently swallowed and the
        # function returns None; callers must tolerate a None result.
        except ValueError as e:
            return
    if flux is None or len(flux) == 0:
        raise Exception("Spectrum flux is empty or None.")
    # Rebuild a Spectrum1D restricted to the selected window, keeping the
    # original WCS / velocity convention / rest value.
    input_spectrum = spectrum
    spectrum = Spectrum1D(flux=flux.value * flux_unit,
                          spectral_axis=dispersion.value * dispersion_unit,
                          wcs=input_spectrum.wcs,
                          velocity_convention=input_spectrum.velocity_convention,
                          rest_value=input_spectrum.rest_value)
    #
    # Compound models with units can not be fit.
    #
    # Convert the model initial guess to the spectral
    # units and then remove the units
    #
    model_unitless, dispersion_unitless, flux_unitless = _strip_units_from_model(model, spectrum, convert=not ignore_units)
    #
    # Do the fitting of spectrum to the model.
    #
    fit_model_unitless = fitter(model_unitless, dispersion_unitless, flux_unitless,
                                **kwargs)
    #
    # Now add the units back onto the model....
    #
    if not ignore_units:
        fit_model = _add_units_to_model(fit_model_unitless, model, spectrum)
    else:
        fit_model = QuantityModel(fit_model_unitless, spectrum.spectral_axis.unit, spectrum.flux.unit)
    return fit_model
def _combined_region_data(spec):
if isinstance(spec, list):
# Merge sub-spec spectral_axis and flux values.
x = np.array([sv for subspec in spec if subspec is not None
for sv in subspec.spectral_axis.value])
y = np.array([sv for subspec in spec if subspec is not None
for sv in subspec.flux.value])
else:
if spec is None:
return
x = spec.spectral_axis.value
y = spec.flux.value
if len(x) == 0:
return
return x, y
def _convert(q, dispersion_unit, dispersion, flux_unit):
    """
    Convert the quantity ``q`` to the spectrum's dispersion or flux units,
    whichever it is equivalent to; the *value* of the result is used in the
    unitless model.

    Raises
    ------
    ValueError
        If ``q`` is equivalent to neither the dispersion nor the flux unit.
        (The previous implementation fell through and raised an opaque
        ``UnboundLocalError`` instead.)
    """
    if q.unit.is_equivalent(dispersion_unit, equivalencies=u.equivalencies.spectral()):
        return q.to(dispersion_unit, equivalencies=u.equivalencies.spectral())
    if q.unit.is_equivalent(flux_unit, equivalencies=u.equivalencies.spectral_density(dispersion)):
        return q.to(flux_unit, equivalencies=u.equivalencies.spectral_density(dispersion))
    raise ValueError('Quantity with unit {} is equivalent to neither the '
                     'dispersion unit {} nor the flux unit {}.'.format(
                         q.unit, dispersion_unit, flux_unit))
def _convert_and_dequantify(poss_quantity, dispersion_unit, dispersion, flux_unit, convert=True):
    """
    This method will convert the ``poss_quantity`` value to the proper
    dispersion or flux units and then strip the units.
    If the ``poss_quantity`` is None, or a number, we just return that...
    Note: This method can be removed along with most of the others here
    when astropy.fitting will fit models that contain units.
    """
    # Plain numbers and None pass straight through untouched.
    if poss_quantity is None or isinstance(poss_quantity, (float, int)):
        return poss_quantity
    # A Parameter carrying a quantity: convert that quantity, keep the value.
    if convert and hasattr(poss_quantity, 'quantity') and poss_quantity.quantity is not None:
        q = poss_quantity.quantity
        quantity = _convert(q, dispersion_unit, dispersion, flux_unit)
        v = quantity.value
    # A bare Quantity: convert it directly.
    elif convert and isinstance(poss_quantity, u.Quantity):
        quantity = _convert(poss_quantity, dispersion_unit, dispersion, flux_unit)
        v = quantity.value
    # No conversion requested (or unit-less Parameter): just strip the value.
    else:
        v = poss_quantity.value
    return v
def _strip_units_from_model(model_in, spectrum, convert=True):
    """
    This method strips the units from the model, so the result can
    be passed to the fitting routine. This is necessary as CompoundModel
    with units does not work in the fitters.
    Note: When CompoundModel with units works in the fitters this method
    can be removed.
    Note: This assumes there are two types of models, those that are
    based on `~astropy.modeling.models.PolynomialModel` and therefore
    require the ``degree`` parameter when instantiating the class, and
    "everything else" that does not require an "extra" parameter for
    class instantiation.
    Note: If convert is False, then we will *not* do the conversion of units
    to the units of the Spectrum1D object. Otherwise we will convert.
    """
    #
    # Get the dispersion and flux information from the spectrum
    #
    dispersion = spectrum.spectral_axis
    dispersion_unit = spectrum.spectral_axis.unit
    flux = spectrum.flux
    flux_unit = spectrum.flux.unit
    #
    # Determine if a compound model
    #
    compound_model = model_in.n_submodels() > 1
    if not compound_model:
        # For this we are going to just make it a list so that we
        # can use the looping structure below.
        model_in = [model_in]
    else:
        # If it is a compound model then we are going to create the RPN
        # representation of it which is a list that contains either astropy
        # models or string representations of operators (e.g., '+' or '*').
        model_in = [c.value for c in model_in._tree.traverse_postorder()]
    #
    # Run through each model in the list or compound model
    #
    model_out_stack = []
    for sub_model in model_in:
        #
        # If it is an operator put onto the stack and move on...
        #
        if not isinstance(sub_model, Model):
            model_out_stack.append(sub_model)
            continue
        #
        # Make a new instance of the class.
        #
        if isinstance(sub_model, models.PolynomialModel):
            new_sub_model = sub_model.__class__(sub_model.degree)
        else:
            new_sub_model = sub_model.__class__()
        #
        # Now for each parameter in the model determine if a dispersion or
        # flux type of unit, then convert to spectrum units and then get the value.
        #
        for pn in new_sub_model.param_names:
            # This could be a Quantity or Parameter
            v = _convert_and_dequantify(getattr(sub_model, pn), dispersion_unit, dispersion, flux_unit, convert=convert)
            #
            # Add this information for the parameter name into the
            # new sub model.
            #
            setattr(new_sub_model, pn, v)
        #
        # Copy over all the constraints (e.g., tied, fixed...)
        #
        for k, v in sub_model._constraints.items():
            new_sub_model._constraints[k] = v
        #
        # Convert the bounds parameter
        # NOTE(review): this uses ``pn`` left over from the loop above, so
        # only the *last* parameter's bounds are converted -- confirm whether
        # the loop below was meant to sit inside the parameter loop.
        #
        new_bounds = []
        for a in sub_model.bounds[pn]:
            v = _convert_and_dequantify(a, dispersion_unit, dispersion, flux_unit, convert=convert)
            new_bounds.append(v)
        new_sub_model.bounds[pn] = tuple(new_bounds)
        # The new model now has unitless information in it but has
        # been converted to spectral unit scale.
        model_out_stack.append(new_sub_model)
    # If a compound model we need to re-create it, otherwise
    # it is a single model and we just get the first one (as
    # there is only one).
    if compound_model:
        model_out = _combine_postfix(model_out_stack)
    else:
        model_out = model_out_stack[0]
    return model_out, dispersion.value, flux.value
def _add_units_to_model(model_in, model_orig, spectrum):
    """
    This method adds the units to the model based on the units of the
    model passed in. This is necessary as CompoundModel
    with units does not work in the fitters.
    Note: When CompoundModel with units works in the fitters this method
    can be removed.
    Note: This assumes there are two types of models, those that are
    based on `~astropy.modeling.models.PolynomialModel` and therefore
    require the ``degree`` parameter when instantiating the class, and
    "everything else" that does not require an "extra" parameter for
    class instantiation.
    """
    dispersion = spectrum.spectral_axis
    #
    # If not a compound model, then make a single element
    # list so we can use the for loop below.
    #
    compound_model = model_in.n_submodels() > 1
    if not compound_model:
        model_in_list = [model_in]
        model_orig_list = [model_orig]
    else:
        compound_model_in = model_in
        model_in_list = [c.value for c in model_in._tree.traverse_postorder()]
        model_orig_list = [c.value for c in model_orig._tree.traverse_postorder()]
    model_out_stack = []
    model_index = 0
    #
    # For each model in the list we will convert the values back to
    # the original (sub-)model units.
    #
    for ii, m_in in enumerate(model_in_list):
        #
        # If an operator (ie not Model) then we'll just add
        # to the stack and evaluate at the end.
        #
        if not isinstance(m_in, Model):
            model_out_stack.append(m_in)
            continue
        #
        # Get the corresponding *original* sub-model that
        # will match the current sub-model. From this we will
        # grab the units to apply.
        #
        m_orig = model_orig_list[ii]
        #
        # Make the new sub-model.
        #
        if isinstance(m_in, models.PolynomialModel):
            new_sub_model = m_in.__class__(m_in.degree)
        else:
            new_sub_model = m_in.__class__()
        #
        # Convert the model values from the spectrum units back to the
        # original model units.
        #
        for pi, pn in enumerate(new_sub_model.param_names):
            #
            # Get the parameter from the original model and unit-less model.
            #
            m_orig_param = getattr(m_orig, pn)
            m_in_param = getattr(m_in, pn)
            if hasattr(m_orig_param, 'quantity') and m_orig_param.quantity is not None:
                m_orig_param_quantity = m_orig_param.quantity
                #
                # If a spectral dispersion type of unit...
                # NOTE(review): if the original unit is equivalent to neither
                # the dispersion nor the flux unit, ``v`` is left unbound here
                # and the setattr below raises UnboundLocalError -- confirm
                # all model parameters are one of those two kinds.
                #
                if m_orig_param_quantity.unit.is_equivalent(spectrum.spectral_axis.unit,
                                                            equivalencies=u.equivalencies.spectral()):
                    # If it is a compound model, then we need to get the value from the
                    # actual compound model as the tree is not updated in the fitting
                    if compound_model:
                        current_value = getattr(compound_model_in, '{}_{}'.format(pn, model_index)).value *\
                                        spectrum.spectral_axis.unit
                    else:
                        current_value = m_in_param.value * spectrum.spectral_axis.unit
                    v = current_value.to(m_orig_param_quantity.unit, equivalencies=u.equivalencies.spectral())
                #
                # If a spectral density type of unit...
                #
                elif m_orig_param_quantity.unit.is_equivalent(spectrum.flux.unit,
                                                              equivalencies=u.equivalencies.spectral_density(dispersion)):
                    # If it is a compound model, then we need to get the value from the
                    # actual compound model as the tree is not updated in the fitting
                    if compound_model:
                        current_value = getattr(compound_model_in, '{}_{}'.format(pn, model_index)).value *\
                                        spectrum.flux.unit
                    else:
                        current_value = m_in_param.value * spectrum.flux.unit
                    v = current_value.to(m_orig_param_quantity.unit,
                                         equivalencies=u.equivalencies.spectral_density(dispersion))
            else:
                v = getattr(m_in, pn).value
            #
            # Set the parameter value into the new sub-model.
            #
            setattr(new_sub_model, pn, v)
        #
        # Copy over all the constraints (e.g., tied, fixed, bounds...)
        #
        for k, v in m_orig._constraints.items():
            new_sub_model._constraints[k] = v
        #
        # Add the new unit-filled model onto the stack.
        #
        model_out_stack.append(new_sub_model)
        model_index += 1
    #
    # Create the output model which is either the evaluation
    # of the RPN representation of the model (if a compound model)
    # or just the first element if a non-compound model.
    #
    if compound_model:
        model_out = _combine_postfix(model_out_stack)
    else:
        model_out = model_out_stack[0]
    # If the first parameter is not a Quantity, then at this point we will assume
    # none of them are. (It would be inconsistent for fitting to have a model that
    # has some parameters as Quantities and some values).
    if getattr(model_orig, model_orig.param_names[0]).unit is None:
        model_out = QuantityModel(model_out, spectrum.spectral_axis.unit, spectrum.flux.unit)
    return model_out
def _combine_postfix(equation):
    """
    Evaluate a postfix (RPN) list of astropy models and operator strings.

    The list order is the same as what is output from
    ``model._tree.traverse_postorder()``.

    Parameters
    ----------
    equation : list
        `~astropy.modeling.Model` instances interleaved with operator
        strings ('+', '-', '*', '/', '^', '**').

    Returns
    -------
    The combined compound model (or ``0`` for an empty input, matching the
    previous behavior).

    Raises
    ------
    ValueError
        If an operator appears with fewer than two operands on the stack.
        The previous implementation printed an error and silently returned
        a partial (wrong) result instead of raising.

    Structure modified from
    https://codereview.stackexchange.com/questions/79795/reverse-polish-notation-calculator-in-python
    """
    ops = {'+': operator.add,
           '-': operator.sub,
           '*': operator.mul,
           '/': operator.truediv,
           '^': operator.pow,
           '**': operator.pow}
    stack = []
    result = 0
    for item in equation:
        if isinstance(item, Model):
            # Operands are pushed onto the front of the stack.
            stack.insert(0, item)
        else:
            if len(stack) < 2:
                raise ValueError('Insufficient values in expression: operator '
                                 '{!r} requires two operands'.format(item))
            # Front of the stack is the most recent operand (right-hand side).
            left = stack.pop(1)
            right = stack.pop(0)
            result = ops[item](left, right)
            stack.insert(0, result)
    return result
| 33.669746 | 125 | 0.637492 |
78d9b4825542698fd45b486ab86c6b1283ccf1d3 | 1,495 | py | Python | blogapp/models.py | divijagupta/Blog-Webapp | 186c6989c6d2e87895574944477ffc66177f870b | [
"MIT"
] | null | null | null | blogapp/models.py | divijagupta/Blog-Webapp | 186c6989c6d2e87895574944477ffc66177f870b | [
"MIT"
] | 6 | 2020-06-05T18:22:54.000Z | 2022-03-11T23:24:40.000Z | blogapp/models.py | divijagupta/Blog | 186c6989c6d2e87895574944477ffc66177f870b | [
"MIT"
] | null | null | null | from django.db import models
from django.contrib.auth.models import User
from django.template.defaultfilters import slugify
class Category(models.Model):
    """A blog post category with an auto-generated URL slug."""
    category=models.CharField(max_length=50,unique=True)
    slug_field_category=models.SlugField(max_length=100,blank=True)

    def save(self, *args, **kwargs):
        # Derive the slug from the category name. The previous code slugified
        # the slug field itself, which is blank by default, so the slug always
        # stayed empty.
        self.slug_field_category = slugify(self.category)
        # Forward *args/**kwargs so options like ``using=`` still work.
        super(Category, self).save(*args, **kwargs)
class Author(models.Model):
    # Blog author profile: wraps a Django auth user with a bio and a picture.
    author_user=models.ForeignKey(User,on_delete=models.CASCADE)
    description=models.CharField(max_length=200,unique=True)
    img=models.ImageField(upload_to="AuthorImages/")
class BlogPost(models.Model):
    # A single blog post; deleting its category or author deletes the post.
    base_category=models.ForeignKey(Category,on_delete=models.CASCADE)
    img=models.ImageField(upload_to="BlogImages/")
    # Denormalized upvote counter (not a relation).
    upvote=models.IntegerField(default=0)
    favorite=models.BooleanField(default=False)
    title=models.CharField(max_length=200,unique=True)
    body=models.TextField()
    author=models.ForeignKey(Author,on_delete=models.CASCADE)
    # Set once when the post is created.
    date=models.DateTimeField(auto_now_add=True)
    # Estimated reading time in minutes.
    read_time_minutes=models.IntegerField(default=0)
    class Meta:
        # Newest posts first by default.
        ordering=["-date"]
class Comment(models.Model):
    # A user comment attached to a single blog post.
    blog=models.ForeignKey(BlogPost,on_delete=models.CASCADE)
    user_commented=models.ForeignKey(User,on_delete=models.CASCADE)
    body=models.TextField(max_length=200)
class ContactUs(models.Model):
    # A contact-form submission; intentionally not linked to an auth user.
    name=models.CharField(max_length=50)
    email=models.CharField(max_length=50)
    body=models.TextField()
    company=models.CharField(max_length=100)
# Create your models here.
| 33.977273 | 67 | 0.815385 |
d5658f9b75591d6cb2f6cdc4c0931b8e8cbebcc3 | 4,472 | py | Python | attribution/tests/project.py | markrofail/attribution | 6180e972538bb4adcaf7773889f7c4da67891ea3 | [
"MIT"
] | null | null | null | attribution/tests/project.py | markrofail/attribution | 6180e972538bb4adcaf7773889f7c4da67891ea3 | [
"MIT"
] | null | null | null | attribution/tests/project.py | markrofail/attribution | 6180e972538bb4adcaf7773889f7c4da67891ea3 | [
"MIT"
] | null | null | null | # Copyright 2020 John Reese
# Licensed under the MIT license
import subprocess
from pathlib import Path
from tempfile import TemporaryDirectory
from unittest import TestCase
from unittest.mock import patch
from ..project import Project
from ..tag import Tag, Version
class ProjectTest(TestCase):
def test_project_eq(self):
p1 = Project("foo")
p2 = Project("bar")
p3 = Project("foo")
not_project = 42
self.assertIsNot(p1, p2)
self.assertNotEqual(p1, p2)
self.assertIsNot(p1, p3)
self.assertEqual(p1, p3)
self.assertNotEqual(p1, not_project)
@patch("attribution.project.Tag")
def test_tags(self, tag_mock):
fake_tags = [
Tag(name="v1.0", version=Version("1.0")),
Tag(name="v1.1", version=Version("1.1")),
]
tag_mock.all_tags.return_value = fake_tags
project = Project(name="foo", config={})
result = project.tags
tag_mock.all_tags.assert_called_once()
self.assertEqual(result, fake_tags)
tag_mock.all_tags.reset_mock()
result = project.tags
tag_mock.all_tags.assert_not_called()
self.assertEqual(result, fake_tags)
@patch("attribution.project.LOG")
@patch("attribution.project.sh")
def test_shortlog(self, sh_mock, log_mock):
sh_mock.side_effect = [
" 10 Foo Bar\n",
subprocess.CalledProcessError(1, ()),
]
project = Project("foo")
result = project.shortlog
sh_mock.assert_called_with(project.shortlog_cmd)
self.assertEqual(result, " 10 Foo Bar")
# cached value
sh_mock.reset_mock()
result = project.shortlog
sh_mock.assert_not_called()
self.assertEqual(result, " 10 Foo Bar")
sh_mock.reset_mock()
project = Project("foo")
result = project.shortlog
sh_mock.assert_called_with(project.shortlog_cmd)
log_mock.exception.assert_called_once()
self.assertEqual(result, "")
@patch("attribution.project.Path.cwd")
def test_load(self, cwd_mock):
fake_pyproject = """
[tool.attribution]
name = "fizzbuzz"
"""
with TemporaryDirectory() as td:
td = Path(td)
pyproject = td / "pyproject.toml"
pyproject.write_text(fake_pyproject.strip())
cwd_mock.return_value = td
with self.subTest("pyproject in cwd"):
project = Project.load()
cwd_mock.assert_called_once()
self.assertEqual(project.name, "fizzbuzz")
self.assertEqual(
project.config, {"name": "fizzbuzz", "version_file": True}
)
cwd_mock.reset_mock()
with self.subTest("pyproject in given path"):
project = Project.load(td)
cwd_mock.assert_not_called()
self.assertEqual(project.name, "fizzbuzz")
self.assertEqual(
project.config, {"name": "fizzbuzz", "version_file": True}
)
with self.subTest("pyproject with no version_file defaults to True"):
pyproject.write_text(fake_pyproject.strip())
project = Project.load(td)
self.assertTrue(project.config.get("version_file"))
self.assertEqual(
project.config, {"name": "fizzbuzz", "version_file": True}
)
with self.subTest("pyproject reads version_file"):
pyproject.write_text(fake_pyproject.strip() + "\nversion_file=false")
project = Project.load(td)
self.assertFalse(project.config.get("version_file"))
self.assertEqual(
project.config, {"name": "fizzbuzz", "version_file": False}
)
with self.subTest("empty pyproject"):
pyproject.write_text("\n")
project = Project.load(td)
cwd_mock.assert_not_called()
self.assertEqual(project.name, td.name)
self.assertEqual(project.config, {})
with self.subTest("no pyproject"):
pyproject.unlink()
project = Project.load(td)
cwd_mock.assert_not_called()
self.assertEqual(project.name, td.name)
self.assertEqual(project.config, {})
| 33.878788 | 85 | 0.579383 |
faaf1a4b943290509aa5dcb6bebbd18ee6efd394 | 880 | py | Python | pcg_gazebo/parsers/sdf/kinematic.py | TForce1/pcg_gazebo | 9ff88016b7b6903236484958ca7c6ed9f8ffb346 | [
"ECL-2.0",
"Apache-2.0"
] | 40 | 2020-02-04T18:16:49.000Z | 2022-02-22T11:36:34.000Z | pcg_gazebo/parsers/sdf/kinematic.py | awesomebytes/pcg_gazebo | 4f335dd460ef7c771f1df78b46a92fad4a62cedc | [
"ECL-2.0",
"Apache-2.0"
] | 75 | 2020-01-23T13:40:50.000Z | 2022-02-09T07:26:01.000Z | pcg_gazebo/parsers/sdf/kinematic.py | GimpelZhang/gazebo_world_generator | eb7215499d0ddc972d804c988fadab1969579b1b | [
"ECL-2.0",
"Apache-2.0"
] | 18 | 2020-09-10T06:35:41.000Z | 2022-02-20T19:08:17.000Z | # Copyright (c) 2019 - The Procedural Generation for Gazebo authors
# For information on the respective copyright owner see the NOTICE file
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ..types import XMLBoolean
class Kinematic(XMLBoolean):
    """SDF ``<kinematic>`` element: a boolean flag stored on a body/link."""

    _NAME = 'kinematic'
    _TYPE = 'sdf'

    def __init__(self, default=False):
        # XMLBoolean handles storage and validation of the boolean value.
        super(Kinematic, self).__init__(default)
| 35.2 | 74 | 0.748864 |
89fdb88ced520dcf75fd7e4ea5df94258761051f | 10,658 | py | Python | test/functional/wallet_listtransactions.py | sunrisellc/banditocoin | 731dcea8fdab8e78e9aaecd0ea8ef12c153c210f | [
"MIT"
] | null | null | null | test/functional/wallet_listtransactions.py | sunrisellc/banditocoin | 731dcea8fdab8e78e9aaecd0ea8ef12c153c210f | [
"MIT"
] | null | null | null | test/functional/wallet_listtransactions.py | sunrisellc/banditocoin | 731dcea8fdab8e78e9aaecd0ea8ef12c153c210f | [
"MIT"
] | 1 | 2022-01-09T06:41:03.000Z | 2022-01-09T06:41:03.000Z | #!/usr/bin/env python3
# Copyright (c) 2014-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the listtransactions API."""
from decimal import Decimal
from io import BytesIO
from test_framework.messages import COIN, CTransaction
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_array_result,
assert_equal,
bytes_to_hex_str,
hex_str_to_bytes,
sync_mempools,
)
def tx_from_hex(hexstring):
    """Deserialize a raw-transaction hex string into a CTransaction."""
    raw = hex_str_to_bytes(hexstring)
    transaction = CTransaction()
    transaction.deserialize(BytesIO(raw))
    return transaction
class ListTransactionsTest(BitcoinTestFramework):
"""Exercise listtransactions/gettransaction for sends, sendmany, watch-only and RBF."""
def set_test_params(self):
self.num_nodes = 2
self.enable_mocktime()
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
# Simple send, 0 to 1:
txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
self.sync_all()
assert_array_result(self.nodes[0].listtransactions(),
{"txid": txid},
{"category": "send", "amount": Decimal("-0.1"), "confirmations": 0})
assert_array_result(self.nodes[1].listtransactions(),
{"txid": txid},
{"category": "receive", "amount": Decimal("0.1"), "confirmations": 0})
# mine a block, confirmations should change:
self.nodes[0].generate(1)
self.sync_all()
assert_array_result(self.nodes[0].listtransactions(),
{"txid": txid},
{"category": "send", "amount": Decimal("-0.1"), "confirmations": 1})
assert_array_result(self.nodes[1].listtransactions(),
{"txid": txid},
{"category": "receive", "amount": Decimal("0.1"), "confirmations": 1})
# send-to-self:
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 0.2)
assert_array_result(self.nodes[0].listtransactions(),
{"txid": txid, "category": "send"},
{"amount": Decimal("-0.2")})
assert_array_result(self.nodes[0].listtransactions(),
{"txid": txid, "category": "receive"},
{"amount": Decimal("0.2")})
# sendmany from node1: twice to self, twice to node2:
send_to = {self.nodes[0].getnewaddress(): 0.11,
self.nodes[1].getnewaddress(): 0.22,
self.nodes[0].getnewaddress(): 0.33,
self.nodes[1].getnewaddress(): 0.44}
txid = self.nodes[1].sendmany("", send_to)
self.sync_all()
assert_array_result(self.nodes[1].listtransactions(),
{"category": "send", "amount": Decimal("-0.11")},
{"txid": txid})
assert_array_result(self.nodes[0].listtransactions(),
{"category": "receive", "amount": Decimal("0.11")},
{"txid": txid})
assert_array_result(self.nodes[1].listtransactions(),
{"category": "send", "amount": Decimal("-0.22")},
{"txid": txid})
assert_array_result(self.nodes[1].listtransactions(),
{"category": "receive", "amount": Decimal("0.22")},
{"txid": txid})
assert_array_result(self.nodes[1].listtransactions(),
{"category": "send", "amount": Decimal("-0.33")},
{"txid": txid})
assert_array_result(self.nodes[0].listtransactions(),
{"category": "receive", "amount": Decimal("0.33")},
{"txid": txid})
assert_array_result(self.nodes[1].listtransactions(),
{"category": "send", "amount": Decimal("-0.44")},
{"txid": txid})
assert_array_result(self.nodes[1].listtransactions(),
{"category": "receive", "amount": Decimal("0.44")},
{"txid": txid})
# 1-of-1 multisig imported watch-only on node0: its receive should only
# be visible when include_watchonly=True is passed.
pubkey = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress())['pubkey']
multisig = self.nodes[1].createmultisig(1, [pubkey])
self.nodes[0].importaddress(multisig["redeemScript"], "watchonly", False, True)
txid = self.nodes[1].sendtoaddress(multisig["address"], 0.1)
self.nodes[1].generate(1)
self.sync_all()
assert len(self.nodes[0].listtransactions(label="watchonly", count=100, skip=0, include_watchonly=False)) == 0
assert_array_result(self.nodes[0].listtransactions(label="watchonly", count=100, skip=0, include_watchonly=True),
{"category": "receive", "amount": Decimal("0.1")},
{"txid": txid, "label": "watchonly"})
# Bandito has RBF disabled
# self.run_rbf_opt_in_test()
# Check that the opt-in-rbf flag works properly, for sent and received
# transactions.
# (Currently not invoked -- see the disabled call above.)
def run_rbf_opt_in_test(self):
# Check whether a transaction signals opt-in RBF itself
def is_opt_in(node, txid):
rawtx = node.getrawtransaction(txid, 1)
for x in rawtx["vin"]:
if x["sequence"] < 0xfffffffe:
return True
return False
# Find an unconfirmed output matching a certain txid
def get_unconfirmed_utxo_entry(node, txid_to_match):
utxo = node.listunspent(0, 0)
for i in utxo:
if i["txid"] == txid_to_match:
return i
return None
# 1. Chain a few transactions that don't opt-in.
txid_1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1)
assert(not is_opt_in(self.nodes[0], txid_1))
assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_1}, {"bip125-replaceable": "no"})
sync_mempools(self.nodes)
assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_1}, {"bip125-replaceable": "no"})
# Tx2 will build off txid_1, still not opting in to RBF.
utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[0], txid_1)
assert_equal(utxo_to_use["safe"], True)
utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[1], txid_1)
# NOTE(review): the next call duplicates the previous line verbatim; the
# first result is simply overwritten.
utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[1], txid_1)
assert_equal(utxo_to_use["safe"], False)
# Create tx2 using createrawtransaction
inputs = [{"txid": utxo_to_use["txid"], "vout": utxo_to_use["vout"]}]
outputs = {self.nodes[0].getnewaddress(): 0.999}
tx2 = self.nodes[1].createrawtransaction(inputs, outputs)
tx2_signed = self.nodes[1].signrawtransactionwithwallet(tx2)["hex"]
txid_2 = self.nodes[1].sendrawtransaction(tx2_signed)
# ...and check the result
assert(not is_opt_in(self.nodes[1], txid_2))
assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_2}, {"bip125-replaceable": "no"})
sync_mempools(self.nodes)
assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_2}, {"bip125-replaceable": "no"})
# Tx3 will opt-in to RBF
utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[0], txid_2)
inputs = [{"txid": txid_2, "vout": utxo_to_use["vout"]}]
outputs = {self.nodes[1].getnewaddress(): 0.998}
tx3 = self.nodes[0].createrawtransaction(inputs, outputs)
tx3_modified = tx_from_hex(tx3)
tx3_modified.vin[0].nSequence = 0
tx3 = bytes_to_hex_str(tx3_modified.serialize())
tx3_signed = self.nodes[0].signrawtransactionwithwallet(tx3)['hex']
txid_3 = self.nodes[0].sendrawtransaction(tx3_signed)
assert(is_opt_in(self.nodes[0], txid_3))
assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_3}, {"bip125-replaceable": "yes"})
sync_mempools(self.nodes)
assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_3}, {"bip125-replaceable": "yes"})
# Tx4 will chain off tx3. Doesn't signal itself, but depends on one
# that does.
utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[1], txid_3)
inputs = [{"txid": txid_3, "vout": utxo_to_use["vout"]}]
outputs = {self.nodes[0].getnewaddress(): 0.997}
tx4 = self.nodes[1].createrawtransaction(inputs, outputs)
tx4_signed = self.nodes[1].signrawtransactionwithwallet(tx4)["hex"]
txid_4 = self.nodes[1].sendrawtransaction(tx4_signed)
assert(not is_opt_in(self.nodes[1], txid_4))
assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_4}, {"bip125-replaceable": "yes"})
sync_mempools(self.nodes)
assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_4}, {"bip125-replaceable": "yes"})
# Replace tx3, and check that tx4 becomes unknown
tx3_b = tx3_modified
tx3_b.vout[0].nValue -= int(Decimal("0.004") * COIN) # bump the fee
tx3_b = bytes_to_hex_str(tx3_b.serialize())
tx3_b_signed = self.nodes[0].signrawtransactionwithwallet(tx3_b)['hex']
txid_3b = self.nodes[0].sendrawtransaction(tx3_b_signed, True)
assert(is_opt_in(self.nodes[0], txid_3b))
assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_4}, {"bip125-replaceable": "unknown"})
sync_mempools(self.nodes)
assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_4}, {"bip125-replaceable": "unknown"})
# Check gettransaction as well:
for n in self.nodes[0:2]:
assert_equal(n.gettransaction(txid_1)["bip125-replaceable"], "no")
assert_equal(n.gettransaction(txid_2)["bip125-replaceable"], "no")
assert_equal(n.gettransaction(txid_3)["bip125-replaceable"], "yes")
assert_equal(n.gettransaction(txid_3b)["bip125-replaceable"], "yes")
assert_equal(n.gettransaction(txid_4)["bip125-replaceable"], "unknown")
# After mining a transaction, it's no longer BIP125-replaceable
self.nodes[0].generate(1)
assert(txid_3b not in self.nodes[0].getrawmempool())
assert_equal(self.nodes[0].gettransaction(txid_3b)["bip125-replaceable"], "no")
assert_equal(self.nodes[0].gettransaction(txid_4)["bip125-replaceable"], "unknown")
if __name__ == '__main__':
# Standard BitcoinTestFramework entry point.
ListTransactionsTest().main()
| 50.273585 | 121 | 0.601426 |
dc9338327c4d0a04e3f87fe9dbf7d6e51e08b840 | 8,964 | py | Python | server/util/device_info.py | joweeba/mrtaskman | cef92f11cca3de45c77b76a68a91d85af9c8fb48 | [
"Apache-2.0"
] | null | null | null | server/util/device_info.py | joweeba/mrtaskman | cef92f11cca3de45c77b76a68a91d85af9c8fb48 | [
"Apache-2.0"
] | null | null | null | server/util/device_info.py | joweeba/mrtaskman | cef92f11cca3de45c77b76a68a91d85af9c8fb48 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides info about an attached device depending on environment variables.
Expects that DEVICE_SN will be set in process environment.
"""
__author__ = 'jeff.carollo@gmail.com (Jeff Carollo)'
import json
import logging
import os
import sys
# Map from device serial number -> static metadata about that device.
# Several physical devices appear under two serial numbers (paired entries
# with identical metadata below).
#
# BUG FIX: the "commented out" first entry used a bare triple-quoted string
# inside the dict literal. Adjacent string literals concatenate implicitly
# in Python, so that string fused with the 'SH0CJLV00997' key and corrupted
# its entry. Real '#' comments are used instead.
DEVICE_INFO = {
  # Jeff's Phone (disabled):
  # '35326BF6F6C300EC': {
  #   'device_type': 'phone',
  #   'device_name': 'Google Nexus S',
  #   'os_name': 'android',
  #   'os_version': '4.0.2',
  #   'cell_number': '9795741534',
  #   'provider': 'T-Mobile',
  #   'hub': 'leonardo',
  #   'hub_port': '*'
  # },
  'SH0CJLV00997': {
    'device_type': 'phone',
    'device_name': 'T-Mobile MyTouch 3G',
    'os_name': 'android',
    'os_version': '1.6',
    'provider': 'T-Mobile',
    'hub': '02',
    'hub_port': 'D'
  },
  '123456789012': {
    'device_type': 'phone',
    'device_name': 'T-Mobile MyTouch 3G',
    'os_name': 'android',
    'os_version': '1.6',
    'provider': 'T-Mobile',
    'hub': '02',
    'hub_port': 'D'
  },
  '3233A90D16A800EC': {
    'device_type': 'phone',
    'device_name': 'Google Nexus S',
    'os_name': 'android',
    'os_version': '2.3.6',
    'provider': 'T-Mobile',
    'hub': '02',
    'hub_port': 'C'
  },
  '328C000600000001': {
    'device_type': 'tablet',
    'device_name': 'Amazon Kindle Fire',
    'os_name': 'android',
    'os_version': '2.3',
    'hub': '01',
    'hub_port': 'G'
  },
  '902a6d03': {
    'device_type': 'tablet',
    'device_name': 'Samsung Galaxy Tab 8.9',
    'os_name': 'android',
    'os_version': '3.2',
    'provider': 'AT&T',
    'hub': '02',
    'hub_port': 'B'
  },
  '720b50a3': {
    'device_type': 'tablet',
    'device_name': 'Samsung Galaxy Tab 8.9',
    'os_name': 'android',
    'os_version': '3.2',
    'provider': 'AT&T',
    'hub': '02',
    'hub_port': 'B'
  },
  'LGOTMSae4105e': {
    'device_type': 'phone',
    'device_name': 'LG Nitro HD',
    'os_name': 'android',
    'os_version': '2.3.5',
    'cell_number': '7744208329',
    'provider': 'AT&T',
    'hub': '02',
    'hub_port': 'A'
  },
  'LGOTMS1d96ae': {
    'device_type': 'phone',
    'device_name': 'LG Nitro HD',
    'os_name': 'android',
    'os_version': '2.3.5',
    'cell_number': '7744208329',
    'provider': 'AT&T',
    'hub': '02',
    'hub_port': 'A'
  },
  '0146A14C1001800C': {
    'device_type': 'phone',
    'device_name': 'Galaxy Nexus',
    'os_name': 'android',
    'os_version': '4.0.2',
    'cell_number': '4255771762',
    'provider': 'Verizon Wireless',
    'hub': '01',
    'hub_port': 'D'
  },
  '0146882E0D01A005': {
    'device_type': 'phone',
    'device_name': 'Galaxy Nexus',
    'os_name': 'android',
    'os_version': '4.0.2',
    'cell_number': '4255771762',
    'provider': 'Verizon Wireless',
    'hub': '01',
    'hub_port': 'D'
  },
  'HT16RS015741': {
    'device_type': 'phone',
    'device_name': 'HTC Thunderbolt',
    'os_name': 'android',
    'os_version': '2.3.4',
    'cell_number': '4258908379',
    'provider': 'Verizon Wireless',
    'hub': '01',
    'hub_port': 'B'
  },
  'HT11JS002214': {
    'device_type': 'phone',
    'device_name': 'HTC Thunderbolt',
    'os_name': 'android',
    'os_version': '2.3.4',
    'cell_number': '4258908379',
    'provider': 'Verizon Wireless',
    'hub': '01',
    'hub_port': 'B'
  },
  'TA08200CI0': {
    'device_type': 'phone',
    'device_name': 'Motorola Droid X2',
    'os_name': 'android',
    'os_version': '2.3.4',
    'cell_number': '4258909336',
    'provider': 'Verizon Wireless',
    'hub': '01',
    'hub_port': 'A'
  },
  'TA08401PHI': {
    'device_type': 'phone',
    'device_name': 'Motorola Droid X2',
    'os_name': 'android',
    'os_version': '2.3.4',
    'cell_number': '4258909336',
    'provider': 'Verizon Wireless',
    'hub': '01',
    'hub_port': 'A'
  },
  '4342354131485A483144': {
    'device_type': 'phone',
    'device_name': 'Sony Ericson Xperia PLAY 4G',
    'os_name': 'android',
    'os_version': '2.3.3',
    'cell_number': '7744208420',
    'provider': 'AT&T',
    'hub': '01',
    'hub_port': 'E'
  },
  '43423541314A59474A32': {
    'device_type': 'phone',
    'device_name': 'Sony Ericson Xperia PLAY 4G',
    'os_name': 'android',
    'os_version': '2.3.3',
    'cell_number': '7744208420',
    'provider': 'AT&T',
    'hub': '01',
    'hub_port': 'E'
  },
  '388920443A07097': {
    'device_type': 'tablet',
    'device_name': 'Samsung Galaxy Tab',
    'os_name': 'android',
    'os_version': '3.2',
    'provider': 'Verizon Wireless',
    'hub': '01',
    'hub_port': 'C'
  },
  '42801C5422043D7': {
    'device_type': 'tablet',
    'device_name': 'Samsung Galaxy Tab',
    'os_name': 'android',
    'os_version': '3.2',
    'provider': 'Verizon Wireless',
    'hub': '01',
    'hub_port': 'C'
  },
  '304D191A2004639E': {
    'device_type': 'phone',
    'device_name': 'Samsung Galaxy S II',
    'os_name': 'android',
    'os_version': '2.3.4',
    'cell_number': 'unknown',
    'provider': 'AT&T',
    'hub': '01',
    'hub_port': 'F'
  },
  '304D19FD2E23B83F': {
    'device_type': 'phone',
    'device_name': 'Samsung Galaxy S II',
    'os_name': 'android',
    'os_version': '2.3.4',
    'cell_number': 'unknown',
    'provider': 'AT&T',
    'hub': '01',
    'hub_port': 'F'
  },
}
# Cached once at import time; shouldn't change for the life of this process.
DEVICE_SN = os.environ.get('DEVICE_SN', None)


def GetDeviceSerialNumber():
  """Returns the serial number of the device assigned to the current worker.

  Pulls from environment variables (cached at import time).

  Returns:
    Serial number as str, or None.
  """
  return DEVICE_SN
def GetDeviceInfo(device_sn=DEVICE_SN):
  """Retrieves device info from given device serial number."""
  # Unknown serial numbers map to None rather than raising.
  info = DEVICE_INFO.get(device_sn)
  return info
def AppendIf(l, value):
  """Appends value to list l only when value is truthy."""
  if not value:
    return
  l.append(value)
def GetCapabilities():
  """Returns a list of capabilities of device from environment or None."""
  caps = []
  # Serial number first (only when set), then metadata fields when known.
  AppendIf(caps, DEVICE_SN)
  info = GetDeviceInfo()
  if info:
    for key in ('device_name', 'device_type', 'os_name', 'os_version',
                'provider'):
      AppendIf(caps, info.get(key))
  return caps
try:
import subprocess
def AdbDevices():
"""Returns the devices recognized by adb as a list of str."""
devices = []
# Shell pipeline: list adb devices, keep lines ending in "device",
# then strip the trailing state column with sed.
command = ('adb devices 2>&1 | grep "device$" | '
'sed "s/\([a-zA-Z0-9]\)*\s*device/\1/g"')
try:
output = subprocess.check_output(command, shell=True)
# NOTE: Python 2 "except ExcType, e" syntax -- this module targets py2.
except subprocess.CalledProcessError, e:
logging.error('Unable to invoke adb.')
logging.exception(e)
return devices
except OSError, e:
logging.error('Unable to invoke adb.')
logging.exception(e)
return devices
lines = output.split('\n')
for line in lines:
line = line.strip()
# NOTE(review): appears to trim a stray "\t\x01" marker left in the
# pipeline output -- confirm against the adb version in use.
index = line.find('\t\x01')
if index >= 0 and index < len(line):
line = line[0:index]
if line:
devices.append(line)
return devices
def DeviceIsConnected(device_sn=DEVICE_SN):
"""Returns True iff device_sn currently appears in `adb devices`."""
devices = AdbDevices()
return device_sn in devices
except ImportError:
# Allow AppEngine clients to ignore this.
def AdbDevices():
"""Not defined for AppEngine."""
return []
def main(argv):
device_ids = sys.stdin.read().split('\n')
for device_id in device_ids:
if device_id:
print '"%s": %s' % (device_id,
json.dumps(GetDeviceInfo(device_id), indent=2))
if __name__ == '__main__':
# Command-line entry point: reads device serial numbers on stdin.
main(sys.argv)
| 27.496933 | 77 | 0.548527 |
45a7ef1db3e186205cde737187780c4affabd7d7 | 369 | py | Python | manga_py/providers/__list.py | gromenauer/manga-py | 2bc5a8bc87877d4447e08e48045fad82ade1ebd0 | [
"MIT"
] | 4 | 2018-07-05T11:03:22.000Z | 2020-03-27T13:21:56.000Z | manga_py/providers/__list.py | gromenauer/manga-py | 2bc5a8bc87877d4447e08e48045fad82ade1ebd0 | [
"MIT"
] | null | null | null | manga_py/providers/__list.py | gromenauer/manga-py | 2bc5a8bc87877d4447e08e48045fad82ade1ebd0 | [
"MIT"
] | 1 | 2021-02-05T06:18:31.000Z | 2021-02-05T06:18:31.000Z | providers_list = [
# Each entry names the provider module, its selection priority, and the URL
# regexes ('templates') that route a URL to that provider.
{
'provider': 'manga_py.providers.readmanga_me',
'priority': 5, # default
'templates': [
r'readmanga\.me/.',
]
},
# Template for adding another provider entry:
# {
# 'provider': 'manga_py.providers.readmanga_me',
# 'priority': 5, # default
# 'templates': [
# r'readmanga\.me/.',
# ]
# },
]
| 21.705882 | 56 | 0.439024 |
bc77fbe4568ef8c68253a5903cdc575d2f036bed | 5,828 | py | Python | Prototypes/Wi-Fi/wireless.py | jamestiotio/deLIGHT | 0a836760d0bfa6a9f854fa855c5792e65448b2e8 | [
"MIT"
] | 2 | 2020-05-30T17:17:50.000Z | 2021-01-23T17:20:22.000Z | Prototypes/Wi-Fi/wireless.py | jamestiotio/deLIGHT | 0a836760d0bfa6a9f854fa855c5792e65448b2e8 | [
"MIT"
] | 1 | 2022-01-07T08:20:26.000Z | 2022-01-07T09:43:55.000Z | Prototypes/Wi-Fi/wireless.py | jamestiotio/deLIGHT | 0a836760d0bfa6a9f854fa855c5792e65448b2e8 | [
"MIT"
] | null | null | null | # Wi-Fi Lighter Unlock Script
# Use ampy to put the code and use rshell to REPL into ESP32 and run the script.
# Set it to run on boot by renaming the file to main.py
# Created by James Raphael Tiovalen (2019)
# Import libraries
import utime
import network
import machine
from ST7735 import TFT
from sysfont import sysfont
import math
import esp
esp.osdebug(None)
import gc
gc.collect()
# Define Wi-Fi Access Point SSID & Password
ap_ssid = 'deLIGHTer'
ap_password = ''
# NOTE(review): entries are compared against ap.status('stations') records,
# so these are presumably station MAC addresses (bytes) -- confirm.
authorized_users = [b''] # Extend this to a non-hardcoded whitelist
# Define Wi-Fi Station SSID & Password
sta_ssid = ''
sta_password = ''
# Define PWM variables for servo control
# Duty for servo is between 41 - 120, but mileage might differ
pin = machine.Pin(26)
servo = machine.PWM(pin, freq=50)
# Servo duty cycles for the two lid positions.
LID_CLOSE = 120
LID_OPEN = 60
# Software lid state ('CLOSED' or 'OPEN'); mirrors the servo position.
servo_state = 'CLOSED'
# Define button for closing servo
button = machine.Pin(37, machine.Pin.IN)
# Enable LCD power through power management IC (AXP192)
def enable_lcd_power():
# NOTE(review): 0x34 is presumably the AXP192's I2C address and 0x28/0x12
# its output-control registers -- confirm against the AXP192 datasheet.
i2c = machine.I2C(-1, scl=machine.Pin(22), sda=machine.Pin(21), freq=100000)
i2c.writeto_mem(0x34, 0x28, b'\xff')
axp192_reg12 = i2c.readfrom_mem(0x34, 0x12, 1)[0]
# Read-modify-write: set two bits without clobbering the rest of reg 0x12.
axp192_reg12 |= 0x0c
i2c.writeto_mem(0x34, 0x12, bytes([axp192_reg12]))
enable_lcd_power()
# Define variables for LCD access and print title
spi = machine.SPI(1, baudrate=27000000, polarity=0, phase=0, bits=8, firstbit=machine.SPI.MSB, sck=machine.Pin(13), mosi=machine.Pin(15)) # Set baudrate way high but will be clamped to a maximum in SPI constructor
tft = TFT(spi,23,18,5)
tft.initr() # Initialize LCD screen
tft.invertcolor(True) # This is required for RGB to be parsed correctly (for some reason, 0x00 and 0xFF are flipped on normal mode)
tft.rgb(True)
tft.rotation(3) # Rotate to landscape mode
tft.fill() # We use black background since text chars would be encapsulated by black background, not transparent
tft.text((20,40), 'deLIGHT', TFT.YELLOW, sysfont, 3, nowrap=True)
tft.text((20,70), 'Wi-Fi', TFT.CYAN, sysfont, 2, nowrap=True)
def station():
    """Join the configured home Wi-Fi network in station (client) mode."""
    # Enable station interface
    sta = network.WLAN(network.STA_IF)
    sta.active(True)
    # Main station loop: rescan until the home SSID is found and the link
    # comes up.
    while True:
        for found in sta.scan():
            if found[0] == bytes(sta_ssid, 'utf-8'):
                sta.connect(sta_ssid, sta_password)
        # If connected to home network, break scan loop
        if sta.isconnected():
            print('Connected to home network!')
            break
def access_point():
"""Run the lighter as its own Wi-Fi AP and gate the lid servo on clients."""
# Enable access point interface
ap = network.WLAN(network.AP_IF)
ap.active(True)
ap.config(essid=ap_ssid,authmode=network.AUTH_WPA_WPA2_PSK, password=ap_password)
# Condition checking barrier for active access point
while ap.active() == False:
pass
print('Access point created!')
print(ap.ifconfig()) # Returns (ip, subnet/netmask, gateway, dns) as a tuple
global servo_state
# Define AP internal functions
def wait_for_release():
# Busy-wait until the main button reads 1 (released; appears active-low,
# since the handlers below treat value() == 0 as "pressed" -- confirm).
while True:
if button.value() == 1:
return
def button_check():
# Close the lid and show why: client disconnect vs. button press.
global servo_state
while True:
if servo_state == 'OPEN':
servo.duty(LID_CLOSE)
if not ap.isconnected():
tft.fill()
tft.text((20,40), 'Authorized device', TFT.RED, sysfont, 1, nowrap=True)
tft.text((20,50), 'disconnected!', TFT.RED, sysfont, 1, nowrap=True)
tft.text((20,70), 'Closing...', TFT.RED, sysfont, 1, nowrap=True)
else:
tft.fill()
tft.text((20,40), 'Main button pressed!', TFT.RED, sysfont, 1, nowrap=True)
tft.text((20,70), 'Closing...', TFT.RED, sysfont, 1, nowrap=True)
servo_state = 'CLOSED'
return
def device_scan():
# Open the lid when a whitelisted station is connected and the button
# is held.
global servo_state
while True:
if button.value() == 0 and ap.isconnected(): # TODO: Add condition to pass to integrated finger module and do fingerprint method if ap.isconnected() == false (possibly use https://github.com/stinos/micropython-wrap)
for i in range(len(ap.status('stations'))):
print('Got a connection from %s.' % str(ap.status('stations')[i][0]))
if ap.status('stations')[i][0] in authorized_users and servo_state == 'CLOSED':
# Duty for servo is between 41 - 120, but mileage might differ
servo.duty(LID_OPEN)
servo_state = 'OPEN'
return
# Main control loop: alternate between waiting to open and waiting to close.
while True:
while True:
if button.value() == 0 and servo_state == 'CLOSED':
tft.fill()
tft.text((20,40), 'Press main button to', TFT.YELLOW, sysfont, 1, nowrap=True)
tft.text((20,50), 'open the lighter!', TFT.YELLOW, sysfont, 1, nowrap=True)
device_scan()
tft.fill()
tft.text((20,40), 'Authorized device', TFT.GREEN, sysfont, 1, nowrap=True)
tft.text((20,50), 'connected!', TFT.GREEN, sysfont, 1, nowrap=True)
tft.text((20,70), 'Opening...', TFT.GREEN, sysfont, 1, nowrap=True)
wait_for_release()
break
while True:
if (servo_state == 'OPEN') and (button.value() == 0 or not ap.isconnected()):
button_check()
wait_for_release()
break
def main():
# Entry point: bring up the access-point control loop.
# station() # TODO: Need to forward network packet data to the Internet and back to corresponding devices so that we can use ESP32 as Wi-Fi Repeater as well to maintain Internet access for connected stations.
access_point()
# Run immediately on boot (this file is installed as main.py -- see header).
main()
| 35.754601 | 228 | 0.611531 |
51b60d526aef5d75bd34d4523db4d80ed5682d2a | 205 | py | Python | acclist/admin.py | ko-naka-3010527/accup-python | e10ff96111be889573d830f4b710d9b8b19e1828 | [
"MIT"
] | null | null | null | acclist/admin.py | ko-naka-3010527/accup-python | e10ff96111be889573d830f4b710d9b8b19e1828 | [
"MIT"
] | null | null | null | acclist/admin.py | ko-naka-3010527/accup-python | e10ff96111be889573d830f4b710d9b8b19e1828 | [
"MIT"
] | null | null | null | from django.contrib import admin
# Import the registered models explicitly: the previous wildcard import
# polluted the module namespace and hid which names this file depends on.
from .models import Account, Address, Mailaddr, Phonenum, Service

# Expose each model in the Django admin with default ModelAdmin options.
admin.site.register(Service)
admin.site.register(Mailaddr)
admin.site.register(Address)
admin.site.register(Phonenum)
admin.site.register(Account)
| 18.636364 | 32 | 0.809756 |
a6ed324ad6b163f045dace772755bfa867fb72da | 585 | py | Python | app/config.py | telezhnaya/playing_with_flask | c813dbe1df19fe2d30dd875d27b70fe8c7ddb270 | [
"MIT"
] | 2 | 2018-10-30T17:18:37.000Z | 2018-11-23T08:36:17.000Z | app/config.py | telezhnaya/playing_with_flask | c813dbe1df19fe2d30dd875d27b70fe8c7ddb270 | [
"MIT"
] | 3 | 2018-11-23T07:33:02.000Z | 2018-11-29T12:04:50.000Z | app/config.py | telezhnaya/playing_with_flask | c813dbe1df19fe2d30dd875d27b70fe8c7ddb270 | [
"MIT"
] | 1 | 2018-11-23T08:36:21.000Z | 2018-11-23T08:36:21.000Z | import os
basedir = os.path.abspath(os.path.dirname(__file__))  # Directory of this config module (not referenced below).
class Config:
# Base configuration shared by all environments. os.environ[...] (vs
# .get) makes a missing secret fail fast with KeyError at import time.
SECRET_KEY = os.environ['SECRET_KEY']
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
SQLALCHEMY_TRACK_MODIFICATIONS = True
@classmethod
def init_app(cls, app):
"""Hook for per-environment app initialisation; base class does nothing."""
class DevelopmentConfig(Config):
# Development: enable Flask debug mode.
DEBUG = True
class TestingConfig(Config):
# Testing: enable Flask testing mode.
TESTING = True
class ProductionConfig(Config):
"""Production settings; currently identical to the base Config."""
# Name -> config class lookup used when creating the application.
config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'production': ProductionConfig,
'default': DevelopmentConfig,
}
| 16.714286 | 56 | 0.680342 |
bf0eceae1acf1187fbf31016cfb0a82e647551b6 | 646 | py | Python | trails/feeds/feodotrackerdns.py | Jahismighty/maltrail | 9bc70430993b2140ceb4dbac4b487251a9254416 | [
"MIT"
] | 1 | 2021-01-10T14:35:36.000Z | 2021-01-10T14:35:36.000Z | trails/feeds/feodotrackerdns.py | Jahismighty/maltrail | 9bc70430993b2140ceb4dbac4b487251a9254416 | [
"MIT"
] | 29 | 2018-10-18T20:14:49.000Z | 2019-07-08T07:45:08.000Z | trails/feeds/feodotrackerdns.py | Jahismighty/maltrail | 9bc70430993b2140ceb4dbac4b487251a9254416 | [
"MIT"
] | 2 | 2018-11-29T22:50:57.000Z | 2019-04-12T03:35:35.000Z | #!/usr/bin/env python
"""
Copyright (c) 2014-2018 Miroslav Stampar (@stamparm)
See the file 'LICENSE' for copying permission
"""
from core.common import retrieve_content
__url__ = "https://feodotracker.abuse.ch/blocklist/?download=domainblocklist"
__check__ = "Feodo"
__info__ = "feodo (malware)"
__reference__ = "abuse.ch"


def fetch():
    """Download the Feodo domain blocklist and map each domain to trail info."""
    retval = {}
    content = retrieve_content(__url__)
    if __check__ not in content:
        # Sanity marker missing: treat the download as failed/empty.
        return retval
    for raw_line in content.split('\n'):
        domain = raw_line.strip()
        # Skip blanks and '#' comment lines from the feed.
        if domain and not domain.startswith('#'):
            retval[domain] = (__info__, __reference__)
    return retval
| 23.925926 | 77 | 0.651703 |
a7852cda9648199ee7be17d096ae1d11bd49e4fd | 367 | py | Python | src/Task.py | Chamerli-sh/Giggle | 9608971b588bc26ef278e43f508da5f1a2c07e92 | [
"MIT"
] | null | null | null | src/Task.py | Chamerli-sh/Giggle | 9608971b588bc26ef278e43f508da5f1a2c07e92 | [
"MIT"
] | null | null | null | src/Task.py | Chamerli-sh/Giggle | 9608971b588bc26ef278e43f508da5f1a2c07e92 | [
"MIT"
] | null | null | null | class Task():
check = False
def __init__(self, name):
self.name = name
def set_check(self, state=False):
Task.check = state
def is_checked(self):
return Task.check
def str_check(self):
match self.check:
case True:
return '✅'
case False:
return '❌' | 20.388889 | 37 | 0.493188 |
6881f73f27d2ff45eceb7523e18fdd2d1afe467b | 1,990 | py | Python | opinion_classification/electra/finetune/scorer.py | 10jqka-aicubes/opinion_classification | 43f193522b033bd857d294737b3f9dbaac7aed9f | [
"MIT"
] | null | null | null | opinion_classification/electra/finetune/scorer.py | 10jqka-aicubes/opinion_classification | 43f193522b033bd857d294737b3f9dbaac7aed9f | [
"MIT"
] | null | null | null | opinion_classification/electra/finetune/scorer.py | 10jqka-aicubes/opinion_classification | 43f193522b033bd857d294737b3f9dbaac7aed9f | [
"MIT"
] | null | null | null | # coding=utf-8
# Copyright 2020 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base class for evaluation metrics."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import pdb
class Scorer(object):
  """Abstract base class for computing evaluation metrics."""

  # NOTE: __metaclass__ only activates ABCMeta under Python 2; on Python 3
  # the @abc.abstractmethod markers are not enforced. Kept as-is because the
  # module targets py2/py3 (see the __future__ imports above).
  __metaclass__ = abc.ABCMeta

  def __init__(self):
    self._updated = False       # True once update() has seen new results
    self._cached_results = {}   # last computed results, reused until updated

  @abc.abstractmethod
  def update(self, results):
    """Accumulate one batch of results; marks cached metrics as stale."""
    self._updated = True

  @abc.abstractmethod
  def get_loss(self):
    """Return the scalar loss associated with the accumulated results."""
    pass

  @abc.abstractmethod
  def _get_results(self):
    """Compute metrics as a list of (name, value) pairs."""
    return []

  @abc.abstractmethod
  def _get_results_output(self):
    """Compute per-example outputs as a dict."""
    return {}

  @abc.abstractmethod
  def _get_results_multilabel(self):
    """Compute multi-label results as a dict."""
    return {}

  # (A stray bare-name expression `_get_results_multilabel` previously sat
  # here in the class body; it was a no-op and has been removed.)

  def get_results(self, prefix=""):
    """Return (possibly cached) metrics, each name prepended with `prefix`."""
    results = self._get_results() if self._updated else self._cached_results
    self._cached_results = results
    self._updated = False
    return [(prefix + k, v) for k, v in results]

  def get_results_outputall(self, prefix=""):
    """Like get_results but via _get_results_outputall; `prefix` is unused.

    NOTE(review): _get_results_outputall is not defined on this base class;
    subclasses are presumably expected to provide it -- confirm.
    """
    results = self._get_results_outputall() if self._updated else self._cached_results
    self._cached_results = results
    self._updated = False
    return results

  def results_str(self):
    """Render current metrics as 'name: 1.23 - name2: 4.56'."""
    return " - ".join(["{:}: {:.2f}".format(k, v) for k, v in self.get_results()])
| 27.260274 | 90 | 0.690955 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.