hexsha
stringlengths 40
40
| size
int64 1
1.03M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
239
| max_stars_repo_name
stringlengths 5
130
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
239
| max_issues_repo_name
stringlengths 5
130
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
239
| max_forks_repo_name
stringlengths 5
130
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 1
1.03M
| avg_line_length
float64 1
958k
| max_line_length
int64 1
1.03M
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4a00591a1d883ffa8d7c775aef18175aa7aa9ac9
| 1,788
|
py
|
Python
|
book_figures/chapter1/fig_sdss_spectrum.py
|
StKyr/astroML_figures
|
45e9748335e0cd854d09319dff0e43ecd70e7b61
|
[
"BSD-2-Clause"
] | 6
|
2019-08-31T16:43:43.000Z
|
2021-07-10T06:06:20.000Z
|
book_figures/chapter1/fig_sdss_spectrum.py
|
StKyr/astroML_figures
|
45e9748335e0cd854d09319dff0e43ecd70e7b61
|
[
"BSD-2-Clause"
] | 34
|
2018-09-10T22:35:07.000Z
|
2022-02-08T21:17:39.000Z
|
book_figures/chapter1/fig_sdss_spectrum.py
|
StKyr/astroML_figures
|
45e9748335e0cd854d09319dff0e43ecd70e7b61
|
[
"BSD-2-Clause"
] | 10
|
2017-06-22T09:21:19.000Z
|
2020-01-26T03:54:26.000Z
|
"""
SDSS Spectrum Example
---------------------
Figure 1.2.
An example of an SDSS spectrum (the specific flux plotted as a function of
wavelength) loaded from the SDSS SQL server in real time using Python tools
provided here (this spectrum is uniquely described by SDSS parameters
plate=1615, fiber=513, and mjd=53166).
"""
# Author: Jake VanderPlas
# License: BSD
# The figure produced by this code is published in the textbook
# "Statistics, Data Mining, and Machine Learning in Astronomy" (2013)
# For more information, see http://astroML.github.com
# To report a bug or issue, use the following forum:
# https://groups.google.com/forum/#!forum/astroml-general
from matplotlib import pyplot as plt
from astroML.datasets import fetch_sdss_spectrum
#----------------------------------------------------------------------
# This function adjusts matplotlib settings for a uniform feel in the textbook.
# Note that with usetex=True, fonts are rendered with LaTeX. This may
# result in an error if LaTeX is not installed on your system. In that case,
# you can set usetex to False.
if "setup_text_plots" not in globals():
    from astroML.plotting import setup_text_plots
setup_text_plots(fontsize=8, usetex=True)

#------------------------------------------------------------
# Fetch single spectrum.
# The (plate, mjd, fiber) triple uniquely identifies an SDSS spectrum
# (see the module docstring above).
plate = 1615
mjd = 53166
fiber = 513

# Network call: downloads the spectrum from the SDSS server in real time.
spec = fetch_sdss_spectrum(plate, mjd, fiber)

#------------------------------------------------------------
# Plot the resulting spectrum: specific flux vs. wavelength (Angstroms).
fig, ax = plt.subplots(figsize=(5, 3.75))
ax.plot(spec.wavelength(), spec.spectrum, '-k', lw=1)

# Fixed axis limits chosen for the textbook figure.
ax.set_xlim(3000, 10000)
ax.set_ylim(25, 300)

ax.set_xlabel(r'$\lambda {(\rm \AA)}$')
ax.set_ylabel('Flux')
# locals() supplies plate/mjd/fiber to the %-style named format fields.
ax.set_title('Plate = %(plate)i, MJD = %(mjd)i, Fiber = %(fiber)i' % locals())
plt.show()
| 35.058824
| 79
| 0.652685
|
4a005a9dd03d8ecabb61a7125e853b3af5b7e588
| 464
|
py
|
Python
|
data/scripts/templates/object/mobile/shared_space_starfighter_engineer_trainer_01.py
|
obi-two/GameServer
|
7d37024e2291a97d49522610cd8f1dbe5666afc2
|
[
"MIT"
] | 20
|
2015-02-23T15:11:56.000Z
|
2022-03-18T20:56:48.000Z
|
data/scripts/templates/object/mobile/shared_space_starfighter_engineer_trainer_01.py
|
apathyboy/swganh
|
665128efe9154611dec4cb5efc61d246dd095984
|
[
"MIT"
] | null | null | null |
data/scripts/templates/object/mobile/shared_space_starfighter_engineer_trainer_01.py
|
apathyboy/swganh
|
665128efe9154611dec4cb5efc61d246dd095984
|
[
"MIT"
] | 20
|
2015-04-04T16:35:59.000Z
|
2022-03-24T14:54:37.000Z
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    """Build and return the Creature template for this mobile object.

    NOTE: this file is autogenerated (see header); hand edits belong only
    between the BEGIN/END MODIFICATIONS markers below.
    """
    result = Creature()
    result.template = "object/mobile/shared_space_starfighter_engineer_trainer_01.iff"
    result.attribute_template_id = 9
    result.stfName("npc_name","chiss_base_female")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return result
| 27.294118
| 83
| 0.741379
|
4a005ab24ffda0966c286ef0b8ef4addd7e60086
| 465
|
py
|
Python
|
arpym/estimation/cov_2_corr.py
|
dpopadic/arpmRes
|
ddcc4de713b46e3e9dcb77cc08c502ce4df54f76
|
[
"MIT"
] | 6
|
2021-04-10T13:24:30.000Z
|
2022-03-26T08:20:42.000Z
|
arpym/estimation/cov_2_corr.py
|
dpopadic/arpmRes
|
ddcc4de713b46e3e9dcb77cc08c502ce4df54f76
|
[
"MIT"
] | null | null | null |
arpym/estimation/cov_2_corr.py
|
dpopadic/arpmRes
|
ddcc4de713b46e3e9dcb77cc08c502ce4df54f76
|
[
"MIT"
] | 6
|
2019-08-13T22:02:17.000Z
|
2022-02-09T17:49:12.000Z
|
# -*- coding: utf-8 -*-
import numpy as np
def cov_2_corr(s2):
    """Convert a covariance matrix into a correlation matrix.

    Parameters
    ----------
    s2 : array, shape (n_, n_)
        Covariance matrix.

    Returns
    -------
    c2 : array, shape (n_, n_)
        Correlation matrix, c2[i, j] = s2[i, j] / (s_vol[i] * s_vol[j]).
    s_vol : array, shape (n_,)
        Standard deviations (square roots of the covariance diagonal).
    """
    # Standard deviations live on the diagonal of the covariance matrix.
    s_vol = np.sqrt(np.diagonal(s2))

    # Rescale rows and columns by the inverse volatilities to normalize
    # the diagonal of the result to ones.
    inv_vol = np.diag(np.reciprocal(s_vol))
    c2 = inv_vol @ s2 @ inv_vol

    return c2, s_vol
| 17.884615
| 39
| 0.556989
|
4a005aee665475c24f8d706d3440c9591c6992ce
| 659
|
py
|
Python
|
modules/logs.py
|
vinihcrosa/osint_python
|
5e7e25494786ff52e9146c8607e0c0fc83344391
|
[
"MIT"
] | null | null | null |
modules/logs.py
|
vinihcrosa/osint_python
|
5e7e25494786ff52e9146c8607e0c0fc83344391
|
[
"MIT"
] | null | null | null |
modules/logs.py
|
vinihcrosa/osint_python
|
5e7e25494786ff52e9146c8607e0c0fc83344391
|
[
"MIT"
] | null | null | null |
import logging
def createLog(message: str, nivel: int) -> None:
    """Write *message* to ./log/osint.log at the severity given by *nivel*.

    The numeric *nivel* maps onto the standard logging levels:
    0 -> NOTSET, <=10 -> DEBUG, <=20 -> INFO, <=30 -> WARNING,
    <=40 -> ERROR, anything higher -> CRITICAL.
    """
    import os
    # Robustness fix: basicConfig raises FileNotFoundError if the ./log
    # directory does not exist yet.
    os.makedirs('./log', exist_ok=True)

    log_format = '%(asctime)s:%(levelname)s:%(filename)s:%(message)s'
    logging.basicConfig(filename='./log/osint.log',
                        filemode='w',
                        level=logging.DEBUG,
                        format=log_format)
    logger = logging.getLogger('root')

    if nivel == 0:
        # BUG FIX: Logger has no .notset() method -- the original raised
        # AttributeError here. Use the generic log() call at level NOTSET
        # (which, per logging semantics, emits nothing at level 0).
        logger.log(logging.NOTSET, message)
    elif nivel <= 10:
        logger.debug(message)
    elif nivel <= 20:
        logger.info(message)
    elif nivel <= 30:
        logger.warning(message)
    elif nivel <= 40:
        logger.error(message)
    else:
        # Levels above 50 were silently dropped before; treat anything
        # beyond ERROR as critical so no message is ever lost.
        logger.critical(message)
| 28.652174
| 69
| 0.559939
|
4a005b1602046cac21502a644f937751b8f5fb87
| 678
|
py
|
Python
|
config.py
|
kokolokssk/FindLoli
|
5f65e4d782daca5c7fb17259b903187c9499de5e
|
[
"MIT"
] | null | null | null |
config.py
|
kokolokssk/FindLoli
|
5f65e4d782daca5c7fb17259b903187c9499de5e
|
[
"MIT"
] | 1
|
2020-11-26T05:47:39.000Z
|
2020-12-07T09:26:49.000Z
|
config.py
|
kokolokksk/FindLoli
|
5f65e4d782daca5c7fb17259b903187c9499de5e
|
[
"MIT"
] | null | null | null |
def get_api_params():
    """Return the TwitterApiParams class populated with credentials.

    NOTE(review): __init__ is called with the *class object itself* as self,
    so the credentials become class attributes and the class (not an
    instance) is returned. The 'x'/'xx' values look like placeholders --
    presumably real keys are substituted elsewhere; confirm.
    """
    TwitterApiParams.__init__(TwitterApiParams,'x','xx','xx')
    return TwitterApiParams
class TwitterApiParams():
    """Holder for Twitter API credentials (key, secret key, bearer token)."""

    def __init__(
            self,api_key,api_secret_key,bearer_token):
        self.api_key=api_key
        self.api_secret_key=api_secret_key
        self.bearer_token=bearer_token

    def __getattr__(self,k):
        # Only invoked when normal attribute lookup FAILS (Python semantics),
        # i.e. when attribute k is missing on the instance/class.
        # NOTE(review): if one of the three known keys is requested while it
        # is genuinely unset, `return self.api_key` re-triggers __getattr__
        # and recurses until RecursionError -- which the AttributeError
        # handler below does not catch. Confirm this path is unreachable.
        try:
            if(k=='api_key'):
                return self.api_key
            if(k=='api_secret_key'):
                return self.api_secret_key
            if(k=='bearer_token'):
                return self.bearer_token
            # Unknown attribute names yield a sentinel string, not an error.
            return "no such key"
        except AttributeError:
            return "error"
| 30.818182
| 61
| 0.59587
|
4a005c615d6b3274c742fde8533c0d21cdd6c91c
| 3,468
|
py
|
Python
|
test/test_lor_card.py
|
andrewdge/Pyot
|
a0b44a4462fd643bb21fbdc349beb9546543997c
|
[
"MIT"
] | 1
|
2021-02-17T01:02:08.000Z
|
2021-02-17T01:02:08.000Z
|
test/test_lor_card.py
|
bangingheads/Pyot
|
d133e93d96b6f51a3e22da182b9a9d738442e760
|
[
"MIT"
] | null | null | null |
test/test_lor_card.py
|
bangingheads/Pyot
|
d133e93d96b6f51a3e22da182b9a9d738442e760
|
[
"MIT"
] | null | null | null |
from pyot.models import lor
from pyot.utils import loop_run
async def async_card(card=None):
    """Fetch a single LoR card (or validate the one supplied) and
    type-check every field the Card model exposes."""
    if card is None:
        card = await lor.Card(code="03BW014", locale="en_us").get()

    # Scalar string fields.
    string_fields = (
        "region", "region_ref", "description", "description_raw",
        "levelup_description", "levelup_description_raw", "flavor_text",
        "artist_name", "name", "code", "spell_speed", "spell_speed_ref",
        "rarity", "rarity_ref", "subtype", "supertype", "type", "faction",
    )
    for field in string_fields:
        assert isinstance(getattr(card, field), str)

    # Scalar integer fields.
    for field in ("attack", "cost", "health", "set", "number"):
        assert isinstance(getattr(card, field), int)

    assert isinstance(card.collectible, bool)

    # List-of-string fields.
    for item in (*card.keywords, *card.keyword_refs, *card.subtypes,
                 *card.associated_card_codes):
        assert isinstance(item, str)

    # Asset objects expose two path strings each.
    for asset in card.assets:
        assert isinstance(asset.full_absolute_path, str)
        assert isinstance(asset.game_absolute_path, str)
async def async_cards():
    """Fetch every card in set 3 and run the per-card validation on each."""
    card_set = await lor.Cards(set=3).get()
    for single_card in card_set:
        await async_card(single_card)
def test_card():
    """Synchronous entry point: run the single-card validation coroutine."""
    loop_run(async_card())
def test_cards():
    """Synchronous entry point: validate every card in the set."""
    loop_run(async_cards())
def test_deck():
    """Round-trip LoR deck codes: decode two codes, re-encode, and rebuild
    the deck batch-by-batch, asserting the same card batches throughout."""
    # DRY fix: the original repeated this 14-element list four times inline.
    expected_codes = [
        '3:01SI015', '3:01SI044', '3:01SI048', '3:01SI054', '3:01FR003',
        '3:01FR012', '3:01FR020', '3:01FR024', '3:01FR033', '3:01FR036',
        '3:01FR039', '3:01FR052', '2:01SI005', '2:01FR004',
    ]

    d = lor.Deck(code='CEBAIAIFB4WDANQIAEAQGDAUDAQSIJZUAIAQCBIFAEAQCBAA')
    raw = d.decode().raw
    for code in expected_codes:
        assert code in raw

    # Re-encoding then decoding again must preserve the batches.
    encoded = d.encode()
    raw = d.decode().raw
    for code in expected_codes:
        assert code in raw

    # A second (reordered) code for the same deck decodes to the same batches.
    d = lor.Deck(code='CIBAIAIFB4WDANQIAEAQGDAUDAQSIJZUAIAQCAIEAEAQKBIA')
    raw = d.decode().raw
    for code in expected_codes:
        assert code in raw
    for batch in d:
        assert isinstance(batch, lor.Batch)
        assert batch.count > 0 and batch.count <= 3
        assert isinstance(batch.code, str)

    # Build the deck by hand from raw batch strings; swapping the last batch
    # for an equivalent (code, count) batch must not change the encoding.
    deck = lor.Deck()
    for code in expected_codes:
        deck.append(lor.Batch(raw=code))
    deck.pop()
    deck.append(lor.Batch(code="01FR004", count=2))
    assert deck.encode() == encoded

    # pull() then re-append must also round-trip to the same encoding.
    deck.pull('01FR004')
    deck.append(lor.Batch(code="01FR004", count=2))
    assert deck.encode() == encoded
| 43.35
| 199
| 0.675317
|
4a005cc5a58103e9c45a4b79091123ad8c5ce7e1
| 4,812
|
py
|
Python
|
symenergy/core/altlinsolve.py
|
mcsoini/symenergy
|
e0d061f480f6acdb36b895c1092ccfc9f6d7970c
|
[
"BSD-2-Clause"
] | null | null | null |
symenergy/core/altlinsolve.py
|
mcsoini/symenergy
|
e0d061f480f6acdb36b895c1092ccfc9f6d7970c
|
[
"BSD-2-Clause"
] | 2
|
2018-12-21T17:07:46.000Z
|
2019-11-08T09:33:30.000Z
|
symenergy/core/altlinsolve.py
|
mcsoini/symenergy
|
e0d061f480f6acdb36b895c1092ccfc9f6d7970c
|
[
"BSD-2-Clause"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Oct 15 11:07:02 2019
@author: user
"""
from sympy import *
def linsolve_cond(eqs, unknowns):
    """Solve the linear system *eqs* in *unknowns* symbolically, returning a
    Piecewise of solution sets, each guarded by the symbolic conditions on
    the coefficients under which that set is the solution.
    """
    # No equations: every point of C^n is a solution.
    if not eqs:
        return S.Complexes**len(unknowns)

    # Preprocessing: build the augmented matrix [A | b] as nested lists.
    A, b = linear_eq_to_matrix(eqs, unknowns)
    Aaug = Matrix.hstack(A, b).tolist()

    # Main workhorse:
    sols_conds = _linsolve_cond(Aaug)

    # sols_conds is a list of 3-tuples:
    # [(solset, pivot_conds, consistency_conds),...]
    #
    # solset: solution set as a FiniteSet or ImageSet
    # pivot_conds: list of conditions (e.g. a!=0) assumed in pivoting
    # consistency_conds: list of conditions needed for existence of solutions

    # Build all the separate cases into a Piecewise. For each case, the
    # solution set applies when both the pivot assumptions and the
    # consistency conditions hold; otherwise the system is inconsistent
    # (EmptySet) under the same pivot assumptions.
    sets_conds = []
    for solset, pivot_conds, consistency_conds in sols_conds:
        pivot_cond = And(*pivot_conds)
        consistency_cond = And(*consistency_conds)
        if consistency_cond is not False:
            sets_conds.append((solset, pivot_cond & consistency_cond))
        if consistency_cond is not True:
            sets_conds.append((S.EmptySet, pivot_cond & Not(consistency_cond)))

    # Merge duplicate solution sets, OR-ing their guarding conditions.
    sets_conds_d = {}
    for ss, conds in sets_conds:
        if ss not in sets_conds_d:
            sets_conds_d[ss] = simplify_logic(conds)
        else:
            sets_conds_d[ss] = simplify_logic(Or(sets_conds_d[ss], conds))

    return Piecewise(*sets_conds_d.items())
def _linsolve_cond(Aaug, _recurse=None):
    """Gaussian elimination on augmented matrix *Aaug* (list of lists),
    forking recursively whenever a candidate pivot's zeroness is undecidable
    so that both the pivot!=0 and pivot==0 branches are explored.

    Returns a list of (solution_set, pivot_conds, consistency_conds) tuples,
    one per explored branch. *_recurse* is internal state for the forked
    calls: (row, col, pivots, pivot_conds).
    """
    Nr, Nc = len(Aaug), len(Aaug[0])
    if _recurse is None:
        row, col, pivots, pivot_conds = 0, 0, [], []
    else:
        row, col, pivots, pivot_conds = _recurse
        # Resume elimination from the last established pivot position.
        if pivots:
            row, col = pivots[-1]
        else:
            row, col = 0, 0
    sols_conds = []

    # Call self recursively for alternate pivots: substitute pivot -> 0
    # everywhere and record the Eq(pivot, 0) assumption for that branch.
    def recurse_zero_pivot(r, c):
        pivot = Aaug[r][c]
        Aaugr = [[Arc.subs(pivot, 0) for Arc in Arow] for Arow in Aaug]
        pivot_condsr = pivot_conds[:] + [Eq(pivot, 0)]
        _recurse = (r, c, pivots[:], pivot_condsr)
        sols_conds.extend(_linsolve_cond(Aaugr, _recurse=_recurse))

    # Forward elimination over the coefficient columns (last column is b).
    while row < Nr and col < Nc-1:
        # Find pivot row and swap into position
        for r in range(row, Nr):
            # is_zero is three-valued: True / False / None (undecidable).
            is_zero = Aaug[r][col].is_zero
            if not is_zero:
                if is_zero is None:
                    # Recurse for the case that the pivot is zero; this
                    # branch proceeds under the assumption pivot != 0.
                    recurse_zero_pivot(r, col)
                    pivot_conds.append(Ne(Aaug[r][col], 0))
                if r != row:
                    Aaug[r], Aaug[row] = Aaug[row], Aaug[r]
                break
        else:
            # All zeros, next column
            col += 1
            continue
        pivots.append((row, col))
        pivot_row = Aaug[row]
        pivot_div = Aaug[row][col]
        # Eliminate the pivot column from all rows below, using
        # cross-multiplication to avoid dividing by the pivot.
        for r in range(row+1, Nr):
            pivot_mul = Aaug[r][col]
            if pivot_mul.is_zero:
                continue
            Aaug[r][col] = S.Zero
            for c in range(col+1, Nc):
                if not pivot_row[c].is_zero:
                    Aaug[r][c] = Aaug[r][c]*pivot_div - pivot_row[c]*pivot_mul
        # Next row/column...
        row += 1
        col += 1

    # Back substitute and list of possibilities
    sol_set, consistency_conds = _back_substitute(Aaug, pivots)
    sols_conds.append((sol_set, pivot_conds, consistency_conds))
    return sols_conds
def _back_substitute(Aaug, pivots):
    """Back-substitution on row-echelon augmented matrix *Aaug*.

    Returns (solset, consistency_conds): the solution set (FiniteSet for a
    unique solution, ImageSet parameterized by free symbols tau* otherwise)
    and the list of conditions required for any solution to exist.
    """
    Nc = len(Aaug[0])

    # Check conditions for existence of solutions then find solutions by
    # back-substitution below. Scan trailing all-zero-coefficient rows:
    # a nonzero RHS there makes the system inconsistent (False), an
    # undecidable RHS contributes an Eq(rhs, 0) consistency condition.
    consistency_conds = []
    for row in reversed(range(len(Aaug))):
        is_zero = [e.is_zero for e in Aaug[row]]
        if not all(x is True for x in is_zero[:-1]):
            break
        elif is_zero[-1] is False:
            consistency_conds.append(False)
        elif is_zero[-1] is None:
            consistency_conds.append(Eq(Aaug[row][-1], 0))
    # The scan must stop exactly at the last pivot row (or row 0 when
    # there are no pivots at all).
    assert (row == 0 and not pivots) or row == pivots[-1][0]

    # Matrix of all zeros?
    if not pivots:
        solset = S.Complexes**(Nc-1)
        return solset, consistency_conds

    # Free (non-pivot) columns receive fresh tau0, tau1, ... parameters.
    gen = numbered_symbols('tau')
    params = []
    sol = [None] * (Nc-1)
    pivots_cols = {c:r for r, c in pivots}
    for col in reversed(range(Nc-1)):
        if col in pivots_cols:
            r = pivots_cols[col]
            # Solve the pivot row for this unknown using the already-known
            # entries of sol to its right.
            lhsterms = (Aaug[r][c]*sol[c] for c in range(col+1, Nc-1))
            sol[col] = (Aaug[r][-1] - Add(*lhsterms)) / Aaug[r][col]
        else:
            # Non-pivot gets a free symbol
            sym = next(gen)
            params.append(sym)
            sol[col] = sym
    if params:
        solset = ImageSet(Lambda(tuple(params), tuple(sol)), *[S.Complexes]*len(params))
    else:
        solset = FiniteSet(tuple(sol))
    return solset, consistency_conds
| 30.649682
| 88
| 0.580424
|
4a005d263620be96147f674bd1fc5b6f57565a15
| 1,036
|
py
|
Python
|
tests/test_ubirch_anchoring.py
|
ubirch/ubirch-python-utils
|
a052db96cc8cccfd30f9baf7ff9a4424598724b0
|
[
"Apache-2.0"
] | null | null | null |
tests/test_ubirch_anchoring.py
|
ubirch/ubirch-python-utils
|
a052db96cc8cccfd30f9baf7ff9a4424598724b0
|
[
"Apache-2.0"
] | null | null | null |
tests/test_ubirch_anchoring.py
|
ubirch/ubirch-python-utils
|
a052db96cc8cccfd30f9baf7ff9a4424598724b0
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
#
# ubirch anchoring test
#
# @author Victor Patrin
#
# Copyright (c) 2018 ubirch GmbH.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ubirch.anchoring import *
import unittest
# TODO : add more tests
class TestUbirchAnchoring(unittest.TestCase):
    """Unit tests for the ubirch anchoring helpers."""

    def test_is_hex(self):
        """is_hex() accepts lower/upper-case hex strings and rejects non-hex."""
        lowerhex = "0x0123456789abcdef"
        upperhex = "0xABCDEF"
        nonhex = "0x123helloworld"
        self.assertTrue(is_hex(lowerhex))
        self.assertTrue(is_hex(upperhex))
        # Idiom fix: assertFalse replaces assertTrue(not(...)) and gives a
        # clearer failure message.
        self.assertFalse(is_hex(nonhex))
| 24.093023
| 74
| 0.722008
|
4a005df6cea62712c2bdf0ab23d4455920204931
| 143
|
py
|
Python
|
reddit2telegram/channels/r_discordapp/app.py
|
mainyordle/reddit2telegram
|
1163e15aed3b6ff0fba65b222d3d9798f644c386
|
[
"MIT"
] | 187
|
2016-09-20T09:15:54.000Z
|
2022-03-29T12:22:33.000Z
|
reddit2telegram/channels/r_discordapp/app.py
|
mainyordle/reddit2telegram
|
1163e15aed3b6ff0fba65b222d3d9798f644c386
|
[
"MIT"
] | 84
|
2016-09-22T14:25:07.000Z
|
2022-03-19T01:26:17.000Z
|
reddit2telegram/channels/r_discordapp/app.py
|
mainyordle/reddit2telegram
|
1163e15aed3b6ff0fba65b222d3d9798f644c386
|
[
"MIT"
] | 172
|
2016-09-21T15:39:39.000Z
|
2022-03-16T15:15:58.000Z
|
#encoding:utf-8
subreddit = 'discordapp'
t_channel = '@r_discordapp'
def send_post(submission, r2t):
return r2t.send_simple(submission)
| 15.888889
| 38
| 0.748252
|
4a005dfd6dd66aeaa0dcc5657869986cd38851a0
| 1,144
|
py
|
Python
|
generate_platforms.py
|
t-actions/go-platforms
|
14e56af474701f0c6c649a8cbe984da06a87b93c
|
[
"MIT"
] | null | null | null |
generate_platforms.py
|
t-actions/go-platforms
|
14e56af474701f0c6c649a8cbe984da06a87b93c
|
[
"MIT"
] | null | null | null |
generate_platforms.py
|
t-actions/go-platforms
|
14e56af474701f0c6c649a8cbe984da06a87b93c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import os
import json
import logging
import subprocess
def main():
    """Print a GitHub-Actions build matrix (JSON) of Go target platforms.

    Platforms come from the PLATFORMS environment variable (whitespace
    separated "GOOS/GOARCH[/GOARM]" entries) or, when unset/empty, from
    `go tool dist list`.
    """
    platforms = os.getenv('PLATFORMS')
    if not platforms:
        # Fixed command string, so shell=True carries no injection risk here.
        platforms = subprocess.check_output('go tool dist list', shell=True).decode('utf-8')
    matrix = {
        'include': []
    }
    for platform in platforms.strip().split():
        parts = platform.split('/')
        if len(parts) < 2:
            # BUG FIX: the original logged this warning but fell through and
            # crashed on parts[1]; skip the malformed entry instead. Also
            # logging.warn() is deprecated in favor of logging.warning().
            logging.warning(f'[Skip] Unknown platform format {platform}')
            continue
        goos = parts[0]
        goarch = parts[1]
        goarm = parts[2] if len(parts) > 2 else ''
        if goarch != 'arm':
            goarms = ['']  # GOARM only applies to 32-bit ARM
        elif goarm == '':
            goarms = ['7', '6', '5']  # no variant given: build the common ones
        else:
            goarms = [goarm]
        for goarm in goarms:
            matrix['include'].append({
                'goos': goos,
                'goarch': goarch,
                'goarm': goarm,
            })
    print(json.dumps(matrix))
if __name__ == '__main__':
    # Configure verbose logging for command-line use before building the matrix.
    logging.basicConfig(
        level = logging.DEBUG,
        format = '%(asctime)s %(levelname)s %(message)s',
        datefmt = '%Y-%m-%d %X',
    )
    main()
| 25.422222
| 94
| 0.503497
|
4a005e3d930c99abc0846bd8818671aabf2d3843
| 290
|
py
|
Python
|
ssz/constants.py
|
cburgdorf/py-ssz
|
d8db8e8d388b3778e6737b90df1617cfff518c58
|
[
"MIT"
] | null | null | null |
ssz/constants.py
|
cburgdorf/py-ssz
|
d8db8e8d388b3778e6737b90df1617cfff518c58
|
[
"MIT"
] | null | null | null |
ssz/constants.py
|
cburgdorf/py-ssz
|
d8db8e8d388b3778e6737b90df1617cfff518c58
|
[
"MIT"
] | null | null | null |
from eth_typing import (
Hash32,
)
# SSZ hash-tree chunking constants.
CHUNK_SIZE = 32  # named BYTES_PER_CHUNK in the spec
EMPTY_CHUNK = Hash32(b"\x00" * CHUNK_SIZE)  # an all-zero chunk of CHUNK_SIZE bytes

SIZE_PREFIX_SIZE = 4  # named BYTES_PER_LENGTH_PREFIX in the spec
# Largest content length a SIZE_PREFIX_SIZE-byte prefix can encode (2^32 - 1).
MAX_CONTENT_SIZE = 2 ** (SIZE_PREFIX_SIZE * 8) - 1

# Well-known field name; its special handling lives elsewhere in the package.
SIGNATURE_FIELD_NAME = "signature"
| 24.166667
| 65
| 0.744828
|
4a005e5dd552810e5f4cad531222c7979afd3031
| 3,612
|
py
|
Python
|
core.py
|
dawnflyc/PixivRandomDownload
|
2ec342f34716d31c8257adbc9b0dee11a6d7eb79
|
[
"MIT"
] | null | null | null |
core.py
|
dawnflyc/PixivRandomDownload
|
2ec342f34716d31c8257adbc9b0dee11a6d7eb79
|
[
"MIT"
] | null | null | null |
core.py
|
dawnflyc/PixivRandomDownload
|
2ec342f34716d31c8257adbc9b0dee11a6d7eb79
|
[
"MIT"
] | null | null | null |
import json
import os
import random
import re
import time
import requests
# Local HTTP(S) proxy used for every request to pixiv -- presumably a local
# proxy client listening on port 7890; adjust to your own setup.
proxy = {
    'http': '127.0.0.1:7890',
    'https': '127.0.0.1:7890'
}
def header(url):
    """Build the request headers for *url*.

    Sends a desktop-Chrome User-Agent, uses *url* itself as the Referer,
    and attaches a hard-coded session cookie.
    NOTE(review): the cookie embeds a personal PHPSESSID/session token and
    will expire -- it must be replaced with a fresh login cookie to work.
    """
    return {
        'User-agent': "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
                      "AppleWebKit/537.36 (KHTML, like Gecko) "
                      "Chrome/76.0.3809.132 Safari/537.36",
        'Referer': url,
        'Cookie': '__cfduid=df4c94d9266140fa769fb982fde1d3f301617859032; first_visit_datetime_pc=2021-04-08+14:17:12; p_ab_id=5; p_ab_id_2=1; p_ab_d_id=1046732053; yuid_b=MFISaIU; __utmz=235335808.1617859035.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none); _ga=GA1.2.1504593990.1617859035; PHPSESSID=31161881_CSTk2IFCP8o2ZSSzYWPV3rFc2kqrga6N; device_token=bae8bc7229d83f1568c721612919f25b; c_type=24; privacy_policy_agreement=0; a_type=0; b_type=1; ki_s=214760:0.0.0.0.2;214994:0.0.0.0.2; login_ever=yes; __utmv=235335808.|2=login ever=yes=1^3=plan=normal=1^5=gender=male=1^6=user_id=31161881=1^9=p_ab_id=5=1^10=p_ab_id_2=1=1^11=lang=zh=1; tag_view_ranking=JUhUC4yJLC~SKQUHZx-lq~BFKUzIToh2~jpIZPQ502H~zZVZ6698Si~YujG5aPAeR~--BqJSx0Ht~K5kkxWXCe_~ibTwFdmyvo~7wdsgQtcu4~RTJMXD26Ak~LJo91uBPz4~Lt-oEicbBr~Mmb81P1JFg~65aiw_5Y72; ki_t=1617859066292;1617859066292;1617861083437;1;12; __utma=235335808.1504593990.1617859035.1618117231.1618120293.3; __utmc=235335808; __cf_bm=8ed7dd1f0f687127adf080482f129014ca65da86-1618120291-1800-AS1460AWnnxjSpYyUxjI7PAggk4lLpx/LJfVDO1qqx48Ssp4eb5KXV5v1YNeDnS7gZpgc2iFsXAdCVQGNjvICnqtZTQWVpJH526XY+cZ8+tpNzFM1ivtRGcX2Axg5Yig8bSSGVjYOUUmM7IwDmMYRSXkwJ4ey9oSqc09W9zT97FtlUHn/cxPqZO1hKN8IvK13g==; __utmt=1; __utmb=235335808.2.10.1618120293; _gid=GA1.2.1749810522.1618120296; _gat_UA-1830249-3=1'
    }
def download(url, name):
    """Download *url* through the module proxy and save the body to path *name*."""
    print('开始下载\t' + url)
    ir = requests.get(url, headers=header(url), proxies=proxy)
    # BUG FIX: the original leaked the file handle via open(...).write(...);
    # a context manager guarantees the file is flushed and closed.
    with open(name, 'wb') as out_file:
        out_file.write(ir.content)
def find(work_id, path, filter=None):
    """Scrape pixiv artwork *work_id* and download its images under *path*.

    Only works with a popularity score (bookmarks + likes + comments) above
    99 are downloaded; an optional *filter(page)* callback can veto a work.
    Returns True when images were downloaded, False otherwise.
    """
    id_str = str(work_id)
    work_url = 'https://www.pixiv.net/artworks/' + id_str
    page = requests.get(work_url, headers=header(work_url), proxies=proxy)
    if page.status_code == 200:
        # Pull the bookmark/like/comment counters out of the embedded page
        # JSON with a regex, then parse the fragment as a JSON object.
        json_str = re.findall('\"bookmarkCount\":[0-9]+,\"likeCount\":[0-9]+,\"commentCount\":[0-9]+',
                              str(page.content))
        json_value = json.loads('{' + json_str[0] + '}')
        # Popularity score used both as threshold and as directory prefix.
        sort = json_value["bookmarkCount"] + json_value["likeCount"] + json_value["commentCount"]
        dir = str(sort) + '-' + id_str
        if sort > 99:
            if not filter is None:
                if not filter(page):
                    return False
            if not os.path.exists(path):
                os.makedirs(path)
            if not os.path.exists(path+'/' + dir):
                os.makedirs(path+'/' + dir)
            else:
                # Target directory already exists -- presumably this work was
                # downloaded before, so skip it.
                return False
            # Ask the ajax endpoint for the list of image pages of this work.
            page_url = 'https://www.pixiv.net/ajax/illust/' + id_str + '/pages?lang=zh'
            json_page = requests.get(page_url, headers=header(page_url), proxies=proxy)
            json_text = json.loads(json_page.content)
            if not json_text['error']:
                print('\n开始爬取\t' + work_url)
                json_body = json_text['body']
                for u in json_body:
                    # Random file name; NOTE(review): collisions are possible
                    # within the 0..99999 range -- confirm acceptable.
                    ran = random.randint(0, 99999)
                    ran_str = str(ran)
                    download(u['urls']['original'], path+'/'+ dir + '/' + ran_str+'.jpg')
                return True
            else:
                print('页面ajax异常')
        else:
            print('...')
    else:
        print('...')
    return False
| 51.6
| 1,322
| 0.629568
|
4a005eb461df400d69f77c2b9e1d06461ddab29c
| 5,787
|
py
|
Python
|
apollo/skills/browser_skills/__init__.py
|
heitorsampaio/ApolloAI
|
c2983ce51c52641453fb1f6e0d7598bdd47ed66d
|
[
"MIT"
] | null | null | null |
apollo/skills/browser_skills/__init__.py
|
heitorsampaio/ApolloAI
|
c2983ce51c52641453fb1f6e0d7598bdd47ed66d
|
[
"MIT"
] | null | null | null |
apollo/skills/browser_skills/__init__.py
|
heitorsampaio/ApolloAI
|
c2983ce51c52641453fb1f6e0d7598bdd47ed66d
|
[
"MIT"
] | null | null | null |
# MIT License
# Copyright (c) 2019 Georgios Papachristou
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import wikipedia
import requests
import logging
import time
import re
import urllib.request
import subprocess
from bs4 import BeautifulSoup as bs
from apollo.skills.skill_manager import AssistantSkill
class BrowserSkills(AssistantSkill):
    """Assistant skills backed by the web: wikipedia lookups, YouTube search,
    opening websites in a browser, and reading Google News headlines."""

    @classmethod
    def tell_me_about(cls, voice_transcript, skill):
        """
        Tells about something by searching in wikipedia.

        :param voice_transcript: string (e.g. 'tell me about google')
        :param skill: dict with a 'tags' entry used to locate the topic word
        """
        tags = cls._extract_tags(voice_transcript, skill['tags'])
        for tag in tags:
            # Captures only the single word after the tag, so multi-word
            # topics are truncated to their first word.
            reg_ex = re.search(tag + ' ([a-zA-Z]+)', voice_transcript)
            try:
                if reg_ex:
                    topic = reg_ex.group(1)
                    response = cls._decoded_wiki_response(topic)
                    cls.response(response)
            except Exception as e:
                logging.debug(e)
                cls.response(" I can't find on the internet what you want")

    @classmethod
    def open_in_youtube(cls, voice_transcript, skill):
        """
        Searches YouTube for the word following the tag and opens the first
        result in the default browser.

        :param voice_transcript: string (e.g. 'play despacito')
        :param skill: dict with a 'tags' entry used to locate the search word
        """
        tags = cls._extract_tags(voice_transcript, skill['tags'])
        for tag in tags:
            reg_ex = re.search(tag + ' ([a-zA-Z]+)', voice_transcript)
            try:
                if reg_ex:
                    search_text = reg_ex.group(1)
                    # NOTE(review): the search text is appended AFTER
                    # "&orderby=viewCount", yielding
                    # "search_query=&orderby=viewCount<text>" -- this looks
                    # like a malformed URL; confirm the intended query string.
                    base = "https://www.youtube.com/results?search_query=" + "&orderby=viewCount"
                    r = requests.get(base + search_text.replace(' ', '+'))
                    page = r.text
                    soup = bs(page, 'html.parser')
                    # Scrapes result anchors by CSS class; fragile against
                    # YouTube markup changes.
                    vids = soup.findAll('a', attrs={'class': 'yt-uix-tile-link'})
                    video = 'https://www.youtube.com' + vids[0]['href']
                    # Opens via "python -m webbrowser" in a subprocess.
                    subprocess.Popen(["python", "-m", "webbrowser", "-t", video], stdout=subprocess.PIPE, shell=False)
            except Exception as e:
                logging.debug(e)
                cls.response("I can't find what do you want in Youtube..")

    @classmethod
    def open_website_in_browser(cls, voice_transcript, skill):
        """
        Opens a web page in the browser.

        :param voice_transcript: string (e.g. 'open youtube')
        :param skill: dict with a 'tags' entry used to locate the domain word

        NOTE: If in the voice_transcript there are more than one commands_dict
        e.g voice_transcript='open youtube and open netflix' the application will find
        and execute only the first one, in our case will open the youtube.
        """
        tags = cls._extract_tags(voice_transcript, skill['tags'])
        for tag in tags:
            reg_ex = re.search(tag + ' ([a-zA-Z]+)', voice_transcript)
            try:
                if reg_ex:
                    domain = reg_ex.group(1)
                    url = cls._create_url(domain)
                    cls.response('Sure')
                    subprocess.Popen(["python", "-m", "webbrowser", "-t", url], stdout=subprocess.PIPE, shell=False)
                    # Give the browser a moment to open before confirming.
                    time.sleep(1)
                    cls.response('I opened the {0}'.format(domain))
            except Exception as e:
                logging.debug(e)
                cls.response("I can't find this domain..")

    @classmethod
    def tell_me_today_news(cls, **kwargs):
        """Fetch the Google News RSS feed and speak the first five headlines."""
        try:
            news_url = "https://news.google.com/news/rss"
            client = urllib.request.urlopen(news_url)
            xml_page = client.read()
            client.close()
            soup = bs(xml_page, "xml")
            news_list = soup.findAll("item")
            # One response per headline, limited to the first five items.
            for news in news_list[:5]:
                response = ""
                data = news.title.text.encode('utf-8')
                response += data.decode()
                cls.response(response)
        except Exception as e:
            logging.debug(e)

    @classmethod
    def _decoded_wiki_response(cls, topic):
        """
        A private method for decoding the wiki response.

        Fetches the wikipedia page for *topic* and returns the first 500
        characters of its content.

        :param topic: string
        :return: string
        """
        ny = wikipedia.page(topic)
        data = ny.content[:500].encode('utf-8')
        response = ''
        response += data.decode()
        return response

    @classmethod
    def _create_url(cls, tag):
        """
        Creates a url. It checks if there is .com suffix and add it if it not exist.

        NOTE(review): in re.search('.com', tag) the dot matches ANY character,
        so e.g. 'xcom' also matches -- confirm whether r'\.com' was intended.

        :param tag: string (e.g youtube)
        :return: string (e.g http://www.youtube.com)
        """
        if re.search('.com', tag):
            url = 'http://www.' + tag
        else:
            url = 'http://www.' + tag + '.com'
        return url
| 38.58
| 118
| 0.590461
|
4a005efa52175b1af0ce8d043142779cb4a6fca6
| 2,790
|
py
|
Python
|
plot/aurocs.py
|
jacobdeasy/flexible-ehr
|
ce26ce718cf5cf18a18d38f273a84324dbd5f4b2
|
[
"MIT"
] | 12
|
2020-03-11T06:04:53.000Z
|
2021-12-06T04:33:24.000Z
|
plot/aurocs.py
|
jacobdeasy/flexible-ehr
|
ce26ce718cf5cf18a18d38f273a84324dbd5f4b2
|
[
"MIT"
] | null | null | null |
plot/aurocs.py
|
jacobdeasy/flexible-ehr
|
ce26ce718cf5cf18a18d38f273a84324dbd5f4b2
|
[
"MIT"
] | 1
|
2021-02-23T07:01:18.000Z
|
2021-02-23T07:01:18.000Z
|
import argparse, matplotlib as mpl, matplotlib.pyplot as plt, numpy as np, os, torch
mpl.rcParams["axes.spines.right"] = False
mpl.rcParams["axes.spines.top"] = False
from scipy import interp
from sklearn import metrics
from torch.utils.data import DataLoader, TensorDataset
from .bootstrap import bootstrap
from flexehr.utils.modelIO import load_metadata, load_model
from utils.helpers import array
def predict(test_loader, model):
    """Run *model* over every batch of *test_loader* and return the
    concatenated outputs converted via the project `array` helper."""
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    model.eval()  # inference mode
    with torch.no_grad():
        batch_outputs = [model(batch[0].to(device)) for batch in test_loader]
    return array(torch.cat(batch_outputs))
def auroc(model, show_plot=False):
    """Plot the mean test-set ROC curve, with a 2-sigma band, over every
    results directory whose name starts with *model*.

    :param model: string prefix selecting subdirectories of 'results'
    :param show_plot: if True display the figure, otherwise save the curve
        data and a PDF under 'figs'
    """
    # Load data
    # NOTE(review): .item() implies a pickled dict inside the .npy file; on
    # numpy >= 1.16.3 this needs allow_pickle=True -- confirm numpy version.
    data = np.load(os.path.join('data', 'arrs_48_20.npy')).item()
    models = [f for f in os.listdir('results') if f.startswith(model)]

    # Test set
    test_dataset = TensorDataset(torch.tensor(data['X_test']))
    test_loader = DataLoader(test_dataset, batch_size=128, pin_memory=True)

    # Common FPR grid so per-model TPR curves can be averaged pointwise.
    base_fpr = np.linspace(0, 1, 101)
    tprs = np.zeros((len(models), 101))
    aucs = np.zeros((len(models)))

    # NOTE(review): the loop rebinds the *model* parameter; harmless here
    # but worth renaming.
    for i, model in enumerate(models):
        # Load model
        model_dir = os.path.join('results', model)
        model = load_model(model_dir)
        metadata = load_metadata(model_dir)  # loaded but unused -- confirm

        # Predict; preds[:, -1] is taken as the positive-class score.
        preds = predict(test_loader, model)
        fpr, tpr, _ = metrics.roc_curve(data['Y_test'], preds[:, -1])
        aucs[i] = metrics.auc(fpr, tpr)

        # Interpolate for bootstrap onto the common grid.
        tpr = interp(base_fpr, fpr, tpr)
        tpr[0] = 0.0
        tprs[i] = tpr

    # Plot mean curve with a +/- 2 std band (clipped at 1 above).
    mean_tprs = tprs.mean(axis=0)
    std_tprs = tprs.std(axis=0)
    tprs_upper = np.minimum(mean_tprs + 2 * std_tprs, 1)
    tprs_lower = mean_tprs - 2 * std_tprs

    plt.plot(base_fpr, mean_tprs, 'k', label=f'Ours: {np.mean(aucs):.4f}')
    plt.fill_between(base_fpr, tprs_lower, tprs_upper,
                     color='red', alpha=0.5, label='95% CI')
    plt.plot([0, 1], [0, 1], 'k--')  # chance diagonal
    plt.xlim(0, 1)
    plt.ylim(0, 1)
    plt.xlabel('False Positive Rate', fontsize=15)
    plt.ylabel('True Positive Rate', fontsize=15)
    plt.legend(loc='lower right')

    if show_plot:
        plt.show()
    else:
        # Persist the curve data alongside the rendered PDF.
        np.save(os.path.join('figs', 'auroc_info'),
                np.stack((base_fpr, tprs_lower, mean_tprs, tprs_upper)))
        plt.savefig(os.path.join('figs', f'auroc_48_20bins.pdf'))
if __name__ == '__main__':
    parser = argparse.ArgumentParser('Plot dynamic AUROC.')
    parser.add_argument('model', type=str,
                        help='Model prefix.')
    # NOTE(review): type=bool on argparse parses any non-empty string
    # (including "False") as True -- confirm this flag behaves as intended.
    parser.add_argument('-s', '--show-plot', type=bool, default=False)
    args = parser.parse_args()
    auroc(args.model, show_plot=args.show_plot)
| 32.823529
| 84
| 0.634409
|
4a005fc572847d9710308f05421d2cd1ac6e8f8e
| 184
|
py
|
Python
|
examples/cmykStroke.py
|
LettError/drawbot
|
dce9af449d429af3f10827654d8b9d3bb8bb8efe
|
[
"BSD-2-Clause"
] | 2
|
2015-09-17T01:27:02.000Z
|
2020-11-26T12:07:13.000Z
|
examples/cmykStroke.py
|
LettError/drawbot
|
dce9af449d429af3f10827654d8b9d3bb8bb8efe
|
[
"BSD-2-Clause"
] | null | null | null |
examples/cmykStroke.py
|
LettError/drawbot
|
dce9af449d429af3f10827654d8b9d3bb8bb8efe
|
[
"BSD-2-Clause"
] | null | null | null |
# DrawBot example: draw a row of vertical lines whose stroke sweeps
# through the CMYK magenta channel.
x, y = 20, 20
lines = 20
# Spread the magenta component evenly across the number of lines.
color_step = 1.00 / lines
strokeWidth(10)
for i in range(lines):
    # cmykStroke arguments: cyan, magenta, yellow, black.
    cmykStroke(0, i * color_step, 1, 0)
    line((x, y), (x, y + 200))
    # Shift the canvas right so the next line doesn't overlap.
    translate(12, 0)
| 16.727273
| 39
| 0.581522
|
4a006041a3300bb7919f92f6180463fe379528c0
| 5,374
|
py
|
Python
|
notes/42 - PyGame Sprites/7. drawing sprites - flip.py
|
hSpiels/ICS3-Python-Notes
|
5cb06623d6714a62ff20550d635c1fd3f7d27ea2
|
[
"MIT"
] | 3
|
2022-02-10T19:06:28.000Z
|
2022-03-25T17:55:56.000Z
|
notes/42 - PyGame Sprites/7. drawing sprites - flip.py
|
hSpiels/ICS3-Python-Notes
|
5cb06623d6714a62ff20550d635c1fd3f7d27ea2
|
[
"MIT"
] | null | null | null |
notes/42 - PyGame Sprites/7. drawing sprites - flip.py
|
hSpiels/ICS3-Python-Notes
|
5cb06623d6714a62ff20550d635c1fd3f7d27ea2
|
[
"MIT"
] | 17
|
2020-09-15T16:40:23.000Z
|
2022-03-22T17:52:32.000Z
|
#TODO: Update this example showing how to use a temporary surface and pygame.transform.flip(tempSurface,True,False)
import pygame
import random
def main():
    """ Set up the game and run the main game loop.

    Demonstrates sprite-sheet animation: a wizard drawn directly from the
    sheet, and a lizard drawn via a temporary surface so it can be flipped
    with pygame.transform.flip() when walking left. Mouse button down/up
    starts/stops the lizard; SPACE reverses its direction; closing the
    window exits the loop.
    """
    #----------------------Set up the game------------#
    pygame.init() # Prepare the pygame module for use
    surfaceSize = 480 # Desired physical surface size, in pixels.
    clock = pygame.time.Clock() #Used to cap the frame rate each iteration
    frameRate = 60 #Target frames per second
    frameCount = 0 #Count the number of frames that have occurred
    # Create surface of (width, height), and its window.
    mainSurface = pygame.display.set_mode((surfaceSize, surfaceSize))
    spriteSheet = pygame.image.load("images/dungeon/0x72_DungeonTilesetII_v1.3.png")
    spriteSheet = pygame.transform.scale2x(spriteSheet)
    wizardPos = [0,50]
    lizardPos = [0,150]
    #These are needed for the image animation
    lizardRect = [176,236,16,28] #Old Values (kept for reference; immediately overwritten)
    lizardRect = [352,472,32,56] #New values are doubled since I doubled the scale
    lizardPatchNumber = 0 #Start at the initial patch
    lizardNumPatches = 5 #Animation cycles through patches 0..4 (five frames)
    lizardFrameCount = 0 # NOTE(review): never updated below — presumably superseded by frameCount; confirm
    lizardFrameRate = 10; #Advance the lizard animation once every this many frames
    lizardDirection = 'Right' #Control which direction lizard is facing
    lizardSpeed = 0.5 #Pixels moved per frame
    lizardMove = True #Control whether the lizard can move
    while True:
        #----------------------Check all the events to see if anything is happening------------#
        # poll() handles at most one queued event per frame.
        ev = pygame.event.poll() # Look for any event
        if ev.type == pygame.QUIT: # Window close button clicked?
            break # ... leave game loop
        elif ev.type == pygame.MOUSEBUTTONDOWN:
            lizardMove = True
        elif ev.type == pygame.MOUSEBUTTONUP:
            lizardMove = False
        elif ev.type == pygame.KEYUP: #Add some key handling to make space change lizard's direction
            if ev.key == pygame.K_SPACE:
                if lizardDirection == 'Right':
                    lizardDirection = 'Left'
                else:
                    lizardDirection = 'Right'
        #----------------------Game Logic Goes After Here----------------------------#
        # Update your game objects and data structures here...
        #Game logic for the lizard
        if (lizardMove): #Check if the lizard should move
            #Move the lizard horizontally in its current facing direction
            if lizardDirection =='Right': #Lizard goes right
                lizardPos[0] += lizardSpeed #update the x for the lizard
            else: #Lizard goes left
                lizardPos[0] -= lizardSpeed #update the x for the lizard
            if (frameCount % lizardFrameRate == 0): #Only change the animation frame once every {lizardFrameRate} frames
                if (lizardPatchNumber < lizardNumPatches-1) :
                    lizardPatchNumber += 1
                    lizardRect[0] += lizardRect[2] #Shift the "display window" to the right along the sprite sheet by the width of the image
                else:
                    lizardPatchNumber = 0 #Reset back to first patch
                    lizardRect[0] -= lizardRect[2]*(lizardNumPatches-1) #Reset the rect x back to the first patch's position
                #self.imageRect = copy.copy(self.origImageRect)
                print(f"Patch Number: {lizardPatchNumber} Image Rect: {lizardRect} ")
        #Game Logic for the wizard
        wizardPos[0] += 0.5 #update the x for the wizard (it always drifts right)
        #----------------------Draw all the images----------------------------#
        # We draw everything from scratch on each frame.
        # So first fill everything with the background color
        mainSurface.fill((0, 200, 255))
        #Draw the image of the wizard sprite using the rect
        mainSurface.blit(spriteSheet, wizardPos, [130,165,16,28]) #Positions found using msPaint
        #Draw the lizard via a temp surface so the image can be mirrored when facing left
        #mainSurface.blit(spriteSheet, lizardPos, lizardRect) #Positions found using msPaint
        tempSurface = pygame.Surface( (lizardRect[2], lizardRect[3]) ) #Make a temp Surface using the width and height of the rect
        tempSurface.fill((1,1,1))
        tempSurface.set_colorkey((1,1,1)) #Make the near-black fill (1,1,1) transparent when blitted
        tempSurface.blit(spriteSheet, (0,0), lizardRect) #Copy the lizard image to the temp surface
        if lizardDirection == 'Left':
            tempSurface = pygame.transform.flip(tempSurface,True,False) #Mirror horizontally only
        mainSurface.blit(tempSurface, lizardPos) #Positions found using msPaint
        # Now the surface is ready, tell pygame to display it!
        pygame.display.flip()
        #----------------------Set your frame rate----------------------------#
        frameCount += 1;
        clock.tick(frameRate) #Sleep as needed to hold the target frame rate
    pygame.quit() # Once we leave the loop, close the window.
main()
| 43.691057
| 141
| 0.566059
|
4a0060a03345b99fb66f70bb35128dd2720107ad
| 13,183
|
py
|
Python
|
python/mxnet/gluon/block.py
|
viper7882/mxnet_win32
|
8b05c0cf83026147efd70a21abb3ac25ca6099f1
|
[
"Apache-2.0"
] | null | null | null |
python/mxnet/gluon/block.py
|
viper7882/mxnet_win32
|
8b05c0cf83026147efd70a21abb3ac25ca6099f1
|
[
"Apache-2.0"
] | null | null | null |
python/mxnet/gluon/block.py
|
viper7882/mxnet_win32
|
8b05c0cf83026147efd70a21abb3ac25ca6099f1
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
# pylint: disable= arguments-differ
"""Base container class for all neural network models."""
from .. import symbol, ndarray, initializer
from ..symbol import Symbol
from ..ndarray import NDArray
from .. import name as _name
from .parameter import Parameter, ParameterDict, DeferredInitializationError
class _BlockScope(object):
    """Scope for collecting child `Block`s."""
    _current = None

    def __init__(self, block):
        self._block = block
        self._counter = {}
        self._old_scope = None

    @staticmethod
    def create(prefix, params, hint):
        """Creates prefix and params for new `Block`."""
        scope = _BlockScope._current
        if scope is None:
            # No enclosing Block: derive a fresh name from the global NameManager.
            if prefix is None:
                prefix = _name.NameManager.current.get(None, hint) + '_'
            if params is None:
                shared = ParameterDict(prefix)
            else:
                shared = ParameterDict(params.prefix, params)
            return prefix, shared
        # Inside a parent Block: number children per hint and nest the prefix.
        if prefix is None:
            index = scope._counter.get(hint, 0)
            prefix = '%s%d_'%(hint, index)
            scope._counter[hint] = index + 1
        if params is None:
            parent = scope._block.params
            shared = ParameterDict(parent.prefix+prefix, parent._shared)
        else:
            shared = ParameterDict(params.prefix, params)
        return scope._block.prefix+prefix, shared

    def __enter__(self):
        # Push this scope; remember the previous one so nesting unwinds correctly.
        self._old_scope = _BlockScope._current
        _BlockScope._current = self
        return self

    def __exit__(self, ptype, value, trace):
        # Pop back to the enclosing scope regardless of exceptions.
        _BlockScope._current = self._old_scope
def _flatten(args):
    """Flatten (nested) lists of NDArray/Symbol into a flat leaf list plus a
    format tree that lets _regroup reconstruct the original nesting."""
    # Leaf: a single NDArray flattens to itself with format code 0.
    if isinstance(args, NDArray):
        return [args], int(0)
    # Leaf: a Symbol's format code is its output count (0 when single-output).
    if isinstance(args, Symbol):
        n_out = len(args.list_outputs())
        return [args], int(n_out if n_out > 1 else 0)
    assert isinstance(args, (list, tuple)), \
        "HybridBlock input must be (nested) list of Symbol or NDArray, " \
        "but got %s of type %s"%(str(args), str(type(args)))
    # Container: recurse, concatenating leaves and collecting per-item formats.
    flat, fmts = [], []
    for item in args:
        leaves, fmt = _flatten(item)
        flat.extend(leaves)
        fmts.append(fmt)
    return flat, fmts
def _regroup(args, fmt):
    """Inverse of _flatten: rebuild the nesting described by `fmt` from the
    flat sequence `args`, returning (regrouped, leftover_leaves)."""
    if isinstance(fmt, int):
        # 0 means "one bare leaf"; a positive count means "a slice of fmt leaves".
        if fmt == 0:
            return args[0], args[1:]
        return args[:fmt], args[fmt:]
    assert isinstance(args, (list, tuple)), \
        "HybridBlock output must be (nested) list of Symbol or NDArray, " \
        "but got %s of type %s"%(str(args), str(type(args)))
    regrouped = []
    remaining = args
    for sub_fmt in fmt:
        piece, remaining = _regroup(remaining, sub_fmt)
        regrouped.append(piece)
    return regrouped, remaining
class Block(object):
    """Base class for all neural network layers and models. Your models should
    subclass this class.

    `Block` can be nested recursively in a tree structure. You can create and
    assign child `Block` as regular attributes::

        from mxnet.gluon import Block, nn
        from mxnet import ndarray as F

        class Model(Block):
            def __init__(self, **kwargs):
                super(Model, self).__init__(**kwargs)
                # use name_scope to give child Blocks appropriate names.
                # It also allows sharing Parameters between Blocks recursively.
                with self.name_scope():
                    self.dense0 = nn.Dense(20)
                    self.dense1 = nn.Dense(20)

            def forward(self, x):
                x = F.relu(self.dense0(x))
                return F.relu(self.dense1(x))

        model = Model()
        model.initialize(ctx=mx.cpu(0))
        model(F.zeros((10, 10), ctx=mx.cpu(0)))

    Child `Block` assigned this way will be registered and `collect_params`
    will collect their Parameters recursively.

    Parameters
    ----------
    prefix : str
        Prefix acts like a name space. It will be prepended to the name of all
        Parameters and child `Block`s in this `Block`'s `name_scope`. Prefix
        should be unique within one model to prevent name collisions.
    params : ParameterDict or None
        `ParameterDict` for sharing weights with the new `Block`. For example,
        if you want `dense1` to share `dense0`'s weights, you can do::

            dense0 = nn.Dense(20)
            dense1 = nn.Dense(20, params=dense0.collect_params())
    """
    def __init__(self, prefix=None, params=None):
        # Resolve the final prefix/ParameterDict through the currently active
        # name scope (if any) so nested Blocks get hierarchical names.
        self._prefix, self._params = _BlockScope.create(prefix, params, self._alias())
        # Scope object handed out by name_scope(); children created inside it
        # will prefix their names with ours.
        self._scope = _BlockScope(self)
        self._children = []

    def __setattr__(self, name, value):
        """Registers parameters."""
        super(Block, self).__setattr__(name, value)
        # Any Block assigned as an attribute automatically becomes a child,
        # so collect_params()/hybridize() reach it recursively.
        if isinstance(value, Block):
            self.register_child(value)

    def _alias(self):
        # Lower-cased class name; used as the naming hint for auto prefixes.
        return self.__class__.__name__.lower()

    @property
    def params(self):
        """Returns this `Block`'s parameter dictionary (does not include its
        children's parameters)."""
        return self._params

    def collect_params(self):
        """Returns a `ParameterDict` containing this `Block` and all of its
        children's Parameters."""
        # Depth-first merge: own parameters first, then each child's subtree.
        ret = ParameterDict(self._params.prefix)
        ret.update(self.params)
        for cld in self._children:
            ret.update(cld.collect_params())
        return ret

    @property
    def prefix(self):
        """Prefix of this `Block`."""
        return self._prefix

    @property
    def name(self):
        """Name of this `Block`, without '_' in the end."""
        if self.prefix.endswith('_'):
            return self.prefix[:-1]
        return self.prefix

    def name_scope(self):
        """Returns a name space object managing a child `Block` and parameter
        names. Should be used within a `with` statement::

            with self.name_scope():
                self.dense = nn.Dense(20)
        """
        return self._scope

    def register_child(self, block):
        """Registers block as a child of self. `Block`s assigned to self as
        attributes will be registered automatically."""
        # NOTE: re-assigning the same attribute registers the Block again;
        # _children is a plain list with no de-duplication.
        self._children.append(block)

    def initialize(self, init=initializer.Uniform(), ctx=None, verbose=False):
        """Initializes `Parameter`s of this `Block` and its children.

        Equivalent to `block.collect_params().initialize(...)`
        """
        # NOTE(review): the default init=initializer.Uniform() is evaluated
        # once at def time and shared across all calls — confirm Initializer
        # instances are stateless before relying on this.
        self.collect_params().initialize(init, ctx, verbose)

    def hybridize(self, active=True):
        """Activates or deactivates `HybridBlock`s recursively. Has no effect on
        non-hybrid children.

        Parameters
        ----------
        active : bool, default True
            Whether to turn hybrid on or off.
        """
        # Plain Block only forwards the request; HybridBlock overrides this
        # to also flip its own flag.
        for cld in self._children:
            cld.hybridize(active)

    def __call__(self, *args):
        """Calls forward. Only accepts positional arguments."""
        return self.forward(*args)

    def forward(self, *args):
        """Overrides to implement forward computation using `NDArray`. Only
        accepts positional arguments.

        Parameters
        ----------
        *args : list of NDArray
            Input tensors.
        """
        # pylint: disable= invalid-name
        raise NotImplementedError
class HybridBlock(Block):
    """`HybridBlock` supports forwarding with both Symbol and NDArray.

    Forward computation in `HybridBlock` must be static to work with `Symbol`s,
    i.e. you cannot call `.asnumpy()`, `.shape`, `.dtype`, etc on tensors.
    Also, you cannot use branching or loop logic that bases on non-constant
    expressions like random numbers or intermediate results, since they change
    the graph structure for each iteration.

    Before activating with `hybridize()`, `HybridBlock` works just like normal
    `Block`. After activation, `HybridBlock` will create a symbolic graph
    representing the forward computation and cache it. On subsequent forwards,
    the cached graph will be used instead of `hybrid_forward`.

    Refer `Hybrid tutorial <http://mxnet.io/tutorials/gluon/hybrid.html>`_ to see
    the end-to-end usage.
    """
    def __init__(self, prefix=None, params=None):
        super(HybridBlock, self).__init__(prefix=prefix, params=params)
        self._reg_params = {}      # Parameters assigned directly as attributes
        self._cached_graph = ()    # (input symbols, grouped output symbol)
        self._cached_op = None     # CachedOp compiled from the cached graph
        self._cached_params = None # Parameter (or None) per graph input
        self._out_format = None    # nesting format of outputs (see _flatten)
        self._in_format = None     # nesting format of inputs
        self._in_idx = None        # (cargs_index, flat_args_index) pairs; was
                                   # previously created only in _build_cache
        self._active = False       # True once hybridize() has been called

    def __setattr__(self, name, value):
        """Registers parameters."""
        super(HybridBlock, self).__setattr__(name, value)
        if isinstance(value, Parameter):
            # BUGFIX: the message template was never interpolated, so a failing
            # assertion printed a literal '%s'; apply '% name' to fill it in.
            assert name not in self._reg_params or \
                not isinstance(self._reg_params[name], Parameter), \
                "Overriding Parameter attribute %s is not allowed. " \
                "Please pass in Parameters by specifying `params` at " \
                "Block construction instead."%name
            self._reg_params[name] = value

    def register_child(self, block):
        # A hybrid graph can only be traced through hybrid children.
        if not isinstance(block, HybridBlock):
            raise ValueError(
                "Children of HybridBlock must also be HybridBlock, " \
                "but %s has type %s. If you are using Sequential, " \
                "please try HybridSequential instead"%(
                    str(block), str(type(block))))
        super(HybridBlock, self).register_child(block)

    def hybridize(self, active=True):
        self._active = active
        super(HybridBlock, self).hybridize(active)

    def _get_graph(self, *args):
        """Builds (once) and returns the symbolic graph for these inputs."""
        if self._cached_graph:
            return self._cached_graph

        args, self._in_format = _flatten(args)
        # Placeholder variables named '0', '1', ... stand in for the inputs.
        syms = [symbol.var(str(i)) for i in range(len(args))]
        sym_args = _regroup(syms, self._in_format)[0]

        params = {i: j.var() for i, j in self._reg_params.items()}
        out = self.hybrid_forward(symbol, *sym_args, **params)  # pylint: disable=no-value-for-parameter
        out, self._out_format = _flatten(out)

        self._cached_graph = syms, symbol.Group(out)
        return self._cached_graph

    def infer_shape(self, *args):
        """Infers shape of Parameters from inputs."""
        syms, out = self._get_graph(*args)
        args, _, = _flatten(args)
        # Feed the concrete input shapes to symbolic shape inference, then
        # copy the inferred shapes back onto the Parameters.
        arg_shapes, _, aux_shapes = out.infer_shape(
            **{i.name: j.shape for i, j in zip(syms, args)})
        sdict = {i: j for i, j in zip(out.list_arguments(), arg_shapes)}
        sdict.update({name : shape for name, shape in \
                      zip(out.list_auxiliary_states(), aux_shapes)})
        for i in self.collect_params().values():
            i.shape = sdict[i.name]

    def _build_cache(self, *args):
        """Compiles the cached op and the input-binding tables."""
        self.infer_shape(*args)
        for i in self.collect_params().values():
            i._finish_deferred_init()

        _, out = self._get_graph(*args)
        self._cached_op = ndarray.CachedOp(out)
        params = dict(self.collect_params().items())
        # One slot per graph input: a Parameter, or None for a data input.
        self._cached_params = [params.get(name, None) for name in out.list_inputs()]
        # Data inputs are the numerically-named placeholders from _get_graph.
        self._in_idx = [(i, int(name)) for i, name in enumerate(out.list_inputs())
                        if name not in params]

    def _call_cached_op(self, *args):
        args, fmt = _flatten(args)
        assert fmt == self._in_format, "Invalid input format"
        # Parameter slots take current parameter data; data slots take args.
        cargs = [i.data() if i else None for i in self._cached_params]
        for i, j in self._in_idx:
            cargs[i] = args[j]
        out = self._cached_op(*cargs)
        if isinstance(out, NDArray):
            out = [out]
        return _regroup(out, self._out_format)[0]

    def forward(self, x, *args):
        """Defines the forward computation. Arguments can be either
        `NDArray` or `Symbol`."""
        if isinstance(x, NDArray):
            # Imperative path: optionally via the compiled cached op.
            if self._active and self._cached_op is None:
                self._build_cache(x, *args)

            with x.context as ctx:
                if self._active:
                    return self._call_cached_op(x, *args)
                try:
                    params = {i: j.data(ctx) for i, j in self._reg_params.items()}
                except DeferredInitializationError:
                    # Shapes weren't known at initialize() time; infer them
                    # from these inputs, finish init, then retry.
                    self.infer_shape(x, *args)
                    for i in self.collect_params().values():
                        i._finish_deferred_init()
                    params = {i: j.data(ctx) for i, j in self._reg_params.items()}
                return self.hybrid_forward(ndarray, x, *args, **params)

        # Symbolic path: parameters become symbolic variables.
        assert isinstance(x, Symbol), \
            "HybridBlock requires the first argument to forward be either " \
            "Symbol or NDArray, but got %s"%type(x)
        params = {i: j.var() for i, j in self._reg_params.items()}
        return self.hybrid_forward(symbol, x, *args, **params)

    def hybrid_forward(self, F, x, *args, **kwargs):
        """Overrides to construct symbolic graph for this `Block`.

        Parameters
        ----------
        x : Symbol or NDArray
            The first input tensor.
        *args : list of Symbol or list of NDArray
            Additional input tensors.
        """
        # pylint: disable= invalid-name
        raise NotImplementedError
| 36.316804
| 104
| 0.601456
|
4a0060ff65d73cd506e886bfd2664f063736c370
| 38,122
|
py
|
Python
|
tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py
|
BenRKarl/gapic-generator-python
|
e4f92bd988a5b955ede88a9a10163010aae825f1
|
[
"Apache-2.0"
] | 86
|
2018-09-28T11:46:15.000Z
|
2022-03-27T19:25:09.000Z
|
tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py
|
BenRKarl/gapic-generator-python
|
e4f92bd988a5b955ede88a9a10163010aae825f1
|
[
"Apache-2.0"
] | 1,054
|
2018-04-19T18:35:05.000Z
|
2022-03-30T14:12:38.000Z
|
tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py
|
BenRKarl/gapic-generator-python
|
e4f92bd988a5b955ede88a9a10163010aae825f1
|
[
"Apache-2.0"
] | 47
|
2018-04-26T22:08:56.000Z
|
2022-03-22T22:18:00.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import grpc_helpers
from google.api_core import gapic_v1
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.cloud.logging_v2.types import logging_config
from google.protobuf import empty_pb2 # type: ignore
from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO
class ConfigServiceV2GrpcTransport(ConfigServiceV2Transport):
"""gRPC backend transport for ConfigServiceV2.
Service for configuring sinks used to route log entries.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_stubs: Dict[str, Callable]
def __init__(self, *,
host: str = 'logging.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional(Sequence[str])): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for the grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure a mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
credentials=self._credentials,
credentials_file=credentials_file,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@classmethod
def create_channel(cls,
host: str = 'logging.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
Raises:
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
return grpc_helpers.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs
)
@property
def grpc_channel(self) -> grpc.Channel:
"""Return the channel designed to connect to this service.
"""
return self._grpc_channel
@property
def list_buckets(self) -> Callable[
[logging_config.ListBucketsRequest],
logging_config.ListBucketsResponse]:
r"""Return a callable for the list buckets method over gRPC.
Lists buckets.
Returns:
Callable[[~.ListBucketsRequest],
~.ListBucketsResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'list_buckets' not in self._stubs:
self._stubs['list_buckets'] = self.grpc_channel.unary_unary(
'/google.logging.v2.ConfigServiceV2/ListBuckets',
request_serializer=logging_config.ListBucketsRequest.serialize,
response_deserializer=logging_config.ListBucketsResponse.deserialize,
)
return self._stubs['list_buckets']
@property
def get_bucket(self) -> Callable[
[logging_config.GetBucketRequest],
logging_config.LogBucket]:
r"""Return a callable for the get bucket method over gRPC.
Gets a bucket.
Returns:
Callable[[~.GetBucketRequest],
~.LogBucket]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_bucket' not in self._stubs:
self._stubs['get_bucket'] = self.grpc_channel.unary_unary(
'/google.logging.v2.ConfigServiceV2/GetBucket',
request_serializer=logging_config.GetBucketRequest.serialize,
response_deserializer=logging_config.LogBucket.deserialize,
)
return self._stubs['get_bucket']
@property
def create_bucket(self) -> Callable[
[logging_config.CreateBucketRequest],
logging_config.LogBucket]:
r"""Return a callable for the create bucket method over gRPC.
Creates a bucket that can be used to store log
entries. Once a bucket has been created, the region
cannot be changed.
Returns:
Callable[[~.CreateBucketRequest],
~.LogBucket]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'create_bucket' not in self._stubs:
self._stubs['create_bucket'] = self.grpc_channel.unary_unary(
'/google.logging.v2.ConfigServiceV2/CreateBucket',
request_serializer=logging_config.CreateBucketRequest.serialize,
response_deserializer=logging_config.LogBucket.deserialize,
)
return self._stubs['create_bucket']
@property
def update_bucket(self) -> Callable[
[logging_config.UpdateBucketRequest],
logging_config.LogBucket]:
r"""Return a callable for the update bucket method over gRPC.
Updates a bucket. This method replaces the following fields in
the existing bucket with values from the new bucket:
``retention_period``
If the retention period is decreased and the bucket is locked,
FAILED_PRECONDITION will be returned.
If the bucket has a LifecycleState of DELETE_REQUESTED,
FAILED_PRECONDITION will be returned.
A buckets region may not be modified after it is created.
Returns:
Callable[[~.UpdateBucketRequest],
~.LogBucket]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'update_bucket' not in self._stubs:
self._stubs['update_bucket'] = self.grpc_channel.unary_unary(
'/google.logging.v2.ConfigServiceV2/UpdateBucket',
request_serializer=logging_config.UpdateBucketRequest.serialize,
response_deserializer=logging_config.LogBucket.deserialize,
)
return self._stubs['update_bucket']
@property
def delete_bucket(self) -> Callable[
[logging_config.DeleteBucketRequest],
empty_pb2.Empty]:
r"""Return a callable for the delete bucket method over gRPC.
Deletes a bucket. Moves the bucket to the DELETE_REQUESTED
state. After 7 days, the bucket will be purged and all logs in
the bucket will be permanently deleted.
Returns:
Callable[[~.DeleteBucketRequest],
~.Empty]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'delete_bucket' not in self._stubs:
self._stubs['delete_bucket'] = self.grpc_channel.unary_unary(
'/google.logging.v2.ConfigServiceV2/DeleteBucket',
request_serializer=logging_config.DeleteBucketRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs['delete_bucket']
@property
def undelete_bucket(self) -> Callable[
[logging_config.UndeleteBucketRequest],
empty_pb2.Empty]:
r"""Return a callable for the undelete bucket method over gRPC.
Undeletes a bucket. A bucket that has been deleted
may be undeleted within the grace period of 7 days.
Returns:
Callable[[~.UndeleteBucketRequest],
~.Empty]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'undelete_bucket' not in self._stubs:
self._stubs['undelete_bucket'] = self.grpc_channel.unary_unary(
'/google.logging.v2.ConfigServiceV2/UndeleteBucket',
request_serializer=logging_config.UndeleteBucketRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs['undelete_bucket']
@property
def list_views(self) -> Callable[
[logging_config.ListViewsRequest],
logging_config.ListViewsResponse]:
r"""Return a callable for the list views method over gRPC.
Lists views on a bucket.
Returns:
Callable[[~.ListViewsRequest],
~.ListViewsResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'list_views' not in self._stubs:
self._stubs['list_views'] = self.grpc_channel.unary_unary(
'/google.logging.v2.ConfigServiceV2/ListViews',
request_serializer=logging_config.ListViewsRequest.serialize,
response_deserializer=logging_config.ListViewsResponse.deserialize,
)
return self._stubs['list_views']
@property
def get_view(self) -> Callable[
[logging_config.GetViewRequest],
logging_config.LogView]:
r"""Return a callable for the get view method over gRPC.
Gets a view.
Returns:
Callable[[~.GetViewRequest],
~.LogView]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_view' not in self._stubs:
self._stubs['get_view'] = self.grpc_channel.unary_unary(
'/google.logging.v2.ConfigServiceV2/GetView',
request_serializer=logging_config.GetViewRequest.serialize,
response_deserializer=logging_config.LogView.deserialize,
)
return self._stubs['get_view']
@property
def create_view(self) -> Callable[
[logging_config.CreateViewRequest],
logging_config.LogView]:
r"""Return a callable for the create view method over gRPC.
Creates a view over logs in a bucket. A bucket may
contain a maximum of 50 views.
Returns:
Callable[[~.CreateViewRequest],
~.LogView]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'create_view' not in self._stubs:
self._stubs['create_view'] = self.grpc_channel.unary_unary(
'/google.logging.v2.ConfigServiceV2/CreateView',
request_serializer=logging_config.CreateViewRequest.serialize,
response_deserializer=logging_config.LogView.deserialize,
)
return self._stubs['create_view']
@property
def update_view(self) -> Callable[
[logging_config.UpdateViewRequest],
logging_config.LogView]:
r"""Return a callable for the update view method over gRPC.
Updates a view. This method replaces the following fields in the
existing view with values from the new view: ``filter``.
Returns:
Callable[[~.UpdateViewRequest],
~.LogView]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'update_view' not in self._stubs:
self._stubs['update_view'] = self.grpc_channel.unary_unary(
'/google.logging.v2.ConfigServiceV2/UpdateView',
request_serializer=logging_config.UpdateViewRequest.serialize,
response_deserializer=logging_config.LogView.deserialize,
)
return self._stubs['update_view']
@property
def delete_view(self) -> Callable[
[logging_config.DeleteViewRequest],
empty_pb2.Empty]:
r"""Return a callable for the delete view method over gRPC.
Deletes a view from a bucket.
Returns:
Callable[[~.DeleteViewRequest],
~.Empty]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'delete_view' not in self._stubs:
self._stubs['delete_view'] = self.grpc_channel.unary_unary(
'/google.logging.v2.ConfigServiceV2/DeleteView',
request_serializer=logging_config.DeleteViewRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs['delete_view']
@property
def list_sinks(self) -> Callable[
[logging_config.ListSinksRequest],
logging_config.ListSinksResponse]:
r"""Return a callable for the list sinks method over gRPC.
Lists sinks.
Returns:
Callable[[~.ListSinksRequest],
~.ListSinksResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'list_sinks' not in self._stubs:
self._stubs['list_sinks'] = self.grpc_channel.unary_unary(
'/google.logging.v2.ConfigServiceV2/ListSinks',
request_serializer=logging_config.ListSinksRequest.serialize,
response_deserializer=logging_config.ListSinksResponse.deserialize,
)
return self._stubs['list_sinks']
@property
def get_sink(self) -> Callable[
[logging_config.GetSinkRequest],
logging_config.LogSink]:
r"""Return a callable for the get sink method over gRPC.
Gets a sink.
Returns:
Callable[[~.GetSinkRequest],
~.LogSink]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_sink' not in self._stubs:
self._stubs['get_sink'] = self.grpc_channel.unary_unary(
'/google.logging.v2.ConfigServiceV2/GetSink',
request_serializer=logging_config.GetSinkRequest.serialize,
response_deserializer=logging_config.LogSink.deserialize,
)
return self._stubs['get_sink']
@property
def create_sink(self) -> Callable[
[logging_config.CreateSinkRequest],
logging_config.LogSink]:
r"""Return a callable for the create sink method over gRPC.
Creates a sink that exports specified log entries to a
destination. The export of newly-ingested log entries begins
immediately, unless the sink's ``writer_identity`` is not
permitted to write to the destination. A sink can export log
entries only from the resource owning the sink.
Returns:
Callable[[~.CreateSinkRequest],
~.LogSink]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'create_sink' not in self._stubs:
self._stubs['create_sink'] = self.grpc_channel.unary_unary(
'/google.logging.v2.ConfigServiceV2/CreateSink',
request_serializer=logging_config.CreateSinkRequest.serialize,
response_deserializer=logging_config.LogSink.deserialize,
)
return self._stubs['create_sink']
@property
def update_sink(self) -> Callable[
[logging_config.UpdateSinkRequest],
logging_config.LogSink]:
r"""Return a callable for the update sink method over gRPC.
Updates a sink. This method replaces the following fields in the
existing sink with values from the new sink: ``destination``,
and ``filter``.
The updated sink might also have a new ``writer_identity``; see
the ``unique_writer_identity`` field.
Returns:
Callable[[~.UpdateSinkRequest],
~.LogSink]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'update_sink' not in self._stubs:
self._stubs['update_sink'] = self.grpc_channel.unary_unary(
'/google.logging.v2.ConfigServiceV2/UpdateSink',
request_serializer=logging_config.UpdateSinkRequest.serialize,
response_deserializer=logging_config.LogSink.deserialize,
)
return self._stubs['update_sink']
@property
def delete_sink(self) -> Callable[
[logging_config.DeleteSinkRequest],
empty_pb2.Empty]:
r"""Return a callable for the delete sink method over gRPC.
Deletes a sink. If the sink has a unique ``writer_identity``,
then that service account is also deleted.
Returns:
Callable[[~.DeleteSinkRequest],
~.Empty]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'delete_sink' not in self._stubs:
self._stubs['delete_sink'] = self.grpc_channel.unary_unary(
'/google.logging.v2.ConfigServiceV2/DeleteSink',
request_serializer=logging_config.DeleteSinkRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs['delete_sink']
@property
def list_exclusions(self) -> Callable[
[logging_config.ListExclusionsRequest],
logging_config.ListExclusionsResponse]:
r"""Return a callable for the list exclusions method over gRPC.
Lists all the exclusions in a parent resource.
Returns:
Callable[[~.ListExclusionsRequest],
~.ListExclusionsResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'list_exclusions' not in self._stubs:
self._stubs['list_exclusions'] = self.grpc_channel.unary_unary(
'/google.logging.v2.ConfigServiceV2/ListExclusions',
request_serializer=logging_config.ListExclusionsRequest.serialize,
response_deserializer=logging_config.ListExclusionsResponse.deserialize,
)
return self._stubs['list_exclusions']
@property
def get_exclusion(self) -> Callable[
[logging_config.GetExclusionRequest],
logging_config.LogExclusion]:
r"""Return a callable for the get exclusion method over gRPC.
Gets the description of an exclusion.
Returns:
Callable[[~.GetExclusionRequest],
~.LogExclusion]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_exclusion' not in self._stubs:
self._stubs['get_exclusion'] = self.grpc_channel.unary_unary(
'/google.logging.v2.ConfigServiceV2/GetExclusion',
request_serializer=logging_config.GetExclusionRequest.serialize,
response_deserializer=logging_config.LogExclusion.deserialize,
)
return self._stubs['get_exclusion']
@property
def create_exclusion(self) -> Callable[
[logging_config.CreateExclusionRequest],
logging_config.LogExclusion]:
r"""Return a callable for the create exclusion method over gRPC.
Creates a new exclusion in a specified parent
resource. Only log entries belonging to that resource
can be excluded. You can have up to 10 exclusions in a
resource.
Returns:
Callable[[~.CreateExclusionRequest],
~.LogExclusion]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'create_exclusion' not in self._stubs:
self._stubs['create_exclusion'] = self.grpc_channel.unary_unary(
'/google.logging.v2.ConfigServiceV2/CreateExclusion',
request_serializer=logging_config.CreateExclusionRequest.serialize,
response_deserializer=logging_config.LogExclusion.deserialize,
)
return self._stubs['create_exclusion']
@property
def update_exclusion(self) -> Callable[
[logging_config.UpdateExclusionRequest],
logging_config.LogExclusion]:
r"""Return a callable for the update exclusion method over gRPC.
Changes one or more properties of an existing
exclusion.
Returns:
Callable[[~.UpdateExclusionRequest],
~.LogExclusion]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'update_exclusion' not in self._stubs:
self._stubs['update_exclusion'] = self.grpc_channel.unary_unary(
'/google.logging.v2.ConfigServiceV2/UpdateExclusion',
request_serializer=logging_config.UpdateExclusionRequest.serialize,
response_deserializer=logging_config.LogExclusion.deserialize,
)
return self._stubs['update_exclusion']
@property
def delete_exclusion(self) -> Callable[
[logging_config.DeleteExclusionRequest],
empty_pb2.Empty]:
r"""Return a callable for the delete exclusion method over gRPC.
Deletes an exclusion.
Returns:
Callable[[~.DeleteExclusionRequest],
~.Empty]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'delete_exclusion' not in self._stubs:
self._stubs['delete_exclusion'] = self.grpc_channel.unary_unary(
'/google.logging.v2.ConfigServiceV2/DeleteExclusion',
request_serializer=logging_config.DeleteExclusionRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs['delete_exclusion']
@property
def get_cmek_settings(self) -> Callable[
[logging_config.GetCmekSettingsRequest],
logging_config.CmekSettings]:
r"""Return a callable for the get cmek settings method over gRPC.
Gets the Logs Router CMEK settings for the given resource.
Note: CMEK for the Logs Router can currently only be configured
for GCP organizations. Once configured, it applies to all
projects and folders in the GCP organization.
See `Enabling CMEK for Logs
Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
for more information.
Returns:
Callable[[~.GetCmekSettingsRequest],
~.CmekSettings]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_cmek_settings' not in self._stubs:
self._stubs['get_cmek_settings'] = self.grpc_channel.unary_unary(
'/google.logging.v2.ConfigServiceV2/GetCmekSettings',
request_serializer=logging_config.GetCmekSettingsRequest.serialize,
response_deserializer=logging_config.CmekSettings.deserialize,
)
return self._stubs['get_cmek_settings']
@property
def update_cmek_settings(self) -> Callable[
[logging_config.UpdateCmekSettingsRequest],
logging_config.CmekSettings]:
r"""Return a callable for the update cmek settings method over gRPC.
Updates the Logs Router CMEK settings for the given resource.
Note: CMEK for the Logs Router can currently only be configured
for GCP organizations. Once configured, it applies to all
projects and folders in the GCP organization.
[UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]
will fail if 1) ``kms_key_name`` is invalid, or 2) the
associated service account does not have the required
``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for
the key, or 3) access to the key is disabled.
See `Enabling CMEK for Logs
Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
for more information.
Returns:
Callable[[~.UpdateCmekSettingsRequest],
~.CmekSettings]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'update_cmek_settings' not in self._stubs:
self._stubs['update_cmek_settings'] = self.grpc_channel.unary_unary(
'/google.logging.v2.ConfigServiceV2/UpdateCmekSettings',
request_serializer=logging_config.UpdateCmekSettingsRequest.serialize,
response_deserializer=logging_config.CmekSettings.deserialize,
)
return self._stubs['update_cmek_settings']
    def close(self):
        """Close the underlying gRPC channel, releasing its resources."""
        self.grpc_channel.close()
# Names exported when this module is star-imported.
__all__ = (
    'ConfigServiceV2GrpcTransport',
)
| 43.271283
| 88
| 0.625177
|
4a00610befd0a325f1568c888e310e64a4a6be8e
| 532
|
py
|
Python
|
V1_1_0_0/MGC3130/setup.py
|
MatteoDestro/RaspberryPi_Gesture_MGC3130
|
071c7d26dab897786dcfd6fc1e5faac9e3531b4b
|
[
"BSD-2-Clause"
] | 1
|
2021-04-19T12:09:36.000Z
|
2021-04-19T12:09:36.000Z
|
V1_1_0_0/MGC3130/setup.py
|
MatteoDestro/RaspberryPi_Gesture_MGC3130
|
071c7d26dab897786dcfd6fc1e5faac9e3531b4b
|
[
"BSD-2-Clause"
] | null | null | null |
V1_1_0_0/MGC3130/setup.py
|
MatteoDestro/RaspberryPi_Gesture_MGC3130
|
071c7d26dab897786dcfd6fc1e5faac9e3531b4b
|
[
"BSD-2-Clause"
] | null | null | null |
from distutils.core import setup

# Packaging metadata for the MGC3130 (Microchip GestIC controller) library.
# NOTE(review): distutils is deprecated and removed in Python 3.12; consider
# migrating to setuptools when adding a dependency is acceptable.
setup(name="MGC3130",
      version="1.1",
      description="MGC3130 Library",
      # fixed typo: "usefull" -> "useful" in the published description
      long_description="This package is useful to manage the MGC3130 gestic controller developed by Microchip",
      author="Matteo Destro",
      author_email="info@open-electronics.org",
      url="www.open-electronics.org",
      license="GPL",
      # fixed: distutils expects the 'platforms' keyword (a list); the
      # original 'platform' keyword is unknown and was silently ignored
      # with a UserWarning.
      platforms=["RaspberryPi"],
      packages=["MGC3130"])
| 40.923077
| 115
| 0.541353
|
4a0061d337367fec129df78fe6669e7c177901b4
| 288
|
py
|
Python
|
flu/sequence_and_annotations/muscle_batch_align.py
|
iosonofabio/FitnessInference
|
3de97a9301733ac9e47ebc78f4e76f7530ccb538
|
[
"MIT"
] | 11
|
2015-08-24T05:20:56.000Z
|
2021-08-13T13:58:31.000Z
|
flu/sequence_and_annotations/muscle_batch_align.py
|
iosonofabio/FitnessInference
|
3de97a9301733ac9e47ebc78f4e76f7530ccb538
|
[
"MIT"
] | 3
|
2015-08-24T04:43:25.000Z
|
2021-07-06T00:33:44.000Z
|
flu/sequence_and_annotations/muscle_batch_align.py
|
iosonofabio/FitnessInference
|
3de97a9301733ac9e47ebc78f4e76f7530ccb538
|
[
"MIT"
] | 3
|
2019-08-30T18:52:49.000Z
|
2021-06-15T06:47:04.000Z
|
import glob
import os

# Submit one SGE batch job per GISAID H3N2 FASTA file, aligning each input
# with muscle through the muscle_script.sh wrapper.
flist = glob.glob('../data/gisaid_H3N2_all_years_human_full_date_???.fasta')
for fname in flist:
    # qsub options: run in current working dir, 8G memory, runtime < 1h.
    cmd = ['qsub', '-cwd -l h_vmem=8G -l h_rt=00:59:59', 'muscle_script.sh',
           fname, fname[:-6] + '_aligned.fasta']
    os.system(' '.join(cmd))
    # fixed: the original used the Python 2 print statement
    # (`print ' '.join(cmd)`), which is a SyntaxError under Python 3.
    print(' '.join(cmd))
| 28.8
| 113
| 0.65625
|
4a00642668fabac77d69b7ebeb72fe815fee840e
| 1,190
|
py
|
Python
|
tests/helpers.py
|
MrKevinWeiss/riot_pal
|
61e8229bc957d4297bbd1bad4bc4d229f6deee62
|
[
"MIT"
] | 1
|
2018-08-24T06:52:47.000Z
|
2018-08-24T06:52:47.000Z
|
tests/helpers.py
|
MrKevinWeiss/riot_pal
|
61e8229bc957d4297bbd1bad4bc4d229f6deee62
|
[
"MIT"
] | 17
|
2018-10-11T12:19:19.000Z
|
2019-10-22T14:28:29.000Z
|
tests/helpers.py
|
MrKevinWeiss/riot_pal
|
61e8229bc957d4297bbd1bad4bc4d229f6deee62
|
[
"MIT"
] | 2
|
2019-10-18T12:58:33.000Z
|
2020-01-24T09:55:20.000Z
|
# Copyright (c) 2018 Kevin Weiss, for HAW Hamburg <kevin.weiss@haw-hamburg.de>
#
# This file is subject to the terms and conditions of the MIT License. See the
# file LICENSE in the top level directory for more details.
# SPDX-License-Identifier: MIT
"""Helper functions for running tests on RIOT PAL."""
def _try_parse_int(val):
try:
return int(val, 0)
except (ValueError, TypeError):
return val
def weak_cmp(val1, val2):
    """Compare two numbers that can be hex strings, decimal strings, or ints.

    Each value is parsed to an int when possible, so e.g. ``'0x10'``,
    ``'16'`` and ``16`` all compare equal.  (Docstring typos fixed:
    "ban be" -> "can be", "aproximatly" -> "approximately".)

    Args:
        val1(int, str): Value to compare.
        val2(int, str): Value to compare.

    Return:
        bool: True if approximately equal, False if not.
    """
    return _try_parse_int(val1) == _try_parse_int(val2)
def try_add(val):
    """Attempt to add 1 to a value, or to its first element.

    Scalars (or numeric strings) are parsed and incremented; for indexable
    containers the first element is parsed and incremented in place.  Values
    that cannot be incremented are returned unchanged.  (Docstring typo
    fixed: "retruns" -> "returns".)

    Args:
        val(int, str, list): Value to add the number to.

    Return:
        int: If successful returns the val + 1.
        If failed just returns the val.
    """
    try:
        return _try_parse_int(val) + 1
    except TypeError:
        try:
            val[0] = _try_parse_int(val[0]) + 1
            return val
        # Robustness fix: an empty sequence previously escaped as an
        # uncaught IndexError; treat it like any other un-addable value.
        except (TypeError, IndexError):
            return val
| 25.319149
| 79
| 0.620168
|
4a0064bde336d8a6ac533e7ec6ab8993b453efcd
| 4,954
|
py
|
Python
|
tests/test_managed_keys.py
|
egberts/bind9_parser
|
ee7b845a4319acfa6bb924e3aec853cf6a28b61c
|
[
"MIT"
] | 5
|
2020-04-24T14:03:18.000Z
|
2021-08-12T15:38:55.000Z
|
tests/test_managed_keys.py
|
egberts/bind9_parser
|
ee7b845a4319acfa6bb924e3aec853cf6a28b61c
|
[
"MIT"
] | 15
|
2020-03-05T09:39:33.000Z
|
2022-03-05T08:36:44.000Z
|
tests/test_managed_keys.py
|
egberts/bind9_parser
|
ee7b845a4319acfa6bb924e3aec853cf6a28b61c
|
[
"MIT"
] | 2
|
2020-07-26T08:24:42.000Z
|
2022-03-24T17:55:14.000Z
|
#!/usr/bin/env python3
"""
File: test_managed_keys.py
Clause: view, managed-keys
Statement: managed-keys
Description: Performs unit test on the isc_managed_keys.py source file.
The managed-keys statement used by either by:
* 'view' clause as a statement or
* top-level 'managed-keys' clause.
"""
import unittest
from bind9_parser.isc_utils import assertParserResultDictTrue, assertParserResultDictFalse
from bind9_parser.isc_managed_keys import managed_keyname_type,\
managed_keyname_dquoted, managed_keyname_squoted,\
managed_key_domain_name, managed_key_type,\
managed_key_flags_type, managed_key_protocol_type,\
managed_key_algorithm_name, managed_key_algorithm_type,\
managed_key_secret_type, managed_keys_set,\
managed_keys_series, managed_keys_statement_standalone,\
quoted_managed_key_secret_type
class TestManagedKeys(unittest.TestCase):
    """ Statement managed-keys; used by view or managed-keys clause """

    def _check_grammar(self, element, samples, expect_failure):
        # Shared driver: feed every sample to the pyparsing element and
        # assert the aggregate outcome reported by runTests().
        outcome = element.runTests(samples, failureTests=expect_failure)
        self.assertTrue(outcome[0])

    def test_isc_managed_keys_domain_name_passing(self):
        """ Statement managed-keys; Type Domain Name; passing mode """
        self._check_grammar(
            managed_key_domain_name,
            [
                'key_name1',
                'unquoted-key_id2',
                "'squoted-key_id3'",
                '"dquoted-key_id4"',
            ],
            expect_failure=False)

    def test_isc_managed_keys_domain_name_failing(self):
        """ Statement managed-keys; Type Domain Name; failing mode """
        self._check_grammar(
            managed_key_domain_name,
            ['country us'],
            expect_failure=True)

    def test_isc_managed_keys_algorithm_name_passing(self):
        """ Statement managed-keys; algorithm name; passing mode"""
        self._check_grammar(
            managed_key_algorithm_name,
            [
                'aABCDEFG',
                'hmac-md5',
                'hmac-sha512',
                'hmac-sha4096',
            ],
            expect_failure=False)

    def test_isc_managed_keys_algorithm_name_failing(self):
        """ Statement managed-keys; algorithm name; failing mode"""
        self._check_grammar(
            managed_key_algorithm_name,
            [
                'aAB&DEFG',
                '\'aABDEFG',
                'aABDEFG\'',
                '"aABDEFG',
                'aABDEFG"',
                'bad*algorithm;',
            ],
            expect_failure=True)

    # A managed-key entry consists of: domain name, flags, protocol,
    # algorithm, and the Base64 representation of the key data.
    def test_isc_managed_keys_series_passing(self):
        """ Statement managed-keys; managed keys series; passing mode """
        self._check_grammar(
            managed_keys_series,
            [
                'abc initial-key 1 1 1 "ASBASDASD==";',
                'abc initial-key 1 1 1 "ASBASDASD=="; def initial-key 243 16 7 "LKJOULKJOIULKKJ+ASD==";',
            ],
            expect_failure=False)

    def test_isc_managed_keys_series_failing(self):
        """ Statement managed-keys; managed keys series, failing mode """
        self._check_grammar(
            managed_keys_series,
            ['abc initial-key X Y Z ASBASDASD==;'],
            expect_failure=True)

    def test_isc_managed_keys_statement_set_passing(self):
        """ Statement managed-keys; passing mode """
        self._check_grammar(
            managed_keys_statement_standalone,
            [
                'managed-keys { abc initial-key 1 1 1 "ASBASDASD==";};',
                'managed-keys { example.com initial-key 1 1 1 "ASBASDASD==";};',
                'managed-keys { www.example.com initial-key 1 1 1 "ASBASDASD==";};',
                'managed-keys { www1.www.example.com initial-key 1 1 1 "ASBASDASD==";};',
                'managed-keys { www1.www.example.com initial-key 1 1 1 "ASBASDASD==";};',
                'managed-keys { "." initial-key 257 3 3 "AAAAAAAAA+BBBBBBBBBBBBB/CCXCCCCCCCCCCCCC";};',
                "managed-keys { \".\" initial-key 257 3 3 'AAAAAAAAA+BBBBBBBBBBBBB/CCXCCCCCCCCCCCCC';};",
                'managed-keys { "." initial-key 257 3 3 "AAAAAAAAA+BBBBBBBBBBBBB/CCXCCCCCCCCCCCCC";};',
            ],
            expect_failure=False)

    def test_isc_managed_keys_statement_set_failing(self):
        """ Statement managed-keys; failing mode """
        self._check_grammar(
            managed_keys_statement_standalone,
            ['managed-keys { . initial_key 257 3 3 AAAAAAAAA+BBBBBBBBBBBBB/CCXCCCCCCCCCCCCC;};'],
            expect_failure=True)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| 36.160584
| 101
| 0.623335
|
4a0064eccfa0d77de9313c7a4bc4caa159a7db18
| 5,021
|
py
|
Python
|
lib/datasets/pascal_ctx.py
|
andreABbauer/HRNet-Semantic-Segmentation
|
5618ddd1416cb5d1b861eab2c5d46e2b31c3b71d
|
[
"MIT"
] | 84
|
2021-02-24T11:54:00.000Z
|
2022-03-29T03:47:20.000Z
|
lib/datasets/pascal_ctx.py
|
andreABbauer/HRNet-Semantic-Segmentation
|
5618ddd1416cb5d1b861eab2c5d46e2b31c3b71d
|
[
"MIT"
] | 19
|
2021-03-14T06:52:36.000Z
|
2022-03-29T12:21:27.000Z
|
lib/datasets/pascal_ctx.py
|
andreABbauer/HRNet-Semantic-Segmentation
|
5618ddd1416cb5d1b861eab2c5d46e2b31c3b71d
|
[
"MIT"
] | 25
|
2021-02-28T06:52:39.000Z
|
2022-03-22T08:34:15.000Z
|
# ------------------------------------------------------------------------------
# Copyright (c) Microsoft
# Licensed under the MIT License.
# Written by Ke Sun (sunk@mail.ustc.edu.cn)
# Referring to the implementation in
# https://github.com/zhanghang1989/PyTorch-Encoding
# ------------------------------------------------------------------------------
import os
import cv2
import numpy as np
from PIL import Image
import torch
from .base_dataset import BaseDataset
class PASCALContext(BaseDataset):
    """PASCAL-Context semantic segmentation dataset (59 or 60 classes).

    Loads images and annotations through the ``detail`` API from the
    VOC2010-based PASCAL-Context release, remapping the raw category ids
    listed in ``_mapping`` to a compact ``[0, num_classes)`` index space.
    Converted masks are cached to ``train.pth`` / ``val.pth`` so the
    expensive conversion runs only once per split.
    """
    def __init__(self,
                 root,
                 list_path,
                 num_samples=None,
                 num_classes=59,
                 multi_scale=True,
                 flip=True,
                 ignore_label=-1,
                 base_size=520,
                 crop_size=(480, 480),
                 downsample_rate=1,
                 scale_factor=16,
                 mean=[0.485, 0.456, 0.406],
                 std=[0.229, 0.224, 0.225],):
        # NOTE(review): `num_samples` is accepted for signature parity with
        # the other datasets but is not used here. The mutable list defaults
        # (mean/std) are kept for interface compatibility; they are never
        # mutated in this class.
        super(PASCALContext, self).__init__(ignore_label, base_size,
                crop_size, downsample_rate, scale_factor, mean, std)

        self.root = os.path.join(root, 'pascal_ctx/VOCdevkit/VOC2010')
        self.split = list_path

        self.num_classes = num_classes
        self.class_weights = None

        self.multi_scale = multi_scale
        self.flip = flip
        self.crop_size = crop_size

        # prepare data
        annots = os.path.join(self.root, 'trainval_merged.json')
        img_path = os.path.join(self.root, 'JPEGImages')
        from detail import Detail
        if 'val' in self.split:
            self.detail = Detail(annots, img_path, 'val')
            mask_file = os.path.join(self.root, 'val.pth')
        elif 'train' in self.split:
            # NOTE(review): `self.mode` is only set on the train path in the
            # original code; preserved as-is.
            self.mode = 'train'
            self.detail = Detail(annots, img_path, 'train')
            mask_file = os.path.join(self.root, 'train.pth')
        else:
            raise NotImplementedError('only supporting train and val set.')
        self.files = self.detail.getImgs()

        # Raw PASCAL-Context category ids retained for the 59-class task;
        # sorted so np.digitize can binary-search them in _class_to_index.
        self._mapping = np.sort(np.array([
            0, 2, 259, 260, 415, 324, 9, 258, 144, 18, 19, 22,
            23, 397, 25, 284, 158, 159, 416, 33, 162, 420, 454, 295, 296,
            427, 44, 45, 46, 308, 59, 440, 445, 31, 232, 65, 354, 424,
            68, 326, 72, 458, 34, 207, 80, 355, 85, 347, 220, 349, 360,
            98, 187, 104, 105, 366, 189, 368, 113, 115]))
        self._key = np.array(range(len(self._mapping))).astype('uint8')

        print('mask_file:', mask_file)
        if os.path.exists(mask_file):
            self.masks = torch.load(mask_file)
        else:
            self.masks = self._preprocess(mask_file)

    def _class_to_index(self, mask):
        """Remap raw category ids in *mask* to compact indices via _mapping."""
        # Every value must be a known category id; digitize then gives the
        # position of each value within the sorted _mapping array.
        values = np.unique(mask)
        for v in values:
            assert v in self._mapping
        index = np.digitize(mask.ravel(), self._mapping, right=True)
        return self._key[index].reshape(mask.shape)

    def _preprocess(self, mask_file):
        """Convert every annotation mask once and cache the result to disk."""
        masks = {}
        # fixed message: missing sentence space and "only run" -> "only runs"
        print("Preprocessing mask, this will take a while. " + \
            "But don't worry, it only runs once for each split.")
        for i in range(len(self.files)):
            img_id = self.files[i]
            mask = Image.fromarray(self._class_to_index(
                self.detail.getMask(img_id)))
            masks[img_id['image_id']] = mask
        torch.save(masks, mask_file)
        return masks

    def __getitem__(self, index):
        """Return (image, label, original_size, file_name) for sample *index*."""
        item = self.files[index]
        name = item['file_name']
        img_id = item['image_id']

        image = cv2.imread(os.path.join(self.detail.img_folder, name),
                           cv2.IMREAD_COLOR)
        # fixed: `np.int` was deprecated and removed in NumPy 1.24 — the
        # builtin `int` is the documented exact replacement.
        label = np.asarray(self.masks[img_id], dtype=int)
        size = image.shape

        if self.split == 'val':
            image = cv2.resize(image, self.crop_size,
                               interpolation=cv2.INTER_LINEAR)
            image = self.input_transform(image)
            image = image.transpose((2, 0, 1))

            label = cv2.resize(label, self.crop_size,
                               interpolation=cv2.INTER_NEAREST)
            label = self.label_transform(label)
        elif self.split == 'testval':
            # evaluate model on val dataset
            image = self.input_transform(image)
            image = image.transpose((2, 0, 1))
            label = self.label_transform(label)
        else:
            image, label = self.gen_sample(image, label,
                                           self.multi_scale, self.flip)

        return image.copy(), label.copy(), np.array(size), name

    def label_transform(self, label):
        """Shift labels for the 59-class task so background becomes ignore (-1)."""
        if self.num_classes == 59:
            # background is ignored
            label = np.array(label).astype('int32') - 1
            label[label == -2] = -1
        else:
            label = np.array(label).astype('int32')
        return label
| 36.649635
| 80
| 0.533559
|
4a00653220ff0cc8a9700360f2fd19ed6496f435
| 1,175
|
py
|
Python
|
tests/python/gaia-ui-tests/gaiatest/tests/functional/contacts/test_default_contact_image.py
|
marshall/gaia
|
00722269f5d559595fd2f79d9dd70310758af08c
|
[
"Apache-2.0"
] | 1
|
2019-02-13T23:44:14.000Z
|
2019-02-13T23:44:14.000Z
|
tests/python/gaia-ui-tests/gaiatest/tests/functional/contacts/test_default_contact_image.py
|
marshall/gaia
|
00722269f5d559595fd2f79d9dd70310758af08c
|
[
"Apache-2.0"
] | null | null | null |
tests/python/gaia-ui-tests/gaiatest/tests/functional/contacts/test_default_contact_image.py
|
marshall/gaia
|
00722269f5d559595fd2f79d9dd70310758af08c
|
[
"Apache-2.0"
] | null | null | null |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
try:
from marionette.wait import Wait
except:
from marionette_driver.wait import Wait
from gaiatest import GaiaTestCase
from gaiatest.mocks.mock_contact import MockContact
from gaiatest.apps.contacts.app import Contacts
class TestDefaultContactImage(GaiaTestCase):

    def test_default_contact_image(self):
        """
        https://moztrap.mozilla.org/manage/case/14399/
        """
        # Create a contact without a photo and verify the list falls back
        # to the default image: the first letter of the given name.
        mock = MockContact()

        contacts_app = Contacts(self.marionette)
        contacts_app.launch()

        form = contacts_app.tap_new_contact()
        form.type_given_name(mock['givenName'])
        form.type_family_name(mock['familyName'])
        form.tap_done()

        Wait(self.marionette).until(
            lambda m: len(contacts_app.contacts) == 1)

        expected_letter = mock['givenName'][:1].upper()
        Wait(self.marionette).until(
            lambda m: contacts_app.contacts[0].image_data_group == expected_letter)
| 33.571429
| 104
| 0.718298
|
4a0066c16e7d6278b15c597e8967b5acbc7daa71
| 7,664
|
py
|
Python
|
sdk/python/pulumi_azure_nextgen/web/get_web_app_source_control_slot.py
|
pulumi/pulumi-azure-nextgen
|
452736b0a1cf584c2d4c04666e017af6e9b2c15c
|
[
"Apache-2.0"
] | 31
|
2020-09-21T09:41:01.000Z
|
2021-02-26T13:21:59.000Z
|
sdk/python/pulumi_azure_nextgen/web/get_web_app_source_control_slot.py
|
pulumi/pulumi-azure-nextgen
|
452736b0a1cf584c2d4c04666e017af6e9b2c15c
|
[
"Apache-2.0"
] | 231
|
2020-09-21T09:38:45.000Z
|
2021-03-01T11:16:03.000Z
|
sdk/python/pulumi_azure_nextgen/web/get_web_app_source_control_slot.py
|
pulumi/pulumi-azure-nextgen
|
452736b0a1cf584c2d4c04666e017af6e9b2c15c
|
[
"Apache-2.0"
] | 4
|
2020-09-29T14:14:59.000Z
|
2021-02-10T20:38:16.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
from . import outputs
# Public API of this generated module (used by star-imports and docs).
__all__ = [
    'GetWebAppSourceControlSlotResult',
    'AwaitableGetWebAppSourceControlSlotResult',
    'get_web_app_source_control_slot',
]
@pulumi.output_type
class GetWebAppSourceControlSlotResult:
"""
Source control configuration for an app.
"""
def __init__(__self__, branch=None, deployment_rollback_enabled=None, id=None, is_git_hub_action=None, is_manual_integration=None, is_mercurial=None, kind=None, name=None, repo_url=None, system_data=None, type=None):
if branch and not isinstance(branch, str):
raise TypeError("Expected argument 'branch' to be a str")
pulumi.set(__self__, "branch", branch)
if deployment_rollback_enabled and not isinstance(deployment_rollback_enabled, bool):
raise TypeError("Expected argument 'deployment_rollback_enabled' to be a bool")
pulumi.set(__self__, "deployment_rollback_enabled", deployment_rollback_enabled)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if is_git_hub_action and not isinstance(is_git_hub_action, bool):
raise TypeError("Expected argument 'is_git_hub_action' to be a bool")
pulumi.set(__self__, "is_git_hub_action", is_git_hub_action)
if is_manual_integration and not isinstance(is_manual_integration, bool):
raise TypeError("Expected argument 'is_manual_integration' to be a bool")
pulumi.set(__self__, "is_manual_integration", is_manual_integration)
if is_mercurial and not isinstance(is_mercurial, bool):
raise TypeError("Expected argument 'is_mercurial' to be a bool")
pulumi.set(__self__, "is_mercurial", is_mercurial)
if kind and not isinstance(kind, str):
raise TypeError("Expected argument 'kind' to be a str")
pulumi.set(__self__, "kind", kind)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if repo_url and not isinstance(repo_url, str):
raise TypeError("Expected argument 'repo_url' to be a str")
pulumi.set(__self__, "repo_url", repo_url)
if system_data and not isinstance(system_data, dict):
raise TypeError("Expected argument 'system_data' to be a dict")
pulumi.set(__self__, "system_data", system_data)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def branch(self) -> Optional[str]:
"""
Name of branch to use for deployment.
"""
return pulumi.get(self, "branch")
@property
@pulumi.getter(name="deploymentRollbackEnabled")
def deployment_rollback_enabled(self) -> Optional[bool]:
"""
<code>true</code> to enable deployment rollback; otherwise, <code>false</code>.
"""
return pulumi.get(self, "deployment_rollback_enabled")
@property
@pulumi.getter
def id(self) -> str:
"""
Resource Id.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="isGitHubAction")
def is_git_hub_action(self) -> Optional[bool]:
"""
<code>true</code> if this is deployed via GitHub action.
"""
return pulumi.get(self, "is_git_hub_action")
    @property
    @pulumi.getter(name="isManualIntegration")
    def is_manual_integration(self) -> Optional[bool]:
        """
        <code>true</code> to limit to manual integration; <code>false</code> to enable continuous integration (which configures webhooks into online repos like GitHub).
        """
        # Mapped from the API field "isManualIntegration".
        return pulumi.get(self, "is_manual_integration")
    @property
    @pulumi.getter(name="isMercurial")
    def is_mercurial(self) -> Optional[bool]:
        """
        <code>true</code> for a Mercurial repository; <code>false</code> for a Git repository.
        """
        # Mapped from the API field "isMercurial".
        return pulumi.get(self, "is_mercurial")
    @property
    @pulumi.getter
    def kind(self) -> Optional[str]:
        """
        Kind of resource.
        """
        # Returns the value stored by __init__ via pulumi.set().
        return pulumi.get(self, "kind")
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Resource Name.
        """
        # Required (non-Optional) field; __init__ validates it is a str.
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="repoUrl")
    def repo_url(self) -> Optional[str]:
        """
        Repository or source control URL.
        """
        # Mapped from the API field "repoUrl".
        return pulumi.get(self, "repo_url")
    @property
    @pulumi.getter(name="systemData")
    def system_data(self) -> 'outputs.SystemDataResponse':
        """
        The system metadata relating to this resource.
        """
        # __init__ validates the raw value as a dict; typed per annotation.
        return pulumi.get(self, "system_data")
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Resource type.
        """
        # Required (non-Optional) field; __init__ validates it is a str.
        return pulumi.get(self, "type")
class AwaitableGetWebAppSourceControlSlotResult(GetWebAppSourceControlSlotResult):
    """Awaitable wrapper so the invoke result can be used with ``await``."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable ``yield`` makes this method a generator, which is
        # what ``await`` requires; it resolves immediately with a plain
        # (non-awaitable) copy of the result.
        if False:
            yield self
        return GetWebAppSourceControlSlotResult(
            branch=self.branch,
            deployment_rollback_enabled=self.deployment_rollback_enabled,
            id=self.id,
            is_git_hub_action=self.is_git_hub_action,
            is_manual_integration=self.is_manual_integration,
            is_mercurial=self.is_mercurial,
            kind=self.kind,
            name=self.name,
            repo_url=self.repo_url,
            system_data=self.system_data,
            type=self.type)
def get_web_app_source_control_slot(name: Optional[str] = None,
                                    resource_group_name: Optional[str] = None,
                                    slot: Optional[str] = None,
                                    opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetWebAppSourceControlSlotResult:
    """
    Source control configuration for an app.
    API Version: 2020-10-01.


    :param str name: Name of the app.
    :param str resource_group_name: Name of the resource group to which the resource belongs.
    :param str slot: Name of the deployment slot. If a slot is not specified, the API will get the source control configuration for the production slot.
    :param pulumi.InvokeOptions opts: Options controlling this invoke; a default instance is created when omitted.
    """
    # Build the raw argument dict expected by the provider invoke.
    __args__ = dict()
    __args__['name'] = name
    __args__['resourceGroupName'] = resource_group_name
    __args__['slot'] = slot
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        # Default to this SDK's own version when the caller did not pin one.
        opts.version = _utilities.get_version()
    __ret__ = pulumi.runtime.invoke('azure-nextgen:web:getWebAppSourceControlSlot', __args__, opts=opts, typ=GetWebAppSourceControlSlotResult).value

    return AwaitableGetWebAppSourceControlSlotResult(
        branch=__ret__.branch,
        deployment_rollback_enabled=__ret__.deployment_rollback_enabled,
        id=__ret__.id,
        is_git_hub_action=__ret__.is_git_hub_action,
        is_manual_integration=__ret__.is_manual_integration,
        is_mercurial=__ret__.is_mercurial,
        kind=__ret__.kind,
        name=__ret__.name,
        repo_url=__ret__.repo_url,
        system_data=__ret__.system_data,
        type=__ret__.type)
| 38.129353
| 220
| 0.654489
|
4a00680280f1e6ec29d548a0ce84580a1a74fc95
| 2,130
|
py
|
Python
|
test/test_role_assign.py
|
talon-one/talon_one.py
|
f863bb3c2cc5ddc94d9227adcf14947b2ea7db41
|
[
"MIT"
] | 1
|
2021-03-05T06:41:26.000Z
|
2021-03-05T06:41:26.000Z
|
test/test_role_assign.py
|
talon-one/talon_one.py
|
f863bb3c2cc5ddc94d9227adcf14947b2ea7db41
|
[
"MIT"
] | 1
|
2021-09-07T08:56:58.000Z
|
2021-09-07T08:56:58.000Z
|
test/test_role_assign.py
|
talon-one/talon_one.py
|
f863bb3c2cc5ddc94d9227adcf14947b2ea7db41
|
[
"MIT"
] | 1
|
2019-05-21T10:27:54.000Z
|
2019-05-21T10:27:54.000Z
|
# coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import talon_one
from talon_one.models.role_assign import RoleAssign # noqa: E501
from talon_one.rest import ApiException
class TestRoleAssign(unittest.TestCase):
    """Unit tests for the RoleAssign model."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def make_instance(self, include_optional):
        """Build a RoleAssign instance.

        When ``include_optional`` is False only required params are set;
        when True both required and optional params are set.
        """
        # NOTE(review): for RoleAssign every field is required, so both
        # branches currently construct the same payload.
        if include_optional:
            return RoleAssign(users=[56], roles=[56])
        return RoleAssign(users=[56], roles=[56])

    def testRoleAssign(self):
        """Construct RoleAssign with and without optional params."""
        self.make_instance(include_optional=False)
        self.make_instance(include_optional=True)
# Allow running this test module directly: `python test_role_assign.py`.
if __name__ == '__main__':
    unittest.main()
| 33.28125
| 647
| 0.621127
|
4a0068974650d6578c5b206d53e14858f8e81490
| 363
|
py
|
Python
|
cms/utils/compat/urls.py
|
samirasnoun/django_cms_gallery_image
|
7792aa06a60877d86c022e73b60d0d669e79cb74
|
[
"BSD-3-Clause"
] | 1
|
2019-04-15T10:28:46.000Z
|
2019-04-15T10:28:46.000Z
|
cms/utils/compat/urls.py
|
samirasnoun/django_cms_gallery_image
|
7792aa06a60877d86c022e73b60d0d669e79cb74
|
[
"BSD-3-Clause"
] | 5
|
2021-03-19T15:39:27.000Z
|
2021-09-08T02:47:21.000Z
|
cms/utils/compat/urls.py
|
samirasnoun/django_cms_gallery_image
|
7792aa06a60877d86c022e73b60d0d669e79cb74
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# Import URL helpers from their Python 2 locations first, falling back to
# the Python 3 ``urllib.parse`` module when those imports fail.
try:
    # Python 2
    from urllib import urlencode
    from urlparse import urlparse, urljoin
    from urllib import unquote
except ImportError:
    # Python 3
    from urllib.parse import urlencode  # nopyflakes
    from urllib.parse import urlparse  # nopyflakes
    from urllib.parse import unquote  # nopyflakes
    from urllib.parse import urljoin  # nopyflakes
| 33
| 52
| 0.732782
|
4a006a575496d3d1bdb939ab3ac24730e15fcfeb
| 2,101
|
py
|
Python
|
dvc/command/remove.py
|
amisev/dvc
|
025de9aeb509a539d5560f82caf47e851162f4a2
|
[
"Apache-2.0"
] | 2
|
2019-06-23T14:24:48.000Z
|
2019-07-08T12:22:53.000Z
|
dvc/command/remove.py
|
urantialife/dvc
|
5b018047060e0316207eea6ca818966380f740f0
|
[
"Apache-2.0"
] | null | null | null |
dvc/command/remove.py
|
urantialife/dvc
|
5b018047060e0316207eea6ca818966380f740f0
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import unicode_literals
import argparse
import logging
import dvc.prompt as prompt
from dvc.exceptions import DvcException
from dvc.command.base import CmdBase, append_doc_link
logger = logging.getLogger(__name__)
class CmdRemove(CmdBase):
    """Implements ``dvc remove``: drop outputs and optionally the DVC-file."""

    def _is_outs_only(self, target):
        """Decide whether only outputs should be removed for *target*.

        Returns True when purging was not requested.  When purging, returns
        False if forced or confirmed interactively; otherwise raises
        DvcException.
        """
        if not self.args.purge:
            return True
        if self.args.force:
            return False
        question = "Are you sure you want to remove {} with its outputs?".format(
            target
        )
        if prompt.confirm(question):
            return False
        raise DvcException(
            "Cannot purge without a confirmation from the user."
            " Use '-f' to force."
        )

    def run(self):
        """Remove each target; log the failure and return 1 on error."""
        for target in self.args.targets:
            try:
                self.repo.remove(target, outs_only=self._is_outs_only(target))
            except DvcException:
                logger.exception("failed to remove {}".format(target))
                return 1
        return 0
def add_parser(subparsers, parent_parser):
    """Register the ``remove`` sub-command and its arguments on *subparsers*."""
    REMOVE_HELP = "Remove outputs of DVC-file."
    parser = subparsers.add_parser(
        "remove",
        parents=[parent_parser],
        description=append_doc_link(REMOVE_HELP, "remove"),
        help=REMOVE_HELP,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    # --outs and --purge are mutually exclusive modes.
    mode_group = parser.add_mutually_exclusive_group()
    mode_group.add_argument(
        "-o",
        "--outs",
        action="store_true",
        default=True,
        help="Only remove DVC-file outputs. (Default)",
    )
    mode_group.add_argument(
        "-p",
        "--purge",
        action="store_true",
        default=False,
        help="Remove DVC-file and all its outputs.",
    )
    parser.add_argument(
        "-f",
        "--force",
        action="store_true",
        default=False,
        help="Force purge.",
    )
    parser.add_argument("targets", nargs="+", help="DVC-files.")
    parser.set_defaults(func=CmdRemove)
| 26.935897
| 76
| 0.611614
|
4a006ae8dd1cb5ed1f3d61a48e4699a9c536c3ad
| 1,624
|
py
|
Python
|
problems/test_0685_union_find.py
|
chrisxue815/leetcode_python
|
dec3c160d411a5c19dc8e9d96e7843f0e4c36820
|
[
"Unlicense"
] | 1
|
2017-06-17T23:47:17.000Z
|
2017-06-17T23:47:17.000Z
|
problems/test_0685_union_find.py
|
chrisxue815/leetcode_python
|
dec3c160d411a5c19dc8e9d96e7843f0e4c36820
|
[
"Unlicense"
] | null | null | null |
problems/test_0685_union_find.py
|
chrisxue815/leetcode_python
|
dec3c160d411a5c19dc8e9d96e7843f0e4c36820
|
[
"Unlicense"
] | null | null | null |
import unittest
import utils
def find_root(parents, node):
    """Follow parent links from *node* until reaching a self-parented root."""
    current = node
    while parents[current] != current:
        current = parents[current]
    return current
# O(nlog(n)) time. O(n) space. Union-find.
class Solution:
    def findRedundantDirectedConnection(self, edges):
        """
        :type edges: List[List[int]]
        :rtype: List[int]
        """
        # Pass 1: look for a node with two incoming edges.  candidate_a is
        # the earlier edge into that node, candidate_b the later one.
        parents = [0] * (len(edges) + 1)
        candidate_a = None
        candidate_b = None
        candidate_b_index = -1

        for index, (parent, child) in enumerate(edges):
            if parents[child] == 0:
                parents[child] = parent
            else:
                candidate_a = [parents[child], child]
                candidate_b = [parent, child]
                candidate_b_index = index
                break

        # Pass 2: union-find over all edges, skipping candidate_b.  If an
        # edge closes a cycle, that edge (or candidate_a, when a two-parent
        # node exists) is the redundant one.
        parents = list(range(len(edges) + 1))

        for index, (parent, child) in enumerate(edges):
            if index == candidate_b_index:
                continue
            root = find_root(parents, parent)
            if root != child:
                parents[child] = parent
            else:
                if candidate_a:
                    return candidate_a
                else:
                    return [parent, child]

        # The graph is acyclic without candidate_b, so candidate_b is it.
        return candidate_b
class Test(unittest.TestCase):
    def test(self):
        """Run the JSON-driven cases for findRedundantDirectedConnection."""
        cases = utils.load_test_json(__file__).test_cases
        for case in cases:
            # Stringified args are used as the failure message.
            args = str(case.args)
            actual = Solution().findRedundantDirectedConnection(**case.args.__dict__)
            self.assertEqual(case.expected, actual, msg=args)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| 25.777778
| 85
| 0.54064
|
4a006b102108232ff85e292f6dcd2a733a923aa3
| 3,864
|
py
|
Python
|
Diff/diff.py
|
rvantonder/Packages
|
afbada0dff69d2639e898d27b5a814fd839b8c97
|
[
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | 62
|
2021-01-13T07:09:22.000Z
|
2022-03-14T06:06:10.000Z
|
Diff/diff.py
|
rvantonder/Packages
|
afbada0dff69d2639e898d27b5a814fd839b8c97
|
[
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | 9
|
2018-05-04T19:38:47.000Z
|
2021-11-30T22:51:18.000Z
|
Diff/diff.py
|
rvantonder/Packages
|
afbada0dff69d2639e898d27b5a814fd839b8c97
|
[
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | 14
|
2018-02-21T20:02:04.000Z
|
2022-01-11T15:18:04.000Z
|
import codecs
import difflib
import os.path
import time
import sublime
import sublime_plugin
def splitlines_keep_ends(text):
    """Split *text* on newline characters, keeping the newline at the end
    of each line.

    The final element is the (possibly empty) text after the last newline;
    difflib requires the newlines to be preserved.
    """
    lines = text.split('\n')
    # Re-attach the newline that split() removed to every line but the last.
    lines[:-1] = [line + '\n' for line in lines[:-1]]
    return lines
def read_file_lines(fname):
    """Read *fname* as UTF-8 text and return its lines with endings kept."""
    with open(fname, mode="rt", encoding="utf-8") as handle:
        lines = splitlines_keep_ends(handle.read())
    # difflib doesn't work properly when the file does not end with a
    # newline (https://bugs.python.org/issue2142), so we append a warning
    # marker ourselves.
    add_no_eol_warning_if_applicable(lines)
    return lines
def add_no_eol_warning_if_applicable(lines):
    """Append a "no newline at end of file" marker to *lines*, in place.

    Only applies when the final line is non-empty, i.e. the original text
    did not end with a newline.  The marker is merged into the last line so
    a diff shows the warning together with that line.
    """
    if not lines:
        return
    last = lines[-1]
    if last:
        lines[-1] = last + '\n\\ No newline at end of file\n'
class DiffFilesCommand(sublime_plugin.WindowCommand):
    """Side-bar command that diffs two selected files into a scratch view."""

    def run(self, files):
        """Show a unified diff of files[1] -> files[0] in a new view."""
        if len(files) != 2:
            return
        old_path, new_path = files[1], files[0]
        try:
            old_lines = read_file_lines(old_path)
            new_lines = read_file_lines(new_path)
        except UnicodeDecodeError:
            sublime.status_message("Diff only works with UTF-8 files")
            return

        old_date = time.ctime(os.stat(old_path).st_mtime)
        new_date = time.ctime(os.stat(new_path).st_mtime)
        difftxt = u"".join(difflib.unified_diff(
            old_lines, new_lines, old_path, new_path, old_date, new_date))

        if difftxt == "":
            sublime.status_message("Files are identical")
            return

        view = self.window.new_file()
        view.set_name(os.path.basename(old_path) + " -> " + os.path.basename(new_path))
        view.set_scratch(True)
        view.assign_syntax('Packages/Diff/Diff.sublime-syntax')
        view.run_command('append', {'characters': difftxt, 'disable_tab_translation': True})

    def is_visible(self, files):
        """Only offer the command when exactly two files are selected."""
        return len(files) == 2
class DiffChangesCommand(sublime_plugin.TextCommand):
    """Diff the view's unsaved buffer contents against the file on disk."""

    def run(self, edit):
        fname = self.view.file_name()
        if not fname or not os.path.exists(fname):
            sublime.status_message("Unable to diff changes because the file does not exist")
            return

        try:
            a = read_file_lines(fname)
        except UnicodeDecodeError:
            sublime.status_message("Diff only works with UTF-8 files")
            return

        # Buffer contents, with the same no-EOL marker applied so the
        # comparison matches what read_file_lines() produced for disk data.
        b = splitlines_keep_ends(self.view.substr(sublime.Region(0, self.view.size())))
        add_no_eol_warning_if_applicable(b)

        adate = time.ctime(os.stat(fname).st_mtime)
        bdate = time.ctime()
        diff = difflib.unified_diff(a, b, fname, fname, adate, bdate)
        difftxt = u"".join(line for line in diff)

        if difftxt == "":
            sublime.status_message("No changes")
            return

        # Output target is user-configurable: scratch buffer or panel.
        use_buffer = self.view.settings().get('diff_changes_to_buffer')

        if use_buffer:
            v = self.view.window().new_file()
            v.set_name("Unsaved Changes: " + os.path.basename(self.view.file_name()))
            v.set_scratch(True)
            v.assign_syntax('Packages/Diff/Diff.sublime-syntax')
        else:
            win = self.view.window()
            v = win.create_output_panel('unsaved_changes')
            v.assign_syntax('Packages/Diff/Diff.sublime-syntax')

        # Inherit word wrap from the source view, then show the result.
        v.settings().set('word_wrap', self.view.settings().get('word_wrap'))
        v.run_command('append', {'characters': difftxt, 'disable_tab_translation': True})
        if not use_buffer:
            win.run_command("show_panel", {"panel": "output.unsaved_changes"})

    def is_enabled(self):
        # Active only for dirty views that are backed by a file.
        return self.view.is_dirty() and self.view.file_name() is not None
| 31.16129
| 93
| 0.620342
|
4a006bb6ed9fefeefb13a219236f3d74a17cc587
| 720
|
py
|
Python
|
derek.py
|
milmd90/TwitterBot
|
bc0df4f289e6b4eb3d5c0f0144d1b11280e3735c
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
derek.py
|
milmd90/TwitterBot
|
bc0df4f289e6b4eb3d5c0f0144d1b11280e3735c
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
derek.py
|
milmd90/TwitterBot
|
bc0df4f289e6b4eb3d5c0f0144d1b11280e3735c
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
import sys
import wolfram_api
import mongo
def ask(id, user, text):
    """Split *text* into sentences and run command() on each, collecting
    the per-sentence responses into a list."""
    return [command(id, user, sentence) for sentence in get_sentences(text)]
def command(id, user, sent):
    """Dispatch a single sentence to the matching backend.

    The first word selects the action ('Wolfram', 'Insert' or 'Find'); the
    remainder of the sentence is forwarded as the message.  Returns the
    backend's response, or None for an unknown action.
    """
    words = get_words(sent)
    action = words[0]
    msg = ' '.join(words[1:])

    if action == 'Wolfram':
        return wolfram_api.query(msg)
    elif action == 'Insert':
        return mongo.insert(id, user, msg)
    elif action == 'Find':
        return mongo.find(user)
    else:
        # Fixed typo ('Invlid' -> 'Invalid'); print() works on Python 2 & 3.
        print('Invalid request')
#helpers
def get_sentences(text):
    """Split *text* into sentences, breaking after '.', '?' and '!'."""
    marked = text.replace(".", ".\n").replace("?", "?\n").replace("!", "!\n")
    return filter(bool, marked.split('\n'))
def get_words(sentence):
    """Return the space-separated words of *sentence*, commas stripped."""
    cleaned = sentence.replace(',', '')
    return cleaned.split(' ')
| 21.176471
| 98
| 0.651389
|
4a006bc85d4d93fcb84c91d6db118eb79be13d59
| 85,698
|
py
|
Python
|
django/db/models/fields/__init__.py
|
August1996/Django
|
d392fc293c9439c19451e152f9560f24d1659563
|
[
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null |
django/db/models/fields/__init__.py
|
August1996/Django
|
d392fc293c9439c19451e152f9560f24d1659563
|
[
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null |
django/db/models/fields/__init__.py
|
August1996/Django
|
d392fc293c9439c19451e152f9560f24d1659563
|
[
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null |
import collections
import copy
import datetime
import decimal
import itertools
import operator
import uuid
import warnings
from base64 import b64decode, b64encode
from functools import partialmethod, total_ordering
from django import forms
from django.apps import apps
from django.conf import settings
from django.core import checks, exceptions, validators
# When the _meta object was formalized, this exception was moved to
# django.core.exceptions. It is retained here for backwards compatibility
# purposes.
from django.core.exceptions import FieldDoesNotExist # NOQA
from django.db import connection, connections, router
from django.db.models.constants import LOOKUP_SEP
from django.db.models.query_utils import DeferredAttribute, RegisterLookupMixin
from django.utils import timezone
from django.utils.datastructures import DictWrapper
from django.utils.dateparse import (
parse_date, parse_datetime, parse_duration, parse_time,
)
from django.utils.duration import duration_string
from django.utils.encoding import force_bytes, smart_text
from django.utils.functional import Promise, cached_property
from django.utils.ipv6 import clean_ipv6_address
from django.utils.itercompat import is_iterable
from django.utils.text import capfirst
from django.utils.translation import gettext_lazy as _
# Names exported by ``from django.db.models.fields import *``.
__all__ = [
    'AutoField', 'BLANK_CHOICE_DASH', 'BigAutoField', 'BigIntegerField',
    'BinaryField', 'BooleanField', 'CharField', 'CommaSeparatedIntegerField',
    'DateField', 'DateTimeField', 'DecimalField', 'DurationField',
    'EmailField', 'Empty', 'Field', 'FieldDoesNotExist', 'FilePathField',
    'FloatField', 'GenericIPAddressField', 'IPAddressField', 'IntegerField',
    'NOT_PROVIDED', 'NullBooleanField', 'PositiveIntegerField',
    'PositiveSmallIntegerField', 'SlugField', 'SmallIntegerField', 'TextField',
    'TimeField', 'URLField', 'UUIDField',
]
class Empty:
    """Bare shell class; used by _empty() and Field.__copy__ to create
    instances without running any __init__."""
    pass
class NOT_PROVIDED:
    """Sentinel marking 'no default given' (see Field's ``default`` kwarg)."""
    pass


# The values to use for "blank" in SelectFields. Will be appended to the start
# of most "choices" lists.
BLANK_CHOICE_DASH = [("", "---------")]
def _load_field(app_label, model_name, field_name):
    """Look a field up through the app registry (unpickling helper used by
    Field.__reduce__)."""
    return apps.get_model(app_label, model_name)._meta.get_field(field_name)
# A guide to Field parameters:
#
# * name: The name of the field specified in the model.
# * attname: The attribute to use on the model object. This is the same as
# "name", except in the case of ForeignKeys, where "_id" is
# appended.
# * db_column: The db_column specified in the model (or None).
# * column: The database column for this field. This is the same as
# "attname", except if db_column is specified.
#
# Code that introspects values, or does other dynamic things, should use
# attname. For example, this gets the primary key value of object "obj":
#
# getattr(obj, opts.pk.attname)
def _empty(of_cls):
    """Create an instance of *of_cls* without calling its __init__.

    Used as the pickle reconstructor in Field.__reduce__ for fields that
    are not attached to a model.
    """
    new = Empty()
    new.__class__ = of_cls
    return new
def return_None():
    """Return None.

    NOTE(review): a named function (rather than a lambda) is presumably
    used so it can be referenced/pickled; usage is not visible here.
    """
    return None
@total_ordering
class Field(RegisterLookupMixin):
"""Base class for all field types"""
# Designates whether empty strings fundamentally are allowed at the
# database level.
empty_strings_allowed = True
empty_values = list(validators.EMPTY_VALUES)
# These track each time a Field instance is created. Used to retain order.
# The auto_creation_counter is used for fields that Django implicitly
# creates, creation_counter is used for all user-specified fields.
creation_counter = 0
auto_creation_counter = -1
default_validators = [] # Default set of validators
default_error_messages = {
'invalid_choice': _('Value %(value)r is not a valid choice.'),
'null': _('This field cannot be null.'),
'blank': _('This field cannot be blank.'),
'unique': _('%(model_name)s with this %(field_label)s '
'already exists.'),
# Translators: The 'lookup_type' is one of 'date', 'year' or 'month'.
# Eg: "Title must be unique for pub_date year"
'unique_for_date': _("%(field_label)s must be unique for "
"%(date_field_label)s %(lookup_type)s."),
}
system_check_deprecated_details = None
system_check_removed_details = None
# Field flags
hidden = False
many_to_many = None
many_to_one = None
one_to_many = None
one_to_one = None
related_model = None
    # Generic field type description, usually overridden by subclasses
    def _description(self):
        """Return a translated, human-readable description of the field type."""
        return _('Field of type: %(field_type)s') % {
            'field_type': self.__class__.__name__
        }
    description = property(_description)
    def __init__(self, verbose_name=None, name=None, primary_key=False,
                 max_length=None, unique=False, blank=False, null=False,
                 db_index=False, rel=None, default=NOT_PROVIDED, editable=True,
                 serialize=True, unique_for_date=None, unique_for_month=None,
                 unique_for_year=None, choices=None, help_text='', db_column=None,
                 db_tablespace=None, auto_created=False, validators=(),
                 error_messages=None):
        """Store the field's configuration and register its creation order."""
        self.name = name
        self.verbose_name = verbose_name  # May be set by set_attributes_from_name
        self._verbose_name = verbose_name  # Store original for deconstruction
        self.primary_key = primary_key
        self.max_length, self._unique = max_length, unique
        self.blank, self.null = blank, null
        self.remote_field = rel
        self.is_relation = self.remote_field is not None
        self.default = default
        self.editable = editable
        self.serialize = serialize
        self.unique_for_date = unique_for_date
        self.unique_for_month = unique_for_month
        self.unique_for_year = unique_for_year
        # Materialize one-shot iterators so choices can be iterated repeatedly.
        if isinstance(choices, collections.Iterator):
            choices = list(choices)
        self.choices = choices or []
        self.help_text = help_text
        self.db_index = db_index
        self.db_column = db_column
        self._db_tablespace = db_tablespace
        self.auto_created = auto_created

        # Adjust the appropriate creation counter, and save our local copy.
        if auto_created:
            self.creation_counter = Field.auto_creation_counter
            Field.auto_creation_counter -= 1
        else:
            self.creation_counter = Field.creation_counter
            Field.creation_counter += 1

        self._validators = list(validators)  # Store for deconstruction later

        # Merge error messages walking the MRO from base to derived, so a
        # subclass's messages override its parents', and explicit
        # error_messages override everything.
        messages = {}
        for c in reversed(self.__class__.__mro__):
            messages.update(getattr(c, 'default_error_messages', {}))
        messages.update(error_messages or {})
        self._error_messages = error_messages  # Store for deconstruction later
        self.error_messages = messages
def __str__(self):
"""
Return "app_label.model_label.field_name" for fields attached to
models.
"""
if not hasattr(self, 'model'):
return super().__str__()
model = self.model
app = model._meta.app_label
return '%s.%s.%s' % (app, model._meta.object_name, self.name)
def __repr__(self):
"""Display the module, class, and name of the field."""
path = '%s.%s' % (self.__class__.__module__, self.__class__.__qualname__)
name = getattr(self, 'name', None)
if name is not None:
return '<%s: %s>' % (path, name)
return '<%s>' % path
def check(self, **kwargs):
errors = []
errors.extend(self._check_field_name())
errors.extend(self._check_choices())
errors.extend(self._check_db_index())
errors.extend(self._check_null_allowed_for_primary_keys())
errors.extend(self._check_backend_specific_checks(**kwargs))
errors.extend(self._check_validators())
errors.extend(self._check_deprecation_details())
return errors
def _check_field_name(self):
"""
Check if field name is valid, i.e. 1) does not end with an
underscore, 2) does not contain "__" and 3) is not "pk".
"""
if self.name.endswith('_'):
return [
checks.Error(
'Field names must not end with an underscore.',
obj=self,
id='fields.E001',
)
]
elif LOOKUP_SEP in self.name:
return [
checks.Error(
'Field names must not contain "%s".' % (LOOKUP_SEP,),
obj=self,
id='fields.E002',
)
]
elif self.name == 'pk':
return [
checks.Error(
"'pk' is a reserved word that cannot be used as a field name.",
obj=self,
id='fields.E003',
)
]
else:
return []
def _check_choices(self):
if self.choices:
if isinstance(self.choices, str) or not is_iterable(self.choices):
return [
checks.Error(
"'choices' must be an iterable (e.g., a list or tuple).",
obj=self,
id='fields.E004',
)
]
elif any(isinstance(choice, str) or
not is_iterable(choice) or len(choice) != 2
for choice in self.choices):
return [
checks.Error(
"'choices' must be an iterable containing "
"(actual value, human readable name) tuples.",
obj=self,
id='fields.E005',
)
]
else:
return []
else:
return []
def _check_db_index(self):
if self.db_index not in (None, True, False):
return [
checks.Error(
"'db_index' must be None, True or False.",
obj=self,
id='fields.E006',
)
]
else:
return []
def _check_null_allowed_for_primary_keys(self):
if (self.primary_key and self.null and
not connection.features.interprets_empty_strings_as_nulls):
# We cannot reliably check this for backends like Oracle which
# consider NULL and '' to be equal (and thus set up
# character-based fields a little differently).
return [
checks.Error(
'Primary keys must not have null=True.',
hint=('Set null=False on the field, or '
'remove primary_key=True argument.'),
obj=self,
id='fields.E007',
)
]
else:
return []
    def _check_backend_specific_checks(self, **kwargs):
        """Run this field through a database backend's validation checks.

        NOTE(review): returns after the first alias the router allows
        migrating to, so at most one connection's checks run.
        """
        app_label = self.model._meta.app_label
        for db in connections:
            if router.allow_migrate(db, app_label, model_name=self.model._meta.model_name):
                return connections[db].validation.check_field(self, **kwargs)
        return []
def _check_validators(self):
errors = []
for i, validator in enumerate(self.validators):
if not callable(validator):
errors.append(
checks.Error(
"All 'validators' must be callable.",
hint=(
"validators[{i}] ({repr}) isn't a function or "
"instance of a validator class.".format(
i=i, repr=repr(validator),
)
),
obj=self,
id='fields.E008',
)
)
return errors
    def _check_deprecation_details(self):
        """Emit the Error/Warning described by the ``system_check_removed_details``
        or ``system_check_deprecated_details`` class attributes, if set."""
        if self.system_check_removed_details is not None:
            return [
                checks.Error(
                    self.system_check_removed_details.get(
                        'msg',
                        '%s has been removed except for support in historical '
                        'migrations.' % self.__class__.__name__
                    ),
                    hint=self.system_check_removed_details.get('hint'),
                    obj=self,
                    id=self.system_check_removed_details.get('id', 'fields.EXXX'),
                )
            ]
        elif self.system_check_deprecated_details is not None:
            return [
                checks.Warning(
                    self.system_check_deprecated_details.get(
                        'msg',
                        '%s has been deprecated.' % self.__class__.__name__
                    ),
                    hint=self.system_check_deprecated_details.get('hint'),
                    obj=self,
                    id=self.system_check_deprecated_details.get('id', 'fields.WXXX'),
                )
            ]
        return []
    def get_col(self, alias, output_field=None):
        """Return a Col expression for this field under *alias*.

        Reuses the cached Col when the alias is the model's own table and
        the output field is this field itself.
        """
        if output_field is None:
            output_field = self
        if alias != self.model._meta.db_table or output_field != self:
            from django.db.models.expressions import Col
            return Col(alias, self, output_field)
        else:
            return self.cached_col
    @cached_property
    def cached_col(self):
        """Memoized Col for this field on the model's own table."""
        from django.db.models.expressions import Col
        return Col(self.model._meta.db_table, self)
    def select_format(self, compiler, sql, params):
        """
        Custom format for select clauses. For example, GIS columns need to be
        selected as AsText(table.col) on MySQL as the table.col data can't be
        used by Django.

        The base implementation returns *sql* and *params* unchanged.
        """
        return sql, params
    def deconstruct(self):
        """
        Return enough information to recreate the field as a 4-tuple:

         * The name of the field on the model, if contribute_to_class() has
           been run.
         * The import path of the field, including the class:e.g.
           django.db.models.IntegerField This should be the most portable
           version, so less specific may be better.
         * A list of positional arguments.
         * A dict of keyword arguments.

        Note that the positional or keyword arguments must contain values of
        the following types (including inner values of collection types):

         * None, bool, str, int, float, complex, set, frozenset, list, tuple,
           dict
         * UUID
         * datetime.datetime (naive), datetime.date
         * top-level classes, top-level functions - will be referenced by their
           full import path
         * Storage instances - these have their own deconstruct() method

        This is because the values here must be serialized into a text format
        (possibly new Python code, possibly JSON) and these are the only types
        with encoding handlers defined.

        There's no need to return the exact way the field was instantiated this
        time, just ensure that the resulting field is the same - prefer keyword
        arguments over positional ones, and omit parameters with their default
        values.
        """
        # Short-form way of fetching all the default parameters
        keywords = {}
        possibles = {
            "verbose_name": None,
            "primary_key": False,
            "max_length": None,
            "unique": False,
            "blank": False,
            "null": False,
            "db_index": False,
            "default": NOT_PROVIDED,
            "editable": True,
            "serialize": True,
            "unique_for_date": None,
            "unique_for_month": None,
            "unique_for_year": None,
            "choices": [],
            "help_text": '',
            "db_column": None,
            "db_tablespace": None,
            "auto_created": False,
            "validators": [],
            "error_messages": None,
        }
        # Some constructor kwargs are stored under different attribute names
        # (the raw, as-passed values saved in __init__).
        attr_overrides = {
            "unique": "_unique",
            "error_messages": "_error_messages",
            "validators": "_validators",
            "verbose_name": "_verbose_name",
            "db_tablespace": "_db_tablespace",
        }
        equals_comparison = {"choices", "validators"}
        for name, default in possibles.items():
            value = getattr(self, attr_overrides.get(name, name))
            # Unroll anything iterable for choices into a concrete list
            if name == "choices" and isinstance(value, collections.Iterable):
                value = list(value)
            # Do correct kind of comparison: mutable containers by equality,
            # everything else (incl. sentinels) by identity.
            if name in equals_comparison:
                if value != default:
                    keywords[name] = value
            else:
                if value is not default:
                    keywords[name] = value
        # Work out path - we shorten it for known Django core fields
        path = "%s.%s" % (self.__class__.__module__, self.__class__.__qualname__)
        if path.startswith("django.db.models.fields.related"):
            path = path.replace("django.db.models.fields.related", "django.db.models")
        if path.startswith("django.db.models.fields.files"):
            path = path.replace("django.db.models.fields.files", "django.db.models")
        if path.startswith("django.db.models.fields.proxy"):
            path = path.replace("django.db.models.fields.proxy", "django.db.models")
        if path.startswith("django.db.models.fields"):
            path = path.replace("django.db.models.fields", "django.db.models")
        # Return basic info - other fields should override this.
        return (self.name, path, [], keywords)
def clone(self):
"""
Uses deconstruct() to clone a new copy of this Field.
Will not preserve any class attachments/attribute names.
"""
name, path, args, kwargs = self.deconstruct()
return self.__class__(*args, **kwargs)
def __eq__(self, other):
# Needed for @total_ordering
if isinstance(other, Field):
return self.creation_counter == other.creation_counter
return NotImplemented
def __lt__(self, other):
# This is needed because bisect does not take a comparison function.
if isinstance(other, Field):
return self.creation_counter < other.creation_counter
return NotImplemented
    def __hash__(self):
        """Hash on creation_counter, consistent with __eq__."""
        return hash(self.creation_counter)
    def __deepcopy__(self, memodict):
        # We don't have to deepcopy very much here, since most things are not
        # intended to be altered after initial creation.
        obj = copy.copy(self)
        if self.remote_field:
            obj.remote_field = copy.copy(self.remote_field)
            # Re-point the copied relation back at the copied field so the
            # pair stays internally consistent.
            if hasattr(self.remote_field, 'field') and self.remote_field.field is self:
                obj.remote_field.field = obj
        memodict[id(self)] = obj
        return obj
    def __copy__(self):
        # We need to avoid hitting __reduce__, so define this
        # slightly weird copy construct.
        # (Empty + __class__ reassignment skips __init__ entirely.)
        obj = Empty()
        obj.__class__ = self.__class__
        obj.__dict__ = self.__dict__.copy()
        return obj
    def __reduce__(self):
        """
        Pickling should return the model._meta.fields instance of the field,
        not a new copy of that field. So, use the app registry to load the
        model and then the field back.
        """
        if not hasattr(self, 'model'):
            # Fields are sometimes used without attaching them to models (for
            # example in aggregation). In this case give back a plain field
            # instance. The code below will create a new empty instance of
            # class self.__class__, then update its dict with self.__dict__
            # values - so, this is very close to normal pickle.
            state = self.__dict__.copy()
            # The _get_default cached_property can't be pickled due to lambda
            # usage.
            state.pop('_get_default', None)
            return _empty, (self.__class__,), state
        return _load_field, (self.model._meta.app_label, self.model._meta.object_name,
                             self.name)
def get_pk_value_on_save(self, instance):
"""
Hook to generate new PK values on save. This method is called when
saving instances with no primary key value set. If this method returns
something else than None, then the returned value is used when saving
the new instance.
"""
if self.default:
return self.get_default()
return None
    def to_python(self, value):
        """
        Convert the input value into the expected Python data type, raising
        django.core.exceptions.ValidationError if the data can't be converted.
        Return the converted value. Subclasses should override this.

        The base implementation is the identity function.
        """
        return value
    @cached_property
    def validators(self):
        """
        Some validators can't be created at field initialization time.
        This method provides a way to delay their creation until required.
        """
        # Class-level default_validators first, then per-instance validators.
        return list(itertools.chain(self.default_validators, self._validators))
    def run_validators(self, value):
        """Run all field validators against value, collecting every error
        before raising a single ValidationError. Empty values are skipped."""
        if value in self.empty_values:
            return
        errors = []
        for v in self.validators:
            try:
                v(value)
            except exceptions.ValidationError as e:
                # Let the field's own error_messages override the validator's
                # message when the error code matches.
                if hasattr(e, 'code') and e.code in self.error_messages:
                    e.message = self.error_messages[e.code]
                errors.extend(e.error_list)
        if errors:
            raise exceptions.ValidationError(errors)
    def validate(self, value, model_instance):
        """
        Validate value and raise ValidationError if necessary. Subclasses
        should override this to provide validation logic.
        """
        if not self.editable:
            # Skip validation for non-editable fields.
            return
        if self.choices and value not in self.empty_values:
            for option_key, option_value in self.choices:
                if isinstance(option_value, (list, tuple)):
                    # This is an optgroup, so look inside the group for
                    # options.
                    for optgroup_key, optgroup_value in option_value:
                        if value == optgroup_key:
                            return
                elif value == option_key:
                    return
            # Value matched none of the declared choices (or group members).
            raise exceptions.ValidationError(
                self.error_messages['invalid_choice'],
                code='invalid_choice',
                params={'value': value},
            )
        if value is None and not self.null:
            raise exceptions.ValidationError(self.error_messages['null'], code='null')
        if not self.blank and value in self.empty_values:
            raise exceptions.ValidationError(self.error_messages['blank'], code='blank')
    def clean(self, value, model_instance):
        """
        Convert the value's type and run validation. Validation errors
        from to_python() and validate() are propagated. Return the correct
        value if no error is raised.
        """
        # Order matters: coerce first so validators see the Python value.
        value = self.to_python(value)
        self.validate(value, model_instance)
        self.run_validators(value)
        return value
    def db_type_parameters(self, connection):
        """Return a dict of this field's attributes (with quoted variants
        prefixed 'qn_') for interpolation into backend data-type strings."""
        return DictWrapper(self.__dict__, connection.ops.quote_name, 'qn_')
    def db_check(self, connection):
        """
        Return the database column check constraint for this field, for the
        provided connection. Works the same way as db_type() for the case that
        get_internal_type() does not map to a preexisting model field.
        """
        data = self.db_type_parameters(connection)
        try:
            return connection.data_type_check_constraints[self.get_internal_type()] % data
        except KeyError:
            # No check constraint registered for this internal type.
            return None
    def db_type(self, connection):
        """
        Return the database column data type for this field, for the provided
        connection. Return None if the internal type has no registered
        backend column type.
        """
        # The default implementation of this method looks at the
        # backend-specific data_types dictionary, looking up the field by its
        # "internal type".
        #
        # A Field class can implement the get_internal_type() method to specify
        # which *preexisting* Django Field class it's most similar to -- i.e.,
        # a custom field might be represented by a TEXT column type, which is
        # the same as the TextField Django field type, which means the custom
        # field's get_internal_type() returns 'TextField'.
        #
        # But the limitation of the get_internal_type() / data_types approach
        # is that it cannot handle database column types that aren't already
        # mapped to one of the built-in Django field types. In this case, you
        # can implement db_type() instead of get_internal_type() to specify
        # exactly which wacky database column type you want to use.
        data = self.db_type_parameters(connection)
        try:
            return connection.data_types[self.get_internal_type()] % data
        except KeyError:
            return None
    def rel_db_type(self, connection):
        """
        Return the data type that a related field pointing to this field should
        use. For example, this method is called by ForeignKey and OneToOneField
        to determine its data type.
        """
        # By default a pointing field uses the same column type as the target.
        return self.db_type(connection)
    def cast_db_type(self, connection):
        """Return the data type to use in the Cast() function."""
        db_type = connection.ops.cast_data_types.get(self.get_internal_type())
        if db_type:
            return db_type % self.db_type_parameters(connection)
        # No cast-specific override registered: fall back to the column type.
        return self.db_type(connection)
def db_parameters(self, connection):
"""
Extension of db_type(), providing a range of different return values
(type, checks). This will look at db_type(), allowing custom model
fields to override it.
"""
type_string = self.db_type(connection)
check_string = self.db_check(connection)
return {
"type": type_string,
"check": check_string,
}
    def db_type_suffix(self, connection):
        """Return the backend-specific suffix (e.g. AUTOINCREMENT) for this
        field's column definition, or None if the backend defines none."""
        return connection.data_types_suffix.get(self.get_internal_type())
def get_db_converters(self, connection):
if hasattr(self, 'from_db_value'):
return [self.from_db_value]
return []
    @property
    def unique(self):
        """True when the field was declared unique or is the primary key
        (primary keys are implicitly unique)."""
        return self._unique or self.primary_key
    @property
    def db_tablespace(self):
        """The tablespace for this field's index, falling back to the
        project-wide DEFAULT_INDEX_TABLESPACE setting."""
        return self._db_tablespace or settings.DEFAULT_INDEX_TABLESPACE
    def set_attributes_from_name(self, name):
        """Derive name, attname, column, concreteness and verbose_name from
        the attribute name the field was assigned to on the model class."""
        if not self.name:
            self.name = name
        self.attname, self.column = self.get_attname_column()
        # A field without a column (e.g. purely virtual) is not concrete.
        self.concrete = self.column is not None
        if self.verbose_name is None and self.name:
            # Default human-readable name: underscores become spaces.
            self.verbose_name = self.name.replace('_', ' ')
    def contribute_to_class(self, cls, name, private_only=False):
        """
        Register the field with the model class it belongs to.
        If private_only is True, create a separate instance of this field
        for every subclass of cls, even if cls is not an abstract model.
        """
        self.set_attributes_from_name(name)
        self.model = cls
        if private_only:
            cls._meta.add_field(self, private=True)
        else:
            cls._meta.add_field(self)
        if self.column:
            # Don't override classmethods with the descriptor. This means that
            # if you have a classmethod and a field with the same name, then
            # such fields can't be deferred (we don't have a check for this).
            if not getattr(cls, self.attname, None):
                setattr(cls, self.attname, DeferredAttribute(self.attname, cls))
        if self.choices:
            # Expose the conventional get_FOO_display() helper on the model.
            setattr(cls, 'get_%s_display' % self.name,
                    partialmethod(cls._get_FIELD_display, field=self))
    def get_filter_kwargs_for_object(self, obj):
        """
        Return a dict that when passed as kwargs to self.model.filter(), would
        yield all instances having the same value for this field as obj has.
        """
        # Filter key is the field name; the value is read off the attname.
        return {self.name: getattr(obj, self.attname)}
    def get_attname(self):
        """Return the attribute name used on model instances (relation fields
        override this to append '_id')."""
        return self.name
def get_attname_column(self):
attname = self.get_attname()
column = self.db_column or attname
return attname, column
    def get_internal_type(self):
        """Return the key used to look up backend column types; defaults to
        the concrete class name."""
        return self.__class__.__name__
    def pre_save(self, model_instance, add):
        """Return field's value just before saving."""
        return getattr(model_instance, self.attname)
    def get_prep_value(self, value):
        """Perform preliminary non-db specific value checks and conversions."""
        # Resolve lazy translation/lazy objects to their concrete value.
        if isinstance(value, Promise):
            value = value._proxy____cast()
        return value
    def get_db_prep_value(self, value, connection, prepared=False):
        """
        Return field's value prepared for interacting with the database backend.
        Used by the default implementations of get_db_prep_save().
        """
        # prepared=True means get_prep_value() already ran; don't repeat it.
        if not prepared:
            value = self.get_prep_value(value)
        return value
    def get_db_prep_save(self, value, connection):
        """Return field's value prepared for saving into a database."""
        return self.get_db_prep_value(value, connection=connection, prepared=False)
    def has_default(self):
        """Return a boolean of whether this field has a default value."""
        # NOT_PROVIDED is the sentinel meaning "no default was declared".
        return self.default is not NOT_PROVIDED
    def get_default(self):
        """Return the default value for this field."""
        # _get_default is a cached_property holding a zero-arg callable.
        return self._get_default()
    @cached_property
    def _get_default(self):
        """Return a zero-argument callable producing this field's default:
        the declared default, None, or the empty string."""
        if self.has_default():
            if callable(self.default):
                return self.default
            return lambda: self.default
        # Precedence: `not empty_strings_allowed` OR (`null` AND backend does
        # not treat '' as NULL) -- either way None is the right default.
        if not self.empty_strings_allowed or self.null and not connection.features.interprets_empty_strings_as_nulls:
            return return_None
        return str  # return empty string
    def get_choices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH, limit_choices_to=None):
        """
        Return choices with a default blank choices included, for use
        as <select> choices for this field.
        """
        if self.choices:
            choices = list(self.choices)
            if include_blank:
                # Only prepend the blank choice if the flat (non-grouped)
                # choices don't already provide a ''/None option.
                named_groups = isinstance(choices[0][1], (list, tuple))
                blank_defined = not named_groups and any(choice in ('', None) for choice, __ in choices)
                if not blank_defined:
                    choices = blank_choice + choices
            return choices
        # No static choices: build them from the related model's instances.
        rel_model = self.remote_field.model
        limit_choices_to = limit_choices_to or self.get_limit_choices_to()
        choice_func = operator.attrgetter(
            self.remote_field.get_related_field().attname
            if hasattr(self.remote_field, 'get_related_field')
            else 'pk'
        )
        return (blank_choice if include_blank else []) + [
            (choice_func(x), smart_text(x))
            for x in rel_model._default_manager.complex_filter(limit_choices_to)
        ]
    def value_to_string(self, obj):
        """
        Return a string value of this field from the passed obj.
        This is used by the serialization framework.
        """
        return str(self.value_from_object(obj))
def _get_flatchoices(self):
"""Flattened version of choices tuple."""
flat = []
for choice, value in self.choices:
if isinstance(value, (list, tuple)):
flat.extend(value)
else:
flat.append((choice, value))
return flat
flatchoices = property(_get_flatchoices)
    def save_form_data(self, instance, data):
        """Store a form-cleaned value onto the model instance attribute."""
        setattr(instance, self.name, data)
    def formfield(self, form_class=None, choices_form_class=None, **kwargs):
        """Return a django.forms.Field instance for this field."""
        defaults = {'required': not self.blank,
                    'label': capfirst(self.verbose_name),
                    'help_text': self.help_text}
        if self.has_default():
            if callable(self.default):
                # Keep the callable so it's evaluated at form render time,
                # and render a hidden initial to detect changes.
                defaults['initial'] = self.default
                defaults['show_hidden_initial'] = True
            else:
                defaults['initial'] = self.get_default()
        if self.choices:
            # Fields with choices get special treatment.
            include_blank = (self.blank or
                             not (self.has_default() or 'initial' in kwargs))
            defaults['choices'] = self.get_choices(include_blank=include_blank)
            defaults['coerce'] = self.to_python
            if self.null:
                defaults['empty_value'] = None
            if choices_form_class is not None:
                form_class = choices_form_class
            else:
                form_class = forms.TypedChoiceField
            # Many of the subclass-specific formfield arguments (min_value,
            # max_value) don't apply for choice fields, so be sure to only pass
            # the values that TypedChoiceField will understand.
            for k in list(kwargs):
                if k not in ('coerce', 'empty_value', 'choices', 'required',
                             'widget', 'label', 'initial', 'help_text',
                             'error_messages', 'show_hidden_initial', 'disabled'):
                    del kwargs[k]
        # Caller kwargs override the computed defaults.
        defaults.update(kwargs)
        if form_class is None:
            form_class = forms.CharField
        return form_class(**defaults)
    def value_from_object(self, obj):
        """Return the value of this field in the given model instance."""
        return getattr(obj, self.attname)
class AutoField(Field):
    """Integer primary-key field whose value is assigned by the database."""
    description = _("Integer")
    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _("'%(value)s' value must be an integer."),
    }
    def __init__(self, *args, **kwargs):
        # Auto PKs are never user-supplied, so blank is forced on.
        kwargs['blank'] = True
        super().__init__(*args, **kwargs)
    def check(self, **kwargs):
        errors = super().check(**kwargs)
        errors.extend(self._check_primary_key())
        return errors
    def _check_primary_key(self):
        # System check: an AutoField only makes sense as the primary key.
        if not self.primary_key:
            return [
                checks.Error(
                    'AutoFields must set primary_key=True.',
                    obj=self,
                    id='fields.E100',
                ),
            ]
        else:
            return []
    def deconstruct(self):
        name, path, args, kwargs = super().deconstruct()
        # blank=True is implied by __init__; primary_key must be serialized.
        del kwargs['blank']
        kwargs['primary_key'] = True
        return name, path, args, kwargs
    def get_internal_type(self):
        return "AutoField"
    def to_python(self, value):
        if value is None:
            return value
        try:
            return int(value)
        except (TypeError, ValueError):
            raise exceptions.ValidationError(
                self.error_messages['invalid'],
                code='invalid',
                params={'value': value},
            )
    def rel_db_type(self, connection):
        # Foreign keys pointing at an AutoField store a plain integer.
        return IntegerField().db_type(connection=connection)
    def validate(self, value, model_instance):
        # The database generates the value; nothing to validate.
        pass
    def get_db_prep_value(self, value, connection, prepared=False):
        if not prepared:
            value = self.get_prep_value(value)
            value = connection.ops.validate_autopk_value(value)
        return value
    def get_prep_value(self, value):
        value = super().get_prep_value(value)
        if value is None:
            return None
        return int(value)
    def contribute_to_class(self, cls, name, **kwargs):
        assert not cls._meta.auto_field, "A model can't have more than one AutoField."
        super().contribute_to_class(cls, name, **kwargs)
        # Record this field as the model's auto PK.
        cls._meta.auto_field = self
    def formfield(self, **kwargs):
        # Auto PKs are never edited through forms.
        return None
class BigAutoField(AutoField):
    """AutoField variant backed by a 64-bit integer column."""
    description = _("Big (8 byte) integer")
    def get_internal_type(self):
        return "BigAutoField"
    def rel_db_type(self, connection):
        # Foreign keys pointing here need a matching 64-bit integer column.
        return BigIntegerField().db_type(connection=connection)
class BooleanField(Field):
    """True/False field; null values are rejected (use NullBooleanField)."""
    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _("'%(value)s' value must be either True or False."),
    }
    description = _("Boolean (Either True or False)")
    def __init__(self, *args, **kwargs):
        # An unchecked checkbox submits nothing, so blank must be allowed.
        kwargs['blank'] = True
        super().__init__(*args, **kwargs)
    def check(self, **kwargs):
        errors = super().check(**kwargs)
        errors.extend(self._check_null(**kwargs))
        return errors
    def _check_null(self, **kwargs):
        # System check: null=True is invalid here by design.
        if getattr(self, 'null', False):
            return [
                checks.Error(
                    'BooleanFields do not accept null values.',
                    hint='Use a NullBooleanField instead.',
                    obj=self,
                    id='fields.E110',
                )
            ]
        else:
            return []
    def deconstruct(self):
        name, path, args, kwargs = super().deconstruct()
        # blank=True is implied by __init__ and need not be serialized.
        del kwargs['blank']
        return name, path, args, kwargs
    def get_internal_type(self):
        return "BooleanField"
    def to_python(self, value):
        if value in (True, False):
            # if value is 1 or 0 than it's equal to True or False, but we want
            # to return a true bool for semantic reasons.
            return bool(value)
        if value in ('t', 'True', '1'):
            return True
        if value in ('f', 'False', '0'):
            return False
        raise exceptions.ValidationError(
            self.error_messages['invalid'],
            code='invalid',
            params={'value': value},
        )
    def get_prep_value(self, value):
        value = super().get_prep_value(value)
        if value is None:
            return None
        return self.to_python(value)
    def formfield(self, **kwargs):
        # Unlike most fields, BooleanField figures out include_blank from
        # self.null instead of self.blank.
        if self.choices:
            include_blank = not (self.has_default() or 'initial' in kwargs)
            defaults = {'choices': self.get_choices(include_blank=include_blank)}
        else:
            defaults = {'form_class': forms.BooleanField}
        defaults.update(kwargs)
        return super().formfield(**defaults)
class CharField(Field):
    """String field with a required, validated max_length."""
    description = _("String (up to %(max_length)s)")
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.validators.append(validators.MaxLengthValidator(self.max_length))
    def check(self, **kwargs):
        errors = super().check(**kwargs)
        errors.extend(self._check_max_length_attribute(**kwargs))
        return errors
    def _check_max_length_attribute(self, **kwargs):
        # System checks: max_length must be present and a positive int.
        if self.max_length is None:
            return [
                checks.Error(
                    "CharFields must define a 'max_length' attribute.",
                    obj=self,
                    id='fields.E120',
                )
            ]
        elif (not isinstance(self.max_length, int) or isinstance(self.max_length, bool) or
                self.max_length <= 0):
            return [
                checks.Error(
                    "'max_length' must be a positive integer.",
                    obj=self,
                    id='fields.E121',
                )
            ]
        else:
            return []
    def cast_db_type(self, connection):
        if self.max_length is None:
            return connection.ops.cast_char_field_without_max_length
        return super().cast_db_type(connection)
    def get_internal_type(self):
        return "CharField"
    def to_python(self, value):
        if isinstance(value, str) or value is None:
            return value
        return str(value)
    def get_prep_value(self, value):
        value = super().get_prep_value(value)
        return self.to_python(value)
    def formfield(self, **kwargs):
        # Passing max_length to forms.CharField means that the value's length
        # will be validated twice. This is considered acceptable since we want
        # the value in the form field (to pass into widget for example).
        defaults = {'max_length': self.max_length}
        # TODO: Handle multiple backends with different feature flags.
        if self.null and not connection.features.interprets_empty_strings_as_nulls:
            defaults['empty_value'] = None
        defaults.update(kwargs)
        return super().formfield(**defaults)
class CommaSeparatedIntegerField(CharField):
    """Removed field kept only so historical migrations keep loading."""
    default_validators = [validators.validate_comma_separated_integer_list]
    description = _("Comma-separated integers")
    # Triggers system check fields.E901 when used outside migrations.
    system_check_removed_details = {
        'msg': (
            'CommaSeparatedIntegerField is removed except for support in '
            'historical migrations.'
        ),
        'hint': (
            'Use CharField(validators=[validate_comma_separated_integer_list]) '
            'instead.'
        ),
        'id': 'fields.E901',
    }
class DateTimeCheckMixin:
    """Shared system checks for date/time fields: mutually exclusive
    auto_now/auto_now_add/default, and fixed-default warnings."""
    def check(self, **kwargs):
        errors = super().check(**kwargs)
        errors.extend(self._check_mutually_exclusive_options())
        errors.extend(self._check_fix_default_value())
        return errors
    def _check_mutually_exclusive_options(self):
        # auto_now, auto_now_add, and default are mutually exclusive
        # options. The use of more than one of these options together
        # will trigger an Error
        mutually_exclusive_options = [self.auto_now_add, self.auto_now, self.has_default()]
        enabled_options = [option not in (None, False) for option in mutually_exclusive_options].count(True)
        if enabled_options > 1:
            return [
                checks.Error(
                    "The options auto_now, auto_now_add, and default "
                    "are mutually exclusive. Only one of these options "
                    "may be present.",
                    obj=self,
                    id='fields.E160',
                )
            ]
        else:
            return []
    def _check_fix_default_value(self):
        # Hook for subclasses to warn about fixed (evaluated-once) defaults.
        return []
class DateField(DateTimeCheckMixin, Field):
    """Date (without time) field; supports auto_now / auto_now_add."""
    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _("'%(value)s' value has an invalid date format. It must be "
                     "in YYYY-MM-DD format."),
        'invalid_date': _("'%(value)s' value has the correct format (YYYY-MM-DD) "
                          "but it is an invalid date."),
    }
    description = _("Date (without time)")
    def __init__(self, verbose_name=None, name=None, auto_now=False,
                 auto_now_add=False, **kwargs):
        self.auto_now, self.auto_now_add = auto_now, auto_now_add
        if auto_now or auto_now_add:
            # Auto-populated fields are never edited through forms.
            kwargs['editable'] = False
            kwargs['blank'] = True
        super().__init__(verbose_name, name, **kwargs)
    def _check_fix_default_value(self):
        """
        Warn that using an actual date or datetime value is probably wrong;
        it's only evaluated on server startup.
        """
        if not self.has_default():
            return []
        now = timezone.now()
        if not timezone.is_naive(now):
            now = timezone.make_naive(now, timezone.utc)
        value = self.default
        if isinstance(value, datetime.datetime):
            if not timezone.is_naive(value):
                value = timezone.make_naive(value, timezone.utc)
            value = value.date()
        elif isinstance(value, datetime.date):
            # Nothing to do, as dates don't have tz information
            pass
        else:
            # No explicit date / datetime value -- no checks necessary
            return []
        # Only warn when the default is within a day of "now" -- a strong
        # hint it was meant to be the current date.
        offset = datetime.timedelta(days=1)
        lower = (now - offset).date()
        upper = (now + offset).date()
        if lower <= value <= upper:
            return [
                checks.Warning(
                    'Fixed default value provided.',
                    hint='It seems you set a fixed date / time / datetime '
                         'value as default for this field. This may not be '
                         'what you want. If you want to have the current date '
                         'as default, use `django.utils.timezone.now`',
                    obj=self,
                    id='fields.W161',
                )
            ]
        return []
    def deconstruct(self):
        name, path, args, kwargs = super().deconstruct()
        if self.auto_now:
            kwargs['auto_now'] = True
        if self.auto_now_add:
            kwargs['auto_now_add'] = True
        if self.auto_now or self.auto_now_add:
            # These were forced by __init__, so don't serialize them.
            del kwargs['editable']
            del kwargs['blank']
        return name, path, args, kwargs
    def get_internal_type(self):
        return "DateField"
    def to_python(self, value):
        if value is None:
            return value
        if isinstance(value, datetime.datetime):
            if settings.USE_TZ and timezone.is_aware(value):
                # Convert aware datetimes to the default time zone
                # before casting them to dates (#17742).
                default_timezone = timezone.get_default_timezone()
                value = timezone.make_naive(value, default_timezone)
            return value.date()
        if isinstance(value, datetime.date):
            return value
        try:
            parsed = parse_date(value)
            if parsed is not None:
                return parsed
        except ValueError:
            # Well-formed string but an impossible date (e.g. Feb 30).
            raise exceptions.ValidationError(
                self.error_messages['invalid_date'],
                code='invalid_date',
                params={'value': value},
            )
        raise exceptions.ValidationError(
            self.error_messages['invalid'],
            code='invalid',
            params={'value': value},
        )
    def pre_save(self, model_instance, add):
        if self.auto_now or (self.auto_now_add and add):
            # Stamp today's date and push it back onto the instance.
            value = datetime.date.today()
            setattr(model_instance, self.attname, value)
            return value
        else:
            return super().pre_save(model_instance, add)
    def contribute_to_class(self, cls, name, **kwargs):
        super().contribute_to_class(cls, name, **kwargs)
        if not self.null:
            # Expose get_next_by_FOO / get_previous_by_FOO helpers.
            setattr(
                cls, 'get_next_by_%s' % self.name,
                partialmethod(cls._get_next_or_previous_by_FIELD, field=self, is_next=True)
            )
            setattr(
                cls, 'get_previous_by_%s' % self.name,
                partialmethod(cls._get_next_or_previous_by_FIELD, field=self, is_next=False)
            )
    def get_prep_value(self, value):
        value = super().get_prep_value(value)
        return self.to_python(value)
    def get_db_prep_value(self, value, connection, prepared=False):
        # Casts dates into the format expected by the backend
        if not prepared:
            value = self.get_prep_value(value)
        return connection.ops.adapt_datefield_value(value)
    def value_to_string(self, obj):
        val = self.value_from_object(obj)
        return '' if val is None else val.isoformat()
    def formfield(self, **kwargs):
        defaults = {'form_class': forms.DateField}
        defaults.update(kwargs)
        return super().formfield(**defaults)
class DateTimeField(DateField):
    """Date-and-time field; timezone-aware when USE_TZ is enabled."""
    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _("'%(value)s' value has an invalid format. It must be in "
                     "YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] format."),
        'invalid_date': _("'%(value)s' value has the correct format "
                          "(YYYY-MM-DD) but it is an invalid date."),
        'invalid_datetime': _("'%(value)s' value has the correct format "
                              "(YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ]) "
                              "but it is an invalid date/time."),
    }
    description = _("Date (with time)")
    # __init__ is inherited from DateField
    def _check_fix_default_value(self):
        """
        Warn that using an actual date or datetime value is probably wrong;
        it's only evaluated on server startup.
        """
        if not self.has_default():
            return []
        now = timezone.now()
        if not timezone.is_naive(now):
            now = timezone.make_naive(now, timezone.utc)
        value = self.default
        if isinstance(value, datetime.datetime):
            # A datetime default within 10 seconds of startup "now" was
            # almost certainly meant to be timezone.now.
            second_offset = datetime.timedelta(seconds=10)
            lower = now - second_offset
            upper = now + second_offset
            if timezone.is_aware(value):
                value = timezone.make_naive(value, timezone.utc)
        elif isinstance(value, datetime.date):
            second_offset = datetime.timedelta(seconds=10)
            lower = now - second_offset
            lower = datetime.datetime(lower.year, lower.month, lower.day)
            upper = now + second_offset
            upper = datetime.datetime(upper.year, upper.month, upper.day)
            value = datetime.datetime(value.year, value.month, value.day)
        else:
            # No explicit date / datetime value -- no checks necessary
            return []
        if lower <= value <= upper:
            return [
                checks.Warning(
                    'Fixed default value provided.',
                    hint='It seems you set a fixed date / time / datetime '
                         'value as default for this field. This may not be '
                         'what you want. If you want to have the current date '
                         'as default, use `django.utils.timezone.now`',
                    obj=self,
                    id='fields.W161',
                )
            ]
        return []
    def get_internal_type(self):
        return "DateTimeField"
    def to_python(self, value):
        if value is None:
            return value
        if isinstance(value, datetime.datetime):
            return value
        if isinstance(value, datetime.date):
            # Promote a bare date to midnight of that day.
            value = datetime.datetime(value.year, value.month, value.day)
            if settings.USE_TZ:
                # For backwards compatibility, interpret naive datetimes in
                # local time. This won't work during DST change, but we can't
                # do much about it, so we let the exceptions percolate up the
                # call stack.
                warnings.warn("DateTimeField %s.%s received a naive datetime "
                              "(%s) while time zone support is active." %
                              (self.model.__name__, self.name, value),
                              RuntimeWarning)
                default_timezone = timezone.get_default_timezone()
                value = timezone.make_aware(value, default_timezone)
            return value
        try:
            parsed = parse_datetime(value)
            if parsed is not None:
                return parsed
        except ValueError:
            raise exceptions.ValidationError(
                self.error_messages['invalid_datetime'],
                code='invalid_datetime',
                params={'value': value},
            )
        # Fall back to parsing a bare date string.
        try:
            parsed = parse_date(value)
            if parsed is not None:
                return datetime.datetime(parsed.year, parsed.month, parsed.day)
        except ValueError:
            raise exceptions.ValidationError(
                self.error_messages['invalid_date'],
                code='invalid_date',
                params={'value': value},
            )
        raise exceptions.ValidationError(
            self.error_messages['invalid'],
            code='invalid',
            params={'value': value},
        )
    def pre_save(self, model_instance, add):
        if self.auto_now or (self.auto_now_add and add):
            # Stamp the current time and push it back onto the instance.
            value = timezone.now()
            setattr(model_instance, self.attname, value)
            return value
        else:
            return super().pre_save(model_instance, add)
    # contribute_to_class is inherited from DateField, it registers
    # get_next_by_FOO and get_prev_by_FOO
    def get_prep_value(self, value):
        value = super().get_prep_value(value)
        value = self.to_python(value)
        if value is not None and settings.USE_TZ and timezone.is_naive(value):
            # For backwards compatibility, interpret naive datetimes in local
            # time. This won't work during DST change, but we can't do much
            # about it, so we let the exceptions percolate up the call stack.
            try:
                name = '%s.%s' % (self.model.__name__, self.name)
            except AttributeError:
                name = '(unbound)'
            warnings.warn("DateTimeField %s received a naive datetime (%s)"
                          " while time zone support is active." %
                          (name, value),
                          RuntimeWarning)
            default_timezone = timezone.get_default_timezone()
            value = timezone.make_aware(value, default_timezone)
        return value
    def get_db_prep_value(self, value, connection, prepared=False):
        # Casts datetimes into the format expected by the backend
        if not prepared:
            value = self.get_prep_value(value)
        return connection.ops.adapt_datetimefield_value(value)
    def value_to_string(self, obj):
        val = self.value_from_object(obj)
        return '' if val is None else val.isoformat()
    def formfield(self, **kwargs):
        defaults = {'form_class': forms.DateTimeField}
        defaults.update(kwargs)
        return super().formfield(**defaults)
class DecimalField(Field):
    """Fixed-precision decimal field configured by max_digits/decimal_places."""
    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _("'%(value)s' value must be a decimal number."),
    }
    description = _("Decimal number")
    def __init__(self, verbose_name=None, name=None, max_digits=None,
                 decimal_places=None, **kwargs):
        self.max_digits, self.decimal_places = max_digits, decimal_places
        super().__init__(verbose_name, name, **kwargs)
    def check(self, **kwargs):
        errors = super().check(**kwargs)
        digits_errors = self._check_decimal_places()
        digits_errors.extend(self._check_max_digits())
        if not digits_errors:
            # Only compare the two options once each is individually valid.
            errors.extend(self._check_decimal_places_and_max_digits(**kwargs))
        else:
            errors.extend(digits_errors)
        return errors
    def _check_decimal_places(self):
        # TypeError -> option missing; ValueError -> negative/non-numeric.
        try:
            decimal_places = int(self.decimal_places)
            if decimal_places < 0:
                raise ValueError()
        except TypeError:
            return [
                checks.Error(
                    "DecimalFields must define a 'decimal_places' attribute.",
                    obj=self,
                    id='fields.E130',
                )
            ]
        except ValueError:
            return [
                checks.Error(
                    "'decimal_places' must be a non-negative integer.",
                    obj=self,
                    id='fields.E131',
                )
            ]
        else:
            return []
    def _check_max_digits(self):
        # TypeError -> option missing; ValueError -> non-positive/non-numeric.
        try:
            max_digits = int(self.max_digits)
            if max_digits <= 0:
                raise ValueError()
        except TypeError:
            return [
                checks.Error(
                    "DecimalFields must define a 'max_digits' attribute.",
                    obj=self,
                    id='fields.E132',
                )
            ]
        except ValueError:
            return [
                checks.Error(
                    "'max_digits' must be a positive integer.",
                    obj=self,
                    id='fields.E133',
                )
            ]
        else:
            return []
    def _check_decimal_places_and_max_digits(self, **kwargs):
        if int(self.decimal_places) > int(self.max_digits):
            return [
                checks.Error(
                    "'max_digits' must be greater or equal to 'decimal_places'.",
                    obj=self,
                    id='fields.E134',
                )
            ]
        return []
    @cached_property
    def validators(self):
        return super().validators + [
            validators.DecimalValidator(self.max_digits, self.decimal_places)
        ]
    @cached_property
    def context(self):
        # Decimal context whose precision matches the field's max_digits.
        return decimal.Context(prec=self.max_digits)
    def deconstruct(self):
        name, path, args, kwargs = super().deconstruct()
        if self.max_digits is not None:
            kwargs['max_digits'] = self.max_digits
        if self.decimal_places is not None:
            kwargs['decimal_places'] = self.decimal_places
        return name, path, args, kwargs
    def get_internal_type(self):
        return "DecimalField"
    def to_python(self, value):
        if value is None:
            return value
        if isinstance(value, float):
            # Convert via the field's context so the float is rounded to
            # max_digits rather than expanded to full binary precision.
            return self.context.create_decimal_from_float(value)
        try:
            return decimal.Decimal(value)
        except decimal.InvalidOperation:
            raise exceptions.ValidationError(
                self.error_messages['invalid'],
                code='invalid',
                params={'value': value},
            )
    def format_number(self, value):
        """
        Format a number into a string with the requisite number of digits and
        decimal places.
        """
        # Method moved to django.db.backends.utils.
        #
        # It is preserved because it is used by the oracle backend
        # (django.db.backends.oracle.query), and also for
        # backwards-compatibility with any external code which may have used
        # this method.
        from django.db.backends import utils
        return utils.format_number(value, self.max_digits, self.decimal_places)
    def get_db_prep_save(self, value, connection):
        return connection.ops.adapt_decimalfield_value(self.to_python(value), self.max_digits, self.decimal_places)
    def get_prep_value(self, value):
        value = super().get_prep_value(value)
        return self.to_python(value)
    def formfield(self, **kwargs):
        defaults = {
            'max_digits': self.max_digits,
            'decimal_places': self.decimal_places,
            'form_class': forms.DecimalField,
        }
        defaults.update(kwargs)
        return super().formfield(**defaults)
class DurationField(Field):
    """
    Store timedelta objects.
    Use interval on PostgreSQL, INTERVAL DAY TO SECOND on Oracle, and bigint
    of microseconds on other databases.
    """
    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _("'%(value)s' value has an invalid format. It must be in "
                     "[DD] [HH:[MM:]]ss[.uuuuuu] format.")
    }
    description = _("Duration")
    def get_internal_type(self):
        return "DurationField"
    def to_python(self, value):
        """Coerce value to a datetime.timedelta or raise ValidationError."""
        if value is None:
            return value
        if isinstance(value, datetime.timedelta):
            return value
        try:
            parsed = parse_duration(value)
        except ValueError:
            pass
        else:
            if parsed is not None:
                return parsed
        raise exceptions.ValidationError(
            self.error_messages['invalid'],
            code='invalid',
            params={'value': value},
        )
    def get_db_prep_value(self, value, connection, prepared=False):
        """Return the value for the backend: the timedelta itself on backends
        with a native duration type, otherwise an exact microsecond count."""
        if connection.features.has_native_duration_field:
            return value
        if value is None:
            return None
        # Compute microseconds from the timedelta's integer components
        # (days/seconds/microseconds are always normalized ints) instead of
        # round(value.total_seconds() * 1000000): total_seconds() returns a
        # float, which loses microsecond precision for large durations.
        return (value.days * 86400 + value.seconds) * 1000000 + value.microseconds
    def get_db_converters(self, connection):
        converters = []
        if not connection.features.has_native_duration_field:
            converters.append(connection.ops.convert_durationfield_value)
        return converters + super().get_db_converters(connection)
    def value_to_string(self, obj):
        val = self.value_from_object(obj)
        return '' if val is None else duration_string(val)
    def formfield(self, **kwargs):
        defaults = {
            'form_class': forms.DurationField,
        }
        defaults.update(kwargs)
        return super().formfield(**defaults)
class EmailField(CharField):
    """CharField validated as an email address."""
    default_validators = [validators.validate_email]
    description = _("Email address")
    def __init__(self, *args, **kwargs):
        # max_length=254 to be compliant with RFCs 3696 and 5321
        kwargs['max_length'] = kwargs.get('max_length', 254)
        super().__init__(*args, **kwargs)
    def deconstruct(self):
        name, path, args, kwargs = super().deconstruct()
        # We do not exclude max_length if it matches default as we want to change
        # the default in future.
        return name, path, args, kwargs
    def formfield(self, **kwargs):
        # As with CharField, this will cause email validation to be performed
        # twice.
        defaults = {
            'form_class': forms.EmailField,
        }
        defaults.update(kwargs)
        return super().formfield(**defaults)
class FilePathField(Field):
    """CharField-backed field whose value is a filesystem path limited to
    files and/or folders under a given directory."""
    description = _("File path")
    def __init__(self, verbose_name=None, name=None, path='', match=None,
                 recursive=False, allow_files=True, allow_folders=False, **kwargs):
        self.path, self.match, self.recursive = path, match, recursive
        self.allow_files, self.allow_folders = allow_files, allow_folders
        kwargs['max_length'] = kwargs.get('max_length', 100)
        super().__init__(verbose_name, name, **kwargs)
    def check(self, **kwargs):
        errors = super().check(**kwargs)
        errors.extend(self._check_allowing_files_or_folders(**kwargs))
        return errors
    def _check_allowing_files_or_folders(self, **kwargs):
        # System check: the field must be allowed to match something.
        if not self.allow_files and not self.allow_folders:
            return [
                checks.Error(
                    "FilePathFields must have either 'allow_files' or 'allow_folders' set to True.",
                    obj=self,
                    id='fields.E140',
                )
            ]
        return []
    def deconstruct(self):
        name, path, args, kwargs = super().deconstruct()
        # Only serialize options that differ from their defaults.
        if self.path != '':
            kwargs['path'] = self.path
        if self.match is not None:
            kwargs['match'] = self.match
        if self.recursive is not False:
            kwargs['recursive'] = self.recursive
        if self.allow_files is not True:
            kwargs['allow_files'] = self.allow_files
        if self.allow_folders is not False:
            kwargs['allow_folders'] = self.allow_folders
        if kwargs.get("max_length") == 100:
            del kwargs["max_length"]
        return name, path, args, kwargs
    def get_prep_value(self, value):
        value = super().get_prep_value(value)
        if value is None:
            return None
        return str(value)
    def formfield(self, **kwargs):
        defaults = {
            'path': self.path,
            'match': self.match,
            'recursive': self.recursive,
            'form_class': forms.FilePathField,
            'allow_files': self.allow_files,
            'allow_folders': self.allow_folders,
        }
        defaults.update(kwargs)
        return super().formfield(**defaults)
    def get_internal_type(self):
        return "FilePathField"
class FloatField(Field):
    """Field storing a Python float."""
    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _("'%(value)s' value must be a float."),
    }
    description = _("Floating point number")

    def get_internal_type(self):
        return "FloatField"

    def get_prep_value(self, value):
        value = super().get_prep_value(value)
        return None if value is None else float(value)

    def to_python(self, value):
        """Coerce to float; raise ValidationError on unconvertible input."""
        if value is None:
            return value
        try:
            return float(value)
        except (TypeError, ValueError):
            raise exceptions.ValidationError(
                self.error_messages['invalid'],
                code='invalid',
                params={'value': value},
            )

    def formfield(self, **kwargs):
        return super().formfield(**{'form_class': forms.FloatField, **kwargs})
class IntegerField(Field):
    """Field storing an integer; the exact DB range is backend-dependent
    (see validators below)."""
    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _("'%(value)s' value must be an integer."),
    }
    description = _("Integer")

    def check(self, **kwargs):
        errors = super().check(**kwargs)
        errors.extend(self._check_max_length_warning())
        return errors

    def _check_max_length_warning(self):
        # max_length has no effect on integer columns; warn so the user
        # removes the dead option.
        if self.max_length is not None:
            return [
                checks.Warning(
                    "'max_length' is ignored when used with IntegerField",
                    hint="Remove 'max_length' from field",
                    obj=self,
                    id='fields.W122',
                )
            ]
        return []

    @cached_property
    def validators(self):
        # These validators can't be added at field initialization time since
        # they're based on values retrieved from `connection`.
        validators_ = super().validators
        internal_type = self.get_internal_type()
        min_value, max_value = connection.ops.integer_field_range(internal_type)
        if min_value is not None:
            for validator in validators_:
                # Skip adding the backend bound if an existing validator is
                # already at least as restrictive.
                if isinstance(validator, validators.MinValueValidator) and validator.limit_value >= min_value:
                    break
            else:
                validators_.append(validators.MinValueValidator(min_value))
        if max_value is not None:
            for validator in validators_:
                if isinstance(validator, validators.MaxValueValidator) and validator.limit_value <= max_value:
                    break
            else:
                validators_.append(validators.MaxValueValidator(max_value))
        return validators_

    def get_prep_value(self, value):
        """Convert to int for use as a query parameter; None passes through."""
        value = super().get_prep_value(value)
        if value is None:
            return None
        return int(value)

    def get_internal_type(self):
        return "IntegerField"

    def to_python(self, value):
        """Coerce to int; raise ValidationError on unconvertible input."""
        if value is None:
            return value
        try:
            return int(value)
        except (TypeError, ValueError):
            raise exceptions.ValidationError(
                self.error_messages['invalid'],
                code='invalid',
                params={'value': value},
            )

    def formfield(self, **kwargs):
        defaults = {'form_class': forms.IntegerField}
        defaults.update(kwargs)
        return super().formfield(**defaults)
class BigIntegerField(IntegerField):
    """IntegerField mapped to an 8-byte column; form bounds match that range."""
    empty_strings_allowed = False
    description = _("Big (8 byte) integer")
    MAX_BIGINT = 9223372036854775807

    def get_internal_type(self):
        return "BigIntegerField"

    def formfield(self, **kwargs):
        bound = BigIntegerField.MAX_BIGINT
        defaults = {'min_value': -bound - 1, 'max_value': bound}
        defaults.update(kwargs)
        return super().formfield(**defaults)
class IPAddressField(Field):
    """Legacy IPv4-only address field.

    Removed from public use; kept only so historical migrations that
    reference it keep working (see system_check_removed_details).
    """
    empty_strings_allowed = False
    description = _("IPv4 address")
    system_check_removed_details = {
        'msg': (
            'IPAddressField has been removed except for support in '
            'historical migrations.'
        ),
        'hint': 'Use GenericIPAddressField instead.',
        'id': 'fields.E900',
    }

    def __init__(self, *args, **kwargs):
        # Longest dotted-quad string, "255.255.255.255", is 15 characters.
        kwargs['max_length'] = 15
        super().__init__(*args, **kwargs)

    def deconstruct(self):
        name, path, args, kwargs = super().deconstruct()
        # max_length is forced in __init__ and never serialized.
        del kwargs['max_length']
        return name, path, args, kwargs

    def get_prep_value(self, value):
        value = super().get_prep_value(value)
        if value is None:
            return None
        return str(value)

    def get_internal_type(self):
        return "IPAddressField"
class GenericIPAddressField(Field):
    """Field storing an IPv4 or IPv6 address.

    `protocol` selects which families are accepted ('both', 'IPv4',
    'IPv6'); `unpack_ipv4` unpacks IPv4-mapped IPv6 addresses like
    ::ffff:192.0.2.1 to their IPv4 form.
    """
    empty_strings_allowed = False
    description = _("IP address")
    default_error_messages = {}

    def __init__(self, verbose_name=None, name=None, protocol='both',
                 unpack_ipv4=False, *args, **kwargs):
        self.unpack_ipv4 = unpack_ipv4
        self.protocol = protocol
        self.default_validators, invalid_error_message = \
            validators.ip_address_validators(protocol, unpack_ipv4)
        # Remember whether the caller supplied their own 'invalid' message
        # before Field.__init__ consumes the kwarg.
        custom_messages = kwargs.get('error_messages') or {}
        # 39 chars is the longest textual IPv6 representation.
        kwargs['max_length'] = 39
        super().__init__(verbose_name, name, *args, **kwargs)
        # BUGFIX: the previous code mutated the *class-level*
        # default_error_messages dict before calling super(), leaking the
        # last-initialized protocol's message across every instance (and
        # subclass). Patch the per-instance error_messages mapping built by
        # Field.__init__ instead; an explicit caller-provided message still
        # wins, matching the old precedence.
        if 'invalid' not in custom_messages:
            self.error_messages['invalid'] = invalid_error_message

    def check(self, **kwargs):
        errors = super().check(**kwargs)
        errors.extend(self._check_blank_and_null_values(**kwargs))
        return errors

    def _check_blank_and_null_values(self, **kwargs):
        # blank values are stored as NULL, so blank=True requires null=True.
        if not getattr(self, 'null', False) and getattr(self, 'blank', False):
            return [
                checks.Error(
                    'GenericIPAddressFields cannot have blank=True if null=False, '
                    'as blank values are stored as nulls.',
                    obj=self,
                    id='fields.E150',
                )
            ]
        return []

    def deconstruct(self):
        """Serialize only non-default options; max_length is always forced."""
        name, path, args, kwargs = super().deconstruct()
        if self.unpack_ipv4 is not False:
            kwargs['unpack_ipv4'] = self.unpack_ipv4
        if self.protocol != "both":
            kwargs['protocol'] = self.protocol
        if kwargs.get("max_length") == 39:
            del kwargs['max_length']
        return name, path, args, kwargs

    def get_internal_type(self):
        return "GenericIPAddressField"

    def to_python(self, value):
        if value is None:
            return None
        if not isinstance(value, str):
            value = str(value)
        value = value.strip()
        if ':' in value:
            # Normalize (and possibly unpack) IPv6 input.
            return clean_ipv6_address(value, self.unpack_ipv4, self.error_messages['invalid'])
        return value

    def get_db_prep_value(self, value, connection, prepared=False):
        if not prepared:
            value = self.get_prep_value(value)
        return connection.ops.adapt_ipaddressfield_value(value)

    def get_prep_value(self, value):
        value = super().get_prep_value(value)
        if value is None:
            return None
        if value and ':' in value:
            try:
                return clean_ipv6_address(value, self.unpack_ipv4)
            except exceptions.ValidationError:
                # Leave invalid input untouched; validation reports it later.
                pass
        return str(value)

    def formfield(self, **kwargs):
        defaults = {
            'protocol': self.protocol,
            'form_class': forms.GenericIPAddressField,
        }
        defaults.update(kwargs)
        return super().formfield(**defaults)
class NullBooleanField(Field):
    """Boolean field that also permits NULL; forces null=True, blank=True."""
    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _("'%(value)s' value must be either None, True or False."),
    }
    description = _("Boolean (Either True, False or None)")

    def __init__(self, *args, **kwargs):
        kwargs['null'] = True
        kwargs['blank'] = True
        super().__init__(*args, **kwargs)

    def deconstruct(self):
        name, path, args, kwargs = super().deconstruct()
        # null/blank are forced in __init__, so they never need serializing.
        for forced in ('null', 'blank'):
            del kwargs[forced]
        return name, path, args, kwargs

    def get_internal_type(self):
        return "NullBooleanField"

    def to_python(self, value):
        """Map the accepted representations onto None/True/False."""
        if value is None:
            return None
        if value in (True, False):
            return bool(value)
        if value == 'None':
            return None
        if value in ('t', 'True', '1'):
            return True
        if value in ('f', 'False', '0'):
            return False
        raise exceptions.ValidationError(
            self.error_messages['invalid'],
            code='invalid',
            params={'value': value},
        )

    def get_prep_value(self, value):
        value = super().get_prep_value(value)
        return None if value is None else self.to_python(value)

    def formfield(self, **kwargs):
        return super().formfield(**{'form_class': forms.NullBooleanField, **kwargs})
class PositiveIntegerRelDbTypeMixin:

    def rel_db_type(self, connection):
        """
        Return the data type that a related field pointing to this field should
        use. In most cases, a foreign key pointing to a positive integer
        primary key will have an integer column data type but some databases
        (e.g. MySQL) have an unsigned integer type. In that case
        (related_fields_match_type=True), the primary key should return its
        db_type.
        """
        if not connection.features.related_fields_match_type:
            return IntegerField().db_type(connection=connection)
        return self.db_type(connection)
class PositiveIntegerField(PositiveIntegerRelDbTypeMixin, IntegerField):
    """IntegerField restricted to non-negative values at the form level."""
    description = _("Positive integer")

    def get_internal_type(self):
        return "PositiveIntegerField"

    def formfield(self, **kwargs):
        return super().formfield(**{'min_value': 0, **kwargs})
class PositiveSmallIntegerField(PositiveIntegerRelDbTypeMixin, IntegerField):
    """Small-integer variant restricted to non-negative form values."""
    description = _("Positive small integer")

    def get_internal_type(self):
        return "PositiveSmallIntegerField"

    def formfield(self, **kwargs):
        return super().formfield(**{'min_value': 0, **kwargs})
class SlugField(CharField):
    """CharField validated as a slug; indexed by default."""
    default_validators = [validators.validate_slug]
    description = _("Slug (up to %(max_length)s)")

    def __init__(self, *args, max_length=50, db_index=True, allow_unicode=False, **kwargs):
        self.allow_unicode = allow_unicode
        if self.allow_unicode:
            # Swap in the unicode-aware slug validator.
            self.default_validators = [validators.validate_unicode_slug]
        super().__init__(*args, max_length=max_length, db_index=db_index, **kwargs)

    def deconstruct(self):
        name, path, args, kwargs = super().deconstruct()
        # Drop options matching this field's own defaults. Note db_index
        # defaults to True here (unlike Field), hence the inverted handling.
        if kwargs.get("max_length") == 50:
            del kwargs['max_length']
        if self.db_index is False:
            kwargs['db_index'] = False
        else:
            del kwargs['db_index']
        if self.allow_unicode is not False:
            kwargs['allow_unicode'] = self.allow_unicode
        return name, path, args, kwargs

    def get_internal_type(self):
        return "SlugField"

    def formfield(self, **kwargs):
        defaults = {'form_class': forms.SlugField, 'allow_unicode': self.allow_unicode}
        defaults.update(kwargs)
        return super().formfield(**defaults)
class SmallIntegerField(IntegerField):
    """IntegerField mapped to the backend's small-integer column type;
    value bounds are derived via connection.ops.integer_field_range()."""
    description = _("Small integer")

    def get_internal_type(self):
        return "SmallIntegerField"
class TextField(Field):
    """Unbounded text column."""
    description = _("Text")

    def get_internal_type(self):
        return "TextField"

    def to_python(self, value):
        if value is None or isinstance(value, str):
            return value
        return str(value)

    def get_prep_value(self, value):
        return self.to_python(super().get_prep_value(value))

    def formfield(self, **kwargs):
        # Passing max_length to forms.CharField means that the value's length
        # will be validated twice. This is considered acceptable since we want
        # the value in the form field (to pass into widget for example).
        defaults = {'max_length': self.max_length}
        if not self.choices:
            defaults['widget'] = forms.Textarea
        defaults.update(kwargs)
        return super().formfield(**defaults)
class TimeField(DateTimeCheckMixin, Field):
    """Field storing a time of day as datetime.time."""
    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _("'%(value)s' value has an invalid format. It must be in "
                     "HH:MM[:ss[.uuuuuu]] format."),
        'invalid_time': _("'%(value)s' value has the correct format "
                          "(HH:MM[:ss[.uuuuuu]]) but it is an invalid time."),
    }
    description = _("Time")

    def __init__(self, verbose_name=None, name=None, auto_now=False,
                 auto_now_add=False, **kwargs):
        self.auto_now, self.auto_now_add = auto_now, auto_now_add
        if auto_now or auto_now_add:
            # Auto-populated in pre_save(); hide from forms and allow blank.
            kwargs['editable'] = False
            kwargs['blank'] = True
        super().__init__(verbose_name, name, **kwargs)

    def _check_fix_default_value(self):
        """
        Warn that using an actual date or datetime value is probably wrong;
        it's only evaluated on server startup.
        """
        if not self.has_default():
            return []

        now = timezone.now()
        if not timezone.is_naive(now):
            now = timezone.make_naive(now, timezone.utc)
        value = self.default
        if isinstance(value, datetime.datetime):
            second_offset = datetime.timedelta(seconds=10)
            lower = now - second_offset
            upper = now + second_offset
            if timezone.is_aware(value):
                value = timezone.make_naive(value, timezone.utc)
        elif isinstance(value, datetime.time):
            second_offset = datetime.timedelta(seconds=10)
            lower = now - second_offset
            upper = now + second_offset
            value = datetime.datetime.combine(now.date(), value)
            if timezone.is_aware(value):
                # BUGFIX: keep `value` a naive *datetime* here. The previous
                # code appended .time(), producing a datetime.time that was
                # then compared against the datetime bounds below, raising
                # TypeError for aware time defaults.
                value = timezone.make_naive(value, timezone.utc)
        else:
            # No explicit time / datetime value -- no checks necessary
            return []
        if lower <= value <= upper:
            return [
                checks.Warning(
                    'Fixed default value provided.',
                    hint='It seems you set a fixed date / time / datetime '
                         'value as default for this field. This may not be '
                         'what you want. If you want to have the current date '
                         'as default, use `django.utils.timezone.now`',
                    obj=self,
                    id='fields.W161',
                )
            ]
        return []

    def deconstruct(self):
        name, path, args, kwargs = super().deconstruct()
        if self.auto_now is not False:
            kwargs["auto_now"] = self.auto_now
        if self.auto_now_add is not False:
            kwargs["auto_now_add"] = self.auto_now_add
        if self.auto_now or self.auto_now_add:
            # blank/editable were forced in __init__; don't serialize them.
            del kwargs['blank']
            del kwargs['editable']
        return name, path, args, kwargs

    def get_internal_type(self):
        return "TimeField"

    def to_python(self, value):
        """Coerce to datetime.time, parsing strings; raise ValidationError
        for unparsable or out-of-range input."""
        if value is None:
            return None
        if isinstance(value, datetime.time):
            return value
        if isinstance(value, datetime.datetime):
            # Not usually a good idea to pass in a datetime here (it loses
            # information), but this can be a side-effect of interacting with a
            # database backend (e.g. Oracle), so we'll be accommodating.
            return value.time()

        try:
            parsed = parse_time(value)
            if parsed is not None:
                return parsed
        except ValueError:
            # Well-formed but invalid, e.g. "25:00".
            raise exceptions.ValidationError(
                self.error_messages['invalid_time'],
                code='invalid_time',
                params={'value': value},
            )

        raise exceptions.ValidationError(
            self.error_messages['invalid'],
            code='invalid',
            params={'value': value},
        )

    def pre_save(self, model_instance, add):
        """Set the current time when auto_now / auto_now_add applies."""
        if self.auto_now or (self.auto_now_add and add):
            value = datetime.datetime.now().time()
            setattr(model_instance, self.attname, value)
            return value
        else:
            return super().pre_save(model_instance, add)

    def get_prep_value(self, value):
        value = super().get_prep_value(value)
        return self.to_python(value)

    def get_db_prep_value(self, value, connection, prepared=False):
        # Casts times into the format expected by the backend
        if not prepared:
            value = self.get_prep_value(value)
        return connection.ops.adapt_timefield_value(value)

    def value_to_string(self, obj):
        """Serialize for fixtures as ISO time; '' represents None."""
        val = self.value_from_object(obj)
        return '' if val is None else val.isoformat()

    def formfield(self, **kwargs):
        defaults = {'form_class': forms.TimeField}
        defaults.update(kwargs)
        return super().formfield(**defaults)
class URLField(CharField):
    """CharField that validates the value as a URL."""
    default_validators = [validators.URLValidator()]
    description = _("URL")

    def __init__(self, verbose_name=None, name=None, **kwargs):
        kwargs.setdefault('max_length', 200)
        super().__init__(verbose_name, name, **kwargs)

    def deconstruct(self):
        name, path, args, kwargs = super().deconstruct()
        if kwargs.get("max_length") == 200:
            del kwargs['max_length']
        return name, path, args, kwargs

    def formfield(self, **kwargs):
        # As with CharField, this will cause URL validation to be performed
        # twice.
        return super().formfield(**{'form_class': forms.URLField, **kwargs})
class BinaryField(Field):
    """Field storing raw binary data (bytes); never editable in forms."""
    description = _("Raw binary data")
    empty_values = [None, b'']

    def __init__(self, *args, **kwargs):
        kwargs['editable'] = False
        super().__init__(*args, **kwargs)
        if self.max_length is not None:
            self.validators.append(validators.MaxLengthValidator(self.max_length))

    def deconstruct(self):
        name, path, args, kwargs = super().deconstruct()
        # editable is forced off in __init__ and never serialized.
        del kwargs['editable']
        return name, path, args, kwargs

    def get_internal_type(self):
        return "BinaryField"

    def get_placeholder(self, value, compiler, connection):
        return connection.ops.binary_placeholder_sql(value)

    def get_default(self):
        if self.has_default() and not callable(self.default):
            return self.default
        default = super().get_default()
        if default == '':
            # Normalize the generic empty-string default to empty bytes.
            return b''
        return default

    def get_db_prep_value(self, value, connection, prepared=False):
        value = super().get_db_prep_value(value, connection, prepared)
        if value is not None:
            # Wrap in the DB-API Binary type expected by the backend driver.
            return connection.Database.Binary(value)
        return value

    def value_to_string(self, obj):
        """Binary data is serialized as base64"""
        return b64encode(force_bytes(self.value_from_object(obj))).decode('ascii')

    def to_python(self, value):
        # If it's a string, it should be base64-encoded data
        if isinstance(value, str):
            return memoryview(b64decode(force_bytes(value)))
        return value
class UUIDField(Field):
    """Field storing uuid.UUID values; uses the backend's native uuid
    column type where available, otherwise a 32-char hex string."""
    default_error_messages = {
        'invalid': _("'%(value)s' is not a valid UUID."),
    }
    description = 'Universally unique identifier'
    empty_strings_allowed = False

    def __init__(self, verbose_name=None, **kwargs):
        # 32 hex digits (UUID without dashes) for non-native backends.
        kwargs['max_length'] = 32
        super().__init__(verbose_name, **kwargs)

    def deconstruct(self):
        name, path, args, kwargs = super().deconstruct()
        # max_length is forced in __init__ and never serialized.
        del kwargs['max_length']
        return name, path, args, kwargs

    def get_internal_type(self):
        return "UUIDField"

    def get_db_prep_value(self, value, connection, prepared=False):
        if value is None:
            return None
        if not isinstance(value, uuid.UUID):
            value = self.to_python(value)

        if connection.features.has_native_uuid_field:
            return value
        return value.hex

    def to_python(self, value):
        """Coerce to uuid.UUID; raise ValidationError on bad input."""
        if value is not None and not isinstance(value, uuid.UUID):
            try:
                return uuid.UUID(value)
            except (AttributeError, ValueError):
                # AttributeError covers non-string input (uuid.UUID needs str).
                raise exceptions.ValidationError(
                    self.error_messages['invalid'],
                    code='invalid',
                    params={'value': value},
                )
        return value

    def formfield(self, **kwargs):
        defaults = {
            'form_class': forms.UUIDField,
        }
        defaults.update(kwargs)
        return super().formfield(**defaults)
| 36.11378
| 117
| 0.591426
|
4a006bff05d551ecf3602e89bbe8728bca630741
| 731
|
py
|
Python
|
78. Subsets.py
|
Nriver/leetcode
|
e0b30fae8a31513172fcbd13d67eded8922ba785
|
[
"MIT"
] | null | null | null |
78. Subsets.py
|
Nriver/leetcode
|
e0b30fae8a31513172fcbd13d67eded8922ba785
|
[
"MIT"
] | null | null | null |
78. Subsets.py
|
Nriver/leetcode
|
e0b30fae8a31513172fcbd13d67eded8922ba785
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# @Author: zengjq
# @Date: 2020-10-21 15:23:40
# @Last Modified by: zengjq
# @Last Modified time: 2020-10-21 16:10:22
class Solution:
    # Bitmask enumeration: each of the 2**n masks selects one subset
    # (bit i of the zero-padded binary string keeps nums[i]).
    def subsets1(self, nums):
        out = []
        n = len(nums)
        for mask in range(2 ** n):
            keep_flags = [int(bit) for bit in bin(mask)[2:].zfill(n)]
            out.append([v for v, keep in zip(nums, keep_flags) if keep])
        return out

    # Iterative construction (neat trick): every new value doubles the
    # collection by extending each existing subset with it.
    def subsets(self, nums):
        acc = [[]]
        for value in nums:
            acc += [existing + [value] for existing in acc]
        return acc
if __name__ == '__main__':
    # Guarded so importing this module no longer executes the smoke call.
    s = Solution()
    s.subsets([1, 2, 3])
| 22.151515
| 66
| 0.504788
|
4a006c1dcf28db4db3f97a4ee79bfeade391333f
| 20,256
|
py
|
Python
|
psutil/_common.py
|
weirongbi1992/psutils
|
4dde104ffccd3a97b726f05975e681699a337c41
|
[
"BSD-3-Clause"
] | null | null | null |
psutil/_common.py
|
weirongbi1992/psutils
|
4dde104ffccd3a97b726f05975e681699a337c41
|
[
"BSD-3-Clause"
] | 1
|
2021-12-28T09:27:13.000Z
|
2021-12-28T09:27:13.000Z
|
psutil/_common.py
|
weirongbi1992/psutils
|
4dde104ffccd3a97b726f05975e681699a337c41
|
[
"BSD-3-Clause"
] | 1
|
2021-12-28T09:34:01.000Z
|
2021-12-28T09:34:01.000Z
|
# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Common objects shared by __init__.py and _ps*.py modules."""
# Note: this module is imported by setup.py so it should not import
# psutil or third-party modules.
from __future__ import division
import contextlib
import errno
import functools
import os
import socket
import stat
import sys
import threading
import warnings
from collections import defaultdict
from collections import namedtuple
from socket import AF_INET
from socket import SOCK_DGRAM
from socket import SOCK_STREAM
try:
from socket import AF_INET6
except ImportError:
AF_INET6 = None
try:
from socket import AF_UNIX
except ImportError:
AF_UNIX = None
if sys.version_info >= (3, 4):
import enum
else:
enum = None
# can't take it from _common.py as this script is imported by setup.py
PY3 = sys.version_info[0] == 3
__all__ = [
# constants
'FREEBSD', 'BSD', 'LINUX', 'NETBSD', 'OPENBSD', 'MACOS', 'OSX', 'POSIX',
'SUNOS', 'WINDOWS',
'ENCODING', 'ENCODING_ERRS', 'AF_INET6',
# connection constants
'CONN_CLOSE', 'CONN_CLOSE_WAIT', 'CONN_CLOSING', 'CONN_ESTABLISHED',
'CONN_FIN_WAIT1', 'CONN_FIN_WAIT2', 'CONN_LAST_ACK', 'CONN_LISTEN',
'CONN_NONE', 'CONN_SYN_RECV', 'CONN_SYN_SENT', 'CONN_TIME_WAIT',
# net constants
'NIC_DUPLEX_FULL', 'NIC_DUPLEX_HALF', 'NIC_DUPLEX_UNKNOWN',
# process status constants
'STATUS_DEAD', 'STATUS_DISK_SLEEP', 'STATUS_IDLE', 'STATUS_LOCKED',
'STATUS_RUNNING', 'STATUS_SLEEPING', 'STATUS_STOPPED', 'STATUS_SUSPENDED',
'STATUS_TRACING_STOP', 'STATUS_WAITING', 'STATUS_WAKE_KILL',
'STATUS_WAKING', 'STATUS_ZOMBIE', 'STATUS_PARKED',
# named tuples
'pconn', 'pcputimes', 'pctxsw', 'pgids', 'pio', 'pionice', 'popenfile',
'pthread', 'puids', 'sconn', 'scpustats', 'sdiskio', 'sdiskpart',
'sdiskusage', 'snetio', 'snicaddr', 'snicstats', 'sswap', 'suser',
# utility functions
'conn_tmap', 'deprecated_method', 'isfile_strict', 'memoize',
'parse_environ_block', 'path_exists_strict', 'usage_percent',
'supports_ipv6', 'sockfam_to_enum', 'socktype_to_enum', "wrap_numbers",
'bytes2human', 'conn_to_ntuple',
]
# ===================================================================
# --- OS constants
# ===================================================================
POSIX = os.name == "posix"
WINDOWS = os.name == "nt"
LINUX = sys.platform.startswith("linux")
MACOS = sys.platform.startswith("darwin")
OSX = MACOS # deprecated alias
FREEBSD = sys.platform.startswith("freebsd")
OPENBSD = sys.platform.startswith("openbsd")
NETBSD = sys.platform.startswith("netbsd")
BSD = FREEBSD or OPENBSD or NETBSD
SUNOS = sys.platform.startswith(("sunos", "solaris"))
AIX = sys.platform.startswith("aix")
# ===================================================================
# --- API constants
# ===================================================================
# Process.status()
STATUS_RUNNING = "running"
STATUS_SLEEPING = "sleeping"
STATUS_DISK_SLEEP = "disk-sleep"
STATUS_STOPPED = "stopped"
STATUS_TRACING_STOP = "tracing-stop"
STATUS_ZOMBIE = "zombie"
STATUS_DEAD = "dead"
STATUS_WAKE_KILL = "wake-kill"
STATUS_WAKING = "waking"
STATUS_IDLE = "idle" # Linux, macOS, FreeBSD
STATUS_LOCKED = "locked" # FreeBSD
STATUS_WAITING = "waiting" # FreeBSD
STATUS_SUSPENDED = "suspended" # NetBSD
STATUS_PARKED = "parked" # Linux
# Process.connections() and psutil.net_connections()
CONN_ESTABLISHED = "ESTABLISHED"
CONN_SYN_SENT = "SYN_SENT"
CONN_SYN_RECV = "SYN_RECV"
CONN_FIN_WAIT1 = "FIN_WAIT1"
CONN_FIN_WAIT2 = "FIN_WAIT2"
CONN_TIME_WAIT = "TIME_WAIT"
CONN_CLOSE = "CLOSE"
CONN_CLOSE_WAIT = "CLOSE_WAIT"
CONN_LAST_ACK = "LAST_ACK"
CONN_LISTEN = "LISTEN"
CONN_CLOSING = "CLOSING"
CONN_NONE = "NONE"
# net_if_stats()
if enum is None:
NIC_DUPLEX_FULL = 2
NIC_DUPLEX_HALF = 1
NIC_DUPLEX_UNKNOWN = 0
else:
class NicDuplex(enum.IntEnum):
NIC_DUPLEX_FULL = 2
NIC_DUPLEX_HALF = 1
NIC_DUPLEX_UNKNOWN = 0
globals().update(NicDuplex.__members__)
# sensors_battery()
if enum is None:
POWER_TIME_UNKNOWN = -1
POWER_TIME_UNLIMITED = -2
else:
class BatteryTime(enum.IntEnum):
POWER_TIME_UNKNOWN = -1
POWER_TIME_UNLIMITED = -2
globals().update(BatteryTime.__members__)
# --- others
ENCODING = sys.getfilesystemencoding()
if not PY3:
ENCODING_ERRS = "replace"
else:
try:
ENCODING_ERRS = sys.getfilesystemencodeerrors() # py 3.6
except AttributeError:
ENCODING_ERRS = "surrogateescape" if POSIX else "replace"
# ===================================================================
# --- namedtuples
# ===================================================================
# --- for system functions
# psutil.swap_memory()
sswap = namedtuple('sswap', ['total', 'used', 'free', 'percent', 'sin',
'sout'])
# psutil.disk_usage()
sdiskusage = namedtuple('sdiskusage', ['total', 'used', 'free', 'percent'])
# psutil.disk_io_counters()
sdiskio = namedtuple('sdiskio', ['read_count', 'write_count',
'read_bytes', 'write_bytes',
'read_time', 'write_time'])
# psutil.disk_partitions()
sdiskpart = namedtuple('sdiskpart', ['device', 'mountpoint', 'fstype', 'opts'])
# psutil.net_io_counters()
snetio = namedtuple('snetio', ['bytes_sent', 'bytes_recv',
'packets_sent', 'packets_recv',
'errin', 'errout',
'dropin', 'dropout'])
# psutil.users()
suser = namedtuple('suser', ['name', 'terminal', 'host', 'started', 'pid'])
# psutil.net_connections()
sconn = namedtuple('sconn', ['fd', 'family', 'type', 'laddr', 'raddr',
'status', 'pid'])
# psutil.net_if_addrs()
snicaddr = namedtuple('snicaddr',
['family', 'address', 'netmask', 'broadcast', 'ptp'])
# psutil.net_if_stats()
snicstats = namedtuple('snicstats', ['isup', 'duplex', 'speed', 'mtu'])
# psutil.cpu_stats()
scpustats = namedtuple(
'scpustats', ['ctx_switches', 'interrupts', 'soft_interrupts', 'syscalls'])
# psutil.cpu_freq()
scpufreq = namedtuple('scpufreq', ['current', 'min', 'max'])
# psutil.sensors_temperatures()
shwtemp = namedtuple(
'shwtemp', ['label', 'current', 'high', 'critical'])
# psutil.sensors_battery()
sbattery = namedtuple('sbattery', ['percent', 'secsleft', 'power_plugged'])
# psutil.sensors_fans()
sfan = namedtuple('sfan', ['label', 'current'])
# --- for Process methods
# psutil.Process.cpu_times()
pcputimes = namedtuple('pcputimes',
['user', 'system', 'children_user', 'children_system'])
# psutil.Process.open_files()
popenfile = namedtuple('popenfile', ['path', 'fd'])
# psutil.Process.threads()
pthread = namedtuple('pthread', ['id', 'user_time', 'system_time'])
# psutil.Process.uids()
puids = namedtuple('puids', ['real', 'effective', 'saved'])
# psutil.Process.gids()
pgids = namedtuple('pgids', ['real', 'effective', 'saved'])
# psutil.Process.io_counters()
pio = namedtuple('pio', ['read_count', 'write_count',
'read_bytes', 'write_bytes'])
# psutil.Process.ionice()
pionice = namedtuple('pionice', ['ioclass', 'value'])
# psutil.Process.ctx_switches()
pctxsw = namedtuple('pctxsw', ['voluntary', 'involuntary'])
# psutil.Process.connections()
pconn = namedtuple('pconn', ['fd', 'family', 'type', 'laddr', 'raddr',
'status'])
# psutil.connections() and psutil.Process.connections()
addr = namedtuple('addr', ['ip', 'port'])
# ===================================================================
# --- Process.connections() 'kind' parameter mapping
# ===================================================================
conn_tmap = {
"all": ([AF_INET, AF_INET6, AF_UNIX], [SOCK_STREAM, SOCK_DGRAM]),
"tcp": ([AF_INET, AF_INET6], [SOCK_STREAM]),
"tcp4": ([AF_INET], [SOCK_STREAM]),
"udp": ([AF_INET, AF_INET6], [SOCK_DGRAM]),
"udp4": ([AF_INET], [SOCK_DGRAM]),
"inet": ([AF_INET, AF_INET6], [SOCK_STREAM, SOCK_DGRAM]),
"inet4": ([AF_INET], [SOCK_STREAM, SOCK_DGRAM]),
"inet6": ([AF_INET6], [SOCK_STREAM, SOCK_DGRAM]),
}
if AF_INET6 is not None:
conn_tmap.update({
"tcp6": ([AF_INET6], [SOCK_STREAM]),
"udp6": ([AF_INET6], [SOCK_DGRAM]),
})
if AF_UNIX is not None:
conn_tmap.update({
"unix": ([AF_UNIX], [SOCK_STREAM, SOCK_DGRAM]),
})
# ===================================================================
# --- utils
# ===================================================================
def usage_percent(used, total, round_=None):
    """Calculate percentage usage of 'used' against 'total'."""
    try:
        percent = used / total * 100
    except ZeroDivisionError:
        # Preserve the numeric type of the inputs for the zero case.
        float_inputs = isinstance(used, float) or isinstance(total, float)
        percent = 0.0 if float_inputs else 0
    return percent if round_ is None else round(percent, round_)
def memoize(fun):
    """A simple memoize decorator for functions supporting (hashable)
    positional arguments.
    It also provides a cache_clear() function for clearing the cache:

    >>> @memoize
    ... def foo()
    ...     return 1
    ...
    >>> foo()
    1
    >>> foo.cache_clear()
    >>>
    """
    cache = {}

    @functools.wraps(fun)
    def wrapper(*args, **kwargs):
        key = (args, frozenset(sorted(kwargs.items())))
        if key not in cache:
            cache[key] = fun(*args, **kwargs)
        return cache[key]

    def cache_clear():
        """Clear cache."""
        cache.clear()

    wrapper.cache_clear = cache_clear
    return wrapper
def memoize_when_activated(fun):
    """A memoize decorator which is disabled by default. It can be
    activated and deactivated on request.
    For efficiency reasons it can be used only against class methods
    accepting no arguments.
    """
    @functools.wraps(fun)
    def wrapper(self):
        cache = getattr(self, '_cache', None)
        if cache is None:
            # oneshot() ctx was never entered: always recompute.
            return fun(self)
        if fun not in cache:
            # First hit inside the ctx: compute and remember.
            cache[fun] = fun(self)
        return cache[fun]

    def cache_activate(proc):
        """Activate cache. Expects a Process instance. Cache will be
        stored as a "_cache" instance attribute."""
        proc._cache = {}

    def cache_deactivate(proc):
        """Deactivate and clear cache."""
        if hasattr(proc, '_cache'):
            del proc._cache

    wrapper.cache_activate = cache_activate
    wrapper.cache_deactivate = cache_deactivate
    return wrapper
def isfile_strict(path):
    """Same as os.path.isfile() but does not swallow EACCES / EPERM
    exceptions, see:
    http://mail.python.org/pipermail/python-dev/2012-June/120787.html
    """
    try:
        mode = os.stat(path).st_mode
    except OSError as err:
        if err.errno in (errno.EPERM, errno.EACCES):
            raise
        return False
    return stat.S_ISREG(mode)
def path_exists_strict(path):
    """Same as os.path.exists() but does not swallow EACCES / EPERM
    exceptions, see:
    http://mail.python.org/pipermail/python-dev/2012-June/120787.html
    """
    try:
        os.stat(path)
    except OSError as err:
        if err.errno not in (errno.EPERM, errno.EACCES):
            return False
        raise
    return True
@memoize
def supports_ipv6():
    """Return True if IPv6 is supported on this platform."""
    # Compile-time support (socket.has_ipv6 / AF_INET6) is not enough; a
    # runtime bind to the IPv6 loopback is also attempted because the
    # kernel may have IPv6 disabled even when Python was built with it.
    if not socket.has_ipv6 or AF_INET6 is None:
        return False
    try:
        sock = socket.socket(AF_INET6, socket.SOCK_STREAM)
        with contextlib.closing(sock):
            sock.bind(("::1", 0))
        return True
    except socket.error:
        return False
def parse_environ_block(data):
    """Parse a C environ block of environment variables into a dictionary."""
    # The block is usually raw data from the target process. It might contain
    # trailing garbage and lines that do not look like assignments.
    ret = {}
    pos = 0

    # localize global variable to speed up access.
    uppercase_keys = WINDOWS
    while True:
        end = data.find("\0", pos)
        # nul byte at the beginning or double nul byte means finish
        if end <= pos:
            break
        sep = data.find("=", pos, end)
        # there might not be an equals sign
        if sep > pos:
            key = data[pos:sep]
            # Windows expects environment variables to be uppercase only
            if uppercase_keys:
                key = key.upper()
            ret[key] = data[sep + 1:end]
        pos = end + 1

    return ret
def sockfam_to_enum(num):
    """Convert a numeric socket family value to an IntEnum member.
    If it's not a known member, return the numeric value itself.
    """
    if enum is None:
        return num
    try:  # pragma: no cover
        return socket.AddressFamily(num)
    except ValueError:
        return num
def socktype_to_enum(num):
    """Convert a numeric socket type value to an IntEnum member.
    If it's not a known member, return the numeric value itself.
    """
    if enum is None:
        return num
    try:  # pragma: no cover
        return socket.SocketKind(num)
    except ValueError:
        return num
def conn_to_ntuple(fd, fam, type_, laddr, raddr, status, status_map, pid=None):
    """Convert a raw connection tuple to a proper ntuple.

    Returns an sconn namedtuple when `pid` is given (system-wide API),
    else a pconn (per-process API).
    """
    if fam in (socket.AF_INET, AF_INET6):
        # Wrap raw (ip, port) pairs into the addr namedtuple.
        if laddr:
            laddr = addr(*laddr)
        if raddr:
            raddr = addr(*raddr)
    if type_ == socket.SOCK_STREAM and fam in (AF_INET, AF_INET6):
        # Only inet TCP sockets have meaningful states; map the raw
        # platform value through status_map.
        status = status_map.get(status, CONN_NONE)
    else:
        status = CONN_NONE  # ignore whatever C returned to us
    fam = sockfam_to_enum(fam)
    type_ = socktype_to_enum(type_)
    if pid is None:
        return pconn(fd, fam, type_, laddr, raddr, status)
    else:
        return sconn(fd, fam, type_, laddr, raddr, status, pid)
def deprecated_method(replacement):
    """Decorator marking a method as deprecated.

    *replacement* is the name of the method which will be called instead.
    """
    def decorate(fun):
        msg = "%s() is deprecated and will be removed; use %s() instead" % (
            fun.__name__, replacement)
        # Give undocumented deprecated methods the warning text as docstring.
        if fun.__doc__ is None:
            fun.__doc__ = msg

        @functools.wraps(fun)
        def wrapper(self, *args, **kwargs):
            warnings.warn(msg, category=DeprecationWarning, stacklevel=2)
            # Delegate to the replacement method on the same instance.
            return getattr(self, replacement)(*args, **kwargs)

        return wrapper
    return decorate
class _WrapNumbers:
"""Watches numbers so that they don't overflow and wrap
(reset to zero).
"""
def __init__(self):
self.lock = threading.Lock()
self.cache = {}
self.reminders = {}
self.reminder_keys = {}
def _add_dict(self, input_dict, name):
assert name not in self.cache
assert name not in self.reminders
assert name not in self.reminder_keys
self.cache[name] = input_dict
self.reminders[name] = defaultdict(int)
self.reminder_keys[name] = defaultdict(set)
def _remove_dead_reminders(self, input_dict, name):
"""In case the number of keys changed between calls (e.g. a
disk disappears) this removes the entry from self.reminders.
"""
old_dict = self.cache[name]
gone_keys = set(old_dict.keys()) - set(input_dict.keys())
for gone_key in gone_keys:
for remkey in self.reminder_keys[name][gone_key]:
del self.reminders[name][remkey]
del self.reminder_keys[name][gone_key]
def run(self, input_dict, name):
"""Cache dict and sum numbers which overflow and wrap.
Return an updated copy of `input_dict`
"""
if name not in self.cache:
# This was the first call.
self._add_dict(input_dict, name)
return input_dict
self._remove_dead_reminders(input_dict, name)
old_dict = self.cache[name]
new_dict = {}
for key in input_dict.keys():
input_tuple = input_dict[key]
try:
old_tuple = old_dict[key]
except KeyError:
# The input dict has a new key (e.g. a new disk or NIC)
# which didn't exist in the previous call.
new_dict[key] = input_tuple
continue
bits = []
for i in range(len(input_tuple)):
input_value = input_tuple[i]
old_value = old_tuple[i]
remkey = (key, i)
if input_value < old_value:
# it wrapped!
self.reminders[name][remkey] += old_value
self.reminder_keys[name][key].add(remkey)
bits.append(input_value + self.reminders[name][remkey])
new_dict[key] = tuple(bits)
self.cache[name] = input_dict
return new_dict
def cache_clear(self, name=None):
"""Clear the internal cache, optionally only for function 'name'."""
with self.lock:
if name is None:
self.cache.clear()
self.reminders.clear()
self.reminder_keys.clear()
else:
self.cache.pop(name, None)
self.reminders.pop(name, None)
self.reminder_keys.pop(name, None)
def cache_info(self):
"""Return internal cache dicts as a tuple of 3 elements."""
with self.lock:
return (self.cache, self.reminders, self.reminder_keys)
def wrap_numbers(input_dict, name):
    """Given an `input_dict` and a function `name`, adjust the numbers
    which "wrap" (restart from zero) across different calls by adding
    "old value" to "new value" and return an updated dict.
    """
    # Serialize access: the shared _WrapNumbers state below is not
    # safe for concurrent mutation.
    with _wn.lock:
        return _wn.run(input_dict, name)
# Module-level singleton holding the wrap-tracking state for all callers.
_wn = _WrapNumbers()
# Expose cache management on the function itself (lru_cache-style API).
wrap_numbers.cache_clear = _wn.cache_clear
wrap_numbers.cache_info = _wn.cache_info
def open_binary(fname, **kwargs):
    """Open *fname* for reading in binary mode, forwarding extra kwargs."""
    return open(fname, "rb", **kwargs)
def open_text(fname, **kwargs):
    """On Python 3 opens a file in text mode by using fs encoding and
    a proper en/decoding errors handler.
    On Python 2 this is just an alias for open(name, 'rt').
    """
    if PY3:
        # Defaults only -- explicit caller kwargs always win. See:
        # https://github.com/giampaolo/psutil/issues/675
        # https://github.com/giampaolo/psutil/pull/733
        for opt, value in (('encoding', ENCODING), ('errors', ENCODING_ERRS)):
            kwargs.setdefault(opt, value)
    return open(fname, "rt", **kwargs)
def bytes2human(n, format="%(value).1f%(symbol)s"):
    """Used by various scripts. See:
    http://goo.gl/zeJZl
    >>> bytes2human(10000)
    '9.8K'
    >>> bytes2human(100001221)
    '95.4M'
    """
    symbols = ('B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
    # Binary thresholds: K=2**10, M=2**20, ... one per symbol after 'B'.
    thresholds = {sym: 1 << (exp + 1) * 10 for exp, sym in enumerate(symbols[1:])}
    # Walk from the largest unit down and use the first one that fits.
    for symbol in reversed(symbols[1:]):
        if n >= thresholds[symbol]:
            value = float(n) / thresholds[symbol]
            return format % dict(symbol=symbol, value=value)
    return format % dict(symbol=symbols[0], value=n)
def get_procfs_path():
    """Return updated psutil.PROCFS_PATH constant."""
    # Looked up through sys.modules (not a cached import) so that user
    # reassignments of psutil.PROCFS_PATH after import are honored.
    return sys.modules['psutil'].PROCFS_PATH
if PY3:
    def decode(s):
        # Decode bytes to str using the module-configured filesystem
        # encoding and error handler.
        return s.decode(encoding=ENCODING, errors=ENCODING_ERRS)
else:
    def decode(s):
        # Python 2: strings are already bytes; return unchanged.
        return s
| 31.067485
| 79
| 0.598094
|
4a006c81b5d473c3b9054a4c39ff70d833933463
| 7,336
|
py
|
Python
|
dltk/models/gan/dcgan.py
|
mseitzer/DLTK
|
3237aa6c7ed63aa177ca90eafcc076d144155a34
|
[
"Apache-2.0"
] | 17
|
2019-03-24T08:36:56.000Z
|
2021-12-28T11:42:56.000Z
|
dltk/models/gan/dcgan.py
|
mseitzer/DLTK
|
3237aa6c7ed63aa177ca90eafcc076d144155a34
|
[
"Apache-2.0"
] | null | null | null |
dltk/models/gan/dcgan.py
|
mseitzer/DLTK
|
3237aa6c7ed63aa177ca90eafcc076d144155a34
|
[
"Apache-2.0"
] | 6
|
2019-05-19T10:37:18.000Z
|
2021-12-04T05:13:01.000Z
|
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
import tensorflow as tf
import numpy as np
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.training import moving_averages
from dltk.core.modules import *
class DCGAN(AbstractModule):
    """Deep Convolutional Generative Adversarial Network (DCGAN).

    Builds a generator/discriminator pair with a configurable number of
    layers, filters and strides.
    (Fixed: the previous docstring said "Convolutional Autoencoder",
    a copy-paste leftover.)
    """

    def __init__(self, discriminator_filters=(64, 128, 256, 512), generator_filters=(512, 256, 128, 64, 1),
                 discriminator_strides=((1, 1, 1), (2, 2, 2), (1, 1, 1), (2, 2, 2)),
                 generator_strides=((7, 7, 7), (2, 2, 2), (2, 2, 2)), relu_leakiness=0.01,
                 generator_activation=tf.identity, name='dcgan'):
        """Deep Convolutional Generative Adversarial Network

        Parameters
        ----------
        discriminator_filters : list or tuple
            list of filters used for the discriminator
        generator_filters : list or tuple
            list of filters used for the generator
        discriminator_strides : list or tuple
            list of strides used for the discriminator
        generator_strides : list or tuple
            list of strides used for the generator
        relu_leakiness : float
            leakiness of the relus used in the discriminator
        generator_activation : function
            function to be used as activation for the generator
        name : string
            name of the network used for scoping
        """
        self.discriminator_filters = discriminator_filters
        self.discriminator_strides = discriminator_strides
        # Fixed: a redundant duplicate assignment of discriminator_strides
        # was removed here (it had no effect).
        self.generator_filters = generator_filters
        self.generator_strides = generator_strides
        self.relu_leakiness = relu_leakiness
        self.generator_activation = generator_activation
        # Set lazily on the first _build() call from the sample's channels.
        self.in_filter = None
        assert len(discriminator_filters) == len(discriminator_strides)
        super(DCGAN, self).__init__(name)

    class Discriminator(AbstractModule):
        def __init__(self, filters, strides, relu_leakiness, name):
            """Constructs the discriminator of a DCGAN

            Parameters
            ----------
            filters : list or tuple
                filters for convolutional layers
            strides : list or tuple
                strides to be used for convolutions
            relu_leakiness : float
                leakiness of relu nonlinearity
            name : string
                name of the network
            """
            self.filters = filters
            self.strides = strides
            self.relu_leakiness = relu_leakiness
            self.in_filter = None
            assert len(strides) == len(filters)
            super(DCGAN.Discriminator, self).__init__(name)

        def _build(self, x, is_training=True):
            # Remember and validate the input channel count across builds.
            if self.in_filter is None:
                self.in_filter = x.get_shape().as_list()[-1]
            assert self.in_filter == x.get_shape().as_list()[-1], 'Network was built for a different input shape'
            out = {}
            # Conv -> batch norm -> leaky ReLU stack.
            # NOTE(review): only the first len(filters) - 1 filter/stride
            # pairs are used; the last pair is unused because the final
            # layer is a Linear(1) classifier -- confirm this is intended.
            for i in range(len(self.filters) - 1):
                with tf.variable_scope('l{}'.format(i)):
                    x = Convolution(self.filters[i], 4, self.strides[i])(x)
                    x = BatchNorm()(x)
                    x = leaky_relu(x, self.relu_leakiness)
            with tf.variable_scope('final'):
                # Flatten and project to a single real/fake logit.
                x = tf.reshape(x, (tf.shape(x)[0], np.prod(x.get_shape().as_list()[1:])))
                x = Linear(1)(x)
                out['logits'] = x
                x = tf.nn.sigmoid(x)
                out['probs'] = x
                out['pred'] = tf.greater(x, 0.5)
            return out

    class Generator(AbstractModule):
        def __init__(self, filters, strides, output_activation, name):
            """Constructs the generator of a DCGAN
            (Fixed: the previous docstring said "discriminator".)

            Parameters
            ----------
            filters : list or tuple
                filters for transposed convolutional layers
            strides : list or tuple
                strides to be used for transposed convolutions
            output_activation : function
                activation applied to the final generated sample
            name : string
                name of the network
            """
            self.filters = filters
            self.strides = strides
            self.in_filter = None
            self.output_activation = output_activation
            assert len(strides) == len(filters)
            super(DCGAN.Generator, self).__init__(name)

        def _build(self, x, is_training=True):
            # Remember and validate the latent channel count across builds.
            if self.in_filter is None:
                self.in_filter = x.get_shape().as_list()[-1]
            assert self.in_filter == x.get_shape().as_list()[-1], 'Network was built for a different input shape'
            # Reshape the flat noise vector to a [batch, 1, ..., 1, C]
            # "image" so transposed convolutions can upsample it.
            x = tf.reshape(x, [tf.shape(x)[0]] + [1, ] * len(self.strides[0]) + [self.in_filter])
            out = {}
            # Transposed conv -> batch norm -> ReLU stack.
            for i in range(len(self.filters) - 1):
                with tf.variable_scope('l{}'.format(i)):
                    x = TransposedConvolution(self.filters[i], strides=self.strides[i])(x)
                    x = BatchNorm()(x)
                    x = tf.nn.relu(x)
            with tf.variable_scope('final'):
                # Last layer: no batch norm, custom output activation.
                x = TransposedConvolution(self.filters[-1], strides=self.strides[-1])(x)
                x = self.output_activation(x)
                out['gen'] = x
            return out

    def _build(self, noise, samples, is_training=True):
        """Constructs a DCGAN

        Parameters
        ----------
        noise : tf.Tensor
            noise tensor for the generator to generate fake samples
        samples : tf.Tensor
            real samples used by the discriminator
        is_training : bool
            flag to specify whether this is training - passed to batch normalization

        Returns
        -------
        dict
            output dictionary containing:
                - `gen` - generator output dictionary
                    - `gen` - generated sample
                - `disc_gen` - discriminator output dictionary for generated sample
                - `disc_sample` - discriminator output dictionary for real sample
                - `d_loss` - discriminator loss
                - `g_loss` - generator loss
        """
        if self.in_filter is None:
            self.in_filter = samples.get_shape().as_list()[-1]
        assert self.in_filter == samples.get_shape().as_list()[-1], 'Network was built for a different input shape'
        assert self.in_filter == self.generator_filters[-1], 'Generator was built for a different sample shape'
        out = {}
        self.disc = self.Discriminator(self.discriminator_filters, self.discriminator_strides, self.relu_leakiness,
                                       'disc')
        self.gen = self.Generator(self.generator_filters, self.generator_strides, self.generator_activation, 'gen')
        out['gen'] = self.gen(noise)
        # The same discriminator instance scores fake and real samples
        # (shared weights via variable scoping).
        out['disc_gen'] = self.disc(out['gen']['gen'])
        out['disc_sample'] = self.disc(samples)
        # Discriminator: binary cross-entropy on real vs generated probs.
        out['d_loss'] = -(tf.reduce_mean(tf.log(out['disc_sample']['probs']))
                          + tf.reduce_mean(tf.log(1. - out['disc_gen']['probs'])))
        # Generator: non-saturating loss -log D(G(z)).
        out['g_loss'] = -tf.reduce_mean(tf.log(out['disc_gen']['probs']))
        return out
| 39.654054
| 115
| 0.580425
|
4a006ca74b665bd83ac179e089d4d29e1785346b
| 6,725
|
py
|
Python
|
webwhatsapi/wapi_js_wrapper.py
|
Phlop/WebWhatsapp-Wrapper
|
77e01d06a13e74f64fe2c2618ee7bdf6972ca1f1
|
[
"MIT"
] | null | null | null |
webwhatsapi/wapi_js_wrapper.py
|
Phlop/WebWhatsapp-Wrapper
|
77e01d06a13e74f64fe2c2618ee7bdf6972ca1f1
|
[
"MIT"
] | null | null | null |
webwhatsapi/wapi_js_wrapper.py
|
Phlop/WebWhatsapp-Wrapper
|
77e01d06a13e74f64fe2c2618ee7bdf6972ca1f1
|
[
"MIT"
] | null | null | null |
import os
import time
import collections
import numpy as np
from selenium.common.exceptions import WebDriverException, JavascriptException
from six import string_types
from threading import Thread
from .objects.message import factory_message
from selenium.common.exceptions import TimeoutException
class JsException(Exception):
    """Raised when the injected WAPI JavaScript reports an error."""

    def __init__(self, message=None):
        # Fixed: previously called super(Exception, self), which starts the
        # MRO lookup *after* Exception; route through this class instead.
        super(JsException, self).__init__(message)
class WapiPhoneNotConnectedException(Exception):
    """Raised when the paired phone is not connected to the Internet."""

    def __init__(self, message=None):
        # Fixed: previously called super(Exception, self), which starts the
        # MRO lookup *after* Exception; route through this class instead.
        super(WapiPhoneNotConnectedException, self).__init__(message)
class WapiJsWrapper(object):
    """
    Wraps JS functions in window.WAPI for easier use from python
    """
    def __init__(self, driver, wapi_driver):
        # driver: selenium WebDriver; wapi_driver: owning high-level driver.
        self.driver = driver
        self.wapi_driver = wapi_driver
        # Lazily populated by __dir__ once wapi.js has been injected.
        self.available_functions = None
        # Starts new messages observable thread.
        self.new_messages_observable = NewMessagesObservable(self, wapi_driver, driver)
        self.new_messages_observable.start()
    def __getattr__(self, item):
        """
        Finds functions in window.WAPI
        :param item: Function name
        :return: Callable function object
        :rtype: JsFunction
        """
        # dir(self) routes through __dir__ below, which injects wapi.js
        # into the page on first use.
        wapi_functions = dir(self)
        if item not in wapi_functions:
            raise AttributeError("Function {0} doesn't exist".format(item))
        return JsFunction(item, self.driver, self)
    def __dir__(self):
        """
        Load wapi.js and returns its functions
        :return: List of functions in window.WAPI
        """
        # Cached after the first successful injection.
        if self.available_functions is not None:
            return self.available_functions
        """Sleep wait until WhatsApp loads and creates webpack objects"""
        # NOTE(review): fixed 5s sleep assumes the page is ready by then --
        # an explicit wait on the chat icon would be more robust.
        time.sleep(5)
        try:
            script_path = os.path.dirname(os.path.abspath(__file__))
        except NameError:
            # __file__ is undefined in some embedded/frozen environments.
            script_path = os.getcwd()
        result = self.driver.execute_script(
            "if (document.querySelector('*[data-icon=chat]') !== null) { return true } else { return false }")  # noqa E501
        if result:
            # Inject wapi.js, then read back window.WAPI's function names.
            with open(os.path.join(script_path, "js", "wapi.js"), "r") as script:
                self.driver.execute_script(script.read())
            result = self.driver.execute_script("return window.WAPI")
            if result:
                self.available_functions = result.keys()
                return self.available_functions
            else:
                return []
        # NOTE(review): when the chat icon is absent this falls through and
        # implicitly returns None (not a list) -- confirm callers tolerate it.
    def quit(self):
        # Stop the background message-polling thread.
        self.new_messages_observable.stop()
class JsArg(object):
    """
    Represents a JS function argument
    """

    def __init__(self, obj):
        """
        Constructor
        :param obj: Python object to represent
        """
        self.obj = obj

    def __str__(self):
        """
        Casts self.obj from python type to valid JS literal
        :return: JS literal represented in a string
        """
        value = self.obj
        if isinstance(value, string_types):
            # repr() supplies the quoting/escaping of a JS string literal.
            return repr(str(value))
        if isinstance(value, bool):
            # JS booleans are lowercase ('true'/'false').
            return str(value).lower()
        return str(value)
class JsFunction(object):
    """
    Callable object represents functions in window.WAPI
    """

    def __init__(self, function_name, driver, wapi_wrapper):
        self.driver = driver
        self.function_name = function_name
        self.wapi_wrapper = wapi_wrapper
        # Guard so a failed call is re-attempted at most once after
        # re-injecting wapi.js.
        self.is_a_retry = False

    def __call__(self, *args, **kwargs):
        # Selenium's execute_async_script passes a callback function that
        # should be called when the JS operation is done.
        # It is passed to the WAPI function using arguments[0].
        if len(args):
            command = "return WAPI.{0}({1}, arguments[0])" \
                .format(self.function_name, ",".join([str(JsArg(arg)) for arg in args]))
        else:
            command = "return WAPI.{0}(arguments[0])".format(self.function_name)
        try:
            timeOut = 5000
            if self.function_name == 'downloadFile':
                # Old collections need longer than the normal 30.
                timeOut = 60
            # NOTE(review): set_script_timeout() takes seconds; the default
            # of 5000 looks like it was meant to be 5 -- confirm the units.
            self.driver.set_script_timeout(timeOut)
            return self.driver.execute_async_script(command)
        except JavascriptException as e:
            if 'WAPI is not defined' in e.msg and self.is_a_retry is not True:
                # wapi.js is gone (e.g. after a page reload): force
                # re-injection and retry the call exactly once.
                self.wapi_wrapper.available_functions = None
                retry_command = getattr(self.wapi_wrapper, self.function_name)
                retry_command.is_a_retry = True
                # Fixed: the retried call's result was previously discarded,
                # so every retried invocation returned None.
                return retry_command(*args, **kwargs)
            else:
                raise JsException(
                    "Error in function {0} ({1}). Command: {2}".format(self.function_name, e.msg, command))
        except WebDriverException as e:
            if e.msg == 'Timed out':
                raise WapiPhoneNotConnectedException("Phone not connected to Internet")
            raise JsException("Error in function {0} ({1}). Command: {2}".format(self.function_name, e.msg, command))
        except TimeoutException:
            raise TimeoutException('Timeout Error')
class NewMessagesObservable(Thread):
    """Daemon thread that polls WAPI for buffered new messages every two
    seconds and fans them out to subscribed observers."""

    def __init__(self, wapi_js_wrapper, wapi_driver, webdriver):
        Thread.__init__(self)
        self.daemon = True
        self.wapi_js_wrapper = wapi_js_wrapper
        self.wapi_driver = wapi_driver
        self.webdriver = webdriver
        self.observers = []
        self.running = False

    def run(self):
        self.running = True
        while self.running:
            try:
                new_js_messages = self.wapi_js_wrapper.getBufferedNewMessages()
                # Fixed: `collections.Sequence` was removed in Python 3.10;
                # on such interpreters the old code raised AttributeError on
                # every poll, which the except below silently swallowed, so
                # no message was ever delivered. Use collections.abc with a
                # fallback for interpreters lacking it.
                sequence_type = getattr(collections, "abc", collections).Sequence
                if isinstance(new_js_messages, (sequence_type, np.ndarray)) and len(new_js_messages) > 0:
                    new_messages = [factory_message(js_message, self.wapi_driver)
                                    for js_message in new_js_messages]
                    self._inform_all(new_messages)
            except Exception:  # noqa F841
                # Deliberate best-effort polling: keep the thread alive even
                # when the page/driver is in a transient bad state.
                pass
            time.sleep(2)

    def stop(self):
        self.running = False

    def subscribe(self, observer):
        # Observers must expose on_message_received(new_messages).
        inform_method = getattr(observer, "on_message_received", None)
        if not callable(inform_method):
            raise Exception('You need to inform an observable that implements \'on_message_received(new_messages)\'.')
        self.observers.append(observer)

    def unsubscribe(self, observer):
        self.observers.remove(observer)

    def _inform_all(self, new_messages):
        for observer in self.observers:
            observer.on_message_received(new_messages)
| 33.79397
| 123
| 0.620669
|
4a006ce519bdd6b8549c77c33d21c81435a00d49
| 3,668
|
py
|
Python
|
azure-mgmt-network/azure/mgmt/network/v2016_09_01/models/network_security_group.py
|
v-Ajnava/azure-sdk-for-python
|
a1f6f80eb5869c5b710e8bfb66146546697e2a6f
|
[
"MIT"
] | 4
|
2016-06-17T23:25:29.000Z
|
2022-03-30T22:37:45.000Z
|
azure/mgmt/network/v2016_09_01/models/network_security_group.py
|
EnjoyLifeFund/Debian_py36_packages
|
1985d4c73fabd5f08f54b922e73a9306e09c77a5
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | 2
|
2016-09-30T21:40:24.000Z
|
2017-11-10T18:16:18.000Z
|
azure/mgmt/network/v2016_09_01/models/network_security_group.py
|
EnjoyLifeFund/Debian_py36_packages
|
1985d4c73fabd5f08f54b922e73a9306e09c77a5
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | 3
|
2016-05-03T20:49:46.000Z
|
2017-10-05T21:05:27.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
class NetworkSecurityGroup(Resource):
    """NetworkSecurityGroup resource.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    :param id: Resource ID.
    :type id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param location: Resource location.
    :type location: str
    :param tags: Resource tags.
    :type tags: dict[str, str]
    :param security_rules: A collection of security rules of the network
     security group.
    :type security_rules:
     list[~azure.mgmt.network.v2016_09_01.models.SecurityRule]
    :param default_security_rules: The default security rules of network
     security group.
    :type default_security_rules:
     list[~azure.mgmt.network.v2016_09_01.models.SecurityRule]
    :ivar network_interfaces: A collection of references to network
     interfaces.
    :vartype network_interfaces:
     list[~azure.mgmt.network.v2016_09_01.models.NetworkInterface]
    :ivar subnets: A collection of references to subnets.
    :vartype subnets: list[~azure.mgmt.network.v2016_09_01.models.Subnet]
    :param resource_guid: The resource GUID property of the network security
     group resource.
    :type resource_guid: str
    :param provisioning_state: The provisioning state of the public IP
     resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
    :type provisioning_state: str
    :param etag: A unique read-only string that changes whenever the resource
     is updated.
    :type etag: str
    """
    # Autogenerated (AutoRest) model: the maps below drive msrest
    # (de)serialization -- their key strings must match the wire format.
    # Fields marked readonly are populated by the server only.
    _validation = {
        'name': {'readonly': True},
        'type': {'readonly': True},
        'network_interfaces': {'readonly': True},
        'subnets': {'readonly': True},
    }
    # Maps Python attribute -> JSON path and msrest type token.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'security_rules': {'key': 'properties.securityRules', 'type': '[SecurityRule]'},
        'default_security_rules': {'key': 'properties.defaultSecurityRules', 'type': '[SecurityRule]'},
        'network_interfaces': {'key': 'properties.networkInterfaces', 'type': '[NetworkInterface]'},
        'subnets': {'key': 'properties.subnets', 'type': '[Subnet]'},
        'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
    }
    def __init__(self, id=None, location=None, tags=None, security_rules=None, default_security_rules=None, resource_guid=None, provisioning_state=None, etag=None):
        super(NetworkSecurityGroup, self).__init__(id=id, location=location, tags=tags)
        self.security_rules = security_rules
        self.default_security_rules = default_security_rules
        # Server-populated (readonly) attributes are always None client-side.
        self.network_interfaces = None
        self.subnets = None
        self.resource_guid = resource_guid
        self.provisioning_state = provisioning_state
        self.etag = etag
| 42.16092
| 164
| 0.643948
|
4a006d86e8843b7f53089cd2bbc58e6bf0bead27
| 3,115
|
py
|
Python
|
samples/purchase/purchase.py
|
VictorCoCo/flask-ask
|
526b3a272fdd6e1438e2191c5ab08ff20853817d
|
[
"Apache-2.0"
] | null | null | null |
samples/purchase/purchase.py
|
VictorCoCo/flask-ask
|
526b3a272fdd6e1438e2191c5ab08ff20853817d
|
[
"Apache-2.0"
] | null | null | null |
samples/purchase/purchase.py
|
VictorCoCo/flask-ask
|
526b3a272fdd6e1438e2191c5ab08ff20853817d
|
[
"Apache-2.0"
] | null | null | null |
import logging
import os
import requests
from flask import Flask, json, render_template
from flask_ask import (
Ask,
request,
session,
question,
statement,
context,
buy,
upsell,
refund,
logger,
)
from model import Product
app = Flask(__name__)
# Mount the Alexa skill endpoint at the web root.
ask = Ask(app, "/")
logging.getLogger("flask_ask").setLevel(logging.DEBUG)
# Session-attribute key under which the pending product id is stored.
PRODUCT_KEY = "PRODUCT"
@ask.on_purchase_completed(
    mapping={"payload": "payload", "name": "name", "status": "status", "token": "token"}
)
def completed(payload, name, status, token):
    """Handle the response Alexa sends after a purchase flow finishes."""
    products = Product(context.System.apiAccessToken)
    logger.info("on-purchase-completed {}".format(request))
    logger.info("payload: {} {}".format(payload.purchaseResult, payload.productId))
    logger.info("name: {}".format(name))
    logger.info("token: {}".format(token))
    # NOTE(review): status.code is compared to the *string* "200" below but
    # to the int 200 here -- confirm which type the SDK actually delivers.
    logger.info("status: {}".format(status.code == 200))
    product_name = products.productName(payload.productId)
    # Fixed: the format string was missing its placeholder, so the product
    # name was never actually logged.
    logger.info("Product name: {}".format(product_name))
    if status.code == "200" and ("ACCEPTED" in payload.purchaseResult):
        return question("To listen it just say - play {} ".format(product_name))
    else:
        return question("Do you want to buy another product?")
@ask.launch
def launch():
    """Welcome the user and list the purchasable products."""
    products = Product(context.System.apiAccessToken)
    welcome_text = render_template("welcome", products=products.list())
    reprompt_text = render_template("welcome_reprompt")
    # Ask, re-prompt on silence, and mirror the text on a home card.
    response = question(welcome_text).reprompt(reprompt_text)
    return response.simple_card("Welcome", welcome_text)
@ask.intent("BuySkillItemIntent", mapping={"product_name": "ProductName"})
def buy_intent(product_name):
products = Product(context.System.apiAccessToken)
logger.info("PRODUCT: {}".format(product_name))
buy_card = render_template("buy_card", product=product_name)
productId = products.productId(product_name)
if productId is not None:
session.attributes[PRODUCT_KEY] = productId
else:
return statement("I didn't find a product {}".format(product_name))
raise NotImplementedError()
return buy(productId).simple_card("Welcome", question_text)
# return upsell(product,'get this great product')
@ask.intent("RefundSkillItemIntent", mapping={"product_name": "ProductName"})
def refund_intent(product_name):
refund_card = render_template("refund_card")
logger.info("PRODUCT: {}".format(product_name))
products = Product(context.System.apiAccessToken)
productId = products.productId(product_name)
if productId is not None:
session.attributes[PRODUCT_KEY] = productId
else:
raise NotImplementedError()
return refund(productId)
@ask.intent("AMAZON.FallbackIntent")
def fallback_intent():
return statement("FallbackIntent")
@ask.session_ended
def session_ended():
    # Alexa expects an empty JSON body with HTTP 200 on session end.
    return "{}", 200
if __name__ == "__main__":
if "ASK_VERIFY_REQUESTS" in os.environ:
verify = str(os.environ.get("ASK_VERIFY_REQUESTS", "")).lower()
if verify == "false":
app.config["ASK_VERIFY_REQUESTS"] = False
app.run(debug=True)
| 29.666667
| 88
| 0.694703
|
4a006e80f0b28a6d5a227d1b851541352a1598e6
| 853
|
py
|
Python
|
storeApi/migrations/0017_auto_20151006_1511.py
|
Arlefreak/MaloBarba
|
7c3cc0238d6bf0717fb072eac3812cd79607824a
|
[
"MIT"
] | null | null | null |
storeApi/migrations/0017_auto_20151006_1511.py
|
Arlefreak/MaloBarba
|
7c3cc0238d6bf0717fb072eac3812cd79607824a
|
[
"MIT"
] | null | null | null |
storeApi/migrations/0017_auto_20151006_1511.py
|
Arlefreak/MaloBarba
|
7c3cc0238d6bf0717fb072eac3812cd79607824a
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated migration: renames the former `user` relation to
    # `client` on three models and makes Order.sku a unique,
    # non-editable slug.
    dependencies = [
        ('storeApi', '0016_auto_20151006_1452'),
    ]
    operations = [
        migrations.RenameField(
            model_name='adress',
            old_name='user',
            new_name='client',
        ),
        migrations.RenameField(
            model_name='order',
            old_name='user',
            new_name='client',
        ),
        migrations.RenameField(
            model_name='shoppingcartproduct',
            old_name='user',
            new_name='client',
        ),
        migrations.AlterField(
            model_name='order',
            name='sku',
            field=models.SlugField(unique=True, editable=False, verbose_name='SKU'),
        ),
    ]
| 24.371429
| 84
| 0.547479
|
4a006fcca8684e17717a99a7b1ff332b8840de80
| 5,386
|
py
|
Python
|
transformation.py
|
hendrikp/Deformation-Transfer-for-Triangle-Meshes
|
fbb3b73c78cf3df529759f7497cc1894be5754bb
|
[
"MIT"
] | 3
|
2022-01-09T14:43:34.000Z
|
2022-01-13T01:45:27.000Z
|
transformation.py
|
hendrikp/Deformation-Transfer-for-Triangle-Meshes
|
fbb3b73c78cf3df529759f7497cc1894be5754bb
|
[
"MIT"
] | null | null | null |
transformation.py
|
hendrikp/Deformation-Transfer-for-Triangle-Meshes
|
fbb3b73c78cf3df529759f7497cc1894be5754bb
|
[
"MIT"
] | null | null | null |
import numpy as np
import tqdm
from scipy import sparse
import meshlib
from config import ConfigFile
from correspondence import get_correspondence, compute_adjacent_by_edges, TransformMatrix
# PHYTHONPATH is extended by C:\git
from FaceSpeechProcessing import facialdata as fd
# static functions for usage of facial data
pvf = fd.FacialData()
class Transformation:
    """Deformation transfer: maps per-triangle deformations of a source mesh
    onto a target mesh using a precomputed triangle correspondence."""

    def __init__(
            self,
            source: meshlib.Mesh,
            target: meshlib.Mesh,
            mapping: np.ndarray,
            smoothness=1.0,
            recenterFaceData=False  # we use this for the facial data
    ):
        self.source = source.to_third_dimension(copy=False)
        self.target = target.to_third_dimension(copy=False)
        # mapping: (n, 2) array of (source_face, target_face) index pairs.
        self.mapping = mapping
        self.Wm = 1.0
        self.Ws = max(0.00000001, smoothness)
        self._Am = self._compute_mapping_matrix(self.target, mapping)
        self._As, self._Bs = self._compute_missing_smoothness(self.target, mapping)
        self.recenterFaceData = recenterFaceData

    @classmethod
    def _compute_mapping_matrix(cls, target: meshlib.Mesh, mapping: np.ndarray):
        """Build the sparse system mapping target vertices to the per-face
        transforms of the mapped target faces."""
        target = target.to_fourth_dimension(copy=False)
        inv_target_span = np.linalg.inv(target.span)
        Am = TransformMatrix.construct(target.faces[mapping[:, 1]], inv_target_span[mapping[:, 1]],
                                       len(target.vertices), desc="Building Mapping")
        return Am.tocsc()

    @classmethod
    def _compute_missing_smoothness(cls, target: meshlib.Mesh, mapping: np.ndarray):
        """Build the smoothness system (As, Bs) tying every target face that
        has no correspondence to its edge-adjacent neighbours."""
        adjacent = compute_adjacent_by_edges(target)
        target = target.to_fourth_dimension(copy=False)
        inv_target_span = np.linalg.inv(target.span)
        missing = np.setdiff1d(np.arange(len(target.faces)), np.unique(mapping[:, 1]))
        count_adjacent = sum(len(adjacent[m]) for m in missing)
        if count_adjacent == 0:
            # Nothing to smooth: empty system with matching column count.
            return sparse.csc_matrix((0, len(target.vertices)), dtype=float), np.zeros((0, 3))
        size = len(target.vertices)

        def construct(f, inv, face_id):
            # Pair the missing face's transform rows with each neighbour's.
            a = TransformMatrix.expand(f, inv, size).tocsc()
            for adj in adjacent[face_id]:
                yield a, TransformMatrix.expand(target.faces[adj], inv_target_span[adj], size).tocsc()

        # Fixed: `disable=` was previously passed to enumerate() (a
        # TypeError whenever missing faces exist) -- it belongs to tqdm.
        # Fixed: adjacency is now looked up by the actual face id `m`
        # (matching count_adjacent above), not by the position of `m`
        # inside the `missing` array.
        progress = tqdm.tqdm(missing, total=len(missing),
                             desc="Fixing Missing Mapping with Smoothness",
                             disable=ConfigFile.disableVerboseProgress)
        lhs, rhs = zip(*(pair for m in progress
                         for pair in construct(target.faces[m], inv_target_span[m], m)))
        As = (sparse.vstack(lhs) - sparse.vstack(rhs)).tocsc()
        Bs = np.zeros((As.shape[0], 3))
        return As, Bs

    def __call__(self, pose: meshlib.Mesh) -> meshlib.Mesh:
        """Transfer the deformation of `pose` (relative to the source
        reference) onto the target and return the deformed target mesh."""
        # Per-triangle source transformations: Si * V = V~  =>  Si = V~ * V^-1
        s = (pose.span @ np.linalg.inv(self.source.span)).transpose(0, 2, 1)
        # Stack Si for every mapped source face.
        Bm = np.concatenate(s[self.mapping[:, 0]])
        Astack = [self._Am * self.Wm, self._As * self.Ws]
        Bstack = [Bm * self.Wm, self._Bs * self.Ws]
        A: sparse.spmatrix = sparse.vstack(Astack, format="csc")
        A.eliminate_zeros()
        b = np.concatenate(Bstack)
        assert A.shape[0] == b.shape[0]
        assert b.shape[1] == 3
        # Solve the normal equations (A^T A) x = A^T b via sparse LU.
        LU = sparse.linalg.splu((A.T @ A).tocsc())
        x = LU.solve(A.T @ b)
        vertices = x[:len(self.target.vertices)]
        if self.recenterFaceData:
            # For the parquet face data the nose tip (first vertex) is used
            # as origin; the returned average error was unused and dropped.
            pvf.recenterVertices(vertices)
        return meshlib.Mesh(vertices=vertices, faces=self.target.faces)
if __name__ == "__main__":
import render.plot_result as plt_res
import render.plot as plt
#cfg = ConfigFile.load(ConfigFile.Paths.highpoly.horse_camel)
#cfg = ConfigFile.load(ConfigFile.Paths.highpoly.cat_lion)
#cfg = ConfigFile.load(ConfigFile.Paths.lowpoly.catdog)
cfg = ConfigFile.load(ConfigFile.Paths.lowpoly.face)
corr_markers = cfg.markers # List of vertex-tuples (source, target)
identity = False
if identity:
corr_markers = np.ascontiguousarray(np.array((corr_markers[:, 0], corr_markers[:, 0]), dtype=np.int).T)
#########################################################
# Load meshes
original_source = meshlib.Mesh.load(cfg.source.reference)
original_pose = meshlib.Mesh.load(cfg.source.poses[0])
original_target = meshlib.Mesh.load(cfg.target.reference)
if identity:
original_target = meshlib.Mesh.load(cfg.source.reference)
#########################################################
# Load correspondence from cache if possible
mapping = get_correspondence(original_source, original_target, corr_markers, plot=False)
transf = Transformation(original_source, original_target, mapping, recenterFaceData=cfg.source.reference.endswith('parquet') )
result = transf(original_pose)
plt.MeshPlots.plot_correspondence(original_source, original_target, mapping).show(renderer="browser")
plt_res.plot(original_pose, result).show(renderer="browser")
| 38.748201
| 135
| 0.63238
|
4a00754d76cacfc1ee6bb738576ea5333af93d19
| 114
|
py
|
Python
|
month01/all_code/day15/package01/package02/__init__.py
|
chaofan-zheng/tedu-python-demo
|
abe983ddc52690f4726cf42cc6390cba815026d8
|
[
"Apache-2.0"
] | 4
|
2021-01-07T14:25:15.000Z
|
2021-02-01T10:36:10.000Z
|
month01/all_code/day15/package01/package02/__init__.py
|
chaofan-zheng/tedu-python-demo
|
abe983ddc52690f4726cf42cc6390cba815026d8
|
[
"Apache-2.0"
] | null | null | null |
month01/all_code/day15/package01/package02/__init__.py
|
chaofan-zheng/tedu-python-demo
|
abe983ddc52690f4726cf42cc6390cba815026d8
|
[
"Apache-2.0"
] | null | null | null |
import package01.package02.module02
from package01.package02.module02 import func01
def xxx():
    """Demo function: print a fixed message."""
    message = "大傻X"
    print(message)
| 16.285714
| 47
| 0.77193
|
4a007563830483d74f333b76fe6b6b38185f0fd7
| 599
|
py
|
Python
|
aula4/aula4.py
|
jessicsous/Curso_Python
|
94c9b70ec03022b21ac23bf85918aa20ce4cfdd1
|
[
"MIT"
] | 1
|
2021-09-21T01:50:10.000Z
|
2021-09-21T01:50:10.000Z
|
aula4/aula4.py
|
jessicsous/Curso_Python
|
94c9b70ec03022b21ac23bf85918aa20ce4cfdd1
|
[
"MIT"
] | null | null | null |
aula4/aula4.py
|
jessicsous/Curso_Python
|
94c9b70ec03022b21ac23bf85918aa20ce4cfdd1
|
[
"MIT"
] | 1
|
2021-10-04T19:49:04.000Z
|
2021-10-04T19:49:04.000Z
|
'''
Data types
str - string - text 'like this' "like this"
int - integer - 123456 0 -10 -20 -30
float - real/floating point - 10.50 1.5 -10.10 -50.93 0.0
bool - boolean/logical - True/False 10 == 10
'''
print('Luiz', type('Luiz'))  # the type() function returns the value's type
print('10', type('10'))
print(10, type(10))  # the type() function returns the value's type
print(1.5, type(1.5))  # the type() function returns the value's type
print(10 == 10, type( 10 == 10))  # the type() function returns the value's type
print('l' == 'l', type('l' == 'l'))
print(bool())  # evaluates to False
print(bool(0))  # evaluates to False
| 73
| 0.63606
|
4a0075d6d6fdd385431d7f5bc19e270242124169
| 9,583
|
py
|
Python
|
wandb/integration/fastai/__init__.py
|
soumik12345/client
|
31e4c2b143e6c219ea005fe4477e294f383f6888
|
[
"MIT"
] | null | null | null |
wandb/integration/fastai/__init__.py
|
soumik12345/client
|
31e4c2b143e6c219ea005fe4477e294f383f6888
|
[
"MIT"
] | null | null | null |
wandb/integration/fastai/__init__.py
|
soumik12345/client
|
31e4c2b143e6c219ea005fe4477e294f383f6888
|
[
"MIT"
] | null | null | null |
"""
This module hooks fast.ai v1 Learners to Weights & Biases through a callback.
Requested logged data can be configured through the callback constructor.
Examples:
WandbCallback can be used when initializing the Learner::
```
from wandb.fastai import WandbCallback
[...]
learn = Learner(data, ..., callback_fns=WandbCallback)
learn.fit(epochs)
```
Custom parameters can be given using functools.partial::
```
from wandb.fastai import WandbCallback
from functools import partial
[...]
learn = Learner(data, ..., callback_fns=partial(WandbCallback, ...))
learn.fit(epochs)
```
Finally, it is possible to use WandbCallback only when starting
training. In this case it must be instantiated::
```
learn.fit(..., callbacks=WandbCallback(learn))
```
or, with custom parameters::
```
learn.fit(..., callbacks=WandbCallback(learn, ...))
```
"""
from pathlib import Path
import random
import sys
from typing import Any, Optional
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
import fastai # type: ignore[import]
from fastai.callbacks import TrackerCallback # type: ignore[import]
import wandb
try:
import matplotlib # type: ignore[import]
if wandb.wandb_lib.ipython._get_python_type() != "jupyter": # type: ignore[attr-defined]
matplotlib.use("Agg") # non-interactive backend (avoid tkinter issues)
import matplotlib.pyplot as plt # type: ignore[import]
except ImportError:
print("Warning: matplotlib required if logging sample image predictions")
class WandbCallback(TrackerCallback):
    """
    Automatically saves model topology, losses & metrics.
    Optionally logs weights, gradients, sample predictions and best trained model.
    Arguments:
        learn (fastai.basic_train.Learner): the fast.ai learner to hook.
        log (str): "gradients", "parameters", "all", or None. Losses & metrics are always logged.
        save_model (bool): save model at the end of each epoch. It will also load best model at the end of training.
        monitor (str): metric to monitor for saving best model. None uses default TrackerCallback monitor value.
        mode (str): "auto", "min" or "max" to compare "monitor" values and define best model.
        input_type (str): "images" or None. Used to display sample predictions.
        validation_data (list): data used for sample predictions if input_type is set.
        predictions (int): number of predictions to make if input_type is set and validation_data is None.
        seed (int): initialize random generator for sample predictions if input_type is set and validation_data is None.
    """
    # Record if watch has been called previously (even in another instance)
    _watch_called = False
    def __init__(
        self,
        learn: "fastai.basic_train.Learner",
        log: Optional[Literal["gradients", "parameters", "all"]] = "gradients",
        save_model: bool = True,
        monitor: Optional[str] = None,
        mode: Literal["auto", "min", "max"] = "auto",
        input_type: Optional[Literal["images"]] = None,
        validation_data: Optional[list] = None,
        predictions: int = 36,
        seed: int = 12345,
    ) -> None:
        # Check if wandb.init has been called
        if wandb.run is None:
            raise ValueError("You must call wandb.init() before WandbCallback()")
        # Adapted from fast.ai "SaveModelCallback"
        if monitor is None:
            # use default TrackerCallback monitor value
            super().__init__(learn, mode=mode)
        else:
            super().__init__(learn, monitor=monitor, mode=mode)
        self.save_model = save_model
        # Best-model checkpoint lives inside the wandb run directory.
        self.model_path = Path(wandb.run.dir) / "bestmodel.pth"
        self.log = log
        self.input_type = input_type
        self.best = None
        # Select items for sample predictions to see evolution along training
        self.validation_data = validation_data
        if input_type and not self.validation_data:
            wandb_random = random.Random(seed)  # For repeatability
            predictions = min(predictions, len(learn.data.valid_ds))
            indices = wandb_random.sample(range(len(learn.data.valid_ds)), predictions)
            self.validation_data = [learn.data.valid_ds[i] for i in indices]
    def on_train_begin(self, **kwargs: Any) -> None:
        """Call watch method to log model topology, gradients & weights"""
        # Set self.best, method inherited from "TrackerCallback" by "SaveModelCallback"
        super().on_train_begin()
        # Ensure we don't call "watch" multiple times
        if not WandbCallback._watch_called:
            WandbCallback._watch_called = True
            # Logs model topology and optionally gradients and weights
            wandb.watch(self.learn.model, log=self.log)
    def on_epoch_end(
        self, epoch: int, smooth_loss: float, last_metrics: list, **kwargs: Any
    ) -> None:
        """Logs training loss, validation loss and custom metrics & log prediction samples & save model"""
        if self.save_model:
            # Adapted from fast.ai "SaveModelCallback"
            current = self.get_monitor_value()
            if current is not None and self.operator(current, self.best):
                print(
                    "Better model found at epoch {} with {} value: {}.".format(
                        epoch, self.monitor, current
                    )
                )
                self.best = current
                # Save within wandb folder
                with self.model_path.open("wb") as model_file:
                    self.learn.save(model_file)
        # Log sample predictions if learn.predict is available
        if self.validation_data:
            try:
                self._wandb_log_predictions()
            except FastaiError as e:
                wandb.termwarn(e.message)
                self.validation_data = None  # prevent from trying again on next loop
            except Exception as e:
                wandb.termwarn(f"Unable to log prediction samples.\n{e}")
                self.validation_data = None  # prevent from trying again on next loop
        # Log losses & metrics
        # Adapted from fast.ai "CSVLogger"
        logs = {
            name: stat
            for name, stat in list(
                zip(self.learn.recorder.names, [epoch, smooth_loss] + last_metrics)
            )
        }
        wandb.log(logs)
    def on_train_end(self, **kwargs: Any) -> None:
        """Load the best model."""
        if self.save_model:
            # Adapted from fast.ai "SaveModelCallback"
            if self.model_path.is_file():
                with self.model_path.open("rb") as model_file:
                    self.learn.load(model_file, purge=False)
                print(f"Loaded best saved model from {self.model_path}")
    def _wandb_log_predictions(self) -> None:
        """Log prediction samples"""
        pred_log = []
        if self.validation_data is None:
            return
        for x, y in self.validation_data:
            try:
                pred = self.learn.predict(x)
            except Exception:
                raise FastaiError(
                    'Unable to run "predict" method from Learner to log prediction samples.'
                )
            # scalar -> likely to be a category
            # tensor of dim 1 -> likely to be multicategory
            if not pred[1].shape or pred[1].dim() == 1:
                pred_log.append(
                    wandb.Image(
                        x.data,
                        caption=f"Ground Truth: {y}\nPrediction: {pred[0]}",
                    )
                )
            # most vision datasets have a "show" function we can use
            elif hasattr(x, "show"):
                # log input data
                pred_log.append(wandb.Image(x.data, caption="Input data", grouping=3))
                # log label and prediction
                for im, capt in ((pred[0], "Prediction"), (y, "Ground Truth")):
                    # Resize plot to image resolution
                    # from https://stackoverflow.com/a/13714915
                    my_dpi = 100
                    fig = plt.figure(frameon=False, dpi=my_dpi)
                    h, w = x.size
                    fig.set_size_inches(w / my_dpi, h / my_dpi)
                    ax = plt.Axes(fig, [0.0, 0.0, 1.0, 1.0])
                    ax.set_axis_off()
                    fig.add_axes(ax)
                    # Superpose label or prediction to input image
                    x.show(ax=ax, y=im)
                    pred_log.append(wandb.Image(fig, caption=capt))
                    plt.close(fig)
            # likely to be an image
            elif hasattr(y, "shape") and (
                (len(y.shape) == 2) or (len(y.shape) == 3 and y.shape[0] in [1, 3, 4])
            ):
                pred_log.extend(
                    [
                        wandb.Image(x.data, caption="Input data", grouping=3),
                        wandb.Image(pred[0].data, caption="Prediction"),
                        wandb.Image(y.data, caption="Ground Truth"),
                    ]
                )
            # we just log input data
            else:
                pred_log.append(wandb.Image(x.data, caption="Input data"))
        wandb.log({"Prediction Samples": pred_log}, commit=False)
class FastaiError(wandb.Error):
    """Raised when the fast.ai Learner cannot produce prediction samples."""
    pass
| 37.728346
| 120
| 0.584681
|
4a00789b5d7a04e2bbf64574a9eab87dc3d28f4e
| 1,083
|
py
|
Python
|
2020/02 February/dp02212020.py
|
vishrutkmr7/DailyPracticeProblemsDIP
|
d1bfbc75f2024736c22c05385f753a90ddcfa0f5
|
[
"MIT"
] | 5
|
2019-08-06T02:34:41.000Z
|
2022-01-08T03:03:16.000Z
|
2020/02 February/dp02212020.py
|
ourangzeb/DailyPracticeProblemsDIP
|
66c07af88754e5d59b243e3ee9f02db69f7c0a77
|
[
"MIT"
] | 15
|
2021-06-01T14:04:16.000Z
|
2022-03-08T21:17:22.000Z
|
2020/02 February/dp02212020.py
|
ourangzeb/DailyPracticeProblemsDIP
|
66c07af88754e5d59b243e3ee9f02db69f7c0a77
|
[
"MIT"
] | 4
|
2019-09-19T20:00:05.000Z
|
2021-08-16T11:31:51.000Z
|
# This problem was recently asked by Microsoft:
# Given a node in a connected directional graph, create a copy of it.
class Node:
    """A node in a directed graph, holding a value and an adjacency list."""
    def __init__(self, value, adj=None):
        self.value = value
        self.adj = [] if adj is None else adj
        # Per-instance guard set so __repr__ terminates on cyclic graphs.
        self._print_visited = set()
    def __repr__(self):
        """Render reachable neighbors first, then this node as (value, ([adj values]))."""
        if self in self._print_visited:
            return ""  # already being printed higher up the recursion
        self._print_visited.add(self)
        neighbor_text = [f"{neighbor}\n" for neighbor in self.adj]
        self._print_visited.remove(self)
        adjacent_values = [neighbor.value for neighbor in self.adj]
        return "".join(neighbor_text) + f"({self.value}, ({adjacent_values}))"
def deep_copy_graph(graph_node, visited=None):
# Fill this in.
return graph_node
# Build a small cyclic test graph: 1 -> 5 -> 3 <-> 4 <-> 2
n5 = Node(5)
n4 = Node(4)
n3 = Node(3, [n4])
n2 = Node(2)
n1 = Node(1, [n5])
n5.adj = [n3]
n4.adj = [n3, n2]
n2.adj = [n4]
# Copy the graph starting from n1 and print it; a correct deep copy
# reprs identically to the original graph (expected output below).
graph_copy = deep_copy_graph(n1)
print(graph_copy)
# (2, ([4]))
# (4, ([3, 2]))
# (3, ([4]))
# (5, ([3]))
# (1, ([5]))
| 21.66
| 81
| 0.550323
|
4a0079354d509aa7ce90d8191ea8dec1dfe715b7
| 447
|
py
|
Python
|
wrap_callgraph/utils.py
|
theGloves/wrap_callgraph
|
c4a0756ae1270fef95c30bdf544df2d0f68bfe8f
|
[
"MIT"
] | null | null | null |
wrap_callgraph/utils.py
|
theGloves/wrap_callgraph
|
c4a0756ae1270fef95c30bdf544df2d0f68bfe8f
|
[
"MIT"
] | 2
|
2020-09-06T23:00:28.000Z
|
2021-05-08T03:29:54.000Z
|
wrap_callgraph/utils.py
|
theGloves/wrap_callgraph
|
c4a0756ae1270fef95c30bdf544df2d0f68bfe8f
|
[
"MIT"
] | null | null | null |
def get_pkg_name(path):
    """Return the final '/'-separated component of *path* ('' for non-string input)."""
    if not isinstance(path, str):
        return ""
    # rsplit returns the whole string unchanged when no '/' is present.
    return path.rsplit("/", 1)[-1]
def match_pkg(s, pattern):
    """Return True if *pattern* (a string, or any pattern in a list) occurs in *s*."""
    patterns = pattern if isinstance(pattern, list) else [pattern]
    return any(s.find(candidate) != -1 for candidate in patterns)
| 23.526316
| 49
| 0.579418
|
4a007973cf5f84255a90e26297d645e4bfe5ec21
| 16,396
|
py
|
Python
|
flax/nn/recurrent.py
|
jaehlee/flax
|
e01e2bcb012211d48e4c75e78297b8e15d742a37
|
[
"Apache-2.0"
] | 1
|
2021-09-06T12:13:20.000Z
|
2021-09-06T12:13:20.000Z
|
flax/nn/recurrent.py
|
jaehlee/flax
|
e01e2bcb012211d48e4c75e78297b8e15d742a37
|
[
"Apache-2.0"
] | 1
|
2021-08-16T09:16:55.000Z
|
2021-08-16T09:16:55.000Z
|
flax/nn/recurrent.py
|
jaehlee/flax
|
e01e2bcb012211d48e4c75e78297b8e15d742a37
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 The Flax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""DEPRECATION WARNING:
The `flax.nn` module is Deprecated, use `flax.linen` instead.
Learn more and find an upgrade guide at
https://github.com/google/flax/blob/main/flax/linen/README.md"
Recurrent neural network modules.
The RNNCell modules are designed to fit in with the scan function in JAX::
_, initial_params = LSTMCell.init(rng_1, time_series[0])
model = nn.Model(LSTMCell, initial_params)
carry = LSTMCell.initialize_carry(rng_2, (batch_size,), memory_size)
carry, y = jax.lax.scan(model, carry, time_series)
"""
import abc
from . import activation
from . import base
from . import initializers
from . import linear
from jax import numpy as jnp
from jax import random
from jax import lax
import numpy as np
class RNNCellBase(base.Module):
  """DEPRECATION WARNING:
  The `flax.nn` module is Deprecated, use `flax.linen` instead.
  Learn more and find an upgrade guide at
  https://github.com/google/flax/blob/main/flax/linen/README.md"
  RNN cell base class."""
  @staticmethod
  @abc.abstractmethod
  def initialize_carry(rng, batch_dims, size, init_fn=initializers.zeros):
    """initialize the RNN cell carry.
    Args:
      rng: random number generator passed to the init_fn.
      batch_dims: a tuple providing the shape of the batch dimensions.
      size: the size or number of features of the memory.
      init_fn: initializer function for the carry.
    Returns:
      An initialized carry for the given RNN cell.
    """
    # Abstract: each concrete cell builds its own initial state here.
    pass
class LSTMCell(RNNCellBase):
  """DEPRECATION WARNING:
  The `flax.nn` module is Deprecated, use `flax.linen` instead.
  Learn more and find an upgrade guide at
  https://github.com/google/flax/blob/main/flax/linen/README.md"
  LSTM cell."""
  def apply(self, carry, inputs,
            gate_fn=activation.sigmoid, activation_fn=activation.tanh,
            kernel_init=linear.default_kernel_init,
            recurrent_kernel_init=initializers.orthogonal(),
            bias_init=initializers.zeros):
    r"""A long short-term memory (LSTM) cell.
    the mathematical definition of the cell is as follows
    .. math::
        \begin{array}{ll}
        i = \sigma(W_{ii} x + W_{hi} h + b_{hi}) \\
        f = \sigma(W_{if} x + W_{hf} h + b_{hf}) \\
        g = \tanh(W_{ig} x + W_{hg} h + b_{hg}) \\
        o = \sigma(W_{io} x + W_{ho} h + b_{ho}) \\
        c' = f * c + i * g \\
        h' = o * \tanh(c') \\
        \end{array}
    where x is the input, h is the output of the previous time step, and c is
    the memory.
    Args:
      carry: the hidden state of the LSTM cell,
        initialized using `LSTMCell.initialize_carry`.
      inputs: an ndarray with the input for the current time step.
        All dimensions except the final are considered batch dimensions.
      gate_fn: activation function used for gates (default: sigmoid)
      activation_fn: activation function used for output and memory update
        (default: tanh).
      kernel_init: initializer function for the kernels that transform
        the input (default: lecun_normal).
      recurrent_kernel_init: initializer function for the kernels that transform
        the hidden state (default: orthogonal).
      bias_init: initializer for the bias parameters (default: zeros)
    Returns:
      A tuple with the new carry and the output.
    """
    # Carry is a (cell memory, hidden state) pair.
    c, h = carry
    hidden_features = h.shape[-1]
    # input and recurrent layers are summed so only one needs a bias.
    dense_h = linear.Dense.partial(
        inputs=h, features=hidden_features, bias=True,
        kernel_init=recurrent_kernel_init, bias_init=bias_init)
    dense_i = linear.Dense.partial(
        inputs=inputs, features=hidden_features, bias=False,
        kernel_init=kernel_init)
    i = gate_fn(dense_i(name='ii') + dense_h(name='hi'))
    f = gate_fn(dense_i(name='if') + dense_h(name='hf'))
    g = activation_fn(dense_i(name='ig') + dense_h(name='hg'))
    o = gate_fn(dense_i(name='io') + dense_h(name='ho'))
    new_c = f * c + i * g
    new_h = o * activation_fn(new_c)
    return (new_c, new_h), new_h
  @staticmethod
  def initialize_carry(rng, batch_dims, size, init_fn=initializers.zeros):
    """initialize the RNN cell carry.
    Args:
      rng: random number generator passed to the init_fn.
      batch_dims: a tuple providing the shape of the batch dimensions.
      size: the size or number of features of the memory.
      init_fn: initializer function for the carry.
    Returns:
      An initialized carry for the given RNN cell.
    """
    # Two independent keys: one for the memory c, one for the hidden h.
    key1, key2 = random.split(rng)
    mem_shape = batch_dims + (size,)
    return init_fn(key1, mem_shape), init_fn(key2, mem_shape)
class OptimizedLSTMCell(RNNCellBase):
  """DEPRECATION WARNING:
  The `flax.nn` module is Deprecated, use `flax.linen` instead.
  Learn more and find an upgrade guide at
  https://github.com/google/flax/blob/main/flax/linen/README.md"
  More efficient LSTM Cell that concatenates state components before matmul.
  Parameters are compatible with `flax.nn.LSTMCell`.
  """
  class DummyDense(base.Module):
    """Dummy module for creating parameters matching `flax.nn.Dense`."""
    def apply(self,
              inputs,
              features,
              kernel_init,
              bias_init,
              bias=True):
      # Only declares (kernel, bias) parameters; no computation happens here.
      k = self.param('kernel', (inputs.shape[-1], features), kernel_init)
      b = (self.param('bias', (features,), bias_init)
           if bias else jnp.zeros((features,)))
      return k, b
  def apply(self,
            carry,
            inputs,
            gate_fn=activation.sigmoid,
            activation_fn=activation.tanh,
            kernel_init=linear.default_kernel_init,
            recurrent_kernel_init=initializers.orthogonal(),
            bias_init=initializers.zeros):
    r"""A long short-term memory (LSTM) cell.
    the mathematical definition of the cell is as follows
    .. math::
        \begin{array}{ll}
        i = \sigma(W_{ii} x + W_{hi} h + b_{hi}) \\
        f = \sigma(W_{if} x + W_{hf} h + b_{hf}) \\
        g = \tanh(W_{ig} x + W_{hg} h + b_{hg}) \\
        o = \sigma(W_{io} x + W_{ho} h + b_{ho}) \\
        c' = f * c + i * g \\
        h' = o * \tanh(c') \\
        \end{array}
    where x is the input, h is the output of the previous time step, and c is
    the memory.
    Args:
      carry: the hidden state of the LSTM cell, initialized using
        `LSTMCell.initialize_carry`.
      inputs: an ndarray with the input for the current time step. All
        dimensions except the final are considered batch dimensions.
      gate_fn: activation function used for gates (default: sigmoid)
      activation_fn: activation function used for output and memory update
        (default: tanh).
      kernel_init: initializer function for the kernels that transform
        the input (default: lecun_normal).
      recurrent_kernel_init: initializer function for the kernels that transform
        the hidden state (default: orthogonal).
      bias_init: initializer for the bias parameters (default: zeros)
    Returns:
      A tuple with the new carry and the output.
    """
    c, h = carry
    hidden_features = h.shape[-1]
    def _concat_dense(inputs, params, use_bias=True):
      # Concatenate per-gate kernels so all four gates are computed with a
      # single matmul, then split the result back into per-gate outputs.
      kernels, biases = zip(*params.values())
      kernel = jnp.asarray(jnp.concatenate(kernels, axis=-1), jnp.float32)
      y = jnp.dot(inputs, kernel)
      if use_bias:
        bias = jnp.asarray(jnp.concatenate(biases, axis=-1), jnp.float32)
        y = y + bias
      # Split the result back into individual (i, f, g, o) outputs.
      split_indices = np.cumsum([b.shape[0] for b in biases[:-1]])
      ys = jnp.split(y, split_indices, axis=-1)
      return dict(zip(params.keys(), ys))
    # Create the params in the same order as LSTMCell for initialization
    # compatibility.
    dense_params_h = {}
    dense_params_i = {}
    for component in ['i', 'f', 'g', 'o']:
      dense_params_i[component] = OptimizedLSTMCell.DummyDense(
          inputs=inputs, features=hidden_features, bias=False,
          kernel_init=kernel_init, bias_init=bias_init,
          name=f'i{component}')
      dense_params_h[component] = OptimizedLSTMCell.DummyDense(
          inputs=h, features=hidden_features, bias=True,
          kernel_init=recurrent_kernel_init, bias_init=bias_init,
          name=f'h{component}')
    dense_h = _concat_dense(h, dense_params_h, use_bias=True)
    dense_i = _concat_dense(inputs, dense_params_i, use_bias=False)
    i = gate_fn(dense_h['i'] + dense_i['i'])
    f = gate_fn(dense_h['f'] + dense_i['f'])
    g = activation_fn(dense_h['g'] + dense_i['g'])
    o = gate_fn(dense_h['o'] + dense_i['o'])
    new_c = f * c + i * g
    new_h = o * activation_fn(new_c)
    return (new_c, new_h), new_h
  @staticmethod
  def initialize_carry(rng, batch_dims, size, init_fn=initializers.zeros):
    """initialize the RNN cell carry.
    Args:
      rng: random number generator passed to the init_fn.
      batch_dims: a tuple providing the shape of the batch dimensions.
      size: the size or number of features of the memory.
      init_fn: initializer function for the carry.
    Returns:
      An initialized carry for the given RNN cell.
    """
    key1, key2 = random.split(rng)
    mem_shape = batch_dims + (size,)
    return init_fn(key1, mem_shape), init_fn(key2, mem_shape)
class GRUCell(RNNCellBase):
  """DEPRECATION WARNING:
  The `flax.nn` module is Deprecated, use `flax.linen` instead.
  Learn more and find an upgrade guide at
  https://github.com/google/flax/blob/main/flax/linen/README.md"
  GRU cell."""
  def apply(self, carry, inputs,
            gate_fn=activation.sigmoid, activation_fn=activation.tanh,
            kernel_init=linear.default_kernel_init,
            recurrent_kernel_init=initializers.orthogonal(),
            bias_init=initializers.zeros):
    r"""Gated recurrent unit (GRU) cell.
    the mathematical definition of the cell is as follows
    .. math::
        \begin{array}{ll}
        r = \sigma(W_{ir} x + W_{hr} h + b_{hr}) \\
        z = \sigma(W_{iz} x + W_{hz} h + b_{hz}) \\
        n = \tanh(W_{in} x + b_{in} + r * (W_{hn} h + b_{hn})) \\
        h' = (1 - z) * n + z * h
        \end{array}
    where x is the input and h, is the output of the previous time step.
    Args:
      carry: the hidden state of the LSTM cell,
        initialized using `GRUCell.initialize_carry`.
      inputs: an ndarray with the input for the current time step.
        All dimensions except the final are considered batch dimensions.
      gate_fn: activation function used for gates (default: sigmoid)
      activation_fn: activation function used for output and memory update
        (default: tanh).
      kernel_init: initializer function for the kernels that transform
        the input (default: lecun_normal).
      recurrent_kernel_init: initializer function for the kernels that transform
        the hidden state (default: orthogonal).
      bias_init: initializer for the bias parameters (default: zeros)
    Returns:
      A tuple with the new carry and the output.
    """
    # Unlike LSTM, the GRU carry is the hidden state alone (no cell memory).
    h = carry
    hidden_features = h.shape[-1]
    # input and recurrent layers are summed so only one needs a bias.
    dense_h = linear.Dense.partial(
        inputs=h, features=hidden_features, bias=False,
        kernel_init=recurrent_kernel_init, bias_init=bias_init)
    dense_i = linear.Dense.partial(
        inputs=inputs, features=hidden_features, bias=True,
        kernel_init=kernel_init, bias_init=bias_init)
    r = gate_fn(dense_i(name='ir') + dense_h(name='hr'))
    z = gate_fn(dense_i(name='iz') + dense_h(name='hz'))
    # add bias because the linear transformations aren't directly summed.
    n = activation_fn(dense_i(name='in') + r * dense_h(name='hn', bias=True))
    new_h = (1. - z) * n + z * h
    return new_h, new_h
  @staticmethod
  def initialize_carry(rng, batch_dims, size, init_fn=initializers.zeros):
    """initialize the RNN cell carry.
    Args:
      rng: random number generator passed to the init_fn.
      batch_dims: a tuple providing the shape of the batch dimensions.
      size: the size or number of features of the memory.
      init_fn: initializer function for the carry.
    Returns:
      An initialized carry for the given RNN cell.
    """
    mem_shape = batch_dims + (size,)
    return init_fn(rng, mem_shape)
class ConvLSTM(RNNCellBase):
  r"""DEPRECATION WARNING:
  The `flax.nn` module is Deprecated, use `flax.linen` instead.
  Learn more and find an upgrade guide at
  https://github.com/google/flax/blob/main/flax/linen/README.md"
  A convolutional LSTM cell.
  The implementation is based on xingjian2015convolutional.
  Given x_t and the previous state (h_{t-1}, c_{t-1})
  the core computes
  .. math::
     \begin{array}{ll}
     i_t = \sigma(W_{ii} * x_t + W_{hi} * h_{t-1} + b_i) \\
     f_t = \sigma(W_{if} * x_t + W_{hf} * h_{t-1} + b_f) \\
     g_t = \tanh(W_{ig} * x_t + W_{hg} * h_{t-1} + b_g) \\
     o_t = \sigma(W_{io} * x_t + W_{ho} * h_{t-1} + b_o) \\
     c_t = f_t c_{t-1} + i_t g_t \\
     h_t = o_t \tanh(c_t)
     \end{array}
  where * denotes the convolution operator;
  i_t, f_t, o_t are input, forget and output gate activations,
  and g_t is a vector of cell updates.
  Notes:
    Forget gate initialization:
      Following jozefowicz2015empirical we add 1.0 to b_f
      after initialization in order to reduce the scale of forgetting in
      the beginning of the training.
  """
  def apply(self,
            carry,
            inputs,
            features,
            kernel_size,
            strides=None,
            padding='SAME',
            bias=True,
            dtype=jnp.float32):
    """Constructs a convolutional LSTM.
    Args:
      carry: the hidden state of the Conv2DLSTM cell,
        initialized using `Conv2DLSTM.initialize_carry`.
      inputs: input data with dimensions (batch, spatial_dims..., features).
      features: number of convolution filters.
      kernel_size: shape of the convolutional kernel.
      strides: a sequence of `n` integers, representing the inter-window
        strides.
      padding: either the string `'SAME'`, the string `'VALID'`, or a sequence
        of `n` `(low, high)` integer pairs that give the padding to apply before
        and after each spatial dimension.
      bias: whether to add a bias to the output (default: True).
      dtype: the dtype of the computation (default: float32).
    Returns:
      A tuple with the new carry and the output.
    """
    c, h = carry
    # Both convolutions produce 4*features channels: one slab per gate.
    input_to_hidden = linear.Conv.partial(
        features=4*features,
        kernel_size=kernel_size,
        strides=strides,
        padding=padding,
        bias=bias,
        dtype=dtype,
        name="ih")
    hidden_to_hidden = linear.Conv.partial(
        features=4*features,
        kernel_size=kernel_size,
        strides=strides,
        padding=padding,
        bias=bias,
        dtype=dtype,
        name="hh")
    gates = input_to_hidden(inputs) + hidden_to_hidden(h)
    i, g, f, o = jnp.split(gates, indices_or_sections=4, axis=-1)
    # The +1 keeps the forget gate open early in training (see class Notes).
    f = activation.sigmoid(f + 1)
    new_c = f * c + activation.sigmoid(i) * jnp.tanh(g)
    new_h = activation.sigmoid(o) * jnp.tanh(new_c)
    return (new_c, new_h), new_h
  @staticmethod
  def initialize_carry(rng, batch_dims, size, init_fn=initializers.zeros):
    """initialize the RNN cell carry.
    Args:
      rng: random number generator passed to the init_fn.
      batch_dims: a tuple providing the shape of the batch dimensions.
      size: the input_shape + (features,).
      init_fn: initializer function for the carry.
    Returns:
      An initialized carry for the given RNN cell.
    """
    key1, key2 = random.split(rng)
    mem_shape = batch_dims + size
    return init_fn(key1, mem_shape), init_fn(key2, mem_shape)
| 37.179138
| 80
| 0.657294
|
4a0079a820b29b20c8e83e3e496114747673580b
| 3,255
|
py
|
Python
|
general_language_model/utils.py
|
Adoni/general_languange_model
|
e12594226918b18ec8abdfac918cb3f74d481b9d
|
[
"MIT"
] | 1
|
2019-01-12T15:36:48.000Z
|
2019-01-12T15:36:48.000Z
|
general_language_model/utils.py
|
Adoni/general_languange_model
|
e12594226918b18ec8abdfac918cb3f74d481b9d
|
[
"MIT"
] | null | null | null |
general_language_model/utils.py
|
Adoni/general_languange_model
|
e12594226918b18ec8abdfac918cb3f74d481b9d
|
[
"MIT"
] | null | null | null |
# encoding: utf-8
"""
utils.py
~~~~~~~~~
Here are some helper classes and functions
:author: Xiaofei Sun
:contact: adoni1203@gmail.com
:date: 2018/5/28
:license: MIT, see LICENSE for more details.
"""
import os
import torch
class Dictionary(object):
    """Bidirectional word <-> index vocabulary with tokenization helpers.

    Unknown words are mapped to the id of the special "<unk>" token, which
    is expected to have been added to the dictionary beforehand.
    """
    def __init__(self):
        self.word2idx = {}   # word -> integer id
        self.idx2word = []   # integer id -> word
        self.frozen = False  # when True, tokenize_path no longer adds words
    def add_word(self, word):
        """Add *word* to the vocabulary (idempotent) and return its id."""
        if word not in self.word2idx:
            self.idx2word.append(word)
            self.word2idx[word] = len(self.idx2word) - 1
        return self.word2idx[word]
    def freeze(self):
        """Stop tokenize_path from growing the vocabulary."""
        self.frozen = True
    def __len__(self):
        return len(self.idx2word)
    def _lookup(self, word):
        """Return the id of *word*, falling back to the "<unk>" id.

        Raises KeyError if neither the word nor "<unk>" is known.
        """
        try:
            return self.word2idx[word]
        except KeyError:  # was a bare `except:`; only KeyError is expected here
            return self.word2idx["<unk>"]
    def tokenize_sentence(self, words):
        """Map a sequence of words to a LongTensor of vocabulary ids."""
        ids = torch.LongTensor(len(words))
        for i, word in enumerate(words):
            ids[i] = self._lookup(word)
        return ids
    def tokenize_path(self, path):
        """Tokenize a text file: one id per word plus an '<eos>' per line."""
        assert os.path.exists(path)
        # First pass: count tokens and (unless frozen) grow the vocabulary.
        with open(path, 'r') as f:
            tokens = 0
            for line in f:
                words = line.split() + ['<eos>']
                tokens += len(words)
                if not self.frozen:
                    for word in words:
                        self.add_word(word)
        # Second pass: convert the file content to ids.
        with open(path, 'r') as f:
            ids = torch.LongTensor(tokens)
            token = 0
            for line in f:
                words = line.split() + ['<eos>']
                for word in words:
                    ids[token] = self._lookup(word)
                    token += 1
        return ids
def batchify(data, bsz, device):
    """
    Arrange a 1-D token sequence into `bsz` parallel columns.

    With the alphabet as the sequence and batch size 4 the result is
    ┌ a g m s ┐
    │ b h n t │
    │ c i o u │
    │ d j p v │
    │ e k q w │
    └ f l r x ┘.
    Columns are treated as independent streams by the model, which loses
    cross-column dependencies (e.g. 'g' on 'f') but enables efficient
    batch processing.
    :param data: 1-D tensor of token ids
    :param bsz: batch size (number of columns)
    :return: tensor of shape (len(data) // bsz, bsz) on `device`
    """
    # Number of complete batches; the remainder is dropped.
    nbatch = data.size(0) // bsz
    trimmed = data.narrow(0, 0, nbatch * bsz)
    # Lay the sequence out as bsz rows, then transpose into columns.
    columns = trimmed.view(bsz, -1).t().contiguous()
    return columns.to(device)
def get_batch(source, i, seq_len):
    """Return (input, target) slices of `source` starting at offset `i`.

    The target is the input shifted one step ahead and flattened; the span
    is truncated near the end of `source` so both slices stay in bounds.
    """
    span = min(seq_len, len(source) - 1 - i)
    inputs = source[i:i + span]
    targets = source[i + 1:i + 1 + span].view(-1)
    return inputs, targets
def repackage_hidden(h):
    """Detach hidden state tensors from their autograd history.

    Accepts a single tensor or an arbitrarily nested tuple of tensors.
    """
    if isinstance(h, torch.Tensor):
        return h.detach()
    return tuple(repackage_hidden(part) for part in h)
| 28.552632
| 80
| 0.549309
|
4a0079ac38da2583bc2abc951e4f9a1d2214b7ba
| 470
|
py
|
Python
|
django/core/migrations/0006_auto_20181206_1449.py
|
TrainerDex/trainerdex.co.uk
|
82b029b8ac480686ec192233c5bc62521d480f88
|
[
"Apache-2.0"
] | 1
|
2021-03-04T14:46:31.000Z
|
2021-03-04T14:46:31.000Z
|
django/core/migrations/0006_auto_20181206_1449.py
|
TrainerDex/trainerdex.co.uk
|
82b029b8ac480686ec192233c5bc62521d480f88
|
[
"Apache-2.0"
] | 99
|
2020-09-10T11:00:59.000Z
|
2022-03-29T09:08:40.000Z
|
django/core/migrations/0006_auto_20181206_1449.py
|
TrainerDex/trainerdex.co.uk
|
82b029b8ac480686ec192233c5bc62521d480f88
|
[
"Apache-2.0"
] | 1
|
2021-12-21T00:54:01.000Z
|
2021-12-21T00:54:01.000Z
|
# Generated by Django 2.1.3 on 2018-12-06 14:49
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated Django migration: updates only the human-readable
    # names (verbose_name / verbose_name_plural) of the
    # discordguildmembership model; no schema change.
    dependencies = [
        ("core", "0005_discordguildmembership_active"),
    ]
    operations = [
        migrations.AlterModelOptions(
            name="discordguildmembership",
            options={
                "verbose_name": "Discord Member",
                "verbose_name_plural": "Discord Members",
            },
        ),
    ]
| 22.380952
| 57
| 0.580851
|
4a0079b608fb2a64b7c4c283976eae05ad66c028
| 9,791
|
py
|
Python
|
grouper/ctl/user.py
|
aneeq009/merou
|
7a87b43aaf64244932fa460842132a2d9329e704
|
[
"Apache-2.0"
] | 58
|
2017-05-26T06:46:24.000Z
|
2022-03-25T20:55:51.000Z
|
grouper/ctl/user.py
|
aneeq009/merou
|
7a87b43aaf64244932fa460842132a2d9329e704
|
[
"Apache-2.0"
] | 74
|
2017-06-16T17:48:37.000Z
|
2022-03-28T23:09:54.000Z
|
grouper/ctl/user.py
|
aneeq009/merou
|
7a87b43aaf64244932fa460842132a2d9329e704
|
[
"Apache-2.0"
] | 43
|
2017-05-20T22:11:51.000Z
|
2022-03-25T00:24:56.000Z
|
import logging
import sys
from typing import TYPE_CHECKING
from grouper import public_key
from grouper.ctl.base import CtlCommand
from grouper.ctl.util import ensure_valid_username
from grouper.models.audit_log import AuditLog
from grouper.models.user import User
from grouper.plugin.exceptions import PluginRejectedDisablingUser
from grouper.repositories.factory import SessionFactory
from grouper.role_user import disable_role_user, enable_role_user
from grouper.usecases.convert_user_to_service_account import ConvertUserToServiceAccountUI
from grouper.user import disable_user, enable_user, get_all_users
from grouper.user_metadata import set_user_metadata
if TYPE_CHECKING:
from argparse import ArgumentParser, Namespace
from grouper.ctl.settings import CtlSettings
from grouper.usecases.factory import UseCaseFactory
@ensure_valid_username
def user_command(args, settings, session_factory):
    # type: (Namespace, CtlSettings, SessionFactory) -> None
    """Dispatch the create/disable/enable/set_metadata/add_public_key subcommands.

    The bulk subcommands (create/disable/enable) accept multiple usernames and
    return early; the per-user subcommands fall through to the lookup below.
    Each branch commits its own session changes and writes audit-log entries
    for administrative actions.
    """
    session = session_factory.create_session()
    if args.subcommand == "create":
        # Existing users are left untouched; only missing ones are created.
        for username in args.username:
            user = User.get(session, name=username)
            if not user:
                logging.info("{}: No such user, creating...".format(username))
                user = User.get_or_create(session, username=username, role_user=args.role_user)
                session.commit()
            else:
                logging.info("{}: Already exists. Doing nothing.".format(username))
        return
    elif args.subcommand == "disable":
        for username in args.username:
            user = User.get(session, name=username)
            if not user:
                logging.info("{}: No such user. Doing nothing.".format(username))
            elif not user.enabled:
                logging.info("{}: User already disabled. Doing nothing.".format(username))
            else:
                logging.info("{}: User found, disabling...".format(username))
                try:
                    # Role users require extra teardown beyond a plain disable.
                    if user.role_user:
                        disable_role_user(session, user)
                    else:
                        disable_user(session, user)
                    AuditLog.log(
                        session,
                        user.id,
                        "disable_user",
                        "(Administrative) User disabled via grouper-ctl",
                        on_user_id=user.id,
                    )
                    session.commit()
                except PluginRejectedDisablingUser as e:
                    # A plugin veto aborts the whole command, not just this user.
                    logging.error("%s", e)
                    sys.exit(1)
        return
    elif args.subcommand == "enable":
        for username in args.username:
            user = User.get(session, name=username)
            if not user:
                logging.info("{}: No such user. Doing nothing.".format(username))
            elif user.enabled:
                logging.info("{}: User not disabled. Doing nothing.".format(username))
            else:
                logging.info("{}: User found, enabling...".format(username))
                if user.role_user:
                    enable_role_user(
                        session, user, preserve_membership=args.preserve_membership, user=user
                    )
                else:
                    enable_user(session, user, user, preserve_membership=args.preserve_membership)
                AuditLog.log(
                    session,
                    user.id,
                    "enable_user",
                    "(Administrative) User enabled via grouper-ctl",
                    on_user_id=user.id,
                )
                session.commit()
        return
    # "add_public_key" and "set_metadata"
    user = User.get(session, name=args.username)
    if not user:
        logging.error("{}: No such user. Doing nothing.".format(args.username))
        return
    # User must exist at this point.
    if args.subcommand == "set_metadata":
        logging.info(
            "Setting %s metadata: %s=%s", args.username, args.metadata_key, args.metadata_value
        )
        # An empty string on the command line means "clear this metadata key".
        if args.metadata_value == "":
            args.metadata_value = None
        set_user_metadata(session, user.id, args.metadata_key, args.metadata_value)
        session.commit()
    elif args.subcommand == "add_public_key":
        logging.info("Adding public key for user")
        try:
            pubkey = public_key.add_public_key(session, user, args.public_key)
        except public_key.DuplicateKey:
            logging.error("Key already in use")
            return
        except public_key.PublicKeyParseError:
            logging.error("Public key appears to be invalid")
            return
        AuditLog.log(
            session,
            user.id,
            "add_public_key",
            "(Administrative) Added public key: {}".format(pubkey.fingerprint_sha256),
            on_user_id=user.id,
        )
class ConvertUserToServiceAccountCommand(CtlCommand, ConvertUserToServiceAccountUI):
    """Convert a user to a service account."""

    def __init__(self, usecase_factory):
        # type: (UseCaseFactory) -> None
        self.usecase_factory = usecase_factory

    @staticmethod
    def add_arguments(parser):
        # type: (ArgumentParser) -> None
        # Register the required --owner flag and the positional username.
        parser.add_argument(
            "--owner", required=True, help="Name of group to own the service account"
        )
        parser.add_argument("username")

    def run(self, args):
        # type: (Namespace) -> None
        # Build the usecase from the factory, passing self as the UI callback sink.
        factory = self.usecase_factory
        usecase = factory.create_convert_user_to_service_account_usecase(
            args.actor_name, self
        )
        usecase.convert_user_to_service_account(args.username, args.owner)

    # ConvertUserToServiceAccountUI callbacks invoked by the usecase:

    def converted_user_to_service_account(self, user, owner):
        # type: (str, str) -> None
        logging.info("converted user %s to service account owned by %s", user, owner)

    def convert_user_to_service_account_failed_permission_denied(self, user):
        # type: (str) -> None
        logging.critical("not permitted to convert user %s to service account", user)
        sys.exit(1)

    def convert_user_to_service_account_failed_user_is_in_groups(self, user):
        # type: (str) -> None
        logging.critical("user %s cannot be converted while a member of any groups", user)
        sys.exit(1)
class UserCommand(CtlCommand):
    """Commands to modify users."""

    @staticmethod
    def add_arguments(parser):
        # type: (ArgumentParser) -> None
        # Global --actor flag plus one subparser per subcommand below.
        parser.add_argument(
            "-a",
            "--actor",
            required=False,
            dest="actor_name",
            help=(
                "Name of the entity performing this action."
                " Must be a valid Grouper human or service account."
            ),
        )
        subparser = parser.add_subparsers(dest="subcommand")
        user_key_parser = subparser.add_parser("add_public_key", help="Add public key to user")
        user_key_parser.add_argument("username")
        user_key_parser.add_argument("public_key")
        user_convert_parser = subparser.add_parser(
            "convert_to_service_account", help="Convert to service account"
        )
        # Delegate the convert subcommand's flags to the dedicated command class.
        ConvertUserToServiceAccountCommand.add_arguments(user_convert_parser)
        user_create_parser = subparser.add_parser("create", help="Create a new user account")
        user_create_parser.add_argument("username", nargs="+")
        user_create_parser.add_argument(
            "--role-user",
            default=False,
            action="store_true",
            help="If given, identifies user as a role user.",
        )
        user_disable_parser = subparser.add_parser("disable", help="Disable a user account")
        user_disable_parser.add_argument("username", nargs="+")
        user_enable_parser = subparser.add_parser("enable", help="(Re-)enable a user account")
        user_enable_parser.add_argument("username", nargs="+")
        user_enable_parser.add_argument(
            "--preserve-membership",
            default=False,
            action="store_true",
            help="Unless provided, scrub all group memberships when re-enabling user.",
        )
        subparser.add_parser("list", help="List all users and their account statuses")
        user_set_metadata_parser = subparser.add_parser(
            "set_metadata", help="Set metadata on user"
        )
        user_set_metadata_parser.add_argument("username")
        user_set_metadata_parser.add_argument("metadata_key")
        user_set_metadata_parser.add_argument("metadata_value")

    def __init__(self, settings, usecase_factory):
        # type: (CtlSettings, UseCaseFactory) -> None
        self.settings = settings
        self.usecase_factory = usecase_factory

    def run(self, args):
        # type: (Namespace) -> None
        # convert/list are handled here; everything else routes to user_command().
        if args.subcommand == "convert_to_service_account":
            subcommand = ConvertUserToServiceAccountCommand(self.usecase_factory)
            subcommand.run(args)
        elif args.subcommand == "list":
            # Ugly temporary hack until this is converted to a usecase.
            repository_factory = self.usecase_factory.service_factory.repository_factory
            session = repository_factory.session_factory.create_session()
            all_users = get_all_users(session)
            for user in all_users:
                user_enabled = "enabled" if user.enabled else "disabled"
                logging.info("{} has status {}".format(user.name, user_enabled))
            return
        else:
            # Ugly temporary hack until this is converted to a usecase.
            repository_factory = self.usecase_factory.service_factory.repository_factory
            user_command(args, self.settings, repository_factory.session_factory)
| 39.321285
| 98
| 0.620264
|
4a007a61b65a5c01fb2efd41a54638898a204973
| 4,852
|
py
|
Python
|
pirates/battle/BattleSkillDiary.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 81
|
2018-04-08T18:14:24.000Z
|
2022-01-11T07:22:15.000Z
|
pirates/battle/BattleSkillDiary.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 4
|
2018-09-13T20:41:22.000Z
|
2022-01-08T06:57:00.000Z
|
pirates/battle/BattleSkillDiary.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 26
|
2018-05-26T12:49:27.000Z
|
2021-09-11T09:11:59.000Z
|
from pirates.battle import WeaponGlobals
from pirates.uberdog.UberDogGlobals import InventoryType
class BattleSkillDiary():
    """Client-side record of per-skill recharge timers and hit counters.

    Each timer entry is a list [state, accumulatedSeconds, lastTimestamp,
    ammoSkillId].  Break-attack skills "recharge" by accumulating hits
    (self.__hits) rather than by elapsed time.
    """

    # Timer states.
    IDLE = 0
    CHARGING = 1

    def __init__(self, cr, av):
        self.cr = cr  # client repository (provides battleMgr)
        self.av = av  # the avatar this diary belongs to
        self.__timers = {}  # skillId -> [state, dt, timestamp, ammoSkillId]
        self.__hits = {}  # skillId -> accumulated hit count

    def startRecharging(self, skillId, ammoSkillId):
        # Skills with no recharge time never need a timer entry.
        if self.cr.battleMgr.getModifiedRechargeTime(self.av, skillId, ammoSkillId) == 0.0:
            return
        self.__timers[skillId] = [
            self.CHARGING, 0.0, globalClock.getFrameTime(), ammoSkillId]

    def pauseRecharging(self, skillId):
        details = self.__timers.get(skillId)
        if not details or details[0] == self.IDLE:
            return
        ammoSkillId = details[3]
        if self.cr.battleMgr.getModifiedRechargeTime(self.av, skillId, ammoSkillId) == 0.0:
            return
        # Bank the time elapsed since the last resume before going idle.
        details[0] = self.IDLE
        curTime = globalClock.getFrameTime()
        lastTime = details[2]
        dt = curTime - lastTime
        details[1] += dt
        details[2] = curTime

    def continueRecharging(self, skillId):
        details = self.__timers.get(skillId)
        if not details:
            return
        ammoSkillId = details[3]
        if self.cr.battleMgr.getModifiedRechargeTime(self.av, skillId, ammoSkillId) == 0.0:
            return
        if details[0] != self.CHARGING:
            details[0] = self.CHARGING
            details[2] = globalClock.getFrameTime()

    def clearRecharging(self, skillId):
        # BUG FIX: dict.has_key() was removed in Python 3; "in" works everywhere.
        if skillId in self.__timers:
            del self.__timers[skillId]

    def addHit(self, skillId, amount):
        # BUG FIX: replaces has_key() with a get-or-default accumulate.
        self.__hits[skillId] = self.__hits.get(skillId, 0) + amount

    def clearHits(self, skillId):
        self.__hits[skillId] = 0

    def getHits(self, skillId):
        return self.__hits.get(skillId, 0)

    def modifyTimeSpentRecharging(self, skillId, timeSpentRecharging):
        # Rewind the timestamp so the elapsed time reads as timeSpentRecharging.
        details = self.__timers.get(skillId)
        if details and details[0] == self.CHARGING:
            details[2] = globalClock.getFrameTime() - timeSpentRecharging

    def getTimeSpentRecharging(self, skillId):
        # Break-attack skills measure recharge progress in hits, not seconds.
        if WeaponGlobals.getSkillTrack(skillId) == WeaponGlobals.BREAK_ATTACK_SKILL_INDEX:
            return self.getHits(skillId)
        details = self.__timers.get(skillId)
        if not details:
            return None
        t = details[1]
        if details[0] == self.CHARGING:
            # Add the live, not-yet-banked time since the last resume.
            curTime = globalClock.getFrameTime()
            lastTime = details[2]
            dt = curTime - lastTime
            t += dt
        return t

    def getTimeRemaining(self, skillId):
        if WeaponGlobals.getSkillTrack(skillId) == WeaponGlobals.BREAK_ATTACK_SKILL_INDEX:
            ammoSkillId = 0
        else:
            details = self.__timers.get(skillId)
            if not details:
                return
            ammoSkillId = details[3]
        timeRequired = self.cr.battleMgr.getModifiedRechargeTime(self.av, skillId, ammoSkillId)
        if timeRequired == 0.0:
            return 0.0
        timeSpent = self.getTimeSpentRecharging(skillId)
        if timeSpent is None or timeSpent >= timeRequired:
            return 0.0
        return timeRequired - timeSpent

    def canUseSkill(self, skillId, ammoSkillId, tolerance=0.0):
        timeRequired = self.cr.battleMgr.getModifiedRechargeTime(self.av, skillId, ammoSkillId)
        if timeRequired == 0.0:
            return 1
        timeSpent = self.getTimeSpentRecharging(skillId)
        if timeSpent is None:
            return 1
        if WeaponGlobals.getSkillTrack(skillId) == WeaponGlobals.BREAK_ATTACK_SKILL_INDEX:
            # Break attacks allow no tolerance: the hit quota must be met.
            if timeSpent >= timeRequired:
                return 1
            return 0
        if timeSpent + tolerance >= timeRequired:
            return 1
        elif skillId == InventoryType.CannonShoot:
            # Cannon fire is never recharge-gated on the client.
            return 1
        else:
            return 0

    def __str__(self):
        s = 'BattleSkillDiary\n'
        s += ' Skill: Timestamp\n'
        for skillId, details in self.__timers.items():
            skillName = WeaponGlobals.getSkillName(skillId)
            state = ('Idle', 'Charging')[details[0]]
            dt = details[1]
            timeStamp = details[2]
            remaining = self.getTimeRemaining(skillId)
            s += ' %s (%s): %s, dt=%f, t=%f, remaining=%f (s)\n' % (skillName, skillId, state, dt, timeStamp, remaining)
        for skillId, hits in self.__hits.items():
            # BUG FIX: __hits values are plain numbers, not lists; the old code
            # indexed into them (details[0]) and supplied 4 values to a format
            # string with 5 placeholders, raising TypeError.
            skillName = WeaponGlobals.getSkillName(skillId)
            remaining = self.getTimeRemaining(skillId)
            s += ' %s (%s): hits=%f, remaining=%f (s)\n' % (skillName, skillId, hits, remaining)
        return s
| 35.676471
| 120
| 0.599134
|
4a007abb840efc9dc51c4b626eee94a244fce6e9
| 413
|
py
|
Python
|
problem_solving/warmup/mini_max_sum/main.py
|
hugolribeiro/hackerrank_exercises
|
d2757b24479c26ec39e01091e3a15e8980e97864
|
[
"MIT"
] | null | null | null |
problem_solving/warmup/mini_max_sum/main.py
|
hugolribeiro/hackerrank_exercises
|
d2757b24479c26ec39e01091e3a15e8980e97864
|
[
"MIT"
] | null | null | null |
problem_solving/warmup/mini_max_sum/main.py
|
hugolribeiro/hackerrank_exercises
|
d2757b24479c26ec39e01091e3a15e8980e97864
|
[
"MIT"
] | null | null | null |
# import math
# import os
# import random
# import re
# import sys
# Complete the miniMaxSum function below.
def miniMaxSum(arr):
    """Print the minimum and maximum sums obtainable by adding all but one
    element of *arr*, separated by a single space."""
    total = sum(arr)
    print(f'{total - max(arr)} {total - min(arr)}')
#################################################
# if __name__ == '__main__':
# arr = list(map(int, input().rstrip().split()))
#
# miniMaxSum(arr)
| 19.666667
| 52
| 0.556901
|
4a007aefe51b0d8d97dc53e86250107936bfcc57
| 3,287
|
py
|
Python
|
examples/conformer/saved_model.py
|
Thumb-Technologies/TensorFlowASR
|
37ea12af04a8f2c13f75a617f4aa4331f95ce945
|
[
"Apache-2.0"
] | null | null | null |
examples/conformer/saved_model.py
|
Thumb-Technologies/TensorFlowASR
|
37ea12af04a8f2c13f75a617f4aa4331f95ce945
|
[
"Apache-2.0"
] | null | null | null |
examples/conformer/saved_model.py
|
Thumb-Technologies/TensorFlowASR
|
37ea12af04a8f2c13f75a617f4aa4331f95ce945
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Huy Le Nguyen (@usimarit)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
from tensorflow_asr.utils import env_util
logger = env_util.setup_environment()
import tensorflow as tf
DEFAULT_YAML = os.path.join(os.path.abspath(os.path.dirname(__file__)), "config.yml")
tf.keras.backend.clear_session()
parser = argparse.ArgumentParser(prog="Conformer Testing")
parser.add_argument(
"--config",
type=str,
default=DEFAULT_YAML,
help="The file path of model configuration file",
)
parser.add_argument(
"--h5",
type=str,
default=None,
help="Path to saved h5 weights",
)
parser.add_argument(
"--sentence_piece",
default=False,
action="store_true",
help="Whether to use `SentencePiece` model",
)
parser.add_argument(
"--subwords",
default=False,
action="store_true",
help="Use subwords",
)
parser.add_argument(
"--output_dir",
type=str,
default=None,
help="Output directory for saved model",
)
args = parser.parse_args()
assert args.h5
assert args.output_dir
from tensorflow_asr.configs.config import Config
from tensorflow_asr.featurizers.speech_featurizers import TFSpeechFeaturizer
from tensorflow_asr.featurizers.text_featurizers import CharFeaturizer, SentencePieceFeaturizer, SubwordFeaturizer
from tensorflow_asr.models.transducer.conformer import Conformer
config = Config(args.config)
speech_featurizer = TFSpeechFeaturizer(config.speech_config)
if args.sentence_piece:
logger.info("Use SentencePiece ...")
text_featurizer = SentencePieceFeaturizer(config.decoder_config)
elif args.subwords:
logger.info("Use subwords ...")
text_featurizer = SubwordFeaturizer(config.decoder_config)
else:
logger.info("Use characters ...")
text_featurizer = CharFeaturizer(config.decoder_config)
tf.random.set_seed(0)
# build model
conformer = Conformer(**config.model_config, vocabulary_size=text_featurizer.num_classes)
conformer.make(speech_featurizer.shape)
conformer.load_weights(args.h5, by_name=True)
conformer.summary(line_length=100)
conformer.add_featurizers(speech_featurizer, text_featurizer)
class aModule(tf.Module):
    """tf.Module wrapper exposing the Conformer's recognize() as a serving signature."""

    def __init__(self, model):
        super().__init__()
        self.model = model

    @tf.function(
        input_signature=[
            {
                # Padded features of shape (batch, time, 80, 1) plus per-example lengths.
                "inputs": tf.TensorSpec(shape=[None, None, 80, 1], dtype=tf.float32, name="inputs"),
                "inputs_length": tf.TensorSpec(shape=[None], dtype=tf.int32, name="inputs_length"),
            }
        ]
    )
    def pred(self, input_batch):
        # NOTE(review): recognize() presumably returns decoded transcripts — confirm
        # against the Conformer model's API before relying on the output format.
        result = self.model.recognize(input_batch)
        return {"ASR": result}
# Wrap the model and export it with an explicit "serving_default" signature.
module = aModule(conformer)
tf.saved_model.save(module, args.output_dir, signatures={"serving_default": module.pred})
| 28.094017
| 114
| 0.731974
|
4a007b327c29af826678d4fa1cf11d1e52012e72
| 427
|
py
|
Python
|
lambda_zip/lambda.py
|
patheard/opentelemetry-lambda
|
0c53a363e94f54117956764de909a5076807e92a
|
[
"MIT"
] | null | null | null |
lambda_zip/lambda.py
|
patheard/opentelemetry-lambda
|
0c53a363e94f54117956764de909a5076807e92a
|
[
"MIT"
] | 1
|
2022-01-20T21:35:25.000Z
|
2022-01-20T21:35:25.000Z
|
lambda_zip/lambda.py
|
mohamed-cds/opentelemetry-lambda
|
4a8d3cb351f18c13264b7df3b97504ac816345f7
|
[
"MIT"
] | 1
|
2021-12-17T20:46:51.000Z
|
2021-12-17T20:46:51.000Z
|
#! /usr/bin/python3
import boto3
import json
import os
def handler(event, context):
    """AWS Lambda entry point.

    Exercises read-only S3 and EC2 API calls (presumably to generate traced
    activity), then returns a JSON response containing the Lambda's region.
    """
    s3 = boto3.client("s3")
    s3.list_buckets()
    ec2 = boto3.client("ec2")
    ec2.describe_instances()
    body = json.dumps({"Region ": os.environ['AWS_REGION']})
    return {
        "statusCode": 200,
        "headers": {"Content-Type": "application/json"},
        "body": body,
    }
| 19.409091
| 47
| 0.550351
|
4a007c8838d2b277323839d30859357b7ea75578
| 4,338
|
py
|
Python
|
test/dataset_helpers_test.py
|
ImageMarkup/isic-archive
|
7cd8097886d685ec629e2fcba079271fb77d028f
|
[
"Apache-2.0"
] | 42
|
2015-12-12T14:05:46.000Z
|
2022-03-26T15:20:39.000Z
|
test/dataset_helpers_test.py
|
ImageMarkup/isic-archive
|
7cd8097886d685ec629e2fcba079271fb77d028f
|
[
"Apache-2.0"
] | 494
|
2015-07-09T16:14:12.000Z
|
2021-03-09T09:37:36.000Z
|
test/dataset_helpers_test.py
|
ImageMarkup/uda
|
d221af3368baf3a06ecab67e69e9d0077426c8f9
|
[
"Apache-2.0"
] | 12
|
2015-08-20T14:20:48.000Z
|
2020-10-20T01:14:44.000Z
|
import re
from isic_archive.models.dataset_helpers import matchFilenameRegex
def assertMatch(originalFilename, csvFilename):
    """Assert that the filename in the CSV matches the original filename."""
    pattern = matchFilenameRegex(csvFilename)
    assert re.match(pattern, originalFilename) is not None
def assertNotMatch(originalFilename, csvFilename):
    """Assert that the filename in the CSV doesn't match the original filename."""
    pattern = matchFilenameRegex(csvFilename)
    assert re.match(pattern, originalFilename) is None
def testMatchFilenameRegex():
    """
    Test matchFilenameRegex.

    The matchFilenameRegex function generates a regular expression to match image
    filenames in a metadata CSV file to original image filenames in the database.
    """
    # Each case: (original filename,
    #             CSV names that must match,
    #             CSV names that must NOT match).
    cases = [
        ('ABC-6D.JPG',
         ['ABC-6D', 'ABC-6D.JPG', 'ABC-6D.jpg', 'abc-6D.jpg', 'abc-6d.jpg'],
         ['ABC-6D.png', 'ABC-6D.PNG']),
        ('20010425124238356.jpg',
         ['20010425124238356', '20010425124238356.jpg', '20010425124238356.JPG'],
         ['20010425124238356.png', '20010425124238356.PNG']),
        ('AbcDef00598.jpg',
         ['AbcDef00598', 'AbcDef00598.jpg', 'AbcDef00598.JPG', 'abcdef00598.JPG'],
         ['AbcDef00598.png', 'AbcDef00598.PNG']),
        ('test-20010425124238356.jpg',
         ['test-20010425124238356', 'test-20010425124238356.jpg',
          'TEST-20010425124238356.jpg', 'TEST-20010425124238356.JPG'],
         ['TEST-20010425124238356.png', 'TEST-20010425124238356.PNG']),
        ('AEOU3014, (20020901020318037) 20010425124238356.jpg',
         ['AEOU3014, (20020901020318037) 20010425124238356',
          'AEOU3014, (20020901020318037) 20010425124238356.jpg',
          'AEOU3014, (20020901020318037) 20010425124238356.JPG',
          'aeou3014, (20020901020318037) 20010425124238356.JPG',
          'aeou3014, (20020901020318037) 20010425124238356.jpg'],
         ['AEOU3014, (20020901020318037) 20010425124238356.png',
          'AEOU3014, (20020901020318037) 20010425124238356.PNG']),
        ('20020901020318037_30445187_2002-0901_Null_ 001.jpg',
         ['20020901020318037_30445187_2002-0901_Null_ 001',
          '20020901020318037_30445187_2002-0901_Null_ 001.jpg',
          '20020901020318037_30445187_2002-0901_Null_ 001.JPG',
          '20020901020318037_30445187_2002-0901_NULL_ 001.jpg',
          '20020901020318037_30445187_2002-0901_NULL_ 001.JPG'],
         ['20020901020318037_30445187_2002-0901_NULL_ 001.png',
          '20020901020318037_30445187_2002-0901_NULL_ 001.PNG']),
        # Filename that contains a period
        ('test.315704d.jpg',
         ['test.315704d', 'test.315704d.jpg'],
         ['test.315704d.PNG']),
        # Filename that contains multiple periods
        ('test.315704d.4e95e3d.png',
         ['test.315704d.4e95e3d', 'test.315704d.4e95e3d.png'],
         ['test.315704d', 'test.315704d.4e95e3d.', 'test.315704d.4e95e3d.jpg']),
    ]
    for originalFilename, matching, nonMatching in cases:
        for csvFilename in matching:
            assertMatch(originalFilename, csvFilename)
        for csvFilename in nonMatching:
            assertNotMatch(originalFilename, csvFilename)
| 49.295455
| 91
| 0.785846
|
4a007c981f855a18550ac15dc309f64185bd20af
| 18,135
|
py
|
Python
|
cogs/lottory.py
|
SpenserPorter/discord_lotto_bot
|
7ca427df82cea05e6fd54194c380657b9bd8502b
|
[
"MIT"
] | null | null | null |
cogs/lottory.py
|
SpenserPorter/discord_lotto_bot
|
7ca427df82cea05e6fd54194c380657b9bd8502b
|
[
"MIT"
] | 5
|
2019-11-15T02:13:56.000Z
|
2022-03-12T00:22:55.000Z
|
cogs/lottory.py
|
SpenserPorter/discord_lotto_bot
|
7ca427df82cea05e6fd54194c380657b9bd8502b
|
[
"MIT"
] | 2
|
2019-10-16T14:39:27.000Z
|
2019-11-03T23:58:20.000Z
|
import discord
import numpy as np
import lotto_dao as db
import asyncio
import re
from discord.ext import commands
# Price of a single ticket, in bot-economy currency.
ticket_cost = 10000
#{Megaball:{num_matches:payout}}
# Payout schedule keyed by (megaball matched?, number of main-number matches).
payout_table = {True:{0:0*ticket_cost, 1:3*ticket_cost, 2:10*ticket_cost, 3:150*ticket_cost, 4:15000*ticket_cost},
                False:{0:0*ticket_cost, 1:0*ticket_cost, 2:2*ticket_cost, 3:15*ticket_cost, 4:1250*ticket_cost}}
# Valid main-ball numbers: 1-23 inclusive.
numbers = [x for x in range(1,24)]
def build_embed(embed_input_dict):
    """Build a discord.Embed from a spec dict.

    Expected keys: 'colour', 'author_name', and optionally 'fields' mapping
    arbitrary keys to {'name': ..., 'value': ..., 'inline'?: bool} dicts.
    """
    embed = discord.Embed(title=None,
                          description=None,
                          colour=embed_input_dict['colour']
                          )
    embed.set_author(name=embed_input_dict['author_name'])
    # BUG FIX: the old bare "except: pass" silently swallowed every error here.
    # 'fields' is simply optional, so handle its absence explicitly instead.
    for field in embed_input_dict.get('fields', {}).values():
        embed.add_field(name=field['name'], value=field['value'],
                        inline=field.get('inline', False))
    return embed
class Ticket(object):
    """Display wrapper for a 5-number lottory ticket.

    numbers is [n1, n2, n3, n4, megaball]; repr renders it as "n1-n2-n3-n4-Mmb".
    """

    # BUG FIX: ('numbers') is just a parenthesized string, not a tuple.
    # A string __slots__ happens to work for a single slot, but the
    # one-element tuple is the intended, unambiguous form.
    __slots__ = ('numbers',)

    def __init__(self, numbers):
        self.numbers = numbers

    def __repr__(self):
        return "{}-{}-{}-{}-M{}".format(*self.numbers)
def quickpick(number_of_tickets=1):
    '''Return a list of random quick-pick tickets.

    Each ticket is [n1, n2, n3, n4, megaball]: four distinct main numbers
    drawn from 1-23 plus a megaball in 1-11.
    '''
    tickets = []
    pool = list(range(1, 24))
    for _ in range(number_of_tickets):
        # Shuffle the shared pool and take its first four entries.
        np.random.shuffle(pool)
        picks = pool[:4]
        picks.append(np.random.randint(1, 12))
        tickets.append(picks)
    return tickets
def parse_ticket(winner, ticket):
match = [x for x in ticket[:4] if x in winner[:4]]
mega = winner[4] == ticket[4]
return mega, len(match)
def add_ticket_to_dict(result_dict, user_id, ticket_value, payout):
    """Append [ticket_value, payout] under user_id, creating the list if absent.

    Mutates and also returns result_dict for caller convenience.
    """
    result_dict.setdefault(user_id, []).append([ticket_value, payout])
    return result_dict
def determine_payout(mega, match):
    """Look up the payout for a ticket with *match* main matches and megaball *mega*."""
    return payout_table[mega][match]
class Lottory(commands.Cog):
    def __init__(self, bot):
        # Keep a reference to the Discord bot for user lookups and DMs.
        self.bot = bot
    @commands.group(invoke_without_command=True)
    async def tickets(self, ctx, lottory_id=None):
        """
        DM all tickets for current lottory, or lottory id if given.
        """
        lottory = db.get_current_lottory() if lottory_id is None else lottory_id
        ticket_list = db.get_user_tickets(ctx.author.id,lottory)
        await ctx.send('{} has {} tickets in the curret drawing.'.format(ctx.author, len(ticket_list)))
        # DM in chunks of 50 tickets to stay under Discord's message length limit.
        for n in range(0, len(ticket_list), 50):
            await ctx.author.send('{}'.format(ticket_list[n:n+50]))
    @commands.group(invoke_without_command=True,aliases=['lottery'], hidden=True)
    async def lottory(self, ctx, confirm):
        '''Advances to next lottory, this will abandon all tickets from previous if
        it was not drawn first!'''
        # Owner-only guard (hard-coded Discord user id of the bot owner).
        if ctx.author.id != 154415714411741185:
            await ctx.send("You're not my real dad!")
            return
        # Require an explicit "confirm" argument since this abandons tickets.
        if confirm != "confirm":
            await ctx.send("Please confirm")
            return
        db.add_lottory(0)
        lottory_id = db.get_current_lottory()
        await ctx.send("Lottory {} has begun, purchase tickets now!".format(lottory_id))
    @lottory.command(hidden=True)
    async def modify_prog(self, ctx, amount: int):
        # Owner-only: adjust the current lottory's progressive jackpot by `amount`.
        if ctx.author.id != 154415714411741185:
            await ctx.send("You're not my real dad!")
            return
        lid = db.get_current_lottory()
        new_prog = db.modify_lottory_jackpot_prog(lid, amount)
        # Displayed jackpot = progressive pool + the base jackpot payout.
        jackpot = new_prog + payout_table[True][4]
        await ctx.send("Lottory {} jackpot is now {}".format(lid, jackpot))
@commands.group(invoke_without_comands=True, aliases=['wd'])
async def withdraw(self,ctx,amount:int):
'''Withdraws from Lotto balance, to be deposited manually to Ubeleivaboat balance'''
balance = db.get_user_balance(ctx.author.id)
if amount <= 0:
ctx.send("You're broke u bitch")
return
if amount > balance:
amount = balance
new_balance = db.modify_user_balance(ctx.author.id, -1 * amount)
await ctx.send("{} withdrew {:,}. Your new balance is {:,}. An admin will credit your RiggBott acount soon!".format(ctx.author.name, amount, new_balance))
admin = await self.bot.fetch_user(154415714411741185)
await admin.send("Please run --> !add-money cash {} {} <--".format(ctx.author.name, amount))
    @commands.group(invoke_without_command=True, aliases=['what'])
    async def info(self, ctx):
        '''Displays the paytable'''
        lid = db.get_current_lottory()
        progressive = db.get_lottory_jackpot_prog(lid)
        # One long message listing every prize tier from the payout table.
        await ctx.send("4 Unordered Balls <1-23>, 1 MEGABALL <1-11> - Ticket cost {:,} \n Match 4+1 win {:,} + {:,} progressive!\nMatch 4 win {:,}\nMatch 3+1 win {:,}\nMatch 3 win {:,}\nMatch 2+1 win {:,}\nMatch 2+0 win {:,}\nMatch 1+1 win {:,}\nChance to win ANY prize 1:6".format(ticket_cost,
            payout_table[True][4], progressive, payout_table[False][4], payout_table[True][3], payout_table[False][3], payout_table[True][2], payout_table[False][2], payout_table[True][1]))
    @commands.group(invoke_without_command=True)
    async def status(self,ctx):
        '''Display the current lottory status'''
        lottory_id = db.get_current_lottory()
        tickets = db.get_lottory_tickets(lottory_id)
        num_tickets = len(tickets)
        progressive = db.get_lottory_jackpot_prog(lottory_id)
        # Displayed jackpot = base top prize + accumulated progressive.
        jackpot = payout_table[True][4] + progressive
        await ctx.send("Lottory {} is in progress, currently {:,} tickets sold, current jackpot is {:,}".format(lottory_id,num_tickets,jackpot))
    @commands.group(invoke_without_command=True)
    async def draw(self,ctx):
        '''Start the next drawing'''
        lottory_id = db.get_current_lottory()
        progressive = db.get_lottory_jackpot_prog(lottory_id)
        total_jackpot = progressive + payout_table[True][4]
        ticket_list = db.get_lottory_tickets(lottory_id) #List of tuples (user_id, ticket_value)
        if len(ticket_list) < 1: #Verify there is at least 1 ticket sold before allowing drawing
            await ctx.send("There are no tickets sold for this drawing yet!")
            return
        db.add_lottory() #increment current when drawing starts
        winning_numbers = quickpick()[0] #Choose winning numbers
        balls = {0:'First', 1:'Second', 2:'Third', 3:'Fourth', 4:'MEGA'}
        embed_dict = {'colour':discord.Colour(0x006400), 'author_name':"Drawing for lottory {}! Jackpot is currently {:,}".format(lottory_id,total_jackpot),
                      'fields': {}
                      }
        lottory_message = await ctx.send(embed=build_embed(embed_dict))
        # Reveal the balls one at a time, editing the embed after each reveal.
        async with ctx.typing():
            winning_ticket_display = []
            for ball_number, ball_name in balls.items():
                await asyncio.sleep(3)
                winning_ticket_display.append(str(winning_numbers[ball_number]))
                embed_dict['fields'][1] = {'name': "{} Ball".format(ball_name), 'value': winning_numbers[ball_number], 'inline': True}
                winning_numbers_value = "-".join(winning_ticket_display) if len(winning_ticket_display) < 5 else Ticket(winning_numbers)
                embed_dict['fields'][2] = {'name': 'Winning Numbers' , 'value': winning_numbers_value, 'inline': True}
                await lottory_message.edit(embed=build_embed(embed_dict))
        num_tickets = len(ticket_list)
        progressive_split = []
        winner_dict = {}
        loser_dict = {}
        total_payout = 0
        # Score every sold ticket against the winning numbers.
        async with ctx.typing():
            for ticket_tuple in ticket_list:
                # NOTE(review): eval() on stored ticket strings — safe only if the
                # DB values are bot-generated; never expose this to user input.
                ticket_value = eval(ticket_tuple[0]) #ticket value stored as a string, convert back to list
                user_id = ticket_tuple[1]
                mega, match = parse_ticket(winning_numbers, ticket_value)
                ticket_payout = determine_payout(mega, match)
                if ticket_payout != 0:
                    winner_dict = add_ticket_to_dict(winner_dict, user_id, ticket_value, ticket_payout)
                else:
                    loser_dict = add_ticket_to_dict(loser_dict, user_id, ticket_value, ticket_payout)
        results = {}
        # Pay out non-jackpot prizes; jackpot tickets are deferred to the split below.
        async with ctx.typing():
            for user_id, list_of_winning_tickets in winner_dict.items():
                balance_modifier = 0
                for ticket_tuple in list_of_winning_tickets:
                    ticket_value = Ticket(ticket_tuple[0])
                    ticket_payout = ticket_tuple[1]
                    if ticket_payout == payout_table[True][4]:
                        progressive_split.append([user_id, ticket_value])
                    else:
                        balance_modifier += ticket_payout
                new_user_balance = db.modify_user_balance(user_id, balance_modifier)
                results[user_id] = [balance_modifier, new_user_balance, list_of_winning_tickets]
                total_payout += balance_modifier
        jackpot_results = {}
        # Split the progressive evenly among all jackpot-winning tickets.
        if len(progressive_split) > 0:
            jackpot_progressive_share = round(progressive / len(progressive_split), 2)
            jackpot_payout = round(payout_table[True][4] + jackpot_progressive_share, 2)
            for ticket_tuple in progressive_split:
                user_id = ticket_tuple[0]
                ticket_value = ticket_tuple[1]
                total_payout += jackpot_payout
                new_user_balance = db.modify_user_balance(user_id, jackpot_payout)
                if user_id not in jackpot_results:
                    jackpot_results[user_id] = [jackpot_payout, new_user_balance, [ticket_value], jackpot_progressive_share]
                else:
                    jackpot_results[user_id][0] += jackpot_payout
                    jackpot_results[user_id][1] = new_user_balance
                    jackpot_results[user_id][2].append(ticket_value)
                    jackpot_results[user_id][3] += jackpot_progressive_share
            split_won = 'won' if len(jackpot_results) == 1 else 'split'
            await ctx.send("------------JACKPOT WINNAR!!!!!!-------------")
            for user_id, result in jackpot_results.items():
                jackpot_payout = result[0]
                new_user_balance = result[1]
                ticket_values = result[2] if len(result[2]) <= 10 else len(result[2])
                # NOTE(review): this rebinds progressive_split (the list above) to a
                # float; len(progressive_split) further down will raise after a
                # jackpot hit — verify and rename.
                progressive_split = result[3]
                user = await self.bot.fetch_user(user_id)
                await ctx.send('{} {} the Jackpot! Payout {:,}, your share of the progressive is {:,}! with {} tickets!!'.format(user.name, split_won, round(jackpot_payout,2), round(progressive_split,2), ticket_values))
                await user.send('You {} the Jackpot for lottory {} with ticket {}! {:,} has been deposited into your account. Your new balance is {}.'.format(split_won, lottory_id, ticket_value, round(jackpot_payout,2), new_user_balance))
        # Announce per-user totals in the embed and DM each winner their tickets.
        for user_id, result in results.items():
            jackpot_balance_modifier = jackpot_results[user_id][0] if user_id in jackpot_results else 0
            balance_modifier = result[0] + jackpot_balance_modifier
            new_user_balance = result[1]
            winning_tickets = result[2]
            user = await self.bot.fetch_user(user_id)
            embed_dict['fields'][user_id] = {'name': user.name, 'value': "Won a total of {:,} on {:,} winning tickers!".format(balance_modifier, len(winning_tickets)), 'inline': False}
            await user.send("Lottory {} Results: You won {:,}. Your new balance is {:,}.".format(lottory_id, balance_modifier, new_user_balance))
            if len(winning_tickets) < 100:
                for n in range(0, len(winning_tickets), 50):
                    await user.send("Your winnings tickets for Lottory {}: Winning Numbers:{} Your winners: {}".format(lottory_id, winning_numbers, winning_tickets[n:n+50]))
        await lottory_message.edit(embed=build_embed(embed_dict))
        # Record house income/outflow and publish the closing summary.
        income = ticket_cost * num_tickets
        payout_ratio = 100 * (total_payout - income) / income
        db.update_lottory_stats(lottory_id, income, total_payout)
        embed_dict['author_name'] = "Lottory {} ended!".format(lottory_id)
        embed_dict['fields'][0] = {"name": "{:,} tickets were sold for {:,}".format(num_tickets, income), 'value':"{:,} was paid out for a payout ratio of {}%".format(round(total_payout, 2), round(payout_ratio, 2))}
        await lottory_message.edit(embed=build_embed(embed_dict))
        # No jackpot hit: roll the progressive into the next lottory;
        # otherwise the jackpot reseeds to the base amount.
        if len(progressive_split) == 0:
            lottory_id = db.get_current_lottory() #Add progressive to next lottory
            db.modify_lottory_jackpot_prog(lottory_id, progressive)
        else:
            await ctx.send("Jackpot has been reseeded to {:,}".format(payout_table[True][4]))
@lottory.command(invoke_without_command=True)
async def stats(self, ctx, lottory_id=None):
'''Returns lifetime lottory statistics, or stats of specific lottory_id if given'''
stats_list = db.get_lottory_stats(lottory_id)
total_income = 0
total_outflow = 0
lottory_id_total = '{}'.format(lottory_id) if lottory_id is not None else 'lifetime'
for stats_tuple in stats_list:
income = stats_tuple[0]
outflow = stats_tuple[1]
total_income += income
total_outflow += outflow
if total_income > 0:
payout_ratio = 100 * (total_outflow - total_income) / total_income
await ctx.send("Lottory {} stats: Total income: {:,} Total payout: {:,} Payout Ratio: {}%".format(lottory_id_total, total_income, total_outflow, round(payout_ratio,2)))
else:
await ctx.send("There are not stats yet!")
@commands.group(invoke_without_command=True, aliases=['buy_tickets', 'bt'])
async def buy_ticket(self, ctx, first: int, second: int, third: int, fourth: int, mega: int):
"""
Purchase a lottory ticket, enter all 5 numbers seperated by spaces.
"""
lottory_id = db.get_current_lottory()
user_balance = db.get_user_balance(ctx.author.id)
ticket = [first, second, third, fourth,mega]
ticket_print = Ticket(ticket)
if user_balance < ticket_cost:
await ctx.send("That would cost {:,}, your balance is {:,}. Broke ass bitch".format(ticket_cost, user_balance))
return
#Validate ticket entry
for number in ticket[:4]:
if number not in numbers:
await ctx.send("{} is not a valid ticket, first 4 numbers must be between {}-{}}".format(ticket, numbers[0], numbers[-1]))
return
if ticket[4] not in range(1,12):
await ctx.send("{} is not a valid ticket, megaball must be between 1-11".format(ticket))
return
for i in range(3):
if ticket[i] in ticket[:i]:
await ctx.send("{} is not a valid ticket, first four numbers must be unique".format(ticket_print))
return
if ticket[i] in ticket[i+1:4]:
await ctx.send("{} is not a valid ticket, first four numbers must be unique".format(ticket_print))
return
progressive_add = ticket_cost * .1
db.add_ticket_to_user([ticket], lottory_id, ctx.author.id)
new_balance = db.modify_user_balance(ctx.author.id, -1 * ticket_cost)
db.modify_lottory_jackpot_prog(lottory_id, progressive_add)
new_progressive = db.get_lottory_jackpot_prog(lottory_id) + payout_table[True][4]
await ctx.send("{} purchased ticket {}, your balance is now {:,}. The progressive jackpot is now {:,}.".format(ctx.author.name, Ticket(ticket), new_balance, new_progressive))
@buy_ticket.command(aliases=['quickpick'])
async def qp(self, ctx, number_of_tickets=1):
"""
Quickpick tickets, enter a number to choose how many you want!
"""
lottory_id = db.get_current_lottory()
user_balance = db.get_user_balance(ctx.author.id)
total_cost = ticket_cost * number_of_tickets
if user_balance < total_cost:
await ctx.send("That would cost {:,}, your balance is {:,}. Broke ass bitch".format(total_cost, user_balance))
return
else:
async with ctx.typing():
ticket_list = quickpick(number_of_tickets)
progressive_add = number_of_tickets * ticket_cost * .1
db.add_ticket_to_user(ticket_list, lottory_id, ctx.author.id)
new_balance = db.modify_user_balance(ctx.author.id, -1 * total_cost)
db.modify_lottory_jackpot_prog(lottory_id, progressive_add)
new_progressive = db.get_lottory_jackpot_prog(lottory_id)
ticket_obj_list = list(map(lambda x: Ticket(x), ticket_list)) #Convert list of tickets to Ticket objects
if len(ticket_list) <= 5:
output_line_list = []
for ticket in ticket_list:
output_line_list.append('Quickpick ticket {} purchased by {}, good luck!'.format(Ticket(ticket), ctx.author.name))
await ctx.send("\n".join(output_line_list))
if number_of_tickets > 500:
await ctx.author.send("You bought {} tickets. I'm not going to send you all of them.".format(number_of_tickets))
else:
for n in range(0, len(ticket_list), 50):
await ctx.author.send("Lottory {} Quickpick tickets {}".format(lottory_id, ticket_list[n:n+50]))
await ctx.send("{} spent {:,} on {:,} tickets, new balance is {:,}. The jackpot is now {:,}".format(ctx.author.name, total_cost, number_of_tickets, round(new_balance,2), payout_table[True][4]+new_progressive))
def setup(bot):
    # discord.py extension entry point: register the Lottory cog on the bot.
    bot.add_cog(Lottory(bot))
| 50.51532
| 302
| 0.624262
|
4a007ce1dcc9e1c57868804a5eefc69a29a8e1f7
| 3,774
|
py
|
Python
|
tests/p2p_stress.py
|
cubetrain/CubeTrain
|
b930a3e88e941225c2c54219267f743c790e388f
|
[
"MIT"
] | null | null | null |
tests/p2p_stress.py
|
cubetrain/CubeTrain
|
b930a3e88e941225c2c54219267f743c790e388f
|
[
"MIT"
] | null | null | null |
tests/p2p_stress.py
|
cubetrain/CubeTrain
|
b930a3e88e941225c2c54219267f743c790e388f
|
[
"MIT"
] | null | null | null |
import testUtils
import p2p_test_peers
import random
import time
import copy
import threading
from core_symbol import CORE_SYMBOL
class StressNetwork:
    """Network stress test: fire token transfers at increasing rates.

    execute(i, ...) creates two throw-away accounts, funds the first, then
    performs speeds[i] transfers/second for `sec` seconds using up to
    `maxthreads` worker threads.
    """
    speeds=[1,5,10,30,60,100,500]  # target transactions per second, one entry per test level
    sec=10  # duration of each stress run, in seconds
    maxthreads=100  # upper bound on concurrent transfer threads
    trList=[]  # transactions issued by the worker threads; reset per execute() run
    # NOTE(review): trList is a class-level attribute, so it is shared by all
    # instances -- confirm only one StressNetwork is used at a time.
    def maxIndex(self):
        # Number of available speed levels (valid cmdInd values for execute()).
        return len(self.speeds)
    def randAcctName(self):
        # Random 12-char account name from charset a-z,1-5 -- presumably the
        # blockchain account-name alphabet; TODO confirm.
        s=""
        for i in range(12):
            s=s+random.choice("abcdefghijklmnopqrstuvwxyz12345")
        return s
    def _transfer(self, node, acc1, acc2, amount, threadId, round):
        # Worker-thread body: one transfer; the memo encodes (thread, round)
        # so every transaction is distinct.
        memo="%d %d" % (threadId, round)
        tr = node.transferFunds(acc1, acc2, amount, memo)
        self.trList.append(tr)
    def execute(self, cmdInd, node, ta, cubetrain):
        """Run one stress level (speeds[cmdInd] tx/s for `sec` seconds).

        Returns (transaction id list, receiving account name, expected final
        balance, error message) -- error message is "" on success.
        """
        print("\n==== network stress test: %d transaction(s)/s for %d secs ====" % (self.speeds[cmdInd], self.sec))
        total = self.speeds[cmdInd] * self.sec
        # Create the sending account (acc1).
        ta.name = self.randAcctName()
        acc1 = copy.copy(ta)
        print("creating new account %s" % (ta.name))
        tr = node.createAccount(ta, cubetrain, stakedDeposit=0, waitForTransBlock=True, exitOnError=True)
        trid = node.getTransId(tr)
        if trid is None:
            return ([], "", 0.0, "failed to create account")
        print("transaction id %s" % (trid))
        # Create the receiving account (acc2).
        ta.name = self.randAcctName()
        acc2 = copy.copy(ta)
        print("creating new account %s" % (ta.name))
        tr = node.createAccount(ta, cubetrain, stakedDeposit=0, waitForTransBlock=True, exitOnError=True)
        trid = node.getTransId(tr)
        if trid is None:
            return ([], "", 0.0, "failed to create account")
        print("transaction id %s" % (trid))
        # Fund acc1 so the transfers below cannot run dry.
        print("issue currency0000 into %s" % (acc1.name))
        contract="cubetrain"
        action="issue"
        data="{\"to\":\"" + acc1.name + "\",\"quantity\":\"1000000.0000 "+CORE_SYMBOL+"\"}"
        opts="--permission cubetrain@active"
        tr=node.pushMessage(contract, action, data, opts)
        trid = node.getTransId(tr[1])
        if trid is None:
            return ([], "", 0.0, "failed to issue currency0000")
        print("transaction id %s" % (trid))
        node.waitForTransInBlock(trid)
        self.trList = []
        expBal = 0
        # Cap the thread count at the requested rate; each cycle fires one
        # transfer per thread, so total = cycles * threads (rounded down).
        nthreads=self.maxthreads
        if nthreads > self.speeds[cmdInd]:
            nthreads = self.speeds[cmdInd]
        cycle = int(total / nthreads)
        total = cycle * nthreads # rounding
        delay = 1.0 / self.speeds[cmdInd] * nthreads  # target wall-time per cycle
        print("start currency0000 trasfer from %s to %s for %d times with %d threads" % (acc1.name, acc2.name, total, nthreads))
        t00 = time.time()
        for k in range(cycle):
            t0 = time.time()
            amount = 1
            threadList = []
            for m in range(nthreads):
                th = threading.Thread(target = self._transfer,args = (node, acc1, acc2, amount, m, k))
                th.start()
                threadList.append(th)
            for th in threadList:
                th.join()
            expBal = expBal + amount * nthreads
            t1 = time.time()
            # Sleep off any remaining budget to hold the requested rate.
            if (t1-t0 < delay):
                time.sleep(delay - (t1-t0))
        t11 = time.time()
        print("time used = %lf" % (t11 - t00))
        actBal = node.getAccountBalance(acc2.name)
        print("account %s: expect Balance:%d, actual Balance %d" % (acc2.name, expBal, actBal))
        # Collect transaction ids and wait for each to be included in a block.
        transIdlist = []
        for tr in self.trList:
            trid = node.getTransId(tr)
            transIdlist.append(trid)
            node.waitForTransInBlock(trid)
        return (transIdlist, acc2.name, expBal, "")
    def on_exit(self):
        # Hook called by the harness when the test suite finishes.
        print("end of network stress tests")
| 35.603774
| 129
| 0.555114
|
4a007ff3e1bf5852801832a76f36229eb40659a2
| 3,308
|
py
|
Python
|
amaascore/assets/fund.py
|
amaas-fintech/amaas-core-sdk-python
|
bd77884de6e5ab05d864638addeb4bb338a51183
|
[
"Apache-2.0"
] | null | null | null |
amaascore/assets/fund.py
|
amaas-fintech/amaas-core-sdk-python
|
bd77884de6e5ab05d864638addeb4bb338a51183
|
[
"Apache-2.0"
] | 8
|
2017-06-06T09:42:41.000Z
|
2018-01-16T10:16:16.000Z
|
amaascore/assets/fund.py
|
amaas-fintech/amaas-core-sdk-python
|
bd77884de6e5ab05d864638addeb4bb338a51183
|
[
"Apache-2.0"
] | 8
|
2017-01-18T04:14:01.000Z
|
2017-12-01T08:03:10.000Z
|
from __future__ import absolute_import, division, print_function, unicode_literals
from datetime import datetime, date
from dateutil.parser import parse
from decimal import Decimal
import sys
from amaascore.assets.asset import Asset
# This extremely ugly hack is due to the whole Python 2 vs 3 debacle.
# On Python 2 string-typed values may be either str or unicode; the tuple is
# only evaluated on the Python 2 branch, so `unicode` never leaks into Py3.
type_check = str if sys.version_info >= (3, 0, 0) else (str, unicode)
class Fund(Asset):
    """A fund asset (open-ended, closed-ended or ETF) built on top of Asset.

    Adds fund-specific attributes: fund_type, creation_date, nav,
    expense_ratio and net_assets.  Funds are always fungible.
    """

    def __init__(self, asset_manager_id, asset_id, fund_type, nav=None, expense_ratio=None, net_assets=None,
                 asset_issuer_id=None, asset_status='Active', roll_price=False, display_name='', description='',
                 country_id=None, venue_id=None, currency=None, creation_date=None,
                 links=None, references=None,
                 *args, **kwargs):
        if not hasattr(self, 'asset_class'):  # A more specific child class may have already set this
            self.asset_class = 'Fund'
        self.fund_type = fund_type
        self.creation_date = creation_date
        self.nav = nav
        self.expense_ratio = expense_ratio
        self.net_assets = net_assets
        super(Fund, self).__init__(asset_manager_id=asset_manager_id, asset_id=asset_id, fungible=True,
                                   asset_issuer_id=asset_issuer_id, asset_status=asset_status,
                                   display_name=display_name, roll_price=roll_price,
                                   description=description, country_id=country_id, venue_id=venue_id,
                                   currency=currency, links=links, references=references, *args, **kwargs)

    @property
    def creation_date(self):
        if hasattr(self, '_creation_date'):
            return self._creation_date

    @creation_date.setter
    def creation_date(self, value):
        """
        The date on which the fund was created.  Accepts a date or an
        ISO-formatted string (parsed with dateutil).

        :param creation_date:
        :return:
        """
        self._creation_date = parse(value).date() if isinstance(value, type_check) else value

    @property
    def nav(self):
        if hasattr(self, '_nav'):
            return self._nav

    @nav.setter
    def nav(self, nav):
        """
        The fund's net asset value, coerced to Decimal.

        NOTE(review): a falsy NAV (0, '') is stored as None -- confirm a
        genuine zero NAV is not expected here.

        :param nav:
        :return:
        """
        self._nav = Decimal(nav) if nav else None

    @property
    def expense_ratio(self):
        if hasattr(self, '_expense_ratio'):
            return self._expense_ratio

    @expense_ratio.setter
    def expense_ratio(self, expense_ratio):
        """
        The fund's expense ratio, coerced to Decimal (falsy values -> None).

        :param expense_ratio:
        :return:
        """
        self._expense_ratio = Decimal(expense_ratio) if expense_ratio else None

    @property
    def net_assets(self):
        if hasattr(self, '_net_assets'):
            return self._net_assets

    @net_assets.setter
    def net_assets(self, net_assets):
        """
        :param net_assets: An integer representing the net assets of the fund
        :return:
        """
        self._net_assets = net_assets

    @property
    def fund_type(self):
        return self._fund_type

    @fund_type.setter
    def fund_type(self, fund_type):
        """
        :param fund_type: One of ['Open', 'Closed', 'ETF']
        :raises ValueError: for any other value.  (The original silently
            ignored invalid values, leaving the attribute unset so a later
            read raised an opaque AttributeError.)
        :return:
        """
        if fund_type in ['Open', 'Closed', 'ETF']:
            self._fund_type = fund_type
        else:
            raise ValueError("Invalid fund_type: %s" % fund_type)
| 31.807692
| 112
| 0.617896
|
4a00808fd7f3ad7e98d27e3dd1009f7886dc5418
| 2,074
|
py
|
Python
|
data_loads/load_cabi_trips_membertype.py
|
georgetown-analytics/DC-Bikeshare
|
9f5a6a3256cff15a29f0dca6e9a9d8098ab2df28
|
[
"MIT"
] | 11
|
2018-07-01T16:43:05.000Z
|
2020-07-17T19:08:16.000Z
|
data_loads/load_cabi_trips_membertype.py
|
noahnewberger/Bikeshare-DC
|
42676654d103cdaddfb76db76d1eece533251261
|
[
"MIT"
] | 5
|
2021-02-08T20:21:12.000Z
|
2021-12-13T19:47:04.000Z
|
data_loads/load_cabi_trips_membertype.py
|
noahnewberger/Bikeshare-DC
|
42676654d103cdaddfb76db76d1eece533251261
|
[
"MIT"
] | 5
|
2018-10-05T19:54:20.000Z
|
2020-10-27T11:54:09.000Z
|
import pandas as pd
import os
import util_functions as uf
# TODO: Define function to pull CaBi Trip data from DDOT source
def trips_to_df(cabi_trip_dir):
    """Read every pipe-delimited ``.txt`` trip file in *cabi_trip_dir* and
    return one concatenated DataFrame.

    :param cabi_trip_dir: directory containing CaBi trip ``.txt`` exports
    :return: DataFrame with columns Startdate, Enddate, Bikenumber, MemberType
    :raises FileNotFoundError: when the directory holds no ``.txt`` files
        (clearer than the bare ValueError ``pd.concat([])`` would raise)
    """
    keep_cols = ["Startdate", "Enddate", "Bikenumber", "MemberType"]
    trip_df_list = []
    txt_files = [f for f in os.listdir(cabi_trip_dir) if f.endswith('.txt')]
    for txt_file in txt_files:
        # Log progress by base name (without the .txt extension).
        print("{} has started processing".format(txt_file[:-len('.txt')]))
        # The original stripped '.txt' only to immediately re-append it;
        # just read the file directly.
        trip_df = pd.read_csv(os.path.join(cabi_trip_dir, txt_file), delimiter="|")
        trip_df_list.append(trip_df[keep_cols])
    if not trip_df_list:
        raise FileNotFoundError("no .txt trip files found in {}".format(cabi_trip_dir))
    combined_df = pd.concat(trip_df_list, axis=0)
    return combined_df
def create_cabi_trips_membertype(cur):
    # Drop and recreate the cabi_trips_membertype table in AWS.
    # trip_id is the primary key assigned by the loader (see __main__ block).
    # NOTE(review): the original comment mentioned "CaBi Stations Geo Temp" --
    # likely a copy-paste from a sibling script; this creates the trips table.
    cur.execute("""
        DROP TABLE IF EXISTS cabi_trips_membertype;
        CREATE TABLE cabi_trips_membertype(
        Startdate timestamp,
        Enddate timestamp,
        Bikenumber varchar(30),
        member_type varchar(20),
        trip_id varchar(20) PRIMARY KEY
        );
        """)
if __name__ == "__main__":
# Connect to AWS
uf.set_env_path()
conn, cur = uf.aws_connect()
# Loop through all CSVs in cabi trip data folder
cabi_trip_dir = r'./data/Cabi_Trips_MemberType'
# Convert trip data from CSV to dataframe
combined_df = trips_to_df(cabi_trip_dir)
# Sort by StartDate and Add trip_id
combined_df.sort_values(['Startdate'], inplace=True)
combined_df.reset_index(inplace=True)
combined_df.drop(['index'], axis=1, inplace=True)
combined_df['trip_id'] = combined_df.index + 1
# Output dataframe as CSV
outname = "Cabi_Trips_MemberType"
combined_df.to_csv(os.path.join("data", outname + ".csv"), index=False, sep='|')
# Create Table in AWS
create_cabi_trips_membertype(cur)
# Load to Database
uf.aws_load(outname, "cabi_trips_membertype", cur)
# Commit changes to database
conn.commit()
| 30.5
| 92
| 0.675506
|
4a0080acebed22a142836b5fc2881b1d80663e8c
| 582
|
py
|
Python
|
matilda/data_pipeline/db_tutorial.py
|
AlainDaccache/Quantropy
|
6cfa06ed2b764471382ebf94d40af867f10433bb
|
[
"MIT"
] | 45
|
2021-01-28T04:12:21.000Z
|
2022-02-24T13:15:50.000Z
|
matilda/data_pipeline/db_tutorial.py
|
AlainDaccache/Quantropy
|
6cfa06ed2b764471382ebf94d40af867f10433bb
|
[
"MIT"
] | 32
|
2021-03-02T18:45:16.000Z
|
2022-03-12T00:53:10.000Z
|
matilda/data_pipeline/db_tutorial.py
|
AlainDaccache/Quantropy
|
6cfa06ed2b764471382ebf94d40af867f10433bb
|
[
"MIT"
] | 10
|
2020-12-25T15:02:40.000Z
|
2021-12-30T11:40:15.000Z
|
# print(Filing.objects.as_pymongo()) # returns list of all filing objects
# get filings for which assets > $100
# filings = Filing.objects(BalanceSheet__Assets__TotalAssets__gt=100)
# print(filings.as_pymongo())
# won't work because Company is a Reference document for Filings, not embedded document
# print(Filing.objects(company__cik__="0123456789")) # returns filings for company with cik
# this will:
# companies = Company.objects(cik="0123456789")
# filings = Filing.objects().filter(company__in=companies)
# print(filings.as_pymongo())
| 44.769231
| 96
| 0.725086
|
4a00813aad0d0b6368a61934d61ffc5c08e67877
| 12,338
|
py
|
Python
|
jieba/__init__.py
|
leonee/jieba
|
14a0ab046619bfffc1e8d892a3e0b4c8800f7cde
|
[
"MIT"
] | 1
|
2017-01-16T18:05:50.000Z
|
2017-01-16T18:05:50.000Z
|
jieba/__init__.py
|
leonee/jieba
|
14a0ab046619bfffc1e8d892a3e0b4c8800f7cde
|
[
"MIT"
] | null | null | null |
jieba/__init__.py
|
leonee/jieba
|
14a0ab046619bfffc1e8d892a3e0b4c8800f7cde
|
[
"MIT"
] | null | null | null |
from __future__ import with_statement
__version__ = '0.31'
__license__ = 'MIT'
import re
import os
import sys
import finalseg
import time
import tempfile
import marshal
from math import log
import random
import threading
from functools import wraps
import logging
DICTIONARY = "dict.txt"
DICT_LOCK = threading.RLock()
trie = None # to be initialized
FREQ = {}
min_freq = 0.0
total =0.0
user_word_tag_tab={}
initialized = False
log_console = logging.StreamHandler(sys.stderr)
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
logger.addHandler(log_console)
def setLogLevel(log_level):
global logger
logger.setLevel(log_level)
def gen_trie(f_name):
lfreq = {}
trie = {}
ltotal = 0.0
with open(f_name, 'rb') as f:
lineno = 0
for line in f.read().rstrip().decode('utf-8').split('\n'):
lineno += 1
try:
word,freq,_ = line.split(' ')
freq = float(freq)
lfreq[word] = freq
ltotal+=freq
p = trie
for c in word:
if c not in p:
p[c] ={}
p = p[c]
p['']='' #ending flag
except ValueError, e:
logger.debug('%s at line %s %s' % (f_name, lineno, line))
raise ValueError, e
return trie, lfreq,ltotal
def initialize(*args):
    """Load (or build and cache) the dictionary model, under DICT_LOCK.

    Optional single argument: path of a dictionary file to use instead of the
    module default DICTIONARY.  The built model (trie, FREQ, total, min_freq)
    is marshalled to a cache file in the temp directory for faster reloads.
    """
    global trie, FREQ, total, min_freq, initialized
    if len(args)==0:
        dictionary = DICTIONARY
    else:
        dictionary = args[0]
    with DICT_LOCK:
        if initialized:
            return
        if trie:
            del trie
            trie = None
        _curpath=os.path.normpath( os.path.join( os.getcwd(), os.path.dirname(__file__) ) )
        abs_path = os.path.join(_curpath,dictionary)
        logger.debug("Building Trie..., from %s" % abs_path)
        t1 = time.time()
        if abs_path == os.path.join(_curpath,"dict.txt"): #defautl dictionary
            cache_file = os.path.join(tempfile.gettempdir(),"jieba.cache")
        else: #customer dictionary
            cache_file = os.path.join(tempfile.gettempdir(),"jieba.user."+str(hash(abs_path))+".cache")
        load_from_cache_fail = True
        # Reuse the marshalled cache only when it is newer than the dictionary.
        if os.path.exists(cache_file) and os.path.getmtime(cache_file)>os.path.getmtime(abs_path):
            logger.debug("loading model from cache %s" % cache_file)
            try:
                trie,FREQ,total,min_freq = marshal.load(open(cache_file,'rb'))
                load_from_cache_fail = False
            except:
                load_from_cache_fail = True
        if load_from_cache_fail:
            trie,FREQ,total = gen_trie(abs_path)
            FREQ = dict([(k,log(float(v)/total)) for k,v in FREQ.iteritems()]) #normalize
            min_freq = min(FREQ.itervalues())
            logger.debug("dumping model to file cache %s" % cache_file)
            try:
                # Write to a temp name, then rename -- a concurrent reader never
                # sees a half-written cache file.
                tmp_suffix = "."+str(random.random())
                with open(cache_file+tmp_suffix,'wb') as temp_cache_file:
                    marshal.dump((trie,FREQ,total,min_freq),temp_cache_file)
                if os.name=='nt':
                    import shutil
                    replace_file = shutil.move
                else:
                    replace_file = os.rename
                replace_file(cache_file+tmp_suffix,cache_file)
            except:
                logger.error("dump cache file failed.")
                logger.exception("")
        initialized = True
        logger.debug("loading model cost %s seconds." % (time.time() - t1))
        logger.debug("Trie has been built succesfully.")
def require_initialized(fn):
    """Decorator: lazily initialize the model before the first call to fn."""
    @wraps(fn)
    def wrapped(*args, **kwargs):
        global initialized
        if initialized:
            return fn(*args, **kwargs)
        else:
            initialize(DICTIONARY)
            return fn(*args, **kwargs)
    return wrapped
def __cut_all(sentence):
    # Full mode: emit every dictionary word found in the DAG, overlapping
    # segments included.
    dag = get_DAG(sentence)
    old_j = -1
    for k,L in dag.iteritems():
        if len(L)==1 and k>old_j:
            yield sentence[k:L[0]+1]
            old_j = L[0]
        else:
            for j in L:
                if j>k:
                    yield sentence[k:j+1]
                    old_j = j
def calc(sentence,DAG,idx,route):
    # Dynamic programming over the DAG, right to left: route[i] becomes
    # (best log-probability of sentence[i:], end index of the first word).
    N = len(sentence)
    route[N] = (0.0,'')
    for idx in xrange(N-1,-1,-1):
        # min_freq is the OOV fallback log-probability.
        candidates = [ ( FREQ.get(sentence[idx:x+1],min_freq) + route[x+1][0],x ) for x in DAG[idx] ]
        route[idx] = max(candidates)
@require_initialized
def get_DAG(sentence):
    # Build the word DAG by walking the trie: DAG[i] lists every j such that
    # sentence[i:j+1] is a dictionary word; positions with no word get [i].
    N = len(sentence)
    i,j=0,0
    p = trie
    DAG = {}
    while i<N:
        c = sentence[j]
        if c in p:
            p = p[c]
            if '' in p:  # '' marks a complete word at this trie node
                if i not in DAG:
                    DAG[i]=[]
                DAG[i].append(j)
            j+=1
            if j>=N:
                i+=1
                j=i
                p=trie
        else:
            # Mismatch: restart the trie walk from the next start position.
            p = trie
            i+=1
            j=i
    for i in xrange(len(sentence)):
        if i not in DAG:
            DAG[i] =[i]
    return DAG
def __cut_DAG_NO_HMM(sentence):
    # Best-path segmentation without the HMM; consecutive single ASCII
    # letters/digits are buffered and emitted as one token.
    re_eng = re.compile(ur'[a-zA-Z0-9]',re.U)
    DAG = get_DAG(sentence)
    route ={}
    calc(sentence,DAG,0,route=route)
    x = 0
    N = len(sentence)
    buf = u''
    while x<N:
        y = route[x][1]+1  # end of the best word starting at x
        l_word = sentence[x:y]
        if re_eng.match(l_word) and len(l_word)==1:
            buf += l_word
            x =y
        else:
            if len(buf)>0:
                yield buf
                buf = u''
            yield l_word
            x =y
    if len(buf)>0:
        # Flush any trailing ASCII run.
        yield buf
        buf = u''
def __cut_DAG(sentence):
    # Best-path segmentation; runs of single characters are re-segmented with
    # the HMM (finalseg) to recognize out-of-vocabulary words.
    DAG = get_DAG(sentence)
    route ={}
    calc(sentence,DAG,0,route=route)
    x = 0
    buf =u''
    N = len(sentence)
    while x<N:
        y = route[x][1]+1
        l_word = sentence[x:y]
        if y-x==1:
            # Single-character word: accumulate, to be resolved below.
            buf+= l_word
        else:
            if len(buf)>0:
                if len(buf)==1:
                    yield buf
                    buf=u''
                else:
                    if (buf not in FREQ):
                        # Unknown multi-char run: let the HMM segment it.
                        regognized = finalseg.cut(buf)
                        for t in regognized:
                            yield t
                    else:
                        for elem in buf:
                            yield elem
                    buf=u''
            yield l_word
        x =y
    # Flush the trailing single-character run the same way.
    if len(buf)>0:
        if len(buf)==1:
            yield buf
        else:
            if (buf not in FREQ):
                regognized = finalseg.cut(buf)
                for t in regognized:
                    yield t
            else:
                for elem in buf:
                    yield elem
def cut(sentence,cut_all=False,HMM=True):
    """Segment *sentence* (str or unicode) and yield words.

    cut_all=True selects full mode (every dictionary word, overlapping);
    HMM toggles unknown-word recognition in accurate mode.
    """
    if not isinstance(sentence, unicode):
        try:
            sentence = sentence.decode('utf-8')
        except UnicodeDecodeError:
            # Fall back to GBK for legacy encodings, dropping bad bytes.
            sentence = sentence.decode('gbk','ignore')
    # re_han matches runs we segment; re_skip matches separators passed through.
    re_han, re_skip = re.compile(ur"([\u4E00-\u9FA5a-zA-Z0-9+#&\._]+)", re.U), re.compile(ur"(\r\n|\s)", re.U)
    if cut_all:
        re_han, re_skip = re.compile(ur"([\u4E00-\u9FA5]+)", re.U), re.compile(ur"[^a-zA-Z0-9+#\n]", re.U)
    blocks = re_han.split(sentence)
    if HMM:
        cut_block = __cut_DAG
    else:
        cut_block = __cut_DAG_NO_HMM
    if cut_all:
        cut_block = __cut_all
    for blk in blocks:
        if len(blk)==0:
            continue
        if re_han.match(blk):
            for word in cut_block(blk):
                yield word
        else:
            # Non-segmentable block: pass separators through, and in accurate
            # mode emit remaining characters one by one.
            tmp = re_skip.split(blk)
            for x in tmp:
                if re_skip.match(x):
                    yield x
                elif not cut_all:
                    for xx in x:
                        yield xx
                else:
                    yield x
def cut_for_search(sentence,HMM=True):
    """Finer-grained segmentation for search engines: besides each word,
    also yield its in-dictionary 2- and 3-grams."""
    words = cut(sentence,HMM=HMM)
    for w in words:
        if len(w)>2:
            for i in xrange(len(w)-1):
                gram2 = w[i:i+2]
                if gram2 in FREQ:
                    yield gram2
        if len(w)>3:
            for i in xrange(len(w)-2):
                gram3 = w[i:i+3]
                if gram3 in FREQ:
                    yield gram3
        yield w
@require_initialized
def load_userdict(f):
    """Load a user dictionary (file path or open file object).

    Each line is "word freq [tag]" (space-separated, utf-8); words are merged
    into the global model via add_word.
    """
    global trie,total,FREQ
    if isinstance(f, (str, unicode)):
        f = open(f, 'rb')
    content = f.read().decode('utf-8')
    line_no = 0
    for line in content.split("\n"):
        line_no+=1
        if line.rstrip()=='': continue
        tup =line.split(" ")
        word,freq = tup[0],tup[1]
        if line_no==1:
            word = word.replace(u'\ufeff',u"") #remove bom flag if it exists
        if len(tup)==3:
            add_word(word, freq, tup[2])
        else:
            add_word(word, freq)
def add_word(word, freq, tag=None):
    """Insert one word (raw frequency, optional POS tag) into the live model."""
    global FREQ, trie, total, user_word_tag_tab
    freq = float(freq)
    # Normalize against the dictionary's total, matching initialize().
    FREQ[word] = log(freq / total)
    if tag is not None:
        user_word_tag_tab[word] = tag.strip()
    p = trie
    for c in word:
        if c not in p:
            p[c] = {}
        p = p[c]
    p[''] = '' # ending flag
# Keep references to the serial implementations so enable_parallel /
# disable_parallel can swap cut / cut_for_search in and out.
__ref_cut = cut
__ref_cut_for_search = cut_for_search
def __lcut(sentence):
    # List (non-generator) wrappers: multiprocessing needs picklable results.
    return list(__ref_cut(sentence,False))
def __lcut_no_hmm(sentence):
    return list(__ref_cut(sentence,False,False))
def __lcut_all(sentence):
    return list(__ref_cut(sentence,True))
def __lcut_for_search(sentence):
    return list(__ref_cut_for_search(sentence))
@require_initialized
def enable_parallel(processnum=None):
    """Replace cut/cut_for_search with multiprocessing versions.

    Input is split on newline runs and the parts are segmented in a worker
    pool.  POSIX only; processnum defaults to the CPU count.
    """
    global pool,cut,cut_for_search
    if os.name=='nt':
        raise Exception("jieba: parallel mode only supports posix system")
    if sys.version_info[0]==2 and sys.version_info[1]<6:
        raise Exception("jieba: the parallel feature needs Python version>2.5 ")
    from multiprocessing import Pool,cpu_count
    if processnum==None:
        processnum = cpu_count()
    pool = Pool(processnum)
    def pcut(sentence,cut_all=False,HMM=True):
        # Parallel drop-in for cut(): keep the newline separators (captured
        # by the regex group) so offsets/order are preserved.
        parts = re.compile('([\r\n]+)').split(sentence)
        if cut_all:
            result = pool.map(__lcut_all,parts)
        else:
            if HMM:
                result = pool.map(__lcut,parts)
            else:
                result = pool.map(__lcut_no_hmm,parts)
        for r in result:
            for w in r:
                yield w
    def pcut_for_search(sentence):
        # Parallel drop-in for cut_for_search().
        parts = re.compile('([\r\n]+)').split(sentence)
        result = pool.map(__lcut_for_search,parts)
        for r in result:
            for w in r:
                yield w
    cut = pcut
    cut_for_search = pcut_for_search
def disable_parallel():
    """Restore the serial cut/cut_for_search and shut down the worker pool."""
    global pool,cut,cut_for_search
    if 'pool' in globals():
        pool.close()
        pool = None
    cut = __ref_cut
    cut_for_search = __ref_cut_for_search
def set_dictionary(dictionary_path):
    """Point the module at a different main dictionary.

    The model is rebuilt lazily on the next segmentation call (initialized
    is reset under DICT_LOCK).
    """
    global initialized, DICTIONARY
    with DICT_LOCK:
        abs_path = os.path.normpath( os.path.join( os.getcwd(), dictionary_path ) )
        if not os.path.exists(abs_path):
            raise Exception("jieba: path does not exists:" + abs_path)
        DICTIONARY = abs_path
        initialized = False
def get_abs_path_dict():
    """Return the absolute path of the dictionary file currently in use."""
    _curpath=os.path.normpath( os.path.join( os.getcwd(), os.path.dirname(__file__) ) )
    abs_path = os.path.join(_curpath,DICTIONARY)
    return abs_path
def tokenize(unicode_sentence,mode="default",HMM=True):
    """Yield (word, start, end) tuples over *unicode_sentence*.

    mode "default" tokenizes with plain cut(); "search" additionally yields
    in-dictionary 2-/3-grams with their character offsets.
    """
    #mode ("default" or "search")
    if not isinstance(unicode_sentence, unicode):
        raise Exception("jieba: the input parameter should unicode.")
    start = 0
    if mode=='default':
        for w in cut(unicode_sentence,HMM=HMM):
            width = len(w)
            yield (w,start,start+width)
            start+=width
    else:
        for w in cut(unicode_sentence,HMM=HMM):
            width = len(w)
            if len(w)>2:
                for i in xrange(len(w)-1):
                    gram2 = w[i:i+2]
                    if gram2 in FREQ:
                        yield (gram2,start+i,start+i+2)
            if len(w)>3:
                for i in xrange(len(w)-2):
                    gram3 = w[i:i+3]
                    if gram3 in FREQ:
                        yield (gram3,start+i,start+i+3)
            # The full word is yielded last, after its sub-grams.
            yield (w,start,start+width)
            start+=width
| 29.306413
| 110
| 0.523748
|
4a0081824af78a067c2d055390e229b115bb66c6
| 9,832
|
py
|
Python
|
src/sync.py
|
cykyy/unms-ros-sync
|
36bcf6638ea0bfda6c5da6f1c5d8a1b863c17784
|
[
"MIT"
] | 1
|
2020-10-03T10:52:50.000Z
|
2020-10-03T10:52:50.000Z
|
src/sync.py
|
cykyy/unms-ros-sync
|
36bcf6638ea0bfda6c5da6f1c5d8a1b863c17784
|
[
"MIT"
] | null | null | null |
src/sync.py
|
cykyy/unms-ros-sync
|
36bcf6638ea0bfda6c5da6f1c5d8a1b863c17784
|
[
"MIT"
] | 1
|
2021-04-14T10:00:11.000Z
|
2021-04-14T10:00:11.000Z
|
try:
from src.unmscrm import *
from src.parser import *
from .ros import ros_api
except Exception as e:
print(':Error: some modules are missing. {}'.format(e))
class Sync:
    """Synchronize client state (passwords, packages, enabled/disabled) from
    the UNMS/UCRM billing system into a MikroTik RouterOS PPP secret list.

    The CRM is treated as the source of truth; the router is mutated to match.
    """
    # NOTE(review): these are created at class-definition time; if construction
    # fails the error is only printed, and later attribute access will raise.
    try:
        ros_api = ros_api()
        prs_obj = ParseString()
    except Exception as es:
        print('Error occurred during creating objects in sync. {}'.format(es))

    def sync_password_crm(self, all_crm_clients, all_rtr_clients):
        """Push CRM PPP passwords to the router for matching clients.

        When the CRM has no password, one is derived from the registration
        date via ParseString.parse_regdate.
        """
        for x in all_crm_clients:
            for y in all_rtr_clients:
                if x.get('c_ident') == y.get('c_ident'):
                    try:
                        if x.get('p_pw') != y.get('p_pw'):
                            _password = x.get('p_pw')
                            if _password is None:
                                _password = self.prs_obj.parse_regdate(x.get('reg_date'))
                                if _password != y.get('p_pw'):
                                    self.ros_api.update_secret_password(y.get('c_ident'), _password)
                            else:
                                self.ros_api.update_secret_password(y.get('c_ident'), _password)
                    except Exception as espc:
                        print(espc)

    def sync_enabled(self, ena_c_crm, ena_c_rtr, disa_c_rtr):
        """Make the router's enabled clients match the CRM's enabled clients.

        Clients enabled in the CRM but disabled on the router are re-enabled;
        clients absent from the router entirely are created.  Package changes
        detected along the way are pushed as profile updates.
        """
        ena_clients_crm = ena_c_crm
        ena_clients_rtr = ena_c_rtr
        disa_clients_rtr = disa_c_rtr
        ena_matched_clients_crm = []  # enabled in both places, authority crm
        ena_unmatched_clients_crm = []  # enabled in crm but not matched in rtr
        for x_ena_c in ena_clients_crm:
            for y_ena_r in ena_clients_rtr:
                if x_ena_c.get('c_ident') == y_ena_r.get('c_ident'):
                    ena_matched_clients_crm.append(x_ena_c)
                    # If the enabled client's package changed, push the profile update.
                    if x_ena_c.get('spack') is not None:
                        if x_ena_c.get('spack') != y_ena_r.get('profile'):
                            self.ros_api.update_secret_profile(y_ena_r.get('c_ident'), x_ena_c.get('spack'))
        for x in ena_clients_crm:
            if x not in ena_matched_clients_crm:
                ena_unmatched_clients_crm.append(
                    x)  # clients not in the same state in routerOS
        new_clients = ena_unmatched_clients_crm.copy()
        temp_new_clients = new_clients.copy()
        print('Enabled Now in RTR(ena):', end=' ')
        # Checking if we are still connected with routerOS or not.
        if self.ros_api.check_connection_ros() is True:
            # Clients enabled in crm but disabled in rtr -> enable them.
            for unmatched in temp_new_clients:
                for disa_rt in disa_clients_rtr:
                    if unmatched.get('c_ident') == disa_rt.get('c_ident'):
                        # BUG FIX: bind _try_res before the try -- the finally
                        # below read it even when the enable call raised first,
                        # producing a NameError instead of the error log.
                        _try_res = ''
                        try:
                            if unmatched.get('spack') is not None:
                                if unmatched.get('spack') != disa_rt.get('profile'):
                                    self.ros_api.update_secret_profile(disa_rt.get('c_ident'), unmatched.get('spack'))
                            _try_res = self.ros_api.set_ppp_enable(
                                unmatched.get('c_ident'))  # enable in router from disable state
                        except Exception as seu:
                            print(seu)
                        finally:
                            if _try_res is False:
                                print(':Error: executing command on RTR')
                        print(unmatched.get('c_ident'), end=', ')  # log which client was enabled
                        new_clients.remove(unmatched)
        print()
        # Create the clients that do not exist in the router at all.
        try:
            self.ros_api.add_clients_enable_rtr(new_clients)
        except Exception as esa:
            print(':Error: during adding clients! {}'.format(esa))
        print('Created Now in RTR(ena):', end=' ')
        for _to_log in new_clients:
            if _to_log.get('spack') in self.ros_api.get_profile():
                print(_to_log.get('c_ident'), end=', ')
        print()

    def sync_disabled(self, disa_c_crm, ena_c_rtr, disa_c_rtr):
        """Make the router's disabled clients match the CRM's disabled clients.

        Clients disabled in the CRM but still enabled on the router are
        disabled (and their active PPP session removed); CRM clients that have
        a service but are absent from the router are created.
        """
        disa_clients_crm = disa_c_crm
        ena_clients_rtr = ena_c_rtr
        disa_clients_rtr = disa_c_rtr
        disa_matched_clients_crm = []
        disa_unmatched_clients_crm = []
        to_create = []
        for x_disa_c in disa_clients_crm:
            for y_disa_r in disa_clients_rtr:
                if x_disa_c.get('c_ident') == y_disa_r.get('c_ident'):
                    # Client disabled in both places.
                    disa_matched_clients_crm.append(x_disa_c)
                    # Push a package change even while the client is disabled.
                    if x_disa_c.get('spack') is not None:
                        if x_disa_c.get('spack') != y_disa_r.get('profile'):
                            self.ros_api.update_secret_profile(y_disa_r.get('c_ident'), x_disa_c.get('spack'))
        for x in disa_clients_crm:
            if x not in disa_matched_clients_crm:
                disa_unmatched_clients_crm.append(x)
        # Clients created & disabled in ucrm but missing (or enabled) in rtr.
        print('Not exist in RTR(disa) & not enabled in CRM:', end=' ')
        for _to_log in disa_unmatched_clients_crm:
            print(_to_log.get('c_ident'), end=', ')
        print()
        new_clients = disa_unmatched_clients_crm.copy()
        temp_new_clients = new_clients.copy()
        print('Disabled Now in RTR(disa):', end='')
        # Checking if we are still connected with routerOS or not.
        if self.ros_api.check_connection_ros() is True:
            for unmatched in temp_new_clients:
                for ena_rt in ena_clients_rtr:  # compare with routerOS enabled list
                    if unmatched.get('c_ident') == ena_rt.get('c_ident'):
                        # Client disabled in crm but enabled in rtr -> disable it.
                        _try_res = ''
                        try:
                            _try_res = self.ros_api.set_ppp_disable(
                                unmatched.get('c_ident'))
                            if _try_res:
                                self.ros_api.remove_active_ppp_secret(unmatched.get('c_ident'))
                        except Exception as sd:
                            print(':Error: on sync disabled', sd)
                        finally:
                            if _try_res is False:
                                print(':Error: executing command on RTR')
                        print(unmatched.get('c_ident'), end=', ')  # for logs
                        disa_matched_clients_crm.append(unmatched)
                        new_clients.remove(unmatched)  # not needed for now
        print()  # line break
        # Only create clients that have a service and are either active (1)
        # or suspended (3) in the crm.
        for x in disa_clients_crm:
            if x not in disa_matched_clients_crm:
                if x.get('spack') is not None and (x.get('service_status') == 1 or x.get('service_status') == 3):
                    to_create.append(x)
        # BUG FIX: bind _try_res_two before the try -- the finally below read
        # it even when add_clients_enable_rtr raised, producing a NameError.
        _try_res_two = None
        try:
            _try_res_two = self.ros_api.add_clients_enable_rtr(to_create)
        except Exception as addce:
            print(':Error: during creating brand new clients! {}'.format(addce))
        finally:
            if _try_res_two is False:
                print(':Error: executing command on RTR')
        print('Created Now ena/disa clients of crm who are not available in RTR(disa): ', end='')
        for _to_log in to_create:
            print(_to_log.get('c_ident'), end=', ')
        print()
| 51.47644
| 118
| 0.570077
|
4a0081ed8361ed206d89755dac1aea315d9208ea
| 67
|
py
|
Python
|
main.py
|
Alexander9673/discord-lib-py-test
|
871785d51b76d79bf5ec403b678451a3854e7835
|
[
"MIT"
] | null | null | null |
main.py
|
Alexander9673/discord-lib-py-test
|
871785d51b76d79bf5ec403b678451a3854e7835
|
[
"MIT"
] | null | null | null |
main.py
|
Alexander9673/discord-lib-py-test
|
871785d51b76d79bf5ec403b678451a3854e7835
|
[
"MIT"
] | null | null | null |
from src import Client as client
# NOTE(review): "YOUR TOKEN" is a placeholder — supply a real bot token
# (ideally from an environment variable) before running.
bot = client.Client("YOUR TOKEN")
| 22.333333
| 33
| 0.761194
|
4a0081ffcd0614df4f5f6255a7bf9562aac9074d
| 2,098
|
py
|
Python
|
train_zero_layer.py
|
ElanaPearl/ml-playground
|
be0309c7546c267bda3bf4c970d55b7ec673b199
|
[
"Apache-2.0"
] | null | null | null |
train_zero_layer.py
|
ElanaPearl/ml-playground
|
be0309c7546c267bda3bf4c970d55b7ec673b199
|
[
"Apache-2.0"
] | null | null | null |
train_zero_layer.py
|
ElanaPearl/ml-playground
|
be0309c7546c267bda3bf4c970d55b7ec673b199
|
[
"Apache-2.0"
] | null | null | null |
import torch
from torch.functional import Tensor
from optimizers import Adam
from zero_layer_transformer import (
Embedder,
Logits,
Tokens,
Unembedder,
ZeroLayerTransformer,
)
class Softmax:
    """Softmax over the vocab (last) dimension of (batch, seq, vocab) logits,
    with a manual backward pass through the stored probabilities."""

    def forward(self, logits: Logits) -> Tensor:
        """Return per-position probabilities; each (batch, pos) row sums to 1."""
        # Subtract the per-position max for numeric stability, doesn't affect
        # results. (The old code subtracted the GLOBAL max and, worse,
        # normalized by the GLOBAL sum, so rows did not sum to 1.)
        normalized_logits = logits - logits.max(dim=-1, keepdim=True).values
        exp_logits = normalized_logits.exp()
        self.probs = exp_logits / exp_logits.sum(dim=-1, keepdim=True)
        return self.probs

    def backward(self, softmax_grads: Tensor) -> Logits:
        """Equation to calculate derivative of softmax:
        dL/dx_i = ∑_j dL/d_Sj * dS_j/dx_i   [by chain rule]
        where S_i = softmax(x_i) and:
        dS_i/dx_i = S_i(1 - S_i)
        dS_i/dx_j = -S_i*S_j   [when i != j]
        """
        kronecker_delta = torch.eye(self.probs.shape[2])
        outer = torch.einsum("bnv,bnw->bnvw", self.probs, self.probs)
        # jacobian[b,n,v,w] = dS_v/dx_w = δ_vw * S_v - S_v * S_w
        jacobian = -outer + torch.einsum("vw,bnv->bnvw", kronecker_delta, self.probs)
        # Chain rule contracts the incoming grads against the jacobian's second
        # prob axis (w). The old spec "bnvw,bnv->bnv" never contracted grads at
        # all — it scaled grads[v] by a row-sum of the jacobian.
        return torch.einsum("bnvw,bnw->bnv", jacobian, softmax_grads)
class SoftmaxCrossEntropyLoss:
    """Summed negative log-likelihood of integer labels under raw logits."""

    def forward(self, logits: Logits, labels: Tokens) -> Tensor:
        """Return the total cross-entropy loss.

        logits: (batch, seq, vocab) raw scores; labels: (batch, seq) token ids.
        """
        # BUG FIX: Softmax.forward is an instance method; calling it on the
        # class bound `logits` to `self` and raised TypeError. Instantiate it.
        probs = Softmax().forward(logits)
        # probs are BxNxV but labels are BxN so labels need an extra dim for gather
        labels_3d = labels[:, :, None]
        predicted_probs_for_labels = probs.gather(dim=2, index=labels_3d)
        return -predicted_probs_for_labels.log().sum()
def train_step(
    model: ZeroLayerTransformer,
    token_batch: Tokens,
    optimizer: Adam,
    loss_fn: SoftmaxCrossEntropyLoss,
) -> float:
    """Run one forward/backward/update step and return the scalar loss."""
    # Position i predicts the token at position i + 1, so inputs drop the
    # final token (no label for it) and labels drop the first (no input
    # token precedes it).
    inputs: Tokens = token_batch[:, :-1]
    labels: Tokens = token_batch[:, 1:]

    logits = model.forward(inputs)
    loss = loss_fn.forward(logits, labels)

    # Backpropagate through the loss, then the model, then apply the update.
    grads = loss_fn.backward(loss)
    model.backward(logit_grads=grads)
    optimizer.step()

    return loss.item()
| 32.276923
| 85
| 0.667779
|
4a008215cc9749a013f959ee011e99ab80d590d0
| 2,173
|
py
|
Python
|
utils.py
|
ankit13sharma/TradingWithYFinance
|
45d86fec78524bb93e7813a519153d041f81ef02
|
[
"MIT"
] | null | null | null |
utils.py
|
ankit13sharma/TradingWithYFinance
|
45d86fec78524bb93e7813a519153d041f81ef02
|
[
"MIT"
] | null | null | null |
utils.py
|
ankit13sharma/TradingWithYFinance
|
45d86fec78524bb93e7813a519153d041f81ef02
|
[
"MIT"
] | null | null | null |
import yfinance as yf
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from scipy.stats import norm
def stock_data(symbol, time_period, time_interval):
    """Download OHLCV history for *symbol* from Yahoo Finance.

    Parameters: ticker symbol, lookback period and bar interval, all in
    yfinance's string syntax (e.g. '1y', '1d').
    Returns a DataFrame indexed by daily Period, duplicate dates averaged.
    """
    stock = pd.DataFrame(yf.download(symbol, period=time_period, interval=time_interval))
    stock.index = pd.to_datetime(stock.index, format="%Y%m%d").to_period('D')
    if len(stock.index) != len(stock.index.unique()):
        # Collapse duplicate dates by averaging. BUG FIX: the old code also
        # called .reset_index(), silently replacing the Period index with a
        # RangeIndex on this path only, so callers got a different frame
        # shape depending on whether duplicates happened to exist.
        stock = stock.groupby(stock.index).mean()
    return stock
def all_returns(data: pd.DataFrame, frequency):
    """Compute simple returns and annualized stats from a price DataFrame.

    Parameters
    ----------
    data : prices, one column per asset.
    frequency : "annual" (factor 12) or "monthly" (factor 252).
        NOTE(review): this label->factor mapping looks inverted (daily data
        usually annualizes with 252, monthly with 12) — confirm the intended
        meaning with callers; the original values are preserved here.

    Returns
    -------
    (per-period returns, geometric mean per-period return,
     annualized return, annualized volatility)

    Raises
    ------
    ValueError for an unrecognized frequency (the old code crashed later
    with NameError on `factor` instead).
    """
    if frequency == "annual":
        factor = 12
    elif frequency == "monthly":
        factor = 252
    else:
        raise ValueError("frequency must be 'annual' or 'monthly', got %r" % (frequency,))
    # bfill() + fill_method=None replaces the deprecated
    # pct_change(fill_method='bfill') form with identical results.
    returns = data.bfill().pct_change(fill_method=None)
    returns = returns.dropna()
    returns_prod = (1 + returns).prod(axis=0)
    period_returns = returns_prod ** (1 / len(returns)) - 1
    annual_returns = returns_prod ** (factor / len(returns)) - 1
    annual_vol = returns.std() * (factor ** 0.5)
    return returns, period_returns, annual_returns, annual_vol
def sharp_ratio(annual_returns, annual_vol, riskfree_rate):
    """Sharpe ratio: excess return over the risk-free rate per unit of
    annualized volatility."""
    excess_return = annual_returns - riskfree_rate
    return excess_return / annual_vol
def drawdown(return_series: pd.Series):
    """Compute the drawdown profile of a return series.

    Takes a time series of asset returns and returns a DataFrame with the
    wealth index (starting from 1000), the running previous peak, and the
    percentage drawdown from that peak.
    """
    wealth = (1 + return_series).cumprod() * 1000
    peaks = wealth.cummax()
    return pd.DataFrame({
        "Wealth": wealth,
        "Previous Peak": peaks,
        "Drawdown": (wealth - peaks) / peaks,
    })
def get_thresholds_mean_k(arr:pd.DataFrame,scale):
    """Compute per-column k threshold series and their running means.

    For each column y (rescaled by *scale*), kt = y_t / (y_{t-1} * (1 - y_{t-1})).
    kt_mean[i] is the mean of kt over rows [0, i) with inf/NaN entries dropped.
    Returns the pair (kt, kt_mean), both indexed like *arr*.

    NOTE(review): assumes arr/scale keeps y strictly between 0 and 1 so the
    denominator is nonzero — values of exactly 0 or 1 produce inf (later
    dropped from the mean but kept in kt). Confirm with callers.
    """
    kt = pd.DataFrame(index = arr.index)
    kt_mean = pd.DataFrame(index = arr.index)
    for column in arr.columns:
        y = arr[column]/scale
        # Ratio of each value to the logistic-style term of its predecessor.
        kt[column] = y /(y.shift(1)*(1-y.shift(1)))
        # kt[column] = kt[column].replace(np.inf,np.nan).mean(skipna=True)
        # Expanding mean up to (but excluding) each row, ignoring inf/NaN.
        kt_mean[column] = [kt[column][:i].replace(np.inf,np.nan).dropna().mean(skipna=True) for i in range(len(arr.index))]
    return kt,kt_mean
| 40.240741
| 123
| 0.675104
|
4a00826409bc5b086c790cc78eb1c0d05c171286
| 4,163
|
py
|
Python
|
geoscript/style/label.py
|
jericks/geoscript-py
|
8dc600343f42dd87b1981023522a1a8cf56006a9
|
[
"MIT"
] | null | null | null |
geoscript/style/label.py
|
jericks/geoscript-py
|
8dc600343f42dd87b1981023522a1a8cf56006a9
|
[
"MIT"
] | null | null | null |
geoscript/style/label.py
|
jericks/geoscript-py
|
8dc600343f42dd87b1981023522a1a8cf56006a9
|
[
"MIT"
] | null | null | null |
from geoscript.style import util
from geoscript.style.expression import Expression
from geoscript.style.font import Font
from geoscript.style.fill import Fill
from geoscript.style.halo import Halo
from geoscript.style.color import Color
from geoscript.style.property import Property
from geoscript.style.symbolizer import Symbolizer
from org.geotools.styling import TextSymbolizer
class Label(Symbolizer):
  """
  Symbolizer for labelling a geometry.

  The ``property`` argument specifies the field or attribute with which to generate labels from.

  The ``font`` and ``color`` arguments specify the label font and color
  respectively.

  >>> Label('foo')
  Label(property=foo)
  """

  def __init__(self, property, font=None, color=None):
    Symbolizer.__init__(self)
    # Expression that pulls the label text out of each feature.
    self.property = Property(property)
    self.color = Color(color) if color else None
    self._font = Font(font) if font else None
    self._halo = None
    self._placement = None

  def font(self, font):
    """
    Sets the font for this label. The ``font`` argument is a string describing the
    font attributes. See :class:`Font <geoscript.style.font.Font>` for supported
    syntax.

    >>> label = Label('foo').font('italic bold 12px "Times New Roman"')
    """
    self._font = Font(font)
    return self

  def halo(self, fill=None, radius=None):
    """
    Generates a halo for this label.

    The ``fill`` and ``radius`` arguments specify the :class:`Fill` and radius to
    use for the halo.

    >>> from geoscript.style import Fill
    >>> label = Label('foo').halo(Fill('#ffffff'), 2)
    """
    self._halo = Halo(fill, radius)
    return self

  def point(self, anchor=(0.5,0.5), displace=(0,0), rotate=0):
    """
    Sets the label placement relative to a point.

    The ``anchor`` argument is a tuple that specifies how the label should be
    anchored along an xy axis relative to the geometry being labeled. Allowable
    values range from (0,0) to (1,1) ordered from the bottom left corner to the top
    right corner of the label.

    The ``displacement`` argument is a tuple that specifies how the label should be
    displaced along an xy axis.

    The ``rotate`` argument specifies in degrees the angle at which to rotate the
    label.

    >>> label = Label('foo').point((0.5,0), (0,5))
    """
    f = self.factory
    # Wrap the raw numbers as GeoTools expressions before building the
    # PointPlacement.
    ap = f.createAnchorPoint(Expression(anchor[0]).expr,Expression(anchor[1]).expr)
    dp = f.createDisplacement(Expression(displace[0]).expr,
      Expression(displace[1]).expr)
    self._placement = f.createPointPlacement(ap, dp, Expression(rotate).expr)
    return self

  def linear(self, offset=0, gap=None, igap=None, align=False, follow=False,
             group=False, displace=None, repeat=None):
    """
    Sets the label placement relative to a line.

    The ``offset`` argument specifies the perpindicular distance from the line at
    which to position the label.

    The ``align`` argument specifies whether to align the label along the line. The
    ``follow`` argument specifies whether to curve the label in order to force it
    to follow the line.

    >>> label = Label('foo').linear(align=True, follow=True)
    """
    f = self.factory
    lp = f.createLinePlacement(Expression(offset).expr)
    lp.setAligned(align)
    #lp.setRepeated(repeat)
    if gap:
      lp.setGap(Expression(gap).expr)
    if igap:
      lp.setInitialGap(Expression(igap).expr)
    self._placement = lp
    # follow/group/displace/repeat travel as renderer vendor options rather
    # than on the LinePlacement object itself.
    self.options = {'followLine': follow, 'group': group}
    if displace:
      self.options['maxDisplacement'] = displace
    if repeat:
      self.options['repeat'] = repeat
    return self

  def _prepare(self, rule):
    # Apply this label's settings to every TextSymbolizer attached to the rule.
    syms = util.symbolizers(rule, TextSymbolizer)
    for sym in syms:
      self._apply(sym)

  def _apply(self, sym):
    Symbolizer._apply(self, sym)
    sym.setLabel(self.property.expr)
    if self._font:
      self._font._apply(sym)
    if self._halo:
      self._halo._apply(sym)
    if self.color:
      sym.setFill(Fill(self.color)._fill())
    if self._placement:
      sym.setLabelPlacement(self._placement)

  def __repr__(self):
    return self._repr('property')
| 30.837037
| 96
| 0.679078
|
4a0082e6b5cecedd0581d9326b6ca94f58dcc3ac
| 68
|
py
|
Python
|
teepy/__init__.py
|
longapalooza/teepy
|
5faf00a7136e11db95b37f80fddcfad6b1b7a054
|
[
"MIT"
] | null | null | null |
teepy/__init__.py
|
longapalooza/teepy
|
5faf00a7136e11db95b37f80fddcfad6b1b7a054
|
[
"MIT"
] | null | null | null |
teepy/__init__.py
|
longapalooza/teepy
|
5faf00a7136e11db95b37f80fddcfad6b1b7a054
|
[
"MIT"
] | null | null | null |
__version__ = '0.3.6'
# Uppercase alias kept for backward compatibility with callers that used it.
__VERSION__ = __version__
from .teepy import *
| 22.666667
| 25
| 0.75
|
4a00836f3bcfb91b4a0b6ffe6a71d34f12f9c165
| 241
|
py
|
Python
|
extract.py
|
foamliu/DeepRankIQA
|
7801cb4ff2c934a9d954ace9ad52600f96396125
|
[
"MIT"
] | 4
|
2019-11-04T07:34:17.000Z
|
2020-04-21T02:22:37.000Z
|
extract.py
|
foamliu/DeepRankIQA
|
7801cb4ff2c934a9d954ace9ad52600f96396125
|
[
"MIT"
] | 1
|
2019-11-14T03:52:00.000Z
|
2019-11-14T09:15:15.000Z
|
extract.py
|
foamliu/DeepRankIQA
|
7801cb4ff2c934a9d954ace9ad52600f96396125
|
[
"MIT"
] | null | null | null |
import zipfile
def extract(filename, destination='data'):
    """Extract a zip archive into *destination* (default: 'data').

    The context manager guarantees the archive handle is closed even if
    extraction raises (the old open/close pair leaked the handle on error).
    """
    print('Extracting {}...'.format(filename))
    with zipfile.ZipFile(filename, 'r') as zip_ref:
        zip_ref.extractall(destination)


if __name__ == "__main__":
    extract('data/data.zip')
| 18.538462
| 46
| 0.659751
|
4a0084080f9320912104e523d0a354bcfe5253c3
| 37
|
py
|
Python
|
wiremock/tests/__init__.py
|
sp1rs/python-wiremock
|
b570b0ebc60ac0d873812f21f78f2a8a4353792f
|
[
"Apache-2.0"
] | 22
|
2017-07-01T14:44:04.000Z
|
2021-09-08T08:45:21.000Z
|
wiremock/tests/__init__.py
|
sp1rs/python-wiremock
|
b570b0ebc60ac0d873812f21f78f2a8a4353792f
|
[
"Apache-2.0"
] | 37
|
2017-04-24T15:28:27.000Z
|
2021-09-20T08:58:26.000Z
|
wiremock/tests/__init__.py
|
sp1rs/python-wiremock
|
b570b0ebc60ac0d873812f21f78f2a8a4353792f
|
[
"Apache-2.0"
] | 22
|
2017-04-24T14:58:06.000Z
|
2021-09-09T09:22:31.000Z
|
from .base import BaseClientTestCase
| 18.5
| 36
| 0.864865
|
4a0085b01c5a18b447c31112ffa1b7db9ab1d6b0
| 442
|
py
|
Python
|
Shoots/bin/info.py
|
lyh-ADT/Shoots
|
0133c6c277148687c368ad16cfe23ff0a4194ecd
|
[
"MIT"
] | 8
|
2020-07-23T10:23:37.000Z
|
2022-03-17T07:24:26.000Z
|
Shoots/bin/info.py
|
lyh-ADT/Shoots
|
0133c6c277148687c368ad16cfe23ff0a4194ecd
|
[
"MIT"
] | 1
|
2022-03-17T09:12:13.000Z
|
2022-03-18T13:43:58.000Z
|
Shoots/bin/info.py
|
lyh-ADT/Shoots
|
0133c6c277148687c368ad16cfe23ff0a4194ecd
|
[
"MIT"
] | 1
|
2022-01-05T13:20:11.000Z
|
2022-01-05T13:20:11.000Z
|
import time
class Info:
    """Per-tick game event snapshot plus the opcode constants for the wire
    protocol."""

    # Movement / facing / action opcodes.
    OP_MOVE_UP = 0
    OP_MOVE_DOWN = 1
    OP_MOVE_LEFT = 2
    OP_MOVE_RIGHT = 3
    FACE_UP = 4
    FACE_DONW = 5  # historical typo kept so existing callers don't break
    FACE_DOWN = 5  # correctly spelled alias of FACE_DONW
    FACE_LEFT = 6
    FACE_RIGHT = 7
    OP_SHOOT = 8

    def __init__(self):
        # Events accumulated since the previous snapshot.
        self.sound = []
        self.shooter = []

    def get_dict(self):
        """Serialize to a plain dict; 'time' is the current epoch time in
        whole milliseconds."""
        return {
            'sound': self.sound,
            'shooter': self.shooter,
            'time': round(time.time()*1000)
        }
| 18.416667
| 42
| 0.513575
|
4a0086b04fed8d25e72fcedabbaa11af26ffad26
| 110
|
py
|
Python
|
2021/examples-in-class-2021-09-10/indentation_example1.py
|
ati-ozgur/course-python
|
38237d120043c07230658b56dc3aeb01c3364933
|
[
"Apache-2.0"
] | 1
|
2021-02-04T16:59:11.000Z
|
2021-02-04T16:59:11.000Z
|
2021/examples-in-class-2021-09-10/indentation_example1.py
|
ati-ozgur/course-python
|
38237d120043c07230658b56dc3aeb01c3364933
|
[
"Apache-2.0"
] | null | null | null |
2021/examples-in-class-2021-09-10/indentation_example1.py
|
ati-ozgur/course-python
|
38237d120043c07230658b56dc3aeb01c3364933
|
[
"Apache-2.0"
] | 1
|
2019-10-30T14:37:48.000Z
|
2019-10-30T14:37:48.000Z
|
#x = -15
x = 15
# Indentation determines block membership: both indented print calls belong
# to the if-block, so with x = 15 only the unindented last print runs.
# Uncomment x = -15 above to see all three lines printed.
if x < 0:
    print('A negative number is entered')
    print("second line")
print("third line")
| 13.75
| 41
| 0.618182
|
4a008827abdbe26cca4100758c5d53892861230e
| 5,118
|
py
|
Python
|
monopoly/constants.py
|
XargsUK/FlameCogs
|
f4f3368a9816fa7a7af19ee59ea4c3307f72fa93
|
[
"MIT"
] | 26
|
2019-02-11T09:00:01.000Z
|
2022-03-03T07:33:08.000Z
|
monopoly/constants.py
|
XargsUK/FlameCogs
|
f4f3368a9816fa7a7af19ee59ea4c3307f72fa93
|
[
"MIT"
] | 15
|
2019-04-19T14:19:52.000Z
|
2022-03-15T17:20:38.000Z
|
monopoly/constants.py
|
XargsUK/FlameCogs
|
f4f3368a9816fa7a7af19ee59ea4c3307f72fa93
|
[
"MIT"
] | 34
|
2018-12-05T22:37:44.000Z
|
2022-01-16T05:02:20.000Z
|
# Display name of each board tile, indexed by tile number (0 = Go, clockwise
# through 39 = Boardwalk).
TILENAME = [
	'Go', 'Mediterranean Avenue',
	'Community Chest', 'Baltic Avenue',
	'Income Tax', 'Reading Railroad',
	'Oriental Avenue', 'Chance',
	'Vermont Avenue', 'Connecticut Avenue',
	'Jail', 'St. Charles Place',
	'Electric Company', 'States Avenue',
	'Virginia Avenue', 'Pennsylvania Railroad',
	'St. James Place', 'Community Chest',
	'Tennessee Avenue', 'New York Avenue',
	'Free Parking', 'Kentucky Avenue',
	'Chance', 'Indiana Avenue',
	'Illinois Avenue', 'B&O Railroad',
	'Atlantic Avenue', 'Ventnor Avenue',
	'Water Works', 'Marvin Gardens',
	'Go To Jail', 'Pacific Avenue',
	'North Carolina Avenue', 'Community Chest',
	'Pennsylvania Avenue', 'Short Line',
	'Chance', 'Park Place',
	'Luxury Tax', 'Boardwalk'
]
# Purchase price of each tile, indexed by tile number; -1 marks tiles that
# cannot be bought (Go, Chance/Chest, taxes, Jail, Free Parking, Go To Jail).
PRICEBUY = [
	-1, 60, -1, 60, -1,
	200, 100, -1, 100, 120,
	-1, 140, 150, 140, 160,
	200, 180, -1, 180, 200,
	-1, 220, -1, 220, 240,
	200, 260, 260, 150, 280,
	-1, 300, 300, -1, 320,
	200, -1, 350, -1, 400
]
# Rent for each tile as a flat list of 6 tiers per tile (unimproved, 1-4
# houses, hotel): RENTPRICE[tile * 6 + tier]. Non-street tiles are all -1.
# BUG FIX: tile 23 (Indiana Avenue) had an unimproved rent of 10; the rest of
# its row (90/250/700/875/1050) matches Kentucky Avenue exactly and the
# official rent for the $220 red streets is 18.
RENTPRICE = [
	-1, -1, -1, -1, -1, -1,
	2, 10, 30, 90, 160, 250,
	-1, -1, -1, -1, -1, -1,
	4, 20, 60, 180, 360, 450,
	-1, -1, -1, -1, -1, -1,
	-1, -1, -1, -1, -1, -1,
	6, 30, 90, 270, 400, 550,
	-1, -1, -1, -1, -1, -1,
	6, 30, 90, 270, 400, 550,
	8, 40, 100, 300, 450, 600,
	-1, -1, -1, -1, -1, -1,
	10, 50, 150, 450, 625, 750,
	-1, -1, -1, -1, -1, -1,
	10, 50, 150, 450, 625, 750,
	12, 60, 180, 500, 700, 900,
	-1, -1, -1, -1, -1, -1,
	14, 70, 200, 550, 750, 950,
	-1, -1, -1, -1, -1, -1,
	14, 70, 200, 550, 750, 950,
	16, 80, 220, 600, 800, 1000,
	-1, -1, -1, -1, -1, -1,
	18, 90, 250, 700, 875, 1050,
	-1, -1, -1, -1, -1, -1,
	18, 90, 250, 700, 875, 1050,
	20, 100, 300, 750, 925, 1100,
	-1, -1, -1, -1, -1, -1,
	22, 110, 330, 800, 975, 1150,
	22, 110, 330, 800, 975, 1150,
	-1, -1, -1, -1, -1, -1,
	24, 120, 360, 850, 1025, 1200,
	-1, -1, -1, -1, -1, -1,
	26, 130, 390, 900, 1100, 1275,
	26, 130, 390, 900, 1100, 1275,
	-1, -1, -1, -1, -1, -1,
	28, 150, 450, 1000, 1200, 1400,
	-1, -1, -1, -1, -1, -1,
	-1, -1, -1, -1, -1, -1,
	35, 175, 500, 1100, 1300, 1500,
	-1, -1, -1, -1, -1, -1,
	50, 200, 600, 1400, 1700, 2000
]
# Rent owed per railroad landing, indexed by how many railroads the owner holds.
RRPRICE = [0, 25, 50, 100, 200]
# Community Chest card texts.
CCNAME = [
	'Advance to Go (Collect $200)',
	'Bank error in your favor\nCollect $200',
	'Doctor\'s fee\nPay $50',
	'From sale of stock you get $50',
	'Get Out of Jail Free',
	'Go to Jail\nGo directly to jail\nDo not pass Go\nDo not collect $200',
	'Grand Opera Night\nCollect $50 from every player for opening night seats',
	'Holiday Fund matures\nReceive $100',
	'Income tax refund\nCollect $20',
	'It is your birthday\nCollect $10',
	'Life insurance matures\nCollect $100',
	'Pay hospital fees of $100',
	'Pay school fees of $150',
	'Receive $25 consultancy fee',
	'You are assessed for street repairs\n$40 per house\n$115 per hotel',
	'You have won second prize in a beauty contest\nCollect $10',
	'You inherit $100'
]
# Chance card texts.
CHANCENAME = [
	'Advance to Go (Collect $200)',
	'Advance to Illinois Ave\nIf you pass Go, collect $200.',
	'Advance to St. Charles Place\nIf you pass Go, collect $200',
	(
		'Advance token to nearest Utility. If unowned, you may buy it from the Bank. '
		'If owned, throw dice and pay owner a total ten times the amount thrown.'
	), (
		'Advance token to the nearest Railroad and pay owner twice the rental to which '
		'he/she is otherwise entitled. If Railroad is unowned, you may buy it from the Bank.'
	),
	'Bank pays you dividend of $50',
	'Get Out of Jail Free',
	'Go Back 3 Spaces',
	'Go to Jail\nGo directly to Jail\nDo not pass Go\nDo not collect $200',
	'Make general repairs on all your property\nFor each house pay $25\nFor each hotel $100',
	'Pay poor tax of $15',
	'Take a trip to Reading Railroad\nIf you pass Go, collect $200',
	'Take a walk on the Boardwalk\nAdvance token to Boardwalk',
	'You have been elected Chairman of the Board\nPay each player $50',
	'Your building and loan matures\nCollect $150',
	'You have won a crossword competition\nCollect $100'
]
# Mortgage value of each tile (half the purchase price); -1 for unbuyable tiles.
MORTGAGEPRICE = [
	-1, 30, -1, 30, -1,
	100, 50, -1, 50, 60,
	-1, 70, 75, 70, 80,
	100, 90, -1, 90, 100,
	-1, 110, -1, 110, 120,
	100, 130, 130, 75, 140,
	-1, 150, 150, -1, 160,
	100, -1, 175, -1, 200
]
# Cost to lift a mortgage: mortgage value plus 10% interest, rounded up.
# NOTE(review): entry 37 (Park Place) is 188, but 175 + 10% rounds to 193 —
# verify against the rest of the table before relying on it.
TENMORTGAGEPRICE = [
	-1, 33, -1, 33, -1,
	110, 55, -1, 55, 66,
	-1, 77, 83, 77, 88,
	110, 99, -1, 99, 110,
	-1, 121, -1, 121, 132,
	110, 143, 143, 83, 154,
	-1, 165, 165, -1, 176,
	110, -1, 188, -1, 220
]
# Cost per house on each street; -1 for tiles that cannot take houses.
HOUSEPRICE = [
	-1, 50, -1, 50, -1,
	-1, 50, -1, 50, 50,
	-1, 100, -1, 100, 100,
	-1, 100, -1, 100, 100,
	-1, 150, -1, 150, 150,
	-1, 150, 150, -1, 150,
	-1, 200, 200, -1, 200,
	-1, -1, 200, -1, 200
]
# Tile indexes belonging to each street color group.
PROPGROUPS = {
	'Brown': [1, 3], 'Light Blue': [6, 8, 9],
	'Pink': [11, 13, 14], 'Orange': [16, 18, 19],
	'Red': [21, 23, 24], 'Yellow': [26, 27, 29],
	'Green': [31, 32, 34], 'Dark Blue': [37, 39]
}
# Reverse lookup: tile index -> color/group name (railroads and utilities too).
PROPCOLORS = {
	1: 'Brown', 3: 'Brown',
	6: 'Light Blue', 8: 'Light Blue', 9: 'Light Blue',
	11: 'Pink', 13: 'Pink', 14: 'Pink',
	16: 'Orange', 18: 'Orange', 19: 'Orange',
	21: 'Red', 23: 'Red', 24: 'Red',
	26: 'Yellow', 27: 'Yellow', 29: 'Yellow',
	31: 'Green', 32: 'Green', 34: 'Green',
	37: 'Dark Blue', 39: 'Dark Blue',
	5: 'Railroad', 15: 'Railroad', 25: 'Railroad', 35: 'Railroad',
	12: 'Utility', 28: 'Utility'
}
| 30.831325
| 90
| 0.586557
|
4a00892baad35fe9ac1702e987108df6bef4599e
| 1,877
|
py
|
Python
|
ee/clickhouse/sql/clickhouse.py
|
msnitish/posthog
|
cb86113f568e72eedcb64b5fd00c313d21e72f90
|
[
"MIT"
] | null | null | null |
ee/clickhouse/sql/clickhouse.py
|
msnitish/posthog
|
cb86113f568e72eedcb64b5fd00c313d21e72f90
|
[
"MIT"
] | null | null | null |
ee/clickhouse/sql/clickhouse.py
|
msnitish/posthog
|
cb86113f568e72eedcb64b5fd00c313d21e72f90
|
[
"MIT"
] | null | null | null |
# Note for the vary: these engine definitions (and many table definitions) are not in sync with cloud!
from typing import Literal
from django.conf import settings
STORAGE_POLICY = lambda: "SETTINGS storage_policy = 'hot_to_cold'" if settings.CLICKHOUSE_ENABLE_STORAGE_POLICY else ""
KAFKA_ENGINE = "Kafka('{kafka_host}', '{topic}', '{group}', '{serialization}')"
KAFKA_PROTO_ENGINE = """
Kafka () SETTINGS
kafka_broker_list = '{kafka_host}',
kafka_topic_list = '{topic}',
kafka_group_name = '{group}',
kafka_format = 'Protobuf',
kafka_schema = '{proto_schema}',
kafka_skip_broken_messages = {skip_broken_messages}
"""
GENERATE_UUID_SQL = """
SELECT generateUUIDv4()
"""
# The kafka_engine automatically adds these columns to the kafka tables. We use
# this string to add them to the other tables as well.
KAFKA_COLUMNS = """
, _timestamp DateTime
, _offset UInt64
"""
def kafka_engine(
    topic: str,
    kafka_host=None,
    group="group1",
    serialization: Literal["JSONEachRow", "Protobuf"] = "JSONEachRow",
    proto_schema=None,
    skip_broken_messages=100,
):
    """Render the ClickHouse Kafka engine clause for a table.

    Falls back to JSONEachRow when Protobuf is requested but external
    schemas are disabled in settings.
    """
    host = kafka_host if kafka_host is not None else settings.KAFKA_HOSTS_FOR_CLICKHOUSE
    use_protobuf = (
        serialization == "Protobuf" and not settings.CLICKHOUSE_DISABLE_EXTERNAL_SCHEMAS
    )
    if not use_protobuf:
        return KAFKA_ENGINE.format(topic=topic, kafka_host=host, group=group, serialization="JSONEachRow")
    return KAFKA_PROTO_ENGINE.format(
        topic=topic,
        kafka_host=host,
        group=group,
        proto_schema=proto_schema,
        skip_broken_messages=skip_broken_messages,
    )
def ttl_period(field: str = "created_at", weeks: int = 3):
    """Build a TTL clause expiring rows *weeks* after *field*; empty when
    running under test settings."""
    if settings.TEST:
        return ""
    return f"TTL toDate({field}) + INTERVAL {weeks} WEEK"
def trim_quotes_expr(expr: str) -> str:
    """Wrap *expr* in a ClickHouse call that strips a leading and/or trailing
    double quote from its value."""
    pattern = '^"|"$'
    return "replaceRegexpAll({}, '{}', '')".format(expr, pattern)
| 31.283333
| 119
| 0.695791
|
4a00894e34b9f7711607afd4698be3f937187f7f
| 126
|
py
|
Python
|
students/k3342/laboratory_works/Kataeva_Veronika/laboratory_work_1/scoreboard/board/templatetags/index.py
|
KataevaVeronika/ITMO_ICT_WebProgramming_2020
|
d8cf1d0479519bbafd34d4678e9eda2eabaeb0cf
|
[
"MIT"
] | null | null | null |
students/k3342/laboratory_works/Kataeva_Veronika/laboratory_work_1/scoreboard/board/templatetags/index.py
|
KataevaVeronika/ITMO_ICT_WebProgramming_2020
|
d8cf1d0479519bbafd34d4678e9eda2eabaeb0cf
|
[
"MIT"
] | null | null | null |
students/k3342/laboratory_works/Kataeva_Veronika/laboratory_work_1/scoreboard/board/templatetags/index.py
|
KataevaVeronika/ITMO_ICT_WebProgramming_2020
|
d8cf1d0479519bbafd34d4678e9eda2eabaeb0cf
|
[
"MIT"
] | null | null | null |
from django import template
register = template.Library()
@register.filter
def index(indexable, i):
return indexable[i-1]
| 21
| 29
| 0.761905
|
4a008999e264e0c3cd54a081835acff3bdcc6399
| 479
|
py
|
Python
|
Dataset/Leetcode/test/111/559.py
|
kkcookies99/UAST
|
fff81885aa07901786141a71e5600a08d7cb4868
|
[
"MIT"
] | null | null | null |
Dataset/Leetcode/test/111/559.py
|
kkcookies99/UAST
|
fff81885aa07901786141a71e5600a08d7cb4868
|
[
"MIT"
] | null | null | null |
Dataset/Leetcode/test/111/559.py
|
kkcookies99/UAST
|
fff81885aa07901786141a71e5600a08d7cb4868
|
[
"MIT"
] | null | null | null |
class Solution:
    def XXX(self, root: "TreeNode") -> int:
        """Return the minimum depth of a binary tree via BFS: the first leaf
        dequeued is at the minimum depth, so we can stop immediately."""
        if not root:
            return 0
        # Local import keeps the LeetCode-style snippet self-contained.
        # deque gives O(1) popleft; the old list.pop(0) was O(n) per dequeue.
        from collections import deque
        queue = deque([(1, root)])
        while queue:
            depth, node = queue.popleft()
            if not node.left and not node.right:
                return depth
            if node.left:
                queue.append((depth + 1, node.left))
            if node.right:
                queue.append((depth + 1, node.right))
| 28.176471
| 53
| 0.48643
|
4a008ad352d2c4a2e2483d213bb6895386b2e661
| 2,576
|
py
|
Python
|
comps/models/comp.py
|
dlanghorne0428/dancesport-tracker-projec
|
e55d91a4f03c26d6ee8c28846a809064adfdb158
|
[
"MIT"
] | null | null | null |
comps/models/comp.py
|
dlanghorne0428/dancesport-tracker-projec
|
e55d91a4f03c26d6ee8c28846a809064adfdb158
|
[
"MIT"
] | 87
|
2020-04-15T22:29:03.000Z
|
2022-01-02T02:21:28.000Z
|
comps/models/comp.py
|
dlanghorne0428/dancesport-tracker-projec
|
e55d91a4f03c26d6ee8c28846a809064adfdb158
|
[
"MIT"
] | null | null | null |
from datetime import datetime, timezone
from django.db import models
from cloudinary_storage.storage import RawMediaCloudinaryStorage
from cloudinary.models import CloudinaryField
def comp_logo_path(instance, filename):
    """Build the upload path for a comp's file: comps/<comp title>/<filename>."""
    return f"comps/{instance.title}/{filename}"
class Comp(models.Model):
    """A dancesport competition plus the state of its heatsheet/scoresheet
    processing pipeline."""
    # Basic identity and schedule.
    title = models.CharField(max_length=100)
    location = models.CharField(max_length=100)
    start_date = models.DateField()
    end_date = models.DateField()
    logo = CloudinaryField('logo', blank=True)

    # the different file formats pointed to by the URLs
    COMP_MNGR = 'CM'
    COMP_ORG = 'CO'
    NDCA_PREM = 'ND'
    NDCA_FEED = 'NF'
    O2CM = "O2"
    DATA_FORMAT_CHOICES = [
        (COMP_MNGR, 'Comp Manager'),
        (COMP_ORG, 'Comp Organizer'),
        (NDCA_PREM , 'NDCA Premier'),
        (NDCA_FEED , 'NDCA Premier - Feed'),
        (O2CM, 'O2cm.com')
    ]
    url_data_format = models.CharField(
        max_length=2,
        choices=DATA_FORMAT_CHOICES,
        default=COMP_MNGR,
    )

    # URLs are optional, blank=True allows that, use heatlist_file if URL not available
    heatsheet_url = models.URLField(blank=True)
    heatsheet_file = models.FileField(upload_to=comp_logo_path, blank=True, storage=RawMediaCloudinaryStorage())

    # this field indicates when the heatsheet was loaded;
    # the Unix-epoch default means "never loaded yet"
    default_time = datetime(1970,1,1,tzinfo=timezone.utc)
    heatsheet_load_time = models.DateTimeField(null=True, default=default_time)
    scoresheet_url = models.URLField(blank=True)

    # the different states of processing a competition
    # (codes are stored in the DB — do not rename existing values)
    INITIAL = "IN"
    DANCERS_LOADED = "DL"
    DANCER_NAMES_FORMATTED = "DNF"
    HEATS_LOADED = "HL"
    HEAT_STYLES_DEFINED = "HSD"
    HEAT_LEVELS_DEFINED = "HLD"
    HEAT_ENTRIES_MATCHED = "CEM"
    SCORESHEETS_LOADED = "SSL"
    RESULTS_RESOLVED = "RR"
    COMPLETE = "FIN"
    PROCESS_STATE_CHOICES = [
        (INITIAL, 'Comp Initialized'),
        (DANCERS_LOADED, 'Dancers Loaded'),
        (DANCER_NAMES_FORMATTED, 'Dancer Names Formatted'),
        (HEATS_LOADED, 'Heats Loaded'),
        (HEAT_STYLES_DEFINED, 'Heat Styles Defined'),
        (HEAT_LEVELS_DEFINED, 'Heat Levels Defined'),
        (HEAT_ENTRIES_MATCHED, 'Heat Entries Matched'),
        (SCORESHEETS_LOADED, 'Scoresheets Loaded'),
        (RESULTS_RESOLVED, 'Results Resolved '),
        (COMPLETE, 'Processing Complete')
    ]
    process_state = models.CharField(
        max_length = 3,
        choices=PROCESS_STATE_CHOICES,
        default=INITIAL,
    )

    def __str__(self):
        return self.title
| 33.025641
| 112
| 0.67663
|
4a008b217b6136d36feb25926c61002154f11120
| 2,483
|
py
|
Python
|
iroha_cli/_commands.py
|
2hoursleep/Iroha-CLI
|
d6d39da8728a20b7ab9cf8a6e32eb061612fcb89
|
[
"Apache-2.0"
] | null | null | null |
iroha_cli/_commands.py
|
2hoursleep/Iroha-CLI
|
d6d39da8728a20b7ab9cf8a6e32eb061612fcb89
|
[
"Apache-2.0"
] | null | null | null |
iroha_cli/_commands.py
|
2hoursleep/Iroha-CLI
|
d6d39da8728a20b7ab9cf8a6e32eb061612fcb89
|
[
"Apache-2.0"
] | null | null | null |
import click
# ACCOUNT COMMANDS
class CommandsAPI:
    """
    Iroha Commands API utilities
    Supports gRPC & HTTP JSON Transactions & Queries

    Each method interactively prompts for its inputs with click and forwards
    them to the wrapped iroha_client.
    """

    def __init__(self, iroha_client):
        # Client object that signs and submits the actual transactions.
        self.iroha_client = iroha_client

    # Account Commands
    def create_new_user_account(self):
        """Prompt for the details of a new account and create it."""
        user_name = click.prompt("Username For New Account")
        domain = click.prompt("Domain")
        public_key = click.prompt("Public Key")
        self.iroha_client.create_new_account(user_name, domain, public_key)

    def detach_role(self):
        """Prompt for an account and a role, then detach that role.

        BUG FIX: the old prompts were copy-pasted from account creation and
        asked for "Username For New Account"/"Domain" instead of the
        account id and role this command actually needs.
        """
        account_id = click.prompt("Account To Use : Username@domain")
        role_name = click.prompt("Role Name To Detach")
        self.iroha_client.detach_role_tx(account_id, role_name)

    def write_account_detail(self):
        """Prompt for a key/value pair and store it on the account."""
        account_id = click.prompt("Account To Use : Username@domain")
        key = click.prompt("Enter New Key, existing key entries will be overwritten")
        value = click.prompt("Please enter a value to set")
        self.iroha_client.set_account_detail(account_id, key, value)

    def grant_acc_read_permission(self):
        """Prompt for the granting account and the grantee contact."""
        account_id = click.prompt("Account To Use : Username@domain")
        contact = click.prompt("Username@domain Your Write Acc Granting Permission")
        self.iroha_client.grant_account_read_permission(
            account_id=account_id, contact=contact
        )

    # ASSET COMMANDS#
    def create_new_asset(self):
        """Prompt for asset name, domain and precision, then create it."""
        asset = click.prompt("New Asset Name Only")
        domain = click.prompt("Domain Name Only")
        precision = click.prompt("Precision", type=int)
        self.iroha_client.create_new_asset(asset, domain, precision)

    def new_asset_transfer(self):
        """Prompt for transfer details and submit an asset transfer.

        BUG FIX: the old code passed default=account_id to the first prompt,
        but no `account_id` name exists in this scope, so the method always
        raised NameError before prompting.
        """
        src_account_id = click.prompt("Source Account")
        recipient = click.prompt("Recipient")
        asset_id = click.prompt("AssetID : asset#domain")
        qty = click.prompt("Total Amount to Send")
        description = click.prompt("Enter Transaction Details")
        self.iroha_client.transfer_asset(
            src_account_id, recipient, asset_id, description, qty
        )

    def increase_asset_qty(self):
        """Prompt for an asset and quantity, then add that quantity."""
        asset_id = click.prompt("AssetID : asset#domain")
        qty = click.prompt("Qty To Add")
        self.iroha_client.add_asset_qty(asset_id, qty)

    def decrease_asset_qty(self):
        """Prompt for an asset and quantity, then subtract that quantity."""
        asset_id = click.prompt("AssetID : asset#domain")
        qty = click.prompt("Qty To Subtract")
        self.iroha_client.subtract_asset_qty(asset_id, qty)
| 35.985507
| 85
| 0.676601
|
4a008b2923f028dad4bd3f0dd71dfe7568fe6774
| 1,514
|
py
|
Python
|
flashproxy/proc.py
|
arlolra/flashproxy
|
e73702e769db1ab1e02f7e2ab152cebde6e9b2b7
|
[
"MIT"
] | 5
|
2015-04-18T21:00:20.000Z
|
2019-12-16T08:18:02.000Z
|
flashproxy/proc.py
|
infinity0/flashproxy
|
de8d428ab5f2dfcc830ac8a4b34985a87643749f
|
[
"MIT"
] | 1
|
2020-04-15T15:56:17.000Z
|
2020-04-15T15:56:17.000Z
|
flashproxy/proc.py
|
arlolra/flashproxy
|
e73702e769db1ab1e02f7e2ab152cebde6e9b2b7
|
[
"MIT"
] | 3
|
2015-04-25T22:55:17.000Z
|
2021-07-29T19:12:12.000Z
|
import errno
import os
import socket
import stat
import pwd
# Pluggable-transport name used for clients when none is specified.
DEFAULT_CLIENT_TRANSPORT = "websocket"
# Return true iff the given fd is readable, writable, and executable only by its
# owner.
def check_perms(fd):
    st_mode = os.fstat(fd).st_mode
    # No group or other permission bits may be set.
    return not (st_mode & (stat.S_IRWXG | stat.S_IRWXO))
# Drop privileges by switching ID to that of the given user.
# http://stackoverflow.com/questions/2699907/dropping-root-permissions-in-python/2699996#2699996
# https://www.securecoding.cert.org/confluence/display/seccode/POS36-C.+Observe+correct+revocation+order+while+relinquishing+privileges
# https://www.securecoding.cert.org/confluence/display/seccode/POS37-C.+Ensure+that+privilege+relinquishment+is+successful
def drop_privs(username):
    """Irrevocably drop root privileges to those of *username*.

    The revocation order is deliberate and must not be reordered:
    supplementary groups are cleared first (only root may do this), then the
    gid is set (must happen while still root), then the uid. Finally we
    verify the drop succeeded by confirming that setuid(0) now fails — if it
    still succeeds, privileges were not actually relinquished and we abort.
    """
    uid = pwd.getpwnam(username).pw_uid
    gid = pwd.getpwnam(username).pw_gid
    os.setgroups([])
    os.setgid(gid)
    os.setuid(uid)
    try:
        # This call is EXPECTED to fail; success means the drop didn't take.
        os.setuid(0)
    except OSError:
        pass
    else:
        raise AssertionError("setuid(0) succeeded after attempting to drop privileges")
# A decorator to ignore "broken pipe" errors.
# NOTE: Python 2 syntax (`except socket.error, e`) — this module predates
# Python 3 and must stay py2-compatible.
def catch_epipe(fn):
    def ret(self, *args):
        try:
            return fn(self, *args)
        except socket.error, e:
            try:
                err_num = e.errno
            except AttributeError:
                # Before Python 2.6, exception can be a pair.
                err_num, errstr = e
            except:
                raise
            # Swallow only EPIPE; everything else propagates.
            if err_num != errno.EPIPE:
                raise
    return ret
| 31.541667
| 135
| 0.657199
|
4a008ba470944e1dd9cc6c356ad2701003e61cbd
| 3,554
|
py
|
Python
|
process_image_dir.py
|
Face-Body-Toolbox/face_alignment
|
619bde7ad431391f060fbfe1123962ccd578b550
|
[
"BSD-3-Clause"
] | 1
|
2021-04-12T07:22:02.000Z
|
2021-04-12T07:22:02.000Z
|
process_image_dir.py
|
Face-Body-Toolbox/face_alignment
|
619bde7ad431391f060fbfe1123962ccd578b550
|
[
"BSD-3-Clause"
] | 1
|
2021-04-11T09:47:34.000Z
|
2021-04-11T15:03:01.000Z
|
process_image_dir.py
|
sariyanidi/face_alignment_opencv
|
619bde7ad431391f060fbfe1123962ccd578b550
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Apr 7 18:45:22 2021
@author: sariyanidi
"""
import os
import cv2
import argparse
import face_alignment_opencv as fao
import numpy as np
from glob import glob
# Parse command-line parameters
parser = argparse.ArgumentParser()
parser.add_argument("images_dir", type=str, default="samples/testdir", help="Path to the directory that contains the images to process")
parser.add_argument("--single_face", type=int, default=1, help="Set to 0 for aligning one face per image or to 1 for aligning as many faces as are detected")
parser.add_argument("--flip_input", type=int, default=1, help="Set to 1 to flip the input while making the predictions, or to 0 to to not flip. Flipping typically leads to more robust results but reduces speed by 50%.")
parser.add_argument("--device", type=str, default='cuda', help="""Device to process. Must be set to either 'cpu' or 'cuda'. Default is 'cuda'
                    OpenCV must be compiled with CUDA and CUDNN support to really use GPU support, otherwise the software will run on CPU""")
parser.add_argument("--detection_threshold", type=float, default=0.3, help="Threshold for face detection. Default is 0.3")
parser.add_argument("--save_result_image", type=int, default=1, help="Set to 1 (Default) to save the resulting image (next to the original file) or to 0 otherwise")
parser.add_argument("--save_result_landmarks", type=int, default=1, help="Set to 1 (Default) to save resulting landmarks as .txt file or to 0 otherwise")

args = parser.parse_args()

image_paths = glob('%s/*png' % args.images_dir) + glob('%s/*jpg' % args.images_dir) + glob('%s/*bmp' % args.images_dir)

# Load face detector and aligner
detector = fao.FaceDetector(threshold=args.detection_threshold, device=args.device)
aligner = fao.FaceAligner(device=args.device, flip_input=args.flip_input)

# Create the directory to store the visual results
if args.save_result_image:
    visual_results_dir = os.path.join(args.images_dir, 'aligned_results')
    if not os.path.exists(visual_results_dir):
        os.mkdir(visual_results_dir)

# Now process each image
for image_path in image_paths:
    # read image; cv2.imread returns None for unreadable/corrupt files,
    # which previously crashed on im.copy() -- skip those instead
    im = cv2.imread(image_path)
    if im is None:
        print('Skipping unreadable image %s' % image_path)
        continue
    rim = im.copy()

    # detect faces
    detections = detector.get_detections(im, single_face=bool(args.single_face))

    # align faces
    landmark_sets = []
    for (x0, y0, xf, yf) in detections:
        p = aligner.get_landmarks(im.copy(), x0, y0, xf, yf)
        landmark_sets.append(p)

    # save landmarks as txt file. Each row in the output file
    # corresponds to one face's landmarks in the format x1, y1, x2, y2, ..., x68, y68
    if args.save_result_landmarks:
        num_det = len(landmark_sets)
        out = np.zeros((num_det, 68*2), dtype=int)
        for i in range(num_det):
            out[i,:] = landmark_sets[i].reshape(-1,)
        out_path = '.'.join(image_path.split('.')[0:-1])+'.txt'
        np.savetxt(out_path, out, fmt='%.2f')

    # Draw and save the visual result.
    # BUGFIX: the original guarded this with
    # "args.save_result_image or args.visualize_result", but no
    # --visualize_result option is ever defined, so that attribute access
    # raised AttributeError at runtime.
    if args.save_result_image:
        for p in landmark_sets:
            for ip in range(p.shape[0]):
                cv2.circle(rim, (p[ip,0], p[ip,1]), 3, (0, 255, 0), -2)

        vis_path = os.path.join(visual_results_dir, os.path.basename(image_path))
        print('Image with detected landmarks is saved to %s' % vis_path)
        cv2.imwrite(vis_path, rim)
| 44.987342
| 219
| 0.691334
|
4a008c6021c5c631fe627a1fd71dd4effd522a1e
| 922
|
py
|
Python
|
src/modelo/viajero.py
|
ManuelMasferrer/MISW4101-202111-Grupo57-sandbox
|
27dcd9b17315b8a90f1adb94a107abfb14525025
|
[
"MIT"
] | null | null | null |
src/modelo/viajero.py
|
ManuelMasferrer/MISW4101-202111-Grupo57-sandbox
|
27dcd9b17315b8a90f1adb94a107abfb14525025
|
[
"MIT"
] | null | null | null |
src/modelo/viajero.py
|
ManuelMasferrer/MISW4101-202111-Grupo57-sandbox
|
27dcd9b17315b8a90f1adb94a107abfb14525025
|
[
"MIT"
] | 1
|
2021-03-08T21:59:51.000Z
|
2021-03-08T21:59:51.000Z
|
from sqlalchemy import Column, Integer, String, ForeignKey, Table
from sqlalchemy.orm import relationship
from .declarative_base import Base
#viajero_gasto = Table(
# 'viajero_gasto', Base.metadata,
# Column('viajero_id', Integer, ForeignKey('viajero.id')),
# Column('gasto_id', Integer, ForeignKey('gasto.id'))
#)
class ActividadViajero(Base):
    """Association table linking actividades and viajeros (many-to-many)."""
    __tablename__ = 'actividad_viajero'
    # Composite primary key: one row per (actividad, viajero) pair.
    actividad = Column(Integer, ForeignKey('actividad.id'), primary_key=True)
    viajero = Column(Integer, ForeignKey('viajero.id'), primary_key=True)
class Viajero(Base):
    """A traveller: many-to-many actividades plus directly-owned gastos."""
    __tablename__ = 'viajero'
    id = Column(Integer,primary_key=True)
    nombre = Column(String)
    apellido = Column(String)
    # Many-to-many through the actividad_viajero association table.
    actividades = relationship('Actividad', secondary = 'actividad_viajero')
    # One-to-many: expenses recorded directly against this traveller.
    gastos = relationship('Gasto')
    def __init__(self, nombre, apellido):
        """Create a traveller with the given first and last name."""
        self.nombre = nombre
        self.apellido = apellido
| 28.8125
| 77
| 0.714751
|
4a008c6ee7829e70e1bb6ca001d63209e3da9272
| 17,252
|
py
|
Python
|
snli_cooccur.py
|
lei56/snli-ethics
|
d5b18717ff1a6d135fed52086d2406cf26adaf2f
|
[
"BSD-2-Clause"
] | null | null | null |
snli_cooccur.py
|
lei56/snli-ethics
|
d5b18717ff1a6d135fed52086d2406cf26adaf2f
|
[
"BSD-2-Clause"
] | null | null | null |
snli_cooccur.py
|
lei56/snli-ethics
|
d5b18717ff1a6d135fed52086d2406cf26adaf2f
|
[
"BSD-2-Clause"
] | null | null | null |
#!/usr/bin/env python
from collections import Counter
from multiprocessing import Pool
from functools import partial
import pickle
import logging
import json
import os
from humanfriendly import format_size
WITHIN_HYPOTHESIS = 'within-hypothesis'
WITHIN_PREMISE = 'within-premise'
BETWEEN_PREM_HYPO = 'between-prem-hypo'
def configure_logging():
    """Set up the root logger to emit INFO-level messages.

    Call this once, in main() or at the start of an interactive session.
    """
    fmt = '%(asctime)-15s %(levelname)s %(process)d: %(message)s'
    logging.basicConfig(level=logging.INFO, format=fmt)
def mkdirp_parent(path):
    """Create the parent directory of path if it does not already exist."""
    parent = os.path.dirname(path)
    if parent:
        mkdirp(parent)


def mkdirp(path):
    """Create directory path (with intermediates) if it does not exist."""
    if not os.path.isdir(path):
        os.makedirs(path)
def resource_usage_str():
    """Return a short description of current memory usage.

    Falls back to a stub message when psutil (or format_size) is
    unavailable.
    """
    try:
        import os
        import psutil
        info = psutil.Process(os.getpid()).memory_info()
        return 'mem used: %s res, %s virt' % (
            format_size(info.rss), format_size(info.vms))
    except Exception:
        return 'mem used: ? res, ? virt (pip install psutil)'
def tokens_to_max_ngrams(tokens, max_ngram=1):
    """Yield all n-grams of tokens for every n from 1 to max_ngram.

    Each yielded item is a pair (start_index, ngram_tuple).

    >>> from pprint import pprint
    >>> pprint(list(tokens_to_max_ngrams(['hello', 'world', '!'],
    ...                                  max_ngram=2)))
    [(0, ('hello',)),
     (1, ('world',)),
     (2, ('!',)),
     (0, ('hello', 'world')),
     (1, ('world', '!'))]
    """
    token_list = list(tokens)
    for n in range(1, max_ngram + 1):
        for item in tokens_to_ngrams(token_list, ngram=n):
            yield item


def tokens_to_ngrams(tokens, ngram=1):
    """Yield the n-grams of tokens for one fixed n (the ngram parameter).

    Each yielded item is a pair (start_index, ngram_tuple).

    >>> list(tokens_to_ngrams(['hello', 'world', '!'], ngram=2))
    [(0, ('hello', 'world')), (1, ('world', '!'))]
    """
    token_list = list(tokens)
    last_start = len(token_list) - ngram
    for start in range(last_start + 1):
        yield (start, tuple(token_list[start:start + ngram]))
def binary_parse_to_tokens(parse_str):
    """Yield the lower-cased terminals of an SNLI binary parse string.

    >>> list(binary_parse_to_tokens('( ( hello world ) ! )'))
    ['hello', 'world', '!']
    """
    for word in parse_str.split(' '):
        if word != '(' and word != ')':
            yield word.lower()
def within_sentence_pairs(sentence):
    """Yield ordered cooccurrence pairs of non-overlapping ngrams in a sentence.

    sentence is a list of (start_index, ngram_tuple) pairs.  Every ordered
    pair of ngrams is produced except those whose token spans intersect.

    >>> from pprint import pprint
    >>> pprint(list(within_sentence_pairs(
    ...     [(0, ('hello',)), (1, ('world',)), (2, ('!',)),
    ...      (0, ('hello', 'world')), (1, ('world', '!'))])))
    [(('hello',), ('world',)),
     (('hello',), ('!',)),
     (('hello',), ('world', '!')),
     (('world',), ('hello',)),
     (('world',), ('!',)),
     (('!',), ('hello',)),
     (('!',), ('world',)),
     (('!',), ('hello', 'world')),
     (('hello', 'world'), ('!',)),
     (('world', '!'), ('hello',))]
    """
    for (start_a, ngram_a) in sentence:
        for (start_b, ngram_b) in sentence:
            # Two spans intersect iff one starts inside the other.
            a_covers_b = start_a <= start_b < start_a + len(ngram_a)
            b_covers_a = start_b <= start_a < start_b + len(ngram_b)
            if a_covers_b or b_covers_a:
                continue
            yield (ngram_a, ngram_b)
def between_sentence_pairs(sentence1, sentence2):
    """Yield every cross-sentence ngram pair, one ngram from each sentence.

    Both arguments are lists of (start_index, ngram_tuple) pairs; the
    start indices are ignored, since ngrams from different sentences can
    never overlap.

    >>> from pprint import pprint
    >>> pprint(list(between_sentence_pairs(
    ...     [(0, ('hello',)), (0, ('hello', 'world'))],
    ...     [(0, ('goodnight',)), (0, ('goodnight', 'earth'))])))
    [(('hello',), ('goodnight',)),
     (('hello',), ('goodnight', 'earth')),
     (('hello', 'world'), ('goodnight',)),
     (('hello', 'world'), ('goodnight', 'earth'))]
    """
    for (_, left) in sentence1:
        for (_, right) in sentence2:
            yield (left, right)
class CooccurrenceCounts(object):
    """Accumulator for cooccurrence counts and their marginals.

    Attributes:
        xy        Counter over (x, y) cooccurrence pairs
        x         Counter over x marginals
        y         Counter over y marginals
        xy_total  grand total of all increments (double marginal)
    """

    def __init__(self):
        self.xy, self.x, self.y = Counter(), Counter(), Counter()
        self.xy_total = 0

    def increment(self, x, y):
        """Record one cooccurrence of (x, y); both must be hashable."""
        self.xy[(x, y)] += 1
        self.x[x] += 1
        self.y[y] += 1
        self.xy_total += 1

    def update(self, other):
        """Fold all counts from another CooccurrenceCounts into this one."""
        for mine, theirs in ((self.xy, other.xy),
                             (self.x, other.x),
                             (self.y, other.y)):
            mine.update(theirs)
        self.xy_total += other.xy_total
def chunks(enumerable, chunk_size=10000):
    """Yield consecutive lists of up to chunk_size items from enumerable.

    Primarily a helper for splitting a stream of work items across a
    process pool.

    >>> list(chunks(range(7), chunk_size=3))
    [[0, 1, 2], [3, 4, 5], [6]]
    """
    batch = []
    for item in enumerable:
        batch.append(item)
        if len(batch) == chunk_size:
            yield batch
            batch = []
    if batch:
        yield batch
def increment_all(counts, pairs):
    """Record every (x, y) pair from pairs into counts.

    counts must expose an increment(x, y) method (e.g. a
    CooccurrenceCounts instance); pairs is any iterable of 2-tuples of
    hashables.
    """
    for pair in pairs:
        counts.increment(pair[0], pair[1])
def compute_vocabs(snli_file_triples, filter_vocab_by_freq=1):
    """Build separate premise and hypothesis vocabularies from SNLI triples.

    snli_file_triples is an enumerable of (json, premise_ngrams,
    hypothesis_ngrams) triples, where each ngram is an
    (start_index, tokens) pair.  An ngram enters a side's vocabulary
    when it occurs at least filter_vocab_by_freq times on that side.

    Returns the pair (premise_vocab, hypothesis_vocab) of sets.
    """
    premise_counts = Counter()
    hypothesis_counts = Counter()
    for (_, premise_ngrams, hypothesis_ngrams) in snli_file_triples:
        premise_counts.update(tok for (_, tok) in premise_ngrams)
        hypothesis_counts.update(tok for (_, tok) in hypothesis_ngrams)

    def frequent(counts):
        # Keep only ngrams meeting the frequency threshold.
        return set(tok for (tok, n) in counts.items()
                   if n >= filter_vocab_by_freq)

    return (frequent(premise_counts), frequent(hypothesis_counts))
def count_cooccurrences(snli_file_triples, model,
                        premise_vocab=None, hypothesis_vocab=None,
                        filter_hypo_by_prem=False):
    '''
    Given an enumerable of triples representing the SNLI dataset and a
    string representing the cooccurrence model (WITHIN_HYPOTHESIS,
    WITHIN_PREMISE, BETWEEN_PREM_HYPO), count cooccurrences in a
    CooccurrenceCounts object and return it.

    If premise_vocab is not None, filter premise tokens to those
    appearing in premise_vocab (a set).  If hypothesis_vocab is not
    None, filter hypothesis tokens likewise.  If filter_hypo_by_prem is
    True, remove ngrams from the hypothesis that appear in the premise.

    snli_file_triples should be an enumerable of triples: (parsed SNLI
    json, premise ngram list, hypothesis ngram list), where each ngram
    is an (start_index, tokens) pair.

    BUGFIX: the original used filter(...), which on Python 3 returns a
    one-shot iterator.  within_sentence_pairs iterates its argument with
    two nested loops, and the filter_hypo_by_prem set construction
    consumed premise_ngrams before it was counted, so counts silently
    came out empty or wrong.  Materializing list comprehensions is
    behavior-identical on Python 2 and correct on Python 3.
    '''
    counts = CooccurrenceCounts()
    for (j, premise_ngrams, hypothesis_ngrams) in snli_file_triples:
        if premise_vocab is not None:
            premise_ngrams = [
                p for p in premise_ngrams if p[1] in premise_vocab]
        if hypothesis_vocab is not None:
            hypothesis_ngrams = [
                p for p in hypothesis_ngrams if p[1] in hypothesis_vocab]
        if filter_hypo_by_prem:
            premise_filter_set = set(p[1] for p in premise_ngrams)
            hypothesis_ngrams = [
                p for p in hypothesis_ngrams
                if p[1] not in premise_filter_set]

        if model == WITHIN_HYPOTHESIS:
            increment_all(
                counts,
                within_sentence_pairs(hypothesis_ngrams))
        elif model == BETWEEN_PREM_HYPO:
            increment_all(
                counts,
                between_sentence_pairs(premise_ngrams, hypothesis_ngrams))
        elif model == WITHIN_PREMISE:
            increment_all(
                counts,
                within_sentence_pairs(premise_ngrams))
        else:
            raise ValueError('unknown model %s' % model)
    return counts
def iter_snli(snli_jsonl_path, inference_type=None, max_ngram=1,
              unique_premises=False):
    '''
    Yield (json, premise_ngrams, hypothesis_ngrams) triples from an SNLI
    jsonl file (one JSON-serialized premise-hypothesis pair per line).

    If inference_type is not None, only triples whose gold label
    ('contradiction', 'neutral', 'entailment') matches it are yielded.
    Tokens of both sentences are expanded to all n-grams for n from 1 to
    max_ngram; each ngram is an (start_index, tokens) pair.  If
    unique_premises is True, only the first pair seen for each premise
    (captionID) is yielded.
    '''
    seen_captions = set()
    with open(snli_jsonl_path) as f:
        for (line_no, line) in enumerate(f):
            # Periodic progress + memory report while ingesting.
            if line_no % 1000 == 0:
                logging.info('ingested %d hypotheses (%s)' %
                             (line_no, resource_usage_str()))
            record = json.loads(line)
            wrong_type = (inference_type is not None and
                          record['gold_label'] != inference_type)
            if wrong_type:
                continue
            caption_id = record['captionID']
            if unique_premises and caption_id in seen_captions:
                continue
            seen_captions.add(caption_id)
            premise = list(tokens_to_max_ngrams(
                binary_parse_to_tokens(record['sentence1_binary_parse']),
                max_ngram=max_ngram))
            hypothesis = list(tokens_to_max_ngrams(
                binary_parse_to_tokens(record['sentence2_binary_parse']),
                max_ngram=max_ngram))
            yield (record, premise, hypothesis)
def main():
    """Command-line entry point: count SNLI cooccurrences and pickle them.

    Parses arguments, validates option combinations, optionally builds
    frequency-filtered vocabularies in a first pass, counts
    cooccurrences across a multiprocessing pool in a second pass, and
    pickles the merged CooccurrenceCounts to the requested path.
    """
    from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
    parser = ArgumentParser(
        formatter_class=ArgumentDefaultsHelpFormatter,
        description='compute PMI on SNLI',
    )
    parser.add_argument('model', type=str,
                        choices=(WITHIN_PREMISE, WITHIN_HYPOTHESIS,
                                 BETWEEN_PREM_HYPO),
                        help=('cooccurrence model to compute'))
    parser.add_argument('snli_jsonl_path', type=str,
                        help='path to snli_1.0_train.json')
    parser.add_argument('pickle_path', type=str,
                        help='(output) path to pickled counts and pmi')
    parser.add_argument('--inference-type', type=str,
                        choices=('entailment', 'contradiction', 'neutral'),
                        help=('filter to inferences of this type (only for %s '
                              'and %s models') % (WITHIN_HYPOTHESIS,
                                                  BETWEEN_PREM_HYPO))
    parser.add_argument('--filter-hypo-by-prem', action='store_true',
                        help='remove words from hypothesis that appear in '
                             'premise (only for %s and %s models' % (
                                 WITHIN_HYPOTHESIS, BETWEEN_PREM_HYPO))
    parser.add_argument('--max-ngram', type=int, default=1,
                        help='compute n-grams for n up to this number')
    parser.add_argument('--num-proc', type=int, default=1,
                        help='size of processor pool to use')
    parser.add_argument('--filter-vocab-by-freq', type=int,
                        help='filter vocab to words occuring at least this '
                             'many times')
    args = parser.parse_args()

    configure_logging()

    # Hypothesis-side options make no sense for the premise-only model.
    if args.filter_hypo_by_prem and args.model == WITHIN_PREMISE:
        raise ValueError(
            'can only filter hypo by prem for %s and %s models' % (
                WITHIN_HYPOTHESIS, BETWEEN_PREM_HYPO))
    if args.inference_type is not None and args.model == WITHIN_PREMISE:
        raise ValueError(
            'can only filter by inference type for %s and %s models' % (
                WITHIN_HYPOTHESIS, BETWEEN_PREM_HYPO))

    pool = Pool(args.num_proc)

    # First pass (optional): build premise/hypothesis vocabularies so
    # rare ngrams can be excluded while counting.
    if args.filter_vocab_by_freq is None:
        premise_vocab = None
        hypothesis_vocab = None
    else:
        premise_vocab = set()
        hypothesis_vocab = set()
        for (pv, hv) in pool.imap_unordered(
                partial(compute_vocabs,
                        filter_vocab_by_freq=args.filter_vocab_by_freq),
                chunks(iter_snli(args.snli_jsonl_path,
                                 inference_type=args.inference_type,
                                 max_ngram=args.max_ngram))):
            premise_vocab.update(pv)
            hypothesis_vocab.update(hv)

    # Each premise is repeated across hypotheses; deduplicate for the
    # premise-only model so premises are not over-counted.
    unique_premises = (args.model == WITHIN_PREMISE)

    # Second pass: count cooccurrences over chunks in parallel and merge
    # the partial counters.
    counts = CooccurrenceCounts()
    for c in pool.imap_unordered(
            partial(count_cooccurrences, model=args.model,
                    premise_vocab=premise_vocab,
                    hypothesis_vocab=hypothesis_vocab,
                    filter_hypo_by_prem=args.filter_hypo_by_prem),
            chunks(iter_snli(args.snli_jsonl_path,
                             inference_type=args.inference_type,
                             max_ngram=args.max_ngram,
                             unique_premises=unique_premises))):
        counts.update(c)

    logging.info('saving to disk (%s)' % resource_usage_str())
    mkdirp_parent(args.pickle_path)
    with open(args.pickle_path, 'wb') as f:
        pickle.dump(counts, f)
    logging.info('done')
| 34.504
| 79
| 0.59767
|
4a008d80320a1c7ac0a53cc89def3ba87693dc44
| 2,259
|
py
|
Python
|
notify/by_email_oauth.py
|
bttg/UoM-WAM-Spam
|
8083cbac003397e9c022c02bc427454638dd235f
|
[
"MIT"
] | 54
|
2019-06-20T00:50:38.000Z
|
2021-12-01T06:59:38.000Z
|
notify/by_email_oauth.py
|
bttg/UoM-WAM-Spam
|
8083cbac003397e9c022c02bc427454638dd235f
|
[
"MIT"
] | 18
|
2019-06-21T00:20:54.000Z
|
2020-12-03T22:04:15.000Z
|
notify/by_email_oauth.py
|
bttg/UoM-WAM-Spam
|
8083cbac003397e9c022c02bc427454638dd235f
|
[
"MIT"
] | 24
|
2019-06-20T02:49:18.000Z
|
2021-12-02T08:22:09.000Z
|
"""
OAuth/Gmail API-based email notifier
:author: alanung and Matthew Farrugia-Roberts
"""
import base64
import pickle
import os.path
from email.mime.text import MIMEText
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
CLIENT_ID_PATH = 'gmail-credentials.json'
ACCESS_TOKEN_PATH = 'gmail-token.pickle'
SCOPES = ['https://www.googleapis.com/auth/gmail.send']
class GmailAPINotifier:
    """Send notification emails through the Gmail API (OAuth 2.0).

    Requires an OAuth 2.0 Client ID obtained from the Google API Console
    (https://console.developers.google.com); see README for detailed
    instructions.
    """

    def __init__(self, address):
        """
        Set up a notifier sending from (and to) the given address.

        Reuses a cached access token when one exists; otherwise refreshes
        it or runs the interactive OAuth flow, then builds the Gmail API
        service handle.
        """
        self.address = address
        self.creds = None
        # reuse the access token cached by a previous authentication
        if os.path.exists(ACCESS_TOKEN_PATH):
            with open(ACCESS_TOKEN_PATH, 'rb') as tokenfile:
                self.creds = pickle.load(tokenfile)
        if not self.creds or not self.creds.valid:
            if self.creds and self.creds.expired and self.creds.refresh_token:
                # expired but refreshable: renew silently
                self.creds.refresh(Request())
            else:
                # no usable token: run the interactive browser-based flow
                flow = InstalledAppFlow.from_client_secrets_file(
                    CLIENT_ID_PATH, SCOPES)
                self.creds = flow.run_local_server()
            # cache the fresh credentials for the next run
            with open(ACCESS_TOKEN_PATH, 'wb') as tokenfile:
                pickle.dump(self.creds, tokenfile)
        self.service = build('gmail', 'v1', credentials=self.creds)

    def notify(self, subject, text):
        """Send an email with the given subject line and body text."""
        message = MIMEText(text)
        message['To'] = self.address
        message['From'] = self.address
        message['Subject'] = subject
        body = {'raw': base64.urlsafe_b64encode(message.as_bytes()).decode()}
        self.service.users().messages().send(userId='me', body=body).execute()
        print("Email sent!")
| 34.227273
| 80
| 0.657371
|
4a008e3bf5d4486017a063620ae02c599ed25e42
| 2,532
|
py
|
Python
|
collectGo/collectGo2.py
|
mudssky/pythonScripts
|
bea3cab1715bee4af4a73bc07aa248d5f4f40f38
|
[
"MIT"
] | null | null | null |
collectGo/collectGo2.py
|
mudssky/pythonScripts
|
bea3cab1715bee4af4a73bc07aa248d5f4f40f38
|
[
"MIT"
] | null | null | null |
collectGo/collectGo2.py
|
mudssky/pythonScripts
|
bea3cab1715bee4af4a73bc07aa248d5f4f40f38
|
[
"MIT"
] | null | null | null |
# coding:utf-8
import os
import shutil
rootPath = os.path.abspath('.')

# Before use, configure the extensions to collect (extnameContainer) and
# the destination directory for each extension (targetDirectory).
extnameContainer ={
    '.7z':[],
    # '.exe':[]
}
targetDirectory={
    # '.go':os.path.join(rootPath,'go'),
    # '.exe':os.path.join(rootPath,'exe'),
    '.7z':os.path.join(rootPath,'7z2'),
}
def collect(path, extnameContainer):
    """Recursively walk path, recording files by extension.

    Files whose extension is a key of extnameContainer have their full
    path appended to the matching list (the dict is mutated in place).

    Fix: build child paths with os.path.join instead of a hard-coded
    '\\' separator, so the walk also works on non-Windows systems
    (identical output on Windows, where os.sep is '\\').
    """
    for entry in os.listdir(path):
        nextpath = os.path.join(path, entry)
        if os.path.isdir(nextpath):
            collect(nextpath, extnameContainer)
        else:
            _, extname = os.path.splitext(nextpath)
            if extname in extnameContainer:
                extnameContainer[extname].append(nextpath)
def copyAlist(filelist):
    """Copy every file in filelist into the target directory configured
    for its extension (module-level targetDirectory), printing progress.
    """
    print('该文件列表的文件总数为:'+str(len(filelist)))
    for index,i in enumerate(filelist):
        extname = os.path.splitext(i)[1]
        copyFile(i,targetDirectory[extname])
        print('正在拷贝第'+str(index)+'个文件'+i+'到'+targetDirectory[extname])
def copyFile(sourcePath, targetPath):
    """Copy sourcePath into directory targetPath (created if missing),
    streaming the content so large files are not read into memory.

    NOTE(review): like the original, the destination is opened in append
    mode ('ab+'), so re-running appends rather than overwrites — confirm
    that this is intended before changing it.

    Fixes: destination path built with os.path.join instead of a
    hard-coded '\\' (portable); redundant close() calls inside the
    with-blocks removed; copying is done in fixed-size binary chunks
    instead of splitting on newlines.
    """
    if not os.path.exists(targetPath):
        print('目标路径不存在,创建目标路径: '+targetPath)
        os.mkdir(targetPath)
    basename = os.path.basename(sourcePath)
    with open(sourcePath, 'rb') as src:
        with open(os.path.join(targetPath, basename), 'ab+') as dst:
            # iter(callable, sentinel) reads until EOF (empty bytes)
            for chunk in iter(lambda: src.read(1024 * 1024), b''):
                dst.write(chunk)
# # 调用系统shell兼容性不佳,只适用于windows系统
# def moveFile(sourcePath,targetPath):
# if not os.path.exists(targetPath):
# print('目标路径不存在,创建目标路径: '+ targetPath)
# os.mkdir(targetPath)
# # 拼接批处理命令,move的文件名中可能会有空格,目录名要用双引号括起来
# command = 'move "'+sourcePath+'" "'+targetPath+'"'
# print(command)
# status = os.system(command)
# # print('正在移文件'+sourcePath+'到'+targetPath)
# print(status)
def moveAlist(filelist):
    """Move every file in filelist into the target directory configured
    for its extension (module-level targetDirectory), printing progress.
    """
    print('该文件列表的文件总数为:'+str(len(filelist)))
    for index,i in enumerate(filelist):
        # enumerate also yields the index, used for progress reporting
        extname = os.path.splitext(i)[1]
        print('正在移动第'+str(index)+'个文件'+i+'到'+targetDirectory[extname])
        # shutil provides shutil.copyfile, shutil.move and friends
        shutil.move(i,targetDirectory[extname])
        # moveFile(i,targetDirectory[extname])
if __name__ == "__main__":
    # Walk the current directory tree, then move each collected file
    # type into its configured target directory.
    collect(rootPath,extnameContainer)
    # print(extnameContainer)
    for key in extnameContainer:
        print('当前处理的文件类型'+key)
        moveAlist(extnameContainer[key])
# go语言build默认是在当前目录生成exe,而且一个目录作为一个项目只能有一个main
# 每次打开都隔着一层目录,这样不利于学习阶段代码片段的查看
# 所以写了这个collectGo脚本,把一个目录中所有go程序和exe程序分别收集起来
# 放到脚本运行目录下的go和exe文件夹中
| 30.506024
| 70
| 0.64139
|
4a008e80eae078b5799a7c226c81611ae05259a9
| 3,884
|
py
|
Python
|
src/primaires/scripting/memoires.py
|
stormi/tsunami
|
bdc853229834b52b2ee8ed54a3161a1a3133d926
|
[
"BSD-3-Clause"
] | null | null | null |
src/primaires/scripting/memoires.py
|
stormi/tsunami
|
bdc853229834b52b2ee8ed54a3161a1a3133d926
|
[
"BSD-3-Clause"
] | null | null | null |
src/primaires/scripting/memoires.py
|
stormi/tsunami
|
bdc853229834b52b2ee8ed54a3161a1a3133d926
|
[
"BSD-3-Clause"
] | null | null | null |
# -*-coding:Utf-8 -*
# Copyright (c) 2010 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant la classe Memoire, détaillée plus bas."""
from datetime import datetime, timedelta
from collections import OrderedDict
from abstraits.obase import BaseObj, MetaBaseObj
from .exceptions import ErreurScripting
class Memoires(BaseObj):
    """Wrapper class holding the scripting memories.

    Memories can be attached to an NPC, a room, an object, the whole
    universe...  They are manipulated from scripting through the
    ecrire_memoire action and the memoire_existe / memoire functions.
    """

    enregistrer = True  # persisted by the abstraits.obase machinery

    def __init__(self, parent=None):
        """Constructor: start with no memories and no scheduled destructions."""
        BaseObj.__init__(self)
        self._memoires = {}
        self._a_detruire = {}
        self._construire()

    def __getnewargs__(self):
        return ()

    def __delitem__(self, cle):
        del self._memoires[cle]

    def __getitem__(self, cle):
        return self._memoires[cle]

    def __setitem__(self, cle, valeur):
        self._memoires[cle] = valeur

    def __contains__(self, valeur):
        return valeur in self._memoires

    def nettoyer_memoire(self, cle, valeur):
        """Remove a just-deleted memory from the pending-destruction map.

        Called when a memory is erased immediately: if it stayed
        scheduled for destruction it would cause trouble later, so its
        pending-destruction entry (and any now-empty per-key dict) is
        cleaned up here.
        """
        if cle in self._a_detruire and valeur in self._a_detruire[cle]:
            del self._a_detruire[cle][valeur]
        if cle in self._a_detruire and not self._a_detruire[cle]:
            del self._a_detruire[cle]

    def programmer_destruction(self, cle, valeur, temps):
        """Schedule the destruction of a memory.

        Parameters:
            cle -- the memory key (often an object, a room, an NPC...)
            valeur -- the memory value (i.e. the memorised name)
            temps -- the delay, in minutes
        """
        if cle not in self or valeur not in self[cle]:
            raise ValueError("la mémoire {} n'existe pas dans {}".format(
                    valeur, cle))
        a_detruire = self._a_detruire.get(cle, {})
        a_detruire[valeur] = datetime.now() + timedelta(seconds=temps * 60)
        self._a_detruire[cle] = a_detruire
| 38.455446
| 85
| 0.703656
|
4a008eb5102d36f293b6dcad799cce020b6d5a12
| 3,492
|
py
|
Python
|
bindings/python/ensmallen/datasets/string/achromobactersproot83.py
|
AnacletoLAB/ensmallen_graph
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 5
|
2021-02-17T00:44:45.000Z
|
2021-08-09T16:41:47.000Z
|
bindings/python/ensmallen/datasets/string/achromobactersproot83.py
|
AnacletoLAB/ensmallen_graph
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 18
|
2021-01-07T16:47:39.000Z
|
2021-08-12T21:51:32.000Z
|
bindings/python/ensmallen/datasets/string/achromobactersproot83.py
|
AnacletoLAB/ensmallen
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 3
|
2021-01-14T02:20:59.000Z
|
2021-08-04T19:09:52.000Z
|
"""
This file offers the methods to automatically retrieve the graph Achromobacter sp. Root83.
The graph is automatically retrieved from the STRING repository.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen import Graph # pylint: disable=import-error
def AchromobacterSpRoot83(
    directed: bool = False,
    preprocess: bool = True,
    load_nodes: bool = True,
    verbose: int = 2,
    cache: bool = True,
    cache_path: str = "graphs/string",
    version: str = "links.v11.5",
    **additional_graph_kwargs: Dict
) -> Graph:
    """Retrieve the Achromobacter sp. Root83 graph from the STRING repository.

    Parameters
    -------------------
    directed: bool = False
        Whether to load the graph as directed or undirected.
    preprocess: bool = True
        Whether to preprocess the graph for optimal load time and memory.
    load_nodes: bool = True
        Whether to load the nodes vocabulary or treat the nodes simply
        as a numeric range.
    verbose: int = 2
        Whether to show loading bars during retrieval and building.
    cache: bool = True
        Whether to download and preprocess files only once.
    cache_path: str = "graphs/string"
        Where to store the downloaded graphs.
    version: str = "links.v11.5"
        The version of the graph to retrieve; available versions are
        "homology.v11.5", "physical.links.v11.5" and "links.v11.5".
    additional_graph_kwargs: Dict
        Additional graph kwargs forwarded to the retriever.

    Returns
    -----------------------
    Instance of the Achromobacter sp. Root83 graph.

    References
    ---------------------
    Please cite Szklarczyk et al., "STRING v11: protein--protein
    association networks with increased coverage, ...", Nucleic Acids
    Research 47(D1):D607--D613, 2019, if you use the data.
    """
    retriever = AutomaticallyRetrievedGraph(
        graph_name="AchromobacterSpRoot83",
        repository="string",
        version=version,
        directed=directed,
        preprocess=preprocess,
        load_nodes=load_nodes,
        verbose=verbose,
        cache=cache,
        cache_path=cache_path,
        additional_graph_kwargs=additional_graph_kwargs,
    )
    return retriever()
| 33.257143
| 223
| 0.677835
|
4a008f102a22096662b2537d217b8137e6c01bcd
| 12,798
|
py
|
Python
|
pyats_ios_example.py
|
xander-petty/pyats-ios-sample
|
d61838ce7cb3d2f56cb95dc840709e332fe40a15
|
[
"ECL-2.0",
"Apache-2.0"
] | 22
|
2017-12-01T18:57:33.000Z
|
2022-02-24T01:43:34.000Z
|
pyats_ios_example.py
|
xander-petty/pyats-ios-sample
|
d61838ce7cb3d2f56cb95dc840709e332fe40a15
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2018-10-24T20:11:08.000Z
|
2021-12-02T21:04:25.000Z
|
pyats_ios_example.py
|
xander-petty/pyats-ios-sample
|
d61838ce7cb3d2f56cb95dc840709e332fe40a15
|
[
"ECL-2.0",
"Apache-2.0"
] | 14
|
2017-12-11T21:05:35.000Z
|
2022-03-11T14:26:04.000Z
|
#!/bin/env python
'''pyats_ios_example.py
This is a testscript example intended to walk users through basic Cisco IOS
device connection, command execution and result verification using pyATS.
Arguments:
This script requires one script argument (testbed) to be passed in when run
under standalone execution for demonstration purposes.
testbed: the path to testbed yaml file
Topology:
+-------------+ Eth0/0 <-> Eth0/0 +-------------+
| | ------------------------------------ | |
| ios1 | | ios2 |
| | ------------------------------------ | |
+-------------+ Eth0/1 <-> Eth0/1 +-------------+
Testing:
This script performs the following tests for demonstration purposes.
- router connection: basic device connection test
- `ping` command: basic device ping test; logs ping result.
- interface count verification
- execute `show version` command: basic command execution and data
parsing; extract ethernet and serial
interface counts; logs interface
counts.
- execute `show ip interface brief` command: basic command execution and
data parsing; extract all
ethernet and serial
interfaces; logs number of
interface counts.
- verify ethernet and serial interface counts from above commands.
- router disconnect: basic device disconnect test
Examples:
# to run under standalone execution
bash$ python pyats_ios_example.py --testbed pyats_ios_example.yaml
References:
For the complete and up-to-date user guide on pyATS, visit:
https://developer.cisco.com/site/pyats/docs/
'''
#
# optional author information
#
__author__ = 'Wei Chen <weiche3@cisco.com>'
__copyright__ = 'Copyright 2017, Cisco Systems'
__email__ = 'pyats-support@cisco.com'
__date__= 'Nov 15, 2017'
#
# import statements
#
import re
import logging
from ats import aetest
from ats.log.utils import banner
#
# create a logger for this testscript
#
logger = logging.getLogger(__name__)
#
# Common Setup Section
#
class common_setup(aetest.CommonSetup):
    '''Common Setup Section

    Defines subsections that performs configuration common to the entire
    script: topology validation, device connection, and dynamic loop marking.
    '''
    @aetest.subsection
    def check_topology(self,
                       testbed,
                       ios1_name = 'ios1',
                       ios2_name = 'ios2'):
        '''
        check that we have at least two devices and a link between the devices
        If so, mark the next subsection for looping.
        '''
        # abort/fail the testscript if no testbed was provided
        if not testbed or not testbed.devices:
            self.failed('No testbed was provided to script launch',
                        goto = ['exit'])
        # abort/fail the testscript if no matching device was provided
        for ios_name in (ios1_name, ios2_name):
            if ios_name not in testbed:
                self.failed('testbed needs to contain device {ios_name}'.format(
                                ios_name=ios_name,
                            ),
                            goto = ['exit'])
        ios1 = testbed.devices[ios1_name]
        ios2 = testbed.devices[ios2_name]
        # add them to testscript parameters so later sections can use them
        self.parent.parameters.update(ios1 = ios1, ios2 = ios2)
        # get corresponding links
        links = ios1.find_links(ios2)
        assert len(links) >= 1, 'require one link between ios1 and ios2'
        # save link as uut link parameter (an arbitrary one if several exist)
        self.parent.parameters['uut_link'] = links.pop()
    @aetest.subsection
    def establish_connections(self, steps, ios1, ios2):
        '''
        establish connection to both devices
        '''
        with steps.start('Connecting to Router-1'):
            ios1.connect()
        with steps.start('Connecting to Router-2'):
            ios2.connect()
        # abort/fail the testscript if any device isn't connected
        if not ios1.connected or not ios2.connected:
            self.failed('One of the two devices could not be connected to',
                        goto = ['exit'])
    @aetest.subsection
    def marking_interface_count_testcases(self, testbed):
        '''
        mark the VerifyInterfaceCountTestcase for looping over all devices.
        '''
        # ignore VIRL lxc's (management containers named with 'mgmt')
        devices = [d for d in testbed.devices.keys() if 'mgmt' not in d]
        logger.info(banner('Looping VerifyInterfaceCountTestcase'
                           ' for {}'.format(devices)))
        # dynamic loop marking on testcase: one iteration per device name
        aetest.loop.mark(VerifyInterfaceCountTestcase, device = devices)
#
# Ping Testcase: leverage dual-level looping
#
@aetest.loop(device = ('ios1', 'ios2'))
class PingTestcase(aetest.Testcase):
    '''Ping test

    Looped once per device name; each iteration pings every destination IP on
    the shared uut link and logs the reported success rate.
    '''
    groups = ('basic', 'looping')
    @aetest.setup
    def setup(self, uut_link):
        # collect the ipv4 address of every interface on the uut link
        destination = []
        for intf in uut_link.interfaces:
            destination.append(str(intf.ipv4.ip))
        # apply loop to next section: ping() runs once per destination
        aetest.loop.mark(self.ping, destination = destination)
    @aetest.test
    def ping(self, device, destination):
        '''
        ping destination ip address from device
        Sample of ping command result:
            ping
            Protocol [ip]:
            Target IP address: 10.10.10.2
            Repeat count [5]:
            Datagram size [100]:
            Timeout in seconds [2]:
            Extended commands [n]: n
            Sweep range of sizes [n]: n
            Type escape sequence to abort.
            Sending 5, 100-byte ICMP Echos to 10.10.10.2, timeout is 2 seconds:
            !!!!!
            Success rate is 100 percent (5/5), round-trip min/avg/max = 1/1/1 ms
        '''
        try:
            # store command result for later usage
            result = self.parameters[device].ping(destination)
        except Exception as e:
            # abort/fail the testscript if ping command returns any exception
            # such as connection timeout or command failure
            self.failed('Ping {} from device {} failed with error: {}'.format(
                            destination,
                            device,
                            str(e),
                        ),
                        goto = ['exit'])
        else:
            # extract success rate from ping result with regular expression
            match = re.search(r'Success rate is (?P<rate>\d+) percent', result)
            success_rate = match.group('rate')
            # log the success rate
            logger.info(banner('Ping {} with success rate of {}%'.format(
                                    destination,
                                    success_rate,
                               )
                        )
            )
#
# Verify Interface Count Testcase
#
class VerifyInterfaceCountTestcase(aetest.Testcase):
    '''Verify interface count test

    Looped per device (marked dynamically in common_setup): counts gigabit
    ethernet interfaces via `show version`, then cross-checks the count
    against `show ip interface brief`.
    '''
    groups = ('basic', 'looping')
    @aetest.test
    def extract_interface_count(self, device):
        '''
        extract interface counts from `show version`
        Sample of show version command result:
            show version
            Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.6(2)T, RELEASE SOFTWARE (fc2)
            Technical Support: http://www.cisco.com/techsupport
            Copyright (c) 1986-2016 by Cisco Systems, Inc.
            Compiled Tue 22-Mar-16 16:19 by prod_rel_team
            ROM: Bootstrap program is IOSv
            ios2 uptime is 1 hour, 17 minutes
            System returned to ROM by reload
            System image file is "flash0:/vios-adventerprisek9-m"
            Last reload reason: Unknown reason
            <....>
            Cisco IOSv (revision 1.0) with with 484609K/37888K bytes of memory.
            Processor board ID 9QTSICFAZS7Q2I61N8WNZ
            2 Gigabit Ethernet interfaces
            DRAM configuration is 72 bits wide with parity disabled.
            256K bytes of non-volatile configuration memory.
            2097152K bytes of ATA System CompactFlash 0 (Read/Write)
            0K bytes of ATA CompactFlash 1 (Read/Write)
            0K bytes of ATA CompactFlash 2 (Read/Write)
            10080K bytes of ATA CompactFlash 3 (Read/Write)
            Configuration register is 0x0
        '''
        try:
            # store execution result for later usage
            result = self.parameters[device].execute('show version')
        except Exception as e:
            # abort/fail the testscript if show version command returns any
            # exception such as connection timeout or command failure
            self.failed('Device {} \'show version\' failed: {}'.format(device,
                                                                       str(e)),
                        goto = ['exit'])
        else:
            # extract interfaces counts from `show version`
            # NOTE(review): relies on literal \r\n line endings in the device
            # output -- confirm against the connection library's output mode
            match = re.search(r'(?P<ethernet>\d+) Gigabit Ethernet interfaces\r\n', result)
            ethernet_intf_count = int(match.group('ethernet'))
            # log the interface counts
            logger.info(banner('\'show version\' returns {} ethernet interfaces'
                               .format(
                                   ethernet_intf_count
                                )
                        )
            )
            # add them to testcase parameters; serial count is hard-coded 0
            # because the IOSv sample topology has no serial interfaces
            self.parameters.update(ethernet_intf_count = ethernet_intf_count,
                                   serial_intf_count = 0)
    @aetest.test
    def verify_interface_count(self,
                               device,
                               ethernet_intf_count = 0,
                               serial_intf_count = 0):
        '''
        verify interface counts with `show ip interface brief`
        Sample of show ip interface brief command result:
            show ip interface brief
            Interface              IP-Address      OK? Method Status                Protocol
            GigabitEthernet0/0     unassigned      YES unset  administratively down down
            GigabitEthernet0/1     10.10.10.2      YES manual up                    up
        '''
        try:
            # store execution result for later usage
            result = self.parameters[device].execute('show ip interface brief')
        except Exception as e:
            # abort/fail the testscript if show ip interface brief command
            # returns any exception such as connection timeout or command
            # failure
            self.failed('Device {} \'show ip interface brief\' failed: '
                        '{}'.format(device, str(e)),
                        goto = ['exit'])
        else:
            # extract ethernet interfaces (one match per interface line)
            ethernet_interfaces = re.finditer(r'\r\nGigabitEthernet\d+/\d+\s+', result)
            # total number of ethernet interface
            len_ethernet_interfaces = len(tuple(ethernet_interfaces))
            # log the ethernet interface counts
            logger.info(banner('\'show ip interface brief\' returns {} ethernet'
                               ' interfaces'.format(len_ethernet_interfaces)))
            # compare the ethernet interface count between
            # `show ip interface brief` and `show version`
            assert len_ethernet_interfaces == ethernet_intf_count
class common_cleanup(aetest.CommonCleanup):
    '''disconnect from ios routers'''
    @aetest.subsection
    def disconnect(self, steps, ios1, ios2):
        '''disconnect from both devices'''
        # one named step per router, driven by a (label, device) table
        for label, router in (('Router-1', ios1), ('Router-2', ios2)):
            with steps.start('Disconnecting from {}'.format(label)):
                router.disconnect()
        # abort/fail the testscript if device connection still exists
        if ios1.connected or ios2.connected:
            self.failed('One of the two devices could not be disconnected from',
                        goto = ['exit'])
if __name__ == '__main__':
    # local imports (only needed for standalone execution)
    import argparse
    from ats.topology import loader
    parser = argparse.ArgumentParser(description = "standalone parser")
    # --testbed takes a yaml file path; loader.load parses it into a
    # Testbed object before it reaches the script parameters
    parser.add_argument('--testbed', dest = 'testbed',
                        type = loader.load)
    # parse args (ignore anything aetest itself may consume)
    args, unknown = parser.parse_known_args()
    # and pass all arguments to aetest.main() as kwargs
    aetest.main(**vars(args))
| 34.403226
| 108
| 0.560166
|
4a008f4911f95f760d94e4cd4e6c00800dd98ad9
| 10,449
|
py
|
Python
|
chime/cogs/PersonalPlaylistsCog.py
|
realmayus/chime
|
a9ad4c6e6d02ed99d45b94b6cf8ca0694ef3b6fc
|
[
"MIT"
] | 3
|
2020-06-06T11:57:36.000Z
|
2020-06-19T09:51:56.000Z
|
chime/cogs/PersonalPlaylistsCog.py
|
realmayus/chime
|
a9ad4c6e6d02ed99d45b94b6cf8ca0694ef3b6fc
|
[
"MIT"
] | 4
|
2020-06-19T09:42:31.000Z
|
2020-11-08T13:10:10.000Z
|
chime/cogs/PersonalPlaylistsCog.py
|
realmayus/chime
|
a9ad4c6e6d02ed99d45b94b6cf8ca0694ef3b6fc
|
[
"MIT"
] | 1
|
2020-06-30T10:41:55.000Z
|
2020-06-30T10:41:55.000Z
|
import base64
import wavelink
from typing import Union
from discord import Message
from discord.ext import commands
from discord.ext.commands import Context
from wavelink import Player, TrackPlaylist, BuildTrackError, Track
from chime.main import prefix
from chime.misc.BadRequestException import BadRequestException
from chime.misc.CustomCommand import custom_command
from chime.misc.Database import Database
from chime.misc.MusicController import MusicController
from chime.misc.PagedListEmbed import PagedListEmbed
from chime.misc.StyledEmbed import StyledEmbed
from chime.util import search_song
class PersonalPlaylistsCog(commands.Cog, name="Personal Playlists"):
    # Discord cog exposing the `playlists`, `playlist`, `like` and `unlike`
    # commands. All persistence goes through the Database wrapper.
    def __init__(self, bot, db):
        """A cog that handles interfacing with the database and the creation, population and editing of personal playlists."""
        self.bot = bot
        # self.db = db
        self.database = Database(db)
    @commands.command()
    async def playlists(self, ctx: Context):
        """Shows a list of all your playlists. Alias of `""" + prefix + """playlist list`"""
        playlists = self.database.get_all_playlists(ctx.author.id)
        await ctx.send(embed=StyledEmbed(title="Your playlists", description=f"use the `{prefix}playlist` commands for adding songs to a playlist, creating playlists, viewing the playlist's contents etc. \n\n" + "\n".join([f"• **{playlist['name']}**" for playlist in playlists])))
    @custom_command(
        usage="playlist [action] <playlist_name>",
        aliases=["pl", "l"],
        available_args=[
            {"type": "[action]", "args":
                [
                    {"name": "list", "desc": "Lists all your playlists."},
                    {"name": "create", "desc": "Creates a playlist."},
                    {"name": "add", "desc": "Adds a song to the given playlist."},
                    {"name": "show | view", "desc": "Lists the songs in your playlist."},
                    {"name": "play", "desc": "Plays the playlist."},
                    {"name": "share", "desc": "Gives you a link so that you can share your beautiful playlist!"}
                ]
            }],
        examples=[
            {
                "ex": "pl create Favorites",
                "desc": "Creates a playlist called 'Favorites'"
            },
            {
                "ex": "pl create \"chill hop\"",
                "desc": "Creates a playlist called 'chill hop'. Note the quotation marks that we need because the name contains a space."
            },
            {
                "ex": "pl play favorites",
                "desc": "Adds all the songs in the playlist to the queue and plays them."
            },
            {
                "ex": "pl show favorites",
                "desc": "Shows the contents of the playlist 'favorites'."
            },
            {
                "ex": "pl add favorites oh my dayum",
                "desc": "Adds the song 'oh my dayum' to the playlist 'favorites'."
            },
            {
                "ex": "pl list",
                "desc": "Lists all your playlists."
            },
            {
                "ex": "pl share chillhop",
                "desc": "Displays a sharable link for the playlist 'chillhop'."
            }
        ]
    )
    async def playlist(self, ctx: Context, action: str, playlist: str = None, *, additional_args=None):
        """Manage all your personal playlists. You can also manage them on [chime's web app](https://chime.realmayus.xyz)"""
        # Dispatch on the `action` subcommand string; every branch validates
        # its own arguments and raises BadRequestException on user error.
        if action == "create":
            if not additional_args: # if the playlist name contained spaces, the individual parts would be in additional_args
                self.database.create_playlist(ctx.author.id, playlist)
                await ctx.message.add_reaction("<:ok:746377326245445653>")
            else:
                raise BadRequestException("If you want spaces in your playlist's name, you have to wrap it in quotation marks!")
        elif action == "show" or action == "view":
            if playlist is not None:
                contents = self.database.get_playlist_contents(ctx.author.id, playlist)
                if len(contents) == 0:
                    raise BadRequestException("Playlist is empty!")
                # paginated embed: one numbered line per stored song title
                embed = PagedListEmbed(f"Contents of `{playlist}`", [f"{i + 1}. {song['title']}" for i, song in enumerate(contents)], ctx, self.bot)
                await embed.send(embed.get())
            else:
                raise BadRequestException("Please enter a playlist name!")
        elif action == "play":
            if playlist is not None:
                contents = self.database.get_playlist_contents(ctx.author.id, playlist)
                await self.join_channel(ctx)
                if len(contents) == 0:
                    raise BadRequestException("Playlist is empty!")
                index = 0
                failed = 0
                # rebuild each wavelink track from its stored base64 blob and
                # queue it; count (but don't abort on) rebuild failures
                for index, song_data_raw in enumerate(contents):
                    try:
                        track = await self.bot.wavelink.build_track(song_data_raw["data"])
                        controller = self.get_controller(ctx)
                        controller.queue.append(track)
                    except BuildTrackError:
                        failed += 1
                        print("Failed to reconstruct track with data " + song_data_raw["data"])
                # NOTE(review): reports index + 1, i.e. total processed tracks
                # including failed ones -- confirm whether intended
                await ctx.send(embed=StyledEmbed(description=f"**Added** {index + 1} **tracks to queue**."))
                if failed > 0:
                    raise BadRequestException(f"**Failed to add** {failed} **track(s)**!")
            else:
                raise BadRequestException("Please enter a playlist name!")
        elif action == "list":
            await ctx.invoke(self.playlists)
        elif action == "add":
            tracks_to_add = []
            if playlist is None:
                raise BadRequestException("Please enter a playlist name!")
            # invoked by search_song when the user picked a single search result
            async def success_callback(track_, last_msg: Message):
                await last_msg.clear_reactions()
                await last_msg.edit(embed=StyledEmbed(description=f"**Added** {track_} **to playlist {playlist}.**"),
                                    delete_after=10.0)
                tracks_to_add.append(track_)
            # invoked by search_song when the argument was a direct URL
            async def success_callback_url(tracks):
                nonlocal tracks_to_add
                if isinstance(tracks, TrackPlaylist):
                    tracks = tracks.tracks
                    await ctx.send(
                        embed=StyledEmbed(description=f"**Added** {len(tracks)} **tracks to playlist {playlist}.**"),
                        delete_after=10.0)
                else:
                    try:
                        await ctx.send(
                            embed=StyledEmbed(description=f"**Added** {tracks[0]} **to playlist {playlist}.**"),
                            delete_after=10.0)
                    except TypeError:
                        raise BadRequestException("Couldn't add this item to the queue!")
                tracks_to_add = tracks
            if not additional_args:
                raise BadRequestException("You have to provide either a search term or a URL!")
            await search_song(additional_args, ctx, self.bot, success_callback, success_callback_url)
            if len(tracks_to_add) > 0:
                self.database.add_to_playlist(ctx.author.id, playlist, tracks_to_add)
                await ctx.message.add_reaction("<:ok:746377326245445653>")
            else:
                raise BadRequestException("No track selected!")
        elif action == "share":
            if playlist is None:
                raise BadRequestException("Please enter a playlist name!")
            playlist_id = self.database.raise_if_not_exists(ctx.author.id, playlist)
            # share token is base64 of "user_id:playlist_id:name:username",
            # decoded again by the web app at the /view/ endpoint
            message = f"{ctx.author.id}:{playlist_id}:{playlist}:{ctx.author.name}"
            message_bytes = message.encode("utf8")
            base64_bytes = base64.b64encode(message_bytes)
            base64_message = base64_bytes.decode("ascii")
            await ctx.send(embed=StyledEmbed(title="Share this link",
                                             description=f"https://chime.realmayus.xyz/view/{base64_message}"))
        elif action == "delete":
            raise BadRequestException("This feature has not been implemented yet.")
        else:
            raise BadRequestException("This action does not exist. Valid actions are: `create`, `list`, `add`, `show`, `play`, `delete` and `share`.")
    @commands.command()
    async def like(self, ctx):
        """Adds the current song to your 'Liked' playlist"""
        current_track: Track = self.get_controller(ctx).current_track
        if not current_track:
            # NOTE(review): "playling" typo in this user-facing message --
            # fixing it is a behavior change, flagging only
            raise BadRequestException("No track is currently playling!")
        self.database.create_if_non_existant(ctx.author.id, "Liked")
        self.database.add_to_playlist(ctx.author.id, "Liked", [current_track])
        await ctx.message.add_reaction("<:ok:746377326245445653>")
    @commands.command()
    async def unlike(self, ctx):
        """Removes the current song from your 'Liked' playlist"""
        current_track: Track = self.get_controller(ctx).current_track
        if not current_track:
            raise BadRequestException("No track is currently playing!")
        self.database.remove_from_playlist(ctx.author.id, "Liked", [current_track])
        await ctx.message.add_reaction("<:ok:746377326245445653>")
    def get_controller(self, value: Union[commands.Context, wavelink.Player]):
        """Return the given guild's instance of the MusicController"""
        # accept either a command Context or a wavelink Player as the key source
        if isinstance(value, commands.Context):
            gid = value.guild.id
        else:
            gid = value.guild_id
        # EAFP: create and cache a controller on first access per guild
        try:
            controller = self.bot.controllers[gid]
        except KeyError:
            controller = MusicController(self.bot, gid)
            self.bot.controllers[gid] = controller
        return controller
    async def join_channel(self, ctx):
        # Join the author's voice channel (no-op if already there) and point
        # the guild controller's text channel at the invoking channel.
        player: Player = self.bot.wavelink.get_player(ctx.guild.id)
        try:
            if player.channel_id != ctx.author.voice.channel.id:
                await player.connect(ctx.author.voice.channel.id)
            controller = self.get_controller(ctx)
            controller.channel = ctx.channel
        except AttributeError:
            # ctx.author.voice is None when the author isn't in a voice channel
            raise BadRequestException("You are not connected to a voice channel.")
| 48.152074
| 281
| 0.5813
|
4a008f6bca8ca47c26ec717fb1b7ebc477554e59
| 2,924
|
py
|
Python
|
Scripts/Mapping/pdbList.py
|
merkllab/ConsGNN
|
75cf7d4d458a8c7602d96be2c0863cf5da7c4ce5
|
[
"MIT"
] | null | null | null |
Scripts/Mapping/pdbList.py
|
merkllab/ConsGNN
|
75cf7d4d458a8c7602d96be2c0863cf5da7c4ce5
|
[
"MIT"
] | null | null | null |
Scripts/Mapping/pdbList.py
|
merkllab/ConsGNN
|
75cf7d4d458a8c7602d96be2c0863cf5da7c4ce5
|
[
"MIT"
] | null | null | null |
import sys
import re
import urllib2, urlparse, gzip
from StringIO import StringIO
from HTMLParser import HTMLParser
# The script generates a list of the PDB-IDs together with the Pfam- and InterPro-IDs
# Execution: python pdbListFamGO.py pdblist_AB
class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
    """Redirect handler that preserves the 301/302 status code.

    The default handler follows redirects silently; this subclass records the
    original status code on the returned response as ``result.status``.
    """
    def http_error_301(self, req, fp, code, msg, headers):
        # delegate to the stock handler, then keep the code for the caller
        result = urllib2.HTTPRedirectHandler.http_error_301(
            self, req, fp, code, msg, headers)
        result.status = code
        return result
    def http_error_302(self, req, fp, code, msg, headers):
        result = urllib2.HTTPRedirectHandler.http_error_302(
            self, req, fp, code, msg, headers)
        result.status = code
        return result
class DefaultErrorHandler(urllib2.HTTPDefaultErrorHandler):
    """Error handler that returns HTTP errors instead of raising them.

    The returned HTTPError doubles as a response object; the status code is
    attached as ``result.status`` so callers can inspect it.
    """
    def http_error_default(self, req, fp, code, msg, headers):
        result = urllib2.HTTPError(
            req.get_full_url(), code, msg, headers, fp)
        result.status = code
        return result
class MyHTMLParser(HTMLParser):
    """HTML parser that flattens a page into ``self.data``.

    Collects, in document order, every ``href`` attribute value of ``<a>``
    tags and every text node. Consumers scan the flat list for Pfam/InterPro
    links and chain labels.
    """
    def __init__(self):
        HTMLParser.__init__(self)
        self.data = []  # href values and text nodes, in document order
    def handle_start_tag_unused(self):
        pass
    def handle_starttag(self, tag, attrs):
        # record only hyperlink targets
        if tag == "a":
            for name, value in attrs:
                if name == "href":
                    self.data.append(value)
    def handle_data(self, data):
        # record raw text nodes (including whitespace-only ones)
        self.data.append(data)
def fetch(source):
    """Download *source* (a URL) and return the gunzipped response body.

    If *source* is already a file-like object (has a ``read`` attribute) it is
    returned unchanged, so callers may pass either a URL or an open file.

    NOTE(review): the body is unconditionally gunzipped; this assumes the
    server honours the ``Accept-encoding: gzip`` request header -- confirm
    before pointing this at other hosts.
    """
    # pass through objects that are already readable
    if hasattr(source, 'read'):
        return source
    request = urllib2.Request(source)
    request.add_header('Accept-encoding', 'gzip')
    # custom handlers keep redirect status codes and HTTP errors visible
    opener = urllib2.build_opener(SmartRedirectHandler(), DefaultErrorHandler())
    f = opener.open(request)
    # fix: the original leaked the connection if read() raised, and carried
    # an unused local (`result = {}`); close in finally instead
    try:
        data = f.read()
    finally:
        f.close()
    return gzip.GzipFile(fileobj=StringIO(data)).read()
def readPDB(pdbN):
pdbN = "".join(pdbN)
str = "http://www.ebi.ac.uk/pdbe/entry/pdb/" + pdbN + "/analysis"
print ">>> %s:" % pdbN
html = fetch(str)
parser = MyHTMLParser()
result = parser.feed(html)
result = parser.data
parser.close()
cnt = 0
mask = 0
output = ""
for line in result:
cnt = cnt + 1
if "Chains:" in line or "Chain:" in line and len(line) < 10:
if len(output) > 0:
print output
output = result[cnt]
output += " = "
if "pfam.xfam.org" in line:
part = re.split('/',line)
for p1 in part:
if "PF" in p1:
output+=" "+p1
if "interpro" in line:
if "superfamily" in line:
mask = 1
continue
if mask == 1:
mask = 0
continue
if "www.ebi.ac.uk" in line:
part = re.split('/|"',line)
for p1 in part:
if "IPR" in p1:
output = output + " " + p1
print output
def _main(argv):
    """Entry point: read the file named by *argv* and run readPDB on every
    four-character PDB identifier it contains (one id per line)."""
    # renamed from `file`, which shadowed the builtin; the original also
    # never closed the handle and carried an unused `cnt` counter
    filename = "".join(argv)
    with open(filename, 'rb') as f:
        lines = re.split('\n', f.read())
    for pdbN in lines:
        # strip all whitespace; a valid PDB id is exactly four characters
        pdbN = ''.join(pdbN.split())
        if len(pdbN) == 4:
            readPDB(pdbN)
if __name__ == '__main__':
    # standalone execution: first CLI argument is the path to the PDB id list
    _main(sys.argv[1:])
| 27.327103
| 86
| 0.613201
|
4a009034b05457e2e6d2f5ee0badd69ae1fba804
| 15,736
|
py
|
Python
|
Builds/LOCAR/Software/Windows/Python 2.7/output/plotting_py3.py
|
pd3d/magneto
|
da619b58b3e3c0ba9d6ac149e8902d8fc4614ccb
|
[
"MIT"
] | 1
|
2021-05-18T16:50:11.000Z
|
2021-05-18T16:50:11.000Z
|
Builds/LOCAR/Software/Windows/Python 2.7/output/plotting_py3.py
|
dash-orlando/magneto
|
da619b58b3e3c0ba9d6ac149e8902d8fc4614ccb
|
[
"MIT"
] | null | null | null |
Builds/LOCAR/Software/Windows/Python 2.7/output/plotting_py3.py
|
dash-orlando/magneto
|
da619b58b3e3c0ba9d6ac149e8902d8fc4614ccb
|
[
"MIT"
] | null | null | null |
"""
plotting
Script developed to plot text data obtained from the LAPARO trainer
--Python 3 support
"""
# ============== #
# Import Modules
# ============== #
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
import numpy as np
# variables
filename = "10-26-18_15-1-49.txt"
##filename = "11-1-18_15-14-57.txt"
# ========================================================================= #
# import data from text file
# ========================================================================= #
xm = []
ym = []
zm = []
xe = []
ye = []
ze = []
length = []
sol_time = []
#prog_time = []
with open( filename, 'r' ) as file:
for line in file:
line = line.strip( '\n' ).split(',')
xm.append( float( line[0] ) )
ym.append( float( line[1] ) )
zm.append( float( line[2] ) )
xe.append( float( line[3] ) )
ye.append( float( line[4] ) )
ze.append( float( line[5] ) )
length.append( float( line[6] ) )
sol_time.append( float( line[7] ) )
#prog_time.append( line[8] )
# ========================================================================= #
# Data processing, statistics
# ========================================================================= #
# Data for three-dimensional scattered points
## position limits
"""
pos = [(233, 370),
(396, 529),
(552, 691),
(707, 843),
(869, 999),
(1014, 1150),
(1166, 1303),
(1317, 1449),
(1458, 1592),
(1606, 1742)]
"""
pos = [(160, 263),
(276, 387),
(407, 515),
(532, 643),
(662, 769),
(784, 897),
(908, 1026),
(1041, 1153),
(1160, 1280),
(1291, 1415)]
Npos = len(pos)
# statistics
xm_mean = []
ym_mean = []
zm_mean = []
xe_mean = []
ye_mean = []
ze_mean = []
xm_std = []
ym_std = []
zm_std = []
xe_std = []
ye_std = []
ze_std = []
xm_se = []
ym_se = []
zm_se = []
xe_se = []
ye_se = []
ze_se = []
filename = 'stats.txt'
file = open( filename, 'w')
for i in range( 0, Npos ):
data_len = pos[i][1]-pos[i][0] # number of data points (n)
# means
## magnet
xm_mean.append( np.mean( xm[pos[i][0]:pos[i][1]] ) )
ym_mean.append( np.mean( ym[pos[i][0]:pos[i][1]] ) )
zm_mean.append( np.mean( zm[pos[i][0]:pos[i][1]] ) )
## end effector
xe_mean.append( np.mean( xe[pos[i][0]:pos[i][1]] ) )
ye_mean.append( np.mean( ye[pos[i][0]:pos[i][1]] ) )
ze_mean.append( np.mean( ze[pos[i][0]:pos[i][1]] ) )
# std
## magnet
xm_std.append( np.std( xm[pos[i][0]:pos[i][1]] ) )
ym_std.append( np.std( ym[pos[i][0]:pos[i][1]] ) )
zm_std.append( np.std( zm[pos[i][0]:pos[i][1]] ) )
## end effector
xe_std.append( np.std( xe[pos[i][0]:pos[i][1]] ) )
ye_std.append( np.std( ye[pos[i][0]:pos[i][1]] ) )
ze_std.append( np.std( ze[pos[i][0]:pos[i][1]] ) )
# se
## magnet
xm_se.append( xm_std[i] / np.sqrt( data_len ) )
ym_se.append( ym_std[i] / np.sqrt( data_len ) )
zm_se.append( zm_std[i] / np.sqrt( data_len ) )
## end effector
xe_se.append( xe_std[i] / np.sqrt( data_len ) )
ye_se.append( ye_std[i] / np.sqrt( data_len ) )
ze_se.append( ze_std[i] / np.sqrt( data_len ) )
file.write( '{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {} \n'.format( xm_mean[i],
ym_mean[i],
zm_mean[i],
xe_mean[i],
ye_mean[i],
ze_mean[i],
xm_std[i],
ym_std[i],
zm_std[i],
xe_std[i],
ye_std[i],
ze_std[i],
xm_se[i],
ym_se[i],
zm_se[i],
xe_se[i],
ye_se[i],
ze_se[i] ) )
file.close()
# ========================================================================= #
# plotting
# ========================================================================= #
### ========================================================================= #
#### raw data plot
### ========================================================================= #
##ax1 = plt.axes(projection='3d')
##
### Data for a three-dimensional line
###zline = np.linspace(0, 15, 1000)
###xline = np.sin(zline)
###yline = np.cos(zline)
###ax.plot3D(xline, yline, zline, 'gray')
##
##for i in range( 0, Npos ):
##
## # magnet position
## ax1.scatter3D(xm[pos[i][0]:pos[i][1]],
## ym[pos[i][0]:pos[i][1]],
## zm[pos[i][0]:pos[i][1]],
## c=zm[pos[i][0]:pos[i][1]],
## cmap='Greens')
##
## # end-effector position
## ax1.scatter3D(xe[pos[i][0]:pos[i][1]],
## ye[pos[i][0]:pos[i][1]],
## ze[pos[i][0]:pos[i][1]],
## c=ze[pos[i][0]:pos[i][1]],
## cmap='Reds')
##
##
###ax.legend()
##ax1.set_xlim(-150, 150)
##ax1.set_ylim(-150, 150)
##ax1.set_zlim(-250, 250)
##ax1.set_xlabel('X')
##ax1.set_ylabel('Y')
##ax1.set_zlabel('Z')
##plt.show()
### ========================================================================= #
#### stats
### ========================================================================= #
##ax2 = plt.axes(projection='3d')
##
### Data for a three-dimensional line
###zline = np.linspace(0, 15, 1000)
###xline = np.sin(zline)
###yline = np.cos(zline)
###ax.plot3D(xline, yline, zline, 'gray')
##
##for i in range( 0, Npos ):
##
## # magnet position
## ax2.scatter3D(xm_mean[i],
## ym_mean[i],
## zm_mean[i],
## color='Blue')
##
## # magnet position error bars
## ax2.plot([xm_mean[i]+xm_se[i]*100, xm_mean[i]-xm_se[i]*100],
## [ym_mean[i], ym_mean[i]],
## [zm_mean[i], zm_mean[i]],
## marker="_",
## color='Black')
##
## ax2.plot([xm_mean[i], xm_mean[i]],
## [ym_mean[i]+ym_se[i]*100, ym_mean[i]-ym_se[i]*100],
## [zm_mean[i], zm_mean[i]],
## marker="_",
## color='Black')
##
## ax2.plot([xm_mean[i], xm_mean[i]],
## [ym_mean[i], ym_mean[i]],
## [zm_mean[i]+zm_se[i]*100, zm_mean[i]-zm_se[i]*100],
## marker="_",
## color='Black')
##
## # end-effector position
## ax2.scatter3D(xe_mean[i],
## ye_mean[i],
## ze_mean[i],
## color='Red')
##
## # end-effector position error bars
## ax2.plot([xe_mean[i]+xe_se[i]*100, xe_mean[i]-xe_se[i]*100],
## [ye_mean[i], ye_mean[i]],
## [ze_mean[i], ze_mean[i]],
## marker="_",
## color='Black')
##
## ax2.plot([xe_mean[i], xe_mean[i]],
## [ye_mean[i]+ye_se[i]*100, ye_mean[i]-ye_se[i]*100],
## [ze_mean[i], ze_mean[i]],
## marker="_",
## color='Black')
##
## ax2.plot([xe_mean[i], xe_mean[i]],
## [ye_mean[i], ye_mean[i]],
## [ze_mean[i]+ze_se[i]*100, ze_mean[i]-ze_se[i]*100],
## marker="_",
## color='Black')
##
##
###ax.legend()
##ax2.set_xlim(-150, 150)
##ax2.set_ylim(-150, 150)
##ax2.set_zlim(-250, 250)
##ax2.set_xlabel('X')
##ax2.set_ylabel('Y')
##ax2.set_zlabel('Z')
##plt.show()
# ========================================================================= #
## other
# ========================================================================= #
"""
actual_pos = [(22.50, -84.00, -158.30),
(24.00, 95.00, -206.40),
(83.00, 37.00, -125.10),
(54.00, 71.00, -225.50),
(-34.00, -38.00, -194.20),
(-84.00, -93.00, -225.50),
(-121.50, -11.50, -225.50),
(-81.50, 30.00, -206.40),
(-78.00, 48.00, -206.40),
(-65.50, 63.00, -206.40)]
"""
actual_pos = [(20.00, -85.00, -165.00),
(26.00, 95.00, -212.80),
(85.00, 37.00, -129.80),
(55.75, 74.50, -231.80),
(-37.50, -36.00, -199.80),
(-86.00, -90.00, -231.80),
(-126.50, -8.00, -231.80),
(-82.50, 27.50, -212.80),
(-79.00, 46.00, -212.80),
(-66.50, 60.50, -212.80)]
ax1 = plt.axes(projection='3d')
# Data for a three-dimensional line
#zline = np.linspace(0, 15, 1000)
#xline = np.sin(zline)
#yline = np.cos(zline)
#ax.plot3D(xline, yline, zline, 'gray')
for i in range( 0, Npos ):
# magnet position
ax1.scatter3D(actual_pos[i][0],
actual_pos[i][1],
actual_pos[i][2],
color='Blue')
# end-effector position
ax1.scatter3D(xe[pos[i][0]:pos[i][1]],
ye[pos[i][0]:pos[i][1]],
ze[pos[i][0]:pos[i][1]],
color='Red')
#ax.legend()
ax1.set_xlim(-150, 150)
ax1.set_ylim(-150, 150)
ax1.set_zlim(-250, 250)
ax1.set_xlabel('X')
ax1.set_ylabel('Y')
ax1.set_zlabel('Z')
plt.show()
# ========================================================================= #
## flat subplots
# ========================================================================= #
"""
actual_pos = [(22.50, -84.00, -158.30),
(24.00, 95.00, -206.40),
(83.00, 37.00, -125.10),
(54.00, 71.00, -225.50),
(-34.00, -38.00, -194.20),
(-84.00, -93.00, -225.50),
(-121.50, -11.50, -225.50),
(-81.50, 30.00, -206.40),
(-78.00, 48.00, -206.40),
(-65.50, 63.00, -206.40)]
"""
actual_pos = [(20.00, -85.00, -165.00),
(26.00, 95.00, -212.80),
(85.00, 37.00, -129.80),
(55.75, 74.50, -231.80),
(-37.50, -36.00, -199.80),
(-86.00, -90.00, -231.80),
(-126.50, -8.00, -231.80),
(-82.50, 27.50, -212.80),
(-79.00, 46.00, -212.80),
(-66.50, 60.50, -212.80)]
# ------------------------------------------------------------------------- #
## XY
# ------------------------------------------------------------------------- #
# XY projection: true magnet position (blue) vs. computed end-effector
# positions (red) for every test location, plus two fixed-size reference
# circles so marker sizes can be read against a known scale.
# NOTE(review): indentation was reconstructed -- assumed only the two scatter
# calls run per position and the reference circles / legend / show are drawn
# once on the shared figure; confirm against the original script.
for i in range( 0, Npos ):
    # magnet position
    magpos = plt.scatter(actual_pos[i][0],
                         actual_pos[i][1],
                         color='Blue')
    # end-effector position
    endeff = plt.scatter(xe[pos[i][0]:pos[i][1]],
                         ye[pos[i][0]:pos[i][1]],
                         color='Red')
# ref circle, magnet
rmcx = [100]
rmcy = [-100]
rm = [800]
refmag = plt.scatter(rmcx, rmcy, s=rm, color='black', edgecolor='black')
# ref circle, end effector
recx = [100]
recy = [-100]
re = [100]  # NOTE(review): shadows the stdlib `re` module name
refeff = plt.scatter(recx, recy, s=re, color='white', edgecolor='black')
# 25 mm grid ticks over the +/-150 mm workspace
ticks = np.linspace(-150, 150, num=int(300/25 + 1), endpoint=True)
plt.xticks(ticks, fontsize=9)
plt.yticks(ticks, fontsize=9)
plt.xlabel('X (mm)')
plt.ylabel('Y (mm)')
plt.legend((magpos, endeff, refmag, refeff),
           ("Magnet Position","End-Effector Position","Magnet Ref. Size","End Effector Ref. Size"),
           labelspacing=1.5,
           #ncol=4,
           fontsize=10,
           framealpha=1,
           shadow=True,
           borderpad=1,
           loc=1)
plt.grid()
plt.show()
# ------------------------------------------------------------------------- #
## XZ
# ------------------------------------------------------------------------- #
# XZ projection of the same data; reference circles are disabled here
# (kept below as a no-op string for easy re-enabling).
# NOTE(review): indentation reconstructed on the same assumption as the XY
# section -- scatters per position, decorations once.
for i in range( 0, Npos ):
    # magnet position
    magpos = plt.scatter(actual_pos[i][0],
                         actual_pos[i][2],
                         color='Blue')
    # end-effector position
    endeff = plt.scatter(xe[pos[i][0]:pos[i][1]],
                         ze[pos[i][0]:pos[i][1]],
                         color='Red')
"""
# ref circle, magnet
rmcx = [100]
rmcy = [-100]
rm = [800]
refmag = plt.scatter(rmcx, rmcy, s=rm, color='black', edgecolor='black')
# ref circle, end effector
recx = [100]
recy = [-100]
re = [100]
refeff = plt.scatter(recx, recy, s=re, color='white', edgecolor='black')
"""
# 25 mm ticks: X spans the full workspace, Z only its -250..-150 mm band
xticks = np.linspace(-150, 150, num=int(300/25 + 1), endpoint=True)
zticks = np.linspace(-250, -150, num=int(100/25 + 1), endpoint=True)
plt.xticks(xticks, fontsize=9)
plt.yticks(zticks, fontsize=9)
plt.xlabel('X (mm)')
plt.ylabel('Z (mm)')
plt.legend((magpos, endeff),
           ("Magnet Position","End-Effector Position"),
           labelspacing=1.5,
           #ncol=4,
           fontsize=10,
           framealpha=1,
           shadow=True,
           borderpad=1,
           loc=1)
plt.grid()
plt.show()
# ------------------------------------------------------------------------- #
## YZ
# ------------------------------------------------------------------------- #
# YZ projection of the same data; mirrors the XZ section exactly except for
# the plotted axes.
# NOTE(review): indentation reconstructed on the same assumption as the XY
# section -- scatters per position, decorations once.
for i in range( 0, Npos ):
    # magnet position
    magpos = plt.scatter(actual_pos[i][1],
                         actual_pos[i][2],
                         color='Blue')
    # end-effector position
    endeff = plt.scatter(ye[pos[i][0]:pos[i][1]],
                         ze[pos[i][0]:pos[i][1]],
                         color='Red')
"""
# ref circle, magnet
rmcx = [100]
rmcy = [-100]
rm = [800]
refmag = plt.scatter(rmcx, rmcy, s=rm, color='black', edgecolor='black')
# ref circle, end effector
recx = [100]
recy = [-100]
re = [100]
refeff = plt.scatter(recx, recy, s=re, color='white', edgecolor='black')
"""
# 25 mm ticks: Y spans the full workspace, Z only its -250..-150 mm band
yticks = np.linspace(-150, 150, num=int(300/25 + 1), endpoint=True)
zticks = np.linspace(-250, -150, num=int(100/25 + 1), endpoint=True)
plt.xticks(yticks, fontsize=9)
plt.yticks(zticks, fontsize=9)
plt.xlabel('Y (mm)')
plt.ylabel('Z (mm)')
plt.legend((magpos, endeff),
           ("Magnet Position","End-Effector Position"),
           labelspacing=1.5,
           #ncol=4,
           fontsize=10,
           framealpha=1,
           shadow=True,
           borderpad=1,
           loc=1)
plt.grid()
plt.show()
| 31.346614
| 112
| 0.378241
|
4a00910b68a4cbcca8ce6199553e91a5aedb4fc1
| 67,055
|
py
|
Python
|
cms/tests/test_menu.py
|
rspeed/django-cms-contrib
|
c5fbbea191646ab922b5ff6f89a1de6baa648e7f
|
[
"BSD-3-Clause"
] | null | null | null |
cms/tests/test_menu.py
|
rspeed/django-cms-contrib
|
c5fbbea191646ab922b5ff6f89a1de6baa648e7f
|
[
"BSD-3-Clause"
] | null | null | null |
cms/tests/test_menu.py
|
rspeed/django-cms-contrib
|
c5fbbea191646ab922b5ff6f89a1de6baa648e7f
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
import copy
from cms.test_utils.project.sampleapp.cms_apps import NamespacedApp, SampleApp, SampleApp2
from django.conf import settings
from django.contrib.auth.models import AnonymousUser, Permission, Group
from django.template import Template, TemplateSyntaxError
from django.test.utils import override_settings
from django.utils.translation import activate
from cms.apphook_pool import apphook_pool
from menus.base import NavigationNode
from menus.menu_pool import menu_pool, _build_nodes_inner_for_one_menu
from menus.models import CacheKey
from menus.utils import mark_descendants, find_selected, cut_levels
from cms.api import create_page
from cms.cms_menus import get_visible_pages
from cms.models import Page, ACCESS_PAGE_AND_DESCENDANTS
from cms.models.permissionmodels import GlobalPagePermission, PagePermission
from cms.test_utils.project.sampleapp.cms_menus import SampleAppMenu, StaticMenu, StaticMenu2
from cms.test_utils.fixtures.menus import (MenusFixture, SubMenusFixture,
SoftrootFixture, ExtendedMenusFixture)
from cms.test_utils.testcases import CMSTestCase
from cms.test_utils.util.context_managers import apphooks, LanguageOverride
from cms.test_utils.util.mock import AttributeObject
from cms.utils import get_cms_setting
from cms.utils.i18n import force_language
class BaseMenuTest(CMSTestCase):
    """Shared harness for menu tests.

    Restricts the menu pool to the core ``CMSMenu`` for the duration of
    each test (restored in ``tearDown``), clears the per-site menu cache
    and forces the "en" locale.
    """

    def _get_nodes(self, path='/'):
        """Build a small five-node navigation tree and run the modifiers.

        Returns a ``(tree, flat_nodes)`` pair: the root list produced by
        ``_build_nodes_inner_for_one_menu`` and the flat node list in
        creation order.
        """
        # (title, url, id, parent_id) specs; parent_id None => root node.
        specs = (
            ('1', '/1/', 1, None),
            ('2', '/2/', 2, 1),
            ('3', '/3/', 3, 2),
            ('4', '/4/', 4, 2),
            ('5', '/5/', 5, None),
        )
        flat_nodes = [NavigationNode(title, url, node_id, parent_id)
                      for title, url, node_id, parent_id in specs]
        tree = _build_nodes_inner_for_one_menu(list(flat_nodes), "test")
        request = self.get_request(path)
        menu_pool.get_renderer(request).apply_modifiers(tree, request)
        return tree, flat_nodes

    def setUp(self):
        super(BaseMenuTest, self).setUp()
        if not menu_pool.discovered:
            menu_pool.discover_menus()
        # Remember the full registry, then shrink it to CMSMenu only.
        self.old_menu = menu_pool.menus
        menu_pool.menus = {'CMSMenu': self.old_menu['CMSMenu']}
        menu_pool.clear(settings.SITE_ID)
        activate("en")

    def tearDown(self):
        # Restore the registry captured in setUp.
        menu_pool.menus = self.old_menu
        super(BaseMenuTest, self).tearDown()

    def get_page(self, num):
        """Return the public page whose title is P<num>."""
        public_pages = Page.objects.public()
        return public_pages.get(title_set__title='P%s' % num)
class MenuDiscoveryTest(ExtendedMenusFixture, CMSTestCase):
    """Tests for menu discovery/registration in menu_pool, in particular how
    attached menus (navigation extenders) expand into one registry entry per
    page instance once pages reference them."""

    def setUp(self):
        super(MenuDiscoveryTest, self).setUp()
        # Reset the pool to a known state and register exactly three menus:
        # one plain menu and two attachable (extender) menus.
        menu_pool.discovered = False
        self.old_menu = menu_pool.menus
        menu_pool.menus = {}
        menu_pool.discover_menus()
        menu_pool.register_menu(SampleAppMenu)
        menu_pool.register_menu(StaticMenu)
        menu_pool.register_menu(StaticMenu2)

    def tearDown(self):
        # Restore the registry saved in setUp.
        menu_pool.menus = self.old_menu
        super(MenuDiscoveryTest, self).tearDown()

    def test_menu_registered(self):
        """Registry counts with and without the for_rendering filter."""
        menu_pool.discovered = False
        menu_pool.discover_menus()
        # The following tests that get_registered_menus()
        # returns all menus registered based on the for_rendering flag
        # A list of menu classes registered regardless of whether they
        # have instances attached or not
        registered = menu_pool.get_registered_menus(for_rendering=False)
        # A list of menu classes registered and filter out any attached menu
        # if it does not have instances.
        registered_for_rendering = menu_pool.get_registered_menus(for_rendering=True)
        # We've registered three menus
        self.assertEqual(len(registered), 3)
        # But two of those are attached menus and shouldn't be rendered.
        self.assertEqual(len(registered_for_rendering), 1)
        # Attached both menus to separate pages
        create_page("apphooked-page", "nav_playground.html", "en",
                    published=True,
                    navigation_extenders='StaticMenu')
        create_page("apphooked-page", "nav_playground.html", "en",
                    published=True,
                    navigation_extenders='StaticMenu2')
        registered = menu_pool.get_registered_menus(for_rendering=False)
        registered_for_rendering = menu_pool.get_registered_menus(for_rendering=True)
        # The count should be 3 but grows to 5 because of the two published instances.
        # Even though we've registered three menus, the total is five because two
        # are attached menus and each attached menu has two instances
        # (draft + public).
        self.assertEqual(len(registered), 5)
        self.assertEqual(len(registered_for_rendering), 5)

    def test_menu_registered_in_renderer(self):
        """A menu renderer resolves the registered menus per request."""
        menu_pool.discovered = False
        menu_pool.discover_menus()
        # The following tests that a menu renderer calculates the registered
        # menus on a request basis.
        request_1 = self.get_request('/en/')
        request_1_renderer = menu_pool.get_renderer(request_1)
        registered = menu_pool.get_registered_menus(for_rendering=False)
        self.assertEqual(len(registered), 3)
        # Only the plain (non-attached) menu is renderable before any page
        # references the extenders.
        self.assertEqual(len(request_1_renderer.menus), 1)
        create_page("apphooked-page", "nav_playground.html", "en",
                    published=True,
                    navigation_extenders='StaticMenu')
        create_page("apphooked-page", "nav_playground.html", "en",
                    published=True,
                    navigation_extenders='StaticMenu2')
        request_2 = self.get_request('/en/')
        request_2_renderer = menu_pool.get_renderer(request_2)
        # The count should be 3 but grows to 5 because of the two published instances.
        self.assertEqual(len(request_2_renderer.menus), 5)

    def test_menu_expanded(self):
        """Attached menus expand to per-page keys like 'StaticMenu:<pk>'."""
        menu_pool.discovered = False
        menu_pool.discover_menus()
        with self.settings(ROOT_URLCONF='cms.test_utils.project.urls_for_apphook_tests'):
            with apphooks(SampleApp):
                page = create_page("apphooked-page", "nav_playground.html", "en",
                                   published=True, apphook="SampleApp",
                                   navigation_extenders='StaticMenu')
                self.assertTrue(menu_pool.discovered)
                menus = menu_pool.get_registered_menus()
                self.assertTrue(menu_pool.discovered)
                # Counts the number of StaticMenu (which is expanded) and StaticMenu2
                # (which is not) and checks the key name for the StaticMenu instances
                static_menus = 2
                static_menus_2 = 1
                for key, menu in menus.items():
                    if key.startswith('StaticMenu:'):
                        static_menus -= 1
                        # Expanded keys end with either the public or the
                        # draft page pk.
                        self.assertTrue(key.endswith(str(page.get_public_object().pk)) or key.endswith(str(page.get_draft_object().pk)))
                    if key == 'StaticMenu2':
                        static_menus_2 -= 1
                self.assertEqual(static_menus, 0)
                self.assertEqual(static_menus_2, 0)

    def test_multiple_menus(self):
        """Menus can be looked up by attribute across apphooked pages."""
        with self.settings(ROOT_URLCONF='cms.test_utils.project.urls_for_apphook_tests'):
            with apphooks(NamespacedApp, SampleApp2):
                apphook_pool.discovered = False
                apphook_pool.discover_apps()
                create_page("apphooked-page", "nav_playground.html", "en",
                            published=True, apphook="SampleApp2")
                create_page("apphooked-page", "nav_playground.html", "en",
                            published=True,
                            navigation_extenders='StaticMenu')
                create_page("apphooked-page", "nav_playground.html", "en",
                            published=True, apphook="NamespacedApp", apphook_namespace='whatever',
                            navigation_extenders='StaticMenu')
                self.assertEqual(len(menu_pool.get_menus_by_attribute("cms_enabled", True)), 2)
class ExtendedFixturesMenuTests(ExtendedMenusFixture, BaseMenuTest):
    """
    Tree from fixture:

        + P1
        | + P2
        |   + P3
        | + P9
        |   + P10
        |     + P11
        + P4
        | + P5
        + P6 (not in menu)
          + P7
          + P8
    """

    def get_page(self, num):
        """Return the public page titled P<num>."""
        return Page.objects.public().get(title_set__title='P%s' % num)

    def get_level(self, num):
        # NOTE(review): filters on ``level`` while the sibling
        # FixturesMenuTests.get_level filters on ``depth`` (the treebeard
        # field). Not exercised by the visible tests -- confirm the field
        # name before relying on this helper.
        return Page.objects.public().filter(level=num)

    def get_all_pages(self):
        return Page.objects.public()

    def test_menu_failfast_on_invalid_usage(self):
        """Passing a non-template argument to show_menu must raise."""
        context = self.get_context()
        context['child'] = self.get_page(1)
        # test standard show_menu
        with self.settings(DEBUG=True, TEMPLATE_DEBUG=True):
            tpl = Template("{% load menu_tags %}{% show_menu 0 0 0 0 'menu/menu.html' child %}")
            self.assertRaises(TemplateSyntaxError, tpl.render, context)

    def test_show_submenu_nephews(self):
        """show_sub_menu's nephew limit restricts siblings' subtree depth."""
        page_2 = self.get_page(2)
        context = self.get_context(path=page_2.get_absolute_url(), page=page_2)
        tpl = Template("{% load menu_tags %}{% show_sub_menu 100 1 1 %}")
        tpl.render(context)
        nodes = context["children"]
        # P2 is the selected node
        self.assertTrue(nodes[0].selected)
        # Should include P10 but not P11
        self.assertEqual(len(nodes[1].children), 1)
        self.assertFalse(nodes[1].children[0].children)
        tpl = Template("{% load menu_tags %}{% show_sub_menu 100 1 %}")
        tpl.render(context)
        nodes = context["children"]
        # should now include both P10 and P11
        self.assertEqual(len(nodes[1].children), 1)
        self.assertEqual(len(nodes[1].children[0].children), 1)

    def test_show_submenu_template_root_level_none_no_nephew_limit(self):
        """root_level None with a large nephew limit shows all children."""
        root = self.get_page(1)
        context = self.get_context(path=root.get_absolute_url(), page=root)
        tpl = Template("{% load menu_tags %}{% show_sub_menu 100 None 100 %}")
        tpl.render(context)
        nodes = context["children"]
        # default nephew limit, P2 and P9 in the nodes list
        self.assertEqual(len(nodes), 2)
class FixturesMenuTests(MenusFixture, BaseMenuTest):
    """
    Tree from fixture:

        + P1
        | + P2
        |   + P3
        + P4
        | + P5
        + P6 (not in menu)
          + P7
          + P8
    """

    def get_page(self, num):
        """Return the public page titled P<num>."""
        return Page.objects.public().get(title_set__title='P%s' % num)

    def get_level(self, num):
        # treebeard ``depth``: 1 is the root level.
        return Page.objects.public().filter(depth=num)

    def get_all_pages(self):
        return Page.objects.public()

    def test_menu_failfast_on_invalid_usage(self):
        """Passing a non-template argument to show_menu must raise."""
        context = self.get_context()
        context['child'] = self.get_page(1)
        # test standard show_menu
        with self.settings(DEBUG=True, TEMPLATE_DEBUG=True):
            tpl = Template("{% load menu_tags %}{% show_menu 0 0 0 0 'menu/menu.html' child %}")
            self.assertRaises(TemplateSyntaxError, tpl.render, context)

    def test_basic_cms_menu(self):
        """Only CMSMenu is registered; it yields one node per public page."""
        menus = menu_pool.get_registered_menus()
        self.assertEqual(len(menus), 1)
        with force_language("en"):
            response = self.client.get(self.get_pages_root())  # path = '/'
            self.assertEqual(response.status_code, 200)
        request = self.get_request()
        renderer = menu_pool.get_renderer(request)
        # test the cms menu class
        menu = renderer.get_menu('CMSMenu')
        nodes = menu.get_nodes(request)
        self.assertEqual(len(nodes), len(self.get_all_pages()))

    def test_show_menu(self):
        """Default show_menu marks selected/sibling/descendant correctly."""
        root = self.get_page(1)
        context = self.get_context(page=root)
        # test standard show_menu
        tpl = Template("{% load menu_tags %}{% show_menu %}")
        tpl.render(context)
        nodes = context['children']
        self.assertEqual(len(nodes), 2)
        self.assertEqual(nodes[0].selected, True)
        self.assertEqual(nodes[0].sibling, False)
        self.assertEqual(nodes[0].descendant, False)
        self.assertEqual(nodes[0].children[0].descendant, True)
        self.assertEqual(nodes[0].children[0].children[0].descendant, True)
        self.assertEqual(nodes[0].get_absolute_url(), self.get_pages_root())
        self.assertEqual(nodes[1].get_absolute_url(), self.get_page(4).get_absolute_url())
        self.assertEqual(nodes[1].sibling, True)
        self.assertEqual(nodes[1].selected, False)

    def test_show_menu_num_queries(self):
        """Rendering the menu from a cold cache costs exactly 6 queries."""
        context = self.get_context()
        # test standard show_menu
        with self.assertNumQueries(6):
            """
            The queries should be:
                get all public pages
                get all draft pages from public pages
                get all page permissions
                get all titles
                get the menu cache key
                set the menu cache key
            """
            tpl = Template("{% load menu_tags %}{% show_menu %}")
            tpl.render(context)

    def test_show_menu_cache_key_leak(self):
        """Re-rendering must reuse the single CacheKey row, not add more."""
        context = self.get_context()
        tpl = Template("{% load menu_tags %}{% show_menu %}")
        self.assertEqual(CacheKey.objects.count(), 0)
        tpl.render(context)
        self.assertEqual(CacheKey.objects.count(), 1)
        tpl.render(context)
        self.assertEqual(CacheKey.objects.count(), 1)

    def test_menu_cache_draft_only(self):
        # Tests that the cms uses a separate cache for draft & live
        public_page = self.get_page(1)
        draft_page = public_page.publisher_public
        edit_on_path = draft_page.get_absolute_url() + '?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')
        edit_off_path = public_page.get_absolute_url() + '?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF')
        superuser = self.get_superuser()
        # Prime the public menu cache
        with self.login_user_context(superuser):
            context = self.get_context(path=edit_off_path, page=public_page)
            context['request'].session['cms_edit'] = False
            Template("{% load menu_tags %}{% show_menu %}").render(context)
        # This should prime the draft menu cache
        with self.login_user_context(superuser):
            context = self.get_context(path=edit_on_path, page=draft_page)
            context['request'].session['cms_edit'] = True
            Template("{% load menu_tags %}{% show_menu %}").render(context)
        # All nodes should be draft nodes
        node_ids = [node.id for node in context['children']]
        page_count = Page.objects.drafts().filter(pk__in=node_ids).count()
        self.assertEqual(len(node_ids), page_count, msg='Not all pages in the draft menu are draft')

    def test_menu_cache_live_only(self):
        # Tests that the cms uses a separate cache for draft & live
        public_page = self.get_page(1)
        draft_page = public_page.publisher_public
        edit_on_path = draft_page.get_absolute_url() + '?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')
        edit_off_path = public_page.get_absolute_url() + '?preview&%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF')
        superuser = self.get_superuser()
        # Prime the draft menu cache
        with self.login_user_context(superuser):
            context = self.get_context(path=edit_on_path, page=draft_page)
            context['request'].session['cms_edit'] = True
            Template("{% load menu_tags %}{% show_menu %}").render(context)
        # This should prime the public menu cache
        with self.login_user_context(superuser):
            context = self.get_context(path=edit_off_path, page=public_page)
            context['request'].session['cms_edit'] = False
            context['request'].session['cms_preview'] = True
            Template("{% load menu_tags %}{% show_menu %}").render(context)
        # All nodes should be public nodes
        node_ids = [node.id for node in context['children']]
        page_count = Page.objects.public().filter(pk__in=node_ids).count()
        self.assertEqual(len(node_ids), page_count, msg='Not all pages in the public menu are public')

    def test_menu_cache_respects_database_keys(self):
        """Deleting CacheKey rows invalidates the menu even if the cache
        backend still holds the old entry."""
        public_page = self.get_page(1)
        # Prime the public menu cache
        context = self.get_context(path=public_page.get_absolute_url(), page=public_page)
        context['request'].session['cms_edit'] = False
        # Prime the cache
        with self.assertNumQueries(6):
            # The queries should be:
            #     get all public pages
            #     get all draft pages from public pages
            #     get all page permissions
            #     get all titles
            #     get the menu cache key
            #     set the menu cache key
            Template("{% load menu_tags %}{% show_menu %}").render(context)
        # One new CacheKey should have been created
        self.assertEqual(CacheKey.objects.count(), 1)
        # Because its cached, only one query is made to the db
        with self.assertNumQueries(1):
            # The queries should be:
            #     get the menu cache key
            Template("{% load menu_tags %}{% show_menu %}").render(context)
        # Delete the current cache key but don't touch the cache
        CacheKey.objects.all().delete()
        # The menu should be recalculated
        with self.assertNumQueries(6):
            # The queries should be:
            #     get all public pages
            #     get all draft pages from public pages
            #     get all page permissions
            #     get all titles
            #     get the menu cache key
            #     set the menu cache key
            Template("{% load menu_tags %}{% show_menu %}").render(context)

    def test_menu_keys_duplicate_clear(self):
        """
        Tests that the menu clears all keys, including duplicates.
        """
        CacheKey.objects.create(language="fr", site=1, key="a")
        CacheKey.objects.create(language="fr", site=1, key="a")
        self.assertEqual(CacheKey.objects.count(), 2)
        menu_pool.clear(site_id=1, language='fr')
        self.assertEqual(CacheKey.objects.count(), 0)

    def test_only_active_tree(self):
        """Inactive branches are collapsed; only the selected branch opens."""
        context = self.get_context(page=self.get_page(1))
        # test standard show_menu
        tpl = Template("{% load menu_tags %}{% show_menu 0 100 0 100 %}")
        tpl.render(context)
        nodes = context['children']
        self.assertEqual(len(nodes[1].children), 0)
        self.assertEqual(len(nodes[0].children), 1)
        self.assertEqual(len(nodes[0].children[0].children), 1)
        page_4 = self.get_page(4)
        context = self.get_context(path=page_4.get_absolute_url(), page=page_4)
        tpl = Template("{% load menu_tags %}{% show_menu 0 100 0 100 %}")
        tpl.render(context)
        nodes = context['children']
        self.assertEqual(len(nodes[1].children), 1)
        self.assertEqual(len(nodes[0].children), 0)

    def test_only_one_active_level(self):
        """inactive=0, active=1 opens exactly one level under the selection."""
        context = self.get_context(page=self.get_page(1))
        # test standard show_menu
        tpl = Template("{% load menu_tags %}{% show_menu 0 100 0 1 %}")
        tpl.render(context)
        nodes = context['children']
        self.assertEqual(len(nodes[1].children), 0)
        self.assertEqual(len(nodes[0].children), 1)
        self.assertEqual(len(nodes[0].children[0].children), 0)

    def test_only_level_zero(self):
        """from_level 0 to_level 0 renders roots only, no children."""
        context = self.get_context()
        # test standard show_menu
        tpl = Template("{% load menu_tags %}{% show_menu 0 0 0 0 %}")
        tpl.render(context)
        nodes = context['children']
        for node in nodes:
            self.assertEqual(len(node.children), 0)

    def test_only_level_one(self):
        """from_level 1 renders all depth-2 pages as a flat list."""
        context = self.get_context()
        # test standard show_menu
        tpl = Template("{% load menu_tags %}{% show_menu 1 1 100 100 %}")
        tpl.render(context)
        nodes = context['children']
        self.assertEqual(len(nodes), len(self.get_level(2)))
        for node in nodes:
            self.assertEqual(len(node.children), 0)

    def test_only_level_one_active(self):
        """from_level 1 with inactive=0 keeps only the active branch."""
        context = self.get_context(page=self.get_page(1))
        # test standard show_menu
        tpl = Template("{% load menu_tags %}{% show_menu 1 1 0 100 %}")
        tpl.render(context)
        nodes = context['children']
        self.assertEqual(len(nodes), 1)
        self.assertEqual(nodes[0].descendant, True)
        self.assertEqual(len(nodes[0].children), 0)

    def test_level_zero_and_one(self):
        """Levels 0-1 render roots plus exactly one child level."""
        context = self.get_context()
        # test standard show_menu
        tpl = Template("{% load menu_tags %}{% show_menu 0 1 100 100 %}")
        tpl.render(context)
        nodes = context['children']
        self.assertEqual(len(nodes), 2)
        for node in nodes:
            self.assertEqual(len(node.children), 1)

    def test_show_submenu(self):
        """show_sub_menu depth/root_level/nephew parameter combinations."""
        context = self.get_context(page=self.get_page(1))
        # test standard show_menu
        tpl = Template("{% load menu_tags %}{% show_sub_menu %}")
        tpl.render(context)
        nodes = context['children']
        self.assertEqual(nodes[0].descendant, True)
        self.assertEqual(len(nodes), 1)
        self.assertEqual(len(nodes[0].children), 1)
        tpl = Template("{% load menu_tags %}{% show_sub_menu 1 %}")
        tpl.render(context)
        nodes = context['children']
        self.assertEqual(len(nodes), 1)
        self.assertEqual(len(nodes[0].children), 0)
        page_3 = self.get_page(3)
        context = self.get_context(path=page_3.get_absolute_url(), page=page_3)
        tpl = Template("{% load menu_tags %}{% show_sub_menu 100 1 %}")
        tpl.render(context)
        nodes = context["children"]
        # P3 is the selected node
        self.assertFalse(nodes[0].selected)
        self.assertTrue(nodes[0].children[0].selected)
        # top level node should be P2
        self.assertEqual(nodes[0].get_absolute_url(), self.get_page(2).get_absolute_url())
        # should include P3 as well
        self.assertEqual(len(nodes[0].children), 1)
        page_2 = self.get_page(2)
        context = self.get_context(path=page_2.get_absolute_url(), page=page_2)
        tpl = Template("{% load menu_tags %}{% show_sub_menu 100 0 %}")
        tpl.render(context)
        nodes = context["children"]
        # P1 should be in the nav
        self.assertEqual(nodes[0].get_absolute_url(), self.get_page(1).get_absolute_url())
        # P2 is selected
        self.assertTrue(nodes[0].children[0].selected)

    def test_show_submenu_template_root_level_none(self):
        """root_level None starts the submenu at the selection's children."""
        root = self.get_page(1)
        context = self.get_context(path=root.get_absolute_url(), page=root)
        tpl = Template("{% load menu_tags %}{% show_sub_menu 100 None 1 %}")
        tpl.render(context)
        nodes = context["children"]
        # First node is P2 (P1 children) thus not selected
        self.assertFalse(nodes[0].selected)
        # nephew limit of 1, so only P2 is the nodes list
        self.assertEqual(len(nodes), 1)
        # P3 is a child of P2, but not in nodes list
        self.assertTrue(nodes[0].children)

    def test_show_breadcrumb(self):
        """Breadcrumb length follows the page depth and the start level."""
        page_3 = self.get_page(3)
        context = self.get_context(path=self.get_page(3).get_absolute_url(), page=page_3)
        tpl = Template("{% load menu_tags %}{% show_breadcrumb %}")
        tpl.render(context)
        nodes = context['ancestors']
        self.assertEqual(len(nodes), 3)
        tpl = Template("{% load menu_tags %}{% show_breadcrumb 1 %}")
        tpl.render(context)
        nodes = context['ancestors']
        self.assertEqual(len(nodes), 2)
        context = self.get_context()
        tpl = Template("{% load menu_tags %}{% show_breadcrumb %}")
        tpl.render(context)
        nodes = context['ancestors']
        self.assertEqual(len(nodes), 1)
        tpl = Template("{% load menu_tags %}{% show_breadcrumb 1 %}")
        tpl.render(context)
        nodes = context['ancestors']
        self.assertEqual(len(nodes), 0)
        # The home page stays in the breadcrumb even when out of navigation.
        page1 = self.get_page(1)
        page1.in_navigation = False
        page1.save()
        page2 = self.get_page(2)
        context = self.get_context(path=page2.get_absolute_url(), page=page2)
        tpl = Template("{% load menu_tags %}{% show_breadcrumb %}")
        tpl.render(context)
        nodes = context['ancestors']
        self.assertEqual(len(nodes), 2)
        self.assertEqual(nodes[0].get_absolute_url(), self.get_pages_root())
        self.assertEqual(isinstance(nodes[0], NavigationNode), True)
        self.assertEqual(nodes[1].get_absolute_url(), page2.get_absolute_url())

    def test_language_chooser(self):
        # test simple language chooser with default args
        lang_settings = copy.deepcopy(get_cms_setting('LANGUAGES'))
        # Hide one language from the public chooser.
        lang_settings[1][0]['public'] = False
        with self.settings(CMS_LANGUAGES=lang_settings):
            context = self.get_context(path=self.get_page(3).get_absolute_url())
            tpl = Template("{% load menu_tags %}{% language_chooser %}")
            tpl.render(context)
            self.assertEqual(len(context['languages']), 3)
            # try a different template and some different args
            tpl = Template("{% load menu_tags %}{% language_chooser 'menu/test_language_chooser.html' %}")
            tpl.render(context)
            self.assertEqual(context['template'], 'menu/test_language_chooser.html')
            tpl = Template("{% load menu_tags %}{% language_chooser 'short' 'menu/test_language_chooser.html' %}")
            tpl.render(context)
            self.assertEqual(context['template'], 'menu/test_language_chooser.html')
            for lang in context['languages']:
                self.assertEqual(*lang)

    def test_page_language_url(self):
        """page_language_url echoes the current path for the same language."""
        path = self.get_page(3).get_absolute_url()
        context = self.get_context(path=path)
        tpl = Template("{%% load menu_tags %%}{%% page_language_url '%s' %%}" % 'en')
        url = tpl.render(context)
        self.assertEqual(url, "%s" % path)

    def test_show_menu_below_id(self):
        """show_menu_below_id anchors the menu at a page's reverse_id, even
        when that page is itself out of navigation."""
        page2 = self.get_page(2)
        page2.reverse_id = "hello"
        page2.save()
        page2 = self.reload(page2)
        self.assertEqual(page2.reverse_id, "hello")
        page5 = self.get_page(5)
        context = self.get_context(path=page5.get_absolute_url())
        tpl = Template("{% load menu_tags %}{% show_menu_below_id 'hello' %}")
        tpl.render(context)
        nodes = context['children']
        self.assertEqual(len(nodes), 1)
        page3_url = self.get_page(3).get_absolute_url()
        self.assertEqual(nodes[0].get_absolute_url(), page3_url)
        page2.in_navigation = False
        page2.save()
        context = self.get_context(path=page5.get_absolute_url())
        tpl = Template("{% load menu_tags %}{% show_menu_below_id 'hello' %}")
        tpl.render(context)
        nodes = context['children']
        self.assertEqual(len(nodes), 1)
        self.assertEqual(nodes[0].get_absolute_url(), page3_url)

    def test_unpublished(self):
        """Pages whose titles are unpublished drop out of the menu."""
        page2 = self.get_page(2)
        page2.title_set.update(published=False)
        context = self.get_context()
        tpl = Template("{% load menu_tags %}{% show_menu %}")
        tpl.render(context)
        nodes = context['children']
        self.assertEqual(len(nodes), 2)
        self.assertEqual(len(nodes[0].children), 0)

    def test_home_not_in_menu(self):
        """Children of out-of-navigation roots are promoted to the top."""
        page1 = self.get_page(1)
        page1.in_navigation = False
        page1.save()
        page4 = self.get_page(4)
        page4.in_navigation = False
        page4.save()
        context = self.get_context()
        tpl = Template("{% load menu_tags %}{% show_menu 0 100 100 100 %}")
        tpl.render(context)
        nodes = context['children']
        self.assertEqual(len(nodes), 1)
        self.assertEqual(nodes[0].get_absolute_url(), self.get_page(2).get_absolute_url())
        self.assertEqual(nodes[0].children[0].get_absolute_url(), self.get_page(3).get_absolute_url())
        page4 = self.get_page(4)
        page4.in_navigation = True
        page4.save()
        menu_pool.clear(settings.SITE_ID)
        context = self.get_context()
        tpl = Template("{% load menu_tags %}{% show_menu 0 100 100 100 %}")
        tpl.render(context)
        nodes = context['children']
        self.assertEqual(len(nodes), 2)

    def test_show_submenu_from_non_menu_page(self):
        """
        Here's the structure bit we're interested in:

        + P6 (not in menu)
          + P7
          + P8

        When we render P6, there should be a menu entry for P7 and P8 if the
        tag parameters are "1 XXX XXX XXX"
        """
        page6 = self.get_page(6)
        context = self.get_context(page6.get_absolute_url(), page=page6)
        tpl = Template("{% load menu_tags %}{% show_menu 1 100 0 1 %}")
        tpl.render(context)
        nodes = context['children']
        number_of_p6_children = len(page6.children.filter(in_navigation=True))
        self.assertEqual(len(nodes), number_of_p6_children)
        page7 = self.get_page(7)
        context = self.get_context(page7.get_absolute_url(), page=page7)
        tpl = Template("{% load menu_tags %}{% show_menu 1 100 0 1 %}")
        tpl.render(context)
        nodes = context['children']
        self.assertEqual(len(nodes), number_of_p6_children)
        tpl = Template("{% load menu_tags %}{% show_menu 2 100 0 1 %}")
        tpl.render(context)
        nodes = context['children']
        number_of_p7_children = len(page7.children.filter(in_navigation=True))
        self.assertEqual(len(nodes), number_of_p7_children)

    def test_show_breadcrumb_invisible(self):
        """The only_visible flag controls whether out-of-navigation pages
        appear in the breadcrumb."""
        # Must use the drafts to find the parent when calling create_page
        parent = Page.objects.drafts().get(title_set__title='P3')
        invisible_page = create_page("invisible", "nav_playground.html", "en",
                                     parent=parent, published=True, in_navigation=False)
        context = self.get_context(
            path=invisible_page.get_absolute_url(),
            page=invisible_page.publisher_public,
        )
        tpl = Template("{% load menu_tags %}{% show_breadcrumb %}")
        tpl.render(context)
        nodes = context['ancestors']
        self.assertEqual(len(nodes), 3)
        tpl = Template("{% load menu_tags %}{% show_breadcrumb 0 'menu/breadcrumb.html' 1 %}")
        tpl.render(context)
        nodes = context['ancestors']
        self.assertEqual(len(nodes), 3)
        tpl = Template("{% load menu_tags %}{% show_breadcrumb 0 'menu/breadcrumb.html' 0 %}")
        tpl.render(context)
        nodes = context['ancestors']
        self.assertEqual(len(nodes), 4)
class MenuTests(BaseMenuTest):
    """Unit tests for the tree-building helper and the menus.utils helpers."""

    def test_build_nodes_inner_for_worst_case_menu(self):
        """
        Worst case scenario: the nodes arrive deepest-first, so every
        node's parent shows up *after* it in the input list:

            node5
              node4
                node3
                  node2
                    node1
        """
        deepest = NavigationNode('Test1', '/test1/', 1, 2)
        depth_3 = NavigationNode('Test2', '/test2/', 2, 3)
        depth_2 = NavigationNode('Test3', '/test3/', 3, 4)
        depth_1 = NavigationNode('Test4', '/test4/', 4, 5)
        top = NavigationNode('Test5', '/test5/', 5, None)
        ordered = [deepest, depth_3, depth_2, depth_1, top]
        result = _build_nodes_inner_for_one_menu(ordered, 'Test')
        # Nothing is dropped: every parent id resolves eventually.
        self.assertEqual(len(result), len(ordered))
        # Parent links are knitted together even though parents came later.
        for child, parent in zip(ordered, ordered[1:] + [None]):
            self.assertEqual(child.parent, parent)
        # Child lists mirror the parent links: each node holds exactly the
        # node below it, the deepest holds nothing.
        self.assertEqual(deepest.children, [])
        for parent, child in zip(ordered[1:], ordered):
            self.assertEqual(parent.children, [child])

    def test_build_nodes_inner_for_circular_menu(self):
        """
        TODO:
        Handling this properly needs a circular-dependency detection
        system in the builder. Go nuts implementing it :)
        """
        pass

    def test_build_nodes_inner_for_broken_menu(self):
        """
        A broken tree: one node points at a parent id that never appears,
        so it and its whole subtree must be discarded.

            node5
              node4
                node3
            <non-existant>
              node2
                node1
        """
        stranded_child = NavigationNode('Test1', '/test1/', 1, 2)
        orphan = NavigationNode('Test2', '/test2/', 2, 12)
        depth_2 = NavigationNode('Test3', '/test3/', 3, 4)
        depth_1 = NavigationNode('Test4', '/test4/', 4, 5)
        top = NavigationNode('Test5', '/test5/', 5, None)
        result = _build_nodes_inner_for_one_menu(
            [stranded_child, orphan, depth_2, depth_1, top], 'Test')
        # The orphan and its descendant are dropped; the valid chain stays.
        self.assertEqual(len(result), 3)
        self.assertFalse(stranded_child in result)
        self.assertFalse(orphan in result)
        # Dropped nodes keep no links at all.
        self.assertEqual(stranded_child.parent, None)
        self.assertEqual(orphan.parent, None)
        self.assertEqual(stranded_child.children, [])
        self.assertEqual(orphan.children, [])
        # The surviving chain is fully linked.
        self.assertEqual(depth_2.parent, depth_1)
        self.assertEqual(depth_1.parent, top)
        self.assertEqual(top.parent, None)
        self.assertEqual(depth_2.children, [])
        self.assertEqual(depth_1.children, [depth_2])
        self.assertEqual(top.children, [depth_1])

    def test_utils_mark_descendants(self):
        """mark_descendants flags every node below the given roots."""
        tree, flat = self._get_nodes()
        mark_descendants(tree)
        for node in flat:
            self.assertTrue(node.descendant, node)

    def test_utils_find_selected(self):
        """find_selected returns the selected node, or None if absent."""
        tree, flat = self._get_nodes()
        self.assertEqual(find_selected(tree), flat[0])
        self.assertEqual(find_selected([]), None)

    def test_utils_cut_levels(self):
        """cut_levels(tree, 1) yields exactly the depth-1 nodes."""
        tree, flat = self._get_nodes()
        self.assertEqual(cut_levels(tree, 1), [flat[1]])

    def test_empty_menu(self):
        """With no pages at all, show_menu renders zero children."""
        context = self.get_context()
        Template("{% load menu_tags %}{% show_menu 0 100 100 100 %}").render(context)
        self.assertEqual(len(context['children']), 0)
@override_settings(CMS_PERMISSION=False)
class AdvancedSoftrootTests(SoftrootFixture, CMSTestCase):
"""
Tree in fixture (as taken from issue 662):
top
root
aaa
111
ccc
ddd
222
bbb
333
444
In the fixture, all pages are "in_navigation", "published" and
NOT-"soft_root".
What is a soft root?
If a page is a soft root, it becomes the root page in the menu if
we are currently on or under that page.
If we are above that page, the children of this page are not shown.
"""
    def tearDown(self):
        # Remove every fixture page so softroot state can't leak between tests.
        Page.objects.all().delete()
def get_page(self, name):
return Page.objects.public().get(title_set__slug=name)
def assertTreeQuality(self, a, b, *attrs):
"""
Checks that the node-lists a and b are the same for attrs.
This is recursive over the tree
"""
msg = '%r != %r with %r, %r' % (len(a), len(b), a, b)
self.assertEqual(len(a), len(b), msg)
for n1, n2 in zip(a, b):
for attr in attrs:
a1 = getattr(n1, attr)
a2 = getattr(n2, attr)
msg = '%r != %r with %r, %r (%s)' % (a1, a2, n1, n2, attr)
self.assertEqual(a1, a2, msg)
self.assertTreeQuality(n1.children, n2.children)
def test_top_not_in_nav(self):
"""
top: not in navigation
tag: show_menu 0 100 0 100
context shared: current page is aaa
context 1: root is NOT a softroot
context 2: root IS a softroot
expected result: the two node-trees should be equal
"""
top = self.get_page('top')
top.in_navigation = False
top.save()
aaa = self.get_page('aaa')
# root is NOT a soft root
context = self.get_context(aaa.get_absolute_url())
tpl = Template("{% load menu_tags %}{% show_menu 0 100 0 100 %}")
tpl.render(context)
hard_root = context['children']
# root IS a soft root
root = self.get_page('root')
root.soft_root = True
root.save()
aaa = self.get_page('aaa')
context = self.get_context(aaa.get_absolute_url())
tpl = Template("{% load menu_tags %}{% show_menu 0 100 0 100 %}")
tpl.render(context)
soft_root = context['children']
# assert the two trees are equal in terms of 'level' and 'title'
self.assertTreeQuality(hard_root, soft_root, 'level', 'title')
def test_top_in_nav(self):
"""
top: in navigation
tag: show_menu 0 100 0 100
context shared: current page is aaa
context 1: root is NOT a softroot
context 2: root IS a softroot
expected result 1:
0:top
1:root
2:aaa
3:111
4:ccc
5:ddd
3:222
2:bbb
expected result 2:
0:root
1:aaa
2:111
3:ccc
4:ddd
2:222
1:bbb
"""
aaa = self.get_page('aaa')
# root is NOT a soft root
context = self.get_context(aaa.get_absolute_url(), page=aaa)
tpl = Template("{% load menu_tags %}{% show_menu 0 100 0 100 %}")
tpl.render(context)
hard_root = context['children']
mock_tree = [
AttributeObject(title='top', level=0, children=[
AttributeObject(title='root', level=1, children=[
AttributeObject(title='aaa', level=2, children=[
AttributeObject(title='111', level=3, children=[
AttributeObject(title='ccc', level=4, children=[
AttributeObject(title='ddd', level=5, children=[])
])
]),
AttributeObject(title='222', level=3, children=[])
]),
AttributeObject(title='bbb', level=2, children=[])
])
])
]
self.assertTreeQuality(hard_root, mock_tree)
# root IS a soft root
root = self.get_page('root')
root.soft_root = True
root.save()
aaa = self.get_page('aaa')
context = self.get_context(aaa.get_absolute_url(), page=aaa)
tpl = Template("{% load menu_tags %}{% show_menu 0 100 0 100 %}")
tpl.render(context)
soft_root = context['children']
mock_tree = [
AttributeObject(title='root', level=0, children=[
AttributeObject(title='aaa', level=1, children=[
AttributeObject(title='111', level=2, children=[
AttributeObject(title='ccc', level=3, children=[
AttributeObject(title='ddd', level=4, children=[])
])
]),
AttributeObject(title='222', level=2, children=[])
]),
AttributeObject(title='bbb', level=1, children=[])
])
]
self.assertTreeQuality(soft_root, mock_tree, 'title', 'level')
class ShowSubMenuCheck(SubMenusFixture, BaseMenuTest):
    """
    Tree from fixture:

        + P1
        | + P2
        | + P3
        + P4
        | + P5
        + P6
          + P7 (not in menu)
          + P8
    """

    def _render_sub_menu(self, context):
        """Render ``{% show_sub_menu %}`` into *context* and return the nodes."""
        tpl = Template("{% load menu_tags %}{% show_sub_menu %}")
        tpl.render(context)
        return context['children']

    def test_show_submenu(self):
        page = self.get_page(6)
        subpage = self.get_page(8)
        context = self.get_context(page.get_absolute_url(), page=page)
        # P7 is excluded from the menu, so the submenu of P6 is exactly [P8].
        nodes = self._render_sub_menu(context)
        self.assertEqual(len(nodes), 1)
        self.assertEqual(nodes[0].id, subpage.pk)

    def test_show_submenu_num_queries(self):
        page = self.get_page(6)
        subpage = self.get_page(8)
        context = self.get_context(page.get_absolute_url(), page=page)
        # The queries should be:
        #   * get all public pages
        #   * get all draft pages for public pages
        #   * get all page permissions
        #   * get all titles
        #   * get the menu cache key
        #   * set the menu cache key
        with self.assertNumQueries(6):
            nodes = self._render_sub_menu(context)
        self.assertEqual(len(nodes), 1)
        self.assertEqual(nodes[0].id, subpage.pk)
class ShowMenuBelowIdTests(BaseMenuTest):
    """
    Test for issue 521

    Build the following tree:

        A
        |-B
          |-C
          \-D (not in nav)
    """

    # Full-depth show_menu rendering.
    MENU_TPL = "{% load menu_tags %}{% show_menu 0 100 100 100 %}"
    # Same depth, but rooted below the page with reverse_id 'a'.
    BELOW_A_TPL = "{% load menu_tags %}{% show_menu_below_id 'a' 0 100 100 100 %}"

    def _create_abc_tree(self):
        """Create A -> B -> C (all in nav) plus D under B (not in nav).

        Returns the tuple ``(a, b, c)``; D is intentionally not returned as no
        test asserts on it directly.
        """
        a = create_page('A', 'nav_playground.html', 'en', published=True,
                        in_navigation=True, reverse_id='a')
        b = create_page('B', 'nav_playground.html', 'en', parent=a,
                        published=True, in_navigation=True)
        c = create_page('C', 'nav_playground.html', 'en', parent=b,
                        published=True, in_navigation=True)
        create_page('D', 'nav_playground.html', 'en', parent=self.reload(b),
                    published=True, in_navigation=False)
        return a, b, c

    def _render(self, context, template):
        """Render *template* into *context* and return the menu nodes."""
        tpl = Template(template)
        tpl.render(context)
        return context['children']

    def _assert_b_c_chain(self, nodes, b, c):
        """Assert *nodes* is exactly [B] with the single child C and no deeper levels."""
        self.assertEqual(len(nodes), 1)
        b_node = nodes[0]
        self.assertEqual(b_node.id, b.publisher_public.pk)
        self.assertEqual(len(b_node.children), 1)
        c_node = b_node.children[0]
        self.assertEqual(c_node.id, c.publisher_public.pk)
        self.assertEqual(len(c_node.children), 0)

    def test_not_in_navigation(self):
        a, b, c = self._create_abc_tree()
        context = self.get_context(a.get_absolute_url())
        nodes = self._render(context, self.BELOW_A_TPL)
        # D is out of navigation, so below 'a' only the B -> C chain shows up.
        self._assert_b_c_chain(nodes, b, c)

    def test_menu_beyond_soft_root(self):
        """
        Test for issue 4107

        Build the following tree:

            A
            |-B (soft_root)
              |-C
        """
        stdkwargs = {
            'template': 'nav_playground.html',
            'language': 'en',
            'published': True,
            'in_navigation': True,
        }
        a = create_page('A', reverse_id='a', **stdkwargs)
        b = create_page('B', parent=a, soft_root=True, **stdkwargs)
        c = create_page('C', parent=b, **stdkwargs)

        # Plain show_menu from A: whole tree visible, starting at A.
        context = self.get_context(a.get_absolute_url(), page=a.publisher_public)
        nodes = self._render(context, self.MENU_TPL)
        self.assertEqual(len(nodes), 1)
        a_node = nodes[0]
        self.assertEqual(a_node.id, a.publisher_public.pk)
        self._assert_b_c_chain(a_node.children, b, c)

        # Plain show_menu from B or C: B is the soft root, menus start at B.
        for current in (b, c):
            context = self.get_context(current.get_absolute_url(),
                                       page=current.publisher_public)
            nodes = self._render(context, self.MENU_TPL)
            self._assert_b_c_chain(nodes, b, c)

        # show_menu_below_id 'a' from A, B or C always starts below A, i.e. at B.
        for current in (a, b, c):
            context = self.get_context(current.get_absolute_url(),
                                       page=current.publisher_public)
            nodes = self._render(context, self.BELOW_A_TPL)
            self._assert_b_c_chain(nodes, b, c)

    def test_not_in_navigation_num_queries(self):
        """
        Test for issue 521

        Same tree as test_not_in_navigation, rendered under a query budget.
        """
        a, b, c = self._create_abc_tree()
        with LanguageOverride('en'):
            context = self.get_context(a.get_absolute_url())
            # The queries should be:
            #   * get all public pages
            #   * get all draft pages for public pages
            #   * get all page permissions
            #   * get all titles
            #   * get the menu cache key
            #   * set the menu cache key
            with self.assertNumQueries(6):
                nodes = self._render(context, self.BELOW_A_TPL)
        self._assert_b_c_chain(nodes, b, c)

    def test_menu_in_soft_root(self):
        """
        Test for issue 3504

        Build the following tree:

            A
            |-B
            C (soft_root)
        """
        a = create_page('A', 'nav_playground.html', 'en', published=True,
                        in_navigation=True, reverse_id='a')
        b = create_page('B', 'nav_playground.html', 'en', parent=a,
                        published=True, in_navigation=True)
        c = create_page('C', 'nav_playground.html', 'en', published=True,
                        in_navigation=True, soft_root=True)
        below_a_short = "{% load menu_tags %}{% show_menu_below_id 'a' %}"
        # From A, and from the unrelated soft root C, the tag renders the
        # children of 'a': exactly [B].
        for url in (a.get_absolute_url(), c.get_absolute_url()):
            context = self.get_context(url)
            nodes = self._render(context, below_a_short)
            self.assertEqual(len(nodes), 1)
            self.assertEqual(nodes[0].id, b.publisher_public.id)
@override_settings(
    CMS_PERMISSION=True,
    CMS_PUBLIC_FOR='staff',
)
class ViewPermissionMenuTests(CMSTestCase):
    """Query-count and visibility tests for ``get_visible_pages``."""

    def setUp(self):
        self.page = create_page('page', 'nav_playground.html', 'en')
        self.pages = [self.page]
        self.user = self.get_standard_user()

    def get_request(self, user=None):
        """Build a minimal fake request carrying *user* (anonymous by default)."""
        attrs = {
            'user': user or AnonymousUser(),
            'REQUEST': {},
            'POST': {},
            'GET': {},
            'session': {},
        }
        return type('Request', (object,), attrs)

    def _assert_visible(self, request, expected, num_queries, site=None):
        """Run get_visible_pages under an exact query budget and check its result.

        *site* is forwarded only when given, preserving the original calls'
        two- vs three-argument form.
        """
        with self.assertNumQueries(num_queries):
            if site is None:
                result = get_visible_pages(request, self.pages)
            else:
                result = get_visible_pages(request, self.pages, site)
        self.assertEqual(result, expected)

    def test_public_for_all_staff(self):
        request = self.get_request(self.user)
        request.user.is_staff = True
        # Queries: user permissions, content type, GlobalPagePermission,
        # PagePermission count.
        self._assert_visible(request, [self.page.pk], 4)

    @override_settings(CMS_PUBLIC_FOR='all')
    def test_public_for_all(self):
        request = self.get_request(self.user)
        # Queries: user permissions, content type, GlobalPagePermission,
        # PagePermission for affected pages.
        self._assert_visible(request, [self.page.pk], 4)

    @override_settings(CMS_PUBLIC_FOR='all')
    def test_unauthed(self):
        request = self.get_request()
        # Single query: PagePermission for affected pages.
        self._assert_visible(request, [self.page.pk], 1)

    def test_authed_basic_perm(self):
        self.user.user_permissions.add(Permission.objects.get(codename='view_page'))
        request = self.get_request(self.user)
        # Queries: user permissions, content type.
        self._assert_visible(request, [self.page.pk], 2, site=self.page.site)

    def test_authed_no_access(self):
        request = self.get_request(self.user)
        # Queries: view-permission calculation, GlobalPagePermission for user,
        # user permissions, content type.
        self._assert_visible(request, [], 4, site=self.page.site)

    def test_unauthed_no_access(self):
        request = self.get_request()
        # Anonymous users without access must not hit the database at all.
        self._assert_visible(request, [], 0)

    def test_page_permissions(self):
        request = self.get_request(self.user)
        PagePermission.objects.create(can_view=True, user=self.user, page=self.page)
        # Queries: PagePermission for affected pages, user permissions,
        # content type, GlobalPagePermission for user.
        self._assert_visible(request, [self.page.pk], 4)

    def test_page_permissions_view_groups(self):
        group = Group.objects.create(name='testgroup')
        self.user.groups.add(group)
        request = self.get_request(self.user)
        PagePermission.objects.create(can_view=True, group=group, page=self.page)
        # Queries: PagePermission for affected pages, user permissions,
        # content type, GlobalPagePermission for user, group via PagePermission.
        self._assert_visible(request, [self.page.pk], 5)

    def test_global_permission(self):
        GlobalPagePermission.objects.create(can_view=True, user=self.user)
        request = self.get_request(self.user)
        group = Group.objects.create(name='testgroup')
        PagePermission.objects.create(can_view=True, group=group, page=self.page)
        # Queries: user permissions, content type, GlobalPagePermission for user.
        self._assert_visible(request, [self.page.pk], 3)
@override_settings(
    CMS_PERMISSION=True,
    CMS_PUBLIC_FOR='all',
)
class PublicViewPermissionMenuTests(CMSTestCase):
    """Per-branch view-permission visibility for draft and public querysets."""

    def setUp(self):
        """
        Create this published hierarchy:
                 A
             B1      B2
            C1 C2   C3 C4
        """
        template = 'nav_playground.html'
        kw = dict(published=True, in_navigation=True)
        a = create_page('a', template, 'en', **kw)
        b1 = create_page('b1', template, 'en', parent=a, **kw)
        b2 = create_page('b2', template, 'en', parent=a, **kw)
        c1 = create_page('c1', template, 'en', parent=b1, **kw)
        c2 = create_page('c2', template, 'en', parent=b1, **kw)
        c3 = create_page('c3', template, 'en', parent=b2, **kw)
        c4 = create_page('c4', template, 'en', parent=b2, **kw)
        self.pages = [a, b1, c1, c2, b2, c3, c4]  # tree order
        self.site = a.site

        self.user = self._create_user("standard", is_staff=False, is_superuser=False)
        self.other = self._create_user("other", is_staff=False, is_superuser=False)
        # self.user may view the b1 subtree, self.other the b2 subtree.
        PagePermission.objects.create(page=b1, user=self.user, can_view=True,
                                      grant_on=ACCESS_PAGE_AND_DESCENDANTS)
        PagePermission.objects.create(page=b2, user=self.other, can_view=True,
                                      grant_on=ACCESS_PAGE_AND_DESCENDANTS)

        attrs = {
            'user': self.user,
            'REQUEST': {},
            'POST': {},
            'GET': {},
            'session': {},
        }
        self.request = type('Request', (object,), attrs)

    def _visible_titles(self, pages):
        """Return the titles of the pages *self.user* may view among *pages*."""
        visible_ids = get_visible_pages(self.request, pages, self.site)
        qs = Page.objects.filter(id__in=visible_ids)
        return list(qs.values_list('title_set__title', flat=True))

    def test_draft_list_access(self):
        self.assertEqual(self._visible_titles(self.pages),
                         ['a', 'b1', 'c1', 'c2'])

    def test_draft_qs_access(self):
        self.assertEqual(self._visible_titles(Page.objects.drafts()),
                         ['a', 'b1', 'c1', 'c2'])

    def test_public_qs_access(self):
        self.assertEqual(self._visible_titles(Page.objects.public()),
                         ['a', 'b1', 'c1', 'c2'])
@override_settings(CMS_PERMISSION=False)
class SoftrootTests(CMSTestCase):
    """
    Ask evildmp/superdmp if you don't understand softroots!

    Softroot description from the docs:

        A soft root is a page that acts as the root for a menu navigation tree.

        Typically, this will be a page that is the root of a significant new
        section on your site.

        When the soft root feature is enabled, the navigation menu for any page
        will start at the nearest soft root, rather than at the real root of
        the site's page hierarchy.

        This feature is useful when your site has deep page hierarchies (and
        therefore multiple levels in its navigation trees). In such a case, you
        usually don't want to present site visitors with deep menus of nested
        items.

        By making a mid-level page (e.g. "Department of Mediaeval Surgery") a
        soft root, the menu shown on and below that page starts there instead
        of at the site root, keeping the rendered navigation manageable.
    """

    MENU_TPL = "{% load menu_tags %}{% show_menu 0 100 100 100 %}"

    def _build_tree(self):
        """Create the shared fixture tree and return its pages.

        Tree::

            |- Home
            |  |- Projects (SOFTROOT)
            |  |  |- django CMS
            |  |  |- django Shop
            |  |- People

        Returns ``(home, projects, djangocms, djangoshop, people)``.
        """
        stdkwargs = {
            'template': 'nav_playground.html',
            'language': 'en',
            'published': True,
            'in_navigation': True,
        }
        home = create_page("Home", **stdkwargs)
        projects = create_page("Projects", parent=home, soft_root=True, **stdkwargs)
        djangocms = create_page("django CMS", parent=projects, **stdkwargs)
        djangoshop = create_page("django Shop", parent=projects, **stdkwargs)
        people = create_page("People", parent=home, **stdkwargs)
        return home, projects, djangocms, djangoshop, people

    def _render_menu(self, url, page=None):
        """Render ``show_menu 0 100 100 100`` for *url* and return the nodes."""
        if page is None:
            context = self.get_context(url)
        else:
            context = self.get_context(url, page=page)
        tpl = Template(self.MENU_TPL)
        tpl.render(context)
        return context['children']

    def _assert_projects_subtree(self, projectsnode, projects, djangocms, djangoshop):
        """Assert *projectsnode* is Projects with exactly its two leaf children."""
        self.assertEqual(projectsnode.id, projects.publisher_public.pk)
        self.assertEqual(len(projectsnode.children), 2)
        cmsnode, shopnode = projectsnode.children
        self.assertEqual(cmsnode.id, djangocms.publisher_public.pk)
        self.assertEqual(shopnode.id, djangoshop.publisher_public.pk)
        self.assertEqual(len(cmsnode.children), 0)
        self.assertEqual(len(shopnode.children), 0)

    def _assert_full_tree(self, nodes, home, projects, djangocms, djangoshop, people):
        """Assert the rendered menu shows the whole tree starting at Home."""
        self.assertEqual(len(nodes), 1)
        homenode = nodes[0]
        self.assertEqual(homenode.id, home.publisher_public.pk)
        self.assertEqual(len(homenode.children), 2)
        projectsnode, peoplenode = homenode.children
        self.assertEqual(peoplenode.id, people.publisher_public.pk)
        self._assert_projects_subtree(projectsnode, projects, djangocms, djangoshop)
        self.assertEqual(len(peoplenode.children), 0)

    def test_basic_home(self):
        """On Home the full tree is shown: soft roots only matter on/below them."""
        home, projects, djangocms, djangoshop, people = self._build_tree()
        nodes = self._render_menu(home.get_absolute_url())
        self._assert_full_tree(nodes, home, projects, djangocms, djangoshop, people)

    def test_basic_projects(self):
        """On the soft root itself the menu starts at Projects."""
        home, projects, djangocms, djangoshop, people = self._build_tree()
        nodes = self._render_menu(projects.get_absolute_url(),
                                  page=projects.publisher_public)
        self.assertEqual(len(nodes), 1)
        self._assert_projects_subtree(nodes[0], projects, djangocms, djangoshop)

    def test_basic_djangocms(self):
        """Below the soft root the menu still starts at Projects."""
        home, projects, djangocms, djangoshop, people = self._build_tree()
        nodes = self._render_menu(djangocms.get_absolute_url(),
                                  page=djangocms.publisher_public)
        self.assertEqual(len(nodes), 1)
        self._assert_projects_subtree(nodes[0], projects, djangocms, djangoshop)

    def test_basic_people(self):
        """Outside the soft-rooted branch the full tree is shown."""
        home, projects, djangocms, djangoshop, people = self._build_tree()
        # NOTE: as in the original test, the menu is rendered from Home's URL.
        nodes = self._render_menu(home.get_absolute_url())
        self._assert_full_tree(nodes, home, projects, djangocms, djangoshop, people)
| 38.940186
| 136
| 0.596868
|
4a0091d10166c41e2747dfc3ba30fb1bc1cbb921
| 924
|
py
|
Python
|
exporters/station_collision_plot.py
|
jmgraeffe/ieee802-11-simplified-mac-simulator
|
dbc2a57785715b35bb30603532486be1aee53578
|
[
"MIT"
] | null | null | null |
exporters/station_collision_plot.py
|
jmgraeffe/ieee802-11-simplified-mac-simulator
|
dbc2a57785715b35bb30603532486be1aee53578
|
[
"MIT"
] | null | null | null |
exporters/station_collision_plot.py
|
jmgraeffe/ieee802-11-simplified-mac-simulator
|
dbc2a57785715b35bb30603532486be1aee53578
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
from simulation import Scheme
def export(simulations, file_path, marker_styles=None):
    """Plot AP collision counts against station count, one line per scheme.

    ``simulations`` maps scheme -> {num_stations: simulation}; each simulation
    exposes ``collisions_ap``.  The figure is saved to ``file_path``.  When
    ``marker_styles`` is given it maps scheme -> plt.plot keyword arguments,
    otherwise a default 'o-' marker is used.
    """
    xticks = []
    for scheme, by_num_stations in simulations.items():
        station_counts = []
        collision_counts = []
        for num_stations, simulation in by_num_stations.items():
            # Only label every tenth station count on the x axis.
            if num_stations % 10 == 0 and num_stations not in xticks:
                xticks.append(num_stations)
            station_counts.append(num_stations)
            collision_counts.append(simulation.collisions_ap)
        label = Scheme.to_human_name(scheme)
        if marker_styles is None:
            plt.plot(station_counts, collision_counts, 'o-', label=label)
        else:
            plt.plot(station_counts, collision_counts, label=label,
                     **marker_styles[scheme])
    plt.grid()
    plt.xlabel('Number of Stations')
    plt.ylabel('Number of Collisions on AP')
    plt.xticks(xticks)
    plt.legend(fancybox=True, framealpha=1.0)
    plt.savefig(file_path, bbox_inches='tight')
    plt.clf()
| 28.875
| 89
| 0.637446
|
4a0092bf25205aad5674e633032a0151d5547d86
| 1,144
|
py
|
Python
|
airflow/contrib/hooks/gcp_dataproc_hook.py
|
suensummit/airflow
|
37a342d0e96a91ce2d34085e225a4e86f54c4e21
|
[
"Apache-2.0"
] | 1
|
2017-06-25T14:18:15.000Z
|
2017-06-25T14:18:15.000Z
|
airflow/contrib/hooks/gcp_dataproc_hook.py
|
suensummit/airflow
|
37a342d0e96a91ce2d34085e225a4e86f54c4e21
|
[
"Apache-2.0"
] | 3
|
2020-07-07T20:39:24.000Z
|
2021-09-29T17:34:46.000Z
|
airflow/contrib/hooks/gcp_dataproc_hook.py
|
suensummit/airflow
|
37a342d0e96a91ce2d34085e225a4e86f54c4e21
|
[
"Apache-2.0"
] | 1
|
2020-11-04T03:17:51.000Z
|
2020-11-04T03:17:51.000Z
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module is deprecated. Please use `airflow.gcp.hooks.dataproc`."""
import warnings
# pylint: disable=unused-import
from airflow.gcp.hooks.dataproc import DataProcHook, DataprocJobStatus # noqa
warnings.warn(
"This module is deprecated. Please use `airflow.gcp.hooks.dataproc`.",
DeprecationWarning, stacklevel=2
)
| 38.133333
| 78
| 0.761364
|
4a00935846e95f7714991357829f0a5fd81b518f
| 1,566
|
py
|
Python
|
samples/interactive-tutorials/noxfile_config.py
|
tetiana-karasova/python-retail
|
b834c1fb16212e59241267e18d38b490e962af7f
|
[
"Apache-2.0"
] | 1
|
2022-02-11T14:00:31.000Z
|
2022-02-11T14:00:31.000Z
|
samples/interactive-tutorials/noxfile_config.py
|
tetiana-karasova/python-retail
|
b834c1fb16212e59241267e18d38b490e962af7f
|
[
"Apache-2.0"
] | null | null | null |
samples/interactive-tutorials/noxfile_config.py
|
tetiana-karasova/python-retail
|
b834c1fb16212e59241267e18d38b490e962af7f
|
[
"Apache-2.0"
] | 2
|
2022-01-28T09:53:16.000Z
|
2022-02-07T14:27:38.000Z
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Default TEST_CONFIG_OVERRIDE for python repos.

# The source of truth:
# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/noxfile_config.py

TEST_CONFIG_OVERRIDE = {
    # You can opt out from the test for specific Python versions.
    "ignored_versions": ["2.7", "3.6"],
    # An envvar key for determining the project id to use. Change it
    # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
    # build specific Cloud project. You can also use your own string
    # to use your own Cloud project.
    "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
    # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
    # A dictionary you want to inject into your test. Don't put any
    # secrets here. These values will override predefined values.
    "envs": {
        # Non-production (sandbox) endpoint used by these interactive tutorials.
        "DATA_LABELING_API_ENDPOINT": "us-central1-autopush-aiplatform.sandbox.googleapis.com",
        "PYTEST_ADDOPTS": "-n=auto",  # Run tests parallel using all available CPUs
    },
}
| 43.5
| 95
| 0.736909
|
4a0093664ac965433cb57ba116f6e25f3759be51
| 359
|
py
|
Python
|
silence/logging/default_logger.py
|
miguel-bermudo/Silence
|
6879e1d87c426f6cc994058c70df30b043060db1
|
[
"MIT"
] | 1
|
2021-09-23T15:05:10.000Z
|
2021-09-23T15:05:10.000Z
|
silence/logging/default_logger.py
|
miguel-bermudo/Silence
|
6879e1d87c426f6cc994058c70df30b043060db1
|
[
"MIT"
] | null | null | null |
silence/logging/default_logger.py
|
miguel-bermudo/Silence
|
6879e1d87c426f6cc994058c70df30b043060db1
|
[
"MIT"
] | 1
|
2021-09-21T08:23:14.000Z
|
2021-09-21T08:23:14.000Z
|
import logging
from silence.settings import settings
from silence.logging.default_formatter import DefaultFormatter
# Shared "silence" logger configured once at import time.
logger = logging.getLogger("silence")

# Verbosity tracks the framework setting: DEBUG when enabled, INFO otherwise.
log_lvl = logging.DEBUG if settings.DEBUG_ENABLED else logging.INFO
logger.setLevel(log_lvl)

# Single stream handler using the project's custom formatter.
ch = logging.StreamHandler()
ch.setLevel(log_lvl)
ch.setFormatter(DefaultFormatter())
logger.addHandler(ch)
| 23.933333
| 67
| 0.827298
|
4a009397ded2a61514804c63c188b53db3ba0405
| 2,672
|
py
|
Python
|
.history/run_update_20220328075842.py
|
miguel-fresh/geoip-translation
|
ccf9dbc0330e597704e57d8b2967fc9be16017ed
|
[
"Info-ZIP"
] | null | null | null |
.history/run_update_20220328075842.py
|
miguel-fresh/geoip-translation
|
ccf9dbc0330e597704e57d8b2967fc9be16017ed
|
[
"Info-ZIP"
] | null | null | null |
.history/run_update_20220328075842.py
|
miguel-fresh/geoip-translation
|
ccf9dbc0330e597704e57d8b2967fc9be16017ed
|
[
"Info-ZIP"
] | null | null | null |
from asyncio.subprocess import STDOUT
from fileinput import filename
from genericpath import exists
import subprocess
from pathlib import Path
from os import remove, rename, path
import yaml
def removeFileIfExists(file_path):
    """Delete *file_path*; do nothing when it does not exist.

    Uses try/remove (EAFP) instead of an exists() pre-check so the call is
    safe even if the file disappears between check and delete (TOCTOU race).
    """
    try:
        remove(file_path)
    except FileNotFoundError:
        pass
# LEGACY
# Fixed name the upstream download always produces; may be renamed to
# ZIP_NAME after downloading.
ZIP_LEGACY_NAME = 'GeoLite2-City-CSV.zip'

# Default values
ONSTART_DOWNLOAD = False
ONSTART_CONVERT = False
CURRENT_DIR = Path(__file__).parent.resolve()
CONFIG_FILENAME = 'config.yml'
CONFIG_ABSPATH = CURRENT_DIR.joinpath(CONFIG_FILENAME)
ZIP_NAME = 'GeoLite2-City-CSV.zip'
DAT_NAME = 'GeoLiteCity.dat'
DOWNLOAD_DIRNAME = './data'
OUTPUT_DIRNAME = './output'
LICENSE_KEY = ''
DB_EDITION = ''

# Get config from config.yml file
# NOTE(review): the bare `except:` below swallows *every* error (malformed
# YAML, missing keys, even KeyboardInterrupt), not only a missing config
# file — consider narrowing to (OSError, KeyError, yaml.YAMLError).
try:
    with open(CONFIG_ABSPATH) as cfg_file:
        documents = yaml.full_load(cfg_file)
        paths = documents['paths']
        names = documents['names']
        on_start = documents['on_start']
        max_mind = documents['max_mind']
        OUTPUT_DIRNAME = paths['output']
        DOWNLOAD_DIRNAME = paths['data']
        ZIP_NAME = names['zip']
        DAT_NAME = names['dat']
        ONSTART_DOWNLOAD = on_start['download_zip']
        ONSTART_CONVERT = on_start['convert_to_dat']
        LICENSE_KEY = max_mind['license-key']
        DB_EDITION = max_mind['edition']
except:
    print('No config.yml file found, using default values...')

# Setting paths
DOWNLOAD_ABSPATH = CURRENT_DIR.joinpath(DOWNLOAD_DIRNAME)
OUTPUT_ABSPATH = CURRENT_DIR.joinpath(OUTPUT_DIRNAME)
# NOTE(review): the zip path is built from the legacy name on purpose — the
# downloader writes ZIP_LEGACY_NAME; the optional rename happens below.
ZIP_ABSPATH = DOWNLOAD_ABSPATH.joinpath(ZIP_LEGACY_NAME)
DAT_ABSPATH = OUTPUT_ABSPATH.joinpath(DAT_NAME)

if ONSTART_DOWNLOAD:
    removeFileIfExists(ZIP_ABSPATH)
    print(f'Downloading {ZIP_LEGACY_NAME}...')
    # Download .zip
    # NOTE(review): the CompletedProcess result is never inspected; check
    # .returncode to detect a failed download.
    download_output = subprocess.run(['php', 'download.php',
                                      '--license-key', LICENSE_KEY,
                                      '--output-path', DOWNLOAD_ABSPATH,
                                      '--edition', DB_EDITION],
                                     cwd=CURRENT_DIR.joinpath('./geoip2-update'),
                                     stderr=STDOUT)
    # Rename zip if necessary
    if (ZIP_LEGACY_NAME != ZIP_NAME):
        rename(ZIP_ABSPATH, DOWNLOAD_ABSPATH.joinpath(ZIP_NAME))

# Convert format
if ONSTART_CONVERT:
    # python geolite2legacy.py -i GeoLite2-City-CSV.zip -o GeoLiteCity.dat -f geoname2fips.csv
    downloaded_zip_asbpath = CURRENT_DIR.joinpath(ZIP_LEGACY_NAME)
    print(downloaded_zip_asbpath)
    # NOTE(review): result unused here as well — see the download step above.
    update_output = subprocess.run(['python', 'geolite2legacy.py',
                                    '-i', ZIP_ABSPATH,
                                    '-o', DAT_ABSPATH,
                                    '-f', 'geoname2fips.csv'],
                                   cwd='./geolite2legacy')
| 28.425532
| 94
| 0.674401
|
4a0093a0665c09400b4a820ff3008f199972611d
| 51
|
py
|
Python
|
flagging_site/blueprints/__init__.py
|
cameronreaves/flagging
|
412fae782ac38f971a1715aeb257a8ab10a9ad3a
|
[
"MIT"
] | null | null | null |
flagging_site/blueprints/__init__.py
|
cameronreaves/flagging
|
412fae782ac38f971a1715aeb257a8ab10a9ad3a
|
[
"MIT"
] | null | null | null |
flagging_site/blueprints/__init__.py
|
cameronreaves/flagging
|
412fae782ac38f971a1715aeb257a8ab10a9ad3a
|
[
"MIT"
] | null | null | null |
from . import cyanobacteria
from . import flagging
| 17
| 27
| 0.803922
|
4a0095f42e6b35702c48d7725098cb5f95c7a98c
| 64,365
|
py
|
Python
|
src/attr/_make.py
|
gappleto97/attrs
|
1e8e4bc3ced1080b6591aa3fb2a39a464409d499
|
[
"MIT"
] | null | null | null |
src/attr/_make.py
|
gappleto97/attrs
|
1e8e4bc3ced1080b6591aa3fb2a39a464409d499
|
[
"MIT"
] | null | null | null |
src/attr/_make.py
|
gappleto97/attrs
|
1e8e4bc3ced1080b6591aa3fb2a39a464409d499
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import, division, print_function
import copy
import hashlib
import linecache
import sys
import threading
import warnings
from operator import itemgetter
from . import _config
from ._compat import (
PY2,
isclass,
iteritems,
metadata_proxy,
ordered_dict,
set_closure_cell,
)
from .exceptions import (
DefaultAlreadySetError,
FrozenInstanceError,
NotAnAttrsClassError,
PythonTooOldError,
UnannotatedAttributeError,
)
# This is used at least twice, so cache it here.
_obj_setattr = object.__setattr__
_init_converter_pat = "__attr_converter_{}"
_init_factory_pat = "__attr_factory_{}"
_tuple_property_pat = (
" {attr_name} = _attrs_property(_attrs_itemgetter({index}))"
)
_classvar_prefixes = ("typing.ClassVar", "t.ClassVar", "ClassVar")
# we don't use a double-underscore prefix because that triggers
# name mangling when trying to create a slot for the field
# (when slots=True)
_hash_cache_field = "_attrs_cached_hash"
_empty_metadata_singleton = metadata_proxy({})
class _Nothing(object):
    """
    Sentinel type marking "no value supplied" in places where ``None`` could
    itself be a legitimate value.

    Instances are all mutually equal, hash to a fixed constant, and survive
    (deep)copying unchanged, so the module-level singleton stays effectively
    unique.
    """

    def __repr__(self):
        return "NOTHING"

    def __eq__(self, other):
        # Any two _Nothing objects are interchangeable; anything else is not.
        return other.__class__ == _Nothing

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        # Arbitrary fixed value, consistent with __eq__ treating all
        # instances as equal.
        return 0xc0ffee

    def __copy__(self):
        # Copying a sentinel hands back the very same object.
        return self

    def __deepcopy__(self, _memo):
        return self
# Module-level singleton; the only _Nothing instance client code should see.
NOTHING = _Nothing()
"""
Sentinel to indicate the lack of a value when ``None`` is ambiguous.
"""
def attrib(
    default=NOTHING,
    validator=None,
    repr=True,
    cmp=True,
    hash=None,
    init=True,
    convert=None,
    metadata=None,
    type=None,
    converter=None,
    factory=None,
    kw_only=False,
):
    """
    Create a new attribute on a class.
    .. warning::
        Does *not* do anything unless the class is also decorated with
        :func:`attr.s`!
    :param default: A value that is used if an ``attrs``-generated ``__init__``
        is used and no value is passed while instantiating or the attribute is
        excluded using ``init=False``.
        If the value is an instance of :class:`Factory`, its callable will be
        used to construct a new value (useful for mutable data types like lists
        or dicts).
        If a default is not set (or set manually to ``attr.NOTHING``), a value
        *must* be supplied when instantiating; otherwise a :exc:`TypeError`
        will be raised.
        The default can also be set using decorator notation as shown below.
    :type default: Any value.
    :param callable factory: Syntactic sugar for
        ``default=attr.Factory(callable)``.
    :param validator: :func:`callable` that is called by ``attrs``-generated
        ``__init__`` methods after the instance has been initialized. They
        receive the initialized instance, the :class:`Attribute`, and the
        passed value.
        The return value is *not* inspected so the validator has to throw an
        exception itself.
        If a ``list`` is passed, its items are treated as validators and must
        all pass.
        Validators can be globally disabled and re-enabled using
        :func:`get_run_validators`.
        The validator can also be set using decorator notation as shown below.
    :type validator: ``callable`` or a ``list`` of ``callable``\\ s.
    :param bool repr: Include this attribute in the generated ``__repr__``
        method.
    :param bool cmp: Include this attribute in the generated comparison methods
        (``__eq__`` et al).
    :param hash: Include this attribute in the generated ``__hash__``
        method. If ``None`` (default), mirror *cmp*'s value. This is the
        correct behavior according the Python spec. Setting this value to
        anything else than ``None`` is *discouraged*.
    :type hash: ``bool`` or ``None``
    :param bool init: Include this attribute in the generated ``__init__``
        method. It is possible to set this to ``False`` and set a default
        value. In that case this attributed is unconditionally initialized
        with the specified default value or factory.
    :param callable converter: :func:`callable` that is called by
        ``attrs``-generated ``__init__`` methods to converter attribute's value
        to the desired format. It is given the passed-in value, and the
        returned value will be used as the new value of the attribute. The
        value is converted before being passed to the validator, if any.
    :param metadata: An arbitrary mapping, to be used by third-party
        components. See :ref:`extending_metadata`.
    :param type: The type of the attribute. In Python 3.6 or greater, the
        preferred method to specify the type is using a variable annotation
        (see `PEP 526 <https://www.python.org/dev/peps/pep-0526/>`_).
        This argument is provided for backward compatibility.
        Regardless of the approach used, the type will be stored on
        ``Attribute.type``.
    :param kw_only: Make this attribute keyword-only (Python 3+)
        in the generated ``__init__`` (if ``init`` is ``False``, this
        parameter is ignored).
    .. versionadded:: 15.2.0 *convert*
    .. versionadded:: 16.3.0 *metadata*
    .. versionchanged:: 17.1.0 *validator* can be a ``list`` now.
    .. versionchanged:: 17.1.0
        *hash* is ``None`` and therefore mirrors *cmp* by default.
    .. versionadded:: 17.3.0 *type*
    .. deprecated:: 17.4.0 *convert*
    .. versionadded:: 17.4.0 *converter* as a replacement for the deprecated
        *convert* to achieve consistency with other noun-based arguments.
    .. versionadded:: 18.1.0
        ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``.
    .. versionadded:: 18.2.0 *kw_only*
    """
    # *hash* must be exactly True, False, or None; an `in` test would let
    # 0/1 slip through because 1 == True.
    if hash is not None and hash is not True and hash is not False:
        raise TypeError(
            "Invalid value for hash. Must be True, False, or None."
        )
    # *convert* is the deprecated spelling of *converter*: accept it, warn,
    # and translate it over.
    if convert is not None:
        if converter is not None:
            raise RuntimeError(
                "Can't pass both `convert` and `converter`. "
                "Please use `converter` only."
            )
        warnings.warn(
            "The `convert` argument is deprecated in favor of `converter`. "
            "It will be removed after 2019/01.",
            DeprecationWarning,
            stacklevel=2,
        )
        converter = convert
    # factory=f is syntactic sugar for default=Factory(f) and therefore
    # mutually exclusive with an explicit default.
    if factory is not None:
        if default is not NOTHING:
            raise ValueError(
                "The `default` and `factory` arguments are mutually "
                "exclusive."
            )
        if not callable(factory):
            raise ValueError("The `factory` argument must be a callable.")
        default = Factory(factory)
    if metadata is None:
        metadata = {}
    return _CountingAttr(
        default=default,
        validator=validator,
        repr=repr,
        cmp=cmp,
        hash=hash,
        init=init,
        converter=converter,
        metadata=metadata,
        type=type,
        kw_only=kw_only,
    )
def _make_attr_tuple_class(cls_name, attr_names):
    """
    Create a tuple subclass to hold `Attribute`s for an `attrs` class.
    The subclass is a bare tuple with properties for names.
    class MyClassAttributes(tuple):
        __slots__ = ()
        x = property(itemgetter(0))
    """
    attr_class_name = "{}Attributes".format(cls_name)
    # Build the class source line by line, one property per attribute name.
    attr_class_template = [
        "class {}(tuple):".format(attr_class_name),
        "    __slots__ = ()",
    ]
    if attr_names:
        for i, attr_name in enumerate(attr_names):
            attr_class_template.append(
                _tuple_property_pat.format(index=i, attr_name=attr_name)
            )
    else:
        # A class body may not be empty.
        attr_class_template.append("    pass")
    # Execute the generated source; the helpers are injected under private
    # names so they cannot collide with attribute names.
    globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property}
    eval(compile("\n".join(attr_class_template), "", "exec"), globs)
    return globs[attr_class_name]
# Tuple class for extracted attributes from a class definition.
# `super_attrs` is a subset of `attrs`.
# _transform_attrs returns an instance of this triple.
_Attributes = _make_attr_tuple_class(
    "_Attributes",
    [
        # all attributes to build dunder methods for
        "attrs",
        # attributes that have been inherited
        "super_attrs",
        # map inherited attributes to their originating classes
        "super_attrs_map",
    ],
)
def _is_class_var(annot):
    """
    Determine whether the annotation *annot* denotes ``typing.ClassVar``.

    The test operates on ``str(annot)`` so plain string annotations never
    get evaluated; evaluating them would put attrs-based classes at a
    performance disadvantage compared to ordinary classes.
    """
    rendered = str(annot)
    return rendered.startswith(_classvar_prefixes)
def _get_annotations(cls):
    """
    Return the annotation dict that *cls* itself defines.

    An empty dict is returned when *cls* carries no annotations or when the
    ``__annotations__`` it exposes is merely inherited from a base class
    (attribute lookup falls back to the MRO, so every ancestor must be
    checked by identity).
    """
    own = getattr(cls, "__annotations__", None)
    if own is None:
        return {}
    # Identity -- not equality -- against each ancestor's annotations tells
    # us whether the dict really lives on *cls* or was inherited.
    inherited = any(
        own is getattr(ancestor, "__annotations__", None)
        for ancestor in cls.__mro__[1:]
    )
    return {} if inherited else own
def _counter_getter(e):
    """
    Sort key extracting ``counter`` from a ``(name, counting_attr)`` pair.

    Defined at module level so sorting does not re-create a lambda for
    every class that gets processed.
    """
    _name, counting_attr = e
    return counting_attr.counter
def _transform_attrs(cls, these, auto_attribs, kw_only):
    """
    Transform all `_CountingAttr`s on a class into `Attribute`s.
    If *these* is passed, use that and don't look for them on the class.
    Return an `_Attributes`.
    Ordering: attributes from *these* keep their dict order (or are sorted
    by definition counter), auto_attribs follows annotation order, and the
    fallback sorts class-body `attr.ib()`s by definition counter.
    """
    cd = cls.__dict__
    anns = _get_annotations(cls)
    # Three mutually exclusive sources for the (name, _CountingAttr) list.
    if these is not None:
        ca_list = [(name, ca) for name, ca in iteritems(these)]
        if not isinstance(these, ordered_dict):
            ca_list.sort(key=_counter_getter)
    elif auto_attribs is True:
        ca_names = {
            name
            for name, attr in cd.items()
            if isinstance(attr, _CountingAttr)
        }
        ca_list = []
        annot_names = set()
        for attr_name, type in anns.items():
            # ClassVar annotations are deliberately not attributes.
            if _is_class_var(type):
                continue
            annot_names.add(attr_name)
            a = cd.get(attr_name, NOTHING)
            if not isinstance(a, _CountingAttr):
                if a is NOTHING:
                    a = attrib()
                else:
                    # A plain assigned value becomes the default.
                    a = attrib(default=a)
            ca_list.append((attr_name, a))
        # With auto_attribs every attr.ib() must carry an annotation.
        unannotated = ca_names - annot_names
        if len(unannotated) > 0:
            raise UnannotatedAttributeError(
                "The following `attr.ib`s lack a type annotation: "
                + ", ".join(
                    sorted(unannotated, key=lambda n: cd.get(n).counter)
                )
                + "."
            )
    else:
        ca_list = sorted(
            (
                (name, attr)
                for name, attr in cd.items()
                if isinstance(attr, _CountingAttr)
            ),
            key=lambda e: e[1].counter,
        )
    own_attrs = [
        Attribute.from_counting_attr(
            name=attr_name, ca=ca, type=anns.get(attr_name)
        )
        for attr_name, ca in ca_list
    ]
    super_attrs = []
    super_attr_map = {}  # A dictionary of superattrs to their classes.
    taken_attr_names = {a.name: a for a in own_attrs}
    # Traverse the MRO and collect attributes.
    for super_cls in cls.__mro__[1:-1]:
        sub_attrs = getattr(super_cls, "__attrs_attrs__", None)
        if sub_attrs is not None:
            for a in sub_attrs:
                prev_a = taken_attr_names.get(a.name)
                # Only add an attribute if it hasn't been defined before. This
                # allows for overwriting attribute definitions by subclassing.
                if prev_a is None:
                    super_attrs.append(a)
                    taken_attr_names[a.name] = a
                    super_attr_map[a.name] = super_cls
    attr_names = [a.name for a in super_attrs + own_attrs]
    AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names)
    if kw_only:
        own_attrs = [a._assoc(kw_only=True) for a in own_attrs]
        super_attrs = [a._assoc(kw_only=True) for a in super_attrs]
    attrs = AttrsClass(super_attrs + own_attrs)
    # Validate the resulting ordering: a mandatory (no-default, positional)
    # attribute may not follow one with a default, and positional attributes
    # may not follow keyword-only ones.
    had_default = False
    was_kw_only = False
    for a in attrs:
        if (
            was_kw_only is False
            and had_default is True
            and a.default is NOTHING
            and a.init is True
            and a.kw_only is False
        ):
            raise ValueError(
                "No mandatory attributes allowed after an attribute with a "
                "default value or factory. Attribute in question: %r" % (a,)
            )
        elif (
            had_default is False
            and a.default is not NOTHING
            and a.init is not False
            and
            # Keyword-only attributes without defaults can be specified
            # after keyword-only attributes with defaults.
            a.kw_only is False
        ):
            had_default = True
        if was_kw_only is True and a.kw_only is False:
            raise ValueError(
                "Non keyword-only attributes are not allowed after a "
                "keyword-only attribute. Attribute in question: {a!r}".format(
                    a=a
                )
            )
        if was_kw_only is False and a.init is True and a.kw_only is True:
            was_kw_only = True
    return _Attributes((attrs, super_attrs, super_attr_map))
def _frozen_setattrs(self, name, value):
    """
    Attached to frozen classes as __setattr__.
    Unconditionally raises, making instances immutable after __init__.
    """
    raise FrozenInstanceError()
def _frozen_delattrs(self, name):
    """
    Attached to frozen classes as __delattr__.
    Unconditionally raises, so attributes can never be deleted either.
    """
    raise FrozenInstanceError()
class _ClassBuilder(object):
    """
    Iteratively build *one* class.
    Generated dunder methods accumulate in ``_cls_dict``; ``build_class``
    finally either patches them onto the original class or creates a fresh
    slots class.
    """
    __slots__ = (
        "_cls",
        "_cls_dict",
        "_attrs",
        "_super_names",
        "_attr_names",
        "_slots",
        "_frozen",
        "_cache_hash",
        "_has_post_init",
        "_delete_attribs",
        "_super_attr_map",
    )
    def __init__(
        self, cls, these, slots, frozen, auto_attribs, kw_only, cache_hash
    ):
        attrs, super_attrs, super_map = _transform_attrs(
            cls, these, auto_attribs, kw_only
        )
        self._cls = cls
        # Slots classes are rebuilt from scratch, so start from a copy of
        # the original class dict; otherwise only the additions are kept.
        self._cls_dict = dict(cls.__dict__) if slots else {}
        self._attrs = attrs
        self._super_names = set(a.name for a in super_attrs)
        self._super_attr_map = super_map
        self._attr_names = tuple(a.name for a in attrs)
        self._slots = slots
        # A frozen ancestor makes this class frozen as well.
        self._frozen = frozen or _has_frozen_superclass(cls)
        self._cache_hash = cache_hash
        self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False))
        self._delete_attribs = not bool(these)
        self._cls_dict["__attrs_attrs__"] = self._attrs
        if frozen:
            self._cls_dict["__setattr__"] = _frozen_setattrs
            self._cls_dict["__delattr__"] = _frozen_delattrs
    def __repr__(self):
        return "<_ClassBuilder(cls={cls})>".format(cls=self._cls.__name__)
    def build_class(self):
        """
        Finalize class based on the accumulated configuration.
        Builder cannot be used anymore after calling this method.
        """
        if self._slots is True:
            return self._create_slots_class()
        else:
            return self._patch_original_class()
    def _patch_original_class(self):
        """
        Apply accumulated methods and return the class.
        """
        cls = self._cls
        super_names = self._super_names
        # Clean class of attribute definitions (`attr.ib()`s).
        if self._delete_attribs:
            for name in self._attr_names:
                if (
                    name not in super_names
                    and getattr(cls, name, None) is not None
                ):
                    delattr(cls, name)
        # Attach our dunder methods.
        for name, value in self._cls_dict.items():
            setattr(cls, name, value)
        return cls
    def _create_slots_class(self):
        """
        Build and return a new class with a `__slots__` attribute.
        """
        super_names = self._super_names
        # The attribute definitions themselves (and __dict__/__weakref__)
        # must not survive into the new class dict.
        cd = {
            k: v
            for k, v in iteritems(self._cls_dict)
            if k not in tuple(self._attr_names) + ("__dict__", "__weakref__")
        }
        # We only add the names of attributes that aren't inherited.
        # Settings __slots__ to inherited attributes wastes memory.
        slot_names = [
            name for name in self._attr_names if name not in super_names
        ]
        if self._cache_hash:
            slot_names.append(_hash_cache_field)
        cd["__slots__"] = tuple(slot_names)
        qualname = getattr(self._cls, "__qualname__", None)
        if qualname is not None:
            cd["__qualname__"] = qualname
        # __weakref__ is not writable.
        state_attr_names = tuple(
            an for an in self._attr_names if an != "__weakref__"
        )
        def slots_getstate(self):
            """
            Automatically created by attrs.
            """
            return tuple(getattr(self, name) for name in state_attr_names)
        def slots_setstate(self, state):
            """
            Automatically created by attrs.
            """
            __bound_setattr = _obj_setattr.__get__(self, Attribute)
            for name, value in zip(state_attr_names, state):
                __bound_setattr(name, value)
        # slots and frozen require __getstate__/__setstate__ to work
        cd["__getstate__"] = slots_getstate
        cd["__setstate__"] = slots_setstate
        # Create new class based on old class and our methods.
        cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd)
        # The following is a fix for
        # https://github.com/python-attrs/attrs/issues/102. On Python 3,
        # if a method mentions `__class__` or uses the no-arg super(), the
        # compiler will bake a reference to the class in the method itself
        # as `method.__closure__`. Since we replace the class with a
        # clone, we rewrite these references so it keeps working.
        for item in cls.__dict__.values():
            if isinstance(item, (classmethod, staticmethod)):
                # Class- and staticmethods hide their functions inside.
                # These might need to be rewritten as well.
                closure_cells = getattr(item.__func__, "__closure__", None)
            else:
                closure_cells = getattr(item, "__closure__", None)
            if not closure_cells:  # Catch None or the empty list.
                continue
            for cell in closure_cells:
                if cell.cell_contents is self._cls:
                    set_closure_cell(cell, cls)
        return cls
    def add_repr(self, ns):
        """
        Register a generated __repr__ (*ns* optionally prefixes the name).
        """
        self._cls_dict["__repr__"] = self._add_method_dunders(
            _make_repr(self._attrs, ns=ns)
        )
        return self
    def add_str(self):
        """
        Register a __str__ that simply aliases the generated __repr__.
        """
        repr = self._cls_dict.get("__repr__")
        if repr is None:
            raise ValueError(
                "__str__ can only be generated if a __repr__ exists."
            )
        def __str__(self):
            return self.__repr__()
        self._cls_dict["__str__"] = self._add_method_dunders(__str__)
        return self
    def make_unhashable(self):
        """
        Mark the class unhashable by setting __hash__ to None.
        """
        self._cls_dict["__hash__"] = None
        return self
    def add_hash(self):
        """
        Register a generated __hash__.
        """
        self._cls_dict["__hash__"] = self._add_method_dunders(
            _make_hash(
                self._attrs, frozen=self._frozen, cache_hash=self._cache_hash
            )
        )
        return self
    def add_init(self):
        """
        Register a generated __init__.
        """
        self._cls_dict["__init__"] = self._add_method_dunders(
            _make_init(
                self._attrs,
                self._has_post_init,
                self._frozen,
                self._slots,
                self._cache_hash,
                self._super_attr_map,
            )
        )
        return self
    def add_cmp(self):
        """
        Register the six generated rich-comparison methods.
        """
        cd = self._cls_dict
        cd["__eq__"], cd["__ne__"], cd["__lt__"], cd["__le__"], cd[
            "__gt__"
        ], cd["__ge__"] = (
            self._add_method_dunders(meth) for meth in _make_cmp(self._attrs)
        )
        return self
    def _add_method_dunders(self, method):
        """
        Add __module__ and __qualname__ to a *method* if possible.
        """
        try:
            method.__module__ = self._cls.__module__
        except AttributeError:
            pass
        try:
            method.__qualname__ = ".".join(
                (self._cls.__qualname__, method.__name__)
            )
        except AttributeError:
            pass
        return method
def attrs(
    maybe_cls=None,
    these=None,
    repr_ns=None,
    repr=True,
    cmp=True,
    hash=None,
    init=True,
    slots=False,
    frozen=False,
    str=False,
    auto_attribs=False,
    kw_only=False,
    cache_hash=False,
):
    r"""
    A class decorator that adds `dunder
    <https://wiki.python.org/moin/DunderAlias>`_\ -methods according to the
    specified attributes using :func:`attr.ib` or the *these* argument.
    :param these: A dictionary of name to :func:`attr.ib` mappings. This is
        useful to avoid the definition of your attributes within the class body
        because you can't (e.g. if you want to add ``__repr__`` methods to
        Django models) or don't want to.
        If *these* is not ``None``, ``attrs`` will *not* search the class body
        for attributes and will *not* remove any attributes from it.
        If *these* is an ordered dict (:class:`dict` on Python 3.6+,
        :class:`collections.OrderedDict` otherwise), the order is deduced from
        the order of the attributes inside *these*. Otherwise the order
        of the definition of the attributes is used.
    :type these: :class:`dict` of :class:`str` to :func:`attr.ib`
    :param str repr_ns: When using nested classes, there's no way in Python 2
        to automatically detect that. Therefore it's possible to set the
        namespace explicitly for a more meaningful ``repr`` output.
    :param bool repr: Create a ``__repr__`` method with a human readable
        representation of ``attrs`` attributes..
    :param bool str: Create a ``__str__`` method that is identical to
        ``__repr__``. This is usually not necessary except for
        :class:`Exception`\ s.
    :param bool cmp: Create ``__eq__``, ``__ne__``, ``__lt__``, ``__le__``,
        ``__gt__``, and ``__ge__`` methods that compare the class as if it were
        a tuple of its ``attrs`` attributes. But the attributes are *only*
        compared, if the types of both classes are *identical*!
    :param hash: If ``None`` (default), the ``__hash__`` method is generated
        according how *cmp* and *frozen* are set.
        1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you.
        2. If *cmp* is True and *frozen* is False, ``__hash__`` will be set to
           None, marking it unhashable (which it is).
        3. If *cmp* is False, ``__hash__`` will be left untouched meaning the
           ``__hash__`` method of the superclass will be used (if superclass is
           ``object``, this means it will fall back to id-based hashing.).
        Although not recommended, you can decide for yourself and force
        ``attrs`` to create one (e.g. if the class is immutable even though you
        didn't freeze it programmatically) by passing ``True`` or not. Both of
        these cases are rather special and should be used carefully.
        See the `Python documentation \
        <https://docs.python.org/3/reference/datamodel.html#object.__hash__>`_
        and the `GitHub issue that led to the default behavior \
        <https://github.com/python-attrs/attrs/issues/136>`_ for more details.
    :type hash: ``bool`` or ``None``
    :param bool init: Create a ``__init__`` method that initializes the
        ``attrs`` attributes. Leading underscores are stripped for the
        argument name. If a ``__attrs_post_init__`` method exists on the
        class, it will be called after the class is fully initialized.
    :param bool slots: Create a slots_-style class that's more
        memory-efficient. See :ref:`slots` for further ramifications.
    :param bool frozen: Make instances immutable after initialization. If
        someone attempts to modify a frozen instance,
        :exc:`attr.exceptions.FrozenInstanceError` is raised.
        Please note:
            1. This is achieved by installing a custom ``__setattr__`` method
               on your class so you can't implement an own one.
            2. True immutability is impossible in Python.
            3. This *does* have a minor a runtime performance :ref:`impact
               <how-frozen>` when initializing new instances. In other words:
               ``__init__`` is slightly slower with ``frozen=True``.
            4. If a class is frozen, you cannot modify ``self`` in
               ``__attrs_post_init__`` or a self-written ``__init__``. You can
               circumvent that limitation by using
               ``object.__setattr__(self, "attribute_name", value)``.
        .. _slots: https://docs.python.org/3/reference/datamodel.html#slots
    :param bool auto_attribs: If True, collect `PEP 526`_-annotated attributes
        (Python 3.6 and later only) from the class body.
        In this case, you **must** annotate every field. If ``attrs``
        encounters a field that is set to an :func:`attr.ib` but lacks a type
        annotation, an :exc:`attr.exceptions.UnannotatedAttributeError` is
        raised. Use ``field_name: typing.Any = attr.ib(...)`` if you don't
        want to set a type.
        If you assign a value to those attributes (e.g. ``x: int = 42``), that
        value becomes the default value like if it were passed using
        ``attr.ib(default=42)``. Passing an instance of :class:`Factory` also
        works as expected.
        Attributes annotated as :data:`typing.ClassVar` are **ignored**.
        .. _`PEP 526`: https://www.python.org/dev/peps/pep-0526/
    :param bool kw_only: Make all attributes keyword-only (Python 3+)
        in the generated ``__init__`` (if ``init`` is ``False``, this
        parameter is ignored).
    :param bool cache_hash: Ensure that the object's hash code is computed
        only once and stored on the object. If this is set to ``True``,
        hashing must be either explicitly or implicitly enabled for this
        class. If the hash code is cached, then no attributes of this
        class which participate in hash code computation may be mutated
        after object creation.
    .. versionadded:: 16.0.0 *slots*
    .. versionadded:: 16.1.0 *frozen*
    .. versionadded:: 16.3.0 *str*
    .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``.
    .. versionchanged:: 17.1.0
        *hash* supports ``None`` as value which is also the default now.
    .. versionadded:: 17.3.0 *auto_attribs*
    .. versionchanged:: 18.1.0
        If *these* is passed, no attributes are deleted from the class body.
    .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained.
    .. deprecated:: 18.2.0
        ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a
        :class:`DeprecationWarning` if the classes compared are subclasses of
        each other. ``__eq`` and ``__ne__`` never tried to compared subclasses
        to each other.
    .. versionadded:: 18.2.0 *kw_only*
    .. versionadded:: 18.2.0 *cache_hash*
    """
    def wrap(cls):
        if getattr(cls, "__class__", None) is None:
            raise TypeError("attrs only works with new-style classes.")
        builder = _ClassBuilder(
            cls, these, slots, frozen, auto_attribs, kw_only, cache_hash
        )
        if repr is True:
            builder.add_repr(repr_ns)
        if str is True:
            builder.add_str()
        if cmp is True:
            builder.add_cmp()
        # Decide what to do about __hash__ -- this implements the decision
        # table documented under the *hash* parameter above.
        if hash is not True and hash is not False and hash is not None:
            # Can't use `hash in` because 1 == True for example.
            raise TypeError(
                "Invalid value for hash. Must be True, False, or None."
            )
        elif hash is False or (hash is None and cmp is False):
            if cache_hash:
                raise TypeError(
                    "Invalid value for cache_hash. To use hash caching,"
                    " hashing must be either explicitly or implicitly "
                    "enabled."
                )
        elif hash is True or (hash is None and cmp is True and frozen is True):
            builder.add_hash()
        else:
            if cache_hash:
                raise TypeError(
                    "Invalid value for cache_hash. To use hash caching,"
                    " hashing must be either explicitly or implicitly "
                    "enabled."
                )
            builder.make_unhashable()
        if init is True:
            builder.add_init()
        else:
            if cache_hash:
                raise TypeError(
                    "Invalid value for cache_hash. To use hash caching,"
                    " init must be True."
                )
        return builder.build_class()
    # maybe_cls's type depends on the usage of the decorator. It's a class
    # if it's used as `@attrs` but ``None`` if used as `@attrs()`.
    if maybe_cls is None:
        return wrap
    else:
        return wrap(maybe_cls)
# Keep the decorator reachable under a private name because several
# functions below take a parameter named *attrs* that shadows the public one.
_attrs = attrs
"""
Internal alias so we can use it in functions that take an argument called
*attrs*.
"""
# On Python 2, cls.__setattr__ is an unbound-method wrapper, so a direct
# comparison against _frozen_setattrs never matches; compare the function's
# module and name instead. On Python 3 the plain equality check suffices.
if PY2:
    def _has_frozen_superclass(cls):
        """
        Check whether *cls* has a frozen ancestor by looking at its
        __setattr__.
        """
        return (
            getattr(cls.__setattr__, "__module__", None)
            == _frozen_setattrs.__module__
            and cls.__setattr__.__name__ == _frozen_setattrs.__name__
        )
else:
    def _has_frozen_superclass(cls):
        """
        Check whether *cls* has a frozen ancestor by looking at its
        __setattr__.
        """
        return cls.__setattr__ == _frozen_setattrs
def _attrs_to_tuple(obj, attrs):
    """
    Collect the values of *obj*'s attributes named by *attrs* into a tuple,
    preserving the order of *attrs*.
    """
    return tuple([getattr(obj, attribute.name) for attribute in attrs])
def _make_hash(attrs, frozen, cache_hash):
    """
    Build a __hash__ method hashing the given *attrs*.
    The method source is generated as text, compiled under a unique fake
    filename, and registered with linecache so debuggers can step into it.
    If *cache_hash* is set, the computed value is stored on the instance
    (via object.__setattr__ when *frozen*).
    """
    # Only attributes with hash=True (or hash=None mirroring cmp=True)
    # participate in the hash.
    attrs = tuple(
        a
        for a in attrs
        if a.hash is True or (a.hash is None and a.cmp is True)
    )
    tab = "        "
    # We cache the generated hash methods for the same kinds of attributes.
    sha1 = hashlib.sha1()
    sha1.update(repr(attrs).encode("utf-8"))
    unique_filename = "<attrs generated hash %s>" % (sha1.hexdigest(),)
    # Mixing a per-signature constant into the hash keeps different classes
    # with equal attribute values from colliding.
    type_hash = hash(unique_filename)
    method_lines = ["def __hash__(self):"]
    def append_hash_computation_lines(prefix, indent):
        """
        Generate the code for actually computing the hash code.
        Below this will either be returned directly or used to compute
        a value which is then cached, depending on the value of cache_hash
        """
        method_lines.extend(
            [indent + prefix + "hash((", indent + "        %d," % (type_hash,)]
        )
        for a in attrs:
            method_lines.append(indent + "        self.%s," % a.name)
        method_lines.append(indent + "    ))")
    if cache_hash:
        method_lines.append(tab + "if self.%s is None:" % _hash_cache_field)
        if frozen:
            append_hash_computation_lines(
                "object.__setattr__(self, '%s', " % _hash_cache_field, tab * 2
            )
            method_lines.append(tab * 2 + ")")  # close __setattr__
        else:
            append_hash_computation_lines(
                "self.%s = " % _hash_cache_field, tab * 2
            )
        method_lines.append(tab + "return self.%s" % _hash_cache_field)
    else:
        append_hash_computation_lines("return ", tab)
    script = "\n".join(method_lines)
    globs = {}
    locs = {}
    bytecode = compile(script, unique_filename, "exec")
    eval(bytecode, globs, locs)
    # In order of debuggers like PDB being able to step through the code,
    # we add a fake linecache entry.
    linecache.cache[unique_filename] = (
        len(script),
        None,
        script.splitlines(True),
        unique_filename,
    )
    return locs["__hash__"]
def _add_hash(cls, attrs):
    """
    Add a hash method to *cls*.
    Standalone helper that attaches the generated method directly, without
    going through _ClassBuilder.
    """
    cls.__hash__ = _make_hash(attrs, frozen=False, cache_hash=False)
    return cls
def __ne__(self, other):
    """
    Derive ``!=`` from ``__eq__``: forward ``NotImplemented`` untouched,
    otherwise return the negated equality result.
    """
    eq_result = self.__eq__(other)
    if eq_result is NotImplemented:
        return eq_result
    return not eq_result
# Message used by the generated ordering methods when instances of a
# subclass and its parent are compared; %s is filled with the method name.
# Fix: the word was previously misspelled "Comparision" in this
# user-visible DeprecationWarning.
WARNING_CMP_ISINSTANCE = (
    "Comparison of subclasses using __%s__ is deprecated and will be removed "
    "in 2019."
)
def _make_cmp(attrs):
    """
    Build the tuple (__eq__, __ne__, __lt__, __le__, __gt__, __ge__) for
    classes with the given *attrs*.
    __eq__ is generated as source text and compiled; the four ordering
    methods compare attribute tuples and emit a DeprecationWarning when a
    subclass is compared with its parent.
    """
    # Only attributes with cmp=True participate.
    attrs = [a for a in attrs if a.cmp]
    # We cache the generated eq methods for the same kinds of attributes.
    sha1 = hashlib.sha1()
    sha1.update(repr(attrs).encode("utf-8"))
    unique_filename = "<attrs generated eq %s>" % (sha1.hexdigest(),)
    lines = [
        "def __eq__(self, other):",
        "    if other.__class__ is not self.__class__:",
        "        return NotImplemented",
    ]
    # We can't just do a big self.x = other.x and... clause due to
    # irregularities like nan == nan is false but (nan,) == (nan,) is true.
    if attrs:
        lines.append("    return (")
        others = ["    ) == ("]
        for a in attrs:
            lines.append("        self.%s," % (a.name,))
            others.append("        other.%s," % (a.name,))
        lines += others + ["    )"]
    else:
        lines.append("    return True")
    script = "\n".join(lines)
    globs = {}
    locs = {}
    bytecode = compile(script, unique_filename, "exec")
    eval(bytecode, globs, locs)
    # In order of debuggers like PDB being able to step through the code,
    # we add a fake linecache entry.
    linecache.cache[unique_filename] = (
        len(script),
        None,
        script.splitlines(True),
        unique_filename,
    )
    eq = locs["__eq__"]
    ne = __ne__
    def attrs_to_tuple(obj):
        """
        Save us some typing.
        """
        return _attrs_to_tuple(obj, attrs)
    def __lt__(self, other):
        """
        Automatically created by attrs.
        """
        if isinstance(other, self.__class__):
            if other.__class__ is not self.__class__:
                warnings.warn(
                    WARNING_CMP_ISINSTANCE % ("lt",), DeprecationWarning
                )
            return attrs_to_tuple(self) < attrs_to_tuple(other)
        else:
            return NotImplemented
    def __le__(self, other):
        """
        Automatically created by attrs.
        """
        if isinstance(other, self.__class__):
            if other.__class__ is not self.__class__:
                warnings.warn(
                    WARNING_CMP_ISINSTANCE % ("le",), DeprecationWarning
                )
            return attrs_to_tuple(self) <= attrs_to_tuple(other)
        else:
            return NotImplemented
    def __gt__(self, other):
        """
        Automatically created by attrs.
        """
        if isinstance(other, self.__class__):
            if other.__class__ is not self.__class__:
                warnings.warn(
                    WARNING_CMP_ISINSTANCE % ("gt",), DeprecationWarning
                )
            return attrs_to_tuple(self) > attrs_to_tuple(other)
        else:
            return NotImplemented
    def __ge__(self, other):
        """
        Automatically created by attrs.
        """
        if isinstance(other, self.__class__):
            if other.__class__ is not self.__class__:
                warnings.warn(
                    WARNING_CMP_ISINSTANCE % ("ge",), DeprecationWarning
                )
            return attrs_to_tuple(self) >= attrs_to_tuple(other)
        else:
            return NotImplemented
    return eq, ne, __lt__, __le__, __gt__, __ge__
def _add_cmp(cls, attrs=None):
    """
    Add comparison methods to *cls*.
    When *attrs* is None, the attributes recorded on the class are used.
    """
    if attrs is None:
        attrs = cls.__attrs_attrs__
    cls.__eq__, cls.__ne__, cls.__lt__, cls.__le__, cls.__gt__, cls.__ge__ = _make_cmp(  # noqa
        attrs
    )
    return cls
# Thread-local re-entrancy guard for generated __repr__s: holds the ids of
# instances whose repr is currently being computed so cycles render as "...".
_already_repring = threading.local()
def _make_repr(attrs, ns):
    """
    Make a repr method for *attr_names* adding *ns* to the full name.
    The generated method guards against reference cycles via the
    thread-local _already_repring working set.
    """
    # Only attributes with repr=True participate.
    attr_names = tuple(a.name for a in attrs if a.repr)
    def __repr__(self):
        """
        Automatically created by attrs.
        """
        try:
            working_set = _already_repring.working_set
        except AttributeError:
            # First repr on this thread: lazily create the guard set.
            working_set = set()
            _already_repring.working_set = working_set
        if id(self) in working_set:
            return "..."
        real_cls = self.__class__
        if ns is None:
            qualname = getattr(real_cls, "__qualname__", None)
            if qualname is not None:
                class_name = qualname.rsplit(">.", 1)[-1]
            else:
                class_name = real_cls.__name__
        else:
            class_name = ns + "." + real_cls.__name__
        # Since 'self' remains on the stack (i.e.: strongly referenced) for the
        # duration of this call, it's safe to depend on id(...) stability, and
        # not need to track the instance and therefore worry about properties
        # like weakref- or hash-ability.
        working_set.add(id(self))
        try:
            result = [class_name, "("]
            first = True
            for name in attr_names:
                if first:
                    first = False
                else:
                    result.append(", ")
                result.extend((name, "=", repr(getattr(self, name, NOTHING))))
            return "".join(result) + ")"
        finally:
            working_set.remove(id(self))
    return __repr__
def _add_repr(cls, ns=None, attrs=None):
    """
    Add a repr method to *cls*.
    When *attrs* is None, the attributes recorded on the class are used.
    """
    if attrs is None:
        attrs = cls.__attrs_attrs__
    cls.__repr__ = _make_repr(attrs, ns)
    return cls
def _make_init(attrs, post_init, frozen, slots, cache_hash, super_attr_map):
    """
    Build a __init__ method for a class with the given *attrs*.
    The source is produced by _attrs_to_init_script, compiled under a
    unique fake filename, and registered with linecache so debuggers can
    step into the generated code.
    """
    # Attributes excluded from __init__ still need initialization if they
    # carry a default.
    attrs = [a for a in attrs if a.init or a.default is not NOTHING]
    # We cache the generated init methods for the same kinds of attributes.
    sha1 = hashlib.sha1()
    sha1.update(repr(attrs).encode("utf-8"))
    unique_filename = "<attrs generated init {0}>".format(sha1.hexdigest())
    script, globs, annotations = _attrs_to_init_script(
        attrs, frozen, slots, post_init, cache_hash, super_attr_map
    )
    locs = {}
    bytecode = compile(script, unique_filename, "exec")
    attr_dict = dict((a.name, a) for a in attrs)
    globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict})
    if frozen is True:
        # Save the lookup overhead in __init__ if we need to circumvent
        # immutability.
        globs["_cached_setattr"] = _obj_setattr
    eval(bytecode, globs, locs)
    # In order of debuggers like PDB being able to step through the code,
    # we add a fake linecache entry.
    linecache.cache[unique_filename] = (
        len(script),
        None,
        script.splitlines(True),
        unique_filename,
    )
    __init__ = locs["__init__"]
    __init__.__annotations__ = annotations
    return __init__
def _add_init(cls, frozen):
"""
Add a __init__ method to *cls*. If *frozen* is True, make it immutable.
"""
cls.__init__ = _make_init(
cls.__attrs_attrs__,
getattr(cls, "__attrs_post_init__", False),
frozen,
_is_slot_cls(cls),
cache_hash=False,
super_attr_map={},
)
return cls
def fields(cls):
"""
Return the tuple of ``attrs`` attributes for a class.
The tuple also allows accessing the fields by their names (see below for
examples).
:param type cls: Class to introspect.
:raise TypeError: If *cls* is not a class.
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
class.
:rtype: tuple (with name accessors) of :class:`attr.Attribute`
.. versionchanged:: 16.2.0 Returned tuple allows accessing the fields
by name.
"""
if not isclass(cls):
raise TypeError("Passed object must be a class.")
attrs = getattr(cls, "__attrs_attrs__", None)
if attrs is None:
raise NotAnAttrsClassError(
"{cls!r} is not an attrs-decorated class.".format(cls=cls)
)
return attrs
def fields_dict(cls):
"""
Return an ordered dictionary of ``attrs`` attributes for a class, whose
keys are the attribute names.
:param type cls: Class to introspect.
:raise TypeError: If *cls* is not a class.
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
class.
:rtype: an ordered dict where keys are attribute names and values are
:class:`attr.Attribute`\\ s. This will be a :class:`dict` if it's
naturally ordered like on Python 3.6+ or an
:class:`~collections.OrderedDict` otherwise.
.. versionadded:: 18.1.0
"""
if not isclass(cls):
raise TypeError("Passed object must be a class.")
attrs = getattr(cls, "__attrs_attrs__", None)
if attrs is None:
raise NotAnAttrsClassError(
"{cls!r} is not an attrs-decorated class.".format(cls=cls)
)
return ordered_dict(((a.name, a) for a in attrs))
def validate(inst):
"""
Validate all attributes on *inst* that have a validator.
Leaves all exceptions through.
:param inst: Instance of a class with ``attrs`` attributes.
"""
if _config._run_validators is False:
return
for a in fields(inst.__class__):
v = a.validator
if v is not None:
v(inst, a, getattr(inst, a.name))
def _is_slot_cls(cls):
return "__slots__" in cls.__dict__
def _is_slot_attr(a_name, super_attr_map):
"""
Check if the attribute name comes from a slot class.
"""
return a_name in super_attr_map and _is_slot_cls(super_attr_map[a_name])
def _attrs_to_init_script(
attrs, frozen, slots, post_init, cache_hash, super_attr_map
):
"""
Return a script of an initializer for *attrs* and a dict of globals.
The globals are expected by the generated script.
If *frozen* is True, we cannot set the attributes directly so we use
a cached ``object.__setattr__``.
"""
lines = []
any_slot_ancestors = any(
_is_slot_attr(a.name, super_attr_map) for a in attrs
)
if frozen is True:
if slots is True:
lines.append(
# Circumvent the __setattr__ descriptor to save one lookup per
# assignment.
# Note _setattr will be used again below if cache_hash is True
"_setattr = _cached_setattr.__get__(self, self.__class__)"
)
def fmt_setter(attr_name, value_var):
return "_setattr('%(attr_name)s', %(value_var)s)" % {
"attr_name": attr_name,
"value_var": value_var,
}
def fmt_setter_with_converter(attr_name, value_var):
conv_name = _init_converter_pat.format(attr_name)
return "_setattr('%(attr_name)s', %(conv)s(%(value_var)s))" % {
"attr_name": attr_name,
"value_var": value_var,
"conv": conv_name,
}
else:
# Dict frozen classes assign directly to __dict__.
# But only if the attribute doesn't come from an ancestor slot
# class.
# Note _inst_dict will be used again below if cache_hash is True
lines.append("_inst_dict = self.__dict__")
if any_slot_ancestors:
lines.append(
# Circumvent the __setattr__ descriptor to save one lookup
# per assignment.
"_setattr = _cached_setattr.__get__(self, self.__class__)"
)
def fmt_setter(attr_name, value_var):
if _is_slot_attr(attr_name, super_attr_map):
res = "_setattr('%(attr_name)s', %(value_var)s)" % {
"attr_name": attr_name,
"value_var": value_var,
}
else:
res = "_inst_dict['%(attr_name)s'] = %(value_var)s" % {
"attr_name": attr_name,
"value_var": value_var,
}
return res
def fmt_setter_with_converter(attr_name, value_var):
conv_name = _init_converter_pat.format(attr_name)
if _is_slot_attr(attr_name, super_attr_map):
tmpl = "_setattr('%(attr_name)s', %(c)s(%(value_var)s))"
else:
tmpl = "_inst_dict['%(attr_name)s'] = %(c)s(%(value_var)s)"
return tmpl % {
"attr_name": attr_name,
"value_var": value_var,
"c": conv_name,
}
else:
# Not frozen.
def fmt_setter(attr_name, value):
return "self.%(attr_name)s = %(value)s" % {
"attr_name": attr_name,
"value": value,
}
def fmt_setter_with_converter(attr_name, value_var):
conv_name = _init_converter_pat.format(attr_name)
return "self.%(attr_name)s = %(conv)s(%(value_var)s)" % {
"attr_name": attr_name,
"value_var": value_var,
"conv": conv_name,
}
args = []
kw_only_args = []
attrs_to_validate = []
# This is a dictionary of names to validator and converter callables.
# Injecting this into __init__ globals lets us avoid lookups.
names_for_globals = {}
annotations = {"return": None}
for a in attrs:
if a.validator:
attrs_to_validate.append(a)
attr_name = a.name
arg_name = a.name.lstrip("_")
has_factory = isinstance(a.default, Factory)
if has_factory and a.default.takes_self:
maybe_self = "self"
else:
maybe_self = ""
if a.init is False:
if has_factory:
init_factory_name = _init_factory_pat.format(a.name)
if a.converter is not None:
lines.append(
fmt_setter_with_converter(
attr_name,
init_factory_name + "({0})".format(maybe_self),
)
)
conv_name = _init_converter_pat.format(a.name)
names_for_globals[conv_name] = a.converter
else:
lines.append(
fmt_setter(
attr_name,
init_factory_name + "({0})".format(maybe_self),
)
)
names_for_globals[init_factory_name] = a.default.factory
else:
if a.converter is not None:
lines.append(
fmt_setter_with_converter(
attr_name,
"attr_dict['{attr_name}'].default".format(
attr_name=attr_name
),
)
)
conv_name = _init_converter_pat.format(a.name)
names_for_globals[conv_name] = a.converter
else:
lines.append(
fmt_setter(
attr_name,
"attr_dict['{attr_name}'].default".format(
attr_name=attr_name
),
)
)
elif a.default is not NOTHING and not has_factory:
arg = "{arg_name}=attr_dict['{attr_name}'].default".format(
arg_name=arg_name, attr_name=attr_name
)
if a.kw_only:
kw_only_args.append(arg)
else:
args.append(arg)
if a.converter is not None:
lines.append(fmt_setter_with_converter(attr_name, arg_name))
names_for_globals[
_init_converter_pat.format(a.name)
] = a.converter
else:
lines.append(fmt_setter(attr_name, arg_name))
elif has_factory:
arg = "{arg_name}=NOTHING".format(arg_name=arg_name)
if a.kw_only:
kw_only_args.append(arg)
else:
args.append(arg)
lines.append(
"if {arg_name} is not NOTHING:".format(arg_name=arg_name)
)
init_factory_name = _init_factory_pat.format(a.name)
if a.converter is not None:
lines.append(
" " + fmt_setter_with_converter(attr_name, arg_name)
)
lines.append("else:")
lines.append(
" "
+ fmt_setter_with_converter(
attr_name,
init_factory_name + "({0})".format(maybe_self),
)
)
names_for_globals[
_init_converter_pat.format(a.name)
] = a.converter
else:
lines.append(" " + fmt_setter(attr_name, arg_name))
lines.append("else:")
lines.append(
" "
+ fmt_setter(
attr_name,
init_factory_name + "({0})".format(maybe_self),
)
)
names_for_globals[init_factory_name] = a.default.factory
else:
if a.kw_only:
kw_only_args.append(arg_name)
else:
args.append(arg_name)
if a.converter is not None:
lines.append(fmt_setter_with_converter(attr_name, arg_name))
names_for_globals[
_init_converter_pat.format(a.name)
] = a.converter
else:
lines.append(fmt_setter(attr_name, arg_name))
if a.init is True and a.converter is None and a.type is not None:
annotations[arg_name] = a.type
if attrs_to_validate: # we can skip this if there are no validators.
names_for_globals["_config"] = _config
lines.append("if _config._run_validators is True:")
for a in attrs_to_validate:
val_name = "__attr_validator_{}".format(a.name)
attr_name = "__attr_{}".format(a.name)
lines.append(
" {}(self, {}, self.{})".format(val_name, attr_name, a.name)
)
names_for_globals[val_name] = a.validator
names_for_globals[attr_name] = a
if post_init:
lines.append("self.__attrs_post_init__()")
# because this is set only after __attrs_post_init is called, a crash
# will result if post-init tries to access the hash code. This seemed
# preferable to setting this beforehand, in which case alteration to
# field values during post-init combined with post-init accessing the
# hash code would result in silent bugs.
if cache_hash:
if frozen:
if slots:
# if frozen and slots, then _setattr defined above
init_hash_cache = "_setattr('%s', %s)"
else:
# if frozen and not slots, then _inst_dict defined above
init_hash_cache = "_inst_dict['%s'] = %s"
else:
init_hash_cache = "self.%s = %s"
lines.append(init_hash_cache % (_hash_cache_field, "None"))
args = ", ".join(args)
if kw_only_args:
if PY2:
raise PythonTooOldError(
"Keyword-only arguments only work on Python 3 and later."
)
args += "{leading_comma}*, {kw_only_args}".format(
leading_comma=", " if args else "",
kw_only_args=", ".join(kw_only_args),
)
return (
"""\
def __init__(self, {args}):
{lines}
""".format(
args=args, lines="\n ".join(lines) if lines else "pass"
),
names_for_globals,
annotations,
)
class Attribute(object):
"""
*Read-only* representation of an attribute.
:attribute name: The name of the attribute.
Plus *all* arguments of :func:`attr.ib`.
For the version history of the fields, see :func:`attr.ib`.
"""
__slots__ = (
"name",
"default",
"validator",
"repr",
"cmp",
"hash",
"init",
"metadata",
"type",
"converter",
"kw_only",
)
def __init__(
self,
name,
default,
validator,
repr,
cmp,
hash,
init,
convert=None,
metadata=None,
type=None,
converter=None,
kw_only=False,
):
# Cache this descriptor here to speed things up later.
bound_setattr = _obj_setattr.__get__(self, Attribute)
# Despite the big red warning, people *do* instantiate `Attribute`
# themselves.
if convert is not None:
if converter is not None:
raise RuntimeError(
"Can't pass both `convert` and `converter`. "
"Please use `converter` only."
)
warnings.warn(
"The `convert` argument is deprecated in favor of `converter`."
" It will be removed after 2019/01.",
DeprecationWarning,
stacklevel=2,
)
converter = convert
bound_setattr("name", name)
bound_setattr("default", default)
bound_setattr("validator", validator)
bound_setattr("repr", repr)
bound_setattr("cmp", cmp)
bound_setattr("hash", hash)
bound_setattr("init", init)
bound_setattr("converter", converter)
bound_setattr(
"metadata",
(
metadata_proxy(metadata)
if metadata
else _empty_metadata_singleton
),
)
bound_setattr("type", type)
bound_setattr("kw_only", kw_only)
def __setattr__(self, name, value):
raise FrozenInstanceError()
@property
def convert(self):
warnings.warn(
"The `convert` attribute is deprecated in favor of `converter`. "
"It will be removed after 2019/01.",
DeprecationWarning,
stacklevel=2,
)
return self.converter
@classmethod
def from_counting_attr(cls, name, ca, type=None):
# type holds the annotated value. deal with conflicts:
if type is None:
type = ca.type
elif ca.type is not None:
raise ValueError(
"Type annotation and type argument cannot both be present"
)
inst_dict = {
k: getattr(ca, k)
for k in Attribute.__slots__
if k
not in (
"name",
"validator",
"default",
"type",
"convert",
) # exclude methods and deprecated alias
}
return cls(
name=name,
validator=ca._validator,
default=ca._default,
type=type,
**inst_dict
)
# Don't use attr.assoc since fields(Attribute) doesn't work
def _assoc(self, **changes):
"""
Copy *self* and apply *changes*.
"""
new = copy.copy(self)
new._setattrs(changes.items())
return new
# Don't use _add_pickle since fields(Attribute) doesn't work
def __getstate__(self):
"""
Play nice with pickle.
"""
return tuple(
getattr(self, name) if name != "metadata" else dict(self.metadata)
for name in self.__slots__
)
def __setstate__(self, state):
"""
Play nice with pickle.
"""
self._setattrs(zip(self.__slots__, state))
def _setattrs(self, name_values_pairs):
bound_setattr = _obj_setattr.__get__(self, Attribute)
for name, value in name_values_pairs:
if name != "metadata":
bound_setattr(name, value)
else:
bound_setattr(
name,
metadata_proxy(value)
if value
else _empty_metadata_singleton,
)
_a = [
Attribute(
name=name,
default=NOTHING,
validator=None,
repr=True,
cmp=True,
hash=(name != "metadata"),
init=True,
)
for name in Attribute.__slots__
if name != "convert" # XXX: remove once `convert` is gone
]
Attribute = _add_hash(
_add_cmp(_add_repr(Attribute, attrs=_a), attrs=_a),
attrs=[a for a in _a if a.hash],
)
class _CountingAttr(object):
"""
Intermediate representation of attributes that uses a counter to preserve
the order in which the attributes have been defined.
*Internal* data structure of the attrs library. Running into is most
likely the result of a bug like a forgotten `@attr.s` decorator.
"""
__slots__ = (
"counter",
"_default",
"repr",
"cmp",
"hash",
"init",
"metadata",
"_validator",
"converter",
"type",
"kw_only",
)
__attrs_attrs__ = tuple(
Attribute(
name=name,
default=NOTHING,
validator=None,
repr=True,
cmp=True,
hash=True,
init=True,
kw_only=False,
)
for name in ("counter", "_default", "repr", "cmp", "hash", "init")
) + (
Attribute(
name="metadata",
default=None,
validator=None,
repr=True,
cmp=True,
hash=False,
init=True,
kw_only=False,
),
)
cls_counter = 0
def __init__(
self,
default,
validator,
repr,
cmp,
hash,
init,
converter,
metadata,
type,
kw_only,
):
_CountingAttr.cls_counter += 1
self.counter = _CountingAttr.cls_counter
self._default = default
# If validator is a list/tuple, wrap it using helper validator.
if validator and isinstance(validator, (list, tuple)):
self._validator = and_(*validator)
else:
self._validator = validator
self.repr = repr
self.cmp = cmp
self.hash = hash
self.init = init
self.converter = converter
self.metadata = metadata
self.type = type
self.kw_only = kw_only
def validator(self, meth):
"""
Decorator that adds *meth* to the list of validators.
Returns *meth* unchanged.
.. versionadded:: 17.1.0
"""
if self._validator is None:
self._validator = meth
else:
self._validator = and_(self._validator, meth)
return meth
def default(self, meth):
"""
Decorator that allows to set the default for an attribute.
Returns *meth* unchanged.
:raises DefaultAlreadySetError: If default has been set before.
.. versionadded:: 17.1.0
"""
if self._default is not NOTHING:
raise DefaultAlreadySetError()
self._default = Factory(meth, takes_self=True)
return meth
_CountingAttr = _add_cmp(_add_repr(_CountingAttr))
@attrs(slots=True, init=False, hash=True)
class Factory(object):
"""
Stores a factory callable.
If passed as the default value to :func:`attr.ib`, the factory is used to
generate a new value.
:param callable factory: A callable that takes either none or exactly one
mandatory positional argument depending on *takes_self*.
:param bool takes_self: Pass the partially initialized instance that is
being initialized as a positional argument.
.. versionadded:: 17.1.0 *takes_self*
"""
factory = attrib()
takes_self = attrib()
def __init__(self, factory, takes_self=False):
"""
`Factory` is part of the default machinery so if we want a default
value here, we have to implement it ourselves.
"""
self.factory = factory
self.takes_self = takes_self
def make_class(name, attrs, bases=(object,), **attributes_arguments):
"""
A quick way to create a new class called *name* with *attrs*.
:param name: The name for the new class.
:type name: str
:param attrs: A list of names or a dictionary of mappings of names to
attributes.
If *attrs* is a list or an ordered dict (:class:`dict` on Python 3.6+,
:class:`collections.OrderedDict` otherwise), the order is deduced from
the order of the names or attributes inside *attrs*. Otherwise the
order of the definition of the attributes is used.
:type attrs: :class:`list` or :class:`dict`
:param tuple bases: Classes that the new class will subclass.
:param attributes_arguments: Passed unmodified to :func:`attr.s`.
:return: A new class with *attrs*.
:rtype: type
.. versionadded:: 17.1.0 *bases*
.. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained.
"""
if isinstance(attrs, dict):
cls_dict = attrs
elif isinstance(attrs, (list, tuple)):
cls_dict = dict((a, attrib()) for a in attrs)
else:
raise TypeError("attrs argument must be a dict or a list.")
post_init = cls_dict.pop("__attrs_post_init__", None)
type_ = type(
name,
bases,
{} if post_init is None else {"__attrs_post_init__": post_init},
)
# For pickling to work, the __module__ variable needs to be set to the
# frame where the class is created. Bypass this step in environments where
# sys._getframe is not defined (Jython for example) or sys._getframe is not
# defined for arguments greater than 0 (IronPython).
try:
type_.__module__ = sys._getframe(1).f_globals.get(
"__name__", "__main__"
)
except (AttributeError, ValueError):
pass
return _attrs(these=cls_dict, **attributes_arguments)(type_)
# These are required by within this module so we define them here and merely
# import into .validators.
@attrs(slots=True, hash=True)
class _AndValidator(object):
"""
Compose many validators to a single one.
"""
_validators = attrib()
def __call__(self, inst, attr, value):
for v in self._validators:
v(inst, attr, value)
def and_(*validators):
"""
A validator that composes multiple validators into one.
When called on a value, it runs all wrapped validators.
:param validators: Arbitrary number of validators.
:type validators: callables
.. versionadded:: 17.1.0
"""
vals = []
for validator in validators:
vals.extend(
validator._validators
if isinstance(validator, _AndValidator)
else [validator]
)
return _AndValidator(tuple(vals))
| 32.230846
| 95
| 0.578467
|
4a009632540ee9e74a8e7c35166690ac0ba7db15
| 5,436
|
py
|
Python
|
perfkitbenchmarker/linux_benchmarks/cloudsuite_graph_analytics_benchmark.py
|
xiaolihope/PerfKitBenchmarker-1.7.0
|
7699b1073a80d7a92fd3db93da742b93a2ecf900
|
[
"Apache-2.0"
] | null | null | null |
perfkitbenchmarker/linux_benchmarks/cloudsuite_graph_analytics_benchmark.py
|
xiaolihope/PerfKitBenchmarker-1.7.0
|
7699b1073a80d7a92fd3db93da742b93a2ecf900
|
[
"Apache-2.0"
] | null | null | null |
perfkitbenchmarker/linux_benchmarks/cloudsuite_graph_analytics_benchmark.py
|
xiaolihope/PerfKitBenchmarker-1.7.0
|
7699b1073a80d7a92fd3db93da742b93a2ecf900
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Runs the graph analytics benchmark of Cloudsuite.
More info: http://cloudsuite.ch/graphanalytics/
"""
import re
from perfkitbenchmarker import configs
from perfkitbenchmarker import errors
from perfkitbenchmarker import flags
from perfkitbenchmarker import sample
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.linux_packages import docker
flags.DEFINE_integer('cloudsuite_graph_analytics_worker_mem',
2,
'Amount of memory for the worker, in gigabytes')
FLAGS = flags.FLAGS
BENCHMARK_NAME = 'cloudsuite_graph_analytics'
BENCHMARK_CONFIG = """
cloudsuite_graph_analytics:
description: >
Run Cloudsuite graph analytics benchmark. Specify the number of worker
VMs with --num_vms.
vm_groups:
master:
vm_spec: *default_single_core
vm_count: 1
workers:
vm_spec: *default_single_core
"""
def GetConfig(user_config):
"""Reads the config file and overwrites vm_count with num_vms."""
config = configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
if FLAGS['num_vms'].present:
config['vm_groups']['workers']['vm_count'] = FLAGS.num_vms
return config
def Prepare(benchmark_spec):
"""Install docker.
Pull the required images from DockerHub, create datasets, and
start Spark master and workers.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
master = benchmark_spec.vm_groups['master'][0]
workers = benchmark_spec.vm_groups['workers']
def PrepareCommon(vm):
if not docker.IsInstalled(vm):
vm.Install('docker')
vm.RemoteCommand('sudo docker pull cloudsuite/spark')
vm.RemoteCommand('sudo docker pull cloudsuite/twitter-dataset-graph')
vm.RemoteCommand('sudo docker create --name data '
'cloudsuite/twitter-dataset-graph')
def PrepareMaster(vm):
PrepareCommon(vm)
vm.RemoteCommand('sudo docker pull cloudsuite/graph-analytics')
master_cmd = ('sudo docker run -d --net host -e SPARK_MASTER_IP=%s '
'--name spark-master cloudsuite/spark master' %
vm.internal_ip)
vm.RemoteCommand(master_cmd)
def PrepareWorker(vm):
PrepareCommon(vm)
worker_cmd = ('sudo docker run -d --net host --volumes-from data '
'--name spark-worker cloudsuite/spark worker '
'spark://%s:7077' % master.internal_ip)
vm.RemoteCommand(worker_cmd)
target_arg_tuples = ([(PrepareWorker, [vm], {}) for vm in workers] +
[(PrepareMaster, [master], {})])
vm_util.RunParallelThreads(target_arg_tuples, len(target_arg_tuples))
def Run(benchmark_spec):
"""Run the graph analytics benchmark.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample objects.
"""
master = benchmark_spec.vm_groups['master'][0]
results = []
memory_option = ('--executor-memory %dg' %
(FLAGS.cloudsuite_graph_analytics_worker_mem))
benchmark_cmd = ('sudo docker run --rm --net host --volumes-from data '
'cloudsuite/graph-analytics %s --master spark://%s:7077' %
(memory_option, master.internal_ip))
stdout, _ = master.RemoteCommand(benchmark_cmd, should_log=True)
matches = re.findall(r'Running time = (\d+)', stdout)
if len(matches) != 1:
errors.Benchmarks.RunError('Expected to find benchmark execution '
'time')
execution_time = matches[0]
results.append(sample.Sample('Benchmark execution time',
float(execution_time) / 1000, 'seconds'))
return results
def Cleanup(benchmark_spec):
"""Stop and remove docker containers. Remove images.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
master = benchmark_spec.vm_groups['master'][0]
workers = benchmark_spec.vm_groups['workers']
def CleanupCommon(vm):
vm.RemoteCommand('sudo docker rm -v data')
vm.RemoteCommand('sudo docker rmi cloudsuite/twitter-dataset-graph')
vm.RemoteCommand('sudo docker rmi cloudsuite/spark')
def CleanupMaster(vm):
vm.RemoteCommand('sudo docker stop spark-master')
vm.RemoteCommand('sudo docker rm spark-master')
vm.RemoteCommand('sudo docker rmi cloudsuite/spark')
CleanupCommon(vm)
def CleanupWorker(vm):
vm.RemoteCommand('sudo docker stop spark-worker')
vm.RemoteCommand('sudo docker rm spark-worker')
CleanupCommon(vm)
target_arg_tuples = ([(CleanupWorker, [vm], {}) for vm in workers] +
[(CleanupMaster, [master], {})])
vm_util.RunParallelThreads(target_arg_tuples, len(target_arg_tuples))
| 33.975
| 77
| 0.699411
|
4a00963df04141d8e2bdc1a801ee1c022deba1cc
| 2,132
|
py
|
Python
|
src/isolation_forest.py
|
Tpool1/Cancer_ML
|
39a5605e58d7940e17810bc1858877e6c0887f4f
|
[
"MIT"
] | null | null | null |
src/isolation_forest.py
|
Tpool1/Cancer_ML
|
39a5605e58d7940e17810bc1858877e6c0887f4f
|
[
"MIT"
] | null | null | null |
src/isolation_forest.py
|
Tpool1/Cancer_ML
|
39a5605e58d7940e17810bc1858877e6c0887f4f
|
[
"MIT"
] | null | null | null |
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import IsolationForest
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.decomposition import PCA
from sklearn.preprocessing import StandardScaler
def isolation_forest(features, target):
isolated_forest=IsolationForest(n_estimators=100, n_jobs=-1, random_state=42)
if type(features) == tuple:
# concatenate features for image clinical
clinical_array = features[0]
image_array = features[1]
new_array = np.empty(shape=(image_array.shape[0], int(image_array.shape[1])**2))
i = 0
for image in image_array:
image = np.reshape(image, (1, int(image_array.shape[1])**2))
new_array[i] = image
i = i + 1
image_array = new_array
concatenated_array = np.concatenate((clinical_array, image_array), axis=1)
isolated_forest.fit(concatenated_array, target)
predicted = isolated_forest.predict(concatenated_array)
else:
isolated_forest.fit(features, target)
predicted = isolated_forest.predict(features)
predicted_df = pd.DataFrame(predicted)
predicted_df.to_csv('data_anomaly.csv')
outlier_indices = []
i = 0
for prediction in predicted:
if prediction == -1:
outlier_indices.append(i)
i = i + 1
pca = PCA(n_components=3)
if type(features) == tuple:
features = concatenated_array
scaler = StandardScaler()
features = scaler.fit_transform(features)
features = pca.fit_transform(features)
fig = plt.figure()
fig.suptitle("3D PCA of Features with Outliers and Inliers")
ax = fig.add_subplot(111, projection='3d')
ax.scatter(features[:, 0], features[:, 1], zs=features[:, 2], s=4, lw=1, label="inliers", c="green")
ax.scatter(features[outlier_indices,0], features[outlier_indices,1], features[outlier_indices,2], lw=2, s=60, marker='x', c='red', label='outliers')
ax.legend()
plt.savefig("3d_outlier_pca" + str(features.shape) + ".png")
return predicted
| 30.898551
| 152
| 0.670732
|
4a0096afc7000067b08ac02ac045706bedcc4df0
| 2,930
|
py
|
Python
|
apps/marketing/subviews/campaign.py
|
gvizquel/pyerp
|
c859f7293cabd1003f79112463cee93ac89fccba
|
[
"MIT"
] | null | null | null |
apps/marketing/subviews/campaign.py
|
gvizquel/pyerp
|
c859f7293cabd1003f79112463cee93ac89fccba
|
[
"MIT"
] | 11
|
2020-06-05T22:50:37.000Z
|
2022-02-10T09:05:56.000Z
|
apps/marketing/subviews/campaign.py
|
gvizquel/pyerp
|
c859f7293cabd1003f79112463cee93ac89fccba
|
[
"MIT"
] | null | null | null |
# Librerias Django
from django.contrib.auth import authenticate
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.models import User
from django.shortcuts import redirect, render
from django.urls import reverse, reverse_lazy
from django.views.generic import DetailView, ListView
from django.views.generic.edit import CreateView, DeleteView, UpdateView
# Librerias en carpetas locales
from ..submodels.campaign import PyCampaign
CAMPAIGN_FIELDS = [
{'string': 'Nombre', 'field': 'name'},
{'string': 'Código', 'field': 'code'},
]
CAMPAIGN_FIELDS_SHORT = ['name','code']
class CampaignListView(LoginRequiredMixin, ListView):
model = PyCampaign
template_name = 'erp/list.html'
login_url = "/erp/login"
def get_context_data(self, **kwargs):
context = super(CampaignListView, self).get_context_data(**kwargs)
context['title'] = 'Campañas'
context['detail_url'] = 'campaign-detail'
context['add_url'] = 'campaign-add'
context['fields'] = CAMPAIGN_FIELDS
return context
class CampaignDetailView(LoginRequiredMixin, DetailView):
model = PyCampaign
template_name = 'erp/detail.html'
login_url = "/erp/login"
def get_context_data(self, **kwargs):
context = super(CampaignDetailView, self).get_context_data(**kwargs)
context['title'] = context['object'].name
context['breadcrumbs'] = [{'url': 'campaign', 'name': 'Campaña'}]
context['update_url'] = 'campaign-update'
context['delete_url'] = 'campaign-delete'
context['fields'] = CAMPAIGN_FIELDS
return context
class CampaignCreateView(LoginRequiredMixin, CreateView):
model = PyCampaign
fields = CAMPAIGN_FIELDS_SHORT
template_name = 'erp/form.html'
login_url = "/erp/login"
def get_context_data(self, **kwargs):
context = super(CampaignCreateView, self).get_context_data(**kwargs)
context['title'] = 'Crear Campaña'
context['breadcrumbs'] = [{'url': 'campaign', 'name': 'Campaña'}]
context['back_url'] = reverse('campaign')
return context
class CampaignUpdateView(LoginRequiredMixin, UpdateView):
model = PyCampaign
fields = CAMPAIGN_FIELDS_SHORT
template_name = 'erp/form.html'
login_url = "/erp/login"
def get_context_data(self, **kwargs):
context = super(CampaignUpdateView, self).get_context_data(**kwargs)
context['title'] = context['object'].name
context['breadcrumbs'] = [{'url': 'campaign', 'name': 'Campaña'}]
context['back_url'] = reverse('campaign-detail', kwargs={'pk': context['object'].pk})
return context
@login_required(login_url="/erp/login")
def DeleteCampaign(self, pk):
campaign = PyCampaign.objects.get(id=pk)
campaign.delete()
return redirect(reverse('campaign'))
| 36.17284
| 93
| 0.686689
|
4a00980e6cd363cf565c6f2ef1ed1ba63d368799
| 520
|
py
|
Python
|
fastapi/model.py
|
jaisenbe58r/iAApi-QAS-BERT
|
b999104d7949080c3d5f432608418a49f4e2edaf
|
[
"MIT"
] | 3
|
2020-12-12T21:48:30.000Z
|
2021-02-14T18:17:53.000Z
|
fastapi/model.py
|
jaisenbe58r/iAApi-QAS-BERT
|
b999104d7949080c3d5f432608418a49f4e2edaf
|
[
"MIT"
] | null | null | null |
fastapi/model.py
|
jaisenbe58r/iAApi-QAS-BERT
|
b999104d7949080c3d5f432608418a49f4e2edaf
|
[
"MIT"
] | 1
|
2021-09-29T01:37:42.000Z
|
2021-09-29T01:37:42.000Z
|
from PIL import Image
import io
from transformers import pipeline
def get_model():
# qa = pipeline('question-answering',
# model="mrm8488/bert-base-spanish-wwm-cased-finetuned-spa-squad2-es",
# tokenizer="mrm8488/bert-base-spanish-wwm-cased-finetuned-spa-squad2-es")
def qa(context: str, question: str):
return {"answer": "Esto es una prueba"}
return qa
def get_result(qa, context, question, max_size=512):
r = qa(context=context, question=question)
return r
| 30.588235
| 88
| 0.675
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.