text stringlengths 0 1.05M | meta dict |
|---|---|
"""Access the persistent configuration information."""
import json
import os
import logging
class Config(object):
    """Singleton access to throw's persistent configuration.

    Settings are stored as JSON in ``~/.config/throw/throw.json`` and are
    grouped into sections; every option carries help text and, optionally,
    a default value (options without a default are mandatory).
    """
    __instance = None
    __config_path = os.path.expanduser('~/.config/throw/throw.json')
    __log = logging.getLogger(__name__ + '.Config')
    # Each configuration option is in one section and has some help text
    # associated with it and, optionally, a default value if it is optional.
    __options = {
        'user': {
            'name': { 'help': 'Your full name to use when sending email' },
            'email': { 'help': 'The email address to use when sending email' },
        },
        'smtp': {
            'host': {
                'help': 'The hostname of a SMTP server to use to send mail' },
            'port': {
                'help': 'The port to connect the to SMTP server when sending mail',
                'default': 25 },
            'use_tls': {
                'help': 'Use TLS when connecting to the SMTP server',
                'default': False },
            'use_ssl': {
                'help': 'Use SSL when connecting to the SMTP server',
                'default': False },
            'username': {
                'help': 'Authenticate to the SMTP server with this username',
                'default': None },
            'password': {
                # BUG FIX: help text previously duplicated the username text.
                'help': 'Authenticate to the SMTP server with this password',
                'default': None },
        },
    }
    def __new__(cls, *args, **kwargs):
        # Implement the singleton pattern.
        # BUG FIX: the original tested ``cls.__instance`` but assigned
        # ``cls._instance`` (single underscore), so a brand new object was
        # created on every call and the singleton never took effect.
        if cls.__instance is None:
            cls.__instance = super(Config, cls).__new__(cls)
        return cls.__instance
    def __init__(self):
        """Load the configuration file, falling back to an empty config."""
        try:
            # BUG FIX: close the file handle instead of leaking it.
            with open(Config.__config_path, 'r') as fp:
                self._config_dict = json.load(fp)
            Config.__log.info('Loaded configuration from %s' % (Config.__config_path,))
        except IOError:
            self._config_dict = { }
            Config.__log.info('Loaded blank configuration')
    def _sync(self):
        """Write the in-memory configuration back to disk."""
        config_dir = os.path.dirname(Config.__config_path)
        if not os.path.exists(config_dir):
            os.makedirs(config_dir)
        # BUG FIX: the file handle was previously never closed/flushed
        # explicitly.
        with open(Config.__config_path, 'w') as fp:
            json.dump(self._config_dict, fp, indent=4)
    def exists(self, section, option):
        """Return True iff *option* has an explicitly stored value."""
        if section not in self._config_dict:
            return False
        if option not in self._config_dict[section]:
            return False
        return True
    def get_section(self, section):
        """Return the stored dict for *section*, or one built from defaults.

        Raises KeyError if the section is neither stored nor known.
        """
        if section not in self._config_dict:
            if section in Config.__options:
                fallback_dict = { }
                for option in Config.__options[section]:
                    if 'default' in Config.__options[section][option]:
                        fallback_dict[option] = \
                            Config.__options[section][option]['default']
                return fallback_dict
            raise KeyError('No fallback found for configuration section "%s".' % (section,))
        return self._config_dict[section]
    def get(self, section, option):
        """Return the stored value for section.option, or its default.

        Raises KeyError if the option is neither stored nor has a default.
        """
        if self.exists(section, option):
            if option in self._config_dict[section]:
                return self._config_dict[section][option]
        # We need to use a fallback, silently ignore a KeyError
        # if there is no default value.
        try:
            return Config.__options[section][option]['default']
        except KeyError:
            pass
        # If we got here, there was no fallback to return
        raise KeyError('No fallback found for configuration option "%s.%s"' % (section, option))
    def set(self, section, option, value):
        """Store *value* for section.option and persist it immediately."""
        if section not in self._config_dict:
            self._config_dict[section] = { }
        self._config_dict[section][option] = value
        self._sync()
| {
"repo_name": "rjw57/throw",
"path": "throw/config.py",
"copies": "1",
"size": "3855",
"license": "apache-2.0",
"hash": -8380047467608163000,
"line_mean": 36.427184466,
"line_max": 96,
"alpha_frac": 0.5463035019,
"autogenerated": false,
"ratio": 4.519343493552169,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5565646995452169,
"avg_score": null,
"num_lines": null
} |
"""Access to account statement webservice."""
import logging
import time
import xml.etree.ElementTree as et
from contextlib import suppress
from urllib.request import urlopen
from ib_insync import util
from ib_insync.objects import DynamicObject
__all__ = ('FlexReport', 'FlexError')
_logger = logging.getLogger('ib_insync.flexreport')
class FlexError(Exception):
    """Raised when the Flex Web Service reports an error response."""
    pass
class FlexReport:
    """
    Download and parse IB account statements via the Flex Web Service.
    https://www.interactivebrokers.com/en/software/am/am/reports/flex_web_service_version_3.htm
    To obtain a ``token`` in account management, go to
    Reports -> Settings -> Flex Web Service.
    Tip: choose a 1 year expiry.
    To obtain a ``queryId``: Create and save a query with
    Report -> Activity -> Flex Queries or
    Report -> Trade Confirmations -> Flex Queries.
    Find the query ID (not the query name).
    A large query can take a few minutes. In the weekends the query servers
    can be down.
    """
    def __init__(self, token=None, queryId=None, path=None):
        """
        Download a report by giving a valid ``token`` and ``queryId``,
        or load from file by giving a valid ``path``.
        """
        self.data = None  # raw XML bytes of the report
        self.root = None  # parsed ElementTree root element
        if token and queryId:
            self.download(token, queryId)
        elif path:
            self.load(path)
    def topics(self):
        """Get the set of topics that can be extracted from this report."""
        # Only elements that carry XML attributes hold extractable data rows.
        return set(node.tag for node in self.root.iter() if node.attrib)
    def extract(self, topic: str, parseNumbers=True) -> list:
        """
        Extract items of given topic and return as list of objects.
        The topic is a string like TradeConfirm, ChangeInDividendAccrual,
        Order, etc.
        """
        # Build a DynamicObject subclass named after the topic so that every
        # XML attribute of a node becomes an object attribute.
        cls = type(topic, (DynamicObject,), {})
        results = [cls(**node.attrib) for node in self.root.iter(topic)]
        if parseNumbers:
            for obj in results:
                d = obj.__dict__
                for k, v in d.items():
                    # suppress() exits this block at the first ValueError:
                    # non-numeric strings stay strings, values that pass
                    # float() but fail int() stay floats, integers end as int.
                    with suppress(ValueError):
                        d[k] = float(v)
                        d[k] = int(v)
        return results
    def df(self, topic: str, parseNumbers=True):
        """Same as extract but return the result as a pandas DataFrame."""
        return util.df(self.extract(topic, parseNumbers))
    def download(self, token, queryId):
        """Download report for the given ``token`` and ``queryId``."""
        # Step 1: request statement generation; on success the service
        # replies with a reference code and the base URL to poll.
        url = (
            'https://gdcdyn.interactivebrokers.com'
            f'/Universal/servlet/FlexStatementService.SendRequest?'
            f't={token}&q={queryId}&v=3')
        resp = urlopen(url)
        data = resp.read()
        root = et.fromstring(data)
        if root.find('Status').text == 'Success':
            code = root.find('ReferenceCode').text
            baseUrl = root.find('Url').text
            _logger.info('Statement is being prepared...')
        else:
            errorCode = root.find('ErrorCode').text
            errorMsg = root.find('ErrorMessage').text
            raise FlexError(f'{errorCode}: {errorMsg}')
        # Step 2: poll once per second until the statement is ready; while
        # generation is in progress the first child element is <code>.
        while True:
            time.sleep(1)
            url = f'{baseUrl}?q={code}&t={token}'
            resp = urlopen(url)
            self.data = resp.read()
            self.root = et.fromstring(self.data)
            if self.root[0].tag == 'code':
                msg = self.root[0].text
                if msg.startswith('Statement generation in progress'):
                    _logger.info('still working...')
                    continue
                else:
                    raise FlexError(msg)
            break
        _logger.info('Statement retrieved.')
    def load(self, path):
        """Load report from XML file."""
        with open(path, 'rb') as f:
            self.data = f.read()
            self.root = et.fromstring(self.data)
    def save(self, path):
        """Save report to XML file."""
        with open(path, 'wb') as f:
            f.write(self.data)
if __name__ == '__main__':
    # Ad-hoc manual run against the live Flex service; requires a valid
    # token and query ID to succeed.
    util.logToConsole()
    report = FlexReport('945692423458902392892687', '272555')
    print(report.topics())
    trades = report.extract('Trade')
    print(trades)
| {
"repo_name": "erdewit/ib_insync",
"path": "ib_insync/flexreport.py",
"copies": "1",
"size": "4242",
"license": "bsd-2-clause",
"hash": -623465338225136000,
"line_mean": 31.8837209302,
"line_max": 95,
"alpha_frac": 0.5763790665,
"autogenerated": false,
"ratio": 4.013245033112582,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 129
} |
"""Access to common geographic datasets
Copyright (c) 2013 Clarinova. This file is licensed under the terms of the
Revised BSD License, included in this distribution as LICENSE.txt
"""
import yaml
import sys
from ambry.dbexceptions import ConfigurationError
class US:
    """ Access to US states, regions, etc. """
    def __init__(self, library):
        # library: ambry library used to resolve the 'usgeo' and 'places'
        # dataset dependencies.
        self.library = library
    @property
    def usgeo(self):
        """The 'usgeo' dependency, with a descriptive error when missing."""
        try:
            usgeo = self.library.dep('usgeo')
        except ConfigurationError:
            raise ConfigurationError("MISSING DEPENDENCY: "+"To use the US geo datasets, the bundle ( or library ) must specify a"+
                " dependency with a set named 'usgeo', in build.dependencies.usgeo")
        return usgeo
    def _places(self):
        """The 'places' dependency partition, with a descriptive error when missing."""
        try:
            places = self.library.dep('places').partition
        except ConfigurationError:
            raise ConfigurationError("MISSING DEPENDENCY: "+"To use the US county datasets, the bundle ( or library ) must specify a"+
                " dependency with a set named 'places', in build.dependencies.places. "+
                " See https://github.com/clarinova/ambry/wiki/Error-Messages#geoanalysisareasget_analysis_area")
        return places
    @property
    def states(self):
        """All state records, wrapped as UsState objects."""
        # BUG FIX: this used ``self.bundle.query(...)``, but no ``bundle``
        # attribute is ever defined on this class; the states table is
        # queried through the usgeo dependency, exactly as state() does.
        return [ UsState(self.library, row) for row in self.usgeo.query('SELECT * FROM states')]
    def state(self, abbrev=None, **kwargs):
        """Retrieve a state record by abbreviation, fips code, ansi code or census code
        The argument to the function is a keyword that can be:
        abbrev Lookup by the state's abbreviation
        fips Lookup by the state's fips code
        ansi Lookup by the state's ansi code
        census Lookup by the state's census code
        Note that the ansi codes are represented as integers, but they aren't actually numbers;
        the codes have a leading zero that is only maintained when the codes are used as strings. This
        interface returns the codes as integers, with the leading zero removed.
        """
        if kwargs.get('abbrev') or abbrev:
            if not abbrev:
                abbrev = kwargs.get('abbrev')
            rows = self.usgeo.query("SELECT * FROM states WHERE stusab = ?", abbrev.upper() )
        elif kwargs.get('fips'):
            rows = self.usgeo.query("SELECT * FROM states WHERE state = ?", int(kwargs.get('fips')))
        elif kwargs.get('ansi'):
            rows = self.usgeo.query("SELECT * FROM states WHERE statens = ?", int(kwargs.get('ansi')))
        elif kwargs.get('census'):
            # BUG FIX: this branch previously converted kwargs.get('ansi'),
            # which is None when only 'census' is supplied -> TypeError.
            rows = self.usgeo.query("SELECT * FROM states WHERE statece = ?", int(kwargs.get('census')))
        else:
            rows = None
        if rows:
            return UsState(self.library, rows.first())
        else:
            return None
    def county(self, code):
        """Return the UsCounty with the given code."""
        row = self._places().query("""SELECT AsText(geometry) as wkt, SRID(geometry) as srid, *
        FROM counties WHERE code = ? LIMIT 1""", code).first()
        return UsCounty(self.library, row)
    def place(self, code):
        """Return the Place with the given code, or None if not found."""
        row = self._places().query("""SELECT AsText(geometry) as wkt, SRID(geometry) as srid, *
        FROM places WHERE code = ? LIMIT 1""", code).first()
        if not row:
            return None
        return Place(self.library, row)
    @property
    def places(self):
        """Iterate over all places as Place objects."""
        for row in self._places().query("""SELECT AsText(geometry) as wkt, SRID(geometry) as srid, * FROM places"""):
            yield Place(self.library, row)
class UsState:
    """Represents a US State, with accessors for counties, tracts, blocks and other regions

    This object wraps a row of the state table in the geodim dataset, so the
    fields reachable through __getattr__ depend on that table, but are typically:
    geoid TEXT
    region INTEGER Region
    division INTEGER Division
    state INTEGER State census code
    stusab INTEGER State Abbreviation
    statece INTEGER State (FIPS)
    statens INTEGER State (ANSI)
    lsadc TEXT Legal/Statistical Area Description Code
    name TEXT
    Additional accessors include:
    fips FIPS code, equal to the 'state' field
    ansi ANSI code, equal to the 'statens' field
    census CENSUS code, equal to the 'statece' field
    usps Uppercase state abbreviation, equal to the 'stusab' field
    """
    def __init__(self, library, row):
        self.library = library
        self.row = row
    def __getattr__(self, name):
        # Delegate unknown attribute names to the underlying row mapping.
        # BUG FIX: raise AttributeError instead of letting KeyError escape,
        # so hasattr(), getattr() with a default, and the copy/pickle
        # protocols behave correctly for missing fields.
        try:
            return self.row[name]
        except KeyError:
            raise AttributeError(name)
    @property
    def fips(self):
        """FIPS code (the 'state' field)."""
        return self.row['state']
    @property
    def ansi(self):
        """ANSI code (the 'statens' field)."""
        return self.row['statens']
    @property
    def census(self):
        """Census code (the 'statece' field)."""
        return self.row['statece']
    @property
    def usps(self):
        """Uppercase USPS abbreviation (the 'stusab' field)."""
        return self.row['stusab']
    def __str__(self):
        return "<{}:{}>".format('USState', self.row['name'])
class UsCounty(object):
    # Wrapper around a single county row as produced by US.county()
    # (includes 'wkt' and 'srid' columns from that query).
    def __init__(self,library, row):
        self.library = library
        self.row = row
    @property
    def places(self):
        # NOTE(review): ``self._places`` is not defined anywhere on UsCounty
        # (a method of that name exists on the US class); accessing this
        # property as written raises AttributeError.  Presumably it should
        # query through the 'places' dependency and restrict to this county
        # -- confirm the intended data source before fixing.
        for row in self._places().query("""SELECT AsText(geometry) as wkt, SRID(geometry) as srid, * FROM places"""):
            yield Place(self.library, row)
class Place(object):
    """A place record (row with geometry 'wkt'/'srid' and an 'aa' JSON field)."""
    def __init__(self, library, row):
        self.library = library
        self.row = row
        if not self.row:
            raise Exception('row cannot be None')
    @property
    def spsrs(self):
        """The 'spsrs' field of the underlying row."""
        return self.row['spsrs']
    @property
    def type(self):
        """The 'type' field of the underlying row."""
        return self.row['type']
    @property
    def name(self):
        """The 'name' field of the underlying row."""
        return self.row['name']
    @property
    def code(self):
        """The 'code' field of the underlying row."""
        return self.row['code']
    def aa(self, scale=None):
        """Return an Analysis Area built from the row's stored 'aa' JSON."""
        import json
        from ..geo.analysisarea import AnalysisArea
        d = json.loads(self.row['aa'])
        if scale:
            d['_scale'] = scale
        if not d.get('_scale'):
            # Fall back to a default scale when neither the stored record
            # nor the caller provides one.
            d['_scale'] = 20
        return AnalysisArea(**d)
    def mask(self, ar=None, nodata=0, scale=10):
        """Return a numpy array with a hard mask excluding areas outside the place.

        ar     -- optional array to wrap in a masked array; when None, the
                  raw boolean mask is returned instead.
        nodata -- fill value used for masked elements.
        scale  -- scale passed through to the analysis area.
        """
        import ogr
        import gdal
        from osgeo.gdalconst import GDT_Byte
        import numpy as np
        import numpy.ma as ma
        srs_in = ogr.osr.SpatialReference()
        srs_in.ImportFromEPSG(self.row['srid'])
        aa = self.aa(scale)
        image = aa.get_memimage(data_type=GDT_Byte)
        # Rasterize the place geometry (re-projected into the analysis
        # area's SRS) into an in-memory byte image.
        ogr_ds = ogr.GetDriverByName('Memory').CreateDataSource('/tmp/foo')
        lyr = ogr_ds.CreateLayer('place', aa.srs)
        geometry = ogr.CreateGeometryFromWkt(self.row['wkt'])
        geometry.AssignSpatialReference(srs_in)
        geometry.TransformTo(aa.srs)
        feature = ogr.Feature(lyr.GetLayerDefn())
        feature.SetGeometryDirectly(geometry)
        lyr.CreateFeature(feature)
        gdal.RasterizeLayer( image, [1], lyr, burn_values=[1])
        feature.Destroy()
        # Pixels burned to 1 are inside the place; flip vertically and
        # invert so that True marks *excluded* cells.
        mask = np.logical_not(np.flipud(np.array(image.GetRasterBand(1).ReadAsArray(), dtype=bool)))
        if ar is not None:
            # BUG FIX: numpy.ma.masked_array has no 'nodata'/'hard' keyword
            # arguments (the original raised TypeError here); the intended
            # parameters are fill_value and hard_mask.
            return ma.masked_array(ar, mask=mask, fill_value=nodata, hard_mask=True)
        else:
            return mask
| {
"repo_name": "kball/ambry",
"path": "ambry/datasets/geo.py",
"copies": "1",
"size": "7705",
"license": "bsd-2-clause",
"hash": -3344617720788776000,
"line_mean": 29.9477911647,
"line_max": 135,
"alpha_frac": 0.5756002596,
"autogenerated": false,
"ratio": 4.155879180151024,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5231479439751024,
"avg_score": null,
"num_lines": null
} |
"access to double and single prec SWIG wrapped FFTW module"
__author__ = "Sebastian Haase <haase@msg.ucsf.edu>"
__license__ = "BSD license - see LICENSE file"
import Priithon_bin.sfftw as _sfftw
import Priithon_bin.dfftw as _dfftw
import numpy as _N
_splans = {}  # cache of single-precision FFTW plans, keyed on (kind-tag, shape)
_dplans = {}  # cache of double-precision FFTW plans, keyed on (kind-tag, shape)
_measure = _sfftw.FFTW_ESTIMATE # ==0
# _measure = _sfftw.FFTW_MEASURE # == 1
RTYPE = _N.float32  # default real dtype
RTYPES = (_N.float32, _N.float64)  # real dtypes handled by rfft/irfft below
CTYPE = _N.complex64  # default complex dtype
CTYPES = (_N.complex64, _N.complex128)  # complex dtypes handled by fft/ifft below
ncpu = 1  # module default; the nthreads arguments below are never used
def rfft(a, af=None, inplace=0, nthreads=1):
    """Forward real-to-complex FFT of ``a`` via the SWIG-wrapped FFTW.

    a        -- float32 or float64 input array (for inplace use, the last
                axis carries FFTW's two extra padding elements)
    af       -- optional preallocated, C-contiguous complex output array
    inplace  -- transform within a's buffer (FFTW_IN_PLACE)
    nthreads -- accepted for interface compatibility; ignored here
    Returns the output array when one is created here; returns None when the
    caller supplied ``af`` for an out-of-place transform (original contract).
    """
    if inplace:
        inplace = _sfftw.FFTW_IN_PLACE # == 8
        # Logical (unpadded) real shape.
        shape = a.shape[:-1]+(a.shape[-1]-2,)
    else:
        shape = a.shape
        inplace = 0
    dir = _sfftw.FFTW_FORWARD
    if a.dtype == _N.float32:
        if af is not None and (not af.flags.carray or af.dtype != _N.complex64):
            raise RuntimeError("af needs to be well behaved Complex64 array")
        key = ("sr%d"%inplace, shape )
        try:
            p = _splans[ key ]
        except KeyError:
            # BUG FIX: was a bare 'except:'; only a plan-cache miss is
            # expected here.
            ashape = _N.array(shape, dtype=_N.int32)
            p = _sfftw.rfftwnd_create_plan(len(shape), ashape, dir,
                                           _measure | inplace)
            if p is None:
                raise RuntimeError("could not create plan")
            _splans[ key ] = p
        if inplace:
            _sfftw.rfftwnd_one_real_to_complex(p,a,None)
            if af is None:
                # BUG FIX: the hermitian output length must be an integer;
                # the original's float division (shape[-1]/2.+1) yields a
                # float, which numpy rejects as a shape element.
                s2 = shape[:-1]+(shape[-1]//2+1,)
                af = _N.ndarray(buffer=a, shape=s2, dtype=_N.complex64)
                return af
        else:
            if af is None:
                s2 = shape[:-1]+(shape[-1]//2+1,)
                af = _N.empty(shape=s2, dtype=_N.complex64)
                _sfftw.rfftwnd_one_real_to_complex(p,a,af)
                return af
            else:
                _sfftw.rfftwnd_one_real_to_complex(p,a,af)
    elif a.dtype == _N.float64:
        if af is not None and (not af.flags.carray or af.dtype != _N.complex128):
            # BUG FIX: the message previously said "Complex64".
            raise RuntimeError("af needs to be well behaved Complex128 array")
        key = ("dr%d"%inplace, shape )
        try:
            p = _dplans[ key ]
        except KeyError:
            p = _dfftw.rfftwnd_create_plan(len(shape), _N.array(shape, dtype=_N.int32), dir,
                                           _measure | inplace)
            if p is None:
                raise RuntimeError("could not create plan")
            _dplans[ key ] = p
        if inplace:
            _dfftw.rfftwnd_one_real_to_complex(p,a,None)
            if af is None:
                s2 = shape[:-1]+(shape[-1]//2+1,)
                af = _N.ndarray(buffer=a, shape=s2, dtype=_N.complex128)
                return af
        else:
            if af is None:
                s2 = shape[:-1]+(shape[-1]//2+1,)
                af = _N.empty(shape=s2, dtype=_N.complex128)
                _dfftw.rfftwnd_one_real_to_complex(p,a,af)
                return af
            else:
                _dfftw.rfftwnd_one_real_to_complex(p,a,af)
    else:
        raise TypeError("(c)float32 and (c)float64 must be used consistently (%s %s)"%\
            ((a is None and "a is None" or "a.dtype=%s"%a.dtype),
             (af is None and "af is None" or "af.dtype=%s"%af.dtype)))
def irfft(af, a=None, inplace=0, copy=1, nthreads=1):
    """Inverse (complex-to-real) FFT of the hermitian array ``af``.

    if copy==1 (and inplace==0 !!) fftw uses a copy of af to prevent
    overwriting the original (fftw always messes up the input array when
    inv-fft complex_to_real)
    a        -- optional preallocated, C-contiguous real output array
    inplace  -- transform within af's buffer (FFTW_IN_PLACE)
    nthreads -- accepted for interface compatibility; ignored here
    """
    # NOTE(review): 'shape' and 's2' leak to module scope here (kept from
    # the original code); confirm nothing reads them externally before
    # making them local variables.
    global shape,s2
    if copy and not inplace:
        af = af.copy()
    if inplace:
        inplace = _dfftw.FFTW_IN_PLACE # == 8
        shape = af.shape[:-1] + ((af.shape[-1]-1)*2,)
    else:
        shape = af.shape[:-1] + ((af.shape[-1]-1)*2,)
        inplace = 0
    dir = _sfftw.FFTW_BACKWARD
    if af.dtype == _N.complex64:
        if a is not None and (not a.flags.carray or a.dtype != _N.float32):
            raise RuntimeError("a needs to be well behaved float32 array")
        key = ("sir%d"%inplace, shape )
        try:
            p = _splans[ key ]
        except KeyError:
            # BUG FIX: was a bare 'except:'; only a plan-cache miss is
            # expected here.
            p = _sfftw.rfftwnd_create_plan(len(shape), _N.array(shape, dtype=_N.int32), dir,
                                           _measure | inplace)
            if p is None:
                raise RuntimeError("could not create plan")
            _splans[ key ] = p
        if inplace:
            _sfftw.rfftwnd_one_complex_to_real(p,af,None)
            if a is None:
                # Real view over the complex buffer, padded by two elements
                # along the last axis (FFTW in-place layout).
                s2 = shape[:-1]+(shape[-1]+2,)
                a = _N.ndarray(buffer=af, shape=s2, dtype=_N.float32)
                return a
        else:
            if a is None:
                s2 = shape
                a = _N.empty(shape=s2, dtype=_N.float32)
                _sfftw.rfftwnd_one_complex_to_real(p,af,a)
                return a
            else:
                _sfftw.rfftwnd_one_complex_to_real(p,af,a)
    elif af.dtype == _N.complex128:
        if a is not None and (not a.flags.carray or a.dtype != _N.float64):
            raise RuntimeError("a needs to be well behaved float64 array")
        key = ("dir%d"%inplace, shape )
        try:
            p = _dplans[ key ]
        except KeyError:
            p = _dfftw.rfftwnd_create_plan(len(shape), _N.array(shape, dtype=_N.int32), dir,
                                           _measure | inplace)
            if p is None:
                raise RuntimeError("could not create plan")
            _dplans[ key ] = p
        if inplace:
            _dfftw.rfftwnd_one_complex_to_real(p,af,None)
            if a is None:
                s2 = shape[:-1]+(shape[-1]+2,)
                a = _N.ndarray(buffer=af, shape=s2, dtype=_N.float64)
                return a
        else:
            if a is None:
                s2 = shape
                a = _N.empty(shape=s2, dtype=_N.float64)
                _dfftw.rfftwnd_one_complex_to_real(p,af,a)
                return a
            else:
                _dfftw.rfftwnd_one_complex_to_real(p,af,a)
    else:
        raise TypeError("(c)float32 and (c)float64 must be used consistently (%s %s)"%\
            ((a is None and "a is None" or "a.dtype=%s"%a.dtype),
             (af is None and "af is None" or "af.dtype=%s"%af.dtype)))
def destroy_plans():
    """Destroy every cached FFTW plan and leave both plan caches empty."""
    while _splans:
        _, plan = _splans.popitem()
        _sfftw.rfftwnd_destroy_plan(plan)
    while _dplans:
        _, plan = _dplans.popitem()
        _dfftw.rfftwnd_destroy_plan(plan)
'''
>> plan = sfftw.rfftw2d_create_plan(256,256,sfftw.FFTW_FORWARD,
... sfftw.FFTW_ESTIMATE)
>>>
>>> a = F.gaussianArr()
>>> a.shape
(256, 256)
>>> af = F.zeroArrC(256,129)
>>> sfftw.rfftwnd_one_real_to_complex(plan,a,af)
>>> Y.view(af)
** split-viewer: complex - used abs()
# window: 0) af
>>> b = F.noiseArr(shape=(256, 256), stddev=1.0, mean=0.0)
>>> bf = F.zeroArrC(256,129)
>>> sfftw.rfftwnd_one_real_to_complex(plan,b,bf)
>>> Y.view(bf)
** split-viewer: complex - used abs()
# window: 1) bf
>>> bb = F.rfft2d(b)
>>> Y.view(bb)
** split-viewer: complex - used abs()
# window: 2) bb
>>> p = {}
>>> p[ ("r",(256,256), sfftw.FFTW_FORWARD) ] = plan
'''
def fft(a, af=None, inplace=0, nthreads=1):
    """Forward complex-to-complex FFT of ``a``.

    a        -- complex64 or complex128 input array
    af       -- optional preallocated, C-contiguous complex output array
    inplace  -- transform within a's buffer (FFTW_IN_PLACE)
    nthreads -- accepted for interface compatibility; ignored here
    Returns the output array when one is created here; returns None when the
    caller supplied ``af`` for an out-of-place transform (original contract).
    """
    if inplace:
        inplace = _sfftw.FFTW_IN_PLACE # == 8
        shape = a.shape
    else:
        shape = a.shape
        inplace = 0
    dir = _sfftw.FFTW_FORWARD
    if a.dtype == _N.complex64:
        if af is not None and (not af.flags.carray or af.dtype != _N.complex64):
            raise RuntimeError("af needs to be well behaved complex64 array")
        key = ("s%d"%inplace, shape )
        try:
            p = _splans[ key ]
        except KeyError:
            # BUG FIX: was a bare 'except:'; only a plan-cache miss is
            # expected here.
            p = _sfftw.fftwnd_create_plan(len(shape), _N.array(shape, dtype=_N.int32), dir,
                                          _measure | inplace)
            if p is None:
                raise RuntimeError("could not create plan")
            _splans[ key ] = p
        if inplace:
            _sfftw.fftwnd_one(p,a,None)
            if af is None:
                s2 = shape
                af = _N.ndarray(buffer=a, shape=s2, dtype=_N.complex64)
                return af
        else:
            if af is None:
                s2 = shape
                af = _N.empty(shape=s2, dtype=_N.complex64)
                _sfftw.fftwnd_one(p,a,af)
                return af
            else:
                _sfftw.fftwnd_one(p,a,af)
    elif a.dtype == _N.complex128:
        if af is not None and (not af.flags.carray or af.dtype != _N.complex128):
            raise RuntimeError("af needs to be well behaved complex128 array")
        key = ("d%d"%inplace, shape )
        try:
            p = _dplans[ key ]
        except KeyError:
            p = _dfftw.fftwnd_create_plan(len(shape), _N.array(shape, dtype=_N.int32), dir,
                                          _measure | inplace)
            if p is None:
                raise RuntimeError("could not create plan")
            _dplans[ key ] = p
        if inplace:
            _dfftw.fftwnd_one(p,a,None)
            if af is None:
                s2 = shape
                af = _N.ndarray(buffer=a, shape=s2, dtype=_N.complex128)
                return af
        else:
            if af is None:
                s2 = shape
                af = _N.empty(shape=s2, dtype=_N.complex128)
                _dfftw.fftwnd_one(p,a,af)
                return af
            else:
                _dfftw.fftwnd_one(p,a,af)
    else:
        raise TypeError("complex64 and complex128 must be used consistently (%s %s)"%\
            ((a is None and "a is None" or "a.dtype=%s"%a.dtype),
             (af is None and "af is None" or "af.dtype=%s"%af.dtype)))
def ifft(af, a=None, inplace=0, nthreads=1):
    """Inverse complex-to-complex FFT of ``af``.

    af       -- complex64 or complex128 input array
    a        -- optional preallocated, C-contiguous complex output array
    inplace  -- transform within af's buffer (FFTW_IN_PLACE)
    nthreads -- accepted for interface compatibility; ignored here
    """
    # NOTE(review): 'shape' and 's2' leak to module scope here (kept from
    # the original code); confirm nothing reads them externally before
    # making them local variables.
    global shape,s2
    if inplace:
        inplace = _dfftw.FFTW_IN_PLACE # == 8
        shape = af.shape
    else:
        shape = af.shape
        inplace = 0
    dir = _sfftw.FFTW_BACKWARD
    if af.dtype == _N.complex64:
        if a is not None and (not a.flags.carray or a.dtype != _N.complex64):
            raise RuntimeError("a needs to be well behaved complex64 array")
        key = ("si%d"%inplace, shape )
        try:
            p = _splans[ key ]
        except KeyError:
            # BUG FIX: was a bare 'except:'; only a plan-cache miss is
            # expected here.
            p = _sfftw.fftwnd_create_plan(len(shape), _N.array(shape, dtype=_N.int32), dir,
                                          _measure | inplace)
            if p is None:
                raise RuntimeError("could not create plan")
            _splans[ key ] = p
        if inplace:
            _sfftw.fftwnd_one(p,af,None)
            if a is None:
                s2 = shape
                a = _N.ndarray(buffer=af, shape=s2, dtype=_N.complex64)
                return a
        else:
            if a is None:
                s2 = shape
                a = _N.empty(shape=s2, dtype=_N.complex64)
                _sfftw.fftwnd_one(p,af,a)
                return a
            else:
                _sfftw.fftwnd_one(p,af,a)
    elif af.dtype == _N.complex128:
        if a is not None and (not a.flags.carray or a.dtype != _N.complex128):
            raise RuntimeError("a needs to be well behaved complex128 array")
        key = ("di%d"%inplace, shape )
        try:
            p = _dplans[ key ]
        except KeyError:
            p = _dfftw.fftwnd_create_plan(len(shape), _N.array(shape, dtype=_N.int32), dir,
                                          _measure | inplace)
            if p is None:
                raise RuntimeError("could not create plan")
            _dplans[ key ] = p
        if inplace:
            _dfftw.fftwnd_one(p,af,None)
            if a is None:
                s2 = shape
                a = _N.ndarray(buffer=af, shape=s2, dtype=_N.complex128)
                return a
        else:
            if a is None:
                s2 = shape
                a = _N.empty(shape=s2, dtype=_N.complex128)
                _dfftw.fftwnd_one(p,af,a)
                return a
            else:
                _dfftw.fftwnd_one(p,af,a)
    else:
        raise TypeError("complex64 and complex128 must be used consistently (%s %s)"%\
            ((a is None and "a is None" or "a.dtype=%s"%a.dtype),
             (af is None and "af is None" or "af.dtype=%s"%af.dtype)))
| {
"repo_name": "macronucleus/chromagnon",
"path": "Chromagnon/Priithon/fftwseb.py",
"copies": "1",
"size": "12556",
"license": "mit",
"hash": -9146876790278282000,
"line_mean": 32.2169312169,
"line_max": 96,
"alpha_frac": 0.5054953807,
"autogenerated": false,
"ratio": 3.269791666666667,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4275287047366667,
"avg_score": null,
"num_lines": null
} |
"""Access to filter bandpasses used by FSPS"""
import os
import numpy as np
FILTER_LIST = [(1,'V','Johnson V (from Bessell 1990 via M. Blanton) - this defines V=0 for the Vega system'),
(2,"U","Johnson U (from Bessell 1990 via M. Blanton)"),
(3,"CFHT_B","CFHT B-band (from Blanton's kcorrect)"),
(4,"CFHT_R","CFHT R-band (from Blanton's kcorrect)"),
(5,"CFHT_I","CFHT I-band (from Blanton's kcorrect)"),
(6,"TMASS_J","2MASS J filter (total response w/atm)"),
(7,"TMASS_H","2MASS H filter (total response w/atm))"),
(8,"TMASS_Ks","2MASS Ks filter (total response w/atm)"),
(9,"SDSS_u","SDSS Camera u Response Function, airmass = 1.3 (June 2001)"),
(10,"SDSS_g","SDSS Camera g Response Function, airmass = 1.3 (June 2001)"),
(11,"SDSS_r","SDSS Camera r Response Function, airmass = 1.3 (June 2001)"),
(12,"SDSS_i","SDSS Camera i Response Function, airmass = 1.3 (June 2001)"),
(13,"SDSS_z","SDSS Camera z Response Function, airmass = 1.3 (June 2001)"),
(14,"WFC_ACS_F435W","WFC ACS F435W (http://acs.pha.jhu.edu/instrument/photometry/)"),
(15,"WFC_ACS_F606W","WFC ACS F606W (http://acs.pha.jhu.edu/instrument/photometry/)"),
(16,"WFC_ACS_F775W","WFC ACS F775W (http://acs.pha.jhu.edu/instrument/photometry/)"),
(17,"WFC_ACS_F814W","WFC ACS F814W (http://acs.pha.jhu.edu/instrument/photometry/)"),
(18,"WFC_ACS_F850LP","WFC ACS F850LP (http://acs.pha.jhu.edu/instrument/photometry/)"),
(19,"IRAC_1","IRAC Channel 1"),
(20,"IRAC_2","IRAC Channel 2"),
(21,"IRAC_3","IRAC Channel 3"),
(22,"ISAAC_Js","ISAAC Js"),
(23,"ISAAC_Ks","ISAAC Ks"),
(24,"FORS_V","FORS V"),
(25,"FORS_R","FORS R"),
(26,"NICMOS_F110W","NICMOS F110W"),
(27,"NICMOS_F160W","NICMOS F160W"),
(28,"GALEX_NUV","GALEX NUV"),
(29,"GALEX_FUV","GALEX FUV"),
(30,"DES_g","DES g (from Huan Lin, for DES camera)"),
(31,"DES_r","DES r (from Huan Lin, for DES camera)"),
(32,"DES_i","DES i (from Huan Lin, for DES camera)"),
(33,"DES_z","DES z (from Huan Lin, for DES camera)"),
(34,"DES_Y","DES Y (from Huan Lin, for DES camera)"),
(35,"WFCAM_Z","WFCAM Z (from Hewett et al. 2006, via A. Smith)"),
(36,"WFCAM_Y","WFCAM Y (from Hewett et al. 2006, via A. Smith)"),
(37,"WFCAM_J","WFCAM J (from Hewett et al. 2006, via A. Smith)"),
(38,"WFCAM_H","WFCAM H (from Hewett et al. 2006, via A. Smith)"),
(39,"WFCAM_K","WFCAM K (from Hewett et al. 2006, via A. Smith)"),
(40,"BC03_B","Johnson B (from BC03. This is the B2 filter from Buser)"),
(41,"Cousins_R","Cousins R (from Bessell 1990 via M. Blanton)"),
(42,"Cousins_I","Cousins I (from Bessell 1990 via M. Blanton)"),
(43,"B","Johnson B (from Bessell 1990 via M. Blanton)"),
(44,"WFPC2_F555W","WFPC2 F555W (http://acs.pha.jhu.edu/instrument/photometry/WFPC2/)"),
(45,"WFPC2_F814W","WFPC2 F814W (http://acs.pha.jhu.edu/instrument/photometry/WFPC2/)"),
(46,"Cousins_I_2","Cousins I (http://acs.pha.jhu.edu/instrument/photometry/GROUND/)"),
(47,"WFC3_F275W","WFC3 F275W (ftp://ftp.stsci.edu/cdbs/comp/wfc3/)"),
(48,"Steidel_Un","Steidel Un (via A. Shapley; see Steidel et al. 2003)"),
(49,"Steidel_G","Steidel G (via A. Shapley; see Steidel et al. 2003)"),
(50,"Steidel_Rs","Steidel Rs (via A. Shapley; see Steidel et al. 2003)"),
(51,"Steidel_I","Steidel I (via A. Shapley; see Steidel et al. 2003)"),
(52,"MegaCam_u","CFHT MegaCam u* (http://cadcwww.dao.nrc.ca/megapipe/docs/filters.html, Dec 2010)"),
(53,"MegaCam_g","CFHT MegaCam g' (http://cadcwww.dao.nrc.ca/megapipe/docs/filters.html)"),
(54,"MegaCam_r","CFHT MegaCam r' (http://cadcwww.dao.nrc.ca/megapipe/docs/filters.html)"),
(55,"MegaCam_i","CFHT MegaCam i' (http://cadcwww.dao.nrc.ca/megapipe/docs/filters.html)"),
(56,"MegaCam_z","CFHT MegaCam z' (http://cadcwww.dao.nrc.ca/megapipe/docs/filters.html)"),
(57,"WISE_W1","3.4um WISE W1 (http://www.astro.ucla.edu/~wright/WISE/passbands.html)"),
(58,"WISE_W2","4.6um WISE W2 (http://www.astro.ucla.edu/~wright/WISE/passbands.html)"),
(59,"WISE_W3","12um WISE W3 (http://www.astro.ucla.edu/~wright/WISE/passbands.html)"),
(60,"WISE_W4","22um WISE W4 22um (http://www.astro.ucla.edu/~wright/WISE/passbands.html)"),
(61,"WFC3_F125W","WFC3 F125W (ftp://ftp.stsci.edu/cdbs/comp/wfc3/)"),
(62,"WFC3_F160W","WFC3 F160W (ftp://ftp.stsci.edu/cdbs/comp/wfc3/)"),
(63,"UVOT_W2","UVOT W2 (from Erik Hoversten, 2011)"),
(64,"UVOT_M2","UVOT M2 (from Erik Hoversten, 2011)"),
(65,"UVOT_W1","UVOT W1 (from Erik Hoversten, 2011)"),
(66,"MIPS_24","Spitzer MIPS 24um"),
(67,"MIPS_70","Spitzer MIPS 70um"),
(68,"MIPS_160","Spitzer MIPS 160um"),
(69,"SCUBA_450WB","JCMT SCUBA 450WB (www.jach.hawaii.edu/JCMT/continuum/background/background.html)"),
(70,"SCUBA_850WB","JCMT SCUBA 850WB"),
(71,"PACS_70","Herschel PACS 70um"),
(72,"PACS_100","Herschel PACS 100um"),
(73,"PACS_160","Herschel PACS 160um"),
(74,"SPIRE_250","Herschel SPIRE 250um"),
(75,"SPIRE_350","Herschel SPIRE 350um"),
(76,"SPIRE_500","Herschel SPIRE 500um"),
(77,"IRAS_12","IRAS 12um"),
(78,"IRAS_25","IRAS 25um"),
(79,"IRAS_60","IRAS 60um"),
(80,"IRAS_100","IRAS 100um"),
(81,"Bessell_L","Bessell & Brett (1988) L band"),
(82,"Bessell_LP","Bessell & Brett (1988) L' band"),
(83,"Bessell_M","Bessell & Brett (1988) M band")]
class FilterDB(object):
    """In-memory database of FSPS filter bandpasses.

    Parses ``$SPS_HOME/data/allfilters.dat`` on construction; filters are
    addressable by FSPS index or by the keyname used in FILTER_LIST.
    """
    def __init__(self):
        super(FilterDB, self).__init__()
        # Filter definitions ship with FSPS; $SPS_HOME must point at the
        # FSPS installation directory.
        fspsDir = os.path.expandvars("$SPS_HOME")
        filterPath = os.path.join(fspsDir, "data", "allfilters.dat")
        self.filters = self.read_filters(filterPath)
    def __getitem__(self, key):
        """Get filter data by key. `key` can either be the FSPS index, or the
        keyname string used in FILTER_LIST.
        The return type is a dictionary with the following fields:
        * index: (int) FSPS index for filter
        * key: (str) pySPS keyname for filter
        * comment: (str) comment string
        * wave: (np array 1,N) wavelength array (angstroms)
        * band: (np array 1,N) band pass
        """
        if isinstance(key, str):
            index = self._resolve_index(key)
        else:
            index = key
        return self.filters[index]
    def _resolve_index(self, key):
        """Return the FSPS index for the filter keyname ``key``.

        BUG FIX: previously returned None for unknown keynames, which
        surfaced downstream as a confusing ``KeyError: None``; now raises
        KeyError(key) directly (same exception type callers already catch).
        """
        for i, rec in enumerate(FILTER_LIST):
            if rec[1] == key:
                return i + 1
        raise KeyError(key)
    def read_filters(self, dataPath):
        """Called upon instantiation to parse $SPS_HOME/data/allfilters.dat"""
        filterDB = {}
        index = 0
        wave = []
        band = []
        # Each filter section starts with a '#' header line; data lines are
        # "<wavelength> <transmission>" pairs.
        # BUG FIX: use a with-statement so the file is closed even if a
        # malformed line raises during parsing.
        with open(dataPath) as f:
            for line in f:
                if ("#" in line) and (index > 0):
                    # A new header closes out the previous filter's data.
                    filterDB[index] = self._package_filter(index, wave, band)
                    index += 1
                    wave = []
                    band = []
                elif ("#" in line) and (index == 0):
                    index += 1
                    continue
                else:
                    w, b = line.strip().split()
                    wave.append(float(w))
                    band.append(float(b))
            # The final filter has no trailing header; package it here.
            filterDB[index] = self._package_filter(index, wave, band)
        return filterDB
    def _package_filter(self, index, wave, band):
        """Create dictionary document for each filter, used by
        :meth:`read_filters()`.
        """
        doc = {}
        filterInfo = FILTER_LIST[index - 1]
        doc['index'] = filterInfo[0]
        doc['key'] = filterInfo[1]
        doc['comment'] = filterInfo[2]
        doc['wave'] = np.array(wave)
        doc['band'] = np.array(band)
        return doc
| {
"repo_name": "jonathansick/pySPS",
"path": "pysps/filters.py",
"copies": "1",
"size": "8093",
"license": "bsd-3-clause",
"hash": 8640839226606504000,
"line_mean": 50.5477707006,
"line_max": 110,
"alpha_frac": 0.5632027678,
"autogenerated": false,
"ratio": 2.82084349947717,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8753946252376316,
"avg_score": 0.02602000298017085,
"num_lines": 157
} |
# Access to KEGG API
from bioservices.kegg import KEGG
import ora_msc
# Define the path of metabolomics data
DATA_PATH = './data/'
# Stating the annotation files & modzscore files
pos_annot = DATA_PATH + 'annotation_pos.txt'
pos_mod = DATA_PATH + 'modzscore_pos_annotated.tsv'
neg_annot = DATA_PATH + 'annotation_neg.txt'
neg_mod = DATA_PATH + 'modzscore_neg_annotated.tsv'
# Initialise KEGG instance
kegg_instance = KEGG()
kegg_instance.organism = "eco"
# Initialise both backgrounds
test_compounds = ora_msc.get_all_compounds('eco')
zamboni_bg = ora_msc.loadTsv(DATA_PATH + 'annotation_all.txt')
# Remove metabolites detected in Zamboni but not in any E.coli pathway
zamboni_bg = zamboni_bg & test_compounds
# build {pathway: compounds} dictionary for E.coli
# (one KEGG web request per pathway; this can take a while)
ecoli_pathways = kegg_instance.pathwayIds
pathway_2_compounds = dict()
for pathway in ecoli_pathways:
    parsed_output = kegg_instance.parse(kegg_instance.get(pathway)) # parsed_ouput has lots of information about the pathway
    try:
        compounds = set(parsed_output['COMPOUND'].keys())
        pathway_2_compounds[pathway] = compounds
    except KeyError: # Some pathways do not have defined compounds
        pass
# Translate KO number to gene name
sample_id_all = DATA_PATH + 'sample_id_modzscore.tsv'
all_knockouts = []# End product
fh_sample_id_all = open(sample_id_all, 'r')
for knockout in fh_sample_id_all:
    all_knockouts.append(knockout.rstrip())
fh_sample_id_all.close()
# Background Analysis
# For each knockout in the given range, run the over-representation analysis
# twice -- once against the full E.coli pathway-compound background and once
# against the Zamboni (detected-metabolite) background -- and write both
# result sets side by side, one pathway per row.
for ko_number in range(2406, 3717):
    nobg_pval, nobg_pathway_id, nobg_sizes = ora_msc.oras_ko(ko_number, ecoli_pathways, test_compounds, pathway_2_compounds,
                                                            pos_annot, pos_mod, neg_annot, neg_mod, 2, True, True, 0, [])
    zamboni_pval, zamboni_pathway_id, zamboni_sizes = ora_msc.oras_ko(ko_number, ecoli_pathways, zamboni_bg, pathway_2_compounds,
                                                            pos_annot, pos_mod, neg_annot, neg_mod, 2, True, True, 0, [])
    # Define where to save results here
    result_file = './Backgrounds/KO' + str(ko_number) + '.tsv'
    fh = open(result_file, 'w')
    for i in range(0, len(nobg_pathway_id)):
        fh.write('{}\t{}\t{}\t{}\t{}\n'.format(nobg_pathway_id[i][5:], nobg_pval[i], nobg_sizes[i], zamboni_pval[i], zamboni_sizes[i]))
fh.close() | {
"repo_name": "Louiszr/ora_msc",
"path": "scripts/background.py",
"copies": "1",
"size": "2278",
"license": "mit",
"hash": 762936314003405400,
"line_mean": 42,
"line_max": 135,
"alpha_frac": 0.6922739245,
"autogenerated": false,
"ratio": 2.751207729468599,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.3943481653968599,
"avg_score": null,
"num_lines": null
} |
# Access to KEGG API
from bioservices.kegg import KEGG
import ora_msc
import matplotlib.pyplot as plt

# Define the path of metabolomics data
DATA_PATH = './data/'
# Stating the annotation files & modzscore files
pos_annot = DATA_PATH + 'annotation_pos.txt'
pos_mod = DATA_PATH + 'modzscore_pos_annotated.tsv'
neg_annot = DATA_PATH + 'annotation_neg.txt'
neg_mod = DATA_PATH + 'modzscore_neg_annotated.tsv'
# Initialise KEGG instance
kegg_instance = KEGG()
kegg_instance.organism = "eco"
# Initialise both backgrounds: all E.coli KEGG compounds, and the subset
# actually measured by the Zamboni dataset.
test_compounds = ora_msc.get_all_compounds('eco')
zamboni_bg = ora_msc.loadTsv(DATA_PATH + 'annotation_all.txt')
# Remove metabolites detected in Zamboni but not in any E.coli pathway
zamboni_bg = zamboni_bg & test_compounds
# build {pathway: compounds} dictionary for E.coli
ecoli_pathways = kegg_instance.pathwayIds
pathway_2_compounds = dict()
for pathway in ecoli_pathways:
    # parsed_output has lots of information about the pathway
    parsed_output = kegg_instance.parse(kegg_instance.get(pathway))
    try:
        compounds = set(parsed_output['COMPOUND'].keys())
        pathway_2_compounds[pathway] = compounds
    except KeyError:  # Some pathways do not have defined compounds
        pass
# Translate KO number to gene name
sample_id_all = DATA_PATH + 'sample_id_modzscore.tsv'
all_knockouts = []  # End product: one knockout/sample id per input line
fh_sample_id_all = open(sample_id_all, 'r')
for knockout in fh_sample_id_all:
    all_knockouts.append(knockout.rstrip())
fh_sample_id_all.close()
# Pathway sizes when every KEGG compound counts as observable.
size_dist = [len(members) for members in pathway_2_compounds.values()]
# Pathway sizes counting only compounds present in the Zamboni background.
zamboni_size_dist = [
    sum(1 for cmpd in members if cmpd in zamboni_bg)
    for members in pathway_2_compounds.values()
]
# Two stacked histograms comparing pathway-size distributions under the
# Zamboni background (top) and the all-compounds background (bottom).
plt.subplot(211)
plt.hist(zamboni_size_dist, bins=range(0, 145, 5))
plt.ylim(0, 40)
plt.xlabel('Pathway size')
plt.ylabel('Number of pathways')
plt.title('Pathway size distribution (Zamboni background)')
plt.subplot(212)
plt.hist(size_dist, bins=range(0, 145, 5))
plt.ylim(0, 40)
plt.xlabel('Pathway size')
plt.ylabel('Number of pathways')
plt.title('Pathway size distribution (all compounds)')
plt.tight_layout()
plt.show() | {
"repo_name": "Louiszr/ora_msc",
"path": "scripts/size_distribution.py",
"copies": "1",
"size": "2356",
"license": "mit",
"hash": -5628205044505612000,
"line_mean": 30.8513513514,
"line_max": 124,
"alpha_frac": 0.7224108659,
"autogenerated": false,
"ratio": 2.83855421686747,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.406096508276747,
"avg_score": null,
"num_lines": null
} |
# Access to KEGG API
from bioservices.kegg import KEGG
import ora_msc
import random
import numpy as np

# Define the path of metabolomics data
DATA_PATH = './data/'
# Stating the annotation files & modzscore files
pos_annot = DATA_PATH + 'annotation_pos.txt'
pos_mod = DATA_PATH + 'modzscore_pos_annotated.tsv'
neg_annot = DATA_PATH + 'annotation_neg.txt'
neg_mod = DATA_PATH + 'modzscore_neg_annotated.tsv'
# Initialise KEGG instance
kegg_instance = KEGG()
kegg_instance.organism = "eco"
# Initialise both backgrounds: all E.coli KEGG compounds, and the subset
# actually measured by the Zamboni dataset.
test_compounds = ora_msc.get_all_compounds('eco')
zamboni_bg = ora_msc.loadTsv(DATA_PATH + 'annotation_all.txt')
# Remove metabolites detected in Zamboni but not in any E.coli pathway
zamboni_bg = zamboni_bg & test_compounds
# build {pathway: compounds} dictionary for E.coli
ecoli_pathways = kegg_instance.pathwayIds
pathway_2_compounds = dict()
for pathway in ecoli_pathways:
    # parsed_output has lots of information about the pathway
    parsed_output = kegg_instance.parse(kegg_instance.get(pathway))
    try:
        compounds = set(parsed_output['COMPOUND'].keys())
        pathway_2_compounds[pathway] = compounds
    except KeyError:  # Some pathways do not have defined compounds
        pass
# Translate KO number to gene name
sample_id_all = DATA_PATH + 'sample_id_modzscore.tsv'
all_knockouts = []  # End product: one knockout/sample id per input line
fh_sample_id_all = open(sample_id_all, 'r')
for knockout in fh_sample_id_all:
    all_knockouts.append(knockout.rstrip())
fh_sample_id_all.close()
# Generate random knockout numbers
random_knockouts = np.random.randint(3717, size=50)
# Random metabolite mutation
# Change misidentification & number of simulations rate here
MISIDENT_RATE = 0.1
NUMBER_SIMULATION = 20
#
# Change results file name here
filename = './mrate' + str(MISIDENT_RATE * 100) + '.tsv'
fh = open(filename, 'w')
for ko_number in random_knockouts:
    fp = 0  # false positives relative to the unperturbed run
    fn = 0  # false negatives relative to the unperturbed run
    # ora_results[0]: significant pathways of the unperturbed run;
    # ora_results[1..NUMBER_SIMULATION]: significant pathways per mutation.
    ora_results = []
    for i in range(0, NUMBER_SIMULATION + 1):
        ora_results.append([])
    (pvals, pathwayids, pathsizes) = ora_msc.oras_ko(
        ko_number, ecoli_pathways, zamboni_bg, pathway_2_compounds,
        pos_annot, pos_mod, neg_annot, neg_mod, 2,
        True, False, 0, [])
    for ind in range(0, len(pvals)):
        if pvals[ind] < 0.05:
            ora_results[0].append(pathwayids[ind])
    for k in range(0, NUMBER_SIMULATION):  # Number of mutations per ko
        # Same ORA call, but with int(MISIDENT_RATE * len(zamboni_bg))
        # compounds randomly swapped for misidentified ones.
        (pvals_mut, pathwayids_mut, pathsizes_mut) = ora_msc.oras_ko(
            ko_number, ecoli_pathways, zamboni_bg, pathway_2_compounds,
            pos_annot, pos_mod, neg_annot, neg_mod, 2,
            True, False, int(MISIDENT_RATE * len(zamboni_bg)), test_compounds)
        for ind in range(0, len(pvals_mut)):
            if pvals_mut[ind] < 0.05:
                ora_results[k+1].append(pathwayids_mut[ind])
    # write ora_result to a file
    for i in range(1, len(ora_results)):
        result = ora_results[i]
        fp += len(set(result) - set(ora_results[0]))
        fn += len(set(ora_results[0]) - set(result))
    fh.write('\t'.join([str(len(ora_results[0])), str(fp), str(fn), str(ko_number)]))
    fh.write('\n')
fh.close() | {
"repo_name": "Louiszr/ora_msc",
"path": "scripts/misident.py",
"copies": "1",
"size": "3229",
"license": "mit",
"hash": -7525956657943524000,
"line_mean": 36.1264367816,
"line_max": 129,
"alpha_frac": 0.6512852276,
"autogenerated": false,
"ratio": 2.97329650092081,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9087263246448538,
"avg_score": 0.007463696414454286,
"num_lines": 87
} |
# AWeber OAuth 1.0 endpoint URLs and API root.
ACCESS_TOKEN_URL = 'https://auth.aweber.com/1.0/oauth/access_token'
API_BASE = 'https://api.aweber.com/1.0'
AUTHORIZE_URL = 'https://auth.aweber.com/1.0/oauth/authorize'
REQUEST_TOKEN_URL = 'https://auth.aweber.com/1.0/oauth/request_token'


class APIException(Exception):
    """Raised for errors reported by the AWeber API."""
class AWeberBase(object):
    """Behaviour shared across all AWeber collection/entry objects."""

    # Child collections exposed by each resource type.
    collections_map = {
        'account': ['lists', 'integrations'],
        'broadcast_campaign': ['links', 'messages', 'stats'],
        'component': [],
        'custom_field': [],
        'followup_campaign': ['links', 'messages', 'stats'],
        'integration': [],
        'link': ['clicks'],
        'list': [
            'campaigns',
            'custom_fields',
            'subscribers',
            'web_forms',
            'web_form_split_tests',
        ],
        'message': ['opens', 'tracked_events'],
        'service-root': 'accounts',
        'subscriber': [],
        'tracked_events': [],
        'web_form': [],
        'web_form_split_test': ['components'],
    }

    @property
    def user(self):
        """The adapter's user object."""
        return self.adapter.user

    def load_from_url(self, url):
        """Gets an AWeberCollection or AWeberEntry from a given URL."""
        response = self.adapter.request('GET', url)
        return self._read_response(url, response)

    def _method_for(self, type):
        # Guard helper: only resources of the matching type expose the method.
        if self.type != type:
            raise AttributeError('Method does not exist')

    def _read_response(self, url, response):
        # Collections carry 'entries'; single entries carry
        # 'resource_type_link'. Anything else is unexpected.
        if 'entries' in response:
            from aweber_api.collection import AWeberCollection
            return AWeberCollection(url, response, self.adapter)
        if 'resource_type_link' in response:
            from aweber_api.entry import AWeberEntry
            return AWeberEntry(url, response, self.adapter)
        raise TypeError('Unknown value returned')

    def _parseNamedOperation(self, data):
        # Wrap each returned item in an AWeberEntry keyed by its
        # API-base-relative self link.
        from aweber_api.entry import AWeberEntry
        return [
            AWeberEntry(
                item['self_link'].replace(API_BASE, ''),
                item,
                self.adapter,
            )
            for item in data
        ]

    def _partition_url(self):
        """Split self.url on '/'; None when there is no parent entry."""
        try:
            url_parts = self.url.split('/')
        except AttributeError:
            # No url attribute (or not a string) -- nothing to partition.
            return None
        # If top of tree - no parent entry
        if len(url_parts) <= 3:
            return None
        return url_parts

    def _construct_parent_url(self, url_parts, child_position):
        """Remove collection id and slash from end of url."""
        return '/'.join(url_parts[:-child_position])
| {
"repo_name": "aweber/AWeber-API-Python-Library",
"path": "aweber_api/base.py",
"copies": "1",
"size": "2754",
"license": "bsd-3-clause",
"hash": -8892500995200825000,
"line_mean": 30.2954545455,
"line_max": 71,
"alpha_frac": 0.5566448802,
"autogenerated": false,
"ratio": 4.008733624454148,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00024703557312252963,
"num_lines": 88
} |
"""Access tool command lines, handling back compatibility and file type issues.
Abstracts out
"""
import subprocess
import toolz as tz
from bcbio.pipeline import datadict as dd
from bcbio.pipeline import config_utils
def get_tabix_cmd(config):
    """Retrieve tabix command, handling new bcftools tabix and older tabix.
    """
    try:
        bcftools = config_utils.get_program("bcftools", config)
        # bcftools has terrible error codes and stderr output, swallow those.
        # NOTE(review): subprocess.check_output returns bytes on Python 3,
        # so .find("tabix") with a str argument would raise TypeError --
        # confirm this runs under Python 2 or add .decode() when porting.
        bcftools_tabix = subprocess.check_output("{bcftools} 2>&1; echo $?".format(**locals()),
                                                 shell=True).find("tabix") >= 0
    except config_utils.CmdNotFound:
        bcftools_tabix = False
    if bcftools_tabix:
        # bcftools bundles tabix as a subcommand.
        return "{0} tabix".format(bcftools)
    else:
        # Fall back to the standalone tabix executable.
        tabix = config_utils.get_program("tabix", config)
        return tabix
def get_bgzip_cmd(config, is_retry=False):
    """Retrieve command to use for bgzip, trying to use bgzip parallel threads.

    By default, parallel bgzip is enabled in bcbio. If it causes problems
    please report them. You can turn parallel bgzip off with `tools_off: [pbgzip]`
    """
    core_count = tz.get_in(["algorithm", "num_cores"], config, 1)
    base_cmd = config_utils.get_program("bgzip", config)
    use_threads = (not is_retry
                   and core_count > 1
                   and "pbgzip" not in dd.get_tools_off({"config": config}))
    if use_threads:
        return base_cmd + " --threads %s" % core_count
    return base_cmd
| {
"repo_name": "biocyberman/bcbio-nextgen",
"path": "bcbio/pipeline/tools.py",
"copies": "1",
"size": "1454",
"license": "mit",
"hash": -6699806075839547000,
"line_mean": 38.2972972973,
"line_max": 95,
"alpha_frac": 0.6499312242,
"autogenerated": false,
"ratio": 3.625935162094763,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9762603909833562,
"avg_score": 0.0026524952922403187,
"num_lines": 37
} |
"""Access to Python's configuration information."""
import os
import sys
from os.path import pardir, realpath
__all__ = [
'get_config_h_filename',
'get_config_var',
'get_config_vars',
'get_makefile_filename',
'get_path',
'get_path_names',
'get_paths',
'get_platform',
'get_python_version',
'get_scheme_names',
'parse_config_h',
]
# Mapping of install scheme name -> {path key -> path template}.  Templates
# use '{name}' placeholders expanded by _expand_vars() with the variables
# from get_config_vars() plus any caller-supplied overrides.
_INSTALL_SCHEMES = {
    'posix_prefix': {
        'stdlib': '{installed_base}/lib/{implementation_lower}{py_version_short}',
        'platstdlib': '{platbase}/lib/{implementation_lower}{py_version_short}',
        'purelib': '{base}/lib/{implementation_lower}{py_version_short}/site-packages',
        'platlib': '{platbase}/lib/{implementation_lower}{py_version_short}/site-packages',
        'include':
            '{installed_base}/include/{implementation_lower}{py_version_short}{abiflags}',
        'platinclude':
            '{installed_platbase}/include/{implementation_lower}{py_version_short}{abiflags}',
        'scripts': '{base}/bin',
        'data': '{base}',
        },
    'posix_home': {
        'stdlib': '{installed_base}/lib/{implementation_lower}',
        'platstdlib': '{base}/lib/{implementation_lower}',
        'purelib': '{base}/lib/{implementation_lower}',
        'platlib': '{base}/lib/{implementation_lower}',
        'include': '{installed_base}/include/{implementation_lower}',
        'platinclude': '{installed_base}/include/{implementation_lower}',
        'scripts': '{base}/bin',
        'data': '{base}',
        },
    'nt': {
        'stdlib': '{installed_base}/Lib',
        'platstdlib': '{base}/Lib',
        'purelib': '{base}/Lib/site-packages',
        'platlib': '{base}/Lib/site-packages',
        'include': '{installed_base}/Include',
        'platinclude': '{installed_base}/Include',
        'scripts': '{base}/Scripts',
        'data': '{base}',
        },
    'nt_user': {
        'stdlib': '{userbase}/{implementation}{py_version_nodot}',
        'platstdlib': '{userbase}/{implementation}{py_version_nodot}',
        'purelib': '{userbase}/{implementation}{py_version_nodot}/site-packages',
        'platlib': '{userbase}/{implementation}{py_version_nodot}/site-packages',
        'include': '{userbase}/{implementation}{py_version_nodot}/Include',
        'scripts': '{userbase}/Scripts',
        'data': '{userbase}',
        },
    'posix_user': {
        'stdlib': '{userbase}/lib/{implementation_lower}{py_version_short}',
        'platstdlib': '{userbase}/lib/{implementation_lower}{py_version_short}',
        'purelib': '{userbase}/lib/{implementation_lower}{py_version_short}/site-packages',
        'platlib': '{userbase}/lib/{implementation_lower}{py_version_short}/site-packages',
        'include': '{userbase}/include/{implementation_lower}{py_version_short}',
        'scripts': '{userbase}/bin',
        'data': '{userbase}',
        },
    'osx_framework_user': {
        'stdlib': '{userbase}/lib/{implementation_lower}',
        'platstdlib': '{userbase}/lib/{implementation_lower}',
        'purelib': '{userbase}/lib/{implementation_lower}/site-packages',
        'platlib': '{userbase}/lib/{implementation_lower}/site-packages',
        'include': '{userbase}/include',
        'scripts': '{userbase}/bin',
        'data': '{userbase}',
        },
    }

# The path keys every scheme provides (see get_path_names()).
_SCHEME_KEYS = ('stdlib', 'platstdlib', 'purelib', 'platlib', 'include',
                'scripts', 'data')
# FIXME don't rely on sys.version here, its format is an implementation detail
# of CPython, use sys.version_info or sys.hexversion
_PY_VERSION = sys.version.split()[0]
_PY_VERSION_SHORT = sys.version[:3]
_PY_VERSION_SHORT_NO_DOT = _PY_VERSION[0] + _PY_VERSION[2]
_PREFIX = os.path.normpath(sys.prefix)
_BASE_PREFIX = os.path.normpath(sys.base_prefix)
_EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
_BASE_EXEC_PREFIX = os.path.normpath(sys.base_exec_prefix)
_CONFIG_VARS = None
_USER_BASE = None
def _get_implementation():
if sys.implementation.name == 'ironpython':
return 'IronPython'
return 'Python'
def _safe_realpath(path):
try:
return realpath(path)
except OSError:
return path
# Determine the directory the interpreter was launched from; for source
# builds on Windows this is then walked up out of the PCbuild directories.
if sys.executable:
    _PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable))
else:
    # sys.executable can be empty if argv[0] has been changed and Python is
    # unable to retrieve the real program name
    _PROJECT_BASE = _safe_realpath(os.getcwd())

if os.name == "nt" and "pcbuild" in _PROJECT_BASE[-8:].lower():
    _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir))
# PC/VS7.1
if os.name == "nt" and "\\pc\\v" in _PROJECT_BASE[-10:].lower():
    _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))
# PC/AMD64
if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower():
    _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))

# set for cross builds
if "_PYTHON_PROJECT_BASE" in os.environ:
    _PROJECT_BASE = _safe_realpath(os.environ["_PYTHON_PROJECT_BASE"])
def _is_python_source_dir(d):
for fn in ("Setup.dist", "Setup.local"):
if os.path.isfile(os.path.join(d, "Modules", fn)):
return True
return False
# sys._home is set when running from an uninstalled build tree.
_sys_home = getattr(sys, '_home', None)
if _sys_home and os.name == 'nt' and \
    _sys_home.lower().endswith(('pcbuild', 'pcbuild\\amd64')):
    # Strip the Windows build subdirectories to reach the source root.
    _sys_home = os.path.dirname(_sys_home)
    if _sys_home.endswith('pcbuild'):   # must be amd64
        _sys_home = os.path.dirname(_sys_home)


def is_python_build(check_home=False):
    """Return True when running from a source build rather than an install."""
    if check_home and _sys_home:
        return _is_python_source_dir(_sys_home)
    return _is_python_source_dir(_PROJECT_BASE)


_PYTHON_BUILD = is_python_build(True)

if _PYTHON_BUILD:
    # In a source build the C headers live in the source tree, not under
    # the installation prefix, so patch the POSIX schemes accordingly.
    for scheme in ('posix_prefix', 'posix_home'):
        _INSTALL_SCHEMES[scheme]['include'] = '{srcdir}/Include'
        _INSTALL_SCHEMES[scheme]['platinclude'] = '{projectbase}/.'
def _subst_vars(s, local_vars):
try:
return s.format(**local_vars)
except KeyError:
try:
return s.format(**os.environ)
except KeyError as var:
raise AttributeError('{%s}' % var)
def _extend_dict(target_dict, other_dict):
target_keys = target_dict.keys()
for key, value in other_dict.items():
if key in target_keys:
continue
target_dict[key] = value
def _expand_vars(scheme, vars):
    """Expand every path template of *scheme* using *vars* + config vars.

    *vars* entries take precedence over get_config_vars(); note that *vars*
    is mutated in place (config vars are merged into it).
    """
    res = {}
    if vars is None:
        vars = {}
    _extend_dict(vars, get_config_vars())

    for key, value in _INSTALL_SCHEMES[scheme].items():
        if os.name in ('posix', 'nt'):
            value = os.path.expanduser(value)
        res[key] = os.path.normpath(_subst_vars(value, vars))
    return res
def _get_default_scheme():
if os.name == 'posix':
# the default scheme for posix is posix_prefix
return 'posix_prefix'
return os.name
def _getuserbase():
    """Compute the per-user base directory (PEP 370 user site).

    PYTHONUSERBASE always wins when set; otherwise the location is
    platform-specific.
    """
    env_base = os.environ.get("PYTHONUSERBASE", None)

    def joinuser(*args):
        # Join the parts and expand a leading '~' to the home directory.
        return os.path.expanduser(os.path.join(*args))

    if os.name == "nt":
        base = os.environ.get("APPDATA") or "~"
        if env_base:
            return env_base
        else:
            return joinuser(base, "Python")

    if sys.platform == "darwin":
        framework = get_config_var("PYTHONFRAMEWORK")
        if framework:
            if env_base:
                return env_base
            else:
                return joinuser("~", "Library", framework, "%d.%d" %
                                sys.version_info[:2])
        # Non-framework macOS builds fall through to the generic POSIX case.

    if env_base:
        return env_base
    else:
        return joinuser("~", ".local")
def _parse_makefile(filename, vars=None):
"""Parse a Makefile-style file.
A dictionary containing name/value pairs is returned. If an
optional dictionary is passed in as the second argument, it is
used instead of a new dictionary.
"""
# Regexes needed for parsing Makefile (and similar syntaxes,
# like old-style Setup files).
import re
_variable_rx = re.compile("([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
_findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
_findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")
if vars is None:
vars = {}
done = {}
notdone = {}
with open(filename, errors="surrogateescape") as f:
lines = f.readlines()
for line in lines:
if line.startswith('#') or line.strip() == '':
continue
m = _variable_rx.match(line)
if m:
n, v = m.group(1, 2)
v = v.strip()
# `$$' is a literal `$' in make
tmpv = v.replace('$$', '')
if "$" in tmpv:
notdone[n] = v
else:
try:
v = int(v)
except ValueError:
# insert literal `$'
done[n] = v.replace('$$', '$')
else:
done[n] = v
# do variable interpolation here
variables = list(notdone.keys())
# Variables with a 'PY_' prefix in the makefile. These need to
# be made available without that prefix through sysconfig.
# Special care is needed to ensure that variable expansion works, even
# if the expansion uses the name without a prefix.
renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS')
while len(variables) > 0:
for name in tuple(variables):
value = notdone[name]
m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
if m is not None:
n = m.group(1)
found = True
if n in done:
item = str(done[n])
elif n in notdone:
# get it on a subsequent round
found = False
elif n in os.environ:
# do it like make: fall back to environment
item = os.environ[n]
elif n in renamed_variables:
if (name.startswith('PY_') and
name[3:] in renamed_variables):
item = ""
elif 'PY_' + n in notdone:
found = False
else:
item = str(done['PY_' + n])
else:
done[n] = item = ""
if found:
after = value[m.end():]
value = value[:m.start()] + item + after
if "$" in after:
notdone[name] = value
else:
try:
value = int(value)
except ValueError:
done[name] = value.strip()
else:
done[name] = value
variables.remove(name)
if name.startswith('PY_') \
and name[3:] in renamed_variables:
name = name[3:]
if name not in done:
done[name] = value
else:
# bogus variable reference (e.g. "prefix=$/opt/python");
# just drop it since we can't deal
done[name] = value
variables.remove(name)
# strip spurious spaces
for k, v in done.items():
if isinstance(v, str):
done[k] = v.strip()
# save the results in the global dictionary
vars.update(done)
return vars
def get_makefile_filename():
    """Return the path of the Makefile."""
    if _PYTHON_BUILD:
        # Source build: the Makefile sits in the build tree.
        return os.path.join(_sys_home or _PROJECT_BASE, "Makefile")
    if hasattr(sys, 'abiflags'):
        config_dir_name = 'config-%s%s' % (_PY_VERSION_SHORT, sys.abiflags)
    else:
        config_dir_name = 'config'
    return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile')


def _generate_posix_vars():
    """Generate the Python module containing build-time variables."""
    import pprint
    vars = {}
    # load the installed Makefile:
    makefile = get_makefile_filename()
    try:
        _parse_makefile(makefile, vars)
    except OSError as e:
        msg = "invalid Python installation: unable to open %s" % makefile
        if hasattr(e, "strerror"):
            msg = msg + " (%s)" % e.strerror
        raise OSError(msg)
    # load the installed pyconfig.h:
    config_h = get_config_h_filename()
    try:
        with open(config_h) as f:
            parse_config_h(f, vars)
    except OSError as e:
        msg = "invalid Python installation: unable to open %s" % config_h
        if hasattr(e, "strerror"):
            msg = msg + " (%s)" % e.strerror
        raise OSError(msg)
    # On AIX, there are wrong paths to the linker scripts in the Makefile
    # -- these paths are relative to the Python source, but when installed
    # the scripts are in another directory.
    if _PYTHON_BUILD:
        vars['BLDSHARED'] = vars['LDSHARED']

    # There's a chicken-and-egg situation on OS X with regards to the
    # _sysconfigdata module after the changes introduced by #15298:
    # get_config_vars() is called by get_platform() as part of the
    # `make pybuilddir.txt` target -- which is a precursor to the
    # _sysconfigdata.py module being constructed. Unfortunately,
    # get_config_vars() eventually calls _init_posix(), which attempts
    # to import _sysconfigdata, which we won't have built yet. In order
    # for _init_posix() to work, if we're on Darwin, just mock up the
    # _sysconfigdata module manually and populate it with the build vars.
    # This is more than sufficient for ensuring the subsequent call to
    # get_platform() succeeds.
    name = '_sysconfigdata'
    if 'darwin' in sys.platform:
        import types
        module = types.ModuleType(name)
        module.build_time_vars = vars
        sys.modules[name] = module

    pybuilddir = 'build/lib.%s-%s' % (get_platform(), sys.version[:3])
    if hasattr(sys, "gettotalrefcount"):
        # Debug builds get a distinct build directory suffix.
        pybuilddir += '-pydebug'
    os.makedirs(pybuilddir, exist_ok=True)
    destfile = os.path.join(pybuilddir, name + '.py')

    with open(destfile, 'w', encoding='utf8') as f:
        f.write('# system configuration generated and used by'
                ' the sysconfig module\n')
        f.write('build_time_vars = ')
        pprint.pprint(vars, stream=f)

    # Create file used for sys.path fixup -- see Modules/getpath.c
    with open('pybuilddir.txt', 'w', encoding='ascii') as f:
        f.write(pybuilddir)


def _init_posix(vars):
    """Initialize the module as appropriate for POSIX systems."""
    # _sysconfigdata is generated at build time, see _generate_posix_vars()
    from _sysconfigdata import build_time_vars
    vars.update(build_time_vars)


def _init_non_posix(vars):
    """Initialize the module as appropriate for NT"""
    # set basic install directories
    vars['LIBDEST'] = get_path('stdlib')
    vars['BINLIBDEST'] = get_path('platstdlib')
    vars['INCLUDEPY'] = get_path('include')
    vars['EXT_SUFFIX'] = '.pyd'
    vars['EXE'] = '.exe'
    vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT
    vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable))
#
# public APIs
#
def parse_config_h(fp, vars=None):
    """Parse a config.h-style file.

    A dictionary containing name/value pairs is returned. If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.
    """
    import re
    if vars is None:
        vars = {}
    define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n")
    undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n")
    for line in fp:
        defined = define_rx.match(line)
        if defined:
            name, value = defined.group(1, 2)
            # Store numeric values as ints, everything else as strings.
            try:
                value = int(value)
            except ValueError:
                pass
            vars[name] = value
            continue
        undefined = undef_rx.match(line)
        if undefined:
            vars[undefined.group(1)] = 0
    return vars
def get_config_h_filename():
    """Return the path of pyconfig.h."""
    if _PYTHON_BUILD:
        if os.name == "nt":
            # Windows source builds keep pyconfig.h under PC/.
            inc_dir = os.path.join(_sys_home or _PROJECT_BASE, "PC")
        else:
            inc_dir = _sys_home or _PROJECT_BASE
    else:
        # Installed Python: header lives in the platform include directory.
        inc_dir = get_path('platinclude')
    return os.path.join(inc_dir, 'pyconfig.h')
def get_scheme_names():
    """Return a tuple containing the schemes names."""
    return tuple(sorted(_INSTALL_SCHEMES))


def get_path_names():
    """Return a tuple containing the paths names."""
    return _SCHEME_KEYS


def get_paths(scheme=_get_default_scheme(), vars=None, expand=True):
    """Return a mapping containing an install scheme.

    ``scheme`` is the install scheme name. If not provided, it will
    return the default scheme for the current platform.

    NOTE: the default scheme is captured once at import time (function
    default evaluation), not per call.
    """
    if expand:
        return _expand_vars(scheme, vars)
    else:
        return _INSTALL_SCHEMES[scheme]


def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True):
    """Return a path corresponding to the scheme.

    ``scheme`` is the install scheme name.
    """
    return get_paths(scheme, vars, expand)[name]
def get_config_vars(*args):
    """With no arguments, return a dictionary of all configuration
    variables relevant for the current platform.

    On Unix, this means every variable defined in Python's installed Makefile;
    On Windows it's a much smaller set.

    With arguments, return a list of values that result from looking up
    each argument in the configuration variable dictionary.
    """
    global _CONFIG_VARS
    if _CONFIG_VARS is None:
        _CONFIG_VARS = {}
        # Normalized versions of prefix and exec_prefix are handy to have;
        # in fact, these are the standard versions used most places in the
        # Distutils.
        _CONFIG_VARS['prefix'] = _PREFIX
        _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX
        _CONFIG_VARS['py_version'] = _PY_VERSION
        _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT
        # NOTE(review): character indexing assumes a single-digit minor
        # version; this yields the wrong value for 3.10+ (prefer
        # '%d%d' % sys.version_info[:2]).
        _CONFIG_VARS['py_version_nodot'] = _PY_VERSION[0] + _PY_VERSION[2]
        _CONFIG_VARS['installed_base'] = _BASE_PREFIX
        _CONFIG_VARS['base'] = _PREFIX
        _CONFIG_VARS['installed_platbase'] = _BASE_EXEC_PREFIX
        _CONFIG_VARS['platbase'] = _EXEC_PREFIX
        _CONFIG_VARS['projectbase'] = _PROJECT_BASE
        try:
            _CONFIG_VARS['abiflags'] = sys.abiflags
        except AttributeError:
            # sys.abiflags may not be defined on all platforms.
            _CONFIG_VARS['abiflags'] = ''
        _CONFIG_VARS['implementation'] = _get_implementation()
        _CONFIG_VARS['implementation_lower'] = _get_implementation().lower()

        if os.name == 'nt':
            _init_non_posix(_CONFIG_VARS)
        if os.name == 'posix':
            _init_posix(_CONFIG_VARS)
        # For backward compatibility, see issue19555
        SO = _CONFIG_VARS.get('EXT_SUFFIX')
        if SO is not None:
            _CONFIG_VARS['SO'] = SO

        # Setting 'userbase' is done below the call to the
        # init function to enable using 'get_config_var' in
        # the init-function.
        _CONFIG_VARS['userbase'] = _getuserbase()

        # Always convert srcdir to an absolute path
        srcdir = _CONFIG_VARS.get('srcdir', _PROJECT_BASE)
        if os.name == 'posix':
            if _PYTHON_BUILD:
                # If srcdir is a relative path (typically '.' or '..')
                # then it should be interpreted relative to the directory
                # containing Makefile.
                base = os.path.dirname(get_makefile_filename())
                srcdir = os.path.join(base, srcdir)
            else:
                # srcdir is not meaningful since the installation is
                # spread about the filesystem. We choose the
                # directory containing the Makefile since we know it
                # exists.
                srcdir = os.path.dirname(get_makefile_filename())
        _CONFIG_VARS['srcdir'] = _safe_realpath(srcdir)

        # OS X platforms require special customization to handle
        # multi-architecture, multi-os-version installers
        if sys.platform == 'darwin':
            import _osx_support
            _osx_support.customize_config_vars(_CONFIG_VARS)

    if args:
        vals = []
        for name in args:
            vals.append(_CONFIG_VARS.get(name))
        return vals
    else:
        return _CONFIG_VARS
def get_config_var(name):
    """Return the value of a single variable using the dictionary returned by
    'get_config_vars()'.

    Equivalent to get_config_vars().get(name)
    """
    if name == 'SO':
        # Kept only for backward compatibility (issue19555).
        import warnings
        warnings.warn('SO is deprecated, use EXT_SUFFIX', DeprecationWarning, 2)
    config = get_config_vars()
    return config.get(name)
def get_platform():
    """Return a string that identifies the current platform.

    This is used mainly to distinguish platform-specific build directories and
    platform-specific built distributions. Typically includes the OS name
    and version and the architecture (as supplied by 'os.uname()'),
    although the exact information included depends on the OS; eg. for IRIX
    the architecture isn't particularly important (IRIX only runs on SGI
    hardware), but for Linux the kernel version isn't particularly
    important.

    Examples of returned values:
        linux-i586
        linux-alpha (?)
        solaris-2.6-sun4u
        irix-5.3
        irix64-6.2

    Windows will return one of:
        win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
        win-ia64 (64bit Windows on Itanium)
        win32 (all others - specifically, sys.platform is returned)

    For other non-POSIX platforms, currently just returns 'sys.platform'.
    """
    if os.name == 'nt':
        # sniff sys.version for architecture.
        prefix = " bit ("
        i = sys.version.find(prefix)
        if i == -1:
            return sys.platform
        j = sys.version.find(")", i)
        look = sys.version[i+len(prefix):j].lower()
        if look == 'amd64':
            return 'win-amd64'
        if look == 'itanium':
            return 'win-ia64'
        return sys.platform

    if os.name != "posix" or not hasattr(os, 'uname'):
        # XXX what about the architecture? NT is Intel or Alpha
        return sys.platform

    # Set for cross builds explicitly
    if "_PYTHON_HOST_PLATFORM" in os.environ:
        return os.environ["_PYTHON_HOST_PLATFORM"]

    # Try to distinguish various flavours of Unix
    osname, host, release, version, machine = os.uname()

    # Convert the OS name to lowercase, remove '/' characters
    # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh")
    osname = osname.lower().replace('/', '')
    machine = machine.replace(' ', '_')
    machine = machine.replace('/', '-')

    if osname[:5] == "linux":
        # At least on Linux/Intel, 'machine' is the processor --
        # i386, etc.
        # XXX what about Alpha, SPARC, etc?
        return "%s-%s" % (osname, machine)
    elif osname[:5] == "sunos":
        if release[0] >= "5":  # SunOS 5 == Solaris 2
            osname = "solaris"
            release = "%d.%s" % (int(release[0]) - 3, release[2:])
            # We can't use "platform.architecture()[0]" because a
            # bootstrap problem. We use a dict to get an error
            # if some suspicious happens.
            bitness = {2147483647:"32bit", 9223372036854775807:"64bit"}
            machine += ".%s" % bitness[sys.maxsize]
        # fall through to standard osname-release-machine representation
    elif osname[:4] == "irix":  # could be "irix64"!
        return "%s-%s" % (osname, release)
    elif osname[:3] == "aix":
        return "%s-%s.%s" % (osname, version, release)
    elif osname[:6] == "cygwin":
        osname = "cygwin"
        import re
        rel_re = re.compile(r'[\d.]+')
        m = rel_re.match(release)
        if m:
            release = m.group()
    elif osname[:6] == "darwin":
        import _osx_support
        osname, release, machine = _osx_support.get_platform_osx(
                                            get_config_vars(),
                                            osname, release, machine)

    return "%s-%s-%s" % (osname, release, machine)
def get_python_version():
    """Return the short 'major.minor' Python version string, e.g. '3.4'."""
    return _PY_VERSION_SHORT
def _print_dict(title, data):
for index, (key, value) in enumerate(sorted(data.items())):
if index == 0:
print('%s: ' % (title))
print('\t%s = "%s"' % (key, value))
def _main():
    """Display all information sysconfig detains."""
    if '--generate-posix-vars' in sys.argv:
        # Build-time mode: write the _sysconfigdata module and exit.
        _generate_posix_vars()
        return
    print('Platform: "%s"' % get_platform())
    print('Python version: "%s"' % get_python_version())
    print('Current installation scheme: "%s"' % _get_default_scheme())
    print()
    _print_dict('Paths', get_paths())
    print()
    _print_dict('Variables', get_config_vars())
if __name__ == '__main__':
_main()
| {
"repo_name": "IronLanguages/ironpython3",
"path": "Src/StdLib/Lib/sysconfig.py",
"copies": "1",
"size": "25158",
"license": "apache-2.0",
"hash": 3062484578502443000,
"line_mean": 34.186013986,
"line_max": 94,
"alpha_frac": 0.575999682,
"autogenerated": false,
"ratio": 3.8228232791369092,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9893776128140791,
"avg_score": 0.001009366599223692,
"num_lines": 715
} |
"""Access to realtime market information."""
from dataclasses import dataclass, field
from datetime import datetime
from typing import ClassVar, List, Optional, Union
from eventkit import Event, Op
from ib_insync.contract import Contract
from ib_insync.objects import (
DOMLevel, Dividends, FundamentalRatios, MktDepthData,
OptionComputation, TickByTickAllLast, TickByTickBidAsk, TickByTickMidPoint,
TickData)
from ib_insync.util import dataclassRepr, isNan
__all__ = ['Ticker']
nan = float('nan')
@dataclass
class Ticker:
    """
    Current market data such as bid, ask, last price, etc. for a contract.
    Streaming level-1 ticks of type :class:`.TickData` are stored in
    the ``ticks`` list.
    Streaming level-2 ticks of type :class:`.MktDepthData` are stored in the
    ``domTicks`` list. The order book (DOM) is available as lists of
    :class:`.DOMLevel` in ``domBids`` and ``domAsks``.
    Streaming tick-by-tick ticks are stored in ``tickByTicks``.
    For options the :class:`.OptionComputation` values for the bid, ask, resp.
    last price are stored in the ``bidGreeks``, ``askGreeks`` resp.
    ``lastGreeks`` attributes. There is also ``modelGreeks`` that conveys
    the greeks as calculated by Interactive Brokers' option model.
    Events:
        * ``updateEvent`` (ticker: :class:`.Ticker`)
    """
    events: ClassVar = ('updateEvent',)
    contract: Optional[Contract] = None
    # Time of the most recent update; None until data arrives.
    time: Optional[datetime] = None
    marketDataType: int = 1
    # Current quote/trade values; ``nan`` means "not (yet) available".
    bid: float = nan
    bidSize: float = nan
    ask: float = nan
    askSize: float = nan
    last: float = nan
    lastSize: float = nan
    # Previous values of the fields above.
    prevBid: float = nan
    prevBidSize: float = nan
    prevAsk: float = nan
    prevAskSize: float = nan
    prevLast: float = nan
    prevLastSize: float = nan
    # Session statistics.
    volume: float = nan
    open: float = nan
    high: float = nan
    low: float = nan
    close: float = nan
    vwap: float = nan
    # 13/26/52-week highs and lows.
    low13week: float = nan
    high13week: float = nan
    low26week: float = nan
    high26week: float = nan
    low52week: float = nan
    high52week: float = nan
    bidYield: float = nan
    askYield: float = nan
    lastYield: float = nan
    markPrice: float = nan
    halted: float = nan
    # Realtime statistics.
    rtHistVolatility: float = nan
    rtVolume: float = nan
    rtTradeVolume: float = nan
    rtTime: Optional[datetime] = None
    avVolume: float = nan
    tradeCount: float = nan
    tradeRate: float = nan
    volumeRate: float = nan
    shortableShares: float = nan
    indexFuturePremium: float = nan
    # Option/futures statistics.
    futuresOpenInterest: float = nan
    putOpenInterest: float = nan
    callOpenInterest: float = nan
    putVolume: float = nan
    callVolume: float = nan
    avOptionVolume: float = nan
    histVolatility: float = nan
    impliedVolatility: float = nan
    dividends: Optional[Dividends] = None
    fundamentalRatios: Optional[FundamentalRatios] = None
    # Streaming tick containers (see class docstring).
    ticks: List[TickData] = field(default_factory=list)
    tickByTicks: List[Union[
        TickByTickAllLast, TickByTickBidAsk, TickByTickMidPoint]] = \
        field(default_factory=list)
    domBids: List[DOMLevel] = field(default_factory=list)
    domAsks: List[DOMLevel] = field(default_factory=list)
    domTicks: List[MktDepthData] = field(default_factory=list)
    # Option greeks per price source (see class docstring).
    bidGreeks: Optional[OptionComputation] = None
    askGreeks: Optional[OptionComputation] = None
    lastGreeks: Optional[OptionComputation] = None
    modelGreeks: Optional[OptionComputation] = None
    auctionVolume: float = nan
    auctionPrice: float = nan
    auctionImbalance: float = nan
    def __post_init__(self):
        # Per-instance event that fires on every ticker update.
        self.updateEvent = TickerUpdateEvent('updateEvent')
    def __eq__(self, other):
        # Identity semantics: a ticker is continuously mutated in place,
        # so value equality would be unstable.
        return self is other
    def __hash__(self):
        return id(self)
    __repr__ = dataclassRepr
    __str__ = dataclassRepr
    def hasBidAsk(self) -> bool:
        """See if this ticker has a valid bid and ask."""
        # -1 is used by the data feed to signal "no quote"; also reject NaN
        # prices and non-positive sizes.
        return (
            self.bid != -1 and not isNan(self.bid) and self.bidSize > 0
            and self.ask != -1 and not isNan(self.ask) and self.askSize > 0)
    def midpoint(self) -> float:
        """
        Return average of bid and ask, or NaN if no valid bid and ask
        are available.
        """
        return (self.bid + self.ask) * 0.5 if self.hasBidAsk() else nan
    def marketPrice(self) -> float:
        """
        Return the first available one of
        * last price if within current bid/ask;
        * average of bid and ask (midpoint);
        * close price.
        """
        price = self.last if (
            self.hasBidAsk() and self.bid <= self.last <= self.ask) else \
            self.midpoint()
        if isNan(price):
            price = self.close
        return price
class TickerUpdateEvent(Event):
    """Event fired on ticker updates, with helpers that filter by tick type."""
    __slots__ = ()

    def trades(self) -> "Tickfilter":
        """Emit trade ticks."""
        trade_types = (4, 5, 48, 68, 71)
        return Tickfilter(trade_types, self)

    def bids(self) -> "Tickfilter":
        """Emit bid ticks."""
        bid_types = (0, 1, 66, 69)
        return Tickfilter(bid_types, self)

    def asks(self) -> "Tickfilter":
        """Emit ask ticks."""
        ask_types = (2, 3, 67, 70)
        return Tickfilter(ask_types, self)

    def bidasks(self) -> "Tickfilter":
        """Emit bid and ask ticks."""
        quote_types = (0, 1, 66, 69, 2, 3, 67, 70)
        return Tickfilter(quote_types, self)

    def midpoints(self) -> "Tickfilter":
        """Emit midpoint ticks."""
        return Midpoints((), self)
class Tickfilter(Op):
    """Tick filtering event operators that ``emit(time, price, size)``."""
    __slots__ = ('_tickTypes',)

    def __init__(self, tickTypes, source=None):
        Op.__init__(self, source)
        # Store as a set for O(1) membership tests per tick.
        self._tickTypes = set(tickTypes)

    def on_source(self, ticker):
        # Forward only those ticks whose type is in the configured set.
        wanted = self._tickTypes
        for tick in ticker.ticks:
            if tick.tickType not in wanted:
                continue
            self.emit(tick.time, tick.price, tick.size)

    def timebars(self, timer: Event) -> "TimeBars":
        """
        Aggregate ticks into time bars, where the timing of new bars
        is derived from a timer event.
        Emits a completed :class:`Bar`.
        This event stores a :class:`BarList` of all created bars in the
        ``bars`` property.
        Args:
            timer: Event for timing when a new bar starts.
        """
        return TimeBars(timer, self)

    def tickbars(self, count: int) -> "TickBars":
        """
        Aggregate ticks into bars that have the same number of ticks.
        Emits a completed :class:`Bar`.
        This event stores a :class:`BarList` of all created bars in the
        ``bars`` property.
        Args:
            count: Number of ticks to use to form one bar.
        """
        return TickBars(count, self)
class Midpoints(Tickfilter):
    """Emit ``(time, midpoint, 0)`` whenever the ticker received any ticks."""
    __slots__ = ()

    def on_source(self, ticker):
        # Guard clause: nothing to emit when no ticks arrived in this update.
        if not ticker.ticks:
            return
        self.emit(ticker.time, ticker.midpoint(), 0)
@dataclass
class Bar:
    # One OHLC bar aggregated from ticks; ``nan`` price fields mean
    # "no tick received yet" (see TimeBars/TickBars).
    time: Optional[datetime]  # start time of the bar (set on creation)
    open: float = nan
    high: float = nan
    low: float = nan
    close: float = nan
    volume: int = 0  # accumulated size of the aggregated ticks
    count: int = 0  # number of aggregated ticks
class BarList(List[Bar]):
    """List of :class:`Bar` carrying an ``updateEvent`` for live updates."""

    def __init__(self, *args):
        super().__init__(*args)
        self.updateEvent = Event('updateEvent')

    def __eq__(self, other):
        # Identity semantics: a bar list is mutated in place, so value
        # equality would be unstable.
        return other is self

    def __hash__(self):
        return id(self)
class TimeBars(Op):
    # Aggregates (time, price, size) ticks into time bars; bar boundaries
    # are driven by the external ``timer`` event.  The class docstring is
    # taken from Tickfilter.timebars (assigned below).
    __slots__ = ('_timer', 'bars',)
    __doc__ = Tickfilter.timebars.__doc__
    bars: BarList
    def __init__(self, timer, source=None):
        Op.__init__(self, source)
        self._timer = timer
        # The timer opens/closes bars; its done-callback ends this stream.
        self._timer.connect(self._on_timer, None, self._on_timer_done)
        self.bars = BarList()
    def on_source(self, time, price, size):
        # Ignore ticks until the first timer event has opened a bar.
        if not self.bars:
            return
        bar = self.bars[-1]
        # First tick of this bar: initialize open/high/low to its price.
        if isNan(bar.open):
            bar.open = bar.high = bar.low = price
        bar.high = max(bar.high, price)
        bar.low = min(bar.low, price)
        bar.close = price
        bar.volume += size
        bar.count += 1
        # Partial-bar update (done=False).
        self.bars.updateEvent.emit(self.bars, False)
    def _on_timer(self, time):
        if self.bars:
            bar = self.bars[-1]
            # Bar received no ticks: carry over the previous bar's close.
            if isNan(bar.close) and len(self.bars) > 1:
                bar.open = bar.high = bar.low = bar.close = \
                    self.bars[-2].close
            # Completed-bar update (done=True), then emit the finished bar.
            self.bars.updateEvent.emit(self.bars, True)
            self.emit(bar)
        # Open a fresh bar starting at the timer's timestamp.
        self.bars.append(Bar(time))
    def _on_timer_done(self, timer):
        self._timer = None
        self.set_done()
class TickBars(Op):
    # Class docstring is reused from Tickfilter.tickbars (assigned below).
    __slots__ = ('_count', 'bars')
    __doc__ = Tickfilter.tickbars.__doc__
    bars: BarList

    def __init__(self, count, source=None):
        Op.__init__(self, source)
        self._count = count
        self.bars = BarList()

    def on_source(self, time, price, size):
        bars = self.bars
        # Open a fresh bar when there is none yet or the last one is full.
        if not bars or bars[-1].count == self._count:
            bar = Bar(time, price, price, price, price, size, 1)
            bars.append(bar)
        else:
            bar = bars[-1]
            bar.high = max(bar.high, price)
            bar.low = min(bar.low, price)
            bar.close = price
            bar.volume += size
            bar.count += 1
        # Publish and emit only completed bars.
        if bar.count == self._count:
            bars.updateEvent.emit(bars, True)
            self.emit(bars)
| {
"repo_name": "erdewit/ib_insync",
"path": "ib_insync/ticker.py",
"copies": "1",
"size": "9222",
"license": "bsd-2-clause",
"hash": -2300298133865389800,
"line_mean": 28.1835443038,
"line_max": 79,
"alpha_frac": 0.5975927131,
"autogenerated": false,
"ratio": 3.617889368379757,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4715482081479757,
"avg_score": null,
"num_lines": null
} |
"""Access to the base Slack Web API.
Attributes:
ALL (:py:class:`object`): Marker for cases where all child methods
should be deleted by :py:func:`api_subclass_factory`.
"""
from copy import deepcopy
import logging
import aiohttp
from .core import Service, UrlParamMixin
from .utils import FriendlyError, raise_for_status
# Module-level logger.
logger = logging.getLogger(__name__)
# Sentinel meaning "delete all child methods" (see api_subclass_factory).
ALL = object()
class SlackApiError(FriendlyError):
    """Wrapper exception for error messages in the response JSON."""
    # Maps Slack error codes to human-readable messages; presumably consumed
    # by the FriendlyError base class (see .utils) -- confirm there.
    EXPECTED_ERRORS = {
        'account_inactive': 'Authentication token is for a deleted user or '
                            'team.',
        'invalid_auth': 'Invalid authentication token.',
        'migration_in_progress': 'Team is being migrated between servers.',
        'not_authed': "No authentication token provided.",
    }
    """Friendly messages for expected Slack API errors."""
class SlackApi(UrlParamMixin, Service):
    """Class to handle interaction with Slack's API.
    Attributes:
        API_METHODS (:py:class:`dict`): The API methods defined by Slack.
    """
    # Nested mapping: root method -> child method -> help text.  Leaves are
    # strings; method_exists() walks this structure.
    API_METHODS = {
        'api': {'test': 'Checks API calling code.'},
        'auth': {'test': 'Checks authentication & identity.'},
        'channels': {
            'archive': 'Archives a channel.',
            'create': 'Creates a channel.',
            'history': 'Fetches history of messages and events from a channel.',
            'info': 'Gets information about a channel.',
            'invite': 'Invites a user to a channel.',
            'join': 'Joins a channel, creating it if needed.',
            'kick': 'Removes a user from a channel.',
            'leave': 'Leaves a channel.',
            'list': 'Lists all channels in a Slack team.',
            'mark': 'Sets the read cursor in a channel.',
            'rename': 'Renames a channel.',
            'setPurpose': 'Sets the purpose for a channel.',
            'setTopic': 'Sets the topic for a channel.',
            'unarchive': 'Unarchives a channel.',
        },
        'chat': {
            'delete': 'Deletes a message.',
            'postMessage': 'Sends a message to a channel.',
            'update': 'Updates a message.'
        },
        'emoji': {'list': ' Lists custom emoji for a team.'},
        'files': {
            'delete': 'Deletes a file.',
            'info': 'Gets information about a team file.',
            'list': 'Lists & filters team files.',
            'upload': 'Uploads or creates a file.'
        },
        'groups': {
            'archive': 'Archives a private channel.',
            'close': 'Closes a private channel.',
            'create': 'Creates a private private channel.',
            'createChild': 'Clones and archives a private channel.',
            'history': 'Fetches history of messages and events from a private '
                       'channel.',
            'info': 'Gets information about a private channel.',
            'invite': 'Invites a user to a private channel.',
            'kick': 'Removes a user from a private channel.',
            'leave': 'Leaves a private channel.',
            'list': 'Lists private channels that the calling user has access '
                    'to.',
            'mark': 'Sets the read cursor in a private channel.',
            'open': 'Opens a private channel.',
            'rename': 'Renames a private channel.',
            'setPurpose': 'Sets the purpose for a private channel.',
            'setTopic': 'Sets the topic for a private channel.',
            'unarchive': 'Unarchives a private channel.',
        },
        'im': {
            'close': 'Close a direct message channel.',
            'history': 'Fetches history of messages and events from direct '
                       'message channel.',
            'list': 'Lists direct message channels for the calling user.',
            'mark': 'Sets the read cursor in a direct message channel.',
            'open': 'Opens a direct message channel.',
        },
        'mpim': {
            'close': 'Closes a multiparty direct message channel.',
            'history': 'Fetches history of messages and events from a '
                       'multiparty direct message.',
            'list': 'Lists multiparty direct message channels for the calling '
                    'user.',
            'mark': 'Sets the read cursor in a multiparty direct message '
                    'channel.',
            'open': 'This method opens a multiparty direct message.',
        },
        'oauth': {
            'access': 'Exchanges a temporary OAuth code for an API token.'
        },
        'pins': {
            'add': 'Pins an item to a channel.',
            'list': 'Lists items pinned to a channel.',
            'remove': 'Un-pins an item from a channel.',
        },
        'reactions': {
            'add': 'Adds a reaction to an item.',
            'get': 'Gets reactions for an item.',
            'list': 'Lists reactions made by a user.',
            'remove': 'Removes a reaction from an item.',
        },
        'rtm': {'start': 'Starts a Real Time Messaging session.'},
        'search': {
            'all': 'Searches for messages and files matching a query.',
            'files': 'Searches for files matching a query.',
            'messages': 'Searches for messages matching a query.',
        },
        'stars': {
            'add': 'Adds a star to an item.',
            'list': 'Lists stars for a user.',
            'remove': 'Removes a star from an item.',
        },
        'team': {
            'accessLogs': 'Gets the access logs for the current team.',
            'info': 'Gets information about the current team.',
            'integrationLogs': 'Gets the integration logs for the current '
                               'team.',
        },
        'usergroups': {
            'create': 'Create a user group.',
            'disable': 'Disable an existing user group.',
            'enable': 'Enable a user group.',
            'list': 'List all user groups for a team.',
            'update': 'Update an existing user group',
            'users': {
                'list': 'List all users in a user group',
                'update': ' Update the list of users for a user group.',
            },
        },
        'users': {
            'getPresence': 'Gets user presence information.',
            'info': 'Gets information about a user.',
            'list': 'Lists all users in a Slack team.',
            'setActive': 'Marks a user as active.',
            'setPresence': 'Manually sets user presence.',
        },
    }
    # Configuration presumably consumed by the Service/UrlParamMixin base
    # classes (see .core) -- confirm there.
    AUTH_PARAM = 'token'
    REQUIRED = {'api_token'}
    ROOT = 'https://slack.com/api/'
    TOKEN_ENV_VAR = 'SLACK_API_TOKEN'
    async def execute_method(self, method, **params):
        """Execute a specified Slack Web API method.
        Arguments:
            method (:py:class:`str`): The name of the method.
            **params (:py:class:`dict`): Any additional parameters
                required.
        Returns:
            :py:class:`dict`: The JSON data from the response.
        Raises:
            :py:class:`aiohttp.web_exceptions.HTTPException`: If the HTTP
                request returns a code other than 200 (OK).
            SlackApiError: If the Slack API is reached but the response
                contains an error message.
        """
        url = self.url_builder(method, url_params=params)
        logger.info('Executing method %r', method)
        # NOTE(review): module-level aiohttp.get() was removed in aiohttp 3.x;
        # this code requires an old aiohttp (or a ClientSession-based rewrite)
        # -- confirm the pinned dependency version.
        response = await aiohttp.get(url)
        logger.info('Status: %r', response.status)
        if response.status == 200:
            json = await response.json()
            logger.debug('...with JSON %r', json)
            # Slack reports failures in-band via the 'ok' flag.
            if json.get('ok'):
                return json
            raise SlackApiError(json['error'])
        else:
            raise_for_status(response)
    @classmethod
    def method_exists(cls, method):
        """Whether a given method exists in the known API.
        Arguments:
            method (:py:class:`str`): The name of the method.
        Returns:
            :py:class:`bool`: Whether the method is in the known API.
        """
        # Walk the nested API_METHODS mapping; reaching a str leaf means the
        # dotted name denotes a documented method.
        # NOTE(review): a path that continues past a string leaf (e.g.
        # 'api.test.x') would call .get on a str and raise AttributeError --
        # presumably inputs are at most the documented depth.
        methods = cls.API_METHODS
        for key in method.split('.'):
            methods = methods.get(key)
            if methods is None:
                break
        if isinstance(methods, str):
            logger.debug('%r: %r', method, methods)
            return True
        return False
def api_subclass_factory(name, docstring, remove_methods, base=SlackApi):
    """Create an API subclass with fewer methods than its base class.
    Arguments:
        name (:py:class:`str`): The name of the new class.
        docstring (:py:class:`str`): The docstring for the new class.
        remove_methods (:py:class:`dict`): The methods to remove from
            the base class's :py:attr:`API_METHODS` for the subclass. The
            key is the name of the root method (e.g. ``'auth'`` for
            ``'auth.test'``, the value is either a tuple of child method
            names (e.g. ``('test',)``) or, if all children should be
            removed, the special value :py:const:`ALL`.
        base (:py:class:`type`, optional): The base class (defaults to
            :py:class:`SlackApi`).
    Returns:
        :py:class:`type`: The new subclass.
    Raises:
        :py:class:`KeyError`: If the method wasn't in the superclass.
    """
    # Deep copy so that removals never mutate the base class's mapping.
    methods = deepcopy(base.API_METHODS)
    for parent, removals in remove_methods.items():
        if removals is ALL:
            del methods[parent]
            continue
        for child in removals:
            del methods[parent][child]
    namespace = dict(API_METHODS=methods, __doc__=docstring)
    return type(name, (base,), namespace)
# Restricted API surface for custom bots: removes methods such bots may not
# call (presumably mirroring Slack's bot permission model at the time).
SlackBotApi = api_subclass_factory(  # pylint: disable=invalid-name
    'SlackBotApi',
    'API accessible to Slack custom bots.',
    remove_methods=dict(
        channels=('archive', 'create', 'invite', 'join', 'kick', 'leave',
                  'rename', 'unarchive'),
        files=('info', 'list'),
        groups=('archive', 'create', 'createChild', 'invite', 'kick', 'leave',
                'rename', 'unarchive'),
        pins=('list',),
        search=ALL,
        stars=('list',),
        team=('accessLogs', 'integrationLogs'),
        usergroups=ALL,
    ),
)
# Further restriction for app bots, derived from SlackBotApi.
SlackAppBotApi = api_subclass_factory(  # pylint: disable=invalid-name
    'SlackAppBotApi',
    'API accessible to Slack app bots.',
    remove_methods=dict(
        channels=('history', 'mark', 'setPurpose', 'setTopic'),
        emoji=ALL,
        groups=('close', 'history', 'mark', 'open', 'setPurpose', 'setTopic'),
        team=ALL,
    ),
    base=SlackBotApi,
)
| {
"repo_name": "textbook/aslack",
"path": "aslack/slack_api.py",
"copies": "1",
"size": "10598",
"license": "isc",
"hash": -6237074297638674000,
"line_mean": 36.1859649123,
"line_max": 80,
"alpha_frac": 0.5536893754,
"autogenerated": false,
"ratio": 4.361316872427984,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00004331817197314273,
"num_lines": 285
} |
"""Access to the beeminder.com api"""
import datetime
import requests
# Root URL of the Beeminder REST API (v1); all endpoint URLs in this module
# are built by appending to it.
BEE42_INI_API_URL = "https://www.beeminder.com/api/v1/"
class User:
    """A readonly Beeminder user that is loaded using the API.
    It provides access to the read/write goals and thus datapoints"""
    def __init__(self, username, token):
        """Construct a read-only Beeminder user object and fill it using
        the api.
        Arguments:
            username: the Beeminder user name.
            token: the user's API auth token.
        """
        self.username = username
        self.token = token
        self.user_url = BEE42_INI_API_URL + "users/" + self.username + ".json"
        # Shared auth parameter dict, reused by every request for this user.
        self.auth_param = {'auth_token': self.token}
        self.api_get_user()
    def api_get_user(self):
        """Use the Beeminder api to fill this user object."""
        response = requests.get(self.user_url, params=self.auth_param)
        user_dict = response.json()
        self.timezone = user_dict["timezone"]
        self.updated_at = datetime.datetime.fromtimestamp(user_dict["updated_at"])
        self.goalslugs = user_dict["goals"]
        self.deadbeat = user_dict["deadbeat"]
    def load_goal(self, slug):
        """Retrieve a single goal from the Beeminder API."""
        goal_url = BEE42_INI_API_URL + "users/" + self.username + "/goals/" + slug + ".json"
        response = requests.get(goal_url, params=self.auth_param)
        return Goal(slug, response.json())
    def getDatapoints(self, slug):
        """Retrieve all the datapoints for a goal for this user."""
        datapoint_url = BEE42_INI_API_URL + "users/" + self.username + "/goals/" + slug + "/datapoints.json"
        # Consistency fix: reuse the shared auth dict (as load_goal does)
        # instead of rebuilding an equivalent one-entry dict here.
        response = requests.get(datapoint_url, params=self.auth_param)
        return response.json()
class Goal:
    """A limited version of a Beeminder goal."""
    def __init__(self, slug, goal_dict):
        """Create a goal from the response to an api get goal."""
        self.slug = slug
        # Copy the plain fields straight from the response dict.
        for key in ("title", "description", "goalval"):
            setattr(self, key, goal_dict[key])
        # The API reports the goal's start day as a unix timestamp.
        self.initday = datetime.datetime.fromtimestamp(goal_dict["initday"])
class Datapoint:
    """An actual activity, or weight or value associated with a goal."""
    # Optional fields sent to the API; all start out as None.
    _FIELDS = ("id", "timestamp", "daystamp", "value",
               "comment", "updated_at", "requestid")

    def __init__(self, user, slug):
        """Create a datapoint object."""
        self.user = user
        self.slug = slug
        for name in self._FIELDS:
            setattr(self, name, None)

    def post(self):
        """Save a datapoint object to beeminder using API."""
        datapoint_url = (BEE42_INI_API_URL + "users/" + self.user.username
                         + "/goals/" + self.slug + "/datapoints.json")
        param = {"auth_token": self.user.token}
        for name in self._FIELDS:
            param[name] = getattr(self, name)
        response = requests.post(datapoint_url, params=param)
        return response.status_code
| {
"repo_name": "davew/bee42",
"path": "bee42/bee42.py",
"copies": "1",
"size": "3222",
"license": "apache-2.0",
"hash": 8091386503146415000,
"line_mean": 34.8,
"line_max": 118,
"alpha_frac": 0.60955928,
"autogenerated": false,
"ratio": 3.7334878331402086,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48430471131402086,
"avg_score": null,
"num_lines": null
} |
# Access to the CIFAR-10 dataset.
#
# Some of the code is based on the cs231n data utils code
# [http://cs231n.github.io/]
# See https://www.cs.toronto.edu/~kriz/cifar.html for information on the
# data set and its format.
#
# * Xs (data) are arrays of 32x32x3 arrays of pixels. Logically, the values are
# between 0-255, so they would fit into a uint8. However, since we want to
# perform math on them without running into uint overflow and other problems,
# we load them as float64.
# * ys (labels) are arrays of integers in the range 0-9
import cPickle as pickle
import numpy as np
import os
def _load_CIFAR_batch(filename):
    """Load a single batch of CIFAR from the given file."""
    with open(filename, 'rb') as f:
        datadict = pickle.load(f)
    # Rows are flat 3x32x32 images; rearrange to HxWxC and widen to
    # float64 so later math does not overflow uint8.
    images = datadict['data']
    labels = np.array(datadict['labels'])
    images = images.reshape(10000, 3, 32, 32).transpose(0, 2, 3, 1).astype('float64')
    return images, labels
def load_CIFAR10(rootdir):
    """Load the whole CIFAR-10 data set.
    Given a path to the root directory containing CIFAR-10 samples in batches.
    Returns a 4-tuple: (Xtraining, Ytraining, Xtest, Ytest).
    """
    train_images = []
    train_labels = []
    # The training set is split over five batch files.
    for batch_num in range(1, 6):
        batch_path = os.path.join(rootdir, 'data_batch_%d' % (batch_num,))
        images, labels = _load_CIFAR_batch(batch_path)
        train_images.append(images)
        train_labels.append(labels)
    Xtr = np.concatenate(train_images)
    Ytr = np.concatenate(train_labels)
    Xte, Yte = _load_CIFAR_batch(os.path.join(rootdir, 'test_batch'))
    return Xtr, Ytr, Xte, Yte
def show_CIFAR10_samples(X_train, y_train):
    """Show some sample images with classifications from the dataset."""
    import matplotlib.pyplot as plt
    classes = ['plane', 'car', 'bird', 'cat', 'deer',
               'dog', 'frog', 'horse', 'ship', 'truck']
    samples_per_class = 7
    num_classes = len(classes)
    for label, class_name in enumerate(classes):
        # Pick a few random training examples of this class.
        candidates = np.flatnonzero(y_train == label)
        chosen = np.random.choice(candidates, samples_per_class, replace=False)
        for row, sample_idx in enumerate(chosen):
            # One column per class, one row per sample (1-based subplot index).
            plot_index = row * num_classes + label + 1
            plt.subplot(samples_per_class, num_classes, plot_index)
            plt.imshow(X_train[sample_idx].astype('uint8'))
            plt.axis('off')
            if row == 0:
                plt.title(class_name)
    plt.show()
# Smoke test (note: Python 2 print statements - this module targets Python 2):
# load the dataset from the default location, report shapes/dtypes, then
# display a grid of samples.
if __name__ == '__main__':
    dir = 'datasets/cifar-10-batches-py'
    X_train, y_train, X_test, y_test = load_CIFAR10(dir)
    print 'Training data shape: ', X_train.shape, X_train.dtype
    print 'Training labels shape: ', y_train.shape, y_train.dtype
    print 'Test data shape: ', X_test.shape, X_test.dtype
    print 'Test labels shape: ', y_test.shape, y_test.dtype
    print 'Showing a few samples from the dataset.....'
    show_CIFAR10_samples(X_train, y_train)
| {
"repo_name": "eliben/deep-learning-samples",
"path": "cs231n/cifar10.py",
"copies": "1",
"size": "2733",
"license": "unlicense",
"hash": -4175786139159961600,
"line_mean": 34.4935064935,
"line_max": 79,
"alpha_frac": 0.6183680937,
"autogenerated": false,
"ratio": 3.1341743119266057,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.924741506543433,
"avg_score": 0.0010254680384550513,
"num_lines": 77
} |
"""Access to the imageio library
The following main classes of imageio expose an interface for
advanced users. A brief overview:
* imageio.FormatManager - for keeping track of registered formats.
* imageio.Format - representation of a file format reader/writer
* imageio.Format.Reader - object used during the reading of a file.
* imageio.Format.Writer - object used during saving a file.
* imageio.Request - used to store the filename and other info.
"""
# standard imports
import logging
# third party imports
import imageio
import imageio_ffmpeg
import numpy as np
# toolbox imports
from ..base import image, video
# logging
LOG = logging.getLogger(__name__)
class ImageIO(image.ImageReader, image.ImageWriter):
    """Image reading and writing backed by the imageio library."""

    def read(self, filename: str, **kwargs) -> np.ndarray:
        """Read the image file `filename` into a numpy array."""
        return imageio.imread(filename)

    def write(self, img: image.Imagelike, filename: str, **kwargs) -> None:
        """Write the image `img` to the file `filename`.

        BUG FIX: the parameter was previously named ``image``, shadowing
        the ``image`` module, so ``image.Image.as_array(...)`` resolved
        against the argument instead of the module and failed at runtime.
        """
        imageio.imwrite(filename, image.Image.as_array(img))
class VideoReader(video.FileReader):
    """A :py:class:`BaseFileReader` realized by the imageio library.
    To use this plugin, the imageio-ffmpeg library should be installed.
    The :py:class:`imageio.plugins.ffmpeg.FfmpegFormat.Reader`
    provides the following properties:
    * closed: bool
    * request: imageio.core.request.Request
    * format: <Format FFMPEG - Many video formats and cameras (via ffmpeg)>
    methods:
    * count_frames()
        Count the number of frames.
    * get_meta_data()
        {'plugin': 'ffmpeg',
         'nframes': inf,
         'ffmpeg_version': '4.1.3 built with gcc 7.3.0 ...',
         'codec': 'h264',
         'pix_fmt': 'yuv420p',
         'fps': 25.0,
         'source_size': (460, 360),
         'size': (460, 360),
         'duration': 230.18}
    * get_data()
    * get_next_data()
    * iter_data(): generator
    * set_image_index()
        Set the internal pointer such that the next call to
        get_next_data() returns the image specified by the index
    * get_length(): can be inf
    * close()
    Attributes
    ----------
    _reader:
        The underlying imageio reader object.
    _index: int
        The index of the current frame.
    """
    def __init__(self, filename: str, **kwargs) -> None:
        self._reader = None
        super().__init__(filename=filename, **kwargs)
        self._reader = imageio.get_reader(filename)
        if self._reader is None:
            LOG.error("Opening movie file (%s) failed", filename)
            # BUG FIX: the message previously hard-coded '(unknown)'
            # (an f-string without a placeholder); report the actual file.
            raise RuntimeError("Creating video reader object for file "
                               f"'{filename}' failed.")
        self._meta = self._reader.get_meta_data()
        self._index = -1  # no frame has been read yet
        LOG.debug("Reader object: %r", self._reader)
        LOG.info("Video file: %s", filename)
        LOG.info("FFMPEG backend version %s (%s)",
                 imageio_ffmpeg.get_ffmpeg_version(),
                 imageio_ffmpeg.get_ffmpeg_exe())
    def __del__(self) -> None:
        """Destructor for this :py:class:`VideoReader`.
        The underlying :py:class:`.imageio.Reader` object will be
        closed and deleted.
        """
        if self._reader is not None:
            LOG.info("Releasing video Reader (%r)", self._reader)
            self._reader.close()
            del self._reader
            self._reader = None
    #
    # Iterator
    #
    def __next__(self) -> np.ndarray:
        """Return the next frame and advance the frame index."""
        try:
            frame = self._reader.get_next_data()
            self._index += 1
        except IndexError as ex:
            # BUG FIX: the message was missing its f-prefix and emitted
            # the literal text '{ex}'.
            raise StopIteration(f"IndexError ({ex})")
        if frame is None:
            raise RuntimeError("Reading a frame from "
                               "ImageIO Video Reader failed!")
        return frame
    def __len__(self) -> int:
        # When reading from a video, the number of available frames is
        # hard/expensive to calculate, which is why its set to inf by
        # default, indicating "stream mode". To get the number of
        # frames before having read them all, you can use the
        # reader.count_frames() method (the reader will then use
        # imageio_ffmpeg.count_frames_and_secs() to get the exact
        # number of frames, note that this operation can take a few
        # seconds on large files). Alternatively, the number of frames
        # can be estimated from the fps and duration in the meta data
        # (though these values themselves are not always
        # present/reliable).
        return self._reader.count_frames()
    def __getitem__(self, index: int) -> np.ndarray:
        """Get the frame for a given frame number.
        Note: after getting a frame, the current frame number (obtained
        by :py:meth:`frame`) will be frame+1, that is the number of
        the next frame to read.
        Arguments
        ---------
        index: int
            The number of the frame to be read. If no frame is given,
            the next frame available will be read from the capture
            object.
        """
        if index is None:
            return next(self)
        self._index = index
        # BUG FIX: was ``get_data(-index)``, which addressed frames from
        # the end of the video instead of the requested frame number.
        return self._reader.get_data(index)
    @property
    def frames_per_second(self) -> float:
        """Frames per second in this video.
        """
        return self._meta['fps']
    @property
    def frame(self) -> int:
        # Index of the most recently read frame (-1 before any read).
        return self._index
class Webcam(video.Webcam):
    """A :py:class:`WebcamBackend` realized by an
    :py:class:`imageio.Reader` object.
    Attributes
    ----------
    _reader: imageio.plugins.ffmpeg.FfmpegFormat.Reader
        A video reader object
    """
    def __init__(self, device: int = None):
        """Constructor for this :py:class:`WebcamBackend`.
        The underlying :py:class:`imageio.Reader` object will be
        created.
        Arguments
        ---------
        device: int
            Index of the camera device to open (defaults to 0).
        """
        if device is None:
            device = 0
        super().__init__(device)
        # imageio addresses cameras via the special '<videoN>' URI.
        self._reader = imageio.get_reader(f'<video{device}>')
        if not self._reader:
            # BUG FIX: corrected the misspelled log message ("Acqiring").
            LOG.error("Acquiring Webcam (%d) failed", device)
            raise RuntimeError("Creating video reader object for camera "
                               f"'{device}' failed.")
        LOG.debug("Reader object: %r", self._reader)
        LOG.info("Camera device: %d", device)
    def __del__(self):
        """Destructor for this :py:class:`WebcamBackend`.
        The underlying :py:class:`.imageio.Reader` object will be
        closed and deleted.
        """
        if self._reader is not None:
            LOG.info("Releasing Webcam Reader (%d)", self._device)
            self._reader.close()
            del self._reader
            self._reader = None
        super().__del__()
    def _get_frame(self) -> np.ndarray:
        """Get the next frame from the ImageIO Video Reader.
        """
        frame = self._reader.get_next_data()
        if frame is None:
            raise RuntimeError("Reading a frame from "
                               "ImageIO Video Reader failed!")
        return frame
    def __next__(self) -> np.ndarray:
        return self._get_frame()
class VideoWriter(video.FileWriter):
    """A video file writer backed by ``imageio.get_writer``."""

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        self._writer = None  # underlying imageio writer; None while closed

    def _open(self) -> None:
        """Open the output file for writing.

        Raises a RuntimeError when already open or no filename is set.
        """
        if self._is_opened():
            raise RuntimeError("Video was already opened.")
        if self._filename is None:
            raise RuntimeError("No video filename was provided.")
        # Replaced leftover debug print with proper logging.
        LOG.debug("Opening video writer: %s (fps=%s)",
                  self._filename, self._fps)
        self._writer = imageio.get_writer(self._filename, fps=int(self._fps))

    def _is_opened(self) -> bool:
        """Whether the underlying writer is currently open."""
        return self._writer is not None

    def _close(self) -> None:
        """Close the underlying writer, if open."""
        # Replaced leftover debug print with proper logging.
        LOG.debug("Closing video writer: %s (fps=%s)",
                  self._filename, self._fps)
        if self._is_opened():
            self._writer.close()
            self._writer = None

    def _write_frame(self, frame: np.ndarray) -> None:
        """Append a single frame to the output video.

        BUG FIX: previously referenced the undefined names ``writer`` and
        ``marked_image``, raising NameError on every call.
        """
        self._writer.append_data(frame)
| {
"repo_name": "Petr-By/qtpyvis",
"path": "dltb/thirdparty/imageio.py",
"copies": "1",
"size": "8032",
"license": "mit",
"hash": 6719499344711924000,
"line_mean": 31.6341463415,
"line_max": 77,
"alpha_frac": 0.58632287,
"autogenerated": false,
"ratio": 4.12962962962963,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.521595249962963,
"avg_score": null,
"num_lines": null
} |
"""Access to the sounddevice library. This library allows to
use sound devices for recording and playback. The library
is based on the PortAudio library [1].
[1] http://www.portaudio.com/
"""
# FIXME[bug]: I am experiences frequent crashes on my office computer
# (Ubuntu 16.04):
# src/hostapi/alsa/pa_linux_alsa.c:3636:
# PaAlsaStreamComponent_BeginPolling:
# Assertion `ret == self->nfds' failed.
#
#
# cat /proc/asound/version
# Advanced Linux Sound Architecture Driver Version k4.4.0-179-generic.
#
# aplay --version
# aplay: version 1.1.0 by Jaroslav Kysela <perex@perex.cz>
#
# pulseaudio --version
# pulseaudio 8.0
#
# python -c "import sounddevice; print(sounddevice.__version__)"
# 0.4.0
# standard imports
from typing import Union
import logging
import threading
# third party imports
import numpy as np
import sounddevice as sd
# toolbox imports
from ..base.sound import (SoundPlayer as SoundPlayerBase,
SoundRecorder as SoundRecorderBase)
from ..base import get_default_run
# logging
LOG = logging.getLogger(__name__)
class SoundPlayer(SoundPlayerBase):
"""An implementation of a :py:class:`SoundPlayerBase` based on
the `sounddevice` library.
"""
    def __init__(self, samplerate: float = None, channels: int = None,
                 **kwargs) -> None:
        """Create the player and its initial output stream.

        Arguments
        ---------
        samplerate:
            Samplerate for playback; defaults to the current sound's
            samplerate, or 44100 if no sound is set.
        channels:
            Number of channels; defaults to the current sound's channel
            count, or 2 if no sound is set.
        """
        super().__init__(**kwargs)
        # Lock serializing position updates against the playback callback.
        self._lock = threading.Lock()
        # Event used to signal the end of playback to a blocking play().
        self._event = threading.Event()
        if channels is None:
            channels = 2 if self._sound is None else self._sound.channels
        if samplerate is None:
            samplerate = (44100 if self._sound is None else
                          self._sound.samplerate)
        # _finishing: this is a hack - we need it to mark a stream that
        # finishes, but that has not yet been stopped (see method _finished).
        self._blocking = False
        self._finishing = False
        self._stream = None
        # Creates the initial output stream (see _check_stream).
        self._check_stream(samplerate=samplerate, channels=channels)
    def _check_stream(self, samplerate: float = None,
                      channels: int = None) -> None:
        """This function is a hack to fix a problem with an sounddevice
        streams in an unsane state: these streams have both, `active`
        and `stopped` flag (and also the `closed` flag) set to `False`.
        Such a state seems to occur when the stream is stopped
        (or aborted) from within the stream Thread (while stopping
        or aborting from another Thread seems to be ok).
        Such unsane streams can not be restarted by calling stream.start(),
        they seem to be dead (at least I did not find a way to revive
        them). As a workaround, we simply create a new stream here to
        replace the original one.
        """
        # Check the state of the current stream
        if self._stream is not None and not self._stream.closed:
            if self._stream.active or self._stream.stopped:
                return # Stream seems to be ok
            LOG.warning("SoundDevicePlayer: "
                        "discovered unsane stream - creating a new one ...")
            # Stream seems to be dead - copy stream parameters
            # NOTE(review): 'or' treats 0 as "unset"; acceptable here since
            # 0 is not a valid samplerate or channel count.
            samplerate = samplerate or self._stream.samplerate
            channels = channels or self._stream.channels
            self._stream.close()
        # create a new stream
        self._stream = sd.OutputStream(samplerate=samplerate,
                                       channels=channels,
                                       callback=self._play_block,
                                       finished_callback=self._finished)
def _set_position(self, position: float) -> None:
"""Set the current playback position.
"""
# as we set the position from within the playback loop,
# we lock the operation to avoid interferences.
with self._lock:
super()._set_position(position)
@property
def playing(self) -> bool:
return self._stream.active and not self._finishing
@property
def samplerate(self) -> float:
"""Samplerate to be used for playback.
"""
return self._stream.samplerate
@property
def channels(self) -> int:
"""Number of channels to be used for playback.
"""
return self._stream.channels
def play(self, *args, run: bool = None, **kwargs):
# we have to overwrite the super method to care for the 'run'
# parameter (which would usually be done by the @run decorator):
# as the stream playback is done in its own thread (and there
# is no way to prevent this from happening), we will realize
# a blocking call (run=False), explicitly waiting for the
# playback to finish.
self._blocking = not get_default_run(run)
super().play(self, *args, run=False, **kwargs)
def _play(self) -> None:
"""Start the actual playback in a background thread.
"""
self._check_stream()
# another hack:
self._finishing = False
self._event.clear()
# this will start the background thread, periodically invoking
# _play_block
self._stream.start()
print("Soundplayer: blocking:", self._blocking)
if self._blocking:
try:
self._event.wait()
finally:
# Playback/recording may have been stopped with
# a `KeyboardInterrupt` - make sure the stream
# is closed
self._stream.close(ignore_errors=True)
def _play_block(self, outdata: np.ndarray, frames: int,
time, status: sd.CallbackFlags) -> None:
"""Callback to be called by the output stream.
Arguments
---------
outdata: np.ndarray
An array of shape (frames, channels) and dtype float32.
This is a buffer provided by the OutputStream in which
the next block of output data should be stored.
frames: int
The number of frames to be stored. This should be the
sames as len(outdata)
"""
if status:
LOG.debug("SoundDevicePlayer: status = %s", status)
position = self._position
reverse = self.reverse
if position is None:
LOG.debug("play block: no position")
wave_frames = 0
else:
# obtain the relevant sound data
samplerate = self.samplerate
duration = frames / samplerate
if not reverse:
start = position
end = min(position+duration, self.end)
else:
start = max(self.start, position-duration)
end = position
wave = self._sound[start:end:samplerate]
wave_frames = len(wave)
# provide the wave to the OutputStream via the outdata array.
valid_frames = min(wave_frames, frames)
if not reverse:
outdata[:valid_frames, :] = wave[:valid_frames]
else:
outdata[:valid_frames, :] = wave[valid_frames-1::-1]
LOG.debug("block, position=%f:.2, reverse=%s; "
"start=%f:.2, end=%f:.2, duration=%f:.4/%f:.4, "
"frames=%d/%d", position, reverse,
start, end, duration, end-start,
wave_frames, valid_frames)
# pad missing data with zeros
if wave_frames < frames:
outdata[wave_frames:, :].fill(0)
# If we have not obtained any data (wave_frames == 0) we will stop
# playback here.
if not reverse:
new_position = end if wave_frames > 0 else None
if new_position is not None and new_position >= self.end:
new_position = self.start if self.loop else None
else:
new_position = start if wave_frames > 0 else None
if new_position is not None and new_position <= self.start:
new_position = self.end if self.loop else None
# We have to avoid overwriting a change of position
# that may have occured in the meantime (by some other thread)
with self._lock:
if self._position == position:
super()._set_position(new_position)
if new_position is None:
# We cannot call _stream.stop() (or _stream.abort()) from
# within the sub-thread (also not from finished_callback)
# this will cause some error in the underlying C library).
# The official way to stop the thread from within is to
# raise an exception:
raise sd.CallbackStop()
def _finished(self) -> None:
"""The finished_callback is called once the playback thread
finishes (either due to an exception in the inner loop or by
an explicit call to stream.stop() from the outside).
"""
# When the finihed_callback is called, the stream may not have
# stopped yet - so when informing the observers, the playing
# property may still report playing - to avoid this, we have
# introduced the _finishing flag, that indicates that playback
# has finished.
self._event.set()
if self.playing:
self._finishing = True
self.change('state_changed')
def _stop(self) -> None:
"""Stop an ungoing playback.
"""
# Here we could either call stream.stop() or stream.abort().
# The first would stop acquiring new data, but finish processing
# buffered data, while the second would abort immediately.
# For the sake of a responsive interface, we choose abort here.
if self._stream.active:
self._stream.abort(ignore_errors=True)
class SoundRecorder(SoundRecorderBase):
    """A :py:class:`SoundRecorder` based on the Python sounddevice
    library.

    Recording is done by a :py:class:`sounddevice.InputStream`, which
    runs a background thread that hands each recorded block of audio
    to :py:meth:`_record_block`.
    """

    def __init__(self, channels: int = None, samplerate: float = None,
                 device: Union[int, str] = None, **kwargs):
        """Create a new recorder.

        Arguments
        ---------
        channels: int
            Number of channels to record.  Defaults to the channels of
            the assigned sound, or 2 if no sound is assigned.
        samplerate: float
            Samplerate for recording.  Defaults to the samplerate of
            the assigned sound, or 44100 if no sound is assigned.
        device:
            Input device (numeric ID or substring of the device name);
            None selects the default input device.
        """
        super().__init__(**kwargs)
        if channels is None:
            channels = 2 if self._sound is None else self._sound.channels
        if samplerate is None:
            samplerate = (44100 if self._sound is None else
                          self._sound.samplerate)
        self._stream = sd.InputStream(device=device, channels=channels,
                                      samplerate=samplerate,
                                      callback=self._record_block,
                                      finished_callback=self._finished)

    @property
    def samplerate(self) -> float:
        """Samplerate used for recording."""
        return self._stream.samplerate

    @property
    def channels(self) -> int:
        """Number of channels to be recorded."""
        return self._stream.channels

    @property
    def recording(self) -> bool:
        """A flag indicating whether recording is currently active."""
        return self._stream.active

    def _record(self) -> None:
        """Start the recording by starting the input stream.  The
        actual recording then happens asynchronously in the stream's
        background thread (see :py:meth:`_record_block`).
        """
        LOG.info("Recorder: samplerate=%f", self.samplerate)
        LOG.info("Recorder: sound=%s", self.sound)
        LOG.info("Recorder: starting stream")
        self._stream.start()
        LOG.info("Recorder: stream started")

    def _record_block(self, indata, _frames, _time, status):
        """Callback invoked by the input stream (from a separate
        thread) for each recorded audio block.
        """
        if status:
            LOG.debug("SoundDeviceRecorder: %s", status)
        # append new data to the sound object
        self._sound += indata

    def _finished(self) -> None:
        """Callback invoked once the recording stream has finished."""
        LOG.info("SoundDeviceRecorder: finished")

    def _stop(self) -> None:
        """Stop ongoing sound recording.
        """
        # Here we could either call stream.stop() or stream.abort().
        # The first would stop acquiring new data, but finish processing
        # buffered data, while the second would abort immediately.
        # In order to not lose any data, we choose stop here.
        LOG.info("SoundDeviceRecorder: stopping stream")
        if self._stream.active:
            self._stream.stop()
        LOG.info("SoundDeviceRecorder: stream stopped")
| {
"repo_name": "Petr-By/qtpyvis",
"path": "dltb/thirdparty/sounddevice.py",
"copies": "1",
"size": "13443",
"license": "mit",
"hash": 2750365134079159000,
"line_mean": 37.1903409091,
"line_max": 77,
"alpha_frac": 0.5906419698,
"autogenerated": false,
"ratio": 4.344861021331609,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5435502991131609,
"avg_score": null,
"num_lines": null
} |
"""Access to the soundfile library
"""
# third party imports
import soundfile as sf
# toolbox imports
from ..base.sound import (Sound,
SoundReader as SoundReaderBase,
SoundWriter as SoundWriterBase)
class SoundReader(SoundReaderBase):
    """A :py:class:`SoundReader` based on the `soundfile` library.
    This python module is based on libsndfile, which provides
    access to files in many audio formats, including 'wav' and
    'mp3'.
    """

    def read(self, filename: str) -> Sound:
        """Read an audio file and wrap its content in a
        :py:class:`Sound` object.
        """
        # FIXME[todo]: soundfile.read() can also read from file-like
        # objects (e.g. io.BytesIO wrapping data downloaded via
        # urllib), not only from named files.
        # FIXME[todo]: soundfile.read() auto-detects the file type of
        # most sound files and derives the correct metadata (channels
        # and samplerate).  Raw audio files are the exception: for
        # those, channels, samplerate and subtype have to be passed
        # explicitly, e.g.
        #   sf.read('myfile.raw', channels=1, samplerate=44100,
        #           subtype='FLOAT')
        #
        # frames is an array in (frames, channels) layout
        frames, rate = sf.read(filename)
        return Sound(samplerate=rate, data=frames)
class SoundWriter(SoundWriterBase):
    """A :py:class:`SoundWriter` based on the `soundfile` library."""

    def write(self, sound: Sound, filename: str) -> None:
        """Store the given :py:class:`Sound` in an audio file."""
        rate = sound.samplerate
        sf.write(filename, data=sound.data, samplerate=rate)
| {
"repo_name": "Petr-By/qtpyvis",
"path": "dltb/thirdparty/soundfile.py",
"copies": "1",
"size": "1640",
"license": "mit",
"hash": 8925507672980002000,
"line_mean": 32.4693877551,
"line_max": 78,
"alpha_frac": 0.6030487805,
"autogenerated": false,
"ratio": 4.019607843137255,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 49
} |
"""access to verb modules (local and imported"""
import sys
# FIXME PYTHON2
is_python3 = sys.version_info > (3,5)
import os
import glob
if is_python3:
import importlib.util
else:
import imp
from collections import OrderedDict
from mod import log, util, dep
# dictionary of "name: module"
verbs = {}
# dictionary of "projname: name"
proj_verbs = OrderedDict()
#-------------------------------------------------------------------------------
def import_verbs_from(proj_name, proj_dir, verb_dir):
    """import all verb modules from a directory, populates the
    verbs and proj_verbs global variables

    :param proj_name:   name of the project that owns verb_dir
    :param proj_dir:    root directory of that project (prepended to
                        sys.path so verb modules can resolve imports)
    :param verb_dir:    directory with verb python scripts (can be None)
    """
    global verbs, proj_verbs
    # make sure project-verbs find their modules
    sys.path.insert(0, proj_dir)
    if not (verb_dir and os.path.isdir(verb_dir)):
        return
    # get all .py files in the verb dir
    for verb_path in glob.glob(verb_dir + '/*.py'):
        verb_module_name = os.path.splitext(os.path.split(verb_path)[1])[0]
        if verb_module_name.startswith('__'):
            continue
        if is_python3:
            spec = importlib.util.spec_from_file_location(verb_module_name, verb_path)
            verb_module = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(verb_module)
        else:
            # FIXME: PYTHON2
            fp, pathname, desc = imp.find_module(verb_module_name, [verb_dir])
            try:
                verb_module = imp.load_module(verb_module_name, fp, pathname, desc)
            finally:
                # imp.find_module returns an open file object which the
                # caller is responsible for closing
                if fp:
                    fp.close()
        verbs[verb_module_name] = verb_module
        proj_verbs.setdefault(proj_name, []).append(verb_module_name)
#-------------------------------------------------------------------------------
def import_verbs(fips_dir, proj_dir):
    """import verbs from local and imported projects, populates
    the 'verbs' and 'proj_verbs' dictionaries

    :param fips_dir:  absolute fips directory
    :param proj_dir:  absolute project directory
    """
    # verbs shipped with fips itself always come first
    import_verbs_from('fips', fips_dir, fips_dir + '/verbs')
    # ...followed by the verbs of every imported project (unless we
    # are running inside the fips directory itself)
    if fips_dir != proj_dir:
        _, imported_projs = dep.get_all_imports_exports(fips_dir, proj_dir)
        for name in imported_projs:
            directory = imported_projs[name]['proj_dir']
            import_verbs_from(name, directory, util.get_verbs_dir(directory))
| {
"repo_name": "floooh/fips",
"path": "mod/verb.py",
"copies": "1",
"size": "2849",
"license": "mit",
"hash": -4549220137953680000,
"line_mean": 36.4868421053,
"line_max": 107,
"alpha_frac": 0.5749385749,
"autogenerated": false,
"ratio": 3.8656716417910446,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4940610216691045,
"avg_score": null,
"num_lines": null
} |
"""access to verb modules (local and imported"""
import sys
import os
import glob
import imp
from collections import OrderedDict
from mod import log, util, dep
# dictionary of "name: module"
verbs = {}
# dictionary of "projname: name"
proj_verbs = OrderedDict()
#-------------------------------------------------------------------------------
def import_verbs_from(proj_name, proj_dir, verb_dir):
    """import all verb modules from a directory, populates the
    verbs and proj_verbs global variables

    :param proj_name:   name of the project that owns verb_dir
    :param proj_dir:    root directory of that project (prepended to
                        sys.path so verb modules can resolve imports)
    :param verb_dir:    directory with verb python scripts (can be None)
    """
    global verbs, proj_verbs
    # make sure project-verbs find their modules
    sys.path.insert(0, proj_dir)
    if not (verb_dir and os.path.isdir(verb_dir)):
        return
    # get all .py files in the verb dir
    for verb_path in glob.glob(verb_dir + '/*.py'):
        verb_module_name = os.path.splitext(os.path.split(verb_path)[1])[0]
        if verb_module_name.startswith('__'):
            continue
        fp, pathname, desc = imp.find_module(verb_module_name, [verb_dir])
        try:
            verb_module = imp.load_module(verb_module_name, fp, pathname, desc)
        finally:
            # imp.find_module returns an open file object which the
            # caller is responsible for closing
            if fp:
                fp.close()
        verbs[verb_module_name] = verb_module
        proj_verbs.setdefault(proj_name, []).append(verb_module_name)
#-------------------------------------------------------------------------------
def import_verbs(fips_dir, proj_dir):
    """import verbs from local and imported projects, populates
    the 'verbs' and 'proj_verbs' dictionaries

    :param fips_dir:  absolute fips directory
    :param proj_dir:  absolute project directory
    """
    # verbs bundled with fips itself are always imported first
    import_verbs_from('fips', fips_dir, fips_dir + '/verbs')
    # when run from inside a project, additionally pull in the verbs
    # of every imported project
    if proj_dir != fips_dir:
        _, imports = dep.get_all_imports_exports(fips_dir, proj_dir)
        for proj in imports:
            proj_root = imports[proj]['proj_dir']
            import_verbs_from(proj, proj_root, util.get_verbs_dir(proj_root))
| {
"repo_name": "code-disaster/fips",
"path": "mod/verb.py",
"copies": "1",
"size": "2426",
"license": "mit",
"hash": -4604501551761702400,
"line_mean": 36.3230769231,
"line_max": 107,
"alpha_frac": 0.5812036274,
"autogenerated": false,
"ratio": 3.850793650793651,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9811737278485267,
"avg_score": 0.024051999941676658,
"num_lines": 65
} |
# Access WeakSet through the weakref module.
# This code is separated-out because it is needed
# by abc.py to load everything else at startup.
from __future__ import with_statement
from _weakref import ref
__all__ = ['WeakSet']
class _IterationGuard(object):
# This context manager registers itself in the current iterators of the
# weak container, such as to delay all removals until the context manager
# exits.
# This technique should be relatively thread-safe (since sets are).
def __init__(self, weakcontainer):
# Don't create cycles
self.weakcontainer = ref(weakcontainer)
def __enter__(self):
w = self.weakcontainer()
if w is not None:
w._iterating.add(self)
return self
def __exit__(self, e, t, b):
w = self.weakcontainer()
if w is not None:
s = w._iterating
s.remove(self)
if not s:
w._commit_removals()
class WeakSet(object):
    """A mutable set holding weak references to its elements.

    An element is discarded automatically once no strong references
    to it remain.  The public API mirrors the built-in ``set`` type.
    """

    def __init__(self, data=None):
        """Initialize the set, optionally populating it from *data*."""
        self.data = set()
        def _remove(item, selfref=ref(self)):
            # Callback fired when a referent is garbage collected;
            # removals are deferred while an iteration is in progress.
            self = selfref()
            if self is not None:
                if self._iterating:
                    self._pending_removals.append(item)
                else:
                    self.data.discard(item)
        self._remove = _remove
        # A list of keys to be removed
        self._pending_removals = []
        self._iterating = set()
        if data is not None:
            self.update(data)

    def _commit_removals(self):
        """Flush deferred removals (no iteration may be in progress)."""
        l = self._pending_removals
        discard = self.data.discard
        while l:
            discard(l.pop())

    def __iter__(self):
        """Yield the elements that are still alive."""
        with _IterationGuard(self):
            for itemref in self.data:
                item = itemref()
                if item is not None:
                    yield item

    def __len__(self):
        # Count only the references that are still alive.
        return sum(x() is not None for x in self.data)

    def __contains__(self, item):
        # Objects that cannot be weakly referenced can never be members
        # (previously this raised TypeError instead of returning False).
        try:
            wr = ref(item)
        except TypeError:
            return False
        return wr in self.data

    def __reduce__(self):
        # Pickle as (class, live-elements, instance dict).
        return (self.__class__, (list(self),),
                getattr(self, '__dict__', None))

    # WeakSets are mutable and therefore unhashable.
    __hash__ = None

    def add(self, item):
        """Add *item* to the set (it must be weak-referenceable)."""
        if self._pending_removals:
            self._commit_removals()
        self.data.add(ref(item, self._remove))

    def clear(self):
        """Remove all elements."""
        if self._pending_removals:
            self._commit_removals()
        self.data.clear()

    def copy(self):
        """Return a shallow copy of the set."""
        return self.__class__(self)

    def pop(self):
        """Remove and return an arbitrary live element.

        Raises KeyError if the set is empty.
        """
        if self._pending_removals:
            self._commit_removals()
        while True:
            try:
                itemref = self.data.pop()
            except KeyError:
                raise KeyError('pop from empty WeakSet')
            item = itemref()
            if item is not None:
                return item

    def remove(self, item):
        """Remove *item* from the set; raise KeyError if absent."""
        if self._pending_removals:
            self._commit_removals()
        self.data.remove(ref(item))

    def discard(self, item):
        """Remove *item* from the set if present."""
        if self._pending_removals:
            self._commit_removals()
        self.data.discard(ref(item))

    def update(self, other):
        """Add all elements of iterable *other*.

        Elements are added one by one (even when *other* is a WeakSet)
        so that every stored reference carries *this* set's removal
        callback; the previous ``self.data.update(other.data)`` fast
        path copied references bound to the other set's callback, so
        dead references were never pruned from this set.
        """
        if self._pending_removals:
            self._commit_removals()
        for element in other:
            self.add(element)

    def __ior__(self, other):
        self.update(other)
        return self

    # Helper functions for simple delegating methods.
    def _apply(self, other, method):
        # NOTE: references copied into the new set keep their original
        # removal callbacks; dead entries in the result are therefore
        # only skipped lazily (by __len__/__iter__), not discarded.
        if not isinstance(other, self.__class__):
            other = self.__class__(other)
        newdata = method(other.data)
        newset = self.__class__()
        newset.data = newdata
        return newset

    def difference(self, other):
        """Return a new set with elements not contained in *other*."""
        return self._apply(other, self.data.difference)
    __sub__ = difference

    def difference_update(self, other):
        """Remove all elements contained in *other*."""
        if self._pending_removals:
            self._commit_removals()
        if self is other:
            self.data.clear()
        else:
            self.data.difference_update(ref(item) for item in other)

    def __isub__(self, other):
        if self._pending_removals:
            self._commit_removals()
        if self is other:
            self.data.clear()
        else:
            self.data.difference_update(ref(item) for item in other)
        return self

    def intersection(self, other):
        """Return a new set with elements common to *other*."""
        return self._apply(other, self.data.intersection)
    __and__ = intersection

    def intersection_update(self, other):
        """Keep only elements also contained in *other*."""
        if self._pending_removals:
            self._commit_removals()
        self.data.intersection_update(ref(item) for item in other)

    def __iand__(self, other):
        if self._pending_removals:
            self._commit_removals()
        self.data.intersection_update(ref(item) for item in other)
        return self

    def issubset(self, other):
        """True if every element is also in *other* (non-strict)."""
        return self.data.issubset(ref(item) for item in other)
    # issubset is the non-strict test and therefore maps to `<=`
    # (it was previously aliased to `__lt__`, making `s < s` true).
    __le__ = issubset

    def __lt__(self, other):
        # strict (proper) subset
        return self.data < set(ref(item) for item in other)

    def issuperset(self, other):
        """True if this set contains every element of *other* (non-strict)."""
        return self.data.issuperset(ref(item) for item in other)
    # issuperset is the non-strict test and therefore maps to `>=`
    # (it was previously aliased to `__gt__`).
    __ge__ = issuperset

    def __gt__(self, other):
        # strict (proper) superset
        return self.data > set(ref(item) for item in other)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self.data == set(ref(item) for item in other)

    def __ne__(self, other):
        # Python 2 does not derive `!=` from `__eq__`, so delegate
        # explicitly and invert.
        opposite = self.__eq__(other)
        if opposite is NotImplemented:
            return NotImplemented
        return not opposite

    def symmetric_difference(self, other):
        """Return a new set with elements in exactly one of the sets."""
        return self._apply(other, self.data.symmetric_difference)
    __xor__ = symmetric_difference

    def symmetric_difference_update(self, other):
        """Update the set, keeping elements found in exactly one set."""
        if self._pending_removals:
            self._commit_removals()
        if self is other:
            self.data.clear()
        else:
            # references added here must carry this set's removal
            # callback (previously plain refs were stored, so dead
            # entries were never pruned)
            self.data.symmetric_difference_update(
                ref(item, self._remove) for item in other)

    def __ixor__(self, other):
        if self._pending_removals:
            self._commit_removals()
        if self is other:
            self.data.clear()
        else:
            self.data.symmetric_difference_update(
                ref(item, self._remove) for item in other)
        return self

    def union(self, other):
        """Return a new set with elements from both sets."""
        return self._apply(other, self.data.union)
    __or__ = union

    def isdisjoint(self, other):
        """True if the two sets share no elements."""
        return len(self.intersection(other)) == 0
| {
"repo_name": "pombredanne/intessa",
"path": "lib/intessa/vendor/weakrefset.py",
"copies": "1",
"size": "6344",
"license": "unlicense",
"hash": -7212311137005245000,
"line_mean": 28.6448598131,
"line_max": 78,
"alpha_frac": 0.5635245902,
"autogenerated": false,
"ratio": 4.095545513234344,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5159070103434344,
"avg_score": null,
"num_lines": null
} |
# Access WeakSet through the weakref module.
# This code is separated-out because it is needed
# by abc.py to load everything else at startup.
from __go__.grumpy import WeakRefType as ref
__all__ = ['WeakSet']
class _IterationGuard(object):
# This context manager registers itself in the current iterators of the
# weak container, such as to delay all removals until the context manager
# exits.
# This technique should be relatively thread-safe (since sets are).
def __init__(self, weakcontainer):
# Don't create cycles
self.weakcontainer = ref(weakcontainer)
def __enter__(self):
w = self.weakcontainer()
if w is not None:
w._iterating.add(self)
return self
def __exit__(self, e, t, b):
w = self.weakcontainer()
if w is not None:
s = w._iterating
s.remove(self)
if not s:
w._commit_removals()
class WeakSet(object):
    """A mutable set holding weak references to its elements.

    An element is removed automatically once no strong references to
    it remain.  The public API mirrors the built-in ``set`` type.
    """
    def __init__(self, data=None):
        """Initialize the set, optionally populating it from iterable *data*."""
        # underlying storage: a plain set of weak references
        self.data = set()
        def _remove(item, selfref=ref(self)):
            # Callback fired when a referent is garbage collected.
            # While an iteration is in progress the removal is deferred
            # (see _IterationGuard); otherwise discard immediately.
            self = selfref()
            if self is not None:
                if self._iterating:
                    self._pending_removals.append(item)
                else:
                    self.data.discard(item)
        self._remove = _remove
        # A list of keys to be removed
        self._pending_removals = []
        # set of currently active _IterationGuard context managers
        self._iterating = set()
        if data is not None:
            self.update(data)
    def _commit_removals(self):
        """Flush deferred removals (no iteration may be in progress)."""
        l = self._pending_removals
        discard = self.data.discard
        while l:
            discard(l.pop())
    def __iter__(self):
        """Yield the elements that are still alive."""
        with _IterationGuard(self):
            for itemref in self.data:
                item = itemref()
                if item is not None:
                    # Caveat: the iterator will keep a strong reference to
                    # `item` until it is resumed or closed.
                    yield item
    def __len__(self):
        # Dead-but-not-yet-discarded entries are accounted for via the
        # pending-removals list.
        return len(self.data) - len(self._pending_removals)
    def __contains__(self, item):
        # Objects that cannot be weakly referenced can never be members.
        try:
            wr = ref(item)
        except TypeError:
            return False
        return wr in self.data
    def __reduce__(self):
        # Pickle as (class, live-elements, instance dict).
        return (self.__class__, (list(self),),
                getattr(self, '__dict__', None))
    # WeakSets are mutable and therefore unhashable.
    __hash__ = None
    def add(self, item):
        """Add *item* to the set (it must be weak-referenceable)."""
        if self._pending_removals:
            self._commit_removals()
        self.data.add(ref(item, self._remove))
    def clear(self):
        """Remove all elements."""
        if self._pending_removals:
            self._commit_removals()
        self.data.clear()
    def copy(self):
        """Return a shallow copy of the set."""
        return self.__class__(self)
    def pop(self):
        """Remove and return an arbitrary live element.

        Raises KeyError if the set is empty.
        """
        if self._pending_removals:
            self._commit_removals()
        while True:
            try:
                itemref = self.data.pop()
            except KeyError:
                raise KeyError('pop from empty WeakSet')
            item = itemref()
            if item is not None:
                return item
    def remove(self, item):
        """Remove *item* from the set; raise KeyError if absent."""
        if self._pending_removals:
            self._commit_removals()
        self.data.remove(ref(item))
    def discard(self, item):
        """Remove *item* from the set if present."""
        if self._pending_removals:
            self._commit_removals()
        self.data.discard(ref(item))
    def update(self, other):
        """Add all elements of iterable *other*."""
        if self._pending_removals:
            self._commit_removals()
        # element-wise so every stored reference carries this set's
        # removal callback
        for element in other:
            self.add(element)
    def __ior__(self, other):
        self.update(other)
        return self
    def difference(self, other):
        """Return a new set with elements not contained in *other*."""
        newset = self.copy()
        newset.difference_update(other)
        return newset
    __sub__ = difference
    def difference_update(self, other):
        """Remove all elements contained in *other*."""
        self.__isub__(other)
    def __isub__(self, other):
        if self._pending_removals:
            self._commit_removals()
        if self is other:
            self.data.clear()
        else:
            self.data.difference_update(ref(item) for item in other)
        return self
    def intersection(self, other):
        """Return a new set with elements common to *other*."""
        return self.__class__(item for item in other if item in self)
    __and__ = intersection
    def intersection_update(self, other):
        """Keep only elements also contained in *other*."""
        self.__iand__(other)
    def __iand__(self, other):
        if self._pending_removals:
            self._commit_removals()
        self.data.intersection_update(ref(item) for item in other)
        return self
    def issubset(self, other):
        """True if every element is also in *other* (non-strict)."""
        return self.data.issubset(ref(item) for item in other)
    __le__ = issubset
    def __lt__(self, other):
        # strict (proper) subset
        return self.data < set(ref(item) for item in other)
    def issuperset(self, other):
        """True if this set contains every element of *other* (non-strict)."""
        return self.data.issuperset(ref(item) for item in other)
    __ge__ = issuperset
    def __gt__(self, other):
        # strict (proper) superset
        return self.data > set(ref(item) for item in other)
    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self.data == set(ref(item) for item in other)
    def __ne__(self, other):
        # Python 2 does not derive `!=` from `__eq__`: delegate and invert.
        opposite = self.__eq__(other)
        if opposite is NotImplemented:
            return NotImplemented
        return not opposite
    def symmetric_difference(self, other):
        """Return a new set with elements in exactly one of the sets."""
        newset = self.copy()
        newset.symmetric_difference_update(other)
        return newset
    __xor__ = symmetric_difference
    def symmetric_difference_update(self, other):
        """Update the set, keeping elements found in exactly one set."""
        self.__ixor__(other)
    def __ixor__(self, other):
        if self._pending_removals:
            self._commit_removals()
        if self is other:
            self.data.clear()
        else:
            # new references carry this set's removal callback
            self.data.symmetric_difference_update(ref(item, self._remove) for item in other)
        return self
    def union(self, other):
        """Return a new set with elements from both sets."""
        return self.__class__(e for s in (self, other) for e in s)
    __or__ = union
    def isdisjoint(self, other):
        """True if the two sets share no elements."""
        return len(self.intersection(other)) == 0
| {
"repo_name": "AlexEKoren/grumpy",
"path": "third_party/stdlib/_weakrefset.py",
"copies": "3",
"size": "5931",
"license": "apache-2.0",
"hash": 6949381329151338000,
"line_mean": 28.0735294118,
"line_max": 92,
"alpha_frac": 0.5565671893,
"autogenerated": false,
"ratio": 4.124478442280946,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6181045631580946,
"avg_score": null,
"num_lines": null
} |
# Access WeakSet through the weakref module.
# This code is separated-out because it is needed
# by abc.py to load everything else at startup.
from '__go__/grumpy' import WeakRefType as ref
__all__ = ['WeakSet']
class _IterationGuard(object):
# This context manager registers itself in the current iterators of the
# weak container, such as to delay all removals until the context manager
# exits.
# This technique should be relatively thread-safe (since sets are).
def __init__(self, weakcontainer):
# Don't create cycles
self.weakcontainer = ref(weakcontainer)
def __enter__(self):
w = self.weakcontainer()
if w is not None:
w._iterating.add(self)
return self
def __exit__(self, e, t, b):
w = self.weakcontainer()
if w is not None:
s = w._iterating
s.remove(self)
if not s:
w._commit_removals()
class WeakSet(object):
    """A mutable set holding weak references to its elements.

    An element is removed automatically once no strong references to
    it remain.  The public API mirrors the built-in ``set`` type.
    """
    def __init__(self, data=None):
        """Initialize the set, optionally populating it from iterable *data*."""
        # underlying storage: a plain set of weak references
        self.data = set()
        def _remove(item, selfref=ref(self)):
            # Callback fired when a referent is garbage collected.
            # While an iteration is in progress the removal is deferred
            # (see _IterationGuard); otherwise discard immediately.
            self = selfref()
            if self is not None:
                if self._iterating:
                    self._pending_removals.append(item)
                else:
                    self.data.discard(item)
        self._remove = _remove
        # A list of keys to be removed
        self._pending_removals = []
        # set of currently active _IterationGuard context managers
        self._iterating = set()
        if data is not None:
            self.update(data)
    def _commit_removals(self):
        """Flush deferred removals (no iteration may be in progress)."""
        l = self._pending_removals
        discard = self.data.discard
        while l:
            discard(l.pop())
    def __iter__(self):
        """Yield the elements that are still alive."""
        with _IterationGuard(self):
            for itemref in self.data:
                item = itemref()
                if item is not None:
                    # Caveat: the iterator will keep a strong reference to
                    # `item` until it is resumed or closed.
                    yield item
    def __len__(self):
        # Dead-but-not-yet-discarded entries are accounted for via the
        # pending-removals list.
        return len(self.data) - len(self._pending_removals)
    def __contains__(self, item):
        # Objects that cannot be weakly referenced can never be members.
        try:
            wr = ref(item)
        except TypeError:
            return False
        return wr in self.data
    def __reduce__(self):
        # Pickle as (class, live-elements, instance dict).
        return (self.__class__, (list(self),),
                getattr(self, '__dict__', None))
    # WeakSets are mutable and therefore unhashable.
    __hash__ = None
    def add(self, item):
        """Add *item* to the set (it must be weak-referenceable)."""
        if self._pending_removals:
            self._commit_removals()
        self.data.add(ref(item, self._remove))
    def clear(self):
        """Remove all elements."""
        if self._pending_removals:
            self._commit_removals()
        self.data.clear()
    def copy(self):
        """Return a shallow copy of the set."""
        return self.__class__(self)
    def pop(self):
        """Remove and return an arbitrary live element.

        Raises KeyError if the set is empty.
        """
        if self._pending_removals:
            self._commit_removals()
        while True:
            try:
                itemref = self.data.pop()
            except KeyError:
                raise KeyError('pop from empty WeakSet')
            item = itemref()
            if item is not None:
                return item
    def remove(self, item):
        """Remove *item* from the set; raise KeyError if absent."""
        if self._pending_removals:
            self._commit_removals()
        self.data.remove(ref(item))
    def discard(self, item):
        """Remove *item* from the set if present."""
        if self._pending_removals:
            self._commit_removals()
        self.data.discard(ref(item))
    def update(self, other):
        """Add all elements of iterable *other*."""
        if self._pending_removals:
            self._commit_removals()
        # element-wise so every stored reference carries this set's
        # removal callback
        for element in other:
            self.add(element)
    def __ior__(self, other):
        self.update(other)
        return self
    def difference(self, other):
        """Return a new set with elements not contained in *other*."""
        newset = self.copy()
        newset.difference_update(other)
        return newset
    __sub__ = difference
    def difference_update(self, other):
        """Remove all elements contained in *other*."""
        self.__isub__(other)
    def __isub__(self, other):
        if self._pending_removals:
            self._commit_removals()
        if self is other:
            self.data.clear()
        else:
            self.data.difference_update(ref(item) for item in other)
        return self
    def intersection(self, other):
        """Return a new set with elements common to *other*."""
        return self.__class__(item for item in other if item in self)
    __and__ = intersection
    def intersection_update(self, other):
        """Keep only elements also contained in *other*."""
        self.__iand__(other)
    def __iand__(self, other):
        if self._pending_removals:
            self._commit_removals()
        self.data.intersection_update(ref(item) for item in other)
        return self
    def issubset(self, other):
        """True if every element is also in *other* (non-strict)."""
        return self.data.issubset(ref(item) for item in other)
    __le__ = issubset
    def __lt__(self, other):
        # strict (proper) subset
        return self.data < set(ref(item) for item in other)
    def issuperset(self, other):
        """True if this set contains every element of *other* (non-strict)."""
        return self.data.issuperset(ref(item) for item in other)
    __ge__ = issuperset
    def __gt__(self, other):
        # strict (proper) superset
        return self.data > set(ref(item) for item in other)
    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self.data == set(ref(item) for item in other)
    def __ne__(self, other):
        # Python 2 does not derive `!=` from `__eq__`: delegate and invert.
        opposite = self.__eq__(other)
        if opposite is NotImplemented:
            return NotImplemented
        return not opposite
    def symmetric_difference(self, other):
        """Return a new set with elements in exactly one of the sets."""
        newset = self.copy()
        newset.symmetric_difference_update(other)
        return newset
    __xor__ = symmetric_difference
    def symmetric_difference_update(self, other):
        """Update the set, keeping elements found in exactly one set."""
        self.__ixor__(other)
    def __ixor__(self, other):
        if self._pending_removals:
            self._commit_removals()
        if self is other:
            self.data.clear()
        else:
            # new references carry this set's removal callback
            self.data.symmetric_difference_update(ref(item, self._remove) for item in other)
        return self
    def union(self, other):
        """Return a new set with elements from both sets."""
        return self.__class__(e for s in (self, other) for e in s)
    __or__ = union
    def isdisjoint(self, other):
        """True if the two sets share no elements."""
        return len(self.intersection(other)) == 0
| {
"repo_name": "pombredanne/grumpy",
"path": "grumpy-runtime-src/third_party/stdlib/_weakrefset.py",
"copies": "5",
"size": "5933",
"license": "apache-2.0",
"hash": -56669412987478340,
"line_mean": 28.0833333333,
"line_max": 92,
"alpha_frac": 0.5563795719,
"autogenerated": false,
"ratio": 4.120138888888889,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.7176518460788889,
"avg_score": null,
"num_lines": null
} |
# Access WeakSet through the weakref module.
# This code is separated-out because it is needed
# by abc.py to load everything else at startup.
from _weakref import ref
from types import GenericAlias
__all__ = ['WeakSet']
class _IterationGuard:
# This context manager registers itself in the current iterators of the
# weak container, such as to delay all removals until the context manager
# exits.
# This technique should be relatively thread-safe (since sets are).
def __init__(self, weakcontainer):
# Don't create cycles
self.weakcontainer = ref(weakcontainer)
def __enter__(self):
w = self.weakcontainer()
if w is not None:
w._iterating.add(self)
return self
def __exit__(self, e, t, b):
w = self.weakcontainer()
if w is not None:
s = w._iterating
s.remove(self)
if not s:
w._commit_removals()
class WeakSet:
    """Set of objects held only through weak references.

    Items disappear automatically once no strong references to them
    remain: every stored weakref carries the ``_remove`` callback, which
    prunes the dead reference.  While an iteration is in progress
    (tracked via ``_iterating`` and _IterationGuard), removals are
    queued in ``_pending_removals`` and flushed later by
    ``_commit_removals`` so iteration never mutates ``self.data``.
    """
    def __init__(self, data=None):
        self.data = set()
        # Weakref callback: discard the dead ref, or queue it while an
        # iteration is underway.  ``selfref`` is bound as a default so
        # the closure does not keep this WeakSet alive.
        def _remove(item, selfref=ref(self)):
            self = selfref()
            if self is not None:
                if self._iterating:
                    self._pending_removals.append(item)
                else:
                    self.data.discard(item)
        self._remove = _remove
        # A list of keys to be removed
        self._pending_removals = []
        self._iterating = set()
        if data is not None:
            self.update(data)
    def _commit_removals(self):
        # Drain the queue of refs whose referents died during iteration.
        l = self._pending_removals
        discard = self.data.discard
        while l:
            discard(l.pop())
    def __iter__(self):
        with _IterationGuard(self):
            for itemref in self.data:
                item = itemref()
                if item is not None:
                    # Caveat: the iterator will keep a strong reference to
                    # `item` until it is resumed or closed.
                    yield item
    def __len__(self):
        # Queued-but-not-yet-discarded refs are still in ``data``;
        # subtract them to report only live entries.
        return len(self.data) - len(self._pending_removals)
    def __contains__(self, item):
        # Non-weak-referenceable objects can never be members.
        try:
            wr = ref(item)
        except TypeError:
            return False
        return wr in self.data
    def __reduce__(self):
        # Pickle as (class, (list of live items,), instance __dict__).
        return (self.__class__, (list(self),),
                getattr(self, '__dict__', None))
    def add(self, item):
        if self._pending_removals:
            self._commit_removals()
        self.data.add(ref(item, self._remove))
    def clear(self):
        if self._pending_removals:
            self._commit_removals()
        self.data.clear()
    def copy(self):
        return self.__class__(self)
    def pop(self):
        if self._pending_removals:
            self._commit_removals()
        # Loop: skip refs whose referent died before being popped.
        while True:
            try:
                itemref = self.data.pop()
            except KeyError:
                raise KeyError('pop from empty WeakSet') from None
            item = itemref()
            if item is not None:
                return item
    def remove(self, item):
        if self._pending_removals:
            self._commit_removals()
        self.data.remove(ref(item))
    def discard(self, item):
        if self._pending_removals:
            self._commit_removals()
        self.data.discard(ref(item))
    def update(self, other):
        if self._pending_removals:
            self._commit_removals()
        # Re-add element by element so every stored ref gets *our*
        # cleanup callback.
        for element in other:
            self.add(element)
    def __ior__(self, other):
        self.update(other)
        return self
    def difference(self, other):
        newset = self.copy()
        newset.difference_update(other)
        return newset
    __sub__ = difference
    def difference_update(self, other):
        self.__isub__(other)
    def __isub__(self, other):
        if self._pending_removals:
            self._commit_removals()
        # ``s -= s`` must empty the set rather than iterate-while-mutating.
        if self is other:
            self.data.clear()
        else:
            self.data.difference_update(ref(item) for item in other)
        return self
    def intersection(self, other):
        return self.__class__(item for item in other if item in self)
    __and__ = intersection
    def intersection_update(self, other):
        self.__iand__(other)
    def __iand__(self, other):
        if self._pending_removals:
            self._commit_removals()
        self.data.intersection_update(ref(item) for item in other)
        return self
    def issubset(self, other):
        return self.data.issubset(ref(item) for item in other)
    __le__ = issubset
    def __lt__(self, other):
        return self.data < set(map(ref, other))
    def issuperset(self, other):
        return self.data.issuperset(ref(item) for item in other)
    __ge__ = issuperset
    def __gt__(self, other):
        return self.data > set(map(ref, other))
    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self.data == set(map(ref, other))
    def symmetric_difference(self, other):
        newset = self.copy()
        newset.symmetric_difference_update(other)
        return newset
    __xor__ = symmetric_difference
    def symmetric_difference_update(self, other):
        self.__ixor__(other)
    def __ixor__(self, other):
        if self._pending_removals:
            self._commit_removals()
        if self is other:
            self.data.clear()
        else:
            # New refs carry the cleanup callback so dead entries get pruned.
            self.data.symmetric_difference_update(ref(item, self._remove) for item in other)
        return self
    def union(self, other):
        return self.__class__(e for s in (self, other) for e in s)
    __or__ = union
    def isdisjoint(self, other):
        return len(self.intersection(other)) == 0
    def __repr__(self):
        return repr(self.data)
    # Support parameterization, e.g. WeakSet[int] (PEP 585 style).
    __class_getitem__ = classmethod(GenericAlias)
| {
"repo_name": "brython-dev/brython",
"path": "www/src/Lib/_weakrefset.py",
"copies": "16",
"size": "5817",
"license": "bsd-3-clause",
"hash": -7466073929119844000,
"line_mean": 27.797029703,
"line_max": 92,
"alpha_frac": 0.5569881382,
"autogenerated": false,
"ratio": 4.108050847457627,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": null,
"num_lines": null
} |
# Access WeakSet through the weakref module.
# This code is separated-out because it is needed
# by abc.py to load everything else at startup.
from _weakref import ref
__all__ = ['WeakSet']
class WeakSet:
    """Set of objects held only through weak references.

    Elements must be weak-referenceable; an element disappears from the
    set once no strong references to it remain (each stored weakref
    carries the ``_remove`` callback that discards the dead reference).
    """
    def __init__(self, data=None):
        self.data = set()
        # Weakref callback: prune the dead reference.  ``selfref`` is
        # bound as a default argument so the closure does not keep this
        # WeakSet alive.
        def _remove(item, selfref=ref(self)):
            self = selfref()
            if self is not None:
                self.data.discard(item)
        self._remove = _remove
        if data is not None:
            self.update(data)
    def __iter__(self):
        for itemref in self.data:
            item = itemref()
            if item is not None:
                yield item
    def __len__(self):
        # Count only references whose referent is still alive.
        return sum(x() is not None for x in self.data)
    def __contains__(self, item):
        return ref(item) in self.data
    def __reduce__(self):
        # Pickle as (class, (list of live items,), instance __dict__).
        return (self.__class__, (list(self),),
                getattr(self, '__dict__', None))
    def add(self, item):
        self.data.add(ref(item, self._remove))
    def clear(self):
        self.data.clear()
    def copy(self):
        return self.__class__(self)
    def pop(self):
        # Loop: skip refs whose referent died before being popped.
        while True:
            try:
                itemref = self.data.pop()
            except KeyError:
                raise KeyError('pop from empty WeakSet')
            item = itemref()
            if item is not None:
                return item
    def remove(self, item):
        self.data.remove(ref(item))
    def discard(self, item):
        self.data.discard(ref(item))
    def update(self, other):
        if isinstance(other, self.__class__):
            self.data.update(other.data)
        else:
            for element in other:
                self.add(element)
    def __ior__(self, other):
        self.update(other)
        return self
    # Helper functions for simple delegating methods.
    def _apply(self, other, method):
        if not isinstance(other, self.__class__):
            other = self.__class__(other)
        newdata = method(other.data)
        newset = self.__class__()
        newset.data = newdata
        return newset
    def difference(self, other):
        return self._apply(other, self.data.difference)
    __sub__ = difference
    def difference_update(self, other):
        # ``s -= s`` must empty the set rather than iterate-while-mutating.
        if self is other:
            self.data.clear()
        else:
            self.data.difference_update(ref(item) for item in other)
    def __isub__(self, other):
        if self is other:
            self.data.clear()
        else:
            self.data.difference_update(ref(item) for item in other)
        return self
    def intersection(self, other):
        return self._apply(other, self.data.intersection)
    __and__ = intersection
    def intersection_update(self, other):
        self.data.intersection_update(ref(item) for item in other)
    def __iand__(self, other):
        self.data.intersection_update(ref(item) for item in other)
        return self
    def issubset(self, other):
        return self.data.issubset(ref(item) for item in other)
    # BUG FIX: issubset is a *non-strict* test, so it must back ``<=``.
    # The original bound it to ``<``, which made ``a < b`` true for equal
    # sets; ``<`` is now a proper strict-subset comparison (this matches
    # later upstream versions of CPython's _weakrefset).
    __le__ = issubset
    def __lt__(self, other):
        return self.data < set(ref(item) for item in other)
    def issuperset(self, other):
        return self.data.issuperset(ref(item) for item in other)
    # BUG FIX: likewise, issuperset backs ``>=`` and ``>`` is strict.
    __ge__ = issuperset
    def __gt__(self, other):
        return self.data > set(ref(item) for item in other)
    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self.data == set(ref(item) for item in other)
    def symmetric_difference(self, other):
        return self._apply(other, self.data.symmetric_difference)
    __xor__ = symmetric_difference
    def symmetric_difference_update(self, other):
        if self is other:
            self.data.clear()
        else:
            self.data.symmetric_difference_update(ref(item) for item in other)
    def __ixor__(self, other):
        if self is other:
            self.data.clear()
        else:
            self.data.symmetric_difference_update(ref(item) for item in other)
        return self
    def union(self, other):
        return self._apply(other, self.data.union)
    __or__ = union
    def isdisjoint(self, other):
        return len(self.intersection(other)) == 0
| {
"repo_name": "MalloyPower/parsing-python",
"path": "front-end/testsuite-python-lib/Python-3.1/Lib/_weakrefset.py",
"copies": "1",
"size": "4257",
"license": "mit",
"hash": 872988750224217900,
"line_mean": 28.1575342466,
"line_max": 78,
"alpha_frac": 0.5701198027,
"autogenerated": false,
"ratio": 4.0236294896030245,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00149666660262445,
"num_lines": 146
} |
# Access WeakSet through the weakref module.
# This code is separated-out because it is needed
# by abc.py to load everything else at startup.
from weakref import ref
__all__ = ['WeakSet']
class _IterationGuard:
    # This context manager registers itself in the current iterators of the
    # weak container, such as to delay all removals until the context manager
    # exits.
    # This technique should be relatively thread-safe (since sets are).
    def __init__(self, weakcontainer):
        # Don't create cycles
        # (holding only a weakref avoids a guard <-> container cycle).
        self.weakcontainer = ref(weakcontainer)
    def __enter__(self):
        # Register this guard in the container's ``_iterating`` set so the
        # container queues removals instead of mutating itself mid-iteration.
        w = self.weakcontainer()
        if w is not None:
            w._iterating.add(self)
        return self
    def __exit__(self, e, t, b):
        # Deregister; when the last active iterator exits, flush the
        # removals queued while iteration was in progress.
        w = self.weakcontainer()
        if w is not None:
            s = w._iterating
            s.remove(self)
            if not s:
                w._commit_removals()
class WeakSet:
    """Set of objects held only through weak references.

    Items disappear automatically once no strong references to them
    remain: every stored weakref carries the ``_remove`` callback, which
    prunes the dead reference.  While an iteration is in progress
    (tracked via ``_iterating`` and _IterationGuard), removals are
    queued in ``_pending_removals`` and flushed later by
    ``_commit_removals`` so iteration never mutates ``self.data``.
    """
    def __init__(self, data=None):
        self.data = set()
        # Weakref callback: discard the dead ref, or queue it while an
        # iteration is underway.  ``selfref`` is bound as a default so
        # the closure does not keep this WeakSet alive.
        def _remove(item, selfref=ref(self)):
            self = selfref()
            if self is not None:
                if self._iterating:
                    self._pending_removals.append(item)
                else:
                    self.data.discard(item)
        self._remove = _remove
        # A list of keys to be removed
        self._pending_removals = []
        self._iterating = set()
        if data is not None:
            self.update(data)
    def _commit_removals(self):
        # Drain the queue of refs whose referents died during iteration.
        l = self._pending_removals
        discard = self.data.discard
        while l:
            discard(l.pop())
    def __iter__(self):
        with _IterationGuard(self):
            for itemref in self.data:
                item = itemref()
                if item is not None:
                    # Caveat: the iterator will keep a strong reference to
                    # `item` until it is resumed or closed.
                    yield item
    def __len__(self):
        # Queued-but-not-yet-discarded refs are still in ``data``;
        # subtract them to report only live entries.
        return len(self.data) - len(self._pending_removals)
    def __contains__(self, item):
        # Non-weak-referenceable objects can never be members.
        try:
            wr = ref(item)
        except TypeError:
            return False
        return wr in self.data
    def __reduce__(self):
        # Pickle as (class, (list of live items,), instance __dict__).
        return (self.__class__, (list(self),),
                getattr(self, '__dict__', None))
    def add(self, item):
        if self._pending_removals:
            self._commit_removals()
        self.data.add(ref(item, self._remove))
    def clear(self):
        if self._pending_removals:
            self._commit_removals()
        self.data.clear()
    def copy(self):
        return self.__class__(self)
    def pop(self):
        if self._pending_removals:
            self._commit_removals()
        # Loop: skip refs whose referent died before being popped.
        while True:
            try:
                itemref = self.data.pop()
            except KeyError:
                raise KeyError('pop from empty WeakSet')
            item = itemref()
            if item is not None:
                return item
    def remove(self, item):
        if self._pending_removals:
            self._commit_removals()
        self.data.remove(ref(item))
    def discard(self, item):
        if self._pending_removals:
            self._commit_removals()
        self.data.discard(ref(item))
    def update(self, other):
        if self._pending_removals:
            self._commit_removals()
        # Re-add element by element so every stored ref gets *our*
        # cleanup callback.
        for element in other:
            self.add(element)
    def __ior__(self, other):
        self.update(other)
        return self
    def difference(self, other):
        newset = self.copy()
        newset.difference_update(other)
        return newset
    __sub__ = difference
    def difference_update(self, other):
        self.__isub__(other)
    def __isub__(self, other):
        if self._pending_removals:
            self._commit_removals()
        # ``s -= s`` must empty the set rather than iterate-while-mutating.
        if self is other:
            self.data.clear()
        else:
            self.data.difference_update(ref(item) for item in other)
        return self
    def intersection(self, other):
        return self.__class__(item for item in other if item in self)
    __and__ = intersection
    def intersection_update(self, other):
        self.__iand__(other)
    def __iand__(self, other):
        if self._pending_removals:
            self._commit_removals()
        self.data.intersection_update(ref(item) for item in other)
        return self
    def issubset(self, other):
        return self.data.issubset(ref(item) for item in other)
    __le__ = issubset
    def __lt__(self, other):
        return self.data < set(ref(item) for item in other)
    def issuperset(self, other):
        return self.data.issuperset(ref(item) for item in other)
    __ge__ = issuperset
    def __gt__(self, other):
        return self.data > set(ref(item) for item in other)
    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self.data == set(ref(item) for item in other)
    def symmetric_difference(self, other):
        newset = self.copy()
        newset.symmetric_difference_update(other)
        return newset
    __xor__ = symmetric_difference
    def symmetric_difference_update(self, other):
        self.__ixor__(other)
    def __ixor__(self, other):
        if self._pending_removals:
            self._commit_removals()
        if self is other:
            self.data.clear()
        else:
            # New refs carry the cleanup callback so dead entries get pruned.
            self.data.symmetric_difference_update(ref(item, self._remove) for item in other)
        return self
    def union(self, other):
        return self.__class__(e for s in (self, other) for e in s)
    __or__ = union
    def isdisjoint(self, other):
        return len(self.intersection(other)) == 0
| {
"repo_name": "alexrudy/Cauldron",
"path": "Cauldron/utils/_weakrefset.py",
"copies": "1",
"size": "5704",
"license": "bsd-3-clause",
"hash": 922267853704021200,
"line_mean": 28.1020408163,
"line_max": 92,
"alpha_frac": 0.5559256662,
"autogenerated": false,
"ratio": 4.106551475881929,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5162477142081928,
"avg_score": null,
"num_lines": null
} |
# Access WeakSet through the weakref module.
# This code is separated-out because it is needed
# by abc.py to load everything else at startup.
from _weakref import ref
__all__ = ['WeakSet']
class _IterationGuard:
    # This context manager registers itself in the current iterators of the
    # weak container, such as to delay all removals until the context manager
    # exits.
    # This technique should be relatively thread-safe (since sets are).
    def __init__(self, weakcontainer):
        # Don't create cycles
        # (holding only a weakref avoids a guard <-> container cycle).
        self.weakcontainer = ref(weakcontainer)
    def __enter__(self):
        # Register this guard in the container's ``_iterating`` set so the
        # container queues removals instead of mutating itself mid-iteration.
        w = self.weakcontainer()
        if w is not None:
            w._iterating.add(self)
        return self
    def __exit__(self, e, t, b):
        # Deregister; when the last active iterator exits, flush the
        # removals queued while iteration was in progress.
        w = self.weakcontainer()
        if w is not None:
            s = w._iterating
            s.remove(self)
            if not s:
                w._commit_removals()
class WeakSet:
    """Set of objects held only through weak references.

    Items disappear automatically once no strong references to them
    remain: stored weakrefs carry the ``_remove`` callback, which prunes
    the dead reference.  While an iteration is in progress (tracked via
    ``_iterating`` and _IterationGuard), removals are queued in
    ``_pending_removals`` and flushed later by ``_commit_removals``.
    """
    def __init__(self, data=None):
        self.data = set()
        # Weakref callback: discard the dead ref, or queue it while an
        # iteration is underway.  ``selfref`` is bound as a default so
        # the closure does not keep this WeakSet alive.
        def _remove(item, selfref=ref(self)):
            self = selfref()
            if self is not None:
                if self._iterating:
                    self._pending_removals.append(item)
                else:
                    self.data.discard(item)
        self._remove = _remove
        # A list of keys to be removed
        self._pending_removals = []
        self._iterating = set()
        if data is not None:
            self.update(data)
    def _commit_removals(self):
        # Drain the queue of refs whose referents died during iteration.
        l = self._pending_removals
        discard = self.data.discard
        while l:
            discard(l.pop())
    def __iter__(self):
        with _IterationGuard(self):
            for itemref in self.data:
                item = itemref()
                if item is not None:
                    yield item
    def __len__(self):
        # Count only references whose referent is still alive.
        return sum(x() is not None for x in self.data)
    def __contains__(self, item):
        # Non-weak-referenceable objects can never be members.
        try:
            wr = ref(item)
        except TypeError:
            return False
        return wr in self.data
    def __reduce__(self):
        # Pickle as (class, (list of live items,), instance __dict__).
        return (self.__class__, (list(self),),
                getattr(self, '__dict__', None))
    def add(self, item):
        if self._pending_removals:
            self._commit_removals()
        self.data.add(ref(item, self._remove))
    def clear(self):
        if self._pending_removals:
            self._commit_removals()
        self.data.clear()
    def copy(self):
        return self.__class__(self)
    def pop(self):
        if self._pending_removals:
            self._commit_removals()
        # Loop: skip refs whose referent died before being popped.
        while True:
            try:
                itemref = self.data.pop()
            except KeyError:
                raise KeyError('pop from empty WeakSet')
            item = itemref()
            if item is not None:
                return item
    def remove(self, item):
        if self._pending_removals:
            self._commit_removals()
        self.data.remove(ref(item))
    def discard(self, item):
        if self._pending_removals:
            self._commit_removals()
        self.data.discard(ref(item))
    def update(self, other):
        if self._pending_removals:
            self._commit_removals()
        # BUG FIX: the old ``isinstance`` fast path copied another
        # WeakSet's weakrefs wholesale; those refs carry the *other*
        # set's _remove callback, so dead entries were never pruned from
        # this set.  Always re-add so every stored ref has our own
        # cleanup callback (matches later upstream _weakrefset).
        for element in other:
            self.add(element)
    def __ior__(self, other):
        self.update(other)
        return self
    # Helper functions for simple delegating methods.
    # NOTE(review): refs produced by the delegated set operations keep
    # the callbacks of the sets they came from; result sets rely on
    # __len__/__iter__ skipping dead refs rather than on pruning.
    def _apply(self, other, method):
        if not isinstance(other, self.__class__):
            other = self.__class__(other)
        newdata = method(other.data)
        newset = self.__class__()
        newset.data = newdata
        return newset
    def difference(self, other):
        return self._apply(other, self.data.difference)
    __sub__ = difference
    def difference_update(self, other):
        if self._pending_removals:
            self._commit_removals()
        # ``s -= s`` must empty the set rather than iterate-while-mutating.
        if self is other:
            self.data.clear()
        else:
            self.data.difference_update(ref(item) for item in other)
    def __isub__(self, other):
        if self._pending_removals:
            self._commit_removals()
        if self is other:
            self.data.clear()
        else:
            self.data.difference_update(ref(item) for item in other)
        return self
    def intersection(self, other):
        return self._apply(other, self.data.intersection)
    __and__ = intersection
    def intersection_update(self, other):
        if self._pending_removals:
            self._commit_removals()
        self.data.intersection_update(ref(item) for item in other)
    def __iand__(self, other):
        if self._pending_removals:
            self._commit_removals()
        self.data.intersection_update(ref(item) for item in other)
        return self
    def issubset(self, other):
        return self.data.issubset(ref(item) for item in other)
    # BUG FIX: issubset is a *non-strict* test, so it must back ``<=``.
    # The original bound it to ``<``, which made ``a < b`` true for equal
    # sets; ``<`` is now a proper strict-subset comparison (matches later
    # upstream CPython).
    __le__ = issubset
    def __lt__(self, other):
        return self.data < set(ref(item) for item in other)
    def issuperset(self, other):
        return self.data.issuperset(ref(item) for item in other)
    # BUG FIX: likewise, issuperset backs ``>=`` and ``>`` is strict.
    __ge__ = issuperset
    def __gt__(self, other):
        return self.data > set(ref(item) for item in other)
    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self.data == set(ref(item) for item in other)
    def symmetric_difference(self, other):
        return self._apply(other, self.data.symmetric_difference)
    __xor__ = symmetric_difference
    def symmetric_difference_update(self, other):
        if self._pending_removals:
            self._commit_removals()
        if self is other:
            self.data.clear()
        else:
            # BUG FIX: refs added here must carry our cleanup callback,
            # otherwise entries for dead objects linger forever.
            self.data.symmetric_difference_update(
                ref(item, self._remove) for item in other)
    def __ixor__(self, other):
        if self._pending_removals:
            self._commit_removals()
        if self is other:
            self.data.clear()
        else:
            self.data.symmetric_difference_update(
                ref(item, self._remove) for item in other)
        return self
    def union(self, other):
        return self._apply(other, self.data.union)
    __or__ = union
    def isdisjoint(self, other):
        return len(self.intersection(other)) == 0
| {
"repo_name": "jcoady9/python-for-android",
"path": "python3-alpha/python3-src/Lib/_weakrefset.py",
"copies": "48",
"size": "6352",
"license": "apache-2.0",
"hash": -1486176255627819500,
"line_mean": 28.6822429907,
"line_max": 78,
"alpha_frac": 0.5602959698,
"autogenerated": false,
"ratio": 4.116655865197667,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": null,
"num_lines": null
} |
# Access WeakSet through the weakref module.
# This code is separated-out because it is needed
# by abc.py to load everything else at startup.
from _weakref import ref
__all__ = ['WeakSet']
class _IterationGuard(object):
    # This context manager registers itself in the current iterators of the
    # weak container, such as to delay all removals until the context manager
    # exits.
    # This technique should be relatively thread-safe (since sets are).
    def __init__(self, weakcontainer):
        # Don't create cycles
        # (holding only a weakref avoids a guard <-> container cycle).
        self.weakcontainer = ref(weakcontainer)
    def __enter__(self):
        # Register this guard in the container's ``_iterating`` set so the
        # container queues removals instead of mutating itself mid-iteration.
        w = self.weakcontainer()
        if w is not None:
            w._iterating.add(self)
        return self
    def __exit__(self, e, t, b):
        # Deregister; when the last active iterator exits, flush the
        # removals queued while iteration was in progress.
        w = self.weakcontainer()
        if w is not None:
            s = w._iterating
            s.remove(self)
            if not s:
                w._commit_removals()
class WeakSet(object):
    """Set of objects held only through weak references.

    Items disappear automatically once no strong references to them
    remain: every stored weakref carries the ``_remove`` callback, which
    prunes the dead reference.  While an iteration is in progress
    (tracked via ``_iterating`` and _IterationGuard), removals are
    queued in ``_pending_removals`` and flushed later by
    ``_commit_removals`` so iteration never mutates ``self.data``.
    """
    def __init__(self, data=None):
        self.data = set()
        # Weakref callback: discard the dead ref, or queue it while an
        # iteration is underway.  ``selfref`` is bound as a default so
        # the closure does not keep this WeakSet alive.
        def _remove(item, selfref=ref(self)):
            self = selfref()
            if self is not None:
                if self._iterating:
                    self._pending_removals.append(item)
                else:
                    self.data.discard(item)
        self._remove = _remove
        # A list of keys to be removed
        self._pending_removals = []
        self._iterating = set()
        if data is not None:
            self.update(data)
    def _commit_removals(self):
        # Drain the queue of refs whose referents died during iteration.
        l = self._pending_removals
        discard = self.data.discard
        while l:
            discard(l.pop())
    def __iter__(self):
        with _IterationGuard(self):
            for itemref in self.data:
                item = itemref()
                if item is not None:
                    yield item
    def __len__(self):
        # Queued-but-not-yet-discarded refs are still in ``data``;
        # subtract them to report only live entries.
        return len(self.data) - len(self._pending_removals)
    def __contains__(self, item):
        # Non-weak-referenceable objects can never be members.
        try:
            wr = ref(item)
        except TypeError:
            return False
        return wr in self.data
    def __reduce__(self):
        # Pickle as (class, (list of live items,), instance __dict__).
        return (self.__class__, (list(self),),
                getattr(self, '__dict__', None))
    # Mutable container, hence explicitly unhashable.
    __hash__ = None
    def add(self, item):
        if self._pending_removals:
            self._commit_removals()
        self.data.add(ref(item, self._remove))
    def clear(self):
        if self._pending_removals:
            self._commit_removals()
        self.data.clear()
    def copy(self):
        return self.__class__(self)
    def pop(self):
        if self._pending_removals:
            self._commit_removals()
        # Loop: skip refs whose referent died before being popped.
        while True:
            try:
                itemref = self.data.pop()
            except KeyError:
                raise KeyError('pop from empty WeakSet')
            item = itemref()
            if item is not None:
                return item
    def remove(self, item):
        if self._pending_removals:
            self._commit_removals()
        self.data.remove(ref(item))
    def discard(self, item):
        if self._pending_removals:
            self._commit_removals()
        self.data.discard(ref(item))
    def update(self, other):
        if self._pending_removals:
            self._commit_removals()
        # Re-add element by element so every stored ref gets *our*
        # cleanup callback.
        for element in other:
            self.add(element)
    def __ior__(self, other):
        self.update(other)
        return self
    def difference(self, other):
        newset = self.copy()
        newset.difference_update(other)
        return newset
    __sub__ = difference
    def difference_update(self, other):
        self.__isub__(other)
    def __isub__(self, other):
        if self._pending_removals:
            self._commit_removals()
        # ``s -= s`` must empty the set rather than iterate-while-mutating.
        if self is other:
            self.data.clear()
        else:
            self.data.difference_update(ref(item) for item in other)
        return self
    def intersection(self, other):
        return self.__class__(item for item in other if item in self)
    __and__ = intersection
    def intersection_update(self, other):
        self.__iand__(other)
    def __iand__(self, other):
        if self._pending_removals:
            self._commit_removals()
        self.data.intersection_update(ref(item) for item in other)
        return self
    def issubset(self, other):
        return self.data.issubset(ref(item) for item in other)
    __le__ = issubset
    def __lt__(self, other):
        return self.data < set(ref(item) for item in other)
    def issuperset(self, other):
        return self.data.issuperset(ref(item) for item in other)
    __ge__ = issuperset
    def __gt__(self, other):
        return self.data > set(ref(item) for item in other)
    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self.data == set(ref(item) for item in other)
    def __ne__(self, other):
        # Defined explicitly so ``!=`` mirrors ``==`` (``!=`` is not
        # derived automatically from ``__eq__`` on Python 2).
        opposite = self.__eq__(other)
        if opposite is NotImplemented:
            return NotImplemented
        return not opposite
    def symmetric_difference(self, other):
        newset = self.copy()
        newset.symmetric_difference_update(other)
        return newset
    __xor__ = symmetric_difference
    def symmetric_difference_update(self, other):
        self.__ixor__(other)
    def __ixor__(self, other):
        if self._pending_removals:
            self._commit_removals()
        if self is other:
            self.data.clear()
        else:
            # New refs carry the cleanup callback so dead entries get pruned.
            self.data.symmetric_difference_update(ref(item, self._remove) for item in other)
        return self
    def union(self, other):
        return self.__class__(e for s in (self, other) for e in s)
    __or__ = union
    def isdisjoint(self, other):
        return len(self.intersection(other)) == 0
| {
"repo_name": "mikedchavez1010/XX-Net",
"path": "python27/1.0/lib/_weakrefset.py",
"copies": "4",
"size": "5978",
"license": "bsd-2-clause",
"hash": -106286645403195310,
"line_mean": 27.5940594059,
"line_max": 92,
"alpha_frac": 0.5379725661,
"autogenerated": false,
"ratio": 4.251778093883357,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0007596171851227676,
"num_lines": 202
} |
# Accompanying blog post:
# http://www.jeffwidman.com/blog/847/
from sqlalchemy.ext import compiler
from sqlalchemy.schema import DDLElement
from .. import db
class CreateView(DDLElement):
    # DDL element representing "CREATE OR REPLACE VIEW <name> AS <select>";
    # its SQL is produced by the @compiler.compiles(CreateView) hook below.
    def __init__(self, name, selectable):
        self.name = name
        self.selectable = selectable
@compiler.compiles(CreateView)
def compileView(element, compiler, **kw):
    # Render the CREATE VIEW statement; literal_binds inlines bound
    # parameters so the emitted DDL is self-contained SQL.
    # NOTE(review): the ``compiler`` parameter shadows the imported
    # ``sqlalchemy.ext.compiler`` module inside this function.
    return 'CREATE OR REPLACE VIEW %s AS %s' % (
        element.name,
        compiler.sql_compiler.process(element.selectable, literal_binds=True),
    )
def create_view(name, selectable, metadata=db.metadata):
    """Register a database view named *name* backed by *selectable*.

    Returns a Table object describing the view's columns.  The CREATE
    VIEW DDL is emitted on *metadata*'s ``after_create`` event and a
    matching DROP VIEW on ``before_drop``.
    """
    _mt = db.MetaData() # temp metadata just for initial Table object creation
    t = db.Table(name, _mt) # the actual mat view class is bound to db.metadata
    # Mirror the selectable's columns onto the Table so it can be mapped.
    for c in selectable.c:
        t.append_column(db.Column(c.name, c.type, primary_key=c.primary_key))
    db.event.listen(
        metadata, 'after_create',
        CreateView(name, selectable)
    )
    @db.event.listens_for(metadata, 'after_create')
    def create_indexes(target, connection, **kw):
        # Create any indexes declared on the Table once the view exists.
        for idx in t.indexes:
            idx.create(connection)
    db.event.listen(
        metadata, 'before_drop',
        db.DDL('DROP VIEW IF EXISTS ' + name)
    )
    return t
class View(db.Model):
    # Abstract base for models backed by database views: shares db.Model
    # machinery without creating a table of its own.
    __abstract__ = True
| {
"repo_name": "bhzunami/reanalytics",
"path": "app/models/view_factory.py",
"copies": "1",
"size": "1299",
"license": "apache-2.0",
"hash": 2211264313412288000,
"line_mean": 26.6382978723,
"line_max": 83,
"alpha_frac": 0.6505003849,
"autogenerated": false,
"ratio": 3.588397790055249,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4738898174955249,
"avg_score": null,
"num_lines": null
} |
# according to http://jdfreder-notebook.readthedocs.org/en/docs/examples/Notebook/Importing%20Notebooks.html
import io, os, sys, types
from IPython import get_ipython
from nbformat import read
from IPython.core.interactiveshell import InteractiveShell
def find_notebook(fullname, path=None):
    """Locate the notebook file for a dotted module name.

    ``foo.bar`` is looked up as ``bar.ipynb`` in each directory of
    *path* (default: the current directory).  If ``Foo_Bar.ipynb`` does
    not exist, ``Foo Bar.ipynb`` is tried as a fallback.  Returns the
    first path found, or ``None``.
    """
    stem = fullname.rsplit('.', 1)[-1]
    for directory in (path or ['']):
        base = os.path.join(directory, stem + ".ipynb")
        # Try the literal name first, then with underscores turned into
        # spaces so ``import Notebook_Name`` finds "Notebook Name.ipynb".
        for candidate in (base, base.replace("_", " ")):
            if os.path.isfile(candidate):
                return candidate
class NotebookLoader(object):
    """Module Loader for Jupyter Notebooks"""
    def __init__(self, path=None):
        # Reuse the running IPython shell so notebook magics resolve.
        self.shell = InteractiveShell.instance()
        self.path = path
    def load_module(self, fullname):
        """import a notebook as a module"""
        path = find_notebook(fullname, self.path)
        #print ("importing Jupyter notebook from %s" % path)
        # load the notebook object (nbformat version 4)
        with io.open(path, 'r', encoding='utf-8') as f:
            nb = read(f, 4)
        # create the module and add it to sys.modules
        # if name in sys.modules:
        #    return sys.modules[name]
        mod = types.ModuleType(fullname)
        mod.__file__ = path
        mod.__loader__ = self
        # Notebook code may call get_ipython(); make it resolvable.
        mod.__dict__['get_ipython'] = get_ipython
        sys.modules[fullname] = mod
        # extra work to ensure that magics that would affect the user_ns
        # actually affect the notebook module's ns
        save_user_ns = self.shell.user_ns
        self.shell.user_ns = mod.__dict__
        try:
            for cell in nb.cells:
                if cell.cell_type == 'code':
                    # transform the input to executable Python
                    code = self.shell.input_transformer_manager.transform_cell(cell.source)
                    # run the code in the module
                    exec(code, mod.__dict__)
        finally:
            # Always restore the shell's original namespace, even if a
            # notebook cell raised.
            self.shell.user_ns = save_user_ns
        return mod
class NotebookFinder(object):
    """Meta-path finder that resolves imports to Jupyter notebooks."""
    def __init__(self):
        # One cached loader per search path, reused across imports.
        self.loaders = {}
    def find_module(self, fullname, path=None):
        """Return a cached NotebookLoader if *fullname* maps to a notebook."""
        if not find_notebook(fullname, path):
            return None
        # Lists aren't hashable, so collapse the path into a string key.
        key = os.path.sep.join(path) if path else path
        try:
            return self.loaders[key]
        except KeyError:
            loader = NotebookLoader(path)
            self.loaders[key] = loader
            return loader
# Install the finder at import time so notebook imports work from now on.
sys.meta_path.append(NotebookFinder())
| {
"repo_name": "miklevin/pipulate",
"path": "examples/cleanup/notebook_finder.py",
"copies": "1",
"size": "2860",
"license": "mit",
"hash": 5564449299870722000,
"line_mean": 30.4285714286,
"line_max": 108,
"alpha_frac": 0.5954545455,
"autogenerated": false,
"ratio": 3.8440860215053765,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4939540567005376,
"avg_score": null,
"num_lines": null
} |
# According to the Wikipedia's article: "The Game of Life, also known simply as Life, is a cellular automaton devised
# by the British mathematician John Horton Conway in 1970."
#
# Given a board with m by n cells, each cell has an initial state live (1) or dead (0). Each cell interacts with its
# eight neighbors (horizontal, vertical, diagonal) using the following four rules (taken from the above Wikipedia
# article):
#
# Any live cell with fewer than two live neighbors dies, as if caused by under-population.
# Any live cell with two or three live neighbors lives on to the next generation.
# Any live cell with more than three live neighbors dies, as if by over-population.
# Any dead cell with exactly three live neighbors becomes a live cell, as if by reproduction.
#
# Write a function to compute the next state (after one update) of the board given its current state. The next state
# is created by applying the above rules simultaneously to every cell in the current state, where births and deaths
# occur simultaneously.
#
# Example:
#
# Input:
# [
# [0,1,0],
# [0,0,1],
# [1,1,1],
# [0,0,0]
# ]
# Output:
# [
# [0,0,0],
# [1,0,1],
# [0,1,1],
# [0,1,0]
# ]
# Follow up:
#
# Could you solve it in-place? Remember that the board needs to be updated at the same time: You cannot update some
# cells first and then use their updated values to update other cells.
# In this question, we represent the board using a 2D array. In principle, the board is infinite, which would cause
# problems when the active area encroaches the border of the array. How would you address these problems?
class Solution(object):
    def gameOfLife(self, board):
        """
        :type board: List[List[int]]
        :rtype: void Do not return anything, modify board in-place instead.
        """
        # First pass: tag transitions in place (0 -> 2 means "will be
        # born", 1 -> 3 means "will die"); ``value % 2`` still yields
        # the cell's *current* state, so neighbors stay readable.
        height, width = len(board), len(board[0])
        for r in range(height):
            for c in range(width):
                board[r][c] = self.get_new_state(board, r, c)
        # Second pass: collapse the tags into the next-generation values.
        for r in range(height):
            for c in range(width):
                if board[r][c] == 2:
                    board[r][c] = 1
                elif board[r][c] == 3:
                    board[r][c] = 0
    def get_new_state(self, board, i, j):
        """Return the tagged next state for cell (i, j):
        2 = dead cell becoming live, 3 = live cell dying,
        otherwise the cell's current value (state unchanged)."""
        live_neighbors = sum(
            board[r][c] % 2
            for r in range(max(i - 1, 0), min(i + 2, len(board)))
            for c in range(max(j - 1, 0), min(j + 2, len(board[0])))
            if (r, c) != (i, j)
        )
        if board[i][j] == 0:
            # Reproduction: a dead cell with exactly 3 live neighbors.
            return 2 if live_neighbors == 3 else board[i][j]
        # Under- or over-population kills a live cell.
        if live_neighbors < 2 or live_neighbors > 3:
            return 3
        return board[i][j]
# Note:
# Change cell state from 0 -> 2 if its ought to get alive and 1 -> 3 if it's ought to die
# While considering a cell value modulus 2 will give current state
| {
"repo_name": "jigarkb/Programming",
"path": "LeetCode/289-M-GameOfLife.py",
"copies": "2",
"size": "2886",
"license": "mit",
"hash": 2622683289418849300,
"line_mean": 34.6296296296,
"line_max": 117,
"alpha_frac": 0.6025641026,
"autogenerated": false,
"ratio": 3.5498154981549814,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5152379600754982,
"avg_score": null,
"num_lines": null
} |
"""Accordion directive for reStructuredText."""
import uuid
import logging
from docutils import nodes
from docutils.parsers.rst import Directive, directives
from nikola.plugin_categories import RestExtension
from nikola.plugins.compile import rest
logger = logging.getLogger(__name__)
class Plugin(RestExtension):
    """Plugin for reST accordion directive."""
    name = "rest_accordion"
    def set_site(self, site):
        """Set Nikola site."""
        self.site = site
        # Register the directive with docutils and hand it the site
        # (the directive needs it for template rendering and rst2html).
        directives.register_directive('accordion', Accordion)
        Accordion.site = site
        return super(Plugin, self).set_site(site)
class Accordion(Directive):
    """reST extension for inserting accordions.

    The directive body is split into boxes separated by two consecutive
    blank lines; the first line of each box is its title, the remainder
    its content.  An optional ``bootstrap3`` argument selects the
    Bootstrap 3 template instead of the Bootstrap 4 default.
    """
    has_content = True
    optional_arguments = 1
    def rst2html(self, src):
        """Render a reST fragment to HTML with logging suppressed."""
        null_logger = logging.getLogger('NULL')
        null_logger.setLevel(1000)
        output, error_level, deps, _ = rest.rst2html(
            src, logger=null_logger, transforms=self.site.rst_transforms)
        return output
    def run(self):
        """Run the accordion directive and return the generated nodes."""
        if len(self.content) == 0:  # pragma: no cover
            # BUG FIX: Directive.run() must return a list of nodes; the
            # previous bare ``return`` yielded None, which breaks docutils.
            return []
        if self.arguments and self.arguments[0] == 'bootstrap3':
            template_name = 'accordion_bootstrap3.tmpl'
        else:
            template_name = 'accordion_bootstrap4.tmpl'
        if self.site.invariant:  # for testing purposes
            hex_uuid4 = 'fixedvaluethatisnotauuid'
        else:
            hex_uuid4 = uuid.uuid4().hex
        box_titles = []
        box_contents = []
        boxes = '\n'.join(self.content).split('\n\n\n')
        if len(boxes) == 1:
            # logger.warn is deprecated; warning() behaves identically.
            logger.warning(
                ('Accordion directive used with only one box. '
                 'Remember to use two blank lines to separate the contents.')
            )
        for box in boxes:
            # First line is the box title, the rest is its body.
            title, content = box.split('\n', 1)
            box_titles.append(self.rst2html(title))
            box_contents.append(self.rst2html(content))
        output = self.site.template_system.render_template(
            template_name,
            None,
            {
                'hex_uuid4': hex_uuid4,
                'box_titles': box_titles,
                'box_contents': box_contents,
            }
        )
        return [nodes.raw('', output, format='html')]
| {
"repo_name": "getnikola/plugins",
"path": "v8/accordion/accordion.py",
"copies": "1",
"size": "2338",
"license": "mit",
"hash": -5552551831235421000,
"line_mean": 27.1686746988,
"line_max": 77,
"alpha_frac": 0.5876817793,
"autogenerated": false,
"ratio": 3.922818791946309,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5010500571246308,
"avg_score": null,
"num_lines": null
} |
""" Accordion ID3 Constants """
__author__ = "Alastair Tse <alastair@tse.id.au>"
__license__ = "BSD"
__copyright__ = "Copyright (c) 2004, Alastair Tse"
__revision__ = "$Id: constants.py,v 1.3 2004/12/21 12:02:06 acnt2 Exp $"
ID3_FILE_READ = 0
ID3_FILE_MODIFY = 1
ID3_FILE_NEW = 2
ID3V2_FILE_HEADER_LENGTH = 10
ID3V2_FILE_EXTHEADER_LENGTH = 5
ID3V2_FILE_FOOTER_LENGTH = 10
ID3V2_FILE_DEFAULT_PADDING = 512
ID3V2_DEFAULT_VERSION = '2.4'
ID3V2_FIELD_ENC_ISO8859_1 = 0
ID3V2_FIELD_ENC_UTF16 = 1
ID3V2_FIELD_ENC_UTF16BE = 2
ID3V2_FIELD_ENC_UTF8 = 3
# ID3v2 2.2 Variables
ID3V2_2_FRAME_HEADER_LENGTH = 6
# (flag name, bit position) pairs for the v2.2 tag header flags byte.
ID3V2_2_TAG_HEADER_FLAGS = [('compression', 6),
                            ('unsync', 7)]
# Maps a three-character v2.2 frame ID to (parser type, description).
# Frames marked FIXME are only handled as opaque binary blobs.
ID3V2_2_FRAME_SUPPORTED_IDS = {
    'UFI':('bin','Unique File Identifier'), # FIXME
    'BUF':('bin','Recommended buffer size'), # FIXME
    'CNT':('pcnt','Play counter'),
    'COM':('comm','Comments'),
    'CRA':('bin','Audio Encryption'), # FIXME
    'CRM':('bin','Encrypted meta frame'), # FIXME
    'EQU':('bin','Equalisation'), # FIXME
    'ETC':('bin','Event timing codes'),
    'GEO':('geob','General Encapsulated Object'),
    'IPL':('bin','Involved People List'), # null term list FIXME
    'LNK':('bin','Linked Information'), # FIXME
    'MCI':('bin','Music CD Identifier'), # FIXME
    'MLL':('bin','MPEG Location Lookup Table'), # FIXME
    'PIC':('apic','Attached Picture'),
    'POP':('bin','Popularimeter'), # FIXME
    'REV':('bin','Reverb'), # FIXME
    'RVA':('bin','Relative volume adjustment'), # FIXME
    'STC':('bin','Synced Tempo Codes'), # FIXME
    'SLT':('bin','Synced Lyrics/Text'), # FIXME
    'TAL':('text','Album/Movie/Show'),
    'TBP':('text','Beats per Minute'),
    'TCM':('text','Composer'),
    'TCO':('text','Content Type'),
    'TCR':('text','Copyright message'),
    'TDA':('text','Date'),
    'TDY':('text','Playlist delay (ms)'),
    'TEN':('text','Encoded by'),
    'TIM':('text','Time'),
    'TKE':('text','Initial key'),
    'TLA':('text','Language(s)'),
    'TLE':('text','Length'),
    'TMT':('text','Media Type'),
    'TP1':('text','Lead artist(s)/Lead performer(s)/Performing group'),
    'TP2':('text','Band/Orchestra/Accompaniment'),
    'TP3':('text','Conductor'),
    'TP4':('text','Interpreted, remixed by'),
    'TPA':('text','Part of a set'),
    'TPB':('text','Publisher'),
    'TOA':('text','Original artist(s)/performer(s)'),
    'TOF':('text','Original Filename'),
    'TOL':('text','Original Lyricist(s)/text writer(s)'),
    'TOR':('text','Original Release Year'),
    'TOT':('text','Original album/Movie/Show title'),
    # Typo fix: the description previously read "(ISRC" without the
    # closing parenthesis.
    'TRC':('text','International Standard Recording Code (ISRC)'),
    'TRD':('text','Recording dates'),
    'TRK':('text','Track number/Position in set'),
    'TSI':('text','Size'),
    'TSS':('text','Software/hardware and settings used for encoding'),
    'TT1':('text','Content Group Description'),
    'TT2':('text','Title/Songname/Content Description'),
    'TT3':('text','Subtitle/Description refinement'),
    'TXT':('text','Lyricist(s)/Text Writer(s)'),
    'TYE':('text','Year'),
    'TXX':('wxxx','User defined text information'),
    'ULT':('bin','Unsynced Lyrics/Text'),
    'WAF':('url','Official audio file webpage'),
    'WAR':('url','Official artist/performer webpage'),
    'WAS':('url','Official audio source webpage'),
    'WCM':('url','Commercial information'),
    'WCP':('url','Copyright/Legal Information'),
    'WPM':('url','Official Publisher webpage'),
    'WXX':('wxxx','User defined URL link frame')
}
# v2.2 stores image formats as 3-char codes, not MIME types.
ID3V2_2_FRAME_IMAGE_FORMAT_TO_MIME_TYPE = {
    'JPG':'image/jpeg',
    'PNG':'image/png',
    'GIF':'image/gif'
}
ID3V2_2_FRAME_MIME_TYPE_TO_IMAGE_FORMAT = {
    'image/jpeg':'JPG',
    'image/png':'PNG',
    'image/gif':'GIF'
}
# ID3v2 2.3 and above support
# (flag name, bit position) pairs for the v2.3+ tag header flags byte.
ID3V2_3_TAG_HEADER_FLAGS = [
    ("ext", 6),
    ("exp", 5),
    ("footer", 4),
    ("unsync", 7),
]
ID3V2_3_FRAME_HEADER_LENGTH = 10
ID3V2_4_FRAME_HEADER_LENGTH = ID3V2_3_FRAME_HEADER_LENGTH
# Text frames grouped by the kind of information they carry.
ID3V2_3_FRAME_TEXT_ID_TYPE = [
    'TIT1', 'TIT2', 'TIT3', 'TALB', 'TOAL', 'TRCK', 'TPOS', 'TSST', 'TSRC',
]
ID3V2_3_FRAME_TEXT_PERSON_TYPE = [
    'TPE1', 'TPE2', 'TPE3', 'TPE4', 'TOPE', 'TEXT', 'TOLY', 'TCOM', 'TMCL',
    'TIPL', 'TENC',
]
ID3V2_3_FRAME_TEXT_PROP_TYPE = [
    'TBPM', 'TLEN', 'TKEY', 'TLAN', 'TCON', 'TFLT', 'TMED',
]
ID3V2_3_FRAME_TEXT_RIGHTS_TYPE = [
    'TCOP', 'TPRO', 'TPUB', 'TOWN', 'TRSN', 'TRSO',
]
ID3V2_3_FRAME_TEXT_OTHERS_TYPE = [
    'TOFN', 'TDLY', 'TDEN', 'TDOR', 'TDRC', 'TDRL', 'TDTG', 'TSSE', 'TSOA',
    'TSOP', 'TSOT',
]
ID3V2_3_FRAME_IS_URL_TYPE = [
    'WCOM', 'WCOP', 'WOAF', 'WOAR', 'WOAS', 'WORS', 'WPAY', 'WPUB',
]
# Frame IDs whose availability differs between v2.3 and v2.4.
ID3V2_3_FRAME_ONLY_FOR_2_3 = [
    'EQUA', 'IPLS', 'RVAD', 'TDAT', 'TIME', 'TORY', 'TRDA', 'TSIZ', 'TYER',
]
ID3V2_4_FRAME_NEW_FOR_2_4 = [
    'ASPI', 'EQU2', 'RVA2', 'SEEK', 'SIGN', 'TDEN', 'TDOR', 'TDRC', 'TDRL',
    'TDTG', 'TIPL', 'TMCL', 'TMOO', 'TPRO', 'TSOA', 'TSOP', 'TSOT', 'TSST',
]
# Names of all per-frame flags, plus their bit positions within the two
# frame flag bytes (status and format).
ID3V2_3_FRAME_FLAGS = [
    'status', 'format', 'length', 'tagpreserve', 'filepreserve', 'readonly',
    'groupinfo', 'compression', 'encryption', 'sync', 'datalength',
]
ID3V2_3_FRAME_STATUS_FLAGS = [
    ('tagpreserve', 6),
    ('filepreserve', 5),
    ('readonly', 4),
]
ID3V2_3_FRAME_FORMAT_FLAGS = [
    ('groupinfo', 6),
    ('compression', 3),
    ('encryption', 2),
    ('sync', 1),
    ('datalength', 0),
]
# Maps a four-character v2.3/v2.4 frame ID to (parser type, description).
# Frames marked FIXME are only handled as opaque binary blobs.
# Typo fixes: 'Commercial Frame' (was 'Commerical'), 'Involved people
# list' (was 'Invovled'), and 'lyrics' (was 'lyris') in USLT.
ID3V2_3_ABOVE_SUPPORTED_IDS = {
    'AENC':('bin','Audio Encryption'), # FIXME
    'APIC':('apic','Attached Picture'),
    'ASPI':('bin','Seek Point Index'), # FIXME
    'COMM':('comm','Comments'),
    'COMR':('bin','Commercial Frame'), # FIXME
    'EQU2':('bin','Equalisation'), # FIXME
    'ENCR':('bin','Encryption method registration'), # FIXME
    'ETCO':('bin','Event timing codes'), # FIXME
    'GEOB':('geob','General Encapsulated Object'),
    'GRID':('bin','Group ID Registration'), # FIXME
    'LINK':('bin','Linked Information'), # FIXME
    'MCDI':('bin','Music CD Identifier'),
    'MLLT':('bin','Location lookup table'), # FIXME
    'OWNE':('bin','Ownership frame'), # FIXME
    'PCNT':('pcnt','Play Counter'),
    'PRIV':('bin','Private frame'), # FIXME
    'POPM':('bin','Popularimeter'), # FIXME
    'POSS':('bin','Position Synchronisation frame'), # FIXME
    'RBUF':('bin','Recommended buffer size'), # FIXME
    'RVA2':('bin','Relative volume adjustment'), # FIXME
    'RVRB':('bin','Reverb'), # FIXME
    'SIGN':('bin','Signature'), # FIXME
    'SEEK':('pcnt','Seek'),
    'SYTC':('bin','Synchronised tempo codes'), # FIXME
    'SYLT':('bin','Synchronised lyrics/text'), # FIXME
    'TALB':('text','Album/Movie/Show Title'),
    'TBPM':('text','BPM'),
    'TCOM':('text','Composer'),
    'TCON':('text','Content type'),
    'TCOP':('text','Copyright'),
    'TDEN':('text','Encoding time'),
    'TDLY':('text','Playlist delay'),
    'TDOR':('text','Original release time'),
    'TDRC':('text','Recording time'),
    'TDRL':('text','Release time'),
    'TDTG':('text','Tagging time'),
    'TENC':('text','Encoded by'),
    'TEXT':('text','Lyricist/Text writer'),
    'TFLT':('text','File type'),
    'TIPL':('text','Musicians credits list'),
    'TIT1':('text','Content group description'),
    'TIT2':('text','Title/Songname/Content Description'),
    'TIT3':('text','Subtitle/Description refinement'),
    'TKEY':('text','Initial Key'),
    'TLAN':('text','Language'),
    'TLEN':('text','Length'),
    'TMCL':('text','Musician credits list'),
    'TMED':('text','Media type'),
    'TMOO':('text','Mood type'),
    'TOAL':('text','Original album/movie/show title'),
    'TOFN':('text','Original Filename'),
    'TOPE':('text','Original artist/performer'),
    'TOLY':('text','Original lyricist/text writer'),
    'TOWN':('text','File owner/licensee'),
    'TPE1':('text','Lead Performer(s)/Soloist(s)'),
    'TPE2':('text','Band/Orchestra Accompaniment'),
    'TPE3':('text','Conductor'),
    'TPE4':('text','Interpreted, remixed by'),
    'TPOS':('text','Part of a set'), # [0-9/]
    'TPRO':('text','Produced notice'),
    'TPUB':('text','Publisher'),
    'TRCK':('text','Track'), # [0-9/]
    'TRSN':('text','Internet radio station name'),
    'TRSO':('text','Internet radio station owner'),
    'TSOA':('text','Album sort order'),
    'TSOP':('text','Performer sort order'),
    'TSOT':('text','Title sort order'),
    'TSSE':('text','Software/Hardware and settings used for encoding'),
    'TSST':('text','Set subtitle'),
    'TSRC':('text','International Standard Recording Code (ISRC)'), # 12 chars
    'TXXX':('wxxx','User defined text'),
    'UFID':('bin','Unique File Identifier'), # FIXME
    'USER':('bin','Terms of use frame'), # FIXME (similar to comment)
    'USLT':('comm','Unsynchronised lyrics/text transcription'),
    'WCOM':('url','Commercial Information URL'),
    'WCOP':('url','Copyright/Legal Information'),
    'WOAF':('url','Official audio file webpage'),
    'WOAR':('url','Official artist performance webpage'),
    'WOAS':('url','Official audio source webpage'),
    'WORS':('url','Official internet radio station homepage'),
    'WPAY':('url','Payment URL'),
    'WPUB':('url','Official publisher webpage'),
    'WXXX':('wxxx','User defined URL link frame'),
    # ID3v2.3 only tags
    'EQUA':('bin','Equalization'),
    'IPLS':('bin','Involved people list'),
    'RVAD':('bin','Relative volume adjustment'),
    'TDAT':('text','Date'),
    'TIME':('text','Time'),
    'TORY':('text','Original Release Year'),
    'TRDA':('text','Recording date'),
    'TSIZ':('text','Size'),
    'TYER':('text','Year')
}
# APIC picture-type byte -> human-readable description.
ID3V2_3_APIC_PICT_TYPES = {
    0x00: 'Other',
    0x01: '32x32 PNG Icon',
    0x02: 'Other Icon',
    0x03: 'Cover (Front)',
    0x04: 'Cover (Back)',
    0x05: 'Leaflet Page',
    0x06: 'Media',
    0x07: 'Lead Artist/Lead Performer/Soloist',
    0x08: 'Artist/Performer',
    0x09: 'Conductor',
    0x0a: 'Band/Orchestra',
    0x0b: 'Composer',
    0x0c: 'Lyricist/text writer',
    0x0d: 'Recording Location',
    0x0e: 'During Recording',
    0x0f: 'During Performance',
    0x10: 'Movie/Video Screen Capture',
    0x11: 'A bright coloured fish',
    0x12: 'Illustration',
    0x13: 'Band/artist logotype',
    0x14: 'Publisher/Studio logotype'
}
| {
"repo_name": "irvingruan/Accordion",
"path": "libacc/constants.py",
"copies": "1",
"size": "9770",
"license": "bsd-2-clause",
"hash": -7986977090576239000,
"line_mean": 34.5272727273,
"line_max": 78,
"alpha_frac": 0.6053224156,
"autogenerated": false,
"ratio": 2.740532959326788,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.38458553749267876,
"avg_score": null,
"num_lines": null
} |
# API key scope identifiers used throughout evelink.
ACCOUNT = 'account'
CHARACTER = 'char'
CORPORATION = 'corp'
# Sentinel values distinguishing blueprint originals from copies.
BLUEPRINT_ORIGINAL = -1
BLUEPRINT_COPY = -2
# Suffixes the EVE API appends to role attribute names for each
# location-scoped role type.
_role_type_bases = {
    'global': '',
    'at_hq': 'AtHQ',
    'at_base': 'AtBase',
    'at_other': 'AtOther',
}
class Char(object):
    """Constants for character-related API calls."""

    # Map role type ('global', 'at_hq', ...) to the EVE API attribute name.
    # Idiom: dict comprehension instead of dict() over a generator.
    corp_roles = {k: 'corporationRoles' + v for k, v in _role_type_bases.items()}
class Corp(object):
    """Constants for corporation-related API calls."""

    # Map role type to the EVE API attribute names for held and
    # grantable roles.  Idiom: dict comprehensions.
    role_types = {k: 'roles' + v for k, v in _role_type_bases.items()}
    grantable_types = {k: 'grantableRoles' + v for k, v in _role_type_bases.items()}
    # POS (starbase) states, indexed by the API's integer state value.
    pos_states = ('unanchored', 'anchored', 'onlining', 'reinforced', 'online')
    # Entities that may hold starbase permissions, indexed by API value.
    pos_permission_entities = (
        'Starbase Config',
        'Starbase Fuel Tech',
        'Corporation Members',
        'Alliance Members',
    )
class Industry(object):
    """Constants for industry job endpoints."""

    # Job completion statuses, indexed by the API's integer status value.
    job_status = (
        'failed',
        'delivered',
        'aborted',
        'gm-aborted',
        'inflight-unanchored',
        'destroyed',
    )
class Market(object):
    """Constants for market order endpoints."""

    # Order statuses, indexed by the API's integer status value.
    order_status = (
        'active',
        'closed',
        'expired',
        'cancelled',
        'pending',
        'deleted',
    )
class APIKey(object):
    """Constants for API key metadata."""

    # This maps from EVE API values (keys) to our local constants (values).
    key_types = dict(
        Account=ACCOUNT,
        Character=CHARACTER,
        Corporation=CORPORATION,
    )
| {
"repo_name": "bastianh/evelink",
"path": "evelink/constants.py",
"copies": "7",
"size": "1229",
"license": "mit",
"hash": 3189523709429040000,
"line_mean": 27.5813953488,
"line_max": 101,
"alpha_frac": 0.6021155411,
"autogenerated": false,
"ratio": 3.135204081632653,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.007264562995984185,
"num_lines": 43
} |
# API key scope identifiers used throughout evelink.
ACCOUNT = 'account'
CHARACTER = 'char'
CORPORATION = 'corp'
# Suffixes the EVE API appends to role attribute names for each
# location-scoped role type.
_role_type_bases = {
    'global': '',
    'at_hq': 'AtHQ',
    'at_base': 'AtBase',
    'at_other': 'AtOther',
}
class Char(object):
    """Constants for character-related API calls."""

    # Map role type ('global', 'at_hq', ...) to the EVE API attribute name.
    # Use items() rather than the Python-2-only iteritems() so this module
    # also imports under Python 3 (items() exists on both versions).
    corp_roles = dict((k, 'corporationRoles' + v) for k, v in _role_type_bases.items())
class Corp(object):
    """Constants for corporation-related API calls."""

    # Map role type to the EVE API attribute names for held and grantable
    # roles.  items() instead of the Python-2-only iteritems() keeps this
    # module importable under Python 3.
    role_types = dict((k, 'roles' + v) for k, v in _role_type_bases.items())
    grantable_types = dict((k, 'grantableRoles' + v) for k, v in _role_type_bases.items())
    # POS (starbase) states, indexed by the API's integer state value.
    pos_states = ('unanchored', 'anchored', 'onlining', 'reinforced', 'online')
    # Entities that may hold starbase permissions, indexed by API value.
    pos_permission_entities = (
        'Starbase Config',
        'Starbase Fuel Tech',
        'Corporation Members',
        'Alliance Members',
    )
class Industry(object):
    """Constants for industry job endpoints."""

    # Job completion statuses, indexed by the API's integer status value.
    job_status = (
        'failed',
        'delivered',
        'gm-aborted',
        'inflight-unanchored',
        'destroyed',
    )
class Market(object):
    """Constants for market order endpoints."""

    # Order statuses, indexed by the API's integer status value.
    order_status = (
        'active',
        'closed',
        'expired',
        'cancelled',
        'pending',
        'deleted',
    )
class APIKey(object):
    """Constants for API key metadata."""

    # This maps from EVE API values (keys) to our local constants (values).
    key_types = dict(
        Account=ACCOUNT,
        Character=CHARACTER,
        Corporation=CORPORATION,
    )
| {
"repo_name": "EricE/evelink",
"path": "evelink/constants.py",
"copies": "2",
"size": "1185",
"license": "mit",
"hash": -5552078122674202000,
"line_mean": 28.625,
"line_max": 93,
"alpha_frac": 0.6016877637,
"autogenerated": false,
"ratio": 3.2027027027027026,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4804390466402703,
"avg_score": null,
"num_lines": null
} |
"""Account Adapter Template File
IMPORTANT: NOT A FUNCTIONAL ADAPTER. FUNCTIONS MUST BE IMPLEMENTED
Notes:
- Each of the functions defined below must return a json serializable
object, json_response, or valid HttpResponse object
- A json_response creates an HttpResponse object given parameters:
- content: string with the contents of the response
- status: string with the status of the response
- status_code: HTTP status code
- error: string with the error message if there is one
"""
from common.response import json_response
import logging
import re
logger = logging.getLogger("newt." + __name__)
def get_user_info(user_name=None, uid=None):
    """Returns information about the user

    Keyword arguments:
    user_name -- username
    uid -- user id

    Template stub: an implementation must return a json-serializable
    object, a json_response, or a valid HttpResponse object (see the
    module docstring).
    """
    # Intentionally unimplemented in this template adapter.
    pass
def get_group_info(group_name=None, gid=None):
    """Returns information about the group

    Keyword arguments:
    group_name -- group name
    gid -- group id

    Template stub: an implementation must return a json-serializable
    object, a json_response, or a valid HttpResponse object (see the
    module docstring).
    """
    # Intentionally unimplemented in this template adapter.
    pass
"""A tuple list in the form of:
(
(compiled_regex_exp, associated_function, request_required),
...
)
Note: The compiled_regex_exp must have named groups corresponding to
the arguments of the associated_function
Note: if request_required is True, the associated_function must have
request as the first argument
Example:
patterns = (
(re.compile(r'/usage/(?P<path>.+)$'), get_usage, False),
(re.compile(r'/image/(?P<query>.+)$'), get_image, False),
(re.compile(r'/(?P<path>.+)$'), get_resource, False),
)
"""
patterns = (
)
def extras_router(request, query):
    """Maps a query to a function if the pattern matches and returns result

    Keyword arguments:
    request -- Django HttpRequest
    query -- the query to be matched against
    """
    for regex, handler, needs_request in patterns:
        match = regex.match(query)
        if not match:
            continue
        kwargs = match.groupdict()
        # Handlers flagged with request_required receive the request first.
        if needs_request:
            return handler(request, **kwargs)
        return handler(**kwargs)
    # Returns an Unimplemented response if no pattern matches
    return json_response(status="Unimplemented",
                         status_code=501,
                         error="",
                         content="query: %s" % query)
"repo_name": "shreddd/newt-2.0",
"path": "account/adapters/template_adapter.py",
"copies": "3",
"size": "2347",
"license": "bsd-2-clause",
"hash": -5526485707202828000,
"line_mean": 28.7215189873,
"line_max": 75,
"alpha_frac": 0.6224968044,
"autogenerated": false,
"ratio": 4.395131086142322,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6517627890542322,
"avg_score": null,
"num_lines": null
} |
"Account and login pages."
import csv
import logging
from collections import OrderedDict as OD
from io import StringIO
import tornado.web
import orderportal
from orderportal import constants
from orderportal import saver
from orderportal import settings
from orderportal import utils
from orderportal.order import OrderApiV1Mixin
from orderportal.group import GroupSaver
from orderportal.message import MessageSaver
from orderportal.requesthandler import RequestHandler
class AccountSaver(saver.Saver):
    "Saver for account documents: email, password and profile fields."
    doctype = constants.ACCOUNT
    def set_email(self, email):
        "Set the email once: normalize, validate format, check uniqueness."
        assert self.get('email') is None # Email must not have been set.
        email = email.strip().lower()
        if not email: raise ValueError('No email given.')
        if not constants.EMAIL_RX.match(email):
            raise ValueError('Malformed email value.')
        # Any row in the 'account/email' view means the address is taken.
        if len(list(self.db.view('account/email', key=email))) > 0:
            raise ValueError('Email is already in use.'
                             " Use 'Reset password' if you have lost it.")
        self['email'] = email
    def erase_password(self):
        "Remove the password so the account can no longer log in with it."
        self['password'] = None
    def set_password(self, new):
        "Validate and store the hashed password; clear the activation code."
        utils.check_password(new)
        self['code'] = None
        # Bypass ordinary 'set'; avoid logging password, even if hashed.
        self.doc['password'] = utils.hashed_password(new)
        self.changed['password'] = '******'
    def reset_password(self):
        "Invalidate any previous password and set activation code."
        self.erase_password()
        self['code'] = utils.get_iuid()
    def check_required(self):
        "Check that required data is present. Raise ValueError otherwise."
        if not self['first_name']:
            raise ValueError('First name is required.')
        if not self['last_name']:
            raise ValueError('Last name is required.')
        if not self['university']:
            raise ValueError('University is required.')
class Accounts(RequestHandler):
    "Accounts list page."

    @tornado.web.authenticated
    def get(self):
        "Display the accounts list, restricted by the filter parameters."
        self.check_staff()
        self.set_filter()
        self.render('accounts.html',
                    accounts=self.get_accounts(),
                    filter=self.filter)

    def set_filter(self):
        "Set the filter parameters dictionary."
        self.filter = dict()
        for key in ['university', 'status', 'role']:
            try:
                value = self.get_argument(key)
                if not value: raise KeyError
                self.filter[key] = value
            except (tornado.web.MissingArgumentError, KeyError):
                pass

    def get_accounts(self):
        "Get the accounts matching the current filter, with order counts."
        accounts = self.filter_by_university(self.filter.get('university'))
        accounts = self.filter_by_role(self.filter.get('role'),
                                       accounts=accounts)
        accounts = self.filter_by_status(self.filter.get('status'),
                                         accounts=accounts)
        # No filter; all accounts
        if accounts is None:
            view = self.db.view('account/email', include_docs=True)
            accounts = [r.doc for r in view]
        # This is optimized for retrieval speed. The single-valued
        # function 'get_account_order_count' is not good enough here.
        view = self.db.view('order/owner',
                            group_level=1,
                            startkey=[''],
                            endkey=[constants.CEILING])
        counts = dict([(r.key[0], r.value) for r in view])
        for account in accounts:
            account['order_count'] = counts.get(account['email'], 0)
            account['name'] = utils.get_account_name(account=account)
        return accounts

    def filter_by_university(self, university, accounts=None):
        "Return accounts list if any university filter, or None if none."
        if university == '[other]':
            if accounts is None:
                view = self.db.view('account/email', include_docs=True)
                accounts = [r.doc for r in view]
            accounts = [a for a in accounts
                        if a['university'] not in settings['UNIVERSITIES']]
        elif university:
            if accounts is None:
                view = self.db.view('account/university',
                                    key=university,
                                    include_docs=True)
                accounts = [r.doc for r in view]
            else:
                # Bug fix: the filtered list was previously assigned to a
                # variable named 'account' and discarded, so filtering an
                # already-fetched list by university had no effect.
                accounts = [a for a in accounts
                            if a['university'] == university]
        return accounts

    def filter_by_role(self, role, accounts=None):
        "Return accounts list if any role filter, or None if none."
        if role:
            if accounts is None:
                view = self.db.view('account/role',
                                    key=role,
                                    include_docs=True)
                accounts = [r.doc for r in view]
            else:
                accounts = [a for a in accounts if a['role'] == role]
        return accounts

    def filter_by_status(self, status, accounts=None):
        "Return accounts list if any status filter, or None if none."
        if status:
            if accounts is None:
                view = self.db.view('account/status',
                                    key=status,
                                    include_docs=True)
                accounts = [r.doc for r in view]
            else:
                accounts = [a for a in accounts if a['status'] == status]
        return accounts
class AccountsApiV1(Accounts):
    "Accounts API; JSON output."
    def get(self):
        "JSON output."
        URL = self.absolute_reverse_url
        self.check_staff()
        self.set_filter()
        accounts = self.get_accounts()
        # Standard JSON envelope; filter and navigation links first.
        data = utils.get_json(URL('accounts_api', **self.filter), 'accounts')
        data['filter'] = self.filter
        data['links'] = dict(api=dict(href=URL('accounts_api')),
                             display=dict(href=URL('accounts')))
        data['items'] = []
        for account in accounts:
            item = OD()
            item['email'] = account['email']
            item['links'] = dict(
                api=dict(href=URL('account_api',account['email'])),
                display=dict(href=URL('account',account['email'])))
            name = last_name = account.get('last_name')
            first_name = account.get('first_name')
            # NOTE(review): the 'else' below binds to the inner 'if', so
            # 'name' becomes None when last_name is set but first_name is
            # missing; it looks like it was meant as a fallback on the
            # outer 'if name:' -- confirm against upstream.
            if name:
                if first_name:
                    name += ', ' + first_name
                else:
                    name = first_name
            item['name'] = name
            item['first_name'] = first_name
            item['last_name'] = last_name
            item['pi'] = bool(account.get('pi'))
            item['gender'] = account.get('gender')
            item['university'] = account.get('university')
            item['role'] = account['role']
            item['status'] = account['status']
            item['address'] = account.get('address') or {}
            item['invoice_ref'] = account.get('invoice_ref')
            item['invoice_address'] = account.get('invoice_address') or {}
            item['login'] = account.get('login', '-')
            item['modified'] = account['modified']
            # Per-account order count plus links to the orders resources.
            item['orders'] = dict(
                count=account['order_count'],
                links=dict(
                    display=dict(href=URL('account_orders', account['email'])),
                    api=dict(href=URL('account_orders_api', account['email']))))
            data['items'].append(item)
        self.write(data)
class AccountsCsv(Accounts):
    "Return a CSV file containing all data for a set of accounts."
    @tornado.web.authenticated
    def get(self):
        "CSV file output."
        self.check_staff()
        self.set_filter()
        accounts = self.get_accounts()
        writer = self.get_writer()
        # First row: site name and date; second row: column headers.
        writer.writerow((settings['SITE_NAME'], utils.today()))
        writer.writerow(('Email', 'Last name', 'First name', 'Role',
                         'Status', 'Order count', 'University',
                         'Department', 'PI', 'Gender', 'Group size',
                         'Subject', 'Address', 'Zip', 'City', 'Country',
                         'Invoice ref', 'Invoice address', 'Invoice zip',
                         'Invoice city', 'Invoice country', 'Phone',
                         'Other data', 'Latest login', 'Modified', 'Created'))
        for account in accounts:
            addr = account.get('address') or dict()
            iaddr = account.get('invoice_address') or dict()
            # Subject is shown as "code: label"; blank if unknown code.
            try:
                subject = "{0}: {1}".format(
                    account.get('subject'),
                    settings['subjects_lookup'][account.get('subject')])
            except KeyError:
                subject = ''
            row = [account['email'],
                   account.get('last_name') or '',
                   account.get('first_name') or '',
                   account['role'],
                   account['status'],
                   account['order_count'],
                   account.get('university') or '',
                   account.get('department') or '',
                   account.get('pi') and 'yes' or 'no',
                   account.get('gender') or '',
                   account.get('group_size') or '',
                   subject,
                   addr.get('address') or '',
                   addr.get('zip') or '',
                   addr.get('city') or '',
                   addr.get('country') or '',
                   account.get('invoice_ref') or '',
                   iaddr.get('address') or '',
                   iaddr.get('zip') or '',
                   iaddr.get('city') or '',
                   iaddr.get('country') or '',
                   account.get('phone') or '',
                   account.get('other_data') or '',
                   account.get('login') or '',
                   account.get('modified') or '',
                   account.get('created') or '']
            writer.writerow(row)
        self.write(writer.getvalue())
        self.write_finish()
    def get_writer(self):
        # Overridden in AccountsXlsx to produce an XLSX writer instead.
        return utils.CsvWriter()
    def write_finish(self):
        # Set headers so the browser downloads the file as accounts.csv.
        self.set_header('Content-Type', constants.CSV_MIME)
        self.set_header('Content-Disposition',
                        'attachment; filename="accounts.csv"')
class AccountsXlsx(AccountsCsv):
    "Return an XLSX file containing all data for a set of accounts."
    def get_writer(self):
        # Same row-writing interface as CsvWriter, but produces XLSX.
        return utils.XlsxWriter()
    def write_finish(self):
        # Set headers so the browser downloads the file as accounts.xlsx.
        self.set_header('Content-Type', constants.XLSX_MIME)
        self.set_header('Content-Disposition',
                        'attachment; filename="accounts.xlsx"')
class AccountMixin(object):
    "Mixin with access-control checks for account pages."

    def is_readable(self, account):
        "Is the account readable by the current user?"
        if self.is_owner(account): return True
        if self.is_staff(): return True
        if self.is_colleague(account['email']): return True
        return False

    def check_readable(self, account):
        "Check that the account is readable by the current user."
        if self.is_readable(account): return
        raise ValueError('You may not read the account.')

    def is_editable(self, account):
        "Is the account editable by the current user?"
        if self.is_owner(account): return True
        if self.is_staff(): return True
        return False

    def check_editable(self, account):
        "Check that the account is editable by the current user."
        # Bug fix: this previously tested is_readable(), which let any
        # colleague pass the edit check even though is_editable() would
        # have denied them.
        if self.is_editable(account): return
        raise ValueError('You may not edit the account.')
class Account(AccountMixin, RequestHandler):
    "Account page."

    @tornado.web.authenticated
    def get(self, email):
        "Display the account, its groups, invitations and latest activity."
        try:
            account = self.get_account(email)
            self.check_readable(account)
        except ValueError as msg:
            self.see_other('home', error=str(msg))
            return
        account['order_count'] = self.get_account_order_count(account['email'])
        # Latest activity: newest log entry for the account.
        # NOTE(review): 'lastkey' is not a CouchDB view option; 'endkey'
        # was probably intended.  Harmless here since limit=1 and the key
        # prefix is re-checked below -- confirm and fix upstream.
        view = self.db.view('log/account',
                            startkey=[account['email'], constants.CEILING],
                            lastkey=[account['email']],
                            descending=True,
                            limit=1)
        try:
            key = list(view)[0].key
            if key[0] != account['email']: raise IndexError
            latest_activity = key[1]
        except IndexError:
            latest_activity = None
        if self.is_staff() or self.current_user['email'] == account['email']:
            invitations = self.get_invitations(account['email'])
        else:
            invitations = []
        self.render('account.html',
                    account=account,
                    groups=self.get_account_groups(account['email']),
                    latest_activity=latest_activity,
                    invitations=invitations,
                    is_deletable=self.is_deletable(account))

    @tornado.web.authenticated
    def post(self, email):
        "Only the DELETE method-override is accepted via POST."
        if self.get_argument('_http_method', None) == 'delete':
            self.delete(email)
            return
        raise tornado.web.HTTPError(
            405, reason='Internal problem; POST only allowed for DELETE.')

    @tornado.web.authenticated
    def delete(self, email):
        "Delete an account that is pending; to get rid of spam application."
        account = self.get_account(email)
        self.check_admin()
        if not self.is_deletable(account):
            self.see_other('account', account['email'],
                           error='Account cannot be deleted.')
            return
        # Delete the groups this account owns.
        view = self.db.view('group/owner',
                            include_docs=True,
                            key=account['email'])
        for row in view:
            group = row.doc
            self.delete_logs(group['_id'])
            self.db.delete(group)
        # Remove this account from groups it is a member of.
        # NOTE(review): this queries 'group/owner' again, i.e. the very
        # groups just deleted above; a 'group/member'-style view was
        # probably intended -- confirm against the design documents.
        view = self.db.view('group/owner',
                            include_docs=True,
                            key=account['email'])
        for row in view:
            group = row.doc
            # Bug fix: the saver was previously given the view row
            # instead of the group document.
            with GroupSaver(doc=group, rqh=self) as saver:
                members = set(group['members'])
                members.discard(account['email'])
                saver['members'] = sorted(members)
        # Delete the messages of the account.
        view = self.db.view('message/recipient',
                            reduce=False,
                            include_docs=True,
                            startkey=[account['email']],
                            endkey=[account['email'], constants.CEILING])
        for row in view:
            message = row.doc
            self.delete_logs(message['_id'])
            self.db.delete(message)
        # Delete the logs of the account.
        self.delete_logs(account['_id'])
        # Delete the account itself.
        self.db.delete(account)
        self.see_other('accounts')

    def is_deletable(self, account):
        "Can the account be deleted? Pending, or disabled and no orders."
        if account['status'] == constants.PENDING: return True
        if account['status'] == constants.ENABLED: return False
        if self.get_account_order_count(account['email']) == 0: return True
        return False
class AccountApiV1(AccountMixin, RequestHandler):
    "Account API; JSON output."
    def get(self, email):
        "JSON representation of a single account."
        URL = self.absolute_reverse_url
        try:
            account = self.get_account(email)
        except ValueError as msg:
            raise tornado.web.HTTPError(404, reason=str(msg))
        try:
            self.check_readable(account)
        except ValueError as msg:
            raise tornado.web.HTTPError(403, reason=str(msg))
        data = utils.get_json(URL('account', email), 'account')
        data['email'] = account['email']
        name = last_name = account.get('last_name')
        first_name = account.get('first_name')
        # NOTE(review): the 'else' below binds to the inner 'if', so
        # 'name' becomes None when last_name is set but first_name is
        # missing; it looks like it was meant as a fallback on the outer
        # 'if name:' -- confirm against upstream.
        if name:
            if first_name:
                name += ', ' + first_name
            else:
                name = first_name
        data['links'] = dict(
            api=dict(href=URL('account_api', account['email'])),
            display=dict(href=URL('account', account['email'])))
        data['name'] = name
        data['first_name'] = first_name
        data['last_name'] = last_name
        data['pi'] = bool(account.get('pi'))
        data['university'] = account['university']
        data['role'] = account['role']
        data['gender'] = account.get('gender')
        data['group_size'] = account.get('group_size')
        data['status'] = account['status']
        data['address'] = account.get('address') or {}
        data['invoice_ref'] = account.get('invoice_ref')
        data['invoice_address'] = account.get('invoice_address') or {}
        data['login'] = account.get('login', '-')
        data['modified'] = account['modified']
        # Latest activity: newest log entry for the account.
        # NOTE(review): 'lastkey' is not a CouchDB view option; 'endkey'
        # was probably intended (harmless here since limit=1) -- confirm.
        view = self.db.view('log/account',
                            startkey=[account['email'], constants.CEILING],
                            lastkey=[account['email']],
                            descending=True,
                            limit=1)
        try:
            data['latest_activity'] = list(view)[0].key[1]
        except IndexError:
            data['latest_activity'] = None
        data['orders'] = dict(
            count=self.get_account_order_count(account['email']),
            display=dict(href=URL('account_orders', account['email'])),
            api=dict(href=URL('account_orders_api', account['email'])))
        self.write(data)
class AccountOrdersMixin(object):
    "Mixin containing access tests."

    def is_readable(self, account):
        "Is the account readable by the current user?"
        allowed = (account['email'] == self.current_user['email']
                   or self.is_staff()
                   or self.is_colleague(account['email']))
        return bool(allowed)

    def check_readable(self, account):
        "Check that the account is readable by the current user."
        if not self.is_readable(account):
            raise ValueError('You may not view these orders.')

    def get_group_orders(self, account):
        "Return all orders for the accounts in the account's group."
        result = []
        for member in self.get_account_colleagues(account['email']):
            view = self.db.view('order/owner',
                                reduce=False,
                                include_docs=True,
                                startkey=[member],
                                endkey=[member, constants.CEILING])
            result.extend(row.doc for row in view)
        return result
class AccountOrders(AccountOrdersMixin, RequestHandler):
    "Page for a list of all orders for an account."
    @tornado.web.authenticated
    def get(self, email):
        "Display the orders owned by the given account."
        try:
            account = self.get_account(email)
            self.check_readable(account)
        except ValueError as msg:
            self.see_other('home', error=str(msg))
            return
        # Index of the sortable order column in the template; depends on
        # the viewer's role and on the configured status/field columns.
        if self.is_staff():
            order_column = 4
        else:
            order_column = 3
        order_column += len(settings['ORDERS_LIST_STATUSES']) + \
                        len(settings['ORDERS_LIST_FIELDS'])
        view = self.db.view('order/owner',
                            reduce=False,
                            include_docs=True,
                            startkey=[account['email']],
                            endkey=[account['email'], constants.CEILING])
        orders = [r.doc for r in view]
        self.render('account_orders.html',
                    all_forms=self.get_forms_titles(all=True),
                    form_titles=sorted(self.get_forms_titles().values()),
                    orders=orders,
                    account=account,
                    order_column=order_column,
                    account_names=self.get_account_names(),
                    any_groups=bool(self.get_account_groups(account['email'])))
class AccountOrdersApiV1(AccountOrdersMixin,
                         OrderApiV1Mixin,
                         RequestHandler):
    "Account orders API; JSON output."
    def get(self, email):
        "JSON output."
        URL = self.absolute_reverse_url
        try:
            account = self.get_account(email)
        except ValueError as msg:
            raise tornado.web.HTTPError(404, reason=str(msg))
        try:
            self.check_readable(account)
        except ValueError as msg:
            raise tornado.web.HTTPError(403, reason=str(msg))
        # Get names and forms lookups
        names = self.get_account_names()
        forms = self.get_forms_titles(all=True)
        data = utils.get_json(URL('account_orders', account['email']),
                              'account orders')
        data['links'] = dict(
            api=dict(href=URL('account_orders_api', account['email'])),
            display=dict(href=URL('account_orders', account['email'])))
        # All orders owned by this account, in key order.
        view = self.db.view('order/owner',
                            reduce=False,
                            include_docs=True,
                            startkey=[account['email']],
                            endkey=[account['email'], constants.CEILING])
        data['orders'] = [self.get_order_json(r.doc, names, forms)
                          for r in view]
        self.write(data)
class AccountGroupsOrders(AccountOrdersMixin, RequestHandler):
    "Page for a list of all orders for the groups of an account."
    @tornado.web.authenticated
    def get(self, email):
        "Display the orders of all accounts in the account's groups."
        try:
            account = self.get_account(email)
            self.check_readable(account)
        except ValueError as msg:
            self.see_other('home', error=str(msg))
            return
        # Index of the sortable order column in the template; depends on
        # the viewer's role and on the configured status/field columns.
        if self.is_staff():
            order_column = 5
        else:
            order_column = 4
        order_column += len(settings['ORDERS_LIST_STATUSES']) + \
                        len(settings['ORDERS_LIST_FIELDS'])
        self.render('account_groups_orders.html',
                    account=account,
                    all_forms=self.get_forms_titles(all=True),
                    orders=self.get_group_orders(account),
                    order_column=order_column)
class AccountGroupsOrdersApiV1(AccountOrdersMixin,
                               OrderApiV1Mixin,
                               RequestHandler):
    "Account group orders API; JSON output."
    def get(self, email):
        """Return the orders of the account's groups as a JSON document.
        Responds 404 if no such account, 403 if the current user may not
        read it (as decided by check_readable).
        """
        URL = self.absolute_reverse_url
        try:
            account = self.get_account(email)
        except ValueError as msg:
            raise tornado.web.HTTPError(404, reason=str(msg))
        try:
            self.check_readable(account)
        except ValueError as msg:
            raise tornado.web.HTTPError(403, reason=str(msg))
        # Get names and forms lookups
        names = self.get_account_names()
        forms = self.get_forms_titles(all=True)
        data =utils.get_json(URL('account_groups_orders_api',account['email']),
                             'account groups orders')
        data['links'] = dict(
            api=dict(href=URL('account_groups_orders_api', account['email'])),
            display=dict(href=URL('account_groups_orders', account['email'])))
        # Serialise each group order via the shared OrderApiV1Mixin helper.
        data['orders'] = [self.get_order_json(o, names, forms)
                          for o in self.get_group_orders(account)]
        self.write(data)
class AccountLogs(AccountMixin, RequestHandler):
    "Account log entries page."

    @tornado.web.authenticated
    def get(self, email):
        "Render the log entries recorded for the given account."
        try:
            account = self.get_account(email)
            self.check_readable(account)
        except ValueError as error:
            self.see_other('home', error=str(error))
            return
        entries = self.get_logs(account['_id'])
        self.render('logs.html', entity=account, logs=entries)
class AccountMessages(AccountMixin, RequestHandler):
    "Account messages list page."
    @tornado.web.authenticated
    def get(self, email):
        """Show list of messages sent to the account given by email address.
        Messages are listed in descending key order of the
        'message/recipient' view (presumably newest first; the key layout
        is defined by the CouchDB view).
        """
        try:
            account = self.get_account(email)
            self.check_readable(account)
        except ValueError as msg:
            self.see_other('home', error=str(msg))
            return
        # Bug fix: a first 'message/recipient' view query was issued here
        # and its result immediately overwritten by the query below; the
        # redundant database round-trip has been removed.
        view = self.db.view('message/recipient',
                            descending=True,
                            startkey=[account['email'], constants.CEILING],
                            endkey=[account['email']],
                            reduce=False,
                            include_docs=True)
        messages = [r.doc for r in view]
        self.render('account_messages.html',
                    account=account,
                    messages=messages)
class AccountEdit(AccountMixin, RequestHandler):
    "Page for editing account information."
    @tornado.web.authenticated
    def get(self, email):
        "Show the edit form, if the current user may edit the account."
        # Bug fix: previously a single try block wrapped both calls and the
        # except path referenced account['email']; when get_account itself
        # raised, 'account' was unbound, causing a NameError (HTTP 500).
        try:
            account = self.get_account(email)
        except ValueError as msg:
            self.see_other('home', error=str(msg))
            return
        try:
            self.check_editable(account)
        except ValueError as msg:
            self.see_other('account', account['email'], error=str(msg))
            return
        self.render('account_edit.html', account=account)
    @tornado.web.authenticated
    def post(self, email):
        """Save the posted account fields.
        Only admin may change the role.  Optional fields absent from the
        POST are removed from the document (gender, group_size) or set to
        None (subject).  Redisplays the edit page on validation error.
        """
        # Bug fix: same unbound-'account' problem as in get() above.
        try:
            account = self.get_account(email)
        except ValueError as msg:
            self.see_other('home', error=str(msg))
            return
        try:
            self.check_editable(account)
        except ValueError as msg:
            self.see_other('account_edit', account['email'], error=str(msg))
            return
        try:
            with AccountSaver(doc=account, rqh=self) as saver:
                # Only admin may change role of an account.
                if self.is_admin():
                    role = self.get_argument('role')
                    if role not in constants.ACCOUNT_ROLES:
                        raise ValueError('Invalid role.')
                    saver['role'] = role
                saver['first_name'] = self.get_argument('first_name')
                saver['last_name'] = self.get_argument('last_name')
                # Free-text 'university_other' is used when no choice made.
                university = self.get_argument('university', None)
                if not university:
                    university = self.get_argument('university_other', None)
                saver['university'] = university
                saver['department'] = self.get_argument('department', None)
                saver['pi'] = utils.to_bool(self.get_argument('pi', False))
                # Gender and group_size are removed when not posted.
                try:
                    saver['gender'] = self.get_argument('gender').lower()
                except tornado.web.MissingArgumentError:
                    try:
                        del saver['gender']
                    except KeyError:
                        pass
                try:
                    saver['group_size'] = self.get_argument('group_size')
                except tornado.web.MissingArgumentError:
                    try:
                        del saver['group_size']
                    except KeyError:
                        pass
                # Subject is an integer code; invalid/missing input resets it.
                try:
                    saver['subject'] = int(self.get_argument('subject'))
                except (tornado.web.MissingArgumentError, ValueError,TypeError):
                    saver['subject'] = None
                saver['address'] = dict(
                    address=self.get_argument('address', None),
                    zip=self.get_argument('zip', None),
                    city=self.get_argument('city', None),
                    country=self.get_argument('country', None))
                saver['invoice_ref'] = self.get_argument('invoice_ref', None)
                saver['invoice_address'] = dict(
                    address=self.get_argument('invoice_address', None),
                    zip=self.get_argument('invoice_zip', None),
                    city=self.get_argument('invoice_city', None),
                    country=self.get_argument('invoice_country', None))
                saver['phone'] = self.get_argument('phone', None)
                saver['other_data'] = self.get_argument('other_data', None)
                # Checking the api_key box generates a fresh API key.
                if utils.to_bool(self.get_argument('api_key', False)):
                    saver['api_key'] = utils.get_iuid()
                # The user has now reviewed their info; clear the flag.
                saver['update_info'] = False
                saver.check_required()
        except ValueError as msg:
            self.see_other('account_edit', account['email'], error=str(msg))
        else:
            self.see_other('account', account['email'])
class Login(RequestHandler):
    "Log in to a user account. Set a secure cookie."
    def get(self):
        "Show the login form."
        self.render('login.html', next=self.get_argument('next', None))
    def post(self):
        """Log in to a user account and set a secure cookie.
        Forward to the account edit page if info needs review.
        Log any failed login attempt; disable the account if there have
        been too many recent failures.
        """
        try:
            email = self.get_argument('email')
            password = self.get_argument('password')
        except tornado.web.MissingArgumentError:
            self.see_other('home', error='Missing email or password argument.')
            return
        # Deliberately vague default error message; used on password failure.
        msg = 'Sorry, no such account or invalid password.'
        try:
            account = self.get_account(email)
        except ValueError as msg:
            self.see_other('home', error=str(msg))
            return
        if utils.hashed_password(password) != account.get('password'):
            utils.log(self.db, self, account,
                      changed=dict(login_failure=account['email']))
            # Count login failures within the most recent time window.
            view = self.db.view('log/login_failure',
                                startkey=[account['_id'], utils.timestamp(-1)],
                                endkey=[account['_id'], utils.timestamp()])
            # Disable account if too many recent login failures.
            if len(list(view)) > settings['LOGIN_MAX_FAILURES']:
                logging.warning("account %s has been disabled due to"
                                " too many login failures", account['email'])
                with AccountSaver(doc=account, rqh=self) as saver:
                    saver['status'] = constants.DISABLED
                    saver.erase_password()
                msg = "Too many failed login attempts: Your account has been" \
                      " disabled. Contact the site administrator %s." % \
                      settings.get('SITE_SUPPORT_EMAIL', '')
                # Prepare email message
                try:
                    template = settings['ACCOUNT_MESSAGES'][constants.DISABLED]
                except KeyError:
                    pass
                else:
                    with MessageSaver(rqh=self) as saver:
                        saver.create(template)
                        # Recipient is hardwired here.
                        saver.send([account['email']])
            self.see_other('home', error=msg)
            return
        # Idiom fix: was 'try: if not x == y: raise ValueError' followed by
        # 'except ValueError:' -- a plain comparison is equivalent since
        # nothing else inside that try could raise ValueError.
        if account.get('status') != constants.ENABLED:
            msg = "Account is disabled. Contact the site administrator %s." % \
                  settings.get('SITE_SUPPORT_EMAIL', '')
            self.see_other('home', error=msg)
            return
        if not self.global_modes['allow_login'] \
           and account['role'] != constants.ADMIN:
            self.see_other('home', error='Login is currently disabled.')
            return
        self.set_secure_cookie(constants.USER_COOKIE,
                               account['email'],
                               expires_days=settings['LOGIN_MAX_AGE_DAYS'])
        logging.info("Basic auth login: account %s", account['email'])
        with AccountSaver(doc=account, rqh=self) as saver:
            saver['login'] = utils.timestamp() # Set login timestamp.
        if account.get('update_info'):
            self.see_other('account_edit', account['email'],
                message='Please review and update your account information.')
            return
        next = self.get_argument('next', None)
        if next is None:
            self.see_other('home')
        else:
            # Not quite right: should be an absolute URL to redirect.
            # But seems to work anyway.
            self.redirect(next)
class Logout(RequestHandler):
    "Logout; unset the secure cookie, and invalidate login session."

    @tornado.web.authenticated
    def post(self):
        "Clear the login cookie and send the user back to the home page."
        self.set_secure_cookie(constants.USER_COOKIE, '')
        self.see_other('home')
class Reset(RequestHandler):
    "Reset the password of a user account."
    def get(self):
        "Show the password reset form."
        self.render('reset.html', email=self.get_argument('account', ''))
    def post(self):
        """Reset the account's password and email a one-time code.
        Fails silently when the email is unknown, so that account
        existence is not revealed.  Pending or disabled accounts cannot
        be reset.  A logged-in non-admin user is logged out afterwards.
        """
        URL = self.absolute_reverse_url
        try:
            account = self.get_account(self.get_argument('email'))
        except (tornado.web.MissingArgumentError, ValueError):
            self.see_other('home') # Silent error! Should not show existence.
        else:
            if account.get('status') == constants.PENDING:
                self.see_other('home', error='Cannot reset password.'
                               ' Account has not been enabled.')
                return
            elif account.get('status') == constants.DISABLED:
                self.see_other('home', error='Cannot reset password.'
                               ' Account is disabled; contact the site admin.')
                return
            with AccountSaver(doc=account, rqh=self) as saver:
                saver.reset_password()
            try:
                template = settings['ACCOUNT_MESSAGES'][constants.RESET]
            except KeyError:
                # No message template configured; no email is sent.
                pass
            else:
                with MessageSaver(rqh=self) as saver:
                    saver.create(template,
                                 account=account['email'],
                                 url=URL('password'),
                                 password_url=URL('password'),
                                 password_code_url=URL('password',
                                                       email=account['email'],
                                                       code=account['code']),
                                 code=account['code'])
                    # Recipient is hardwired here.
                    saver.send([account['email']])
            if self.current_user:
                if not self.is_admin():
                    # Log out the user
                    self.set_secure_cookie(constants.USER_COOKIE, '')
            self.see_other('home',
                           message="An email has been sent containing"
                           " a reset code. Use the link in the email."
                           " (Check your spam filter!)")
class Password(RequestHandler):
    "Set the password of a user account; requires a code."
    def get(self):
        "Show the set-password form, pre-filled from the URL parameters."
        self.render('password.html',
                    title='Set your password',
                    email=self.get_argument('email', default=''),
                    code=self.get_argument('code', default=''))
    def post(self):
        """Set the password, given a valid one-time code.
        On success the user is logged in directly (login cookie set), and
        forwarded to the account edit page if their info needs review.
        """
        try:
            account = self.get_account(self.get_argument('email', ''))
        except ValueError as msg:
            self.see_other('home', error=str(msg))
            return
        # The one-time code must match the one stored on the account.
        if account.get('code') != self.get_argument('code'):
            self.see_other('home',
                           error="Either the email address or the code" +
                           " for setting password was wrong." +
                           " Try to request a new code using the" +
                           " 'Reset password' button.")
            return
        password = self.get_argument('password', '')
        # Quality checks (length etc.) are delegated to utils.check_password.
        try:
            utils.check_password(password)
        except ValueError as msg:
            self.see_other('password',
                           email=self.get_argument('email') or '',
                           code=self.get_argument('code') or '',
                           error=str(msg))
            return
        if password != self.get_argument('confirm_password'):
            self.see_other('password',
                           email=self.get_argument('email') or '',
                           code=self.get_argument('code') or '',
                           error='password confirmation failed. Not the same!')
            return
        with AccountSaver(doc=account, rqh=self) as saver:
            saver.set_password(password)
            saver['login'] = utils.timestamp() # Set login session.
        self.set_secure_cookie(constants.USER_COOKIE,
                               account['email'],
                               expires_days=settings['LOGIN_MAX_AGE_DAYS'])
        if account.get('update_info'):
            self.see_other('account_edit', account['email'],
                message='Please review and update your account information.')
        else:
            self.see_other('home')
class Register(RequestHandler):
    "Register a new user account."
    # Simple field names echoed back into the form on validation error.
    KEYS = ['email', 'first_name', 'last_name',
            'university', 'department', 'pi',
            'gender', 'group_size', 'subject',
            'invoice_ref', 'phone']
    # Sub-fields of the postal and invoice addresses.
    ADDRESS_KEYS = ['address', 'zip', 'city', 'country']
    def get(self):
        "Show the registration form, pre-filled from any URL parameters."
        if not self.global_modes['allow_registration']:
            self.see_other('home', error='Registration is currently disabled.')
            return
        values = OD()
        for key in self.KEYS:
            values[key] = self.get_argument(key, None)
        for key in self.ADDRESS_KEYS:
            values[key] = self.get_argument(key, None)
        for key in self.ADDRESS_KEYS:
            values['invoice_' + key] = self.get_argument('invoice_' + key, None)
        self.render('register.html', values=values)
    def post(self):
        """Create the account: role user, status pending, no password.
        On validation error, redisplay the form with the entered values.
        On success, notify the site admins (if a message template is
        configured) and show the 'registered' page.
        """
        if not self.global_modes['allow_registration']:
            self.see_other('home', error='Registration is currently disabled.')
            return
        try:
            with AccountSaver(rqh=self) as saver:
                email = self.get_argument('email', None)
                saver['first_name'] = self.get_argument('first_name', None)
                saver['last_name'] = self.get_argument('last_name', None)
                # Free-text 'university_other' is used when no choice made.
                university = self.get_argument('university', None)
                if not university:
                    university = self.get_argument('university_other', None)
                saver['university'] = university
                saver['department'] = self.get_argument('department', None)
                saver['pi'] = utils.to_bool(self.get_argument('pi', False))
                gender = self.get_argument('gender', None)
                if gender:
                    saver['gender'] = gender.lower()
                group_size = self.get_argument('group_size', None)
                if group_size:
                    saver['group_size'] = group_size
                # Subject is an integer code; invalid input resets it.
                try:
                    saver['subject'] = int(self.get_argument('subject'))
                except (tornado.web.MissingArgumentError,ValueError,TypeError):
                    saver['subject'] = None
                saver['address'] = dict(
                    address=self.get_argument('address', None),
                    zip=self.get_argument('zip', None),
                    city=self.get_argument('city', None),
                    country=self.get_argument('country', None))
                saver['invoice_ref'] = self.get_argument('invoice_ref', None)
                saver['invoice_address'] = dict(
                    address=self.get_argument('invoice_address', None),
                    zip=self.get_argument('invoice_zip', None),
                    city=self.get_argument('invoice_city', None),
                    country=self.get_argument('invoice_country', None))
                saver['phone'] = self.get_argument('phone', None)
                if not email:
                    raise ValueError('Email is required.')
                saver.set_email(email)
                saver['owner'] = saver['email']
                saver['role'] = constants.USER
                saver['status'] = constants.PENDING
                saver.check_required()
                # No password yet; it is set via the enable/reset flow.
                saver.erase_password()
        except ValueError as msg:
            # NOTE(review): assumes AccountSaver(rqh=self) itself never
            # raises ValueError; otherwise 'saver' would be unbound here.
            kwargs = OD()
            for key in self.KEYS:
                kwargs[key] = saver.get(key) or ''
            for key in self.ADDRESS_KEYS:
                kwargs[key] = saver.get('address', {}).get(key) or ''
            for key in self.ADDRESS_KEYS:
                kwargs['invoice_' + key] = saver.get('invoice_address', {}).\
                                           get(key) or ''
            self.see_other('register', error=str(msg), **kwargs)
            return
        try:
            template = settings['ACCOUNT_MESSAGES'][constants.PENDING]
        except KeyError:
            # No message template configured; admins are not notified.
            pass
        else:
            account = saver.doc
            with MessageSaver(rqh=self) as saver:
                saver.create(template,
                             account=account['email'],
                             url=self.absolute_reverse_url(
                                 'account', account['email']))
                # Recipients are hardwired here.
                saver.send([a['email'] for a in self.get_admins()])
        self.see_other('registered')
class Registered(RequestHandler):
    "Successful registration. Display message."
    def get(self):
        "Render the static page confirming that registration succeeded."
        self.render('registered.html')
class AccountEnable(RequestHandler):
    "Enable the account; from status pending or disabled."
    @tornado.web.authenticated
    def post(self, email):
        """Set the account status to enabled; admin only.
        Sends the 'enabled' message template to the account, if one is
        configured in the settings.
        """
        try:
            account = self.get_account(email)
        except ValueError as msg:
            self.see_other('home', error=str(msg))
            return
        self.check_admin()
        with AccountSaver(account, rqh=self) as saver:
            saver['status'] = constants.ENABLED
            # reset_password presumably issues a new one-time code
            # (account['code'] is used below) -- confirm in AccountSaver.
            saver.reset_password()
        try:
            template = settings['ACCOUNT_MESSAGES'][constants.ENABLED]
        except KeyError:
            # No message template configured; no email is sent.
            pass
        else:
            with MessageSaver(rqh=self) as saver:
                saver.create(template,
                             account=account['email'],
                             password_url=self.absolute_reverse_url('password'),
                             password_code_url=self.absolute_reverse_url(
                                 'password',
                                 email=account['email'],
                                 code=account['code']),
                             code=account['code'])
                # Recipient is hardwired here.
                saver.send([account['email']])
        self.see_other('account', account['email'])
class AccountDisable(RequestHandler):
    "Disable the account; from status pending or enabled."

    @tornado.web.authenticated
    def post(self, email):
        "Set the account status to disabled and wipe its password; admin only."
        try:
            account = self.get_account(email)
        except ValueError as error:
            self.see_other('home', error=str(error))
            return
        self.check_admin()
        with AccountSaver(account, rqh=self) as saver:
            saver['status'] = constants.DISABLED
            saver.erase_password()
        self.see_other('account', account['email'])
class AccountUpdateInfo(RequestHandler):
    "Request an update of the account information by the user."

    @tornado.web.authenticated
    def post(self, email):
        "Flag the account so the user is asked to review their info; admin only."
        try:
            account = self.get_account(email)
        except ValueError as error:
            self.see_other('home', error=str(error))
            return
        self.check_admin()
        # The flag is sticky; skip the database write if it is already set.
        if not account.get('update_info'):
            with AccountSaver(account, rqh=self) as saver:
                saver['update_info'] = True
        self.see_other('account', account['email'])
| {
"repo_name": "pekrau/OrderPortal",
"path": "orderportal/account.py",
"copies": "1",
"size": "44345",
"license": "mit",
"hash": 5666294850644271000,
"line_mean": 40.1745589601,
"line_max": 88,
"alpha_frac": 0.5280640433,
"autogenerated": false,
"ratio": 4.596289386401327,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5624353429701328,
"avg_score": null,
"num_lines": null
} |
"""accountant URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
import rest_framework.urls
import core.urls
import interface.urls
import ng.urls
# Order matters: the catch-all r'^' entry for the 'ng' app must stay last,
# otherwise it would shadow the more specific prefixes above it.
urlpatterns = (
    url(r'^ui/', include(interface.urls, namespace='ui')),
    url(r'^api/', include(core.urls)),
    url(r'^api-auth/', include(rest_framework.urls,
                               namespace='rest_framework')),
    url(r'^', include(ng.urls, namespace='ng')),
)
| {
"repo_name": "XeryusTC/18xx-accountant",
"path": "accountant/accountant/urls.py",
"copies": "1",
"size": "1024",
"license": "mit",
"hash": -202408456602962530,
"line_mean": 34.3103448276,
"line_max": 79,
"alpha_frac": 0.6904296875,
"autogenerated": false,
"ratio": 3.4829931972789114,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4673422884778911,
"avg_score": null,
"num_lines": null
} |
"""Account balance trees."""
import collections
import datetime
from typing import Dict
from typing import Generator
from typing import Iterable
from typing import List
from typing import Optional
from beancount.core import account
from beancount.core import convert
from beancount.core.data import Directive
from beancount.core.data import Open
from beancount.core.prices import PriceMap
from fava.core.conversion import cost_or_value
from fava.core.inventory import CounterInventory
from fava.core.inventory import SimpleCounterInventory
from fava.util.typing import BeancountOptions
from fava.util.typing import TypedDict
class SerialisedTreeNode(TypedDict):
    """A serialised TreeNode, as produced by TreeNode.serialise()."""
    account: str
    balance: SimpleCounterInventory
    balance_children: SimpleCounterInventory
    # Recursive TypedDict: the actual value is a *list* of
    # SerialisedTreeNode (see TreeNode.serialise), which the type
    # checker cannot express here, hence the ignore.
    children: "SerialisedTreeNode"  # type: ignore
class TreeNode:
    """A node in the account tree."""

    __slots__ = ("name", "children", "balance", "balance_children", "has_txns")

    def __init__(self, name: str) -> None:
        #: Account name.
        self.name: str = name
        #: A list of :class:`.TreeNode`, its children.
        self.children: List["TreeNode"] = []
        #: The cumulative account balance.
        self.balance_children = CounterInventory()
        #: The account balance.
        self.balance = CounterInventory()
        #: Whether the account has any transactions.
        self.has_txns = False

    def serialise(
        self,
        conversion: str,
        price_map: PriceMap,
        end: Optional[datetime.date],
    ) -> SerialisedTreeNode:
        """Serialise this node and its subtree.

        Args:
            conversion: Conversion specifier, passed on to cost_or_value.
            price_map: Price map used for the conversions.
            end: A date to use for cost conversions.
        """
        serialised_children = []
        for child in self.children:
            serialised_children.append(
                child.serialise(conversion, price_map, end)
            )
        cumulative = cost_or_value(
            self.balance_children, conversion, price_map, end
        )
        own = cost_or_value(self.balance, conversion, price_map, end)
        return {
            "account": self.name,
            "balance_children": cumulative,
            "balance": own,
            "children": serialised_children,
        }
class Tree(Dict[str, TreeNode]):
    """Account tree.
    Maps account names to TreeNode instances; the root is the node for
    the empty account name "".
    Args:
        entries: A list of entries to compute balances from.
    """
    def __init__(self, entries: Optional[Iterable[Directive]] = None):
        # Bug fix: this previously called super().__init__(self), which
        # initialises the dict from itself -- a pointless self-copy of an
        # empty mapping.  The base dict takes no arguments here.
        super().__init__()
        # Always create the root node.
        self.get("", insert=True)
        if entries:
            account_balances: Dict[
                str, CounterInventory
            ] = collections.defaultdict(CounterInventory)
            for entry in entries:
                # Open directives create accounts even with no postings.
                if isinstance(entry, Open):
                    self.get(entry.account, insert=True)
                for posting in getattr(entry, "postings", []):
                    account_balances[posting.account].add_position(posting)
            # Insert in sorted order so sibling children lists end up
            # alphabetically ordered.
            for name, balance in sorted(account_balances.items()):
                self.insert(name, balance)
    def ancestors(self, name: str) -> Generator[TreeNode, None, None]:
        """Ancestors of an account.
        Args:
            name: An account name.
        Yields:
            The ancestors of the given account from the bottom up, ending
            with the root node (the node for *name* itself is not yielded).
        """
        while name:
            name = account.parent(name)
            yield self.get(name)
    def insert(self, name: str, balance: CounterInventory) -> None:
        """Insert account with a balance.
        Insert account and update its balance and the balances of its
        ancestors.
        Args:
            name: An account name.
            balance: The balance of the account.
        """
        node = self.get(name, insert=True)
        node.balance.add_inventory(balance)
        node.balance_children.add_inventory(balance)
        node.has_txns = True
        # Cumulative balances of all ancestors include this balance.
        for parent_node in self.ancestors(name):
            parent_node.balance_children.add_inventory(balance)
    def get(self, name: str, insert: bool = False) -> TreeNode:  # type: ignore
        """Get an account.
        Note: overrides dict.get with a different signature, hence the
        type ignore.
        Args:
            name: An account name.
            insert: If True, insert the name into the tree if it does not
                exist.
        Returns:
            TreeNode: The account of that name or an empty account if the
            account is not in the tree.
        """
        try:
            return self[name]
        except KeyError:
            node = TreeNode(name)
            if insert:
                if name:
                    # Recursively create missing ancestors as well.
                    parent = self.get(account.parent(name), insert=True)
                    parent.children.append(node)
                self[name] = node
            return node
    def net_profit(
        self, options: BeancountOptions, account_name: str
    ) -> TreeNode:
        """Calculate the net profit.
        Args:
            options: The Beancount options.
            account_name: The name to use for the account containing the net
                profit.
        """
        income = self.get(options["name_income"])
        expenses = self.get(options["name_expenses"])
        net_profit = Tree()
        net_profit.insert(
            account_name, income.balance_children + expenses.balance_children
        )
        return net_profit.get(account_name)
    def cap(self, options: BeancountOptions, unrealized_account: str) -> None:
        """Transfer Income and Expenses, add conversions and unrealized gains.
        Args:
            options: The Beancount options.
            unrealized_account: The name of the account to post unrealized
                gains to (as a subaccount of Equity).
        """
        equity = options["name_equity"]
        # Order matters: the conversions are computed from the root's
        # cumulative balance *before* the insertions below change it.
        conversions = CounterInventory(
            {
                (currency, None): -number
                for currency, number in self.get("")
                .balance_children.reduce(convert.get_cost)
                .items()
            }
        )
        # Add conversions
        self.insert(
            equity + ":" + options["account_current_conversions"], conversions
        )
        # Insert unrealized gains.
        self.insert(
            equity + ":" + unrealized_account, -self.get("").balance_children
        )
        # Transfer Income and Expenses
        self.insert(
            equity + ":" + options["account_current_earnings"],
            self.get(options["name_income"]).balance_children,
        )
        self.insert(
            equity + ":" + options["account_current_earnings"],
            self.get(options["name_expenses"]).balance_children,
        )
| {
"repo_name": "yagebu/fava",
"path": "src/fava/core/tree.py",
"copies": "2",
"size": "6509",
"license": "mit",
"hash": 971715438115993700,
"line_mean": 30.9068627451,
"line_max": 79,
"alpha_frac": 0.5841143033,
"autogenerated": false,
"ratio": 4.386118598382749,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5970232901682749,
"avg_score": null,
"num_lines": null
} |
"""Account balance trees."""
import collections
import datetime
from typing import List
from beancount.core import account
from beancount.core import convert
from beancount.core.data import Open
from fava.core.conversion import cost_or_value
from fava.core.inventory import CounterInventory
class TreeNode:
    """A node in the account tree."""
    # Slots keep per-instance memory low; a tree holds one node per account.
    __slots__ = ("name", "children", "balance", "balance_children", "has_txns")
    def __init__(self, name) -> None:
        #: Account name.
        self.name: str = name
        #: A list of :class:`.TreeNode`, its children.
        self.children: List["TreeNode"] = []
        #: The cumulative account balance.
        self.balance_children = CounterInventory()
        #: The account balance.
        self.balance = CounterInventory()
        #: Whether the account has any transactions.
        self.has_txns = False
    def serialise(self, conversion, price_map, end: datetime.date):
        """Serialise the account.
        Args:
            conversion: Conversion specifier, passed on to cost_or_value.
            price_map: Price map used for the conversions.
            end: A date to use for cost conversions.
        Returns:
            A dict with the account name, the converted own and cumulative
            balances, and the recursively serialised children.
        """
        children = [
            child.serialise(conversion, price_map, end)
            for child in self.children
        ]
        return {
            "account": self.name,
            "balance_children": cost_or_value(
                self.balance_children, conversion, price_map, end
            ),
            "balance": cost_or_value(self.balance, conversion, price_map, end),
            "children": children,
        }
class Tree(dict):
    """Account tree.
    Maps account names to TreeNode instances; the root is the node for
    the empty account name "".
    Args:
        entries: A list of entries to compute balances from.
    """
    def __init__(self, entries=None):
        dict.__init__(self)
        # Always create the root node.
        self.get("", insert=True)
        if entries:
            account_balances = collections.defaultdict(CounterInventory)
            for entry in entries:
                # Open directives create accounts even with no postings.
                if isinstance(entry, Open):
                    self.get(entry.account, insert=True)
                for posting in getattr(entry, "postings", []):
                    account_balances[posting.account].add_position(posting)
            # Insert in sorted order so sibling children lists end up
            # alphabetically ordered.
            for name, balance in sorted(account_balances.items()):
                self.insert(name, balance)
    def ancestors(self, name):
        """Ancestors of an account.
        Args:
            name: An account name.
        Yields:
            The ancestors of the given account from the bottom up, ending
            with the root node (the node for *name* itself is not yielded).
        """
        while name:
            name = account.parent(name)
            yield self.get(name)
    def insert(self, name, balance):
        """Insert account with a balance.
        Insert account and update its balance and the balances of its
        ancestors.
        Args:
            name: An account name.
            balance: The balance of the account.
        """
        node = self.get(name, insert=True)
        node.balance.add_inventory(balance)
        node.balance_children.add_inventory(balance)
        node.has_txns = True
        # Cumulative balances of all ancestors include this balance.
        for parent_node in self.ancestors(name):
            parent_node.balance_children.add_inventory(balance)
    def get(self, name, insert=False):
        """Get an account.
        Note: overrides dict.get with a different signature.
        Args:
            name: An account name.
            insert: If True, insert the name into the tree if it does not
                exist.
        Returns:
            TreeNode: The account of that name or an empty account if the
            account is not in the tree.
        """
        try:
            return self[name]
        except KeyError:
            node = TreeNode(name)
            if insert:
                if name:
                    # Recursively create missing ancestors as well.
                    parent = self.get(account.parent(name), insert=True)
                    parent.children.append(node)
                self[name] = node
            return node
    def net_profit(self, options, account_name):
        """Calculate the net profit.
        Args:
            options: The Beancount options.
            account_name: The name to use for the account containing the net
                profit.
        """
        income = self.get(options["name_income"])
        expenses = self.get(options["name_expenses"])
        net_profit = Tree()
        net_profit.insert(
            account_name, income.balance_children + expenses.balance_children
        )
        return net_profit.get(account_name)
    def cap(self, options, unrealized_account):
        """Transfer Income and Expenses, add conversions and unrealized gains.
        Args:
            options: The Beancount options.
            unrealized_account: The name of the account to post unrealized
                gains to (as a subaccount of Equity).
        """
        equity = options["name_equity"]
        # Order matters: the conversions are computed from the root's
        # cumulative balance *before* the insertions below change it.
        conversions = CounterInventory(
            {
                (currency, None): -number
                for currency, number in self.get("")
                .balance_children.reduce(convert.get_cost)
                .items()
            }
        )
        # Add conversions
        self.insert(
            equity + ":" + options["account_current_conversions"], conversions
        )
        # Insert unrealized gains.
        self.insert(
            equity + ":" + unrealized_account, -self.get("").balance_children
        )
        # Transfer Income and Expenses
        self.insert(
            equity + ":" + options["account_current_earnings"],
            self.get(options["name_income"]).balance_children,
        )
        self.insert(
            equity + ":" + options["account_current_earnings"],
            self.get(options["name_expenses"]).balance_children,
        )
| {
"repo_name": "aumayr/beancount-web",
"path": "src/fava/core/tree.py",
"copies": "1",
"size": "5560",
"license": "mit",
"hash": 3469645193492222000,
"line_mean": 30.4124293785,
"line_max": 79,
"alpha_frac": 0.5620503597,
"autogenerated": false,
"ratio": 4.419713831478537,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5481764191178538,
"avg_score": null,
"num_lines": null
} |
# Demo: list transforms (comprehensions vs. map/lambda) and sorting with
# key functions.  The `# Out[...]` comments record the values observed in
# an interactive session.
accountBook = [[34587, 'Learning python', 4, 40.95],
               [98762, 'Programming python', 5, 56.80],
               [77226, 'Head first python', 3, 32.95]]
accountBook
# Out[100]:
# [[34587, 'Learning python', 4, 40.95],
#  [98762, 'Programming python', 5, 56.8],
#  [77226, 'Head first python', 3, 32.95]]

# For each order compute (order number, quantity * unit price), then add
# a surcharge of 10 to totals under 10000.
modCostList = [(order, total + 10) if total < 10000 else (order, total)
               for order, total in ((e[0], e[2] * e[3]) for e in accountBook)]
modCostList
# Out[102]: [(34587, 173.8), (98762, 294.0), (77226, 108.85000000000001)]

student_tuples = [('sreejith', 100, 'A'),
                  ('noopur', 120, 'A'),
                  ('prajakta', 80, 'B'),
                  ('harshad', 70, 'C'),
                  ('manish', 125, 'A')]

# Default tuple ordering is lexicographic, i.e. by name here.
sorted(student_tuples)
# Out[68]:
# [('harshad', 70, 'C'), ('manish', 125, 'A'), ('noopur', 120, 'A'),
#  ('prajakta', 80, 'B'), ('sreejith', 100, 'A')]

# Sort by the second element (marks), ascending ...
sorted(student_tuples, key=lambda record: record[1])
# ... and descending.
sorted(student_tuples, key=lambda record: record[1], reverse=True)
class Student:
    """A student record with a name, a mark and a letter grade."""

    def __init__(self, name, marks, grade):
        # Assignment order fixes the key order shown by __repr__.
        self.name = name
        self.marks = marks
        self.grade = grade

    def __repr__(self):
        return str(self.__dict__)


s1 = Student('sreejith', 80, 'b')
s2 = Student('noopur', 90, 'a')
s3 = Student('harshad', 70, 'c')
s4 = Student('prajakta', 65, 'd')
studList = [s1, s2, s3, s4]
studList
# Echoes the four records in insertion order.

# Ascending by marks: prajakta (65) first, noopur (90) last.
sorted(studList, key=lambda student: student.marks)

# Descending by grade letter; with these data that coincides with the
# ascending-by-marks order above.
sorted(studList, key=lambda student: student.grade, reverse=True)
| {
"repo_name": "smenon8/AlgDataStruct_practice",
"path": "practice_problems/lambdasAndRE.py",
"copies": "1",
"size": "2366",
"license": "mit",
"hash": -4952566905614567000,
"line_mean": 27.1666666667,
"line_max": 148,
"alpha_frac": 0.5519864751,
"autogenerated": false,
"ratio": 2.366,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.34179864751,
"avg_score": null,
"num_lines": null
} |
"""AccountDomainLookups API Version 1.0.
This API client was generated using a template. Make sure this code is valid before using it.
"""
import logging
from datetime import date, datetime
from .base import BaseCanvasAPI
class AccountDomainLookupsAPI(BaseCanvasAPI):
    """AccountDomainLookups API Version 1.0."""

    def __init__(self, *args, **kwargs):
        """Init method for AccountDomainLookupsAPI."""
        super(AccountDomainLookupsAPI, self).__init__(*args, **kwargs)
        self.logger = logging.getLogger("py3canvas.AccountDomainLookupsAPI")

    def search_account_domains(self, domain=None, latitude=None, longitude=None, name=None):
        """
        Search account domains.
        Returns a list of up to 5 matching account domains
        Partial match on name / domain are supported
        """
        path = {}
        data = {}
        params = {}
        # All four query parameters are optional; include only those given.
        # (name is the campus name; the API docs give no description for
        # the other three.)
        optional_params = (("name", name),
                           ("domain", domain),
                           ("latitude", latitude),
                           ("longitude", longitude))
        for key, value in optional_params:
            if value is not None:
                params[key] = value
        self.logger.debug("GET /api/v1/accounts/search with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("GET", "/api/v1/accounts/search".format(**path), data=data, params=params, no_data=True)
| {
"repo_name": "tylerclair/py3canvas",
"path": "py3canvas/apis/account_domain_lookups.py",
"copies": "1",
"size": "1685",
"license": "mit",
"hash": -3169244160661558000,
"line_mean": 30.7924528302,
"line_max": 147,
"alpha_frac": 0.6160237389,
"autogenerated": false,
"ratio": 4.365284974093265,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5481308712993265,
"avg_score": null,
"num_lines": null
} |
"""AccountDomainLookups API Version 1.0.
This API client was generated using a template. Make sure this code is valid before using it.
"""
import logging
from datetime import date, datetime
from base import BaseCanvasAPI
class AccountDomainLookupsAPI(BaseCanvasAPI):
    """AccountDomainLookups API Version 1.0."""
    def __init__(self, *args, **kwargs):
        """Init method for AccountDomainLookupsAPI."""
        super(AccountDomainLookupsAPI, self).__init__(*args, **kwargs)
        # Per-class logger under the 'pycanvas' namespace.
        self.logger = logging.getLogger("pycanvas.AccountDomainLookupsAPI")
    def search_account_domains(self, domain=None, latitude=None, longitude=None, name=None):
        """
        Search account domains.
        Returns a list of up to 5 matching account domains
        Partial match on name / domain are supported
        """
        # Generated code: path/data/params mirror the endpoint template.
        # Only the optional query parameters actually supplied are sent.
        path = {}
        data = {}
        params = {}
        # OPTIONAL - name
        """campus name"""
        if name is not None:
            params["name"] = name
        # OPTIONAL - domain
        """no description"""
        if domain is not None:
            params["domain"] = domain
        # OPTIONAL - latitude
        """no description"""
        if latitude is not None:
            params["latitude"] = latitude
        # OPTIONAL - longitude
        """no description"""
        if longitude is not None:
            params["longitude"] = longitude
        self.logger.debug("GET /api/v1/accounts/search with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("GET", "/api/v1/accounts/search".format(**path), data=data, params=params, no_data=True)
| {
"repo_name": "PGower/PyCanvas",
"path": "pycanvas/apis/account_domain_lookups.py",
"copies": "1",
"size": "1736",
"license": "mit",
"hash": -8234783886584270000,
"line_mean": 30.7547169811,
"line_max": 147,
"alpha_frac": 0.5973502304,
"autogenerated": false,
"ratio": 4.4627249357326475,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.012397908051678792,
"num_lines": 53
} |
"""Account handler"""
from pyramid.httpexceptions import HTTPFound
from pyramid.url import route_url
from pyramid_handlers import action
from webhelpers import paginate
import uuid
from datetime import datetime
from haven.forms.account import RegistrationForm
from haven.forms.account import LoginForm
from haven.forms.account import EditForm
from haven.lib.paginate import list_users_url_generator
from haven.models.account import Account
class AccountHandler(object):
    """Pyramid handler for viewing, registering, listing and editing accounts."""

    def __init__(self, request):
        self.request = request

    @action(renderer='account/view.mako')
    def view(self):
        """View a user's account page."""
        account_id = uuid.UUID(self.request.matchdict['id']).bytes
        return {'account': Account.by_id(id=account_id)}

    @action(renderer='account/register.mako')
    def register(self):
        """Register a new account; on a valid POST, persist it and redirect."""
        account = Account(name=None, password='', email=None, activated=False,
                          is_admin=False)
        form = RegistrationForm(self.request.POST)
        if self.request.method == 'POST' and form.validate():
            account.name = form.name.data
            account.password = form.password.data
            account.email = form.email.data
            Account.add(account)
            return HTTPFound(
                location=route_url('account_view', self.request,
                                   id=uuid.UUID(bytes=account.id)))
        return {'form': form}

    @action(renderer='account/list.mako')
    def list(self):
        """Show a paginated listing of all accounts."""
        page_number = self.request.GET.get('page', '1')
        query = self.request.GET.get('query', '')
        # TODO: Remove DBSession and move query to model.
        accounts = Account.list()
        current_page = paginate.Page(
            accounts,
            page=page_number,
            items_per_page=10,
            url=list_users_url_generator,
        )
        return {'currentPage': current_page, 'accounts': current_page.items}

    @action(renderer='account/edit.mako')
    def edit(self):
        """Edit password/e-mail; changed accounts get a fresh date_updated."""
        account_id = uuid.UUID(self.request.matchdict['id']).bytes
        account = Account.by_id(id=account_id)
        form = EditForm(self.request.POST)
        if self.request.method == 'POST' and form.validate():
            new_password = form.password.data
            new_email = form.email.data
            if new_password != '':
                account.password = new_password
            if new_email != '':
                account.email = new_email
            if new_password != '' or new_email != '':
                account.date_updated = datetime.now()
                Account.add(account)
            return HTTPFound(
                location=route_url('account_view', self.request,
                                   id=uuid.UUID(bytes=account.id)))
        return {'account': account, 'form': form}
| {
"repo_name": "Niedra/Haven",
"path": "haven/handlers/account.py",
"copies": "1",
"size": "2913",
"license": "isc",
"hash": -8777797742889887000,
"line_mean": 40.0281690141,
"line_max": 78,
"alpha_frac": 0.5777548919,
"autogenerated": false,
"ratio": 4.367316341829086,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5445071233729085,
"avg_score": null,
"num_lines": null
} |
"""Account handler"""
from pyramid.httpexceptions import HTTPFound
from pyramid.url import route_url
from pyramid_handlers import action
import uuid
from datetime import datetime
from haven.models.account import Account
class AdminHandler(object):
    """Pyramid handler for administrative account actions."""

    def __init__(self, request):
        self.request = request

    @action(renderer='admin/activate.mako')
    def activate(self):
        """Manually activate an account (alternative to e-mail verification).

        With an ``id`` in the route, activates that account and redirects back
        to the listing; otherwise renders the list of inactive accounts.
        """
        matchdict = self.request.matchdict
        if 'id' in matchdict:
            account = Account.by_id(id=uuid.UUID(matchdict['id']).bytes)
            account.activated = True
            account.date_updated = datetime.now()
            Account.add(account)
            return HTTPFound(
                location=route_url('admin_activate_list', self.request))
        return {'accounts': Account.by_inactivated()}
| {
"repo_name": "Niedra/Haven",
"path": "haven/handlers/admin.py",
"copies": "1",
"size": "1041",
"license": "isc",
"hash": 2357238948758059000,
"line_mean": 32.5806451613,
"line_max": 77,
"alpha_frac": 0.6445725264,
"autogenerated": false,
"ratio": 4.58590308370044,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.035898898249759933,
"num_lines": 31
} |
"""Interactively provision AWS Direct Connect private virtual interfaces.

Workflow:
  1. Ask the operator for the target environment (STAGE or PROD).
  2. List the environment's existing virtual interfaces via the AWS CLI and
     collect the VLAN ids already in use.
  3. Allocate two new private virtual interfaces using the next free VLAN ids.
  4. Record each reserved prefix in NetBox through its IPAM REST API.

``ipv4_stage``/``ipv4_prod``, ``requests`` and ``headers`` are provided by the
local ``api`` module (star-imported below).
"""
import os, json, pprint, netaddr
from colorama import Fore, Style
from netaddr import *
from api import *

print(Style.BRIGHT, Fore.CYAN + '\nChoose the environment for the new Direct-Connect\n s\n')
init_input = input(Fore.YELLOW + 'Available options:\n [1] for STAGE \n [2] for PROD\n >>>')

# CLI commands used to discover existing interfaces. customerRouterConfig is
# filtered out because it is bulky XML this script never inspects.
aws_virtual_iface_stage = 'aws --profile STAGE_PROFILE_NAME directconnect describe-virtual-interfaces | grep -v customerRouterConfig\n\n'
aws_virtual_iface_prod = 'aws --profile PROD__PROFILE_NAME directconnect describe-virtual-interfaces | grep -v customerRouterConfig\n\n'
aws_stage_vlans = []
aws_prod_vlans = []

# "1" selects the STAGE environment; anything else falls through to PROD.
if init_input == '1':
    init_stage = json.loads(os.popen(aws_virtual_iface_stage).read())
    for items in init_stage.items():
        pprint.pprint(items, stream=None, indent=1, width=90, depth=None, compact=True)
    # Gather every VLAN id already allocated in STAGE.
    for items in init_stage.items():
        for vlans in items[1]:
            aws_stage_vlans.append(vlans['vlan'])
    while True:
        print(Style.BRIGHT, Fore.CYAN + '\n\n\n' + str(IPNetwork(ipv4_stage()[1]).cidr) + ' will be reserved for the dxcon-11111111 Direct Connect ID\n\n' )
        aws_account_number = input(Fore.YELLOW + 'Enter AWS account number you need to set up Direct Connect with:')
        aws_iface_name = input(Fore.YELLOW + 'Enter AWS Virtual Interface Name (i.e. STAGE_01 - TO - STAGE_02):')
        # BUG FIX: the BGP auth key formerly referenced the undefined name
        # ``aws_vlans`` (NameError at runtime); it now derives from
        # ``aws_stage_vlans``, matching the vlan field.
        aws_virtual_iface_create = 'aws --debug --profile STAGE_PROFILE_NAME directconnect allocate-private-virtual-interface --connection-id {} --owner-account {} --new-private-virtual-interface-allocation virtualInterfaceName={},vlan={},asn={},authKey={},amazonAddress={},customerAddress={} | grep -v DEBUG'.format(str('dxcon-11111111'), str(aws_account_number), str(aws_iface_name), str((max(aws_stage_vlans) + 1)), str('11111'), str('bgp' + str(max(aws_stage_vlans) + 1)), str(ipv4_stage()[0]), str(ipv4_stage()[1]))
        if os.popen(aws_virtual_iface_create).read():
            prefix_add = {"description": aws_iface_name, "site": "1", "status": "1", "prefix": str(IPNetwork(ipv4_stage()[0]).cidr)}
            ip_link = 'https://netbox-url.net/api/ipam/prefixes/'
            ip_post = requests.post(ip_link, json=prefix_add, headers=headers)
        print(Style.BRIGHT, Fore.CYAN + '\n\n\n' + str(IPNetwork(ipv4_stage()[1]).cidr) + ' will be reserved for the dxcon-22222222 Direct Connect ID\n\n' )
        aws_account_number = input(Fore.YELLOW + 'Enter AWS account number you need to set up Direct Connect with:')
        aws_iface_name = input(Fore.YELLOW + 'Enter AWS Virtual Interface Name (i.e. STAGE_01 - TO - STAGE_03):')
        # NOTE(review): the banner above mentions dxcon-22222222 but the
        # connection-id below is still dxcon-11111111 — confirm which is right.
        # Same aws_vlans -> aws_stage_vlans fix applied here.
        aws_virtual_iface_create = 'aws --debug --profile account-test directconnect allocate-private-virtual-interface --connection-id {} --owner-account {} --new-private-virtual-interface-allocation virtualInterfaceName={},vlan={},asn={},authKey={},amazonAddress={},customerAddress={} | grep -v DEBUG'.format(str('dxcon-11111111'), str(aws_account_number), str(aws_iface_name), str((max(aws_stage_vlans) + 2)), str('11111'), str('bgp' + str(max(aws_stage_vlans) + 2)), str(ipv4_stage()[0]), str(ipv4_stage()[1]))
        if os.popen(aws_virtual_iface_create).read():
            prefix_add = {"description": aws_iface_name, "site": "1", "status": "1", "prefix": str(IPNetwork(ipv4_stage()[0]).cidr)}
            ip_link = 'https://netbox-url.net/api/ipam/prefixes/'
            ip_post = requests.post(ip_link, json=prefix_add, headers=headers)
        else: break
        break
# Anything other than "1" is treated as the PROD environment.
else:
    init_prod = json.loads(os.popen(aws_virtual_iface_prod).read())
    for items in init_prod.items():
        pprint.pprint(items, stream=None, indent=1, width=90, depth=None, compact=True)
    # Gather every VLAN id already allocated in PROD.
    for items in init_prod.items():
        for vlans in items[1]:
            aws_prod_vlans.append(vlans['vlan'])
    while True:
        print(Style.BRIGHT, Fore.CYAN + '\n\n\n' + str(IPNetwork(ipv4_prod()[1]).cidr) + ' will be reserved for the dxcon-11111111 Direct Connect ID\n\n' )
        aws_account_number = input(Fore.YELLOW + 'Enter AWS account number you need to set up Direct Connect with:')
        aws_iface_name = input(Fore.YELLOW + 'Enter AWS Virtual Interface Name (i.e. PROD_01 - TO - PROD_02):')
        aws_virtual_iface_create = 'aws --debug --profile account-prod directconnect allocate-private-virtual-interface --connection-id {} --owner-account {} --new-private-virtual-interface-allocation virtualInterfaceName={},vlan={},asn={},authKey={},amazonAddress={},customerAddress={} | grep -v DEBUG'.format(str('dxcon-11111111'), str(aws_account_number), str(aws_iface_name), str((max(aws_prod_vlans) + 1)), str('11111'), str('bgp' + str(max(aws_prod_vlans) + 1)), str(ipv4_prod()[0]), str(ipv4_prod()[1]))
        if os.popen(aws_virtual_iface_create).read():
            # NOTE(review): site "16" here vs "1" everywhere else — confirm intended.
            prefix_add = {"description": aws_iface_name, "site": "16", "status": "1", "prefix": str(IPNetwork(ipv4_prod()[0]).cidr)}
            ip_link = 'https://netbox-url.net/api/ipam/prefixes/'
            ip_post = requests.post(ip_link, json=prefix_add, headers=headers)
        print(Style.BRIGHT, Fore.CYAN + '\n\n\n' + str(IPNetwork(ipv4_prod()[1]).cidr) + ' will be reserved for the dxcon-22222222 Direct Connect ID\n\n' )
        aws_account_number = input(Fore.YELLOW + 'Enter AWS account number you need to set up Direct Connect with:')
        aws_iface_name = input(Fore.YELLOW + 'Enter AWS Virtual Interface Name (i.e. PROD_01 - TO - PROD_03):')
        aws_virtual_iface_create = 'aws --debug --profile account-prod directconnect allocate-private-virtual-interface --connection-id {} --owner-account {} --new-private-virtual-interface-allocation virtualInterfaceName={},vlan={},asn={},authKey={},amazonAddress={},customerAddress={} | grep -v DEBUG'.format(str('dxcon-22222222'), str(aws_account_number), str(aws_iface_name), str((max(aws_prod_vlans) + 2)), str('11111'), str('bgp' + str(max(aws_prod_vlans) + 2)), str(ipv4_prod()[0]), str(ipv4_prod()[1]))
        if os.popen(aws_virtual_iface_create).read():
            prefix_add = {"description": aws_iface_name, "site": "1", "status": "1", "prefix": str(IPNetwork(ipv4_prod()[0]).cidr)}
            ip_link = 'https://netbox-url.net/api/ipam/prefixes/'
            ip_post = requests.post(ip_link, json=prefix_add, headers=headers)
        else: break
        break
| {
"repo_name": "rkutsel/netbox-scripts",
"path": "aws-direct-connect/directconnect.py",
"copies": "1",
"size": "6836",
"license": "mit",
"hash": -7575476168008229000,
"line_mean": 75.808988764,
"line_max": 514,
"alpha_frac": 0.6582796957,
"autogenerated": false,
"ratio": 3.34769833496572,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45059780306657193,
"avg_score": null,
"num_lines": null
} |
"""Account linking via the cloud."""
import asyncio
import logging
from typing import Any
from hass_nabucasa import account_link
from homeassistant.const import MAJOR_VERSION, MINOR_VERSION, PATCH_VERSION
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_entry_oauth2_flow, event
from .const import DOMAIN
DATA_SERVICES = "cloud_account_link_services"
CACHE_TIMEOUT = 3600
_LOGGER = logging.getLogger(__name__)
@callback
def async_setup(hass: HomeAssistant):
    """Register the cloud-backed OAuth2 implementation provider."""
    config_entry_oauth2_flow.async_add_implementation_provider(hass, DOMAIN, async_provide_implementation)
async def async_provide_implementation(hass: HomeAssistant, domain: str):
    """Provide an implementation for a domain."""
    services = await _get_services(hass)
    # Offer the cloud implementation only when the cloud lists the service
    # and our HA version satisfies its minimum requirement.
    if any(svc["service"] == domain and _is_older(svc["min_version"]) for svc in services):
        return CloudOAuth2Implementation(hass, domain)
    return None
@callback
def _is_older(version: str) -> bool:
    """Test if a version is older than the current HA version."""
    parts = version.split(".")
    # Only exact major.minor.patch strings are accepted.
    if len(parts) != 3:
        return False
    try:
        wanted = [int(part) for part in parts]
    except ValueError:
        return False
    # PATCH_VERSION may carry a suffix (e.g. "0b3"); keep the leading digits.
    digits = []
    for char in PATCH_VERSION:
        if not char.isnumeric():
            break
        digits.append(char)
    try:
        patch = int("".join(digits))
    except ValueError:
        patch = 0
    return wanted <= [MAJOR_VERSION, MINOR_VERSION, patch]
async def _get_services(hass):
    """Return the cloud account-link services, cached for CACHE_TIMEOUT seconds."""
    cached = hass.data.get(DATA_SERVICES)
    if cached is not None:
        return cached
    # NOTE(review): network errors from this fetch are not handled here —
    # a failure propagates to the caller. Confirm whether that is intended.
    services = await account_link.async_fetch_available_services(hass.data[DOMAIN])
    hass.data[DATA_SERVICES] = services

    @callback
    def clear_services(_now):
        """Clear services cache."""
        hass.data.pop(DATA_SERVICES, None)

    event.async_call_later(hass, CACHE_TIMEOUT, clear_services)
    return services
class CloudOAuth2Implementation(config_entry_oauth2_flow.AbstractOAuth2Implementation):
    """Cloud implementation of the OAuth2 flow.

    Instead of talking to the provider directly, the authorize URL and the
    resulting tokens are relayed through the Home Assistant Cloud
    (hass_nabucasa account_link helpers).
    """

    def __init__(self, hass: HomeAssistant, service: str):
        """Initialize cloud OAuth2 implementation."""
        self.hass = hass
        self.service = service

    @property
    def name(self) -> str:
        """Name of the implementation."""
        return "Home Assistant Cloud"

    @property
    def domain(self) -> str:
        """Domain that is providing the implementation."""
        return DOMAIN

    async def async_generate_authorize_url(self, flow_id: str) -> str:
        """Generate a url for the user to authorize."""
        helper = account_link.AuthorizeAccountHelper(
            self.hass.data[DOMAIN], self.service
        )
        authorize_url = await helper.async_get_authorize_url()

        async def await_tokens():
            """Wait for tokens and pass them on when received."""
            try:
                tokens = await helper.async_get_tokens()
            except asyncio.TimeoutError:
                # Timeouts and link errors are logged only; the config flow
                # simply never advances in that case.
                _LOGGER.info("Timeout fetching tokens for flow %s", flow_id)
            except account_link.AccountLinkException as err:
                _LOGGER.info(
                    "Failed to fetch tokens for flow %s: %s", flow_id, err.code
                )
            else:
                # Feed the received tokens back into the waiting config flow.
                await self.hass.config_entries.flow.async_configure(
                    flow_id=flow_id, user_input=tokens
                )

        # Wait for the tokens in the background so the authorize URL can be
        # returned to the frontend immediately.
        self.hass.async_create_task(await_tokens())
        return authorize_url

    async def async_resolve_external_data(self, external_data: Any) -> dict:
        """Resolve external data to tokens."""
        # We already passed in tokens
        return external_data

    async def _async_refresh_token(self, token: dict) -> dict:
        """Refresh a token."""
        return await account_link.async_fetch_access_token(
            self.hass.data[DOMAIN], self.service, token["refresh_token"]
        )
| {
"repo_name": "postlund/home-assistant",
"path": "homeassistant/components/cloud/account_link.py",
"copies": "3",
"size": "4238",
"license": "apache-2.0",
"hash": 142621370464582460,
"line_mean": 28.4305555556,
"line_max": 87,
"alpha_frac": 0.6385087305,
"autogenerated": false,
"ratio": 4.225324027916251,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00016158609908609907,
"num_lines": 144
} |
"""Account linking via the cloud."""
import asyncio
import logging
from typing import Any
import aiohttp
from hass_nabucasa import account_link
from homeassistant.const import MAJOR_VERSION, MINOR_VERSION, PATCH_VERSION
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_entry_oauth2_flow, event
from .const import DOMAIN
DATA_SERVICES = "cloud_account_link_services"
CACHE_TIMEOUT = 3600
_LOGGER = logging.getLogger(__name__)
@callback
def async_setup(hass: HomeAssistant):
    """Wire the cloud account-link provider into the OAuth2 flow machinery."""
    provider = async_provide_implementation
    config_entry_oauth2_flow.async_add_implementation_provider(hass, DOMAIN, provider)
async def async_provide_implementation(hass: HomeAssistant, domain: str):
    """Provide an implementation for a domain."""
    services = await _get_services(hass)
    # First service entry matching the domain with a satisfied min_version.
    match = next(
        (svc for svc in services
         if svc["service"] == domain and _is_older(svc["min_version"])),
        None,
    )
    if match is not None:
        return CloudOAuth2Implementation(hass, domain)
    return None
@callback
def _is_older(version: str) -> bool:
    """Test if a version is older than the current HA version."""
    pieces = version.split(".")
    if len(pieces) != 3:
        # Anything but major.minor.patch is rejected.
        return False
    try:
        requested = [int(piece) for piece in pieces]
    except ValueError:
        return False
    # Strip any non-numeric suffix from PATCH_VERSION (e.g. beta markers).
    idx = 0
    while idx < len(PATCH_VERSION) and PATCH_VERSION[idx].isnumeric():
        idx += 1
    try:
        patch_number = int(PATCH_VERSION[:idx])
    except ValueError:
        patch_number = 0
    current = [MAJOR_VERSION, MINOR_VERSION, patch_number]
    return requested <= current
async def _get_services(hass):
    """Return the cloud account-link services, cached for CACHE_TIMEOUT seconds."""
    cached = hass.data.get(DATA_SERVICES)
    if cached is not None:
        return cached
    try:
        services = await account_link.async_fetch_available_services(hass.data[DOMAIN])
    except (aiohttp.ClientError, asyncio.TimeoutError):
        # Degrade gracefully; the empty result is not cached so a later call retries.
        return []
    hass.data[DATA_SERVICES] = services

    @callback
    def clear_services(_now):
        """Clear services cache."""
        hass.data.pop(DATA_SERVICES, None)

    event.async_call_later(hass, CACHE_TIMEOUT, clear_services)
    return services
class CloudOAuth2Implementation(config_entry_oauth2_flow.AbstractOAuth2Implementation):
    """Cloud implementation of the OAuth2 flow.

    The authorize URL and the resulting tokens are relayed through the Home
    Assistant Cloud (hass_nabucasa account_link helpers) instead of talking
    to the provider directly.
    """

    def __init__(self, hass: HomeAssistant, service: str) -> None:
        """Initialize cloud OAuth2 implementation."""
        self.hass = hass
        self.service = service

    @property
    def name(self) -> str:
        """Name of the implementation."""
        return "Home Assistant Cloud"

    @property
    def domain(self) -> str:
        """Domain that is providing the implementation."""
        return DOMAIN

    async def async_generate_authorize_url(self, flow_id: str) -> str:
        """Generate a url for the user to authorize."""
        helper = account_link.AuthorizeAccountHelper(
            self.hass.data[DOMAIN], self.service
        )
        authorize_url = await helper.async_get_authorize_url()

        async def await_tokens():
            """Wait for tokens and pass them on when received."""
            try:
                tokens = await helper.async_get_tokens()
            except asyncio.TimeoutError:
                # Failures are logged only; the config flow simply never
                # advances in that case.
                _LOGGER.info("Timeout fetching tokens for flow %s", flow_id)
            except account_link.AccountLinkException as err:
                _LOGGER.info(
                    "Failed to fetch tokens for flow %s: %s", flow_id, err.code
                )
            else:
                # Feed the received tokens back into the waiting config flow.
                await self.hass.config_entries.flow.async_configure(
                    flow_id=flow_id, user_input=tokens
                )

        # Wait for the tokens in the background so the authorize URL can be
        # returned to the frontend immediately.
        self.hass.async_create_task(await_tokens())
        return authorize_url

    async def async_resolve_external_data(self, external_data: Any) -> dict:
        """Resolve external data to tokens."""
        # We already passed in tokens
        return external_data

    async def _async_refresh_token(self, token: dict) -> dict:
        """Refresh a token."""
        return await account_link.async_fetch_access_token(
            self.hass.data[DOMAIN], self.service, token["refresh_token"]
        )
| {
"repo_name": "home-assistant/home-assistant",
"path": "homeassistant/components/cloud/account_link.py",
"copies": "2",
"size": "4348",
"license": "apache-2.0",
"hash": 63840083992883950,
"line_mean": 28.3783783784,
"line_max": 87,
"alpha_frac": 0.638224471,
"autogenerated": false,
"ratio": 4.233690360272639,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5871914831272638,
"avg_score": null,
"num_lines": null
} |
"""
POSIX user information and management.
Usage:
%(cmd)s list
%(cmd)s show [ <user> ...]
%(cmd)s create <name> [options]
%(cmd)s delete [--no-delete-home] [--no-delete-group] [--force] <user> ...
Commands:
list Prints a list of users.
show Show detailed information about user. If no users are provided,
all of them are displayed.
create Creates a new user. See Create options below for options
description.
delete Delete specified user (or user list). See Delete options
below for options description.
Create options:
-c gecos, --gecos=gecos Set the GECOS field to gecos.
-d dir, --directory=dir Set the user's home directory to dir.
If this option is not set, a default value
is used.
-s shell, --shell=shell Set user's login shell to shell. If this
option is not set, a default value is used.
-u uid, --uid=uid Use user ID uid for the newly created user.
If this option is not set, a default value
is used.
-g gid, --gid=gid Set user's primary group ID to gid. If this
option is not set, a default value is used.
-r, --reserved The user is a system user.
Implies the -M option.
-M, --no-user-home Don't create a home directory.
-n, --no-user-group Don't create a primary group for user.
-p, --password=pwd Set user's password to 'pwd'.
-P, --plain-password If set, the password set in '-p' parameter
is plain text. Otherwise, it is already
encrypted by supported hash algorithm.
See crypt(3).
Delete options:
--no-delete-home Do not remove home directory.
--no-delete-group Do not remove users primary group.
--force Remove home directory even if the user is not owner.
"""
# TODO -- option separator
from lmi.scripts.common import command
from lmi.scripts.common.errors import LmiFailed
from lmi.scripts.common.errors import LmiInvalidOptions
from lmi.scripts import account
def get_user_info(ns, user):
    """
    Return detailed information of the user to show.
    """
    return (
        user.Name,
        user.UserID,
        user.HomeDirectory,
        user.LoginShell,
        user.PasswordLastChange,
    )
class Lister(command.LmiLister):
    COLUMNS = ('Name', "UID", "Full name")

    def execute(self, ns):
        """Yield one (name, uid, gecos) row per user, sorted by user name."""
        users = sorted(account.list_users(ns), key=lambda user: user.Name)
        for user in users:
            yield (user.Name, user.UserID, user.ElementName)
class Show(command.LmiInstanceLister):
    PROPERTIES = (
        'Name',
        ('UID', 'UserID'),
        ('Home', 'HomeDirectory'),
        ('Login shell', 'LoginShell'),
        ('Password last change', lambda i: i.PasswordLastChange.datetime.strftime("%Y/%m/%d"))
    )

    def transform_options(self, options):
        """
        Rename 'user' option to 'users' parameter name for better
        readability
        """
        options['<users>'] = options.pop('<user>')

    def execute(self, ns, users):
        """Yield the requested user instances, or every user when none given."""
        if not users:
            for user in account.list_users(ns):
                yield user
            return
        for name in users:
            yield account.get_user(ns, name)
class Delete(command.LmiCheckResult):
    OPT_NO_UNDERSCORES = True
    EXPECT = None

    def transform_options(self, options):
        """
        Rename 'user' option to 'users' parameter name for better
        readability
        """
        options['<users>'] = options.pop('<user>')

    def execute(self, ns, users, no_delete_group=False, no_delete_home=False,
            force=False):
        """Delete every named user, honouring the keep-home/keep-group flags."""
        for name in users:
            instance = account.get_user(ns, name)
            account.delete_user(ns, instance, no_delete_group, no_delete_home,
                                force)
class Create(command.LmiCheckResult):
    OPT_NO_UNDERSCORES = True
    EXPECT = None

    def verify_options(self, opts):
        """Reject non-numeric uid/gid values before touching the system."""
        uid, gid = opts['uid'], opts['gid']
        if uid is not None and not uid.isdigit():
            raise LmiInvalidOptions("User ID must be a number")
        if gid is not None and not gid.isdigit():
            raise LmiInvalidOptions("Group ID must be a number")

    def execute(self, ns, name,
            gecos=None,
            directory=None,
            shell=None,
            uid=None,
            gid=None,
            reserved=None,
            no_user_home=False,
            no_user_group=False,
            password=None,
            plain_password=False):
        """Create the user, mapping the CLI flags onto account.create_user."""
        account.create_user(
            ns, name,
            gecos=gecos,
            home=directory,
            create_home=not no_user_home,
            shell=shell,
            uid=uid,
            gid=gid,
            create_group=not no_user_group,
            reserved=reserved,
            password=password,
            plain_password=plain_password,
        )
# Top-level entry point: multiplex the 'user' command (help text comes from
# the module docstring) into its four sub-commands.
User = command.register_subcommands(
    'user', __doc__,
    { 'list' : Lister
    , 'show' : Show
    , 'create' : Create
    , 'delete' : Delete
    },
)
| {
"repo_name": "openlmi/openlmi-scripts",
"path": "commands/account/lmi/scripts/account/user_cmd.py",
"copies": "2",
"size": "7171",
"license": "bsd-2-clause",
"hash": -8029499722491833000,
"line_mean": 36.1554404145,
"line_max": 98,
"alpha_frac": 0.5969878678,
"autogenerated": false,
"ratio": 4.294011976047904,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5890999843847904,
"avg_score": null,
"num_lines": null
} |
"""Account management views for editorial app."""
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from braces.views import LoginRequiredMixin
from django.views.generic import TemplateView, UpdateView
from editorial.forms import (
OrganizationSubscriptionForm,
ContractorSubscriptionForm,
)
from editorial.models import (
OrganizationSubscription,
ContractorSubscription,
)
class AccountSelectionView(LoginRequiredMixin, TemplateView):
    """Let a freshly signed-up user pick an account type.

    After signup the user is sent here to choose between creating an
    organization (team account) and a contractor profile (independent
    account).
    """

    template_name = 'editorial/account_selection.html'
# ACCESS: Only org admins should be able to update and org's subscription
class OrganizationSubscriptionUpdateView(LoginRequiredMixin, UpdateView):
    """Edit an organization's subscription details."""

    form_class = OrganizationSubscriptionForm
    model = OrganizationSubscription
# ACCESS: Only user with contractorprofile should be able to update their subscription
class ContractorSubscriptionUpdateView(LoginRequiredMixin, UpdateView):
    """Edit a contractor's subscription details."""

    form_class = ContractorSubscriptionForm
    model = ContractorSubscription
| {
"repo_name": "ProjectFacet/facet",
"path": "project/editorial/views/accounts.py",
"copies": "1",
"size": "1362",
"license": "mit",
"hash": -6088957119891424000,
"line_mean": 30.6744186047,
"line_max": 86,
"alpha_frac": 0.7804698972,
"autogenerated": false,
"ratio": 4.7622377622377625,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6042707659437763,
"avg_score": null,
"num_lines": null
} |
"""Account model module."""
from __future__ import unicode_literals
from django.contrib.auth.models import (BaseUserManager, AbstractBaseUser)
from django.db import models
from django.utils.translation import ugettext as _
import uuid
class AppUserManager(BaseUserManager):
    """Manager creating users keyed by e-mail address."""

    def create_user(self, email, first_name, last_name, password=None):
        """Create and save a regular user with the given e-mail and names."""
        if not email:
            raise ValueError(_('Users must have an email address'))
        new_user = self.model(
            email=self.normalize_email(email),
            first_name=first_name,
            last_name=last_name,
        )
        new_user.set_password(password)
        new_user.save(using=self._db)
        return new_user

    def create_superuser(self, email, first_name, last_name, password):
        """Create and save an admin user with the given e-mail."""
        admin = self.create_user(email, password=password,
                                 first_name=first_name, last_name=last_name)
        admin.is_admin = True
        admin.save(using=self._db)
        return admin
class AppUser(AbstractBaseUser):
    """AppUser model class (for customizing user model)."""

    # Fields — kept exactly as declared; changing them would alter migrations.
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    email = models.EmailField(verbose_name=_('E-mail address'), max_length=255, unique=True)
    first_name = models.CharField(max_length=50)
    last_name = models.CharField(max_length=50)
    is_active = models.BooleanField(default=True)
    is_verified = models.BooleanField(default=False)
    is_admin = models.BooleanField(default=False)
    timestamp_subscription = models.DateTimeField(auto_now_add=True)
    timestamp_modified = models.DateTimeField(auto_now=True)

    objects = AppUserManager()

    USERNAME_FIELD = 'email'
    REQUIRED_FIELDS = ['first_name', 'last_name']

    def get_full_name(self):
        """Return the user's first and last name, space separated."""
        return self.first_name + ' ' + self.last_name

    def get_short_name(self):
        """Return the user's first name only."""
        return self.first_name

    def __unicode__(self):
        """Identify the user by e-mail address."""
        return self.email

    def __str__(self):  # __unicode__ on Python 2
        """Identify the user by e-mail address."""
        return self.email

    def has_perm(self, perm, obj=None):
        """Grant every specific permission (simplest possible policy)."""
        return True

    def has_module_perms(self, app_label):
        """Grant access to every app's views (simplest possible policy)."""
        return True

    @property
    def is_staff(self):
        """All admins — and only admins — count as staff."""
        return self.is_admin
| {
"repo_name": "davideferre/django-rest-framework-email-accounts",
"path": "models.py",
"copies": "2",
"size": "3049",
"license": "mit",
"hash": 3579307861967913000,
"line_mean": 33.2584269663,
"line_max": 101,
"alpha_frac": 0.6490652673,
"autogenerated": false,
"ratio": 4.125845737483085,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5774911004783085,
"avg_score": null,
"num_lines": null
} |
'''Account models definition.'''
import base64
import hashlib
import hmac
import time
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.db import models
class PendingUser(models.Model):
    """A registration awaiting validation by its e-mailed token."""

    # Fields — kept exactly as declared; changing them would alter migrations.
    username = models.CharField(max_length=10, unique=True)
    first_name = models.CharField('First name', max_length=25)
    last_name = models.CharField('Last name', max_length=25)
    email = models.EmailField('E-Mail', max_length=50, unique=True)
    validation_token = models.CharField('Validation token', max_length=256)
    add_date = models.DateTimeField('Date added', auto_now_add=True)
    last_modified = models.DateTimeField('Last update', auto_now=True)

    def generate_username(self):
        """Build the username from last name + first name.

        TODO: check that the username is not already used in LDAP.
        """
        raw = (self.last_name + self.first_name).lower()
        self.username = raw.replace(' ', '')[:10]

    def generate_token(self):
        """Derive the validation token from the current time and the e-mail."""
        # The key and message deliberately read time.time() separately.
        digest = hmac.new(
            bytes(str(time.time()), 'UTF-8'),
            msg=bytes(str(time.time()) + self.email, 'UTF-8'),
            digestmod=hashlib.sha256,
        ).digest()
        self.validation_token = base64.urlsafe_b64encode(digest).decode()

    def format_last_name(self):
        """Normalize the last name to upper case."""
        self.last_name = self.last_name.upper()

    def clean(self):
        """Fill in the derived fields before model validation runs."""
        self.generate_username()
        self.generate_token()
        self.format_last_name()
        return super(PendingUser, self).clean()

    def get_absolute_url(self):
        return reverse('accounts:registration-complete')

    def __str__(self):
        return '{first_name} {last_name} ({username})'.format(
            first_name=self.first_name,
            last_name=self.last_name,
            username=self.username,
        )
class Account(models.Model):
    """
    Account model where you can store all the information about a member.

    From an User object (which you can get easily thanks to Django), you can
    access the related Account entry by `user.account`
    """

    user = models.OneToOneField(
        User,
        verbose_name='default_user',
        on_delete=models.CASCADE,
    )
    cleaning = models.BooleanField(default=True)
    has_paid_membership = models.BooleanField(default=False)

    def __str__(self):
        parts = (self.user.first_name, self.user.last_name, self.user.username)
        return '{} {} ({})'.format(*parts)
| {
"repo_name": "Atilla106/members.atilla.org",
"path": "accounts/models.py",
"copies": "1",
"size": "2493",
"license": "mit",
"hash": -6114014394245146000,
"line_mean": 28.6785714286,
"line_max": 76,
"alpha_frac": 0.6225431207,
"autogenerated": false,
"ratio": 4.001605136436597,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5124148257136597,
"avg_score": null,
"num_lines": null
} |
"""AccountNotifications API Version 1.0.
This API client was generated using a template. Make sure this code is valid before using it.
"""
import logging
from datetime import date, datetime
from .base import BaseCanvasAPI
from .base import BaseModel
class AccountNotificationsAPI(BaseCanvasAPI):
    """AccountNotifications API Version 1.0.

    Generated wrappers around BaseCanvasAPI.generic_request for the Canvas
    /account_notifications endpoints: each method fills `path`, `data` and
    `params` dicts and delegates the HTTP call.
    """

    def __init__(self, *args, **kwargs):
        """Init method for AccountNotificationsAPI."""
        super(AccountNotificationsAPI, self).__init__(*args, **kwargs)
        self.logger = logging.getLogger("py3canvas.AccountNotificationsAPI")

    def index_of_active_global_notification_for_user(self, user_id, account_id):
        """
        Index of active global notification for the user.

        Returns a list of all global notifications in the account for this user
        Any notifications that have been closed by the user will not be returned
        """
        path = {}
        data = {}
        params = {}

        # REQUIRED - PATH - account_id
        """ID"""
        path["account_id"] = account_id

        # REQUIRED - PATH - user_id
        """ID"""
        path["user_id"] = user_id

        self.logger.debug("GET /api/v1/accounts/{account_id}/users/{user_id}/account_notifications with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("GET", "/api/v1/accounts/{account_id}/users/{user_id}/account_notifications".format(**path), data=data, params=params, all_pages=True)

    def show_global_notification(self, id, user_id, account_id):
        """
        Show a global notification.

        Returns a global notification
        A notification that has been closed by the user will not be returned
        """
        path = {}
        data = {}
        params = {}

        # REQUIRED - PATH - account_id
        """ID"""
        path["account_id"] = account_id

        # REQUIRED - PATH - user_id
        """ID"""
        path["user_id"] = user_id

        # REQUIRED - PATH - id
        """ID"""
        path["id"] = id

        self.logger.debug("GET /api/v1/accounts/{account_id}/users/{user_id}/account_notifications/{id} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("GET", "/api/v1/accounts/{account_id}/users/{user_id}/account_notifications/{id}".format(**path), data=data, params=params, single_item=True)

    def close_notification_for_user(self, id, user_id, account_id):
        """
        Close notification for user.

        If the user no longer wants to see this notification it can be excused with this call
        """
        path = {}
        data = {}
        params = {}

        # REQUIRED - PATH - account_id
        """ID"""
        path["account_id"] = account_id

        # REQUIRED - PATH - user_id
        """ID"""
        path["user_id"] = user_id

        # REQUIRED - PATH - id
        """ID"""
        path["id"] = id

        self.logger.debug("DELETE /api/v1/accounts/{account_id}/users/{user_id}/account_notifications/{id} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("DELETE", "/api/v1/accounts/{account_id}/users/{user_id}/account_notifications/{id}".format(**path), data=data, params=params, single_item=True)

    def create_global_notification(self, account_id, account_notification_end_at, account_notification_subject, account_notification_message, account_notification_start_at, account_notification_icon=None, account_notification_roles=None):
        """
        Create a global notification.

        Create and return a new global notification for an account.
        """
        path = {}
        data = {}
        params = {}

        # REQUIRED - PATH - account_id
        """ID"""
        path["account_id"] = account_id

        # REQUIRED - account_notification[subject]
        """The subject of the notification."""
        data["account_notification[subject]"] = account_notification_subject

        # REQUIRED - account_notification[message]
        """The message body of the notification."""
        data["account_notification[message]"] = account_notification_message

        # REQUIRED - account_notification[start_at]
        """The start date and time of the notification in ISO8601 format.
        e.g. 2014-01-01T01:00Z"""
        if issubclass(account_notification_start_at.__class__, str):
            account_notification_start_at = self._validate_iso8601_string(account_notification_start_at)
        elif issubclass(account_notification_start_at.__class__, date) or issubclass(account_notification_start_at.__class__, datetime):
            # NOTE(review): a date/datetime is formatted with a literal +00:00
            # suffix without any conversion — assumes the value is UTC; confirm.
            account_notification_start_at = account_notification_start_at.strftime('%Y-%m-%dT%H:%M:%S+00:00')
        data["account_notification[start_at]"] = account_notification_start_at

        # REQUIRED - account_notification[end_at]
        """The end date and time of the notification in ISO8601 format.
        e.g. 2014-01-01T01:00Z"""
        if issubclass(account_notification_end_at.__class__, str):
            account_notification_end_at = self._validate_iso8601_string(account_notification_end_at)
        elif issubclass(account_notification_end_at.__class__, date) or issubclass(account_notification_end_at.__class__, datetime):
            # NOTE(review): same UTC assumption as start_at above.
            account_notification_end_at = account_notification_end_at.strftime('%Y-%m-%dT%H:%M:%S+00:00')
        data["account_notification[end_at]"] = account_notification_end_at

        # OPTIONAL - account_notification[icon]
        """The icon to display with the notification.
        Note: Defaults to warning."""
        if account_notification_icon is not None:
            self._validate_enum(account_notification_icon, ["warning", "information", "question", "error", "calendar"])
            data["account_notification[icon]"] = account_notification_icon

        # OPTIONAL - account_notification_roles
        """The role(s) to send global notification to. Note: ommitting this field will send to everyone
        Example:
        account_notification_roles: ["StudentEnrollment", "TeacherEnrollment"]"""
        if account_notification_roles is not None:
            data["account_notification_roles"] = account_notification_roles

        self.logger.debug("POST /api/v1/accounts/{account_id}/account_notifications with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("POST", "/api/v1/accounts/{account_id}/account_notifications".format(**path), data=data, params=params, no_data=True)

    def update_global_notification(self, id, account_id, account_notification_end_at=None, account_notification_icon=None, account_notification_message=None, account_notification_roles=None, account_notification_start_at=None, account_notification_subject=None):
        """
        Update a global notification.

        Update global notification for an account.
        """
        path = {}
        data = {}
        params = {}

        # REQUIRED - PATH - account_id
        """ID"""
        path["account_id"] = account_id

        # REQUIRED - PATH - id
        """ID"""
        path["id"] = id

        # OPTIONAL - account_notification[subject]
        """The subject of the notification."""
        if account_notification_subject is not None:
            data["account_notification[subject]"] = account_notification_subject

        # OPTIONAL - account_notification[message]
        """The message body of the notification."""
        if account_notification_message is not None:
            data["account_notification[message]"] = account_notification_message

        # OPTIONAL - account_notification[start_at]
        """The start date and time of the notification in ISO8601 format.
        e.g. 2014-01-01T01:00Z"""
        if account_notification_start_at is not None:
            if issubclass(account_notification_start_at.__class__, str):
                account_notification_start_at = self._validate_iso8601_string(account_notification_start_at)
            elif issubclass(account_notification_start_at.__class__, date) or issubclass(account_notification_start_at.__class__, datetime):
                # NOTE(review): naive values get a literal +00:00 suffix — assumes UTC.
                account_notification_start_at = account_notification_start_at.strftime('%Y-%m-%dT%H:%M:%S+00:00')
            data["account_notification[start_at]"] = account_notification_start_at

        # OPTIONAL - account_notification[end_at]
        """The end date and time of the notification in ISO8601 format.
        e.g. 2014-01-01T01:00Z"""
        if account_notification_end_at is not None:
            if issubclass(account_notification_end_at.__class__, str):
                account_notification_end_at = self._validate_iso8601_string(account_notification_end_at)
            elif issubclass(account_notification_end_at.__class__, date) or issubclass(account_notification_end_at.__class__, datetime):
                # NOTE(review): same UTC assumption as start_at above.
                account_notification_end_at = account_notification_end_at.strftime('%Y-%m-%dT%H:%M:%S+00:00')
            data["account_notification[end_at]"] = account_notification_end_at

        # OPTIONAL - account_notification[icon]
        """The icon to display with the notification."""
        if account_notification_icon is not None:
            self._validate_enum(account_notification_icon, ["warning", "information", "question", "error", "calendar"])
            data["account_notification[icon]"] = account_notification_icon

        # OPTIONAL - account_notification_roles
        """The role(s) to send global notification to. Note: ommitting this field will send to everyone
        Example:
        account_notification_roles: ["StudentEnrollment", "TeacherEnrollment"]"""
        if account_notification_roles is not None:
            data["account_notification_roles"] = account_notification_roles

        self.logger.debug("PUT /api/v1/accounts/{account_id}/account_notifications/{id} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("PUT", "/api/v1/accounts/{account_id}/account_notifications/{id}".format(**path), data=data, params=params, no_data=True)
class Accountnotification(BaseModel):
    """Accountnotification Model.

    Plain data holder mirroring the Canvas AccountNotification JSON object.
    The property setters only mutate the local copy (and say so via
    Logger.warning — `Logger.warn` is a deprecated alias since Python 3.3);
    they never write back to the remote Canvas instance.
    """

    def __init__(self, role_ids=None, start_at=None, roles=None, end_at=None, message=None, subject=None, icon=None):
        """Init method for Accountnotification class."""
        self._role_ids = role_ids
        self._start_at = start_at
        self._roles = roles
        self._end_at = end_at
        self._message = message
        self._subject = subject
        self._icon = icon

        self.logger = logging.getLogger('py3canvas.Accountnotification')

    @property
    def role_ids(self):
        """The roles to send the notification to. If roles is not passed it defaults to all roles."""
        return self._role_ids

    @role_ids.setter
    def role_ids(self, value):
        """Setter for role_ids property."""
        self.logger.warning("Setting values on role_ids will NOT update the remote Canvas instance.")
        self._role_ids = value

    @property
    def start_at(self):
        """When to send out the notification."""
        return self._start_at

    @start_at.setter
    def start_at(self, value):
        """Setter for start_at property."""
        self.logger.warning("Setting values on start_at will NOT update the remote Canvas instance.")
        self._start_at = value

    @property
    def roles(self):
        """(Deprecated) The roles to send the notification to. If roles is not passed it defaults to all roles."""
        return self._roles

    @roles.setter
    def roles(self, value):
        """Setter for roles property."""
        self.logger.warning("Setting values on roles will NOT update the remote Canvas instance.")
        self._roles = value

    @property
    def end_at(self):
        """When to expire the notification."""
        return self._end_at

    @end_at.setter
    def end_at(self, value):
        """Setter for end_at property."""
        self.logger.warning("Setting values on end_at will NOT update the remote Canvas instance.")
        self._end_at = value

    @property
    def message(self):
        """The message to be sent in the notification."""
        return self._message

    @message.setter
    def message(self, value):
        """Setter for message property."""
        self.logger.warning("Setting values on message will NOT update the remote Canvas instance.")
        self._message = value

    @property
    def subject(self):
        """The subject of the notifications."""
        return self._subject

    @subject.setter
    def subject(self, value):
        """Setter for subject property."""
        self.logger.warning("Setting values on subject will NOT update the remote Canvas instance.")
        self._subject = value

    @property
    def icon(self):
        """The icon to display with the message. Defaults to warning."""
        return self._icon

    @icon.setter
    def icon(self, value):
        """Setter for icon property."""
        self.logger.warning("Setting values on icon will NOT update the remote Canvas instance.")
        self._icon = value
| {
"repo_name": "tylerclair/py3canvas",
"path": "py3canvas/apis/account_notifications.py",
"copies": "1",
"size": "13285",
"license": "mit",
"hash": -2953993395364189000,
"line_mean": 42.5573770492,
"line_max": 262,
"alpha_frac": 0.6396687994,
"autogenerated": false,
"ratio": 4.184251968503937,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5323920767903937,
"avg_score": null,
"num_lines": null
} |
"""AccountNotifications API Version 1.0.
This API client was generated using a template. Make sure this code is valid before using it.
"""
import logging
from datetime import date, datetime
from base import BaseCanvasAPI
from base import BaseModel
class AccountNotificationsAPI(BaseCanvasAPI):
    """AccountNotifications API Version 1.0.

    Generated wrappers around BaseCanvasAPI.generic_request for the Canvas
    /account_notifications endpoints: each method fills `path`, `data` and
    `params` dicts and delegates the HTTP call.
    """

    def __init__(self, *args, **kwargs):
        """Init method for AccountNotificationsAPI."""
        super(AccountNotificationsAPI, self).__init__(*args, **kwargs)
        self.logger = logging.getLogger("pycanvas.AccountNotificationsAPI")

    def index_of_active_global_notification_for_user(self, user_id, account_id):
        """
        Index of active global notification for the user.

        Returns a list of all global notifications in the account for this user
        Any notifications that have been closed by the user will not be returned
        """
        path = {}
        data = {}
        params = {}

        # REQUIRED - PATH - account_id
        """ID"""
        path["account_id"] = account_id

        # REQUIRED - PATH - user_id
        """ID"""
        path["user_id"] = user_id

        self.logger.debug("GET /api/v1/accounts/{account_id}/users/{user_id}/account_notifications with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("GET", "/api/v1/accounts/{account_id}/users/{user_id}/account_notifications".format(**path), data=data, params=params, all_pages=True)

    def show_global_notification(self, id, user_id, account_id):
        """
        Show a global notification.

        Returns a global notification
        A notification that has been closed by the user will not be returned
        """
        path = {}
        data = {}
        params = {}

        # REQUIRED - PATH - account_id
        """ID"""
        path["account_id"] = account_id

        # REQUIRED - PATH - user_id
        """ID"""
        path["user_id"] = user_id

        # REQUIRED - PATH - id
        """ID"""
        path["id"] = id

        self.logger.debug("GET /api/v1/accounts/{account_id}/users/{user_id}/account_notifications/{id} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("GET", "/api/v1/accounts/{account_id}/users/{user_id}/account_notifications/{id}".format(**path), data=data, params=params, single_item=True)

    def close_notification_for_user(self, id, user_id, account_id):
        """
        Close notification for user.

        If the user no longer wants to see this notification it can be excused with this call
        """
        path = {}
        data = {}
        params = {}

        # REQUIRED - PATH - account_id
        """ID"""
        path["account_id"] = account_id

        # REQUIRED - PATH - user_id
        """ID"""
        path["user_id"] = user_id

        # REQUIRED - PATH - id
        """ID"""
        path["id"] = id

        self.logger.debug("DELETE /api/v1/accounts/{account_id}/users/{user_id}/account_notifications/{id} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("DELETE", "/api/v1/accounts/{account_id}/users/{user_id}/account_notifications/{id}".format(**path), data=data, params=params, single_item=True)

    def create_global_notification(self, account_id, account_notification_end_at, account_notification_subject, account_notification_message, account_notification_start_at, account_notification_icon=None, account_notification_roles=None):
        """
        Create a global notification.

        Create and return a new global notification for an account.
        """
        path = {}
        data = {}
        params = {}

        # REQUIRED - PATH - account_id
        """ID"""
        path["account_id"] = account_id

        # REQUIRED - account_notification[subject]
        """The subject of the notification."""
        data["account_notification[subject]"] = account_notification_subject

        # REQUIRED - account_notification[message]
        """The message body of the notification."""
        data["account_notification[message]"] = account_notification_message

        # REQUIRED - account_notification[start_at]
        """The start date and time of the notification in ISO8601 format.
        e.g. 2014-01-01T01:00Z"""
        # NOTE(review): unlike the py3canvas variant, the datetime is passed
        # through unvalidated and unconverted.
        data["account_notification[start_at]"] = account_notification_start_at

        # REQUIRED - account_notification[end_at]
        """The end date and time of the notification in ISO8601 format.
        e.g. 2014-01-01T01:00Z"""
        data["account_notification[end_at]"] = account_notification_end_at

        # OPTIONAL - account_notification[icon]
        """The icon to display with the notification.
        Note: Defaults to warning."""
        if account_notification_icon is not None:
            self._validate_enum(account_notification_icon, ["warning", "information", "question", "error", "calendar"])
            data["account_notification[icon]"] = account_notification_icon

        # OPTIONAL - account_notification_roles
        """The role(s) to send global notification to. Note: ommitting this field will send to everyone
        Example:
        account_notification_roles: ["StudentEnrollment", "TeacherEnrollment"]"""
        if account_notification_roles is not None:
            data["account_notification_roles"] = account_notification_roles

        self.logger.debug("POST /api/v1/accounts/{account_id}/account_notifications with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("POST", "/api/v1/accounts/{account_id}/account_notifications".format(**path), data=data, params=params, no_data=True)

    def update_global_notification(self, id, account_id, account_notification_end_at=None, account_notification_icon=None, account_notification_message=None, account_notification_roles=None, account_notification_start_at=None, account_notification_subject=None):
        """
        Update a global notification.

        Update global notification for an account.
        """
        path = {}
        data = {}
        params = {}

        # REQUIRED - PATH - account_id
        """ID"""
        path["account_id"] = account_id

        # REQUIRED - PATH - id
        """ID"""
        path["id"] = id

        # OPTIONAL - account_notification[subject]
        """The subject of the notification."""
        if account_notification_subject is not None:
            data["account_notification[subject]"] = account_notification_subject

        # OPTIONAL - account_notification[message]
        """The message body of the notification."""
        if account_notification_message is not None:
            data["account_notification[message]"] = account_notification_message

        # OPTIONAL - account_notification[start_at]
        """The start date and time of the notification in ISO8601 format.
        e.g. 2014-01-01T01:00Z"""
        if account_notification_start_at is not None:
            data["account_notification[start_at]"] = account_notification_start_at

        # OPTIONAL - account_notification[end_at]
        """The end date and time of the notification in ISO8601 format.
        e.g. 2014-01-01T01:00Z"""
        if account_notification_end_at is not None:
            data["account_notification[end_at]"] = account_notification_end_at

        # OPTIONAL - account_notification[icon]
        """The icon to display with the notification."""
        if account_notification_icon is not None:
            self._validate_enum(account_notification_icon, ["warning", "information", "question", "error", "calendar"])
            data["account_notification[icon]"] = account_notification_icon

        # OPTIONAL - account_notification_roles
        """The role(s) to send global notification to. Note: ommitting this field will send to everyone
        Example:
        account_notification_roles: ["StudentEnrollment", "TeacherEnrollment"]"""
        if account_notification_roles is not None:
            data["account_notification_roles"] = account_notification_roles

        self.logger.debug("PUT /api/v1/accounts/{account_id}/account_notifications/{id} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("PUT", "/api/v1/accounts/{account_id}/account_notifications/{id}".format(**path), data=data, params=params, no_data=True)
class Accountnotification(BaseModel):
    """Accountnotification Model.

    Plain data holder mirroring the Canvas AccountNotification JSON object.
    The property setters only mutate the local copy (and say so via
    Logger.warning — `Logger.warn` is a deprecated alias); they never
    write back to the remote Canvas instance.
    """

    def __init__(self, role_ids=None, start_at=None, roles=None, end_at=None, message=None, subject=None, icon=None):
        """Init method for Accountnotification class."""
        self._role_ids = role_ids
        self._start_at = start_at
        self._roles = roles
        self._end_at = end_at
        self._message = message
        self._subject = subject
        self._icon = icon

        self.logger = logging.getLogger('pycanvas.Accountnotification')

    @property
    def role_ids(self):
        """The roles to send the notification to. If roles is not passed it defaults to all roles."""
        return self._role_ids

    @role_ids.setter
    def role_ids(self, value):
        """Setter for role_ids property."""
        self.logger.warning("Setting values on role_ids will NOT update the remote Canvas instance.")
        self._role_ids = value

    @property
    def start_at(self):
        """When to send out the notification."""
        return self._start_at

    @start_at.setter
    def start_at(self, value):
        """Setter for start_at property."""
        self.logger.warning("Setting values on start_at will NOT update the remote Canvas instance.")
        self._start_at = value

    @property
    def roles(self):
        """(Deprecated) The roles to send the notification to. If roles is not passed it defaults to all roles."""
        return self._roles

    @roles.setter
    def roles(self, value):
        """Setter for roles property."""
        self.logger.warning("Setting values on roles will NOT update the remote Canvas instance.")
        self._roles = value

    @property
    def end_at(self):
        """When to expire the notification."""
        return self._end_at

    @end_at.setter
    def end_at(self, value):
        """Setter for end_at property."""
        self.logger.warning("Setting values on end_at will NOT update the remote Canvas instance.")
        self._end_at = value

    @property
    def message(self):
        """The message to be sent in the notification."""
        return self._message

    @message.setter
    def message(self, value):
        """Setter for message property."""
        self.logger.warning("Setting values on message will NOT update the remote Canvas instance.")
        self._message = value

    @property
    def subject(self):
        """The subject of the notifications."""
        return self._subject

    @subject.setter
    def subject(self, value):
        """Setter for subject property."""
        self.logger.warning("Setting values on subject will NOT update the remote Canvas instance.")
        self._subject = value

    @property
    def icon(self):
        """The icon to display with the message. Defaults to warning."""
        return self._icon

    @icon.setter
    def icon(self, value):
        """Setter for icon property."""
        self.logger.warning("Setting values on icon will NOT update the remote Canvas instance.")
        self._icon = value
| {
"repo_name": "PGower/PyCanvas",
"path": "pycanvas/apis/account_notifications.py",
"copies": "1",
"size": "11882",
"license": "mit",
"hash": -8459665084057903000,
"line_mean": 39.1141868512,
"line_max": 262,
"alpha_frac": 0.6184144083,
"autogenerated": false,
"ratio": 4.330174927113703,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0027762005484308645,
"num_lines": 289
} |
"""account.py: Implementation of class AbstractTwitterAccountCommand
and its subclasses.
"""
from argparse import FileType
from . import AbstractTwitterCommand, call_decorator
from ..parsers import (filter_args,
parser_include_entities,
parser_skip_status)
# POST account/remove_profile_banner
# GET account/settings
# POST account/settings
# POST account/update_delivery_device
# POST account/update_profile
# POST account/update_profile_background_image
# POST account/update_profile_banner
# POST account/update_profile_image
# GET account/verify_credentials
# Each constant is a tuple of argparse subcommand names for one Twitter
# endpoint: the first element is the canonical name, the rest are aliases.
ACCOUNT_REMOVE_PROFILE_BANNER = ('account/remove_profile_banner',
                                 'remove_profile_banner')
ACCOUNT_SETTINGS_G = ('account/settings_g', 'get')
ACCOUNT_SETTINGS_P = ('account/settings_p', 'set')
ACCOUNT_UPDATE_DELIVERY_DEVICE = ('account/update_delivery_device',
                                  'delivery_device')
ACCOUNT_UPDATE_PROFILE = ('account/update_profile',
                          'profile')
ACCOUNT_UPDATE_PROFILE_BACKGROUND_IMAGE = (
    'account/update_profile_background_image',
    'profile_background_image')
ACCOUNT_UPDATE_PROFILE_BANNER = ('account/update_profile_banner',
                                 'profile_banner')
ACCOUNT_UPDATE_PROFILE_IMAGE = ('account/update_profile_image',
                                'profile_image')
ACCOUNT_VERIFY_CREDENTIALS = ('account/verify_credentials',
                              'verify')
# pylint: disable=abstract-method
class AbstractTwitterAccountCommand(AbstractTwitterCommand):
    """Common base for the account/* subcommands; make_commands() collects
    the concrete commands via __subclasses__()."""
    pass
class CommandRemoveProfileBanner(AbstractTwitterAccountCommand):
    """Remove the uploaded profile banner for the authenticating user."""

    def create_parser(self, subparsers):
        # Canonical name first, remaining tuple entries are aliases.
        name, *aliases = ACCOUNT_REMOVE_PROFILE_BANNER
        return subparsers.add_parser(
            name,
            aliases=aliases,
            help=self.__doc__)

    @call_decorator
    def __call__(self):
        """Request POST account/remove_profile_banner for Twitter."""
        # This endpoint takes no parameters.
        return {}, self.twhandler.account.remove_profile_banner
class CommandSettingsG(AbstractTwitterAccountCommand):
    """Print settings (including current trend, geo and sleep time
    information) for the authenticating user.
    """

    def create_parser(self, subparsers):
        name, *aliases = ACCOUNT_SETTINGS_G
        return subparsers.add_parser(
            name,
            aliases=aliases,
            help=self.__doc__)

    @call_decorator
    def __call__(self):
        """Request GET account/settings for Twitter."""
        # Force the GET form of the settings endpoint.
        return {'_method': 'GET'}, self.twhandler.account.settings
class CommandSettingsP(AbstractTwitterAccountCommand):
    """Update the authenticating user's settings."""

    def create_parser(self, subparsers):
        # All options are optional; only those supplied end up in the request.
        parser = subparsers.add_parser(
            ACCOUNT_SETTINGS_P[0],
            aliases=ACCOUNT_SETTINGS_P[1:],
            help=self.__doc__)
        parser.add_argument(
            '--sleep-time-enabled',
            dest='sleep_time_enabled',
            action='store_true',
            help='enable sleep time')
        parser.add_argument(
            '--start-sleep-time',
            dest='start_sleep_time',
            metavar='<HH>',
            help='the hour that sleep time should begin')
        parser.add_argument(
            '--end-sleep-time',
            dest='end_sleep_time',
            metavar='<HH>',
            help='the hour that sleep time should end')
        parser.add_argument(
            '--time-zone',
            dest='time_zone',
            metavar='<TZ>',
            help='the timezone dates and times should be displayed in')
        parser.add_argument(
            '--trend-location-woeid',
            dest='trend_location_woeid',
            metavar='<woeid>',
            help='default trend location')
        parser.add_argument(
            '--allow-contributor-request',
            dest='allow_contributor_request',
            choices=['none', 'all', 'following'],
            metavar='{none,all,following}',
            help='allow others to include user as contributor')
        parser.add_argument(
            '--lang',
            help='the language which Twitter should render in')
        return parser

    @call_decorator
    def __call__(self):
        """Request POST account/settings for Twitter."""
        args = vars(self.args)
        # filter_args drops options the user did not supply (None values).
        kwargs = filter_args(
            args,
            'sleep_time_enabled', 'start_sleep_time', 'end_sleep_time',
            'time_zone', 'trend_location_woeid',
            'allow_contributor_request', 'lang')
        return kwargs, self.twhandler.account.settings
class CommandUpdateDeliveryDevice(AbstractTwitterAccountCommand):
    """Set which device Twitter delivers updates to for the
    authenticating user.
    """

    def create_parser(self, subparsers):
        parser = subparsers.add_parser(
            ACCOUNT_UPDATE_DELIVERY_DEVICE[0],
            aliases=ACCOUNT_UPDATE_DELIVERY_DEVICE[1:],
            parents=[parser_include_entities()],
            help=self.__doc__)
        # Positional and restricted to the two values the API accepts.
        parser.add_argument(
            'device',
            choices=['ms', 'none'],
            metavar='{ms,none}',
            help='the device to which to update')
        return parser

    @call_decorator
    def __call__(self):
        """Request POST account/update_delivery_device for Twitter."""
        args = vars(self.args)
        kwargs = filter_args(
            args,
            'device', 'include_entities')
        return kwargs, self.twhandler.account.update_delivery_device
class CommandUpdateProfile(AbstractTwitterAccountCommand):
    """Set some values that users are able to set under the Account
    tab of their settings page.
    """

    def create_parser(self, subparsers):
        parser = subparsers.add_parser(
            ACCOUNT_UPDATE_PROFILE[0],
            aliases=ACCOUNT_UPDATE_PROFILE[1:],
            parents=[parser_include_entities(),
                     parser_skip_status()],
            help=self.__doc__)
        parser.add_argument(
            '--name',
            help='full name associated with the profile')
        parser.add_argument(
            '--url',
            help='URL associated with the profile')
        parser.add_argument(
            '--location',
            help='the city or country describing where '
                 'the user of the account is located')
        parser.add_argument(
            '--description',
            help='a description of the user owning the account')
        parser.add_argument(
            '--profile-link-color',
            dest='profile_link_color',
            help='a hex value that controls the color scheme of '
                 'links on your profile page')
        return parser

    @call_decorator
    def __call__(self):
        """Request POST account/update_profile for Twitter."""
        args = vars(self.args)
        # Fix: also forward include_entities/skip_status — the parent parsers
        # accept them but they were previously dropped before the request
        # (cf. CommandUpdateProfileImage, which does pass them through).
        kwargs = filter_args(
            args,
            'name', 'url', 'location', 'description',
            'profile_link_color',
            'include_entities', 'skip_status')
        return kwargs, self.twhandler.account.update_profile
class CommandUpdateProfileBackgroundImage(AbstractTwitterAccountCommand):
    """Update the authenticating user's profile background image."""

    def create_parser(self, subparsers):
        parser = subparsers.add_parser(
            ACCOUNT_UPDATE_PROFILE_BACKGROUND_IMAGE[0],
            aliases=ACCOUNT_UPDATE_PROFILE_BACKGROUND_IMAGE[1:],
            parents=[parser_include_entities(),
                     parser_skip_status()],
            help=self.__doc__)

        # required should be True
        group = parser.add_mutually_exclusive_group(required=False)
        group.add_argument(
            '-I', '--image',
            type=FileType('rb'),
            help='the background image for the profile')
        group.add_argument(
            '-M', '--media-id',
            dest='media_id',
            metavar='<media_id>',
            help='the media to use as the background image')
        parser.add_argument(
            '--tile',
            action='store_true',
            help='the background image will be displayed tiled')
        return parser

    @call_decorator
    def __call__(self):
        """Request POST account/update_profile_background_image for Twitter."""
        args = vars(self.args)
        # Fix: also forward include_entities/skip_status — the parent parsers
        # accept them but they were previously dropped before the request.
        kwargs = filter_args(
            args,
            'image', # will be base64-encoded by PTT.
            'media_id', 'tile',
            'include_entities', 'skip_status')
        return kwargs, self.twhandler.account.update_profile_background_image
class CommandUpdateProfileBanner(AbstractTwitterAccountCommand):
    """Upload a profile banner on behalf of the authenticating user."""

    def create_parser(self, subparsers):
        parser = subparsers.add_parser(
            ACCOUNT_UPDATE_PROFILE_BANNER[0],
            aliases=ACCOUNT_UPDATE_PROFILE_BANNER[1:],
            help=self.__doc__)

        # banner:The Base64-encoded or raw image data
        parser.add_argument(
            'banner',
            type=FileType('rb'),
            help='image data')
        # Width/height/offsets select the preferred crop of the image;
        # all four are optional per the endpoint.
        parser.add_argument(
            '-W', '--width',
            type=int,
            help='the width of the preferred section of the image')
        parser.add_argument(
            '-H', '--height',
            type=int,
            help='the height of the preferred section of the image')
        parser.add_argument(
            '-L', '--offset-left',
            dest='offset_left',
            type=int,
            help='the number of pixels by which to offset '
                 'the uploaded image from the left')
        parser.add_argument(
            '-T', '--offset-top',
            dest='offset_top',
            type=int,
            help='the number of pixels by which to offset '
                 'the uploaded image from the top')
        return parser

    @call_decorator
    def __call__(self):
        """Request POST account/update_profile_banner for Twitter."""
        args = vars(self.args)
        kwargs = filter_args(
            args,
            'banner', # will be base64-encoded by PTT.
            'width', 'height', 'offset_left', 'offset_top')
        return kwargs, self.twhandler.account.update_profile_banner
class CommandUpdateProfileImage(AbstractTwitterAccountCommand):
    """Update the authenticating user's profile image."""

    def create_parser(self, subparsers):
        parser = subparsers.add_parser(
            ACCOUNT_UPDATE_PROFILE_IMAGE[0],
            aliases=ACCOUNT_UPDATE_PROFILE_IMAGE[1:],
            parents=[parser_include_entities(),
                     parser_skip_status()],
            help=self.__doc__)
        # Positional: the avatar file, opened in binary mode by argparse.
        parser.add_argument(
            'image',
            type=FileType('rb'),
            help='the avatar image for the profile')
        return parser

    @call_decorator
    def __call__(self):
        """Request POST account/update_profile_image for Twitter."""
        args = vars(self.args)
        kwargs = filter_args(
            args,
            'image', # will be base64-encoded by PTT.
            'include_entities', 'skip_status')
        return kwargs, self.twhandler.account.update_profile_image
class CommandVerifyCredentials(AbstractTwitterAccountCommand):
    """Return an HTTP 200 OK response code and a representation of
    the requesting user if authentication was successful.
    """

    def create_parser(self, subparsers):
        name, *aliases = ACCOUNT_VERIFY_CREDENTIALS
        parser = subparsers.add_parser(
            name,
            aliases=aliases,
            parents=[parser_include_entities(),
                     parser_skip_status()],
            help=self.__doc__)
        parser.add_argument(
            '--email',
            action='store_true',
            help='email will be returned in the user objects as a string')
        return parser

    @call_decorator
    def __call__(self):
        """Request GET account/verify_credentials for Twitter."""
        kwargs = filter_args(
            vars(self.args),
            'include_entities', 'skip_status', 'email')
        return kwargs, self.twhandler.account.verify_credentials
def make_commands(manager):
    """Prototype"""
    # pylint: disable=no-member
    # Instantiate one command object per concrete subclass, lazily.
    command_types = AbstractTwitterAccountCommand.__subclasses__()
    return (command_type(manager) for command_type in command_types)
| {
"repo_name": "showa-yojyo/bin",
"path": "twmods/commands/account.py",
"copies": "1",
"size": "12507",
"license": "mit",
"hash": 2893237078742796000,
"line_mean": 33.8384401114,
"line_max": 79,
"alpha_frac": 0.5891900536,
"autogenerated": false,
"ratio": 4.500539762504498,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5589729816104498,
"avg_score": null,
"num_lines": null
} |
## account.py
# Account system support.
from pydle.features import rfc1459
import asyncio
class AccountSupport(rfc1459.RFC1459Support):
    """Track services-account state (account name, identified flag) per user."""

    ## Internal.

    def _create_user(self, nickname):
        """Create the user entry and seed the account-tracking fields."""
        super()._create_user(nickname)
        if nickname in self.users:
            self.users[nickname].update({
                'account': None,
                'identified': False
            })

    def _rename_user(self, user, new):
        """Handle a nick change; account state must be re-verified."""
        super()._rename_user(user, new)
        # Unset account info to be certain until we get a new response.
        self._sync_user(new, {'account': None, 'identified': False})
        # whois() is a coroutine: schedule it so the refresh actually runs.
        # The original called it bare, which only created a coroutine object
        # that was never awaited (RuntimeWarning, no WHOIS sent).
        asyncio.ensure_future(self.whois(new))

    ## IRC API.

    async def whois(self, nickname):
        """WHOIS ``nickname``, defaulting the account fields in the reply.

        Converted from the deprecated ``@asyncio.coroutine``/``yield from``
        form (removed in Python 3.11); the rest of this class already uses
        native ``async``/``await``.
        """
        info = await super().whois(nickname)
        info.setdefault('account', None)
        info.setdefault('identified', False)
        return info

    ## Message handlers.

    async def on_raw_307(self, message):
        """ WHOIS: User has identified for this nickname. (Anope) """
        target, nickname = message.params[:2]
        info = {
            'identified': True
        }
        if nickname in self.users:
            self._sync_user(nickname, info)
        if nickname in self._pending['whois']:
            self._whois_info[nickname].update(info)

    async def on_raw_330(self, message):
        """ WHOIS account name (Atheme). """
        target, nickname, account = message.params[:3]
        info = {
            'account': account,
            'identified': True
        }
        if nickname in self.users:
            self._sync_user(nickname, info)
        if nickname in self._pending['whois']:
            self._whois_info[nickname].update(info)
| {
"repo_name": "Shizmob/pydle",
"path": "pydle/features/account.py",
"copies": "1",
"size": "1698",
"license": "bsd-3-clause",
"hash": -4409469115854364700,
"line_mean": 28.7894736842,
"line_max": 71,
"alpha_frac": 0.5753828033,
"autogenerated": false,
"ratio": 4.042857142857143,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5118239946157143,
"avg_score": null,
"num_lines": null
} |
## account.py
# Account system support.
from pydle.features import rfc1459
class AccountSupport(rfc1459.RFC1459Support):
    """Track services-account state (account name, identified flag) per user."""

    ## Internal.

    def _create_user(self, nickname):
        super()._create_user(nickname)
        user = self.users.get(nickname)
        if user is not None:
            user.update({'account': None, 'identified': False})

    def _rename_user(self, user, new):
        super()._rename_user(user, new)
        # A nick change invalidates any account info we had.
        self._sync_user(new, {'account': None, 'identified': False})

    ## IRC API.

    def whois(self, nickname):
        future = super().whois(nickname)
        # Make sure our account keys are always present in the reply data.
        pending_info = self._whois_info.get(nickname)
        if pending_info is not None:
            pending_info.setdefault('account', None)
            pending_info.setdefault('identified', False)
        return future

    ## Message handlers.

    def on_raw_307(self, message):
        """ WHOIS: User has identified for this nickname. (Anope) """
        nickname = message.params[1]
        self._record_account_info(nickname, {'identified': True})

    def on_raw_330(self, message):
        """ WHOIS account name (Atheme). """
        nickname, account = message.params[1:3]
        self._record_account_info(nickname, {'account': account,
                                             'identified': True})

    def _record_account_info(self, nickname, info):
        # Shared tail of the raw 307/330 handlers: sync the live user entry
        # and any in-flight WHOIS reply being assembled.
        if nickname in self.users:
            self._sync_user(nickname, info)
        if nickname in self._pending['whois']:
            self._whois_info[nickname].update(info)
| {
"repo_name": "suut/psychic-happiness",
"path": "pydle/features/account.py",
"copies": "1",
"size": "1699",
"license": "unlicense",
"hash": -5230760280550206000,
"line_mean": 26.8524590164,
"line_max": 70,
"alpha_frac": 0.5632725132,
"autogenerated": false,
"ratio": 4.016548463356974,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5079820976556975,
"avg_score": null,
"num_lines": null
} |
# account.py
import json
import falcon
import redis_plex
import server
_r = redis_plex.connection()
class Actions(object):
    """Redis-backed operations on account watched-lists and server links."""

    @staticmethod
    def get_account_key(account_id):
        """Redis key of the set of watched items for an account."""
        return 'account:' + str(account_id)

    @staticmethod
    def get_account_server_relation_key(account_id):
        """Redis key of the set of server ids linked to an account."""
        return 'account:' + str(account_id) + ':servers'

    @staticmethod
    def validate_account_server_access(req, resp, resource, params):
        """Falcon ``before`` hook guarding account resources.

        Raises falcon.HTTPForbidden when the X-SERVER-ID header is missing
        or the server is not linked to the target account.
        """
        if 'X-SERVER-ID' not in req.headers:
            raise falcon.HTTPForbidden('Permission Denied', 'Missing header X-server-id')
        server_id = req.headers['X-SERVER-ID']
        account_id = params['account_id']
        servers = Actions.get_servers(account_id)
        # Plain membership test; the original called servers.index() and
        # bound the index to an unused variable just to trigger ValueError.
        if server_id not in servers:
            raise falcon.HTTPForbidden('Permission Denied',
                                       'Server ' + server_id + ' has no access to account id ' + account_id)

    @staticmethod
    def get_watched(account_id):
        """Return the watched items of an account as a list."""
        items = _r.smembers(Actions.get_account_key(account_id))
        return list(items)

    @staticmethod
    def get_account_exists(account_id):
        """True when the account's watched-set exists in Redis."""
        return _r.exists(Actions.get_account_key(account_id)) == 1

    @staticmethod
    def add_watched_to_account(account_id, items):
        """Add items to the account's watched-set; returns count added."""
        return _r.sadd(Actions.get_account_key(account_id), *items)

    @staticmethod
    def delete_account(account_id):
        """Delete the account's watched-set; returns number of keys removed."""
        return _r.delete(Actions.get_account_key(account_id))

    @staticmethod
    def delete_watched_by_account(account_id, items):
        """Remove items from the account's watched-set; returns count removed."""
        return _r.srem(Actions.get_account_key(account_id), *items)

    @staticmethod
    def add_server(account_id, server_id):
        """Link a server to the account."""
        return _r.sadd(Actions.get_account_server_relation_key(account_id), server_id)

    @staticmethod
    def remove_server(account_id, server_id):
        """Unlink a server from the account."""
        return _r.srem(Actions.get_account_server_relation_key(account_id), server_id)

    @staticmethod
    def get_servers(account_id):
        """Return the server ids linked to an account as a list."""
        items = _r.smembers(Actions.get_account_server_relation_key(account_id))
        return list(items)
class Resource(object):
    """Falcon resource for an account's watched items.

    Every handler is guarded by Actions.validate_account_server_access,
    which requires a linked ``X-SERVER-ID`` header.
    """

    @falcon.before(Actions.validate_account_server_access)
    def on_get(self, req, resp, account_id):
        """Handles GET requests"""
        watched = Actions.get_watched(account_id)
        if watched is None:
            # NOTE(review): Actions.get_watched returns a list, so this 404
            # branch looks unreachable; kept for safety.
            resp.status = falcon.HTTP_404
            return
        servers = Actions.get_servers(account_id)
        resp.status = falcon.HTTP_200  # This is the default status
        json_resp = {'account_id': account_id, 'watched': watched, 'servers': servers}
        resp.body = json.dumps(json_resp)

    @falcon.before(Actions.validate_account_server_access)
    def on_put(self, req, resp, account_id):
        """Handles PUT requests"""
        try:
            raw_json = req.stream.read()
        except Exception as ex:
            # str(ex) instead of ex.message: the .message attribute is
            # Python 2 only and raises AttributeError on Python 3.
            raise falcon.HTTPError(falcon.HTTP_400,
                                   'Error',
                                   str(ex))
        try:
            # json.loads() no longer takes an ``encoding`` argument (it was
            # removed in Python 3.9); the input is decoded automatically.
            result_json = json.loads(raw_json)
        except ValueError:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   'Malformed JSON',
                                   'Could not decode the request body. The '
                                   'JSON was incorrect.')
        items = result_json['watched']
        result = Actions.add_watched_to_account(account_id, items)
        resp.status = falcon.HTTP_202
        jsonresp = {'account_id': account_id, 'tried_to_add': items, 'added': result}
        resp.body = json.dumps(jsonresp)

    @falcon.before(Actions.validate_account_server_access)
    def on_delete(self, req, resp, account_id):
        """
        Handles DELETE requests. Deletes the account from the database.

        Deletion is currently disabled: every request is rejected. The
        unreachable deletion code that followed the raise (unlink the
        account from each server, delete it, report success/failure) has
        been removed; restore it from history if deletes are re-enabled.
        """
        # NOTE(review): raising the class relies on falcon instantiating it;
        # HTTPMethodNotAllowed normally takes the list of allowed methods.
        raise falcon.HTTPMethodNotAllowed
| {
"repo_name": "fiLLLip/plex-watched-sync",
"path": "server/account.py",
"copies": "1",
"size": "4302",
"license": "apache-2.0",
"hash": 8936401813842877000,
"line_mean": 34.5537190083,
"line_max": 108,
"alpha_frac": 0.6062296606,
"autogenerated": false,
"ratio": 3.8037135278514587,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9906689396399909,
"avg_score": 0.0006507584103098854,
"num_lines": 121
} |
"""AccountReports API Version 1.0.
This API client was generated using a template. Make sure this code is valid before using it.
"""
import logging
from datetime import date, datetime
from .base import BaseCanvasAPI
from .base import BaseModel
class AccountReportsAPI(BaseCanvasAPI):
    """AccountReports API Version 1.0."""

    def __init__(self, *args, **kwargs):
        """Init method for AccountReportsAPI."""
        super(AccountReportsAPI, self).__init__(*args, **kwargs)
        self.logger = logging.getLogger("py3canvas.AccountReportsAPI")

    def list_available_reports(self, account_id):
        """
        List Available Reports.

        Returns the list of reports for the current context.
        """
        path = {"account_id": account_id}  # required path parameter
        data = {}
        params = {}
        self.logger.debug("GET /api/v1/accounts/{account_id}/reports with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("GET", "/api/v1/accounts/{account_id}/reports".format(**path), data=data, params=params, no_data=True)

    def start_report(self, report, account_id, _parameters=None):
        """
        Start a Report.

        Generates a report instance for the account.
        """
        path = {"account_id": account_id, "report": report}  # required path parameters
        params = {}
        # The optional report parameters vary for each report type.
        data = {} if _parameters is None else {"[parameters]": _parameters}
        self.logger.debug("POST /api/v1/accounts/{account_id}/reports/{report} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("POST", "/api/v1/accounts/{account_id}/reports/{report}".format(**path), data=data, params=params, single_item=True)

    def index_of_reports(self, report, account_id):
        """
        Index of Reports.

        Shows all reports that have been run for the account of a specific type.
        """
        path = {"account_id": account_id, "report": report}
        data = {}
        params = {}
        self.logger.debug("GET /api/v1/accounts/{account_id}/reports/{report} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("GET", "/api/v1/accounts/{account_id}/reports/{report}".format(**path), data=data, params=params, all_pages=True)

    def status_of_report(self, id, report, account_id):
        """
        Status of a Report.

        Returns the status of a report.
        """
        path = {"account_id": account_id, "report": report, "id": id}
        data = {}
        params = {}
        self.logger.debug("GET /api/v1/accounts/{account_id}/reports/{report}/{id} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("GET", "/api/v1/accounts/{account_id}/reports/{report}/{id}".format(**path), data=data, params=params, single_item=True)

    def delete_report(self, id, report, account_id):
        """
        Delete a Report.

        Deletes a generated report instance.
        """
        path = {"account_id": account_id, "report": report, "id": id}
        data = {}
        params = {}
        self.logger.debug("DELETE /api/v1/accounts/{account_id}/reports/{report}/{id} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("DELETE", "/api/v1/accounts/{account_id}/reports/{report}/{id}".format(**path), data=data, params=params, single_item=True)
class Report(BaseModel):
    """Report Model."""

    def __init__(self, status=None, current_line=None, parameters=None, file_url=None, attachment=None, report=None, progress=None, id=None):
        """Init method for Report class."""
        self._status = status
        self._current_line = current_line
        self._parameters = parameters
        self._file_url = file_url
        self._attachment = attachment
        self._report = report
        self._progress = progress
        self._id = id
        self.logger = logging.getLogger('py3canvas.Report')

    def _make_local_property(name, doc):
        """Build a property over ``self._<name>``; its setter changes only
        the local copy and warns that the remote Canvas is NOT updated.

        Replaces eight hand-written, identical getter/setter pairs and
        switches the deprecated ``Logger.warn`` alias to ``Logger.warning``.
        """
        attr = '_' + name

        def _get(self):
            return getattr(self, attr)

        def _set(self, value):
            self.logger.warning("Setting values on %s will NOT update the remote Canvas instance." % name)
            setattr(self, attr, value)

        return property(_get, _set, doc=doc)

    status = _make_local_property('status', "The status of the report.")
    current_line = _make_local_property('current_line', "This is the current line count being written to the report. It updates every 1000 records.")
    parameters = _make_local_property('parameters', "The report parameters.")
    file_url = _make_local_property('file_url', "The url to the report download.")
    attachment = _make_local_property('attachment', "attachment.")
    report = _make_local_property('report', "The type of report.")
    progress = _make_local_property('progress', "The progress of the report.")
    id = _make_local_property('id', "The unique identifier for the report.")

    del _make_local_property
class Reportparameters(BaseModel):
    """Reportparameters Model.
    The parameters returned will vary for each report."""

    def __init__(self, include_enrollment_state=None, sis_terms_csv=None, terms=None, users=None, enrollments=None, enrollment_term_id=None, include_deleted=None, courses=None, sis_accounts_csv=None, accounts=None, groups=None, course_id=None, start_at=None, enrollment_state=None, end_at=None, sections=None, order=None, xlist=None):
        """Init method for Reportparameters class."""
        self._include_enrollment_state = include_enrollment_state
        self._sis_terms_csv = sis_terms_csv
        self._terms = terms
        self._users = users
        self._enrollments = enrollments
        self._enrollment_term_id = enrollment_term_id
        self._include_deleted = include_deleted
        self._courses = courses
        self._sis_accounts_csv = sis_accounts_csv
        self._accounts = accounts
        self._groups = groups
        self._course_id = course_id
        self._start_at = start_at
        self._enrollment_state = enrollment_state
        self._end_at = end_at
        self._sections = sections
        self._order = order
        self._xlist = xlist
        self.logger = logging.getLogger('py3canvas.Reportparameters')

    def _make_local_property(name, doc):
        """Build a property over ``self._<name>``; its setter changes only
        the local copy and warns that the remote Canvas is NOT updated.

        Replaces 18 hand-written, identical getter/setter pairs and
        switches the deprecated ``Logger.warn`` alias to ``Logger.warning``.
        """
        attr = '_' + name

        def _get(self):
            return getattr(self, attr)

        def _set(self, value):
            self.logger.warning("Setting values on %s will NOT update the remote Canvas instance." % name)
            setattr(self, attr, value)

        return property(_get, _set, doc=doc)

    include_enrollment_state = _make_local_property('include_enrollment_state', "Include enrollment state. Defaults to false.")
    sis_terms_csv = _make_local_property('sis_terms_csv', "sis_terms_csv.")
    terms = _make_local_property('terms', "Get the data for terms.")
    users = _make_local_property('users', "Get the data for users.")
    enrollments = _make_local_property('enrollments', "Get the data for enrollments.")
    enrollment_term_id = _make_local_property('enrollment_term_id', "The canvas id of the term to get grades from.")
    include_deleted = _make_local_property('include_deleted', "Include deleted objects.")
    courses = _make_local_property('courses', "Get the data for courses.")
    sis_accounts_csv = _make_local_property('sis_accounts_csv', "sis_accounts_csv.")
    accounts = _make_local_property('accounts', "Get the data for accounts.")
    groups = _make_local_property('groups', "Get the data for groups.")
    course_id = _make_local_property('course_id', "The course to report on.")
    start_at = _make_local_property('start_at', "The beginning date for submissions. Max time range is 2 weeks.")
    enrollment_state = _make_local_property('enrollment_state', "Include enrollment state. Defaults to 'all' Options: ['active'| 'invited'| 'creation_pending'| 'deleted'| 'rejected'| 'completed'| 'inactive'| 'all'].")
    end_at = _make_local_property('end_at', "The end date for submissions. Max time range is 2 weeks.")
    sections = _make_local_property('sections', "Get the data for sections.")
    order = _make_local_property('order', "The sort order for the csv, Options: 'users', 'courses', 'outcomes'.")
    xlist = _make_local_property('xlist', "Get the data for cross-listed courses.")

    del _make_local_property
| {
"repo_name": "tylerclair/py3canvas",
"path": "py3canvas/apis/account_reports.py",
"copies": "1",
"size": "15614",
"license": "mit",
"hash": 2198437737656990000,
"line_mean": 32.7235421166,
"line_max": 334,
"alpha_frac": 0.6177148713,
"autogenerated": false,
"ratio": 4.29664281783159,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.541435768913159,
"avg_score": null,
"num_lines": null
} |
"""AccountReports API Version 1.0.
This API client was generated using a template. Make sure this code is valid before using it.
"""
import logging
from datetime import date, datetime
from base import BaseCanvasAPI
from base import BaseModel
class AccountReportsAPI(BaseCanvasAPI):
    """AccountReports API Version 1.0."""

    def __init__(self, *args, **kwargs):
        """Init method for AccountReportsAPI."""
        super(AccountReportsAPI, self).__init__(*args, **kwargs)
        self.logger = logging.getLogger("pycanvas.AccountReportsAPI")

    def list_available_reports(self, account_id):
        """
        List Available Reports.

        Returns the list of reports for the current context.
        """
        path = {"account_id": account_id}  # required path parameter
        data = {}
        params = {}
        self.logger.debug("GET /api/v1/accounts/{account_id}/reports with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("GET", "/api/v1/accounts/{account_id}/reports".format(**path), data=data, params=params, no_data=True)

    def start_report(self, report, account_id, _parameters=None):
        """
        Start a Report.

        Generates a report instance for the account.
        """
        path = {"account_id": account_id, "report": report}  # required path parameters
        params = {}
        # The optional report parameters vary for each report type.
        data = {} if _parameters is None else {"[parameters]": _parameters}
        self.logger.debug("POST /api/v1/accounts/{account_id}/reports/{report} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("POST", "/api/v1/accounts/{account_id}/reports/{report}".format(**path), data=data, params=params, single_item=True)

    def index_of_reports(self, report, account_id):
        """
        Index of Reports.

        Shows all reports that have been run for the account of a specific type.
        """
        path = {"account_id": account_id, "report": report}
        data = {}
        params = {}
        self.logger.debug("GET /api/v1/accounts/{account_id}/reports/{report} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("GET", "/api/v1/accounts/{account_id}/reports/{report}".format(**path), data=data, params=params, all_pages=True)

    def status_of_report(self, id, report, account_id):
        """
        Status of a Report.

        Returns the status of a report.
        """
        path = {"account_id": account_id, "report": report, "id": id}
        data = {}
        params = {}
        self.logger.debug("GET /api/v1/accounts/{account_id}/reports/{report}/{id} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("GET", "/api/v1/accounts/{account_id}/reports/{report}/{id}".format(**path), data=data, params=params, single_item=True)

    def delete_report(self, id, report, account_id):
        """
        Delete a Report.

        Deletes a generated report instance.
        """
        path = {"account_id": account_id, "report": report, "id": id}
        data = {}
        params = {}
        self.logger.debug("DELETE /api/v1/accounts/{account_id}/reports/{report}/{id} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("DELETE", "/api/v1/accounts/{account_id}/reports/{report}/{id}".format(**path), data=data, params=params, single_item=True)
class Report(BaseModel):
    """Report Model."""

    def __init__(self, status=None, parameters=None, file_url=None, report=None, progress=None, id=None):
        """Init method for Report class."""
        self._status = status
        self._parameters = parameters
        self._file_url = file_url
        self._report = report
        self._progress = progress
        self._id = id
        self.logger = logging.getLogger('pycanvas.Report')

    def _make_local_property(name, doc):
        """Build a property over ``self._<name>``; its setter changes only
        the local copy and warns that the remote Canvas is NOT updated.

        Replaces six hand-written, identical getter/setter pairs and
        switches the deprecated ``Logger.warn`` alias to ``Logger.warning``.
        """
        attr = '_' + name

        def _get(self):
            return getattr(self, attr)

        def _set(self, value):
            self.logger.warning("Setting values on %s will NOT update the remote Canvas instance." % name)
            setattr(self, attr, value)

        return property(_get, _set, doc=doc)

    status = _make_local_property('status', "The status of the report.")
    parameters = _make_local_property('parameters', "The report parameters.")
    file_url = _make_local_property('file_url', "The url to the report download.")
    report = _make_local_property('report', "The type of report.")
    progress = _make_local_property('progress', "The progress of the report.")
    id = _make_local_property('id', "The unique identifier for the report.")

    del _make_local_property
class Reportparameters(BaseModel):
    """Reportparameters Model.
    The parameters returned will vary for each report."""

    def __init__(self, include_enrollment_state=None, sis_terms_csv=None, terms=None, users=None, enrollments=None, enrollment_term_id=None, include_deleted=None, courses=None, sis_accounts_csv=None, accounts=None, groups=None, course_id=None, start_at=None, enrollment_state=None, end_at=None, sections=None, order=None, xlist=None):
        """Init method for Reportparameters class."""
        self._include_enrollment_state = include_enrollment_state
        self._sis_terms_csv = sis_terms_csv
        self._terms = terms
        self._users = users
        self._enrollments = enrollments
        self._enrollment_term_id = enrollment_term_id
        self._include_deleted = include_deleted
        self._courses = courses
        self._sis_accounts_csv = sis_accounts_csv
        self._accounts = accounts
        self._groups = groups
        self._course_id = course_id
        self._start_at = start_at
        self._enrollment_state = enrollment_state
        self._end_at = end_at
        self._sections = sections
        self._order = order
        self._xlist = xlist
        self.logger = logging.getLogger('pycanvas.Reportparameters')

    def _make_local_property(name, doc):
        """Build a property over ``self._<name>``; its setter changes only
        the local copy and warns that the remote Canvas is NOT updated.

        Replaces 18 hand-written, identical getter/setter pairs and
        switches the deprecated ``Logger.warn`` alias to ``Logger.warning``.
        """
        attr = '_' + name

        def _get(self):
            return getattr(self, attr)

        def _set(self, value):
            self.logger.warning("Setting values on %s will NOT update the remote Canvas instance." % name)
            setattr(self, attr, value)

        return property(_get, _set, doc=doc)

    include_enrollment_state = _make_local_property('include_enrollment_state', "Include enrollment state. Defaults to false.")
    sis_terms_csv = _make_local_property('sis_terms_csv', "sis_terms_csv.")
    terms = _make_local_property('terms', "Get the data for terms.")
    users = _make_local_property('users', "Get the data for users.")
    enrollments = _make_local_property('enrollments', "Get the data for enrollments.")
    enrollment_term_id = _make_local_property('enrollment_term_id', "The canvas id of the term to get grades from.")
    include_deleted = _make_local_property('include_deleted', "Include deleted objects.")
    courses = _make_local_property('courses', "Get the data for courses.")
    sis_accounts_csv = _make_local_property('sis_accounts_csv', "sis_accounts_csv.")
    accounts = _make_local_property('accounts', "Get the data for accounts.")
    groups = _make_local_property('groups', "Get the data for groups.")
    course_id = _make_local_property('course_id', "The course to report on.")
    start_at = _make_local_property('start_at', "The beginning date for submissions. Max time range is 2 weeks.")
    enrollment_state = _make_local_property('enrollment_state', "Include enrollment state. Defaults to 'all' Options: ['active'| 'invited'| 'creation_pending'| 'deleted'| 'rejected'| 'completed'| 'inactive'| 'all'].")
    end_at = _make_local_property('end_at', "The end date for submissions. Max time range is 2 weeks.")
    sections = _make_local_property('sections', "Get the data for sections.")
    order = _make_local_property('order', "The sort order for the csv, Options: 'users', 'courses', 'outcomes'.")
    xlist = _make_local_property('xlist', "Get the data for cross-listed courses.")

    del _make_local_property
| {
"repo_name": "PGower/PyCanvas",
"path": "pycanvas/apis/account_reports.py",
"copies": "1",
"size": "15167",
"license": "mit",
"hash": -6264841880186205000,
"line_mean": 32.5489749431,
"line_max": 334,
"alpha_frac": 0.5976791719,
"autogenerated": false,
"ratio": 4.3936848203939745,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5491363992293974,
"avg_score": null,
"num_lines": null
} |
"""Accounts API Version 1.0.
This API client was generated using a template. Make sure this code is valid before using it.
"""
import logging
from datetime import date, datetime
from base import BaseCanvasAPI
from base import BaseModel
class AccountsAPI(BaseCanvasAPI):
    """Accounts API Version 1.0.

    Generated, thin wrapper around the Canvas "Accounts" REST endpoints.
    Each method builds `path`, `data` (form body) and `params` (query string)
    dicts and delegates the HTTP call to BaseCanvasAPI.generic_request.
    """
    def __init__(self, *args, **kwargs):
        """Init method for AccountsAPI."""
        super(AccountsAPI, self).__init__(*args, **kwargs)
        self.logger = logging.getLogger("pycanvas.AccountsAPI")
    def list_accounts(self, include=None):
        """
        List accounts.
        List accounts that the current user can view or manage. Typically,
        students and even teachers will get an empty list in response, only
        account admins can view the accounts that they are in.
        """
        path = {}
        data = {}
        params = {}
        # OPTIONAL - include
        """Array of additional information to include.
        "lti_guid":: the 'tool_consumer_instance_guid' that will be sent for this account on LTI launches
        "registration_settings":: returns info about the privacy policy and terms of use
        "services":: returns services and whether they are enabled (requires account management permissions)"""
        if include is not None:
            self._validate_enum(include, ["lti_guid", "registration_settings", "services"])
            params["include"] = include
        self.logger.debug("GET /api/v1/accounts with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        # all_pages=True: generic_request follows pagination and returns every page.
        return self.generic_request("GET", "/api/v1/accounts".format(**path), data=data, params=params, all_pages=True)
    def list_accounts_for_course_admins(self):
        """
        List accounts for course admins.
        List accounts that the current user can view through their admin course enrollments.
        (Teacher, TA, or designer enrollments).
        Only returns "id", "name", "workflow_state", "root_account_id" and "parent_account_id"
        """
        path = {}
        data = {}
        params = {}
        self.logger.debug("GET /api/v1/course_accounts with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("GET", "/api/v1/course_accounts".format(**path), data=data, params=params, all_pages=True)
    def get_single_account(self, id):
        """
        Get a single account.
        Retrieve information on an individual account, given by id or
        sis_account_id.
        """
        path = {}
        data = {}
        params = {}
        # REQUIRED - PATH - id
        """ID"""
        path["id"] = id
        self.logger.debug("GET /api/v1/accounts/{id} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        # single_item=True: the endpoint returns one Account object, not a list.
        return self.generic_request("GET", "/api/v1/accounts/{id}".format(**path), data=data, params=params, single_item=True)
    def get_sub_accounts_of_account(self, account_id, recursive=None):
        """
        Get the sub-accounts of an account.
        List accounts that are sub-accounts of the given account.
        """
        path = {}
        data = {}
        params = {}
        # REQUIRED - PATH - account_id
        """ID"""
        path["account_id"] = account_id
        # OPTIONAL - recursive
        """If true, the entire account tree underneath
        this account will be returned (though still paginated). If false, only
        direct sub-accounts of this account will be returned. Defaults to false."""
        if recursive is not None:
            params["recursive"] = recursive
        self.logger.debug("GET /api/v1/accounts/{account_id}/sub_accounts with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("GET", "/api/v1/accounts/{account_id}/sub_accounts".format(**path), data=data, params=params, all_pages=True)
    def list_active_courses_in_account(self, account_id, by_subaccounts=None, by_teachers=None, completed=None, enrollment_term_id=None, enrollment_type=None, hide_enrollmentless_courses=None, include=None, published=None, search_term=None, state=None, with_enrollments=None):
        """
        List active courses in an account.
        Retrieve the list of courses in this account.
        """
        path = {}
        data = {}
        params = {}
        # REQUIRED - PATH - account_id
        """ID"""
        path["account_id"] = account_id
        # OPTIONAL - with_enrollments
        """If true, include only courses with at least one enrollment. If false,
        include only courses with no enrollments. If not present, do not filter
        on course enrollment status."""
        if with_enrollments is not None:
            params["with_enrollments"] = with_enrollments
        # OPTIONAL - enrollment_type
        """If set, only return courses that have at least one user enrolled
        in the course with one of the specified enrollment types."""
        if enrollment_type is not None:
            self._validate_enum(enrollment_type, ["teacher", "student", "ta", "observer", "designer"])
            params["enrollment_type"] = enrollment_type
        # OPTIONAL - published
        """If true, include only published courses. If false, exclude published
        courses. If not present, do not filter on published status."""
        if published is not None:
            params["published"] = published
        # OPTIONAL - completed
        """If true, include only completed courses (these may be in state
        'completed', or their enrollment term may have ended). If false, exclude
        completed courses. If not present, do not filter on completed status."""
        if completed is not None:
            params["completed"] = completed
        # OPTIONAL - by_teachers
        """List of User IDs of teachers; if supplied, include only courses taught by
        one of the referenced users."""
        if by_teachers is not None:
            params["by_teachers"] = by_teachers
        # OPTIONAL - by_subaccounts
        """List of Account IDs; if supplied, include only courses associated with one
        of the referenced subaccounts."""
        if by_subaccounts is not None:
            params["by_subaccounts"] = by_subaccounts
        # OPTIONAL - hide_enrollmentless_courses
        """If present, only return courses that have at least one enrollment.
        Equivalent to 'with_enrollments=true'; retained for compatibility."""
        if hide_enrollmentless_courses is not None:
            params["hide_enrollmentless_courses"] = hide_enrollmentless_courses
        # OPTIONAL - state
        """If set, only return courses that are in the given state(s). By default,
        all states but "deleted" are returned."""
        if state is not None:
            self._validate_enum(state, ["created", "claimed", "available", "completed", "deleted", "all"])
            params["state"] = state
        # OPTIONAL - enrollment_term_id
        """If set, only includes courses from the specified term."""
        if enrollment_term_id is not None:
            params["enrollment_term_id"] = enrollment_term_id
        # OPTIONAL - search_term
        """The partial course name, code, or full ID to match and return in the results list. Must be at least 3 characters."""
        if search_term is not None:
            params["search_term"] = search_term
        # OPTIONAL - include
        """- All explanations can be seen in the {api:CoursesController#index Course API index documentation}
        - "sections", "needs_grading_count" and "total_scores" are not valid options at the account level"""
        if include is not None:
            self._validate_enum(include, ["syllabus_body", "term", "course_progress", "storage_quota_used_mb", "total_students", "teachers"])
            params["include"] = include
        self.logger.debug("GET /api/v1/accounts/{account_id}/courses with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("GET", "/api/v1/accounts/{account_id}/courses".format(**path), data=data, params=params, all_pages=True)
    def update_account(self, id, account_default_group_storage_quota_mb=None, account_default_storage_quota_mb=None, account_default_time_zone=None, account_default_user_storage_quota_mb=None, account_name=None, account_services=None, account_settings_lock_all_announcements_locked=None, account_settings_lock_all_announcements_value=None, account_settings_restrict_student_future_listing_locked=None, account_settings_restrict_student_future_listing_value=None, account_settings_restrict_student_future_view_locked=None, account_settings_restrict_student_future_view_value=None, account_settings_restrict_student_past_view_locked=None, account_settings_restrict_student_past_view_value=None):
        """
        Update an account.
        Update an existing account.

        Only the arguments that are not None are sent as form fields; the
        bracketed key names mirror the Rails-style parameters Canvas expects.
        """
        path = {}
        data = {}
        params = {}
        # REQUIRED - PATH - id
        """ID"""
        path["id"] = id
        # OPTIONAL - account[name]
        """Updates the account name"""
        if account_name is not None:
            data["account[name]"] = account_name
        # OPTIONAL - account[default_time_zone]
        """The default time zone of the account. Allowed time zones are
        {http://www.iana.org/time-zones IANA time zones} or friendlier
        {http://api.rubyonrails.org/classes/ActiveSupport/TimeZone.html Ruby on Rails time zones}."""
        if account_default_time_zone is not None:
            data["account[default_time_zone]"] = account_default_time_zone
        # OPTIONAL - account[default_storage_quota_mb]
        """The default course storage quota to be used, if not otherwise specified."""
        if account_default_storage_quota_mb is not None:
            data["account[default_storage_quota_mb]"] = account_default_storage_quota_mb
        # OPTIONAL - account[default_user_storage_quota_mb]
        """The default user storage quota to be used, if not otherwise specified."""
        if account_default_user_storage_quota_mb is not None:
            data["account[default_user_storage_quota_mb]"] = account_default_user_storage_quota_mb
        # OPTIONAL - account[default_group_storage_quota_mb]
        """The default group storage quota to be used, if not otherwise specified."""
        if account_default_group_storage_quota_mb is not None:
            data["account[default_group_storage_quota_mb]"] = account_default_group_storage_quota_mb
        # OPTIONAL - account[settings][restrict_student_past_view][value]
        """Restrict students from viewing courses after end date"""
        if account_settings_restrict_student_past_view_value is not None:
            data["account[settings][restrict_student_past_view][value]"] = account_settings_restrict_student_past_view_value
        # OPTIONAL - account[settings][restrict_student_past_view][locked]
        """Lock this setting for sub-accounts and courses"""
        if account_settings_restrict_student_past_view_locked is not None:
            data["account[settings][restrict_student_past_view][locked]"] = account_settings_restrict_student_past_view_locked
        # OPTIONAL - account[settings][restrict_student_future_view][value]
        """Restrict students from viewing courses before start date"""
        if account_settings_restrict_student_future_view_value is not None:
            data["account[settings][restrict_student_future_view][value]"] = account_settings_restrict_student_future_view_value
        # OPTIONAL - account[settings][restrict_student_future_view][locked]
        """Lock this setting for sub-accounts and courses"""
        if account_settings_restrict_student_future_view_locked is not None:
            data["account[settings][restrict_student_future_view][locked]"] = account_settings_restrict_student_future_view_locked
        # OPTIONAL - account[settings][lock_all_announcements][value]
        """Disable comments on announcements"""
        if account_settings_lock_all_announcements_value is not None:
            data["account[settings][lock_all_announcements][value]"] = account_settings_lock_all_announcements_value
        # OPTIONAL - account[settings][lock_all_announcements][locked]
        """Lock this setting for sub-accounts and courses"""
        if account_settings_lock_all_announcements_locked is not None:
            data["account[settings][lock_all_announcements][locked]"] = account_settings_lock_all_announcements_locked
        # OPTIONAL - account[settings][restrict_student_future_listing][value]
        """Restrict students from viewing future enrollments in course list"""
        if account_settings_restrict_student_future_listing_value is not None:
            data["account[settings][restrict_student_future_listing][value]"] = account_settings_restrict_student_future_listing_value
        # OPTIONAL - account[settings][restrict_student_future_listing][locked]
        """Lock this setting for sub-accounts and courses"""
        if account_settings_restrict_student_future_listing_locked is not None:
            data["account[settings][restrict_student_future_listing][locked]"] = account_settings_restrict_student_future_listing_locked
        # OPTIONAL - account[services]
        """Give this a set of keys and boolean values to enable or disable services matching the keys"""
        if account_services is not None:
            data["account[services]"] = account_services
        self.logger.debug("PUT /api/v1/accounts/{id} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("PUT", "/api/v1/accounts/{id}".format(**path), data=data, params=params, single_item=True)
    def delete_user_from_root_account(self, user_id, account_id):
        """
        Delete a user from the root account.
        Delete a user record from a Canvas root account. If a user is associated
        with multiple root accounts (in a multi-tenant instance of Canvas), this
        action will NOT remove them from the other accounts.
        WARNING: This API will allow a user to remove themselves from the account.
        If they do this, they won't be able to make API calls or log into Canvas at
        that account.
        """
        path = {}
        data = {}
        params = {}
        # REQUIRED - PATH - account_id
        """ID"""
        path["account_id"] = account_id
        # REQUIRED - PATH - user_id
        """ID"""
        path["user_id"] = user_id
        self.logger.debug("DELETE /api/v1/accounts/{account_id}/users/{user_id} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("DELETE", "/api/v1/accounts/{account_id}/users/{user_id}".format(**path), data=data, params=params, single_item=True)
    def create_new_sub_account(self, account_id, account_name, account_default_group_storage_quota_mb=None, account_default_storage_quota_mb=None, account_default_user_storage_quota_mb=None, account_sis_account_id=None):
        """
        Create a new sub-account.
        Add a new sub-account to a given account.
        """
        path = {}
        data = {}
        params = {}
        # REQUIRED - PATH - account_id
        """ID"""
        path["account_id"] = account_id
        # REQUIRED - account[name]
        """The name of the new sub-account."""
        data["account[name]"] = account_name
        # OPTIONAL - account[sis_account_id]
        """The account's identifier in the Student Information System."""
        if account_sis_account_id is not None:
            data["account[sis_account_id]"] = account_sis_account_id
        # OPTIONAL - account[default_storage_quota_mb]
        """The default course storage quota to be used, if not otherwise specified."""
        if account_default_storage_quota_mb is not None:
            data["account[default_storage_quota_mb]"] = account_default_storage_quota_mb
        # OPTIONAL - account[default_user_storage_quota_mb]
        """The default user storage quota to be used, if not otherwise specified."""
        if account_default_user_storage_quota_mb is not None:
            data["account[default_user_storage_quota_mb]"] = account_default_user_storage_quota_mb
        # OPTIONAL - account[default_group_storage_quota_mb]
        """The default group storage quota to be used, if not otherwise specified."""
        if account_default_group_storage_quota_mb is not None:
            data["account[default_group_storage_quota_mb]"] = account_default_group_storage_quota_mb
        self.logger.debug("POST /api/v1/accounts/{account_id}/sub_accounts with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("POST", "/api/v1/accounts/{account_id}/sub_accounts".format(**path), data=data, params=params, single_item=True)
class Account(BaseModel):
    """Account Model.

    Read-mostly data holder mirroring the Canvas Account object; setters only
    change the local copy and log a warning, they do not write back to Canvas.
    """
    def __init__(self, integration_id=None, default_time_zone=None, name=None, default_storage_quota_mb=None, sis_account_id=None, root_account_id=None, default_group_storage_quota_mb=None, id=None, sis_import_id=None, lti_guid=None, workflow_state=None, parent_account_id=None, default_user_storage_quota_mb=None):
        """Init method for Account class."""
        self._integration_id = integration_id
        self._default_time_zone = default_time_zone
        self._name = name
        self._default_storage_quota_mb = default_storage_quota_mb
        self._sis_account_id = sis_account_id
        self._root_account_id = root_account_id
        self._default_group_storage_quota_mb = default_group_storage_quota_mb
        self._id = id
        self._sis_import_id = sis_import_id
        self._lti_guid = lti_guid
        self._workflow_state = workflow_state
        self._parent_account_id = parent_account_id
        self._default_user_storage_quota_mb = default_user_storage_quota_mb
        self.logger = logging.getLogger('pycanvas.Account')
    # All setters below use logger.warning: Logger.warn is a deprecated alias.
    @property
    def integration_id(self):
        """The account's identifier in the Student Information System. Only included if the user has permission to view SIS information."""
        return self._integration_id
    @integration_id.setter
    def integration_id(self, value):
        """Setter for integration_id property."""
        self.logger.warning("Setting values on integration_id will NOT update the remote Canvas instance.")
        self._integration_id = value
    @property
    def default_time_zone(self):
        """The default time zone of the account. Allowed time zones are {http://www.iana.org/time-zones IANA time zones} or friendlier {http://api.rubyonrails.org/classes/ActiveSupport/TimeZone.html Ruby on Rails time zones}."""
        return self._default_time_zone
    @default_time_zone.setter
    def default_time_zone(self, value):
        """Setter for default_time_zone property."""
        self.logger.warning("Setting values on default_time_zone will NOT update the remote Canvas instance.")
        self._default_time_zone = value
    @property
    def name(self):
        """The display name of the account."""
        return self._name
    @name.setter
    def name(self, value):
        """Setter for name property."""
        self.logger.warning("Setting values on name will NOT update the remote Canvas instance.")
        self._name = value
    @property
    def default_storage_quota_mb(self):
        """The storage quota for the account in megabytes, if not otherwise specified."""
        return self._default_storage_quota_mb
    @default_storage_quota_mb.setter
    def default_storage_quota_mb(self, value):
        """Setter for default_storage_quota_mb property."""
        self.logger.warning("Setting values on default_storage_quota_mb will NOT update the remote Canvas instance.")
        self._default_storage_quota_mb = value
    @property
    def sis_account_id(self):
        """The account's identifier in the Student Information System. Only included if the user has permission to view SIS information."""
        return self._sis_account_id
    @sis_account_id.setter
    def sis_account_id(self, value):
        """Setter for sis_account_id property."""
        self.logger.warning("Setting values on sis_account_id will NOT update the remote Canvas instance.")
        self._sis_account_id = value
    @property
    def root_account_id(self):
        """The ID of the root account, or null if this is the root account."""
        return self._root_account_id
    @root_account_id.setter
    def root_account_id(self, value):
        """Setter for root_account_id property."""
        self.logger.warning("Setting values on root_account_id will NOT update the remote Canvas instance.")
        self._root_account_id = value
    @property
    def default_group_storage_quota_mb(self):
        """The storage quota for a group in the account in megabytes, if not otherwise specified."""
        return self._default_group_storage_quota_mb
    @default_group_storage_quota_mb.setter
    def default_group_storage_quota_mb(self, value):
        """Setter for default_group_storage_quota_mb property."""
        self.logger.warning("Setting values on default_group_storage_quota_mb will NOT update the remote Canvas instance.")
        self._default_group_storage_quota_mb = value
    @property
    def id(self):
        """the ID of the Account object."""
        return self._id
    @id.setter
    def id(self, value):
        """Setter for id property."""
        self.logger.warning("Setting values on id will NOT update the remote Canvas instance.")
        self._id = value
    @property
    def sis_import_id(self):
        """The id of the SIS import if created through SIS. Only included if the user has permission to manage SIS information."""
        return self._sis_import_id
    @sis_import_id.setter
    def sis_import_id(self, value):
        """Setter for sis_import_id property."""
        self.logger.warning("Setting values on sis_import_id will NOT update the remote Canvas instance.")
        self._sis_import_id = value
    @property
    def lti_guid(self):
        """The account's identifier that is sent as context_id in LTI launches."""
        return self._lti_guid
    @lti_guid.setter
    def lti_guid(self, value):
        """Setter for lti_guid property."""
        self.logger.warning("Setting values on lti_guid will NOT update the remote Canvas instance.")
        self._lti_guid = value
    @property
    def workflow_state(self):
        """The state of the account. Can be 'active' or 'deleted'."""
        return self._workflow_state
    @workflow_state.setter
    def workflow_state(self, value):
        """Setter for workflow_state property."""
        self.logger.warning("Setting values on workflow_state will NOT update the remote Canvas instance.")
        self._workflow_state = value
    @property
    def parent_account_id(self):
        """The account's parent ID, or null if this is the root account."""
        return self._parent_account_id
    @parent_account_id.setter
    def parent_account_id(self, value):
        """Setter for parent_account_id property."""
        self.logger.warning("Setting values on parent_account_id will NOT update the remote Canvas instance.")
        self._parent_account_id = value
    @property
    def default_user_storage_quota_mb(self):
        """The storage quota for a user in the account in megabytes, if not otherwise specified."""
        return self._default_user_storage_quota_mb
    @default_user_storage_quota_mb.setter
    def default_user_storage_quota_mb(self, value):
        """Setter for default_user_storage_quota_mb property."""
        self.logger.warning("Setting values on default_user_storage_quota_mb will NOT update the remote Canvas instance.")
        self._default_user_storage_quota_mb = value
| {
"repo_name": "PGower/PyCanvas",
"path": "pycanvas/apis/accounts.py",
"copies": "1",
"size": "24615",
"license": "mit",
"hash": -9132528757599086000,
"line_mean": 46.4547244094,
"line_max": 693,
"alpha_frac": 0.6424537883,
"autogenerated": false,
"ratio": 4.17628096369189,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0029605302862589973,
"num_lines": 508
} |
"""Account service."""
from __future__ import division
from six import PY2, python_2_unicode_compatible
from collections import OrderedDict
from pyicloud.utils import underscore_to_camelcase
class AccountService(object):
    """The 'Account' iCloud service.

    Exposes the signed-in account's devices, Family Sharing members and
    storage usage. Results are fetched lazily over the authenticated
    `session` on first access and memoised for the object's lifetime.
    """
    def __init__(self, service_root, session, params):
        self.session = session
        self.params = params
        self._service_root = service_root
        # Lazy caches populated by the properties below.
        self._devices = []
        self._family = []
        self._storage = None
        self._acc_endpoint = "%s/setup/web" % self._service_root
        self._acc_devices_url = "%s/device/getDevices" % self._acc_endpoint
        self._acc_family_details_url = "%s/family/getFamilyDetails" % self._acc_endpoint
        self._acc_family_member_photo_url = (
            "%s/family/getMemberPhoto" % self._acc_endpoint
        )
        # NOTE(review): storage info uses a fixed host, not service_root — confirm intentional.
        self._acc_storage_url = "https://setup.icloud.com/setup/ws/1/storageUsageInfo"
    @property
    def devices(self):
        """Returns current paired devices (fetched once, then cached)."""
        if not self._devices:
            req = self.session.get(self._acc_devices_url, params=self.params)
            response = req.json()
            for device_info in response["devices"]:
                self._devices.append(AccountDevice(device_info))
        return self._devices
    @property
    def family(self):
        """Returns family members (fetched once, then cached)."""
        if not self._family:
            req = self.session.get(self._acc_family_details_url, params=self.params)
            response = req.json()
            for member_info in response["familyMembers"]:
                self._family.append(
                    FamilyMember(
                        member_info,
                        self.session,
                        self.params,
                        self._acc_family_member_photo_url,
                    )
                )
        return self._family
    @property
    def storage(self):
        """Returns storage infos (fetched once, then cached)."""
        if not self._storage:
            req = self.session.get(self._acc_storage_url, params=self.params)
            response = req.json()
            self._storage = AccountStorage(response)
        return self._storage
    def __unicode__(self):
        return "{devices: %s, family: %s, storage: %s bytes free}" % (
            len(self.devices),
            len(self.family),
            self.storage.usage.available_storage_in_bytes,
        )
    def __str__(self):
        # Python 2 needs an encoded byte string from __str__.
        as_unicode = self.__unicode__()
        if PY2:
            return as_unicode.encode("utf-8", "ignore")
        return as_unicode
    def __repr__(self):
        return "<%s: %s>" % (type(self).__name__, str(self))
@python_2_unicode_compatible
class AccountDevice(dict):
    """One device paired with the account.

    Behaves like the raw response dict; snake_case attribute access is
    translated to the camelCase keys used by the API payload.
    """

    def __getattr__(self, key):
        return self[underscore_to_camelcase(key)]

    def __unicode__(self):
        return "{model: %s, name: %s}" % (self.model_display_name, self.name)

    def __str__(self):
        text = self.__unicode__()
        if PY2:
            text = text.encode("utf-8", "ignore")
        return text

    def __repr__(self):
        return "<%s: %s>" % (type(self).__name__, str(self))
class FamilyMember(object):
    """A family member.

    Wraps the raw `member_info` dict from the family-details endpoint and
    exposes its camelCase fields through snake_case properties.
    """
    def __init__(self, member_info, session, params, acc_family_member_photo_url):
        self._attrs = member_info
        self._session = session
        self._params = params
        self._acc_family_member_photo_url = acc_family_member_photo_url
    @property
    def last_name(self):
        """Gets the last name."""
        return self._attrs.get("lastName")
    @property
    def dsid(self):
        """Gets the dsid."""
        return self._attrs.get("dsid")
    @property
    def original_invitation_email(self):
        """Gets the original invitation."""
        return self._attrs.get("originalInvitationEmail")
    @property
    def full_name(self):
        """Gets the full name."""
        return self._attrs.get("fullName")
    @property
    def age_classification(self):
        """Gets the age classification."""
        return self._attrs.get("ageClassification")
    @property
    def apple_id_for_purchases(self):
        """Gets the apple id for purchases."""
        return self._attrs.get("appleIdForPurchases")
    @property
    def apple_id(self):
        """Gets the apple id."""
        return self._attrs.get("appleId")
    @property
    def family_id(self):
        """Gets the family id."""
        return self._attrs.get("familyId")
    @property
    def first_name(self):
        """Gets the first name."""
        return self._attrs.get("firstName")
    @property
    def has_parental_privileges(self):
        """Has parental privileges."""
        return self._attrs.get("hasParentalPrivileges")
    @property
    def has_screen_time_enabled(self):
        """Has screen time enabled."""
        return self._attrs.get("hasScreenTimeEnabled")
    @property
    def has_ask_to_buy_enabled(self):
        """Has to ask for buying."""
        return self._attrs.get("hasAskToBuyEnabled")
    @property
    def has_share_purchases_enabled(self):
        """Has share purchases."""
        return self._attrs.get("hasSharePurchasesEnabled")
    @property
    def share_my_location_enabled_family_members(self):
        """Has share my location with family."""
        return self._attrs.get("shareMyLocationEnabledFamilyMembers")
    @property
    def has_share_my_location_enabled(self):
        """Has share my location."""
        return self._attrs.get("hasShareMyLocationEnabled")
    @property
    def dsid_for_purchases(self):
        """Gets the dsid for purchases."""
        return self._attrs.get("dsidForPurchases")
    def get_photo(self):
        """Returns the photo."""
        params_photo = dict(self._params)
        params_photo.update({"memberId": self.dsid})
        return self._session.get(
            self._acc_family_member_photo_url, params=params_photo, stream=True
        )
    def __getitem__(self, key):
        # Bug fix: use a containment test instead of a truthiness test so
        # falsy stored values (False, 0, "") are returned rather than
        # falling through to getattr and raising AttributeError.
        if key in self._attrs:
            return self._attrs[key]
        return getattr(self, key)
    def __unicode__(self):
        return "{name: %s, age_classification: %s}" % (
            self.full_name,
            self.age_classification,
        )
    def __str__(self):
        as_unicode = self.__unicode__()
        if PY2:
            return as_unicode.encode("utf-8", "ignore")
        return as_unicode
    def __repr__(self):
        return "<%s: %s>" % (type(self).__name__, str(self))
class AccountStorageUsageForMedia(object):
    """Storage consumed by one media category of the account."""

    def __init__(self, usage_data):
        self.usage_data = usage_data

    @property
    def key(self):
        """The media key identifying this category."""
        return self.usage_data["mediaKey"]

    @property
    def label(self):
        """Human-readable display label for the category."""
        return self.usage_data["displayLabel"]

    @property
    def color(self):
        """Display color of the category as a HEX string."""
        return self.usage_data["displayColor"]

    @property
    def usage_in_bytes(self):
        """Number of bytes used by this category."""
        return self.usage_data["usageInBytes"]

    def __unicode__(self):
        return "{key: %s, usage: %s bytes}" % (self.key, self.usage_in_bytes)

    def __str__(self):
        text = self.__unicode__()
        if PY2:
            text = text.encode("utf-8", "ignore")
        return text

    def __repr__(self):
        return "<%s: %s>" % (type(self).__name__, str(self))
class AccountStorageUsage(object):
    """Overall storage usage and quota status of the account."""

    def __init__(self, usage_data, quota_data):
        self.usage_data = usage_data
        self.quota_data = quota_data

    @property
    def comp_storage_in_bytes(self):
        """Complimentary storage, in bytes."""
        return self.usage_data["compStorageInBytes"]

    @property
    def used_storage_in_bytes(self):
        """Bytes currently in use."""
        return self.usage_data["usedStorageInBytes"]

    @property
    def used_storage_in_percent(self):
        """Share of total storage in use, as a percentage (2 decimals)."""
        used = self.used_storage_in_bytes
        total = self.total_storage_in_bytes
        return round(used * 100 / total, 2)

    @property
    def available_storage_in_bytes(self):
        """Bytes still free."""
        return self.total_storage_in_bytes - self.used_storage_in_bytes

    @property
    def available_storage_in_percent(self):
        """Share of total storage still free, as a percentage (2 decimals)."""
        free = self.available_storage_in_bytes
        total = self.total_storage_in_bytes
        return round(free * 100 / total, 2)

    @property
    def total_storage_in_bytes(self):
        """Total storage of the plan, in bytes."""
        return self.usage_data["totalStorageInBytes"]

    @property
    def commerce_storage_in_bytes(self):
        """Purchased (commerce) storage, in bytes."""
        return self.usage_data["commerceStorageInBytes"]

    @property
    def quota_over(self):
        """Whether the account is over quota."""
        return self.quota_data["overQuota"]

    @property
    def quota_tier_max(self):
        """Whether the account is on the maximum quota tier."""
        return self.quota_data["haveMaxQuotaTier"]

    @property
    def quota_almost_full(self):
        """Whether the quota is almost full."""
        # NOTE(review): the API uses a hyphenated key here, unlike the
        # camelCase keys above — verify against a live response.
        return self.quota_data["almost-full"]

    @property
    def quota_paid(self):
        """Whether this is a paid quota."""
        return self.quota_data["paidQuota"]

    def __unicode__(self):
        return "%s%% used of %s bytes" % (
            self.used_storage_in_percent,
            self.total_storage_in_bytes,
        )

    def __str__(self):
        text = self.__unicode__()
        if PY2:
            text = text.encode("utf-8", "ignore")
        return text

    def __repr__(self):
        return "<%s: %s>" % (type(self).__name__, str(self))
class AccountStorage(object):
    """Aggregated storage information for the account."""

    def __init__(self, storage_data):
        self.usage = AccountStorageUsage(
            storage_data.get("storageUsageInfo"), storage_data.get("quotaStatus")
        )
        # Keyed by media key, preserving the order reported by the API.
        self.usages_by_media = OrderedDict(
            (media["mediaKey"], AccountStorageUsageForMedia(media))
            for media in storage_data.get("storageUsageByMedia")
        )

    def __unicode__(self):
        return "{usage: %s, usages_by_media: %s}" % (self.usage, self.usages_by_media)

    def __str__(self):
        text = self.__unicode__()
        if PY2:
            text = text.encode("utf-8", "ignore")
        return text

    def __repr__(self):
        return "<%s: %s>" % (type(self).__name__, str(self))
| {
"repo_name": "picklepete/pyicloud",
"path": "pyicloud/services/account.py",
"copies": "1",
"size": "10780",
"license": "mit",
"hash": 3523983888212115000,
"line_mean": 28.2140921409,
"line_max": 88,
"alpha_frac": 0.5780148423,
"autogenerated": false,
"ratio": 3.963235294117647,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0002842283620565406,
"num_lines": 369
} |
# Account credentials and the SMTP endpoint used by the helpers below.
# (Placeholders: fill in real credentials before use.)
username = '***'
password = '***'
server = 'smtp.gmail.com:587'
#imports
from time import sleep;
import smtplib;
from email.mime.application import MIMEApplication
from email.mime.text import MIMEText;
from email.mime.multipart import MIMEMultipart;
# create msg - MIME* object
# takes addresses to, from cc and a subject
# returns the MIME* object
def create_msg(to_address,
               from_address='',
               cc_address='',
               bcc_address='',
               subject=''):
    """Build a MIMEMultipart message with the given header fields.

    NOTE: bcc_address is accepted but never stored on the message;
    the caller sets msg['Bcc'] itself just before the bcc send.
    """
    message = MIMEMultipart()
    message['Subject'] = subject
    message['To'] = to_address
    message['Cc'] = cc_address
    message['From'] = from_address
    return message
# send an email
# takes an smtp address, user name, password and MIME* object
# if mode = 0 sends to and cc
# if mode = 1 sends to bcc
def send_email(smtp_address, usr, password, msg, mode):
    """Deliver *msg* through the given SMTP server using STARTTLS.

    mode 0 sends to the To/Cc recipients, mode 1 to the Bcc recipients.
    Any other mode aborts the program.
    """
    # Local name `smtp` avoids shadowing the module-level `server` string.
    smtp = smtplib.SMTP(smtp_address)
    smtp.ehlo()
    smtp.starttls()
    smtp.ehlo()
    # BUG FIX: previously authenticated with the module-level `username`
    # global instead of the `usr` argument callers pass in.
    smtp.login(usr, password)
    if mode == 0 and msg['To'] != '':
        # BUG FIX: the old code concatenated the To and Cc strings without a
        # separator, fusing the last To address with the first Cc address.
        recipients = [addr for addr
                      in msg['To'].split(',') + msg['Cc'].split(',') if addr]
        smtp.sendmail(msg['From'], recipients, msg.as_string())
    elif mode == 1 and msg['Bcc'] != '':
        smtp.sendmail(msg['From'], msg['Bcc'].split(','), msg.as_string())
    elif mode != 0 and mode != 1:
        print('error in send mail bcc')
        print('email cancelled')
        exit()
    smtp.quit()
# compose email
# takes all the details for an email and sends it
# address format: list, [0] - to
#                       [1] - cc
#                       [2] - bcc
# subject format: string
# body format: list of pairs [0] - text
#                            [1] - type:
#                                  0 - plain
#                                  1 - html
# files is list of strings
def compose_email(addresses, subject, body, files):
    # NOTE: relies on the module-level `server`, `username` and `password`
    # globals for delivery credentials.
    # addresses
    to_address = addresses[0];
    cc_address = addresses[1];
    bcc_address = addresses[2];
    # create a message
    msg = create_msg(to_address, cc_address=cc_address , subject=subject);
    # add text
    for text in body:
        attach_text(msg, text[0], text[1]);
    # add files
    # NOTE(review): despite the comment above, `files` is actually a single
    # comma-separated string, not a list of strings.
    if (files != ''):
        file_list = files.split(',');
        for afile in file_list:
            attach_file(msg, afile);
    # send message
    send_email(server, username, password, msg, 0);
    # check for bcc -- the Bcc header is only added after the first send, so
    # the To/Cc copy never leaks the Bcc recipients.
    if (bcc_address != ''):
        msg['Bcc'] = bcc_address;
        send_email(server, username, password, msg, 1);
    print 'email sent'
# attach text
# attaches a plain text or html text to a message
def attach_text(msg, atext, mode):
    """Attach *atext* to *msg* as text/plain (mode 0) or text/html (mode 1)."""
    text_part = MIMEText(atext, get_mode(mode))
    msg.attach(text_part)
# util function to get mode type
def get_mode(mode):
    """Map a numeric text mode to a MIME text subtype.

    0 -> 'plain', 1 -> 'html'; any other value aborts the program
    (preserving the original fail-hard behavior).
    """
    subtypes = {0: 'plain', 1: 'html'}
    if mode not in subtypes:
        # print() with a single argument is valid in both Python 2 and 3;
        # also fixes the "cancled" typo in the user-facing message.
        print('error in text kind')
        print('email cancelled')
        exit()
    return subtypes[mode]
# attach file
# takes the message and a file name and attaches the file to the message
def attach_file(msg, afile):
    """Attach the contents of *afile* to *msg* as an application part.

    BUG FIX: the old code leaked the file handle; `with` closes it
    deterministically.
    """
    with open(afile, "rb") as fp:
        part = MIMEApplication(fp.read())
    part.add_header('Content-Disposition', 'attachment', filename=afile)
    msg.attach(part)
#to be tested...
# NOTE: module-level smoke test -- merely importing this file attempts to
# send a real email using the credentials defined at the top.
compose_email(['cpt@thelivingpearl.com','',''],
              'test v.5.0',
              [['some text goes here...\n',0]],
              '');
#compose_email takes the following arguments:
#   1. a list of three recipient strings: [to, cc, bcc]
#      (each string may hold several addresses separated by commas)
#   2. the subject string
#   3. a list of [text, mode] pairs (mode: 0 = plain text, 1 = html)
#   4. a comma-separated string of file names to be attached
| {
"repo_name": "ActiveState/code",
"path": "recipes/Python/578807_Sending_Email/recipe-578807.py",
"copies": "1",
"size": "3638",
"license": "mit",
"hash": 7553518089418889000,
"line_mean": 28.3387096774,
"line_max": 87,
"alpha_frac": 0.5802638813,
"autogenerated": false,
"ratio": 3.5701668302257117,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46504307115257115,
"avg_score": null,
"num_lines": null
} |
# accounts.forms
# DJANGO
from django import forms
from django.contrib import auth
# CRISPY
from crispy_forms.helper import FormHelper
from crispy_forms.layout import (
Layout,
Field,
Fieldset,
ButtonHolder,
Submit,
Div
)
# ACCOUNTS
from .models import StaffProfile
class LoginForm(forms.Form):
    """Name/password login form rendered with crispy-forms."""

    name = forms.CharField(required=True, max_length=64, label='Name:')
    password = forms.CharField(
        required=True,
        max_length=64,
        label='Password:',
        widget=forms.PasswordInput)

    def __init__(self, *args, **kwargs):
        # Configure the crispy-forms layout before the base class runs.
        helper = FormHelper()
        helper.form_class = 'vavs-form'
        helper.error_text_inline = False
        helper.layout = Layout(
            Fieldset(
                '',
                Div(Field('name', css_class="inline-element"),
                    css_class="inline-elements"),
                Div(Field('password', css_class="inline-element"),
                    css_class="inline-elements"),
            ),
            ButtonHolder(
                Submit('submit', 'Login',
                       css_class='button white push-down-24')
            )
        )
        self.helper = helper
        super(LoginForm, self).__init__(*args, **kwargs)

    def login(self, request):
        """Authenticate with cleaned_data; log the user in and report success."""
        user = auth.authenticate(
            username=self.cleaned_data['name'],
            password=self.cleaned_data['password'])
        if not (user and user.is_active):
            return False
        auth.login(request, user)
        return True
class StaffProfileForm(forms.ModelForm):
    """Edit form for the email_feeds flag on StaffProfile."""

    def __init__(self, *args, **kwargs):
        # Crispy-forms rendering configuration.
        helper = FormHelper()
        helper.form_class = 'vavs-form'
        helper.error_text_inline = False
        helper.layout = Layout(
            Div(Field('email_feeds')),
            ButtonHolder(
                Submit('submit', 'Update', css_class='button white push-down-24')
            )
        )
        self.helper = helper
        super(StaffProfileForm, self).__init__(*args, **kwargs)
        self.fields['email_feeds'].label = 'Email feed summaries.'

    class Meta:
        model = StaffProfile
        fields = ('email_feeds',)
class ConsentForm(forms.Form):
    """Single-checkbox consent form for inclusion in the project."""
    consented = forms.BooleanField(required=True)
    def __init__(self, *args, **kwargs):
        self.helper = FormHelper()
        self.helper.form_class = 'vavs-form'
        self.helper.error_text_inline = False
        self.helper.layout = Layout(
            Fieldset(
                '',
                Div(Field('consented', css_class="inline-element"),
                    css_class="inline-elements"),
            ),
            # NOTE(review): the layout references a 'survey' field that this
            # form does not declare -- confirm whether a subclass adds it or
            # whether this Field entry is dead.
            Field('survey'),
            ButtonHolder(
                Submit('submit', 'Submit', css_class='button white push-down-24')
            )
        )
        super(ConsentForm, self).__init__(*args, **kwargs)
        self.fields['consented'].label = 'I have read the above agreement and give my consent to be included in the project.'
| {
"repo_name": "valuesandvalue/valuesandvalue",
"path": "vavs_project/accounts/forms.py",
"copies": "1",
"size": "3118",
"license": "mit",
"hash": -3291467692359558700,
"line_mean": 31.1443298969,
"line_max": 125,
"alpha_frac": 0.533675433,
"autogenerated": false,
"ratio": 4.312586445366528,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5346261878366528,
"avg_score": null,
"num_lines": null
} |
"""Account sharing
Revision ID: 73edd31eee69
Revises: a1498e3da19c
Create Date: 2017-04-23 20:00:28.658973
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '73edd31eee69'       # this migration (account sharing)
down_revision = 'a1498e3da19c'  # parent migration (accounts table)
branch_labels = None
depends_on = None
def upgrade():
    """Create the sharing-type lookup tables and the account_sharings table."""
    # ### commands auto generated by Alembic - please adjust! ###
    # System-wide sharing types (no owner).
    op.create_table('default_account_sharing_types',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('description', sa.Text(), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    # Per-user sharing types; owner_id links to the defining user.
    op.create_table('account_sharing_types',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('description', sa.Text(), nullable=False),
        sa.Column('owner_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['owner_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # A sharing grants a recipient user access to an account for a date range
    # (end_date nullable = open-ended).
    op.create_table('account_sharings',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('start_date', sa.DateTime(), nullable=False),
        sa.Column('end_date', sa.DateTime(), nullable=True),
        sa.Column('account_id', sa.Integer(), nullable=False),
        sa.Column('recipient_id', sa.Integer(), nullable=False),
        sa.Column('type_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['account_id'], ['accounts.id'], ),
        sa.ForeignKeyConstraint(['recipient_id'], ['users.id'], ),
        sa.ForeignKeyConstraint(['type_id'], ['account_sharing_types.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the sharing tables in reverse dependency order (FKs first)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('account_sharings')
    op.drop_table('account_sharing_types')
    op.drop_table('default_account_sharing_types')
    # ### end Alembic commands ###
| {
"repo_name": "csdt/Pawi",
"path": "migrations/versions/73edd31eee69_.py",
"copies": "1",
"size": "1907",
"license": "mit",
"hash": -1277361140435882800,
"line_mean": 33.6727272727,
"line_max": 73,
"alpha_frac": 0.6649187205,
"autogenerated": false,
"ratio": 3.47992700729927,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9609768676660165,
"avg_score": 0.007015410227820951,
"num_lines": 55
} |
""" ACCOUNTS MANAGEMENT """
class Accounts(object):
    """ CLASS FOR ACCOUNT CREATION AND LOGIN"""

    def __init__(self):
        # Seed with a default account so the demo flows work out of the box.
        self.list_of_accounts = [{'uname': 'default',
                                  'email': 'default@user.com',
                                  'pwd': 'default'}]

    def get_uname_by_email(self, email):
        """Returns username when provided with email (None if unknown)."""
        for account in self.list_of_accounts:
            if email == account['email']:
                return account['uname']

    def login(self, email, pwd):
        """Method for Handling Login Requests"""
        for account in self.list_of_accounts:
            if email == account['email']:
                if pwd == account['pwd']:
                    return "Success!"
                else:
                    return "Invalid email, password combination"
        return "Account not registered, sign up"

    def registration(self, uname, email, pwd, pwd_confirm):
        """Method for creating new accounts.

        BUG FIX: the old loop ran the validate/register logic once per stored
        account, so with more than one stored account it could register a
        duplicate email or return a validation message before checking every
        account.  The duplicate check now scans the whole list first.
        """
        for account in self.list_of_accounts:
            if email == account['email']:
                return "Your Account Already Active. Proceed to login"
        if len(pwd) < 6:
            return "Password is too short"
        if pwd != pwd_confirm:
            return "Your passwords should match"
        self.list_of_accounts.append({'uname': uname,
                                      'email': email,
                                      'pwd': pwd})
        return "Your account is now registered please proceed to login"
| {
"repo_name": "parseendavid/Andela-Developer-Challenge---Shopping-List-V2.0",
"path": "app/accounts.py",
"copies": "1",
"size": "1709",
"license": "mit",
"hash": 3251095291821929000,
"line_mean": 38.7441860465,
"line_max": 71,
"alpha_frac": 0.5330602692,
"autogenerated": false,
"ratio": 4.656675749318801,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0027973171299125886,
"num_lines": 43
} |
"""Accounts Module."""
import json
import pandas as pd
class Account(object):
    """Class with the account methods (thin wrapper over the Luno REST API)."""

    def __init__(self, main):
        """Initialise with the parent API object that owns request plumbing."""
        self.main = main

    def create_account(self, currency, name, base_account_id, counter_id):
        """Create a new account in the selected currency.

        :param currency: Currency of account
        :param name: Name of account
        :param base_account_id: Id of the base account
        :param counter_id: Id of the counter account
        :return: dict with name, currency and id of new account
        """
        data = {
            'currency': currency,
            'name': name,
            'base_account_id': base_account_id,
            'counter_id': counter_id
        }
        return self.main.api_request('accounts', data=data, http_call='post')

    def get_balance(self):
        """Get balances of all accounts."""
        return self.main.api_request('balance', None)

    def get_transactions(self, account_id, min_row=None, max_row=None):
        """Get list of transactions for an account.

        :param account_id: id of the account to query
        :param min_row: optional first row of the requested range
        :param max_row: optional last row of the requested range
        """
        params = {}
        if min_row is not None:
            params['min_row'] = min_row
        if max_row is not None:
            params['max_row'] = max_row
        return self.main.api_request(
            'accounts/%s/transactions' % (account_id,), params)

    def get_transactions_frame(self, account_id, min_row=None, max_row=None):
        """Get dataframe of transactions for an account.

        The frame is indexed by the transaction timestamp (converted from
        epoch milliseconds) and the raw ``timestamp`` column is dropped.
        """
        tx = self.get_transactions(
            account_id, min_row, max_row)['transactions']
        df = pd.DataFrame(tx)
        df.index = pd.to_datetime(df.timestamp, unit='ms')
        df.drop('timestamp', axis=1, inplace=True)
        return df

    def get_pending_transactions(self, account_id):
        """Get a list of pending transactions for an account."""
        return self.main.api_request(
            'accounts/%s/pending' % (account_id,), None)

    def get_orders(self, state=None, pair=None):
        """Get a list of most recently placed orders.

        You can specify an optional state='PENDING' parameter to
        restrict the results to only open orders. You can also specify the
        market by using the optional pair parameter.
        The list is truncated after 100 items.

        :param state: String optional 'COMPLETE', 'PENDING', or None (default)
        :param pair: optional market pair; defaults to ``self.main.pair``
        :return: API response containing the orders list
        """
        params = {'pair': self.main.pair if pair is None else pair}
        if state is not None:
            params['state'] = state
        return self.main.api_request('listorders', params)

    def get_orders_frame(self, state=None, kind='auth', pair=None):
        """Get a list of most recently placed orders as a dataframe.

        NOTE(review): *kind* is accepted but never used; it is kept only so
        existing callers do not break.
        """
        q = self.get_orders(state, pair)
        tj = json.dumps(q['orders'])
        df = pd.read_json(
            tj, convert_dates=['creation_timestamp', 'expiration_timestamp'])
        df.index = df.creation_timestamp
        return df

    def create_transfer(self, amount, currency, note,
                        source_account_id, target_account_id):
        """Transfer currency between accounts."""
        data = {
            'amount': amount,
            'currency': currency,
            'note': note,
            'source_account_id': source_account_id,
            'target_account_id': target_account_id,
        }
        return self.main.api_request('transfers', data=data,
                                     http_call='post')

    def get_transfers(self, tid=None):
        """Get list of transfers, or a single transfer when *tid* is given."""
        return self.main.api_request('transfers/{}'.format(tid or ''),
                                     http_call='get')

    def confirm_transfer(self, tid):
        """Confirm a pending transfer."""
        return self.main.api_request('transfers/{}'.format(tid),
                                     http_call='put')
| {
"repo_name": "grantstephens/pyluno",
"path": "pyluno/accounts.py",
"copies": "1",
"size": "4030",
"license": "mit",
"hash": 6704411194936504000,
"line_mean": 37.0188679245,
"line_max": 78,
"alpha_frac": 0.5779156328,
"autogenerated": false,
"ratio": 4.1891891891891895,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 106
} |
'Accounts'
from __future__ import unicode_literals
from .... import ProResource, RelatedResourceMixin
import six
import sys
class Account(RelatedResourceMixin, ProResource):
    'Abstraction of Accounts resource in duedil v3 pro api'
    # Attributes this resource exposes.
    attribute_names = [
        'uri',
        'date',
        'type'
    ]
    # Maps the account `type` field to the dotted path of the resource class
    # that models its detailed view; resolved lazily in `details` below.
    account_classes = {
        'financial': 'pro.company.accounts.financial.AccountDetailsFinancial',
        'gaap': 'pro.company.accounts.gaap.AccountDetailsGAAP',
        'ifrs': 'pro.company.accounts.ifrs.AccountDetailsIFRS',
        'insurance': 'pro.company.accounts.insurance.AccountDetailsInsurance',
        'statutory': 'pro.company.accounts.statutory.AccountDetailsStatutory',
    }
    full_endpoint = True

    def __iter__(self):
        # Iterating a dict yields its keys, so this effectively iterates the
        # attribute names; the values built here are discarded.
        return iter({i: getattr(self, i) for i in self.attribute_names})

    @property
    def path(self):
        # Drop the first 5 URI segments (API prefix) and the trailing segment.
        return self.uri.split('/', 5)[-1].rsplit('/', 1)[0]

    @property
    def details(self):
        """Load and return the type-specific account-details resource."""
        resource = self.account_classes[self.type]
        if isinstance(resource, six.string_types):
            # Resolve the dotted path against the already-imported
            # duedil.resources modules.
            module, resource = resource.rsplit('.', 1)
            resource = getattr(sys.modules['duedil.resources.{0!s}'.format(module)], resource)
        resource_obj = self.load_related('details', resource, self.full_endpoint)
        resource_obj.path = '{0}'.format(self.path)
        resource_obj.loaded = True
        return resource_obj
| {
"repo_name": "founders4schools/duedilv3",
"path": "duedil/resources/pro/company/accounts/__init__.py",
"copies": "1",
"size": "1414",
"license": "apache-2.0",
"hash": 2379960115697224700,
"line_mean": 32.6666666667,
"line_max": 94,
"alpha_frac": 0.6485148515,
"autogenerated": false,
"ratio": 3.8010752688172045,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49495901203172044,
"avg_score": null,
"num_lines": null
} |
# accounts.py
from django.contrib.auth import login, logout, authenticate
from django.contrib import messages
from django.views.generic import View
from django.shortcuts import redirect, render
from forms import SignupForm, LoginForm
from django.contrib.auth.models import User
class Login(View):
    """Render the login page and authenticate POSTed credentials."""
    login_template = 'login.html'

    def get(self, request):
        return render(request, Login.login_template, {'form': LoginForm()})

    def post(self, request):
        form = LoginForm(request.POST)
        if form.is_valid():
            user = authenticate(username=form.cleaned_data['username'],
                                password=form.cleaned_data['password'])
            if user is None:
                messages.error(request, 'Invalid username or password!')
            elif not user.is_active:
                messages.error(request, 'Your account is disabled. Please check your email.')
            else:
                user.backend_checked = None  # no-op removed; log in directly
                login(request, user)
                return redirect('/')
        # Fall through: re-render the form with any error messages queued.
        return render(request, Login.login_template, {'form': form})
class Logout(View):
    """Log the current user out and redirect to the home page."""
    def get(self, request):
        logout(request)
        return redirect('/')
class Signup(View):
    """Render the signup page and create new user accounts."""
    signup_template = 'signup.html'

    def get(self, request):
        return render(request, Signup.signup_template, {'form': SignupForm()})

    def post(self, request):
        form = SignupForm(request.POST)
        if not form.is_valid():
            return render(request, Signup.signup_template, {'form': form})
        data = form.cleaned_data
        user = User.objects.create_user(
            username=data['username'],
            password=data['password'],
            email=data['email'],
            first_name=data['first_name'],
            last_name=data['last_name'],
        )
        user.is_active = True
        user.save()
        # Re-authenticate so the session backend gets attached to the user.
        user = authenticate(username=data['username'],
                            password=data['password'])
        login(request, user)
        return redirect('/')
| {
"repo_name": "devs4v/opinify",
"path": "opinify/accounts.py",
"copies": "1",
"size": "1830",
"license": "mit",
"hash": -3841572699967768000,
"line_mean": 28.5161290323,
"line_max": 102,
"alpha_frac": 0.7027322404,
"autogenerated": false,
"ratio": 3.4269662921348316,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46296985325348317,
"avg_score": null,
"num_lines": null
} |
"""Accounts serializers module."""
from rest_framework import serializers
from accounts.models import AppUser
class AppUserSerializer(serializers.ModelSerializer):
    """AppUser serializer class."""

    class Meta:
        model = AppUser
        fields = (
            'id',
            'email',
            'password',
            'first_name',
            'last_name',
            'is_active',
            'is_verified',
            'is_admin',
            'timestamp_subscription',
            'timestamp_modified'
        )
        # NOTE(review): `write_only_fields` was removed in DRF 3.2; on modern
        # DRF use extra_kwargs = {'password': {'write_only': True}} instead,
        # otherwise the password hash may be serialized back to clients.
        write_only_fields = ('password',)
        read_only_fields = ('is_active', 'is_verified', 'is_admin', 'timestamp_subscription', 'timestamp_modified')

    def update(self, instance, validated_data):
        """Update user method.

        BUG FIX: the old implementation unconditionally called
        set_password(validated_data.get('password')), so an update without a
        password scrambled the hash (set_password(None) marks the password
        unusable), and every other submitted field was silently ignored.
        """
        password = validated_data.pop('password', None)
        for attr, value in validated_data.items():
            setattr(instance, attr, value)
        if password is not None:
            instance.set_password(password)
        instance.save()
        return instance

    def create(self, validated_data):
        """Create user method."""
        instance = AppUser.objects.create_user(**validated_data)
        # Hash the password explicitly, matching the original behavior in
        # case create_user does not do so for this custom manager.
        instance.set_password(validated_data.get('password'))
        instance.save()
        return instance
| {
"repo_name": "davideferre/django-ember-jwt-tutorial",
"path": "server/accounts/serializers.py",
"copies": "1",
"size": "1132",
"license": "mit",
"hash": -6020760668961314000,
"line_mean": 29.5945945946,
"line_max": 115,
"alpha_frac": 0.5803886926,
"autogenerated": false,
"ratio": 4.582995951417004,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5663384644017004,
"avg_score": null,
"num_lines": null
} |
"""Accounts serializers module."""
from rest_framework import serializers
from .models import AppUser
class AppUserSerializer(serializers.ModelSerializer):
    """AppUser serializer class."""

    class Meta:
        model = AppUser
        fields = (
            'id',
            'email',
            'password',
            'first_name',
            'last_name',
            'is_active',
            'is_verified',
            'is_admin',
            'timestamp_subscription',
            'timestamp_modified'
        )
        # NOTE(review): `write_only_fields` was removed in DRF 3.2; on modern
        # DRF use extra_kwargs = {'password': {'write_only': True}} instead,
        # otherwise the password hash may be serialized back to clients.
        write_only_fields = ('password',)
        read_only_fields = ('is_active', 'is_verified', 'is_admin', 'timestamp_subscription', 'timestamp_modified')

    def update(self, instance, validated_data):
        """Update user method.

        BUG FIX: the old implementation unconditionally called
        set_password(validated_data.get('password')), so an update without a
        password scrambled the hash (set_password(None) marks the password
        unusable), and every other submitted field was silently ignored.
        """
        password = validated_data.pop('password', None)
        for attr, value in validated_data.items():
            setattr(instance, attr, value)
        if password is not None:
            instance.set_password(password)
        instance.save()
        return instance

    def create(self, validated_data):
        """Create user method."""
        instance = AppUser.objects.create_user(**validated_data)
        # Hash the password explicitly, matching the original behavior in
        # case create_user does not do so for this custom manager.
        instance.set_password(validated_data.get('password'))
        instance.save()
        return instance
| {
"repo_name": "davideferre/django-rest-framework-email-accounts",
"path": "serializers.py",
"copies": "1",
"size": "1124",
"license": "mit",
"hash": -6690558983657224000,
"line_mean": 29.3783783784,
"line_max": 115,
"alpha_frac": 0.5774021352,
"autogenerated": false,
"ratio": 4.569105691056911,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00023299161230195712,
"num_lines": 37
} |
# accounts.sys_stats
# PYTHON
import os
import subprocess
# DJANGO
from django.conf import settings
from django.db import connection
def _command_output(args):
    """Run *args* (a list, shell=False) and return its captured stdout."""
    process = subprocess.Popen(args, shell=False, stdout=subprocess.PIPE)
    return process.communicate()[0]


def get_disc_usage():
    """Return labelled disc-usage report lines.

    Covers total filesystem usage (``df -h``), the downloads directory and
    the media root (``du``).
    """
    lines = []
    # Total filesystem usage.
    lines.append('Total disc usage:')
    lines.append(_command_output(['df', '-h']))
    # downloads
    dirpath = os.path.join(settings.VAVS_ROOT, 'downloads')
    lines.append('Downloads:')
    lines.append(_command_output(['du', '-h', dirpath]))
    # media (-c adds a grand total here, unlike the downloads report)
    lines.append('Media:')
    lines.append(_command_output(['du', '-ch', settings.MEDIA_ROOT]))
    return lines
def get_db_name():
    """Name of the default database from Django settings."""
    default_db = settings.DATABASES['default']
    return default_db['NAME']
def get_db_size():
    """Human-readable size of the default database (PostgreSQL only)."""
    cursor = connection.cursor()
    dbname = cursor.db.settings_dict['NAME']
    # pg_size_pretty renders the byte count as e.g. "42 MB".
    cursor.execute("SELECT pg_size_pretty(pg_database_size(%s))", [dbname])
    row = cursor.fetchone()
    return row[0] if row else 'no size'
| {
"repo_name": "valuesandvalue/valuesandvalue",
"path": "vavs_project/accounts/sys_stats.py",
"copies": "1",
"size": "1287",
"license": "mit",
"hash": -7795802205533882000,
"line_mean": 26.9782608696,
"line_max": 75,
"alpha_frac": 0.6076146076,
"autogenerated": false,
"ratio": 3.88821752265861,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.499583213025861,
"avg_score": null,
"num_lines": null
} |
"""accounts table
Revision ID: a1498e3da19c
Revises: 2cec471fc310
Create Date: 2017-04-23 19:15:13.138625
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'a1498e3da19c'       # this migration (accounts table)
down_revision = '2cec471fc310'  # parent migration
branch_labels = None
depends_on = None
def upgrade():
    """Create the accounts table (owned by a user, optionally typed)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('accounts',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('open_date', sa.DateTime(), nullable=False),
        sa.Column('description', sa.Text(), nullable=False),
        sa.Column('owner_id', sa.Integer(), nullable=False),
        sa.Column('type_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['owner_id'], ['users.id'], ),
        sa.ForeignKeyConstraint(['type_id'], ['account_types.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the accounts table, reversing upgrade()."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('accounts')
    # ### end Alembic commands ###
| {
"repo_name": "csdt/Pawi",
"path": "migrations/versions/a1498e3da19c_.py",
"copies": "1",
"size": "1094",
"license": "mit",
"hash": -8260216397159925000,
"line_mean": 27.7894736842,
"line_max": 65,
"alpha_frac": 0.6636197441,
"autogenerated": false,
"ratio": 3.4294670846394983,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4593086828739498,
"avg_score": null,
"num_lines": null
} |
# accounts.tasks
# PYTHON
from datetime import timedelta
import logging
# DJANGO
from django.contrib.auth.models import User
from django.utils.timezone import now
# CELERY
from celery import task
# ACCOUNTS
from .sys_stats import (
get_db_size,
get_disc_usage
)
@task.task(ignore_result=False,
           name='accounts.tasks.purge_nonconsenting_participants')
def purge_nonconsenting_participants():
    """Delete non-staff users with an analysis record who declined consent."""
    logger = logging.getLogger('vavs.tasks.analytics')
    # NOTE(review): `cutoff` is computed but never used -- the queryset below
    # probably needs a `date_joined__lt=cutoff` (or similar) filter so that
    # only week-old accounts are purged; confirm intent before relying on it.
    cutoff = now() - timedelta(days=7)
    users = User.objects.filter(is_staff=False, useranalysis__isnull=False,
                                useranalysis__consent=False)
    if users:
        logger.info('Deleting expired new users: %d' % users.count())
        users.delete()
    else:
        logger.info('No expired new users')
@task.task(ignore_result=False, name='accounts.tasks.disc_usage')
def disc_usage():
    """Log disc and database usage to the analytics log."""
    logger = logging.getLogger('vavs.tasks.analytics')
    usage_report = '\n'.join(get_disc_usage())
    logger.info(usage_report)
    db_report = 'DB usage: %s' % get_db_size()
    logger.info(db_report)
| {
"repo_name": "valuesandvalue/valuesandvalue",
"path": "vavs_project/accounts/tasks.py",
"copies": "1",
"size": "1041",
"license": "mit",
"hash": -1665759615427360000,
"line_mean": 27.1351351351,
"line_max": 75,
"alpha_frac": 0.6781940442,
"autogenerated": false,
"ratio": 3.4932885906040267,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.964174783122054,
"avg_score": 0.005946960716697558,
"num_lines": 37
} |
# Interactive coffee vending machine.  The "admin" user edits the stock file
# "Coffee Stock.txt"; any other user buys coffee with inserted money.
# Ask who is using the machine; "admin" opens the maintenance menu.
account=str(input("당신은 뭐시여"))
if account=="admin":
    # Load the stock file: each line is whitespace-separated fields where
    # [0] is a 1-based index, [1] the menu name, [2] the price, [3] the stock.
    f=open("Coffee Stock.txt", "r", encoding="utf-8")
    menu_status = []
    while True:
        menu = f.readline()
        if not menu: break
        menu_status.append(menu.split())
    f.close()
    # Map menu name -> its 1-based index (field [0]) for quick lookup.
    dict_menu_status={}
    for adder in menu_status:
        dict_menu_status[adder[1]]=int(adder[0])
    print(dict_menu_status)
    print("사용자 메뉴 1. 재고변경 2. 가격변경")
    admin_choice=int(input("뭐할라고"))
    if admin_choice==1:
        # Restock: keep asking until a known menu name is entered.
        while True:
            change_stock=input("무슨 커피를 더 넣으시겠습니까?")
            if change_stock in dict_menu_status.keys():
                adding_stock = int(input("몇 개 추가할래?"))
                # Field [3] holds the stock; "-1" converts the 1-based index
                # back to a list offset.
                menu_status[dict_menu_status[change_stock]-1][3] = str(int(menu_status[dict_menu_status[change_stock]-1][3]) + adding_stock)
                break
            else:
                continue
    if admin_choice==2:
        # Re-price: field [2] holds the price.
        while True:
            change_value=input("무슨 커피의 가격을 바꾸시겠습니까?")
            if change_value in dict_menu_status.keys():
                changing_value = int(input("얼마로 바꿀래?"))
                menu_status[dict_menu_status[change_value]-1][2] = str(int(changing_value))
                break
            else:
                continue
    print(menu_status)
    # Persist the (possibly edited) table back to the stock file.
    with open("Coffee Stock.txt", "w", encoding="utf-8") as f:
        for index_menu_status in menu_status:
            for j in index_menu_status:
                f.write(j + " " )
            f.write("\n")
else:
    # Customer flow: take money, vend until funds run short or change asked.
    money=int(input("돈을 입력하십시오. \n"))
    while True:
        choice=int(input("1. 블랙커피(100원), 2. 밀크커피(150원), 3. 고급커피(250원), 4. 거스름돈 \n"))
        price=[100,150,250,0]
        pick=price[choice-1]
        change=money-pick
        if money>=pick and pick>0:
            # Vend and show the remaining balance.
            print(change)
            print("원 남았습니다.")
            money = change
        elif money<pick:
            # Not enough money: abort.
            print(" 거슬러줄게 돈 벌어와 ")
            break
        elif pick==0:
            # Option 4: return the remaining money as change and stop.
            print(" %d 거슬러 드리겠습니다. ㅅㄱ " %change)
            break
"repo_name": "imn00133/pythonSeminar17",
"path": "exercise/vending_machine/Junho/10_14_bendingmachine.py",
"copies": "1",
"size": "2213",
"license": "mit",
"hash": 5866571964878378000,
"line_mean": 34.3928571429,
"line_max": 140,
"alpha_frac": 0.5254921757,
"autogenerated": false,
"ratio": 2.39540507859734,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.34208972542973404,
"avg_score": null,
"num_lines": null
} |
"""Account summary."""
# :license: MIT, see LICENSE for more details.
import click
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import formatting
# Table columns for the per-datacenter summary; also the valid --sortby keys.
COLUMNS = ['datacenter',
           'hardware',
           'virtual_servers',
           'vlans',
           'subnets',
           'public_ips']
@click.command()
@click.option('--sortby',
              help='Column to sort by',
              default='datacenter',
              type=click.Choice(COLUMNS))
@environment.pass_env
def cli(env, sortby):
    """Account summary."""
    network_manager = SoftLayer.NetworkManager(env.client)
    summary = network_manager.summary_by_datacenter()

    table = formatting.Table(COLUMNS)
    table.sortby = sortby
    # One row per datacenter, columns in the COLUMNS order.
    for datacenter_name, counts in summary.items():
        row = [
            datacenter_name,
            counts['hardware_count'],
            counts['virtual_guest_count'],
            counts['vlan_count'],
            counts['subnet_count'],
            counts['public_ip_count'],
        ]
        table.add_row(row)
    env.fout(table)
| {
"repo_name": "skraghu/softlayer-python",
"path": "SoftLayer/CLI/summary.py",
"copies": "5",
"size": "1051",
"license": "mit",
"hash": 6492692738974502000,
"line_mean": 22.8863636364,
"line_max": 48,
"alpha_frac": 0.5889628925,
"autogenerated": false,
"ratio": 4.307377049180328,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 44
} |
"""Account summary."""
# :license: MIT, see LICENSE for more details.
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import formatting
import click
@click.command()
@click.option('--sortby',
              help='Column to sort by',
              default='datacenter',
              type=click.Choice(['datacenter',
                                 'vlans',
                                 'subnets',
                                 'ips',
                                 'networking',
                                 'hardware',
                                 'vs']))
@environment.pass_env
def cli(env, sortby):
    """Account summary."""
    manager = SoftLayer.NetworkManager(env.client)
    summary = manager.summary_by_datacenter()

    columns = ['datacenter', 'vlans', 'subnets', 'ips',
               'networking', 'hardware', 'vs']
    table = formatting.Table(columns)
    table.sortby = sortby
    # Count fields in the same order as the columns after 'datacenter'.
    count_keys = ['vlanCount', 'subnetCount', 'primaryIpCount',
                  'networkingCount', 'hardwareCount', 'virtualGuestCount']
    for name, datacenter in summary.items():
        table.add_row([name] + [datacenter[key] for key in count_keys])
    env.fout(table)
| {
"repo_name": "briancline/softlayer-python",
"path": "SoftLayer/CLI/summary.py",
"copies": "2",
"size": "1285",
"license": "mit",
"hash": -653620810197669100,
"line_mean": 27.5555555556,
"line_max": 79,
"alpha_frac": 0.5143968872,
"autogenerated": false,
"ratio": 4.923371647509579,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6437768534709579,
"avg_score": null,
"num_lines": null
} |
"""Account Summary page"""
# :license: MIT, see LICENSE for more details.
import click
from SoftLayer.CLI import environment
from SoftLayer.CLI import formatting
from SoftLayer.managers.account import AccountManager as AccountManager
from SoftLayer import utils
@click.command()
@environment.pass_env
def cli(env):
    """Prints some various bits of information about an account"""
    account_manager = AccountManager(env.client)
    account_summary = account_manager.get_summary()
    env.fout(get_snapshot_table(account_summary))
def get_snapshot_table(account):
    """Generates a table for printing account summary data"""
    table = formatting.KeyValueTable(["Name", "Value"], title="Account Snapshot")
    table.align['Name'] = 'r'
    table.align['Value'] = 'l'

    table.add_row(['Company Name', account.get('companyName', '-')])
    # Invoice figures live under the nested pendingInvoice structure.
    table.add_row(['Balance', utils.lookup(account, 'pendingInvoice', 'startingBalance')])
    table.add_row(['Upcoming Invoice', utils.lookup(account, 'pendingInvoice', 'invoiceTotalAmount')])

    # Simple count fields, rendered in display order with '-' as fallback.
    count_rows = [
        ('Image Templates', 'blockDeviceTemplateGroupCount'),
        ('Dedicated Hosts', 'dedicatedHostCount'),
        ('Hardware', 'hardwareCount'),
        ('Virtual Guests', 'virtualGuestCount'),
        ('Domains', 'domainCount'),
        ('Network Storage Volumes', 'networkStorageCount'),
        ('Open Tickets', 'openTicketCount'),
        ('Network Vlans', 'networkVlanCount'),
        ('Subnets', 'subnetCount'),
        ('Users', 'userCount'),
    ]
    for label, key in count_rows:
        table.add_row([label, account.get(key, '-')])
    return table
| {
"repo_name": "allmightyspiff/softlayer-python",
"path": "SoftLayer/CLI/account/summary.py",
"copies": "3",
"size": "1762",
"license": "mit",
"hash": -147450281017231500,
"line_mean": 44.1794871795,
"line_max": 102,
"alpha_frac": 0.6923950057,
"autogenerated": false,
"ratio": 3.813852813852814,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6006247819552815,
"avg_score": null,
"num_lines": null
} |
"""Accounts URLconf.
This includes account authorization and administration.
"""
# Copyright 2015 Solinea, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.conf.urls import patterns, url
from djoser import views as djoser_views
# Hook up a subset of the djoser package. We don't include djoser's URLconf
# because that would root them at /accounts/XXX, making the URLs longer; and we
# need to override some of djoser's code in order to process user profiles.
# Account-management URLconf fragment; the project mounts it under /accounts/.
# NOTE(review): django.conf.urls.patterns() was removed in Django 1.10 —
# fine for the Django version this project pins, but flag for upgrades.
urlpatterns = patterns(
    '',
    # Account creation (djoser registration endpoint).
    url(r'^register/$', djoser_views.RegistrationView.as_view(),
        name='register'),
    # Session management.
    url(r'^login/$', djoser_views.LoginView.as_view(), name='login'),
    url(r'^logout/$', djoser_views.LogoutView.as_view(), name='logout'),
    # Password change for an authenticated user.
    url(r'^password/$',
        djoser_views.SetPasswordView.as_view(),
        name='set_password'),
    # Password reset via emailed token, then confirmation.
    url(r'^password/reset/$',
        djoser_views.PasswordResetView.as_view(),
        name='password_reset'),
    url(r'^password/reset/confirm/$',
        djoser_views.PasswordResetConfirmView.as_view(),
        name='password_reset_confirm'),
)
| {
"repo_name": "slashk/goldstone-server",
"path": "goldstone/accounts/urls.py",
"copies": "2",
"size": "1594",
"license": "apache-2.0",
"hash": -4535627917164045300,
"line_mean": 38.85,
"line_max": 79,
"alpha_frac": 0.7107904642,
"autogenerated": false,
"ratio": 3.7417840375586855,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5452574501758685,
"avg_score": null,
"num_lines": null
} |
# accounts.views
# DJANGO
from django.conf import settings
from django.contrib import auth
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.views.generic import (
FormView,
TemplateView
)
# DJANGO-BRACES
from braces.views import LoginRequiredMixin
# FBDATA
from fbdata.participant import (
get_participants,
get_participant_profile
)
# SURVEYS
from surveys.handlers import get_answers_for_email
from surveys.models import (
Respondent,
Survey
)
# ACCOUNTS
from .forms import (
ConsentForm,
LoginForm,
StaffProfileForm
)
from .handlers import get_staff_profile
############
# CONTACT
############
class LoginView(FormView):
    """Form-based login.

    A successful login continues to the profile page; a failed one is
    redirected to the access-denied page.
    """
    template_name = 'accounts/login.html'
    form_class = LoginForm
    success_url = '/accounts/profile/'
    redirect_url = '/accounts/denied/'

    def get_redirect_url(self):
        # Where to send users whose credentials were rejected.
        return self.redirect_url

    def form_valid(self, form):
        # The form itself performs the authentication attempt.
        if not form.login(self.request):
            return HttpResponseRedirect(self.get_redirect_url())
        return super(LoginView, self).form_valid(form)
class LogoutView(LoginRequiredMixin, TemplateView):
    """Log the requesting user out, then render a logout confirmation page."""
    template_name = 'accounts/logout.html'
    def get(self, request):
        # End the session before rendering, so the template sees an
        # anonymous user.
        auth.logout(request)
        return super(LogoutView, self).get(request)
class ConsentView(FormView):
    """Consent form view (FormView flavour).

    NOTE(review): this class is immediately shadowed by the
    ``ConsentView(TemplateView)`` defined right below it, so this
    definition is effectively dead code — consider removing one of them.
    """
    template_name = 'accounts/consent.html'
    form_class = ConsentForm
    success_url = '/accounts/profile/'
    redirect_url = '/accounts/denied/'

    def get_redirect_url(self):
        # Destination for a rejected consent/login attempt.
        return self.redirect_url

    def form_valid(self, form):
        if form.login(self.request):
            # FIX: the original called super(LoginView, self), which raises
            # TypeError because LoginView is not in this class's MRO.
            return super(ConsentView, self).form_valid(form)
        else:
            return HttpResponseRedirect(self.get_redirect_url())
class ConsentView(TemplateView):
    """Participant consent flow (this definition shadows the FormView-based
    ``ConsentView`` above).

    Staff are sent straight to the profile page; participants must submit
    the consent form before their profile becomes reachable.
    """
    template_name = 'accounts/consent.html'

    def get(self, request):
        if request.user.is_staff:
            return HttpResponseRedirect(reverse('profile'))
        participant = get_participant_profile(request.user)
        if participant.consent:
            # Consent already recorded; nothing to ask.
            return HttpResponseRedirect(reverse('profile'))
        else:
            form = ConsentForm()
            return self.render_to_response({'form': form})

    def post(self, request):
        if request.user.is_staff:
            return HttpResponseRedirect(reverse('profile'))
        form = ConsentForm(request.POST)
        if form.is_valid():
            participant = get_participant_profile(request.user)
            participant.consent = True
            participant.save()
            return HttpResponseRedirect(reverse('profile'))
        else:
            # FIX: the original also passed 'survey': survey here, but no
            # ``survey`` variable exists in this scope — every invalid form
            # submission crashed with NameError.
            return self.render_to_response({'form': form})
class ProfileView(LoginRequiredMixin, TemplateView):
    """Profile page for both staff and participants.

    Staff see/edit their staff profile plus survey and participant
    listings; participants see their own survey data once they have
    consented (otherwise they are redirected to the consent page).
    """
    template_name = 'accounts/profile.html'

    def _user_surveys(self, user):
        # Map each survey the user has answered to their Respondent record.
        # NOTE(review): the [0] index assumes at least one matching
        # Respondent per survey; the filter above should guarantee that.
        survey_data = {}
        surveys = Survey.objects.filter(
            questions__answers__respondent__email=user.email
        ).order_by('created')
        for survey in surveys:
            survey_data[survey] = Respondent.objects.filter(
                email=user.email,
                survey_answers__question__survey=survey)[0]
        return survey_data

    def _staff_data(self, user, profile, form):
        # Template context for staff users.
        data = {'user': user}
        data['profile'] = profile
        data['form'] = form
        data['surveys'] = Survey.objects.all().order_by('created')
        data['participants'] = get_participants()
        return data

    def get(self, request):
        user = request.user
        if user.is_staff:
            profile = get_staff_profile(user)
            form = StaffProfileForm(instance=profile)
            data = self._staff_data(user, profile, form)
        else:
            try:
                profile = user.get_profile()
            except Exception:
                # FIX: was a bare ``except:``, which also swallowed
                # SystemExit/KeyboardInterrupt. Missing profiles are
                # expected, so fall back to None.
                profile = None
            participant = get_participant_profile(user)
            if participant.consent:
                data = {'user': user,
                        'profile': profile,
                        'participant': participant,
                        'surveys': self._user_surveys(user)}
            else:
                return HttpResponseRedirect(reverse('participant_consent'))
        return self.render_to_response(data)

    def post(self, request):
        user = request.user
        if user.is_staff:
            profile = get_staff_profile(user)
            form = StaffProfileForm(request.POST, instance=profile)
            if form.is_valid():
                form.save()
                return HttpResponseRedirect(reverse('profile'))
            else:
                # Re-render with validation errors.
                data = self._staff_data(user, profile, form)
        else:
            # Participants have nothing to POST here.
            data = {'user': user}
        return self.render_to_response(data)
| {
"repo_name": "valuesandvalue/valuesandvalue",
"path": "vavs_project/accounts/views.py",
"copies": "1",
"size": "4922",
"license": "mit",
"hash": -1485789706435000000,
"line_mean": 30.1518987342,
"line_max": 79,
"alpha_frac": 0.5969118245,
"autogenerated": false,
"ratio": 4.265164644714038,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5362076469214038,
"avg_score": null,
"num_lines": null
} |
# Django Imports
from django.shortcuts import render, redirect
from django.contrib import messages
from django.contrib.auth import logout as logout_user
from django.contrib.auth import login as login_user
from django.contrib.auth import authenticate
from django.utils.translation import ugettext_lazy as _
# Other Imports
from accounts.forms import AuthenticationForm, RegistrationForm
from accounts.models import User
from messenger.identifier import generate
import cr_config
# LOGOUT
# Pretty self explanatory. Logs out a user if they go to this URL whilst
# logged in. Note: The message seems to sit on the request if the user is
# sent to an error page. Waiting for the next successful page.
def logout(request):
    """End the current session (if any) and bounce back to the front page."""
    if request.user.is_authenticated():
        logout_user(request)
        # Translated confirmation; surfaces on the next rendered page.
        messages.add_message(request, messages.INFO, _("Successfully logged out."))
    return redirect("frontpage")
# LOGIN
# Handles login requests. Note: This function actually doubles as a form,
# accepting inputs from itself if they exist. The majority of this function
# is dedicated to handling the login process from the POST data.
# TODO Put email verification timer in a code well/block
def login(request):
    """Display and process the login form.

    Doubles as its own form handler: a POST carries credentials, anything
    else just renders the page. A ``next`` query parameter, when present,
    is preserved so the user lands on the page they originally wanted.
    """
    if request.user.is_authenticated():  # Already signed in
        messages.add_message(request, messages.INFO, "You're already logged in to Comet. If you want to login to a different account please <a href='/logout'>logout</a> first.")
        return redirect("frontpage")

    # Keep the post-login destination across renders.
    next_dir = request.GET.get("next", "")

    if not request.POST:
        # Plain GET: show an empty form.
        return renderLogin(request, next_dir=next_dir)

    form = AuthenticationForm(request.POST)
    if form.is_valid():
        data = form.cleaned_data
        # The email field is called 'username' because of how the auth
        # backend works.
        user = authenticate(
            username=data["email"],
            password=data["password"],
        )
        if user is not None:
            if user.is_active:
                login_user(request, user)
                if "next" in request.GET:
                    return redirect(request.GET["next"])
                return redirect("messages")
            # Account suspended: alert the user and fall through to render.
            messages.add_message(request, messages.ERROR, "Sorry, this account has been suspended. <a href='#'>Find out more.</a>")
        else:
            # Credentials did not match an account.
            messages.add_message(request, messages.ERROR, "Username or password is incorrect.")

    # Invalid form or failed login: re-render with the bound form so any
    # error messages are shown, keeping the next directory intact.
    return renderLogin(request, next_dir=next_dir, form=form)
# renderLogin(request, next_dir, [form])
# Gathers and formats any data that needs to be passed to the authentication
# template. It then returns an HttpResponse object with the compiled template
# to be sent to the client.
def renderLogin(request, next_dir="", form=None):
    """Render the login page.

    ``next_dir`` is the destination to return to after authentication;
    ``form`` is an optional bound form (used to redisplay errors).

    FIX: the previous default ``form=AuthenticationForm()`` was evaluated
    once at import time, so every request without an explicit form shared
    a single long-lived form instance. Build a fresh one per call.
    """
    if form is None:
        form = AuthenticationForm()
    PAGE_NAME = "Login"  # Name of page, used to format the title
    # Make sure URL is kept if the user decides to register
    if not next_dir == "":
        next_dir = "?next=" + next_dir
    return render(request, "accounts/index.html", {
        "title": (cr_config.TITLE_FORMAT % PAGE_NAME),
        "next_dir": next_dir,
        "form": form,
        "form_type": "login",
    })
# REGISTER
# Handles registration requests. Note: This function actually doubles as a form,
# accepting inputs from itself if they exist. The majority of this function
# is dedicated to handling the registration process from the POST data.
# TODO Recaptcha setup
def register(request):
    """Display and process the registration form.

    Doubles as its own form handler; on success the new user is created,
    authenticated and logged in, honouring any ``next`` query parameter.
    """
    if request.user.is_authenticated():  # Already signed in
        messages.add_message(request, messages.INFO, "You're currently logged in to Comet. If you want to register another account please <a href='/logout'>logout</a> first.")
        return redirect("frontpage")

    # Keep the post-registration destination across renders.
    next_dir = request.GET.get("next", "")

    if not request.POST:
        # Plain GET: show an empty form.
        return renderRegister(request, next_dir=next_dir)

    form = RegistrationForm(request.POST)
    if not form.is_valid():
        # Re-render with the bound form so validation errors are shown.
        return renderRegister(request, next_dir=next_dir, form=form)

    data = form.cleaned_data
    # Draw identifiers until one is unused.
    user_url = generate()
    while User.objects.filter(user_url=user_url).exists():
        user_url = generate()
    # You need to call User.objects.create_user rather than accessing
    # the user manager directly (it hashes the password).
    User.objects.create_user(
        email=data["email"],
        username=data["username"],
        password=data["password"],
        user_url=user_url,
    )
    # Authenticate the freshly created account and start a session.
    user = authenticate(
        username=data["email"],
        password=data["password"],
    )
    login_user(request, user)
    if "next" in request.GET:
        return redirect(request.GET["next"])
    return redirect("messages")
# renderRegister(request, next_dir, [form])
# Gathers and formats any data that needs to be passed to the Registration
# template. It then returns an HttpResponse object with the compiled template
# to be sent to the client.
def renderRegister(request, next_dir="", form=None):
    """Render the registration page.

    ``next_dir`` is the destination to return to after registration;
    ``form`` is an optional bound form (used to redisplay errors).

    FIX: the previous default ``form=RegistrationForm()`` was evaluated
    once at import time, so every request without an explicit form shared
    a single long-lived form instance. Build a fresh one per call.
    """
    if form is None:
        form = RegistrationForm()
    PAGE_NAME = "Register"  # Name of page, used to format the title
    # Make sure URL is formatted in case the form is regenerated.
    if not next_dir == "":
        next_dir = "?next=" + next_dir
    return render(request, "accounts/index.html", {
        "title": (cr_config.TITLE_FORMAT % PAGE_NAME),
        "next_dir": next_dir,
        "form": form,
        "form_type": "register",
    })
| {
"repo_name": "LuckehPickle/Comet",
"path": "accounts/views.py",
"copies": "2",
"size": "7907",
"license": "apache-2.0",
"hash": -4663285429277430000,
"line_mean": 43.1731843575,
"line_max": 177,
"alpha_frac": 0.6534716074,
"autogenerated": false,
"ratio": 4.226082308925708,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5879553916325708,
"avg_score": null,
"num_lines": null
} |
# accounts/views.py
from django.contrib.auth import authenticate,login
from django.http import HttpResponse,Http404
import json
from accounts import forms
def auth(username, password):
    """Check credentials against the auth backend.

    Returns a ``(result_dict, user)`` pair. ``result_dict['error_no']`` is
    0 on success, 1 for bad credentials, 2 for an inactive account; on
    success it also carries id, username and a display name.
    """
    ret = {}
    user = authenticate(username=username, password=password)
    if user is None:
        # incorrect username and/or password
        ret['error_no'] = 1
        ret['error_text'] = "Incorrect username and/or password."
    elif not user.is_active:
        # close, but no.
        ret['error_no'] = 2
        ret['error_text'] = "Account is not active."
    else:
        # success
        ret['error_no'] = 0
        ret['id'] = user.id
        ret['username'] = user.username
        # if the first/last name is blank, fall back to the username
        fn = user.get_full_name()
        ret['fullname'] = fn if fn else user.username
    return ret, user
def logax(request):
    """AJAX login endpoint: authenticate POSTed credentials, return JSON.

    ``error_no`` in the response: 0 success, 1 bad credentials, 2 inactive
    account (both from ``auth``), -1 invalid form, -2 not a POST request.

    FIX: removed leftover Python 2 debug ``print`` statements that polluted
    stdout on every request, and the unused ``l`` binding.
    """
    if request.method == 'POST':
        form = forms.LoginForm(request.POST)
        if form.is_valid():
            username = form.cleaned_data['username']
            password = form.cleaned_data['password']
            # check for valid login:
            ret, user = auth(username, password)
            # if it is valid, log them in.
            if not ret['error_no']:
                login(request, user)
        else:
            ret = {'error_no': -1, 'error_text': 'form error'}
    else:
        ret = {'error_no': -2, 'error_text': 'not POST'}
    response = HttpResponse(json.dumps(ret, indent=1))
    response['Content-Type'] = 'application/json'
    return response
| {
"repo_name": "EricSchles/veyepar",
"path": "dj/accounts/views.py",
"copies": "1",
"size": "1657",
"license": "mit",
"hash": 6106258434455011000,
"line_mean": 27.5689655172,
"line_max": 64,
"alpha_frac": 0.5733252867,
"autogenerated": false,
"ratio": 4.061274509803922,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5134599796503921,
"avg_score": null,
"num_lines": null
} |
"""Account types
Revision ID: 2cec471fc310
Revises: 9f2d4e946acb
Create Date: 2017-04-23 01:30:29.193837
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '2cec471fc310'  # this migration's identifier
down_revision = '9f2d4e946acb'  # immediate parent in the migration chain
branch_labels = None  # not the head of a named branch
depends_on = None  # no cross-dependencies on other revisions
def upgrade():
    """Create the ``default_account_types`` and ``account_types`` tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('default_account_types',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(), nullable=False),
    sa.Column('description', sa.Text(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('account_types',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(), nullable=False),
    sa.Column('description', sa.Text(), nullable=False),
    # Each custom account type belongs to a user.
    sa.Column('owner_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['owner_id'], ['users.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the tables created by ``upgrade`` (children first)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('account_types')
    op.drop_table('default_account_types')
    # ### end Alembic commands ###
| {
"repo_name": "csdt/Pawi",
"path": "migrations/versions/2cec471fc310_.py",
"copies": "1",
"size": "1211",
"license": "mit",
"hash": -3096960457579853000,
"line_mean": 27.8333333333,
"line_max": 65,
"alpha_frac": 0.6655656482,
"autogenerated": false,
"ratio": 3.440340909090909,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4605906557290909,
"avg_score": null,
"num_lines": null
} |
# Accumulated Cyclone Energy module
def calc(wind):
    """ACE contribution of one wind reading (knots).

    Tropical-depression-strength winds (<= 34 kt) contribute nothing.
    """
    if wind <= 34:  # ACE does not accrue for tropical depressions
        return 0
    w = float(wind)
    return w * w / 10000
def calc_ignore(wind):
    """ACE contribution of one wind reading, regardless of strength."""
    w = float(wind)
    return w * w / 10000
def hdp(wind):
    """Hurricane Destruction Potential contribution of one wind reading.

    Winds at or below 64 kt contribute nothing.
    """
    if wind <= 64:
        return 0
    w = float(wind)
    return w * w / 10000
def cumACE(winds):
    """Total ACE for an iterable of wind readings (knots)."""
    return sum((calc(w) for w in winds), 0.0)
def cumACE_ignore(winds):
    """Total ACE for an iterable of winds, counting sub-storm strength too."""
    return sum((calc_ignore(w) for w in winds), 0.0)
def cumHDP(winds):
    """Total Hurricane Destruction Potential for an iterable of winds."""
    return sum((hdp(w) for w in winds), 0.0)
def climo_at(ace):
    """Categorize an Atlantic season by its total ACE.

    FIX: the original tested ``ace > 111`` before ``ace >= 153``, so the
    hyperactive branch was unreachable; the most restrictive threshold
    must be checked first.
    """
    if ace >= 153:
        return "Above normal (Hyperactive)"
    elif ace > 111:
        return "Above normal"
    elif ace < 66:
        return "Below normal"
    else:
        return "Near normal"
def climo_ep(ace):
    """Categorize an East Pacific season by its total ACE."""
    if ace < 86:
        return "Below normal"
    if ace > 135:
        return "Above normal"
    return "Near normal"
| {
"repo_name": "harrisontran/encyclonepedia",
"path": "ace.py",
"copies": "1",
"size": "1560",
"license": "mit",
"hash": -4050872378706075600,
"line_mean": 27.3636363636,
"line_max": 82,
"alpha_frac": 0.6070512821,
"autogenerated": false,
"ratio": 3.586206896551724,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4693258178651724,
"avg_score": null,
"num_lines": null
} |
"""accumulated precip."""
import datetime
from pandas.io.sql import read_sql
from pyiem.util import get_autoplot_context, get_dbconn
from pyiem.plot import figure_axes
from pyiem.exceptions import NoDataFound
# Plot-variable choices exposed to the user.
PDICT = {
    "precip": "Precipitation",
    "snow": "Snow",
}


def get_description():
    """ Return a dict describing how to call this plotter """
    # FIX: corrected the user-facing typo "frought" -> "fraught" in the
    # description text below; everything else is unchanged.
    desc = dict()
    desc["data"] = True
    desc[
        "description"
    ] = """This chart presents year to date accumulated
    precipitation for a station of your choice. The year with the highest and
    lowest accumulation is shown along with the envelop of observations and
    long term average. You can optionally plot up to three additional years
    of your choice.</p>

    <p>You can specify the start date (ignore the year) for when to start
    the 365 day accumulation of precipitation. The year shown is the year
    for the start of the accumulation period. For example, if you accumulate
    after 1 October, the year 2020 would represent the period from 1 Oct 2020
    to 30 Sep 2021.</p>

    <p>Accumulating snowfall data is fraught with peril, but this app will let
    you do it! The app has a tight requirement of no less than 3 days of
    missing data for the year to be considered in the plot.</p>
    """
    today = datetime.date.today()
    thisyear = today.year
    desc["arguments"] = [
        dict(
            type="station",
            name="station",
            default="IATDSM",
            label="Select Station:",
            network="IACLIMATE",
        ),
        dict(
            type="select",
            options=PDICT,
            name="var",
            default="precip",
            label="Accumulate Precipitation or Snow?",
        ),
        dict(
            type="year",
            name="year1",
            default=thisyear,
            label="Additional Year to Plot:",
        ),
        dict(
            type="year",
            name="year2",
            optional=True,
            default=(thisyear - 1),
            label="Additional Year to Plot: (optional)",
        ),
        dict(
            type="year",
            name="year3",
            optional=True,
            default=(thisyear - 2),
            label="Additional Year to Plot: (optional)",
        ),
        dict(
            type="date",
            name="sdate",
            default="2000/01/01",
            min="2000/01/01",
            max="2000/12/31",
            label="Start Day of Year for Plot: (ignore year)",
        ),
        dict(
            optional=True,
            type="date",
            name="edate",
            default=f"2000/{today.strftime('%m/%d')}",
            min="2000/01/01",
            max="2000/12/31",
            label="End Day of Year for Plot: (ignore year)",
        ),
        dict(
            type="int",
            default="3",
            label="Number of missing days to allow before excluding year",
            name="m",
        ),
    ]
    return desc
def cull_missing(df, colname, missingdays):
    """Remove years with too much missing data.

    A year is excluded when its count of NaN values in ``colname`` exceeds
    ``missingdays``.  Returns ``(filtered_df, excluded_years)``.
    """
    nan_per_year = df[["binyear", colname]].groupby("binyear").agg(
        lambda col: col.isnull().sum()
    )
    # Years whose NaN count exceeds the allowed number of missing days.
    bad = nan_per_year[nan_per_year[colname] > missingdays]
    years = list(bad.index.values) if not bad.empty else []
    return df[~df["binyear"].isin(years)], years
def plotter(fdict):
    """Build the accumulated precip/snow figure.

    Returns a ``(matplotlib Figure, pandas DataFrame)`` pair per the IEM
    autoplot convention.  Raises ``NoDataFound`` when the station has no
    metadata or no observations.

    FIX: the title format arguments were swapped — the original rendered
    e.g. "Accumulated 1 July after Precipitation"; the variable name
    belongs in the first slot and the start date in the second.
    """
    pgconn = get_dbconn("coop")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    ab = ctx["_nt"].sts[station]["archive_begin"]
    if ab is None:
        raise NoDataFound("Unknown station metadata.")
    year1 = ctx.get("year1")
    year2 = ctx.get("year2")
    year3 = ctx.get("year3")
    sdate = ctx["sdate"]
    # State-partitioned climate table, e.g. alldata_ia for Iowa stations.
    table = "alldata_%s" % (station[:2],)
    # belt and suspenders: ctx["var"] is interpolated into SQL below
    assert ctx["var"] in PDICT
    # binyear assigns days before the start-day to the previous "year"
    # so that each accumulation period is contiguous.
    df = read_sql(
        f"""
    with obs as (
        SELECT day, {ctx["var"]},
        case when sday >= %s then year else year - 1 end as binyear
        from {table} WHERE station = %s
    )
    SELECT day, binyear::int, {ctx["var"]},
    row_number() OVER (PARTITION by binyear ORDER by day ASC) as row,
    sum({ctx["var"]}) OVER (PARTITION by binyear ORDER by day ASC) as accum
    from obs ORDER by day ASC
    """,
        pgconn,
        params=(sdate.strftime("%m%d"), station),
        index_col="day",
    )
    if df.empty:
        raise NoDataFound("No data found!")
    # Truncate the plot when an explicit end day is given.
    doy_trunc = 365
    today = ctx.get("edate", datetime.date.today())
    if ctx.get("edate") is not None:
        today_doy = int(today.strftime("%j"))
        sdate_doy = int(sdate.strftime("%j"))
        # Wrap past 31 December when the end day precedes the start day.
        offset = 0 if today_doy > sdate_doy else 365
        doy_trunc = today_doy + offset - sdate_doy
        df = df[df["row"] <= doy_trunc]
    df, cullyears = cull_missing(df, ctx["var"], ctx["m"])
    extra = "" if doy_trunc == 365 else f" till {today.strftime('%-d %B')}"
    title = "Accumulated %s after %s%s" % (
        PDICT[ctx["var"]],
        sdate.strftime("%-d %B"),
        extra,
    )
    subtitle = "[%s] %s (%s-%s)" % (
        station,
        ctx["_nt"].sts[station]["name"],
        df["binyear"].min(),
        datetime.date.today().year,
    )
    if cullyears:
        subtitle += (
            f", {len(cullyears)} years excluded due to "
            f"missing > {ctx['m']} days"
        )
    (fig, ax) = figure_axes(title=title, subtitle=subtitle)
    # Average accumulation by day-of-period.
    jday = df[["row", "accum"]].groupby("row").mean()
    # The final day is under-sampled; carry the previous value forward.
    jday["accum"].values[-1] = jday["accum"].values[-2]
    ax.plot(
        range(1, len(jday.index) + 1),
        jday["accum"],
        lw=2,
        zorder=5,
        color="k",
        label="Average - %.2f" % (jday["accum"].iloc[-1],),
    )
    # Envelope of min and max accumulation.
    jmin = df[["row", "accum"]].groupby("row").min()
    jmax = df[["row", "accum"]].groupby("row").max()
    ax.fill_between(
        range(1, len(jday.index) + 1),
        jmin["accum"],
        jmax["accum"],
        zorder=2,
        color="tan",
    )
    # Plot the max year, the min year, and up to three user-chosen years.
    plotted = []
    for year, color in zip(
        [
            df["binyear"][df["accum"].idxmax()],
            df["binyear"][df[df["row"] == doy_trunc]["accum"].idxmin()],
            year1,
            year2,
            year3,
        ],
        ["b", "brown", "r", "g", "purple"],
    ):
        if year is None or year in plotted:
            continue
        plotted.append(year)
        df2 = df[df["binyear"] == year]
        if df2.empty:
            continue
        lastrow = df2.iloc[-1]
        extra = ""
        # Flag years whose data ends early (incomplete period).
        if (lastrow["row"] + 2) < doy_trunc:
            extra = f" to {df2.index.values[-1].strftime('%-d %b')}"
        labelyear = year
        if df2.index.values[0].year != df2.index.values[-1].year:
            labelyear = "%s-%s" % (
                df2.index.values[0].year,
                df2.index.values[-1].year,
            )
        ax.plot(
            range(1, len(df2.index) + 1),
            df2["accum"],
            label="%s - %.2f%s" % (labelyear, lastrow["accum"], extra),
            color=color,
            lw=2,
        )
    ax.set_ylabel(PDICT[ctx["var"]] + " [inch]")
    ax.grid(True)
    ax.legend(loc=2)
    # Tick at the first of each month within the period.
    xticks = []
    xticklabels = []
    for i in range(doy_trunc + 1):
        date = sdate + datetime.timedelta(days=i)
        if date.day != 1:
            continue
        xticks.append(i)
        xticklabels.append(date.strftime("%b"))
    ax.set_xlim(0, doy_trunc + 1)
    ax.set_ylim(bottom=-0.1)
    ax.set_xticks(xticks)
    ax.set_xticklabels(xticklabels)
    return fig, df
# Quick manual smoke test when the module is run directly.
if __name__ == "__main__":
    plotter(dict(sdate="2000-07-01", station="IA8706", var="snow"))
| {
"repo_name": "akrherz/iem",
"path": "htdocs/plotting/auto/scripts100/p172.py",
"copies": "1",
"size": "7820",
"license": "mit",
"hash": -5308244102199103000,
"line_mean": 29.546875,
"line_max": 79,
"alpha_frac": 0.5267263427,
"autogenerated": false,
"ratio": 3.585511233379184,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4612237576079184,
"avg_score": null,
"num_lines": null
} |
"""Accumulate messages to show on the next page request.
The ``Flash`` class is useful when you want to redirect to another page and also
show a status message on that page, such as "Changes saved" or
"No previous search found; returning to home page".
THE IMPLEMENTATION DEPENDS ON PYLONS. However, it can easily be adapted
for another web framework.
PYRAMID USERS: use the flash methods built into Pyramid's ``Session`` object.
This implementation is incompatible with Pyramid.
A typical Pylons application instantiates a ``Flash`` object in
myapp/lib/helpers.py::
from webhelpers.pylonslib.flash import Flash as _Flash
flash = _Flash()
The helpers module is then imported into your controllers and
templates as ``h``. Whenever you want to set a message, call the instance::
h.flash("Record deleted.")
You can set additional messages too::
h.flash("Hope you didn't need it.")
Now make a place in your site template for the messages. In Mako you
might do:
.. code-block:: mako
<% messages = h.flash.pop_messages() %>
% if messages:
<ul id="flash-messages">
% for message in messages:
<li>${message}</li>
% endfor
</ul>
% endif
You can style this to look however you want:
.. code-block:: css
ul#flash-messages {
color: red;
background-color: #FFFFCC;
font-size: larger;
font-style: italic;
margin-left: 40px;
padding: 4px;
list-style: none;
}
Multiple flash objects
======================
You can define multiple flash objects in your application to display
different kinds of messages at different places on the page. For instance,
you might use the main flash object for general messages, and a second
flash object for "Added dookickey" / "Removed doohickey" messages next to a
doohickey manager.
Message categories
==================
WebHelpers 1.0 adds message categories, contributed by Wichert Akkerman.
These work like severity levels in Python's logging system. The standard
categories are "*warning*", "*notice*", "*error*", and "*success*", with
the default being "*notice*". The category is available in the message's
``.category`` attribute, and is normally used to set the container's CSS
class.
This is the *only* thing it does. Calling ``.pop_messages()`` pops all messages
in the order registered, regardless of category. It is *not* possible to pop
only a certain category, or all levels above a certain level, or to group
messages by category. If you want to group different kinds of messages
together, or pop only certain categories while leaving other categories, you
should use multiple ``Flash`` objects.
You can change the standard categories by overriding the ``.categories``
and ``.default_category`` class attributes, or by providing alternate
values using constructor keywords.
Category example
----------------
Let's show a standard way of using flash messages in your site: we will
demonstrate *self-healing messages* (similar to what Growl does on OSX)
to show messages in a site.
To send a message from python just call the flash helper method::
h.flash(u"Settings have been saved")
This will tell the system to show a message in the rendered page. If you need
more control you can specify a message category as well: one of *warning*,
*notice*, *error* or *success*. The default category is *notice*. For example::
h.flash(u"Failed to send confirmation email", "warning")
We will use a very simple markup style: messages will be placed in a ``div``
with id ``selfHealingFeedback`` at the end of the document body. The messages
are standard paragraphs with a class indicating the message category. For
example::
<html>
<body>
<div id="content">
...
...
</div>
<div id="selfHealingFeedback">
        <p class="success">Successfully updated your settings</p>
<p class="warning">Failed to send confirmation email</p>
</div>
</body>
</html>
This can easily created from a template. If you are using Genshi this
should work:
.. code-block: html
<div id="selfHealingFeedback">
<p class="notice" py:for="message in h.flash.pop_messages()"
py:attrs="{'class' : message.category}" py:content="message">
This is a notice.
</p>
</div>
The needed CSS is very simple:
.. code-block: css
#selfHealingFeedback {
position: fixed;
top: 20px;
left: 20px;
z-index: 2000;
}
#selfHealingFeedback p {
margin-bottom: 10px;
width: 250px;
opacity: 0.93;
}
p.notice,p.error,p.success,p.warning {
border: 3px solid silver;
padding: 10px;
-webkit-border-radius: 3px;
-moz-border-radius: 3px;
border-radius: 3px;
-webkit-box-shadow: 0 0 5px silver;
}
Choosing different colours for the categories is left as an exercise
for the reader.
Next we create the javascript that will manage the needed behaviour (this
implementation is based on jQuery)::
    function SetupMessage(msg) {
        var remover = function () {
            msg.animate({opacity: 0}, "slow")
            .slideUp("slow", function() { msg.remove() }); };
        msg.data("healtimer", setTimeout(remover, 10000))
        .click(function() { clearTimeout(msg.data("healtimer")); remover(); });
    }
function ShowMessage(message, category) {
if (!category)
category="notice";
var container = $("#selfHealingFeedback");
if (!container.length)
container=$("<div id='selfHealingFeedback'/>").appendTo("body");
var msg = $("<p/>").addClass(category).html(message);
SetupMessage(msg);
msg.appendTo(container);
}
    $(document).ready(function() {
        $("#selfHealingFeedback p").each(function() { SetupMessage($(this)); });
    });
The ``SetupMessage`` function configures the desired behaviour: a message
disappears after 10 seconds, or if you click on it. Removal is done using
a simple animation to avoid messages jumping around on the screen.
This function is called for all messages as soon as the document has fully
loaded. The ``ShowMessage`` function works exactly like the ``flash`` method
in python: you can call it with a message and optionally a category and it
will pop up a new message.
JSON integration
----------------
It is not unusual to perform a remote task using a JSON call and show a
result message to the user. This can easily be done using a simple wrapper
around the ShowMessage method::
function ShowJSONResponse(info) {
if (!info.message)
return;
ShowMessage(info.message, info.message_category);
}
You can use this direct as the success callback for the jQuery AJAX method::
$.ajax({type: "POST",
url: "http://your.domain/call/json",
dataType: "json",
success: ShowJSONResponse
});
if you need to perform extra work in your callback method you can call
it yourself as well, for example::
<form action="http://your.domain/call/form">
<input type="hidden" name="json_url" value="http://your.domain/call/json">
<button>Submit</button>
</form>
  <script type="text/javascript">
$(document).ready(function() {
$("button").click(function() {
var button = $(this);
button.addClass("processing");
$.ajax({type: "POST",
url: this.form["json_url"].value,
dataType: "json",
success: function(data, status) {
button.removeClass("processing");
ShowJSONResponse(data);
},
error: function(request, status, error) {
button.removeClass("processing");
ShowMessage("JSON call failed", "error");
}
});
return false;
});
});
</script>
This sets up a simple form which can be submitted normally by non-javascript
enabled browsers. If a user does have javascript an AJAX call will be made
to the server and the result will be shown in a message. While the call is
active the button will be marked with a *processing* class.
The server can return a message by including a ``message`` field in its
response. Optionally a ``message_category`` field can also be included
which will be used to determine the message category. For example::
@jsonify
def handler(self):
..
..
return dict(message=u"Settings successfully updated")
"""
# Do not import Pylons at module level; only within functions. All WebHelpers
# modules should be importable on any Python system for the standard
# regression tests.
from webhelpers.html import escape
__all__ = ["Flash", "Message"]
class Message(object):
    """One flash message, as returned by ``Flash.pop_messages()``.

    ``str()`` of an instance yields the raw text; ``__html__`` yields the
    HTML-escaped text. Instances expose:

    * ``message``: the message text.
    * ``category``: the category specified when the message was created.
    """

    def __init__(self, category, message):
        self.category = category
        self.message = message

    def __str__(self):
        return self.message

    # Python 2 compatibility: unicode() mirrors str().
    __unicode__ = __str__

    def __html__(self):
        # Escaped form, safe for direct embedding in templates.
        return escape(self.message)
class Flash(object):
    """Accumulate a list of messages to show at the next page request.
    """
    # Class-level defaults; __init__ may install per-instance overrides.
    # List of allowed categories. If None, allow any category.
    categories = ["warning", "notice", "error", "success"]
    # Default category if none is specified.
    default_category = "notice"
def __init__(self, session_key="flash", categories=None, default_category=None):
"""Instantiate a ``Flash`` object.
``session_key`` is the key to save the messages under in the user's
session.
``categories`` is an optional list which overrides the default list
of categories.
``default_category`` overrides the default category used for messages
when none is specified.
"""
self.session_key = session_key
if categories is not None:
self.categories = categories
if default_category is not None:
self.default_category = default_category
if self.categories and self.default_category not in self.categories:
raise ValueError("unrecognized default category %r" % (self.default_category,))
def __call__(self, message, category=None, ignore_duplicate=False):
"""Add a message to the session.
``message`` is the message text.
``category`` is the message's category. If not specified, the default
category will be used. Raise ``ValueError`` if the category is not
in the list of allowed categories.
If ``ignore_duplicate`` is true, don't add the message if another
message with identical text has already been added. If the new
message has a different category than the original message, change the
original message to the new category.
"""
if not category:
category = self.default_category
elif self.categories and category not in self.categories:
raise ValueError("unrecognized category %r" % (category,))
# Don't store Message objects in the session, to avoid unpickling
# errors in edge cases.
new_message_tuple = (category, message)
from pylons import session
messages = session.setdefault(self.session_key, [])
# ``messages`` is a mutable list, so changes to the local variable are
# reflected in the session.
if ignore_duplicate:
for i, m in enumerate(messages):
if m[1] == message:
if m[0] != category:
messages[i] = new_message_tuple
session.save()
return # Original message found, so exit early.
messages.append(new_message_tuple)
session.save()
def pop_messages(self):
"""Return all accumulated messages and delete them from the session.
The return value is a list of ``Message`` objects.
"""
from pylons import session
messages = session.pop(self.session_key, [])
session.save()
return [Message(*m) for m in messages]
| {
"repo_name": "grepme/CMPUT410Lab01",
"path": "virt_env/virt1/lib/python2.7/site-packages/WebHelpers-1.3-py2.7.egg/webhelpers/pylonslib/flash.py",
"copies": "3",
"size": "12551",
"license": "apache-2.0",
"hash": 5548771639208525000,
"line_mean": 32.3803191489,
"line_max": 91,
"alpha_frac": 0.6444904788,
"autogenerated": false,
"ratio": 4.245940460081191,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.002810647121481886,
"num_lines": 376
} |
"""accumulator.py: transcription of GeographicLib::Accumulator class."""
# accumulator.py
#
# This is a rather literal translation of the GeographicLib::Accumulator class
# from to python. See the documentation for the C++ class for more information
# at
#
# http://geographiclib.sourceforge.net/html/annotated.html
#
# Copyright (c) Charles Karney (2011) <charles@karney.com> and licensed under
# the MIT/X11 License. For more information, see
# http://geographiclib.sourceforge.net/
######################################################################
from geographiclib.geomath import Math
class Accumulator(object):
    """Like math.fsum, but allows a running sum.

    The total is held as a pair of doubles ``(_s, _t)`` whose exact sum is
    the accumulated value, in decreasing order and non-adjacent (an
    error-free "two-double" representation in the style of Shewchuk).
    """

    def Set(self, y):
        """Set value from argument (another Accumulator or a number)."""
        if type(self) == type(y):
            # Copy the two-double state of another Accumulator verbatim.
            self._s, self._t = y._s, y._t
        else:
            self._s, self._t = float(y), 0.0

    def __init__(self, y = 0.0):
        """Initialize the running sum to *y* (default 0)."""
        self.Set(y)

    def Add(self, y):
        """Add a value.

        NOTE: the statement order below is essential to the compensated
        summation — do not reorder these operations.
        """
        # Here's Shewchuk's solution...
        # hold exact sum as [s, t, u]
        y, u = Math.sum(y, self._t)                # Accumulate starting at
        self._s, self._t = Math.sum(y, self._s)    # least significant end
        # Start is _s, _t decreasing and non-adjacent.  Sum is now (s + t + u)
        # exactly with s, t, u non-adjacent and in decreasing order (except
        # for possible zeros).  The following code tries to normalize the
        # result.  Ideally, we want _s = round(s+t+u) and _u = round(s+t+u -
        # _s).  The following does an approximate job (and maintains the
        # decreasing non-adjacent property).  Here are two "failures" using
        # 3-bit floats:
        #
        # Case 1: _s is not equal to round(s+t+u) -- off by 1 ulp
        # [12, -1] - 8 -> [4, 0, -1] -> [4, -1] = 3 should be [3, 0] = 3
        #
        # Case 2: _s+_t is not as close to s+t+u as it should be
        # [64, 5] + 4 -> [64, 8, 1] -> [64, 8] = 72 (off by 1)
        # should be [80, -7] = 73 (exact)
        #
        # "Fixing" these problems is probably not worth the expense.  The
        # representation inevitably leads to small errors in the accumulated
        # values.  The additional errors illustrated here amount to 1 ulp of
        # the less significant word during each addition to the Accumulator
        # and an additional possible error of 1 ulp in the reported sum.
        #
        # Incidentally, the "ideal" representation described above is not
        # canonical, because _s = round(_s + _t) may not be true.  For
        # example, with 3-bit floats:
        #
        # [128, 16] + 1 -> [160, -16] -- 160 = round(145).
        # But [160, 0] - 16 -> [128, 16] -- 128 = round(144).
        #
        if self._s == 0:         # This implies t == 0,
            self._s = u          # so result is u
        else:
            self._t += u         # otherwise just accumulate u to t.

    def Sum(self, y = 0.0):
        """Return sum + y, leaving the accumulator itself unchanged."""
        if y == 0.0:
            return self._s
        else:
            # Work on a throwaway copy so that *self* is not modified.
            b = Accumulator(self)
            b.Add(y)
            return b._s

    def Negate(self):
        """Negate sum (both components of the representation)."""
        self._s *= -1
        self._t *= -1
| {
"repo_name": "devbharat/gtsam",
"path": "gtsam/3rdparty/GeographicLib/python/geographiclib/accumulator.py",
"copies": "5",
"size": "3000",
"license": "bsd-3-clause",
"hash": -2649415403908920300,
"line_mean": 36.037037037,
"line_max": 79,
"alpha_frac": 0.577,
"autogenerated": false,
"ratio": 3.264417845484222,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6341417845484222,
"avg_score": null,
"num_lines": null
} |
"""Accumuldated days"""
import datetime
from pandas.io.sql import read_sql
from pyiem.plot import figure_axes
from pyiem.util import get_autoplot_context, get_dbconn
from pyiem.exceptions import NoDataFound
PDICT = {
"high_above": "High Temperature At or Above",
"high_below": "High Temperature Below",
"low_above": "Low Temperature At or Above",
"low_below": "Low Temperature Below",
}
PDICT2 = {"jan1": "January 1", "jul1": "July 1"}
def get_description():
    """Return a dict describing how to call this plotter."""
    desc = {"data": True, "cache": 86400}
    desc["description"] = """This plot displays the accumulated number of days
    that the high or low temperature was above or below some threshold.
    """
    desc["arguments"] = [
        {
            "type": "station",
            "name": "station",
            "default": "IATDSM",
            "label": "Select Station:",
            "network": "IACLIMATE",
        },
        {
            "type": "select",
            "name": "var",
            "default": "high_above",
            "label": "Which Metric",
            "options": PDICT,
        },
        {"type": "int", "name": "threshold", "default": 32, "label": "Threshold (F)"},
        {
            "type": "select",
            "name": "split",
            "default": "jan1",
            "options": PDICT2,
            "label": "Where to split the year?",
        },
        {
            "type": "year",
            "name": "year",
            "default": datetime.date.today().year,
            "label": "Year to Highlight in Chart",
        },
    ]
    return desc
def highcharts(fdict):
    """Return a Highcharts configuration dict for this autoplot request.

    ``fdict`` holds the raw form values; they are normalized through
    get_autoplot_context() for data access, but the raw ``fdict`` is still
    consulted (with defaults) for the title and legend strings below.
    """
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    varname = ctx["var"]
    df = get_data(ctx)
    j = dict()
    j["tooltip"] = {
        "shared": True,
        "headerFormat": (
            '<span style="font-size: 10px">{point.key: %b %e}</span><br/>'
        ),
    }
    j["title"] = {
        "text": "%s [%s] %s %sF"
        % (
            ctx["_nt"].sts[station]["name"],
            station,
            PDICT[varname],
            int(fdict.get("threshold", 32)),
        )
    }
    j["yAxis"] = {"title": {"text": "Accumulated Days"}, "startOnTick": False}
    j["xAxis"] = {
        "type": "datetime",
        # don't display the dummy year (all points are mapped onto 2001)
        "dateTimeLabelFormats": {
            "month": "%e. %b",
            "year": "%b",
        },
        "title": {"text": "Date"},
    }
    j["chart"] = {"zoomType": "xy", "type": "line"}
    avgs = []
    ranges = []
    thisyear = []
    for doy, row in df.iterrows():
        # Map day-of-year onto the dummy year 2001, then convert to epoch
        # milliseconds as required by a Highcharts datetime axis.
        ts = datetime.date(2001, 1, 1) + datetime.timedelta(days=(doy - 1))
        ticks = (ts - datetime.date(1970, 1, 1)).total_seconds() * 1000.0
        avgs.append([ticks, row["avg"]])
        ranges.append([ticks, row["min"], row["max"]])
        # NaN (no data for the highlighted year) fails this comparison
        # and is therefore skipped.
        if row["thisyear"] >= 0:
            thisyear.append([ticks, row["thisyear"]])
    # Legend label: a single year for a Jan-1 split, or a spanning
    # "YYYY - YYYY" label when the year is split at July 1.
    lbl = (
        "%s" % (fdict.get("year", 2015),)
        if fdict.get("split", "jan1") == "jan1"
        else "%s - %s"
        % (int(fdict.get("year", 2015)) - 1, int(fdict.get("year", 2015)))
    )
    j["series"] = [
        {
            "name": "Average",
            "data": avgs,
            "zIndex": 1,
            "tooltip": {"valueDecimals": 2},
            "marker": {
                "fillColor": "white",
                "lineWidth": 2,
                "lineColor": "red",
            },
        },
        {
            "name": lbl,
            "data": thisyear,
            "zIndex": 1,
            "marker": {
                "fillColor": "blue",
                "lineWidth": 2,
                "lineColor": "green",
            },
        },
        {
            # Shaded min/max envelope tied to the previous series' legend entry.
            "name": "Range",
            "data": ranges,
            "type": "arearange",
            "lineWidth": 0,
            "linkedTo": ":previous",
            "color": "tan",
            "fillOpacity": 0.3,
            "zIndex": 0,
        },
    ]
    return j
def get_data(ctx):
    """Query the climate database and return a per-day-of-year DataFrame.

    Columns: ``min``/``avg``/``max`` of the accumulated hit count across all
    seasons, ``thisyear`` for the highlighted year, and a ``datestr`` label;
    the index is the (split-adjusted) day of year.
    """
    pgconn = get_dbconn("coop")
    station = ctx["station"]
    threshold = ctx["threshold"]
    varname = ctx["var"]
    year = ctx["year"]
    split = ctx["split"]
    table = "alldata_%s" % (station[:2],)
    # A Jul-1 split shifts every date by ~half a year so each "season"
    # spans Jul..Jun instead of Jan..Dec.
    days = 0 if split == "jan1" else 183
    opp = " < " if varname.find("_below") > 0 else " >= "
    col = "high" if varname.find("high") == 0 else "low"
    # We need to do some magic to compute the start date, since we don't want
    # an incomplete year mucking things up
    sts = ctx["_nt"].sts[station]["archive_begin"]
    if sts is None:
        raise NoDataFound("Unknown station metadata.")
    if sts.month > 1:
        # Archive starts mid-year: begin with the next full year.
        sts = sts + datetime.timedelta(days=365)
        sts = sts.replace(month=1, day=1)
    if split == "jul1":
        sts = sts.replace(month=7, day=1)
    # NOTE: the f-string interpolates trusted identifiers (col/opp/table)
    # while the ``%s`` markers are driver-side parameters bound via
    # ``params`` below -- their order must match the tuple exactly.
    df = read_sql(
        f"""
    with data as (
        select extract(year from day + '%s days'::interval) as season,
        extract(doy from day + '%s days'::interval) as doy,
        (case when {col} {opp} %s then 1 else 0 end) as hit
        from {table}
        where station = %s and day >= %s),
    agg1 as (
        SELECT season, doy,
        sum(hit) OVER (PARTITION by season ORDER by doy ASC) from data)
    SELECT doy - %s as doy, min(sum), avg(sum), max(sum),
    max(case when season = %s then sum else null end) as thisyear from agg1
    WHERE doy < 365 GROUP by doy ORDER by doy ASC
    """,
        pgconn,
        params=(days, days, threshold, station, sts, days, year),
        index_col=None,
    )
    # Human-readable "day month" label computed on a dummy non-leap year.
    df["datestr"] = df["doy"].apply(
        lambda x: (
            datetime.date(2001, 1, 1) + datetime.timedelta(days=x)
        ).strftime("%-d %b")
    )
    df.set_index("doy", inplace=True)
    return df
def plotter(fdict):
    """Build the matplotlib figure (and companion DataFrame) for a request."""
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    threshold = ctx["threshold"]
    varname = ctx["var"]
    year = ctx["year"]
    df = get_data(ctx)
    if df.empty:
        raise NoDataFound("Error, no results returned!")
    title = ("%s [%s]\n" r"%s %.0f$^\circ$F") % (
        ctx["_nt"].sts[station]["name"],
        station,
        PDICT[varname],
        threshold,
    )
    (fig, ax) = figure_axes(title=title)
    ax.plot(df.index.values, df["avg"], c="k", lw=2, label="Average")
    ax.plot(df.index.values, df["thisyear"], c="g", lw=2, label="%s" % (year,))
    ax.plot(df.index.values, df["max"], c="r", lw=2, label="Max")
    ax.plot(df.index.values, df["min"], c="b", lw=2, label="Min")
    ax.legend(ncol=1, loc=2)
    # Tick each first-of-month, labelling with the month abbreviation
    # (day-of-year mapped onto the dummy year 2000).
    xticks = []
    xticklabels = []
    for x in range(int(df.index.min()) - 1, int(df.index.max()) + 1):
        ts = datetime.date(2000, 1, 1) + datetime.timedelta(days=x)
        if ts.day == 1:
            xticks.append(x)
            xticklabels.append(ts.strftime("%b"))
    ax.set_xticks(xticks)
    ax.set_xticklabels(xticklabels)
    ax.grid(True)
    ax.set_xlim(int(df.index.min()) - 1, int(df.index.max()) + 1)
    ax.set_ylabel("Accumulated Days")
    return fig, df
if __name__ == "__main__":
plotter(dict())
| {
"repo_name": "akrherz/iem",
"path": "htdocs/plotting/auto/scripts100/p135.py",
"copies": "1",
"size": "7121",
"license": "mit",
"hash": 1867685346489167400,
"line_mean": 29.1737288136,
"line_max": 79,
"alpha_frac": 0.5062491223,
"autogenerated": false,
"ratio": 3.451769268056229,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44580183903562287,
"avg_score": null,
"num_lines": null
} |
# Numerical tolerance for cell comparisons; overridden from the CLI in main().
accuracy = 1e-8


class Cell:
    """A VTK cell identified by its bounding box and its scalar value ``q``.

    Two cells compare equal when their q values and every bound agree to
    within the module-level ``accuracy`` tolerance.
    """

    def __init__(self, vtkCell, bounds, q):
        self.vtkCell = vtkCell
        self.bounds = bounds
        self.q = q

    def __eq__(self, other):
        global accuracy
        # BUG FIX: the original referenced the undefined names ``false``/
        # ``true`` and the misspelled ``sel`` -- every call raised NameError.
        if abs(self.q - other.q) > accuracy:
            return False
        if len(self.bounds) != len(other.bounds):
            return False
        # ``range`` instead of ``xrange``: identical behavior here and also
        # valid under Python 3.
        for i in range(len(self.bounds)):
            if abs(self.bounds[i] - other.bounds[i]) > accuracy:
                return False
        return True

    def __cmp__(self, other):
        # Python 2 ordering protocol (used by list.sort in main()).
        # Order by q first, then lexicographically by bounds.
        global accuracy
        if self.q - other.q > accuracy:
            return 1
        elif other.q - self.q > accuracy:
            return -1
        if len(self.bounds) != len(other.bounds):
            # BUG FIX: was the undefined name ``false``; 0 keeps the
            # intended "treat as equal" meaning for mismatched lengths.
            return 0
        for i in range(len(self.bounds)):
            if self.bounds[i] - other.bounds[i] > accuracy:
                return 1
            elif other.bounds[i] - self.bounds[i] > accuracy:
                return -1
        return 0

    def __str__(self):
        return "q: " + str(self.q) + " bounds: " + str(self.bounds)
def parseRange(argument):
    """Parse "a:b[:step]" into range(a, b[, step]); a bare number N yields
    the single-element range(N, N+1)."""
    if ':' not in argument:
        single = int(argument)
        return range(single, single + 1)
    pieces = [int(piece) for piece in argument.split(':')]
    return range(*pieces)
def readCellsFromFile(cells, path, iteration, rank):
    """Append one VTK file's cells (as Cell objects) to *cells*.

    ``path`` is a template containing the placeholders __ITERATION__ and
    __RANK__.  Returns the number of cells read, or 0 when the file for
    this iteration/rank does not exist.
    """
    import vtk
    import os.path
    filename = path.replace('__ITERATION__', str(iteration)).replace('__RANK__', str(rank))
    if os.path.exists(filename):
        reader = vtk.vtkDataSetReader()
        reader.SetFileName(filename)
        reader.SetReadAllScalars(True)
        reader.Update()
        grid = reader.GetOutput()
        numberOfCells = grid.GetNumberOfCells()
        cellData = grid.GetCellData()
        # Scalar field "q0" provides each cell's comparison value.
        qs = cellData.GetScalars("q0")
        for cellId in xrange(numberOfCells):
            vtkCell = grid.GetCell(cellId)
            q = qs.GetTuple(cellId)[0]
            # GetBounds()[:] copies the bounds tuple so it outlives the reader.
            cells.append(Cell(vtkCell, vtkCell.GetBounds()[:], q))
        return numberOfCells
    else:
        return 0
def findClosestMatch(cell, cells):
    """Return the index in *cells* of the cell most similar to *cell*.

    Similarity metric: sqrt(10 * (dq)^2 + sum of squared bound deltas),
    weighting the scalar value more heavily than geometry.  Returns -1
    for an empty list.
    """
    import math
    bestIndex = -1
    # BUG FIX: the original initialized the best distance to the arbitrary
    # magic number 1000000, silently returning -1 whenever every candidate
    # was farther away than that.  float('inf') removes the cap.
    minDistance = float('inf')
    for index, candidate in enumerate(cells):
        distance = 0.0
        for own, theirs in zip(cell.bounds, candidate.bounds):
            distance += (own - theirs) ** 2
        distance = math.sqrt((candidate.q - cell.q) ** 2 * 10 + distance)
        if distance < minDistance:
            minDistance = distance
            bestIndex = index
    return bestIndex
def findCellInList(cell, cells):
    """Binary-search the sorted list *cells* for *cell*; return its index
    or -1 when absent.  Relies on the elements' ordering comparisons."""
    lower = 0
    upper = len(cells)
    while upper > lower:
        # BUG FIX: use floor division so the midpoint stays an integer --
        # plain ``/`` yields a float index under Python 3 (or with
        # ``from __future__ import division``), crashing the list lookup.
        middle = (upper + lower) // 2
        middleCell = cells[middle]
        if middleCell < cell:
            lower = middle + 1
        elif middleCell > cell:
            upper = middle
        else:
            return middle
    return -1
def main():
from argparse import ArgumentParser
parser = ArgumentParser(description='Tool for comparing vtk output of parallel runs.')
parser.add_argument('path1', help='The path to the first set of vtk files. Use __ITERATION__ for iteration number and __RANK__ for rank number.')
parser.add_argument('path2', help='The path to the second set of vtk files. Use __ITERATION__ for iteration number and __RANK__ for rank number.')
parser.add_argument('iteration1', type=int, help='The iteration number of the first set of vtk files.')
parser.add_argument('ranks1', help='The range of ranks for the first set of vtk files. Define single number or min:max.')
parser.add_argument('iteration2', type=int, help='The iteration number of the second set of vtk files.')
parser.add_argument('ranks2', help='The range of ranks for the second set of vtk files. Define single number or min:max.')
parser.add_argument('accuracy', help='The accuracy for numerical equality.', type=float, nargs='?', const='1e-5')
arguments = parser.parse_args()
global accuracy
accuracy = arguments.accuracy
if arguments.path2 == 'SameAsPath1':
path2 = arguments.path1
else:
path2 = arguments.path2
#Loop through ranks1
cells1 = [] #set()
ranks1 = parseRange(arguments.ranks1)
for rank in ranks1:
print "1: Parsing rank...", rank
numberOfCells = readCellsFromFile(cells1, arguments.path1, arguments.iteration1, rank)
print "Read", numberOfCells, "cells."
print "1: Total number of cells:", len(cells1)
#Loop through ranks2
cells2 = [] #set()
ranks2 = parseRange(arguments.ranks2)
print ranks2
for rank in ranks2:
print "2: Parsing rank", rank
numberOfCells = readCellsFromFile(cells2, path2, arguments.iteration2, rank)
print "Read", numberOfCells, "cells."
print "2: Total number of cells:", len(cells2)
#Compare lists
if len(cells1) != len(cells2):
raise Exception("Number of cells do not match!")
cells1.sort()
cells2.sort()
for cell in cells1:
index = findCellInList(cell, cells2)
if index == -1:
bestMatch = findClosestMatch(cell, cells2)
if bestMatch == -1:
bestMatchString = ""
else:
bestMatchString = "Best match is " + str(cells2[bestMatch])
raise Exception("No matching cell for " + str(cell) + ". " + bestMatchString)
else:
del cells2[index]
print "All cells match"
if __name__=="__main__":
main()
| {
"repo_name": "unterweg/peanoclaw",
"path": "testscenarios/tools/compareResult.py",
"copies": "1",
"size": "5050",
"license": "bsd-3-clause",
"hash": 5339664846835976000,
"line_mean": 28.3604651163,
"line_max": 148,
"alpha_frac": 0.6441584158,
"autogenerated": false,
"ratio": 3.6462093862815883,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4790367802081588,
"avg_score": null,
"num_lines": null
} |
"""AccuracyHandler.py: generates gr"""
__author__ = "Scott Munro"
__copyright__ = "Copyright 2015"
import math
import pygame
import game_constants
class AccuracyHandler(object):
    """Tracks and draws an aiming spread that widens while the player turns
    and tightens while the aim is held steady."""

    # Maximum half-angle of the spread (radians).
    RANGE = math.pi / 24
    # Per-update shrink/grow step (radians).
    GROWTH_FACTOR = math.pi / 2880
    # Rotation change below this counts as "holding steady" (radians).
    FREEDOM = math.pi / 1000

    def __init__(self, screen, player):
        self.screen = screen  # display to blit onto
        self.player = player
        self.theta_last = player.rotation  # rotation seen on the last update
        self.growth = AccuracyHandler.RANGE  # current spread half-angle

    def update(self):
        # Shrink the spread while the aim is (nearly) still, grow it while
        # the player is turning; clamp to [0, RANGE].
        if math.fabs(self.theta_last - self.player.rotation) < AccuracyHandler.FREEDOM:
            self.growth -= AccuracyHandler.GROWTH_FACTOR
            if self.growth < 0:
                self.growth = 0
        else:
            # NOTE(review): the source applies GROWTH_FACTOR twice here --
            # presumably so the spread widens twice as fast as it recovers;
            # the dump's lost indentation makes the second increment's
            # placement an assumption to confirm against the original file.
            self.growth += AccuracyHandler.GROWTH_FACTOR
            self.growth += AccuracyHandler.GROWTH_FACTOR
        if self.growth > AccuracyHandler.RANGE:
            self.growth = AccuracyHandler.RANGE
        self.theta_last = self.player.rotation

    def draw_lines(self):
        # Spread boundaries on either side of the current aim direction.
        rotation1 = self.player.rotation + self.growth
        rotation2 = self.player.rotation - self.growth
        # print 'R1: '+str(rotation1)+'\tS: '+str(self.player.rotation)+'\tR2: '+str(rotation2)
        self.draw(rotation1, game_constants.WHITE)
        self.draw(rotation2, game_constants.RED)
        self.draw(self.player.rotation, game_constants.BLUE)

    def draw(self, rotation, color):
        # Draw a ray from the player's center to beyond the window edge.
        player_center = self.player.get_center()
        x_pos = game_constants.WINDOW_DIAGONAL*math.cos(rotation)
        y_pos = game_constants.WINDOW_DIAGONAL*math.sin(rotation)
        pygame.draw.line(self.screen, color, (player_center[0], player_center[1]), (player_center[0]+x_pos, player_center[1]+y_pos), 1)
| {
"repo_name": "scottnm/itchnscratch",
"path": "AccuracyHandler.py",
"copies": "1",
"size": "1747",
"license": "apache-2.0",
"hash": -4458856723749836300,
"line_mean": 31.9622641509,
"line_max": 135,
"alpha_frac": 0.6393817974,
"autogenerated": false,
"ratio": 3.445759368836292,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9579843027440651,
"avg_score": 0.001059627759128314,
"num_lines": 53
} |
"""Accuracy"""
# Authors: Nicolas Pinto <nicolas.pinto@gmail.com>
# Nicolas Poilvert <nicolas.poilvert@gmail.com>
#
# License: BSD
__all__ = ['accuracy']
import numpy as np
def accuracy(y_true, y_pred, balanced=False):
    """Compute the Accuracy of the predictions (zero-one score).

    Parameters
    ----------
    y_true: array, shape = [n_samples]
        True values; interpreted as strictly positive or not
        (i.e. converted to binary).
    y_pred: array, shape = [n_samples]
        Predicted values; interpreted as strictly positive or not
        (i.e. converted to binary).
    balanced: bool, optional (default=False)
        If True, return the balanced accuracy (equal weight for the
        positive and negative classes).

    Returns
    -------
    acc: float
        Accuracy (zero-one score).
    """
    assert len(y_true) == len(y_pred)
    assert np.isfinite(y_true).all()
    assert np.isfinite(y_pred).all()

    # -- binarize both arguments: strictly positive -> True
    truth = np.array(y_true) > 0
    assert truth.ndim == 1
    guess = np.array(y_pred) > 0
    assert guess.ndim == 1

    if not balanced:
        return (truth == guess).mean()

    positives = truth > 0
    negatives = ~positives
    acc_pos = (truth[positives] == guess[positives]).mean()
    acc_neg = (truth[negatives] == guess[negatives]).mean()
    return (acc_pos + acc_neg) / 2.
| {
"repo_name": "npinto/bangmetric",
"path": "bangmetric/accuracy.py",
"copies": "1",
"size": "1401",
"license": "bsd-3-clause",
"hash": -1504400192466673000,
"line_mean": 24.0178571429,
"line_max": 68,
"alpha_frac": 0.5824411135,
"autogenerated": false,
"ratio": 3.3199052132701423,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44023463267701424,
"avg_score": null,
"num_lines": null
} |
"""Accuracy tests against data pulled from HORIZONS."""
from numpy import max
from skyfield import api
from skyfield.constants import AU_M
from skyfield.io import download
from skyfield.jpllib import Kernel
# Unit-conversion constants used by the tolerance arguments below.
one_second = 1.0 / 24.0 / 60.0 / 60.0      # one second, in days
arcsecond = 1.0 / 60.0 / 60.0              # one arcsecond, in degrees
ra_arcsecond = 24.0 / 360.0 / 60.0 / 60.0  # one arcsecond of RA, in hours
meter = 1.0 / AU_M                         # one meter, in astronomical units

# Locations of the JPL ephemeris kernels these tests download.
base = 'http://naif.jpl.nasa.gov/pub/naif/generic_kernels/spk'
de430_url = base + '/planets/de430.bsp'
de431_url = base + '/planets/de431.bsp'
jup310_url = base + '/satellites/jup310.bsp'
def compare(value, expected_value, epsilon):
    """Assert that *value* matches *expected_value* to within *epsilon*.

    Array-like inputs (anything with a ``shape``) are compared elementwise,
    requiring the largest absolute deviation to stay within the tolerance.
    """
    deviation = abs(value - expected_value)
    if hasattr(value, 'shape') or hasattr(expected_value, 'shape'):
        deviation = max(deviation)
    assert deviation <= epsilon
def test_jupiter1():
    """Heliocentric ecliptic lat/lon of Jupiter on 1980-01-01 vs. HORIZONS."""
    astrometric = api.sun(utc=(1980, 1, 1, 0, 0)).observe(api.jupiter)
    hlat, hlon, d = astrometric.ecliptic_latlon()
    compare(hlat.degrees, 1.013, 0.001)
    compare(hlon.degrees, 151.3229, 0.001)
def test_callisto_geometry():
    """Earth->Callisto geometric position/velocity from jup310 vs. HORIZONS."""
    k = Kernel(download(jup310_url))
    a = k.earth.geometry_of(k.callisto).at(tdb=2471184.5)
    # Expected vectors come from a HORIZONS run; tolerances are in AU.
    compare(a.position.au,
            [-4.884815926454119E+00, -3.705745549073268E+00, -1.493487818022234E+00],
            0.001 * meter)
    compare(a.velocity.au_per_d,
            [9.604665478763035E-03, -1.552997751083403E-02, -6.678445860769302E-03],
            0.000001 * meter)
def test_callisto_astrometric():
    """Astrometric RA/dec/distance of Callisto from Earth vs. HORIZONS."""
    k = Kernel(download(jup310_url))
    a = k.earth.observe(k.callisto).at(utc=(2053, 10, 9))
    ra, dec, distance = a.radec()
    compare(ra._degrees, 217.1839292, 0.001 * arcsecond)
    compare(dec.degrees, -13.6892791, 0.001 * arcsecond)
    compare(distance.au, 6.31079291776184, 0.1 * meter)
def test_boston_geometry():
    """Geometry between a Boston Topos and the Earth center vs. HORIZONS."""
    k = Kernel(download(de430_url))
    # delta_t pins UT1; x/y are the polar-motion coordinates for the date.
    jd = api.JulianDate(tdb=(2015, 3, 2), delta_t=67.185390 + 0.5285957)
    boston = api.Topos((42, 21, 24.1), (-71, 3, 24.8), x=0.003483, y=0.358609)
    a = boston.geometry_of(k.earth).at(jd)
    compare(a.position.km,
            [-1.764697476371664E+02, -4.717131288041386E+03, -4.274926422016179E+03],
            0.0027)  # TODO: try to get this < 1 meter
def test_moon_from_boston_geometry():
    """Geometry between a Boston Topos and the Moon vs. HORIZONS."""
    k = Kernel(download(de430_url))
    jd = api.JulianDate(tdb=(2015, 3, 2), delta_t=67.185390 + 0.5285957)
    boston = api.Topos((42, 21, 24.1), (-71, 3, 24.8), x=0.003483, y=0.358609)
    a = boston.geometry_of(k.moon).at(jd)
    compare(a.position.au,
            [-1.341501206552443E-03, 2.190483327459023E-03, 6.839177007993498E-04],
            1.7 * meter)  # TODO: improve this
def test_moon_from_boston_astrometric():
    """Topocentric astrometric RA/dec/distance of the Moon vs. HORIZONS."""
    k = Kernel(download(de430_url))
    jd = api.JulianDate(tdb=(2015, 3, 2), delta_t=67.185390 + 0.5285957)
    boston = api.Topos((42, 21, 24.1), (-71, 3, 24.8), x=0.003483, y=0.358609)
    a = boston.observe(k.moon).at(jd)
    ra, dec, distance = a.radec()
    compare(ra._degrees, 121.4796470, 0.001 * arcsecond)
    compare(dec.degrees, 14.9108450, 0.001 * arcsecond)
    compare(distance.au, 0.00265828588792, 1.4 * meter)  # TODO: improve this
| {
"repo_name": "exoanalytic/python-skyfield",
"path": "skyfield/tests/test_against_horizons.py",
"copies": "1",
"size": "3037",
"license": "mit",
"hash": 3301222025952200000,
"line_mean": 39.4933333333,
"line_max": 79,
"alpha_frac": 0.6595324333,
"autogenerated": false,
"ratio": 2.4452495974235107,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8586756917841067,
"avg_score": 0.003605022576488753,
"num_lines": 75
} |
"""A central mechanism for settings with defaults.
"""
from django.conf import settings
threadlocal_settings_defaults = {
'DEBUG_DOMAIN' : None, # formatted as "dev" or "dev=com", which means to try substituting 'com' for 'dev'
'AUTO_WWW' : True # try with or without www
}
def add_setting_defaults(newdefaults):
    """
    This method can be used by other applications to define their
    default values.

    newdefaults has to be a dictionary containing name -> value of
    the settings.
    """
    # Merge into (overwriting duplicates in) the shared module-level table.
    threadlocal_settings_defaults.update(newdefaults)
def set_threadlocal_setting(name, value):
    """Store *name* -> *value* in the settings-backed override table,
    creating the table on first use."""
    try:
        store = settings.THREADED_MULTIHOST_SETTINGS
    except AttributeError:
        store = settings.THREADED_MULTIHOST_SETTINGS = {}
    store[name] = value
def get_threadlocal_setting(name, default_value = None):
    """Look *name* up in the runtime override table, falling back first to
    the registered defaults and finally to *default_value*."""
    fallback = threadlocal_settings_defaults.get(name, default_value)
    try:
        overrides = settings.THREADED_MULTIHOST_SETTINGS
    except AttributeError:
        return fallback
    return overrides.get(name, fallback)
| {
"repo_name": "diver-in-sky/django-threaded-multihost",
"path": "threaded_multihost/threadlocal_settings.py",
"copies": "2",
"size": "1082",
"license": "bsd-3-clause",
"hash": -1224817809742286300,
"line_mean": 31.7878787879,
"line_max": 113,
"alpha_frac": 0.7255083179,
"autogenerated": false,
"ratio": 3.7832167832167833,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.034498183900097776,
"num_lines": 33
} |
# Read a 3 x 4 array of integers from the user, collecting row sums and
# per-row value lists in the same pass.
print "Hola! Necesito que me ayudes a llenar un arreglo de 3 X 4 de enteros"
arreglo = []
sumaFila = []
filaPorArreglos = []
for i in range(3):
    arregloPrimerNivel = []
    suma = 0
    filaArreglo = []
    for j in range(4):
        mensaje = "Dame el numero entero que va en la posicion: "+str(i)+","+str(j)+"\n"
        arregloPrimerNivel.append(int(raw_input(mensaje)))
        suma += arregloPrimerNivel[j]
        filaArreglo.append(arregloPrimerNivel[j])
    sumaFila.append(suma)
    filaPorArreglos.append(filaArreglo)
    arreglo.append(arregloPrimerNivel)
# Print each row as "a + b + c + d = sum" plus the row's max and min.
for i in range(3):
    for j in range(4):
        if j == 3:
            print str(arreglo[i][j]),
        else:
            print str(arreglo[i][j]), " + ",
    print " = ", sumaFila[i], " / Mayor Fila = ", max(filaPorArreglos[i]), " / Menor Fila = ", min(filaPorArreglos[i])
# Column analysis: gather each column's values and sum.
sumaColumna = []
columnaPorArreglos = []
for i in range(4):
    suma = 0
    columnaArreglo = []
    for j in range(3):
        columnaArreglo.append(arreglo[j][i])
        suma += arreglo[j][i]
    sumaColumna.append(suma)
    columnaPorArreglos.append(columnaArreglo)
for i in range(4):
    print "\nResultados columna: ", i+1
    print "Suma de los numeros: ", sumaColumna[i]
    print "Mayor de los numeros: ", max(columnaPorArreglos[i])
    print "Menor de los numeros: ", min(columnaPorArreglos[i])
print "-" * 60
# Column analysis (end)
# Overall results.
# NOTE(review): max(max(arreglo)) takes the max of the lexicographically
# greatest ROW, which is not necessarily the global maximum (e.g. rows
# [3, 0] vs [2, 9]); same issue for min(min(arreglo)).
# NOTE(review): "\M" in the string below is not an escape sequence, so a
# literal backslash is printed -- the author probably meant "\n".
# Runtime strings are left untouched here.
print "\n"*2
print "-" * 60
print "Resultados GENERALES..."
print "\nMayor de todos los numeros: ", max(max(arreglo))
print "\Menor de todos los numeros: ", min(min(arreglo))
print "-" * 60
# Overall results (end)
#Analizando TODO | {
"repo_name": "Dreedi/ApoyoAsesoriasProgramacionBatizArregloBidimensionalYGato",
"path": "Python/Arreglo Bidimensional de 3 X 4/ArregloBidimensionalOptimizado.py",
"copies": "1",
"size": "1514",
"license": "mit",
"hash": -2868117991117848000,
"line_mean": 28.1346153846,
"line_max": 115,
"alpha_frac": 0.6875825627,
"autogenerated": false,
"ratio": 2.2974203338391503,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.348500289653915,
"avg_score": null,
"num_lines": null
} |
# Read a 3 x 4 array of integers from the user.
print "Hola! Necesito que me ayudes a llenar un arreglo de 3 X 4 de enteros"
print "\n"*2
print "-" * 60
arreglo = []
for i in range(3):
    arregloPrimerNivel = []
    for j in range(4):
        mensaje = "Dame el numero entero que va en la posicion: "+str(i)+","+str(j)+"\n"
        arregloPrimerNivel.append(int(raw_input(mensaje)))
    arreglo.append(arregloPrimerNivel)
print "-" * 60
print "\n"*2
print "-" * 60
# Echo the raw list and a matrix-style rendering of it.
print "Me has dado un arreglo con el siguiente formato:\n", arreglo
print "El cual si lo pintamos en una matriz se representaria asi:\n"
for i in range(3):
    for j in range(4):
        print "| ", str(arreglo[i][j]), " |",
    print ""
print "-" * 60
# Array input (end)
# Row analysis: sum and collect each row's values.
print "\n"*2
print "-" * 60
print "Resultados por fila ..."
sumaFila = []
filaPorArreglos = []
for i in range(3):
    suma = 0
    filaArreglo = []
    for j in range(4):
        filaArreglo.append(arreglo[i][j])
        suma += arreglo[i][j]
    sumaFila.append(suma)
    filaPorArreglos.append(filaArreglo)
# Print each row as "a + b + c + d = sum" plus the row's max and min.
for i in range(3):
    for j in range(4):
        if j == 3:
            print str(arreglo[i][j]),
        else:
            print str(arreglo[i][j]), " + ",
    print " = ", sumaFila[i], " / Mayor Fila = ", max(filaPorArreglos[i]), " / Menor Fila = ", min(filaPorArreglos[i])
print "-" * 60
# Row analysis (end)
# Column analysis: sum and collect each column's values.
print "\n"*2
print "-" * 60
print "Resultados por columna ..."
sumaColumna = []
columnaPorArreglos = []
for i in range(4):
    suma = 0
    columnaArreglo = []
    for j in range(3):
        columnaArreglo.append(arreglo[j][i])
        suma += arreglo[j][i]
    sumaColumna.append(suma)
    columnaPorArreglos.append(columnaArreglo)
for i in range(4):
    print "\nResultados columna: ", i+1
    print "Suma de los numeros: ", sumaColumna[i]
    print "Mayor de los numeros: ", max(columnaPorArreglos[i])
    print "Menor de los numeros: ", min(columnaPorArreglos[i])
print "-" * 60
# Column analysis (end)
# Overall results.
# NOTE(review): max(max(arreglo)) / min(min(arreglo)) operate on the
# lexicographically extreme ROW first, which is not necessarily the global
# extreme value.  Also, "\M" below prints a literal backslash (probably
# "\n" was intended).  Runtime strings are left untouched here.
print "\n"*2
print "-" * 60
print "Resultados GENERALES..."
print "\nMayor de todos los numeros: ", max(max(arreglo))
print "\Menor de todos los numeros: ", min(min(arreglo))
print "-" * 60
# Overall results (end)
#Analizando TODO | {
"repo_name": "Dreedi/ApoyoAsesoriasProgramacionBatizArregloBidimensionalYGato",
"path": "Python/Arreglo Bidimensional de 3 X 4/ArregloBidimensional.py",
"copies": "1",
"size": "2067",
"license": "mit",
"hash": 91265175392913780,
"line_mean": 25.5128205128,
"line_max": 115,
"alpha_frac": 0.6695694243,
"autogenerated": false,
"ratio": 2.3813364055299537,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8440805900270818,
"avg_score": 0.022019985911827186,
"num_lines": 78
} |
# A CFFI version of numpy/linalg/lapack_module.c
import sys, os
import warnings
try:
import cffi
use_cffi = True
except ImportError:
use_cffi = False
if use_cffi:
import numpy as np
# dtype has not been imported yet
from numpy.core.multiarray import dtype
class Dummy(object):
pass
nt = Dummy()
nt.int32 = dtype('int32')
nt.int8 = dtype('int8')
nt.float32 = dtype('float32')
nt.float64 = dtype('float64')
nt.complex64 = dtype('complex64')
nt.complex128 = dtype('complex128')
from numpy.core.umath import frompyfunc
__version__ = '0.1.4'
macros = {'sfx': '_', 'pfx': ''}
ffi = cffi.FFI()
# The next section is a hack to find the lapack implementation, loosly based on
# numpy.distutils.system_info.get_info. The idea is to use a site.cfg file to specify
# where the shared object is located. Note that we need the lapack (high-level) interface,
# they in turn call a low-level implementation maybe using blas or atlas.
# This has not been tested on OSX
_C = None
from numpy.distutils import system_info
# temporarily mess with environ
saved_environ = os.environ.copy()
if sys.platform == 'win32':
ld_library_path = 'PATH'
so_prefix = ''
so_suffix = 'dll'
else:
ld_library_path = 'LD_LIBRARY_PATH'
so_prefix = 'lib'
so_suffix = 'so'
for lapack, prefix, suffix in [ \
['openblas_lapack', '', '_'],
['lapack_mkl', '', '_' ],
['lapack', '', '_'],
['lapack_lite', '', '_'],
]:
si = getattr(system_info, 'lapack_info')()
libs = si.get_lib_dirs()
if len(libs) > 0:
os.environ[ld_library_path] = os.pathsep.join(libs + [os.environ.get(ld_library_path, '')])
try:
_C = ffi.dlopen(lapack)
macros['sfx'] = suffix
macros['pfx'] = prefix
break
except Exception as e:
pass
# workaround for a distutils bugs where some env vars can
# become longer and longer every time it is used
for key, value in saved_environ.items():
if os.environ.get(key) != value:
os.environ[key] = value
if _C is None:
shared_name = os.path.abspath(os.path.dirname(__file__)) + '/' + \
so_prefix + 'lapack_lite.' + so_suffix
if not os.path.exists(shared_name):
# cffi should support some canonical name formatting like
# distutils.ccompiler.library_filename()
raise ValueError('could not find "%s", perhaps the name is slightly off' % shared_name)
try:
_C = ffi.dlopen(shared_name)
warnings.warn('tuned lapack (openblas, atlas ...) not found, using lapack_lite')
except:
warnings.warn("no lapack nor lapack_lite shared object available, will try cpyext version next")
use_cffi = False
if not use_cffi:
raise NotImplementedError("numpy installation failure: no lapack_lite compiled python module and no lapack shared object")
ffi.cdef('''
/*
* LAPACK functions
*/
typedef struct {{ float r, i; }} f2c_complex;
typedef struct {{ double r, i; }} f2c_doublecomplex;
/* typedef long int (*L_fp)(); */
extern int
{pfx}sgeev{sfx}(char *jobvl, char *jobvr, int *n,
float a[], int *lda, float wr[], float wi[],
float vl[], int *ldvl, float vr[], int *ldvr,
float work[], int lwork[],
int *info);
extern int
{pfx}dgeev{sfx}(char *jobvl, char *jobvr, int *n,
double a[], int *lda, double wr[], double wi[],
double vl[], int *ldvl, double vr[], int *ldvr,
double work[], int lwork[],
int *info);
extern int
{pfx}cgeev{sfx}(char *jobvl, char *jobvr, int *n,
f2c_doublecomplex a[], int *lda,
f2c_doublecomplex w[],
f2c_doublecomplex vl[], int *ldvl,
f2c_doublecomplex vr[], int *ldvr,
f2c_doublecomplex work[], int *lwork,
double rwork[],
int *info);
extern int
{pfx}zgeev{sfx}(char *jobvl, char *jobvr, int *n,
f2c_doublecomplex a[], int *lda,
f2c_doublecomplex w[],
f2c_doublecomplex vl[], int *ldvl,
f2c_doublecomplex vr[], int *ldvr,
f2c_doublecomplex work[], int *lwork,
double rwork[],
int *info);
extern int
{pfx}ssyevd{sfx}(char *jobz, char *uplo, int *n,
float a[], int *lda, float w[], float work[],
int *lwork, int iwork[], int *liwork,
int *info);
extern int
{pfx}dsyevd{sfx}(char *jobz, char *uplo, int *n,
double a[], int *lda, double w[], double work[],
int *lwork, int iwork[], int *liwork,
int *info);
extern int
{pfx}cheevd{sfx}(char *jobz, char *uplo, int *n,
f2c_complex a[], int *lda,
float w[], f2c_complex work[],
int *lwork, float rwork[], int *lrwork, int iwork[],
int *liwork,
int *info);
extern int
{pfx}zheevd{sfx}(char *jobz, char *uplo, int *n,
f2c_doublecomplex a[], int *lda,
double w[], f2c_doublecomplex work[],
int *lwork, double rwork[], int *lrwork, int iwork[],
int *liwork,
int *info);
extern int
{pfx}dgelsd{sfx}(int *m, int *n, int *nrhs,
double a[], int *lda, double b[], int *ldb,
double s[], double *rcond, int *rank,
double work[], int *lwork, int iwork[],
int *info);
extern int
{pfx}zgelsd{sfx}(int *m, int *n, int *nrhs,
f2c_doublecomplex a[], int *lda,
f2c_doublecomplex b[], int *ldb,
double s[], double *rcond, int *rank,
f2c_doublecomplex work[], int *lwork,
double rwork[], int iwork[],
int *info);
extern int
{pfx}sgesv{sfx}(int *n, int *nrhs,
float a[], int *lda,
int ipiv[],
float b[], int *ldb,
int *info);
extern int
{pfx}dgesv{sfx}(int *n, int *nrhs,
double a[], int *lda,
int ipiv[],
double b[], int *ldb,
int *info);
extern int
{pfx}cgesv{sfx}(int *n, int *nrhs,
f2c_complex a[], int *lda,
int ipiv[],
f2c_complex b[], int *ldb,
int *info);
extern int
{pfx}zgesv{sfx}(int *n, int *nrhs,
f2c_doublecomplex a[], int *lda,
int ipiv[],
f2c_doublecomplex b[], int *ldb,
int *info);
extern int
{pfx}sgetrf{sfx}(int *m, int *n,
float a[], int *lda,
int ipiv[],
int *info);
extern int
{pfx}dgetrf{sfx}(int *m, int *n,
double a[], int *lda,
int ipiv[],
int *info);
extern int
{pfx}cgetrf{sfx}(int *m, int *n,
f2c_complex a[], int *lda,
int ipiv[],
int *info);
extern int
{pfx}zgetrf{sfx}(int *m, int *n,
f2c_doublecomplex a[], int *lda,
int ipiv[],
int *info);
extern int
{pfx}spotrf{sfx}(char *uplo, int *n,
float a[], int *lda,
int *info);
extern int
{pfx}dpotrf{sfx}(char *uplo, int *n,
double a[], int *lda,
int *info);
extern int
{pfx}cpotrf{sfx}(char *uplo, int *n,
f2c_complex a[], int *lda,
int *info);
extern int
{pfx}zpotrf{sfx}(char *uplo, int *n,
f2c_doublecomplex a[], int *lda,
int *info);
extern int
{pfx}sgesdd{sfx}(char *jobz, int *m, int *n,
float a[], int *lda, float s[], float u[],
int *ldu, float vt[], int *ldvt, float work[],
int *lwork, int iwork[], int *info);
extern int
{pfx}dgesdd{sfx}(char *jobz, int *m, int *n,
double a[], int *lda, double s[], double u[],
int *ldu, double vt[], int *ldvt, double work[],
int *lwork, int iwork[], int *info);
extern int
{pfx}cgesdd{sfx}(char *jobz, int *m, int *n,
f2c_complex a[], int *lda,
float s[], f2c_complex u[], int *ldu,
f2c_complex vt[], int *ldvt,
f2c_complex work[], int *lwork,
float rwork[], int iwork[], int *info);
extern int
{pfx}zgesdd{sfx}(char *jobz, int *m, int *n,
f2c_doublecomplex a[], int *lda,
double s[], f2c_doublecomplex u[], int *ldu,
f2c_doublecomplex vt[], int *ldvt,
f2c_doublecomplex work[], int *lwork,
double rwork[], int iwork[], int *info);
extern int
{pfx}spotrs{sfx}(char *uplo, int *n, int *nrhs,
float a[], int *lda,
float b[], int *ldb,
int *info);
extern int
{pfx}dpotrs{sfx}(char *uplo, int *n, int *nrhs,
double a[], int *lda,
double b[], int *ldb,
int *info);
extern int
{pfx}cpotrs{sfx}(char *uplo, int *n, int *nrhs,
f2c_complex a[], int *lda,
f2c_complex b[], int *ldb,
int *info);
extern int
{pfx}zpotrs{sfx}(char *uplo, int *n, int *nrhs,
f2c_doublecomplex a[], int *lda,
f2c_doublecomplex b[], int *ldb,
int *info);
extern int
{pfx}spotri{sfx}(char *uplo, int *n,
float a[], int *lda,
int *info);
extern int
{pfx}dpotri{sfx}(char *uplo, int *n,
double a[], int *lda,
int *info);
extern int
{pfx}cpotri{sfx}(char *uplo, int *n,
f2c_complex a[], int *lda,
int *info);
extern int
{pfx}zpotri{sfx}(char *uplo, int *n,
f2c_doublecomplex a[], int *lda,
int *info);
extern int
{pfx}scopy{sfx}(int *n,
float *sx, int *incx,
float *sy, int *incy);
extern int
{pfx}dcopy{sfx}(int *n,
double *sx, int *incx,
double *sy, int *incy);
extern int
{pfx}ccopy{sfx}(int *n,
f2c_complex *sx, int *incx,
f2c_complex *sy, int *incy);
extern int
{pfx}zcopy{sfx}(int *n,
f2c_doublecomplex *sx, int *incx,
f2c_doublecomplex *sy, int *incy);
extern double
{pfx}sdot{sfx}(int *n,
float *sx, int *incx,
float *sy, int *incy);
extern double
{pfx}ddot{sfx}(int *n,
double *sx, int *incx,
double *sy, int *incy);
extern void
{pfx}cdotu{sfx}(f2c_complex *, int *,
f2c_complex *, int *,
f2c_complex *, int *);
extern void
{pfx}zdotu{sfx}(f2c_doublecomplex * ret_val, int *n,
f2c_doublecomplex *zx, int *incx,
f2c_doublecomplex *zy, int *incy);
extern void
{pfx}cdotc{sfx}(f2c_complex *, int *,
f2c_complex *, int *,
f2c_complex *, int *);
extern void
{pfx}zdotc{sfx}(f2c_doublecomplex * ret_val, int *n,
f2c_doublecomplex *zx, int *incx,
f2c_doublecomplex *zy, int *incy);
extern int
{pfx}sgemm{sfx}(char *transa, char *transb,
int *m, int *n, int *k,
float *alpha,
float *a, int *lda,
float *b, int *ldb,
float *beta,
float *c, int *ldc);
extern int
{pfx}dgemm{sfx}(char *transa, char *transb,
int *m, int *n, int *k,
double *alpha,
double *a, int *lda,
double *b, int *ldb,
double *beta,
double *c, int *ldc);
extern int
{pfx}cgemm{sfx}(char *transa, char *transb,
int *m, int *n, int *k,
f2c_complex *alpha,
f2c_complex *a, int *lda,
f2c_complex *b, int *ldb,
f2c_complex *beta,
f2c_complex *c, int *ldc);
extern int
{pfx}zgemm{sfx}(char *transa, char *transb,
int *m, int *n, int *k,
f2c_doublecomplex *alpha,
f2c_doublecomplex *a, int *lda,
f2c_doublecomplex *b, int *ldb,
f2c_doublecomplex *beta,
f2c_doublecomplex *c, int *ldc);
extern int
{pfx}dgeqrf{sfx}(int *, int *, double *, int *, double *,
double *, int *, int *);
extern int
{pfx}zgeqrf{sfx}(int *, int *, f2c_doublecomplex *, int *,
f2c_doublecomplex *, f2c_doublecomplex *, int *, int *);
'''.format(**macros))
'''
Create a shared_object which maps the bare name to the one with pfx, sfx
'''
shared_object = Dummy()
for name in ['sgeev', 'dgeev', 'cgeev', 'zgeev', 'ssyevd', 'dsyevd',
'cheevd', 'zheevd', 'dgelsd', 'zgelsd', 'sgesv', 'dgesv', 'cgesv', 'zgesv',
'sgetrf', 'dgetrf', 'cgetrf', 'zgetrf', 'spotrf', 'dpotrf', 'cpotrf', 'zpotrf',
'sgesdd', 'dgesdd', 'cgesdd', 'zgesdd', 'spotrs', 'dpotrs', 'cpotrs', 'zpotrs',
'spotri', 'dpotri', 'cpotri', 'zpotri', 'scopy', 'dcopy', 'ccopy', 'zcopy',
'sdot', 'ddot', 'cdotu', 'zdotu', 'cdotc', 'zdotc', 'dgeqrf', 'zgeqrf',
'sgemm', 'dgemm', 'cgemm', 'zgemm']:
setattr(shared_object, name, getattr(_C, macros['pfx'] + name + macros['sfx']))
'''
Since numpy expects to be able to call these functions with python objects,
create a mapping mechanism:
ndarray -> equivalent pointer to its data based on dtype
numpy scalar -> equivalent pointer based on ffi.cast
ffi.CData -> ready to be called
arbitrary cpython type -> use ffi.new to create a pointer to a type
determined from the function signature
'''
toCtypeP = {nt.int32: 'int*', nt.float32: 'float*', nt.float64: 'double*',
nt.complex64: 'f2c_complex*', nt.complex128: 'f2c_doublecomplex*',
nt.int8: 'char *'}
toCtypeA = {nt.int32: 'int[1]', nt.float32: 'float[1]', nt.float64: 'double[1]',
nt.complex64: 'f2c_complex[1]', nt.complex128: 'f2c_doublecomplex[1]'}
def toCptr(src):
    """Return a cffi pointer aliasing *src*'s data buffer.

    ``None`` maps to a NULL ``void*``; otherwise the ndarray's data
    address (taken from ``__array_interface__``) is cast to the C pointer
    type matching its dtype via the ``toCtypeP`` table.
    """
    if src is None:
        return ffi.cast('void*', 0)
    address = src.__array_interface__['data'][0]
    pointer_type = toCtypeP[src.dtype]
    return ffi.cast(pointer_type, address)
def convert_arg(inarg, ffitype):
    """Convert *inarg* into a C pointer suitable for a LAPACK call.

    ndarray       -> pointer to its data buffer (zero copy, via toCptr)
    numpy scalar  -> newly allocated one-element C array holding its value
    ffi.CData     -> passed through untouched
    anything else -> one-element array whose C type is derived from the
                     formal parameter type *ffitype*
    """
    if isinstance(inarg, np.ndarray):
        return toCptr(inarg)
    elif type(inarg) in toCtypeA:
        # BUG FIX: toCtypeA is keyed by the scalar *type*, but the
        # original indexed it with the instance itself (KeyError).  Also
        # allocate with ffi.new -- casting a Python number to an array
        # type is not valid -- mirroring the fall-through branch below.
        return ffi.new(toCtypeA[type(inarg)], [inarg])
    elif isinstance(inarg, ffi.CData):
        return inarg
    # Hope for the best: strip the trailing ' *' from the formal pointer
    # type and allocate a one-element array of the pointee type.
    ctyp_p = ffi.getctype(ffitype)
    ctyp = ctyp_p[:-2]
    return ffi.new(ctyp + '[1]', [inarg])
def call_func(name):
    """Wrap the LAPACK routine *name* so it accepts Python/numpy arguments.

    The returned callable converts each positional argument with
    ``convert_arg`` (guided by the routine's cffi signature), invokes the
    shared-object function, and returns results the way numpy.linalg
    expects: a dict carrying the output ``'info'`` value and, for the
    *gelsd least-squares routines, the computed ``'rank'``.
    """
    def wrapper(*args):
        func = getattr(shared_object, name)
        signature = ffi.typeof(func).args
        c_args = [convert_arg(value, ctype)
                  for value, ctype in zip(args, signature)]
        func(*c_args)
        # The last argument of every wrapped routine is the info output.
        result = {'info': c_args[-1][0]}
        if 'gelsd' in name:
            # gelsd routines also report the effective rank (argument 10).
            result['rank'] = c_args[9][0]
        return result
    return wrapper
def not_implemented(*args):
    """Placeholder bound to LAPACK names absent from the shared object."""
    message = 'function not found, does lapack_lite object exist?'
    raise NotImplementedError(message)
for name in ['sgeev', 'dgeev', 'cgeev', 'zgeev', 'ssyevd', 'dsyevd', 'cheevd',
'zheevd', 'dgelsd', 'zgelsd', 'sgesv', 'dgesv', 'cgesv', 'zgesv',
'sgetrf', 'dgetrf', 'cgetrf', 'zgetrf', 'spotrf', 'dpotrf', 'cpotrf',
'zpotrf', 'sgesdd', 'dgesdd', 'cgesdd', 'zgesdd', 'spotrs', 'dpotrs',
'cpotrs', 'zpotrs', 'spotri', 'dpotri', 'cpotri', 'zpotri', 'scopy',
'dcopy', 'ccopy', 'zcopy', 'sdot', 'ddot', 'cdotu', 'zdotu', 'cdotc',
'zdotc', 'sgemm', 'dgemm', 'cgemm', 'zgemm', 'dgessd', 'dgeqrf',
'dorggr', 'zgeqrf', 'zungqr', 'xerbla']:
if name in dir(shared_object):
globals()[name] = call_func(name)
else:
globals()[name] = not_implemented
| {
"repo_name": "NextThought/pypy-numpy",
"path": "numpy/linalg/lapack_lite.py",
"copies": "1",
"size": "15914",
"license": "bsd-3-clause",
"hash": -2609286443268297700,
"line_mean": 33.899122807,
"line_max": 126,
"alpha_frac": 0.5374513007,
"autogenerated": false,
"ratio": 3.0405043943446697,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.40779556950446694,
"avg_score": null,
"num_lines": null
} |
"""A Chaco file reader.
"""
# Author: Suyog Dutt Jain <suyog.jain@aero.iitb.ac.in>
# Copyright (c) 2009, Enthought, Inc.
# License: BSD Style.
# Enthought library imports.
from traits.api import Instance, Str
from traitsui.api import View, Group, Item
from tvtk.api import tvtk
# Local imports.
from mayavi.core.source import Source
from mayavi.core.pipeline_info import PipelineInfo
########################################################################
# `ChacoReader` class
########################################################################
class ChacoReader(Source):
    """A Chaco reader.
    """
    # The version of this class. Used for persistence.
    __version__ = 0

    # Base name shared by the family of Chaco files to be read.
    base_name = Str('', desc='basename of the Chaco files')

    # The VTK data file reader.
    reader = Instance(tvtk.ChacoReader, args=(), allow_none=False,
                      record=True)

    # Information about what this object can produce.
    output_info = PipelineInfo(datasets=['unstructured_grid'])

    ########################################
    # View related code.

    # Our view.
    view = View(Group(Item(name='reader', style='custom',
                           resizable=True),
                      show_labels=False),
                resizable=True)

    ######################################################################
    # `FileDataSource` interface
    ######################################################################
    def __init__(self, base_name='', configure=True, **traits):
        # When `configure` is true, pop up the reader's modal trait editor
        # so the user can choose the base name interactively; the chosen
        # value is then copied onto this object's `base_name` trait.
        # NOTE(review): the `base_name` parameter itself is never used --
        # the value always comes from the reader after editing; confirm
        # whether it should seed `self.reader.base_name`.
        super(ChacoReader, self).__init__(**traits)
        if configure:
            self.reader.edit_traits(kind='livemodal')
        self.base_name = self.reader.base_name

    def update(self):
        # No-op until a base name has been chosen; otherwise refresh the
        # VTK reader and redraw the scene.
        if len(self.base_name) == 0:
            return
        self.reader.update()
        self.render()

    def has_output_port(self):
        """ Return True as the reader has output port."""
        return True

    def get_output_object(self):
        """ Return the reader output port."""
        return self.reader.output_port

    ######################################################################
    # Non-public interface
    ######################################################################
    def _base_name_changed(self, value):
        # Traits static change handler: fires whenever `base_name` is
        # assigned.  An empty name leaves the reader untouched.
        if len(value) == 0:
            return
        else:
            self.reader.base_name = value
            self._update_reader_output()

    def _update_reader_output(self):
        # Refresh the underlying VTK reader and republish its output on
        # the mayavi pipeline.
        self.reader.update()
        self.reader.update_information()
        # NOTE(review): this runs on every base-name change and appears to
        # re-register `render` each time -- confirm handlers do not
        # accumulate.
        self.reader.on_trait_change(self.render)
        self.outputs = [self.reader.output]
        self.data_changed = True
| {
"repo_name": "dmsurti/mayavi",
"path": "mayavi/sources/chaco_reader.py",
"copies": "3",
"size": "2647",
"license": "bsd-3-clause",
"hash": -3196526411355711000,
"line_mean": 31.2804878049,
"line_max": 74,
"alpha_frac": 0.4967888175,
"autogenerated": false,
"ratio": 4.494057724957555,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00045167118337850043,
"num_lines": 82
} |
""" A Chaco Shell PlotSession which raises Workbench Editors instead of
free-standing windows.
"""
from traits.api import Any, Dict, List, Str
from chaco.shell.session import PlotSession
from plot_editor import PlotEditor
class WorkbenchSession(PlotSession):
    """ A Chaco Shell PlotSession which raises Workbench Editors instead of
    free-standing windows.
    """

    # The Envisage Application we are in.
    application = Any()

    # The list of currently active windows.
    windows = List()

    # A dict mapping names to windows.
    window_map = Dict(Str, Any)

    def new_window(self, name=None, title=None, is_image=False):
        """Creates a new window and returns the index into the **windows** list
        for the new window.

        Parameters
        ----------
        name : str, optional
            Key under which the editor is registered in ``window_map``;
            also used as the window title when `title` is not given.
        title : str, optional
            Explicit window title; takes precedence over `name`.
        is_image : bool
            Whether the editor hosts an image plot.
        """
        workbench = self.application.get_service(
            'envisage.ui.workbench.workbench.Workbench')
        new_win = PlotEditor(
            is_image=is_image,
            size=(self.prefs.window_width, self.prefs.window_height),
            bgcolor=self.prefs.bgcolor,
            image_default_origin=self.prefs.image_default_origin,
            window=workbench.active_window,
        )
        new_win.data = self.data
        new_win.get_container().data = self.data
        new_win.session = self

        # Title precedence: explicit title, then name, then the default.
        # (Fixed: compare against None with `is not None`, not `!=`.)
        if title is not None:
            new_win.set_title(title)
        elif name is not None:
            new_win.set_title(name)
        else:
            new_win.set_title(self.prefs.default_window_name)

        self.windows.append(new_win)
        if name is not None:
            self.window_map[name] = new_win
        workbench.edit(new_win.obj, kind=lambda *args, **kwds: new_win)
        return len(self.windows) - 1
| {
"repo_name": "tommy-u/chaco",
"path": "chaco/plugin/workbench_session.py",
"copies": "3",
"size": "1701",
"license": "bsd-3-clause",
"hash": 1996145580616405000,
"line_mean": 28.8421052632,
"line_max": 79,
"alpha_frac": 0.6278659612,
"autogenerated": false,
"ratio": 3.8053691275167787,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.593323508871678,
"avg_score": null,
"num_lines": null
} |
# a channel contains a corpus and associated settings
from Tkinter import *
import tkFont
from tkSimpleDialog import askstring
from tkFileDialog import asksaveasfilename
import corpus
import operator
class Channel(Frame):
    """One suggestion keyboard tied to a corpus.

    Shows the corpus' top word suggestions as labelled keys, binds one
    keystroke per suggestion on the shared text frame, and exposes a
    weight slider whose value scales the suggestion scores used for the
    key colors.
    """

    def __init__(self, parent, textframe, corpus, num=0):
        Frame.__init__(self, parent)
        self.channel_name = corpus.name
        self.channel_num = num
        # FIX: removed leftover git merge-conflict markers
        # (<<<<<<< / ======= / >>>>>>>) that made this file unparseable.
        # Kept the "Updated upstream" value 'letterlabel'; the stashed
        # alternative was 'shift' -- confirm which mode is intended.
        self.mode = 'letterlabel'
        self.num_options = 15
        self.settings = {'color': 'black'}
        self.parent = parent
        self.textframe = textframe
        self.corpus = corpus
        self.font = parent.font
        self.pack(side=LEFT)
        self.current_options = self.get_options()
        self.keyboard = Frame()
        self.wt_scale = Scale(from_=-100, to=100, orient=HORIZONTAL)
        self.wt_scale.set(10)
        self.refresh_keyboard()
        self.keyboard.pack(anchor=W)

    # first member of ith tuple is the label. second member is the
    # keystroke to input i
    def optionmap(self):
        if self.mode == 'alpha':
            return [('1', '1'), ('2', '2'), ('3', '3'), ('4', '4'), ('5', '5'),
                    ('6', '6'), ('7', '7'), ('8', '8'), ('9', '9'), ('0', '0'),
                    ('a', 'a'), ('b', 'b'), ('c', 'c'), ('d', 'd'), ('e', 'e'),
                    ('f', 'f'), ('g', 'g'), ('h', 'h'), ('i', 'i'), ('j', 'j')]
        elif self.mode == 'shift':
            return [('1', '1'), ('2', '2'), ('3', '3'), ('4', '4'), ('5', '5'),
                    ('6', '6'), ('7', '7'), ('8', '8'), ('9', '9'), ('0', '0'),
                    ('1*', '!'), ('2*', '@'), ('3*', '#'), ('4*', '$'), ('5*', '%'),
                    ('6*', '^'), ('7*', '&'), ('8*', '*'), ('9*', '('), ('0*', ')')]
        elif self.mode == 'letterlabel':
            return [('a', 'a'), ('b', 'b'), ('c', 'c'), ('d', 'd'), ('e', 'e'),
                    ('f', 'f'), ('g', 'g'), ('h', 'h'), ('i', 'i'), ('j', 'j'),
                    ('k', 'k'), ('l', 'l'), ('m', 'm'), ('n', 'n'), ('o', 'o'),
                    ('p', 'p'), ('q', 'q'), ('r', 'r'), ('s', 's'), ('t', 't')]

    def make_keyboard(self, parent, words_and_scores, weight=100):
        """Build and return a Frame with one labelled key per suggestion."""
        keyboard = Frame(parent, padx=10)

        # Header: channel title, delete button and the weight slider.
        header = Frame(keyboard)
        self.title = Label(header, text=self.channel_name,
                           fg=self.settings['color'], font=self.font)
        self.title.pack(side=LEFT)
        Button(header, text='X', command=self.onDel).pack(side=RIGHT)
        self.wt_scale = Scale(header, from_=-100, to=100, orient=HORIZONTAL)
        self.wt_scale.set(weight)
        self.wt_scale.pack()
        header.pack()

        mainkeys = Frame(keyboard)
        options = self.optionmap()  # hoisted: invariant across the loop
        for i, (option, score) in enumerate(words_and_scores):
            optkey = Frame(mainkeys)
            keylabel = '%s.' % options[i][0]
            keystroke = options[i][1]
            color = self.score_to_color(score * 2)
            Label(optkey, text=keylabel, width=4, anchor=W, font=self.font,
                  bg=color, fg='white').pack(side=LEFT)
            b = Label(optkey, text=option, font=self.font, width=14,
                      anchor=W, borderwidth=0, pady=0, padx=10)
            b.pack(side=LEFT)
            # Typing the keystroke in the text frame inserts this word.
            self.textframe.bind(
                keystroke, lambda event, arg=option: self.onAddWord(arg))
            optkey.pack(side=TOP)
        mainkeys.pack()
        return keyboard

    def score_to_color(self, score):
        # Only the red component carries the score; green/blue stay 0.
        return self.get_color(score, 0, 0)

    # convert a triple of RGB component values to a Tk color string
    def get_color(self, x, y, z):
        # FIX: clamp into 0-255 and coerce to int.  Weighted scores can be
        # floats and go negative (the weight slider reaches -100); the old
        # code only capped values >= 256, so '%02x' on a negative or float
        # component produced an invalid color string or raised.
        x = max(0, min(int(x), 255))
        y = max(0, min(int(y), 255))
        z = max(0, min(int(z), 255))
        return '#%02x%02x%02x' % (x, y, z)

    def getWeight(self):
        """Current value of the weight slider."""
        return self.wt_scale.get()

    def onDel(self):
        # Unregister from the container, then destroy this widget.
        self.master.removeChannel(self.channel_num)
        self.destroy()

    def onAddWord(self, word):
        """Insert *word* at the text cursor and refresh the suggestions."""
        t = self.textframe
        t.insert(INSERT, " " + str(word))
        t.see(END)
        self.refresh_keyboard()
        return 'break'  # stop Tk from also inserting the raw keystroke

    def refresh_keyboard(self):
        """Recompute suggestions and rebuild the keyboard widget."""
        self.current_options = self.get_options()
        words_and_scores = []
        for word, score in self.current_options[0:self.num_options]:
            weighted_score = score * self.wt_scale.get()
            words_and_scores.append((word, weighted_score))
        weight = self.wt_scale.get()
        self.keyboard.destroy()
        self.keyboard = self.make_keyboard(self, words_and_scores, weight)
        self.keyboard.pack(anchor=W)

    def get_options(self):
        """Return up to 100 (word, score) suggestions from the corpus."""
        previous_words = self.parent.get_previous()
        next_words = self.parent.get_next()
        full_list = self.corpus.suggest(previous_words, next_words)
        # Keep only the first 100; refresh_keyboard slices num_options of
        # them for display.  (Removed a dead `suggestions` local.)
        return full_list[0:100]
| {
"repo_name": "jbrew/stereotype",
"path": "channel.py",
"copies": "1",
"size": "4520",
"license": "apache-2.0",
"hash": 7809080411232063000,
"line_mean": 32.7313432836,
"line_max": 118,
"alpha_frac": 0.582079646,
"autogenerated": false,
"ratio": 2.7377347062386432,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8760341329716814,
"avg_score": 0.011894604504365818,
"num_lines": 134
} |
# A character in UTF8 can be from 1 to 4 bytes long, subjected to the following rules:
#
# For 1-byte character, the first bit is a 0, followed by its unicode code.
# For n-bytes character, the first n-bits are all one's, the n+1 bit is 0, followed by n-1 bytes with most significant
# 2 bits being 10.
# This is how the UTF-8 encoding would work:
#
# Char. number range | UTF-8 octet sequence
# (hexadecimal) | (binary)
# --------------------+---------------------------------------------
# 0000 0000-0000 007F | 0xxxxxxx
# 0000 0080-0000 07FF | 110xxxxx 10xxxxxx
# 0000 0800-0000 FFFF | 1110xxxx 10xxxxxx 10xxxxxx
# 0001 0000-0010 FFFF | 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
# Given an array of integers representing the data, return whether it is a valid utf-8 encoding.
#
# Note:
# The input is an array of integers. Only the least significant 8 bits of each integer is used to store the data. This
# means each integer represents only 1 byte of data.
#
# Example 1:
#
# data = [197, 130, 1], which represents the octet sequence: 11000101 10000010 00000001.
#
# Return true.
# It is a valid utf-8 encoding for a 2-bytes character followed by a 1-byte character.
# Example 2:
#
# data = [235, 140, 4], which represented the octet sequence: 11101011 10001100 00000100.
#
# Return false.
# The first 3 bits are all one's and the 4th bit is 0 means it is a 3-bytes character.
# The next byte is a continuation byte which starts with 10 and that's correct.
# But the second continuation byte does not start with 10, so it is invalid.
class Solution(object):
    def validUtf8(self, data):
        """Return True if *data* encodes a valid UTF-8 byte sequence.

        Each integer carries one byte in its least-significant 8 bits.
        FIX: the original formatted the *whole* integer with
        "{0:08b}".format(num), so values above 255 produced more than 8
        bits and were misclassified; we now mask with 0xFF as the problem
        statement requires.

        :type data: List[int]
        :rtype: bool
        """
        remaining = 0  # continuation bytes still owed by the current char
        for num in data:
            byte = num & 0xFF  # only the low 8 bits are data
            if remaining == 0:
                if byte >> 7 == 0:
                    continue  # 0xxxxxxx: a complete 1-byte character
                # Count the leading one bits to get the sequence length.
                length = 0
                mask = 0x80
                while byte & mask:
                    length += 1
                    mask >>= 1
                # 10xxxxxx cannot start a character, and at most 4 bytes
                # (11110xxx) are allowed.
                if length < 2 or length > 4:
                    return False
                remaining = length - 1
            else:
                # Continuation bytes must match 10xxxxxx.
                if byte >> 6 != 0b10:
                    return False
                remaining -= 1
        # A trailing multi-byte character must not be truncated.
        return remaining == 0
# Note:
# Maintaining count and returning False for invalid case
| {
"repo_name": "jigarkb/CTCI",
"path": "LeetCode/393-M-UTF8Validation.py",
"copies": "2",
"size": "2199",
"license": "mit",
"hash": -932966329786990500,
"line_mean": 34.4677419355,
"line_max": 118,
"alpha_frac": 0.5948158254,
"autogenerated": false,
"ratio": 3.6895973154362416,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0013539649327027681,
"num_lines": 62
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.