code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
"""Interface functions to automatically get descriptors."""
from mala.descriptors.snap import SNAP
def DescriptorInterface(params):
"""
Return a DescriptorBase object that adheres to the parameters provided.
Parameters
----------
params : mala.common.parameters.Parameters
Parameters for which a DescriptorBase object is desired.
"""
if params.descriptors.descriptor_type == 'SNAP':
return SNAP(params)
else:
raise Exception("Unknown type of descriptor calculator requested.")
| [
"mala.descriptors.snap.SNAP"
] | [((437, 449), 'mala.descriptors.snap.SNAP', 'SNAP', (['params'], {}), '(params)\n', (441, 449), False, 'from mala.descriptors.snap import SNAP\n')] |
import logging
from flask import Blueprint, request
from itsdangerous import BadSignature
from flask.ext.babel import gettext
from aleph.core import db, settings
from aleph.search import QueryParser, DatabaseQueryResult
from aleph.model import Role, Permission, Audit
from aleph.logic.roles import check_visible, check_editable, update_role
from aleph.logic.permissions import update_permission
from aleph.logic.collections import update_collection, update_collection_access
from aleph.notify import notify_role
from aleph.logic.audit import record_audit
from aleph.serializers.roles import RoleSchema, PermissionSchema
from aleph.serializers.roles import RoleCodeCreateSchema, RoleCreateSchema
from aleph.views.util import require, get_db_collection, jsonify, parse_request
from aleph.views.util import obj_or_404, serialize_data
blueprint = Blueprint('roles_api', __name__)
log = logging.getLogger(__name__)
@blueprint.route('/api/2/roles/_suggest', methods=['GET'])
def suggest():
require(request.authz.logged_in)
parser = QueryParser(request.args, request.authz, limit=10)
if parser.prefix is None or len(parser.prefix) < 3:
# Do not return 400 because it's a routine event.
return jsonify({
'status': 'error',
'message': gettext('prefix filter is too short'),
'results': [],
'total': 0
})
# this only returns users, not groups
q = Role.by_prefix(parser.prefix, exclude=parser.exclude)
result = DatabaseQueryResult(request, q, parser=parser, schema=RoleSchema)
return jsonify(result)
@blueprint.route('/api/2/roles/code', methods=['POST'])
def create_code():
data = parse_request(RoleCodeCreateSchema)
signature = Role.SIGNATURE.dumps(data['email'])
url = '{}activate/{}'.format(settings.APP_UI_URL, signature)
role = Role(email=data['email'], name='Visitor')
log.info("Confirmation URL [%r]: %s", role, url)
notify_role(role, gettext('Registration'),
'email/registration_code.html',
url=url)
return jsonify({
'status': 'ok',
'message': gettext('To proceed, please check your email.')
})
@blueprint.route('/api/2/roles', methods=['POST'])
def create():
require(not request.authz.in_maintenance, settings.PASSWORD_LOGIN)
data = parse_request(RoleCreateSchema)
try:
email = Role.SIGNATURE.loads(data.get('code'),
max_age=Role.SIGNATURE_MAX_AGE)
except BadSignature:
return jsonify({
'status': 'error',
'message': gettext('Invalid code')
}, status=400)
role = Role.by_email(email)
if role is not None:
return jsonify({
'status': 'error',
'message': gettext('Email is already registered')
}, status=409)
role = Role.load_or_create(
foreign_id='password:{}'.format(email),
type=Role.USER,
name=data.get('name') or email,
email=email
)
role.set_password(data.get('password'))
db.session.add(role)
db.session.commit()
update_role(role)
# Let the serializer return more info about this user
request.authz.id = role.id
return serialize_data(role, RoleSchema, status=201)
@blueprint.route('/api/2/roles/<int:id>', methods=['GET'])
def view(id):
role = obj_or_404(Role.by_id(id))
require(check_editable(role, request.authz))
return serialize_data(role, RoleSchema)
@blueprint.route('/api/2/roles/<int:id>', methods=['POST', 'PUT'])
def update(id):
role = obj_or_404(Role.by_id(id))
require(request.authz.session_write)
require(check_editable(role, request.authz))
data = parse_request(RoleSchema)
role.update(data)
db.session.add(role)
db.session.commit()
update_role(role)
return view(role.id)
@blueprint.route('/api/2/collections/<int:id>/permissions')
def permissions_index(id):
collection = get_db_collection(id, request.authz.WRITE)
record_audit(Audit.ACT_COLLECTION, id=id)
roles = [r for r in Role.all_groups() if check_visible(r, request.authz)]
q = Permission.all()
q = q.filter(Permission.collection_id == collection.id)
permissions = []
for permission in q.all():
if not check_visible(permission.role, request.authz):
continue
permissions.append(permission)
if permission.role in roles:
roles.remove(permission.role)
# this workaround ensures that all groups are visible for the user to
# select in the UI even if they are not currently associated with the
# collection.
for role in roles:
if collection.casefile and role.is_public:
continue
permissions.append({
'collection_id': collection.id,
'write': False,
'read': False,
'role': role
})
permissions, errors = PermissionSchema().dump(permissions, many=True)
return jsonify({
'total': len(permissions),
'results': permissions
})
@blueprint.route('/api/2/collections/<int:id>/permissions',
methods=['POST', 'PUT'])
def permissions_update(id):
collection = get_db_collection(id, request.authz.WRITE)
for permission in parse_request(PermissionSchema, many=True):
role_id = permission.get('role', {}).get('id')
role = Role.by_id(role_id)
if not check_visible(role, request.authz):
continue
if collection.casefile and role.is_public:
permission['read'] = False
permission['write'] = False
update_permission(role,
collection,
permission['read'],
permission['write'],
editor_id=request.authz.id)
update_collection_access.delay(id)
update_collection(collection)
return permissions_index(id)
| [
"logging.getLogger",
"aleph.views.util.serialize_data",
"aleph.logic.collections.update_collection_access.delay",
"aleph.model.Role.all_groups",
"aleph.logic.roles.check_editable",
"aleph.logic.roles.update_role",
"aleph.views.util.get_db_collection",
"aleph.logic.collections.update_collection",
"al... | [((845, 877), 'flask.Blueprint', 'Blueprint', (['"""roles_api"""', '__name__'], {}), "('roles_api', __name__)\n", (854, 877), False, 'from flask import Blueprint, request\n'), ((884, 911), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (901, 911), False, 'import logging\n'), ((992, 1024), 'aleph.views.util.require', 'require', (['request.authz.logged_in'], {}), '(request.authz.logged_in)\n', (999, 1024), False, 'from aleph.views.util import require, get_db_collection, jsonify, parse_request\n'), ((1038, 1088), 'aleph.search.QueryParser', 'QueryParser', (['request.args', 'request.authz'], {'limit': '(10)'}), '(request.args, request.authz, limit=10)\n', (1049, 1088), False, 'from aleph.search import QueryParser, DatabaseQueryResult\n'), ((1432, 1485), 'aleph.model.Role.by_prefix', 'Role.by_prefix', (['parser.prefix'], {'exclude': 'parser.exclude'}), '(parser.prefix, exclude=parser.exclude)\n', (1446, 1485), False, 'from aleph.model import Role, Permission, Audit\n'), ((1499, 1564), 'aleph.search.DatabaseQueryResult', 'DatabaseQueryResult', (['request', 'q'], {'parser': 'parser', 'schema': 'RoleSchema'}), '(request, q, parser=parser, schema=RoleSchema)\n', (1518, 1564), False, 'from aleph.search import QueryParser, DatabaseQueryResult\n'), ((1576, 1591), 'aleph.views.util.jsonify', 'jsonify', (['result'], {}), '(result)\n', (1583, 1591), False, 'from aleph.views.util import require, get_db_collection, jsonify, parse_request\n'), ((1680, 1715), 'aleph.views.util.parse_request', 'parse_request', (['RoleCodeCreateSchema'], {}), '(RoleCodeCreateSchema)\n', (1693, 1715), False, 'from aleph.views.util import require, get_db_collection, jsonify, parse_request\n'), ((1732, 1767), 'aleph.model.Role.SIGNATURE.dumps', 'Role.SIGNATURE.dumps', (["data['email']"], {}), "(data['email'])\n", (1752, 1767), False, 'from aleph.model import Role, Permission, Audit\n'), ((1844, 1885), 'aleph.model.Role', 'Role', ([], {'email': "data['email']", 'name': '"""Visitor"""'}), "(email=data['email'], name='Visitor')\n", (1848, 1885), False, 'from aleph.model import Role, Permission, Audit\n'), ((2249, 2315), 'aleph.views.util.require', 'require', (['(not request.authz.in_maintenance)', 'settings.PASSWORD_LOGIN'], {}), '(not request.authz.in_maintenance, settings.PASSWORD_LOGIN)\n', (2256, 2315), False, 'from aleph.views.util import require, get_db_collection, jsonify, parse_request\n'), ((2327, 2358), 'aleph.views.util.parse_request', 'parse_request', (['RoleCreateSchema'], {}), '(RoleCreateSchema)\n', (2340, 2358), False, 'from aleph.views.util import require, get_db_collection, jsonify, parse_request\n'), ((2656, 2676), 'aleph.model.Role.by_email', 'Role.by_email', (['email'], {}), '(email)\n', (2669, 2676), False, 'from aleph.model import Role, Permission, Audit\n'), ((3062, 3082), 'aleph.core.db.session.add', 'db.session.add', (['role'], {}), '(role)\n', (3076, 3082), False, 'from aleph.core import db, settings\n'), ((3087, 3106), 'aleph.core.db.session.commit', 'db.session.commit', ([], {}), '()\n', (3104, 3106), False, 'from aleph.core import db, settings\n'), ((3111, 3128), 'aleph.logic.roles.update_role', 'update_role', (['role'], {}), '(role)\n', (3122, 3128), False, 'from aleph.logic.roles import check_visible, check_editable, update_role\n'), ((3229, 3273), 'aleph.views.util.serialize_data', 'serialize_data', (['role', 'RoleSchema'], {'status': '(201)'}), '(role, RoleSchema, status=201)\n', (3243, 3273), False, 'from aleph.views.util import obj_or_404, serialize_data\n'), ((3447, 3479), 'aleph.views.util.serialize_data', 'serialize_data', (['role', 'RoleSchema'], {}), '(role, RoleSchema)\n', (3461, 3479), False, 'from aleph.views.util import obj_or_404, serialize_data\n'), ((3607, 3643), 'aleph.views.util.require', 'require', (['request.authz.session_write'], {}), '(request.authz.session_write)\n', (3614, 3643), False, 'from aleph.views.util import require, get_db_collection, jsonify, parse_request\n'), ((3704, 3729), 'aleph.views.util.parse_request', 'parse_request', (['RoleSchema'], {}), '(RoleSchema)\n', (3717, 3729), False, 'from aleph.views.util import require, get_db_collection, jsonify, parse_request\n'), ((3756, 3776), 'aleph.core.db.session.add', 'db.session.add', (['role'], {}), '(role)\n', (3770, 3776), False, 'from aleph.core import db, settings\n'), ((3781, 3800), 'aleph.core.db.session.commit', 'db.session.commit', ([], {}), '()\n', (3798, 3800), False, 'from aleph.core import db, settings\n'), ((3805, 3822), 'aleph.logic.roles.update_role', 'update_role', (['role'], {}), '(role)\n', (3816, 3822), False, 'from aleph.logic.roles import check_visible, check_editable, update_role\n'), ((3954, 3996), 'aleph.views.util.get_db_collection', 'get_db_collection', (['id', 'request.authz.WRITE'], {}), '(id, request.authz.WRITE)\n', (3971, 3996), False, 'from aleph.views.util import require, get_db_collection, jsonify, parse_request\n'), ((4001, 4042), 'aleph.logic.audit.record_audit', 'record_audit', (['Audit.ACT_COLLECTION'], {'id': 'id'}), '(Audit.ACT_COLLECTION, id=id)\n', (4013, 4042), False, 'from aleph.logic.audit import record_audit\n'), ((4129, 4145), 'aleph.model.Permission.all', 'Permission.all', ([], {}), '()\n', (4143, 4145), False, 'from aleph.model import Role, Permission, Audit\n'), ((5203, 5245), 'aleph.views.util.get_db_collection', 'get_db_collection', (['id', 'request.authz.WRITE'], {}), '(id, request.authz.WRITE)\n', (5220, 5245), False, 'from aleph.views.util import require, get_db_collection, jsonify, parse_request\n'), ((5268, 5310), 'aleph.views.util.parse_request', 'parse_request', (['PermissionSchema'], {'many': '(True)'}), '(PermissionSchema, many=True)\n', (5281, 5310), False, 'from aleph.views.util import require, get_db_collection, jsonify, parse_request\n'), ((5827, 5861), 'aleph.logic.collections.update_collection_access.delay', 'update_collection_access.delay', (['id'], {}), '(id)\n', (5857, 5861), False, 'from aleph.logic.collections import update_collection, update_collection_access\n'), ((5866, 5895), 'aleph.logic.collections.update_collection', 'update_collection', (['collection'], {}), '(collection)\n', (5883, 5895), False, 'from aleph.logic.collections import update_collection, update_collection_access\n'), ((1961, 1984), 'flask.ext.babel.gettext', 'gettext', (['"""Registration"""'], {}), "('Registration')\n", (1968, 1984), False, 'from flask.ext.babel import gettext\n'), ((3371, 3385), 'aleph.model.Role.by_id', 'Role.by_id', (['id'], {}), '(id)\n', (3381, 3385), False, 'from aleph.model import Role, Permission, Audit\n'), ((3399, 3434), 'aleph.logic.roles.check_editable', 'check_editable', (['role', 'request.authz'], {}), '(role, request.authz)\n', (3413, 3434), False, 'from aleph.logic.roles import check_visible, check_editable, update_role\n'), ((3587, 3601), 'aleph.model.Role.by_id', 'Role.by_id', (['id'], {}), '(id)\n', (3597, 3601), False, 'from aleph.model import Role, Permission, Audit\n'), ((3656, 3691), 'aleph.logic.roles.check_editable', 'check_editable', (['role', 'request.authz'], {}), '(role, request.authz)\n', (3670, 3691), False, 'from aleph.logic.roles import check_visible, check_editable, update_role\n'), ((5382, 5401), 'aleph.model.Role.by_id', 'Role.by_id', (['role_id'], {}), '(role_id)\n', (5392, 5401), False, 'from aleph.model import Role, Permission, Audit\n'), ((5613, 5721), 'aleph.logic.permissions.update_permission', 'update_permission', (['role', 'collection', "permission['read']", "permission['write']"], {'editor_id': 'request.authz.id'}), "(role, collection, permission['read'], permission['write'],\n editor_id=request.authz.id)\n", (5630, 5721), False, 'from aleph.logic.permissions import update_permission\n'), ((2123, 2170), 'flask.ext.babel.gettext', 'gettext', (['"""To proceed, please check your email."""'], {}), "('To proceed, please check your email.')\n", (2130, 2170), False, 'from flask.ext.babel import gettext\n'), ((4067, 4084), 'aleph.model.Role.all_groups', 'Role.all_groups', ([], {}), '()\n', (4082, 4084), False, 'from aleph.model import Role, Permission, Audit\n'), ((4088, 4119), 'aleph.logic.roles.check_visible', 'check_visible', (['r', 'request.authz'], {}), '(r, request.authz)\n', (4101, 4119), False, 'from aleph.logic.roles import check_visible, check_editable, update_role\n'), ((4273, 4318), 'aleph.logic.roles.check_visible', 'check_visible', (['permission.role', 'request.authz'], {}), '(permission.role, request.authz)\n', (4286, 4318), False, 'from aleph.logic.roles import check_visible, check_editable, update_role\n'), ((4912, 4930), 'aleph.serializers.roles.PermissionSchema', 'PermissionSchema', ([], {}), '()\n', (4928, 4930), False, 'from aleph.serializers.roles import RoleSchema, PermissionSchema\n'), ((5417, 5451), 'aleph.logic.roles.check_visible', 'check_visible', (['role', 'request.authz'], {}), '(role, request.authz)\n', (5430, 5451), False, 'from aleph.logic.roles import check_visible, check_editable, update_role\n'), ((1282, 1319), 'flask.ext.babel.gettext', 'gettext', (['"""prefix filter is too short"""'], {}), "('prefix filter is too short')\n", (1289, 1319), False, 'from flask.ext.babel import gettext\n'), ((2781, 2819), 'flask.ext.babel.gettext', 'gettext', (['"""Email is already registered"""'], {}), "('Email is already registered')\n", (2788, 2819), False, 'from flask.ext.babel import gettext\n'), ((2597, 2620), 'flask.ext.babel.gettext', 'gettext', (['"""Invalid code"""'], {}), "('Invalid code')\n", (2604, 2620), False, 'from flask.ext.babel import gettext\n')] |
# PYTHON_ARGCOMPLETE_OK
import argcomplete
import argparse
from . import io
from . import cfg
from . import utils
@utils.logExceptionsAndRaise
def main():
# show startup message
io.startup()
# options used by multiple subparsers
def addVerboseArg(parser):
parser.add_argument('-v', '--verbose', action='store_true',
help='print more detailed output')
def addTexFileArg(parser):
parser.add_argument('tex_file', default='', nargs='?',
help='toplevel tex file of the document')
# setup argparser and global arguments
p = argparse.ArgumentParser()
addVerboseArg(p)
p.add_argument('--version', action='store_true',
help='print installed paperman version')
sub = p.add_subparsers(metavar='subcommands', dest='command')
# img command
s = sub.add_parser('img', help='image helper: check if unused or missing '
'images exist or import missing images')
addTexFileArg(s)
addVerboseArg(s)
s.add_argument('-i', '--import', action='store_true',
help='try importing missing images from image search path')
#s.add_argument('-c', '--clean', action='store_true',
# help='remove unused images')
# bib command
s = sub.add_parser('bib', help='bibliography helper: check if unused or '
'missing bib entries exist or import '
'missing entries')
addTexFileArg(s)
addVerboseArg(s)
s.add_argument('-p', '--print', action='store_true',
help='try to find missing citations in existing bibliographies '
'and show them in the shell, conflicts with --import, '
'--rewrite, and --sort')
s.add_argument('-i', '--import', action='store_true',
help='try importing missing citations from existing '
'bibliographies')
#s.add_argument('-c', '--clean', action='store_true',
# help='remove unused citations from bibliography, implies '
# '--rewrite')
s.add_argument('-r', '--rewrite', action='store_true',
help='rewrite and repair entire bibliography')
s.add_argument('-s', '--sort', nargs='?', default=False,
help='sort bibliography, sort order can be one of "date", '
'"author" or "key", default is "key", implies --rewrite')
# input command
s = sub.add_parser('input',
help=r'input helper: check if all \input{} files exist '
r'or import files')
addVerboseArg(s)
s.add_argument('-i', '--import', action='store_true',
help=r'try importing missing \input{} files from input '
r'search path')
# call img, bib and input command with --import flag set
s = sub.add_parser('import-all',
help='shortcut to run img, bib and input subcommands '
'with --import option enabled')
addVerboseArg(s)
# sort authors command
s = sub.add_parser('sort-authors',
help='alphabetic sorting and formatting of author lists '
'for scientific publications and presentations')
addVerboseArg(s)
s.add_argument('authors', nargs='+',
help='comma separated list of authors seperated. Use -s '
'option for a different separator')
s.add_argument('-s', '--separator', default=',',
help='separation character in author list')
s.add_argument('-f', '--full-name', action='store_true',
help='output full author names.')
s.add_argument('--keep-first', action='store_true',
help='keep position of first author in list.')
s.add_argument('--keep-last', action='store_true',
help='keep position of last author in list.')
s.add_argument('-k', '--keep-first-and-last', action='store_true',
help='keep position of first and last author in list.')
s.add_argument('-q', '--quiet', action='store_true',
help='just output the sorted author list, nothing else.')
s.add_argument('--no-and', action='store_true',
help='use the separator instead of "and" before the final '
'name and ignore ands in input.')
# collect bib/pdf pairs from specified collect directories
s = sub.add_parser('collect',
help='rename and move bib-pdf file pairs from specified '
'folders to library')
addVerboseArg(s)
s.add_argument('paths', nargs='*',
help='specify paths to import bib-pdf file pairs from, '
'defaults to paths given in config')
s.add_argument('-w', '--watch', action='store_true',
help='never exit and keep watching the collect directories')
s.add_argument('-e', '--err-to-file', action='store_true',
help='write errors not only to stdout, but also to a txt file '
'located in the same position as the bib-pdf file pair '
'that caused the error')
# library subcommand
s = sub.add_parser('lib',
help='search library, check library health, detect '
'corrupt bib files, duplicates and possibly '
'broken pdfs')
addVerboseArg(s)
s.add_argument('-f', '--find', nargs='+',
help='find library entries that contain given words in '
'bibtex fields')
s.add_argument('-F', '--find-fulltext', nargs='+',
help='find library entries that contain given words in '
'full manuscript text (requires pdf2txt program to '
'to be installed on your $PATH)')
s.add_argument('-k', '--key', action='store_true',
help='print citation key instead of path')
s.add_argument('-l', '--long', action='store_true',
help='print full bibtex entries instead of path only')
#s.add_argument('-L', '--extra-long', action='store_true',
# help='print full pdf2txt result instead of path only')
# sync subcommand
s = sub.add_parser('sync',
help='sync library pdfs to a tablet or other device')
addVerboseArg(s)
s.add_argument('-p', '--sync-path',
help='path to the device root that is to be synced.')
# diff subcommand
s = sub.add_parser('diff',
help='build pdfs that show changes between file versions')
addVerboseArg(s)
s.add_argument('-t' , '--old-is-tag', action='store_true',
help='do not treat "old" argument as filename, '
'but as git tag name (requires git to be installed '
'in your $PATH, requires current directory to be '
'git repository)')
s.add_argument('-T' , '--new-is-tag', action='store_true',
help='do not treat "new" argument as filename, '
'but as git tag name (requires git to be installed '
'in your $PATH, requires current directory to be '
'git repository)')
s.add_argument('-o', '--outfile', nargs='?', default='diff.pdf',
help='name of the pdf file to be built')
s.add_argument('-c', '--clean', action='store_true',
help='remove all build files except for output pdf')
s.add_argument('old', help='filename of old version')
s.add_argument('new', help='filename of new version')
s.add_argument('filename', nargs='?',
help='only used if both --old-is-tag and '
'--new-is-tag are set, specifies filename')
# make-diff subcommand
s = sub.add_parser('journal',
help='find full journal names and abbreviations')
addVerboseArg(s)
s.add_argument('-f', '--full', action='store_true',
help='print full names of found journals')
s.add_argument('-a', '--abbreviated', action='store_true',
help='print abbreviated names of found journals ')
s.add_argument('--list-suspicious', action='store_true',
help='print all journals in database that might by errornous, '
'i.e., abbreviations that do not end with a dot')
s.add_argument('name', nargs='*',
help='journal name, abbreviation or partial name')
# lint subcommand
s = sub.add_parser('lint',
help='search latex project for potential errors')
addVerboseArg(s)
addTexFileArg(s)
# config subcommand
s = sub.add_parser('config',
help='print location of config file and exit')
s.add_argument('-o', '--open', action='store_true',
help='open config file in vim')
addVerboseArg(s)
# enable autocompletion and parse args
argcomplete.autocomplete(p)
args = p.parse_args()
# apply verbosity setting
io.isVerbose = args.verbose
# if version is requested, print version and exit
if args.version:
io.info(f'paperman version {io.__version__}')
return
# select submodule for subcommands
cmd, cmds = None, None
if not args.command:
io.err('subcommand is required')
return
elif args.command == 'img':
from .subcommands import img as cmd
elif args.command == 'bib':
from .subcommands import bib as cmd
elif args.command == 'input':
from .subcommands import inp as cmd
elif args.command == 'import-all':
from .subcommands import inp as cmd1
from .subcommands import img as cmd2
from .subcommands import bib as cmd3
setattr(args, 'import', True)
args.clean = False
args.rewrite = False
args.sort = False
args.print = False
cmds = [cmd1, cmd2, cmd3]
elif args.command == 'sort-authors':
from .subcommands import sort_authors as cmd
elif args.command == 'collect':
from .subcommands import collect as cmd
elif args.command == 'lib':
from .subcommands import lib as cmd
elif args.command == 'sync':
from .subcommands import sync as cmd
elif args.command == 'diff':
from .subcommands import diff as cmd
elif args.command == 'journal':
from .subcommands import journal as cmd
elif args.command == 'lint':
from .subcommands import lint as cmd
elif args.command == 'config':
from .subcommands import config as cmd
else:
io.dbg(f'args={args}')
raise ValueError(f'args.command has unexpected value')
# raise error if required config keys are missing
cfg.testIfRequiredExist()
# run subcommand module
cmdStr = str(cmd or '\n'+'\n'.join([str(c) for c in cmds]))
io.verb(f'selected subcommand(s): {cmdStr}')
try:
if cmd:
cmds = [cmd]
if cmds:
for cmd in cmds:
cmd.main(args)
except KeyboardInterrupt:
raise
except RuntimeError as e:
io.err(str(e))
except Exception as e:
if cfg.get('debug'):
raise
io.err(str(e))
if __name__ == '__main__':
main()
| [
"argcomplete.autocomplete",
"argparse.ArgumentParser"
] | [((594, 619), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (617, 619), False, 'import argparse\n'), ((8945, 8972), 'argcomplete.autocomplete', 'argcomplete.autocomplete', (['p'], {}), '(p)\n', (8969, 8972), False, 'import argcomplete\n')] |
from eylanglexer import reserved
from eylanglexer import rules as lex_rules
from eylanginterpreter import *
from rply import ParserGenerator
EYLANG_VARS = EylangVars()
pg = ParserGenerator(
list(reserved.values()) + [name for name, _ in lex_rules],
precedence=[
('left', ['AND', 'OR', 'NOT']),
('left', ['EQ', 'LT', 'LE', 'GT', 'GE', 'NE']),
('left', ['PLUS', 'MINUS']),
('left', ['MUL', 'DIV', 'MOD']),
('left', ['POWER']),
('right', ['SIGN']),
]
)
@pg.production('program : program statement')
@pg.production('program : statement')
def p_program(p):
if len(p) == 1:
line, stat = p[0]
return Program({line: stat})
elif len(p) == 2:
line, stat = p[1]
p[0].set(line, stat)
return p[0]
@pg.production('statement : command NEWLINE')
def p_statement(p):
return (p[1].getsourcepos().lineno, p[0])
@pg.production('statement : NEWLINE')
def p_statement_newline(p):
return (p[0].getsourcepos().lineno, Newline())
# @pg.production('command : IMPORT expr')
# def p_command_return(p):
# return ('IMPORT', p[1])
@pg.production('command : RETURN expr')
def p_command_return(p):
return Return(p[1])
@pg.production('command : DEF variable LPAR paralist RPAR NEWLINE program END')
@pg.production('command : DEF variable LPAR RPAR NEWLINE program END')
def p_command_def(p):
if len(p) > 7:
return DEF(p[1], p[6], p[3])
else:
return DEF(p[1], p[5])
@pg.production('command : FOR varlist IN expr NEWLINE program ELSE NEWLINE program END')
@pg.production('command : FOR varlist IN expr NEWLINE program END')
def p_command_for(p):
if len(p) > 7:
return For(p[1], p[3], p[5], p[8])
else:
return For(p[1], p[3], p[5])
@pg.production('command : WHILE expr NEWLINE program ELSE NEWLINE program END')
@pg.production('command : WHILE expr NEWLINE program END')
def p_command_while(p):
if len(p) > 5:
return While(p[1], p[3], p[6])
else:
return While(p[1], p[3])
@pg.production('command : ELIF expr')
def p_command_elif(p):
return ELIF(p[1])
@pg.production('command : IF expr NEWLINE program ELSE NEWLINE program END')
@pg.production('command : IF expr NEWLINE program END')
def p_command_if(p):
if len(p) > 5:
return IF(p[1], p[3], p[6])
else:
return IF(p[1], p[3])
@pg.production('command : varlist ASSIGN expr')
def p_command_assign(p):
return Assign(p[0], p[2])
@pg.production('command : PUTS expr')
def p_command_puts(p):
return Puts(p[1])
@pg.production('command : expr')
def p_command_expr(p):
return Expr(p[0])
@pg.production('expr : expr PLUS expr')
@pg.production('expr : expr MINUS expr')
@pg.production('expr : expr MUL expr')
@pg.production('expr : expr DIV expr')
@pg.production('expr : expr MOD expr')
@pg.production('expr : expr POWER expr')
@pg.production('expr : expr EQ expr')
@pg.production('expr : expr LT expr')
@pg.production('expr : expr LE expr')
@pg.production('expr : expr GT expr')
@pg.production('expr : expr GE expr')
@pg.production('expr : expr NE expr')
def p_expr_binop(p):
return BinaryOp(p[1], p[0], p[2])
@pg.production('expr : PLUS expr', precedence='SIGN')
@pg.production('expr : MINUS expr', precedence='SIGN')
def p_expr_sign(p):
return Sign(p[0], p[1])
@pg.production('expr : expr AND expr')
@pg.production('expr : expr OR expr')
@pg.production('expr : NOT expr')
def p_expr_logic(p):
if len(p) > 2:
return (p[1].gettokentype(), (p[0], p[2]))
else:
return (p[0].gettokentype(), (p[1], ))
@pg.production('expr : object')
def p_expr_object(p):
return p[0]
@pg.production('object : parensexpr')
@pg.production('object : func')
@pg.production('object : dict')
@pg.production('object : list')
@pg.production('object : index')
@pg.production('object : attrivar')
@pg.production('object : variable')
@pg.production('object : constant')
def p_object(p):
return p[0]
@pg.production('parensexpr : LPAR expr RPAR')
def p_parensexpr(p):
return Parens(p[1])
@pg.production('func : variable LPAR arglist RPAR')
@pg.production('func : attrivar LPAR arglist RPAR')
@pg.production('func : variable LPAR RPAR')
@pg.production('func : attrivar LPAR RPAR')
def p_func(p):
if len(p) > 3:
return Func(p[0], p[2])
else:
return Func(p[0])
@pg.production('dict : LBRACE pairlist RBRACE')
@pg.production('dict : LBRACE RBRACE')
def p_dict(p):
if len(p) > 2:
return Dictionary(p[1])
else:
return Dictionary()
@pg.production('pairlist : pairlist COMMA pair')
@pg.production('pairlist : pair')
def p_pairlist(p):
if len(p) > 1:
key, value = p[2]
p[0][key] = value
return p[0]
else:
key, value = p[0]
return {key: value}
@pg.production('pair : expr COLON expr')
@pg.production('pair : expr')
def p_pair(p):
if len(p) > 1:
return (p[0], p[2])
else:
return (p[0], None)
@pg.production('list : LSQB exprlist RSQB')
@pg.production('list : LSQB RSQB')
def p_list(p):
if len(p) > 2:
return List(p[1])
else:
return List()
@pg.production('exprlist : exprlist COMMA expr')
@pg.production('exprlist : expr')
def p_exprlist(p):
if len(p) > 1:
p[0].append(p[2])
return p[0]
else:
return [p[0]]
@pg.production('index : object LSQB indexlist RSQB')
def p_index(p):
return Index(p[0], p[2])
@pg.production('indexlist : indexlist COMMA indexexpr')
@pg.production('indexlist : indexexpr')
def p_indexlist(p):
if len(p) > 1:
p[0].append(p[2])
return p[0]
else:
return [p[0]]
@pg.production('indexexpr : expr COLON expr COLON expr')
def p_indexexprc(p): return (p[0], p[2], p[4])
@pg.production('indexexpr : expr COLON expr COLON')
def p_indexexprb(p): return (p[0], p[2], None)
@pg.production('indexexpr : expr COLON COLON expr')
def p_indexexpra(p): return (p[0], None, p[3])
@pg.production('indexexpr : COLON expr COLON expr')
def p_indexexpr9(p): return (None, p[1], p[3])
@pg.production('indexexpr : COLON expr COLON')
def p_indexexpr8(p): return (None, p[1], None)
@pg.production('indexexpr : COLON COLON expr')
def p_indexexpr7(p): return (None, None, p[2])
@pg.production('indexexpr : expr COLON COLON')
def p_indexexpr6(p): return (p[0], None, None)
@pg.production('indexexpr : expr COLON expr')
def p_indexexpr5(p): return (p[0], p[2], None)
@pg.production('indexexpr : COLON COLON')
def p_indexexpr4(p): return (None, None, None)
@pg.production('indexexpr : expr COLON')
def p_indexexpr3(p): return (p[0], None, None)
@pg.production('indexexpr : COLON expr')
def p_indexexpr2(p): return (None, p[1], None)
@pg.production('indexexpr : COLON')
def p_indexexpr1(p): return (None, None, None)
@pg.production('indexexpr : expr')
def p_indexexpr0(p): return (p[0], )
@pg.production('paralist : paralist COMMA parameter')
@pg.production('paralist : parameter')
def p_paralist(p):
if len(p) > 1:
p[0].append(p[2])
return p[0]
else:
return ParaList(p[0])
@pg.production('parameter : variable ASSIGN expr')
@pg.production('parameter : variable')
def p_parameter(p):
if len(p) > 1:
return (p[0], p[2])
else:
return (p[0], )
@pg.production('arglist : arglist COMMA argument')
@pg.production('arglist : argument')
def p_arglist(p):
if len(p) > 1:
p[0].append(p[2])
return p[0]
else:
return ArgList(p[0])
@pg.production('argument : variable ASSIGN expr')
@pg.production('argument : expr')
def p_argument(p):
if len(p) > 1:
return (p[0], p[2])
else:
return (p[0], )
@pg.production('varlist : varlist COMMA attrivar')
@pg.production('varlist : varlist COMMA variable')
@pg.production('varlist : attrivar')
@pg.production('varlist : variable')
def p_varlist(p):
if len(p) > 1:
p[0].append(p[2])
return p[0]
else:
return VarList([p[0]])
@pg.production('attrivar : object DOT variable')
def p_attrivar(p):
return Attribute(p[0], p[2], eylang_vars=EYLANG_VARS)
@pg.production('variable : NAME')
def p_variable(p):
return Variable(p[0], eylang_vars=EYLANG_VARS)
@pg.production('constant : NUMBER')
@pg.production('constant : STRING')
def p_constant(p):
return Constant(p[0])
parser = pg.build()
| [
"eylanglexer.reserved.values"
] | [((202, 219), 'eylanglexer.reserved.values', 'reserved.values', ([], {}), '()\n', (217, 219), False, 'from eylanglexer import reserved\n')] |
from rest_framework.viewsets import ModelViewSet
from orders.models import OrderInfo
from meiduo_admin.utils import PageNum
from meiduo_admin.serializers.order import OrderSeriazlier
from rest_framework.decorators import action
from rest_framework.response import Response
class OrdersView(ModelViewSet):
serializer_class = OrderSeriazlier
queryset = OrderInfo.objects.all()
pagination_class = PageNum
# 在视图中定义status方法修改订单状态
@action(methods=['put'], detail=True)
def status(self, request, pk):
# 获取订单对象
order = self.get_object()
# 获取要修改的状态值
status = request.data.get('status')
# 修改订单状态
order.status = status
order.save()
# 返回结果
ser = self.get_serializer(order)
return Response({
'order_id': order.order_id,
'status': status
})
| [
"orders.models.OrderInfo.objects.all",
"rest_framework.response.Response",
"rest_framework.decorators.action"
] | [((360, 383), 'orders.models.OrderInfo.objects.all', 'OrderInfo.objects.all', ([], {}), '()\n', (381, 383), False, 'from orders.models import OrderInfo\n'), ((448, 484), 'rest_framework.decorators.action', 'action', ([], {'methods': "['put']", 'detail': '(True)'}), "(methods=['put'], detail=True)\n", (454, 484), False, 'from rest_framework.decorators import action\n'), ((774, 830), 'rest_framework.response.Response', 'Response', (["{'order_id': order.order_id, 'status': status}"], {}), "({'order_id': order.order_id, 'status': status})\n", (782, 830), False, 'from rest_framework.response import Response\n')] |
import unittest
from unittest import TestCase
from importCertificate import getCommandForImport
class TestImportCertificate(TestCase):
def test_getKeyStoreCommand(self):
command = getCommandForImport("certbotlocation", "location", "password")
self.assertEqual(command, "keytool -import -trustcacerts -alias tomcat -file certbotlocation/0001_cert.perm -keystore location -noprompt -storepass password")
if __name__ == '__main__':
unittest.main()
| [
"unittest.main",
"importCertificate.getCommandForImport"
] | [((195, 257), 'importCertificate.getCommandForImport', 'getCommandForImport', (['"""certbotlocation"""', '"""location"""', '"""password"""'], {}), "('certbotlocation', 'location', 'password')\n", (214, 257), False, 'from importCertificate import getCommandForImport\n'), ((465, 480), 'unittest.main', 'unittest.main', ([], {}), '()\n', (478, 480), False, 'import unittest\n')] |
#!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
## \package pts.magic.tools.statistics Provides statistical functions.
# -----------------------------------------------------------------
# Ensure Python 3 functionality
from __future__ import absolute_import, division, print_function
# Import standard modules
import copy
import numpy as np
# Import astronomical modules
from astropy.stats import sigma_clip, sigma_clipped_stats
# Import the relevant PTS classes and modules
from . import general
from ..basics.mask import Mask
# -----------------------------------------------------------------
# Calculate sigma-to-FWHM and FWHM-to-sigma conversion factors
sigma_to_fwhm = (8 * np.log(2))**0.5
fwhm_to_sigma = 1.0 / sigma_to_fwhm
# -----------------------------------------------------------------
def sigma_clip_mask_list(data, sigma=3.0, mask=None):
"""
This function ...
:param data:
:param sigma:
:param mask:
:return:
"""
masked_list = sigma_clip(data, sigma=sigma, iters=None, copy=False)
new_mask = copy.deepcopy(mask) if mask is not None else [0]*len(data)
for i, masked in enumerate(masked_list.mask):
if masked: new_mask[i] = True
# Return the new or updated mask
return new_mask
# -----------------------------------------------------------------
def sigma_clip_mask(data, sigma_level=3.0, mask=None):
"""
This function ...
:param data:
:param sigma_level:
:param mask:
:return:
"""
# Split the x, y and z values of the data, without the masked values
x_values, y_values, z_values = general.split_xyz(data, mask=mask)
# Sigma-clip z-values that are outliers
masked_z_values = sigma_clip(z_values, sigma=sigma_level, iters=None, copy=False)
# Copy the mask or create a new one if none was provided
new_mask = copy.deepcopy(mask) if mask is not None else Mask(np.zeros_like(data))
for i, masked in enumerate(masked_z_values.mask):
if masked:
x = x_values[i]
y = y_values[i]
new_mask[y,x] = True
#if not isinstance(new_mask, Mask): print(new_mask, mask)
# Assert the mask is of type 'Mask'
assert isinstance(new_mask, Mask)
# Return the new or updated mask
return new_mask
# -----------------------------------------------------------------
def sigma_clipped_median(data, sigma=3.0, mask=None):
"""
This function ...
:param data:
:param sigma:
:param mask:
:return:
"""
# Calculate the sigma-clipped mean and median
_, median, _ = sigma_clipped_stats(data, mask=mask, sigma=sigma)
# Return the median value
return median
# -----------------------------------------------------------------
def sigma_clipped_statistics(data, sigma=3.0, mask=None):
"""
This function ...
:param data:
:param sigma:
:param mask:
:return:
"""
# Calculate the sigma-clipped mean and median
mean, median, stddev = sigma_clipped_stats(data, mask=mask, sigma=sigma)
# Return the statistical parameters
return mean, median, stddev
# -----------------------------------------------------------------
def sigma_clip_split(input_list, criterion, sigma=3.0, only_high=False, only_low=False, nans="low"):
"""
This function ...
:param input_list:
:param criterion:
:param sigma:
:param only_high:
:param only_low:
:param nans:
:return:
"""
# Initialize an empty list of widths
determinants = []
# Loop over all the star candidates and calculate their width
for item in input_list: determinants.append(criterion(item))
# Use sigma clipping to seperate stars and unidentified objects
mask = sigma_clip_mask_list(determinants, sigma=sigma)
# Calculate the mean value of the determinants that are not masked
mean = np.ma.mean(np.ma.masked_array(determinants, mask=mask))
# Create a seperate list for the stars and for the ufos
valid_list = []
invalid_list = []
# Loop over all items in the input list, putting them in either the valid or invalid list
for index, item in enumerate(input_list):
value = criterion(item)
if only_high:
if mask[index] and value > mean: invalid_list.append(item)
else: valid_list.append(item)
elif only_low:
if mask[index] and value < mean: invalid_list.append(item)
else: valid_list.append(item)
else:
if mask[index]: invalid_list.append(item)
else: valid_list.append(item)
# Return the valid and invalid lists
return valid_list, invalid_list
# -----------------------------------------------------------------
def cutoff(values, method, limit):
"""
This function ...
:param values:
:param method:
:param limit:
"""
# Percentage method
if method == "percentage":
# Create a sorted list for the input values
sorted_values = sorted(values)
# Determine the splitting point
split = (1.0-limit) * len(sorted_values)
index = int(round(split))
# Return the corresponding value in the sorted list
return sorted_values[index]
# Sigma-clipping method
elif method == "sigma_clip":
# Perform sigma clipping on the input list
masked_values = sigma_clip(np.array(values), sigma=limit, iters=None, copy=False)
# Calculate the maximum of the masked array
return np.ma.max(masked_values)
else: raise ValueError("Invalid cutoff method (must be 'percentage' or 'sigma_clip'")
# -----------------------------------------------------------------
| [
"numpy.ma.max",
"astropy.stats.sigma_clip",
"numpy.log",
"numpy.array",
"copy.deepcopy",
"numpy.ma.masked_array",
"astropy.stats.sigma_clipped_stats",
"numpy.zeros_like"
] | [((1256, 1309), 'astropy.stats.sigma_clip', 'sigma_clip', (['data'], {'sigma': 'sigma', 'iters': 'None', 'copy': '(False)'}), '(data, sigma=sigma, iters=None, copy=False)\n', (1266, 1309), False, 'from astropy.stats import sigma_clip, sigma_clipped_stats\n'), ((1979, 2042), 'astropy.stats.sigma_clip', 'sigma_clip', (['z_values'], {'sigma': 'sigma_level', 'iters': 'None', 'copy': '(False)'}), '(z_values, sigma=sigma_level, iters=None, copy=False)\n', (1989, 2042), False, 'from astropy.stats import sigma_clip, sigma_clipped_stats\n'), ((2854, 2903), 'astropy.stats.sigma_clipped_stats', 'sigma_clipped_stats', (['data'], {'mask': 'mask', 'sigma': 'sigma'}), '(data, mask=mask, sigma=sigma)\n', (2873, 2903), False, 'from astropy.stats import sigma_clip, sigma_clipped_stats\n'), ((3263, 3312), 'astropy.stats.sigma_clipped_stats', 'sigma_clipped_stats', (['data'], {'mask': 'mask', 'sigma': 'sigma'}), '(data, mask=mask, sigma=sigma)\n', (3282, 3312), False, 'from astropy.stats import sigma_clip, sigma_clipped_stats\n'), ((957, 966), 'numpy.log', 'np.log', (['(2)'], {}), '(2)\n', (963, 966), True, 'import numpy as np\n'), ((1326, 1345), 'copy.deepcopy', 'copy.deepcopy', (['mask'], {}), '(mask)\n', (1339, 1345), False, 'import copy\n'), ((2120, 2139), 'copy.deepcopy', 'copy.deepcopy', (['mask'], {}), '(mask)\n', (2133, 2139), False, 'import copy\n'), ((4150, 4193), 'numpy.ma.masked_array', 'np.ma.masked_array', (['determinants'], {'mask': 'mask'}), '(determinants, mask=mask)\n', (4168, 4193), True, 'import numpy as np\n'), ((2170, 2189), 'numpy.zeros_like', 'np.zeros_like', (['data'], {}), '(data)\n', (2183, 2189), True, 'import numpy as np\n'), ((5778, 5802), 'numpy.ma.max', 'np.ma.max', (['masked_values'], {}), '(masked_values)\n', (5787, 5802), True, 'import numpy as np\n'), ((5655, 5671), 'numpy.array', 'np.array', (['values'], {}), '(values)\n', (5663, 5671), True, 'import numpy as np\n')] |
import unittest
b32 = 0xFFFFFFFF
def bitInsertionFor(N, M, i, j):
clearmask = 0
for b in range(i, j):
clearmask |= 1 << b
clearmask = ~clearmask
r = N & clearmask
return r | (M << i)
def bitInsertionBit(N, M, i, j):
clearmask = (~1 & b32) << j
clearmask |= (1 << i) - 1
R = N & clearmask
R |= (M << i)
return R
class Playground(unittest.TestCase):
def setUp(self):
self.A = [
dict(N=0b10000000000, M=0b10011, i=2, j=6, E=0b10001001100),
dict(N=0b10000000000, M=0b10011, i=1, j=5, E=0b10000100110),
dict(N=0b11111111111, M=0b10011, i=1, j=5, E=0b11111100111),
dict(N=0b11111111111, M=0b10011, i=1, j=5, E=0b11111100111),
]
def runAllTests(self, method):
for test in self.A:
N = test['N']
M = test['M']
i = test['i']
j = test['j']
E = test['E']
self.assertEqual(bin(method(N, M, i, j)), bin(E))
def test_example_for(self):
self.runAllTests(bitInsertionFor)
def test_example_bit(self):
self.runAllTests(bitInsertionBit)
if __name__ == '__main__':
unittest.main()
| [
"unittest.main"
] | [((1210, 1225), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1223, 1225), False, 'import unittest\n')] |
#!/usr/bin/python3
import sys
import subprocess
import pathlib
BROKER = "ig.com"
BASE_PATH = "/mnt/storage/Data/market/ig.com/dumps"
PREFIX = "full"
MARKETS = {
"CS.D.AUDNZD.MINI.IP": "AUDNZD",
"CS.D.AUDUSD.MINI.IP": "AUDUSD",
"CS.D.EURCAD.MINI.IP": "EURCAD",
"CS.D.EURCHF.MINI.IP": "EURCHF",
"CS.D.EURGBP.MINI.IP": "EURGBP",
"CS.D.EURUSD.MINI.IP": "EURUSD",
"IX.D.DAX.IFMM.IP": "GER30",
"CS.D.GBPUSD.MINI.IP": "GBPUSD",
"IX.D.NASDAQ.IFE.IP": "NAS100",
"IX.D.SPTRD.IFE.IP": "SPX500",
"IX.D.DOW.IFE.IP": "US30",
"CS.D.USDCHF.MINI.IP": "USDCHF",
"CS.D.USDJPY.MINI.IP": "USDJPY",
# "CS.D.CFDSILVER.CFM.IP": "XAGUSD",
"CS.D.CFEGOLD.CFE.IP": "XAUUSD"
# @todo WTI
}
IMPORT_TFS = {
"1m": "2017-12-25T00:00:00",
"3m": "2019-12-25T00:00:00",
"5m": "2019-12-15T00:00:00",
"15m": "2019-12-01T00:00:00",
"30m": "2019-10-01T00:00:00",
"1h": "2019-07-01T00:00:00",
"2h": "2019-07-01T00:00:00",
"4h": "2019-01-01T00:00:00",
"1d": "2017-01-01T00:00:00",
"1w": "2010-01-01T00:00:00",
"1M": "2000-01-01T00:00:00"
}
def import_siis_any(market, symbol, prefix="full"):
"""Unique file for any timeframes"""
src_path = pathlib.Path(BASE_PATH, market)
if not src_path.exists():
print("! Missing path for %s" % market)
return
print("Import %s in %s from %s" % (market, "any", src_path))
with subprocess.Popen(["python", "siis.py", "real", "--import",
"--filename=%s/full-%s-%s-any.siis" % (src_path, BROKER, market)]):
print("-- Done")
def import_siis(market, symbol, prefix="full"):
"""Distinct file per timeframe"""
for tf, lfrom in IMPORT_TFS.items():
src_path = pathlib.Path(BASE_PATH, market)
if not src_path.exists():
print("! Missing path for %s" % market)
return
print("Import %s in %s from %s" % (market, tf, src_path))
with subprocess.Popen(["python", "siis.py", "real", "--import",
"--filename=%s/full-%s-%s-%s.siis" % (src_path, BROKER, market, tf)]):
print("-- Done")
if __name__ == "__main__":
if len(sys.argv) > 1:
# overrides
BASE_PATH = sys.argv[1]
if len(sys.argv) > 2:
# overrides
PREFIX = sys.argv[2]
for _market, _symbol in MARKETS.items():
# use unique file
import_siis_any(_market, _symbol, PREFIX)
| [
"subprocess.Popen",
"pathlib.Path"
] | [((1218, 1249), 'pathlib.Path', 'pathlib.Path', (['BASE_PATH', 'market'], {}), '(BASE_PATH, market)\n', (1230, 1249), False, 'import pathlib\n'), ((1419, 1549), 'subprocess.Popen', 'subprocess.Popen', (["['python', 'siis.py', 'real', '--import', \n '--filename=%s/full-%s-%s-any.siis' % (src_path, BROKER, market)]"], {}), "(['python', 'siis.py', 'real', '--import', \n '--filename=%s/full-%s-%s-any.siis' % (src_path, BROKER, market)])\n", (1435, 1549), False, 'import subprocess\n'), ((1746, 1777), 'pathlib.Path', 'pathlib.Path', (['BASE_PATH', 'market'], {}), '(BASE_PATH, market)\n', (1758, 1777), False, 'import pathlib\n'), ((1964, 2097), 'subprocess.Popen', 'subprocess.Popen', (["['python', 'siis.py', 'real', '--import', \n '--filename=%s/full-%s-%s-%s.siis' % (src_path, BROKER, market, tf)]"], {}), "(['python', 'siis.py', 'real', '--import', \n '--filename=%s/full-%s-%s-%s.siis' % (src_path, BROKER, market, tf)])\n", (1980, 2097), False, 'import subprocess\n')] |
from flask import Blueprint, render_template, redirect, url_for,request, flash
from . import db
from .models import User
from flask_login import login_user, logout_user, login_required
auth = Blueprint("auth", __name__)
@auth.route("/login", methods=['GET', 'POST'])
def login():
email = request.form.get("email")
password = request.form.get("password")
return render_template("login.html")
@auth.route("/sign-up", methods=['GET', 'POST'])
def signup():
if request.method == 'POST':
username = request.form.get("username")
email = request.form.get("email")
password1 = request.form.get("<PASSWORD>")
password2 = request.form.get("<PASSWORD>")
email_exist = User.query.filter_by(email=email).first()
username_exists = User.query.filter_by(username=username).first()
if email_exist:
"""Flash a msg on the screen"""
flash('Email is already in use.', category='error')
elif username_exists:
flash('Username is already in use', category='error')
elif password1 != password2:
flash('Password don\t match', category='error')
elif len(username) < 3:
flash('Username too short!', category='error')
elif len(password1) < 6:
flash('Password too short!')
elif len(email) < 5:
flash('Email too short!', category='error')
else:
new_user = User(email='email', username='username', password = '<PASSWORD>')
db.session.add(new_user)
db.session.commit()
flash('User created!')
return redirect(url_for('views.home'))
return render_template("signup.html")
@auth.route("/logout")
def logout():
return redirect(url_for("views.home"))
| [
"flask.render_template",
"flask.flash",
"flask.url_for",
"flask.request.form.get",
"flask.Blueprint"
] | [((192, 219), 'flask.Blueprint', 'Blueprint', (['"""auth"""', '__name__'], {}), "('auth', __name__)\n", (201, 219), False, 'from flask import Blueprint, render_template, redirect, url_for, request, flash\n'), ((294, 319), 'flask.request.form.get', 'request.form.get', (['"""email"""'], {}), "('email')\n", (310, 319), False, 'from flask import Blueprint, render_template, redirect, url_for, request, flash\n'), ((335, 363), 'flask.request.form.get', 'request.form.get', (['"""password"""'], {}), "('password')\n", (351, 363), False, 'from flask import Blueprint, render_template, redirect, url_for, request, flash\n'), ((375, 404), 'flask.render_template', 'render_template', (['"""login.html"""'], {}), "('login.html')\n", (390, 404), False, 'from flask import Blueprint, render_template, redirect, url_for, request, flash\n'), ((1678, 1708), 'flask.render_template', 'render_template', (['"""signup.html"""'], {}), "('signup.html')\n", (1693, 1708), False, 'from flask import Blueprint, render_template, redirect, url_for, request, flash\n'), ((521, 549), 'flask.request.form.get', 'request.form.get', (['"""username"""'], {}), "('username')\n", (537, 549), False, 'from flask import Blueprint, render_template, redirect, url_for, request, flash\n'), ((566, 591), 'flask.request.form.get', 'request.form.get', (['"""email"""'], {}), "('email')\n", (582, 591), False, 'from flask import Blueprint, render_template, redirect, url_for, request, flash\n'), ((612, 642), 'flask.request.form.get', 'request.form.get', (['"""<PASSWORD>"""'], {}), "('<PASSWORD>')\n", (628, 642), False, 'from flask import Blueprint, render_template, redirect, url_for, request, flash\n'), ((663, 693), 'flask.request.form.get', 'request.form.get', (['"""<PASSWORD>"""'], {}), "('<PASSWORD>')\n", (679, 693), False, 'from flask import Blueprint, render_template, redirect, url_for, request, flash\n'), ((1767, 1788), 'flask.url_for', 'url_for', (['"""views.home"""'], {}), "('views.home')\n", (1774, 1788), False, 'from flask import Blueprint, render_template, redirect, url_for, request, flash\n'), ((913, 964), 'flask.flash', 'flash', (['"""Email is already in use."""'], {'category': '"""error"""'}), "('Email is already in use.', category='error')\n", (918, 964), False, 'from flask import Blueprint, render_template, redirect, url_for, request, flash\n'), ((1007, 1060), 'flask.flash', 'flash', (['"""Username is already in use"""'], {'category': '"""error"""'}), "('Username is already in use', category='error')\n", (1012, 1060), False, 'from flask import Blueprint, render_template, redirect, url_for, request, flash\n'), ((1110, 1157), 'flask.flash', 'flash', (['"""Password don\t match"""'], {'category': '"""error"""'}), "('Password don\\t match', category='error')\n", (1115, 1157), False, 'from flask import Blueprint, render_template, redirect, url_for, request, flash\n'), ((1202, 1248), 'flask.flash', 'flash', (['"""Username too short!"""'], {'category': '"""error"""'}), "('Username too short!', category='error')\n", (1207, 1248), False, 'from flask import Blueprint, render_template, redirect, url_for, request, flash\n'), ((1294, 1322), 'flask.flash', 'flash', (['"""Password too short!"""'], {}), "('Password too short!')\n", (1299, 1322), False, 'from flask import Blueprint, render_template, redirect, url_for, request, flash\n'), ((1364, 1407), 'flask.flash', 'flash', (['"""Email too short!"""'], {'category': '"""error"""'}), "('Email too short!', category='error')\n", (1369, 1407), False, 'from flask import Blueprint, render_template, redirect, url_for, request, flash\n'), ((1592, 1614), 'flask.flash', 'flash', (['"""User created!"""'], {}), "('User created!')\n", (1597, 1614), False, 'from flask import Blueprint, render_template, redirect, url_for, request, flash\n'), ((1643, 1664), 'flask.url_for', 'url_for', (['"""views.home"""'], {}), "('views.home')\n", (1650, 1664), False, 'from flask import Blueprint, render_template, redirect, url_for, request, flash\n')] |
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from os import path
def plot(args, conn):
df = pd.read_sql_query("select * from SHOT;", conn)
plt.hist(df["LENGTH"], bins=args.bins)
plt.xlabel("Shot length / m")
plt.ylabel("Count")
if args.log:
plt.yscale("log")
if args.save:
plt.savefig(path.join(args.save, "shot_lengths_histogram." + args.format))
else:
plt.show()
| [
"pandas.read_sql_query",
"matplotlib.pyplot.hist",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"os.path.join",
"matplotlib.pyplot.yscale",
"matplotlib.pyplot.show"
] | [((124, 170), 'pandas.read_sql_query', 'pd.read_sql_query', (['"""select * from SHOT;"""', 'conn'], {}), "('select * from SHOT;', conn)\n", (141, 170), True, 'import pandas as pd\n'), ((176, 214), 'matplotlib.pyplot.hist', 'plt.hist', (["df['LENGTH']"], {'bins': 'args.bins'}), "(df['LENGTH'], bins=args.bins)\n", (184, 214), True, 'import matplotlib.pyplot as plt\n'), ((219, 248), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Shot length / m"""'], {}), "('Shot length / m')\n", (229, 248), True, 'import matplotlib.pyplot as plt\n'), ((253, 272), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Count"""'], {}), "('Count')\n", (263, 272), True, 'import matplotlib.pyplot as plt\n'), ((298, 315), 'matplotlib.pyplot.yscale', 'plt.yscale', (['"""log"""'], {}), "('log')\n", (308, 315), True, 'import matplotlib.pyplot as plt\n'), ((436, 446), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (444, 446), True, 'import matplotlib.pyplot as plt\n'), ((355, 416), 'os.path.join', 'path.join', (['args.save', "('shot_lengths_histogram.' + args.format)"], {}), "(args.save, 'shot_lengths_histogram.' + args.format)\n", (364, 416), False, 'from os import path\n')] |
from botorch.models.gpytorch import GPyTorchModel
from gpytorch.distributions import MultivariateNormal
from gpytorch.kernels import MaternKernel, ScaleKernel
from gpytorch.likelihoods import GaussianLikelihood
from gpytorch.means import ConstantMean
from gpytorch.models import ExactGP
from greattunes.transformed_kernel_models.transformation import GP_kernel_transform
class SimpleCustomMaternGP(ExactGP, GPyTorchModel):
"""
Simple customer Gaussian Process model with Matérn kernel and Gaussian likelihood model
"""
_num_outputs = 1 # to inform GPyTorchModel API
def __init__(self, train_X, train_Y, nu, GP_kernel_mapping_covar_identification):
# squeeze output dim before passing train_Y to ExactGP
super().__init__(train_X, train_Y.squeeze(-1), GaussianLikelihood())
self.GP_kernel_mapping_covar_identification = (
GP_kernel_mapping_covar_identification
)
self.mean_module = ConstantMean()
if nu is not None:
self.covar_module = ScaleKernel(
base_kernel=MaternKernel(
nu=nu, ard_num_dims=train_X.shape[-1]
), # set parameter nu in Matern kernel
)
else:
self.covar_module = ScaleKernel(
base_kernel=MaternKernel(ard_num_dims=train_X.shape[-1]),
# parameter nu in Matern kernel defauls to 2.5
)
self.to(train_X) # make sure we're on the right device/dtype
def forward(self, x):
mean_x = self.mean_module(x)
covar_x = self.covar_module(
GP_kernel_transform(x, self.GP_kernel_mapping_covar_identification)
)
# covar_x = self.covar_module(x)
return MultivariateNormal(mean_x, covar_x)
| [
"gpytorch.kernels.MaternKernel",
"gpytorch.means.ConstantMean",
"greattunes.transformed_kernel_models.transformation.GP_kernel_transform",
"gpytorch.distributions.MultivariateNormal",
"gpytorch.likelihoods.GaussianLikelihood"
] | [((958, 972), 'gpytorch.means.ConstantMean', 'ConstantMean', ([], {}), '()\n', (970, 972), False, 'from gpytorch.means import ConstantMean\n'), ((1743, 1778), 'gpytorch.distributions.MultivariateNormal', 'MultivariateNormal', (['mean_x', 'covar_x'], {}), '(mean_x, covar_x)\n', (1761, 1778), False, 'from gpytorch.distributions import MultivariateNormal\n'), ((792, 812), 'gpytorch.likelihoods.GaussianLikelihood', 'GaussianLikelihood', ([], {}), '()\n', (810, 812), False, 'from gpytorch.likelihoods import GaussianLikelihood\n'), ((1609, 1676), 'greattunes.transformed_kernel_models.transformation.GP_kernel_transform', 'GP_kernel_transform', (['x', 'self.GP_kernel_mapping_covar_identification'], {}), '(x, self.GP_kernel_mapping_covar_identification)\n', (1628, 1676), False, 'from greattunes.transformed_kernel_models.transformation import GP_kernel_transform\n'), ((1074, 1125), 'gpytorch.kernels.MaternKernel', 'MaternKernel', ([], {'nu': 'nu', 'ard_num_dims': 'train_X.shape[-1]'}), '(nu=nu, ard_num_dims=train_X.shape[-1])\n', (1086, 1125), False, 'from gpytorch.kernels import MaternKernel, ScaleKernel\n'), ((1303, 1347), 'gpytorch.kernels.MaternKernel', 'MaternKernel', ([], {'ard_num_dims': 'train_X.shape[-1]'}), '(ard_num_dims=train_X.shape[-1])\n', (1315, 1347), False, 'from gpytorch.kernels import MaternKernel, ScaleKernel\n')] |
# ---
# Module to read grenoble.inp agenda (edt) and filter keep only the groups I belong to
# Configuration is done into configPhelma.py
#
# example of configPhelma.py (not commited for security raison)
#
# ICAL_FEED = 'https://loggin:passwd@edt.grenoble-inp.fr/directCal/2020-2021/etudiant/phelma?resources=20868,5574'
#
# This config keep G4 for English, G7 for Math & Physique and remove all Educ. Phy
# SET_TO_REMOVE=(("English","PMKANG6_2020_S5_TD_G4"),("TC Math - TI PET-PMP S5","PET_DE_PMP_CDE_G7\n"),\
# ("TC Physique PET-PMP S5","PET_DE_PMP_CDE_G7\n"),("Educ. Phy. et Sportive PET-PMP S5","Impossibletomatch"))
#
# ----
from ics import Calendar, Event
import requests
# Switch following import if used standalone
# import configPhelma as cfg
from phelma_calendar import configPhelma as cfg
def get_phelma_calendar():
# Read grenoble-inp agenda
c = Calendar()
url = cfg.ICAL_FEED
c = Calendar(requests.get(url).text)
# create the list of events to be filtered out from the one read previously
eventOut=c.events.copy()
# for each event, if it matches the filter out rule, remove them from the event list
for EventEdt in c.events:
for (f1,f2) in cfg.SET_TO_REMOVE:
if(EventEdt.name.find(f1)!=-1): # If it matches a candidate topic
if(EventEdt.description.find(f2)==-1): # Check if the group in the description <> from my group
eventOut.remove(EventEdt) # remove this event
#print("Remove "+EventEdt.name + " Desc: " + EventEdt.description)
# print(str(eventOut))
return eventOut
# ----
# Main : used to test the module and produce and .ics file for a manual import into google agenda
# To fully automate, this module is used with a modified version of ical_to_gcal_sync (start-> begin, summary-> name, call this module instead of the ulr)
# Do not forget to set the proper values into config.py (one for this module, one for ical_to_gcal_sync)
# ----
calendarOut = open("./ADECalFiltered.ics", "w")
calendarOut.write("BEGIN:VCALENDAR\nMETHOD:REQUEST\nPRODID:-//ADE/version 6.0\nVERSION:2.0\nCALSCALE:GREGORIAN\n")
eventOut=get_phelma_calendar()
for EventEdt in eventOut:
calendarOut.write(str(EventEdt)+'\n')
calendarOut.write("END:VCALENDAR\n")
calendarOut.close() | [
"ics.Calendar",
"requests.get"
] | [((872, 882), 'ics.Calendar', 'Calendar', ([], {}), '()\n', (880, 882), False, 'from ics import Calendar, Event\n'), ((924, 941), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (936, 941), False, 'import requests\n')] |
# Python Standard Library Imports
import re
from utils import (
InputConfig,
ingest,
)
INPUT_FILE = '01.in'
EXPECTED_ANSWERS = (271, 153, )
# INPUT_FILE = '01.test.in'
# EXPECTED_ANSWERS = (8, 4, )
def main():
solution = Solution()
answers = (solution.solve1(), solution.solve2(), )
print(answers)
assert(answers == EXPECTED_ANSWERS)
class Solution:
def __init__(self):
data = ingest(INPUT_FILE, InputConfig(as_oneline=True))
self.directions = [Direction(direction.strip()) for direction in data.split(',')]
def solve1(self):
grid = Grid()
for direction in self.directions:
grid.move(direction)
answer = grid.manhattan_distance
return answer
def solve2(self):
answer = None
grid = Grid()
for direction in self.directions:
was_reached = grid.move(direction, crawl_mode=True)
if was_reached:
answer = grid.manhattan_distance
break
return answer
class Grid:
def __init__(self):
self.x = 0
self.y = 0
self.orientation_x = 0
self.orientation_y = 1
self.visited = {
'0.0': True,
}
def move(self, direction, crawl_mode=False):
was_reached = None
self.turn(direction.turn)
if crawl_mode:
was_reached = self.crawl(direction.distance)
else:
self.go(direction.distance)
return was_reached
def turn(self, turn):
if turn == 'L':
if self.orientation_x != 0:
self.orientation_y = self.orientation_x
self.orientation_x = 0
elif self.orientation_y != 0:
self.orientation_x = self.orientation_y * -1
self.orientation_y = 0
elif turn == 'R':
if self.orientation_x != 0:
self.orientation_y = self.orientation_x * -1
self.orientation_x = 0
elif self.orientation_y != 0:
self.orientation_x = self.orientation_y
self.orientation_y = 0
def go(self, distance):
self.x += distance * self.orientation_x
self.y += distance * self.orientation_y
def crawl(self, distance):
reached = False
for i in range(distance):
self.x += self.orientation_x
self.y += self.orientation_y
xy = '{}.{}'.format(self.x, self.y)
if xy in self.visited:
reached = True
break
else:
self.visited[xy] = True
return reached
@property
def manhattan_distance(self):
return abs(self.x) + abs(self.y)
class Direction:
REGEXP = re.compile(r'^(?P<turn>R|L)(?P<distance>\d+)$')
def __init__(self, direction):
m = self.REGEXP.match(direction)
if m:
self.turn, self.distance = (
m.group('turn'),
int(m.group('distance'))
)
else:
raise Exception('Bad direction: %s' % direction)
if __name__ == '__main__':
main()
| [
"utils.InputConfig",
"re.compile"
] | [((2780, 2827), 're.compile', 're.compile', (['"""^(?P<turn>R|L)(?P<distance>\\\\d+)$"""'], {}), "('^(?P<turn>R|L)(?P<distance>\\\\d+)$')\n", (2790, 2827), False, 'import re\n'), ((440, 468), 'utils.InputConfig', 'InputConfig', ([], {'as_oneline': '(True)'}), '(as_oneline=True)\n', (451, 468), False, 'from utils import InputConfig, ingest\n')] |
# -*- coding: utf-8 -*-
#!/usr/bin/env python3
"""
Servidor de Conexao
<NAME>
UFSJ
"""
import sys
import socket
import _thread as thread
import getpass
import xmlrpc.client
import pickle
from Usuario import Usuario
import ssl
import socket
class ServidorConexao(object):
"""docstring for ServidorConexao."""
def __init__(self):
self.porta=int(sys.argv[1])
self.ip=""
self.tcp = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
#carrega a chave do servidor..
self.context.load_cert_chain(certfile="chave_servidor/server.pem", keyfile="chave_servidor/server.key")
self.tcp.bind((self.ip,self.porta))
self.tcp.listen(1)
#chamada RPC para o servidor de senha.
# Atualizar ip servidor de senha.
#----------------------------------------------------------
self.ip_servidor_senha="http://127.0.0.1:8000"
#ip_servidor_senha="http://172.18.1.34:8000"
#ip_servidor_senha_local="http://localhost:8000/"
#-----------------------------------------------------------
try:
self.senha_auth=xmlrpc.client.ServerProxy(self.ip_servidor_senha)
except Exception as e:
print("erro ao estabelecer comunicação com o servidor de senha.")
#chamada RPC para o servidor de arquivos.
#-------------------------------------------------------------------------
# Atualiza ip do servidor de arquivos.
#ip_servidor_arquivo="http://172.18.1.35:8001"
self.ip_servidor_arquivo="http://127.0.0.1:8001"
#------------------------------------------------------------------------
self.arquivos=xmlrpc.client.ServerProxy(self.ip_servidor_arquivo)
self.usuarios_logados=[]
def senha_on(self):
print(self.senha_auth.on())
def envia_resposta(self,conexao,dados):
"""envia uma resposta para o cliente"""
conexao.sendall(dados)
def rpc_senha(self,usuario,senha):
"""Valida a senha e usuario"""
try:
retorno,permissao=self.senha_auth.login(usuario,senha)
print(retorno)
if(retorno):
#devolve a resposta de login sucesso
user=self.sucess_login(usuario,permissao)
return True,user
else:
return False,False
except xmlrpc.client.Fault as err:
print("Ocorreu um erro")
print("Código de erro %d" % err.faultCode)
print("Fault string: %s" % err.faultString)
def sucess_login(self,login,permissao):
"""adiciona um novo usuário na lista de usuários logados"""
#instancia um novo usuario
user=Usuario(login,permissao)
#adiciona na lista de usuarios
self.usuarios_logados.append(user)
return user
def comando_invalido(self,conexao):
"""retorna mensagem de comando invalido"""
msg="Comando inválido ou faltando parametros."
self.envia_resposta(conexao,pickle.dumps(msg))
def logoff(self,login):
"""realiza o logoff"""
for i in range(0,len(self.usuarios_logados)):
if(login==self.usuarios_logados[i].getLogin()):
index=i
self.usuarios_logados.pop(index)
def processa_comando(self,conexao,comando,user):
"""processa os comandos"""
if(comando['cmd']=="quit"):
print("Logout")
instrucao={}
instrucao['quit']="logout"
instrucao=pickle.dumps(instrucao)
print(instrucao)
conexao.sendall(instrucao)
#logoff(login)
conexao.close()
thread.exit()
elif(comando['cmd']=="newuser"):
instrucao={}
if(comando['user'] and user.getPermissao()):
usuario=comando['user']
senha=comando['senha']
permissao=comando['permissao']
estado=self.senha_auth.novo_usuario(usuario,senha,permissao)
if(estado):
#cria uma nova home de Usuario
estado=self.arquivos.novahome(usuario)
instrucao['newuser']=estado
instrucao['root']=user.getPermissao()
instrucao=pickle.dumps(instrucao)
self.envia_resposta(conexao,instrucao)
else:
instrucao['newuser']=False
instrucao['root']=user.getPermissao()
instrucao=pickle.dumps(instrucao)
self.envia_resposta(conexao,instrucao)
elif(comando['cmd']=="ls"):
print("Retornando LS")
instrucao={}
lista=self.arquivos.ls(user.getHome())
instrucao['ls']=lista
instrucao=pickle.dumps(instrucao)
self.envia_resposta(conexao,instrucao)
elif(comando['cmd'] =="cd"):
#print("Comando cd")
pasta=comando['caminho']
print("CMD",comando,"Instrucao",comando['caminho'])
instrucao={}
path=user.getHome()+pasta
print("Caminho",path)
lista,new_path=self.arquivos.cd(path)
#atualiza a rota da home..
if(lista is True):
user.setHome(new_path)
print("Retorno cd",lista)
instrucao['cd']=lista
instrucao['home']=user.getHome()
print("Instrucao CD",instrucao)
instrucao=pickle.dumps(instrucao)
self.envia_resposta(conexao,instrucao)
elif(comando['cmd']=="mkdir"):
if(comando['caminho']):
instrucao={}
estado=self.arquivos.mkdir(user.getHome()+"/"+comando['caminho'])
instrucao['mkdir']=estado
instrucao=pickle.dumps(instrucao)
self.envia_resposta(conexao,instrucao)
elif(comando['cmd']=="delete"):
if(comando['caminho']):
instrucao={}
#print(user.getHome())
estado=self.arquivos.delete(user.getHome()+"/"+comando['caminho'])
instrucao['delete']=estado
instrucao=pickle.dumps(instrucao)
self.envia_resposta(conexao,instrucao)
elif(comando['cmd']=="get"):
#print("Comando cd")
print("Comando Get")
pasta=comando['caminho']
instrucao={}
path=user.getHome()+pasta
lista,file=self.arquivos.get(path)
#atualiza a rota da home..
if(lista is True):
instrucao['get']=True
instrucao['file']=file
instrucao['nome']=pasta
else:
instrucao['get']=False
instrucao=pickle.dumps(instrucao)
self.envia_resposta(conexao,instrucao)
elif(comando['cmd']=="put"):
print("Comando put")
if(comando['file']):
nome_arquivo=comando['caminho']
instrucao={}
path=user.getHome()+nome_arquivo
lista,file=self.arquivos.put(path,comando['file'])
#atualiza a rota da home..
if(lista is True):
instrucao['put']=True
else:
instrucao['put']=False
instrucao=pickle.dumps(instrucao)
self.envia_resposta(conexao,instrucao)
else:
self.comando_invalido(conexao)
elif(comando['cmd']=="help"):
print("Comando HELP")
instrucao={}
retorno=self.arquivos.help()
instrucao['help']=retorno
print(instrucao)
instrucao=pickle.dumps(retorno)
self.envia_resposta(conexao,instrucao)
elif(comando['cmd']=="rmdir"):
if(comando['caminho']):
instrucao={}
estado=self.arquivos.rmdir(user.getHome()+"/"+comando['caminho'])
instrucao['rmdir']=estado
instrucao=pickle.dumps(instrucao)
self.envia_resposta(conexao,instrucao)
else:
self.comando_invalido(conexao)
else:
print('sending data back to the client')
self.envia_resposta(conexao,pickle.dumps(comando))
def conectado(self,connection, cliente):
"""verificar o disparo e encerramento de threads"""
msg='Bem vindo ao Zeus FTP v 1.0\nConexão segura estabelecida!\nDigite help para ver os comandos válidos.'
msg=pickle.dumps(msg)
connection.sendall(msg)
#dicionario do estado de resposta
estado_login={}
estado_login['estado']=False
data=connection.recv(1024)
dados_login=pickle.loads(data)
retorno,user =self.rpc_senha(dados_login['usuario'],dados_login['senha'])
if(retorno):
estado_login['estado']=True
estado_login['home']=user.getHome()
#sucesso ao realizar o login
connection.sendall(pickle.dumps(estado_login))
else:
#erro ao efetuar o login
connection.sendall(pickle.dumps(estado_login))
if(estado_login['estado']== True):
while True:
# Wait for a connection
try:
print('connection from', cliente)
# Receive the data in small chunks and retransmit it
rec=[]
recebido=0
data=""
while True:
data=""
#buffer de recebimento dos dados.
data=connection.recv(1024)
rec.append(data)
recebido=len(data)-1024
if(recebido<0):
break
#combina os dados
data=pickle.loads(b"".join(rec))
self.processa_comando(connection,data,user)
except Exception as e:
# Clean up the connection
print("e",e)
connection.close()
thread.exit()
def iniciar_servidor(self):
ip_local="http://127.0.0.1"
print("Servidor Iniciado.. no IP ",ip_local," Porta:",self.porta)
print("IP Servidor de senha ",self.ip_servidor_senha)
print("IP servidor de Arquivos ",self.ip_servidor_arquivo)
while True:
con, cliente = self.tcp.accept()
try:
con_secure = self.context.wrap_socket(con, server_side=True)
thread.start_new_thread(self.conectado, tuple([con_secure, cliente]))
except Exception as e:
print("Tentativa de acesso invalida..")
servidor= ServidorConexao()
# try:
# print(servidor.senha_on())
# except Exception as e:
# print("erro",e)
servidor.iniciar_servidor()
| [
"socket.socket",
"_thread.exit",
"pickle.dumps",
"ssl.create_default_context",
"pickle.loads",
"Usuario.Usuario"
] | [((410, 459), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (423, 459), False, 'import socket\n'), ((483, 534), 'ssl.create_default_context', 'ssl.create_default_context', (['ssl.Purpose.CLIENT_AUTH'], {}), '(ssl.Purpose.CLIENT_AUTH)\n', (509, 534), False, 'import ssl\n'), ((2777, 2802), 'Usuario.Usuario', 'Usuario', (['login', 'permissao'], {}), '(login, permissao)\n', (2784, 2802), False, 'from Usuario import Usuario\n'), ((8752, 8769), 'pickle.dumps', 'pickle.dumps', (['msg'], {}), '(msg)\n', (8764, 8769), False, 'import pickle\n'), ((8960, 8978), 'pickle.loads', 'pickle.loads', (['data'], {}), '(data)\n', (8972, 8978), False, 'import pickle\n'), ((3087, 3104), 'pickle.dumps', 'pickle.dumps', (['msg'], {}), '(msg)\n', (3099, 3104), False, 'import pickle\n'), ((3585, 3608), 'pickle.dumps', 'pickle.dumps', (['instrucao'], {}), '(instrucao)\n', (3597, 3608), False, 'import pickle\n'), ((3744, 3757), '_thread.exit', 'thread.exit', ([], {}), '()\n', (3755, 3757), True, 'import _thread as thread\n'), ((9242, 9268), 'pickle.dumps', 'pickle.dumps', (['estado_login'], {}), '(estado_login)\n', (9254, 9268), False, 'import pickle\n'), ((9352, 9378), 'pickle.dumps', 'pickle.dumps', (['estado_login'], {}), '(estado_login)\n', (9364, 9378), False, 'import pickle\n'), ((4348, 4371), 'pickle.dumps', 'pickle.dumps', (['instrucao'], {}), '(instrucao)\n', (4360, 4371), False, 'import pickle\n'), ((4568, 4591), 'pickle.dumps', 'pickle.dumps', (['instrucao'], {}), '(instrucao)\n', (4580, 4591), False, 'import pickle\n'), ((4852, 4875), 'pickle.dumps', 'pickle.dumps', (['instrucao'], {}), '(instrucao)\n', (4864, 4875), False, 'import pickle\n'), ((5538, 5561), 'pickle.dumps', 'pickle.dumps', (['instrucao'], {}), '(instrucao)\n', (5550, 5561), False, 'import pickle\n'), ((10390, 10403), '_thread.exit', 'thread.exit', ([], {}), '()\n', (10401, 10403), True, 'import _thread as thread\n'), ((5869, 5892), 'pickle.dumps', 'pickle.dumps', (['instrucao'], {}), '(instrucao)\n', (5881, 5892), False, 'import pickle\n'), ((6245, 6268), 'pickle.dumps', 'pickle.dumps', (['instrucao'], {}), '(instrucao)\n', (6257, 6268), False, 'import pickle\n'), ((6896, 6919), 'pickle.dumps', 'pickle.dumps', (['instrucao'], {}), '(instrucao)\n', (6908, 6919), False, 'import pickle\n'), ((7533, 7556), 'pickle.dumps', 'pickle.dumps', (['instrucao'], {}), '(instrucao)\n', (7545, 7556), False, 'import pickle\n'), ((7916, 7937), 'pickle.dumps', 'pickle.dumps', (['retorno'], {}), '(retorno)\n', (7928, 7937), False, 'import pickle\n'), ((8245, 8268), 'pickle.dumps', 'pickle.dumps', (['instrucao'], {}), '(instrucao)\n', (8257, 8268), False, 'import pickle\n'), ((8496, 8517), 'pickle.dumps', 'pickle.dumps', (['comando'], {}), '(comando)\n', (8508, 8517), False, 'import pickle\n')] |
import unittest
from core.picture import Picture
from PIL import Image
from shutil import copyfile, rmtree
import os
class PictureTestCase(unittest.TestCase):
def setUp(self):
self.current_dir = os.path.dirname(os.path.abspath(__file__))
self.test_dir = "{}{}test_dir".format(self.current_dir, os.path.sep)
self.base_picture = "python_image_2000_2000.png"
os.mkdir(self.test_dir)
def tearDown(self):
rmtree(self.test_dir)
def test_nonexistent_file(self):
fake_file = "{}{}fake_file".format(self.current_dir, os.path.sep)
p = Picture(fake_file)
response = p.resize_picture()
self.assertEqual(response, {"It will not change": "Not a file"})
def test_image_2mp(self):
# Testing picture with 2000px X 2000px
copyfile("{}{}{}".format(self.current_dir, os.path.sep, self.base_picture),
"{}{}{}".format(self.test_dir, os.path.sep, self.base_picture))
im = Image.open("{}{}{}".format(self.test_dir, os.path.sep, self.base_picture))
width_before, height_before = im.size
width_after, height_after = None, None
im.close()
p = Picture("{}{}{}".format(self.test_dir, os.path.sep, self.base_picture))
response = p.resize_picture()
im = Image.open("{}{}{}_resized{}".format(self.test_dir, os.path.sep,
os.path.splitext(self.base_picture)[0], os.path.splitext(self.base_picture)[1]))
width_after, height_after = im.size
im.close()
'''
Selection rule: the height will have a greater difference, using the "new_height"
method of the Dimensions class. New image size: 1920px X 1920px (square image).
Calculation: (2000/2000) * 1920
'''
self.assertEqual(width_before, int(2000))
self.assertEqual(height_before, int(2000))
self.assertEqual(width_after, int(1920))
self.assertEqual(height_after, int(1920))
self.assertEqual(response, {"Changed file!": "Archive was in favorable conditions"}) | [
"os.path.splitext",
"core.picture.Picture",
"os.mkdir",
"shutil.rmtree",
"os.path.abspath"
] | [((394, 417), 'os.mkdir', 'os.mkdir', (['self.test_dir'], {}), '(self.test_dir)\n', (402, 417), False, 'import os\n'), ((451, 472), 'shutil.rmtree', 'rmtree', (['self.test_dir'], {}), '(self.test_dir)\n', (457, 472), False, 'from shutil import copyfile, rmtree\n'), ((597, 615), 'core.picture.Picture', 'Picture', (['fake_file'], {}), '(fake_file)\n', (604, 615), False, 'from core.picture import Picture\n'), ((225, 250), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (240, 250), False, 'import os\n'), ((1379, 1414), 'os.path.splitext', 'os.path.splitext', (['self.base_picture'], {}), '(self.base_picture)\n', (1395, 1414), False, 'import os\n'), ((1419, 1454), 'os.path.splitext', 'os.path.splitext', (['self.base_picture'], {}), '(self.base_picture)\n', (1435, 1454), False, 'import os\n')] |
import sys
import numpy as np
from skimage.measure import label
def getSegType(mid):
m_type = np.uint64
if mid<2**8:
m_type = np.uint8
elif mid<2**16:
m_type = np.uint16
elif mid<2**32:
m_type = np.uint32
return m_type
def seg2Count(seg,do_sort=True,rm_zero=False):
sm = seg.max()
if sm==0:
return None,None
if sm>1:
segIds,segCounts = np.unique(seg,return_counts=True)
if rm_zero:
segCounts = segCounts[segIds>0]
segIds = segIds[segIds>0]
if do_sort:
sort_id = np.argsort(-segCounts)
segIds=segIds[sort_id]
segCounts=segCounts[sort_id]
else:
segIds=np.array([1])
segCounts=np.array([np.count_nonzero(seg)])
return segIds, segCounts
def removeSeg(seg, did, invert=False):
sm = seg.max()
did = did[did<=sm]
if invert:
rl = np.zeros(1+sm).astype(seg.dtype)
rl[did] = did
else:
rl = np.arange(1+sm).astype(seg.dtype)
rl[did] = 0
return rl[seg]
def remove_small(seg, thres=100,bid=None):
if thres>0:
if bid is None:
uid, uc = np.unique(seg, return_counts=True)
bid = uid[uc<thres]
if len(bid)>0:
sz = seg.shape
seg = removeSeg(seg,bid)
return seg
def relabel(seg, uid=None,nid=None,do_sort=False,do_type=False):
if seg is None or seg.max()==0:
return seg
if do_sort:
uid,_ = seg2Count(seg,do_sort=True)
else:
# get the unique labels
if uid is None:
uid = np.unique(seg)
else:
uid = np.array(uid)
uid = uid[uid>0] # leave 0 as 0, the background seg-id
# get the maximum label for the segment
mid = int(max(uid)) + 1
# create an array from original segment id to reduced id
# format opt
m_type = seg.dtype
if do_type:
mid2 = len(uid) if nid is None else max(nid)+1
m_type = getSegType(mid2)
mapping = np.zeros(mid, dtype=m_type)
if nid is None:
mapping[uid] = np.arange(1,1+len(uid), dtype=m_type)
else:
mapping[uid] = nid.astype(m_type)
# if uid is given, need to remove bigger seg id
seg[seg>=mid] = 0
return mapping[seg]
def get_bb(seg, do_count=False):
dim = len(seg.shape)
a=np.where(seg>0)
if len(a[0])==0:
return [-1]*dim*2
out=[]
for i in range(dim):
out+=[a[i].min(), a[i].max()]
if do_count:
out+=[len(a[0])]
return out
def label_chunk(get_chunk, numC, rr=1, rm_sz=0, m_type=np.uint64):
# label chunks or slices
mid = 0
seg = [None]*numC
for zi in range(numC):
print('%d/%d [%d], '%(zi,numC,mid)),
sys.stdout.flush()
tmp = get_chunk(zi)>0
sz = tmp.shape
numD = len(sz)
if numD==2:
tmp = tmp[np.newaxis]
seg_c = np.zeros(sz).astype(m_type)
bb=get_bb(tmp)
print(bb)
seg_c[bb[0]:bb[1]+1,bb[2]:bb[3]+1,bb[4]:bb[5]+1] = \
label(tmp[bb[0]:bb[1]+1,bb[2]:bb[3]+1,bb[4]:bb[5]+1]).astype(m_type)
if rm_sz>0:
# preserve continuous id
seg_c = remove_small(seg_c, rm_sz)
seg_c = relabel(seg_c).astype(m_type)
if zi == 0: # first seg, relabel seg index
print('_%d_'%0)
slice_b = seg_c[-1]
seg[zi] = seg_c[:,::rr,::rr] # save a low-res one
mid += seg[zi].max()
rlA = np.arange(mid+1,dtype=m_type)
else: # link to previous slice
slice_t = seg_c[0]
slices = label(np.stack([slice_b>0, slice_t>0],axis=0)).astype(m_type)
# create mapping for seg cur
lc = np.unique(seg_c);lc=lc[lc>0]
rl_c = np.zeros(int(lc.max())+1, dtype=int)
# merge curr seg
# for 1 pre seg id -> slices id -> cur seg ids
l0_p = np.unique(slice_b*(slices[0]>0))
bbs = get_bb_label2d_v2(slice_b,uid=l0_p)[:,1:]
#bbs2 = get_bb_label2d_v2(slices[1])
print('_%d_'%len(l0_p))
for i,l in enumerate(l0_p):
bb = bbs[i]
sid = np.unique(slices[0,bb[0]:bb[1]+1,bb[2]:bb[3]+1]*(slice_b[bb[0]:bb[1]+1,bb[2]:bb[3]+1]==l))
sid = sid[sid>0]
# multiple ids
if len(sid)==1:
#bb = bbs2[bbs2[:,0]==sid,1:]
#cid = np.unique(slice_t[bb[0]:bb[1]+1,bb[2]:bb[3]+1]*(slices[1,bb[0]:bb[1]+1,bb[2]:bb[3]+1]==sid))
cid = np.unique(slice_t*(slices[1]==sid))
else:
cid = np.unique(slice_t*np.in1d(slices[1].reshape(-1),sid).reshape(sz[-2:]))
rl_c[cid[cid>0]] = l
# new id
new_num = np.where(rl_c==0)[0][1:] # except the first one
new_id = np.arange(mid+1,mid+1+len(new_num),dtype=m_type)
rl_c[new_num] = new_id
slice_b = rl_c[seg_c[-1]] # save a high-res
seg[zi] = rl_c[seg_c[:,::rr,::rr]]
mid += len(new_num)
# update global id
rlA = np.hstack([rlA,new_id])
# merge prev seg
# for 1 cur seg id -> slices id -> prev seg ids
l1_c = np.unique(slice_t*(slices[1]>0))
for l in l1_c:
sid = np.unique(slices[1]*(slice_t==l))
sid = sid[sid>0]
pid = np.unique(slice_b*np.in1d(slices[0].reshape(-1),sid).reshape(sz[-2:]))
pid = pid[pid>0]
# get all previous m-to-1 labels
pid_p = np.where(np.in1d(rlA,rlA[pid]))[0]
if len(pid_p)>1:
rlA[pid_p] = pid.max()
# memory reduction: each seg
m2_type = getSegType(seg[zi].max())
seg[zi] = seg[zi].astype(m2_type)
# memory reduction: final output
m2_type = getSegType(rlA.max())
rlA = rlA.astype(m2_type)
print('output type:',m2_type)
return rlA[np.vstack(seg)]
def get_bb_label3d_v2(seg,do_count=False, uid=None):
sz = seg.shape
assert len(sz)==3
if uid is None:
uid = np.unique(seg)
uid = uid[uid>0]
um = int(uid.max())
out = np.zeros((1+um,7+do_count),dtype=np.uint32)
out[:,0] = np.arange(out.shape[0])
out[:,1] = sz[0]
out[:,3] = sz[1]
out[:,5] = sz[2]
# for each slice
zids = np.where((seg>0).sum(axis=1).sum(axis=1)>0)[0]
for zid in zids:
sid = np.unique(seg[zid])
sid = sid[(sid>0)*(sid<=um)]
out[sid,1] = np.minimum(out[sid,1],zid)
out[sid,2] = np.maximum(out[sid,2],zid)
# for each row
rids = np.where((seg>0).sum(axis=0).sum(axis=1)>0)[0]
for rid in rids:
sid = np.unique(seg[:,rid])
sid = sid[(sid>0)*(sid<=um)]
out[sid,3] = np.minimum(out[sid,3],rid)
out[sid,4] = np.maximum(out[sid,4],rid)
# for each col
cids = np.where((seg>0).sum(axis=0).sum(axis=0)>0)[0]
for cid in cids:
sid = np.unique(seg[:,:,cid])
sid = sid[(sid>0)*(sid<=um)]
out[sid,5] = np.minimum(out[sid,5],cid)
out[sid,6] = np.maximum(out[sid,6],cid)
if do_count:
ui,uc = np.unique(seg,return_counts=True)
out[ui[ui<=um],-1]=uc[ui<=um]
return out[uid]
| [
"numpy.unique",
"numpy.minimum",
"numpy.hstack",
"numpy.where",
"numpy.in1d",
"numpy.argsort",
"numpy.array",
"numpy.zeros",
"numpy.count_nonzero",
"numpy.stack",
"numpy.vstack",
"skimage.measure.label",
"numpy.maximum",
"sys.stdout.flush",
"numpy.arange"
] | [((2022, 2049), 'numpy.zeros', 'np.zeros', (['mid'], {'dtype': 'm_type'}), '(mid, dtype=m_type)\n', (2030, 2049), True, 'import numpy as np\n'), ((2348, 2365), 'numpy.where', 'np.where', (['(seg > 0)'], {}), '(seg > 0)\n', (2356, 2365), True, 'import numpy as np\n'), ((6322, 6371), 'numpy.zeros', 'np.zeros', (['(1 + um, 7 + do_count)'], {'dtype': 'np.uint32'}), '((1 + um, 7 + do_count), dtype=np.uint32)\n', (6330, 6371), True, 'import numpy as np\n'), ((6381, 6404), 'numpy.arange', 'np.arange', (['out.shape[0]'], {}), '(out.shape[0])\n', (6390, 6404), True, 'import numpy as np\n'), ((410, 444), 'numpy.unique', 'np.unique', (['seg'], {'return_counts': '(True)'}), '(seg, return_counts=True)\n', (419, 444), True, 'import numpy as np\n'), ((712, 725), 'numpy.array', 'np.array', (['[1]'], {}), '([1])\n', (720, 725), True, 'import numpy as np\n'), ((2758, 2776), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (2774, 2776), False, 'import sys\n'), ((6101, 6115), 'numpy.vstack', 'np.vstack', (['seg'], {}), '(seg)\n', (6110, 6115), True, 'import numpy as np\n'), ((6248, 6262), 'numpy.unique', 'np.unique', (['seg'], {}), '(seg)\n', (6257, 6262), True, 'import numpy as np\n'), ((6583, 6602), 'numpy.unique', 'np.unique', (['seg[zid]'], {}), '(seg[zid])\n', (6592, 6602), True, 'import numpy as np\n'), ((6661, 6689), 'numpy.minimum', 'np.minimum', (['out[sid, 1]', 'zid'], {}), '(out[sid, 1], zid)\n', (6671, 6689), True, 'import numpy as np\n'), ((6709, 6737), 'numpy.maximum', 'np.maximum', (['out[sid, 2]', 'zid'], {}), '(out[sid, 2], zid)\n', (6719, 6737), True, 'import numpy as np\n'), ((6849, 6871), 'numpy.unique', 'np.unique', (['seg[:, rid]'], {}), '(seg[:, rid])\n', (6858, 6871), True, 'import numpy as np\n'), ((6929, 6957), 'numpy.minimum', 'np.minimum', (['out[sid, 3]', 'rid'], {}), '(out[sid, 3], rid)\n', (6939, 6957), True, 'import numpy as np\n'), ((6977, 7005), 'numpy.maximum', 'np.maximum', (['out[sid, 4]', 'rid'], {}), '(out[sid, 4], rid)\n', (6987, 7005), True, 'import numpy as np\n'), ((7121, 7146), 'numpy.unique', 'np.unique', (['seg[:, :, cid]'], {}), '(seg[:, :, cid])\n', (7130, 7146), True, 'import numpy as np\n'), ((7203, 7231), 'numpy.minimum', 'np.minimum', (['out[sid, 5]', 'cid'], {}), '(out[sid, 5], cid)\n', (7213, 7231), True, 'import numpy as np\n'), ((7251, 7279), 'numpy.maximum', 'np.maximum', (['out[sid, 6]', 'cid'], {}), '(out[sid, 6], cid)\n', (7261, 7279), True, 'import numpy as np\n'), ((7312, 7346), 'numpy.unique', 'np.unique', (['seg'], {'return_counts': '(True)'}), '(seg, return_counts=True)\n', (7321, 7346), True, 'import numpy as np\n'), ((588, 610), 'numpy.argsort', 'np.argsort', (['(-segCounts)'], {}), '(-segCounts)\n', (598, 610), True, 'import numpy as np\n'), ((1174, 1208), 'numpy.unique', 'np.unique', (['seg'], {'return_counts': '(True)'}), '(seg, return_counts=True)\n', (1183, 1208), True, 'import numpy as np\n'), ((1608, 1622), 'numpy.unique', 'np.unique', (['seg'], {}), '(seg)\n', (1617, 1622), True, 'import numpy as np\n'), ((1655, 1668), 'numpy.array', 'np.array', (['uid'], {}), '(uid)\n', (1663, 1668), True, 'import numpy as np\n'), ((3527, 3559), 'numpy.arange', 'np.arange', (['(mid + 1)'], {'dtype': 'm_type'}), '(mid + 1, dtype=m_type)\n', (3536, 3559), True, 'import numpy as np\n'), ((3780, 3796), 'numpy.unique', 'np.unique', (['seg_c'], {}), '(seg_c)\n', (3789, 3796), True, 'import numpy as np\n'), ((3972, 4008), 'numpy.unique', 'np.unique', (['(slice_b * (slices[0] > 0))'], {}), '(slice_b * (slices[0] > 0))\n', (3981, 4008), True, 'import numpy as np\n'), ((5234, 5258), 'numpy.hstack', 'np.hstack', (['[rlA, new_id]'], {}), '([rlA, new_id])\n', (5243, 5258), True, 'import numpy as np\n'), ((5366, 5402), 'numpy.unique', 'np.unique', (['(slice_t * (slices[1] > 0))'], {}), '(slice_t * (slices[1] > 0))\n', (5375, 5402), True, 'import numpy as np\n'), ((754, 775), 'numpy.count_nonzero', 'np.count_nonzero', (['seg'], {}), '(seg)\n', (770, 775), True, 'import numpy as np\n'), ((917, 933), 'numpy.zeros', 'np.zeros', (['(1 + sm)'], {}), '(1 + sm)\n', (925, 933), True, 'import numpy as np\n'), ((995, 1012), 'numpy.arange', 'np.arange', (['(1 + sm)'], {}), '(1 + sm)\n', (1004, 1012), True, 'import numpy as np\n'), ((2924, 2936), 'numpy.zeros', 'np.zeros', (['sz'], {}), '(sz)\n', (2932, 2936), True, 'import numpy as np\n'), ((3070, 3131), 'skimage.measure.label', 'label', (['tmp[bb[0]:bb[1] + 1, bb[2]:bb[3] + 1, bb[4]:bb[5] + 1]'], {}), '(tmp[bb[0]:bb[1] + 1, bb[2]:bb[3] + 1, bb[4]:bb[5] + 1])\n', (3075, 3131), False, 'from skimage.measure import label\n'), ((4241, 4351), 'numpy.unique', 'np.unique', (['(slices[0, bb[0]:bb[1] + 1, bb[2]:bb[3] + 1] * (slice_b[bb[0]:bb[1] + 1, bb\n [2]:bb[3] + 1] == l))'], {}), '(slices[0, bb[0]:bb[1] + 1, bb[2]:bb[3] + 1] * (slice_b[bb[0]:bb[1\n ] + 1, bb[2]:bb[3] + 1] == l))\n', (4250, 4351), True, 'import numpy as np\n'), ((5448, 5485), 'numpy.unique', 'np.unique', (['(slices[1] * (slice_t == l))'], {}), '(slices[1] * (slice_t == l))\n', (5457, 5485), True, 'import numpy as np\n'), ((4624, 4663), 'numpy.unique', 'np.unique', (['(slice_t * (slices[1] == sid))'], {}), '(slice_t * (slices[1] == sid))\n', (4633, 4663), True, 'import numpy as np\n'), ((4872, 4891), 'numpy.where', 'np.where', (['(rl_c == 0)'], {}), '(rl_c == 0)\n', (4880, 4891), True, 'import numpy as np\n'), ((3666, 3710), 'numpy.stack', 'np.stack', (['[slice_b > 0, slice_t > 0]'], {'axis': '(0)'}), '([slice_b > 0, slice_t > 0], axis=0)\n', (3674, 3710), True, 'import numpy as np\n'), ((5723, 5745), 'numpy.in1d', 'np.in1d', (['rlA', 'rlA[pid]'], {}), '(rlA, rlA[pid])\n', (5730, 5745), True, 'import numpy as np\n')] |
import torch
import torch.nn as nn
import torch.nn.functional as F
class AttentionXCosNet(nn.Module):
def __init__(self, conf):
super(AttentionXCosNet, self).__init__()
self.embedding_net = nn.Sequential(
nn.Conv2d(32, 16, 3, padding=1),
nn.BatchNorm2d(16),
nn.PReLU()
)
self.attention = nn.Sequential(
nn.Conv2d(32, 16, 3, padding=1),
nn.BatchNorm2d(16),
nn.PReLU(),
nn.Conv2d(16, 1, 3, padding=1),
nn.BatchNorm2d(1),
nn.PReLU(),
)
self.name = 'AttenCosNet'
self.USE_SOFTMAX = conf.USE_SOFTMAX
self.SOFTMAX_T = conf.SOFTMAX_T
def softmax(self, x, T=1):
x /= T
return F.softmax(x.reshape(x.size(0), x.size(1), -1), 2).view_as(x)
def divByNorm(self, x):
'''
attention_weights.size(): [bs, 1, 7, 6]
'''
x -= x.view(x.size(0), x.size(1), -1).min(dim=2)[0].repeat(1, 1, x.size(2) * x.size(3)).view(x.size(0), x.size(1), x.size(2), x.size(3))
x /= x.view(x.size(0), x.size(1), -1).sum(dim=2).repeat(1, 1, x.size(2) * x.size(3)).view(x.size(0), x.size(1), x.size(2), x.size(3))
return x
def forward(self, feat_grid_1, feat_grid_2):
'''
feat_grid_1.size(): [bs, 32, 7, 7]
attention_weights.size(): [bs, 1, 7, 7]
'''
# XXX Do I need to normalize grid_feat?
conv1 = self.embedding_net(feat_grid_1)
conv2 = self.embedding_net(feat_grid_2)
fused_feat = torch.cat((conv1, conv2), dim=1)
attention_weights = self.attention(fused_feat)
# To Normalize attention
if self.USE_SOFTMAX:
attention_weights = self.softmax(attention_weights, self.SOFTMAX_T)
else:
attention_weights = self.divByNorm(attention_weights)
return attention_weights
class AttentionCosNet(nn.Module):
def __init__(self):
super(AttentionCosNet, self).__init__()
self.embedding_net = nn.Sequential(
nn.Conv2d(512, 256, 3, padding=1),
nn.BatchNorm2d(256),
nn.PReLU()
)
self.attention = nn.Sequential(
nn.Conv2d(512, 256, 3, padding=1),
nn.BatchNorm2d(256),
nn.PReLU(),
nn.Conv2d(256, 1, 3, padding=1),
nn.BatchNorm2d(1),
nn.PReLU(),
)
self.name = 'AttentionCosNet'
def softmax(self, x):
return F.softmax(x.reshape(x.size(0), x.size(1), -1), 2).view_as(x)
def forward(self, x1, x2):
'''
x1.size(): [bs, 512, 7, 6]
attention_weights.size(): [bs, 1, 7, 6]
'''
conv1 = self.embedding_net(x1)
conv2 = self.embedding_net(x2)
fused_feat = torch.cat((conv1, conv2), dim=1)
attention_weights = self.attention(fused_feat)
# XXX: I use softmax instead of normalize
# attention_weights = F.normalize(attention_weights, p=2, dim=1)
attention_weights = self.softmax(attention_weights)
return x1, x2, attention_weights
class EmbeddingNet(nn.Module):
def __init__(self):
super(EmbeddingNet, self).__init__()
self.convnet = nn.Sequential(nn.Conv2d(1, 32, 5), nn.PReLU(),
nn.MaxPool2d(2, stride=2),
nn.Conv2d(32, 64, 5), nn.PReLU(),
nn.MaxPool2d(2, stride=2))
self.fc = nn.Sequential(nn.Linear(64 * 4 * 4, 256),
nn.PReLU(),
nn.Linear(256, 256),
nn.PReLU(),
nn.Linear(256, 2)
)
def forward(self, x):
output = self.convnet(x)
output = output.view(output.size()[0], -1)
output = self.fc(output)
return output
def get_embedding(self, x):
return self.forward(x)
class EmbeddingNetL2(EmbeddingNet):
def __init__(self):
super(EmbeddingNetL2, self).__init__()
def forward(self, x):
output = super(EmbeddingNetL2, self).forward(x)
output /= output.pow(2).sum(1, keepdim=True).sqrt()
return output
def get_embedding(self, x):
return self.forward(x)
class ClassificationNet(nn.Module):
def __init__(self, embedding_net, n_classes):
super(ClassificationNet, self).__init__()
self.embedding_net = embedding_net
self.n_classes = n_classes
self.nonlinear = nn.PReLU()
self.fc1 = nn.Linear(2, n_classes)
def forward(self, x):
output = self.embedding_net(x)
output = self.nonlinear(output)
scores = F.log_softmax(self.fc1(output), dim=-1)
return scores
def get_embedding(self, x):
return self.nonlinear(self.embedding_net(x))
class SiameseNet(nn.Module):
def __init__(self, embedding_net):
super(SiameseNet, self).__init__()
self.embedding_net = embedding_net
def forward(self, x1, x2):
output1 = self.embedding_net(x1)
output2 = self.embedding_net(x2)
return output1, output2
def get_embedding(self, x):
return self.embedding_net(x)
class TripletNet(nn.Module):
def __init__(self, embedding_net):
super(TripletNet, self).__init__()
self.embedding_net = embedding_net
def forward(self, x1, x2, x3):
output1 = self.embedding_net(x1)
output2 = self.embedding_net(x2)
output3 = self.embedding_net(x3)
return output1, output2, output3
def get_embedding(self, x):
return self.embedding_net(x)
class ENMSiameseNet(nn.Module):
def __init__(self, embedding_net):
super(ENMSiameseNet, self).__init__()
self.embedding_net = embedding_net
self.name = 'Siamese'
def forward(self, x1, x2):
output1 = self.embedding_net(x1)
output2 = self.embedding_net(x2)
return output1, output2
def get_embedding(self, x):
return self.embedding_net(x)
class ENMTripletNet(nn.Module):
def __init__(self, embedding_net):
super(ENMTripletNet, self).__init__()
self.embedding_net = embedding_net
self.name = 'Triplet'
def forward(self, x1, x2, x3):
output1 = self.embedding_net(x1)
output2 = self.embedding_net(x2)
output3 = self.embedding_net(x3)
return output1, output2, output3
def get_embedding(self, x):
return self.embedding_net(x)
class ENMEmbeddingNet(nn.Module):
def __init__(self):
super(ENMEmbeddingNet, self).__init__()
self.fc = nn.Sequential(nn.Linear(1024, 1024),
nn.PReLU(),
nn.Dropout(p=0.5),
nn.Linear(1024, 1024),
nn.PReLU(),
nn.Dropout(p=0.5),
nn.Linear(1024, 1024)
)
self.name = 'ENMEmb'
def forward(self, x):
output = self.fc(x)
return output
def get_embedding(self, x):
return self.forward(x)
| [
"torch.nn.BatchNorm2d",
"torch.nn.Dropout",
"torch.nn.Conv2d",
"torch.nn.PReLU",
"torch.nn.MaxPool2d",
"torch.nn.Linear",
"torch.cat"
] | [((1664, 1696), 'torch.cat', 'torch.cat', (['(conv1, conv2)'], {'dim': '(1)'}), '((conv1, conv2), dim=1)\n', (1673, 1696), False, 'import torch\n'), ((3001, 3033), 'torch.cat', 'torch.cat', (['(conv1, conv2)'], {'dim': '(1)'}), '((conv1, conv2), dim=1)\n', (3010, 3033), False, 'import torch\n'), ((4779, 4789), 'torch.nn.PReLU', 'nn.PReLU', ([], {}), '()\n', (4787, 4789), True, 'import torch.nn as nn\n'), ((4809, 4832), 'torch.nn.Linear', 'nn.Linear', (['(2)', 'n_classes'], {}), '(2, n_classes)\n', (4818, 4832), True, 'import torch.nn as nn\n'), ((247, 278), 'torch.nn.Conv2d', 'nn.Conv2d', (['(32)', '(16)', '(3)'], {'padding': '(1)'}), '(32, 16, 3, padding=1)\n', (256, 278), True, 'import torch.nn as nn\n'), ((300, 318), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(16)'], {}), '(16)\n', (314, 318), True, 'import torch.nn as nn\n'), ((340, 350), 'torch.nn.PReLU', 'nn.PReLU', ([], {}), '()\n', (348, 350), True, 'import torch.nn as nn\n'), ((429, 460), 'torch.nn.Conv2d', 'nn.Conv2d', (['(32)', '(16)', '(3)'], {'padding': '(1)'}), '(32, 16, 3, padding=1)\n', (438, 460), True, 'import torch.nn as nn\n'), ((482, 500), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(16)'], {}), '(16)\n', (496, 500), True, 'import torch.nn as nn\n'), ((522, 532), 'torch.nn.PReLU', 'nn.PReLU', ([], {}), '()\n', (530, 532), True, 'import torch.nn as nn\n'), ((554, 584), 'torch.nn.Conv2d', 'nn.Conv2d', (['(16)', '(1)', '(3)'], {'padding': '(1)'}), '(16, 1, 3, padding=1)\n', (563, 584), True, 'import torch.nn as nn\n'), ((606, 623), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(1)'], {}), '(1)\n', (620, 623), True, 'import torch.nn as nn\n'), ((645, 655), 'torch.nn.PReLU', 'nn.PReLU', ([], {}), '()\n', (653, 655), True, 'import torch.nn as nn\n'), ((2179, 2212), 'torch.nn.Conv2d', 'nn.Conv2d', (['(512)', '(256)', '(3)'], {'padding': '(1)'}), '(512, 256, 3, padding=1)\n', (2188, 2212), True, 'import torch.nn as nn\n'), ((2234, 2253), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(256)'], {}), '(256)\n', (2248, 2253), True, 'import torch.nn as nn\n'), ((2275, 2285), 'torch.nn.PReLU', 'nn.PReLU', ([], {}), '()\n', (2283, 2285), True, 'import torch.nn as nn\n'), ((2364, 2397), 'torch.nn.Conv2d', 'nn.Conv2d', (['(512)', '(256)', '(3)'], {'padding': '(1)'}), '(512, 256, 3, padding=1)\n', (2373, 2397), True, 'import torch.nn as nn\n'), ((2419, 2438), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(256)'], {}), '(256)\n', (2433, 2438), True, 'import torch.nn as nn\n'), ((2460, 2470), 'torch.nn.PReLU', 'nn.PReLU', ([], {}), '()\n', (2468, 2470), True, 'import torch.nn as nn\n'), ((2492, 2523), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', '(1)', '(3)'], {'padding': '(1)'}), '(256, 1, 3, padding=1)\n', (2501, 2523), True, 'import torch.nn as nn\n'), ((2545, 2562), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(1)'], {}), '(1)\n', (2559, 2562), True, 'import torch.nn as nn\n'), ((2584, 2594), 'torch.nn.PReLU', 'nn.PReLU', ([], {}), '()\n', (2592, 2594), True, 'import torch.nn as nn\n'), ((3452, 3471), 'torch.nn.Conv2d', 'nn.Conv2d', (['(1)', '(32)', '(5)'], {}), '(1, 32, 5)\n', (3461, 3471), True, 'import torch.nn as nn\n'), ((3473, 3483), 'torch.nn.PReLU', 'nn.PReLU', ([], {}), '()\n', (3481, 3483), True, 'import torch.nn as nn\n'), ((3522, 3547), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {'stride': '(2)'}), '(2, stride=2)\n', (3534, 3547), True, 'import torch.nn as nn\n'), ((3586, 3606), 'torch.nn.Conv2d', 'nn.Conv2d', (['(32)', '(64)', '(5)'], {}), '(32, 64, 5)\n', (3595, 3606), True, 'import torch.nn as nn\n'), ((3608, 3618), 'torch.nn.PReLU', 'nn.PReLU', ([], {}), '()\n', (3616, 3618), True, 'import torch.nn as nn\n'), ((3657, 3682), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {'stride': '(2)'}), '(2, stride=2)\n', (3669, 3682), True, 'import torch.nn as nn\n'), ((3717, 3743), 'torch.nn.Linear', 'nn.Linear', (['(64 * 4 * 4)', '(256)'], {}), '(64 * 4 * 4, 256)\n', (3726, 3743), True, 'import torch.nn as nn\n'), ((3777, 3787), 'torch.nn.PReLU', 'nn.PReLU', ([], {}), '()\n', (3785, 3787), True, 'import torch.nn as nn\n'), ((3821, 3840), 'torch.nn.Linear', 'nn.Linear', (['(256)', '(256)'], {}), '(256, 256)\n', (3830, 3840), True, 'import torch.nn as nn\n'), ((3874, 3884), 'torch.nn.PReLU', 'nn.PReLU', ([], {}), '()\n', (3882, 3884), True, 'import torch.nn as nn\n'), ((3918, 3935), 'torch.nn.Linear', 'nn.Linear', (['(256)', '(2)'], {}), '(256, 2)\n', (3927, 3935), True, 'import torch.nn as nn\n'), ((6911, 6932), 'torch.nn.Linear', 'nn.Linear', (['(1024)', '(1024)'], {}), '(1024, 1024)\n', (6920, 6932), True, 'import torch.nn as nn\n'), ((6966, 6976), 'torch.nn.PReLU', 'nn.PReLU', ([], {}), '()\n', (6974, 6976), True, 'import torch.nn as nn\n'), ((7010, 7027), 'torch.nn.Dropout', 'nn.Dropout', ([], {'p': '(0.5)'}), '(p=0.5)\n', (7020, 7027), True, 'import torch.nn as nn\n'), ((7061, 7082), 'torch.nn.Linear', 'nn.Linear', (['(1024)', '(1024)'], {}), '(1024, 1024)\n', (7070, 7082), True, 'import torch.nn as nn\n'), ((7116, 7126), 'torch.nn.PReLU', 'nn.PReLU', ([], {}), '()\n', (7124, 7126), True, 'import torch.nn as nn\n'), ((7160, 7177), 'torch.nn.Dropout', 'nn.Dropout', ([], {'p': '(0.5)'}), '(p=0.5)\n', (7170, 7177), True, 'import torch.nn as nn\n'), ((7211, 7232), 'torch.nn.Linear', 'nn.Linear', (['(1024)', '(1024)'], {}), '(1024, 1024)\n', (7220, 7232), True, 'import torch.nn as nn\n')] |
# Copyright 2018 HTCondor Team, Computer Sciences Department,
# University of Wisconsin-Madison, WI.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from htmap import mapping
def test_exception_inside_submit_removes_map_dir(mocker, doubler):
class Marker(Exception):
pass
def bad_execute_submit(*args, **kwargs):
raise Marker()
mocker.patch("htmap.mapping.execute_submit", bad_execute_submit)
with pytest.raises(Marker):
mapping.map(doubler, range(10))
assert len(list(mapping.maps_dir_path().iterdir())) == 0
| [
"pytest.raises",
"htmap.mapping.maps_dir_path"
] | [((948, 969), 'pytest.raises', 'pytest.raises', (['Marker'], {}), '(Marker)\n', (961, 969), False, 'import pytest\n'), ((1032, 1055), 'htmap.mapping.maps_dir_path', 'mapping.maps_dir_path', ([], {}), '()\n', (1053, 1055), False, 'from htmap import mapping\n')] |
"""
LFW dataloading
"""
import argparse
import time
import numpy as np
import torch
from PIL import Image
from torch.utils.data import DataLoader, Dataset
from torchvision import transforms
import os
import glob
import matplotlib.pyplot as plt
class LFWDataset(Dataset):
def __init__(self, path_to_folder: str, transform) -> None:
self.imgs_path = path_to_folder
file_list = glob.glob(self.imgs_path + "*")
# print(file_list)
self.data = []
for class_path in file_list:
class_name = class_path.split("\\")[-1]
for img_path in glob.glob(class_path + "\\*.jpg"):
self.data.append([img_path, class_name])
self.transform = transform
def __len__(self):
return len(self.data)
def __getitem__(self, index: int) -> torch.Tensor:
entry = self.data[index]
image = Image.open(entry[0])
label = entry[1]
return self.transform(image), label
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-path_to_folder', default='lfw/', type=str)
parser.add_argument('-batch_size', default=1028, type=int)
parser.add_argument('-num_workers', default=0, type=int)
parser.add_argument('-visualize_batch', action='store_true')
parser.add_argument('-get_timing', action='store_true')
parser.add_argument('-batches_to_check', default=5, type=int)
args = parser.parse_args()
lfw_trans = transforms.Compose([
transforms.RandomAffine(5, (0.1, 0.1), (0.5, 2.0)),
transforms.ToTensor()
])
# Define dataset
dataset = LFWDataset(args.path_to_folder, lfw_trans)
# Define dataloader
dataloader = DataLoader(
dataset,
batch_size=args.batch_size,
shuffle=False,
num_workers=args.num_workers
)
if args.visualize_batch:
# TODO: visualize a batch of images
figure = plt.figure(figsize=(14, 8))
cols, rows = int(len(dataloader)/2), 2
batch = next(iter(dataloader))
images = batch[0]
labels = batch[1]
for i in range(1, cols * rows + 1):
img, label = images[i - 1], labels[i - 1]
figure.add_subplot(rows, cols, i)
plt.title(label)
plt.axis("off")
plt.imshow(img.permute(1,2,0), cmap="gray")
plt.savefig("visualization.jpg")
if args.get_timing:
# lets do some repetitions
res = [ ]
for _ in range(5):
start = time.time()
for batch_idx, batch in enumerate(dataloader):
if batch_idx > args.batches_to_check:
break
end = time.time()
res.append(end - start)
res = np.array(res)
print(f'Timing: {np.mean(res)}+-{np.std(res)}')
| [
"numpy.mean",
"PIL.Image.open",
"matplotlib.pyplot.savefig",
"torchvision.transforms.RandomAffine",
"argparse.ArgumentParser",
"numpy.std",
"matplotlib.pyplot.axis",
"numpy.array",
"matplotlib.pyplot.figure",
"torch.utils.data.DataLoader",
"matplotlib.pyplot.title",
"torchvision.transforms.ToT... | [((1044, 1069), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1067, 1069), False, 'import argparse\n'), ((1762, 1859), 'torch.utils.data.DataLoader', 'DataLoader', (['dataset'], {'batch_size': 'args.batch_size', 'shuffle': '(False)', 'num_workers': 'args.num_workers'}), '(dataset, batch_size=args.batch_size, shuffle=False, num_workers=\n args.num_workers)\n', (1772, 1859), False, 'from torch.utils.data import DataLoader, Dataset\n'), ((398, 429), 'glob.glob', 'glob.glob', (["(self.imgs_path + '*')"], {}), "(self.imgs_path + '*')\n", (407, 429), False, 'import glob\n'), ((895, 915), 'PIL.Image.open', 'Image.open', (['entry[0]'], {}), '(entry[0])\n', (905, 915), False, 'from PIL import Image\n'), ((1990, 2017), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(14, 8)'}), '(figsize=(14, 8))\n', (2000, 2017), True, 'import matplotlib.pyplot as plt\n'), ((2421, 2453), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""visualization.jpg"""'], {}), "('visualization.jpg')\n", (2432, 2453), True, 'import matplotlib.pyplot as plt\n'), ((2840, 2853), 'numpy.array', 'np.array', (['res'], {}), '(res)\n', (2848, 2853), True, 'import numpy as np\n'), ((597, 630), 'glob.glob', 'glob.glob', (["(class_path + '\\\\*.jpg')"], {}), "(class_path + '\\\\*.jpg')\n", (606, 630), False, 'import glob\n'), ((1540, 1590), 'torchvision.transforms.RandomAffine', 'transforms.RandomAffine', (['(5)', '(0.1, 0.1)', '(0.5, 2.0)'], {}), '(5, (0.1, 0.1), (0.5, 2.0))\n', (1563, 1590), False, 'from torchvision import transforms\n'), ((1600, 1621), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (1619, 1621), False, 'from torchvision import transforms\n'), ((2312, 2328), 'matplotlib.pyplot.title', 'plt.title', (['label'], {}), '(label)\n', (2321, 2328), True, 'import matplotlib.pyplot as plt\n'), ((2341, 2356), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (2349, 2356), True, 'import matplotlib.pyplot as plt\n'), ((2595, 2606), 'time.time', 'time.time', ([], {}), '()\n', (2604, 2606), False, 'import time\n'), ((2764, 2775), 'time.time', 'time.time', ([], {}), '()\n', (2773, 2775), False, 'import time\n'), ((2879, 2891), 'numpy.mean', 'np.mean', (['res'], {}), '(res)\n', (2886, 2891), True, 'import numpy as np\n'), ((2895, 2906), 'numpy.std', 'np.std', (['res'], {}), '(res)\n', (2901, 2906), True, 'import numpy as np\n')] |
import requests
lis = [
{'http': '172.16.17.32:8888'},
{'http': '192.168.3.11:3129'},
{'http': '172.16.58.3:8181'},
{'http': '172.16.31.10:8010'},
{'http': '172.16.31.10:80'},
{'http': '192.168.3.11:31773'},
]
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36',
}
url = 'http://www.baidu.com/s?ie=UTF-8&wd=ip'
while len(lis):
try:
proxy = lis.pop()
r = requests.get(url=url, headers=headers, proxies=proxy)
break
except Exception as e:
print(e)
print(r.content)
with open('pic/daili1.html', 'wb') as fp:
fp.write(r.content)
# 打开daili.html
# 页面显示 本机IP: 172.16.31.10陕西省西安市 联通
# 证明使用了代理,访问;因为本地ip是北京是联通
| [
"requests.get"
] | [((514, 567), 'requests.get', 'requests.get', ([], {'url': 'url', 'headers': 'headers', 'proxies': 'proxy'}), '(url=url, headers=headers, proxies=proxy)\n', (526, 567), False, 'import requests\n')] |
import os
from pandas import Timedelta
import requests
import time
from . import get_airport_code_source
from ..caada_errors import HTMLRequestError
from ..caada_typing import pathlike
from ..caada_logging import logger
def _download_airport_codes(source='openflights', update='never'):
"""Download geographic data linked to airport codes.
Parameters
----------
source
Which web source to pull data from. Currently the only allowed option is `"openflights"`.
update
Controls whether CAADA redownloads the needed data or not. Possible values are:
* `"never"` - only download if no local copy is available.
* `"periodically"` - only download if the local copy is more than a week old.
* `"always"` - always redownloads
Returns
-------
None
"""
entry = get_airport_code_source(source)
local_file = entry['local']
remote_url = entry['remote']
_download_airport_code_data(source, local_file, remote_url, update)
def _download_airport_code_data(source_name: str, local_file: pathlike, remote_url: str, update: str = 'never'):
"""General driver for geographic airport data in .csv format.
Parameters
----------
source_name
Name the user passes to identify this source.
local_file
Path to where the local file is or should be
remote_url
URL to where the data is on the web
update
Controls whether CAADA redownloads the needed data or not. Possible values are:
* `"never"` - only download if no local copy is available.
* `"periodically"` - only download if the local copy is more than a week old.
* `"always"` - always redownloads
Returns
-------
None
Returns nothing, downloads the file to `local_file`.
"""
if update == 'never':
if local_file.exists():
logger.debug('%s already exists', local_file)
return
else:
logger.info('%s does not exist, must download', local_file)
elif update == 'periodically':
if local_file.exists():
mtime = os.path.getmtime(local_file)
age = time.time() - mtime
td = str(Timedelta(seconds=age))
if age < 7*24*3600:
# don't update if it's been modified within the last week
logger.debug('%s recently updated (%s old), not updating', local_file, td)
return
else:
logger.debug('%s more than 7 days old (%s old), will update', local_file, td)
elif update != 'always':
raise ValueError('Bad value for update: "{}". Options are "never", "periodically", and "always".'.format(update))
logger.info('Downloading %s to %s', remote_url, local_file)
r = requests.get(remote_url)
if r.status_code != 200:
raise HTMLRequestError('Error retrieving {} airport codes. HTTP status code was {}'.format(source_name, r.status_code))
with open(local_file, 'wb') as wobj:
wobj.write(r.content)
logger.info('Download successful.')
| [
"os.path.getmtime",
"pandas.Timedelta",
"time.time",
"requests.get"
] | [((2816, 2840), 'requests.get', 'requests.get', (['remote_url'], {}), '(remote_url)\n', (2828, 2840), False, 'import requests\n'), ((2148, 2176), 'os.path.getmtime', 'os.path.getmtime', (['local_file'], {}), '(local_file)\n', (2164, 2176), False, 'import os\n'), ((2195, 2206), 'time.time', 'time.time', ([], {}), '()\n', (2204, 2206), False, 'import time\n'), ((2236, 2258), 'pandas.Timedelta', 'Timedelta', ([], {'seconds': 'age'}), '(seconds=age)\n', (2245, 2258), False, 'from pandas import Timedelta\n')] |
from django.contrib import admin
from .models import Group, Event, Market, Order, MarketPosition, Trade, Account
admin.site.register(Group)
admin.site.register(Event)
admin.site.register(Market)
admin.site.register(Order)
admin.site.register(MarketPosition)
admin.site.register(Trade)
admin.site.register(Account) | [
"django.contrib.admin.site.register"
] | [((114, 140), 'django.contrib.admin.site.register', 'admin.site.register', (['Group'], {}), '(Group)\n', (133, 140), False, 'from django.contrib import admin\n'), ((141, 167), 'django.contrib.admin.site.register', 'admin.site.register', (['Event'], {}), '(Event)\n', (160, 167), False, 'from django.contrib import admin\n'), ((168, 195), 'django.contrib.admin.site.register', 'admin.site.register', (['Market'], {}), '(Market)\n', (187, 195), False, 'from django.contrib import admin\n'), ((196, 222), 'django.contrib.admin.site.register', 'admin.site.register', (['Order'], {}), '(Order)\n', (215, 222), False, 'from django.contrib import admin\n'), ((223, 258), 'django.contrib.admin.site.register', 'admin.site.register', (['MarketPosition'], {}), '(MarketPosition)\n', (242, 258), False, 'from django.contrib import admin\n'), ((259, 285), 'django.contrib.admin.site.register', 'admin.site.register', (['Trade'], {}), '(Trade)\n', (278, 285), False, 'from django.contrib import admin\n'), ((286, 314), 'django.contrib.admin.site.register', 'admin.site.register', (['Account'], {}), '(Account)\n', (305, 314), False, 'from django.contrib import admin\n')] |
# This file is part of the QuTIpy package.
# https://github.com/sumeetkhatri/QuTIpy
#
# Copyright (c) 2022 <NAME>.
# --.- ..- - .. .--. -.--
#
#
# SPDX-License-Identifier: AGPL-3.0
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import nox
PYTHON_ENV = python = ["3.6", "3.7", "3.8", "3.9"]
SOURCE_FILES = (
"setup.py",
"noxfile.py",
"qutipy/",
"test/",
)
@nox.session(python=PYTHON_ENV)
def tests(session):
"""Run the test suite."""
session.install("black")
session.install("pytest")
session.install(".")
session.run("pytest")
@nox.session(python=PYTHON_ENV)
def lint(session):
"""Run the lint suite."""
session.install("flake8", "black", "mypy", "isort", "types-requests")
session.run("isort", "--check", "--profile=black", *SOURCE_FILES)
session.run("black", "--target-version=py39", "--check", *SOURCE_FILES)
session.run("python", "utils/license-headers.py", "check", *SOURCE_FILES)
@nox.session(python=PYTHON_ENV)
def formatting(session):
"""Run the formatter suite."""
session.install(
"black", "isort", "autopep8", "flake8-black", "flake8-bugbear", "flake8-bandit"
)
session.run("isort", "--profile=black", *SOURCE_FILES)
session.run("black", "--target-version=py39", *SOURCE_FILES)
session.run("stubgen", "-p", "qutipy")
session.run("python", "utils/license-headers.py", "fix", *SOURCE_FILES)
| [
"nox.session"
] | [((973, 1003), 'nox.session', 'nox.session', ([], {'python': 'PYTHON_ENV'}), '(python=PYTHON_ENV)\n', (984, 1003), False, 'import nox\n'), ((1167, 1197), 'nox.session', 'nox.session', ([], {'python': 'PYTHON_ENV'}), '(python=PYTHON_ENV)\n', (1178, 1197), False, 'import nox\n'), ((1550, 1580), 'nox.session', 'nox.session', ([], {'python': 'PYTHON_ENV'}), '(python=PYTHON_ENV)\n', (1561, 1580), False, 'import nox\n')] |
import logging
import sys
import unittest
sys.path.insert(0, '..')
import expect
import time
# python 2 & 3 compatibility
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
class StringIo(object):
INDATA = None
def __init__(self):
self.in_stream = StringIO(self.INDATA)
self.out_stream = StringIO()
def read(self, _):
return self.in_stream.read(1)
def write(self, string):
self.out_stream.write(string)
class ExpectTest(unittest.TestCase):
def test_uboot(self):
"""U-boot communication example.
"""
class UBoot(StringIo):
INDATA = """
Booting in 3 seconds...
Booting in 2 seconds...
u-boot> fatload mmc 0 0x3000000 uImage
u-boot> fatload mmc 0 0x2A00000 devicetree.dtb
u-boot> fatload mmc 0 0x2000000 uramdisk.image.gz
u-boot> bootm 0x3000000 0x2000000 0x2A00000
...
~ $
"""
OUTDATA = """
fatload mmc 0 0x3000000 uImage
fatload mmc 0 0x2A00000 devicetree.dtb
fatload mmc 0 0x2000000 uramdisk.image.gz
bootm 0x3000000 0x2000000 0x2A00000
"""
prompt = r'u-boot> '
# create the handler object and start to communicate with u-boot
uboot = expect.Handler(UBoot())
uboot.expect(r'Booting in \d+ seconds...')
uboot.send('')
uboot.expect(prompt)
uboot.send('fatload mmc 0 0x3000000 uImage')
uboot.expect(prompt)
uboot.send('fatload mmc 0 0x2A00000 devicetree.dtb')
uboot.expect(prompt)
uboot.send('fatload mmc 0 0x2000000 uramdisk.image.gz')
uboot.expect(prompt)
uboot.send('bootm 0x3000000 0x2000000 0x2A00000')
uboot.expect(r'~ \$')
self.assertEqual(uboot.iostream.out_stream.getvalue(), UBoot.OUTDATA)
sys.stdout.flush()
def test_expect_return_value(self):
"""Verify the return value from the expect function.
"""
foobar = expect.Handler(StringIO('barfoo'))
match = foobar.expect(r'foo|bar')
self.assertEqual(match, 'bar')
match = foobar.expect(r'foo|bar')
self.assertEqual(match, 'foo')
def test_eol(self):
"""End of line testing.
"""
iostream = StringIO()
handler = expect.Handler(iostream, eol='\r\n')
handler.send('')
self.assertEqual(iostream.getvalue(), '\r\n')
handler.send('', send_eol=False)
self.assertEqual(iostream.getvalue(), '\r\n')
def test_break_condition(self):
"""Verify that the expect function throws an exception
when the break condition is met.
"""
# The StringIO object returns '' when EOF is encountered.
iostream = StringIO()
handler = expect.Handler(iostream)
with self.assertRaises(expect.BreakConditionError) as e:
handler.expect(r'foo')
print(e)
def test_no_split(self):
"""Verify that the expect function can match over multiple lines.
"""
class Handler(StringIo):
INDATA = """
foo
bar
"""
handler = expect.Handler(Handler(), receive_buffer_max=8)
handler.expect(r'foo\nbar')
def test_expect_timeout(self):
"""Test expect timeout functionality.
"""
class Handler(object):
def read(self, _):
time.sleep(0.2)
return "1"
handler = expect.Handler(Handler())
with self.assertRaises(expect.TimeoutError) as e:
handler.expect(r"no match", timeout=0.1)
print(e)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
unittest.main()
| [
"logging.basicConfig",
"sys.stdout.flush",
"sys.path.insert",
"time.sleep",
"unittest.main",
"io.StringIO",
"expect.Handler"
] | [((42, 66), 'sys.path.insert', 'sys.path.insert', (['(0)', '""".."""'], {}), "(0, '..')\n", (57, 66), False, 'import sys\n'), ((3575, 3615), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG'}), '(level=logging.DEBUG)\n', (3594, 3615), False, 'import logging\n'), ((3620, 3635), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3633, 3635), False, 'import unittest\n'), ((305, 326), 'io.StringIO', 'StringIO', (['self.INDATA'], {}), '(self.INDATA)\n', (313, 326), False, 'from io import StringIO\n'), ((353, 363), 'io.StringIO', 'StringIO', ([], {}), '()\n', (361, 363), False, 'from io import StringIO\n'), ((1776, 1794), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (1792, 1794), False, 'import sys\n'), ((2211, 2221), 'io.StringIO', 'StringIO', ([], {}), '()\n', (2219, 2221), False, 'from io import StringIO\n'), ((2240, 2276), 'expect.Handler', 'expect.Handler', (['iostream'], {'eol': "'\\r\\n'"}), "(iostream, eol='\\r\\n')\n", (2254, 2276), False, 'import expect\n'), ((2689, 2699), 'io.StringIO', 'StringIO', ([], {}), '()\n', (2697, 2699), False, 'from io import StringIO\n'), ((2718, 2742), 'expect.Handler', 'expect.Handler', (['iostream'], {}), '(iostream)\n', (2732, 2742), False, 'import expect\n'), ((1941, 1959), 'io.StringIO', 'StringIO', (['"""barfoo"""'], {}), "('barfoo')\n", (1949, 1959), False, 'from io import StringIO\n'), ((3325, 3340), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (3335, 3340), False, 'import time\n')] |
from rest_framework.serializers import HyperlinkedModelSerializer, ModelSerializer
from rest_framework import serializers
from .models import Blog, BlogCategory, Comments
class CategorySerializer(ModelSerializer):
class Meta:
model = BlogCategory
exclude = ['id']
class BlogListSerializer(HyperlinkedModelSerializer):
category = CategorySerializer()
class Meta:
model = Blog
exclude = ['active', 'text']
class CommentsSerializer(HyperlinkedModelSerializer):
user = serializers.StringRelatedField()
# blog = serializers.StringRelatedField()
class Meta:
model = Comments
fields = "__all__"
class BlogDetailSerializer(ModelSerializer):
comment = CommentsSerializer(many=True)
class Meta:
model = Blog
exclude = ['active', 'category']
| [
"rest_framework.serializers.StringRelatedField"
] | [((521, 553), 'rest_framework.serializers.StringRelatedField', 'serializers.StringRelatedField', ([], {}), '()\n', (551, 553), False, 'from rest_framework import serializers\n')] |
import hammer as h
signals = h.choice(
h.token("hmi.signal1"),
h.token("hmi.signal2"),
h.token("hmi.signal3"))
| [
"hammer.token"
] | [((39, 61), 'hammer.token', 'h.token', (['"""hmi.signal1"""'], {}), "('hmi.signal1')\n", (46, 61), True, 'import hammer as h\n'), ((63, 85), 'hammer.token', 'h.token', (['"""hmi.signal2"""'], {}), "('hmi.signal2')\n", (70, 85), True, 'import hammer as h\n'), ((87, 109), 'hammer.token', 'h.token', (['"""hmi.signal3"""'], {}), "('hmi.signal3')\n", (94, 109), True, 'import hammer as h\n')] |
"""Scrape the first ten pages of stackoverflow jobs for python jobs.
- The job title
- The company name
- The location
- The date posted (in whatever date format makes the most sense to you)
- The link to the actual job posting
"""
from bs4 import BeautifulSoup as bs
from datetime import datetime
import os
import requests
DOMAIN = 'https://stackoverflow.com'
def scrape_for_jobs(response):
"""Scrape a page for Python jobs.
Returns the url for the next page of jobs.
"""
content = bs(response.content, 'html.parser')
jobs = content.find_all('div', class_='-job-summary ')
all_job_data = []
for job in jobs:
languages = job.find('div', class_='-tags')
if not languages:
continue
if 'python' not in languages.get_text():
continue
job_data = []
title = job.find('a', class_='job-link').text
job_data.append(title if title else '')
company = job.find('div', class_='-company')
company_name = company.find('div', class_='-name').text.strip()
job_data.append(company_name if company_name else '')
company_location = company.find('div', class_='-location').text.strip('\r\n -')
job_data.append('"{}"'.format(company_location) if company_location else '')
date_posted = job.find('p', class_='-posted-date').text.strip()
job_data.append(date_posted if date_posted else '')
link = job.find('a', class_='job-link').get('href')
full_link = DOMAIN + link
job_data.append(full_link)
all_job_data.append(job_data)
return all_job_data
def save_results(results, output):
"""Save the scraping results to a file."""
data = [','.join(job_data) for job_data in results]
output.write('\n' + '\n'.join(data))
def get_job_page(page_num):
"""Scrape num page of the job postings."""
response = requests.get(DOMAIN + '/jobs?pg={}'.format(page_num))
return scrape_for_jobs(response)
if __name__ == '__main__':
dir_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'results')
output_file = 'Python jobs - {}.csv'.format(datetime.now().strftime('%m-%d-%y'))
output_path = os.path.join(dir_path, output_file)
with open(output_path, 'w') as output:
output.write('Job Title,Company,Location,Date Posted,Link')
output = open(output_path, 'a')
print('Scraping the StackOverflow Job site for Python jobs!')
for n in range(1, 11):
print('Scraping page {}...'.format(n))
data = get_job_page(n)
save_results(data, output)
output.close()
print('Done! Results saved in results/{}'.format(output_file))
| [
"bs4.BeautifulSoup",
"datetime.datetime.now",
"os.path.realpath",
"os.path.join"
] | [((504, 539), 'bs4.BeautifulSoup', 'bs', (['response.content', '"""html.parser"""'], {}), "(response.content, 'html.parser')\n", (506, 539), True, 'from bs4 import BeautifulSoup as bs\n'), ((2209, 2244), 'os.path.join', 'os.path.join', (['dir_path', 'output_file'], {}), '(dir_path, output_file)\n', (2221, 2244), False, 'import os\n'), ((2066, 2092), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (2082, 2092), False, 'import os\n'), ((2154, 2168), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2166, 2168), False, 'from datetime import datetime\n')] |
from django.contrib.auth import get_user_model
from django.contrib.auth.hashers import check_password
from django.shortcuts import get_object_or_404
from rest_framework import viewsets, mixins, status
from rest_framework.decorators import action
from rest_framework.permissions import IsAdminUser, AllowAny
from rest_framework.response import Response
from rest_framework.viewsets import GenericViewSet
from .utils import reorder_queue
from .serializers import (
TaskSerializer, OrderSerializer, StatusSerializer, SystemSerializer,
UserSerializer
)
from tasks.models import Task, Order, Status, System
User = get_user_model()
class TaskViewSet(viewsets.ModelViewSet):
serializer_class = TaskSerializer
def get_queryset(self):
if self.kwargs.get('pk'):
return Task.objects.filter(
id=self.kwargs.get('pk'), user=self.request.user
)
return Task.objects.filter(
user=self.request.user
).order_by('-priority')
def perform_create(self, serializer):
serializer.save(user=self.request.user)
Order.objects.create(task_id=serializer.data['id'], order_number=1)
Status.objects.create(task_id=serializer.data['id'])
reorder_queue()
def perform_update(self, serializer):
serializer.save()
if serializer.data.get('priority'):
reorder_queue()
def destroy(self, request, *args, **kwargs):
return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
@action(methods=["post"], detail=True, url_path="execute",
url_name="execute_task")
def execute_task(self, request, pk=None):
current_status = Status.objects.get(task_id=pk)
if current_status.status == 'running':
return Response(data={'error': 'Эта задача уже запущена'},
status=status.HTTP_400_BAD_REQUEST)
if current_status.status == 'deleted':
return Response(data={
'error': 'Невозможно изменить удалённую задачу'
}, status=status.HTTP_400_BAD_REQUEST)
task = Task.objects.get(id=pk)
available_system = System.objects.filter(
active=True, available_threads__gte=task.num_threads
).first()
if available_system:
available_system.available_threads -= task.num_threads
task.running_on = available_system
task.save()
current_status.status = 'running'
current_status.save()
return Response(data={
'info': 'Ваша задача запущена на выполнение'
})
return Response(data={
'info': 'На данный момент нет свободных машин, попробуйте позже'
})
@action(methods=["post"], detail=True, url_path="pause",
url_name="pause_task")
def pause_task(self, request, pk=None):
current_status = Status.objects.get(task_id=pk)
if current_status.status == 'stopped':
return Response(data={'error': 'Эта задача уже приостановлена'},
status=status.HTTP_400_BAD_REQUEST)
if current_status.status == 'deleted':
return Response(data={
'error': 'Невозможно изменить удалённую задачу'
}, status=status.HTTP_400_BAD_REQUEST)
if current_status.status == 'in_queue':
return Response(data={
'error': 'Невозможно приостановить не запущенную задачу'
}, status=status.HTTP_400_BAD_REQUEST)
task = Task.objects.get(id=pk)
current_system = System.objects.get(id=task.running_on.id)
current_system.available_threads += task.num_threads
task.running_on = None
current_status.status = 'stopped'
current_status.save()
return Response(data={'info': 'Ваша задача приостановлена'})
@action(methods=["post"], detail=True, url_path="resume",
url_name="resume_task")
def resume_task(self, request, pk=None):
current_status = Status.objects.get(task_id=pk)
if current_status.status == 'running':
return Response(data={'error': 'Эта задача уже выполняется'},
status=status.HTTP_400_BAD_REQUEST)
if current_status.status == 'deleted':
return Response(data={
'error': 'Невозможно изменить удалённую задачу'
}, status=status.HTTP_400_BAD_REQUEST)
task = Task.objects.get(id=pk)
available_system = System.objects.filter(
active=True, available_threads__gte=task.num_threads
).first()
if available_system:
available_system.available_threads -= task.num_threads
task.running_on = available_system
task.save()
current_status.status = 'running'
current_status.save()
return Response(data={'info': 'Ваша задача возобновлена'})
return Response(data={
'info': 'На данный момент нет свободных машин, попробуйте позже'
})
@action(methods=["post"], detail=True, url_path="delete",
url_name="delete_task")
def delete_task(self, request, pk=None):
order = Order.objects.order_by('-order_number').all()
current_task_order = Order.objects.get(task_id=pk).order_number
Task.objects.get(id=pk).delete()
for task in order:
if task.order_number > current_task_order:
task.order_number -= 1
return Response(data={'info': 'Ваша задача удалена'})
class OrderViewSet(mixins.ListModelMixin, GenericViewSet):
serializer_class = OrderSerializer
def get_queryset(self):
return Order.objects.filter(
task__user=self.request.user
).order_by('order_number')
class StatusViewSet(mixins.ListModelMixin, GenericViewSet):
serializer_class = StatusSerializer
def get_queryset(self):
return Status.objects.filter(
task__user=self.request.user
).order_by('task')
class SystemViewSet(viewsets.ModelViewSet):
queryset = System.objects.all()
serializer_class = SystemSerializer
permission_classes = IsAdminUser
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
permission_classes = AllowAny
@action(methods=["post"], detail=False, url_path="login",
url_name="login")
def login(self, request):
if not (request.data.get('password') and request.data.get('username')):
return Response(status=status.HTTP_400_BAD_REQUEST)
user = get_object_or_404(
User,
username=request.data.get('username')
)
if check_password(request.data.get('password'), user.password):
return Response(status=status.HTTP_200_OK)
return Response(status=status.HTTP_400_BAD_REQUEST)
| [
"django.contrib.auth.get_user_model",
"tasks.models.System.objects.get",
"tasks.models.Order.objects.get",
"tasks.models.Task.objects.filter",
"tasks.models.Status.objects.get",
"tasks.models.Status.objects.filter",
"tasks.models.Task.objects.get",
"tasks.models.System.objects.all",
"tasks.models.Or... | [((619, 635), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (633, 635), False, 'from django.contrib.auth import get_user_model\n'), ((1519, 1606), 'rest_framework.decorators.action', 'action', ([], {'methods': "['post']", 'detail': '(True)', 'url_path': '"""execute"""', 'url_name': '"""execute_task"""'}), "(methods=['post'], detail=True, url_path='execute', url_name=\n 'execute_task')\n", (1525, 1606), False, 'from rest_framework.decorators import action\n'), ((2750, 2828), 'rest_framework.decorators.action', 'action', ([], {'methods': "['post']", 'detail': '(True)', 'url_path': '"""pause"""', 'url_name': '"""pause_task"""'}), "(methods=['post'], detail=True, url_path='pause', url_name='pause_task')\n", (2756, 2828), False, 'from rest_framework.decorators import action\n'), ((3878, 3963), 'rest_framework.decorators.action', 'action', ([], {'methods': "['post']", 'detail': '(True)', 'url_path': '"""resume"""', 'url_name': '"""resume_task"""'}), "(methods=['post'], detail=True, url_path='resume', url_name='resume_task'\n )\n", (3884, 3963), False, 'from rest_framework.decorators import action\n'), ((5069, 5154), 'rest_framework.decorators.action', 'action', ([], {'methods': "['post']", 'detail': '(True)', 'url_path': '"""delete"""', 'url_name': '"""delete_task"""'}), "(methods=['post'], detail=True, url_path='delete', url_name='delete_task'\n )\n", (5075, 5154), False, 'from rest_framework.decorators import action\n'), ((6105, 6125), 'tasks.models.System.objects.all', 'System.objects.all', ([], {}), '()\n', (6123, 6125), False, 'from tasks.models import Task, Order, Status, System\n'), ((6359, 6433), 'rest_framework.decorators.action', 'action', ([], {'methods': "['post']", 'detail': '(False)', 'url_path': '"""login"""', 'url_name': '"""login"""'}), "(methods=['post'], detail=False, url_path='login', url_name='login')\n", (6365, 6433), False, 'from rest_framework.decorators import action\n'), ((1102, 1169), 'tasks.models.Order.objects.create', 'Order.objects.create', ([], {'task_id': "serializer.data['id']", 'order_number': '(1)'}), "(task_id=serializer.data['id'], order_number=1)\n", (1122, 1169), False, 'from tasks.models import Task, Order, Status, System\n'), ((1178, 1230), 'tasks.models.Status.objects.create', 'Status.objects.create', ([], {'task_id': "serializer.data['id']"}), "(task_id=serializer.data['id'])\n", (1199, 1230), False, 'from tasks.models import Task, Order, Status, System\n'), ((1461, 1512), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_405_METHOD_NOT_ALLOWED'}), '(status=status.HTTP_405_METHOD_NOT_ALLOWED)\n', (1469, 1512), False, 'from rest_framework.response import Response\n'), ((1685, 1715), 'tasks.models.Status.objects.get', 'Status.objects.get', ([], {'task_id': 'pk'}), '(task_id=pk)\n', (1703, 1715), False, 'from tasks.models import Task, Order, Status, System\n'), ((2110, 2133), 'tasks.models.Task.objects.get', 'Task.objects.get', ([], {'id': 'pk'}), '(id=pk)\n', (2126, 2133), False, 'from tasks.models import Task, Order, Status, System\n'), ((2640, 2725), 'rest_framework.response.Response', 'Response', ([], {'data': "{'info': 'На данный момент нет свободных машин, попробуйте позже'}"}), "(data={'info':\n 'На данный момент нет свободных машин, попробуйте позже'})\n", (2648, 2725), False, 'from rest_framework.response import Response\n'), ((2910, 2940), 'tasks.models.Status.objects.get', 'Status.objects.get', ([], {'task_id': 'pk'}), '(task_id=pk)\n', (2928, 2940), False, 'from tasks.models import Task, Order, Status, System\n'), ((3548, 3571), 'tasks.models.Task.objects.get', 'Task.objects.get', ([], {'id': 'pk'}), '(id=pk)\n', (3564, 3571), False, 'from tasks.models import Task, Order, Status, System\n'), ((3597, 3638), 'tasks.models.System.objects.get', 'System.objects.get', ([], {'id': 'task.running_on.id'}), '(id=task.running_on.id)\n', (3615, 3638), False, 'from tasks.models import Task, Order, Status, System\n'), ((3818, 3871), 'rest_framework.response.Response', 'Response', ([], {'data': "{'info': 'Ваша задача приостановлена'}"}), "(data={'info': 'Ваша задача приостановлена'})\n", (3826, 3871), False, 'from rest_framework.response import Response\n'), ((4041, 4071), 'tasks.models.Status.objects.get', 'Status.objects.get', ([], {'task_id': 'pk'}), '(task_id=pk)\n', (4059, 4071), False, 'from tasks.models import Task, Order, Status, System\n'), ((4469, 4492), 'tasks.models.Task.objects.get', 'Task.objects.get', ([], {'id': 'pk'}), '(id=pk)\n', (4485, 4492), False, 'from tasks.models import Task, Order, Status, System\n'), ((4959, 5044), 'rest_framework.response.Response', 'Response', ([], {'data': "{'info': 'На данный момент нет свободных машин, попробуйте позже'}"}), "(data={'info':\n 'На данный момент нет свободных машин, попробуйте позже'})\n", (4967, 5044), False, 'from rest_framework.response import Response\n'), ((5518, 5564), 'rest_framework.response.Response', 'Response', ([], {'data': "{'info': 'Ваша задача удалена'}"}), "(data={'info': 'Ваша задача удалена'})\n", (5526, 5564), False, 'from rest_framework.response import Response\n'), ((6874, 6918), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_400_BAD_REQUEST'}), '(status=status.HTTP_400_BAD_REQUEST)\n', (6882, 6918), False, 'from rest_framework.response import Response\n'), ((1782, 1874), 'rest_framework.response.Response', 'Response', ([], {'data': "{'error': 'Эта задача уже запущена'}", 'status': 'status.HTTP_400_BAD_REQUEST'}), "(data={'error': 'Эта задача уже запущена'}, status=status.\n HTTP_400_BAD_REQUEST)\n", (1790, 1874), False, 'from rest_framework.response import Response\n'), ((1964, 2069), 'rest_framework.response.Response', 'Response', ([], {'data': "{'error': 'Невозможно изменить удалённую задачу'}", 'status': 'status.HTTP_400_BAD_REQUEST'}), "(data={'error': 'Невозможно изменить удалённую задачу'}, status=\n status.HTTP_400_BAD_REQUEST)\n", (1972, 2069), False, 'from rest_framework.response import Response\n'), ((2533, 2594), 'rest_framework.response.Response', 'Response', ([], {'data': "{'info': 'Ваша задача запущена на выполнение'}"}), "(data={'info': 'Ваша задача запущена на выполнение'})\n", (2541, 2594), False, 'from rest_framework.response import Response\n'), ((3007, 3105), 'rest_framework.response.Response', 'Response', ([], {'data': "{'error': 'Эта задача уже приостановлена'}", 'status': 'status.HTTP_400_BAD_REQUEST'}), "(data={'error': 'Эта задача уже приостановлена'}, status=status.\n HTTP_400_BAD_REQUEST)\n", (3015, 3105), False, 'from rest_framework.response import Response\n'), ((3195, 3300), 'rest_framework.response.Response', 'Response', ([], {'data': "{'error': 'Невозможно изменить удалённую задачу'}", 'status': 'status.HTTP_400_BAD_REQUEST'}), "(data={'error': 'Невозможно изменить удалённую задачу'}, status=\n status.HTTP_400_BAD_REQUEST)\n", (3203, 3300), False, 'from rest_framework.response import Response\n'), ((3393, 3506), 'rest_framework.response.Response', 'Response', ([], {'data': "{'error': 'Невозможно приостановить не запущенную задачу'}", 'status': 'status.HTTP_400_BAD_REQUEST'}), "(data={'error': 'Невозможно приостановить не запущенную задачу'},\n status=status.HTTP_400_BAD_REQUEST)\n", (3401, 3506), False, 'from rest_framework.response import Response\n'), ((4138, 4233), 'rest_framework.response.Response', 'Response', ([], {'data': "{'error': 'Эта задача уже выполняется'}", 'status': 'status.HTTP_400_BAD_REQUEST'}), "(data={'error': 'Эта задача уже выполняется'}, status=status.\n HTTP_400_BAD_REQUEST)\n", (4146, 4233), False, 'from rest_framework.response import Response\n'), ((4323, 4428), 'rest_framework.response.Response', 'Response', ([], {'data': "{'error': 'Невозможно изменить удалённую задачу'}", 'status': 'status.HTTP_400_BAD_REQUEST'}), "(data={'error': 'Невозможно изменить удалённую задачу'}, status=\n status.HTTP_400_BAD_REQUEST)\n", (4331, 4428), False, 'from rest_framework.response import Response\n'), ((4892, 4943), 'rest_framework.response.Response', 'Response', ([], {'data': "{'info': 'Ваша задача возобновлена'}"}), "(data={'info': 'Ваша задача возобновлена'})\n", (4900, 4943), False, 'from rest_framework.response import Response\n'), ((5298, 5327), 'tasks.models.Order.objects.get', 'Order.objects.get', ([], {'task_id': 'pk'}), '(task_id=pk)\n', (5315, 5327), False, 'from tasks.models import Task, Order, Status, System\n'), ((6575, 6619), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_400_BAD_REQUEST'}), '(status=status.HTTP_400_BAD_REQUEST)\n', (6583, 6619), False, 'from rest_framework.response import Response\n'), ((6823, 6858), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_200_OK'}), '(status=status.HTTP_200_OK)\n', (6831, 6858), False, 'from rest_framework.response import Response\n'), ((915, 958), 'tasks.models.Task.objects.filter', 'Task.objects.filter', ([], {'user': 'self.request.user'}), '(user=self.request.user)\n', (934, 958), False, 'from tasks.models import Task, Order, Status, System\n'), ((2161, 2236), 'tasks.models.System.objects.filter', 'System.objects.filter', ([], {'active': '(True)', 'available_threads__gte': 'task.num_threads'}), '(active=True, available_threads__gte=task.num_threads)\n', (2182, 2236), False, 'from tasks.models import Task, Order, Status, System\n'), ((4520, 4595), 'tasks.models.System.objects.filter', 'System.objects.filter', ([], {'active': '(True)', 'available_threads__gte': 'task.num_threads'}), '(active=True, available_threads__gte=task.num_threads)\n', (4541, 4595), False, 'from tasks.models import Task, Order, Status, System\n'), ((5223, 5262), 'tasks.models.Order.objects.order_by', 'Order.objects.order_by', (['"""-order_number"""'], {}), "('-order_number')\n", (5245, 5262), False, 'from tasks.models import Task, Order, Status, System\n'), ((5349, 5372), 'tasks.models.Task.objects.get', 'Task.objects.get', ([], {'id': 'pk'}), '(id=pk)\n', (5365, 5372), False, 'from tasks.models import Task, Order, Status, System\n'), ((5709, 5759), 'tasks.models.Order.objects.filter', 'Order.objects.filter', ([], {'task__user': 'self.request.user'}), '(task__user=self.request.user)\n', (5729, 5759), False, 'from tasks.models import Task, Order, Status, System\n'), ((5953, 6004), 'tasks.models.Status.objects.filter', 'Status.objects.filter', ([], {'task__user': 'self.request.user'}), '(task__user=self.request.user)\n', (5974, 6004), False, 'from tasks.models import Task, Order, Status, System\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-28 23:51
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('users', '0003_department'),
]
operations = [
migrations.CreateModel(
name='Assets',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=15, verbose_name='Name of Asset')),
('department', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='assets', to='users.Department')),
],
),
migrations.CreateModel(
name='Employee',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=255, verbose_name='Name of Employee')),
('department', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='employees', to='users.Department')),
],
),
]
| [
"django.db.models.AutoField",
"django.db.models.CharField",
"django.db.models.ForeignKey"
] | [((417, 510), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (433, 510), False, 'from django.db import migrations, models\n'), ((534, 607), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(15)', 'verbose_name': '"""Name of Asset"""'}), "(blank=True, max_length=15, verbose_name='Name of Asset')\n", (550, 607), False, 'from django.db import migrations, models\n'), ((641, 754), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""assets"""', 'to': '"""users.Department"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='assets', to='users.Department')\n", (658, 754), False, 'from django.db import migrations, models\n'), ((883, 976), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (899, 976), False, 'from django.db import migrations, models\n'), ((1000, 1077), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(255)', 'verbose_name': '"""Name of Employee"""'}), "(blank=True, max_length=255, verbose_name='Name of Employee')\n", (1016, 1077), False, 'from django.db import migrations, models\n'), ((1111, 1227), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""employees"""', 'to': '"""users.Department"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='employees', to='users.Department')\n", (1128, 1227), False, 'from django.db import migrations, models\n')] |
import numpy as np
import pytest
from rsgeo.geometry import Polygon # noqa
class TestPolygon:
def setup_method(self):
self.p = Polygon([(0, 0), (1, 1), (1, 0), (0, 0)])
def test_repr(self):
str_repr = str(self.p)
exp = "Polygon([(0, 0), (1, 1), (1, 0), (0, 0)])"
assert str_repr == exp
def test_seq_to_2darray(self):
seq = [(1, 2), (3, 4)]
res = self.p._seq_to_2darray(seq)
np.testing.assert_array_equal(res, np.array([[1, 2], [3, 4]]))
def test_seq_to_2darray_sad_case(self):
seq = [(1, 2, 3), (4, 5, 6)]
with pytest.raises(ValueError):
_ = self.p._seq_to_2darray(seq)
@pytest.mark.parametrize("x, expected", [
(np.array([1, 2, 3]), np.array([1, 2, 3])),
(np.array([[1], [2], [3]]), np.array([1, 2, 3])),
])
def test_to_1d(self, x, expected):
result = self.p._to_1d(x)
np.testing.assert_array_equal(result, expected)
def test_to_1d_sad_case(self):
x = np.array([(1, 2, 3), (4, 5, 6)])
with pytest.raises(ValueError):
_ = self.p._to_1d(x)
def test_contains(self, xs, ys):
res = self.p.contains(xs, ys)
np.testing.assert_array_equal(res, np.array([False, False, False, True]))
def test_distance(self, xs, ys):
result = self.p.distance(xs, ys)
np.testing.assert_array_equal(result, np.array([0, 0, 1.4142135623730951, 0]))
| [
"numpy.testing.assert_array_equal",
"numpy.array",
"pytest.raises",
"rsgeo.geometry.Polygon"
] | [((143, 184), 'rsgeo.geometry.Polygon', 'Polygon', (['[(0, 0), (1, 1), (1, 0), (0, 0)]'], {}), '([(0, 0), (1, 1), (1, 0), (0, 0)])\n', (150, 184), False, 'from rsgeo.geometry import Polygon\n'), ((922, 969), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (['result', 'expected'], {}), '(result, expected)\n', (951, 969), True, 'import numpy as np\n'), ((1018, 1050), 'numpy.array', 'np.array', (['[(1, 2, 3), (4, 5, 6)]'], {}), '([(1, 2, 3), (4, 5, 6)])\n', (1026, 1050), True, 'import numpy as np\n'), ((483, 509), 'numpy.array', 'np.array', (['[[1, 2], [3, 4]]'], {}), '([[1, 2], [3, 4]])\n', (491, 509), True, 'import numpy as np\n'), ((606, 631), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (619, 631), False, 'import pytest\n'), ((1064, 1089), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1077, 1089), False, 'import pytest\n'), ((1243, 1280), 'numpy.array', 'np.array', (['[False, False, False, True]'], {}), '([False, False, False, True])\n', (1251, 1280), True, 'import numpy as np\n'), ((1407, 1446), 'numpy.array', 'np.array', (['[0, 0, 1.4142135623730951, 0]'], {}), '([0, 0, 1.4142135623730951, 0])\n', (1415, 1446), True, 'import numpy as np\n'), ((733, 752), 'numpy.array', 'np.array', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (741, 752), True, 'import numpy as np\n'), ((754, 773), 'numpy.array', 'np.array', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (762, 773), True, 'import numpy as np\n'), ((785, 810), 'numpy.array', 'np.array', (['[[1], [2], [3]]'], {}), '([[1], [2], [3]])\n', (793, 810), True, 'import numpy as np\n'), ((812, 831), 'numpy.array', 'np.array', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (820, 831), True, 'import numpy as np\n')] |
import os
import json
raw_data = open("bitcoin-util-test.json").read()
input_data = json.loads(raw_data)
for testObj in input_data:
inp = ""
out = ""
args = " ".join(testObj['args'])
if 'input' in testObj:
inp = testObj['input']
if 'output_cmp' in testObj:
out = testObj['output_cmp']
if out:
if inp:
# print("echo \"../../../src/tesseract-tx {} < {} > {}\"".format(args, inp, out))
# print("../../../src/tesseract-tx {} < {} > {}".format(args, inp, out))
pass
else:
# print("echo \"../../../src/tesseract-tx {} > {}\"".format(args, out))
print("../../../src/tesseract-tx {} > {}".format(args, out))
| [
"json.loads"
] | [((85, 105), 'json.loads', 'json.loads', (['raw_data'], {}), '(raw_data)\n', (95, 105), False, 'import json\n')] |
"""Defines a command message that processes the input for a job"""
from __future__ import unicode_literals
import logging
from django.db import transaction
from data.data.exceptions import InvalidData
from job.models import Job
from messaging.messages.message import CommandMessage
logger = logging.getLogger(__name__)
def create_process_job_input_messages(job_ids):
"""Creates messages to process the input for the given jobs
:param job_ids: The job IDs
:type job_ids: list
:return: The list of messages
:rtype: list
"""
messages = []
for job_id in job_ids:
message = ProcessJobInput()
message.job_id = job_id
messages.append(message)
return messages
class ProcessJobInput(CommandMessage):
"""Command message that processes the input for a job
"""
def __init__(self):
"""Constructor
"""
super(ProcessJobInput, self).__init__('process_job_input')
self.job_id = None
def to_json(self):
"""See :meth:`messaging.messages.message.CommandMessage.to_json`
"""
return {'job_id': self.job_id}
@staticmethod
def from_json(json_dict):
"""See :meth:`messaging.messages.message.CommandMessage.from_json`
"""
message = ProcessJobInput()
message.job_id = json_dict['job_id']
return message
def execute(self):
"""See :meth:`messaging.messages.message.CommandMessage.execute`
"""
from queue.messages.queued_jobs import create_queued_jobs_messages, QueuedJob
job = Job.objects.get_job_with_interfaces(self.job_id)
if not job.has_input():
if not job.recipe:
logger.error('Job %d has no input and is not in a recipe. Message will not re-run.', self.job_id)
return True
try:
self._generate_input_data_from_recipe(job)
except InvalidData:
logger.exception('Recipe created invalid input data for job %d. Message will not re-run.', self.job_id)
return True
# Lock job model and process job's input data
with transaction.atomic():
job = Job.objects.get_locked_job(self.job_id)
Job.objects.process_job_input(job)
# Create message to queue the job
if job.num_exes == 0:
logger.info('Processed input for job %d, sending message to queue job', self.job_id)
self.new_messages.extend(create_queued_jobs_messages([QueuedJob(job.id, 0)], requeue=False))
return True
def _generate_input_data_from_recipe(self, job):
"""Generates the job's input data from its recipe dependencies and validates and sets the input data on the job
:param job: The job with related job_type_rev and recipe__recipe_type_rev models
:type job: :class:`job.models.Job`
:raises :class:`data.data.exceptions.InvalidData`: If the data is invalid
"""
from recipe.models import RecipeNode
# TODO: this is a hack to work with old legacy recipe data with workspaces, remove when legacy job types go
old_recipe_input_dict = dict(job.recipe.input)
# Get job input from dependencies in the recipe
recipe_input_data = job.recipe.get_input_data()
node_outputs = RecipeNode.objects.get_recipe_node_outputs(job.recipe_id)
for node_output in node_outputs.values():
if node_output.node_type == 'job' and node_output.id == job.id:
node_name = node_output.node_name
break
# TODO: this is a hack to work with old legacy recipe data with workspaces, remove when legacy job types go
job.recipe.input = old_recipe_input_dict
definition = job.recipe.recipe_type_rev.get_definition()
input_data = definition.generate_node_input_data(node_name, recipe_input_data, node_outputs)
Job.objects.set_job_input_data_v6(job, input_data)
| [
"logging.getLogger",
"job.models.Job.objects.set_job_input_data_v6",
"django.db.transaction.atomic",
"recipe.models.RecipeNode.objects.get_recipe_node_outputs",
"job.models.Job.objects.get_job_with_interfaces",
"queue.messages.queued_jobs.QueuedJob",
"job.models.Job.objects.process_job_input",
"job.mo... | [((296, 323), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (313, 323), False, 'import logging\n'), ((1587, 1635), 'job.models.Job.objects.get_job_with_interfaces', 'Job.objects.get_job_with_interfaces', (['self.job_id'], {}), '(self.job_id)\n', (1622, 1635), False, 'from job.models import Job\n'), ((3346, 3403), 'recipe.models.RecipeNode.objects.get_recipe_node_outputs', 'RecipeNode.objects.get_recipe_node_outputs', (['job.recipe_id'], {}), '(job.recipe_id)\n', (3388, 3403), False, 'from recipe.models import RecipeNode\n'), ((3943, 3993), 'job.models.Job.objects.set_job_input_data_v6', 'Job.objects.set_job_input_data_v6', (['job', 'input_data'], {}), '(job, input_data)\n', (3976, 3993), False, 'from job.models import Job\n'), ((2167, 2187), 'django.db.transaction.atomic', 'transaction.atomic', ([], {}), '()\n', (2185, 2187), False, 'from django.db import transaction\n'), ((2207, 2246), 'job.models.Job.objects.get_locked_job', 'Job.objects.get_locked_job', (['self.job_id'], {}), '(self.job_id)\n', (2233, 2246), False, 'from job.models import Job\n'), ((2259, 2293), 'job.models.Job.objects.process_job_input', 'Job.objects.process_job_input', (['job'], {}), '(job)\n', (2288, 2293), False, 'from job.models import Job\n'), ((2530, 2550), 'queue.messages.queued_jobs.QueuedJob', 'QueuedJob', (['job.id', '(0)'], {}), '(job.id, 0)\n', (2539, 2550), False, 'from queue.messages.queued_jobs import create_queued_jobs_messages, QueuedJob\n')] |
from __future__ import unicode_literals
import os.path
from wand.image import Image as ImageHandler
from mock import patch, Mock, call
from nose.tools import eq_, nottest
from tests import TestCase
from tests.image_helper import SMALL_JPG, SOME_GIF, SOME_PNG
from catsnap import Client
from catsnap.image_truck import ImageTruck
from catsnap.table.image import Image as ImageTable, ImageResize
from catsnap.resize_image import ResizeImage
class TestResizeImage(TestCase):
def test_resize_an_image(self):
image_handler = ImageHandler(filename=SMALL_JPG)
truck = Mock()
session = Client().session()
image = ImageTable(filename='badcafe')
session.add(image)
session.flush()
after_upload = Mock()
ResizeImage._resize_image(image,
image_handler,
truck,
'thumbnail',
after_upload)
resizes = session.query(ImageResize).all()
eq_(len(resizes), 1)
eq_(resizes[0].width, 100)
eq_(resizes[0].height, 66)
eq_(resizes[0].suffix, 'thumbnail')
(args, kwargs) = truck.upload_resize.call_args
eq_(args[1], 'thumbnail')
after_upload.assert_called_once_with('thumbnail')
@patch('catsnap.resize_image.ImageHandler')
def test_resize_a_portrait_image(self, MockImage):
session = Client().session()
image_handler = Mock()
image_handler.size = (427, 640)
image_handler.format = 'JPEG'
image = ImageTable(filename='badcafe')
session.add(image)
session.flush()
after_upload = Mock()
ResizeImage._resize_image(image,
image_handler,
Mock(),
'medium',
after_upload)
resizes = session.query(ImageResize).all()
eq_(len(resizes), 1)
eq_(resizes[0].height, 500)
eq_(resizes[0].width, 333)
after_upload.assert_called_once_with('medium')
@patch('catsnap.resize_image.ImageHandler')
@patch('catsnap.resize_image.ResizeImage._resize_image')
def test_creates_various_resizes(self, resize_image_method, MockImage):
session = Client().session()
image = ImageTable(filename='faded')
session.add(image)
session.flush()
truck = ImageTruck('contents', None, None)
image_handler = Mock()
image_handler.size = (3648, 2736)
MockImage.return_value = image_handler
after_upload = Mock()
ResizeImage.make_resizes(image, truck, after_upload)
resize_image_method.assert_has_calls([
call(image, image_handler, truck, 'thumbnail', after_upload),
call(image, image_handler, truck, 'small', after_upload),
call(image, image_handler, truck, 'medium', after_upload),
call(image, image_handler, truck, 'large', after_upload),
])
@patch('catsnap.resize_image.ImageHandler')
@patch('catsnap.resize_image.ResizeImage._resize_image')
def test_only_scales_images_down_not_up(self,
resize_image_method,
MockImage):
image_handler = Mock()
image_handler.size = (360, 360)
MockImage.return_value = image_handler
session = Client().session()
image = ImageTable(filename='faded')
session.add(image)
session.flush()
truck = Mock()
after_upload = Mock()
ResizeImage.make_resizes(image, truck, after_upload)
resize_image_method.assert_has_calls([
call(image, image_handler, truck, 'thumbnail', after_upload),
call(image, image_handler, truck, 'small', after_upload),
], any_order=True)
@patch('catsnap.Client.bucket')
def test_handles_jpegs(self, bucket_method):
self.file_type_test(bucket_method,
SMALL_JPG,
'image/jpeg',
(100, 66))
@patch('catsnap.Client.bucket')
def test_handles_pngs(self, bucket_method):
self.file_type_test(bucket_method,
SOME_PNG,
'image/png',
(72, 100))
@patch('catsnap.Client.bucket')
def test_handles_gifs(self, bucket_method):
self.file_type_test(bucket_method,
SOME_GIF,
'image/gif',
(100, 64))
@nottest
def file_type_test(self,
bucket_method,
test_file,
content_type,
resized_size):
bucket = Mock()
bucket_method.return_value = bucket
new_key = Mock()
bucket.new_key.return_value = new_key
image_handler = ImageHandler(filename=test_file)
truck = ImageTruck.new_from_file(test_file)
session = Client().session()
image = ImageTable(filename='badcafe')
session.add(image)
session.flush()
after_upload = Mock()
ResizeImage._resize_image(image,
image_handler,
truck,
'thumbnail',
after_upload)
new_key.set_metadata.assert_called_with('Content-Type', content_type)
resized_contents = new_key.set_contents_from_string.call_args[0][0]
image_handler = ImageHandler(blob=resized_contents)
eq_(image_handler.size, resized_size)
after_upload.assert_called_once_with('thumbnail')
| [
"catsnap.resize_image.ResizeImage._resize_image",
"mock.patch",
"nose.tools.eq_",
"catsnap.image_truck.ImageTruck",
"catsnap.Client",
"mock.Mock",
"wand.image.Image",
"catsnap.table.image.Image",
"catsnap.image_truck.ImageTruck.new_from_file",
"mock.call",
"catsnap.resize_image.ResizeImage.make_... | [((1332, 1374), 'mock.patch', 'patch', (['"""catsnap.resize_image.ImageHandler"""'], {}), "('catsnap.resize_image.ImageHandler')\n", (1337, 1374), False, 'from mock import patch, Mock, call\n'), ((2143, 2185), 'mock.patch', 'patch', (['"""catsnap.resize_image.ImageHandler"""'], {}), "('catsnap.resize_image.ImageHandler')\n", (2148, 2185), False, 'from mock import patch, Mock, call\n'), ((2191, 2246), 'mock.patch', 'patch', (['"""catsnap.resize_image.ResizeImage._resize_image"""'], {}), "('catsnap.resize_image.ResizeImage._resize_image')\n", (2196, 2246), False, 'from mock import patch, Mock, call\n'), ((3071, 3113), 'mock.patch', 'patch', (['"""catsnap.resize_image.ImageHandler"""'], {}), "('catsnap.resize_image.ImageHandler')\n", (3076, 3113), False, 'from mock import patch, Mock, call\n'), ((3119, 3174), 'mock.patch', 'patch', (['"""catsnap.resize_image.ResizeImage._resize_image"""'], {}), "('catsnap.resize_image.ResizeImage._resize_image')\n", (3124, 3174), False, 'from mock import patch, Mock, call\n'), ((3942, 3972), 'mock.patch', 'patch', (['"""catsnap.Client.bucket"""'], {}), "('catsnap.Client.bucket')\n", (3947, 3972), False, 'from mock import patch, Mock, call\n'), ((4191, 4221), 'mock.patch', 'patch', (['"""catsnap.Client.bucket"""'], {}), "('catsnap.Client.bucket')\n", (4196, 4221), False, 'from mock import patch, Mock, call\n'), ((4437, 4467), 'mock.patch', 'patch', (['"""catsnap.Client.bucket"""'], {}), "('catsnap.Client.bucket')\n", (4442, 4467), False, 'from mock import patch, Mock, call\n'), ((535, 567), 'wand.image.Image', 'ImageHandler', ([], {'filename': 'SMALL_JPG'}), '(filename=SMALL_JPG)\n', (547, 567), True, 'from wand.image import Image as ImageHandler\n'), ((584, 590), 'mock.Mock', 'Mock', ([], {}), '()\n', (588, 590), False, 'from mock import patch, Mock, call\n'), ((644, 674), 'catsnap.table.image.Image', 'ImageTable', ([], {'filename': '"""badcafe"""'}), "(filename='badcafe')\n", (654, 674), True, 'from catsnap.table.image import Image as ImageTable, ImageResize\n'), ((750, 756), 'mock.Mock', 'Mock', ([], {}), '()\n', (754, 756), False, 'from mock import patch, Mock, call\n'), ((765, 850), 'catsnap.resize_image.ResizeImage._resize_image', 'ResizeImage._resize_image', (['image', 'image_handler', 'truck', '"""thumbnail"""', 'after_upload'], {}), "(image, image_handler, truck, 'thumbnail',\n after_upload)\n", (790, 850), False, 'from catsnap.resize_image import ResizeImage\n'), ((1072, 1098), 'nose.tools.eq_', 'eq_', (['resizes[0].width', '(100)'], {}), '(resizes[0].width, 100)\n', (1075, 1098), False, 'from nose.tools import eq_, nottest\n'), ((1107, 1133), 'nose.tools.eq_', 'eq_', (['resizes[0].height', '(66)'], {}), '(resizes[0].height, 66)\n', (1110, 1133), False, 'from nose.tools import eq_, nottest\n'), ((1142, 1177), 'nose.tools.eq_', 'eq_', (['resizes[0].suffix', '"""thumbnail"""'], {}), "(resizes[0].suffix, 'thumbnail')\n", (1145, 1177), False, 'from nose.tools import eq_, nottest\n'), ((1242, 1267), 'nose.tools.eq_', 'eq_', (['args[1]', '"""thumbnail"""'], {}), "(args[1], 'thumbnail')\n", (1245, 1267), False, 'from nose.tools import eq_, nottest\n'), ((1491, 1497), 'mock.Mock', 'Mock', ([], {}), '()\n', (1495, 1497), False, 'from mock import patch, Mock, call\n'), ((1592, 1622), 'catsnap.table.image.Image', 'ImageTable', ([], {'filename': '"""badcafe"""'}), "(filename='badcafe')\n", (1602, 1622), True, 'from catsnap.table.image import Image as ImageTable, ImageResize\n'), ((1698, 1704), 'mock.Mock', 'Mock', ([], {}), '()\n', (1702, 1704), False, 'from mock import patch, Mock, call\n'), ((2018, 2045), 'nose.tools.eq_', 'eq_', (['resizes[0].height', '(500)'], {}), '(resizes[0].height, 500)\n', (2021, 2045), False, 'from nose.tools import eq_, nottest\n'), ((2054, 2080), 'nose.tools.eq_', 'eq_', (['resizes[0].width', '(333)'], {}), '(resizes[0].width, 333)\n', (2057, 2080), False, 'from nose.tools import eq_, nottest\n'), ((2376, 2404), 'catsnap.table.image.Image', 'ImageTable', ([], {'filename': '"""faded"""'}), "(filename='faded')\n", (2386, 2404), True, 'from catsnap.table.image import Image as ImageTable, ImageResize\n'), ((2473, 2507), 'catsnap.image_truck.ImageTruck', 'ImageTruck', (['"""contents"""', 'None', 'None'], {}), "('contents', None, None)\n", (2483, 2507), False, 'from catsnap.image_truck import ImageTruck\n'), ((2533, 2539), 'mock.Mock', 'Mock', ([], {}), '()\n', (2537, 2539), False, 'from mock import patch, Mock, call\n'), ((2652, 2658), 'mock.Mock', 'Mock', ([], {}), '()\n', (2656, 2658), False, 'from mock import patch, Mock, call\n'), ((2668, 2720), 'catsnap.resize_image.ResizeImage.make_resizes', 'ResizeImage.make_resizes', (['image', 'truck', 'after_upload'], {}), '(image, truck, after_upload)\n', (2692, 2720), False, 'from catsnap.resize_image import ResizeImage\n'), ((3370, 3376), 'mock.Mock', 'Mock', ([], {}), '()\n', (3374, 3376), False, 'from mock import patch, Mock, call\n'), ((3518, 3546), 'catsnap.table.image.Image', 'ImageTable', ([], {'filename': '"""faded"""'}), "(filename='faded')\n", (3528, 3546), True, 'from catsnap.table.image import Image as ImageTable, ImageResize\n'), ((3615, 3621), 'mock.Mock', 'Mock', ([], {}), '()\n', (3619, 3621), False, 'from mock import patch, Mock, call\n'), ((3645, 3651), 'mock.Mock', 'Mock', ([], {}), '()\n', (3649, 3651), False, 'from mock import patch, Mock, call\n'), ((3660, 3712), 'catsnap.resize_image.ResizeImage.make_resizes', 'ResizeImage.make_resizes', (['image', 'truck', 'after_upload'], {}), '(image, truck, after_upload)\n', (3684, 3712), False, 'from catsnap.resize_image import ResizeImage\n'), ((4884, 4890), 'mock.Mock', 'Mock', ([], {}), '()\n', (4888, 4890), False, 'from mock import patch, Mock, call\n'), ((4953, 4959), 'mock.Mock', 'Mock', ([], {}), '()\n', (4957, 4959), False, 'from mock import patch, Mock, call\n'), ((5030, 5062), 'wand.image.Image', 'ImageHandler', ([], {'filename': 'test_file'}), '(filename=test_file)\n', (5042, 5062), True, 'from wand.image import Image as ImageHandler\n'), ((5079, 5114), 'catsnap.image_truck.ImageTruck.new_from_file', 'ImageTruck.new_from_file', (['test_file'], {}), '(test_file)\n', (5103, 5114), False, 'from catsnap.image_truck import ImageTruck\n'), ((5168, 5198), 'catsnap.table.image.Image', 'ImageTable', ([], {'filename': '"""badcafe"""'}), "(filename='badcafe')\n", (5178, 5198), True, 'from catsnap.table.image import Image as ImageTable, ImageResize\n'), ((5273, 5279), 'mock.Mock', 'Mock', ([], {}), '()\n', (5277, 5279), False, 'from mock import patch, Mock, call\n'), ((5289, 5374), 'catsnap.resize_image.ResizeImage._resize_image', 'ResizeImage._resize_image', (['image', 'image_handler', 'truck', '"""thumbnail"""', 'after_upload'], {}), "(image, image_handler, truck, 'thumbnail',\n after_upload)\n", (5314, 5374), False, 'from catsnap.resize_image import ResizeImage\n'), ((5687, 5722), 'wand.image.Image', 'ImageHandler', ([], {'blob': 'resized_contents'}), '(blob=resized_contents)\n', (5699, 5722), True, 'from wand.image import Image as ImageHandler\n'), ((5731, 5768), 'nose.tools.eq_', 'eq_', (['image_handler.size', 'resized_size'], {}), '(image_handler.size, resized_size)\n', (5734, 5768), False, 'from nose.tools import eq_, nottest\n'), ((1829, 1835), 'mock.Mock', 'Mock', ([], {}), '()\n', (1833, 1835), False, 'from mock import patch, Mock, call\n'), ((609, 617), 'catsnap.Client', 'Client', ([], {}), '()\n', (615, 617), False, 'from catsnap import Client\n'), ((1448, 1456), 'catsnap.Client', 'Client', ([], {}), '()\n', (1454, 1456), False, 'from catsnap import Client\n'), ((2341, 2349), 'catsnap.Client', 'Client', ([], {}), '()\n', (2347, 2349), False, 'from catsnap import Client\n'), ((2781, 2841), 'mock.call', 'call', (['image', 'image_handler', 'truck', '"""thumbnail"""', 'after_upload'], {}), "(image, image_handler, truck, 'thumbnail', after_upload)\n", (2785, 2841), False, 'from mock import patch, Mock, call\n'), ((2855, 2911), 'mock.call', 'call', (['image', 'image_handler', 'truck', '"""small"""', 'after_upload'], {}), "(image, image_handler, truck, 'small', after_upload)\n", (2859, 2911), False, 'from mock import patch, Mock, call\n'), ((2925, 2982), 'mock.call', 'call', (['image', 'image_handler', 'truck', '"""medium"""', 'after_upload'], {}), "(image, image_handler, truck, 'medium', after_upload)\n", (2929, 2982), False, 'from mock import patch, Mock, call\n'), ((2996, 3052), 'mock.call', 'call', (['image', 'image_handler', 'truck', '"""large"""', 'after_upload'], {}), "(image, image_handler, truck, 'large', after_upload)\n", (3000, 3052), False, 'from mock import patch, Mock, call\n'), ((3483, 3491), 'catsnap.Client', 'Client', ([], {}), '()\n', (3489, 3491), False, 'from catsnap import Client\n'), ((3773, 3833), 'mock.call', 'call', (['image', 'image_handler', 'truck', '"""thumbnail"""', 'after_upload'], {}), "(image, image_handler, truck, 'thumbnail', after_upload)\n", (3777, 3833), False, 'from mock import patch, Mock, call\n'), ((3847, 3903), 'mock.call', 'call', (['image', 'image_handler', 'truck', '"""small"""', 'after_upload'], {}), "(image, image_handler, truck, 'small', after_upload)\n", (3851, 3903), False, 'from mock import patch, Mock, call\n'), ((5133, 5141), 'catsnap.Client', 'Client', ([], {}), '()\n', (5139, 5141), False, 'from catsnap import Client\n')] |
from os import path
dirname = path.dirname(__file__)
class Config:
# SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_DATABASE_URI = f'sqlite:///{dirname}/db.sqlite3'
BATATINHAS = 3
class development(Config):
DEBUG = True
SQLALCHEMY_ECHO = True
class production(Config):
DEBUG = False
| [
"os.path.dirname"
] | [((30, 52), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (42, 52), False, 'from os import path\n')] |
# Generate some graph to verify the quality of Metis' partitioning
# A graph with 300 vertices, vertices 0~99, 100~199, 200~299 form
# a complete graph respectively
# and add some random disturbing edges
import random
import os
os.environ["METIS_DLL"] = "/usr/local/lib/libmetis.so"
import metis
import networkx as nx
dg = nx.MultiDiGraph()
def gen_complete_graph(low, high): # [low, high)
for l in range(low, high):
for r in range(low, high):
if l != r:
dg.add_edge(l, r)
gen_complete_graph(0, 100)
gen_complete_graph(100, 200)
gen_complete_graph(200, 300)
for _ in range(100):
l = random.randint(0, 299)
r = random.randint(0, 299)
dg.add_edge(l, r)
(edgecuts, parts) = metis.part_graph(dg, 3) # num of shards
fp = open("clustered_verify_metis.txt", "w")
print(parts, file=fp)
fp.close()
| [
"networkx.MultiDiGraph",
"metis.part_graph",
"random.randint"
] | [((325, 342), 'networkx.MultiDiGraph', 'nx.MultiDiGraph', ([], {}), '()\n', (340, 342), True, 'import networkx as nx\n'), ((730, 753), 'metis.part_graph', 'metis.part_graph', (['dg', '(3)'], {}), '(dg, 3)\n', (746, 753), False, 'import metis\n'), ((633, 655), 'random.randint', 'random.randint', (['(0)', '(299)'], {}), '(0, 299)\n', (647, 655), False, 'import random\n'), ((664, 686), 'random.randint', 'random.randint', (['(0)', '(299)'], {}), '(0, 299)\n', (678, 686), False, 'import random\n')] |
"""
Manage Configuration AppMap recorder for Python.
"""
import inspect
import logging
from os.path import realpath
from pathlib import Path
import re
import sys
from textwrap import dedent
import importlib_metadata
import yaml
from yaml.parser import ParserError
from . import utils
from .env import Env
from .instrument import instrument
from .metadata import Metadata
from .recording import Filter, FilterableCls, Recorder
logger = logging.getLogger(__name__)
def warn_config_missing(path):
"""Display a warning about missing config file in path."""
name = path.resolve().parent.name
package = re.sub(r'\W', '.', name).lower()
def default_app_name(rootdir):
rootdir = Path(rootdir)
if not (rootdir / '.git').exists():
return rootdir.name
git = utils.git(cwd=str(rootdir))
repo_root = git('rev-parse --show-toplevel')
return Path(repo_root).name
# Make it easy to mock sys.prefix
def _get_sys_prefix():
return realpath(sys.prefix)
def find_top_packages(rootdir):
"""
Scan a directory tree for packages that should appear in the
default config file.
Examine directories in rootdir, to see if they contains an
__init__.py. If it does, add it to the list of packages and don't
scan any of its subdirectories. If it doesn't, scan its
subdirectories to find __init__.py.
Some directories are automatically excluded from the search:
* sys.prefix
* Hidden directories (i.e. those that start with a '.')
* node_modules
For example, in a directory like this
% ls -F
LICENSE Makefile appveyor.yml docs/ src/ tests/
MANIFEST.in README.rst blog/ setup.py tddium.yml tox.ini
docs, src, tests, and blog will get scanned.
Only src has a subdirectory containing an __init__.py:
% for f in docs src tests blog; do find $f | head -5; done
docs
docs/index.rst
docs/testing.rst
docs/_templates
docs/_templates/.gitkeep
src
src/wrapt
src/wrapt/importer.py
src/wrapt/__init__.py
src/wrapt/wrappers.py
tests
tests/test_outer_classmethod.py
tests/test_inner_classmethod.py
tests/conftest.py
tests/test_class.py
blog
blog/04-implementing-a-universal-decorator.md
blog/03-implementing-a-factory-for-creating-decorators.md
blog/05-decorators-which-accept-arguments.md
blog/09-performance-overhead-of-using-decorators.md
Thus, the list of top packages returned will be ['wrapt'].
"""
# Use a set so we don't get duplicates, e.g. if the project's
# build process copies its source to a subdirectory.
packages = set()
import os
def excluded(dir):
excluded = dir == 'node_modules' or dir[0] == '.'
if excluded:
logger.debug('excluding dir %s', dir)
return excluded
sys_prefix = _get_sys_prefix()
for dir,dirs,files in os.walk(rootdir):
logger.debug('dir %s dirs %s', dir, dirs)
if realpath(dir) == sys_prefix:
logger.debug('skipping sys.prefix %s', sys_prefix)
dirs.clear()
continue
if '__init__.py' in files:
packages.add(Path(dir).name)
dirs.clear()
else:
dirs[:] = [d for d in dirs if not excluded(d)]
return packages
class Config:
""" Singleton Config class """
_instance = None
def __new__(cls):
if cls._instance is None:
logger.debug('Creating the Config object')
cls._instance = super(Config, cls).__new__(cls)
cls._instance._initialized = False
return cls._instance
def __init__(self):
if self._initialized:
return
self.file_present = False
self.file_valid = False
self._load_config()
self._initialized = True
@classmethod
def initialize(cls):
cls._instance = None
@property
def name(self):
return self._config['name']
@property
def packages(self):
return self._config['packages']
@property
def default(self):
root_dir = Env.current.root_dir
return {
'name': default_app_name(root_dir),
'packages': [{'path': p} for p in find_top_packages(root_dir)],
}
def _load_config(self):
self._config = {'name': None, 'packages': []}
# Only use a default config if the user hasn't specified a
# config.
env_config = Env.current.get("APPMAP_CONFIG")
use_default_config = not env_config
if use_default_config:
env_config = 'appmap.yml'
path = Path(env_config).resolve()
if path.is_file():
self.file_present = True
Env.current.enabled = False
self.file_valid = False
try:
self._config = yaml.safe_load(path.read_text())
self.file_valid = True
Env.current.enabled = True
except ParserError:
pass
logger.info('config: %s', self._config)
return
if not Env.current.enabled:
return
logger.warning(dedent(f'Config file "{path}" is missing.'))
if use_default_config:
logger.warning(dedent(f'''
This default configuration will be used:
{yaml.dump(self.default)}
'''))
self._config = self.default
else:
# disable appmap and return a dummy config
# so the errors don't accumulate
Env.current.enabled = False
def startswith(prefix, sequence):
"""
Check if a sequence starts with the prefix.
"""
return len(prefix) <= len(sequence) and all(a == b for a, b in zip(sequence, prefix))
class PathMatcher:
def __init__(self, prefix, excludes=None, shallow=False):
excludes = excludes or []
self.prefix = []
if prefix:
self.prefix = prefix.split('.')
self.excludes = [x.split('.') for x in excludes]
self.shallow = shallow
def matches(self, filterable):
fqname = name = filterable.fqname.split('.')
if startswith(self.prefix, name):
name = name[len(self.prefix):]
result = not any(startswith(x, name) for x in self.excludes)
else:
result = False
logger.debug('%r.matches(%r) -> %r', self, fqname, result)
return result
def __repr__(self):
return 'PathMatcher(%r, %r, shallow=%r)' % (
'.'.join(self.prefix),
['.'.join(ex) for ex in self.excludes],
self.shallow
)
class DistMatcher(PathMatcher):
def __init__(self, dist, *args, **kwargs):
super().__init__(*args, **kwargs)
self.dist = dist
self.files = [str(pp.locate()) for pp in importlib_metadata.files(dist)]
def matches(self, filterable):
try:
obj = filterable.obj
logger.debug('%r.matches(%r): %s in %r', self, obj, inspect.getfile(obj), self.files)
if inspect.getfile(obj) not in self.files:
return False
except TypeError:
# builtins don't have file associated
return False
return super().matches(filterable)
def __repr__(self):
return 'DistMatcher(%r, %r, %r, shallow=%r)' % (
self.dist,
'.'.join(self.prefix),
['.'.join(ex) for ex in self.excludes],
self.shallow
)
class MatcherFilter(Filter):
def __init__(self, matchers, *args, **kwargs):
super().__init__(*args, **kwargs)
self.matchers = matchers
def filter(self, filterable):
result = any(m.matches(filterable) for m in self.matchers) or self.next_filter.filter(filterable)
logger.debug('ConfigFilter.filter(%r) -> %r', filterable.fqname, result)
return result
def wrap(self, filterable):
rule = self.match(filterable)
if rule:
wrapped = getattr(filterable.obj, '_appmap_wrapped', None)
if wrapped is None:
logger.debug(' wrapping %s', filterable.fqname)
ret = instrument(filterable)
if rule.shallow:
setattr(ret, '_appmap_shallow', rule)
else:
logger.debug(' already wrapped %s', filterable.fqname)
ret = filterable.obj
return ret
return self.next_filter.wrap(filterable)
def match(self, filterable):
return next((m for m in self.matchers if m.matches(filterable)), None)
def matcher_of_config(package):
dist = package.get('dist', None)
if dist:
return DistMatcher(
dist,
package.get('path', None),
package.get('exclude', []),
shallow=package.get('shallow', True)
)
return PathMatcher(
package['path'],
package.get('exclude', []),
shallow=package.get('shallow', False)
)
class ConfigFilter(MatcherFilter):
def __init__(self, *args, **kwargs):
matchers = []
if Env.current.enabled:
matchers = [matcher_of_config(p) for p in Config().packages]
super().__init__(matchers, *args, **kwargs)
class BuiltinFilter(MatcherFilter):
def __init__(self, *args, **kwargs):
matchers = []
if Env.current.enabled:
matchers = [PathMatcher(f) for f in {'os.read', 'os.write'}]
super().__init__(matchers, *args, **kwargs)
def initialize():
Config().initialize()
Recorder().use_filter(BuiltinFilter)
Recorder().use_filter(ConfigFilter)
initialize()
| [
"logging.getLogger",
"textwrap.dedent",
"pathlib.Path",
"yaml.dump",
"inspect.getfile",
"os.path.realpath",
"re.sub",
"importlib_metadata.files",
"os.walk"
] | [((439, 466), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (456, 466), False, 'import logging\n'), ((694, 707), 'pathlib.Path', 'Path', (['rootdir'], {}), '(rootdir)\n', (698, 707), False, 'from pathlib import Path\n'), ((966, 986), 'os.path.realpath', 'realpath', (['sys.prefix'], {}), '(sys.prefix)\n', (974, 986), False, 'from os.path import realpath\n'), ((2985, 3001), 'os.walk', 'os.walk', (['rootdir'], {}), '(rootdir)\n', (2992, 3001), False, 'import os\n'), ((875, 890), 'pathlib.Path', 'Path', (['repo_root'], {}), '(repo_root)\n', (879, 890), False, 'from pathlib import Path\n'), ((615, 639), 're.sub', 're.sub', (['"""\\\\W"""', '"""."""', 'name'], {}), "('\\\\W', '.', name)\n", (621, 639), False, 'import re\n'), ((3064, 3077), 'os.path.realpath', 'realpath', (['dir'], {}), '(dir)\n', (3072, 3077), False, 'from os.path import realpath\n'), ((5268, 5311), 'textwrap.dedent', 'dedent', (['f"""Config file "{path}" is missing."""'], {}), '(f\'Config file "{path}" is missing.\')\n', (5274, 5311), False, 'from textwrap import dedent\n'), ((4733, 4749), 'pathlib.Path', 'Path', (['env_config'], {}), '(env_config)\n', (4737, 4749), False, 'from pathlib import Path\n'), ((6919, 6949), 'importlib_metadata.files', 'importlib_metadata.files', (['dist'], {}), '(dist)\n', (6943, 6949), False, 'import importlib_metadata\n'), ((7097, 7117), 'inspect.getfile', 'inspect.getfile', (['obj'], {}), '(obj)\n', (7112, 7117), False, 'import inspect\n'), ((7146, 7166), 'inspect.getfile', 'inspect.getfile', (['obj'], {}), '(obj)\n', (7161, 7166), False, 'import inspect\n'), ((3263, 3272), 'pathlib.Path', 'Path', (['dir'], {}), '(dir)\n', (3267, 3272), False, 'from pathlib import Path\n'), ((5450, 5473), 'yaml.dump', 'yaml.dump', (['self.default'], {}), '(self.default)\n', (5459, 5473), False, 'import yaml\n')] |
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from django.forms import ModelForm
from .models import Course, Submission, Assignment
class SignUpForm(UserCreationForm):
def clean_email(self):
email = self.cleaned_data.get('email')
username = self.cleaned_data.get('username')
if email and User.objects.filter(email=email).exclude(username=username).exists():
raise forms.ValidationError(u'Email addresses must be unique.')
return email
class Meta:
model = User
fields = ('username', 'first_name', 'last_name', 'email' , '<PASSWORD>', '<PASSWORD>', )
class EnrollForm(forms.Form):
secret_key = forms.CharField(
widget=forms.TextInput(attrs={'placeholder': '<KEY>'}),
label='Secret Key',
required=False)
class Meta:
fields = ('secret_key')
class ChangeEmailForm(forms.Form):
email = forms.EmailField()
def clean_email(self):
email = self.cleaned_data.get('email')
if email and User.objects.filter(email=email).exists():
raise forms.ValidationError(u'That email is already used.')
return email
class Meta:
fields = ('email')
| [
"django.contrib.auth.models.User.objects.filter",
"django.forms.TextInput",
"django.forms.EmailField",
"django.forms.ValidationError"
] | [((976, 994), 'django.forms.EmailField', 'forms.EmailField', ([], {}), '()\n', (992, 994), False, 'from django import forms\n'), ((483, 540), 'django.forms.ValidationError', 'forms.ValidationError', (['u"""Email addresses must be unique."""'], {}), "(u'Email addresses must be unique.')\n", (504, 540), False, 'from django import forms\n'), ((777, 824), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'placeholder': '<KEY>'}"}), "(attrs={'placeholder': '<KEY>'})\n", (792, 824), False, 'from django import forms\n'), ((1152, 1205), 'django.forms.ValidationError', 'forms.ValidationError', (['u"""That email is already used."""'], {}), "(u'That email is already used.')\n", (1173, 1205), False, 'from django import forms\n'), ((1091, 1123), 'django.contrib.auth.models.User.objects.filter', 'User.objects.filter', ([], {'email': 'email'}), '(email=email)\n', (1110, 1123), False, 'from django.contrib.auth.models import User\n'), ((395, 427), 'django.contrib.auth.models.User.objects.filter', 'User.objects.filter', ([], {'email': 'email'}), '(email=email)\n', (414, 427), False, 'from django.contrib.auth.models import User\n')] |
# -*- coding: utf-8 -*-
"""
Created on Sun Oct 15 18:47:17 2017
@author: Khagendra
The following fills the missing data
"""
from sklearn.preprocessing import Imputer
from numba import jit
@jit
def MisDat(X):
#Filling the most frequent value in place of empty values
imputer=Imputer(missing_values="NaN",strategy="most_frequent",axis=0)
imputer=imputer.fit(X[:,9:38])
X[:,9:38]=imputer.transform(X[:,9:38])
return X | [
"sklearn.preprocessing.Imputer"
] | [((288, 351), 'sklearn.preprocessing.Imputer', 'Imputer', ([], {'missing_values': '"""NaN"""', 'strategy': '"""most_frequent"""', 'axis': '(0)'}), "(missing_values='NaN', strategy='most_frequent', axis=0)\n", (295, 351), False, 'from sklearn.preprocessing import Imputer\n')] |
#!/usr/bin/env python
"""Test faster version of sematic similarity"""
from __future__ import print_function
# Computing basic semantic similarities between GO terms
# Adapted from book chapter written by _<NAME> and <NAME>_
# How to compute semantic similarity between GO terms.
# First we need to write a function that calculates the minimum number
# of branches connecting two GO terms.
import os
import timeit
from collections import Counter
## from goatools.base import get_godag
## from goatools.associations import dnld_assc
## from goatools.semantic import semantic_similarity
## from goatools.semantic import TermCounts
## from goatools.semantic import get_info_content
## from goatools.semantic import deepest_common_ancestor
## from goatools.semantic import resnik_sim
## from goatools.semantic import lin_sim
## from goatools.godag.consts import NS2GO
from goatools.anno.gpad_reader import GpadReader
from goatools.semantic import TermCounts
from tests.utils import get_godag
from tests.utils import get_anno_fullname
from tests.utils import prt_hms
REPO = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")
def test_semantic_similarity():
"""Test faster version of sematic similarity"""
godag_r0 = get_godag('go-basic.obo')
## godag_r1 = get_godag('go-basic.obo', optional_attrs=['relationship'])
annoobj = GpadReader(get_anno_fullname('goa_human.gpad'), godag=godag_r0)
ns2assoc = annoobj.get_ns2assc()
assoc = annoobj.get_id2gos('all')
# Get TermCounts for each namespace and for all namespaces
ns2tcnt = {ns:TermCounts(godag_r0, ns2assoc[ns]) for ns in ['BP', 'MF', 'CC']}
tic = timeit.default_timer()
tcntobj = TermCounts(godag_r0, assoc)
prt_hms(tic, 'CUR ACTUAL {N:,} TermCounts initialized'.format(N=len(tcntobj.gocnts)))
# Compare various TermCount counts
for nspc in ['BP', 'MF', 'CC']:
for goid, cnt in ns2tcnt[nspc].gocnts.items():
assert tcntobj.gocnts[goid] == cnt
# Compare old and new count
tic = timeit.default_timer()
gocnts_old = _old_init_count_terms(godag_r0, assoc.values())
assert gocnts_old
prt_hms(tic, 'OLD EXPECTED {N:,} TermCounts initialized'.format(N=len(gocnts_old)))
for goid, cnt_old in gocnts_old.items():
assert cnt_old == tcntobj.gocnts[goid]
def _old_init_count_terms(go2obj, annots_values):
'''
Fills in the counts and overall aspect counts.
'''
gocnts = Counter()
gonotindag = set()
# Fill gocnts with GO IDs in annotations and their corresponding counts
for terms in annots_values: # key is 'gene'
# Make a union of all the terms for a gene, if term parents are
# propagated but they won't get double-counted for the gene
allterms = set()
for go_id in terms:
goobj = go2obj.get(go_id, None)
if goobj is not None:
allterms.add(go_id)
allterms |= goobj.get_all_parents()
else:
gonotindag.add(go_id)
# Add 1 for each GO annotated to this gene product
for parent in allterms:
gocnts[parent] += 1
if gonotindag:
print("{N} Assc. GO IDs not found in the GODag\n".format(N=len(gonotindag)))
return gocnts
if __name__ == '__main__':
test_semantic_similarity()
| [
"tests.utils.get_godag",
"goatools.semantic.TermCounts",
"timeit.default_timer",
"collections.Counter",
"tests.utils.get_anno_fullname",
"os.path.abspath"
] | [((1240, 1265), 'tests.utils.get_godag', 'get_godag', (['"""go-basic.obo"""'], {}), "('go-basic.obo')\n", (1249, 1265), False, 'from tests.utils import get_godag\n'), ((1653, 1675), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (1673, 1675), False, 'import timeit\n'), ((1690, 1717), 'goatools.semantic.TermCounts', 'TermCounts', (['godag_r0', 'assoc'], {}), '(godag_r0, assoc)\n', (1700, 1717), False, 'from goatools.semantic import TermCounts\n'), ((2030, 2052), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (2050, 2052), False, 'import timeit\n'), ((2456, 2465), 'collections.Counter', 'Counter', ([], {}), '()\n', (2463, 2465), False, 'from collections import Counter\n'), ((1106, 1131), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1121, 1131), False, 'import os\n'), ((1368, 1403), 'tests.utils.get_anno_fullname', 'get_anno_fullname', (['"""goa_human.gpad"""'], {}), "('goa_human.gpad')\n", (1385, 1403), False, 'from tests.utils import get_anno_fullname\n'), ((1578, 1612), 'goatools.semantic.TermCounts', 'TermCounts', (['godag_r0', 'ns2assoc[ns]'], {}), '(godag_r0, ns2assoc[ns])\n', (1588, 1612), False, 'from goatools.semantic import TermCounts\n')] |
#!/usr/bin/env python3
import sqlite3
import time
import smtplib
import sys
from email.mime.text import MIMEText
from email.header import Header
from email.utils import formataddr
import traceback
# -------------------请修改以下部分的配置-------------------
# QQ号
qq = 1840686745
# 检测间隔时间(秒)
sleep_time = 60
# 邮箱服务器地址
mail_host = 'smtp.kokona.tech'
# SMTP端口
mail_port = 465
# SMTP是否使用SSL
mail_use_ssl = True
# 邮箱用户名
mail_user = '<EMAIL>'
# 邮箱密码(部分邮箱为授权码)
mail_pass = ''
# 邮件发送方邮箱地址
sender = '<EMAIL>'
# 邮件接受方邮箱地址
receivers = ['<EMAIL>', '<EMAIL>']
# 邮件标题
mail_subject = '酷Q监测: 警告'
# 发件人昵称
mail_sender_nick = 'CoolQ错误监测系统'
# 邮件内容, 可以使用sprintf格式化占位符
mail_content = '警告: \n%d: %s'
# 邮件格式化占位符的对应参数, 以str保存, ele[1]代表检测到的错误信息
mail_para = '(qq, ele[1])'
# -------------------配置部分结束-------------------
# 数据库文件位置
file_loc = './data/%d/eventv2.db' % qq
# 版本
ver = 'V1.0.0'
print('CoolQ异常监视系统: %s' % ver)
print('提示: 请将此文件放于CQA/CQP.exe同文件夹下运行(非app文件夹!)')
print()
print('[INFO] QQ: %d' % qq)
print('[INFO] 监测间隔时间: %d 秒' % sleep_time)
print('[INFO] SMTP服务器: %s:%d' % (mail_host, mail_port))
print('[INFO] SMTP SSL: %s' % mail_use_ssl)
print('[INFO] SMTP 用户名: %s' % mail_user)
print('[INFO] SMTP 发送地址: %s' % sender)
print('[INFO] SMTP 收件人: %s' % receivers)
print('[INFO] 邮件发件人昵称: %s' % repr(mail_sender_nick))
print('[INFO] 邮件标题: %s' % repr(mail_subject))
print('[INFO] 邮件内容: %s' % repr(mail_content))
print('[INFO] 邮件内容参数: %s' % repr(mail_para))
print('[INFO] 开始监测')
# 获取当前最后一次日志的id
last_id = 0
try:
last_id = sqlite3.connect(file_loc).cursor().execute('select id from event order by id desc ').fetchone()[0]
except sqlite3.OperationalError:
print('[ERROR] 数据库连接错误')
print(traceback.format_exc())
print('[CRITICAL] 致命错误, 正在退出')
sys.exit()
while True:
time.sleep(sleep_time)
conn = None
try:
conn = sqlite3.connect(file_loc)
except sqlite3.OperationalError:
print('[ERROR] 数据库连接错误')
print(traceback.format_exc())
continue
new_id = conn.cursor().execute('select id from event order by id desc ').fetchone()[0]
res = conn.cursor().execute('select type,content from event where id > ? and type = 1101', (last_id,)).fetchall()
for ele in res:
print("[INFO] 检测到酷Q异常: %s" % str(ele))
message = MIMEText(mail_content % eval(mail_para), 'plain', 'utf-8')
message['From'] = formataddr([Header(mail_sender_nick, 'utf-8').encode(), sender])
message['To'] = ';'.join(receivers)
message['Subject'] = Header(mail_subject, 'utf-8')
try:
if mail_use_ssl:
smtpObj = smtplib.SMTP_SSL(mail_host, mail_port)
else:
smtpObj = smtplib.SMTP(mail_host, mail_port)
smtpObj.login(mail_user, mail_pass)
smtpObj.sendmail(sender, receivers, message.as_string())
smtpObj.quit()
print("[INFO] 邮件发送成功")
except smtplib.SMTPException:
print("[ERROR] 无法发送邮件")
print(traceback.format_exc())
last_id = new_id
| [
"traceback.format_exc",
"smtplib.SMTP",
"sqlite3.connect",
"smtplib.SMTP_SSL",
"time.sleep",
"sys.exit",
"email.header.Header"
] | [((1839, 1861), 'time.sleep', 'time.sleep', (['sleep_time'], {}), '(sleep_time)\n', (1849, 1861), False, 'import time\n'), ((1806, 1816), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1814, 1816), False, 'import sys\n'), ((1905, 1930), 'sqlite3.connect', 'sqlite3.connect', (['file_loc'], {}), '(file_loc)\n', (1920, 1930), False, 'import sqlite3\n'), ((2585, 2614), 'email.header.Header', 'Header', (['mail_subject', '"""utf-8"""'], {}), "(mail_subject, 'utf-8')\n", (2591, 2614), False, 'from email.header import Header\n'), ((1741, 1763), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (1761, 1763), False, 'import traceback\n'), ((2018, 2040), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (2038, 2040), False, 'import traceback\n'), ((2686, 2724), 'smtplib.SMTP_SSL', 'smtplib.SMTP_SSL', (['mail_host', 'mail_port'], {}), '(mail_host, mail_port)\n', (2702, 2724), False, 'import smtplib\n'), ((2771, 2805), 'smtplib.SMTP', 'smtplib.SMTP', (['mail_host', 'mail_port'], {}), '(mail_host, mail_port)\n', (2783, 2805), False, 'import smtplib\n'), ((3084, 3106), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (3104, 3106), False, 'import traceback\n'), ((2457, 2490), 'email.header.Header', 'Header', (['mail_sender_nick', '"""utf-8"""'], {}), "(mail_sender_nick, 'utf-8')\n", (2463, 2490), False, 'from email.header import Header\n'), ((1567, 1592), 'sqlite3.connect', 'sqlite3.connect', (['file_loc'], {}), '(file_loc)\n', (1582, 1592), False, 'import sqlite3\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
import re
import time
import json
from django.shortcuts import render, get_object_or_404, redirect, get_list_or_404
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseRedirect, HttpResponse, Http404
from django.template.response import TemplateResponse
from django.core.paginator import Paginator, EmptyPage, InvalidPage
from django.db.models import Q, Count, Sum
from django.utils.translation import ugettext as _
from django.views.decorators.cache import cache_page
from wsgiref.util import FileWrapper
from django.utils import timezone
from app.rpt.models import MailLog, LogReport, LogActive
from app.utils.domain_session import get_domainid_bysession, get_session_domain
from app.utils.response.excel_response import ExcelResponse
from app.utils import MailboxSearch
from lib.licence import licence_required
from .utils import add_condition, get_date_offset, get_day, get_mail_stat_data, get_save_days
from app.rpt.constants import MAILLOG_SEND_PERMIT, MAILLOG_RECV_PERMIT
from app.core.models import (
Mailbox, MailboxUser, MailboxSize, DomainAttr,
Domain, CoreMonitor, CoreAlias, Department, DepartmentMember, VisitLog, AuthLog )
from app.maintain.tools import getLogDesc, LogFormat
from .models import CoUserLog
from .forms import MailLogSearchForm, MailboxStatForm, ActiveUserStatForm, UserLogForm, AdminLogForm, VisitLogForm, AuthLogForm
from app.core.templatetags.tags import smooth_timedelta
from django.apps import apps as dapps
from auditlog.models import LogEntry
#########################################
### 按邮箱统计
@licence_required
def maillog(request):
form = MailboxStatForm(request.GET)
return render(request, "rpt/maillog.html", context={
"form": form,
})
def maillog_mailbox_search(request):
data = request.GET
order_column = data.get('order[0][column]', '')
order_dir = data.get('order[0][dir]', '')
search = data.get('search[value]', '')
username = data.get('username', '')
name = data.get('name', '')
department = data.get('department', '')
position = data.get('position', '')
worknumber = data.get('worknumber', '')
quota = data.get('quota', '')
netdisk_quota = data.get('netdisk_quota', '')
send_permit = data.get('send_permit', '')
recv_permit = data.get('recv_permit', '')
disabled = data.get('disabled', '0')
domain_id = get_domainid_bysession(request)
q_domain = Q(domain_id=domain_id)
condition_mailbox = q_domain
condition_user = None
id_list = []
if name:
condition_user = add_condition(condition_user, Q(realname__icontains=name))
if worknumber:
condition_user = add_condition(condition_user, Q(eenumber__icontains=worknumber))
if condition_user:
condition_user = add_condition(condition_user, q_domain)
for obj in MailboxUser.objects.filter( condition_user ):
id_list.append( obj.mailbox_id )
if not id_list:
return [], 0, 1, 1
if position or department:
condition_dept = None
condition_position = None
dept_list = []
if department:
condition_dept = add_condition(condition_dept, Q(title__icontains=department))
condition_dept = add_condition(condition_dept, q_domain)
for obj in Department.objects.filter( condition_dept ):
dept_list.append( obj.id )
if position:
condition_position = add_condition(condition_position, Q(position__icontains=position))
condition_position = add_condition(condition_position, q_domain)
else:
condition_position = add_condition(condition_position, q_domain)
q_dept = None
for dept_id in dept_list:
if q_dept:
q_dept = q_dept | Q(dept_id=dept_id)
else:
q_dept = Q(dept_id=dept_id)
condition_position = add_condition(q_dept, condition_position)
q_box = None
for mailbox_id in id_list:
if q_box:
q_box = q_box | Q(mailbox_id=mailbox_id)
else:
q_box = Q(mailbox_id=mailbox_id)
condition_position = add_condition(q_box, condition_position)
id_list = []
for obj in DepartmentMember.objects.filter( condition_position ):
id_list.append( obj.mailbox_id )
if not id_list:
return [], 0, 1, 1
condition_mailbox = add_condition(condition_mailbox, q_domain)
if username:
condition_mailbox = add_condition(condition_mailbox, Q(name__icontains=username))
if send_permit and send_permit!="0":
box_list = MailboxSearch.search_send_recv_limit(domain_id=domain_id,type="send",limit=send_permit)
condition_mailbox = add_condition(condition_mailbox, Q(id__in=box_list))
if recv_permit and recv_permit!="0":
box_list = MailboxSearch.search_send_recv_limit(domain_id=domain_id,type="recv",limit=recv_permit)
condition_mailbox = add_condition(condition_mailbox, Q(id__in=box_list))
if quota:
condition_mailbox = add_condition(condition_mailbox, Q(quota_mailbox=quota))
if netdisk_quota:
condition_mailbox = add_condition(condition_mailbox, Q(quota_netdisk=netdisk_quota))
if disabled and disabled!="0":
condition_mailbox = add_condition(condition_mailbox, Q(disabled=disabled))
q_box = None
for mailbox_id in id_list:
if q_box:
q_box = q_box | Q(id=mailbox_id)
else:
q_box = Q(id=mailbox_id)
condition_mailbox = add_condition(q_box, condition_mailbox)
mailbox_lists = Mailbox.objects.filter( condition_mailbox )
colums = ['id', 'username', 'mailboxuser__realname', 'id', 'id', 'mailboxuser__eenumber', 'limit_send',
'limit_recv', 'quota_mailbox', 'quota_netdisk', 'mailboxsize__size', 'disabled']
if order_column and int(order_column) < len(colums):
if order_dir == 'desc':
mailbox_lists = mailbox_lists.order_by('-%s' % colums[int(order_column)])
else:
mailbox_lists = mailbox_lists.order_by('%s' % colums[int(order_column)])
try:
length = int(data.get('length', 1))
except ValueError:
length = 1
try:
start_num = int(data.get('start', '0'))
page = start_num / length + 1
except ValueError:
start_num = 0
page = 1
return mailbox_lists, start_num, page, length
def cal_mailboxstat(number, d):
send_permit_map = dict(MAILLOG_SEND_PERMIT)
recv_permit_map = dict(MAILLOG_RECV_PERMIT)
def get_send_permit(v):
v = str(v)
return send_permit_map.get(v,'-1')
def get_recv_permit(v):
v = str(v)
return recv_permit_map.get(v,'-1')
username = d.name
name = d.name
department = ""
position = ""
worknumber = ""
disabled = "-1"
sendpermit = get_send_permit(d.getSendLimit)
recvpermit = get_recv_permit(d.getRecvLimit)
quotamailbox = d.quota_mailbox
quotanetdisk = d.quota_netdisk
disabled = str(d.disabled)
quotamailbox_used = 0
obj_user = MailboxUser.objects.filter(mailbox_id=d.id).first()
if obj_user:
name = obj_user.realname
worknumber = obj_user.eenumber
obj_member = DepartmentMember.objects.filter(mailbox_id=d.id).first()
if obj_member:
position = obj_member.position
dept_id = obj_member.dept_id
obj_dept = Department.objects.filter(id=dept_id).first()
if obj_dept:
department = obj_dept.title
size_obj = MailboxSize.objects.filter(mailbox_id=d.id).first()
quotamailbox_used = 0 if not size_obj else size_obj.size
obj = VisitLog.objects.filter(mailbox_id=d.id).order_by('-logintime').first()
last_weblogin = obj.logintime.strftime('%Y-%m-%d %H:%M:%S') if obj else u"--"
obj = AuthLog.objects.filter(user=d.username,is_login=True).order_by('-time').first()
last_clientlogin = obj.time.strftime('%Y-%m-%d %H:%M:%S') if obj else u"--"
data = {
'number': number,
'username': username,
'name': name,
'department': department,
'position': position,
'worknumber': worknumber,
'sendpermit': sendpermit,
'recvpermit': recvpermit,
'quotamailbox': quotamailbox,
'quotamailbox_used': quotamailbox_used,
'quotanetdisk': quotanetdisk,
"last_weblogin": last_weblogin,
"last_clientlogin": last_clientlogin,
"disabled": disabled,
}
return data
@licence_required
def maillog_ajax(request):
mailbox_lists, start_num, page, length = maillog_mailbox_search(request)
count = len(mailbox_lists)
if start_num >= count:
page = 1
paginator = Paginator(mailbox_lists, length)
try:
mailbox_lists = paginator.page(page)
except (EmptyPage, InvalidPage):
mailbox_lists = paginator.page(paginator.num_pages)
rs = {"sEcho": 0, "iTotalRecords": count, "iTotalDisplayRecords": count, "aaData": []}
re_str = '<td.*?>(.*?)</td>'
number = length * (page-1) + 1
for d in mailbox_lists.object_list:
data = cal_mailboxstat(number, d)
t = TemplateResponse(request, 'rpt/maillog_ajax.html', data )
t.render()
rs["aaData"].append(re.findall(re_str, t.content, re.DOTALL))
number += 1
return HttpResponse(json.dumps(rs), content_type="application/json")
def maillog_export(request):
lists = [[_(u'序号'), _(u'用户名称'), _(u'用户姓名'), _(u'部门'), _(u'职位'), _(u'工号'), _(u'发送权限'), _(u'接收权限'), _(u'邮箱容量(MB)'), _(u'网络硬盘容量(MB)'), _(u'已用邮箱容量(MB)'), _(u'邮箱状态')]]
mailbox_lists, start_num, page, length = maillog_mailbox_search(request)
current_row = 1
for d in mailbox_lists:
data = cal_mailboxstat(current_row, d)
disabled_name = _(u"启用") if data["disabled"]!="1" else _(u"禁用")
#需要提前翻译好
limit_send = _(data["sendpermit"])
limit_recv = _(data["recvpermit"])
lists.append([current_row, data["username"], data["name"], data["department"], data["position"],
data["worknumber"], limit_send, limit_recv, data["quotamailbox"], data["quotanetdisk"], data["quotamailbox_used"], disabled_name ])
current_row += 1
return ExcelResponse(lists, "mailbox", encoding='gbk')
#########################################
### 邮件收发统计
@licence_required
def maillog_user(request):
domain_id = get_domainid_bysession(request)
form = ActiveUserStatForm(domain_id, request.GET)
return render(request, "rpt/maillog_user.html", context={
"form": form,
})
def maillog_user_search(request):
data = request.GET
order_column = data.get('order[0][column]', '')
order_dir = data.get('order[0][dir]', '')
search = data.get('search[value]', '')
username = data.get('username', '')
department = data.get('department', '0')
department = 0 if not department else int(department)
#最大搜索用户数,没什么用,先去掉
#showmax = data.get('showmax', '')
#showmax = 0 if not showmax.strip() else int(showmax)
showmax = 0
date_select = data.get('date_select', '')
if not date_select or date_select=="None":
date_select = "0"
date_select = int(date_select)
date_select2 = data.get('date_select2', '')
if not date_select2 or date_select2=="None":
date_select2 = "-1"
date_select2 = int(date_select2)
start_day = max(date_select, date_select2)
end_day = min(date_select, date_select2)
start_time=get_day(int(start_day))
domain_id = get_domainid_bysession(request)
#-------------------------- 筛选 部门 ----------------------------
lists = MailLog.objects.filter(domain_id=domain_id)
if department and int(department)>0:
id_dept = DepartmentMember.objects.filter(domain_id=domain_id, dept_id=department).values_list('mailbox_id',flat=True)
lists = lists.filter(mailbox_id__in=id_dept)
#-------------------------- 筛选 部门 完毕 ------------------------
#-------------------------- 筛选 邮箱 ----------------------------
condition_mailbox = None
if username:
condition_mailbox = add_condition(condition_mailbox, Q(name__icontains=username))
if condition_mailbox:
condition_mailbox = add_condition(condition_mailbox, Q(domain_id=domain_id))
id_box = Mailbox.objects.filter(condition_mailbox).values_list('id',flat=True)
lists = lists.filter(mailbox_id__in=id_box)
#-------------------------- 筛选 邮箱 完毕 ------------------------
condition = Q(domain_id=domain_id)
condition_single = Q(domain_id=domain_id)
condition = add_condition(condition, Q(recv_time__gte=start_time))
condition_single = add_condition(condition_single, Q(recv_time__gte=start_time))
if end_day>-1 and end_day != start_day:
end_time=get_day(int(end_day))
condition = add_condition(condition, Q(recv_time__lt=end_time))
condition_single = add_condition(condition_single, Q(recv_time__lt=end_time))
lists = lists.filter(condition).values('mailbox_id').annotate(Count('size'),Sum('size')).order_by('-size__count')
flag = "stat"
try:
length = int(data.get('length', 1))
except ValueError:
length = 1
try:
start_num = int(data.get('start', '0'))
page = start_num / length + 1
except ValueError:
start_num = 0
page = 1
return flag, lists, condition_single, start_num, page, length, showmax
def maillog_user_single(flag, lists, d, condition, lists_in_data=None, lists_out_success_data=None, lists_spam_data=None):
MB=1024*1024.0
count = lists.count()
mailbox_id = d["mailbox_id"]
total_count = d["size__count"]
total_flow = round(int(d["size__sum"])/MB,2)
in_count = in_flow = out_count = out_flow =0
success_count = success_flow = 0
spam_count = spam_flow = 0
failure_count = failure_flow = 0
spam_ratio = '--'
out_ratio = '--'
obj_box = Mailbox.objects.filter(id=mailbox_id).first()
if not obj_box:
name = _(u"已删除邮箱_%s")%mailbox_id
else:
name = obj_box.name
last_time = time.time()
q = add_condition(condition, Q(mailbox_id=mailbox_id))
if flag == "cache":
#last_time = count_time(last_time)
#入站流量
lists = lists.filter(q).values('mailbox_id').annotate(
Sum('total_count'), Sum('total_flow'),
Sum('in_count'), Sum('in_flow'),
Sum('spam_count'), Sum('spam_flow'),
Sum('success_count'), Sum('success_flow'),
).first()
#last_time = count_time(last_time)
in_count = int(lists["in_count__sum"])
in_flow_base = int(lists["in_flow__sum"])
in_flow = round(in_flow_base/MB,2)
out_count = max(total_count - in_count, 0)
out_flow = (int(d["size__sum"]) - in_flow_base)/MB
out_flow = max(round(out_flow,2), 0)
success_count = int(lists["success_count__sum"])
success_flow = int(lists["success_flow__sum"])
spam_count = int(lists["spam_count__sum"])
spam_flow = round(lists["spam_flow__sum"]/MB,2)
else:
if lists_in_data is None:
#入站流量
lists_in = lists.filter(q & Q(type='in')).values('mailbox_id').annotate(Count('size'),Sum('size')).first()
#出站成功数量
lists_out_success = lists.filter(q & Q(result='1') & Q(type='out')).values('mailbox_id').annotate(Count('size'),Sum('size')).first()
#垃圾数量
lists_spam = lists.filter(q & Q(type='in',status='spam-flag')).values('mailbox_id').annotate(Count('size'),Sum('size')).first()
else:
lists_in = lists_in_data.get(mailbox_id,{})
lists_out_success = lists_out_success_data.get(mailbox_id,{})
lists_spam = lists_spam_data.get(mailbox_id,{})
in_flow_base = 0
if lists_in:
in_count = int(lists_in["size__count"])
in_flow_base = int(lists_in["size__sum"])
in_flow = round(in_flow_base/MB,2)
out_count = max(total_count - in_count, 0)
out_flow = (int(d["size__sum"]) - in_flow_base)/MB
out_flow = max(round(out_flow,2), 0)
if lists_out_success:
success_count = int(lists_out_success["size__count"])
success_flow = round(lists_out_success["size__sum"]/MB,2)
#因为浮点数计算可能有误差,所以取两者最大值,避免显示看起来很奇怪
out_flow = max(out_flow, success_flow)
failure_count = max(out_count - success_count,0)
failure_flow = max(round(out_flow - success_flow,2),0)
if lists_spam:
spam_count = int(lists_spam["size__count"])
spam_flow = round(lists_spam["size__sum"]/MB,2)
if in_count > 0:
ratio = round( spam_count*1.0/in_count, 3 )
spam_ratio = "%s%%"%(ratio*100)
if out_count > 0:
ratio = round( success_count*1.0/out_count, 3 )
out_ratio = "%s%%"%(ratio*100)
data = {
'name':name,
'total_used' : 0,
'total_count': total_count,
'total_flow': total_flow,
'd': d,
'in_count': in_count, 'in_flow': in_flow,
'out_count': out_count, 'out_flow': out_flow,
'spam_count': spam_count, 'spam_flow': spam_flow,
'success_count': success_count, 'success_flow': success_flow,
'failure_count': failure_count, 'failure_flow': failure_flow,
'spam_ratio': spam_ratio, 'out_ratio': out_ratio,
}
return data
@licence_required
def maillog_user_ajax(request):
flag, lists, condition, start_num, page, length, showmax = maillog_user_search(request)
MB=1024*1024.0
count = lists.count()
if showmax >0 and count > showmax:
count = showmax
if start_num >= count:
page = 1
paginator = Paginator(lists, length)
#print "mailLogActiveSearch Paginator"
#last_time = count_time(last_time)
try:
page_lists = paginator.page(page)
except (EmptyPage, InvalidPage):
page_lists = paginator.page(paginator.num_pages)
rs = {"sEcho": 0, "iTotalRecords": count, "iTotalDisplayRecords": count, "aaData": []}
re_str = '<td.*?>(.*?)</td>'
number = length * (page-1) + 1
#进站数量
lists_in = lists.filter(Q(type='in')).values('mailbox_id').annotate(Count('size'),Sum('size'))
#出站成功数量
lists_out_success = lists.filter(Q(result='1') & Q(type='out')).values('mailbox_id').annotate(Count('size'),Sum('size'))
#垃圾数量
lists_spam = lists.filter(Q(type='in',status='spam-flag')).values('mailbox_id').annotate(Count('size'),Sum('size'))
lists_in_data = {}
for d in lists_in:
mailbox_id=d["mailbox_id"]
lists_in_data[mailbox_id] = d
lists_out_success_data = {}
for d in lists_out_success:
mailbox_id=d["mailbox_id"]
lists_out_success_data[mailbox_id] = d
lists_spam_data = {}
for d in lists_spam:
mailbox_id=d["mailbox_id"]
lists_spam_data[mailbox_id] = d
for d in page_lists.object_list:
data = maillog_user_single(flag, lists, d, condition, lists_in_data, lists_out_success_data, lists_spam_data)
#print "mailLogActiveStatSingle: ",d
#last_time = count_time(last_time)
data["number"] = number
t = TemplateResponse(request, 'rpt/maillog_user_ajax.html', data )
t.render()
rs["aaData"].append(re.findall(re_str, t.content, re.DOTALL))
number += 1
return HttpResponse(json.dumps(rs), content_type="application/json")
@licence_required
def maillog_user_export(request):
lists = [[_(u'序号'), _(u'用户名'), _(u'已用容量'), _(u'邮件数量'), _(u'总流量'), _(u'入站数量'), _(u'入站流量'),
_(u'垃圾过滤数量'), _(u'垃圾过滤流量'), _(u'出站数量'), _(u'出站流量'), _(u'成功数量'), _(u'成功流量'), _(u'失败数量'), _(u'失败流量'), _(u'垃圾率'), _(u'出站成功率')]]
flag, user_lists, condition, start_num, page, length, showmax = maillog_user_search(request)
current_row = 1
lists_in = user_lists.filter(Q(type='in')).values('mailbox_id').annotate(Count('size'),Sum('size'))
#出站成功数量
lists_out_success = user_lists.filter(Q(result='1') & Q(type='out')).values('mailbox_id').annotate(Count('size'),Sum('size'))
#垃圾数量
lists_spam = user_lists.filter(Q(type='in',status='spam-flag')).values('mailbox_id').annotate(Count('size'),Sum('size'))
lists_in_data = {}
for d in lists_in:
mailbox_id=d["mailbox_id"]
lists_in_data[mailbox_id] = d
lists_out_success_data = {}
for d in lists_out_success:
mailbox_id=d["mailbox_id"]
lists_out_success_data[mailbox_id] = d
lists_spam_data = {}
for d in lists_spam:
mailbox_id=d["mailbox_id"]
lists_spam_data[mailbox_id] = d
#last_time = count_time(last_time)
for d in user_lists:
data = maillog_user_single(flag, user_lists, d, condition, lists_in_data, lists_out_success_data, lists_spam_data)
lists.append([current_row, data["name"], data["total_used"], data["total_count"], data["total_flow"],
data["in_count"], data["in_flow"], data["spam_count"], data["spam_flow"],
data["out_count"], data["out_flow"], data["success_count"], data["success_flow"],
data["failure_count"], data["failure_flow"], data["spam_ratio"], data["out_ratio"], ])
current_row += 1
if showmax and current_row>=showmax:
break
return ExcelResponse(lists, "active.xls", encoding='gbk')
#########################################
### 邮件统计报告
@licence_required
def maillog_stat(request):
mailbox_id = 0
domain_id = get_domainid_bysession(request)
save_days = get_save_days()
smtp_in = get_mail_stat_data(domain_id, mailbox_id,"smtp_in")
smtp_out = get_mail_stat_data(domain_id, mailbox_id,"smtp_out")
imap_session = get_mail_stat_data(domain_id, mailbox_id,"imap_session")
pop3_session = get_mail_stat_data(domain_id, mailbox_id,"pop3_session")
spam_receive = get_mail_stat_data(domain_id, mailbox_id,"spam_receive")
spam_reject = get_mail_stat_data(domain_id, mailbox_id,"spam_reject")
spam_virus = get_mail_stat_data(domain_id, mailbox_id,"spam_virus")
return render(request, "rpt/maillog_stat.html", context={
"smtp_in": smtp_in,
"smtp_out": smtp_out,
"imap_session": imap_session,
"pop3_session": pop3_session,
"spam_receive": spam_receive,
"spam_reject": spam_reject,
"spam_virus": spam_virus,
"save_days": save_days,
})
@licence_required
def maillog_stat_export(request):
mailbox_id = 0
domain_id = get_domainid_bysession(request)
save_days = get_save_days()
smtp_in = get_mail_stat_data(domain_id,mailbox_id,"smtp_in")
smtp_out = get_mail_stat_data(domain_id,mailbox_id,"smtp_out")
imap_session = get_mail_stat_data(domain_id,mailbox_id,"imap_session")
pop3_session = get_mail_stat_data(domain_id,mailbox_id,"pop3_session")
spam_receive = get_mail_stat_data(domain_id,mailbox_id,"spam_receive")
spam_reject = get_mail_stat_data(domain_id,mailbox_id,"spam_reject")
spam_virus = get_mail_stat_data(domain_id,mailbox_id,"spam_virus")
nearday_name = _(u"{}天总计").format(save_days)
lists = [[_(u'序号'), _(u'名称'), _(u'近期总计'), nearday_name, _(u'今日'), _(u'昨日'), _(u'2日之前'), _(u'3日之前'),_(u'4日之前'), _(u'5日之前'), _(u'6日之前')]]
rows_mail =(
(_(u"SMTP邮件(收信)"),smtp_in),
(_(u"SMTP邮件(发信)"),smtp_out),
(_(u"IMAP会话"),imap_session),
(_(u"POP3会话"),pop3_session),
(_(u"已接收的垃圾邮件"), spam_receive),
(_(u"已拒绝的垃圾邮件"), spam_reject),
(_(u"已拒绝的病毒邮件"), spam_virus),
)
current_row = 1
for name, data in rows_mail:
lists.append([current_row, name, data["stat_total"], data["stat_week"], data["stat_today"],
data["stat_1"], data["stat_2"], data["stat_3"], data["stat_4"], data["stat_5"],data["stat_6"],])
current_row += 1
return ExcelResponse(lists, "mail_report.xls", encoding='gbk')
#########################################
### 邮件日志查询
@licence_required
def maillog_list(request):
form = MailLogSearchForm(request.GET)
return render(request, "rpt/maillog_list.html", context={
"form": form,
})
@licence_required
def maillog_list_export(request):
data = request.GET
log_type = data.get('type', '')
start_time = data.get('start_time', '')
end_time = data.get('end_time', '')
username = data.get('username', '')
send_mail = data.get('send_mail', '')
recv_mail = data.get('recv_mail', '')
max_attach = data.get('max_attach', '')
min_attach = data.get('min_attach', '')
senderip = data.get('senderip', '')
rcv_server = data.get('rcv_server', '')
text = data.get('text', '')
start_time = "" if start_time == 'None' else start_time
end_time = "" if end_time == 'None' else end_time
result = data.get('result', '0')
condition = None
domain_id = get_domainid_bysession(request)
if domain_id:
condition = add_condition(condition, Q(domain_id=domain_id))
if log_type:
condition = add_condition(condition, Q(type=log_type))
if username:
condition = add_condition(condition, (Q(send_mail__icontains=username) | Q(recv_mail__icontains=username)))
if send_mail:
condition = add_condition(condition, Q(send_mail__icontains=send_mail))
if recv_mail:
condition = add_condition(condition, Q(recv_mail__icontains=recv_mail))
if senderip:
condition = add_condition(condition, Q(senderip__icontains=senderip))
if rcv_server:
condition = add_condition(condition, Q(rcv_server__icontains=rcv_server))
if result and result!="0":
condition = add_condition(condition, Q(result=result))
if text:
condition = add_condition(condition, Q(subject__icontains=text) | Q(attachment__icontains=text))
if start_time or end_time:
q = None
if start_time:
q = add_condition(q, Q(recv_time__gte=start_time))
if end_time:
q = add_condition(q, Q(recv_time__lte=end_time))
condition = add_condition(condition, q)
if max_attach or min_attach:
q = None
if min_attach:
min_attach = int(float(min_attach) * 1024 * 1024)
q = add_condition(q, Q(attachment_size__gte=min_attach))
if max_attach:
max_attach = int(float(max_attach) * 1024 * 1024)
q = add_condition(q, Q(attachment_size__lte=max_attach))
condition = add_condition(condition, q)
# 每次查询只显示前10000结果
max_show = 10000
if condition:
lists = MailLog.objects.filter(condition).order_by("-recv_time")[:max_show]
else:
lists = MailLog.objects.all().order_by("-recv_time")[:max_show]
lists2 = [[_(u'序号'), _(u'时间'), _(u'用户名'), _(u'类型'), _(u'发件邮箱'), _(u'收件邮箱'), _(u'发件服务器'), _(u'收件服务器'), _(u'邮件标题'), _(u'附件名称'), _(u'附件大小'), _(u'投递位置'), _(u'结果'), _(u'投递提示')]]
current_row = 1
for d in lists:
result = _(u'成功') if d.get_result == '1' else _(u'失败')
#由 ugettext_lazy 包起来的数据要提前翻译
t = _(d.get_type)
lists2.append([current_row, d.get_time, d.get_username, t, d.send_mail, d.recv_mail, d.senderip, d.rcv_server, d.subject, d.attachment, d.get_attach_size, d.folder, result, d.remark])
current_row += 1
return ExcelResponse(lists2, "maillog_list", encoding='gbk')
@licence_required
def maillog_list_ajax(request):
data = request.GET
order_column = data.get('order[0][column]', '')
order_dir = data.get('order[0][dir]', '')
search = data.get('search[value]', '')
log_type = data.get('type', '')
start_time = data.get('start_time', '')
end_time = data.get('end_time', '')
username = data.get('username', '')
send_mail = data.get('send_mail', '')
recv_mail = data.get('recv_mail', '')
max_attach = data.get('max_attach', '')
min_attach = data.get('min_attach', '')
senderip = data.get('senderip', '')
rcv_server = data.get('rcv_server', '')
text = data.get('text', '')
result = data.get('result', '0')
start_time = "" if start_time=='None' else start_time
end_time = "" if end_time=='None' else end_time
colums = [
'id', 'recv_time', 'mailbox_id', 'type', 'rcv_server', 'send_mail',
'senderip', 'recv_mail', 'subject', 'attachment', 'attachment_size',
'folder', 'result', 'remark',
]
domain_id = get_domainid_bysession(request)
condition = Q(domain_id=domain_id)
if search:
condition = add_condition(condition, Q(send_mail__icontains=search) | Q(recv_mail__icontains=search))
if log_type:
condition = add_condition(condition, Q(type=log_type))
if username:
condition = add_condition(condition, (Q(send_mail__icontains=username) | Q(recv_mail__icontains=username)))
if send_mail:
condition = add_condition(condition, Q(send_mail__icontains=send_mail))
if recv_mail:
condition = add_condition(condition, Q(recv_mail__icontains=recv_mail))
if senderip:
condition = add_condition(condition, Q(senderip__icontains=senderip))
if rcv_server:
condition = add_condition(condition, Q(rcv_server__icontains=rcv_server))
if result and result!="0":
condition = add_condition(condition, Q(result=result))
if text:
condition = add_condition(condition, Q(subject__icontains=text) | Q(attachment__icontains=text) \
| Q(send_mail__icontains=text) | Q(recv_mail__icontains=text) )
if start_time or end_time:
q = None
if start_time:
q = add_condition(q,Q(recv_time__gte=start_time))
if end_time:
q = add_condition(q,Q(recv_time__lte=end_time))
condition = add_condition(condition, q)
if max_attach or min_attach:
q = None
if min_attach:
min_attach = int(float(min_attach)*1024*1024)
q = add_condition(q,Q(attachment_size__gte=min_attach))
if max_attach:
max_attach = int(float(max_attach)*1024*1024)
q = add_condition(q,Q(attachment_size__lte=max_attach))
condition = add_condition(condition, q)
#每次查询只显示前1000结果
max_show = 1000
lists = MailLog.objects.filter( condition )
if order_column and int(order_column) < len(colums):
if order_dir == 'desc':
lists = lists.order_by('-%s' % colums[int(order_column)])[:max_show]
else:
lists = lists.order_by('%s' % colums[int(order_column)])[:max_show]
else:
lists = lists.order_by("-recv_time")[:max_show]
try:
length = int(data.get('length', 1))
except ValueError:
length = 1
try:
start_num = int(data.get('start', '0'))
page = start_num / length + 1
except ValueError:
start_num = 0
page = 1
count = lists.count()
if start_num >= count:
page = 1
paginator = Paginator(lists, length)
try:
lists = paginator.page(page)
except (EmptyPage, InvalidPage):
lists = paginator.page(paginator.num_pages)
rs = {"sEcho": 0, "iTotalRecords": count, "iTotalDisplayRecords": count, "aaData": []}
re_str = '<td.*?>(.*?)</td>'
number = length * (page-1) + 1
for d in lists.object_list:
t = TemplateResponse(request, 'rpt/maillog_list_ajax.html', {'d': d, 'number': number})
t.render()
rs["aaData"].append(re.findall(re_str, t.content, re.DOTALL))
number += 1
return HttpResponse(json.dumps(rs), content_type="application/json")
#########################################
# 管理员操作日志
@licence_required
def user_log(request):
form = UserLogForm(request.GET)
return render(request, "rpt/user_log.html", context={
"form": form,
})
def get_user_log_lists(request):
start_time = request.GET.get('start_time', '')
end_time = request.GET.get('end_time', '')
username = request.GET.get('username', '')
ip = request.GET.get('ip', '')
classify = request.GET.get('classify', '')
result = request.GET.get('result', '')
domain_id = get_domainid_bysession(request)
lists = CoUserLog.objects.filter(domain_id=domain_id)
if start_time:
lists = lists.filter(datetime__gte=start_time)
if end_time:
lists = lists.filter(datetime__lte=end_time)
if username:
uids = MailboxUser.objects.filter(domain_id=domain_id, realname__icontains=username).values_list('mailbox_id')
lists = lists.filter(mailbox_id__in=uids)
if ip:
lists = lists.filter(clientip__icontains=ip)
if classify:
#邮件搬家有两种协议
if classify in ('mail_moving',):
lists = lists.filter(Q(classify='pop') | Q(classify='imap'))
else:
lists = lists.filter(classify=classify)
if result:
lists = lists.filter(result=result)
return lists
from django.template.defaultfilters import date as date_format
@licence_required
def user_log_export(request):
lists = get_user_log_lists(request)
lists = lists[:1000]
lists2 = [
[_(u'序号'), _(u'时间'), _(u'用户名'), _(u'真实姓名'), _(u'邮箱'), _(u'手机号'), _(u'微信昵称'), _(u'头像'), _(u'操作类型'), _(u'模块动作'), _(u'结果'), _(u'详情'), _(u'客户端IP'),]]
current_row = 1
for d in lists:
name, realname, mailbox, tel_mobile, nickname, img = "", "", "", "", "", ""
# d,mailbox 可能为None
m = d.mailbox if hasattr(d, "mailbox") else None
if m:
name, mailbox = m.name, m.username
u = m.user
if u:
realname, tel_mobile = u.realname, u.tel_mobile
w = d.wxuser
if w:
nickname, img = w.nickname, w.img
lists2.append(
[current_row, date_format(d.datetime, 'Y-m-d H:i'), name, realname, mailbox, tel_mobile, nickname, img,
d.get_classify_display(), d.action, d.get_result_display(), d.description, d.clientip])
current_row += 1
return ExcelResponse(lists2, "user_log", encoding='gbk')
@licence_required
def user_log_ajax(request):
data = request.GET
order_column = data.get('order[0][column]', '')
order_dir = data.get('order[0][dir]', '')
search = data.get('search[value]', '')
domain_id = get_domainid_bysession(request)
colums = ['id', 'datetime', 'mailbox__name', 'mailbox__mailboxuser__realname', 'mailbox__username',
'id', 'id', 'id', 'classify', 'id', 'result', 'id', 'clientip']
lists = get_user_log_lists(request)
if search:
uids = MailboxUser.objects.filter(domain_id=domain_id, realname__icontains=search).values_list('mailbox_id')
lists = lists.filter(mailbox_id__in=uids)
if lists.exists() and order_column and int(order_column) < len(colums):
if order_dir == 'desc':
lists = lists.order_by('-%s' % colums[int(order_column)])
else:
lists = lists.order_by('%s' % colums[int(order_column)])
try:
length = int(data.get('length', 1))
except ValueError:
length = 1
try:
start_num = int(data.get('start', '0'))
page = start_num / length + 1
except ValueError:
start_num = 0
page = 1
count = lists.count()
if start_num >= count:
page = 1
paginator = Paginator(lists, length)
try:
lists = paginator.page(page)
except (EmptyPage, InvalidPage):
lists = paginator.page(paginator.num_pages)
rs = {"sEcho": 0, "iTotalRecords": count, "iTotalDisplayRecords": count, "aaData": []}
re_str = '<td.*?>(.*?)</td>'
number = length * (page-1) + 1
for d in lists.object_list:
t = TemplateResponse(request, 'rpt/user_log_ajax.html', {'d': d, 'number': number})
t.render()
rs["aaData"].append(re.findall(re_str, t.content, re.DOTALL))
number += 1
return HttpResponse(json.dumps(rs), content_type="application/json")
@licence_required
def user_log_web(request):
form = VisitLogForm(request.GET)
return render(request, template_name='rpt/user_log_web.html', context={
"form": form,
})
@licence_required
def user_log_web_ajax(request):
data = request.GET
order_column = data.get('order[0][column]', '')
order_dir = data.get('order[0][dir]', '')
search = data.get('search[value]', '')
colums = ['logintime', 'mailbox__name', 'mailbox__username', 'logintime', 'lasttime', 'id', 'clienttype', 'clientip', 'id']
domain_id = get_domainid_bysession(request)
lists = VisitLog.objects.filter(domain_id=domain_id)
name = request.GET.get('name', '')
username = request.GET.get('username', '')
start_time = request.GET.get('start_time', '')
end_time = request.GET.get('end_time', '')
ip = request.GET.get('ip', '')
login_type = request.GET.get('login_type', '')
is_online = request.GET.get('is_online', '')
try:
online_time_lt = int(request.GET.get('online_time_lt', '0'))
except:
online_time_lt = 0
try:
online_time_gt = int(request.GET.get('online_time_gt', '0'))
except:
online_time_gt = 0
if name:
lists = lists.filter(mailbox__name__icontains=name)
if username:
lists = lists.filter(mailbox__username__icontains=username)
if start_time:
lists = lists.filter(logintime__gte=start_time)
if end_time:
lists = lists.filter(logintime__lte=end_time)
if ip:
lists = lists.filter(clientip__icontains=ip)
if login_type:
lists = lists.filter(clienttype__icontains=login_type)
if is_online == '1':
lists = lists.extra(where=['( UNIX_TIMESTAMP(NOW()) - UNIX_TIMESTAMP(lasttime) )<600'])
if is_online == '-1':
lists = lists.extra(where=['( UNIX_TIMESTAMP(NOW()) - UNIX_TIMESTAMP(lasttime) )>=600'])
if online_time_lt:
lists = lists.extra(where=['( UNIX_TIMESTAMP(lasttime) - UNIX_TIMESTAMP(logintime) )<=%s'],
params=[online_time_lt*3600])
if online_time_gt:
lists = lists.extra(where=['( UNIX_TIMESTAMP(lasttime) - UNIX_TIMESTAMP(logintime) )>=%s'],
params=[online_time_gt*3600])
if search:
lists = lists.filter(mailbox__username__icontains=search)
if order_column and int(order_column) < len(colums):
if order_dir == 'desc':
lists = lists.order_by('-%s' % colums[int(order_column)])
else:
lists = lists.order_by('%s' % colums[int(order_column)])
try:
length = int(data.get('length', 1))
except ValueError:
length = 1
try:
start_num = int(data.get('start', '0'))
page = start_num / length + 1
except ValueError:
start_num = 0
page = 1
count = lists.count()
if start_num >= count:
page = 1
paginator = Paginator(lists, length)
try:
lists = paginator.page(page)
except (EmptyPage, InvalidPage):
lists = paginator.page(paginator.num_pages)
rs = {"sEcho": 0, "iTotalRecords": count, "iTotalDisplayRecords": count, "aaData": []}
re_str = '<td.*?>(.*?)</td>'
number = length * (page-1) + 1
for l in lists.object_list:
continuetime = smooth_timedelta(l.lasttime - l.logintime)
out_time = timezone.now() - l.lasttime
is_login = True if out_time.total_seconds() <= 600 else False
t = TemplateResponse(request, 'rpt/user_log_web_ajax.html', {'l': l, 'number': number, 'continuetime': continuetime, 'is_login': is_login})
t.render()
rs["aaData"].append(re.findall(re_str, t.content, re.DOTALL))
number += 1
return HttpResponse(json.dumps(rs), content_type="application/json")
@licence_required
def user_log_client(request):
form = AuthLogForm(request.GET)
return render(request, template_name='rpt/user_log_client.html', context={
"form": form,
})
@licence_required
def user_log_client_ajax(request):
data = request.GET
order_column = data.get('order[0][column]', '')
order_dir = data.get('order[0][dir]', '')
search = data.get('search[value]', '')
colums = ['id', 'user', 'type', 'time', 'client_ip', 'is_login']
domain_id = get_domainid_bysession(request)
lists = AuthLog.objects.filter(domain_id=domain_id)
vtype = request.GET.get('vtype', '')
username = request.GET.get('username', '')
start_time = request.GET.get('start_time', '')
end_time = request.GET.get('end_time', '')
ip = request.GET.get('ip', '')
is_login = request.GET.get('is_login', '')
if vtype:
lists = lists.filter(type=vtype)
if username:
lists = lists.filter(user__icontains=username)
if start_time:
lists = lists.filter(time__gte=start_time)
if end_time:
lists = lists.filter(time__lte=end_time)
if ip:
lists = lists.filter(client_ip__icontains=ip)
if is_login == '-1':
lists = lists.filter(is_login=False)
if is_login == '1':
lists = lists.filter(is_login=False)
if search:
lists = lists.filter(user__icontains=search)
if order_column and int(order_column) < len(colums):
if order_dir == 'desc':
lists = lists.order_by('-%s' % colums[int(order_column)])
else:
lists = lists.order_by('%s' % colums[int(order_column)])
try:
length = int(data.get('length', 1))
except ValueError:
length = 1
try:
start_num = int(data.get('start', '0'))
page = start_num / length + 1
except ValueError:
start_num = 0
page = 1
count = lists.count()
if start_num >= count:
page = 1
paginator = Paginator(lists, length)
try:
lists = paginator.page(page)
except (EmptyPage, InvalidPage):
lists = paginator.page(paginator.num_pages)
rs = {"sEcho": 0, "iTotalRecords": count, "iTotalDisplayRecords": count, "aaData": []}
re_str = '<td.*?>(.*?)</td>'
number = length * (page-1) + 1
for l in lists.object_list:
t = TemplateResponse(request, 'rpt/user_log_client_ajax.html', {'l': l, 'number': number})
t.render()
rs["aaData"].append(re.findall(re_str, t.content, re.DOTALL))
number += 1
return HttpResponse(json.dumps(rs), content_type="application/json")
#########################################
# 管理员操作日志
@licence_required
def admin_log(request):
form = AdminLogForm(request.GET)
return render(request, "rpt/admin_log.html", context={
"form": form,
})
from django.contrib.contenttypes.models import ContentType
@licence_required
def admin_log_ajax(request):
data = request.GET
order_column = data.get('order[0][column]', '')
order_dir = data.get('order[0][dir]', '')
search = data.get('search[value]', '')
start_time = data.get('start_time', '')
end_time = data.get('end_time', '')
content_type = data.get('content_type', '')
domain_id = data.get('domain', '')
logs = LogEntry.objects.all()
if content_type:
try:
content_type_id = int(content_type)
logs = logs.filter(content_type_id=content_type_id)
except BaseException as e:
logs = logs.filter(extend_type=content_type)
if domain_id:
logs = logs.filter(domain_id=domain_id)
if start_time:
logs = logs.filter(timestamp__gte=start_time)
if end_time:
logs = logs.filter(timestamp__lte=start_time)
if search:
logs = logs.filter(remote_addr__icontains=search)
# Q(remote_addr__icontains=search) | Q(changes__icontains=search) )
colums = ['id', 'content_type', 'changes', 'action', 'actor', 'remote_addr', 'timestamp']
if logs.exists() and order_column and int(order_column) < len(colums):
col_name = colums[int(order_column)]
if order_dir == 'desc':
logs = logs.order_by('-%s' % col_name)
else:
logs = logs.order_by('%s' % col_name)
try:
length = int(data.get('length', 1))
except ValueError:
length = 1
try:
page = int(data.get('start', '0')) / length + 1
except ValueError:
page = 1
count = len(logs)
paginator = Paginator(logs, length)
try:
logs = paginator.page(page)
except (EmptyPage, InvalidPage):
logs = paginator.page(paginator.num_pages)
rs = {"sEcho": 0, "iTotalRecords": count, "iTotalDisplayRecords": count, "aaData": []}
re_str = '<td.*?>(.*?)</td>'
for d in logs.object_list:
t = TemplateResponse(request, 'rpt/admin_log_ajax.html', {'d': d})
t.render()
rs["aaData"].append(re.findall(re_str, t.content, re.DOTALL))
return HttpResponse(json.dumps(rs), content_type="application/json")
# return HttpResponse(json.dumps(rs, ensure_ascii=False), content_type="application/json")
#########################################
# 底层程序日志
@cache_page(60 * 5)
@licence_required
def sys_log(request):
logpath = "/usr/local/u-mail/log/app"
if request.method == 'POST':
name = request.POST.get('name')
status = request.POST.get('status')
if status == "download":
filepath = os.path.join(logpath, name)
if os.path.exists(filepath):
wrapper = FileWrapper(file(filepath))
response = HttpResponse(wrapper, content_type='application/octet-stream')
response['Content-Length'] = os.path.getsize(filepath)
response['Content-Disposition'] = 'attachment; filename=%s' % name
return response
else:
messages.add_message(request, messages.ERROR, _(u'日志文件不存在'))
return redirect("log_maintain")
index = 0
lists = []
listsa = os.listdir(logpath)
listsa.sort()
for line in listsa:
filepath = os.path.join(logpath, line)
if os.path.isfile(filepath):
size = os.path.getsize(filepath)
desc = getLogDesc(line)
index += 1
lists.append(
LogFormat._make( [index, line, desc, size] )
)
return render(request, "rpt/sys_log.html", context={
"lists": lists,
})
| [
"django.db.models.Count",
"app.rpt.models.MailLog.objects.all",
"app.maintain.tools.LogFormat._make",
"django.db.models.Sum",
"django.shortcuts.render",
"os.path.exists",
"app.utils.domain_session.get_domainid_bysession",
"os.listdir",
"app.core.models.AuthLog.objects.filter",
"app.utils.MailboxSe... | [((45079, 45097), 'django.views.decorators.cache.cache_page', 'cache_page', (['(60 * 5)'], {}), '(60 * 5)\n', (45089, 45097), False, 'from django.views.decorators.cache import cache_page\n'), ((1830, 1889), 'django.shortcuts.render', 'render', (['request', '"""rpt/maillog.html"""'], {'context': "{'form': form}"}), "(request, 'rpt/maillog.html', context={'form': form})\n", (1836, 1889), False, 'from django.shortcuts import render, get_object_or_404, redirect, get_list_or_404\n'), ((2544, 2575), 'app.utils.domain_session.get_domainid_bysession', 'get_domainid_bysession', (['request'], {}), '(request)\n', (2566, 2575), False, 'from app.utils.domain_session import get_domainid_bysession, get_session_domain\n'), ((2591, 2613), 'django.db.models.Q', 'Q', ([], {'domain_id': 'domain_id'}), '(domain_id=domain_id)\n', (2592, 2613), False, 'from django.db.models import Q, Count, Sum\n'), ((5800, 5841), 'app.core.models.Mailbox.objects.filter', 'Mailbox.objects.filter', (['condition_mailbox'], {}), '(condition_mailbox)\n', (5822, 5841), False, 'from app.core.models import Mailbox, MailboxUser, MailboxSize, DomainAttr, Domain, CoreMonitor, CoreAlias, Department, DepartmentMember, VisitLog, AuthLog\n'), ((8932, 8964), 'django.core.paginator.Paginator', 'Paginator', (['mailbox_lists', 'length'], {}), '(mailbox_lists, length)\n', (8941, 8964), False, 'from django.core.paginator import Paginator, EmptyPage, InvalidPage\n'), ((10449, 10496), 'app.utils.response.excel_response.ExcelResponse', 'ExcelResponse', (['lists', '"""mailbox"""'], {'encoding': '"""gbk"""'}), "(lists, 'mailbox', encoding='gbk')\n", (10462, 10496), False, 'from app.utils.response.excel_response import ExcelResponse\n'), ((10612, 10643), 'app.utils.domain_session.get_domainid_bysession', 'get_domainid_bysession', (['request'], {}), '(request)\n', (10634, 10643), False, 'from app.utils.domain_session import get_domainid_bysession, get_session_domain\n'), ((10709, 10773), 'django.shortcuts.render', 'render', (['request', '"""rpt/maillog_user.html"""'], {'context': "{'form': form}"}), "(request, 'rpt/maillog_user.html', context={'form': form})\n", (10715, 10773), False, 'from django.shortcuts import render, get_object_or_404, redirect, get_list_or_404\n'), ((11734, 11765), 'app.utils.domain_session.get_domainid_bysession', 'get_domainid_bysession', (['request'], {}), '(request)\n', (11756, 11765), False, 'from app.utils.domain_session import get_domainid_bysession, get_session_domain\n'), ((11845, 11888), 'app.rpt.models.MailLog.objects.filter', 'MailLog.objects.filter', ([], {'domain_id': 'domain_id'}), '(domain_id=domain_id)\n', (11867, 11888), False, 'from app.rpt.models import MailLog, LogReport, LogActive\n'), ((12713, 12735), 'django.db.models.Q', 'Q', ([], {'domain_id': 'domain_id'}), '(domain_id=domain_id)\n', (12714, 12735), False, 'from django.db.models import Q, Count, Sum\n'), ((12759, 12781), 'django.db.models.Q', 'Q', ([], {'domain_id': 'domain_id'}), '(domain_id=domain_id)\n', (12760, 12781), False, 'from django.db.models import Q, Count, Sum\n'), ((14303, 14314), 'time.time', 'time.time', ([], {}), '()\n', (14312, 14314), False, 'import time\n'), ((17965, 17989), 'django.core.paginator.Paginator', 'Paginator', (['lists', 'length'], {}), '(lists, length)\n', (17974, 17989), False, 'from django.core.paginator import Paginator, EmptyPage, InvalidPage\n'), ((21559, 21609), 'app.utils.response.excel_response.ExcelResponse', 'ExcelResponse', (['lists', '"""active.xls"""'], {'encoding': '"""gbk"""'}), "(lists, 'active.xls', encoding='gbk')\n", (21572, 21609), False, 'from app.utils.response.excel_response import ExcelResponse\n'), ((21744, 21775), 'app.utils.domain_session.get_domainid_bysession', 'get_domainid_bysession', (['request'], {}), '(request)\n', (21766, 21775), False, 'from app.utils.domain_session import get_domainid_bysession, get_session_domain\n'), ((22327, 22599), 'django.shortcuts.render', 'render', (['request', '"""rpt/maillog_stat.html"""'], {'context': "{'smtp_in': smtp_in, 'smtp_out': smtp_out, 'imap_session': imap_session,\n 'pop3_session': pop3_session, 'spam_receive': spam_receive,\n 'spam_reject': spam_reject, 'spam_virus': spam_virus, 'save_days':\n save_days}"}), "(request, 'rpt/maillog_stat.html', context={'smtp_in': smtp_in,\n 'smtp_out': smtp_out, 'imap_session': imap_session, 'pop3_session':\n pop3_session, 'spam_receive': spam_receive, 'spam_reject': spam_reject,\n 'spam_virus': spam_virus, 'save_days': save_days})\n", (22333, 22599), False, 'from django.shortcuts import render, get_object_or_404, redirect, get_list_or_404\n'), ((22751, 22782), 'app.utils.domain_session.get_domainid_bysession', 'get_domainid_bysession', (['request'], {}), '(request)\n', (22773, 22782), False, 'from app.utils.domain_session import get_domainid_bysession, get_session_domain\n'), ((24101, 24156), 'app.utils.response.excel_response.ExcelResponse', 'ExcelResponse', (['lists', '"""mail_report.xls"""'], {'encoding': '"""gbk"""'}), "(lists, 'mail_report.xls', encoding='gbk')\n", (24114, 24156), False, 'from app.utils.response.excel_response import ExcelResponse\n'), ((24309, 24373), 'django.shortcuts.render', 'render', (['request', '"""rpt/maillog_list.html"""'], {'context': "{'form': form}"}), "(request, 'rpt/maillog_list.html', context={'form': form})\n", (24315, 24373), False, 'from django.shortcuts import render, get_object_or_404, redirect, get_list_or_404\n'), ((25102, 25133), 'app.utils.domain_session.get_domainid_bysession', 'get_domainid_bysession', (['request'], {}), '(request)\n', (25124, 25133), False, 'from app.utils.domain_session import get_domainid_bysession, get_session_domain\n'), ((27509, 27562), 'app.utils.response.excel_response.ExcelResponse', 'ExcelResponse', (['lists2', '"""maillog_list"""'], {'encoding': '"""gbk"""'}), "(lists2, 'maillog_list', encoding='gbk')\n", (27522, 27562), False, 'from app.utils.response.excel_response import ExcelResponse\n'), ((28605, 28636), 'app.utils.domain_session.get_domainid_bysession', 'get_domainid_bysession', (['request'], {}), '(request)\n', (28627, 28636), False, 'from app.utils.domain_session import get_domainid_bysession, get_session_domain\n'), ((28653, 28675), 'django.db.models.Q', 'Q', ([], {'domain_id': 'domain_id'}), '(domain_id=domain_id)\n', (28654, 28675), False, 'from django.db.models import Q, Count, Sum\n'), ((30428, 30461), 'app.rpt.models.MailLog.objects.filter', 'MailLog.objects.filter', (['condition'], {}), '(condition)\n', (30450, 30461), False, 'from app.rpt.models import MailLog, LogReport, LogActive\n'), ((31136, 31160), 'django.core.paginator.Paginator', 'Paginator', (['lists', 'length'], {}), '(lists, length)\n', (31145, 31160), False, 'from django.core.paginator import Paginator, EmptyPage, InvalidPage\n'), ((31907, 31967), 'django.shortcuts.render', 'render', (['request', '"""rpt/user_log.html"""'], {'context': "{'form': form}"}), "(request, 'rpt/user_log.html', context={'form': form})\n", (31913, 31967), False, 'from django.shortcuts import render, get_object_or_404, redirect, get_list_or_404\n'), ((32304, 32335), 'app.utils.domain_session.get_domainid_bysession', 'get_domainid_bysession', (['request'], {}), '(request)\n', (32326, 32335), False, 'from app.utils.domain_session import get_domainid_bysession, get_session_domain\n'), ((34158, 34207), 'app.utils.response.excel_response.ExcelResponse', 'ExcelResponse', (['lists2', '"""user_log"""'], {'encoding': '"""gbk"""'}), "(lists2, 'user_log', encoding='gbk')\n", (34171, 34207), False, 'from app.utils.response.excel_response import ExcelResponse\n'), ((34435, 34466), 'app.utils.domain_session.get_domainid_bysession', 'get_domainid_bysession', (['request'], {}), '(request)\n', (34457, 34466), False, 'from app.utils.domain_session import get_domainid_bysession, get_session_domain\n'), ((35473, 35497), 'django.core.paginator.Paginator', 'Paginator', (['lists', 'length'], {}), '(lists, length)\n', (35482, 35497), False, 'from django.core.paginator import Paginator, EmptyPage, InvalidPage\n'), ((36192, 36270), 'django.shortcuts.render', 'render', (['request'], {'template_name': '"""rpt/user_log_web.html"""', 'context': "{'form': form}"}), "(request, template_name='rpt/user_log_web.html', context={'form': form})\n", (36198, 36270), False, 'from django.shortcuts import render, get_object_or_404, redirect, get_list_or_404\n'), ((36645, 36676), 'app.utils.domain_session.get_domainid_bysession', 'get_domainid_bysession', (['request'], {}), '(request)\n', (36667, 36676), False, 'from app.utils.domain_session import get_domainid_bysession, get_session_domain\n'), ((36689, 36733), 'app.core.models.VisitLog.objects.filter', 'VisitLog.objects.filter', ([], {'domain_id': 'domain_id'}), '(domain_id=domain_id)\n', (36712, 36733), False, 'from app.core.models import Mailbox, MailboxUser, MailboxSize, DomainAttr, Domain, CoreMonitor, CoreAlias, Department, DepartmentMember, VisitLog, AuthLog\n'), ((39010, 39034), 'django.core.paginator.Paginator', 'Paginator', (['lists', 'length'], {}), '(lists, length)\n', (39019, 39034), False, 'from django.core.paginator import Paginator, EmptyPage, InvalidPage\n'), ((39971, 40056), 'django.shortcuts.render', 'render', (['request'], {'template_name': '"""rpt/user_log_client.html"""', 'context': "{'form': form}"}), "(request, template_name='rpt/user_log_client.html', context={'form':\n form})\n", (39977, 40056), False, 'from django.shortcuts import render, get_object_or_404, redirect, get_list_or_404\n'), ((40371, 40402), 'app.utils.domain_session.get_domainid_bysession', 'get_domainid_bysession', (['request'], {}), '(request)\n', (40393, 40402), False, 'from app.utils.domain_session import get_domainid_bysession, get_session_domain\n'), ((40415, 40458), 'app.core.models.AuthLog.objects.filter', 'AuthLog.objects.filter', ([], {'domain_id': 'domain_id'}), '(domain_id=domain_id)\n', (40437, 40458), False, 'from app.core.models import Mailbox, MailboxUser, MailboxSize, DomainAttr, Domain, CoreMonitor, CoreAlias, Department, DepartmentMember, VisitLog, AuthLog\n'), ((41849, 41873), 'django.core.paginator.Paginator', 'Paginator', (['lists', 'length'], {}), '(lists, length)\n', (41858, 41873), False, 'from django.core.paginator import Paginator, EmptyPage, InvalidPage\n'), ((42625, 42686), 'django.shortcuts.render', 'render', (['request', '"""rpt/admin_log.html"""'], {'context': "{'form': form}"}), "(request, 'rpt/admin_log.html', context={'form': form})\n", (42631, 42686), False, 'from django.shortcuts import render, get_object_or_404, redirect, get_list_or_404\n'), ((43155, 43177), 'auditlog.models.LogEntry.objects.all', 'LogEntry.objects.all', ([], {}), '()\n', (43175, 43177), False, 'from auditlog.models import LogEntry\n'), ((44380, 44403), 'django.core.paginator.Paginator', 'Paginator', (['logs', 'length'], {}), '(logs, length)\n', (44389, 44403), False, 'from django.core.paginator import Paginator, EmptyPage, InvalidPage\n'), ((45938, 45957), 'os.listdir', 'os.listdir', (['logpath'], {}), '(logpath)\n', (45948, 45957), False, 'import os\n'), ((46300, 46361), 'django.shortcuts.render', 'render', (['request', '"""rpt/sys_log.html"""'], {'context': "{'lists': lists}"}), "(request, 'rpt/sys_log.html', context={'lists': lists})\n", (46306, 46361), False, 'from django.shortcuts import render, get_object_or_404, redirect, get_list_or_404\n'), ((3004, 3046), 'app.core.models.MailboxUser.objects.filter', 'MailboxUser.objects.filter', (['condition_user'], {}), '(condition_user)\n', (3030, 3046), False, 'from app.core.models import Mailbox, MailboxUser, MailboxSize, DomainAttr, Domain, CoreMonitor, CoreAlias, Department, DepartmentMember, VisitLog, AuthLog\n'), ((4433, 4484), 'app.core.models.DepartmentMember.objects.filter', 'DepartmentMember.objects.filter', (['condition_position'], {}), '(condition_position)\n', (4464, 4484), False, 'from app.core.models import Mailbox, MailboxUser, MailboxSize, DomainAttr, Domain, CoreMonitor, CoreAlias, Department, DepartmentMember, VisitLog, AuthLog\n'), ((4824, 4917), 'app.utils.MailboxSearch.search_send_recv_limit', 'MailboxSearch.search_send_recv_limit', ([], {'domain_id': 'domain_id', 'type': '"""send"""', 'limit': 'send_permit'}), "(domain_id=domain_id, type='send',\n limit=send_permit)\n", (4860, 4917), False, 'from app.utils import MailboxSearch\n'), ((5053, 5146), 'app.utils.MailboxSearch.search_send_recv_limit', 'MailboxSearch.search_send_recv_limit', ([], {'domain_id': 'domain_id', 'type': '"""recv"""', 'limit': 'recv_permit'}), "(domain_id=domain_id, type='recv',\n limit=recv_permit)\n", (5089, 5146), False, 'from app.utils import MailboxSearch\n'), ((9369, 9425), 'django.template.response.TemplateResponse', 'TemplateResponse', (['request', '"""rpt/maillog_ajax.html"""', 'data'], {}), "(request, 'rpt/maillog_ajax.html', data)\n", (9385, 9425), False, 'from django.template.response import TemplateResponse\n'), ((9560, 9574), 'json.dumps', 'json.dumps', (['rs'], {}), '(rs)\n', (9570, 9574), False, 'import json\n'), ((10089, 10110), 'django.utils.translation.ugettext', '_', (["data['sendpermit']"], {}), "(data['sendpermit'])\n", (10090, 10110), True, 'from django.utils.translation import ugettext as _\n'), ((10132, 10153), 'django.utils.translation.ugettext', '_', (["data['recvpermit']"], {}), "(data['recvpermit'])\n", (10133, 10153), True, 'from django.utils.translation import ugettext as _\n'), ((12823, 12851), 'django.db.models.Q', 'Q', ([], {'recv_time__gte': 'start_time'}), '(recv_time__gte=start_time)\n', (12824, 12851), False, 'from django.db.models import Q, Count, Sum\n'), ((12908, 12936), 'django.db.models.Q', 'Q', ([], {'recv_time__gte': 'start_time'}), '(recv_time__gte=start_time)\n', (12909, 12936), False, 'from django.db.models import Q, Count, Sum\n'), ((14348, 14372), 'django.db.models.Q', 'Q', ([], {'mailbox_id': 'mailbox_id'}), '(mailbox_id=mailbox_id)\n', (14349, 14372), False, 'from django.db.models import Q, Count, Sum\n'), ((18459, 18472), 'django.db.models.Count', 'Count', (['"""size"""'], {}), "('size')\n", (18464, 18472), False, 'from django.db.models import Q, Count, Sum\n'), ((18473, 18484), 'django.db.models.Sum', 'Sum', (['"""size"""'], {}), "('size')\n", (18476, 18484), False, 'from django.db.models import Q, Count, Sum\n'), ((18596, 18609), 'django.db.models.Count', 'Count', (['"""size"""'], {}), "('size')\n", (18601, 18609), False, 'from django.db.models import Q, Count, Sum\n'), ((18610, 18621), 'django.db.models.Sum', 'Sum', (['"""size"""'], {}), "('size')\n", (18613, 18621), False, 'from django.db.models import Q, Count, Sum\n'), ((18726, 18739), 'django.db.models.Count', 'Count', (['"""size"""'], {}), "('size')\n", (18731, 18739), False, 'from django.db.models import Q, Count, Sum\n'), ((18740, 18751), 'django.db.models.Sum', 'Sum', (['"""size"""'], {}), "('size')\n", (18743, 18751), False, 'from django.db.models import Q, Count, Sum\n'), ((19433, 19494), 'django.template.response.TemplateResponse', 'TemplateResponse', (['request', '"""rpt/maillog_user_ajax.html"""', 'data'], {}), "(request, 'rpt/maillog_user_ajax.html', data)\n", (19449, 19494), False, 'from django.template.response import TemplateResponse\n'), ((19629, 19643), 'json.dumps', 'json.dumps', (['rs'], {}), '(rs)\n', (19639, 19643), False, 'import json\n'), ((20159, 20172), 'django.db.models.Count', 'Count', (['"""size"""'], {}), "('size')\n", (20164, 20172), False, 'from django.db.models import Q, Count, Sum\n'), ((20173, 20184), 'django.db.models.Sum', 'Sum', (['"""size"""'], {}), "('size')\n", (20176, 20184), False, 'from django.db.models import Q, Count, Sum\n'), ((20301, 20314), 'django.db.models.Count', 'Count', (['"""size"""'], {}), "('size')\n", (20306, 20314), False, 'from django.db.models import Q, Count, Sum\n'), ((20315, 20326), 'django.db.models.Sum', 'Sum', (['"""size"""'], {}), "('size')\n", (20318, 20326), False, 'from django.db.models import Q, Count, Sum\n'), ((20436, 20449), 'django.db.models.Count', 'Count', (['"""size"""'], {}), "('size')\n", (20441, 20449), False, 'from django.db.models import Q, Count, Sum\n'), ((20450, 20461), 'django.db.models.Sum', 'Sum', (['"""size"""'], {}), "('size')\n", (20453, 20461), False, 'from django.db.models import Q, Count, Sum\n'), ((27267, 27280), 'django.utils.translation.ugettext', '_', (['d.get_type'], {}), '(d.get_type)\n', (27268, 27280), True, 'from django.utils.translation import ugettext as _\n'), ((31499, 31586), 'django.template.response.TemplateResponse', 'TemplateResponse', (['request', '"""rpt/maillog_list_ajax.html"""', "{'d': d, 'number': number}"], {}), "(request, 'rpt/maillog_list_ajax.html', {'d': d, 'number':\n number})\n", (31515, 31586), False, 'from django.template.response import TemplateResponse\n'), ((31717, 31731), 'json.dumps', 'json.dumps', (['rs'], {}), '(rs)\n', (31727, 31731), False, 'import json\n'), ((35836, 35915), 'django.template.response.TemplateResponse', 'TemplateResponse', (['request', '"""rpt/user_log_ajax.html"""', "{'d': d, 'number': number}"], {}), "(request, 'rpt/user_log_ajax.html', {'d': d, 'number': number})\n", (35852, 35915), False, 'from django.template.response import TemplateResponse\n'), ((36049, 36063), 'json.dumps', 'json.dumps', (['rs'], {}), '(rs)\n', (36059, 36063), False, 'import json\n'), ((39385, 39427), 'app.core.templatetags.tags.smooth_timedelta', 'smooth_timedelta', (['(l.lasttime - l.logintime)'], {}), '(l.lasttime - l.logintime)\n', (39401, 39427), False, 'from app.core.templatetags.tags import smooth_timedelta\n'), ((39557, 39696), 'django.template.response.TemplateResponse', 'TemplateResponse', (['request', '"""rpt/user_log_web_ajax.html"""', "{'l': l, 'number': number, 'continuetime': continuetime, 'is_login': is_login}"], {}), "(request, 'rpt/user_log_web_ajax.html', {'l': l, 'number':\n number, 'continuetime': continuetime, 'is_login': is_login})\n", (39573, 39696), False, 'from django.template.response import TemplateResponse\n'), ((39826, 39840), 'json.dumps', 'json.dumps', (['rs'], {}), '(rs)\n', (39836, 39840), False, 'import json\n'), ((42213, 42303), 'django.template.response.TemplateResponse', 'TemplateResponse', (['request', '"""rpt/user_log_client_ajax.html"""', "{'l': l, 'number': number}"], {}), "(request, 'rpt/user_log_client_ajax.html', {'l': l,\n 'number': number})\n", (42229, 42303), False, 'from django.template.response import TemplateResponse\n'), ((42433, 42447), 'json.dumps', 'json.dumps', (['rs'], {}), '(rs)\n', (42443, 42447), False, 'import json\n'), ((44706, 44768), 'django.template.response.TemplateResponse', 'TemplateResponse', (['request', '"""rpt/admin_log_ajax.html"""', "{'d': d}"], {}), "(request, 'rpt/admin_log_ajax.html', {'d': d})\n", (44722, 44768), False, 'from django.template.response import TemplateResponse\n'), ((44882, 44896), 'json.dumps', 'json.dumps', (['rs'], {}), '(rs)\n', (44892, 44896), False, 'import json\n'), ((46019, 46046), 'os.path.join', 'os.path.join', (['logpath', 'line'], {}), '(logpath, line)\n', (46031, 46046), False, 'import os\n'), ((46058, 46082), 'os.path.isfile', 'os.path.isfile', (['filepath'], {}), '(filepath)\n', (46072, 46082), False, 'import os\n'), ((2759, 2786), 'django.db.models.Q', 'Q', ([], {'realname__icontains': 'name'}), '(realname__icontains=name)\n', (2760, 2786), False, 'from django.db.models import Q, Count, Sum\n'), ((2862, 2895), 'django.db.models.Q', 'Q', ([], {'eenumber__icontains': 'worknumber'}), '(eenumber__icontains=worknumber)\n', (2863, 2895), False, 'from django.db.models import Q, Count, Sum\n'), ((3476, 3517), 'app.core.models.Department.objects.filter', 'Department.objects.filter', (['condition_dept'], {}), '(condition_dept)\n', (3501, 3517), False, 'from app.core.models import Mailbox, MailboxUser, MailboxSize, DomainAttr, Domain, CoreMonitor, CoreAlias, Department, DepartmentMember, VisitLog, AuthLog\n'), ((4735, 4762), 'django.db.models.Q', 'Q', ([], {'name__icontains': 'username'}), '(name__icontains=username)\n', (4736, 4762), False, 'from django.db.models import Q, Count, Sum\n'), ((4973, 4991), 'django.db.models.Q', 'Q', ([], {'id__in': 'box_list'}), '(id__in=box_list)\n', (4974, 4991), False, 'from django.db.models import Q, Count, Sum\n'), ((5202, 5220), 'django.db.models.Q', 'Q', ([], {'id__in': 'box_list'}), '(id__in=box_list)\n', (5203, 5220), False, 'from django.db.models import Q, Count, Sum\n'), ((5297, 5319), 'django.db.models.Q', 'Q', ([], {'quota_mailbox': 'quota'}), '(quota_mailbox=quota)\n', (5298, 5319), False, 'from django.db.models import Q, Count, Sum\n'), ((5404, 5434), 'django.db.models.Q', 'Q', ([], {'quota_netdisk': 'netdisk_quota'}), '(quota_netdisk=netdisk_quota)\n', (5405, 5434), False, 'from django.db.models import Q, Count, Sum\n'), ((5532, 5552), 'django.db.models.Q', 'Q', ([], {'disabled': 'disabled'}), '(disabled=disabled)\n', (5533, 5552), False, 'from django.db.models import Q, Count, Sum\n'), ((5699, 5715), 'django.db.models.Q', 'Q', ([], {'id': 'mailbox_id'}), '(id=mailbox_id)\n', (5700, 5715), False, 'from django.db.models import Q, Count, Sum\n'), ((7297, 7340), 'app.core.models.MailboxUser.objects.filter', 'MailboxUser.objects.filter', ([], {'mailbox_id': 'd.id'}), '(mailbox_id=d.id)\n', (7323, 7340), False, 'from app.core.models import Mailbox, MailboxUser, MailboxSize, DomainAttr, Domain, CoreMonitor, CoreAlias, Department, DepartmentMember, VisitLog, AuthLog\n'), ((7456, 7504), 'app.core.models.DepartmentMember.objects.filter', 'DepartmentMember.objects.filter', ([], {'mailbox_id': 'd.id'}), '(mailbox_id=d.id)\n', (7487, 7504), False, 'from app.core.models import Mailbox, MailboxUser, MailboxSize, DomainAttr, Domain, CoreMonitor, CoreAlias, Department, DepartmentMember, VisitLog, AuthLog\n'), ((7750, 7793), 'app.core.models.MailboxSize.objects.filter', 'MailboxSize.objects.filter', ([], {'mailbox_id': 'd.id'}), '(mailbox_id=d.id)\n', (7776, 7793), False, 'from app.core.models import Mailbox, MailboxUser, MailboxSize, DomainAttr, Domain, CoreMonitor, CoreAlias, Department, DepartmentMember, VisitLog, AuthLog\n'), ((9474, 9514), 're.findall', 're.findall', (['re_str', 't.content', 're.DOTALL'], {}), '(re_str, t.content, re.DOTALL)\n', (9484, 9514), False, 'import re\n'), ((9653, 9661), 'django.utils.translation.ugettext', '_', (['u"""序号"""'], {}), "(u'序号')\n", (9654, 9661), True, 'from django.utils.translation import ugettext as _\n'), ((9667, 9677), 'django.utils.translation.ugettext', '_', (['u"""用户名称"""'], {}), "(u'用户名称')\n", (9668, 9677), True, 'from django.utils.translation import ugettext as _\n'), ((9687, 9697), 'django.utils.translation.ugettext', '_', (['u"""用户姓名"""'], {}), "(u'用户姓名')\n", (9688, 9697), True, 'from django.utils.translation import ugettext as _\n'), ((9707, 9715), 'django.utils.translation.ugettext', '_', (['u"""部门"""'], {}), "(u'部门')\n", (9708, 9715), True, 'from django.utils.translation import ugettext as _\n'), ((9721, 9729), 'django.utils.translation.ugettext', '_', (['u"""职位"""'], {}), "(u'职位')\n", (9722, 9729), True, 'from django.utils.translation import ugettext as _\n'), ((9735, 9743), 'django.utils.translation.ugettext', '_', (['u"""工号"""'], {}), "(u'工号')\n", (9736, 9743), True, 'from django.utils.translation import ugettext as _\n'), ((9749, 9759), 'django.utils.translation.ugettext', '_', (['u"""发送权限"""'], {}), "(u'发送权限')\n", (9750, 9759), True, 'from django.utils.translation import ugettext as _\n'), ((9769, 9779), 'django.utils.translation.ugettext', '_', (['u"""接收权限"""'], {}), "(u'接收权限')\n", (9770, 9779), True, 'from django.utils.translation import ugettext as _\n'), ((9789, 9803), 'django.utils.translation.ugettext', '_', (['u"""邮箱容量(MB)"""'], {}), "(u'邮箱容量(MB)')\n", (9790, 9803), True, 'from django.utils.translation import ugettext as _\n'), ((9805, 9821), 'django.utils.translation.ugettext', '_', (['u"""网络硬盘容量(MB)"""'], {}), "(u'网络硬盘容量(MB)')\n", (9806, 9821), True, 'from django.utils.translation import ugettext as _\n'), ((9805, 9821), 'django.utils.translation.ugettext', '_', (['u"""已用邮箱容量(MB)"""'], {}), "(u'已用邮箱容量(MB)')\n", (9806, 9821), True, 'from django.utils.translation import ugettext as _\n'), ((9805, 9815), 'django.utils.translation.ugettext', '_', (['u"""邮箱状态"""'], {}), "(u'邮箱状态')\n", (9806, 9815), True, 'from django.utils.translation import ugettext as _\n'), ((10003, 10011), 'django.utils.translation.ugettext', '_', (['u"""启用"""'], {}), "(u'启用')\n", (10004, 10011), True, 'from django.utils.translation import ugettext as _\n'), ((10046, 10054), 'django.utils.translation.ugettext', '_', (['u"""禁用"""'], {}), "(u'禁用')\n", (10047, 10054), True, 'from django.utils.translation import ugettext as _\n'), ((12351, 12378), 'django.db.models.Q', 'Q', ([], {'name__icontains': 'username'}), '(name__icontains=username)\n', (12352, 12378), False, 'from django.db.models import Q, Count, Sum\n'), ((12467, 12489), 'django.db.models.Q', 'Q', ([], {'domain_id': 'domain_id'}), '(domain_id=domain_id)\n', (12468, 12489), False, 'from django.db.models import Q, Count, Sum\n'), ((13066, 13091), 'django.db.models.Q', 'Q', ([], {'recv_time__lt': 'end_time'}), '(recv_time__lt=end_time)\n', (13067, 13091), False, 'from django.db.models import Q, Count, Sum\n'), ((13152, 13177), 'django.db.models.Q', 'Q', ([], {'recv_time__lt': 'end_time'}), '(recv_time__lt=end_time)\n', (13153, 13177), False, 'from django.db.models import Q, Count, Sum\n'), ((14141, 14178), 'app.core.models.Mailbox.objects.filter', 'Mailbox.objects.filter', ([], {'id': 'mailbox_id'}), '(id=mailbox_id)\n', (14163, 14178), False, 'from app.core.models import Mailbox, MailboxUser, MailboxSize, DomainAttr, Domain, CoreMonitor, CoreAlias, Department, DepartmentMember, VisitLog, AuthLog\n'), ((14222, 14236), 'django.utils.translation.ugettext', '_', (['u"""已删除邮箱_%s"""'], {}), "(u'已删除邮箱_%s')\n", (14223, 14236), True, 'from django.utils.translation import ugettext as _\n'), ((19543, 19583), 're.findall', 're.findall', (['re_str', 't.content', 're.DOTALL'], {}), '(re_str, t.content, re.DOTALL)\n', (19553, 19583), False, 'import re\n'), ((19745, 19753), 'django.utils.translation.ugettext', '_', (['u"""序号"""'], {}), "(u'序号')\n", (19746, 19753), True, 'from django.utils.translation import ugettext as _\n'), ((19759, 19768), 'django.utils.translation.ugettext', '_', (['u"""用户名"""'], {}), "(u'用户名')\n", (19760, 19768), True, 'from django.utils.translation import ugettext as _\n'), ((19776, 19786), 'django.utils.translation.ugettext', '_', (['u"""已用容量"""'], {}), "(u'已用容量')\n", (19777, 19786), True, 'from django.utils.translation import ugettext as _\n'), ((19796, 19806), 'django.utils.translation.ugettext', '_', (['u"""邮件数量"""'], {}), "(u'邮件数量')\n", (19797, 19806), True, 'from django.utils.translation import ugettext as _\n'), ((19816, 19825), 'django.utils.translation.ugettext', '_', (['u"""总流量"""'], {}), "(u'总流量')\n", (19817, 19825), True, 'from django.utils.translation import ugettext as _\n'), ((19824, 19834), 'django.utils.translation.ugettext', '_', (['u"""入站数量"""'], {}), "(u'入站数量')\n", (19825, 19834), True, 'from django.utils.translation import ugettext as _\n'), ((19824, 19834), 'django.utils.translation.ugettext', '_', (['u"""入站流量"""'], {}), "(u'入站流量')\n", (19825, 19834), True, 'from django.utils.translation import ugettext as _\n'), ((19839, 19851), 'django.utils.translation.ugettext', '_', (['u"""垃圾过滤数量"""'], {}), "(u'垃圾过滤数量')\n", (19840, 19851), True, 'from django.utils.translation import ugettext as _\n'), ((19865, 19877), 'django.utils.translation.ugettext', '_', (['u"""垃圾过滤流量"""'], {}), "(u'垃圾过滤流量')\n", (19866, 19877), True, 'from django.utils.translation import ugettext as _\n'), ((19891, 19901), 'django.utils.translation.ugettext', '_', (['u"""出站数量"""'], {}), "(u'出站数量')\n", (19892, 19901), True, 'from django.utils.translation import ugettext as _\n'), ((19911, 19921), 'django.utils.translation.ugettext', '_', (['u"""出站流量"""'], {}), "(u'出站流量')\n", (19912, 19921), True, 'from django.utils.translation import ugettext as _\n'), ((19931, 19941), 'django.utils.translation.ugettext', '_', (['u"""成功数量"""'], {}), "(u'成功数量')\n", (19932, 19941), True, 'from django.utils.translation import ugettext as _\n'), ((19951, 19961), 'django.utils.translation.ugettext', '_', (['u"""成功流量"""'], {}), "(u'成功流量')\n", (19952, 19961), True, 'from django.utils.translation import ugettext as _\n'), ((19963, 19973), 'django.utils.translation.ugettext', '_', (['u"""失败数量"""'], {}), "(u'失败数量')\n", (19964, 19973), True, 'from django.utils.translation import ugettext as _\n'), ((19963, 19973), 'django.utils.translation.ugettext', '_', (['u"""失败流量"""'], {}), "(u'失败流量')\n", (19964, 19973), True, 'from django.utils.translation import ugettext as _\n'), ((19963, 19972), 'django.utils.translation.ugettext', '_', (['u"""垃圾率"""'], {}), "(u'垃圾率')\n", (19964, 19972), True, 'from django.utils.translation import ugettext as _\n'), ((19963, 19974), 'django.utils.translation.ugettext', '_', (['u"""出站成功率"""'], {}), "(u'出站成功率')\n", (19964, 19974), True, 'from django.utils.translation import ugettext as _\n'), ((23336, 23347), 'django.utils.translation.ugettext', '_', (['u"""{}天总计"""'], {}), "(u'{}天总计')\n", (23337, 23347), True, 'from django.utils.translation import ugettext as _\n'), ((23380, 23388), 'django.utils.translation.ugettext', '_', (['u"""序号"""'], {}), "(u'序号')\n", (23381, 23388), True, 'from django.utils.translation import ugettext as _\n'), ((23394, 23402), 'django.utils.translation.ugettext', '_', (['u"""名称"""'], {}), "(u'名称')\n", (23395, 23402), True, 'from django.utils.translation import ugettext as _\n'), ((23408, 23418), 'django.utils.translation.ugettext', '_', (['u"""近期总计"""'], {}), "(u'近期总计')\n", (23409, 23418), True, 'from django.utils.translation import ugettext as _\n'), ((23442, 23450), 'django.utils.translation.ugettext', '_', (['u"""今日"""'], {}), "(u'今日')\n", (23443, 23450), True, 'from django.utils.translation import ugettext as _\n'), ((23456, 23464), 'django.utils.translation.ugettext', '_', (['u"""昨日"""'], {}), "(u'昨日')\n", (23457, 23464), True, 'from django.utils.translation import ugettext as _\n'), ((23470, 23480), 'django.utils.translation.ugettext', '_', (['u"""2日之前"""'], {}), "(u'2日之前')\n", (23471, 23480), True, 'from django.utils.translation import ugettext as _\n'), ((23488, 23498), 'django.utils.translation.ugettext', '_', (['u"""3日之前"""'], {}), "(u'3日之前')\n", (23489, 23498), True, 'from django.utils.translation import ugettext as _\n'), ((23505, 23515), 'django.utils.translation.ugettext', '_', (['u"""4日之前"""'], {}), "(u'4日之前')\n", (23506, 23515), True, 'from django.utils.translation import ugettext as _\n'), ((23505, 23515), 'django.utils.translation.ugettext', '_', (['u"""5日之前"""'], {}), "(u'5日之前')\n", (23506, 23515), True, 'from django.utils.translation import ugettext as _\n'), ((23505, 23515), 'django.utils.translation.ugettext', '_', (['u"""6日之前"""'], {}), "(u'6日之前')\n", (23506, 23515), True, 'from django.utils.translation import ugettext as _\n'), ((23532, 23548), 'django.utils.translation.ugettext', '_', (['u"""SMTP邮件(收信)"""'], {}), "(u'SMTP邮件(收信)')\n", (23533, 23548), True, 'from django.utils.translation import ugettext as _\n'), ((23568, 23584), 'django.utils.translation.ugettext', '_', (['u"""SMTP邮件(发信)"""'], {}), "(u'SMTP邮件(发信)')\n", (23569, 23584), True, 'from django.utils.translation import ugettext as _\n'), ((23605, 23617), 'django.utils.translation.ugettext', '_', (['u"""IMAP会话"""'], {}), "(u'IMAP会话')\n", (23606, 23617), True, 'from django.utils.translation import ugettext as _\n'), ((23642, 23654), 'django.utils.translation.ugettext', '_', (['u"""POP3会话"""'], {}), "(u'POP3会话')\n", (23643, 23654), True, 'from django.utils.translation import ugettext as _\n'), ((23679, 23693), 'django.utils.translation.ugettext', '_', (['u"""已接收的垃圾邮件"""'], {}), "(u'已接收的垃圾邮件')\n", (23680, 23693), True, 'from django.utils.translation import ugettext as _\n'), ((23719, 23733), 'django.utils.translation.ugettext', '_', (['u"""已拒绝的垃圾邮件"""'], {}), "(u'已拒绝的垃圾邮件')\n", (23720, 23733), True, 'from django.utils.translation import ugettext as _\n'), ((23758, 23772), 'django.utils.translation.ugettext', '_', (['u"""已拒绝的病毒邮件"""'], {}), "(u'已拒绝的病毒邮件')\n", (23759, 23772), True, 'from django.utils.translation import ugettext as _\n'), ((25197, 25219), 'django.db.models.Q', 'Q', ([], {'domain_id': 'domain_id'}), '(domain_id=domain_id)\n', (25198, 25219), False, 'from django.db.models import Q, Count, Sum\n'), ((25283, 25299), 'django.db.models.Q', 'Q', ([], {'type': 'log_type'}), '(type=log_type)\n', (25284, 25299), False, 'from django.db.models import Q, Count, Sum\n'), ((25497, 25530), 'django.db.models.Q', 'Q', ([], {'send_mail__icontains': 'send_mail'}), '(send_mail__icontains=send_mail)\n', (25498, 25530), False, 'from django.db.models import Q, Count, Sum\n'), ((25595, 25628), 'django.db.models.Q', 'Q', ([], {'recv_mail__icontains': 'recv_mail'}), '(recv_mail__icontains=recv_mail)\n', (25596, 25628), False, 'from django.db.models import Q, Count, Sum\n'), ((25692, 25723), 'django.db.models.Q', 'Q', ([], {'senderip__icontains': 'senderip'}), '(senderip__icontains=senderip)\n', (25693, 25723), False, 'from django.db.models import Q, Count, Sum\n'), ((25789, 25824), 'django.db.models.Q', 'Q', ([], {'rcv_server__icontains': 'rcv_server'}), '(rcv_server__icontains=rcv_server)\n', (25790, 25824), False, 'from django.db.models import Q, Count, Sum\n'), ((25902, 25918), 'django.db.models.Q', 'Q', ([], {'result': 'result'}), '(result=result)\n', (25903, 25918), False, 'from django.db.models import Q, Count, Sum\n'), ((26953, 26961), 'django.utils.translation.ugettext', '_', (['u"""序号"""'], {}), "(u'序号')\n", (26954, 26961), True, 'from django.utils.translation import ugettext as _\n'), ((26967, 26975), 'django.utils.translation.ugettext', '_', (['u"""时间"""'], {}), "(u'时间')\n", (26968, 26975), True, 'from django.utils.translation import ugettext as _\n'), ((26981, 26990), 'django.utils.translation.ugettext', '_', (['u"""用户名"""'], {}), "(u'用户名')\n", (26982, 26990), True, 'from django.utils.translation import ugettext as _\n'), ((26998, 27006), 'django.utils.translation.ugettext', '_', (['u"""类型"""'], {}), "(u'类型')\n", (26999, 27006), True, 'from django.utils.translation import ugettext as _\n'), ((27012, 27022), 'django.utils.translation.ugettext', '_', (['u"""发件邮箱"""'], {}), "(u'发件邮箱')\n", (27013, 27022), True, 'from django.utils.translation import ugettext as _\n'), ((27032, 27042), 'django.utils.translation.ugettext', '_', (['u"""收件邮箱"""'], {}), "(u'收件邮箱')\n", (27033, 27042), True, 'from django.utils.translation import ugettext as _\n'), ((27052, 27063), 'django.utils.translation.ugettext', '_', (['u"""发件服务器"""'], {}), "(u'发件服务器')\n", (27053, 27063), True, 'from django.utils.translation import ugettext as _\n'), ((27075, 27086), 'django.utils.translation.ugettext', '_', (['u"""收件服务器"""'], {}), "(u'收件服务器')\n", (27076, 27086), True, 'from django.utils.translation import ugettext as _\n'), ((27098, 27108), 'django.utils.translation.ugettext', '_', (['u"""邮件标题"""'], {}), "(u'邮件标题')\n", (27099, 27108), True, 'from django.utils.translation import ugettext as _\n'), ((27114, 27124), 'django.utils.translation.ugettext', '_', (['u"""附件名称"""'], {}), "(u'附件名称')\n", (27115, 27124), True, 'from django.utils.translation import ugettext as _\n'), ((27114, 27124), 'django.utils.translation.ugettext', '_', (['u"""附件大小"""'], {}), "(u'附件大小')\n", (27115, 27124), True, 'from django.utils.translation import ugettext as _\n'), ((27114, 27124), 'django.utils.translation.ugettext', '_', (['u"""投递位置"""'], {}), "(u'投递位置')\n", (27115, 27124), True, 'from django.utils.translation import ugettext as _\n'), ((27114, 27122), 'django.utils.translation.ugettext', '_', (['u"""结果"""'], {}), "(u'结果')\n", (27115, 27122), True, 'from django.utils.translation import ugettext as _\n'), ((27114, 27124), 'django.utils.translation.ugettext', '_', (['u"""投递提示"""'], {}), "(u'投递提示')\n", (27115, 27124), True, 'from django.utils.translation import ugettext as _\n'), ((27172, 27180), 'django.utils.translation.ugettext', '_', (['u"""成功"""'], {}), "(u'成功')\n", (27173, 27180), True, 'from django.utils.translation import ugettext as _\n'), ((27213, 27221), 'django.utils.translation.ugettext', '_', (['u"""失败"""'], {}), "(u'失败')\n", (27214, 27221), True, 'from django.utils.translation import ugettext as _\n'), ((28863, 28879), 'django.db.models.Q', 'Q', ([], {'type': 'log_type'}), '(type=log_type)\n', (28864, 28879), False, 'from django.db.models import Q, Count, Sum\n'), ((29077, 29110), 'django.db.models.Q', 'Q', ([], {'send_mail__icontains': 'send_mail'}), '(send_mail__icontains=send_mail)\n', (29078, 29110), False, 'from django.db.models import Q, Count, Sum\n'), ((29175, 29208), 'django.db.models.Q', 'Q', ([], {'recv_mail__icontains': 'recv_mail'}), '(recv_mail__icontains=recv_mail)\n', (29176, 29208), False, 'from django.db.models import Q, Count, Sum\n'), ((29272, 29303), 'django.db.models.Q', 'Q', ([], {'senderip__icontains': 'senderip'}), '(senderip__icontains=senderip)\n', (29273, 29303), False, 'from django.db.models import Q, Count, Sum\n'), ((29369, 29404), 'django.db.models.Q', 'Q', ([], {'rcv_server__icontains': 'rcv_server'}), '(rcv_server__icontains=rcv_server)\n', (29370, 29404), False, 'from django.db.models import Q, Count, Sum\n'), ((29482, 29498), 'django.db.models.Q', 'Q', ([], {'result': 'result'}), '(result=result)\n', (29483, 29498), False, 'from django.db.models import Q, Count, Sum\n'), ((31630, 31670), 're.findall', 're.findall', (['re_str', 't.content', 're.DOTALL'], {}), '(re_str, t.content, re.DOTALL)\n', (31640, 31670), False, 'import re\n'), ((33281, 33289), 'django.utils.translation.ugettext', '_', (['u"""序号"""'], {}), "(u'序号')\n", (33282, 33289), True, 'from django.utils.translation import ugettext as _\n'), ((33295, 33303), 'django.utils.translation.ugettext', '_', (['u"""时间"""'], {}), "(u'时间')\n", (33296, 33303), True, 'from django.utils.translation import ugettext as _\n'), ((33309, 33318), 'django.utils.translation.ugettext', '_', (['u"""用户名"""'], {}), "(u'用户名')\n", (33310, 33318), True, 'from django.utils.translation import ugettext as _\n'), ((33326, 33336), 'django.utils.translation.ugettext', '_', (['u"""真实姓名"""'], {}), "(u'真实姓名')\n", (33327, 33336), True, 'from django.utils.translation import ugettext as _\n'), ((33346, 33354), 'django.utils.translation.ugettext', '_', (['u"""邮箱"""'], {}), "(u'邮箱')\n", (33347, 33354), True, 'from django.utils.translation import ugettext as _\n'), ((33360, 33369), 'django.utils.translation.ugettext', '_', (['u"""手机号"""'], {}), "(u'手机号')\n", (33361, 33369), True, 'from django.utils.translation import ugettext as _\n'), ((33377, 33387), 'django.utils.translation.ugettext', '_', (['u"""微信昵称"""'], {}), "(u'微信昵称')\n", (33378, 33387), True, 'from django.utils.translation import ugettext as _\n'), ((33397, 33405), 'django.utils.translation.ugettext', '_', (['u"""头像"""'], {}), "(u'头像')\n", (33398, 33405), True, 'from django.utils.translation import ugettext as _\n'), ((33411, 33421), 'django.utils.translation.ugettext', '_', (['u"""操作类型"""'], {}), "(u'操作类型')\n", (33412, 33421), True, 'from django.utils.translation import ugettext as _\n'), ((33425, 33435), 'django.utils.translation.ugettext', '_', (['u"""模块动作"""'], {}), "(u'模块动作')\n", (33426, 33435), True, 'from django.utils.translation import ugettext as _\n'), ((33425, 33433), 'django.utils.translation.ugettext', '_', (['u"""结果"""'], {}), "(u'结果')\n", (33426, 33433), True, 'from django.utils.translation import ugettext as _\n'), ((33425, 33433), 'django.utils.translation.ugettext', '_', (['u"""详情"""'], {}), "(u'详情')\n", (33426, 33433), True, 'from django.utils.translation import ugettext as _\n'), ((33425, 33436), 'django.utils.translation.ugettext', '_', (['u"""客户端IP"""'], {}), "(u'客户端IP')\n", (33426, 33436), True, 'from django.utils.translation import ugettext as _\n'), ((35963, 36003), 're.findall', 're.findall', (['re_str', 't.content', 're.DOTALL'], {}), '(re_str, t.content, re.DOTALL)\n', (35973, 36003), False, 'import re\n'), ((39447, 39461), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (39459, 39461), False, 'from django.utils import timezone\n'), ((39740, 39780), 're.findall', 're.findall', (['re_str', 't.content', 're.DOTALL'], {}), '(re_str, t.content, re.DOTALL)\n', (39750, 39780), False, 'import re\n'), ((42347, 42387), 're.findall', 're.findall', (['re_str', 't.content', 're.DOTALL'], {}), '(re_str, t.content, re.DOTALL)\n', (42357, 42387), False, 'import re\n'), ((44816, 44856), 're.findall', 're.findall', (['re_str', 't.content', 're.DOTALL'], {}), '(re_str, t.content, re.DOTALL)\n', (44826, 44856), False, 'import re\n'), ((45353, 45380), 'os.path.join', 'os.path.join', (['logpath', 'name'], {}), '(logpath, name)\n', (45365, 45380), False, 'import os\n'), ((45396, 45420), 'os.path.exists', 'os.path.exists', (['filepath'], {}), '(filepath)\n', (45410, 45420), False, 'import os\n'), ((46103, 46128), 'os.path.getsize', 'os.path.getsize', (['filepath'], {}), '(filepath)\n', (46118, 46128), False, 'import os\n'), ((46148, 46164), 'app.maintain.tools.getLogDesc', 'getLogDesc', (['line'], {}), '(line)\n', (46158, 46164), False, 'from app.maintain.tools import getLogDesc, LogFormat\n'), ((3352, 3382), 'django.db.models.Q', 'Q', ([], {'title__icontains': 'department'}), '(title__icontains=department)\n', (3353, 3382), False, 'from django.db.models import Q, Count, Sum\n'), ((3653, 3684), 'django.db.models.Q', 'Q', ([], {'position__icontains': 'position'}), '(position__icontains=position)\n', (3654, 3684), False, 'from django.db.models import Q, Count, Sum\n'), ((4030, 4048), 'django.db.models.Q', 'Q', ([], {'dept_id': 'dept_id'}), '(dept_id=dept_id)\n', (4031, 4048), False, 'from django.db.models import Q, Count, Sum\n'), ((4298, 4322), 'django.db.models.Q', 'Q', ([], {'mailbox_id': 'mailbox_id'}), '(mailbox_id=mailbox_id)\n', (4299, 4322), False, 'from django.db.models import Q, Count, Sum\n'), ((5648, 5664), 'django.db.models.Q', 'Q', ([], {'id': 'mailbox_id'}), '(id=mailbox_id)\n', (5649, 5664), False, 'from django.db.models import Q, Count, Sum\n'), ((7627, 7664), 'app.core.models.Department.objects.filter', 'Department.objects.filter', ([], {'id': 'dept_id'}), '(id=dept_id)\n', (7652, 7664), False, 'from app.core.models import Mailbox, MailboxUser, MailboxSize, DomainAttr, Domain, CoreMonitor, CoreAlias, Department, DepartmentMember, VisitLog, AuthLog\n'), ((11948, 12020), 'app.core.models.DepartmentMember.objects.filter', 'DepartmentMember.objects.filter', ([], {'domain_id': 'domain_id', 'dept_id': 'department'}), '(domain_id=domain_id, dept_id=department)\n', (11979, 12020), False, 'from app.core.models import Mailbox, MailboxUser, MailboxSize, DomainAttr, Domain, CoreMonitor, CoreAlias, Department, DepartmentMember, VisitLog, AuthLog\n'), ((12508, 12549), 'app.core.models.Mailbox.objects.filter', 'Mailbox.objects.filter', (['condition_mailbox'], {}), '(condition_mailbox)\n', (12530, 12549), False, 'from app.core.models import Mailbox, MailboxUser, MailboxSize, DomainAttr, Domain, CoreMonitor, CoreAlias, Department, DepartmentMember, VisitLog, AuthLog\n'), ((13245, 13258), 'django.db.models.Count', 'Count', (['"""size"""'], {}), "('size')\n", (13250, 13258), False, 'from django.db.models import Q, Count, Sum\n'), ((13259, 13270), 'django.db.models.Sum', 'Sum', (['"""size"""'], {}), "('size')\n", (13262, 13270), False, 'from django.db.models import Q, Count, Sum\n'), ((25364, 25396), 'django.db.models.Q', 'Q', ([], {'send_mail__icontains': 'username'}), '(send_mail__icontains=username)\n', (25365, 25396), False, 'from django.db.models import Q, Count, Sum\n'), ((25399, 25431), 'django.db.models.Q', 'Q', ([], {'recv_mail__icontains': 'username'}), '(recv_mail__icontains=username)\n', (25400, 25431), False, 'from django.db.models import Q, Count, Sum\n'), ((25978, 26004), 'django.db.models.Q', 'Q', ([], {'subject__icontains': 'text'}), '(subject__icontains=text)\n', (25979, 26004), False, 'from django.db.models import Q, Count, Sum\n'), ((26007, 26036), 'django.db.models.Q', 'Q', ([], {'attachment__icontains': 'text'}), '(attachment__icontains=text)\n', (26008, 26036), False, 'from django.db.models import Q, Count, Sum\n'), ((26143, 26171), 'django.db.models.Q', 'Q', ([], {'recv_time__gte': 'start_time'}), '(recv_time__gte=start_time)\n', (26144, 26171), False, 'from django.db.models import Q, Count, Sum\n'), ((26227, 26253), 'django.db.models.Q', 'Q', ([], {'recv_time__lte': 'end_time'}), '(recv_time__lte=end_time)\n', (26228, 26253), False, 'from django.db.models import Q, Count, Sum\n'), ((26471, 26505), 'django.db.models.Q', 'Q', ([], {'attachment_size__gte': 'min_attach'}), '(attachment_size__gte=min_attach)\n', (26472, 26505), False, 'from django.db.models import Q, Count, Sum\n'), ((26625, 26659), 'django.db.models.Q', 'Q', ([], {'attachment_size__lte': 'max_attach'}), '(attachment_size__lte=max_attach)\n', (26626, 26659), False, 'from django.db.models import Q, Count, Sum\n'), ((28736, 28766), 'django.db.models.Q', 'Q', ([], {'send_mail__icontains': 'search'}), '(send_mail__icontains=search)\n', (28737, 28766), False, 'from django.db.models import Q, Count, Sum\n'), ((28769, 28799), 'django.db.models.Q', 'Q', ([], {'recv_mail__icontains': 'search'}), '(recv_mail__icontains=search)\n', (28770, 28799), False, 'from django.db.models import Q, Count, Sum\n'), ((28944, 28976), 'django.db.models.Q', 'Q', ([], {'send_mail__icontains': 'username'}), '(send_mail__icontains=username)\n', (28945, 28976), False, 'from django.db.models import Q, Count, Sum\n'), ((28979, 29011), 'django.db.models.Q', 'Q', ([], {'recv_mail__icontains': 'username'}), '(recv_mail__icontains=username)\n', (28980, 29011), False, 'from django.db.models import Q, Count, Sum\n'), ((29686, 29714), 'django.db.models.Q', 'Q', ([], {'recv_mail__icontains': 'text'}), '(recv_mail__icontains=text)\n', (29687, 29714), False, 'from django.db.models import Q, Count, Sum\n'), ((29820, 29848), 'django.db.models.Q', 'Q', ([], {'recv_time__gte': 'start_time'}), '(recv_time__gte=start_time)\n', (29821, 29848), False, 'from django.db.models import Q, Count, Sum\n'), ((29903, 29929), 'django.db.models.Q', 'Q', ([], {'recv_time__lte': 'end_time'}), '(recv_time__lte=end_time)\n', (29904, 29929), False, 'from django.db.models import Q, Count, Sum\n'), ((30142, 30176), 'django.db.models.Q', 'Q', ([], {'attachment_size__gte': 'min_attach'}), '(attachment_size__gte=min_attach)\n', (30143, 30176), False, 'from django.db.models import Q, Count, Sum\n'), ((30291, 30325), 'django.db.models.Q', 'Q', ([], {'attachment_size__lte': 'max_attach'}), '(attachment_size__lte=max_attach)\n', (30292, 30325), False, 'from django.db.models import Q, Count, Sum\n'), ((32570, 32647), 'app.core.models.MailboxUser.objects.filter', 'MailboxUser.objects.filter', ([], {'domain_id': 'domain_id', 'realname__icontains': 'username'}), '(domain_id=domain_id, realname__icontains=username)\n', (32596, 32647), False, 'from app.core.models import Mailbox, MailboxUser, MailboxSize, DomainAttr, Domain, CoreMonitor, CoreAlias, Department, DepartmentMember, VisitLog, AuthLog\n'), ((33931, 33967), 'django.template.defaultfilters.date', 'date_format', (['d.datetime', '"""Y-m-d H:i"""'], {}), "(d.datetime, 'Y-m-d H:i')\n", (33942, 33967), True, 'from django.template.defaultfilters import date as date_format\n'), ((34719, 34794), 'app.core.models.MailboxUser.objects.filter', 'MailboxUser.objects.filter', ([], {'domain_id': 'domain_id', 'realname__icontains': 'search'}), '(domain_id=domain_id, realname__icontains=search)\n', (34745, 34794), False, 'from app.core.models import Mailbox, MailboxUser, MailboxSize, DomainAttr, Domain, CoreMonitor, CoreAlias, Department, DepartmentMember, VisitLog, AuthLog\n'), ((45503, 45565), 'django.http.HttpResponse', 'HttpResponse', (['wrapper'], {'content_type': '"""application/octet-stream"""'}), "(wrapper, content_type='application/octet-stream')\n", (45515, 45565), False, 'from django.http import HttpResponseRedirect, HttpResponse, Http404\n'), ((45611, 45636), 'os.path.getsize', 'os.path.getsize', (['filepath'], {}), '(filepath)\n', (45626, 45636), False, 'import os\n'), ((45870, 45894), 'django.shortcuts.redirect', 'redirect', (['"""log_maintain"""'], {}), "('log_maintain')\n", (45878, 45894), False, 'from django.shortcuts import render, get_object_or_404, redirect, get_list_or_404\n'), ((46230, 46272), 'app.maintain.tools.LogFormat._make', 'LogFormat._make', (['[index, line, desc, size]'], {}), '([index, line, desc, size])\n', (46245, 46272), False, 'from app.maintain.tools import getLogDesc, LogFormat\n'), ((3968, 3986), 'django.db.models.Q', 'Q', ([], {'dept_id': 'dept_id'}), '(dept_id=dept_id)\n', (3969, 3986), False, 'from django.db.models import Q, Count, Sum\n'), ((4231, 4255), 'django.db.models.Q', 'Q', ([], {'mailbox_id': 'mailbox_id'}), '(mailbox_id=mailbox_id)\n', (4232, 4255), False, 'from django.db.models import Q, Count, Sum\n'), ((7874, 7914), 'app.core.models.VisitLog.objects.filter', 'VisitLog.objects.filter', ([], {'mailbox_id': 'd.id'}), '(mailbox_id=d.id)\n', (7897, 7914), False, 'from app.core.models import Mailbox, MailboxUser, MailboxSize, DomainAttr, Domain, CoreMonitor, CoreAlias, Department, DepartmentMember, VisitLog, AuthLog\n'), ((8038, 8092), 'app.core.models.AuthLog.objects.filter', 'AuthLog.objects.filter', ([], {'user': 'd.username', 'is_login': '(True)'}), '(user=d.username, is_login=True)\n', (8060, 8092), False, 'from app.core.models import Mailbox, MailboxUser, MailboxSize, DomainAttr, Domain, CoreMonitor, CoreAlias, Department, DepartmentMember, VisitLog, AuthLog\n'), ((14531, 14549), 'django.db.models.Sum', 'Sum', (['"""total_count"""'], {}), "('total_count')\n", (14534, 14549), False, 'from django.db.models import Q, Count, Sum\n'), ((14554, 14571), 'django.db.models.Sum', 'Sum', (['"""total_flow"""'], {}), "('total_flow')\n", (14557, 14571), False, 'from django.db.models import Q, Count, Sum\n'), ((14585, 14600), 'django.db.models.Sum', 'Sum', (['"""in_count"""'], {}), "('in_count')\n", (14588, 14600), False, 'from django.db.models import Q, Count, Sum\n'), ((14605, 14619), 'django.db.models.Sum', 'Sum', (['"""in_flow"""'], {}), "('in_flow')\n", (14608, 14619), False, 'from django.db.models import Q, Count, Sum\n'), ((14633, 14650), 'django.db.models.Sum', 'Sum', (['"""spam_count"""'], {}), "('spam_count')\n", (14636, 14650), False, 'from django.db.models import Q, Count, Sum\n'), ((14655, 14671), 'django.db.models.Sum', 'Sum', (['"""spam_flow"""'], {}), "('spam_flow')\n", (14658, 14671), False, 'from django.db.models import Q, Count, Sum\n'), ((14685, 14705), 'django.db.models.Sum', 'Sum', (['"""success_count"""'], {}), "('success_count')\n", (14688, 14705), False, 'from django.db.models import Q, Count, Sum\n'), ((14710, 14729), 'django.db.models.Sum', 'Sum', (['"""success_flow"""'], {}), "('success_flow')\n", (14713, 14729), False, 'from django.db.models import Q, Count, Sum\n'), ((26787, 26820), 'app.rpt.models.MailLog.objects.filter', 'MailLog.objects.filter', (['condition'], {}), '(condition)\n', (26809, 26820), False, 'from app.rpt.models import MailLog, LogReport, LogActive\n'), ((26881, 26902), 'app.rpt.models.MailLog.objects.all', 'MailLog.objects.all', ([], {}), '()\n', (26900, 26902), False, 'from app.rpt.models import MailLog, LogReport, LogActive\n'), ((29655, 29683), 'django.db.models.Q', 'Q', ([], {'send_mail__icontains': 'text'}), '(send_mail__icontains=text)\n', (29656, 29683), False, 'from django.db.models import Q, Count, Sum\n'), ((32898, 32915), 'django.db.models.Q', 'Q', ([], {'classify': '"""pop"""'}), "(classify='pop')\n", (32899, 32915), False, 'from django.db.models import Q, Count, Sum\n'), ((32918, 32936), 'django.db.models.Q', 'Q', ([], {'classify': '"""imap"""'}), "(classify='imap')\n", (32919, 32936), False, 'from django.db.models import Q, Count, Sum\n'), ((45832, 45845), 'django.utils.translation.ugettext', '_', (['u"""日志文件不存在"""'], {}), "(u'日志文件不存在')\n", (45833, 45845), True, 'from django.utils.translation import ugettext as _\n'), ((15452, 15465), 'django.db.models.Count', 'Count', (['"""size"""'], {}), "('size')\n", (15457, 15465), False, 'from django.db.models import Q, Count, Sum\n'), ((15466, 15477), 'django.db.models.Sum', 'Sum', (['"""size"""'], {}), "('size')\n", (15469, 15477), False, 'from django.db.models import Q, Count, Sum\n'), ((15617, 15630), 'django.db.models.Count', 'Count', (['"""size"""'], {}), "('size')\n", (15622, 15630), False, 'from django.db.models import Q, Count, Sum\n'), ((15631, 15642), 'django.db.models.Sum', 'Sum', (['"""size"""'], {}), "('size')\n", (15634, 15642), False, 'from django.db.models import Q, Count, Sum\n'), ((15775, 15788), 'django.db.models.Count', 'Count', (['"""size"""'], {}), "('size')\n", (15780, 15788), False, 'from django.db.models import Q, Count, Sum\n'), ((15789, 15800), 'django.db.models.Sum', 'Sum', (['"""size"""'], {}), "('size')\n", (15792, 15800), False, 'from django.db.models import Q, Count, Sum\n'), ((18415, 18427), 'django.db.models.Q', 'Q', ([], {'type': '"""in"""'}), "(type='in')\n", (18416, 18427), False, 'from django.db.models import Q, Count, Sum\n'), ((18663, 18695), 'django.db.models.Q', 'Q', ([], {'type': '"""in"""', 'status': '"""spam-flag"""'}), "(type='in', status='spam-flag')\n", (18664, 18695), False, 'from django.db.models import Q, Count, Sum\n'), ((20115, 20127), 'django.db.models.Q', 'Q', ([], {'type': '"""in"""'}), "(type='in')\n", (20116, 20127), False, 'from django.db.models import Q, Count, Sum\n'), ((20373, 20405), 'django.db.models.Q', 'Q', ([], {'type': '"""in"""', 'status': '"""spam-flag"""'}), "(type='in', status='spam-flag')\n", (20374, 20405), False, 'from django.db.models import Q, Count, Sum\n'), ((29558, 29584), 'django.db.models.Q', 'Q', ([], {'subject__icontains': 'text'}), '(subject__icontains=text)\n', (29559, 29584), False, 'from django.db.models import Q, Count, Sum\n'), ((29587, 29616), 'django.db.models.Q', 'Q', ([], {'attachment__icontains': 'text'}), '(attachment__icontains=text)\n', (29588, 29616), False, 'from django.db.models import Q, Count, Sum\n'), ((18535, 18548), 'django.db.models.Q', 'Q', ([], {'result': '"""1"""'}), "(result='1')\n", (18536, 18548), False, 'from django.db.models import Q, Count, Sum\n'), ((18551, 18564), 'django.db.models.Q', 'Q', ([], {'type': '"""out"""'}), "(type='out')\n", (18552, 18564), False, 'from django.db.models import Q, Count, Sum\n'), ((20240, 20253), 'django.db.models.Q', 'Q', ([], {'result': '"""1"""'}), "(result='1')\n", (20241, 20253), False, 'from django.db.models import Q, Count, Sum\n'), ((20256, 20269), 'django.db.models.Q', 'Q', ([], {'type': '"""out"""'}), "(type='out')\n", (20257, 20269), False, 'from django.db.models import Q, Count, Sum\n'), ((15408, 15420), 'django.db.models.Q', 'Q', ([], {'type': '"""in"""'}), "(type='in')\n", (15409, 15420), False, 'from django.db.models import Q, Count, Sum\n'), ((15572, 15585), 'django.db.models.Q', 'Q', ([], {'type': '"""out"""'}), "(type='out')\n", (15573, 15585), False, 'from django.db.models import Q, Count, Sum\n'), ((15712, 15744), 'django.db.models.Q', 'Q', ([], {'type': '"""in"""', 'status': '"""spam-flag"""'}), "(type='in', status='spam-flag')\n", (15713, 15744), False, 'from django.db.models import Q, Count, Sum\n'), ((15556, 15569), 'django.db.models.Q', 'Q', ([], {'result': '"""1"""'}), "(result='1')\n", (15557, 15569), False, 'from django.db.models import Q, Count, Sum\n')] |
"""
Conf Helper Bases
~~~~~~~~~~~~~~~~~
"""
import abc
import threading
from abc import ABCMeta
from typing import Any
from typing import Generic
from typing import TypeVar
class DefaultCache(dict):
"""
Very similar to :py:class:`collections.defaultdict` (using __missing__)
however passes the specified key to the default factory method.
"""
__slots__ = ("default_factory",)
def __init__(self, default_factory=None, **kwargs):
super().__init__(**kwargs)
self.default_factory = default_factory
def __missing__(self, key: Any):
if not self.default_factory:
raise KeyError(key)
self[key] = value = self.default_factory(key)
return value
FT = TypeVar("FT")
class FactoryMixin(Generic[FT], metaclass=ABCMeta):
"""
Mixing to provide a factory interface
"""
__slots__ = ()
@abc.abstractmethod
def create(self, name: str = None) -> FT:
"""
Create an instance based on a named setting.
"""
class SingletonFactoryMixin(FactoryMixin[FT], metaclass=ABCMeta):
""""
Mixin that provides a single named instance.
This instance factory type is useful for instance types that only require
a single instance eg database connections, web service agents.
If your instance types are not thread safe it is recommended that the
:py:class:`ThreadLocalSingletonFactoryMixin` is used.
"""
__slots__ = ("_instances",)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._instances = DefaultCache(self.create)
instances_lock = threading.RLock()
def create_wrapper(name: str = None) -> FT:
with instances_lock:
return self._instances[name]
self.create = create_wrapper
class ThreadLocalSingletonFactoryMixin(FactoryMixin[FT], metaclass=ABCMeta):
"""
Mixin that provides a single named instance per thread.
This instance factory type is useful for instance types that only require
a single instance eg database connections, web service agents and that are
not thread safe.
"""
__slots__ = ("_instances",)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._instances = threading.local()
create = self.create
def create_wrapper(name: str = None) -> FT:
try:
cache = self._instances.cache
except AttributeError:
cache = self._instances.cache = DefaultCache(create)
return cache[name]
self.create = create_wrapper
| [
"threading.local",
"threading.RLock",
"typing.TypeVar"
] | [((728, 741), 'typing.TypeVar', 'TypeVar', (['"""FT"""'], {}), "('FT')\n", (735, 741), False, 'from typing import TypeVar\n'), ((1631, 1648), 'threading.RLock', 'threading.RLock', ([], {}), '()\n', (1646, 1648), False, 'import threading\n'), ((2297, 2314), 'threading.local', 'threading.local', ([], {}), '()\n', (2312, 2314), False, 'import threading\n')] |
#! /usr/bin/env python3
import json
import os
import sys
import re
import argparse
import time
from math import floor
from os.path import dirname
from subprocess import Popen, PIPE, STDOUT
from blessings import Terminal
class Heatmap(object):
coords = [
[
# Row 0
[ 4, 0], [ 4, 2], [ 2, 0], [ 1, 0], [ 2, 2], [ 3, 0], [ 3, 2],
[ 3, 4], [ 3, 6], [ 2, 4], [ 1, 2], [ 2, 6], [ 4, 4], [ 4, 6],
],
[
# Row 1
[ 8, 0], [ 8, 2], [ 6, 0], [ 5, 0], [ 6, 2], [ 7, 0], [ 7, 2],
[ 7, 4], [ 7, 6], [ 6, 4], [ 5, 2], [ 6, 6], [ 8, 4], [ 8, 6],
],
[
# Row 2
[12, 0], [12, 2], [10, 0], [ 9, 0], [10, 2], [11, 0], [ ],
[ ], [11, 2], [10, 4], [ 9, 2], [10, 6], [12, 4], [12, 6],
],
[
# Row 3
[17, 0], [17, 2], [15, 0], [14, 0], [15, 2], [16, 0], [13, 0],
[13, 2], [16, 2], [15, 4], [14, 2], [15, 6], [17, 4], [17, 6],
],
[
# Row 4
[20, 0], [20, 2], [19, 0], [18, 0], [19, 2], [], [], [], [],
[19, 4], [18, 2], [19, 6], [20, 4], [20, 6], [], [], [], []
],
[
# Row 5
[ ], [23, 0], [22, 2], [22, 0], [22, 4], [21, 0], [21, 2],
[24, 0], [24, 2], [25, 0], [25, 4], [25, 2], [26, 0], [ ],
],
]
def set_attr_at(self, block, n, attr, fn, val):
blk = self.heatmap[block][n]
if attr in blk:
blk[attr] = fn(blk[attr], val)
else:
blk[attr] = fn(None, val)
def coord(self, col, row):
return self.coords[row][col]
@staticmethod
def set_attr(orig, new):
return new
def set_bg(self, coords, color):
(block, n) = coords
self.set_attr_at(block, n, "c", self.set_attr, color)
#self.set_attr_at(block, n, "g", self.set_attr, False)
def set_tap_info(self, coords, count, cap):
(block, n) = coords
def _set_tap_info(o, _count, _cap):
ns = 4 - o.count ("\n")
return o + "\n" * ns + "%.02f%%" % (float(_count) / float(_cap) * 100)
if not cap:
cap = 1
self.heatmap[block][n + 1] = _set_tap_info (self.heatmap[block][n + 1], count, cap)
@staticmethod
def heatmap_color (v):
colors = [ [0.3, 0.3, 1], [0.3, 1, 0.3], [1, 1, 0.3], [1, 0.3, 0.3]]
fb = 0
if v <= 0:
idx1, idx2 = 0, 0
elif v >= 1:
idx1, idx2 = len(colors) - 1, len(colors) - 1
else:
val = v * (len(colors) - 1)
idx1 = int(floor(val))
idx2 = idx1 + 1
fb = val - float(idx1)
r = (colors[idx2][0] - colors[idx1][0]) * fb + colors[idx1][0]
g = (colors[idx2][1] - colors[idx1][1]) * fb + colors[idx1][1]
b = (colors[idx2][2] - colors[idx1][2]) * fb + colors[idx1][2]
r, g, b = [x * 255 for x in (r, g, b)]
return "#%02x%02x%02x" % (int(r), int(g), int(b))
def __init__(self, layout):
self.log = {}
self.total = 0
self.max_cnt = 0
self.layout = layout
def update_log(self, coords):
(c, r) = coords
if not (c, r) in self.log:
self.log[(c, r)] = 0
self.log[(c, r)] = self.log[(c, r)] + 1
self.total = self.total + 1
if self.max_cnt < self.log[(c, r)]:
self.max_cnt = self.log[(c, r)]
def get_heatmap(self):
with open("%s/heatmap-layout.%s.json" % (dirname(sys.argv[0]), self.layout), "r") as f:
self.heatmap = json.load (f)
## Reset colors
for row in self.coords:
for coord in row:
if coord != []:
self.set_bg (coord, "#d9dae0")
for (c, r) in self.log:
coords = self.coord(c, r)
b, n = coords
cap = self.max_cnt
if cap == 0:
cap = 1
v = float(self.log[(c, r)]) / cap
self.set_bg (coords, self.heatmap_color (v))
self.set_tap_info (coords, self.log[(c, r)], self.total)
return self.heatmap
def get_stats(self):
usage = [
# left hand
[0, 0, 0, 0, 0],
# right hand
[0, 0, 0, 0, 0]
]
finger_map = [0, 0, 1, 2, 3, 3, 3, 1, 1, 1, 2, 3, 4, 4]
for (c, r) in self.log:
if r == 5: # thumb cluster
if c <= 6: # left side
usage[0][4] = usage[0][4] + self.log[(c, r)]
else:
usage[1][0] = usage[1][0] + self.log[(c, r)]
elif r == 4 and (c == 4 or c == 9): # bottom row thumb keys
if c <= 6: # left side
usage[0][4] = usage[0][4] + self.log[(c, r)]
else:
usage[1][0] = usage[1][0] + self.log[(c, r)]
else:
fc = c
hand = 0
if fc >= 7:
hand = 1
fm = finger_map[fc]
usage[hand][fm] = usage[hand][fm] + self.log[(c, r)]
hand_usage = [0, 0]
for f in usage[0]:
hand_usage[0] = hand_usage[0] + f
for f in usage[1]:
hand_usage[1] = hand_usage[1] + f
total = self.total
if total == 0:
total = 1
stats = {
"total-keys": total,
"hands": {
"left": {
"usage": round(float(hand_usage[0]) / total * 100, 2),
"fingers": {
"pinky": 0,
"ring": 0,
"middle": 0,
"index": 0,
"thumb": 0,
}
},
"right": {
"usage": round(float(hand_usage[1]) / total * 100, 2),
"fingers": {
"thumb": 0,
"index": 0,
"middle": 0,
"ring": 0,
"pinky": 0,
}
},
}
}
hmap = ['left', 'right']
fmap = ['pinky', 'ring', 'middle', 'index', 'thumb',
'thumb', 'index', 'middle', 'ring', 'pinky']
for hand_idx in range(len(usage)):
hand = usage[hand_idx]
for finger_idx in range(len(hand)):
stats['hands'][hmap[hand_idx]]['fingers'][fmap[finger_idx + hand_idx * 5]] = round(float(hand[finger_idx]) / total * 100, 2)
return stats
def dump_all(out_dir, heatmaps):
stats = {}
t = Terminal()
t.clear()
sys.stdout.write("\x1b[2J\x1b[H")
print ('{t.underline}{outdir}{t.normal}\n'.format(t=t, outdir=out_dir))
keys = list(heatmaps.keys())
keys.sort()
for layer in keys:
if len(heatmaps[layer].log) == 0:
continue
with open ("%s/%s.json" % (out_dir, layer), "w") as f:
json.dump(heatmaps[layer].get_heatmap(), f)
stats[layer] = heatmaps[layer].get_stats()
left = stats[layer]['hands']['left']
right = stats[layer]['hands']['right']
print ('{t.bold}{layer}{t.normal} ({total:,} taps):'.format(t=t, layer=layer,
total=int(stats[layer]['total-keys'] / 2)))
print (('{t.underline} | ' + \
'left ({l[usage]:6.2f}%) | ' + \
'right ({r[usage]:6.2f}%) |{t.normal}').format(t=t, l=left, r=right))
print ((' {t.bright_magenta}pinky{t.white} | {left[pinky]:6.2f}% | {right[pinky]:6.2f}% |\n' + \
' {t.bright_cyan}ring{t.white} | {left[ring]:6.2f}% | {right[ring]:6.2f}% |\n' + \
' {t.bright_blue}middle{t.white} | {left[middle]:6.2f}% | {right[middle]:6.2f}% |\n' + \
' {t.bright_green}index{t.white} | {left[index]:6.2f}% | {right[index]:6.2f}% |\n' + \
' {t.bright_red}thumb{t.white} | {left[thumb]:6.2f}% | {right[thumb]:6.2f}% |\n' + \
'').format(left=left['fingers'], right=right['fingers'], t=t))
def process_line(line, heatmaps, opts, stamped_log = None):
m = re.search ('KL: col=(\d+), row=(\d+), pressed=(\d+), layer=(.*)', line)
if not m:
return False
if stamped_log is not None:
if line.startswith("KL:"):
print ("%10.10f %s" % (time.time(), line),
file = stamped_log, end = '')
else:
print (line,
file = stamped_log, end = '')
stamped_log.flush()
(c, r, l) = (int(m.group (2)), int(m.group (1)), m.group (4))
if (c, r) not in opts.allowed_keys:
return False
heatmaps[l].update_log ((c, r))
return True
def setup_allowed_keys(opts):
if len(opts.only_key):
incmap={}
for v in opts.only_key:
m = re.search ('(\d+),(\d+)', v)
if not m:
continue
(c, r) = (int(m.group(1)), int(m.group(2)))
incmap[(c, r)] = True
else:
incmap={}
for r in range(0, 6):
for c in range(0, 14):
incmap[(c, r)] = True
for v in opts.ignore_key:
m = re.search ('(\d+),(\d+)', v)
if not m:
continue
(c, r) = (int(m.group(1)), int(m.group(2)))
del(incmap[(c, r)])
return incmap
def main(opts):
heatmaps = {"Dvorak": Heatmap("Dvorak"),
"ADORE": Heatmap("ADORE")
}
cnt = 0
out_dir = opts.outdir
if not os.path.exists(out_dir):
os.makedirs(out_dir)
opts.allowed_keys = setup_allowed_keys(opts)
if not opts.one_shot:
try:
with open("%s/stamped-log" % out_dir, "r") as f:
while True:
line = f.readline()
if not line:
break
if not process_line(line, heatmaps, opts):
continue
except:
pass
stamped_log = open ("%s/stamped-log" % (out_dir), "a+")
else:
stamped_log = None
while True:
line = sys.stdin.readline()
if not line:
break
if not process_line(line, heatmaps, opts, stamped_log):
continue
cnt = cnt + 1
if opts.dump_interval != -1 and cnt >= opts.dump_interval and not opts.one_shot:
cnt = 0
dump_all(out_dir, heatmaps)
dump_all (out_dir, heatmaps)
if __name__ == "__main__":
parser = argparse.ArgumentParser (description = "keylog to heatmap processor")
parser.add_argument ('outdir', action = 'store',
help = 'Output directory')
parser.add_argument ('--dump-interval', dest = 'dump_interval', action = 'store', type = int,
default = 100, help = 'Dump stats and heatmap at every Nth event, -1 for dumping at EOF only')
parser.add_argument ('--ignore-key', dest = 'ignore_key', action = 'append', type = str,
default = [], help = 'Ignore the key at position (x, y)')
parser.add_argument ('--only-key', dest = 'only_key', action = 'append', type = str,
default = [], help = 'Only include key at position (x, y)')
parser.add_argument ('--one-shot', dest = 'one_shot', action = 'store_true',
help = 'Do not load previous data, and do not update it, either.')
args = parser.parse_args()
if len(args.ignore_key) and len(args.only_key):
print ("--ignore-key and --only-key are mutually exclusive, please only use one of them!",
file = sys.stderr)
sys.exit(1)
main(args)
| [
"os.path.exists",
"argparse.ArgumentParser",
"blessings.Terminal",
"os.makedirs",
"math.floor",
"sys.stdout.write",
"sys.stdin.readline",
"os.path.dirname",
"sys.exit",
"json.load",
"time.time",
"re.search"
] | [((6805, 6815), 'blessings.Terminal', 'Terminal', ([], {}), '()\n', (6813, 6815), False, 'from blessings import Terminal\n'), ((6834, 6867), 'sys.stdout.write', 'sys.stdout.write', (['"""\x1b[2J\x1b[H"""'], {}), "('\\x1b[2J\\x1b[H')\n", (6850, 6867), False, 'import sys\n'), ((8473, 8546), 're.search', 're.search', (['"""KL: col=(\\\\d+), row=(\\\\d+), pressed=(\\\\d+), layer=(.*)"""', 'line'], {}), "('KL: col=(\\\\d+), row=(\\\\d+), pressed=(\\\\d+), layer=(.*)', line)\n", (8482, 8546), False, 'import re\n'), ((10857, 10923), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""keylog to heatmap processor"""'}), "(description='keylog to heatmap processor')\n", (10880, 10923), False, 'import argparse\n'), ((9864, 9887), 'os.path.exists', 'os.path.exists', (['out_dir'], {}), '(out_dir)\n', (9878, 9887), False, 'import os\n'), ((9897, 9917), 'os.makedirs', 'os.makedirs', (['out_dir'], {}), '(out_dir)\n', (9908, 9917), False, 'import os\n'), ((10464, 10484), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (10482, 10484), False, 'import sys\n'), ((11997, 12008), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (12005, 12008), False, 'import sys\n'), ((3710, 3722), 'json.load', 'json.load', (['f'], {}), '(f)\n', (3719, 3722), False, 'import json\n'), ((9173, 9202), 're.search', 're.search', (['"""(\\\\d+),(\\\\d+)"""', 'v'], {}), "('(\\\\d+),(\\\\d+)', v)\n", (9182, 9202), False, 'import re\n'), ((9521, 9550), 're.search', 're.search', (['"""(\\\\d+),(\\\\d+)"""', 'v'], {}), "('(\\\\d+),(\\\\d+)', v)\n", (9530, 9550), False, 'import re\n'), ((2733, 2743), 'math.floor', 'floor', (['val'], {}), '(val)\n', (2738, 2743), False, 'from math import floor\n'), ((3636, 3656), 'os.path.dirname', 'dirname', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (3643, 3656), False, 'from os.path import dirname\n'), ((8682, 8693), 'time.time', 'time.time', ([], {}), '()\n', (8691, 8693), False, 'import time\n')] |
#!/usr/bin/python3
import argparse
import logging
import os
import re
import shlex
import sys
from subprocess import Popen, PIPE
# Main function
def build_index(parser_result):
aligner = parser_result.aligner.lower()
global quiet
quiet = parser_result.quiet
check_tools(aligner)
if not quiet:
log_formatter = logging.Formatter("[%(asctime)s] [%(levelname)s] %(message)s", datefmt='%Y-%m-%d %I:%M:%S %p')
global root_logger
root_logger = logging.getLogger()
root_logger.setLevel("INFO")
if parser_result.prefix is None:
prefix = os.path.splitext(os.path.basename(parser_result.genome_file.split(",")[0]))[0].\
rstrip(".fasta").rstrip(".fa")
else:
prefix = parser_result.prefix
if parser_result.output_dir is None:
output_prefix = prefix
if not quiet:
log_file_handler = logging.FileHandler("%s_index_build.log" % prefix, mode="w")
else:
output_prefix = "%s/%s" % (parser_result.output_dir.rstrip("/"), prefix)
try:
os.mkdir(parser_result.output_dir)
except FileExistsError:
pass
if not quiet:
log_file_handler = logging.FileHandler("%s/%s_index_build.log" %
(parser_result.output_dir.rstrip("/"), prefix), mode="w")
if not quiet:
log_file_handler.setFormatter(log_formatter)
root_logger.addHandler(log_file_handler)
if not quiet and len(root_logger.handlers) == 1:
console_handler = logging.StreamHandler(sys.stdout)
console_handler.setFormatter(log_formatter)
root_logger.addHandler(console_handler)
tools_names = {"star": "STAR", "subread": "Subread"}
if not quiet:
root_logger.info("Building index for %s..." % tools_names[aligner])
if aligner == "star":
genome_index = build_star(parser_result, output_prefix)
elif aligner == "subread":
genome_index = build_subread(parser_result, output_prefix)
else:
genome_index = None
if not quiet:
root_logger.info("Completed building index")
root_logger.removeHandler(log_file_handler)
return genome_index
# Adds all the arguments to the argument parser
def add_args(parser, required_args):
required_args.add_argument("--genome_file", "-G",
dest="genome_file",
required=True,
help="FASTA Genome file")
required_args.add_argument("--aligner_tool", "-at",
dest="aligner",
required=True,
type=aligner_string,
help="Aligner to build index (STAR|Subread)")
parser.add_argument("--builder_extra_args", "-be",
dest="builder_extra_args",
default="",
nargs="?",
help="Extra argument to be passed to aligner index build")
parser.add_argument("--annotation", "-a",
dest="annotation",
help="Annotation file to be used")
parser.add_argument("--output_dir", "-o",
dest="output_dir",
nargs="?",
help="Output directory (default: current directory)")
parser.add_argument("--output_prefix", "-p",
dest="prefix",
help="Prefix for all the output files (default: uses input read file name)")
parser.add_argument("--quiet", "-q",
action="store_true",
dest="quiet",
help=argparse.SUPPRESS)
parser.add_argument("--threads", "-t",
dest="threads",
default=4,
type=int,
help="Number of threads to be used by aligner index builder (default: %(default)s)")
# Checks for valid aligner
def aligner_string(s):
s = s.lower()
regex = "star|subread"
if not re.match(regex, s):
error = "Aligner to be used (STAR|Subread)"
raise argparse.ArgumentTypeError(error)
else:
return s
# Checks for tools execution
def check_tools(aligner):
tools_dir = {"star": "STAR", "subread": "subread-buildindex"}
try:
build = Popen(shlex.split(tools_dir[aligner]), stdout=PIPE, stderr=PIPE)
build.communicate()
except:
error = "[%s] Error encountered when being called. Script will not run." % tools_dir[aligner]
raise RuntimeError(error)
# Builds STAR index
def build_star(parser_result, output_prefix):
genome_index = "%s_star" % output_prefix
os.mkdir(genome_index) if not os.path.exists(genome_index) else 0
output_file = "%s/Genome" % genome_index
command = "STAR --runThreadN {threads} --runMode genomeGenerate " \
"--genomeDir {genome_index} " \
"--genomeFastaFiles {genome_file} {annotation_option} " \
"{builder_extra_args}". \
format(threads=parser_result.threads,
genome_index=genome_index,
genome_file=parser_result.genome_file,
annotation_option="--sjdbGTFfile %s" % parser_result.annotation
if parser_result.annotation is not None else "",
builder_extra_args=parser_result.builder_extra_args)
run_tool("STAR", command, output_file)
return genome_index
# Builds Subread index
def build_subread(parser_result, output_prefix):
genome_index = "%s_subread/" % output_prefix
os.mkdir(genome_index) if not os.path.exists(genome_index) else 0
genome_index += "genome"
output_file = "%s.00.b.tab" % genome_index
command = "subread-buildindex -o {genome_index} " \
"{builder_extra_args} {genome_file}". \
format(genome_index=genome_index,
builder_extra_args=parser_result.builder_extra_args,
genome_file=parser_result.genome_file)
run_tool("Subread-Build", command, output_file)
return genome_index
# Runs tools with the given command
# Also checks for the existence of one of the expected output from the tools
def run_tool(tool, command, output_file):
if not quiet:
root_logger.info("Command: %s" % command)
tool_process = Popen(shlex.split(command), stdout=PIPE, stderr=PIPE)
tool_out, tool_err = tool_process.communicate()
if tool_process.returncode != 0:
error = "{tool} failed to complete (non-zero return code)!\n" \
"{tool} stdout: {out}\n{tool} stderr: {err}\n". \
format(tool=tool,
out=tool_out.decode("utf8"),
err=tool_err.decode("utf8"))
raise RuntimeError(error)
elif not os.path.exists(output_file):
error = "{tool} failed to complete (no output file is found)!\n" \
"{tool} stdout: {out}\n{tool} stderr: {err}\n". \
format(tool=tool,
out=tool_out.decode("utf8"),
err=tool_err.decode("utf8"))
raise RuntimeError(error)
elif "[Errno" in tool_err.decode("utf8").strip():
error = "{tool} failed to complete (error)!\n" \
"{tool} stdout: {out}\n{tool} stderr: {err}\n". \
format(tool=tool,
out=tool_out.decode("utf8"),
err=tool_err.decode("utf8"))
raise RuntimeError(error)
if __name__ == "__main__":
arg_parser = argparse.ArgumentParser(description="Aligner Index Builder")
required_arguments = arg_parser.add_argument_group('required arguments')
add_args(arg_parser, required_arguments)
build_index(arg_parser.parse_args())
| [
"logging.getLogger",
"os.path.exists",
"logging.StreamHandler",
"argparse.ArgumentParser",
"shlex.split",
"logging.Formatter",
"re.match",
"argparse.ArgumentTypeError",
"logging.FileHandler",
"os.mkdir"
] | [((7602, 7662), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Aligner Index Builder"""'}), "(description='Aligner Index Builder')\n", (7625, 7662), False, 'import argparse\n'), ((342, 441), 'logging.Formatter', 'logging.Formatter', (['"""[%(asctime)s] [%(levelname)s] %(message)s"""'], {'datefmt': '"""%Y-%m-%d %I:%M:%S %p"""'}), "('[%(asctime)s] [%(levelname)s] %(message)s', datefmt=\n '%Y-%m-%d %I:%M:%S %p')\n", (359, 441), False, 'import logging\n'), ((486, 505), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (503, 505), False, 'import logging\n'), ((1573, 1606), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stdout'], {}), '(sys.stdout)\n', (1594, 1606), False, 'import logging\n'), ((4150, 4168), 're.match', 're.match', (['regex', 's'], {}), '(regex, s)\n', (4158, 4168), False, 'import re\n'), ((4236, 4269), 'argparse.ArgumentTypeError', 'argparse.ArgumentTypeError', (['error'], {}), '(error)\n', (4262, 4269), False, 'import argparse\n'), ((4803, 4825), 'os.mkdir', 'os.mkdir', (['genome_index'], {}), '(genome_index)\n', (4811, 4825), False, 'import os\n'), ((5694, 5716), 'os.mkdir', 'os.mkdir', (['genome_index'], {}), '(genome_index)\n', (5702, 5716), False, 'import os\n'), ((6440, 6460), 'shlex.split', 'shlex.split', (['command'], {}), '(command)\n', (6451, 6460), False, 'import shlex\n'), ((901, 961), 'logging.FileHandler', 'logging.FileHandler', (["('%s_index_build.log' % prefix)"], {'mode': '"""w"""'}), "('%s_index_build.log' % prefix, mode='w')\n", (920, 961), False, 'import logging\n'), ((1079, 1113), 'os.mkdir', 'os.mkdir', (['parser_result.output_dir'], {}), '(parser_result.output_dir)\n', (1087, 1113), False, 'import os\n'), ((4451, 4482), 'shlex.split', 'shlex.split', (['tools_dir[aligner]'], {}), '(tools_dir[aligner])\n', (4462, 4482), False, 'import shlex\n'), ((4833, 4861), 'os.path.exists', 'os.path.exists', (['genome_index'], {}), '(genome_index)\n', (4847, 4861), False, 'import os\n'), ((5724, 5752), 'os.path.exists', 'os.path.exists', (['genome_index'], {}), '(genome_index)\n', (5738, 5752), False, 'import os\n'), ((6889, 6916), 'os.path.exists', 'os.path.exists', (['output_file'], {}), '(output_file)\n', (6903, 6916), False, 'import os\n')] |
from tempfile import mkstemp
import cProfile
import pstats
from artemis.general.display import surround_with_header
import os
def what_are_we_waiting_for(command, sort_by ='time', max_len = 20, print_here = True):
"""
An easy way to show what is taking all the time when you run something.
Taken from docs: https://docs.python.org/2/library/profile.html#module-cProfile
:param command: A string python command
:param sort_by: How to sort results. {'time', 'cumtime', 'calls', ...}.
See https://docs.python.org/2/library/profile.html#pstats.Stats.sort_stats
:param max_len: Maximum number of things to show in profile.
:param print_here: Print the results here (instead of returning them).
:return: A pstats.Stats object containing the profiling results.
"""
_, filepath = mkstemp()
try:
cProfile.run(command, filepath)
finally:
p = pstats.Stats(filepath)
os.remove(filepath)
p.strip_dirs()
p.sort_stats(sort_by)
if print_here:
print(surround_with_header('Profile for "{}"'.format(command), width=100, char='='))
p.print_stats(max_len)
print('='*100)
return p
| [
"pstats.Stats",
"cProfile.run",
"tempfile.mkstemp",
"os.remove"
] | [((823, 832), 'tempfile.mkstemp', 'mkstemp', ([], {}), '()\n', (830, 832), False, 'from tempfile import mkstemp\n'), ((850, 881), 'cProfile.run', 'cProfile.run', (['command', 'filepath'], {}), '(command, filepath)\n', (862, 881), False, 'import cProfile\n'), ((908, 930), 'pstats.Stats', 'pstats.Stats', (['filepath'], {}), '(filepath)\n', (920, 930), False, 'import pstats\n'), ((939, 958), 'os.remove', 'os.remove', (['filepath'], {}), '(filepath)\n', (948, 958), False, 'import os\n')] |
import numpy as np
from sim.sim2d import sim_run
# Simulator options.
options = {}
options['FIG_SIZE'] = [8,8]
options['OBSTACLES'] = True
class ModelPredictiveControl:
def __init__(self):
self.horizon = 20
self.dt = 0.2
# Reference or set point the controller will achieve.
self.reference1 = [10, 0, 45*3.14/180]
self.reference2 = None
self.x_obs = 5
self.y_obs = 0.1
def plant_model(self,prev_state, dt, pedal, steering):
x_t = prev_state[0]
y_t = prev_state[1]
psi_t = prev_state[2]
v_t = prev_state[3]
v_t = v_t + dt * pedal - v_t/25.0
x_t = x_t + dt * v_t * np.cos(psi_t)
y_t = y_t + dt * v_t * np.sin(psi_t)
psi_t += dt * v_t *np.tan(steering)/2.5
return [x_t, y_t, psi_t, v_t]
def cost_function(self,u, *args):
state = args[0]
ref = args[1]
cost = 0.0
for k in range(self.horizon):
v_start = state[3]
state = self.plant_model(state, self.dt, u[k*2], u[k*2+1])
x_diff = abs(state[0] - ref[0])
y_diff = abs(state[1] - ref[1])
psi_diff = abs(state[2] - ref[2])
obs_dist_x = abs(state[0] - self.x_obs)
obs_dist_y = abs(state[1] - self.y_obs)
obs_dist = np.sqrt(obs_dist_x**2 + obs_dist_y**2)
cost += np.sqrt(x_diff**2+y_diff**2 + psi_diff**2 + (state[3] - v_start)**2)
cost += 1/obs_dist**2*10
speed_kph = state[3]*3.6
if speed_kph > 10.0:
cost += speed_kph * 100
return cost
sim_run(options, ModelPredictiveControl)
| [
"numpy.sqrt",
"numpy.tan",
"sim.sim2d.sim_run",
"numpy.cos",
"numpy.sin"
] | [((1671, 1711), 'sim.sim2d.sim_run', 'sim_run', (['options', 'ModelPredictiveControl'], {}), '(options, ModelPredictiveControl)\n', (1678, 1711), False, 'from sim.sim2d import sim_run\n'), ((1370, 1412), 'numpy.sqrt', 'np.sqrt', (['(obs_dist_x ** 2 + obs_dist_y ** 2)'], {}), '(obs_dist_x ** 2 + obs_dist_y ** 2)\n', (1377, 1412), True, 'import numpy as np\n'), ((1431, 1509), 'numpy.sqrt', 'np.sqrt', (['(x_diff ** 2 + y_diff ** 2 + psi_diff ** 2 + (state[3] - v_start) ** 2)'], {}), '(x_diff ** 2 + y_diff ** 2 + psi_diff ** 2 + (state[3] - v_start) ** 2)\n', (1438, 1509), True, 'import numpy as np\n'), ((690, 703), 'numpy.cos', 'np.cos', (['psi_t'], {}), '(psi_t)\n', (696, 703), True, 'import numpy as np\n'), ((735, 748), 'numpy.sin', 'np.sin', (['psi_t'], {}), '(psi_t)\n', (741, 748), True, 'import numpy as np\n'), ((777, 793), 'numpy.tan', 'np.tan', (['steering'], {}), '(steering)\n', (783, 793), True, 'import numpy as np\n')] |
import matplotlib.pyplot as plt
import csv
import pandas as pd
desired_width = 320
pd.set_option('display.width', desired_width)
pd.set_option('display.max_columns', 10)
# Task 1 - Open and read a csv file
def openCSV():
csvfile = open("./csv/oscar_age_female.csv", newline='')
data = csv.reader(csvfile, delimiter=",")
for row in data:
print(row)
# openCSV()
# Task 2 - Open a csv file with pandas module
def openWithPandas():
csvfile = pd.read_csv("./csv/oscar_age_female.csv", index_col="Index", )
print(csvfile.head(20))
print(csvfile.tail(5))
# openWithPandas()
# Task 3 - Open specific columns
def openColumns():
csvfile = pd.read_csv("./csv/oscar_age_female.csv",
usecols=["Age", "Name"])
print(csvfile.head(10))
summ = csvfile["Age"].sum()
rows = len(csvfile["Age"])
avg = summ / rows
print(summ)
print("The average age of the winners is", round(avg, 2), "years.")
# openColumns()
# Task 4 - Find oldest winner
def oldestWinner():
csvfile = pd.read_csv("./csv/oscar_age_female.csv",
usecols=["Age", "Name"])
max_age = max(csvfile["Age"])
myRow = csvfile[csvfile["Age"] == max_age]
print("The oldest winner is " +
str(myRow.iloc[0, 0]) + " years old and she is" + str(myRow.iloc[0, 1]).replace("\"", ""))
# oldestWinner()
# Task 5 - Find year of Fargo win
def fargoWin():
csvfile = pd.read_csv("./csv/oscar_age_female.csv", usecols=[
"Year", "Movie"], skipinitialspace=True)
myRow = csvfile[csvfile["Movie"] == "Fargo"]
print(str(myRow.iloc[0, 1]) + " won the Oscar in " + str(myRow.iloc[0, 0]))
# fargoWin()
# Task 6 - Data of one row
def dataRow(row):
csvfile = pd.read_csv("./csv/oscar_age_female.csv")
myRow = csvfile.iloc[row]
print(myRow)
print(myRow["Movie"])
# dataRow(5)
# Task 7 - Plotting data
def plotData():
csvfile = pd.read_csv("./csv/oscar_age_female.csv",
usecols=["Year", "Age"])
# csvfile.plot(x="Year", y="Age", kind='scatter')
plt.title("Year-Age graph")
plt.scatter(csvfile["Year"], csvfile["Age"])
plt.show()
# plotData()
# Task 8 - Plotting data 2
def plotData2():
csvfile = pd.read_csv("./csv/oscar_age_female.csv", usecols=["Age"])
piePlot = csvfile.value_counts()
print(piePlot)
repetation = csvfile["Age"].nunique()
separate = [0.1 for i in range(repetation)]
piePlot.plot(kind='pie', explode=separate)
plt.title("Rate of winners by age")
plt.show()
# plotData2()
# Task 9 - Number of p+% letters
def commonFreq(p):
csvfile = pd.read_csv("./csv/letter_frequency.csv", skipinitialspace=True)
number = 0
for index, row in csvfile.iterrows():
if row["Percentage"] >= p:
number += 1
print(row["Letter"], row["Percentage"])
print(str(number) + " numbers have " + str(p) + "+% frequency.")
# commonFreq(4)
# Task 10 - Frequency of vowels
def vowelFreq():
csvfile = pd.read_csv("./csv/letter_frequency.csv", skipinitialspace=True)
vowels = ["A", "E", "I", "O", "U", "a", "e", "i", "o", "u"]
vowel = 0
for index, row in csvfile.iterrows():
if row["Letter"] in vowels:
vowel += row["Percentage"]
print(str(round(vowel, 2)) + "% is the frequency of vowels.")
# vowelFreq()
# Task 11 - Secret Santa
def secretSanta():
csvfile = pd.read_csv("./csv/secret_santa.csv", skipinitialspace=True)
new_csv = csvfile.sample(frac=1)
print(new_csv)
new_csv.to_csv("secret_santa_final.csv")
new_csvfile = pd.read_csv("secret_santa_final.csv", skipinitialspace=True)
for i in range(len(new_csvfile["Name"])):
if i != len(new_csvfile["Name"])-1:
print(new_csvfile["Name"][i],
"will give the present to", new_csvfile["Name"][i+1])
else:
print(new_csvfile["Name"][i],
"will give the present to", new_csvfile["Name"][0])
secretSanta()
| [
"pandas.read_csv",
"pandas.set_option",
"matplotlib.pyplot.scatter",
"matplotlib.pyplot.title",
"csv.reader",
"matplotlib.pyplot.show"
] | [((84, 129), 'pandas.set_option', 'pd.set_option', (['"""display.width"""', 'desired_width'], {}), "('display.width', desired_width)\n", (97, 129), True, 'import pandas as pd\n'), ((130, 170), 'pandas.set_option', 'pd.set_option', (['"""display.max_columns"""', '(10)'], {}), "('display.max_columns', 10)\n", (143, 170), True, 'import pandas as pd\n'), ((297, 331), 'csv.reader', 'csv.reader', (['csvfile'], {'delimiter': '""","""'}), "(csvfile, delimiter=',')\n", (307, 331), False, 'import csv\n'), ((470, 530), 'pandas.read_csv', 'pd.read_csv', (['"""./csv/oscar_age_female.csv"""'], {'index_col': '"""Index"""'}), "('./csv/oscar_age_female.csv', index_col='Index')\n", (481, 530), True, 'import pandas as pd\n'), ((677, 743), 'pandas.read_csv', 'pd.read_csv', (['"""./csv/oscar_age_female.csv"""'], {'usecols': "['Age', 'Name']"}), "('./csv/oscar_age_female.csv', usecols=['Age', 'Name'])\n", (688, 743), True, 'import pandas as pd\n'), ((1053, 1119), 'pandas.read_csv', 'pd.read_csv', (['"""./csv/oscar_age_female.csv"""'], {'usecols': "['Age', 'Name']"}), "('./csv/oscar_age_female.csv', usecols=['Age', 'Name'])\n", (1064, 1119), True, 'import pandas as pd\n'), ((1449, 1544), 'pandas.read_csv', 'pd.read_csv', (['"""./csv/oscar_age_female.csv"""'], {'usecols': "['Year', 'Movie']", 'skipinitialspace': '(True)'}), "('./csv/oscar_age_female.csv', usecols=['Year', 'Movie'],\n skipinitialspace=True)\n", (1460, 1544), True, 'import pandas as pd\n'), ((1771, 1812), 'pandas.read_csv', 'pd.read_csv', (['"""./csv/oscar_age_female.csv"""'], {}), "('./csv/oscar_age_female.csv')\n", (1782, 1812), True, 'import pandas as pd\n'), ((1960, 2026), 'pandas.read_csv', 'pd.read_csv', (['"""./csv/oscar_age_female.csv"""'], {'usecols': "['Year', 'Age']"}), "('./csv/oscar_age_female.csv', usecols=['Year', 'Age'])\n", (1971, 2026), True, 'import pandas as pd\n'), ((2111, 2138), 'matplotlib.pyplot.title', 'plt.title', (['"""Year-Age graph"""'], {}), "('Year-Age graph')\n", (2120, 2138), True, 'import matplotlib.pyplot as plt\n'), ((2143, 2187), 'matplotlib.pyplot.scatter', 'plt.scatter', (["csvfile['Year']", "csvfile['Age']"], {}), "(csvfile['Year'], csvfile['Age'])\n", (2154, 2187), True, 'import matplotlib.pyplot as plt\n'), ((2192, 2202), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2200, 2202), True, 'import matplotlib.pyplot as plt\n'), ((2278, 2336), 'pandas.read_csv', 'pd.read_csv', (['"""./csv/oscar_age_female.csv"""'], {'usecols': "['Age']"}), "('./csv/oscar_age_female.csv', usecols=['Age'])\n", (2289, 2336), True, 'import pandas as pd\n'), ((2534, 2569), 'matplotlib.pyplot.title', 'plt.title', (['"""Rate of winners by age"""'], {}), "('Rate of winners by age')\n", (2543, 2569), True, 'import matplotlib.pyplot as plt\n'), ((2574, 2584), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2582, 2584), True, 'import matplotlib.pyplot as plt\n'), ((2669, 2733), 'pandas.read_csv', 'pd.read_csv', (['"""./csv/letter_frequency.csv"""'], {'skipinitialspace': '(True)'}), "('./csv/letter_frequency.csv', skipinitialspace=True)\n", (2680, 2733), True, 'import pandas as pd\n'), ((3050, 3114), 'pandas.read_csv', 'pd.read_csv', (['"""./csv/letter_frequency.csv"""'], {'skipinitialspace': '(True)'}), "('./csv/letter_frequency.csv', skipinitialspace=True)\n", (3061, 3114), True, 'import pandas as pd\n'), ((3452, 3512), 'pandas.read_csv', 'pd.read_csv', (['"""./csv/secret_santa.csv"""'], {'skipinitialspace': '(True)'}), "('./csv/secret_santa.csv', skipinitialspace=True)\n", (3463, 3512), True, 'import pandas as pd\n'), ((3632, 3692), 'pandas.read_csv', 'pd.read_csv', (['"""secret_santa_final.csv"""'], {'skipinitialspace': '(True)'}), "('secret_santa_final.csv', skipinitialspace=True)\n", (3643, 3692), True, 'import pandas as pd\n')] |
"""This module does the argument and config parsing, and contains the main
function (that is called when calling pep8radius from shell)."""
from __future__ import print_function
import os
import sys
try:
from configparser import ConfigParser as SafeConfigParser, NoSectionError
except ImportError: # py2, pragma: no cover
from ConfigParser import SafeConfigParser, NoSectionError
from pep8radius.radius import Radius, RadiusFromDiff
from pep8radius.shell import CalledProcessError # with 2.6 compat
__version__ = version = '0.9.2'
DEFAULT_IGNORE = 'E24'
DEFAULT_INDENT_SIZE = 4
if sys.platform == 'win32': # pragma: no cover
DEFAULT_CONFIG = os.path.expanduser(r'~\.pep8')
else:
DEFAULT_CONFIG = os.path.join(os.getenv('XDG_CONFIG_HOME') or
os.path.expanduser('~/.config'), 'pep8')
PROJECT_CONFIG = ('setup.cfg', 'tox.ini', '.pep8')
def main(args=None, vc=None, cwd=None, apply_config=False):
"""PEP8 clean only the parts of the files touched since the last commit, a
previous commit or branch."""
import signal
try: # pragma: no cover
# Exit on broken pipe.
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
except AttributeError: # pragma: no cover
# SIGPIPE is not available on Windows.
pass
try:
if args is None:
args = []
try:
# Note: argparse on py 2.6 you can't pass a set
# TODO neater solution for this!
args_set = set(args)
except TypeError:
args_set = args # args is a Namespace
if '--version' in args_set or getattr(args_set, 'version', 0):
print(version)
return 0
if '--list-fixes' in args_set or getattr(args_set, 'list_fixes', 0):
from autopep8 import supported_fixes
for code, description in sorted(supported_fixes()):
print('{code} - {description}'.format(
code=code, description=description))
return 0
try:
try:
args = parse_args(args, apply_config=apply_config)
except TypeError:
pass # args is already a Namespace (testing)
if args.from_diff: # pragma: no cover
r = Radius.from_diff(args.from_diff.read(),
options=args, cwd=cwd)
else:
r = Radius(rev=args.rev, options=args, vc=vc, cwd=cwd)
except NotImplementedError as e: # pragma: no cover
print(e)
return 1
except CalledProcessError as c: # pragma: no cover
# cut off usage and exit
output = c.output.splitlines()[0]
print(output)
return c.returncode
r.fix()
return 0
except KeyboardInterrupt: # pragma: no cover
return 1
def create_parser():
"""Create the parser for the pep8radius CLI."""
from argparse import ArgumentParser, FileType
description = ("PEP8 clean only the parts of the files which you have "
"touched since the last commit, a previous commit or "
"(the merge-base of) a branch.")
epilog = ("Run before you commit, against a previous commit or "
"branch before merging.")
parser = ArgumentParser(description=description,
epilog=epilog,
prog='pep8radius')
parser.add_argument('rev',
help='commit or name of branch to compare against',
nargs='?')
parser.add_argument('--version',
help='print version number and exit',
action='store_true')
parser.add_argument('-d', '--diff', action='store_true', dest='diff',
help='print the diff of fixed source vs original')
parser.add_argument('-i', '--in-place', action='store_true',
help="make the fixes in place; modify the files")
parser.add_argument('--no-color', action='store_true',
help='do not print diffs in color '
'(default is to use color)')
parser.add_argument('-v', '--verbose', action='count', dest='verbose',
default=0,
help='print verbose messages; '
'multiple -v result in more verbose messages '
'(one less -v is passed to autopep8)')
parser.add_argument('--from-diff', type=FileType('r'), metavar='DIFF',
help="Experimental: rather than calling out to version"
" control, just pass in a diff; "
"the modified lines will be fixed")
ap = parser.add_argument_group('pep8', 'Pep8 options to pass to autopep8.')
ap.add_argument('-p', '--pep8-passes', metavar='n',
default=-1, type=int,
help='maximum number of additional pep8 passes '
'(default: infinite)')
ap.add_argument('-a', '--aggressive', action='count', default=0,
help='enable non-whitespace changes; '
'multiple -a result in more aggressive changes')
ap.add_argument('--experimental', action='store_true',
help='enable experimental fixes')
ap.add_argument('--exclude', metavar='globs',
help='exclude file/directory names that match these '
'comma-separated globs')
ap.add_argument('--list-fixes', action='store_true',
help='list codes for fixes and exit; '
'used by --ignore and --select')
ap.add_argument('--ignore', metavar='errors', default='',
help='do not fix these errors/warnings '
'(default: {0})'.format(DEFAULT_IGNORE))
ap.add_argument('--select', metavar='errors', default='',
help='fix only these errors/warnings (e.g. E4,W)')
ap.add_argument('--max-line-length', metavar='n', default=79, type=int,
help='set maximum allowed line length '
'(default: %(default)s)')
ap.add_argument('--indent-size', default=DEFAULT_INDENT_SIZE,
type=int, metavar='n',
help='number of spaces per indent level '
'(default %(default)s)')
df = parser.add_argument_group('docformatter',
'Fix docstrings for PEP257.')
df.add_argument('-f', '--docformatter', action='store_true',
help='Use docformatter')
df.add_argument('--no-blank', dest='post_description_blank',
action='store_false',
help='Do not add blank line after description')
df.add_argument('--pre-summary-newline',
action='store_true',
help='add a newline before the summary of a '
'multi-line docstring')
df.add_argument('--force-wrap', action='store_true',
help='force descriptions to be wrapped even if it may '
'result in a mess')
cg = parser.add_argument_group('config',
'Change default options based on global '
'or local (project) config files.')
cg.add_argument('--global-config',
default=DEFAULT_CONFIG,
metavar='filename',
help='path to global pep8 config file; ' +
" if this file does not exist then this is ignored" +
'(default: %s)' % DEFAULT_CONFIG)
cg.add_argument('--ignore-local-config', action='store_true',
help="don't look for and apply local config files; "
'if not passed, defaults are updated with any '
"config files in the project's root directory")
yp = parser.add_argument_group('yapf',
'Options for yapf, alternative to autopep8. '
'Currently any other options are ignored.')
yp.add_argument('-y', '--yapf', action='store_true',
help='Use yapf rather than autopep8. '
'This ignores other arguments outside of this group.')
yp.add_argument('--style', metavar='', default='pep8',
help='style either pep8, google, name of file with style'
'settings, or a dict')
return parser
def parse_args(arguments=None, root=None, apply_config=False):
"""Parse the arguments from the CLI.
If apply_config then we first look up and apply configs using
apply_config_defaults.
"""
if arguments is None:
arguments = []
parser = create_parser()
args = parser.parse_args(arguments)
if apply_config:
parser = apply_config_defaults(parser, args, root=root)
args = parser.parse_args(arguments)
# sanity check args (from autopep8)
if args.max_line_length <= 0: # pragma: no cover
parser.error('--max-line-length must be greater than 0')
if args.select:
args.select = _split_comma_separated(args.select)
if args.ignore:
args.ignore = _split_comma_separated(args.ignore)
elif not args.select and args.aggressive:
# Enable everything by default if aggressive.
args.select = ['E', 'W']
else:
args.ignore = _split_comma_separated(DEFAULT_IGNORE)
if args.exclude:
args.exclude = _split_comma_separated(args.exclude)
else:
args.exclude = []
return args
def apply_config_defaults(parser, args, root):
"""Update the parser's defaults from either the arguments' config_arg or
the config files given in config_files(root)."""
if root is None:
try:
from pep8radius.vcs import VersionControl
root = VersionControl.which().root_dir()
except NotImplementedError:
pass # don't update local, could be using as module
config = SafeConfigParser()
config.read(args.global_config)
if root and not args.ignore_local_config:
config.read(local_config_files(root))
try:
defaults = dict((k.lstrip('-').replace('-', '_'), v)
for k, v in config.items("pep8"))
parser.set_defaults(**defaults)
except NoSectionError:
pass # just do nothing, potentially this could raise ?
return parser
def local_config_files(root):
"""Returns a list of (possible) config files in the project root
directory."""
return [os.path.join(root, c) for c in PROJECT_CONFIG]
def _split_comma_separated(string):
"""Return a set of strings."""
return set(filter(None, string.split(',')))
def _main(args=None, vc=None, cwd=None): # pragma: no cover
if args is None:
args = sys.argv[1:]
return sys.exit(main(args=args, vc=vc, cwd=cwd, apply_config=True))
if __name__ == "__main__": # pragma: no cover
_main()
| [
"signal.signal",
"argparse.FileType",
"argparse.ArgumentParser",
"ConfigParser.SafeConfigParser",
"os.getenv",
"os.path.join",
"pep8radius.vcs.VersionControl.which",
"autopep8.supported_fixes",
"pep8radius.radius.Radius",
"os.path.expanduser"
] | [((665, 695), 'os.path.expanduser', 'os.path.expanduser', (['"""~\\\\.pep8"""'], {}), "('~\\\\.pep8')\n", (683, 695), False, 'import os\n'), ((3343, 3416), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': 'description', 'epilog': 'epilog', 'prog': '"""pep8radius"""'}), "(description=description, epilog=epilog, prog='pep8radius')\n", (3357, 3416), False, 'from argparse import ArgumentParser, FileType\n'), ((10161, 10179), 'ConfigParser.SafeConfigParser', 'SafeConfigParser', ([], {}), '()\n', (10177, 10179), False, 'from ConfigParser import SafeConfigParser, NoSectionError\n'), ((1156, 1201), 'signal.signal', 'signal.signal', (['signal.SIGPIPE', 'signal.SIG_DFL'], {}), '(signal.SIGPIPE, signal.SIG_DFL)\n', (1169, 1201), False, 'import signal\n'), ((10717, 10738), 'os.path.join', 'os.path.join', (['root', 'c'], {}), '(root, c)\n', (10729, 10738), False, 'import os\n'), ((736, 764), 'os.getenv', 'os.getenv', (['"""XDG_CONFIG_HOME"""'], {}), "('XDG_CONFIG_HOME')\n", (745, 764), False, 'import os\n'), ((802, 833), 'os.path.expanduser', 'os.path.expanduser', (['"""~/.config"""'], {}), "('~/.config')\n", (820, 833), False, 'import os\n'), ((4571, 4584), 'argparse.FileType', 'FileType', (['"""r"""'], {}), "('r')\n", (4579, 4584), False, 'from argparse import ArgumentParser, FileType\n'), ((1884, 1901), 'autopep8.supported_fixes', 'supported_fixes', ([], {}), '()\n', (1899, 1901), False, 'from autopep8 import supported_fixes\n'), ((2436, 2486), 'pep8radius.radius.Radius', 'Radius', ([], {'rev': 'args.rev', 'options': 'args', 'vc': 'vc', 'cwd': 'cwd'}), '(rev=args.rev, options=args, vc=vc, cwd=cwd)\n', (2442, 2486), False, 'from pep8radius.radius import Radius, RadiusFromDiff\n'), ((10012, 10034), 'pep8radius.vcs.VersionControl.which', 'VersionControl.which', ([], {}), '()\n', (10032, 10034), False, 'from pep8radius.vcs import VersionControl\n')] |
# -*- coding: utf-8 -*-
'''
Created on 2 mars 2017
@author: Jacky
'''
import logging
from discord.ext import commands
from ArkDiscordBot.apps import bot
from ArkDiscordBot.discord.utils import parse_context
logger = logging.getLogger('BOT.{}'.format(__name__))
class Commons:
@commands.command(pass_context=True, name='test2', brief='Test the bot.')
async def test(self, ctx):
"""
Test command. Count the number of messages posted by the user on the current channel.
"""
bot, channel, messages, author = parse_context(ctx)
messages
counter = 0
tmp = await bot.send_message(channel, 'Calculating messages...')
async for log in bot.logs_from(channel, limit=10000):
if log.author == author:
counter += 1
messages = '{}, You have posted {} messages on this channel.'.format(author, counter)
logger.debug(messages)
await bot.edit_message(tmp, messages)
bot.add_cog(Commons())
| [
"ArkDiscordBot.apps.bot.logs_from",
"ArkDiscordBot.apps.bot.send_message",
"ArkDiscordBot.discord.utils.parse_context",
"discord.ext.commands.command",
"ArkDiscordBot.apps.bot.edit_message"
] | [((293, 365), 'discord.ext.commands.command', 'commands.command', ([], {'pass_context': '(True)', 'name': '"""test2"""', 'brief': '"""Test the bot."""'}), "(pass_context=True, name='test2', brief='Test the bot.')\n", (309, 365), False, 'from discord.ext import commands\n'), ((565, 583), 'ArkDiscordBot.discord.utils.parse_context', 'parse_context', (['ctx'], {}), '(ctx)\n', (578, 583), False, 'from ArkDiscordBot.discord.utils import parse_context\n'), ((737, 772), 'ArkDiscordBot.apps.bot.logs_from', 'bot.logs_from', (['channel'], {'limit': '(10000)'}), '(channel, limit=10000)\n', (750, 772), False, 'from ArkDiscordBot.apps import bot\n'), ((650, 702), 'ArkDiscordBot.apps.bot.send_message', 'bot.send_message', (['channel', '"""Calculating messages..."""'], {}), "(channel, 'Calculating messages...')\n", (666, 702), False, 'from ArkDiscordBot.apps import bot\n'), ((988, 1019), 'ArkDiscordBot.apps.bot.edit_message', 'bot.edit_message', (['tmp', 'messages'], {}), '(tmp, messages)\n', (1004, 1019), False, 'from ArkDiscordBot.apps import bot\n')] |
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Global context for knowledge bank operations."""
import threading
from typing import Text
from research.carls import dynamic_embedding_config_pb2 as de_config_pb2
# A map from variable name to DynamicEmbeddingConfig.
_knowledge_bank_collections = {}
_lock = threading.Lock()
def add_to_collection(name: Text, config: de_config_pb2.DynamicEmbeddingConfig):
"""Adds given (name, config) pair to global collectionss.
Args:
name: A string denoting the variable name.
config: An instance of DynamicEmbeddingConfig.
Raises:
TypeError: Invalid input.
ValueError: Name is empty, or a different config is added for an existing
variable.
"""
if not name:
raise ValueError("Empty name.")
if not isinstance(config, de_config_pb2.DynamicEmbeddingConfig):
raise TypeError("Config is not an instance of DynamicEmbeddingConfig.")
if name in _knowledge_bank_collections.keys():
existing_config = _knowledge_bank_collections[name]
if config.SerializeToString() != existing_config.SerializeToString():
raise ValueError(
"Adding a new config for the same var name is not allowed, existing:"
" %r, new: %r." % (existing_config, config))
with _lock:
_knowledge_bank_collections[name] = de_config_pb2.DynamicEmbeddingConfig()
_knowledge_bank_collections[name].CopyFrom(config)
def get_all_collection():
"""Returns a list of all (name, config) pairs."""
with _lock:
return [(key, value) for key, value in _knowledge_bank_collections.items()]
def clear_all_collection():
"""Clears existing all (name, config) pairs."""
with _lock:
_knowledge_bank_collections.clear()
| [
"threading.Lock",
"research.carls.dynamic_embedding_config_pb2.DynamicEmbeddingConfig"
] | [((839, 855), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (853, 855), False, 'import threading\n'), ((1832, 1870), 'research.carls.dynamic_embedding_config_pb2.DynamicEmbeddingConfig', 'de_config_pb2.DynamicEmbeddingConfig', ([], {}), '()\n', (1868, 1870), True, 'from research.carls import dynamic_embedding_config_pb2 as de_config_pb2\n')] |
#!/usr/bin/python
# Copyright: nbr23
# License: MIT
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
---
module: fios_dhcp_static_lease
short_description: Manage Fios Verizon Router DHCP static leases
description:
- Manage Fios Verizon Router DHCP static leases
author: "nbr23 <<EMAIL>>"
requirements:
options:
name:
required: true
description:
- hostname of the DHCP static lease target
ip:
required: true
description:
- IP Address of the DHCP static lease target
mac:
required: true
description:
- MAC Address of the DHCP static lease target
router_password:
required: true
description:
- Fios router admin password
router_ip:
required: false
default: '192.168.1.1'
description:
- Fios router ip
router_port:
required: false
default: 443
description:
- Fios router https listening port
state:
required: true
default: present
choices:
- present
- absent
description:
- Whether the static DHCP lease should exist in the Fios router
'''
EXAMPLES = '''
- name: Delete static lease
nbr23.fiosrouter.fios_dhcp_static_lease:
name: myhost
ip: 192.168.1.5
mac: 00:00:00:00:00:00
router_ip: 192.168.1.1
router_password: '{{ <PASSWORD> }}'
state: absent
- name: Set / Update static lease
nbr23.fiosrouter.fios_dhcp_static_lease:
name: myhost
ip: 192.168.1.5
mac: 00:00:00:00:00:00
router_ip: 192.168.1.1
router_password: '{{ <PASSWORD> }}'
state: present
'''
RETURN = '''
'''
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.nbr23.fiosrouter.plugins.module_utils.verizon_fios import RouterSession
def module_fail(module, session, msg):
session.logout()
module.fail_json(msg=msg)
def main():
module = AnsibleModule(
argument_spec=dict(
router_password=dict(required=True, type='str', no_log=True),
router_ip=dict(required=False, default='192.168.1.1', type='str'),
router_port=dict(type='int', default=443),
state=dict(default='present', choices=['present', 'absent']),
name=dict(required=True, type='str'),
mac=dict(required=True, type='str'),
ip=dict(required=True, type='str')
),
supports_check_mode=True
)
result = dict(
changed=False
)
present = module.params['state'] == 'present'
session = RouterSession(module.params['router_ip'], module.params['router_port'])
log_res = session.login(module.params['router_password'])
if log_res is not None and 'error' in log_res:
if log_res.get('error') == 2:
module.fail_json(msg='API Login error: too many sessions open')
else:
module.fail_json(msg='API Login error: Incorrect fios credentials')
current = session.get_dhcp_client(module.params['name'], module.params['ip'], module.params['mac'])
if len(current) > 1:
module_fail(module, session, 'Conflicting entries found for specified ip, mac, name')
current = None if len(current) < 1 else current[0]
if current is not None:
if not present and current['staticIp']:
if not module.check_mode:
result['result'] = session.del_dhcp_client(current['id'])
result['changed'] = True
elif present and not current['staticIp'] \
and current['name'] == module.params['name'] \
and current['ipAddress'] == module.params['ip'] \
and current['mac'] == module.params['mac']:
if not module.check_mode:
result['result'] = session.post_dhcp_client(current['name'], current['ipAddress'], current['mac'], True)
result['changed'] = True
elif present and (current['name'] != module.params['name']
or current['ipAddress'] != module.params['ip']
or current['mac'] != module.params['mac']):
if not module.check_mode:
session.del_dhcp_client(current['id'])
result['result'] = session.post_dhcp_client(module.params['name'], module.params['ip'], module.params['mac'], True)
result['changed'] = True
elif present:
if not module.check_mode:
result['result'] = session.post_dhcp_client(module.params['name'], module.params['ip'], module.params['mac'], True)
result['changed'] = True
session.logout()
module.exit_json(**result)
if __name__ == '__main__':
main()
| [
"ansible_collections.nbr23.fiosrouter.plugins.module_utils.verizon_fios.RouterSession"
] | [((2669, 2740), 'ansible_collections.nbr23.fiosrouter.plugins.module_utils.verizon_fios.RouterSession', 'RouterSession', (["module.params['router_ip']", "module.params['router_port']"], {}), "(module.params['router_ip'], module.params['router_port'])\n", (2682, 2740), False, 'from ansible_collections.nbr23.fiosrouter.plugins.module_utils.verizon_fios import RouterSession\n')] |
# to test the gradient back-propagation
from __future__ import absolute_import, division, print_function
import torch
import pickle
import torch.nn as nn
import torch.nn.functional as F
import matplotlib.pyplot as plt
import numpy as np
import cv2
import torch.utils.data as data
from mesh.multiview_utility import MutualProjectionLoss, MultiviewConsistencyLoss
from dataset.nyu_dataset import create_nyu_dataset
from network.util_modules import PosePriorLoss
from dataset.joint_angle import JointAngleDataset
import matplotlib.pyplot as plt
from mesh.kinematicsTransformation import HandTransformationMat
from mesh.pointTransformation import LinearBlendSkinning, RandScale
from network.constants import Constant
from network.util_modules import HandSynthesizer
constant = Constant()
synt_key_points = [[0,1],
[0,2],
[33,34]]
real_key_points = [[30,31],
[30,32],
[0,1]]
def bone_length(joints, indices):
bone_length = []
for [idx_1, idx_2] in indices:
diff = joints[idx_1] - joints[idx_2]
bone_length.append(diff.view(-1).norm())
bone_length = [bone_length[0]/bone_length[1], bone_length[0]/bone_length[2]]
return bone_length
dataset_dir = 'D:\\data\\nyu_hand_dataset_v2\\npy-64\\test'
nyu_dataset = create_nyu_dataset(dataset_dir)
with open('mesh/model/preprocessed_hand.pkl', 'rb') as f:
mesh = pickle.load(f)
hand_synthsizer = HandSynthesizer(mesh, 64, 16, 1.0, 0.01).cuda()
joint_data = JointAngleDataset()
for _ in range(10):
real_data = nyu_dataset[0]
real_joints = real_data[1][0][constant.real_key_points]
real_joints = real_joints * 64 / 300 + 32
fig = plt.figure()
ax = fig.add_subplot(1, 2, 1)
ax.scatter(real_joints[:,0], real_joints[:,1])
for idx in range(len(real_joints)):
ax.annotate('%d'%idx, (real_joints[idx,0], real_joints[idx,1]))
ax.imshow(real_data[0][0].squeeze())
para = joint_data[0].unsqueeze(dim=0).cuda()
synt_result = hand_synthsizer(para)
dm = synt_result[0].squeeze().detach().cpu().numpy()
synt_joints = synt_result[3].squeeze().detach().cpu().numpy()[constant.synt_key_points]
synt_joints = synt_joints * 64 / 300 + 32
ax = fig.add_subplot(1, 2, 2)
ax.scatter(synt_joints[:,0], synt_joints[:,1])
for idx in range(len(synt_joints)):
ax.annotate('%d'%idx, (synt_joints[idx,0], synt_joints[idx,1]))
ax.imshow(dm)
plt.show() | [
"network.constants.Constant",
"pickle.load",
"dataset.nyu_dataset.create_nyu_dataset",
"matplotlib.pyplot.figure",
"dataset.joint_angle.JointAngleDataset",
"network.util_modules.HandSynthesizer",
"matplotlib.pyplot.show"
] | [((774, 784), 'network.constants.Constant', 'Constant', ([], {}), '()\n', (782, 784), False, 'from network.constants import Constant\n'), ((1315, 1346), 'dataset.nyu_dataset.create_nyu_dataset', 'create_nyu_dataset', (['dataset_dir'], {}), '(dataset_dir)\n', (1333, 1346), False, 'from dataset.nyu_dataset import create_nyu_dataset\n'), ((1511, 1530), 'dataset.joint_angle.JointAngleDataset', 'JointAngleDataset', ([], {}), '()\n', (1528, 1530), False, 'from dataset.joint_angle import JointAngleDataset\n'), ((1417, 1431), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1428, 1431), False, 'import pickle\n'), ((1702, 1714), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1712, 1714), True, 'import matplotlib.pyplot as plt\n'), ((2460, 2470), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2468, 2470), True, 'import matplotlib.pyplot as plt\n'), ((1450, 1490), 'network.util_modules.HandSynthesizer', 'HandSynthesizer', (['mesh', '(64)', '(16)', '(1.0)', '(0.01)'], {}), '(mesh, 64, 16, 1.0, 0.01)\n', (1465, 1490), False, 'from network.util_modules import HandSynthesizer\n')] |
import io
import json
from typing import Iterable
from zipfile import ZipFile
import boto3
import pandas as pd
from tmdprimer.stop_classification.data_loaders import DataLoader
from tmdprimer.stop_classification.datasets.dvdt_dataset import DVDTFile, DVDTDataset
from tmdprimer.stop_classification.datasets.sensorlog_dataset import SensorLogFile, SensorLogDataset
class DVDTS3DataLoader(DataLoader):
def load_dataset(self, bucket, path: str, labels_to_load: Iterable = None) -> DVDTDataset:
s3client = boto3.client("s3")
files = []
for entry in s3client.list_objects(Bucket=bucket, Prefix=path)["Contents"]:
file_name = entry["Key"]
if file_name.endswith("high.zip"):
data_file = self.load_file(bucket=bucket, file_name=file_name, s3client=s3client)
if labels_to_load is None or data_file.transport_mode in labels_to_load:
files.append(data_file)
return DVDTDataset(files)
def load_file(self, bucket, file_name, s3client=None) -> DVDTFile:
if s3client is None:
s3client = boto3.client("s3")
print("loading", file_name)
response = s3client.get_object(Bucket=bucket, Key=file_name)
with io.BytesIO(response["Body"].read()) as datafile:
# rewind the file
datafile.seek(0)
with ZipFile(datafile, mode="r") as zip_file:
for file in zip_file.namelist():
if file.endswith(".json") and "/" not in file:
with zip_file.open(file) as accel_json:
return DVDTFile.from_json(json.loads(accel_json.read()))
class SensorLogS3DataLoader(DataLoader):
def load_dataset(self, bucket, path: str, labels_to_load: Iterable = None) -> SensorLogDataset:
s3client = boto3.client("s3")
files = []
for entry in s3client.list_objects(Bucket=bucket, Prefix=path)["Contents"]:
file_name = entry["Key"]
if file_name.endswith(".csv"):
data_file = self.load_file(bucket=bucket, file_name=file_name, s3client=s3client)
files.append(data_file)
return SensorLogDataset(files)
def load_file(self, bucket, file_name, s3client=None) -> SensorLogFile:
if s3client is None:
s3client = boto3.client("s3")
print("loading", file_name)
response = s3client.get_object(Bucket=bucket, Key=file_name)
with io.BytesIO(response["Body"].read()) as datafile:
return SensorLogFile.from_csv(pd.read_csv(datafile, sep=";"))
| [
"tmdprimer.stop_classification.datasets.sensorlog_dataset.SensorLogDataset",
"boto3.client",
"zipfile.ZipFile",
"pandas.read_csv",
"tmdprimer.stop_classification.datasets.dvdt_dataset.DVDTDataset"
] | [((518, 536), 'boto3.client', 'boto3.client', (['"""s3"""'], {}), "('s3')\n", (530, 536), False, 'import boto3\n'), ((970, 988), 'tmdprimer.stop_classification.datasets.dvdt_dataset.DVDTDataset', 'DVDTDataset', (['files'], {}), '(files)\n', (981, 988), False, 'from tmdprimer.stop_classification.datasets.dvdt_dataset import DVDTFile, DVDTDataset\n'), ((1843, 1861), 'boto3.client', 'boto3.client', (['"""s3"""'], {}), "('s3')\n", (1855, 1861), False, 'import boto3\n'), ((2198, 2221), 'tmdprimer.stop_classification.datasets.sensorlog_dataset.SensorLogDataset', 'SensorLogDataset', (['files'], {}), '(files)\n', (2214, 2221), False, 'from tmdprimer.stop_classification.datasets.sensorlog_dataset import SensorLogFile, SensorLogDataset\n'), ((1113, 1131), 'boto3.client', 'boto3.client', (['"""s3"""'], {}), "('s3')\n", (1125, 1131), False, 'import boto3\n'), ((2351, 2369), 'boto3.client', 'boto3.client', (['"""s3"""'], {}), "('s3')\n", (2363, 2369), False, 'import boto3\n'), ((1375, 1402), 'zipfile.ZipFile', 'ZipFile', (['datafile'], {'mode': '"""r"""'}), "(datafile, mode='r')\n", (1382, 1402), False, 'from zipfile import ZipFile\n'), ((2579, 2609), 'pandas.read_csv', 'pd.read_csv', (['datafile'], {'sep': '""";"""'}), "(datafile, sep=';')\n", (2590, 2609), True, 'import pandas as pd\n')] |
import pytest
from django.db import transaction
from django.utils import timezone
from ..models import Message, FOIRequest, Esic, PublicBody
@pytest.fixture
def public_body(esic):
return PublicBody(
name='example',
esic=esic
)
@pytest.fixture
def esic():
return Esic(
url='http://example.com'
)
@pytest.fixture
def foi_request():
return FOIRequest()
@pytest.fixture
def message(foi_request):
return Message(
foi_request=foi_request
)
@pytest.fixture
def foi_request_with_sent_user_message(foi_request, message_from_user):
with transaction.atomic():
message_from_user.approve()
message_from_user.foi_request = foi_request
message_from_user.sent_at = timezone.now()
save_message(message_from_user)
foi_request.refresh_from_db()
return foi_request
@pytest.fixture
def message_from_user(public_body):
return Message(
sender=None,
receiver=public_body
)
@pytest.fixture
def message_from_government(public_body):
return Message(
sender=public_body,
sent_at=timezone.now(),
receiver=None
)
def save_message(message):
# FIXME: Ideally a simple message.save() would save everything, but I
# couldn't find out how to do so in Django. Not yet.
with transaction.atomic():
if message.sender:
save_public_body(message.sender)
message.sender_id = message.sender.id
if message.receiver:
save_public_body(message.receiver)
message.receiver_id = message.receiver.id
message.foi_request.save()
message.foi_request_id = message.foi_request.id
message.save()
def save_public_body(public_body):
with transaction.atomic():
if public_body.esic:
public_body.esic.save()
public_body.esic_id = public_body.esic.id
public_body.save()
return public_body
| [
"django.utils.timezone.now",
"django.db.transaction.atomic"
] | [((602, 622), 'django.db.transaction.atomic', 'transaction.atomic', ([], {}), '()\n', (620, 622), False, 'from django.db import transaction\n'), ((748, 762), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (760, 762), False, 'from django.utils import timezone\n'), ((1327, 1347), 'django.db.transaction.atomic', 'transaction.atomic', ([], {}), '()\n', (1345, 1347), False, 'from django.db import transaction\n'), ((1761, 1781), 'django.db.transaction.atomic', 'transaction.atomic', ([], {}), '()\n', (1779, 1781), False, 'from django.db import transaction\n'), ((1114, 1128), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (1126, 1128), False, 'from django.utils import timezone\n')] |
import numpy as np
import random
import cv2
import os
import json
from csv_utils import load_csv
import rect
import mask
def plot_one_box(x, img, color=None, label=None, line_thickness=None):
"""
description: Plots one bounding box on image img,
this function comes from YoLov5 project.
arguments:
x(list): a box likes [x1,y1,x2,y2]
img(np array): a opencv image object in BGR format
color(tuple): color to draw rectangle, such as (0,255,0)
label(str): the class name
line_thickness(int): the thickness of the line
return:
no return
"""
tl = (
line_thickness or round(0.002 * (img.shape[0] + img.shape[1]) / 2) + 1
) # line/font thickness
color = color or [random.randint(0, 255) for _ in range(3)]
c1, c2 = (int(x[0]), int(x[1])), (int(x[2]), int(x[3]))
cv2.rectangle(img, c1, c2, color, thickness=tl, lineType=cv2.LINE_AA)
if label:
tf = max(tl - 1, 1) # font thickness
t_size = cv2.getTextSize(label, 0, fontScale=tl / 3, thickness=tf)[0]
c2 = c1[0] + t_size[0], c1[1] - t_size[1] - 3
cv2.rectangle(img, c1, c2, color, -1, cv2.LINE_AA) # filled
cv2.putText(
img,
label,
(c1[0], c1[1] - 2),
0,
tl / 3,
[225, 255, 255],
thickness=tf,
lineType=cv2.LINE_AA,
)
def plot_one_polygon(pts, img, color=None, label=None, line_thickness=None):
"""
description: Plots one bounding box on image img,
this function comes from YoLov5 project.
arguments:
pts(np array): a numpy array of size [1,1,2]
img(np array): a opencv image object in BGR format
color(tuple): color to draw rectangle, such as (0,255,0)
label(str): the class name
line_thickness(int): the thickness of the line
return:
no return
"""
tl = (
line_thickness or round(0.002 * (img.shape[0] + img.shape[1]) / 2) + 1
) # line/font thickness
color = color or [random.randint(0, 255) for _ in range(3)]
cv2.polylines(img, [pts], isClosed=True, color=color, thickness=tl)
if label:
c1 = (int(np.min(pts[:,:,0])), int(np.min(pts[:,:,1])))
tf = max(tl - 1, 1) # font thickness
t_size = cv2.getTextSize(label, 0, fontScale=tl / 3, thickness=tf)[0]
c2 = c1[0] + t_size[0], c1[1] - t_size[1] - 3
cv2.rectangle(img, c1, c2, color, -1, cv2.LINE_AA) # filled
cv2.putText(
img,
label,
(c1[0], c1[1] - 2),
0,
tl / 3,
[225, 255, 255],
thickness=tf,
lineType=cv2.LINE_AA,
)
if __name__ == '__main__':
import argparse
ap = argparse.ArgumentParser()
ap.add_argument('--path_imgs', required=True, help='the path to the input image folder')
ap.add_argument('--path_out', required=True, help='the path to the output folder')
ap.add_argument('--path_csv', default='labels.csv', help='[optinal] the path of a csv file that corresponds to path_imgs, default="labels.csv" in path_imgs')
ap.add_argument('--class_map_json', default=None, help='[optinal] the path of a class map json file')
args = vars(ap.parse_args())
path_imgs = args['path_imgs']
path_csv = args['path_csv'] if args['path_csv']!='labels.csv' else os.path.join(path_imgs, args['path_csv'])
output_path = args['path_out']
if args['class_map_json']:
with open(args['class_map_json']) as f:
class_map = json.load(f)
print(f'loaded class map: {class_map}')
else:
class_map = None
if not os.path.isfile(path_csv):
raise Exception(f'Not found file: {path_csv}')
assert path_imgs!=output_path, 'output path must be different with input path'
if not os.path.isdir(output_path):
os.makedirs(output_path)
fname_to_shape, class_map = load_csv(path_csv, path_imgs, class_map)
min_id = min(class_map.values())
colors = [(0,0,255),(255,0,0),(0,255,0),(102,51,153),(255,140,0),(105,105,105),(127,25,27),(9,200,100)]
color_map = {}
for cls in class_map:
i = class_map[cls]
if min_id != 0:
i -= 1
if i < len(colors):
color_map[cls] = colors[i]
else:
color_map[cls] = tuple([random.randint(0,255) for _ in range(3)])
for im_name in fname_to_shape:
print(f'[FILE] {im_name}')
shapes = fname_to_shape[im_name]
im = cv2.imread(shapes[0].fullpath)
for shape in shapes:
if isinstance(shape, rect.Rect):
box = shape.up_left + shape.bottom_right
plot_one_box(box, im, label=shape.category, color=color_map[shape.category])
elif isinstance(shape, mask.Mask):
pts = np.array([[x,y] for x,y in zip(shape.X,shape.Y)])
pts = pts.reshape((-1, 1, 2))
plot_one_polygon(pts, im, label=shape.category, color=color_map[shape.category])
outname = os.path.join(output_path, im_name)
cv2.imwrite(outname, im)
| [
"cv2.rectangle",
"cv2.imwrite",
"argparse.ArgumentParser",
"os.makedirs",
"cv2.polylines",
"csv_utils.load_csv",
"os.path.join",
"cv2.putText",
"os.path.isfile",
"os.path.isdir",
"numpy.min",
"json.load",
"cv2.getTextSize",
"cv2.imread",
"random.randint"
] | [((882, 951), 'cv2.rectangle', 'cv2.rectangle', (['img', 'c1', 'c2', 'color'], {'thickness': 'tl', 'lineType': 'cv2.LINE_AA'}), '(img, c1, c2, color, thickness=tl, lineType=cv2.LINE_AA)\n', (895, 951), False, 'import cv2\n'), ((2169, 2236), 'cv2.polylines', 'cv2.polylines', (['img', '[pts]'], {'isClosed': '(True)', 'color': 'color', 'thickness': 'tl'}), '(img, [pts], isClosed=True, color=color, thickness=tl)\n', (2182, 2236), False, 'import cv2\n'), ((2843, 2868), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2866, 2868), False, 'import argparse\n'), ((4015, 4055), 'csv_utils.load_csv', 'load_csv', (['path_csv', 'path_imgs', 'class_map'], {}), '(path_csv, path_imgs, class_map)\n', (4023, 4055), False, 'from csv_utils import load_csv\n'), ((1152, 1202), 'cv2.rectangle', 'cv2.rectangle', (['img', 'c1', 'c2', 'color', '(-1)', 'cv2.LINE_AA'], {}), '(img, c1, c2, color, -1, cv2.LINE_AA)\n', (1165, 1202), False, 'import cv2\n'), ((1221, 1332), 'cv2.putText', 'cv2.putText', (['img', 'label', '(c1[0], c1[1] - 2)', '(0)', '(tl / 3)', '[225, 255, 255]'], {'thickness': 'tf', 'lineType': 'cv2.LINE_AA'}), '(img, label, (c1[0], c1[1] - 2), 0, tl / 3, [225, 255, 255],\n thickness=tf, lineType=cv2.LINE_AA)\n', (1232, 1332), False, 'import cv2\n'), ((2501, 2551), 'cv2.rectangle', 'cv2.rectangle', (['img', 'c1', 'c2', 'color', '(-1)', 'cv2.LINE_AA'], {}), '(img, c1, c2, color, -1, cv2.LINE_AA)\n', (2514, 2551), False, 'import cv2\n'), ((2570, 2681), 'cv2.putText', 'cv2.putText', (['img', 'label', '(c1[0], c1[1] - 2)', '(0)', '(tl / 3)', '[225, 255, 255]'], {'thickness': 'tf', 'lineType': 'cv2.LINE_AA'}), '(img, label, (c1[0], c1[1] - 2), 0, tl / 3, [225, 255, 255],\n thickness=tf, lineType=cv2.LINE_AA)\n', (2581, 2681), False, 'import cv2\n'), ((3456, 3497), 'os.path.join', 'os.path.join', (['path_imgs', "args['path_csv']"], {}), "(path_imgs, args['path_csv'])\n", (3468, 3497), False, 'import os\n'), ((3744, 3768), 'os.path.isfile', 'os.path.isfile', (['path_csv'], {}), '(path_csv)\n', (3758, 3768), False, 'import os\n'), ((3921, 3947), 'os.path.isdir', 'os.path.isdir', (['output_path'], {}), '(output_path)\n', (3934, 3947), False, 'import os\n'), ((3957, 3981), 'os.makedirs', 'os.makedirs', (['output_path'], {}), '(output_path)\n', (3968, 3981), False, 'import os\n'), ((4600, 4630), 'cv2.imread', 'cv2.imread', (['shapes[0].fullpath'], {}), '(shapes[0].fullpath)\n', (4610, 4630), False, 'import cv2\n'), ((5135, 5169), 'os.path.join', 'os.path.join', (['output_path', 'im_name'], {}), '(output_path, im_name)\n', (5147, 5169), False, 'import os\n'), ((5178, 5202), 'cv2.imwrite', 'cv2.imwrite', (['outname', 'im'], {}), '(outname, im)\n', (5189, 5202), False, 'import cv2\n'), ((776, 798), 'random.randint', 'random.randint', (['(0)', '(255)'], {}), '(0, 255)\n', (790, 798), False, 'import random\n'), ((1029, 1086), 'cv2.getTextSize', 'cv2.getTextSize', (['label', '(0)'], {'fontScale': '(tl / 3)', 'thickness': 'tf'}), '(label, 0, fontScale=tl / 3, thickness=tf)\n', (1044, 1086), False, 'import cv2\n'), ((2123, 2145), 'random.randint', 'random.randint', (['(0)', '(255)'], {}), '(0, 255)\n', (2137, 2145), False, 'import random\n'), ((2378, 2435), 'cv2.getTextSize', 'cv2.getTextSize', (['label', '(0)'], {'fontScale': '(tl / 3)', 'thickness': 'tf'}), '(label, 0, fontScale=tl / 3, thickness=tf)\n', (2393, 2435), False, 'import cv2\n'), ((3636, 3648), 'json.load', 'json.load', (['f'], {}), '(f)\n', (3645, 3648), False, 'import json\n'), ((2269, 2289), 'numpy.min', 'np.min', (['pts[:, :, 0]'], {}), '(pts[:, :, 0])\n', (2275, 2289), True, 'import numpy as np\n'), ((2294, 2314), 'numpy.min', 'np.min', (['pts[:, :, 1]'], {}), '(pts[:, :, 1])\n', (2300, 2314), True, 'import numpy as np\n'), ((4433, 4455), 'random.randint', 'random.randint', (['(0)', '(255)'], {}), '(0, 255)\n', (4447, 4455), False, 'import random\n')] |
from collections import Counter
from .....core import BaseAnalyzer, Validator, Required
from .....taxonomies import Taxonomy
from .....config.consts import CONFIG_HEADER_FIELDS, CONFIG_TAXONOMY_ID
class TaxonomyGuesserAnalyzer(BaseAnalyzer):
REQUIRES = Validator(
Required(CONFIG_HEADER_FIELDS)
)
def run(self):
if self.config[CONFIG_TAXONOMY_ID] is None:
fields = self.config[CONFIG_HEADER_FIELDS]
c = Counter()
taxonomies = self.context.taxonomies
for field in fields:
taxonomy: Taxonomy
for taxonomy in taxonomies:
for tf in taxonomy.header_mapping.keys():
if tf == field:
c[taxonomy.id] += 1
selected = c.most_common(1)
if len(selected) > 0:
selected = selected[0][0]
self.config[CONFIG_TAXONOMY_ID] = selected
| [
"collections.Counter"
] | [((460, 469), 'collections.Counter', 'Counter', ([], {}), '()\n', (467, 469), False, 'from collections import Counter\n')] |
import csv
"""Set of functions used to import from
csv into the FIDO model. The import is
specified as a dictionary, defining the
model, the name of the primary key and
the list of fields. Recursions are used
to defind foreign keys."""
IMPORT_CSV_MODEL_KEY = "model"
IMPORT_CSV_PK_NAME_KEY = "pk_name"
IMPORT_CSV_PK_KEY = "pk"
IMPORT_CSV_FIELDLIST_KEY = "fieldlist"
IMPORT_CSV_IS_FK = "isforeignkey"
def convert_to_bool_string(s):
"""The csv used for importing, may
have several different values for
a boolean field. This routine
converts them to True or False
"""
true_list = ["y", "yes", "true", "1"]
if s.lower() in true_list:
return True
else:
return False
# build the dict from the header row
# make everything lower case to make
# it case insensitive
def csv_header_to_dict(row):
d = {
k.strip().lower(): v for v, k in enumerate(row)
} # swap key with value in the header row
return d
# build the dict from the header row
# make everything lower case to make
# it case insensitive
def xslx_header_to_dict(row):
d = {}
col = 0
for cell in row:
if cell.value:
d[cell.value.lower()] = col
col += 1
return d
def add_position(d, h):
"""It substitute the header title with
the column number in the dictionary
passed to describe the imported model.
Used recursion, because d can have a
dictionary inside"""
c = {}
for k, v in d.items():
if type(v) is dict:
c[k] = add_position(v, h)
else:
if type(v) is str:
v = v.lower()
if v in h:
c[k] = h[v]
else:
c[k] = v
return c
def get_pk_verbose_name(m):
"""Returns the name of the primary key
of the model passed as argument."""
if m._meta.pk._verbose_name is None:
pkname = m._meta.pk.name
else:
pkname = m._meta.pk._verbose_name
return pkname
# TODO modified to handle two fields to make unique key, for archived tables.
def get_fk(m, pk_value):
"""Read an object to be used as
foreign key in another record.
It return a formatted message
if it finds an error
"""
msg = ""
try:
obj = m.objects.get(pk=pk_value)
except m.DoesNotExist:
msg = f'{get_pk_verbose_name(m)} "{pk_value}" does not exist.\n'
obj = None
except ValueError:
msg = f'{get_pk_verbose_name(m)} "{pk_value}" is the wrong type.\n'
obj = None
return obj, msg
def get_fk_from_field(m, f_name, f_value):
"""Read an object to be used as foreign key in another record.
It return a formatted message if it finds an error
"""
msg = ""
try:
obj = m.objects.get(**{f_name: f_value})
except m.DoesNotExist:
msg = f'{f_name} "{f_value}"'
obj = None
except ValueError:
msg = f'{f_name} "{f_value}": wrong type. \n'
obj = None
return obj, msg
def get_value_from_field(type, row_val):
if type == "BooleanField":
# convert the value to be True or False
return convert_to_bool_string(row_val)
return row_val
def read_csv_from_dict(d, row, year):
m = d[IMPORT_CSV_MODEL_KEY]
if IMPORT_CSV_PK_NAME_KEY in d:
unique_name = d[IMPORT_CSV_PK_NAME_KEY]
else:
unique_name = m._meta.pk.name
pk_header_name = ""
if IMPORT_CSV_PK_KEY in d:
pk_header_name = d[IMPORT_CSV_PK_KEY]
error_msg = ""
# if we are only reading a foreign key
# (we don't want to create it!), get
# the value and return
if IMPORT_CSV_IS_FK in d:
return get_fk_from_field(m, unique_name, row[pk_header_name])
default_list = {}
for k, v in d[IMPORT_CSV_FIELDLIST_KEY].items():
if type(v) is dict:
default_list[k], errormsg = read_csv_from_dict(v, row, year)
else:
default_list[k] = get_value_from_field(
m._meta.get_field(k).get_internal_type(),
row[v].strip()
)
try:
if pk_header_name == "":
obj = m.objects.create(**default_list)
else:
if year:
kwargs = {
unique_name: row[pk_header_name].strip(),
'financial_year_id': year
}
obj, created = m.objects.update_or_create(
**kwargs,
defaults=default_list,
)
else:
obj, created = m.objects.update_or_create(
**{unique_name: row[pk_header_name].strip()},
defaults=default_list,
)
except ValueError:
obj = None
error_msg = "ValueError"
return obj, error_msg
def get_col_from_obj_key(obj_key):
"""Takes the dictionary used to
define the import, and return the
list of the expected headers"""
header_list = []
if IMPORT_CSV_PK_KEY in obj_key:
header_list.append(obj_key[IMPORT_CSV_PK_KEY])
if IMPORT_CSV_IS_FK in obj_key:
header_list.append(obj_key[IMPORT_CSV_IS_FK])
if IMPORT_CSV_FIELDLIST_KEY in obj_key:
for k, v in obj_key[IMPORT_CSV_FIELDLIST_KEY].items():
if type(v) is dict:
header_list = header_list + get_col_from_obj_key(v)
else:
header_list.append(v)
return list(filter(None, [element.lower() for element in header_list]))
def always_true(a, b):
return True
def import_obj(csv_file, obj_key, op=always_true, pos=1, value=1, year=0):
reader = csv.reader(csv_file)
header = csv_header_to_dict(next(reader))
l1 = get_col_from_obj_key(obj_key)
# Before starting to read, check that all the expected columns exists
if not all(elem in header for elem in l1):
msg = f"Missing/wrong headers: expected {l1}, the file has: {header.keys()}."
return False, msg
d = add_position(obj_key, header)
if isinstance(pos, str):
pos = header[pos.lower()]
row_number = 1
for row in reader:
row_number = row_number + 1
if op(row[pos], value):
obj, msg = read_csv_from_dict(d, row, year)
return True, msg
# used for import of lists
# needed to populate tables,
# when the primary key is
# created by the system
def import_list_obj(csv_file, model, fieldname):
reader = csv.reader(csv_file)
next(reader) # skip the header
for row in reader:
obj, created = model.objects.update_or_create(**{fieldname: row[0].strip()})
def import_list_archived_obj(csv_file, model, fieldname, year):
reader = csv.reader(csv_file)
next(reader) # skip the header
for row in reader:
obj, created = model.objects.update_or_create(**{fieldname: row[0].strip()})
class ImportInfo:
"""Use to define the function used to import from the Admin view list"""
# key is the dictionary describing the import
# title is the title to display in the import
# form. If not specified, it is the name of
# the model defined in key h_list is the header
# row as list. Required if key is not defined.
# It is used to validate the file being
# imported my_import_func is the function to
# use for import if key is not defined
# filter[] is a dictionary defining filtering
# to use for selecting the row to import
# extra_func is a clean up function to run
# when the import is completed successfully
def __init__(
self,
key={},
title="",
h_list=[],
special_import_func=None,
filter=[],
extra_func=None,
):
self.key = key
self.special_func = special_import_func
if bool(key):
self.header_list = get_col_from_obj_key(key)
else:
self.header_list = h_list
if title == "":
if bool(key):
self.form_title = key[IMPORT_CSV_MODEL_KEY]._meta.verbose_name.title()
else:
self.form_title = title
if filter:
self.op = filter[0]
self.header = filter[1]
self.value = filter[2]
else:
self.op = None
# extra_func is used to perform clean
# up after the import, like setting
# empty references to a valid value
self.extra_func = extra_func
def import_func(self, c):
if bool(self.key):
if self.op:
success, message = import_obj(
c, self.key, self.op, self.header, self.value
)
else:
success, message = import_obj(c, self.key)
else:
success, message = self.special_func(c)
if success and self.extra_func:
self.extra_func()
return success, message
def my_check_headers(self, t):
reader = csv.reader(t)
header = csv_header_to_dict(next(reader))
l1 = [x.lower() for x in [x.lower() for x in self.header_list]]
# Before starting to read, check that all the expected columns exists
if not all(elem in header for elem in l1):
msg = f"Missing/wrong headers: expected {l1}, " \
f"the file has: {header.keys()}."
return False, msg
return True, ""
def get_field_name(obj_key, prefix):
"""Takes the dictionary used to
define the import, and return
the list of fields to be used
for exporting"""
field_list = []
model = obj_key[IMPORT_CSV_MODEL_KEY]
if IMPORT_CSV_PK_KEY in obj_key:
field_list.append(prefix + model._meta.pk.name)
if IMPORT_CSV_FIELDLIST_KEY in obj_key:
for k, v in obj_key[IMPORT_CSV_FIELDLIST_KEY].items():
if type(v) is dict:
field_list = field_list + get_field_name(v, prefix + k + "__")
else:
field_list.append(prefix + k)
return field_list
| [
"csv.reader"
] | [((5631, 5651), 'csv.reader', 'csv.reader', (['csv_file'], {}), '(csv_file)\n', (5641, 5651), False, 'import csv\n'), ((6430, 6450), 'csv.reader', 'csv.reader', (['csv_file'], {}), '(csv_file)\n', (6440, 6450), False, 'import csv\n'), ((6674, 6694), 'csv.reader', 'csv.reader', (['csv_file'], {}), '(csv_file)\n', (6684, 6694), False, 'import csv\n'), ((8922, 8935), 'csv.reader', 'csv.reader', (['t'], {}), '(t)\n', (8932, 8935), False, 'import csv\n')] |
import os
# RESOURCES_PATH = 'resources/'
# UPDATE_FREQUENCY = 60 * 60 * 6 # deprecated. Controlled in Jenkins
MARIADB_CONFIG = {
"user": os.environ["MARIADB_USR"],
"psw": os.environ["MARIADB_PSW"],
"host": "cubus.cxxwabvgrdub.eu-central-1.rds.amazonaws.com",
"port": 3306,
"db": "input",
}
## GOOGLE
SCOPE = [u'https://www.googleapis.com/auth/adwords', ]
GOOGLE_CLIENT_CUSTOMER_ID = os.getenv('GOOGLE_ADS_LOGIN_CUSTOMER_ID')
YOUSEE_CUSTOMER_ID = '298-878-5433'
LAG_TIME = 7 | [
"os.getenv"
] | [((408, 449), 'os.getenv', 'os.getenv', (['"""GOOGLE_ADS_LOGIN_CUSTOMER_ID"""'], {}), "('GOOGLE_ADS_LOGIN_CUSTOMER_ID')\n", (417, 449), False, 'import os\n')] |
from googletrans import Translator
translator = Translator()
print(translator.translate(' ' + '안녕하세요.', dest='pt')) | [
"googletrans.Translator"
] | [((48, 60), 'googletrans.Translator', 'Translator', ([], {}), '()\n', (58, 60), False, 'from googletrans import Translator\n')] |
import requests
import zipfile
import os
import errno
import nltk
from nltk.tokenize import sent_tokenize
ALICE_URL = 'https://ota.bodleian.ox.ac.uk/repository/xmlui/bitstream/handle/20.500.12024/1476/alice28-1476.txt'
WIZARD_URL = 'https://ota.bodleian.ox.ac.uk/repository/xmlui/bitstream/handle/20.500.12024/1740/wizoz10-1740.txt'
def download_text(url, localfolder='texts'):
localfile = os.path.split(url)[-1]
try:
os.mkdir(f'{localfolder}')
except OSError as e:
if e.errno != errno.EEXIST:
raise
try:
r = requests.get(url, allow_redirects=True)
open(os.path.join(localfolder, localfile), 'wb').write(r.content)
except Exception as e:
print(f'Error downloading file: {str(e)}')
def sentence_tokenize(source, quote_char='\\', sep_char=',',
include_header=True, include_source=True,
extensions=('txt'), **kwargs):
nltk.download('punkt')
# If source is a folder, goes through all files inside it
# that match the desired extensions ('txt' by default)
if os.path.isdir(source):
filenames = [f for f in os.listdir(source)
if os.path.isfile(os.path.join(source, f)) and
os.path.splitext(f)[1][1:] in extensions]
elif isinstance(source, str):
filenames = [source]
# If there is a configuration file, builds a dictionary with
# the corresponding start and end lines of each text file
config_file = os.path.join(source, 'lines.cfg')
config = {}
if os.path.exists(config_file):
with open(config_file, 'r') as f:
rows = f.readlines()
for r in rows[1:]:
fname, start, end = r.strip().split(',')
config.update({fname: (int(start), int(end))})
new_fnames = []
# For each file of text
for fname in filenames:
# If there's a start and end line for that file, use it
try:
start, end = config[fname]
except KeyError:
start = None
end = None
# Opens the file, slices the configures lines (if any)
# cleans line breaks and uses the sentence tokenizer
with open(os.path.join(source, fname), 'r') as f:
contents = (''.join(f.readlines()[slice(start, end, None)])
.replace('\n', ' ').replace('\r', ''))
corpus = sent_tokenize(contents, **kwargs)
# Builds a CSV file containing tokenized sentences
base = os.path.splitext(fname)[0]
new_fname = f'{base}.sent.csv'
new_fname = os.path.join(source, new_fname)
with open(new_fname, 'w') as f:
# Header of the file
if include_header:
if include_source:
f.write('sentence,source\n')
else:
f.write('sentence\n')
# Writes one line for each sentence
for sentence in corpus:
if include_source:
f.write(f'{quote_char}{sentence}{quote_char}{sep_char}{fname}\n')
else:
f.write(f'{quote_char}{sentence}{quote_char}\n')
new_fnames.append(new_fname)
# Returns list of the newly generated CSV files
return sorted(new_fnames) | [
"os.path.exists",
"os.listdir",
"nltk.download",
"os.path.join",
"os.path.splitext",
"requests.get",
"os.path.split",
"os.path.isdir",
"os.mkdir",
"nltk.tokenize.sent_tokenize"
] | [((963, 985), 'nltk.download', 'nltk.download', (['"""punkt"""'], {}), "('punkt')\n", (976, 985), False, 'import nltk\n'), ((1114, 1135), 'os.path.isdir', 'os.path.isdir', (['source'], {}), '(source)\n', (1127, 1135), False, 'import os\n'), ((1535, 1568), 'os.path.join', 'os.path.join', (['source', '"""lines.cfg"""'], {}), "(source, 'lines.cfg')\n", (1547, 1568), False, 'import os\n'), ((1592, 1619), 'os.path.exists', 'os.path.exists', (['config_file'], {}), '(config_file)\n', (1606, 1619), False, 'import os\n'), ((412, 430), 'os.path.split', 'os.path.split', (['url'], {}), '(url)\n', (425, 430), False, 'import os\n'), ((452, 478), 'os.mkdir', 'os.mkdir', (['f"""{localfolder}"""'], {}), "(f'{localfolder}')\n", (460, 478), False, 'import os\n'), ((579, 618), 'requests.get', 'requests.get', (['url'], {'allow_redirects': '(True)'}), '(url, allow_redirects=True)\n', (591, 618), False, 'import requests\n'), ((2456, 2489), 'nltk.tokenize.sent_tokenize', 'sent_tokenize', (['contents'], {}), '(contents, **kwargs)\n', (2469, 2489), False, 'from nltk.tokenize import sent_tokenize\n'), ((2659, 2690), 'os.path.join', 'os.path.join', (['source', 'new_fname'], {}), '(source, new_fname)\n', (2671, 2690), False, 'import os\n'), ((2573, 2596), 'os.path.splitext', 'os.path.splitext', (['fname'], {}), '(fname)\n', (2589, 2596), False, 'import os\n'), ((1169, 1187), 'os.listdir', 'os.listdir', (['source'], {}), '(source)\n', (1179, 1187), False, 'import os\n'), ((2264, 2291), 'os.path.join', 'os.path.join', (['source', 'fname'], {}), '(source, fname)\n', (2276, 2291), False, 'import os\n'), ((632, 668), 'os.path.join', 'os.path.join', (['localfolder', 'localfile'], {}), '(localfolder, localfile)\n', (644, 668), False, 'import os\n'), ((1227, 1250), 'os.path.join', 'os.path.join', (['source', 'f'], {}), '(source, f)\n', (1239, 1250), False, 'import os\n'), ((1280, 1299), 'os.path.splitext', 'os.path.splitext', (['f'], {}), '(f)\n', (1296, 1299), False, 'import os\n')] |
import sys
import click
from tabulate import tabulate
import textwrap
from . import admin
from ...helper import is_admin
from ...session import Session, is_legacy_server
from ...versioning import get_naming, apply_version_aware_fields
from ..pretty import print_error, print_fail
# Lets say formattable options are:
format_options = {
'name': ('Session Name',
lambda api_session: get_naming(api_session.api_version, 'name_gql_field')),
'type': ('Type',
lambda api_session: get_naming(api_session.api_version, 'type_gql_field')),
'task_id': ('Task ID', 'id'),
'kernel_id': ('Kernel ID', 'id'),
'status': ('Status', 'status'),
'status_info': ('Status Info', 'status_info'),
'created_at': ('Created At', 'created_at'),
'last_updated': ('Last updated', 'status_changed'),
'result': ('Result', 'result'),
'owner': ('Owner', 'access_key'),
'image': ('Image', 'image'),
'tag': ('Tag', 'tag'),
'occupied_slots': ('Occupied Resource', 'occupied_slots'),
'used_memory': ('Used Memory (MiB)', 'mem_cur_bytes'),
'max_used_memory': ('Max Used Memory (MiB)', 'mem_max_bytes'),
'cpu_using': ('CPU Using (%)', 'cpu_using'),
}
@admin.command()
@click.option('-s', '--status', default=None,
type=click.Choice([
'PENDING',
'PREPARING', 'BUILDING', 'RUNNING', 'RESTARTING',
'RESIZING', 'SUSPENDED', 'TERMINATING',
'TERMINATED', 'ERROR', 'CANCELLED',
'ALL', # special case
]),
help='Filter by the given status')
@click.option('--access-key', type=str, default=None,
help='Get sessions for a specific access key '
'(only works if you are a super-admin)')
@click.option('--name-only', is_flag=True, help='Display session names only.')
@click.option('--show-tid', is_flag=True, help='Display task/kernel IDs.')
@click.option('--dead', is_flag=True,
help='Filter only dead sessions. Ignores --status option.')
@click.option('--running', is_flag=True,
help='Filter only scheduled and running sessions. Ignores --status option.')
@click.option('-a', '--all', is_flag=True,
help='Display all sessions matching the condition using pagination.')
@click.option('--detail', is_flag=True, help='Show more details using more columns.')
@click.option('-f', '--format', default=None, help='Display only specified fields.')
@click.option('--plain', is_flag=True,
help='Display the session list without decorative line drawings and the header.')
def sessions(status, access_key, name_only, show_tid, dead, running, all, detail, plain, format):
'''
List and manage compute sessions.
'''
fields = []
try:
with Session() as session:
name_key = get_naming(session.api_version, 'name_gql_field')
fields.append(format_options['name'])
if is_admin(session) and not is_legacy_server():
fields.append(format_options['owner'])
except Exception as e:
print_error(e)
sys.exit(1)
if name_only:
pass
elif format is not None:
options = format.split(',')
for opt in options:
if opt not in format_options:
print_fail(f'There is no such format option: {opt}')
sys.exit(1)
fields = [
format_options[opt] for opt in options
]
else:
fields.extend([
format_options['image'],
format_options['type'],
format_options['status'],
format_options['status_info'],
format_options['last_updated'],
format_options['result'],
])
if show_tid:
fields.insert(
2,
format_options['id'])
if detail:
fields.extend([
format_options['tag'],
format_options['created_at'],
format_options['occupied_slots'],
format_options['used_memory'],
format_options['max_used_memory'],
format_options['cpu_using'],
])
no_match_name = None
if status is None:
status = 'PENDING,PREPARING,PULLING,RUNNING,RESTARTING,TERMINATING,RESIZING,SUSPENDED,ERROR'
no_match_name = 'active'
if running:
status = 'PREPARING,PULLING,RUNNING'
no_match_name = 'running'
if dead:
status = 'CANCELLED,TERMINATED'
no_match_name = 'dead'
if status == 'ALL':
status = ('PENDING,PREPARING,PULLING,RUNNING,RESTARTING,TERMINATING,RESIZING,SUSPENDED,ERROR,'
'CANCELLED,TERMINATED')
no_match_name = 'in any status'
if no_match_name is None:
no_match_name = status.lower()
def execute_paginated_query(limit, offset):
nonlocal fields
q = '''
query($limit:Int!, $offset:Int!, $ak:String, $status:String) {
compute_session_list(
limit:$limit, offset:$offset, access_key:$ak, status:$status) {
items { $fields }
total_count
}
}'''
q = textwrap.dedent(q).strip()
q = q.replace('$fields', ' '.join(item[1] for item in fields))
v = {
'limit': limit,
'offset': offset,
'status': status,
'ak': access_key,
}
try:
resp = session.Admin.query(q, v)
except Exception as e:
print_error(e)
sys.exit(1)
return resp['compute_session_list']
def round_mem(items):
for item in items:
if 'mem_cur_bytes' in item:
item['mem_cur_bytes'] = round(item['mem_cur_bytes'] / 2 ** 20, 1)
if 'mem_max_bytes' in item:
item['mem_max_bytes'] = round(item['mem_max_bytes'] / 2 ** 20, 1)
return items
def _generate_paginated_results(interval):
nonlocal fields
offset = 0
is_first = True
total_count = -1
while True:
limit = (interval if is_first else
min(interval, total_count - offset))
try:
result = execute_paginated_query(limit, offset)
except Exception as e:
print_error(e)
sys.exit(1)
offset += interval
total_count = result['total_count']
items = result['items']
items = round_mem(items)
if name_only:
yield '\n'.join([item[name_key] for item in items]) + '\n'
else:
table = tabulate(
[item.values() for item in items],
headers=[] if plain else (item[0] for item in fields),
tablefmt="plain" if plain else None
)
if not is_first:
table_rows = table.split('\n')
table = '\n'.join(table_rows[2:])
yield table + '\n'
if is_first:
is_first = False
if not offset < total_count:
break
with Session() as session:
fields = apply_version_aware_fields(session, fields)
paginating_interval = 10
try:
if all:
click.echo_via_pager(_generate_paginated_results(paginating_interval))
else:
result = execute_paginated_query(paginating_interval, offset=0)
total_count = result['total_count']
if total_count == 0:
print('There are no compute sessions currently {0}.'
.format(no_match_name))
return
items = result['items']
items = round_mem(items)
if name_only:
for item in items:
print(item[name_key])
else:
print(tabulate([item.values() for item in items],
headers=[] if plain else (item[0] for item in fields),
tablefmt="plain" if plain else None))
if total_count > paginating_interval:
print("More sessions can be displayed by using -a/--all option.")
except Exception as e:
print_error(e)
sys.exit(1)
@admin.command()
@click.argument('name', metavar='NAME')
def session(name):
'''
Show detailed information for a running compute session.
SESSID: Session id or its alias.
'''
fields = [
('Session Name', lambda api_session: get_naming(api_session.api_version, 'name_gql_field')),
('Session Type', lambda api_session: get_naming(api_session.api_version, 'type_gql_field')),
('Role', 'role'),
('Image', 'image'),
('Tag', 'tag'),
('Created At', 'created_at'),
('Terminated At', 'terminated_at'),
('Agent', 'agent'),
('Status', 'status'),
('Status Info', 'status_info'),
('Occupied Resources', 'occupied_slots'),
('CPU Used (ms)', 'cpu_used'),
('Used Memory (MiB)', 'mem_cur_bytes'),
('Max Used Memory (MiB)', 'mem_max_bytes'),
('Number of Queries', 'num_queries'),
('Network RX Bytes', 'net_rx_bytes'),
('Network TX Bytes', 'net_tx_bytes'),
('IO Read Bytes', 'io_read_bytes'),
('IO Write Bytes', 'io_write_bytes'),
('IO Max Scratch Size', 'io_max_scratch_size'),
('IO Current Scratch Size', 'io_cur_scratch_size'),
('CPU Using (%)', 'cpu_using'),
]
if is_legacy_server():
del fields[4] # tag
with Session() as session:
fields = apply_version_aware_fields(session, fields)
name_key = get_naming(session, 'name_gql_field')
q = 'query($name: String!) {' \
f' compute_session({name_key}: $name) {{ $fields }}' \
'}'
q = q.replace('$fields', ' '.join(item[1] for item in fields))
name_key = get_naming(session.api_version, 'name_gql_field')
v = {name_key: name}
try:
resp = session.Admin.query(q, v)
except Exception as e:
print_error(e)
sys.exit(1)
if resp['compute_session'][name_key] is None:
print('There is no such running compute session.')
return
print('Session detail:\n---------------')
for i, value in enumerate(resp['compute_session'].values()):
if fields[i][1] in ['mem_cur_bytes', 'mem_max_bytes']:
value = round(value / 2 ** 20, 1)
print(fields[i][0] + ': ' + str(value))
| [
"click.Choice",
"click.argument",
"textwrap.dedent",
"click.option",
"sys.exit"
] | [((1752, 1899), 'click.option', 'click.option', (['"""--access-key"""'], {'type': 'str', 'default': 'None', 'help': '"""Get sessions for a specific access key (only works if you are a super-admin)"""'}), "('--access-key', type=str, default=None, help=\n 'Get sessions for a specific access key (only works if you are a super-admin)'\n )\n", (1764, 1899), False, 'import click\n'), ((1927, 2004), 'click.option', 'click.option', (['"""--name-only"""'], {'is_flag': '(True)', 'help': '"""Display session names only."""'}), "('--name-only', is_flag=True, help='Display session names only.')\n", (1939, 2004), False, 'import click\n'), ((2006, 2079), 'click.option', 'click.option', (['"""--show-tid"""'], {'is_flag': '(True)', 'help': '"""Display task/kernel IDs."""'}), "('--show-tid', is_flag=True, help='Display task/kernel IDs.')\n", (2018, 2079), False, 'import click\n'), ((2081, 2182), 'click.option', 'click.option', (['"""--dead"""'], {'is_flag': '(True)', 'help': '"""Filter only dead sessions. Ignores --status option."""'}), "('--dead', is_flag=True, help=\n 'Filter only dead sessions. Ignores --status option.')\n", (2093, 2182), False, 'import click\n'), ((2193, 2314), 'click.option', 'click.option', (['"""--running"""'], {'is_flag': '(True)', 'help': '"""Filter only scheduled and running sessions. Ignores --status option."""'}), "('--running', is_flag=True, help=\n 'Filter only scheduled and running sessions. Ignores --status option.')\n", (2205, 2314), False, 'import click\n'), ((2325, 2441), 'click.option', 'click.option', (['"""-a"""', '"""--all"""'], {'is_flag': '(True)', 'help': '"""Display all sessions matching the condition using pagination."""'}), "('-a', '--all', is_flag=True, help=\n 'Display all sessions matching the condition using pagination.')\n", (2337, 2441), False, 'import click\n'), ((2452, 2541), 'click.option', 'click.option', (['"""--detail"""'], {'is_flag': '(True)', 'help': '"""Show more details using more columns."""'}), "('--detail', is_flag=True, help=\n 'Show more details using more columns.')\n", (2464, 2541), False, 'import click\n'), ((2538, 2626), 'click.option', 'click.option', (['"""-f"""', '"""--format"""'], {'default': 'None', 'help': '"""Display only specified fields."""'}), "('-f', '--format', default=None, help=\n 'Display only specified fields.')\n", (2550, 2626), False, 'import click\n'), ((2624, 2753), 'click.option', 'click.option', (['"""--plain"""'], {'is_flag': '(True)', 'help': '"""Display the session list without decorative line drawings and the header."""'}), "('--plain', is_flag=True, help=\n 'Display the session list without decorative line drawings and the header.'\n )\n", (2636, 2753), False, 'import click\n'), ((8614, 8652), 'click.argument', 'click.argument', (['"""name"""'], {'metavar': '"""NAME"""'}), "('name', metavar='NAME')\n", (8628, 8652), False, 'import click\n'), ((1419, 1585), 'click.Choice', 'click.Choice', (["['PENDING', 'PREPARING', 'BUILDING', 'RUNNING', 'RESTARTING', 'RESIZING',\n 'SUSPENDED', 'TERMINATING', 'TERMINATED', 'ERROR', 'CANCELLED', 'ALL']"], {}), "(['PENDING', 'PREPARING', 'BUILDING', 'RUNNING', 'RESTARTING',\n 'RESIZING', 'SUSPENDED', 'TERMINATING', 'TERMINATED', 'ERROR',\n 'CANCELLED', 'ALL'])\n", (1431, 1585), False, 'import click\n'), ((3267, 3278), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3275, 3278), False, 'import sys\n'), ((5349, 5367), 'textwrap.dedent', 'textwrap.dedent', (['q'], {}), '(q)\n', (5364, 5367), False, 'import textwrap\n'), ((5717, 5728), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5725, 5728), False, 'import sys\n'), ((8582, 8593), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (8590, 8593), False, 'import sys\n'), ((10467, 10478), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (10475, 10478), False, 'import sys\n'), ((3530, 3541), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3538, 3541), False, 'import sys\n'), ((6519, 6530), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (6527, 6530), False, 'import sys\n')] |
from django.urls import path
from dplhooks.deploys import views
urlpatterns = [
path('p/deploy', views.DeployView.as_view())
]
| [
"dplhooks.deploys.views.DeployView.as_view"
] | [((103, 129), 'dplhooks.deploys.views.DeployView.as_view', 'views.DeployView.as_view', ([], {}), '()\n', (127, 129), False, 'from dplhooks.deploys import views\n')] |
"""
Classes to handle logging
(c) 2015 Massachusetts Institute of Technology
"""
# Native
import time
import logging
logger = logging.getLogger(__name__)
# LO-PHI
import lophi.globals as G
class LogFile:
"""
This class will handle all of the writing to files for LO-PHI. Simply
initialize with a desired filename and output type, and then just feed
output data structures.
"""
def __init__(self,filename,output_type="tsv",reprint_header=False):
try:
# Globals
self.log_init = False # Remember if we need to print the headers
self.reprint_header = reprint_header
self.outfd = None
self.separator = "\t"
# Open output
G.ensure_dir_exists(filename)
self.outfd = open(filename,"a+")
# Set the proper delimiter
if output_type == "tsv":
self.separator = "\t"
if output_type == "csv":
self.separator = ","
if output_type == "space":
self.separator = " "
self.reprint_header = reprint_header
except:
logger.error("ERROR: Could not open %s for logging output."%filename)
import traceback
logger.error(traceback.format_exc())
def append(self,output):
"""
Given output from a Volatility module, will write to the logfile in
the desired format
"""
if self.log_init is False or self.reprint_header:
# Insert our new headers in reverse order
line = self.separator.join(output['HEADER'])
self.outfd.write(line)
self.log_init = True
self.outfd.write("\n")
for row in output['DATA']:
# Convert everything to a string
row = [str(i) for i in row]
line = self.separator.join(row)
self.outfd.write(line)
self.outfd.write("\n")
self.outfd.flush()
def close(self):
"""
Simply close our file descriptor
"""
self.outfd.close()
class LogDataStruct(LogFile):
"""
This class will ingest DataStruct objects and log them appropriately
"""
def append(self,data_struct,memory_address=None, extra=None):
"""
Write our data structure to a log file on disk
"""
# Write the header if needed
if self.log_init is False or self.reprint_header:
keys = [str(x) for x in data_struct.keys()]
line_list = ["Timestamp"]+keys
# Was extra Data Provided?
if extra is not None:
line_list += extra.keys()
# Was a memory address provided?
if memory_address is not None:
line_list += ["Memory Address"]
line = self.separator.join(line_list)
self.outfd.write(line)
self.log_init = True
self.outfd.write("\n")
# Write the actual data
values = [str(x) for x in data_struct.values()]
line_list = [str(time.time())]+values
# Was extra data provided?
if extra is not None:
for k in extra.keys():
line_list.append(extra[k])
# Was a memory address provided?
if memory_address is not None:
line_list += ["0x%016X"%memory_address]
line = self.separator.join(line_list)
self.outfd.write(line+"\n")
self.outfd.flush()
| [
"logging.getLogger",
"traceback.format_exc",
"lophi.globals.ensure_dir_exists",
"time.time"
] | [((135, 162), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (152, 162), False, 'import logging\n'), ((758, 787), 'lophi.globals.ensure_dir_exists', 'G.ensure_dir_exists', (['filename'], {}), '(filename)\n', (777, 787), True, 'import lophi.globals as G\n'), ((1298, 1320), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (1318, 1320), False, 'import traceback\n'), ((3260, 3271), 'time.time', 'time.time', ([], {}), '()\n', (3269, 3271), False, 'import time\n')] |
import smbus2, time
address = 0x63 #Atlas PH Probe standard I2C address is 99 decimal.
class atlasPH(object):
def __init__(self):
self.bus = smbus2.SMBus(1)
def write(self, command):
self.bus.write_byte(address, ord(command))
def readBlock(self, numBytes):
return self.bus.read_i2c_block_data(address, 0, 5)
# val:str, -> float
def extractFloatFromString(self, val):
try:
return float(val)
except ValueError:
print("Atlas PH probe value error: " + block)
return 0.00
def extractPH(self, block):
if block[0] == 1:
block.pop(0)
return self.extractFloatFromString("".join(map(chr, block)))
else:
print("Atlas PH probe status code error: " + block[0])
return 0.00
# -> float
def getPH(self):
self.write('R')
time.sleep(0.9)
block = self.readBlock(8)
return self.extractPH(block)
def test(self):
'Self test of the object'
print('*** Test Atlas PH ***')
print('PH: %.2f' %self.getPH())
if __name__=="__main__":
t=atlasPH()
t.test()
| [
"smbus2.SMBus",
"time.sleep"
] | [((173, 188), 'smbus2.SMBus', 'smbus2.SMBus', (['(1)'], {}), '(1)\n', (185, 188), False, 'import smbus2, time\n'), ((917, 932), 'time.sleep', 'time.sleep', (['(0.9)'], {}), '(0.9)\n', (927, 932), False, 'import smbus2, time\n')] |
""" This is a modified version of simple.py script bundled with Read Until API"""
import argparse
import logging
import sys
import traceback
import time
import numpy
import read_until
import cffi
import os
import h5py
import glob
import concurrent.futures
import dyss
def _get_parser():
parser = argparse.ArgumentParser('Dyss -- a tiny program for selective sequencing on MinION')
parser.add_argument('--port', type=int, default=8000,
help='MinKNOW server port.')
parser.add_argument('--analysis_delay', type=int, default=1,
help='Period to wait before starting analysis.')
parser.add_argument('--run_time', type=int, default=900,
help='Period to run the analysis.')
parser.add_argument('--min_chunk_size', type=int, default=3500,
help='Minimum read chunk size to receive.')
parser.add_argument('--control_group', default=2, type=int,
help='Inverse proportion of channels in control group.')
parser.add_argument('--batch_size', default=30, type=int,
help='Inverse proportion of channels in control group.')
parser.add_argument(
'--debug', help="Print all debugging information",
action="store_const", dest="log_level",
const=logging.DEBUG, default=logging.WARNING,
)
parser.add_argument(
'--verbose', help="Print verbose messaging.",
action="store_const", dest="log_level",
const=logging.INFO,
)
parser.add_argument('--num_scouts', default=14, type=int,
help='number of scouts. Default is 14')
parser.add_argument('--num_packs', default=3, type=int,
help='number of packs. Default is 3')
parser.add_argument('--reference', required=True,
help='reference seqence to be amplified. Currently reference size is bounded by 100Kbp.')
parser.add_argument('--model', required=True,
help='model file.')
parser.add_argument('--param', required=True,
help='training data.')
parser.add_argument('--power', default=9, type=int,
help='chunking power. Integer type. Default is 9.')
parser.add_argument('--referencesize', default=400000, type=int,
help='Reference size(Event num = 2*bp). Default is 400000')
return parser
def signal_based_analysis(client, classifier, batch_size=30, delay=1, throttle=0.5, control_group=16):
"""A tiny analysis function based on raw signal comparison.
:param client: an instance of a `ReadUntilClient` object.
:param batch_size: number of reads to pull from `client` at a time.
:param delay: number of seconds to wait before starting analysis.
:param throttle: minimum interval between requests to `client`.
:param dyss: an instance of Dyss object constructed by libdyss.construct_dyss().
:param debug: flag whether or not output every query into stdout.
"""
logger = logging.getLogger('Dyss')
logger.warn(
'Initialization of Dyss classification'
'When debug and verbose flag is on, it generates literaly all inputs.'
'If you want to apply this application to real sequencing experiment,'
'it is highly recommended to turn these flags off.'
)
# we sleep a little simply to ensure the client has started initialised
logger.info('Starting analysis of reads in {}s.'.format(delay))
time.sleep(delay)
while client.is_running:
# If thre are too many queries, reject them all.
if client.queue_length > 300:
read_batch = client.get_read_chunks(batch_size = client.queue_length, last = True)
for (channel, read) in read_batch:
read.raw_data = read_until.NullRaw
if channel % control_group != 0:
client.unblock_read(channel, read.number)
client.stop_receiving_read(channel, read.number)
t0 = time.time()
# Then, running usual classification step
read_batch = client.get_read_chunks(batch_size=batch_size, last=True)
# convert the read data into a numpy array of correct type
queries = [(read.id, channel,read.number,
numpy.fromstring(read.raw_data, client.signal_dtype).tolist()
) for (channel,read) in read_batch if channel % control_group != 0]
querylen = len(queries)
# clear the raw reads from allocated memory
for (channel,read) in read_batch:
read.raw_data = read_until.NullRaw
if channel % control_group == 0:
client.stop_receiving_read(channel, read.number)
result = classifier.batch_classify(queries)
if result is not None:
for (status,channel,number,id) in result:
if status == 0:
# The i th read doesn't seems to be in the target region. Reject it.
client.unblock_read(channel, number)
client.stop_receiving_read(channel, number)
logger.info('Rejected {} {} {}'.format(id,channel,number))
elif status == 1:
# The i th read seems to be in the target region.
client.stop_receiving_read(channel, number)
logger.info('Accepted {} {} {}'.format(id,channel,number))
else:
logger.info('Chunked {} {} {}'.format(id,channel, number))
# else, the i th read didn't have enough signal. Keep going.
# limit the rate at which we make requests
t1 = time.time()
if t0 + throttle > t1:
time.sleep(throttle + t0 - t1)
logger.info('process {} reads in {}.'.format(querylen, t1-t0))
logger.info('Finished analysis of reads.')
classifier.free()
def main():
args = _get_parser().parse_args()
logging.basicConfig(format='[%(asctime)s - %(name)s] %(message)s',
datefmt='%H:%M:%S', level=args.log_level)
logger = logging.getLogger('Manager')
classifier = dyss.Dyss(num_scouts=args.num_scouts,
num_packs=args.num_packs,
reference=args.reference,
model=args.model,
param=args.param,
power=args.power,
referencesize=args.referencesize)
read_until_client = read_until.ReadUntilClient(
mk_port=args.port, one_chunk=False, filter_strands=True
)
with concurrent.futures.ThreadPoolExecutor() as executor:
futures = list()
futures.append(executor.submit(
read_until_client.run, runner_kwargs={
'run_time':args.run_time, 'min_chunk_size':args.min_chunk_size
}
))
futures.append(executor.submit(
signal_based_analysis, read_until_client, classifier,
batch_size=args.batch_size, delay=args.analysis_delay,control_group=args.control_group
))
for f in concurrent.futures.as_completed(futures):
if f.exception() is not None:
logger.warning(f.exception())
if __name__=="__main__":
main()
| [
"logging.getLogger",
"logging.basicConfig",
"argparse.ArgumentParser",
"dyss.Dyss",
"time.sleep",
"read_until.ReadUntilClient",
"numpy.fromstring",
"time.time"
] | [((301, 390), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['"""Dyss -- a tiny program for selective sequencing on MinION"""'], {}), "(\n 'Dyss -- a tiny program for selective sequencing on MinION')\n", (324, 390), False, 'import argparse\n'), ((3065, 3090), 'logging.getLogger', 'logging.getLogger', (['"""Dyss"""'], {}), "('Dyss')\n", (3082, 3090), False, 'import logging\n'), ((3528, 3545), 'time.sleep', 'time.sleep', (['delay'], {}), '(delay)\n', (3538, 3545), False, 'import time\n'), ((6012, 6125), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""[%(asctime)s - %(name)s] %(message)s"""', 'datefmt': '"""%H:%M:%S"""', 'level': 'args.log_level'}), "(format='[%(asctime)s - %(name)s] %(message)s', datefmt=\n '%H:%M:%S', level=args.log_level)\n", (6031, 6125), False, 'import logging\n'), ((6158, 6186), 'logging.getLogger', 'logging.getLogger', (['"""Manager"""'], {}), "('Manager')\n", (6175, 6186), False, 'import logging\n'), ((6204, 6390), 'dyss.Dyss', 'dyss.Dyss', ([], {'num_scouts': 'args.num_scouts', 'num_packs': 'args.num_packs', 'reference': 'args.reference', 'model': 'args.model', 'param': 'args.param', 'power': 'args.power', 'referencesize': 'args.referencesize'}), '(num_scouts=args.num_scouts, num_packs=args.num_packs, reference=\n args.reference, model=args.model, param=args.param, power=args.power,\n referencesize=args.referencesize)\n', (6213, 6390), False, 'import dyss\n'), ((6533, 6620), 'read_until.ReadUntilClient', 'read_until.ReadUntilClient', ([], {'mk_port': 'args.port', 'one_chunk': '(False)', 'filter_strands': '(True)'}), '(mk_port=args.port, one_chunk=False,\n filter_strands=True)\n', (6559, 6620), False, 'import read_until\n'), ((4057, 4068), 'time.time', 'time.time', ([], {}), '()\n', (4066, 4068), False, 'import time\n'), ((5720, 5731), 'time.time', 'time.time', ([], {}), '()\n', (5729, 5731), False, 'import time\n'), ((5775, 5805), 'time.sleep', 'time.sleep', (['(throttle + t0 - t1)'], {}), '(throttle + t0 - t1)\n', (5785, 5805), False, 'import time\n'), ((4335, 4387), 'numpy.fromstring', 'numpy.fromstring', (['read.raw_data', 'client.signal_dtype'], {}), '(read.raw_data, client.signal_dtype)\n', (4351, 4387), False, 'import numpy\n')] |
# Copyright 2018 Twitter, Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
""" This module contains classes and methods for extracting metrics from the
Heron Topology Master instance. """
import logging
import warnings
import datetime as dt
from typing import Dict, List, Any, Callable, Union, Tuple, Optional
import pandas as pd
from requests.exceptions import HTTPError
from caladrius.metrics.heron.client import HeronMetricsClient
from caladrius.common.heron import tracker
from caladrius.config.keys import ConfKeys
LOG: logging.Logger = logging.getLogger(__name__)
# pylint: disable=too-many-locals, too-many-arguments
# Type definitions
ROW_DICT = Dict[str, Union[str, int, float, dt.datetime, None]]
# The TMaster metrics are aggregated into minute long periods by default
DEFAULT_METRIC_PERIOD: int = 60
def time_check(start: dt.datetime, end: dt.datetime,
time_limit_hrs: float) -> None:
""" Checks the time period, defined by the supplied start and end points,
against the period defined from now back by the supplied time limit in
hours. If the time check passes then nothing will be returned.
Arguments:
start (datetime): The start of the time period. Should be UTC.
end (datetime): The end of the time period. Should be UTC.
time_limit_hrs (float): The number of hours back from now that define
the allowed time period.
Raises:
RuntimeError: If the supplied time period is not within the defined
limit or if the end time is before the start time.
RuntimeWarning: If the supplied time period crosses the limits of the
metrics storage period.
"""
if end < start:
msg: str = (f"The supplied end time ({end.isoformat}) is before the "
f"supplied start time ({start.isoformat}). No data will "
f"be returned.")
LOG.error(msg)
raise RuntimeError(msg)
now: dt.datetime = dt.datetime.now(dt.timezone.utc)
limit: dt.datetime = now - dt.timedelta(hours=time_limit_hrs)
if start < limit and end < limit:
limit_msg: str = (f"The defined time period ({start.isoformat()} to "
f"{end.isoformat()}) is outside of the "
f"{time_limit_hrs} hours of data stored by the "
f"Topology Master. No data will be returned.")
LOG.error(limit_msg)
raise RuntimeError(limit_msg)
if start < limit and end > limit:
truncated_duration: float = round(((end - limit).total_seconds() /
3600), 2)
truncated_msg: str = (f"The start ({start.isoformat()}) of the "
f"supplied time window is beyond the "
f"{time_limit_hrs} hours stored by the Topology "
f"Master. Results will be limited to "
f"{truncated_duration} hours from "
f"{limit.isoformat()} to {end.isoformat()}")
LOG.warning(truncated_msg)
warnings.warn(truncated_msg, RuntimeWarning)
def instance_timelines_to_dataframe(
instance_timelines: dict, stream: Optional[str], measurement_name: str,
conversion_func: Callable[[str], Union[str, int, float]] = None,
source_component: str = None) -> pd.DataFrame:
""" Converts the timeline dictionaries of a *single metric* into a single
combined DataFrame for all instances. All timestamps are converted to UTC
Python datetime objects and the returned DataFrame (for each instance) is
sorted by ascending date.
Arguments:
instance_timelines (dict): A dictionary of instance metric timelines,
where each key is an instance name linking
to a dictionary of <timestamp> :
<measurement> pairs.
stream (str): The stream name that these metrics are related to.
measurement_name (str): The name of the measurements being processed.
This will be used as the measurement column
heading.
conversion_func (function): An optional function for converting the
measurement in the timeline. If not
supplied the measurement will be left as a
string.
Returns:
pandas.DataFrame: A DataFrame containing the timelines of all instances
in the supplied dictionary.
"""
output: List[ROW_DICT] = []
instance_name: str
timeline: Dict[str, str]
for instance_name, timeline in instance_timelines.items():
details = tracker.parse_instance_name(instance_name)
instance_list: List[ROW_DICT] = []
timestamp_str: str
measurement_str: str
for timestamp_str, measurement_str in timeline.items():
timestamp: dt.datetime = \
dt.datetime.utcfromtimestamp(int(timestamp_str))
if "nan" in measurement_str:
measurement: Union[str, int, float, None] = None
else:
if conversion_func:
measurement = conversion_func(measurement_str)
else:
measurement = measurement_str
row: ROW_DICT = {
"timestamp": timestamp,
"container": details["container"],
"task": details["task_id"],
"component": details["component"],
measurement_name: measurement}
if stream:
row["stream"] = stream
if source_component:
row["source_component"] = source_component
instance_list.append(row)
# Because the original dict returned by the tracker is
# unsorted we need to sort the rows by ascending time
instance_list.sort(
key=lambda instance: instance["timestamp"])
output.extend(instance_list)
return pd.DataFrame(output)
def str_nano_to_float_milli(nano_str: str) -> float:
""" Converts a string of a nano measurement into a millisecond float value.
"""
return float(nano_str) / 1000000.0
class HeronTMasterClient(HeronMetricsClient):
""" Class for extracting metrics from the Heron Topology Master metrics
store. """
def __init__(self, config: dict) -> None:
super().__init__(config)
self.tracker_url = config[ConfKeys.HERON_TRACKER_URL.value]
self.time_limit_hrs = \
config.get(ConfKeys.HERON_TMASTER_METRICS_MAX_HOURS.value, 3)
LOG.info("Created Topology Master metrics client using Heron Tracker "
"at: %s", self.tracker_url)
def __hash__(self) -> int:
return hash(self.tracker_url)
def __eq__(self, other: object) -> bool:
if not isinstance(other, HeronTMasterClient):
return False
if self.tracker_url == other.tracker_url:
return True
return False
def _query_setup(self, topology_id: str, cluster: str, environ: str,
start: dt.datetime, end: dt.datetime,
) -> Tuple[Dict[str, Any], int, int]:
""" Helper method for setting up each of the query methods with the
required variables."""
time_check(start, end, self.time_limit_hrs)
start_time: int = int(round(start.timestamp()))
end_time: int = int(round(end.timestamp()))
logical_plan: Dict[str, Any] = tracker.get_logical_plan(
self.tracker_url, cluster, environ, topology_id)
return logical_plan, start_time, end_time
def get_component_service_times(self, topology_id: str, cluster: str,
environ: str, component_name: str,
start: int, end: int, logical_plan:
Dict[str, Any]=None) -> pd.DataFrame:
""" Gets the service times, as a timeseries, for every instance of the
specified component of the specified topology. The start and end times
define the window over which to gather the metrics. The window duration
should be less then 3 hours as this is the limit of what the Topology
master stores.
Arguments:
topology_id (str): The topology identification string.
cluster (str): The cluster the topology is running in.
environ (str): The environment the topology is running in (eg.
prod, devel, test, etc).
component_name (str): The name of the component whose metrics are
required.
start (int): Start time for the time period the query is run
against. This should be a UTC POSIX time integer
(seconds since epoch).
end (int): End time for the time period the query is run against.
This should be a UTC POSIX time integer (seconds since
epoch).
logical_plan (dict): Optional dictionary logical plan returned
by the Heron Tracker API. If not supplied
this method will call the API to get the
logical plan.
Returns:
pandas.DataFrame: A DataFrame containing the service time
measurements as a timeseries. Each row represents a measurement
(aggregated over one minute) with the following columns:
* timestamp: The UTC timestamp for the metric time period,
* component: The component this metric comes from,
* task: The instance ID number for the instance that the metric
comes from,
* container: The ID for the container this metric comes from,
* stream: The name of the incoming stream from which the tuples
that lead to this metric came from,
* execute-latency-ms: The average execute latency measurement in
milliseconds for that metric time period.
"""
LOG.info("Getting service time metrics for component %s of topology "
"%s", component_name, topology_id)
if not logical_plan:
LOG.debug("Logical plan not supplied, fetching from Heron Tracker")
logical_plan = tracker.get_logical_plan(self.tracker_url, cluster,
environ, topology_id)
incoming_streams: List[Tuple[str, str]] = \
tracker.incoming_sources_and_streams(logical_plan, component_name)
metrics: List[str] = ["__execute-latency/" + source + "/" + stream
for source, stream in incoming_streams]
results: Dict[str, Any] = tracker.get_metrics_timeline(
self.tracker_url, cluster, environ, topology_id, component_name,
start, end, metrics)
output: pd.DataFrame = None
for stream_metric, instance_timelines in results["timeline"].items():
metric_list: List[str] = stream_metric.split("/")
incoming_source: str = metric_list[1]
incoming_stream: str = metric_list[2]
instance_tls_df: pd.DataFrame = instance_timelines_to_dataframe(
instance_timelines, incoming_stream, "latency_ms",
str_nano_to_float_milli, incoming_source)
if output is None:
output = instance_tls_df
else:
output = output.append(instance_tls_df, ignore_index=True)
return output
def get_service_times(self, topology_id: str, cluster: str, environ: str,
start: dt.datetime, end: dt.datetime,
**kwargs: Union[str, int, float]) -> pd.DataFrame:
""" Gets the service times, as a timeseries, for every instance of the
of all the bolt components of the specified topology. The start and end
times define the window over which to gather the metrics. The window
duration should be less than 3 hours as this is the limit of what the
Topology master stores.
Arguments:
topology_id (str): The topology identification string.
start (datetime): utc datetime instance for the start of the
metrics gathering period.
end (datetime): utc datetime instance for the end of the
metrics gathering period.
**cluster (str): The cluster the topology is running in.
**environ (str): The environment the topology is running in (eg.
prod, devel, test, etc).
Returns:
pandas.DataFrame: A DataFrame containing the service time
measurements as a timeseries. Each row represents a measurement
(aggregated over one minute) with the following columns:
* timestamp:The UTC timestamp for the metric time period,
* component: The component this metric comes from,
* task: The instance ID number for the instance that the metric
comes from,
* container: The ID for the container this metric comes from,
* stream: The name of the incoming stream from which the tuples
that lead to this metric came from,
* latency_ms: The average execute latency measurement in
milliseconds for that metric time period.
"""
LOG.info("Getting service times for topology %s over a %d second "
"period from %s to %s", topology_id,
(end-start).total_seconds(), start.isoformat(),
end.isoformat())
logical_plan, start_time, end_time = self._query_setup(
topology_id, cluster, environ, start, end)
output: pd.DataFrame = None
bolts: Dict[str, Any] = logical_plan["bolts"]
bolt_component: str
for bolt_component in bolts:
try:
bolt_service_times: pd.DataFrame = \
self.get_component_service_times(topology_id,
cluster, environ,
bolt_component,
start_time, end_time,
logical_plan)
except HTTPError as http_error:
LOG.warning("Fetching execute latencies for component %s "
"failed with status code %s", bolt_component,
str(http_error.response.status_code))
else:
if output is None:
output = bolt_service_times
else:
output = output.append(bolt_service_times,
ignore_index=True)
return output
def get_component_emission_counts(self, topology_id: str, cluster: str,
environ: str, component_name: str,
start: int, end: int,
logical_plan: Dict[str, Any] = None
) -> pd.DataFrame:
""" Gets the emit counts, as a timeseries, for every instance of the
specified component of the specified topology. The start and end times
define the window over which to gather the metrics. The window duration
should be less then 3 hours as this is the limit of what the Topology
master stores.
Arguments:
topology_id (str): The topology identification string.
cluster (str): The cluster the topology is running in.
environ (str): The environment the topology is running in (eg.
prod, devel, test, etc).
component_name (str): The name of the component whose metrics are
required.
start (int): Start time for the time period the query is run
against. This should be a UTC POSIX time integer
(seconds since epoch).
end (int): End time for the time period the query is run against.
This should be a UTC POSIX time integer (seconds since
epoch).
logical_plan (dict): Optional dictionary logical plan returned
by the Heron Tracker API. If not supplied
this method will call the API to get the
logical plan.
Returns:
pandas.DataFrame: A DataFrame containing the emit count
measurements as a timeseries. Each row represents a measurement
(aggregated over one minute) with the following columns:
* timestamp:The UTC timestamp for the metric time period,
* component: The component this metric comes from,
* task: The instance ID number for the instance that the metric
comes from,
* container: The ID for the container this metric comes from,
* stream: The name of the incoming stream from which the tuples
that lead to this metric came from,
* emit_count: The emit count in that metric time period.
"""
LOG.info("Getting emit count metrics for component %s of topology "
"%s", component_name, topology_id)
if not logical_plan:
LOG.debug("Logical plan not supplied, fetching from Heron Tracker")
logical_plan = tracker.get_logical_plan(self.tracker_url, cluster,
environ, topology_id)
outgoing_streams: List[str] = tracker.get_outgoing_streams(
logical_plan, component_name)
metrics: List[str] = ["__emit-count/" + stream
for stream in outgoing_streams]
results: Dict[str, Any] = tracker.get_metrics_timeline(
self.tracker_url, cluster, environ, topology_id, component_name,
start, end, metrics)
output: pd.DataFrame = None
for stream_metric, instance_timelines in results["timeline"].items():
outgoing_stream: str = stream_metric.split("/")[-1]
instance_tls_df: pd.DataFrame = instance_timelines_to_dataframe(
instance_timelines, outgoing_stream, "emit_count",
lambda m: int(float(m)))
if output is None:
output = instance_tls_df
else:
output = output.append(instance_tls_df, ignore_index=True)
return output
def get_emit_counts(self, topology_id: str, cluster: str, environ: str,
start: dt.datetime, end: dt.datetime,
**kwargs: Union[str, int, float]) -> pd.DataFrame:
""" Gets the emit counts, as a timeseries, for every instance of each
of the components of the specified topology. The start and end times
define the window over which to gather the metrics. The window duration
should be less than 3 hours as this is the limit of what the Topology
master stores.
Arguments:
topology_id (str): The topology identification string.
start (datetime): utc datetime instance for the start of the
metrics gathering period.
end (datetime): utc datetime instance for the end of the
metrics gathering period.
**cluster (str): The cluster the topology is running in.
**environ (str): The environment the topology is running in (eg.
prod, devel, test, etc).
Returns:
pandas.DataFrame: A DataFrame containing the emit count
measurements as a timeseries. Each row represents a measurement
(aggregated over one minute) with the following columns:
* timestamp: The UTC timestamp for the metric,
* component: The component this metric comes from,
* task: The instance ID number for the instance that the metric
comes from,
* container: The ID for the container this metric comes from,
* stream: The name of the outing stream from which the tuples that
lead to this metric came from,
* emit_count: The emit count during the metric time period.
"""
LOG.info("Getting emit counts for topology %s over a %d second "
"period from %s to %s", topology_id,
(end-start).total_seconds(), start.isoformat(),
end.isoformat())
logical_plan, start_time, end_time = self._query_setup(
topology_id, cluster, environ, start, end)
output: pd.DataFrame = None
components: List[str] = (list(logical_plan["spouts"].keys()) +
list(logical_plan["bolts"].keys()))
for component in components:
try:
comp_emit_counts: pd.DataFrame = \
self.get_component_emission_counts(
topology_id, cluster, environ, component,
start_time, end_time, logical_plan)
except HTTPError as http_error:
LOG.warning("Fetching emit counts for component %s failed with"
" status code %s", component,
str(http_error.response.status_code))
if output is None:
output = comp_emit_counts
else:
output = output.append(comp_emit_counts, ignore_index=True)
return output
def get_component_execute_counts(self, topology_id: str, cluster: str,
environ: str, component_name: str,
start: int, end: int,
logical_plan: Dict[str, Any] = None
) -> pd.DataFrame:
""" Gets the execute counts, as a timeseries, for every instance of the
specified component of the specified topology. The start and end times
define the window over which to gather the metrics. The window duration
should be less then 3 hours as this is the limit of what the Topology
master stores.
Arguments:
topology_id (str): The topology identification string.
cluster (str): The cluster the topology is running in.
environ (str): The environment the topology is running in (eg.
prod, devel, test, etc).
component_name (str): The name of the component whose metrics are
required.
start (int): Start time for the time period the query is run
against. This should be a UTC POSIX time integer
(seconds since epoch).
end (int): End time for the time period the query is run against.
This should be a UTC POSIX time integer (seconds since
epoch).
logical_plan (dict): Optional dictionary logical plan returned
by the Heron Tracker API. If not supplied
this method will call the API to get the
logical plan.
Returns:
pandas.DataFrame: A DataFrame containing the emit count
measurements as a timeseries. Each row represents a measurement
(aggregated over one minute) with the following columns:
* timestamp: The UTC timestamp for the metric time period,
* component: The component this metric comes from,
* task: The instance ID number for the instance that the metric
comes from,
* container: The ID for the container this metric comes from.
* stream: The name of the incoming stream from which the tuples
that lead to this metric came from,
* execute_count: The execute count in that metric time period.
"""
LOG.info("Getting execute count metrics for component %s of topology "
"%s", component_name, topology_id)
if not logical_plan:
LOG.debug("Logical plan not supplied, fetching from Heron Tracker")
logical_plan = tracker.get_logical_plan(self.tracker_url, cluster,
environ, topology_id)
incoming_streams: List[Tuple[str, str]] = \
tracker.incoming_sources_and_streams(logical_plan, component_name)
metrics: List[str] = ["__execute-count/" + source + "/" + stream
for source, stream in incoming_streams]
results: Dict[str, Any] = tracker.get_metrics_timeline(
self.tracker_url, cluster, environ, topology_id, component_name,
start, end, metrics)
output: pd.DataFrame = None
for stream_metric, instance_timelines in results["timeline"].items():
metric_list: List[str] = stream_metric.split("/")
incoming_source: str = metric_list[1]
incoming_stream: str = metric_list[2]
instance_tls_df: pd.DataFrame = instance_timelines_to_dataframe(
instance_timelines, incoming_stream, "execute_count",
lambda m: int(float(m)), incoming_source)
if output is None:
output = instance_tls_df
else:
output = output.append(instance_tls_df, ignore_index=True)
return output
def get_execute_counts(self, topology_id: str, cluster: str, environ: str,
start: dt.datetime, end: dt.datetime,
**kwargs: Union[str, int, float]) -> pd.DataFrame:
""" Gets the execute counts, as a timeseries, for every instance of
each of the components of the specified topology. The start and end
times define the window over which to gather the metrics. The window
duration should be less than 3 hours as this is the limit of what the
Topology master stores.
Arguments:
topology_id (str): The topology identification string.
start (datetime): UTC datetime instance for the start of the
metrics gathering period.
end (datetime): UTC datetime instance for the end of the
metrics gathering period.
**cluster (str): The cluster the topology is running in.
**environ (str): The environment the topology is running in (eg.
prod, devel, test, etc).
Returns:
pandas.DataFrame: A DataFrame containing the service time
measurements as a timeseries. Each row represents a measurement
(aggregated over one minute) with the following columns:
* timestamp: The UTC timestamp for the metric,
* component: The component this metric comes from,
* task: The instance ID number for the instance that the metric
comes from,
* container: The ID for the container this metric comes from.
* stream: The name of the incoming stream from which the tuples
that lead to this metric came from,
* source_component: The name of the component the stream's source
instance belongs to,
* execute_count: The execute count during the metric time period.
"""
LOG.info("Getting execute counts for topology %s over a %d second "
"period from %s to %s", topology_id,
(end-start).total_seconds(), start.isoformat(),
end.isoformat())
logical_plan, start_time, end_time = self._query_setup(
topology_id, cluster, environ, start, end)
output: pd.DataFrame = None
for component in logical_plan["bolts"].keys():
try:
comp_execute_counts: pd.DataFrame = \
self.get_component_execute_counts(topology_id, cluster,
environ, component,
start_time, end_time,
logical_plan)
except HTTPError as http_error:
LOG.warning("Fetching execute counts for component %s failed "
"with status code %s", component,
str(http_error.response.status_code))
if output is None:
output = comp_execute_counts
else:
output = output.append(comp_execute_counts, ignore_index=True)
return output
def get_spout_complete_latencies(self, topology_id: str, cluster: str,
environ: str, component_name: str,
start: int, end: int,
logical_plan: Dict[str, Any] = None
) -> pd.DataFrame:
""" Gets the complete latency, as a timeseries, for every instance of
the specified component of the specified topology. The start and end
times define the window over which to gather the metrics. The window
duration should be less then 3 hours as this is the limit of what the
Topology master stores.
Arguments:
topology_id (str): The topology identification string.
cluster (str): The cluster the topology is running in.
environ (str): The environment the topology is running in (eg.
prod, devel, test, etc).
component_name (str): The name of the spout component whose
metrics are required.
start (int): Start time for the time period the query is run
against. This should be a UTC POSIX time integer
(seconds since epoch).
end (int): End time for the time period the query is run against.
This should be a UTC POSIX time integer (seconds since
epoch).
logical_plan (dict): Optional dictionary logical plan returned
by the Heron Tracker API. If not supplied
this method will call the API to get the
logical plan.
Returns:
pandas.DataFrame: A DataFrame containing the complete latency
measurements as a timeseries. Each row represents a measurement
(averaged over one minute) with the following columns:
* timestamp: The UTC timestamp for the metric,
* component: The component this metric comes from,
* task: The instance ID number for the instance that the metric
comes from,
* container: The ID for the container this metric comes from,
* stream: The name of the incoming stream from which the tuples
that lead to this metric came from,
* latency_ms: The average execute latency measurement in
milliseconds for that metric time period.
"""
LOG.info("Getting complete latency metrics for component %s of "
"topology %s", component_name, topology_id)
if not logical_plan:
LOG.debug("Logical plan not supplied, fetching from Heron Tracker")
logical_plan = tracker.get_logical_plan(self.tracker_url, cluster,
environ, topology_id)
outgoing_streams: List[str] = \
tracker.get_outgoing_streams(logical_plan, component_name)
metrics: List[str] = ["__complete-latency/" + stream
for stream in outgoing_streams]
results: Dict[str, Any] = tracker.get_metrics_timeline(
self.tracker_url, cluster, environ, topology_id, component_name,
start, end, metrics)
output: pd.DataFrame = None
for stream_metric, instance_timelines in results["timeline"].items():
metric_list: List[str] = stream_metric.split("/")
outgoing_stream: str = metric_list[1]
instance_tls_df: pd.DataFrame = instance_timelines_to_dataframe(
instance_timelines, outgoing_stream, "latency_ms",
str_nano_to_float_milli)
if output is None:
output = instance_tls_df
else:
output = output.append(instance_tls_df, ignore_index=True)
return output
def get_complete_latencies(self, topology_id: str, cluster: str,
environ: str, start: dt.datetime,
end: dt.datetime,
**kwargs: Union[str, int, float]
) -> pd.DataFrame:
""" Gets the complete latencies, as a timeseries, for every instance of
the of all the spout components of the specified topology. The start
and end times define the window over which to gather the metrics. The
window duration should be less than 3 hours as this is the limit of
what the Topology master stores.
Arguments:
topology_id (str): The topology identification string.
cluster (str): The cluster the topology is running in.
environ (str): The environment the topology is running in (eg.
prod, devel, test, etc).
start (datetime): utc datetime instance for the start of the
metrics gathering period.
end (datetime): utc datetime instance for the end of the
metrics gathering period.
Returns:
pandas.DataFrame: A DataFrame containing the service time
measurements as a timeseries. Each row represents a measurement
(aggregated over one minute) with the following columns:
* timestamp: The UTC timestamp for the metric,
* component: The component this metric comes from,
* task: The instance ID number for the instance that the metric
comes from,
* container: The ID for the container this metric comes from,
stream: The name of the incoming stream from which the tuples
that lead to this metric came from,
* latency_ms: The average execute latency measurement in
milliseconds for that metric time period.
Raises:
RuntimeWarning: If the specified topology has a reliability mode
that does not enable complete latency.
"""
LOG.info("Getting complete latencies for topology %s over a %d second "
"period from %s to %s", topology_id,
(end-start).total_seconds(), start.isoformat(),
end.isoformat())
logical_plan, start_time, end_time = self._query_setup(
topology_id, cluster, environ, start, end)
# First we need to check that the supplied topology will actually have
# complete latencies. Only ATLEAST_ONCE and EXACTLY_ONCE will have
# complete latency values as acking is disabled for ATMOST_ONCE.
physical_plan: Dict[str, Any] = tracker.get_physical_plan(
self.tracker_url, cluster, environ, topology_id)
if (physical_plan["config"]
["topology.reliability.mode"] == "ATMOST_ONCE"):
rm_msg: str = (f"Topology {topology_id} reliability mode is set "
f"to ATMOST_ONCE. Complete latency is not "
f"available for these types of topologies")
LOG.warning(rm_msg)
warnings.warn(rm_msg, RuntimeWarning)
return pd.DataFrame()
output: pd.DataFrame = None
spouts: Dict[str, Any] = logical_plan["spouts"]
for spout_component in spouts:
try:
spout_complete_latencies: pd.DataFrame = \
self.get_spout_complete_latencies(topology_id,
cluster, environ,
spout_component,
start_time, end_time,
logical_plan)
except HTTPError as http_error:
LOG.warning("Fetching execute latencies for component %s "
"failed with status code %s", spout_component,
str(http_error.response.status_code))
if output is None:
output = spout_complete_latencies
else:
output = output.append(spout_complete_latencies,
ignore_index=True)
return output
def get_calculated_arrival_rates(self, topology_id: str, cluster: str, environ: str,
start: dt.datetime, end: dt.datetime,
**kwargs: Union[str, int, float]) -> pd.DataFrame:
""" Gets the arrival rates, as a timeseries, for every instance of each
of the bolt components of the specified topology. The start and end
times define the window over which to gather the metrics. The window
duration should be less than 3 hours as this is the limit of what the
Topology master stores.
Arguments:
topology_id (str): The topology identification string.
start (datetime): utc datetime instance for the start of the
metrics gathering period.
end (datetime): utc datetime instance for the end of the
metrics gathering period.
**cluster (str): The cluster the topology is running in.
**environ (str): The environment the topology is running in (eg.
prod, devel, test, etc).
Returns:
pandas.DataFrame: A DataFrame containing the arrival rate
measurements as a timeseries. Each row represents a measurement
(aggregated over one minute) with the following columns:
* timestamp: The UTC timestamp for the metric,
* component: The component this metric comes from,
* task: The instance ID number for the instance that the metric
comes from,
* container: The ID for the container this metric comes from,
* arrival_count: The number of arrivals (across all streams) at
each instance.
* arrival_rate_tps: The arrival rate at each instance (across all
streams) in units of tuples per second.
"""
LOG.info("Getting arrival rates for topology %s over a %d second "
"period from %s to %s", topology_id,
(end-start).total_seconds(), start.isoformat(),
end.isoformat())
execute_counts: pd.DataFrame = self.get_execute_counts(
topology_id, cluster, environ, start, end)
arrivals: pd.DataFrame = \
(execute_counts.groupby(["task", "component", "timestamp"])
.sum().reset_index()
.rename(index=str, columns={"execute_count": "arrival_count"}))
arrivals["arrival_rate_tps"] = (arrivals["arrival_count"] /
DEFAULT_METRIC_PERIOD)
return arrivals
def get_receive_counts(self, topology_id: str, cluster: str, environ: str,
start: dt.datetime, end: dt.datetime,
**kwargs: Union[str, int, float]) -> pd.DataFrame:
msg: str = ("The custom caladrius receive-count metrics is not yet "
"available via the TMaster metrics database")
LOG.error(msg)
raise NotImplementedError(msg)
def get_incoming_queue_sizes(self, topology_id: str, cluster: str, environ: str,
start: [dt.datetime] = None, end: [dt.datetime] = None,
**kwargs: Union[str, int, float]) -> pd.DataFrame:
msg: str = "Unimplemented"
LOG.error(msg)
raise NotImplementedError(msg)
def get_cpu_load(self, topology_id: str, cluster: str, environ: str,
start: [dt.datetime] = None, end: [dt.datetime] = None,
** kwargs: Union[str, int, float]) -> pd.DataFrame:
msg: str = "Unimplemented"
LOG.error(msg)
raise NotImplementedError(msg)
def get_gc_time(self, topology_id: str, cluster: str, environ: str,
start: [dt.datetime] = None, end: [dt.datetime] = None,
** kwargs: Union[str, int, float]) -> pd.DataFrame:
msg: str = "Unimplemented"
LOG.error(msg)
raise NotImplementedError(msg)
def get_num_packets_received(self, topology_id: str, cluster: str, environ: str,
start: [dt.datetime] = None, end: [dt.datetime] = None,
** kwargs: Union[str, int, float]) -> pd.DataFrame:
msg: str = "Unimplemented"
LOG.error(msg)
raise NotImplementedError(msg)
def get_packet_arrival_rate(self, topology_id: str, cluster: str, environ: str,
start: [dt.datetime] = None, end: [dt.datetime] = None,
**kwargs: Union[str, int, float]) -> pd.DataFrame:
msg: str = "Unimplemented"
LOG.error(msg)
raise NotImplementedError(msg)
def get_tuple_arrivals_at_stmgr(self, topology_id: str, cluster: str, environ: str,
start: [dt.datetime] = None, end: [dt.datetime] = None,
**kwargs: Union[str, int, float]) -> pd.DataFrame:
msg: str = "Unimplemented"
LOG.error(msg)
raise NotImplementedError(msg)
| [
"logging.getLogger",
"caladrius.common.heron.tracker.get_metrics_timeline",
"caladrius.common.heron.tracker.get_physical_plan",
"caladrius.common.heron.tracker.incoming_sources_and_streams",
"caladrius.common.heron.tracker.parse_instance_name",
"caladrius.common.heron.tracker.get_logical_plan",
"caladri... | [((600, 627), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (617, 627), False, 'import logging\n'), ((2081, 2113), 'datetime.datetime.now', 'dt.datetime.now', (['dt.timezone.utc'], {}), '(dt.timezone.utc)\n', (2096, 2113), True, 'import datetime as dt\n'), ((6259, 6279), 'pandas.DataFrame', 'pd.DataFrame', (['output'], {}), '(output)\n', (6271, 6279), True, 'import pandas as pd\n'), ((2145, 2179), 'datetime.timedelta', 'dt.timedelta', ([], {'hours': 'time_limit_hrs'}), '(hours=time_limit_hrs)\n', (2157, 2179), True, 'import datetime as dt\n'), ((3221, 3265), 'warnings.warn', 'warnings.warn', (['truncated_msg', 'RuntimeWarning'], {}), '(truncated_msg, RuntimeWarning)\n', (3234, 3265), False, 'import warnings\n'), ((4924, 4966), 'caladrius.common.heron.tracker.parse_instance_name', 'tracker.parse_instance_name', (['instance_name'], {}), '(instance_name)\n', (4951, 4966), False, 'from caladrius.common.heron import tracker\n'), ((7776, 7849), 'caladrius.common.heron.tracker.get_logical_plan', 'tracker.get_logical_plan', (['self.tracker_url', 'cluster', 'environ', 'topology_id'], {}), '(self.tracker_url, cluster, environ, topology_id)\n', (7800, 7849), False, 'from caladrius.common.heron import tracker\n'), ((10909, 10975), 'caladrius.common.heron.tracker.incoming_sources_and_streams', 'tracker.incoming_sources_and_streams', (['logical_plan', 'component_name'], {}), '(logical_plan, component_name)\n', (10945, 10975), False, 'from caladrius.common.heron import tracker\n'), ((11157, 11275), 'caladrius.common.heron.tracker.get_metrics_timeline', 'tracker.get_metrics_timeline', (['self.tracker_url', 'cluster', 'environ', 'topology_id', 'component_name', 'start', 'end', 'metrics'], {}), '(self.tracker_url, cluster, environ,\n topology_id, component_name, start, end, metrics)\n', (11185, 11275), False, 'from caladrius.common.heron import tracker\n'), ((18321, 18379), 'caladrius.common.heron.tracker.get_outgoing_streams', 'tracker.get_outgoing_streams', (['logical_plan', 'component_name'], {}), '(logical_plan, component_name)\n', (18349, 18379), False, 'from caladrius.common.heron import tracker\n'), ((18546, 18664), 'caladrius.common.heron.tracker.get_metrics_timeline', 'tracker.get_metrics_timeline', (['self.tracker_url', 'cluster', 'environ', 'topology_id', 'component_name', 'start', 'end', 'metrics'], {}), '(self.tracker_url, cluster, environ,\n topology_id, component_name, start, end, metrics)\n', (18574, 18664), False, 'from caladrius.common.heron import tracker\n'), ((25335, 25401), 'caladrius.common.heron.tracker.incoming_sources_and_streams', 'tracker.incoming_sources_and_streams', (['logical_plan', 'component_name'], {}), '(logical_plan, component_name)\n', (25371, 25401), False, 'from caladrius.common.heron import tracker\n'), ((25581, 25699), 'caladrius.common.heron.tracker.get_metrics_timeline', 'tracker.get_metrics_timeline', (['self.tracker_url', 'cluster', 'environ', 'topology_id', 'component_name', 'start', 'end', 'metrics'], {}), '(self.tracker_url, cluster, environ,\n topology_id, component_name, start, end, metrics)\n', (25609, 25699), False, 'from caladrius.common.heron import tracker\n'), ((32676, 32734), 'caladrius.common.heron.tracker.get_outgoing_streams', 'tracker.get_outgoing_streams', (['logical_plan', 'component_name'], {}), '(logical_plan, component_name)\n', (32704, 32734), False, 'from caladrius.common.heron import tracker\n'), ((32894, 33012), 'caladrius.common.heron.tracker.get_metrics_timeline', 'tracker.get_metrics_timeline', (['self.tracker_url', 'cluster', 'environ', 'topology_id', 'component_name', 'start', 'end', 'metrics'], {}), '(self.tracker_url, cluster, environ,\n topology_id, component_name, start, end, metrics)\n', (32922, 33012), False, 'from caladrius.common.heron import tracker\n'), ((36419, 36493), 'caladrius.common.heron.tracker.get_physical_plan', 'tracker.get_physical_plan', (['self.tracker_url', 'cluster', 'environ', 'topology_id'], {}), '(self.tracker_url, cluster, environ, topology_id)\n', (36444, 36493), False, 'from caladrius.common.heron import tracker\n'), ((10718, 10791), 'caladrius.common.heron.tracker.get_logical_plan', 'tracker.get_logical_plan', (['self.tracker_url', 'cluster', 'environ', 'topology_id'], {}), '(self.tracker_url, cluster, environ, topology_id)\n', (10742, 10791), False, 'from caladrius.common.heron import tracker\n'), ((18156, 18229), 'caladrius.common.heron.tracker.get_logical_plan', 'tracker.get_logical_plan', (['self.tracker_url', 'cluster', 'environ', 'topology_id'], {}), '(self.tracker_url, cluster, environ, topology_id)\n', (18180, 18229), False, 'from caladrius.common.heron import tracker\n'), ((25144, 25217), 'caladrius.common.heron.tracker.get_logical_plan', 'tracker.get_logical_plan', (['self.tracker_url', 'cluster', 'environ', 'topology_id'], {}), '(self.tracker_url, cluster, environ, topology_id)\n', (25168, 25217), False, 'from caladrius.common.heron import tracker\n'), ((32497, 32570), 'caladrius.common.heron.tracker.get_logical_plan', 'tracker.get_logical_plan', (['self.tracker_url', 'cluster', 'environ', 'topology_id'], {}), '(self.tracker_url, cluster, environ, topology_id)\n', (32521, 32570), False, 'from caladrius.common.heron import tracker\n'), ((36872, 36909), 'warnings.warn', 'warnings.warn', (['rm_msg', 'RuntimeWarning'], {}), '(rm_msg, RuntimeWarning)\n', (36885, 36909), False, 'import warnings\n'), ((36929, 36943), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (36941, 36943), True, 'import pandas as pd\n')] |
import numpy as np
import xarray as xr
from numpy import asarray
import scipy.sparse
from itertools import product
from .util import get_shape_of_data
from .grid_stretching_transforms import scs_transform
from .constants import R_EARTH_m
def get_troposphere_mask(ds):
"""
Returns a mask array for picking out the tropospheric grid boxes.
Args:
ds: xarray Dataset
Dataset containing certain met field variables (i.e.
Met_TropLev, Met_BXHEIGHT).
Returns:
tropmask: numpy ndarray
Tropospheric mask. False denotes grid boxes that are
in the troposphere and True in the stratosphere
(as per Python masking logic).
"""
# ==================================================================
# Initialization
# ==================================================================
# Make sure ds is an xarray Dataset object
if not isinstance(ds, xr.Dataset):
raise TypeError("The ds argument must be an xarray Dataset!")
# Make sure certain variables are found
if "Met_BXHEIGHT" not in ds.data_vars.keys():
raise ValueError("Met_BXHEIGHT could not be found!")
if "Met_TropLev" not in ds.data_vars.keys():
raise ValueError("Met_TropLev could not be found!")
# Mask of tropospheric grid boxes in the Ref dataset
shape = get_shape_of_data(np.squeeze(ds["Met_BXHEIGHT"]))
# Determine if this is GCHP data
is_gchp = "nf" in ds["Met_BXHEIGHT"].dims
# ==================================================================
# Create the mask arrays for the troposphere
#
# Convert the Met_TropLev DataArray objects to numpy ndarrays of
# integer. Also subtract 1 to convert from Fortran to Python
# array index notation.
# ==================================================================
multi_time_slices = (is_gchp and len(shape) == 5) or \
(not is_gchp and len(shape) == 4)
if multi_time_slices:
# --------------------------------------------------------------
# GCC: There are multiple time slices
# --------------------------------------------------------------
# Create the tropmask array with dims
# (time, lev, nf*lat*lon) for GCHP, or
# (time, lev, lat*lon ) for GCC
tropmask = np.ones((shape[0], shape[1],
np.prod(np.array(shape[2:]))), bool)
# Loop over each time
for t in range(tropmask.shape[0]):
# Pick the tropopause level and make a 1-D array
values = ds["Met_TropLev"].isel(time=t).values
lev = np.int_(np.squeeze(values) - 1)
lev_1d = lev.flatten()
# Create the tropospheric mask array
for x in range(tropmask.shape[2]):
tropmask[t, 0: lev_1d[x], x] = False
else:
# --------------------------------------------------------------
# There is only one time slice
# --------------------------------------------------------------
# Create the tropmask array with dims (lev, lat*lon)
tropmask = np.ones((shape[0], np.prod(np.array(shape[1:]))), bool)
# Pick the tropopause level and make a 1-D array
values = ds["Met_TropLev"].values
lev = np.int_(np.squeeze(values) - 1)
lev_1d = lev.flatten()
# Create the tropospheric mask array
for x in range(tropmask.shape[1]):
tropmask[0: lev_1d[x], x] = False
# Reshape into the same shape as Met_BxHeight
return tropmask.reshape(shape)
def get_input_res(data):
"""
Returns resolution of dataset passed to compare_single_level or compare_zonal_means
Args:
data: xarray Dataset
Input GEOS-Chem dataset
Returns:
res: str or int
Lat/lon res of the form 'latresxlonres' or cubed-sphere resolution
gridtype: str
'll' for lat/lon or 'cs' for cubed-sphere
"""
vdims = data.dims
if "lat" in vdims and "lon" in vdims:
lat = data["lat"].values
lon = data["lon"].values
if lat.size / 6 == lon.size:
return lon.size, "cs"
else:
lat.sort()
lon.sort()
# use increment of second and third coordinates
# to avoid polar mischief
lat_res = np.abs(lat[2] - lat[1])
lon_res = np.abs(lon[2] - lon[1])
return str(lat_res) + "x" + str(lon_res), "ll"
else:
#print("grid is cs: ", vdims)
# GCHP data using MAPL v1.0.0+ has dims time, lev, nf, Ydim, and Xdim
if isinstance(data.dims, tuple):
return len(data["Xdim"].values), "cs"
else:
return data.dims["Xdim"], "cs"
def call_make_grid(res, gridtype, in_extent=[-180, 180, -90, 90],
out_extent=[-180, 180, -90, 90], sg_params=[1, 170, -90]):
"""
Create a mask with NaN values removed from an input array
Args:
res: str or int
Resolution of grid (format 'latxlon' or csres)
gridtype: str
'll' for lat/lon or 'cs' for cubed-sphere
Keyword Args (optional):
in_extent: list[float, float, float, float]
Describes minimum and maximum latitude and longitude of input data
in the format [minlon, maxlon, minlat, maxlat]
Default value: [-180, 180, -90, 90]
out_extent: list[float, float, float, float]
Desired minimum and maximum latitude and longitude of output grid
in the format [minlon, maxlon, minlat, maxlat]
Default value: [-180, 180, -90, 90]
sg_params: list[float, float, float] (stretch_factor, target_longitude, target_latitude)
Desired stretched-grid parameters in the format
[stretch_factor, target_longitude, target_latitude].
Will trigger stretched-grid creation if not default values.
Default value: [1, 170, -90] (no stretching)
Returns:
[grid, grid_list]: list(dict, list(dict))
Returns the created grid.
grid_list is a list of grids if gridtype is 'cs', else it is None
"""
# call appropriate make_grid function and return new grid
if gridtype == "ll":
return [make_grid_LL(res, in_extent, out_extent), None]
elif sg_params == [1, 170, -90]:
# standard CS
return make_grid_CS(res)
else:
return make_grid_SG(res, *sg_params)
def get_grid_extents(data, edges=True):
"""
Get min and max lat and lon from an input GEOS-Chem xarray dataset or grid dict
Args:
data: xarray Dataset or dict
A GEOS-Chem dataset or a grid dict
edges (optional): bool
Whether grid extents should use cell edges instead of centers
Default value: True
Returns:
minlon: float
Minimum longitude of data grid
maxlon: float
Maximum longitude of data grid
minlat: float
Minimum latitude of data grid
maxlat: float
Maximum latitude of data grid
"""
if isinstance(data, dict):
if "lon_b" in data and edges:
return np.min(
data["lon_b"]), np.max(
data["lon_b"]), np.min(
data["lat_b"]), np.max(
data["lat_b"])
elif not edges:
return np.min(
data["lon"]), np.max(
data["lon"]), np.min(
data["lat"]), np.max(
data["lat"])
else:
return -180, 180, -90, 90
elif "lat" in data.dims and "lon" in data.dims:
lat = data["lat"].values
lon = data["lon"].values
if lat.size / 6 == lon.size:
# No extents for CS plots right now
return -180, 180, -90, 90
else:
lat = np.sort(lat)
minlat = np.min(lat)
if abs(abs(lat[1]) - abs(lat[0])
) != abs(abs(lat[2]) - abs(lat[1])):
#pole is cutoff
minlat = minlat - 1
maxlat = np.max(lat)
if abs(abs(lat[-1]) - abs(lat[-2])
) != abs(abs(lat[-2]) - abs(lat[-3])):
maxlat = maxlat + 1
# add longitude res to max longitude
lon = np.sort(lon)
minlon = np.min(lon)
maxlon = np.max(lon) + abs(abs(lon[-1]) - abs(lon[-2]))
return minlon, maxlon, minlat, maxlat
else:
# GCHP data using MAPL v1.0.0+ has dims time, lev, nf, Ydim, and Xdim
return -180, 180, -90, 90
def get_vert_grid(dataset, AP=[], BP=[]):
"""
Determine vertical grid of input dataset
Args:
dataset: xarray Dataset
A GEOS-Chem output dataset
Keyword Args (optional):
AP: list-like type
Hybrid grid parameter A in hPa
Default value: []
BP: list-like type
Hybrid grid parameter B (unitless)
Default value: []
Returns:
p_edge: numpy array
Edge pressure values for vertical grid
p_mid: numpy array
Midpoint pressure values for vertical grid
nlev: int
Number of levels in vertical grid
"""
if dataset.sizes["lev"] in (72, 73):
return GEOS_72L_grid.p_edge(), GEOS_72L_grid.p_mid(), 72
elif dataset.sizes["lev"] in (47, 48):
return GEOS_47L_grid.p_edge(), GEOS_47L_grid.p_mid(), 47
elif AP == [] or BP == []:
if dataset.sizes["lev"] == 1:
AP = [1, 1]
BP = [1]
new_grid = vert_grid(AP, BP)
return new_grid.p_edge(), new_grid.p_mid(), np.size(AP)
else:
raise ValueError(
"Only 72/73 or 47/48 level vertical grids are automatically determined" +
"from input dataset by get_vert_grid(), please pass grid parameters AP and BP" +
"as keyword arguments")
else:
new_grid = vert_grid(AP, BP)
return new_grid.p_edge(), new_grid.p_mid(), np.size(AP)
def get_pressure_indices(pedge, pres_range):
"""
Get indices where edge pressure values are within a given pressure range
Args:
pedge: numpy array
A GEOS-Chem output dataset
pres_range: list(float, float)
Contains minimum and maximum pressure
Returns:
numpy array
Indices where edge pressure values are within a given pressure range
"""
return np.where(
(pedge <= np.max(pres_range)) & (
pedge >= np.min(pres_range)))[0]
def pad_pressure_edges(pedge_ind, max_ind, pmid_len):
"""
Add outer indices to edge pressure index list
Args:
pedge_ind: list
List of edge pressure indices
max_ind: int
Maximum index
pmid_len: int
Length of pmid which should not be exceeded by indices
Returns:
pedge_ind: list
List of edge pressure indices, possibly with new minimum and maximum indices
"""
if max_ind > pmid_len:
# don't overstep array bounds for full array
max_ind = max_ind - 1
if min(pedge_ind) != 0:
pedge_ind = np.append(min(pedge_ind) - 1, pedge_ind)
if max(pedge_ind) != max_ind:
pedge_ind = np.append(pedge_ind, max(pedge_ind) + 1)
return pedge_ind
def get_ind_of_pres(dataset, pres):
"""
Get index of pressure level that contains the requested pressure value.
Args:
dataset: xarray Dataset
GEOS-Chem dataset
pres: int or float
Desired pressure value
Returns:
index: int
Index of level in dataset that corresponds to requested pressure
"""
pedge, pmid, _ = get_vert_grid(dataset)
converted_dataset = convert_lev_to_pres(dataset, pmid, pedge)
return np.argmin(np.abs(converted_dataset['lev'] - pres).values)
def convert_lev_to_pres(dataset, pmid, pedge, lev_type='pmid'):
"""
Convert lev dimension to pressure in a GEOS-Chem dataset
Args:
dataset: xarray Dataset
GEOS-Chem dataset
pmid: np.array
Midpoint pressure values
pedge: np.array
Edge pressure values
lev_type (optional): str
Denote whether lev is 'pedge' or 'pmid' if grid is not 72/73 or 47/48 levels
Default value: 'pmid'
Returns:
dataset: xarray Dataset
Input dataset with "lev" dimension values replaced with pressure values
"""
if dataset.sizes["lev"] in (72, 47):
dataset["lev"] = pmid
elif dataset.sizes["lev"] in (73, 48):
dataset["lev"] = pedge
elif lev_type == 'pmid':
print('Warning: Assuming levels correspond with midpoint pressures')
dataset["lev"] = pmid
else:
dataset["lev"] = pedge
dataset["lev"].attrs["unit"] = "hPa"
dataset["lev"].attrs["long_name"] = "level pressure"
return dataset
class vert_grid:
def __init__(self, AP=None, BP=None, p_sfc=1013.25):
if (len(AP) != len(BP)) or (AP is None):
# Throw error?
print('Inconsistent vertical grid specification')
self.AP = np.array(AP)
self.BP = np.array(BP)
self.p_sfc = p_sfc
def p_edge(self):
# Calculate pressure edges using eta coordinate
return self.AP + self.BP * self.p_sfc
def p_mid(self):
p_edge = self.p_edge()
return (p_edge[1:] + p_edge[:-1]) / 2.0
# Standard vertical grids
_GEOS_72L_AP = np.array([0.000000e+00,
4.804826e-02,
6.593752e+00,
1.313480e+01,
1.961311e+01,
2.609201e+01,
3.257081e+01,
3.898201e+01,
4.533901e+01,
5.169611e+01,
5.805321e+01,
6.436264e+01,
7.062198e+01,
7.883422e+01,
8.909992e+01,
9.936521e+01,
1.091817e+02,
1.189586e+02,
1.286959e+02,
1.429100e+02,
1.562600e+02,
1.696090e+02,
1.816190e+02,
1.930970e+02,
2.032590e+02,
2.121500e+02,
2.187760e+02,
2.238980e+02,
2.243630e+02,
2.168650e+02,
2.011920e+02,
1.769300e+02,
1.503930e+02,
1.278370e+02,
1.086630e+02,
9.236572e+01,
7.851231e+01,
6.660341e+01,
5.638791e+01,
4.764391e+01,
4.017541e+01,
3.381001e+01,
2.836781e+01,
2.373041e+01,
1.979160e+01,
1.645710e+01,
1.364340e+01,
1.127690e+01,
9.292942e+00,
7.619842e+00,
6.216801e+00,
5.046801e+00,
4.076571e+00,
3.276431e+00,
2.620211e+00,
2.084970e+00,
1.650790e+00,
1.300510e+00,
1.019440e+00,
7.951341e-01,
6.167791e-01,
4.758061e-01,
3.650411e-01,
2.785261e-01,
2.113490e-01,
1.594950e-01,
1.197030e-01,
8.934502e-02,
6.600001e-02,
4.758501e-02,
3.270000e-02,
2.000000e-02,
1.000000e-02])
_GEOS_72L_BP = np.array([1.000000e+00,
9.849520e-01,
9.634060e-01,
9.418650e-01,
9.203870e-01,
8.989080e-01,
8.774290e-01,
8.560180e-01,
8.346609e-01,
8.133039e-01,
7.919469e-01,
7.706375e-01,
7.493782e-01,
7.211660e-01,
6.858999e-01,
6.506349e-01,
6.158184e-01,
5.810415e-01,
5.463042e-01,
4.945902e-01,
4.437402e-01,
3.928911e-01,
3.433811e-01,
2.944031e-01,
2.467411e-01,
2.003501e-01,
1.562241e-01,
1.136021e-01,
6.372006e-02,
2.801004e-02,
6.960025e-03,
8.175413e-09,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00,
0.000000e+00])
GEOS_72L_grid = vert_grid(_GEOS_72L_AP, _GEOS_72L_BP)
# Reduced grid
_GEOS_47L_AP = np.zeros(48)
_GEOS_47L_BP = np.zeros(48)
# Fill in the values for the surface
_GEOS_47L_AP[0] = _GEOS_72L_AP[0]
_GEOS_47L_BP[0] = _GEOS_72L_BP[0]
# Build the GEOS 72-layer to 47-layer mapping matrix at the same time
_xmat_i = np.zeros((72))
_xmat_j = np.zeros((72))
_xmat_s = np.zeros((72))
# Index here is the 1-indexed layer number
for _i_lev in range(1, 37):
# Map from 1-indexing to 0-indexing
_x_lev = _i_lev - 1
# Sparse matrix for regridding
# Below layer 37, it's 1:1
_xct = _x_lev
_xmat_i[_xct] = _x_lev
_xmat_j[_xct] = _x_lev
_xmat_s[_xct] = 1.0
# Copy over the pressure edge for the top of the grid cell
_GEOS_47L_AP[_i_lev] = _GEOS_72L_AP[_i_lev]
_GEOS_47L_BP[_i_lev] = _GEOS_72L_BP[_i_lev]
# Now deal with the lumped layers
_skip_size_vec = [2, 4]
_number_lumped = [4, 7]
# Initialize
_i_lev = 36
_i_lev_72 = 36
for _lump_seg in range(2):
_skip_size = _skip_size_vec[_lump_seg]
# 1-indexed starting point in the 47-layer grid
_first_lev_47 = _i_lev + 1
_first_lev_72 = _i_lev_72 + 1
# Loop over the coarse vertical levels (47-layer grid)
for _i_lev_offset in range(_number_lumped[_lump_seg]):
# i_lev is the index for the current level on the 47-level grid
_i_lev = _first_lev_47 + _i_lev_offset
# Map from 1-indexing to 0-indexing
_x_lev = _i_lev - 1
# Get the 1-indexed location of the last layer in the 72-layer grid
# which is below the start of the current lumping region
_i_lev_72_base = _first_lev_72 + (_i_lev_offset * _skip_size) - 1
# Get the 1-indexed location of the uppermost level in the 72-layer
# grid which is within the target layer on the 47-layer grid
_i_lev_72 = _i_lev_72_base + _skip_size
# Do the pressure edges first
# These are the 0-indexed locations of the upper edge for the
# target layers in 47- and 72-layer grids
_GEOS_47L_AP[_i_lev] = _GEOS_72L_AP[_i_lev_72]
_GEOS_47L_BP[_i_lev] = _GEOS_72L_BP[_i_lev_72]
# Get the total pressure delta across the layer on the lumped grid
# We are within the fixed pressure levels so don't need to account
# for variations in surface pressure
_dp_total = _GEOS_47L_AP[_i_lev - 1] - _GEOS_47L_AP[_i_lev]
# Now figure out the mapping
for _i_lev_offset_72 in range(_skip_size):
# Source layer in the 72 layer grid (0-indexed)
_x_lev_72 = _i_lev_72_base + _i_lev_offset_72
_xct = _x_lev_72
_xmat_i[_xct] = _x_lev_72
# Target in the 47 layer grid
_xmat_j[_xct] = _x_lev
# Proportion of 72-layer grid cell, by pressure, within expanded
# layer
_xmat_s[_xct] = (_GEOS_72L_AP[_x_lev_72] -
_GEOS_72L_AP[_x_lev_72 + 1]) / _dp_total
_start_pt = _i_lev
# Do last entry separately (no layer to go with it)
_xmat_72to47 = scipy.sparse.coo_matrix(
(_xmat_s, (_xmat_i, _xmat_j)), shape=(72, 47))
GEOS_47L_grid = vert_grid(_GEOS_47L_AP, _GEOS_47L_BP)
# CAM 26-layer grid
_CAM_26L_AP = np.flip(np.array([219.4067, 489.5209, 988.2418, 1805.201,
2983.724, 4462.334, 6160.587, 7851.243,
7731.271, 7590.131, 7424.086, 7228.744,
6998.933, 6728.574, 6410.509, 6036.322,
5596.111, 5078.225, 4468.96, 3752.191,
2908.949, 2084.739, 1334.443, 708.499,
252.136, 0., 0.]), axis=0) * 0.01
_CAM_26L_BP = np.flip(np.array([0., 0., 0., 0.,
0., 0., 0., 0.,
0.01505309, 0.03276228, 0.05359622, 0.07810627,
0.1069411, 0.14086370, 0.180772, 0.227722,
0.2829562, 0.3479364, 0.4243822, 0.5143168,
0.6201202, 0.7235355, 0.8176768, 0.8962153,
0.9534761, 0.9851122, 1.]), axis=0)
CAM_26L_grid = vert_grid(_CAM_26L_AP, _CAM_26L_BP)
def make_grid_LL(llres, in_extent=[-180, 180, -90, 90], out_extent=[]):
"""
Creates a lat/lon grid description.
Args:
llres: str
lat/lon resolution in 'latxlon' format (e.g. '4x5')
Keyword Args (optional):
in_extent: list[float, float, float, float]
Describes minimum and maximum latitude and longitude of initial grid
in the format [minlon, maxlon, minlat, maxlat]
Default value: [-180, 180, -90, 90]
out_extent: list[float, float, float, float]
Describes minimum and maximum latitude and longitude of target grid
in the format [minlon, maxlon, minlat, maxlat]. Needed when intending
to use grid to trim extent of input data
Default value: [] (assumes value of in_extent)
Returns:
llgrid: dict
dict grid description of format {'lat' : lat midpoints,
'lon' : lon midpoints,
'lat_b' : lat edges,
'lon_b' : lon edges}
"""
# get initial bounds of grid
[minlon, maxlon, minlat, maxlat] = in_extent
[dlat, dlon] = list(map(float, llres.split('x')))
lon_b = np.linspace(minlon - dlon / 2, maxlon - dlon /
2, int((maxlon - minlon) / dlon) + 1)
lat_b = np.linspace(minlat - dlat / 2, maxlat + dlat / 2,
int((maxlat - minlat) / dlat) + 2)
if minlat <= -90:
lat_b = lat_b.clip(-90, None)
if maxlat >= 90:
lat_b = lat_b.clip(None, 90)
lat = (lat_b[1:] + lat_b[:-1]) / 2
lon = (lon_b[1:] + lon_b[:-1]) / 2
# trim grid bounds when your desired extent is not the same as your
# initial grid extent
if out_extent == []:
out_extent = in_extent
if out_extent != in_extent:
[minlon, maxlon, minlat, maxlat] = out_extent
minlon_ind = np.nonzero(lon >= minlon)
maxlon_ind = np.nonzero(lon <= maxlon)
lon_inds = np.intersect1d(minlon_ind, maxlon_ind)
lon = lon[lon_inds]
# make sure to get edges of grid correctly
lon_inds = np.append(lon_inds, np.max(lon_inds) + 1)
lon_b = lon_b[lon_inds]
minlat_ind = np.nonzero(lat >= minlat)
maxlat_ind = np.nonzero(lat <= maxlat)
lat_inds = np.intersect1d(minlat_ind, maxlat_ind)
lat = lat[lat_inds]
# make sure to get edges of grid correctly
lat_inds = np.append(lat_inds, np.max(lat_inds) + 1)
lat_b = lat_b[lat_inds]
llgrid = {'lat': lat,
'lon': lon,
'lat_b': lat_b,
'lon_b': lon_b}
return llgrid
def make_grid_CS(csres):
"""
Creates a cubed-sphere grid description.
Args:
csres: int
cubed-sphere resolution of target grid
Returns:
[csgrid, csgrid_list]: list[dict, list[dict]]
csgrid is a dict of format {'lat' : lat midpoints,
'lon' : lon midpoints,
'lat_b' : lat edges,
'lon_b' : lon edges}
where each value has an extra face dimension of length 6.
csgrid_list is a list of dicts separated by face index
"""
csgrid = csgrid_GMAO(csres)
csgrid_list = [None] * 6
for i in range(6):
csgrid_list[i] = {'lat': csgrid['lat'][i],
'lon': csgrid['lon'][i],
'lat_b': csgrid['lat_b'][i],
'lon_b': csgrid['lon_b'][i]}
return [csgrid, csgrid_list]
def make_grid_SG(csres, stretch_factor, target_lon, target_lat):
"""
Creates a stretched-grid grid description.
Args:
csres: int
cubed-sphere resolution of target grid
stretch_factor: float
stretch factor of target grid
target_lon: float
target stretching longitude of target grid
target_lon: float
target stretching latitude of target grid
Returns:
[csgrid, csgrid_list]: list[dict, list[dict]]
csgrid is a dict of format {'lat' : lat midpoints,
'lon' : lon midpoints,
'lat_b' : lat edges,
'lon_b' : lon edges}
where each value has an extra face dimension of length 6.
csgrid_list is a list of dicts separated by face index
"""
csgrid = csgrid_GMAO(csres, offset=0)
csgrid_list = [None] * 6
for i in range(6):
lat = csgrid['lat'][i].flatten()
lon = csgrid['lon'][i].flatten()
lon, lat = scs_transform(
lon, lat, stretch_factor, target_lon, target_lat)
lat = lat.reshape((csres, csres))
lon = lon.reshape((csres, csres))
lat_b = csgrid['lat_b'][i].flatten()
lon_b = csgrid['lon_b'][i].flatten()
lon_b, lat_b = scs_transform(
lon_b, lat_b, stretch_factor, target_lon, target_lat)
lat_b = lat_b.reshape((csres + 1, csres + 1))
lon_b = lon_b.reshape((csres + 1, csres + 1))
csgrid_list[i] = {'lat': lat,
'lon': lon,
'lat_b': lat_b,
'lon_b': lon_b}
for i in range(6):
csgrid['lat'][i] = csgrid_list[i]['lat']
csgrid['lon'][i] = csgrid_list[i]['lon']
csgrid['lat_b'][i] = csgrid_list[i]['lat_b']
csgrid['lon_b'][i] = csgrid_list[i]['lon_b']
return [csgrid, csgrid_list]
def calc_rectilinear_lon_edge(lon_stride, center_at_180):
""" Compute longitude edge vector for a rectilinear grid.
Parameters
----------
lon_stride: float
Stride length in degrees. For example, for a standard GEOS-Chem Classic
4x5 grid, lon_stride would be 5.
center_at_180: bool
Whether or not the grid should have a cell center at 180 degrees (i.e.
on the date line). If true, the first grid cell is centered on the date
line; if false, the first grid edge is on the date line.
Returns
-------
Longitudes of cell edges in degrees East.
Notes
-----
All values are forced to be between [-180,180]. For a grid with N cells in
each band, N+1 edges will be returned, with the first and last value being
duplicates.
Examples
--------
>>> from gcpy.grid.horiz import calc_rectilinear_lon_edge
>>> calc_rectilinear_lon_edge(5.0,true)
np.array([177.5,-177.5,-172.5,...,177.5])
See Also
--------
[NONE]
"""
n_lon = np.round(360.0 / lon_stride)
lon_edge = np.linspace(-180.0, 180.0, num=n_lon + 1)
if center_at_180:
lon_edge = lon_edge - (lon_stride / 2.0)
lon_edge[lon_edge < -180.0] = lon_edge[lon_edge < -180] + 360.0
lon_edge[lon_edge > 180.0] = lon_edge[lon_edge > 180.0] - 360.0
return lon_edge
def calc_rectilinear_lat_edge(lat_stride, half_polar_grid):
""" Compute latitude edge vector for a rectilinear grid.
Parameters
----------
lat_stride: float
Stride length in degrees. For example, for a standard GEOS-Chem Classic
4x5 grid, lat_stride would be 4.
half_polar_grid: bool
Whether or not the grid should be "half-polar" (i.e. bands at poles are
half the size). In either case the grid will start and end at -/+ 90,
but when half_polar_grid is True, the first and last bands will have a
width of 1/2 the normal lat_stride.
Returns
-------
Latitudes of cell edges in degrees North.
Notes
-----
All values are forced to be between [-90,90]. For a grid with N cells in
each band, N+1 edges will be returned, with the first and last value being
duplicates.
Examples
--------
>>> from gcpy.grid.horiz import calc_rectilinear_lat_edge
>>> calc_rectilinear_lat_edge(4.0,true)
np.array([-90,-88,-84,-80,...,84,88,90])
See Also
--------
[NONE]
"""
if half_polar_grid:
start_pt = 90.0 + (lat_stride / 2.0)
else:
start_pt = 90.0
lat_edge = np.linspace(-1.0 * start_pt, start_pt,
num=1 + np.round(2.0 * start_pt / lat_stride))
# Force back onto +/- 90
lat_edge[lat_edge > 90.0] = 90.0
lat_edge[lat_edge < -90.0] = -90.0
return lat_edge
def calc_rectilinear_grid_area(lon_edge, lat_edge):
""" Compute grid cell areas (in m2) for a rectilinear grid.
Parameters
----------
#TODO
Returns
-------
#TODO
Notes
-----
#TODO
Examples
--------
#TODO
See Also
--------
[NONE]
"""
# Convert from km to m
_radius_earth_m = R_EARTH_m
lon_edge = asarray(lon_edge, dtype=float)
lat_edge = asarray(lat_edge, dtype=float)
n_lon = (lon_edge.size) - 1
n_lat = (lat_edge.size) - 1
grid_area = np.zeros((n_lat, n_lon))
sfc_area_const = 2.0 * np.pi * _radius_earth_m * _radius_earth_m
# Longitudes loop, so need to be careful
lon_delta = calc_delta_lon(lon_edge)
# Convert into weights relative to the total circle
lon_delta = lon_delta / 360.0
# Precalculate this
sin_lat_edge = np.sin(np.deg2rad(lat_edge))
for i_lat in range(0, n_lat):
sin_diff = sin_lat_edge[i_lat + 1] - sin_lat_edge[i_lat]
grid_area[i_lat, :] = sin_diff * sfc_area_const * lon_delta
return grid_area
def calc_delta_lon(lon_edge):
""" Compute grid cell longitude widths from an edge vector.
Parameters
----------
lon_edge: float
Vector of longitude edges, in degrees East.
Returns
-------
Width of each cell, degrees East
Notes
-----
Accounts for looping over the domain.
Examples
--------
#TODO
"""
n_lon = (lon_edge.size) - 1
lon_edge = asarray(lon_edge)
# Set up output array
lon_delta = np.zeros((n_lon))
offset = 0.0
next_lon = lon_edge[0]
for i_lon in range(0, n_lon):
last_lon = next_lon
next_lon = lon_edge[i_lon + 1] + offset
while next_lon < last_lon:
offset = offset + 360.0
next_lon = next_lon + 360.0
lon_delta[i_lon] = next_lon - last_lon
return lon_delta
def csgrid_GMAO(res, offset=-10):
"""
Return cubedsphere coordinates with GMAO face orientation
Parameters
----------
res: cubed-sphere Resolution
This function was originally written by <NAME> and included
in package cubedsphere: https://github.com/JiaweiZhuang/cubedsphere
"""
CS = CSGrid(res, offset=offset)
lon = CS.lon_center.transpose(2, 0, 1)
lon_b = CS.lon_edge.transpose(2, 0, 1)
lat = CS.lat_center.transpose(2, 0, 1)
lat_b = CS.lat_edge.transpose(2, 0, 1)
lon[lon < 0] += 360
lon_b[lon_b < 0] += 360
for a in [lon, lon_b, lat, lat_b]:
for tile in [0, 1, 3, 4]:
a[tile] = a[tile].T
for tile in [3, 4]:
a[tile] = np.flip(a[tile], 1)
for tile in [3, 4, 2, 5]:
a[tile] = np.flip(a[tile], 0)
a[2], a[5] = a[5].copy(), a[2].copy() # swap north&south pole
return {'lon': lon, 'lat': lat, 'lon_b': lon_b, 'lat_b': lat_b}
_INV_SQRT_3 = 1.0 / np.sqrt(3.0)
_ASIN_INV_SQRT_3 = np.arcsin(_INV_SQRT_3)
class CSGrid(object):
"""Generator for cubed-sphere grid geometries.
CSGrid computes the latitutde and longitudes of cell centers and edges
on a cubed-sphere grid, providing a way to retrieve these geometries
on-the-fly if your model output data does not include them.
Attributes
----------
{lon,lat}_center: np.ndarray
lat/lon coordinates for each cell center along the cubed-sphere mesh
{lon,lat}_edge: np.ndarray
lat/lon coordinates for the midpoint of the edges separating each
element on the cubed-sphere mesh.
xyz_{center,edge}: np.ndarray
As above, except coordinates are projected into a 3D cartesian space
with common origin to the original lat/lon coordinate system, assuming
a unit sphere.
This class was originally written by <NAME> and included
in package cubedsphere: https://github.com/JiaweiZhuang/cubedsphere
"""
def __init__(self, c, offset=None):
"""
Parameters
----------
c: int
Number edges along each cubed-sphere edge.
======= ====================
C Lat/Lon Resolution
------- --------------------
24 4 deg x 5 deg
48,45 2 deg x 2.5 deg
96,90 1 deg x 1.25 deg
192,180 0.5 deg x 0.625 deg
384,360 0.25 deg x 0.3125 deg
720 0.12g deg x 0.15625 deg
offset: float (optional)
Degrees to offset the first faces' edge in the latitudinal
direction. If not passed, then the western edge of the first face
will align with the prime meridian.
This function was originally written by <NAME> and included
in package cubedsphere: https://github.com/JiaweiZhuang/cubedsphere
"""
self.c = c
self.delta_y = 2. * _ASIN_INV_SQRT_3 / c
self.nx = self.ny = c + 1
self.offset = offset
self._initialize()
def _initialize(self):
c = self.c
nx, ny = self.nx, self.ny
lambda_rad = np.zeros((nx, ny))
lambda_rad[0, :] = 3. * np.pi / 4. # West edge
lambda_rad[-1, :] = 5. * np.pi / 4. # East edge
theta_rad = np.zeros((nx, ny))
theta_rad[0, :] = -_ASIN_INV_SQRT_3 + \
(self.delta_y * np.arange(c + 1)) # West edge
theta_rad[-1, :] = theta_rad[0, :] # East edge
# Cache the reflection points - our upper-left and lower-right corners
lonMir1, lonMir2 = lambda_rad[0, 0], lambda_rad[-1, -1]
latMir1, latMir2 = theta_rad[0, 0], theta_rad[-1, -1]
xyzMir1 = latlon_to_cartesian(lonMir1, latMir1)
xyzMir2 = latlon_to_cartesian(lonMir2, latMir2)
xyzCross = np.cross(xyzMir1, xyzMir2)
norm = np.sqrt(np.sum(xyzCross**2))
xyzCross /= norm
for i in range(1, c):
lonRef, latRef = lambda_rad[0, i], theta_rad[0, i]
xyzRef = np.asarray(latlon_to_cartesian(lonRef, latRef, ))
xyzDot = np.sum(xyzCross * xyzRef)
xyzImg = xyzRef - (2. * xyzDot * xyzCross)
xsImg, ysImg, zsImg = xyzImg
lonImg, latImg = cartesian_to_latlon(xsImg, ysImg, zsImg)
lambda_rad[i, 0] = lonImg
lambda_rad[i, -1] = lonImg
theta_rad[i, 0] = latImg
theta_rad[i, -1] = -latImg
pp = np.zeros([3, c + 1, c + 1])
# Set the four corners
# print("CORNERS")
for i, j in product([0, -1], [0, -1]):
# print(i, j)
pp[:, i, j] = latlon_to_cartesian(
lambda_rad[i, j], theta_rad[i, j])
# Map the edges on the sphere back to the cube.
#Note that all intersections are at x = -rsq3
# print("EDGES")
for ij in range(1, c + 1):
# print(ij)
pp[:, 0, ij] = latlon_to_cartesian(
lambda_rad[0, ij], theta_rad[0, ij])
pp[1, 0, ij] = -pp[1, 0, ij] * _INV_SQRT_3 / pp[0, 0, ij]
pp[2, 0, ij] = -pp[2, 0, ij] * _INV_SQRT_3 / pp[0, 0, ij]
pp[:, ij, 0] = latlon_to_cartesian(
lambda_rad[ij, 0], theta_rad[ij, 0])
pp[1, ij, 0] = -pp[1, ij, 0] * _INV_SQRT_3 / pp[0, ij, 0]
pp[2, ij, 0] = -pp[2, ij, 0] * _INV_SQRT_3 / pp[0, ij, 0]
# # Map interiors
pp[0, :, :] = -_INV_SQRT_3
# print("INTERIOR")
for i in range(1, c + 1):
for j in range(1, c + 1):
# Copy y-z face of the cube along j=1
pp[1, i, j] = pp[1, i, 0]
# Copy along i=1
pp[2, i, j] = pp[2, 0, j]
_pp = pp.copy()
llr, ttr = vec_cartesian_to_latlon(_pp[0], _pp[1], _pp[2])
lambda_rad, theta_rad = llr.copy(), ttr.copy()
# Make grid symmetrical to i = im/2 + 1
for j in range(1, c + 1):
for i in range(1, c + 1):
# print("({}, {}) -> ({}, {})".format(i, 0, i, j))
lambda_rad[i, j] = lambda_rad[i, 0]
for j in range(c + 1):
for i in range(c // 2):
isymm = c - i
# print(isymm)
avgPt = 0.5 * (lambda_rad[i, j] - lambda_rad[isymm, j])
# print(lambda_rad[i, j], lambda_rad[isymm, j], avgPt)
lambda_rad[i, j] = avgPt + np.pi
lambda_rad[isymm, j] = np.pi - avgPt
avgPt = 0.5 * (theta_rad[i, j] + theta_rad[isymm, j])
theta_rad[i, j] = avgPt
theta_rad[isymm, j] = avgPt
# Make grid symmetrical to j = im/2 + 1
for j in range(c // 2):
jsymm = c - j
for i in range(1, c + 1):
avgPt = 0.5 * (lambda_rad[i, j] + lambda_rad[i, jsymm])
lambda_rad[i, j] = avgPt
lambda_rad[i, jsymm] = avgPt
avgPt = 0.5 * (theta_rad[i, j] - theta_rad[i, jsymm])
theta_rad[i, j] = avgPt
theta_rad[i, jsymm] = -avgPt
# Final correction
lambda_rad -= np.pi
llr, ttr = lambda_rad.copy(), theta_rad.copy()
#######################################################################
# MIRROR GRIDS
#######################################################################
new_xgrid = np.zeros((c + 1, c + 1, 6))
new_ygrid = np.zeros((c + 1, c + 1, 6))
xgrid = llr.copy()
ygrid = ttr.copy()
new_xgrid[..., 0] = xgrid.copy()
new_ygrid[..., 0] = ygrid.copy()
# radius = 6370.0e3
radius = 1.
for face in range(1, 6):
for j in range(c + 1):
for i in range(c + 1):
x = xgrid[i, j]
y = ygrid[i, j]
z = radius
if face == 1:
# Rotate about z only
new_xyz = rotate_sphere_3D(x, y, z, -np.pi / 2., 'z')
elif face == 2:
# Rotate about z, then x
temp_xyz = rotate_sphere_3D(x, y, z, -np.pi / 2., 'z')
x, y, z = temp_xyz[:]
new_xyz = rotate_sphere_3D(x, y, z, np.pi / 2., 'x')
elif face == 3:
temp_xyz = rotate_sphere_3D(x, y, z, np.pi, 'z')
x, y, z = temp_xyz[:]
new_xyz = rotate_sphere_3D(x, y, z, np.pi / 2., 'x')
if ((c % 2) != 0) and (j == c // 2 - 1):
print(i, j, face)
new_xyz = (np.pi, *new_xyz)
elif face == 4:
temp_xyz = rotate_sphere_3D(x, y, z, np.pi / 2., 'z')
x, y, z = temp_xyz[:]
new_xyz = rotate_sphere_3D(x, y, z, np.pi / 2., 'y')
elif face == 5:
temp_xyz = rotate_sphere_3D(x, y, z, np.pi / 2., 'y')
x, y, z = temp_xyz[:]
new_xyz = rotate_sphere_3D(x, y, z, 0., 'z')
# print((x, y, z), "\n", new_xyz, "\n" + "--"*40)
new_x, new_y, _ = new_xyz
new_xgrid[i, j, face] = new_x
new_ygrid[i, j, face] = new_y
lon_edge, lat_edge = new_xgrid.copy(), new_ygrid.copy()
#######################################################################
# CLEANUP GRID
#######################################################################
for i, j, f in product(range(c + 1), range(c + 1), range(6)):
new_lon = lon_edge[i, j, f]
if new_lon < 0:
new_lon += 2 * np.pi
if np.abs(new_lon) < 1e-10:
new_lon = 0.
lon_edge[i, j, f] = new_lon
if np.abs(lat_edge[i, j, f]) < 1e-10:
lat_edge[i, j, f] = 0.
lon_edge_deg = np.rad2deg(lon_edge)
lat_edge_deg = np.rad2deg(lat_edge)
#######################################################################
# COMPUTE CELL CENTROIDS
#######################################################################
lon_ctr = np.zeros((c, c, 6))
lat_ctr = np.zeros((c, c, 6))
xyz_ctr = np.zeros((3, c, c, 6))
xyz_edge = np.zeros((3, c + 1, c + 1, 6))
for f in range(6):
for i in range(c):
last_x = (i == (c - 1))
for j in range(c):
last_y = (j == (c - 1))
# Get the four corners
lat_corner = [
lat_edge[i, j, f],
lat_edge[i + 1, j, f],
lat_edge[i + 1, j + 1, f],
lat_edge[i, j + 1, f]]
lon_corner = [
lon_edge[i, j, f],
lon_edge[i + 1, j, f],
lon_edge[i + 1, j + 1, f],
lon_edge[i, j + 1, f]]
# Convert from lat-lon back to cartesian
xyz_corner = np.asarray(
vec_latlon_to_cartesian(
lon_corner, lat_corner))
# Store the edge information
xyz_edge[:, i, j, f] = xyz_corner[:, 0]
if last_x:
xyz_edge[:, i + 1, j, f] = xyz_corner[:, 1]
if last_x or last_y:
xyz_edge[:, i + 1, j + 1, f] = xyz_corner[:, 2]
if last_y:
xyz_edge[:, i, j + 1, f] = xyz_corner[:, 3]
e_mid = np.sum(xyz_corner, axis=1)
e_abs = np.sqrt(np.sum(e_mid * e_mid))
if e_abs > 0:
e_mid = e_mid / e_abs
xyz_ctr[:, i, j, f] = e_mid
_lon, _lat = cartesian_to_latlon(*e_mid)
lon_ctr[i, j, f] = _lon
lat_ctr[i, j, f] = _lat
lon_ctr_deg = np.rad2deg(lon_ctr)
lat_ctr_deg = np.rad2deg(lat_ctr)
if self.offset is not None:
lon_edge_deg += self.offset
lon_ctr_deg += self.offset
#######################################################################
# CACHE
#######################################################################
self.lon_center = lon_ctr_deg
self.lat_center = lat_ctr_deg
self.lon_edge = lon_edge_deg
self.lat_edge = lat_edge_deg
self.xyz_center = xyz_ctr
self.xyz_edge = xyz_edge
def latlon_to_cartesian(lon, lat):
""" Convert latitude/longitude coordinates along the unit sphere to cartesian
coordinates defined by a vector pointing from the sphere's center to its
surface.
This function was originally written by <NAME> and included
in package cubedsphere: https://github.com/JiaweiZhuang/cubedsphere
"""
x = np.cos(lat) * np.cos(lon)
y = np.cos(lat) * np.sin(lon)
z = np.sin(lat)
return x, y, z
vec_latlon_to_cartesian = np.vectorize(latlon_to_cartesian)
def cartesian_to_latlon(x, y, z, ret_xyz=False):
""" Convert a cartesian coordinate to latitude/longitude coordinates.
Optionally return the original cartesian coordinate as a tuple.
This function was originally written by <NAME> and included
in package cubedsphere: https://github.com/JiaweiZhuang/cubedsphere
"""
xyz = np.array([x, y, z])
vector_length = np.sqrt(np.sum(xyz * xyz, axis=0))
xyz /= vector_length
x, y, z = xyz
if (np.abs(x) + np.abs(y)) < 1e-20:
lon = 0.
else:
lon = np.arctan2(y, x)
if lon < 0.:
lon += 2 * np.pi
lat = np.arcsin(z)
# If not normalizing vector, take lat = np.arcsin(z/vector_length)
if ret_xyz:
return lon, lat, xyz
else:
return lon, lat
vec_cartesian_to_latlon = np.vectorize(cartesian_to_latlon)
def spherical_to_cartesian(theta, phi, r=1):
""" Convert spherical coordinates in the form (theta, phi[, r]) to
cartesian, with the origin at the center of the original spherical
coordinate system.
This function was originally written by <NAME> and included
in package cubedsphere: https://github.com/JiaweiZhuang/cubedsphere
"""
x = r * np.cos(phi) * np.cos(theta)
y = r * np.cos(phi) * np.sin(theta)
z = r * np.sin(phi)
return x, y, z
vec_spherical_to_cartesian = np.vectorize(spherical_to_cartesian)
def cartesian_to_spherical(x, y, z):
""" Convert cartesian coordinates to spherical in the form
(theta, phi[, r]) with the origin remaining at the center of the
original spherical coordinate system.
This function was originally written by <NAME> and included
in package cubedsphere: https://github.com/JiaweiZhuang/cubedsphere
"""
r = np.sqrt(x**2 + y**2 + z**2)
#theta = np.arccos(z / r)
theta = np.arctan2(y, x)
phi = np.arctan2(z, np.sqrt(x**2 + y**2))
# if np.abs(x) < 1e-16:
# phi = np.pi
# else:
# phi = np.arctan(y / x)
return theta, phi, r
vec_cartesian_to_spherical = np.vectorize(cartesian_to_spherical)
def rotate_sphere_3D(theta, phi, r, rot_ang, rot_axis='x'):
""" Rotate a spherical coordinate in the form (theta, phi[, r])
about the indicating axis, 'rot_axis'.
This method accomplishes the rotation by projecting to a
cartesian coordinate system and performing a solid body rotation
around the requested axis.
This function was originally written by <NAME> and included
in package cubedsphere: https://github.com/JiaweiZhuang/cubedsphere
"""
cos_ang = np.cos(rot_ang)
sin_ang = np.sin(rot_ang)
x, y, z = spherical_to_cartesian(theta, phi, r)
if rot_axis == 'x':
x_new = x
y_new = cos_ang * y + sin_ang * z
z_new = -sin_ang * y + cos_ang * z
elif rot_axis == 'y':
x_new = cos_ang * x - sin_ang * z
y_new = y
z_new = sin_ang * x + cos_ang * z
elif rot_axis == 'z':
x_new = cos_ang * x + sin_ang * y
y_new = -sin_ang * x + cos_ang * y
z_new = z
theta_new, phi_new, r_new = cartesian_to_spherical(x_new, y_new, z_new)
return theta_new, phi_new, r_new
| [
"numpy.sqrt",
"numpy.array",
"numpy.arctan2",
"numpy.sin",
"numpy.arange",
"numpy.flip",
"numpy.cross",
"numpy.sort",
"itertools.product",
"numpy.asarray",
"numpy.max",
"numpy.linspace",
"numpy.min",
"numpy.rad2deg",
"numpy.round",
"numpy.abs",
"numpy.size",
"numpy.squeeze",
"num... | [((13666, 14423), 'numpy.array', 'np.array', (['[0.0, 0.04804826, 6.593752, 13.1348, 19.61311, 26.09201, 32.57081, 38.98201,\n 45.33901, 51.69611, 58.05321, 64.36264, 70.62198, 78.83422, 89.09992, \n 99.36521, 109.1817, 118.9586, 128.6959, 142.91, 156.26, 169.609, \n 181.619, 193.097, 203.259, 212.15, 218.776, 223.898, 224.363, 216.865, \n 201.192, 176.93, 150.393, 127.837, 108.663, 92.36572, 78.51231, \n 66.60341, 56.38791, 47.64391, 40.17541, 33.81001, 28.36781, 23.73041, \n 19.7916, 16.4571, 13.6434, 11.2769, 9.292942, 7.619842, 6.216801, \n 5.046801, 4.076571, 3.276431, 2.620211, 2.08497, 1.65079, 1.30051, \n 1.01944, 0.7951341, 0.6167791, 0.4758061, 0.3650411, 0.2785261, \n 0.211349, 0.159495, 0.119703, 0.08934502, 0.06600001, 0.04758501, \n 0.0327, 0.02, 0.01]'], {}), '([0.0, 0.04804826, 6.593752, 13.1348, 19.61311, 26.09201, 32.57081,\n 38.98201, 45.33901, 51.69611, 58.05321, 64.36264, 70.62198, 78.83422, \n 89.09992, 99.36521, 109.1817, 118.9586, 128.6959, 142.91, 156.26, \n 169.609, 181.619, 193.097, 203.259, 212.15, 218.776, 223.898, 224.363, \n 216.865, 201.192, 176.93, 150.393, 127.837, 108.663, 92.36572, 78.51231,\n 66.60341, 56.38791, 47.64391, 40.17541, 33.81001, 28.36781, 23.73041, \n 19.7916, 16.4571, 13.6434, 11.2769, 9.292942, 7.619842, 6.216801, \n 5.046801, 4.076571, 3.276431, 2.620211, 2.08497, 1.65079, 1.30051, \n 1.01944, 0.7951341, 0.6167791, 0.4758061, 0.3650411, 0.2785261, \n 0.211349, 0.159495, 0.119703, 0.08934502, 0.06600001, 0.04758501, \n 0.0327, 0.02, 0.01])\n', (13674, 14423), True, 'import numpy as np\n'), ((16515, 17115), 'numpy.array', 'np.array', (['[1.0, 0.984952, 0.963406, 0.941865, 0.920387, 0.898908, 0.877429, 0.856018,\n 0.8346609, 0.8133039, 0.7919469, 0.7706375, 0.7493782, 0.721166, \n 0.6858999, 0.6506349, 0.6158184, 0.5810415, 0.5463042, 0.4945902, \n 0.4437402, 0.3928911, 0.3433811, 0.2944031, 0.2467411, 0.2003501, \n 0.1562241, 0.1136021, 0.06372006, 0.02801004, 0.006960025, 8.175413e-09,\n 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, \n 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, \n 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]'], {}), '([1.0, 0.984952, 0.963406, 0.941865, 0.920387, 0.898908, 0.877429, \n 0.856018, 0.8346609, 0.8133039, 0.7919469, 0.7706375, 0.7493782, \n 0.721166, 0.6858999, 0.6506349, 0.6158184, 0.5810415, 0.5463042, \n 0.4945902, 0.4437402, 0.3928911, 0.3433811, 0.2944031, 0.2467411, \n 0.2003501, 0.1562241, 0.1136021, 0.06372006, 0.02801004, 0.006960025, \n 8.175413e-09, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, \n 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, \n 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, \n 0.0, 0.0])\n', (16523, 17115), True, 'import numpy as np\n'), ((19434, 19446), 'numpy.zeros', 'np.zeros', (['(48)'], {}), '(48)\n', (19442, 19446), True, 'import numpy as np\n'), ((19462, 19474), 'numpy.zeros', 'np.zeros', (['(48)'], {}), '(48)\n', (19470, 19474), True, 'import numpy as np\n'), ((19662, 19674), 'numpy.zeros', 'np.zeros', (['(72)'], {}), '(72)\n', (19670, 19674), True, 'import numpy as np\n'), ((19687, 19699), 'numpy.zeros', 'np.zeros', (['(72)'], {}), '(72)\n', (19695, 19699), True, 'import numpy as np\n'), ((19712, 19724), 'numpy.zeros', 'np.zeros', (['(72)'], {}), '(72)\n', (19720, 19724), True, 'import numpy as np\n'), ((34976, 34998), 'numpy.arcsin', 'np.arcsin', (['_INV_SQRT_3'], {}), '(_INV_SQRT_3)\n', (34985, 34998), True, 'import numpy as np\n'), ((47239, 47272), 'numpy.vectorize', 'np.vectorize', (['latlon_to_cartesian'], {}), '(latlon_to_cartesian)\n', (47251, 47272), True, 'import numpy as np\n'), ((48083, 48116), 'numpy.vectorize', 'np.vectorize', (['cartesian_to_latlon'], {}), '(cartesian_to_latlon)\n', (48095, 48116), True, 'import numpy as np\n'), ((48627, 48663), 'numpy.vectorize', 'np.vectorize', (['spherical_to_cartesian'], {}), '(spherical_to_cartesian)\n', (48639, 48663), True, 'import numpy as np\n'), ((49314, 49350), 'numpy.vectorize', 'np.vectorize', (['cartesian_to_spherical'], {}), '(cartesian_to_spherical)\n', (49326, 49350), True, 'import numpy as np\n'), ((23083, 23353), 'numpy.array', 'np.array', (['[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.01505309, 0.03276228, 0.05359622,\n 0.07810627, 0.1069411, 0.1408637, 0.180772, 0.227722, 0.2829562, \n 0.3479364, 0.4243822, 0.5143168, 0.6201202, 0.7235355, 0.8176768, \n 0.8962153, 0.9534761, 0.9851122, 1.0]'], {}), '([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.01505309, 0.03276228, \n 0.05359622, 0.07810627, 0.1069411, 0.1408637, 0.180772, 0.227722, \n 0.2829562, 0.3479364, 0.4243822, 0.5143168, 0.6201202, 0.7235355, \n 0.8176768, 0.8962153, 0.9534761, 0.9851122, 1.0])\n', (23091, 23353), True, 'import numpy as np\n'), ((30288, 30316), 'numpy.round', 'np.round', (['(360.0 / lon_stride)'], {}), '(360.0 / lon_stride)\n', (30296, 30316), True, 'import numpy as np\n'), ((30332, 30373), 'numpy.linspace', 'np.linspace', (['(-180.0)', '(180.0)'], {'num': '(n_lon + 1)'}), '(-180.0, 180.0, num=n_lon + 1)\n', (30343, 30373), True, 'import numpy as np\n'), ((32429, 32459), 'numpy.asarray', 'asarray', (['lon_edge'], {'dtype': 'float'}), '(lon_edge, dtype=float)\n', (32436, 32459), False, 'from numpy import asarray\n'), ((32475, 32505), 'numpy.asarray', 'asarray', (['lat_edge'], {'dtype': 'float'}), '(lat_edge, dtype=float)\n', (32482, 32505), False, 'from numpy import asarray\n'), ((32588, 32612), 'numpy.zeros', 'np.zeros', (['(n_lat, n_lon)'], {}), '((n_lat, n_lon))\n', (32596, 32612), True, 'import numpy as np\n'), ((33538, 33555), 'numpy.asarray', 'asarray', (['lon_edge'], {}), '(lon_edge)\n', (33545, 33555), False, 'from numpy import asarray\n'), ((33599, 33614), 'numpy.zeros', 'np.zeros', (['n_lon'], {}), '(n_lon)\n', (33607, 33614), True, 'import numpy as np\n'), ((34944, 34956), 'numpy.sqrt', 'np.sqrt', (['(3.0)'], {}), '(3.0)\n', (34951, 34956), True, 'import numpy as np\n'), ((47179, 47190), 'numpy.sin', 'np.sin', (['lat'], {}), '(lat)\n', (47185, 47190), True, 'import numpy as np\n'), ((47621, 47640), 'numpy.array', 'np.array', (['[x, y, z]'], {}), '([x, y, z])\n', (47629, 47640), True, 'import numpy as np\n'), ((47891, 47903), 'numpy.arcsin', 'np.arcsin', (['z'], {}), '(z)\n', (47900, 47903), True, 'import numpy as np\n'), ((49029, 49062), 'numpy.sqrt', 'np.sqrt', (['(x ** 2 + y ** 2 + z ** 2)'], {}), '(x ** 2 + y ** 2 + z ** 2)\n', (49036, 49062), True, 'import numpy as np\n'), ((49099, 49115), 'numpy.arctan2', 'np.arctan2', (['y', 'x'], {}), '(y, x)\n', (49109, 49115), True, 'import numpy as np\n'), ((49843, 49858), 'numpy.cos', 'np.cos', (['rot_ang'], {}), '(rot_ang)\n', (49849, 49858), True, 'import numpy as np\n'), ((49873, 49888), 'numpy.sin', 'np.sin', (['rot_ang'], {}), '(rot_ang)\n', (49879, 49888), True, 'import numpy as np\n'), ((1392, 1422), 'numpy.squeeze', 'np.squeeze', (["ds['Met_BXHEIGHT']"], {}), "(ds['Met_BXHEIGHT'])\n", (1402, 1422), True, 'import numpy as np\n'), ((13326, 13338), 'numpy.array', 'np.array', (['AP'], {}), '(AP)\n', (13334, 13338), True, 'import numpy as np\n'), ((13357, 13369), 'numpy.array', 'np.array', (['BP'], {}), '(BP)\n', (13365, 13369), True, 'import numpy as np\n'), ((22587, 22869), 'numpy.array', 'np.array', (['[219.4067, 489.5209, 988.2418, 1805.201, 2983.724, 4462.334, 6160.587, \n 7851.243, 7731.271, 7590.131, 7424.086, 7228.744, 6998.933, 6728.574, \n 6410.509, 6036.322, 5596.111, 5078.225, 4468.96, 3752.191, 2908.949, \n 2084.739, 1334.443, 708.499, 252.136, 0.0, 0.0]'], {}), '([219.4067, 489.5209, 988.2418, 1805.201, 2983.724, 4462.334, \n 6160.587, 7851.243, 7731.271, 7590.131, 7424.086, 7228.744, 6998.933, \n 6728.574, 6410.509, 6036.322, 5596.111, 5078.225, 4468.96, 3752.191, \n 2908.949, 2084.739, 1334.443, 708.499, 252.136, 0.0, 0.0])\n', (22595, 22869), True, 'import numpy as np\n'), ((25549, 25574), 'numpy.nonzero', 'np.nonzero', (['(lon >= minlon)'], {}), '(lon >= minlon)\n', (25559, 25574), True, 'import numpy as np\n'), ((25596, 25621), 'numpy.nonzero', 'np.nonzero', (['(lon <= maxlon)'], {}), '(lon <= maxlon)\n', (25606, 25621), True, 'import numpy as np\n'), ((25641, 25679), 'numpy.intersect1d', 'np.intersect1d', (['minlon_ind', 'maxlon_ind'], {}), '(minlon_ind, maxlon_ind)\n', (25655, 25679), True, 'import numpy as np\n'), ((25874, 25899), 'numpy.nonzero', 'np.nonzero', (['(lat >= minlat)'], {}), '(lat >= minlat)\n', (25884, 25899), True, 'import numpy as np\n'), ((25921, 25946), 'numpy.nonzero', 'np.nonzero', (['(lat <= maxlat)'], {}), '(lat <= maxlat)\n', (25931, 25946), True, 'import numpy as np\n'), ((25966, 26004), 'numpy.intersect1d', 'np.intersect1d', (['minlat_ind', 'maxlat_ind'], {}), '(minlat_ind, maxlat_ind)\n', (25980, 26004), True, 'import numpy as np\n'), ((32912, 32932), 'numpy.deg2rad', 'np.deg2rad', (['lat_edge'], {}), '(lat_edge)\n', (32922, 32932), True, 'import numpy as np\n'), ((37095, 37113), 'numpy.zeros', 'np.zeros', (['(nx, ny)'], {}), '((nx, ny))\n', (37103, 37113), True, 'import numpy as np\n'), ((37248, 37266), 'numpy.zeros', 'np.zeros', (['(nx, ny)'], {}), '((nx, ny))\n', (37256, 37266), True, 'import numpy as np\n'), ((37769, 37795), 'numpy.cross', 'np.cross', (['xyzMir1', 'xyzMir2'], {}), '(xyzMir1, xyzMir2)\n', (37777, 37795), True, 'import numpy as np\n'), ((38414, 38441), 'numpy.zeros', 'np.zeros', (['[3, c + 1, c + 1]'], {}), '([3, c + 1, c + 1])\n', (38422, 38441), True, 'import numpy as np\n'), ((38521, 38546), 'itertools.product', 'product', (['[0, -1]', '[0, -1]'], {}), '([0, -1], [0, -1])\n', (38528, 38546), False, 'from itertools import product\n'), ((41377, 41404), 'numpy.zeros', 'np.zeros', (['(c + 1, c + 1, 6)'], {}), '((c + 1, c + 1, 6))\n', (41385, 41404), True, 'import numpy as np\n'), ((41425, 41452), 'numpy.zeros', 'np.zeros', (['(c + 1, c + 1, 6)'], {}), '((c + 1, c + 1, 6))\n', (41433, 41452), True, 'import numpy as np\n'), ((44033, 44053), 'numpy.rad2deg', 'np.rad2deg', (['lon_edge'], {}), '(lon_edge)\n', (44043, 44053), True, 'import numpy as np\n'), ((44077, 44097), 'numpy.rad2deg', 'np.rad2deg', (['lat_edge'], {}), '(lat_edge)\n', (44087, 44097), True, 'import numpy as np\n'), ((44311, 44330), 'numpy.zeros', 'np.zeros', (['(c, c, 6)'], {}), '((c, c, 6))\n', (44319, 44330), True, 'import numpy as np\n'), ((44349, 44368), 'numpy.zeros', 'np.zeros', (['(c, c, 6)'], {}), '((c, c, 6))\n', (44357, 44368), True, 'import numpy as np\n'), ((44387, 44409), 'numpy.zeros', 'np.zeros', (['(3, c, c, 6)'], {}), '((3, c, c, 6))\n', (44395, 44409), True, 'import numpy as np\n'), ((44429, 44459), 'numpy.zeros', 'np.zeros', (['(3, c + 1, c + 1, 6)'], {}), '((3, c + 1, c + 1, 6))\n', (44437, 44459), True, 'import numpy as np\n'), ((46174, 46193), 'numpy.rad2deg', 'np.rad2deg', (['lon_ctr'], {}), '(lon_ctr)\n', (46184, 46193), True, 'import numpy as np\n'), ((46216, 46235), 'numpy.rad2deg', 'np.rad2deg', (['lat_ctr'], {}), '(lat_ctr)\n', (46226, 46235), True, 'import numpy as np\n'), ((47111, 47122), 'numpy.cos', 'np.cos', (['lat'], {}), '(lat)\n', (47117, 47122), True, 'import numpy as np\n'), ((47125, 47136), 'numpy.cos', 'np.cos', (['lon'], {}), '(lon)\n', (47131, 47136), True, 'import numpy as np\n'), ((47145, 47156), 'numpy.cos', 'np.cos', (['lat'], {}), '(lat)\n', (47151, 47156), True, 'import numpy as np\n'), ((47159, 47170), 'numpy.sin', 'np.sin', (['lon'], {}), '(lon)\n', (47165, 47170), True, 'import numpy as np\n'), ((47669, 47694), 'numpy.sum', 'np.sum', (['(xyz * xyz)'], {'axis': '(0)'}), '(xyz * xyz, axis=0)\n', (47675, 47694), True, 'import numpy as np\n'), ((47821, 47837), 'numpy.arctan2', 'np.arctan2', (['y', 'x'], {}), '(y, x)\n', (47831, 47837), True, 'import numpy as np\n'), ((48499, 48512), 'numpy.cos', 'np.cos', (['theta'], {}), '(theta)\n', (48505, 48512), True, 'import numpy as np\n'), ((48539, 48552), 'numpy.sin', 'np.sin', (['theta'], {}), '(theta)\n', (48545, 48552), True, 'import numpy as np\n'), ((48565, 48576), 'numpy.sin', 'np.sin', (['phi'], {}), '(phi)\n', (48571, 48576), True, 'import numpy as np\n'), ((49140, 49164), 'numpy.sqrt', 'np.sqrt', (['(x ** 2 + y ** 2)'], {}), '(x ** 2 + y ** 2)\n', (49147, 49164), True, 'import numpy as np\n'), ((4402, 4425), 'numpy.abs', 'np.abs', (['(lat[2] - lat[1])'], {}), '(lat[2] - lat[1])\n', (4408, 4425), True, 'import numpy as np\n'), ((4448, 4471), 'numpy.abs', 'np.abs', (['(lon[2] - lon[1])'], {}), '(lon[2] - lon[1])\n', (4454, 4471), True, 'import numpy as np\n'), ((11987, 12026), 'numpy.abs', 'np.abs', (["(converted_dataset['lev'] - pres)"], {}), "(converted_dataset['lev'] - pres)\n", (11993, 12026), True, 'import numpy as np\n'), ((34685, 34704), 'numpy.flip', 'np.flip', (['a[tile]', '(1)'], {}), '(a[tile], 1)\n', (34692, 34704), True, 'import numpy as np\n'), ((34761, 34780), 'numpy.flip', 'np.flip', (['a[tile]', '(0)'], {}), '(a[tile], 0)\n', (34768, 34780), True, 'import numpy as np\n'), ((37819, 37840), 'numpy.sum', 'np.sum', (['(xyzCross ** 2)'], {}), '(xyzCross ** 2)\n', (37825, 37840), True, 'import numpy as np\n'), ((38053, 38078), 'numpy.sum', 'np.sum', (['(xyzCross * xyzRef)'], {}), '(xyzCross * xyzRef)\n', (38059, 38078), True, 'import numpy as np\n'), ((47748, 47757), 'numpy.abs', 'np.abs', (['x'], {}), '(x)\n', (47754, 47757), True, 'import numpy as np\n'), ((47760, 47769), 'numpy.abs', 'np.abs', (['y'], {}), '(y)\n', (47766, 47769), True, 'import numpy as np\n'), ((48485, 48496), 'numpy.cos', 'np.cos', (['phi'], {}), '(phi)\n', (48491, 48496), True, 'import numpy as np\n'), ((48525, 48536), 'numpy.cos', 'np.cos', (['phi'], {}), '(phi)\n', (48531, 48536), True, 'import numpy as np\n'), ((3344, 3362), 'numpy.squeeze', 'np.squeeze', (['values'], {}), '(values)\n', (3354, 3362), True, 'import numpy as np\n'), ((7272, 7293), 'numpy.min', 'np.min', (["data['lon_b']"], {}), "(data['lon_b'])\n", (7278, 7293), True, 'import numpy as np\n'), ((7312, 7333), 'numpy.max', 'np.max', (["data['lon_b']"], {}), "(data['lon_b'])\n", (7318, 7333), True, 'import numpy as np\n'), ((7352, 7373), 'numpy.min', 'np.min', (["data['lat_b']"], {}), "(data['lat_b'])\n", (7358, 7373), True, 'import numpy as np\n'), ((7392, 7413), 'numpy.max', 'np.max', (["data['lat_b']"], {}), "(data['lat_b'])\n", (7398, 7413), True, 'import numpy as np\n'), ((7950, 7962), 'numpy.sort', 'np.sort', (['lat'], {}), '(lat)\n', (7957, 7962), True, 'import numpy as np\n'), ((7984, 7995), 'numpy.min', 'np.min', (['lat'], {}), '(lat)\n', (7990, 7995), True, 'import numpy as np\n'), ((8186, 8197), 'numpy.max', 'np.max', (['lat'], {}), '(lat)\n', (8192, 8197), True, 'import numpy as np\n'), ((8406, 8418), 'numpy.sort', 'np.sort', (['lon'], {}), '(lon)\n', (8413, 8418), True, 'import numpy as np\n'), ((8440, 8451), 'numpy.min', 'np.min', (['lon'], {}), '(lon)\n', (8446, 8451), True, 'import numpy as np\n'), ((25798, 25814), 'numpy.max', 'np.max', (['lon_inds'], {}), '(lon_inds)\n', (25804, 25814), True, 'import numpy as np\n'), ((26123, 26139), 'numpy.max', 'np.max', (['lat_inds'], {}), '(lat_inds)\n', (26129, 26139), True, 'import numpy as np\n'), ((31884, 31921), 'numpy.round', 'np.round', (['(2.0 * start_pt / lat_stride)'], {}), '(2.0 * start_pt / lat_stride)\n', (31892, 31921), True, 'import numpy as np\n'), ((37343, 37359), 'numpy.arange', 'np.arange', (['(c + 1)'], {}), '(c + 1)\n', (37352, 37359), True, 'import numpy as np\n'), ((43825, 43840), 'numpy.abs', 'np.abs', (['new_lon'], {}), '(new_lon)\n', (43831, 43840), True, 'import numpy as np\n'), ((43935, 43960), 'numpy.abs', 'np.abs', (['lat_edge[i, j, f]'], {}), '(lat_edge[i, j, f])\n', (43941, 43960), True, 'import numpy as np\n'), ((2430, 2449), 'numpy.array', 'np.array', (['shape[2:]'], {}), '(shape[2:])\n', (2438, 2449), True, 'import numpy as np\n'), ((2680, 2698), 'numpy.squeeze', 'np.squeeze', (['values'], {}), '(values)\n', (2690, 2698), True, 'import numpy as np\n'), ((3193, 3212), 'numpy.array', 'np.array', (['shape[1:]'], {}), '(shape[1:])\n', (3201, 3212), True, 'import numpy as np\n'), ((7474, 7493), 'numpy.min', 'np.min', (["data['lon']"], {}), "(data['lon'])\n", (7480, 7493), True, 'import numpy as np\n'), ((7512, 7531), 'numpy.max', 'np.max', (["data['lon']"], {}), "(data['lon'])\n", (7518, 7531), True, 'import numpy as np\n'), ((7550, 7569), 'numpy.min', 'np.min', (["data['lat']"], {}), "(data['lat'])\n", (7556, 7569), True, 'import numpy as np\n'), ((7588, 7607), 'numpy.max', 'np.max', (["data['lat']"], {}), "(data['lat'])\n", (7594, 7607), True, 'import numpy as np\n'), ((8473, 8484), 'numpy.max', 'np.max', (['lon'], {}), '(lon)\n', (8479, 8484), True, 'import numpy as np\n'), ((10160, 10171), 'numpy.size', 'np.size', (['AP'], {}), '(AP)\n', (10167, 10171), True, 'import numpy as np\n'), ((10633, 10651), 'numpy.max', 'np.max', (['pres_range'], {}), '(pres_range)\n', (10639, 10651), True, 'import numpy as np\n'), ((10678, 10696), 'numpy.min', 'np.min', (['pres_range'], {}), '(pres_range)\n', (10684, 10696), True, 'import numpy as np\n'), ((45787, 45813), 'numpy.sum', 'np.sum', (['xyz_corner'], {'axis': '(1)'}), '(xyz_corner, axis=1)\n', (45793, 45813), True, 'import numpy as np\n'), ((9778, 9789), 'numpy.size', 'np.size', (['AP'], {}), '(AP)\n', (9785, 9789), True, 'import numpy as np\n'), ((45850, 45871), 'numpy.sum', 'np.sum', (['(e_mid * e_mid)'], {}), '(e_mid * e_mid)\n', (45856, 45871), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-09-11 12:40
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import yats.models
class Migration(migrations.Migration):
dependencies = [
('yats', '0013_auto_20180829_0851'),
]
operations = [
migrations.AddField(
model_name='tickets',
name='hasAttachments',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='boards',
name='c_date',
field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='creation time'),
),
migrations.AlterField(
model_name='boards',
name='c_user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL, verbose_name='creator'),
),
migrations.AlterField(
model_name='organisation',
name='c_date',
field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='creation time'),
),
migrations.AlterField(
model_name='organisation',
name='c_user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL, verbose_name='creator'),
),
migrations.AlterField(
model_name='ticket_flow',
name='c_date',
field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='creation time'),
),
migrations.AlterField(
model_name='ticket_flow',
name='c_user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL, verbose_name='creator'),
),
migrations.AlterField(
model_name='ticket_flow_edges',
name='c_date',
field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='creation time'),
),
migrations.AlterField(
model_name='ticket_flow_edges',
name='c_user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL, verbose_name='creator'),
),
migrations.AlterField(
model_name='ticket_priority',
name='c_date',
field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='creation time'),
),
migrations.AlterField(
model_name='ticket_priority',
name='c_user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL, verbose_name='creator'),
),
migrations.AlterField(
model_name='ticket_resolution',
name='c_date',
field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='creation time'),
),
migrations.AlterField(
model_name='ticket_resolution',
name='c_user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL, verbose_name='creator'),
),
migrations.AlterField(
model_name='ticket_type',
name='c_date',
field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='creation time'),
),
migrations.AlterField(
model_name='ticket_type',
name='c_user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL, verbose_name='creator'),
),
migrations.AlterField(
model_name='tickets',
name='assigned',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL, verbose_name='assigned'),
),
migrations.AlterField(
model_name='tickets',
name='c_date',
field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='creation time'),
),
migrations.AlterField(
model_name='tickets',
name='c_user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL, verbose_name='creator'),
),
migrations.AlterField(
model_name='tickets',
name='caption',
field=models.CharField(max_length=255, verbose_name='caption'),
),
migrations.AlterField(
model_name='tickets',
name='close_date',
field=models.DateTimeField(null=True, verbose_name='close date'),
),
migrations.AlterField(
model_name='tickets',
name='closed',
field=models.BooleanField(default=False, verbose_name='closed'),
),
migrations.AlterField(
model_name='tickets',
name='customer',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='yats.organisation', verbose_name='organisation'),
),
migrations.AlterField(
model_name='tickets',
name='description',
field=models.TextField(verbose_name='description'),
),
migrations.AlterField(
model_name='tickets',
name='last_action_date',
field=models.DateTimeField(null=True, verbose_name='last action'),
),
migrations.AlterField(
model_name='tickets',
name='priority',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='yats.ticket_priority', verbose_name='priority'),
),
migrations.AlterField(
model_name='tickets',
name='resolution',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='yats.ticket_resolution', verbose_name='resolution'),
),
migrations.AlterField(
model_name='tickets',
name='state',
field=models.ForeignKey(blank=True, default=yats.models.get_flow_start, null=True, on_delete=django.db.models.deletion.CASCADE, to='yats.ticket_flow', verbose_name='state'),
),
migrations.AlterField(
model_name='tickets',
name='type',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='yats.ticket_type', verbose_name='type'),
),
migrations.AlterField(
model_name='tickets_comments',
name='c_date',
field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='creation time'),
),
migrations.AlterField(
model_name='tickets_comments',
name='c_user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL, verbose_name='creator'),
),
migrations.AlterField(
model_name='tickets_files',
name='c_date',
field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='creation time'),
),
migrations.AlterField(
model_name='tickets_files',
name='c_user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL, verbose_name='creator'),
),
migrations.AlterField(
model_name='tickets_history',
name='c_date',
field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='creation time'),
),
migrations.AlterField(
model_name='tickets_history',
name='c_user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL, verbose_name='creator'),
),
migrations.AlterField(
model_name='tickets_reports',
name='c_date',
field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='creation time'),
),
migrations.AlterField(
model_name='tickets_reports',
name='c_user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL, verbose_name='creator'),
),
]
| [
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.models.BooleanField",
"django.db.models.DateTimeField",
"django.db.models.CharField"
] | [((520, 554), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (539, 554), False, 'from django.db import migrations, models\n'), ((676, 766), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'django.utils.timezone.now', 'verbose_name': '"""creation time"""'}), "(default=django.utils.timezone.now, verbose_name=\n 'creation time')\n", (696, 766), False, 'from django.db import migrations, models\n'), ((883, 1021), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""+"""', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""creator"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='+', to=settings.AUTH_USER_MODEL, verbose_name='creator')\n", (900, 1021), False, 'from django.db import migrations, models\n'), ((1144, 1234), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'django.utils.timezone.now', 'verbose_name': '"""creation time"""'}), "(default=django.utils.timezone.now, verbose_name=\n 'creation time')\n", (1164, 1234), False, 'from django.db import migrations, models\n'), ((1357, 1495), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""+"""', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""creator"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='+', to=settings.AUTH_USER_MODEL, verbose_name='creator')\n", (1374, 1495), False, 'from django.db import migrations, models\n'), ((1617, 1707), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'django.utils.timezone.now', 'verbose_name': '"""creation time"""'}), "(default=django.utils.timezone.now, verbose_name=\n 'creation time')\n", (1637, 1707), False, 'from django.db import migrations, models\n'), ((1829, 1967), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""+"""', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""creator"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='+', to=settings.AUTH_USER_MODEL, verbose_name='creator')\n", (1846, 1967), False, 'from django.db import migrations, models\n'), ((2095, 2185), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'django.utils.timezone.now', 'verbose_name': '"""creation time"""'}), "(default=django.utils.timezone.now, verbose_name=\n 'creation time')\n", (2115, 2185), False, 'from django.db import migrations, models\n'), ((2313, 2451), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""+"""', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""creator"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='+', to=settings.AUTH_USER_MODEL, verbose_name='creator')\n", (2330, 2451), False, 'from django.db import migrations, models\n'), ((2577, 2667), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'django.utils.timezone.now', 'verbose_name': '"""creation time"""'}), "(default=django.utils.timezone.now, verbose_name=\n 'creation time')\n", (2597, 2667), False, 'from django.db import migrations, models\n'), ((2793, 2931), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""+"""', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""creator"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='+', to=settings.AUTH_USER_MODEL, verbose_name='creator')\n", (2810, 2931), False, 'from django.db import migrations, models\n'), ((3059, 3149), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'django.utils.timezone.now', 'verbose_name': '"""creation time"""'}), "(default=django.utils.timezone.now, verbose_name=\n 'creation time')\n", (3079, 3149), False, 'from django.db import migrations, models\n'), ((3277, 3415), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""+"""', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""creator"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='+', to=settings.AUTH_USER_MODEL, verbose_name='creator')\n", (3294, 3415), False, 'from django.db import migrations, models\n'), ((3537, 3627), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'django.utils.timezone.now', 'verbose_name': '"""creation time"""'}), "(default=django.utils.timezone.now, verbose_name=\n 'creation time')\n", (3557, 3627), False, 'from django.db import migrations, models\n'), ((3749, 3887), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""+"""', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""creator"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='+', to=settings.AUTH_USER_MODEL, verbose_name='creator')\n", (3766, 3887), False, 'from django.db import migrations, models\n'), ((4007, 4173), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""+"""', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""assigned"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL,\n verbose_name='assigned')\n", (4024, 4173), False, 'from django.db import migrations, models\n'), ((4287, 4377), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'django.utils.timezone.now', 'verbose_name': '"""creation time"""'}), "(default=django.utils.timezone.now, verbose_name=\n 'creation time')\n", (4307, 4377), False, 'from django.db import migrations, models\n'), ((4495, 4633), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""+"""', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""creator"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='+', to=settings.AUTH_USER_MODEL, verbose_name='creator')\n", (4512, 4633), False, 'from django.db import migrations, models\n'), ((4752, 4808), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'verbose_name': '"""caption"""'}), "(max_length=255, verbose_name='caption')\n", (4768, 4808), False, 'from django.db import migrations, models\n'), ((4935, 4993), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'null': '(True)', 'verbose_name': '"""close date"""'}), "(null=True, verbose_name='close date')\n", (4955, 4993), False, 'from django.db import migrations, models\n'), ((5116, 5173), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""closed"""'}), "(default=False, verbose_name='closed')\n", (5135, 5173), False, 'from django.db import migrations, models\n'), ((5298, 5418), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""yats.organisation"""', 'verbose_name': '"""organisation"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'yats.organisation', verbose_name='organisation')\n", (5315, 5418), False, 'from django.db import migrations, models\n'), ((5541, 5585), 'django.db.models.TextField', 'models.TextField', ([], {'verbose_name': '"""description"""'}), "(verbose_name='description')\n", (5557, 5585), False, 'from django.db import migrations, models\n'), ((5718, 5777), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'null': '(True)', 'verbose_name': '"""last action"""'}), "(null=True, verbose_name='last action')\n", (5738, 5777), False, 'from django.db import migrations, models\n'), ((5902, 6031), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""yats.ticket_priority"""', 'verbose_name': '"""priority"""'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n to='yats.ticket_priority', verbose_name='priority')\n", (5919, 6031), False, 'from django.db import migrations, models\n'), ((6154, 6287), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""yats.ticket_resolution"""', 'verbose_name': '"""resolution"""'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n to='yats.ticket_resolution', verbose_name='resolution')\n", (6171, 6287), False, 'from django.db import migrations, models\n'), ((6405, 6579), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'default': 'yats.models.get_flow_start', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""yats.ticket_flow"""', 'verbose_name': '"""state"""'}), "(blank=True, default=yats.models.get_flow_start, null=True,\n on_delete=django.db.models.deletion.CASCADE, to='yats.ticket_flow',\n verbose_name='state')\n", (6422, 6579), False, 'from django.db import migrations, models\n'), ((6692, 6813), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""yats.ticket_type"""', 'verbose_name': '"""type"""'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n to='yats.ticket_type', verbose_name='type')\n", (6709, 6813), False, 'from django.db import migrations, models\n'), ((6941, 7031), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'django.utils.timezone.now', 'verbose_name': '"""creation time"""'}), "(default=django.utils.timezone.now, verbose_name=\n 'creation time')\n", (6961, 7031), False, 'from django.db import migrations, models\n'), ((7158, 7296), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""+"""', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""creator"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='+', to=settings.AUTH_USER_MODEL, verbose_name='creator')\n", (7175, 7296), False, 'from django.db import migrations, models\n'), ((7420, 7510), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'django.utils.timezone.now', 'verbose_name': '"""creation time"""'}), "(default=django.utils.timezone.now, verbose_name=\n 'creation time')\n", (7440, 7510), False, 'from django.db import migrations, models\n'), ((7634, 7772), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""+"""', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""creator"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='+', to=settings.AUTH_USER_MODEL, verbose_name='creator')\n", (7651, 7772), False, 'from django.db import migrations, models\n'), ((7898, 7988), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'django.utils.timezone.now', 'verbose_name': '"""creation time"""'}), "(default=django.utils.timezone.now, verbose_name=\n 'creation time')\n", (7918, 7988), False, 'from django.db import migrations, models\n'), ((8114, 8252), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""+"""', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""creator"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='+', to=settings.AUTH_USER_MODEL, verbose_name='creator')\n", (8131, 8252), False, 'from django.db import migrations, models\n'), ((8378, 8468), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'django.utils.timezone.now', 'verbose_name': '"""creation time"""'}), "(default=django.utils.timezone.now, verbose_name=\n 'creation time')\n", (8398, 8468), False, 'from django.db import migrations, models\n'), ((8594, 8732), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""+"""', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""creator"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='+', to=settings.AUTH_USER_MODEL, verbose_name='creator')\n", (8611, 8732), False, 'from django.db import migrations, models\n')] |
"""
.. function:: queryplan(query) -> Query plan
Returns the query plan of the input query.
Examples::
>>> sql("queryplan select 5")
operation | paramone | paramtwo | databasename | triggerorview
------------------------------------------------------------------
SQLITE_SELECT | None | None | None | None
"""
import setpath
import vtbase
import functions
import apsw
registered=True
class QueryPlan(vtbase.VT):
def VTiter(self, *parsedArgs, **envars):
def authorizer(operation, paramone, paramtwo, databasename, triggerorview):
"""Called when each operation is prepared. We can return SQLITE_OK, SQLITE_DENY or
SQLITE_IGNORE"""
# find the operation name
plan.append([apsw.mapping_authorizer_function[operation], paramone, paramtwo, databasename, triggerorview])
return apsw.SQLITE_OK
def buststatementcache():
c = connection.cursor()
for i in xrange(110):
a=list(c.execute("select "+str(i)))
_, dictargs = self.full_parse(parsedArgs)
if 'query' not in dictargs:
raise functions.OperatorError(__name__.rsplit('.')[-1]," needs query argument ")
query=dictargs['query']
connection = envars['db']
plan=[]
buststatementcache()
cursor = connection.cursor()
cursor.setexectrace(lambda x,y,z:apsw.SQLITE_DENY)
connection.setauthorizer(authorizer)
cursor.execute(query)
connection.setauthorizer(None)
yield [('operation', 'text'), ('paramone', 'text'), ('paramtwo', 'text'), ('databasename', 'text'), ('triggerorview', 'text')]
for r in plan:
yield r
def destroy(self):
pass
def Source():
return vtbase.VTGenerator(QueryPlan)
if not ('.' in __name__):
"""
This is needed to be able to test the function, put it at the end of every
new function you create
"""
import sys
import setpath
from functions import *
testfunction()
if __name__ == "__main__":
reload(sys)
sys.setdefaultencoding('utf-8')
import doctest
doctest.testmod()
| [
"vtbase.VTGenerator",
"sys.setdefaultencoding",
"doctest.testmod"
] | [((1822, 1851), 'vtbase.VTGenerator', 'vtbase.VTGenerator', (['QueryPlan'], {}), '(QueryPlan)\n', (1840, 1851), False, 'import vtbase\n'), ((2142, 2173), 'sys.setdefaultencoding', 'sys.setdefaultencoding', (['"""utf-8"""'], {}), "('utf-8')\n", (2164, 2173), False, 'import sys\n'), ((2205, 2222), 'doctest.testmod', 'doctest.testmod', ([], {}), '()\n', (2220, 2222), False, 'import doctest\n')] |
"""Provide Session class."""
import logging
from time import sleep
from copy import deepcopy
from typing import TYPE_CHECKING, Dict, Optional, Tuple, Union
from urllib.parse import urljoin
from .authorizer import BaseAuthorizer
from .request_wrapper import RequestWrapper
from .retry import RetryPolicy, RateLimit
from .exceptions import (
InvalidAuthorizer,
RequestException,
BadJSON,
)
from .status_codes import (
EXCEPTION_STATUS_CODES,
NO_CONTENT,
RETRY_EXCEPTIONS,
RETRY_STATUS_CODES,
STATUS_TO_EXCEPTION_MAPPING,
SUCCESS_STATUS_CODES,
)
from .settings import TIMEOUT, BASE_URL
if TYPE_CHECKING:
from requests import Response
from .routing import Route
log = logging.getLogger(__name__)
class Session(object):
def __init__(self, authorizer: BaseAuthorizer, base_url: str = BASE_URL):
if not isinstance(authorizer, BaseAuthorizer):
raise InvalidAuthorizer(f"Invalid Authorizer: {authorizer}")
self._authorizer = authorizer
self._rate_limit = RateLimit()
self._request_wrapper = RequestWrapper(base_url=base_url)
def _log_request(self, method, url, params, data, json) -> None:
log.debug(
f"Request: {method} {url}, data: {data}, json: {json}, params: {params}"
)
def __enter__(self):
"""Context manager enter"""
return self
def __exit__(self, *_args):
"""Context manager exit"""
self.close()
def _try_request(
self,
*,
method,
url,
data,
files,
json,
params,
headers,
retry_policy,
timeout,
) -> Tuple[Optional["Response"], Optional[Exception]]:
if not headers:
headers = self._authorizer._get_auth_header()
seconds_to_sleep = self._rate_limit.seconds_to_sleep()
if seconds_to_sleep:
log.debug(f"Sleeping for {seconds_to_sleep} seconds (rate limited)")
sleep(seconds_to_sleep)
try:
response = self._request_wrapper.call(
method,
url,
data=data,
files=files,
json=json,
params=params,
headers=headers,
timeout=timeout,
)
log.debug(f"Response status: {response.status_code}")
# update the rate limit state from response headers
self._rate_limit.update_from_headers(response.headers)
return response, None
except RequestException as exception:
if not retry_policy.retries_remaining or not isinstance(
exception.original_exception, RETRY_EXCEPTIONS
):
raise
return None, exception.original_exception
def _request(
self,
*,
method,
url,
data,
files,
json,
params,
headers,
timeout,
):
retry_policy = RetryPolicy()
self._log_request(method, url, params, data, json)
while retry_policy.retries_remaining:
response, exc = self._try_request(
method=method,
url=url,
data=data,
files=files,
json=json,
params=params,
headers=headers,
retry_policy=retry_policy,
timeout=timeout,
)
if response is None or response.status_code not in RETRY_STATUS_CODES:
break
retry_policy.decrement_retries()
sleep_seconds = retry_policy.seconds_to_sleep()
if sleep_seconds > 0:
sleep(sleep_seconds)
status_code = repr(exc) if exc else response.status_code
if status_code in EXCEPTION_STATUS_CODES:
raise STATUS_TO_EXCEPTION_MAPPING[response.status_code](response)
elif status_code == NO_CONTENT:
return
elif status_code not in SUCCESS_STATUS_CODES:
raise Exception(f"Unknown status code: {status_code}")
try:
return response.json()
except ValueError:
return BadJSON(response)
def close(self):
self._request_wrapper.close()
def safe_copy_dict(self, d: dict, sort: bool = False) -> dict:
if isinstance(d, dict):
d = deepcopy(d)
if sort:
return sorted(d.items())
return d
def request(
self,
route: "Route",
data: dict = None,
files: dict = None,
json: object = None,
params: Optional[Union[str, Dict[str, Union[str, int]]]] = None,
headers: dict = None,
timeout: float = TIMEOUT,
):
"""
Return the json content from the resource at ``path``.
Args:
method: The request method such as GET, POST, PATCH, etc.
path: The path of the request to be combined with the ``base_url``
of the Requestor.
data: Dictionary, bytes, or file-like object to send in the body
of the request.
files: Dictionary, mapping ``filename`` to file-like object.
json: Object to be serialized to JSON in the body of the
request.
params: The query parameters to send with the request.
headers: Overwrite all headers for the request.
"""
params = deepcopy(params) or {}
data = self.safe_copy_dict(data, sort=True)
headers = self.safe_copy_dict(headers, sort=True)
json = self.safe_copy_dict(json)
return self._request(
method=route.method,
url=urljoin(self._request_wrapper.base_url, route.path),
data=data,
files=files,
json=json,
params=params,
headers=headers,
timeout=timeout,
)
def create_session(*, authorizer: BaseAuthorizer = None, base_url: str) -> Session:
return Session(authorizer, base_url=base_url)
| [
"logging.getLogger",
"urllib.parse.urljoin",
"time.sleep",
"copy.deepcopy"
] | [((712, 739), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (729, 739), False, 'import logging\n'), ((1989, 2012), 'time.sleep', 'sleep', (['seconds_to_sleep'], {}), '(seconds_to_sleep)\n', (1994, 2012), False, 'from time import sleep\n'), ((4419, 4430), 'copy.deepcopy', 'deepcopy', (['d'], {}), '(d)\n', (4427, 4430), False, 'from copy import deepcopy\n'), ((5499, 5515), 'copy.deepcopy', 'deepcopy', (['params'], {}), '(params)\n', (5507, 5515), False, 'from copy import deepcopy\n'), ((3735, 3755), 'time.sleep', 'sleep', (['sleep_seconds'], {}), '(sleep_seconds)\n', (3740, 3755), False, 'from time import sleep\n'), ((5753, 5804), 'urllib.parse.urljoin', 'urljoin', (['self._request_wrapper.base_url', 'route.path'], {}), '(self._request_wrapper.base_url, route.path)\n', (5760, 5804), False, 'from urllib.parse import urljoin\n')] |
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Threading utilities."""
import queue
import threading
import time
import traceback
from typing import Any, Callable, Dict, Generic, List, Optional, Set, Tuple, TypeVar
T = TypeVar("T")
U = TypeVar("U")
class ThreadCollection:
"""Represents a set of threads."""
_lock: threading.Lock
_threads: Set[threading.Thread]
_name: str
def __init__(self, name: str) -> None:
"""Init a ThreadCollection.
Args:
name: The name of the TreadCollection.
"""
self._lock = threading.Lock()
self._threads = set()
self._name = name
self._started = False
def start(self) -> None:
"""Start must be called to add threads to the collection."""
with self._lock:
self._started = True
def run(self, f: "Callable[..., None]", *args: Any, **kwargs: Any) -> None:
"""Run a thread in the ThreadCollection.
Args:
f: the function to be run within the thread. Thread terminates when the
function returns. Function can take arguments, but must return None.
*args: the arguments to the function.
**kwargs: the keyword arguments to the function.
Raises:
RuntimeError: if the thread connection is not started.
"""
with self._lock:
if not self._started:
raise RuntimeError("ThreadCollection used before starting.")
thread = threading.Thread(
target=self._run_thread,
args=(f, args, kwargs),
name="run thread " + self._name + ": " +
"".join(traceback.format_stack()))
with self._lock:
self._threads.add(thread)
thread.start()
def poll(self, wait: Callable[[float], bool], period: float,
f: "Callable[..., bool]", *args: Any, **kwargs: Any) -> None:
"""Call a function at given rate in thread in the ThreadCollection.
Args:
wait: function that waits for a time or terminates early. Is passed a
float number of seconds to wait, and must return true if the thread
should terminate.
period: the minimum period between calls to the function. Time is adjusted
based on the runtime of the function.
f: the function to be run within the thread. Thread terminates when the
function returns. Function can take arguments. If the function returns
True, the poll thread will terminate.
*args: the arguments to the function.
**kwargs: the keyword arguments to the function.
Raises:
RuntimeError: if the thread connection is not started.
"""
self.run(self._poll_thread, wait, period, f, args, kwargs)
def join(self) -> None:
"""Join all the threads within the ThreadCollection."""
while True:
thread: Optional[threading.Thread] = None
with self._lock:
for t in self._threads:
thread = t
break
if thread is None:
return
thread.join()
def _run_thread(self, f: "Callable[..., None]", args: List[Any],
kwargs: Dict[str, Any]) -> None:
"""Run a thread and then remove it from the ThreadCollection.
Args:
f: the function to be run within the thread. Thread terminates when the
function returns. Function can take arguments, but must return None.
args: the arguments to the function.
kwargs: the keyword arguments to the function. Returns
"""
try:
f(*args, **kwargs)
finally:
with self._lock:
self._threads.remove(threading.current_thread())
def _poll_thread(self, wait: Callable[[float], bool], period: float,
f: "Callable[..., bool]", args: List[Any],
kwargs: Dict[str, Any]) -> None:
"""Poll a function periodically in a TreadhCollection.
Args:
wait: function that waits for a time or terminates early. Is passed a
float number of seconds to wait, and must return true if the thread
should terminate.
period: the minimum period between calls to the function. Time is adjusted
based on the runtime of the function.
f: the function to be run within the thread. Thread terminates when the
function returns. Function can take arguments. If the function returns
True, the poll thread will terminate.
args: the arguments to the function.
kwargs: the keyword arguments to the function. Returns
"""
while True:
start = time.time()
if f(*args, **kwargs):
return
while True:
runtime = time.time() - start
delay = period - runtime
if wait(max(delay, 0)):
return
if delay <= 0:
break
def extract_all_from_queue(q: "queue.Queue[Optional[T]]") -> List[T]:
"""Return the contents of a queue.
The queue is expected to contain a series of elements, and then None as the
last element.
Args:
q: The Queue to empty.
Returns:
The list of queue objects.
"""
msgs = []
while True:
try:
msg = q.get(block=True)
if msg is None:
break
msgs.append(msg)
except queue.Empty:
continue
for _ in msgs:
q.task_done()
q.task_done()
return msgs
def queue_to_callback(q: "queue.Queue[Optional[T]]",
callback: Optional[Callable[[T], None]],
finished_callback: Callable[[], None]) -> None:
"""Convert a queue to a series of callbacks.
The queue is expected to contain a series of elements, and then None as the
last element.
Args:
q: The queue to attach the callbacks too.
callback: function called for each element of the queue.
finished_callback: function called when the queue finished. Returns
"""
while True:
msg = None
try:
msg = q.get(block=True)
except queue.Empty:
continue
if msg is None:
try:
finished_callback()
finally:
q.task_done()
return
if callback is None:
q.task_done()
else:
exception = True
try:
callback(msg)
exception = False
finally:
q.task_done()
if exception:
queue_to_callback(q, None, finished_callback)
if exception:
return
class CallbackCapturer(Generic[T]):
"""CallbackCapturer is a testing tool for a Requester.
This class is used to extract all data from a "callback" and
"finish_callback" interface, as commonly used within the requester.
"""
_queue: "queue.Queue[Optional[T]]"
def __init__(self) -> None:
"""Init CallbackCapturer."""
self._queue = queue.Queue()
def callback(self, msg: T) -> None:
"""Pass a message to the tested object as the "callback" function."""
self._queue.put(msg)
def callback_and_then_finish(self, msg: T) -> None:
"""Pass message to "callback" function and then finish."""
self.callback(msg)
self.finished_callback()
def finished_callback(self) -> None:
"""Mark the tested object as the "finished_callback" function."""
self._queue.put(None)
def callback_false(self, msg: T) -> bool:
"""Pass a message the tested object as the "callback" function.
Args:
msg: message to the callback.
Returns:
Returns False instead of None, which is required in some cases.
"""
self.callback(msg)
return False
def wait(self) -> List[T]:
"""Wait for the finished_callback() function to be called.
Returns:
all objects saved by callbacks.
"""
return extract_all_from_queue(self._queue)
class DoubleCallbackCapturer(Generic[T, U]):
"""DoubleCallbackCapturer is a testing tool for a Requester.
This class is used to extract all data from two callbacks (e.g. a "callback"
and "error_callback") and capture the data.
"""
# The actual type of _capturer is the type:
# "CallbackCapturer[Tuple[Optional[T], Optional[U]]]"
# Unfortunately, pytype does not support this data type currently, so we
# have to remove the arguments and add a type: ignore for mypy.
_capturer: "CallbackCapturer" # type: ignore
def __init__(self) -> None:
"""Init DoubleCallbackCapturer."""
self._capturer = CallbackCapturer()
def first_callback(self, msg: T) -> None:
"""Pass the first message to the tested object as the "callback" function."""
self._capturer.callback((msg, None))
def second_callback(self, msg: U) -> None:
"""Pass the second message to the tested object as the "callback" function."""
self._capturer.callback((None, msg))
def finished_callback(self) -> None:
"""Mark the tested object as the "finished_callback" function."""
self._capturer.finished_callback()
def first_callback_finish(self, msg: T) -> None:
"""Pass the first message to the tested object as the "callback" function."""
self.first_callback(msg)
self.finished_callback()
def second_callback_finish(self, msg: U) -> None:
"""Pass the second message to the tested object as the "callback" function."""
self.second_callback(msg)
self.finished_callback()
def wait(self) -> List[Tuple[Optional[T], Optional[U]]]:
"""Wait for the finished_callback() function to be called.
Returns:
all objects saved by callbacks.
"""
return self._capturer.wait()
class CallbackManager(Generic[T]):
"""Manages callbacks."""
_lock: threading.Lock
_callbacks: List[Tuple[Callable[[T], bool], Callable[[], None]]]
_closed: bool
def __init__(self) -> None:
"""Init the CallbackManager."""
self._lock = threading.Lock()
self._callbacks = []
self._closed = False
def add_callback(
self, callback: Callable[[T], bool],
finished_callback: Optional[Callable[[], None]]) -> Callable[[], None]:
"""Add a callback to the CallbackManager.
Args:
callback: A function to be called whenever there is a new message.
finished_callback: A function to be called when done.
Returns:
Cleanup function.
"""
if finished_callback is None:
tup = ((callback, lambda: None))
else:
tup = (callback, finished_callback)
with self._lock:
closed = self._closed
if not closed:
self._callbacks.append(tup)
if closed:
if finished_callback:
finished_callback()
return lambda: None
return lambda: self._remove_callback(tup)
def _remove_callback(
self, tup: Tuple[Callable[[T], bool], Callable[[], None]]) -> None:
"""Remove a callback.
Args:
tup: A tuple containing a callback and finish call back function.
"""
# TODO: Why not two arguments? pylint: disable=g-bad-todo
with self._lock:
if tup not in self._callbacks:
return
self._callbacks = [
callback for callback in self._callbacks if callback != tup
]
tup[1]()
def close(self) -> None:
"""Close a CallbackManager."""
with self._lock:
self._closed = True
cbs = self._callbacks.copy()
self._callbacks = []
for cb in cbs:
cb[1]()
def call(self, parameter: T) -> None:
"""Call the Callback manager with a single parameter."""
with self._lock:
cbs = self._callbacks.copy()
for cb in cbs:
if cb[0](parameter):
self._remove_callback(cb)
| [
"traceback.format_stack",
"threading.current_thread",
"threading.Lock",
"queue.Queue",
"time.time",
"typing.TypeVar"
] | [((751, 763), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {}), "('T')\n", (758, 763), False, 'from typing import Any, Callable, Dict, Generic, List, Optional, Set, Tuple, TypeVar\n'), ((768, 780), 'typing.TypeVar', 'TypeVar', (['"""U"""'], {}), "('U')\n", (775, 780), False, 'from typing import Any, Callable, Dict, Generic, List, Optional, Set, Tuple, TypeVar\n'), ((1071, 1087), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (1085, 1087), False, 'import threading\n'), ((7059, 7072), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (7070, 7072), False, 'import queue\n'), ((10001, 10017), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (10015, 10017), False, 'import threading\n'), ((4925, 4936), 'time.time', 'time.time', ([], {}), '()\n', (4934, 4936), False, 'import time\n'), ((3990, 4016), 'threading.current_thread', 'threading.current_thread', ([], {}), '()\n', (4014, 4016), False, 'import threading\n'), ((5017, 5028), 'time.time', 'time.time', ([], {}), '()\n', (5026, 5028), False, 'import time\n'), ((2053, 2077), 'traceback.format_stack', 'traceback.format_stack', ([], {}), '()\n', (2075, 2077), False, 'import traceback\n')] |
from discord import Client
from discord.ext import tasks
from configurationparser import ConfigurationParser
from message import Message
import asyncio, logging, random, time
logger = logging.getLogger(__name__)
HELP='''```
Usage:
&help - show this help page
&start [author_id] - start sending messages continously (from the specified user)
&stop - stop sending messages continously
&random [author_id] - send a random message once (from the specified user)
&show-config - show active configuration
```'''
class Dumas(Client):
def __init__(self, configuration):
super(Dumas, self).__init__()
self.authors = []
self.configuration = configuration
self.messages = {}
self.sending_messages = False
self.target_channel = None
async def on_ready(self):
logger.info('Dumas is ready.')
await self.read_message_history()
self.target_channel = self.get_channel(self.configuration.channel_target)
await self.send_message(self.configuration.message_welcome)
async def read_message_history(self):
start = time.time()
logger.info('Starting to read message history...')
for channel_id in self.configuration.channel_sources:
channel = self.get_channel(channel_id)
async for msg in channel.history(limit=self.configuration.message_limit):
message = Message(author_id = msg.author.id, content = msg.content)
if message.is_relevant(self.configuration):
if message.author_id not in self.authors:
self.authors.append(message.author_id)
self.messages[message.author_id] = []
self.messages[message.author_id].append(message)
logger.info('Finished reading message history!')
logger.info('Elapsed time: {} seconds'.format(time.time() - start))
async def send_message(self, message):
ZERO_WIDTH_PREFIX = '\u200B\u200D\uFEFF'
await self.target_channel.send(ZERO_WIDTH_PREFIX + str(message))
async def on_message(self, message):
logger.info('Someone sent a message.')
author_id = await self.get_author_id_from_message(message)
if author_id == self.configuration.bot_app:
await self.send_message(self.configuration.message_warning)
elif message.content.startswith('&help'):
await self.show_help()
elif message.content.startswith('&start'):
await self.send_messages_continously(author_id)
elif message.content.startswith('&stop'):
await self.stop_sending_messages_continously()
elif message.content.startswith('&random'):
await self.send_random_message(author_id)
elif message.content.startswith('&show-config'):
await self.show_configuration()
async def get_author_id_from_message(self, message):
if message.content.startswith('&'):
splitted_message = message.content.split(' ')
if len(splitted_message) == 2:
author_id = int(splitted_message[1])
return author_id
async def show_help(self):
logger.info('Showing help.')
await self.send_message(HELP)
async def send_messages_continously(self, author_id):
logger.info('Starting to send messages continously.')
self.sending_messages = True
while self.sending_messages:
await self.send_random_message(author_id)
await asyncio.sleep(self.configuration.message_frequency)
async def stop_sending_messages_continously(self):
logger.info('Stopping to send messages continously.')
self.sending_messages = False
async def send_random_message(self, author_id):
logger.info('Sending a random message.')
author_index = random.randint(0, len(self.authors) - 1)
if author_id != None:
author_index = self.authors.index(author_id)
message_index = random.randint(0, len(self.messages[self.authors[author_index]]) - 1)
await self.send_message(self.messages[self.authors[author_index]][message_index].content)
async def show_configuration(self):
logger.info('Showing configuration.')
await self.send_message('```json' + ConfigurationParser.get_json(self.configuration) + '```')
| [
"logging.getLogger",
"configurationparser.ConfigurationParser.get_json",
"message.Message",
"asyncio.sleep",
"time.time"
] | [((187, 214), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (204, 214), False, 'import asyncio, logging, random, time\n'), ((1096, 1107), 'time.time', 'time.time', ([], {}), '()\n', (1105, 1107), False, 'import asyncio, logging, random, time\n'), ((1364, 1417), 'message.Message', 'Message', ([], {'author_id': 'msg.author.id', 'content': 'msg.content'}), '(author_id=msg.author.id, content=msg.content)\n', (1371, 1417), False, 'from message import Message\n'), ((3282, 3333), 'asyncio.sleep', 'asyncio.sleep', (['self.configuration.message_frequency'], {}), '(self.configuration.message_frequency)\n', (3295, 3333), False, 'import asyncio, logging, random, time\n'), ((1789, 1800), 'time.time', 'time.time', ([], {}), '()\n', (1798, 1800), False, 'import asyncio, logging, random, time\n'), ((4018, 4066), 'configurationparser.ConfigurationParser.get_json', 'ConfigurationParser.get_json', (['self.configuration'], {}), '(self.configuration)\n', (4046, 4066), False, 'from configurationparser import ConfigurationParser\n')] |
from django.contrib import admin
from django.contrib.admin.widgets import FilteredSelectMultiple
from django.contrib.contenttypes.models import ContentType
from django.forms import ModelMultipleChoiceField
from django.utils.html import format_html
from . import models
class CustomModelMCF(ModelMultipleChoiceField):
widget = FilteredSelectMultiple("Models", False)
def label_from_instance(self, obj):
return "{0} :: {1}".format(obj.app_label, obj)
class DatabasePurgerAdmin(admin.ModelAdmin):
list_filter = ('enabled',)
list_display = ('name',) + list_filter + ('_selected_tables', 'delete_by_age', 'delete_by_quantity', 'datetime_field', 'age_in_days', 'max_records')
search_fields = list_display
fieldsets = (
(None, { 'fields': ('name', 'enabled', 'tables')}),
('Criteria', { 'fields': ('delete_by_age', 'delete_by_quantity', 'datetime_field', 'age_in_days', 'max_records')}),
)
def _selected_tables(self, obj):
return format_html(obj.selected_tables)
def formfield_for_manytomany(self, db_field, request, **kwargs):
if db_field.name == 'tables':
return CustomModelMCF(ContentType.objects.all(), **kwargs)
return super().formfield_for_manytomany(db_field, request, **kwargs)
class FilePurgerAdmin(admin.ModelAdmin):
list_filter = ('enabled',)
list_display = ('name',) + list_filter + ('file_pattern', 'directory', 'recursive_search', 'delete_by_filename', 'filename_date_year_first', 'filename_date_day_first', 'delete_by_atime', 'delete_by_mtime', 'delete_by_ctime', 'age_in_days')
search_fields = list_display
fieldsets = (
(None, { 'fields': ('name', 'enabled', 'file_pattern', 'directory', 'recursive_search')}),
('Criteria', { 'fields': ('delete_by_filename', 'filename_date_year_first', 'filename_date_day_first', 'delete_by_atime', 'delete_by_mtime', 'delete_by_ctime', 'age_in_days')}),
)
admin.site.register(models.DatabasePurger, DatabasePurgerAdmin)
admin.site.register(models.FilePurger, FilePurgerAdmin)
| [
"django.contrib.admin.widgets.FilteredSelectMultiple",
"django.contrib.admin.site.register",
"django.contrib.contenttypes.models.ContentType.objects.all",
"django.utils.html.format_html"
] | [((1945, 2008), 'django.contrib.admin.site.register', 'admin.site.register', (['models.DatabasePurger', 'DatabasePurgerAdmin'], {}), '(models.DatabasePurger, DatabasePurgerAdmin)\n', (1964, 2008), False, 'from django.contrib import admin\n'), ((2009, 2064), 'django.contrib.admin.site.register', 'admin.site.register', (['models.FilePurger', 'FilePurgerAdmin'], {}), '(models.FilePurger, FilePurgerAdmin)\n', (2028, 2064), False, 'from django.contrib import admin\n'), ((333, 372), 'django.contrib.admin.widgets.FilteredSelectMultiple', 'FilteredSelectMultiple', (['"""Models"""', '(False)'], {}), "('Models', False)\n", (355, 372), False, 'from django.contrib.admin.widgets import FilteredSelectMultiple\n'), ((994, 1026), 'django.utils.html.format_html', 'format_html', (['obj.selected_tables'], {}), '(obj.selected_tables)\n', (1005, 1026), False, 'from django.utils.html import format_html\n'), ((1169, 1194), 'django.contrib.contenttypes.models.ContentType.objects.all', 'ContentType.objects.all', ([], {}), '()\n', (1192, 1194), False, 'from django.contrib.contenttypes.models import ContentType\n')] |
#!/usr/bin/env python3
"""Creates a standard tiff image from RLENGTH data sent by Brother scanners."""
import struct
import sys
def rle_decode(data):
"""Decodes PackBits encoded data."""
i = 0
output = bytearray()
while i < len(data):
val = data[i]
i += 1
if val == 0x80:
continue
if val > 0x80:
repeats = 0x101 - val
output += data[i:i + 1] * repeats
i += 1
else:
output += data[i:i + val + 1]
i += val + 1
return output
EXIF_TAGS = {
0x100: "ImageWidth",
0x101: "ImageLength",
0x102: "BitsPerSample",
0x103: "Compression",
0x106: "PhotometricInterpretation",
0x10A: "FillOrder",
0x10D: "DocumentName",
0x10E: "ImageDescription",
0x10F: "Make",
0x110: "Model",
0x111: "StripOffsets",
0x112: "Orientation",
0x115: "SamplesPerPixel",
0x116: "RowsPerStrip",
0x117: "StripByteCounts",
0x11A: "XResolution",
0x11B: "YResolution",
0x11C: "PlanarConfiguration",
0x128: "ResolutionUnit",
0x129: "PageNumber",
0x12D: "TransferFunction",
0x131: "Software",
0x132: "DateTime",
0x13B: "Artist",
0x13E: "WhitePoint",
0x13F: "PrimaryChromaticities",
0x156: "TransferRange",
0x200: "JPEGProc",
0x201: "JPEGInterchangeFormat",
0x202: "JPEGInterchangeFormatLength",
0x211: "YCbCrCoefficients",
0x212: "YCbCrSubSampling",
0x213: "YCbCrPositioning",
0x214: "ReferenceBlackWhite",
0x828F: "BatteryLevel",
0x8298: "Copyright",
0x829A: "ExposureTime",
0x829D: "FNumber",
0x83BB: "IPTC/NAA",
0x8769: "ExifIFDPointer",
0x8773: "InterColorProfile",
0x8822: "ExposureProgram",
0x8824: "SpectralSensitivity",
0x8825: "GPSInfoIFDPointer",
0x8827: "ISOSpeedRatings",
0x8828: "OECF",
0x9000: "ExifVersion",
0x9003: "DateTimeOriginal",
0x9004: "DateTimeDigitized",
0x9101: "ComponentsConfiguration",
0x9102: "CompressedBitsPerPixel",
0x9201: "ShutterSpeedValue",
0x9202: "ApertureValue",
0x9203: "BrightnessValue",
0x9204: "ExposureBiasValue",
0x9205: "MaxApertureValue",
0x9206: "SubjectDistance",
0x9207: "MeteringMode",
0x9208: "LightSource",
0x9209: "Flash",
0x920A: "FocalLength",
0x9214: "SubjectArea",
0x927C: "MakerNote",
0x9286: "UserComment",
0x9290: "SubSecTime",
0x9291: "SubSecTimeOriginal",
0x9292: "SubSecTimeDigitized",
0xA000: "FlashPixVersion",
0xA001: "ColorSpace",
0xA002: "PixelXDimension",
0xA003: "PixelYDimension",
0xA004: "RelatedSoundFile",
0xA005: "InteroperabilityIFDPointer",
0xA20B: "FlashEnergy", # 0x920B in TIFF/EP
0xA20C: "SpatialFrequencyResponse", # 0x920C - -
0xA20E: "FocalPlaneXResolution", # 0x920E - -
0xA20F: "FocalPlaneYResolution", # 0x920F - -
0xA210: "FocalPlaneResolutionUnit", # 0x9210 - -
0xA214: "SubjectLocation", # 0x9214 - -
0xA215: "ExposureIndex", # 0x9215 - -
0xA217: "SensingMethod", # 0x9217 - -
0xA300: "FileSource",
0xA301: "SceneType",
0xA302: "CFAPattern", # 0x828E in TIFF/EP
0xA401: "CustomRendered",
0xA402: "ExposureMode",
0xA403: "WhiteBalance",
0xA404: "DigitalZoomRatio",
0xA405: "FocalLengthIn35mmFilm",
0xA406: "SceneCaptureType",
0xA407: "GainControl",
0xA408: "Contrast",
0xA409: "Saturation",
0xA40A: "Sharpness",
0xA40B: "DeviceSettingDescription",
0xA40C: "SubjectDistanceRange",
0xA420: "ImageUniqueID",
}
TIFF_TAGS = {v: k for k, v in EXIF_TAGS.items()}
SHORT = 3
INT = 4
def tiff_tag(name, datatype, *values):
"""Packs a tiff_tag header"""
n = len(values)
dt = 'I' if n == 1 else 'H'
assert n <= 2, "this stub assumes that values fit into the for byte payload"
return struct.pack('<HHI%d%s' % (n, dt), TIFF_TAGS[name], datatype, len(values), *values)
class TiffWriter(object):
def __init__(self, outfile, pages):
self.outfile = outfile
outfile.write(b'II\x2a\x00') # TIFF header
self.pages = pages
self.page = 0
self.offset = 4
def addPage(self, data, width, xdpi, ydpi):
"""Writes the page and image file directory for the page."""
width = int(width) & ~7 # round down to multiples of 8
ifd = [
tiff_tag('ImageWidth', INT, width),
tiff_tag('ImageLength', INT, len(data) * 8 // width), # compute the height based on actual data
tiff_tag('BitsPerSample', SHORT, 1),
tiff_tag('Compression', SHORT, 1), # raw data, no compression
tiff_tag('PhotometricInterpretation', SHORT, 0), # 0 is white
tiff_tag('StripOffsets', INT, self.offset + 4), # image data right after tiff header
tiff_tag('SamplesPerPixel', SHORT, 1),
tiff_tag('RowsPerStrip', INT, len(data) * 8 // width),
tiff_tag('StripByteCounts', INT, len(data)),
tiff_tag('XResolution', SHORT, int(xdpi)),
tiff_tag('YResolution', SHORT, int(ydpi)),
tiff_tag('PageNumber', SHORT, i, len(filenames)),
]
offset_ifd = self.offset + 4 + len(data)
data = struct.pack('<I', offset_ifd) + data + struct.pack('<H', len(ifd)) + b''.join(ifd)
self.offset += len(data)
self.outfile.write(data)
self.page += 1
if __name__ == '__main__':
if len(sys.argv) < 5:
print('Usage: ./%s xdpi ydpi width filenames > out.tiff' % sys.argv[0], file=sys.stderr)
exit(1)
xdpi, ydpi, width = map(int, sys.argv[1:4])
filenames = sys.argv[4:]
tiff_writer = TiffWriter(sys.stdout.buffer, pages=len(filenames))
for i, filename in enumerate(filenames):
rle_data = open(filename, 'rb').read()
tiff_writer.addPage(rle_decode(rle_data), width, xdpi, ydpi)
| [
"struct.pack"
] | [((5280, 5309), 'struct.pack', 'struct.pack', (['"""<I"""', 'offset_ifd'], {}), "('<I', offset_ifd)\n", (5291, 5309), False, 'import struct\n')] |
import cherrypy
import urllib.parse, urllib.request
import math
from polyline import decodePolyline
"""
Dispatches OSRM routing requests to backend servers
depending on the requested start and end coordinates
This is a workaround because OSRM needs large amounts of
memory for preprocessing and running. This allows to
split it in parts and still offer worldwide routing
with the exception of routes across server boundaries.
"""
def null_island():
cherrypy.response.headers["Access-Control-Allow-Headers"] = "X-Requested-With, Content-Type"
cherrypy.response.headers["Access-Control-Allow-Methods"] = "GET"
cherrypy.response.headers["Access-Control-Allow-Origin"] = "*"
cherrypy.response.headers["Content-Disposition"]= "inline; filename=\"response.json\""
cherrypy.response.headers["Content-Type"] = "application/json; charset=UTF-8"
return ('{"message":'
+ '"Welcome to Null Island. At least one point you entered is 0,0 (Which is in '
+ 'the middle of the ocean. There is only a buoy known as Null Island) which '
+ 'means that this query is not meaningful. Because this is so common, we '
+ 'don\'t answer requests for 0,0 to preserve resources.",'
+ '"code":"InvalidOptions"}').encode("UTF8")
def tile2upper_left_coordinate(z,x,y):
s = float(2**z)
lon = float(x) / s * 360. - 180.
lat = math.atan(math.sinh(math.pi - float(y) / s * 2 * math.pi)) * 180 / math.pi
return (lon, lat)
def tile2coordinates(tilestring):
try:
x, y, z = (int(i) for i in tilestring.split(','))
except:
return None
return [tile2upper_left_coordinate(z, x, y),
tile2upper_left_coordinate(z, x + 1, y + 1)]
def url2coordinates(url):
try:
coords = url.split(';')
except:
return None
try:
coords = [c.split(',') for c in coords]
except:
return None
for c in coords:
if len(c) != 2:
return None
try:
coords = [(float(c[0]), float(c[1])) for c in coords]
except:
return None
return coords
class RequestByCoordinate(object):
def contains(self, poly, testpoint):
c = False
for i in range(len(poly)):
v1 = poly[i]
v2 = poly[i-1]
if (v1[1] > testpoint[1]) != (v2[1] > testpoint[1]):
if testpoint[0] < ((v2[0]-v1[0])
* (testpoint[1] - v1[1]) / (v2[1] - v1[1]) + v1[0]):
c = not c
return c
@cherrypy.expose
def default(self, *query, **kwargs):
if len(query) < 1:
raise cherrypy.HTTPError(404)
mode = query[0]
if mode not in cherrypy.request.app.config["modes"]["modes"]:
raise cherrypy.HTTPError(404)
filepart = query[-1]
if len(filepart) > 13 and filepart[:5] == "tile(":
#debug tile requet
coords = tile2coordinates(filepart[5:-5])
elif filepart[:9] == "polyline(":
#poly line encoded coordinates
coords = decodePolyline(filepart[9:-1])
if coords is not None and not all(map(lambda coord: coord[0] or coord[1], coords)):
return null_island()
else:
#semicolon delimited coordinate pairs (lon,lat;...)
coords = url2coordinates(filepart)
if coords is not None and not all(map(lambda coord: coord[0] or coord[1], coords)):
return null_island()
serverset = cherrypy.request.app.config[mode]["servers"]
servers = dict();
defaultserver = next(iter(serverset))
for server in serverset:
poly = cherrypy.request.app.config[mode]["polygon_" + server]
url = cherrypy.request.app.config[mode]["url_" + server]
if poly is None:
defaultserver = server
servers[server] = (url, poly)
useserver = defaultserver
if coords is not None:
inside = {k : 0 for k in servers}
for coord in coords:
nonefound = True
for server, sdata in list(servers.items()):
poly = sdata[1]
if poly is not None and self.contains(poly, coord):
inside[server] += 1
nonefound = False
break
if nonefound:
inside[defaultserver] += 1
useserver = max(inside, key=inside.get)
useserver = servers[useserver][0]
requesturl = useserver + '/' + '/'.join(query[1:])
if cherrypy.request.query_string:
requesturl += '?' + cherrypy.request.query_string
try:
response = urllib.request.urlopen(requesturl)
except urllib.error.HTTPError as e:
cherrypy.response.status = e.code
response = e
except:
raise cherrypy.HTTPError(500, "Routing backend is not reachable")
fetchedheaders = response.info()
for i in list(cherrypy.response.headers.keys()):
if i not in fetchedheaders:
del cherrypy.response.headers[i]
for i in fetchedheaders:
cherrypy.response.headers[i] = fetchedheaders[i]
return response
if __name__ == '__main__':
cherrypy.quickstart(RequestByCoordinate(), "/", "settings.cfg")
else:
# Setup WSGI stuff
if cherrypy.__version__.startswith('3.0') and cherrypy.engine.state == 0:
cherrypy.engine.start(blocking=False)
atexit.register(cherrypy.engine.stop)
cherrypy.config.update('settings.cfg')
application = cherrypy.Application(RequestByCoordinate(), script_name=None,
config='settings.cfg')
| [
"cherrypy.config.update",
"cherrypy.response.headers.keys",
"polyline.decodePolyline",
"cherrypy.engine.start",
"cherrypy.__version__.startswith",
"cherrypy.HTTPError"
] | [((5592, 5630), 'cherrypy.config.update', 'cherrypy.config.update', (['"""settings.cfg"""'], {}), "('settings.cfg')\n", (5614, 5630), False, 'import cherrypy\n'), ((5424, 5462), 'cherrypy.__version__.startswith', 'cherrypy.__version__.startswith', (['"""3.0"""'], {}), "('3.0')\n", (5455, 5462), False, 'import cherrypy\n'), ((5503, 5540), 'cherrypy.engine.start', 'cherrypy.engine.start', ([], {'blocking': '(False)'}), '(blocking=False)\n', (5524, 5540), False, 'import cherrypy\n'), ((2621, 2644), 'cherrypy.HTTPError', 'cherrypy.HTTPError', (['(404)'], {}), '(404)\n', (2639, 2644), False, 'import cherrypy\n'), ((2757, 2780), 'cherrypy.HTTPError', 'cherrypy.HTTPError', (['(404)'], {}), '(404)\n', (2775, 2780), False, 'import cherrypy\n'), ((5047, 5079), 'cherrypy.response.headers.keys', 'cherrypy.response.headers.keys', ([], {}), '()\n', (5077, 5079), False, 'import cherrypy\n'), ((3062, 3092), 'polyline.decodePolyline', 'decodePolyline', (['filepart[9:-1]'], {}), '(filepart[9:-1])\n', (3076, 3092), False, 'from polyline import decodePolyline\n'), ((4924, 4983), 'cherrypy.HTTPError', 'cherrypy.HTTPError', (['(500)', '"""Routing backend is not reachable"""'], {}), "(500, 'Routing backend is not reachable')\n", (4942, 4983), False, 'import cherrypy\n')] |
"""scrapli.transport.plugins.asyncssh.transport"""
import asyncio
from dataclasses import dataclass
from typing import Optional
from asyncssh import connect
from asyncssh.connection import SSHClientConnection
from asyncssh.misc import ConnectionLost, PermissionDenied
from asyncssh.stream import SSHReader, SSHWriter
from scrapli.decorators import TransportTimeout
from scrapli.exceptions import (
ScrapliAuthenticationFailed,
ScrapliConnectionError,
ScrapliConnectionNotOpened,
)
from scrapli.ssh_config import SSHKnownHosts
from scrapli.transport.base import AsyncTransport, BasePluginTransportArgs, BaseTransportArgs
@dataclass()
class PluginTransportArgs(BasePluginTransportArgs):
auth_username: str
auth_password: str = ""
auth_private_key: str = ""
auth_strict_key: bool = True
ssh_config_file: str = ""
ssh_known_hosts_file: str = ""
class AsyncsshTransport(AsyncTransport):
def __init__(
self, base_transport_args: BaseTransportArgs, plugin_transport_args: PluginTransportArgs
) -> None:
super().__init__(base_transport_args=base_transport_args)
self.plugin_transport_args = plugin_transport_args
self.session: Optional[SSHClientConnection] = None
self.stdout: Optional[SSHReader] = None
self.stdin: Optional[SSHWriter] = None
def _verify_key(self) -> None:
"""
Verify target host public key, raise exception if invalid/unknown
Args:
N/A
Returns:
None
Raises:
ScrapliAuthenticationFailed: if host is not in known hosts
"""
known_hosts = SSHKnownHosts(self.plugin_transport_args.ssh_known_hosts_file)
if self._base_transport_args.host not in known_hosts.hosts.keys():
raise ScrapliAuthenticationFailed(
f"{self._base_transport_args.host} not in known_hosts!"
)
def _verify_key_value(self) -> None:
"""
Verify target host public key, raise exception if invalid/unknown
Args:
N/A
Returns:
None
Raises:
ScrapliConnectionNotOpened: if session is unopened/None
ScrapliAuthenticationFailed: if host is in known hosts but public key does not match
"""
if not self.session:
raise ScrapliConnectionNotOpened
known_hosts = SSHKnownHosts(self.plugin_transport_args.ssh_known_hosts_file)
remote_server_key = self.session.get_server_host_key()
remote_public_key = remote_server_key.export_public_key().split()[1].decode()
if known_hosts.hosts[self._base_transport_args.host]["public_key"] != remote_public_key:
raise ScrapliAuthenticationFailed(
f"{self._base_transport_args.host} in known_hosts but public key does not match!"
)
async def open(self) -> None:
self._pre_open_closing_log(closing=False)
if self.plugin_transport_args.auth_strict_key:
self.logger.debug(
f"Attempting to validate {self._base_transport_args.host} public key is in known "
f"hosts"
)
self._verify_key()
# we already fetched host/port/user from the user input and/or the ssh config file, so we
# want to use those explicitly. likewise we pass config file we already found. set known
# hosts and agent to None so we can not have an agent and deal w/ known hosts ourselves
common_args = {
"host": self._base_transport_args.host,
"port": self._base_transport_args.port,
"username": self.plugin_transport_args.auth_username,
"known_hosts": None,
"agent_path": None,
"config": self.plugin_transport_args.ssh_config_file,
}
try:
self.session = await asyncio.wait_for(
connect(
client_keys=self.plugin_transport_args.auth_private_key,
password=self.plugin_transport_args.auth_password,
preferred_auth=(
"publickey",
"keyboard-interactive",
"password",
),
**common_args,
),
timeout=self._base_transport_args.timeout_socket,
)
except PermissionDenied as exc:
msg = "all authentication methods failed"
self.logger.critical(msg)
raise ScrapliAuthenticationFailed(msg) from exc
except asyncio.TimeoutError as exc:
msg = "timed out opening connection to device"
self.logger.critical(msg)
raise ScrapliAuthenticationFailed(msg) from exc
if not self.session:
raise ScrapliConnectionNotOpened
if self.plugin_transport_args.auth_strict_key:
self.logger.debug(
f"Attempting to validate {self._base_transport_args.host} public key is in known "
f"hosts and is valid"
)
self._verify_key_value()
self.stdin, self.stdout, _ = await self.session.open_session(
term_type="xterm", encoding=None
)
self._post_open_closing_log(closing=False)
def close(self) -> None:
self._pre_open_closing_log(closing=True)
if self.session:
try:
self.session.close()
except BrokenPipeError:
# it seems it is possible for the connection transport is_closing() to be true
# already in some cases... since we are closing the connection anyway we will just
# ignore this note that this seemed to only happen in github actions on
# ubuntu-latest w/ py3.8...
pass
# always reset session/stdin/stdout back to None if we are closing!
self.session = None
self.stdin = None
self.stdout = None
self._post_open_closing_log(closing=True)
def isalive(self) -> bool:
if not self.session:
return False
# this may need to be revisited in the future, but this seems to be a good check for
# aliveness
try:
if (
self.session._auth_complete # pylint: disable=W0212
and self.session._transport.is_closing() is False # pylint: disable=W0212
):
return True
except AttributeError:
pass
return False
@TransportTimeout("timed out reading from transport")
async def read(self) -> bytes:
if not self.stdout:
raise ScrapliConnectionNotOpened
try:
buf: bytes = await self.stdout.read(65535)
except ConnectionLost as exc:
msg = (
"encountered EOF reading from transport; typically means the device closed the "
"connection"
)
self.logger.critical(msg)
raise ScrapliConnectionError(msg) from exc
return buf
def write(self, channel_input: bytes) -> None:
if not self.stdin:
raise ScrapliConnectionNotOpened
self.stdin.write(channel_input)
| [
"scrapli.exceptions.ScrapliConnectionError",
"scrapli.decorators.TransportTimeout",
"asyncssh.connect",
"scrapli.exceptions.ScrapliAuthenticationFailed",
"scrapli.ssh_config.SSHKnownHosts",
"dataclasses.dataclass"
] | [((637, 648), 'dataclasses.dataclass', 'dataclass', ([], {}), '()\n', (646, 648), False, 'from dataclasses import dataclass\n'), ((6577, 6629), 'scrapli.decorators.TransportTimeout', 'TransportTimeout', (['"""timed out reading from transport"""'], {}), "('timed out reading from transport')\n", (6593, 6629), False, 'from scrapli.decorators import TransportTimeout\n'), ((1645, 1707), 'scrapli.ssh_config.SSHKnownHosts', 'SSHKnownHosts', (['self.plugin_transport_args.ssh_known_hosts_file'], {}), '(self.plugin_transport_args.ssh_known_hosts_file)\n', (1658, 1707), False, 'from scrapli.ssh_config import SSHKnownHosts\n'), ((2403, 2465), 'scrapli.ssh_config.SSHKnownHosts', 'SSHKnownHosts', (['self.plugin_transport_args.ssh_known_hosts_file'], {}), '(self.plugin_transport_args.ssh_known_hosts_file)\n', (2416, 2465), False, 'from scrapli.ssh_config import SSHKnownHosts\n'), ((1802, 1891), 'scrapli.exceptions.ScrapliAuthenticationFailed', 'ScrapliAuthenticationFailed', (['f"""{self._base_transport_args.host} not in known_hosts!"""'], {}), "(\n f'{self._base_transport_args.host} not in known_hosts!')\n", (1829, 1891), False, 'from scrapli.exceptions import ScrapliAuthenticationFailed, ScrapliConnectionError, ScrapliConnectionNotOpened\n'), ((2732, 2852), 'scrapli.exceptions.ScrapliAuthenticationFailed', 'ScrapliAuthenticationFailed', (['f"""{self._base_transport_args.host} in known_hosts but public key does not match!"""'], {}), "(\n f'{self._base_transport_args.host} in known_hosts but public key does not match!'\n )\n", (2759, 2852), False, 'from scrapli.exceptions import ScrapliAuthenticationFailed, ScrapliConnectionError, ScrapliConnectionNotOpened\n'), ((4544, 4576), 'scrapli.exceptions.ScrapliAuthenticationFailed', 'ScrapliAuthenticationFailed', (['msg'], {}), '(msg)\n', (4571, 4576), False, 'from scrapli.exceptions import ScrapliAuthenticationFailed, ScrapliConnectionError, ScrapliConnectionNotOpened\n'), ((4745, 4777), 'scrapli.exceptions.ScrapliAuthenticationFailed', 'ScrapliAuthenticationFailed', (['msg'], {}), '(msg)\n', (4772, 4777), False, 'from scrapli.exceptions import ScrapliAuthenticationFailed, ScrapliConnectionError, ScrapliConnectionNotOpened\n'), ((7061, 7088), 'scrapli.exceptions.ScrapliConnectionError', 'ScrapliConnectionError', (['msg'], {}), '(msg)\n', (7083, 7088), False, 'from scrapli.exceptions import ScrapliAuthenticationFailed, ScrapliConnectionError, ScrapliConnectionNotOpened\n'), ((3922, 4127), 'asyncssh.connect', 'connect', ([], {'client_keys': 'self.plugin_transport_args.auth_private_key', 'password': 'self.plugin_transport_args.auth_password', 'preferred_auth': "('publickey', 'keyboard-interactive', 'password')"}), "(client_keys=self.plugin_transport_args.auth_private_key, password=\n self.plugin_transport_args.auth_password, preferred_auth=('publickey',\n 'keyboard-interactive', 'password'), **common_args)\n", (3929, 4127), False, 'from asyncssh import connect\n')] |
import utils.config as config
from utils.words import prensetage_of_real_words_in_list
from utils.random import random_subtitution_string
from utils.search import is_known_part_in_text
from string import ascii_lowercase
from tqdm import tqdm
def get_array_key_from_string(string):
return { ascii_lowercase[i]: string[i] for i in range(len(string)) }
def get_letters_diffs(word):
if len(word) == 1:
return [0]
return [ord(word[i + 1]) - ord(word[i]) for i in range(len(word)-1)]
def brute_force_for_known_part(cipher_text, known_part):
for i in tqdm(range(config.MAX_SUBTITUTION_RETRIES)):
curr_array_key = random_subtitution_string()
decrypted = decrypt(cipher_text, curr_array_key, False)
if known_part in decrypted[0]:
return decrypted
curr_array_key = random_subtitution_string()
return decrypt(cipher_text, curr_array_key, False)
def super_bruteforce(cipher_text, known_part=None):
for i in tqdm(range(config.MAX_SUBTITUTION_RETRIES)):
curr_array_key = random_subtitution_string()
decrypted = decrypt(cipher_text, curr_array_key, False)
plain_text = decrypted[0]
required_presentage_of_real_words = config.REQUIRED_PRESENTAGE_OF_REAL_WORDS_FOR_SENTENSE_TO_BE_REAL
# in case of finding the known part, the presentage should be smaller
if known_part and is_known_part_in_text(known_part, plain_text):
required_presentage_of_real_words /= 5
if prensetage_of_real_words_in_list(plain_text.split(' ')) >= required_presentage_of_real_words:
decrypted[2] = True
return decrypted
curr_array_key = random_subtitution_string()
return decrypt(cipher_text, curr_array_key, False)
def decrypt(cipher_text, key, special_bruteforce_mode, known_part=None):
array_key = get_array_key_from_string(key)
if special_bruteforce_mode:
return super_bruteforce(cipher_text, known_part)
if known_part:
return brute_force_for_known_part(cipher_text, known_part)
return [''.join([ array_key[x] if x in array_key else x for x in cipher_text ]), array_key, False] | [
"utils.search.is_known_part_in_text",
"utils.random.random_subtitution_string"
] | [((825, 852), 'utils.random.random_subtitution_string', 'random_subtitution_string', ([], {}), '()\n', (850, 852), False, 'from utils.random import random_subtitution_string\n'), ((1671, 1698), 'utils.random.random_subtitution_string', 'random_subtitution_string', ([], {}), '()\n', (1696, 1698), False, 'from utils.random import random_subtitution_string\n'), ((643, 670), 'utils.random.random_subtitution_string', 'random_subtitution_string', ([], {}), '()\n', (668, 670), False, 'from utils.random import random_subtitution_string\n'), ((1044, 1071), 'utils.random.random_subtitution_string', 'random_subtitution_string', ([], {}), '()\n', (1069, 1071), False, 'from utils.random import random_subtitution_string\n'), ((1384, 1429), 'utils.search.is_known_part_in_text', 'is_known_part_in_text', (['known_part', 'plain_text'], {}), '(known_part, plain_text)\n', (1405, 1429), False, 'from utils.search import is_known_part_in_text\n')] |
from sqlalchemy.ext.declarative import as_declarative, declared_attr
from pastetape.db.session import engine
@as_declarative()
class Base:
__name__: str
@declared_attr
def __tablename__(cls) -> str:
return cls.__name__.lower()
def initialize_db() -> None:
"""
Initializes the database tables.
Using Alembic package for migrations is preferred,
however for simplicity we are using this approach.
"""
from pastetape.models.paste import Paste # noqa
Base.metadata.create_all(engine, Base.metadata.tables.values(), checkfirst=True) # type: ignore
| [
"sqlalchemy.ext.declarative.as_declarative"
] | [((113, 129), 'sqlalchemy.ext.declarative.as_declarative', 'as_declarative', ([], {}), '()\n', (127, 129), False, 'from sqlalchemy.ext.declarative import as_declarative, declared_attr\n')] |
#!/usr/bin/python
# author: qsh
from django.urls import path, re_path
from . import views_old,views1,views
from . import views,user,roles
app_name = 'users'
urlpatterns = [
# http://ip:8000/
path("", views.IndexView.as_view(), name='index'),
# http://ip:8000/login/
path("login/", views.LoginView.as_view(), name='login'),
path("logout/", views.LogoutView.as_view(), name='logout'),
path("list/", user.userlist, name='user_list'),
path('userlist/', user.UserListView.as_view(), name = 'user_list'),
path('grouplist/',roles.GroupListView.as_view(), name='group_list'),
path('powerlist/', roles.PowerListView.as_view(), name='power_list'),
# <pk> 既英语单词主键 <Primary key> --> 搜索索引(userid)
re_path('userdetail/(?P<pk>[0-9]+)?/$', user.UserDetailView.as_view(), name='user_detail'),
path('modifypasswd/', user.ModifyPwdView.as_view(), name='modify_pwd'),
re_path('usergrouppower/(?P<pk>[0-9]+)?/$', user.UserGroupPowerView.as_view(), name='user_group_power'),
# http://ip:8000/logout/
#path("logout/", views2.LogoutView.as_view(), name='logout'),
]
| [
"django.urls.path"
] | [((409, 455), 'django.urls.path', 'path', (['"""list/"""', 'user.userlist'], {'name': '"""user_list"""'}), "('list/', user.userlist, name='user_list')\n", (413, 455), False, 'from django.urls import path, re_path\n')] |
from generativepy.drawing import makeImage
from generativepy.color import Color
def draw(ctx, width, height, frame_no, frame_count):
ctx.set_source_rgba(*Color(1).get_rgba())
ctx.paint()
for i in range(200):
for j in range(200):
ctx.set_source_rgba(*Color(i/200, j/200, 0).get_rgba())
ctx.rectangle(i + 50, j + 50, 1, 1)
ctx.fill()
for i in range(200):
for j in range(200):
ctx.set_source_rgba(*Color(i/200, 0, j/200).get_rgba())
ctx.rectangle(i + 50, j + 300, 1, 1)
ctx.fill()
for i in range(200):
for j in range(200):
ctx.set_source_rgba(*Color(0, i/200, j/200).get_rgba())
ctx.rectangle(i + 50, j + 550, 1, 1)
ctx.fill()
makeImage("/tmp/rgbcolor.png", draw, 300, 800)
| [
"generativepy.color.Color",
"generativepy.drawing.makeImage"
] | [((783, 829), 'generativepy.drawing.makeImage', 'makeImage', (['"""/tmp/rgbcolor.png"""', 'draw', '(300)', '(800)'], {}), "('/tmp/rgbcolor.png', draw, 300, 800)\n", (792, 829), False, 'from generativepy.drawing import makeImage\n'), ((160, 168), 'generativepy.color.Color', 'Color', (['(1)'], {}), '(1)\n', (165, 168), False, 'from generativepy.color import Color\n'), ((285, 311), 'generativepy.color.Color', 'Color', (['(i / 200)', '(j / 200)', '(0)'], {}), '(i / 200, j / 200, 0)\n', (290, 311), False, 'from generativepy.color import Color\n'), ((479, 505), 'generativepy.color.Color', 'Color', (['(i / 200)', '(0)', '(j / 200)'], {}), '(i / 200, 0, j / 200)\n', (484, 505), False, 'from generativepy.color import Color\n'), ((674, 700), 'generativepy.color.Color', 'Color', (['(0)', '(i / 200)', '(j / 200)'], {}), '(0, i / 200, j / 200)\n', (679, 700), False, 'from generativepy.color import Color\n')] |
import appdaemon.plugins.hass.hassapi as hass
import datetime
import pytz
import requests
import json
class SyncGTasksAndGrocy(hass.Hass):
tl_name = 'Corvées'
tl_id = None
tl_main = None
debug = False
gr_cl = None
service = None
tl_lastup = None
google_oauth_tasks = None
grocyapi = None
def initialize(self):
if 'DEBUG' in self.args:
self.debug = self.args["DEBUG"]
self.tl_name = self.args['chores_list']
self.google_oauth_tasks = self.get_app("google_oauth_tasks")
self.grocyapi = self.get_app("grocy_api")
self.handle = self.run_every(self.sync_cb , datetime.datetime.now() , 300)
def terminate(self):
tl_id = None
tl_main = None
gr_cl = None
service = None
tl_lastup = None
google_oauth_tasks = None
grocyapi = None
def init_tasklist(self):
self.google_oauth_tasks.build_service()
# Call the Tasks API
all_tl = self.google_oauth_tasks.service.tasklists().list().execute()
if not all_tl['items']:
self.log('No task lists found.' , level = 'ERROR')
return
else:
if self.debug:
self.log('Task lists:')
for task_list in all_tl['items']:
if self.debug:
self.log(u'{0} ({1})'.format(task_list['title'], task_list['id']))
if task_list['title'] == self.tl_name:
self.tl_id = task_list['id']
if self.debug:
self.log('Task list found, name : {} , id : {}' .format(task_list['title'], self.tl_id))
break
else:
self.log("No list found")
def get_tasklist(self):
tl = self.google_oauth_tasks.service.tasklists().get(tasklist=self.tl_id).execute()
if self.debug:
self.log(tl)
self.tl_lastup = self.rfc3339_to_utc(tl['updated'])
tasklist = self.google_oauth_tasks.service.tasks().list(tasklist=self.tl_id, showCompleted= True, showHidden= True).execute()
if self.debug:
self.log(tasklist)
return tasklist
def sync(self):
self.init_tasklist()
chores_list = self.grocyapi.get_chores()
task_list = self.get_tasklist()
for chore in chores_list:
has_lastc = False
track_date_only = False
if chore['next_estimated_execution_time'] is None:
if self.debug:
self.log("No next date, skipping this chore : " + chore['chore_id'])
continue
chore_due_d = pytz.utc.localize(datetime.datetime.strptime(chore['next_estimated_execution_time'] , '%Y-%m-%d %H:%M:%S' )).date()
if chore['track_date_only'] == "1":
track_date_only = True
c = self.grocyapi.get_chore(chore['chore_id'])
if self.debug:
self.log( 'Chore due date : {}' .format(chore_due_d))
self.log( 'Gtasks list last update : {}' .format(self.tl_lastup))
self.log(c)
if chore['last_tracked_time'] is not None:
chore_lastc_d = pytz.utc.localize(datetime.datetime.strptime(chore['last_tracked_time'] , '%Y-%m-%d %H:%M:%S' ))
has_lastc = True
if self.debug:
self.log('Chore last done : {}' .format(chore_lastc_d))
if track_date_only:
chore_lastc_d = chore_lastc_d.date()
if 'items' not in task_list:
if self.debug:
self.log("List empty, add it to gtasks")
self.add_task(c['chore']['name'],chore_due_d)
continue
for task in task_list['items']:
if self.debug:
self.log(task)
task_due_d = self.rfc3339_to_utc(task['due']).date()
if self.debug:
self.log(task['title'] + " " + c['chore']['name'])
self.log('Task date : {} ' .format(task_due_d))
if task['title'] == c['chore']['name'] and task_due_d == chore_due_d:
if self.debug:
self.log("Task found with status : " + task['status'] )
if task['status'] == "completed":
if self.debug:
self.log(task['title'] + "is completed")
task_c_d = self.rfc3339_to_utc(task['completed'])
if has_lastc:
if self.debug:
self.log(task_c_d)
self.log(task_c_d.date())
if track_date_only:
if task_c_d.date() < chore_lastc_d:
if self.debug:
self.log("Waiting for gtask clear")
break
else:
if task_c_d < chore_lastc_d:
if self.debug:
self.log("Waiting for gtask clear")
break
else:
if self.debug:
self.log("First completion : ")
self.grocyapi.complete_chore(chore['chore_id'],task_c_d)
self.google_oauth_tasks.service.tasks().delete(tasklist=self.tl_id , task=task['id']).execute()
break
else:
if self.debug:
self.log("Task " + task['title'] + " is waiting for action")
break
else:
if self.debug:
self.log("No task found, add it to gtasks")
self.add_task(c['chore']['name'],chore_due_d)
continue
def sync_cb(self , kwargs):
self.sync()
def d_to_rstr(self, date):
if self.debug:
self.log("___function___")
self.log(date)
str = date.strftime('%Y-%m-%dT%H:%M:%S.0000Z')
return str
def rfc3339_to_utc(self, date_rfc3339):
if self.debug:
self.log("___function___")
self.log(date_rfc3339)
date = pytz.utc.localize(datetime.datetime.strptime(date_rfc3339 , '%Y-%m-%dT%H:%M:%S.%fZ' ))
return date
def add_task(self, name, due_date):
str_date = self.d_to_rstr(due_date)
task = {
'title': name ,
'due': str_date
}
if self.debug:
self.log(task)
return self.google_oauth_tasks.service.tasks().insert(tasklist=self.tl_id , body=task).execute() | [
"datetime.datetime.strptime",
"datetime.datetime.now"
] | [((661, 684), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (682, 684), False, 'import datetime\n'), ((6579, 6644), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['date_rfc3339', '"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {}), "(date_rfc3339, '%Y-%m-%dT%H:%M:%S.%fZ')\n", (6605, 6644), False, 'import datetime\n'), ((3322, 3397), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (["chore['last_tracked_time']", '"""%Y-%m-%d %H:%M:%S"""'], {}), "(chore['last_tracked_time'], '%Y-%m-%d %H:%M:%S')\n", (3348, 3397), False, 'import datetime\n'), ((2753, 2844), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (["chore['next_estimated_execution_time']", '"""%Y-%m-%d %H:%M:%S"""'], {}), "(chore['next_estimated_execution_time'],\n '%Y-%m-%d %H:%M:%S')\n", (2779, 2844), False, 'import datetime\n')] |
#!/usr/bin/env python
usage = """Code to filter traces in the directory given a frequency band and file wildcard.
[-fl][-fh][-f][-t] where:
-fl = miniumum frequency value (e.g. 0.05)
-fh = maxiumum frequency value (e.g. 1.0)
-f = filename wildcard (e.g. '*SAC')
-t = type of filtering (e.g. bandpass)
"""
import obspy
import numpy as np
import argparse
# get the arguments from the terminal
parser = argparse.ArgumentParser(description='Preprocessing script for data retrieved from obspy DMT')
parser.add_argument("-f","--file_wildcard", help="Enter the file to be normalised (e.g. *BHR*)", type=str, required=True, action="store", default = "*SAC")
parser.add_argument("-fl","--lower_frequency", help="Enter the lower frequency you want the analysis to be conducted over", type=float, required=True, action="store", default = "0.1")
parser.add_argument("-fh","--upper_frequency", help="Enter the upper frequency you want the analysis to be conducted over", type=float, required=True, action="store", default = "0.4")
parser.add_argument("-t","--filter_type", help="Enter the type of filtering you want to do", type=str, required=True, action="store", default = "bandpass")
args = parser.parse_args()
file_names = args.file_wildcard
flow=args.lower_frequency
fhigh=args.upper_frequency
type=args.filter_type
st = obspy.read(file_names)
# filter the stream
st_filtered = st.filter(type, freqmin=flow, freqmax=fhigh)
for i,tr in enumerate(st_filtered):
# get information about the trace and rename it
network=tr.stats.network
station=tr.stats.station
tr.write("%s_%s_filtered.SAC" %(network,station), format="SAC")
| [
"obspy.read",
"argparse.ArgumentParser"
] | [((405, 503), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Preprocessing script for data retrieved from obspy DMT"""'}), "(description=\n 'Preprocessing script for data retrieved from obspy DMT')\n", (428, 503), False, 'import argparse\n'), ((1326, 1348), 'obspy.read', 'obspy.read', (['file_names'], {}), '(file_names)\n', (1336, 1348), False, 'import obspy\n')] |
#! /usr/bin/env python
"""
InstrumentData Class -- defines data format, wavelength info, mask geometry
Instruments/masks supported:
NIRISS AMI
GPI, VISIR, NIRC2 removed - too much changed for the JWST NIRISS class
"""
# Standard Imports
import numpy as np
from astropy.io import fits
import os, sys, time
import copy
# Module imports
import synphot
# import stsynphot
# mask geometries, GPI, NIRISS, VISIR supported...
from nrm_analysis.misctools.mask_definitions import NRM_mask_definitions
from nrm_analysis.misctools import utils
from nrm_analysis.misctools import lpl_ianc
um = 1.0e-6
# utility routines for InstrumentData classes
def show_cvsupport_threshold(instr):
""" Show threshold for where 'splodge' data in CV space contains signal """
print("InstrumentData: ", "cvsupport_threshold is: ", instr.cvsupport_threshold)
print("InstrumentData: ", instr.cvsupport_threshold)
def set_cvsupport_threshold(instr, k, v):
""" Set threshold for where 'splodge' data in CV space contains signal
Parameters
----------
instr: InstrumentData instance
thresh: Threshold for the absolute value of the FT(interferogram).
Normalize abs(CV = FT(a)) for unity peak, and define the support
of "good" CV when this is above threshold
"""
instr.cvsupport_threshold[k] = v
print("InstrumentData: ", "New cvsupport_threshold is: ", instr.cvsupport_threshold)
class NIRISS:
def __init__(self, filt,
objname="obj",
src='A0V',
chooseholes=None,
affine2d=None,
bandpass=None,
nbadpix=4,
usebp=True,
firstfew=None,
nspecbin=None,
**kwargs):
"""
Initialize NIRISS class
ARGUMENTS:
kwargs:
UTR
Or just look at the file structure
Either user has webbpsf and filter file can be read, or...
chooseholes: None, or e.g. ['B2', 'B4', 'B5', 'B6'] for a four-hole mask
filt: Filter name string like "F480M"
bandpass: None or [(wt,wlen),(wt,wlen),...]. Monochromatic would be e.g. [(1.0, 4.3e-6)]
Explicit bandpass arg will replace *all* niriss filter-specific variables with
the given bandpass (src, nspecbin, filt), so you can simulate 21cm psfs through
something called "F430M". Can also be synphot.spectrum.SourceSpectrum object.
firstfew: None or the number of slices to truncate input cube to in memory,
the latter for fast developmpent
nbadpix: Number of good pixels to use when fixing bad pixels DEPRECATED
usebp: Convert to usedq during initialization
Internally this is changed to sellf.usedq = usebp immediately for code clarity
True (default) do not use DQ with DO_NOT_USE flag in input MAST data when
fitting data with model. False: Assume no bad pixels in input
noise: standard deviation of noise added to perfect images to enable candid
plots without crashing on np.inf limits! Image assumed to be in (np.float64) dn.
Suggested noise: 1e-6.
src: source spectral type string e.g. "A0V" OR user-defined synphot.spectrum.SourceSpectrum object
nspecbin: Number of wavelength bins to use across the bandpass. Replaces deprecated `usespecbin` which
set **number of wavelengths to into each bin**, not nbins.
"""
self.verbose = False
if "verbose" in kwargs:
self.verbose = kwargs["verbose"]
self.noise = None
if "noise" in kwargs:
self.noise = kwargs["noise"]
if "usespecbin" in kwargs: # compatability with previous arg
# but not really, usespecbin was binning factor, not number of bins
nspecbin = kwargs["usespecbin"]
# change how many wavelength bins will be used across the bandpass
if nspecbin is None:
nspecbin = 19
self.lam_bin = nspecbin
# src can be either a spectral type string or a user-defined synphot spectrum object
if isinstance(src, synphot.spectrum.SourceSpectrum):
print("Using user-defined synphot SourceSpectrum")
if chooseholes:
print("InstrumentData.NIRISS: ", chooseholes)
self.chooseholes = chooseholes
# USEBP is USEDQ in the rest of code - use
self.usedq = usebp
print("Fitting omits bad pixels (identified by DO_NOT_USE value in the DQ extension)")
self.jwst_dqflags() # creates dicts self.bpval, self.bpgroup
# self.bpexist set True/False if DQ fits image extension exists/doesn't
self.firstfew = firstfew
if firstfew is not None: print("InstrumentData.NIRISS: analysing firstfew={:d} slices".format(firstfew))
self.objname = objname
self.filt = filt
if bandpass is not None:
print("InstrumentData.NIRISS: OVERRIDING BANDPASS WITH USER-SUPPLIED VALUES.")
print("\t src, filt, nspecbin parameters will not be used")
# check type of bandpass. can be synphot spectrum
# if so, get throughput and wavelength arrays
if isinstance(bandpass, synphot.spectrum.SpectralElement):
wl, wt = bandpass._get_arrays(bandpass.waveset)
self.throughput = np.array((wt,wl)).T
else:
self.throughput = np.array(bandpass) # type simplification
else:
filt_spec = utils.get_filt_spec(self.filt)
src_spec = utils.get_src_spec(src)
# **NOTE**: As of WebbPSF version 1.0.0 filter is trimmed to where throughput is 10% of peak
# For consistency with WebbPSF simultions, use trim=0.1
self.throughput = utils.combine_src_filt(filt_spec,
src_spec,
trim=0.01,
nlambda=nspecbin,
verbose=self.verbose,
plot=False)
self.lam_c, self.lam_w = utils.get_cw_beta(self.throughput)
if self.verbose: print("InstrumentData.NIRISS: ", self.filt,
": central wavelength {:.4e} microns, ".format(self.lam_c/um), end="")
if self.verbose: print("InstrumentData.NIRISS: ", "fractional bandpass {:.3f}".format(self.lam_w))
self.wls = [self.throughput,]
if self.verbose: print("self.throughput:\n", self.throughput)
# Wavelength info for NIRISS bands F277W, F380M, F430M, or F480M
self.wavextension = ([self.lam_c,], [self.lam_w,])
self.nwav=1 # these are 'slices' if the data is pure imaging integrations -
# nwav is old nomenclature from GPI IFU data. Refactor one day...
#############################
# only one NRM on JWST:
self.telname = "JWST"
self.instrument = "NIRISS"
self.arrname = "jwst_g7s6c" # implaneia mask set with this - unify to short form later
self.holeshape="hex"
self.mask = NRM_mask_definitions(maskname=self.arrname, chooseholes=chooseholes,
holeshape=self.holeshape )
# save affine deformation of pupil object or create a no-deformation object.
# We apply this when sampling the PSF, not to the pupil geometry.
# This will set a default Ideal or a measured rotation, for example,
# and include pixel scale changes due to pupil distortion.
# Separating detector tilt pixel scale effects from pupil distortion effects is
# yet to be determined... see comments in Affine class definition.
# AS AZG 2018 08 15 <NAME>
if affine2d is None:
self.affine2d = utils.Affine2d(mx=1.0,my=1.0,
sx=0.0,sy=0.0,
xo=0.0,yo=0.0, name="Ideal")
else:
self.affine2d = affine2d
# finding centroid from phase slope only considered cv_phase data
# when cv_abs data exceeds this cvsupport_threshold.
# Absolute value of cv data normalized to unity maximum
# for the threshold application.
# Data reduction gurus: tweak the threshold value with experience...
# Gurus: tweak cvsupport with use...
self.cvsupport_threshold = {"F277W":0.02, "F380M": 0.02, "F430M": 0.02, "F480M": 0.02}
if self.verbose: show_cvsupport_threshold(self)
self.threshold = self.cvsupport_threshold[filt]
def set_pscale(self, pscalex_deg=None, pscaley_deg=None):
"""
Override pixel scale in header
"""
if pscalex_deg is not None:
self.pscalex_deg = pscalex_deg
if pscaley_deg is not None:
self.pscaley_deg = pscaley_deg
self.pscale_mas = 0.5 * (pscalex_deg + pscaley_deg) * (60*60*1000)
self.pscale_rad = utils.mas2rad(self.pscale_mas)
def read_data(self, fn, mode="slice"):
# mode options are slice or UTR
# for single slice data, need to read as 3D (1, npix, npix)
# for utr data, need to read as 3D (ngroup, npix, npix)
# fix bad pixels using DQ extension and LPL local averaging,
# but send bad pixel array down to where fringes are fit so they can be ignored.
# For perfectly noiseless data we add GFaussian zero mean self.noise std dev
# to imagge data. Then std devs don't cause plot crashes with limits problems.
with fits.open(fn, memmap=False, do_not_scale_image_data=True) as fitsfile:
# use context manager, memmap=False, deepcopy to avoid memory leaks
scidata = copy.deepcopy(fitsfile[1].data)
if self.noise is not None: scidata += np.random.normal(0, self.noise, scidata.shape)
# usually DQ ext in MAST file... make it non-fatal for DQ to be missing
try:
bpdata=copy.deepcopy(fitsfile['DQ'].data).astype(np.uint32) # bad pixel extension, forced to uint32
self.bpexist = True
dqmask = bpdata & self.bpval["DO_NOT_USE"] == self.bpval["DO_NOT_USE"] #
del bpdata # free memory
# True => driver wants to omit using pixels with dqflag raised in fit,
if self.usedq == True:
print('InstrumentData.NIRISS.read_data: will not use flagged DQ pixels in fit')
except Exception as e:
print('InstrumentData.NIRISS.read_data: raised exception', e)
self.bpexist = False
dqmask = np.zeros(scidata.shape, dtype=np.uint32) # so it doesn't break if issues with DQ data
if scidata.ndim == 3: #len(scidata.shape)==3:
print("read_data() input: 3D cube")
# Truncate all but the first few slices od data and DQ array for rapid development
if self.firstfew is not None:
if scidata.shape[0] > self.firstfew:
scidata = scidata[:self.firstfew, :, :]
dqmask = dqmask[:self.firstfew, :, :]
# 'nwav' name (historical) is actually number of data slices in the 3Dimage cube
self.nwav=scidata.shape[0]
[self.wls.append(self.wls[0]) for f in range(self.nwav-1)]
elif len(scidata.shape)==2: # 'cast' 2d array to 3d with shape[0]=1
print("'InstrumentData.NIRISS.read_data: 2D data array converting to 3D one-slice cube")
scidata = np.array([scidata,])
dqmask = np.array([dqmask,])
else:
sys.exit("InstrumentData.NIRISS.read_data: invalid data dimensions for NIRISS. \nShould have dimensionality of 2 or 3.")
# refpix removal by trimming
scidata = scidata[:,4:, :] # [all slices, imaxis[0], imaxis[1]]
print('\tRefpix-trimmed scidata:', scidata.shape)
#### fix pix using bad pixel map - runs now. Need to sanity-check.
if self.bpexist:
# refpix removal by trimming to match image trim
dqmask = dqmask[:,4:, :] # dqmask bool array to match image trimmed shape
print('\tRefpix-trimmed dqmask: ', dqmask.shape)
prihdr=fitsfile[0].header
scihdr=fitsfile[1].header
# MAST header or similar kwds info for oifits writer:
self.updatewithheaderinfo(prihdr, scihdr)
# Directory name into which to write txt observables & optional fits diagnostic files
# The input fits image or cube of images file rootname is used to create the output
# text&fits dir, using the data file's root name as the directory name: for example,
# /abc/.../imdir/xyz_calints.fits results in a directory /abc/.../imdir/xyz_calints/
self.rootfn = fn.split('/')[-1].replace('.fits', '')
return prihdr, scihdr, scidata, dqmask
def cdmatrix_to_sky(self, vec, cd11, cd12, cd21, cd22):
""" use the global header values explicitly, for clarity
vec is 2d, units of pixels
cdij 4 scalars, conceptually 2x2 array in units degrees/pixel
"""
return np.array((cd11*vec[0] + cd12*vec[1], cd21*vec[0] + cd22*vec[1]))
def degrees_per_pixel(self, hdr):
"""
input: hdr: fits data file's header with or without CDELT1, CDELT2 (degrees per pixel)
returns: cdelt1, cdelt2: tuple, degrees per pixel along axes 1, 2
EITHER: read from header CDELT[12] keywords
OR: calculated using CD matrix (Jacobian of RA-TAN, DEC-TAN degrees
to pixel directions 1,2. No deformation included in this routine,
but the CD matric includes non-linear field distortion.
No pupil distortion or rotation here.
MISSING: If keywords are missing default hardcoded cdelts are returned.
The exact algorithm may substitute this later.
Below seems good to ~5th significant figure when compared to
cdelts header values prior to replacement by cd matrix approach.
N.D. at stsci 11 Mar 20212
We start in Level 1 with the PC matrix and CDELT.
CDELTs come from the SIAF.
The PC matrix is computed from the roll angle, V3YANG and the parity.
The code is here
https://github.com/spacetelescope/jwst/blob/master/jwst/assign_wcs/util.py#L153
In the level 2 imaging pipeline, assign_wcs adds the distortion to the files.
At the end it computes an approximation of the entire distortion transformation
by fitting a polynomial. This approximated distortion is represented as SIP
polynomials in the FITS headers.
Because SIP, by definition, uses a CD matrix, the PC + CDELT are replaced by CD.
How to get CDELTs back?
I think once the rotation, skew and scale are in the CD matrix it's very hard to
disentangle them. The best way IMO is to calculate the local scale using three
point difference. There is a function in jwst that does this.
Using a NIRISS image as an example:
from jwst.assign_wcs import util
from jwst import datamodels
im=datamodels.open('niriss_image_assign_wcs.fits')
util.compute_scale(im.meta.wcs, (im.meta.wcsinfo.ra_ref, im.meta.wcsinfo.dec_ref))
1.823336635353374e-05
The function returns a constant scale. Is this sufficient for what you need or
do you need scales and sheer along each axis? The code in util.compute_scale can
help with figuring out how to get scales along each axis.
I hope this answers your question.
"""
if 'CD1_1' in hdr.keys() and 'CD1_2' in hdr.keys() and \
'CD2_1' in hdr.keys() and 'CD2_2' in hdr.keys():
cd11 = hdr['CD1_1']
cd12 = hdr['CD1_2']
cd21 = hdr['CD2_1']
cd22 = hdr['CD2_2']
# Create unit vectors in detector pixel X and Y directions, units: detector pixels
dxpix = np.array((1.0, 0.0)) # axis 1 step
dypix = np.array((0.0, 1.0)) # axis 2 step
# transform pixel x and y steps to RA-tan, Dec-tan degrees
dxsky = self.cdmatrix_to_sky(dxpix, cd11, cd12, cd21, cd22)
dysky = self.cdmatrix_to_sky(dypix, cd11, cd12, cd21, cd22)
print("Used CD matrix for pixel scales")
return np.linalg.norm(dxsky, ord=2), np.linalg.norm(dysky, ord=2)
elif 'CDELT1' in hdr.keys() and 'CDELT2' in hdr.keys():
return hdr['CDELT1'], hdr['CDELT2']
print("Used CDDELT[12] for pixel scales")
else:
print('InstrumentData.NIRISS: Warning: NIRISS pixel scales not in header. Using 65.6 mas in deg/pix')
return 65.6/(60.0*60.0*1000), 65.6/(60.0*60.0*1000)
def updatewithheaderinfo(self, ph, sh):
""" input: primary header, science header MAST"""
# The info4oif_dict will get pickled to disk when we write txt files of results.
# That way we don't drag in objects like InstrumentData into code that reads text results
# and writes oifits files - a simple built-in dictionary is the only object used in this transfer.
info4oif_dict = {}
info4oif_dict['telname'] = self.telname
info4oif_dict['filt'] = self.filt
info4oif_dict['lam_c'] = self.lam_c
info4oif_dict['lam_w'] = self.lam_w
info4oif_dict['lam_bin'] = self.lam_bin
# Target information - 5/21 targname UNKNOWN in nis019 rehearsal data
# Name in the proposal always non-trivial, targname still UNKNOWN...:
if ph["TARGNAME"] == 'UNKNOWN': objname = ph['TARGPROP']
else: objname = ph['TARGNAME'] # allegedly apt name for archive, standard form
#
# if target name has confusing-to-astroquery dash
self.objname = objname.replace('-', ' '); info4oif_dict['objname'] = self.objname
# AB Dor, ab dor, AB DOR, ab dor are all acceptable.
#
self.ra = ph["TARG_RA"]; info4oif_dict['ra'] = self.ra
self.dec = ph["TARG_DEC"]; info4oif_dict['dec'] = self.dec
# / axis 1 DS9 coordinate of the reference pixel (always POS1)
# / axis 2 DS9 coordinate of the reference pixel (always POS1)
self.crpix1 = sh["CRPIX1"]; info4oif_dict['crpix1'] = self.crpix1
self.crpix2 = sh["CRPIX2"]; info4oif_dict['crpix2'] = self.crpix2
# need <NAME>'s table for actual crval[1,2] for true pointing to detector pixel coords (DS9)
self.instrument = ph["INSTRUME"]; info4oif_dict['instrument'] = self.instrument
self.pupil = ph["PUPIL"]; info4oif_dict['pupil'] = self.pupil
# "ImPlaneIA internal mask name" - oifwriter looks for 'mask'...
self.arrname = "jwst_g7s6c" # implaneia internal name - historical
info4oif_dict['arrname'] = 'g7s6' # for oif
info4oif_dict['mask'] = info4oif_dict['arrname'] # Soulain mask goes into oif arrname
# if data was generated on the average pixel scale of the header
# then this is the right value that gets read in, and used in fringe fitting
pscalex_deg, pscaley_deg = self.degrees_per_pixel(sh)
#
info4oif_dict['pscalex_deg'] = pscalex_deg
info4oif_dict['pscaley_deg'] = pscaley_deg
# Whatever we did set is averaged for isotropic pixel scale here
self.pscale_mas = 0.5 * (pscalex_deg + pscaley_deg) * (60*60*1000); \
info4oif_dict['pscale_mas'] = self.pscale_mas
self.pscale_rad = utils.mas2rad(self.pscale_mas); info4oif_dict['pscale_rad'] = self.pscale_rad
self.mask = NRM_mask_definitions(maskname=self.arrname, chooseholes=self.chooseholes,
holeshape=self.holeshape) # for STAtions x y in oifs
self.date = ph["DATE-OBS"] + "T" + ph["TIME-OBS"]; info4oif_dict['date'] = self.date
datestr = ph["DATE-OBS"]
self.year = datestr[:4]; info4oif_dict['year'] = self.year
self.month = datestr[5:7]; info4oif_dict['month'] = self.month
self.day = datestr[8:10]; info4oif_dict['day'] = self.day
self.parangh= sh["ROLL_REF"]; info4oif_dict['parangh'] = self.parangh
self.pa = sh["PA_V3"]; info4oif_dict['pa'] = self.pa
self.vparity = sh["VPARITY"]; info4oif_dict['vparity'] = self.vparity
# An INTegration is NGROUPS "frames", not relevant here but context info.
# 2d => "cal" file combines all INTegrations (ramps)
# 3d=> "calints" file is a cube of all INTegrations (ramps)
if sh["NAXIS"] == 2:
# all INTegrations or 'ramps'
self.itime = ph["EFFINTTM"] * ph["NINTS"]; info4oif_dict['itime'] = self.itime
elif sh["NAXIS"] == 3:
# each slice is one INTegration or 'ramp'
self.itime = ph["EFFINTTM"]; info4oif_dict['itime'] = self.itime
np.set_printoptions(precision=5, suppress=True, linewidth=160,
formatter={'float': lambda x: "%10.5f," % x})
self.v3i_yang = sh['V3I_YANG'] # Angle from V3 axis to Ideal y axis (deg)
# rotate mask hole center coords by PAV3 # RAC 2021
ctrs_sky = self.mast2sky()
oifctrs = np.zeros(self.mask.ctrs.shape)
oifctrs[:,0] = ctrs_sky[:,1].copy() * -1
oifctrs[:,1] = ctrs_sky[:,0].copy() * -1
info4oif_dict['ctrs_eqt'] = oifctrs # mask centers rotated by PAV3 (equatorial coords)
info4oif_dict['ctrs_inst'] = self.mask.ctrs # as-built instrument mask centers
info4oif_dict['hdia'] = self.mask.hdia
info4oif_dict['nslices'] = self.nwav # nwav: number of image slices or IFU cube slices - AMI is imager
self.info4oif_dict = info4oif_dict # save it when writing extracted observables txt
# rather than calling InstrumentData in the niriss example just to reset just call this routine
def reset_nwav(self, nwav):
print("InstrumentData.NIRISS: ", "Resetting InstrumentData instantiation's nwave to", nwav)
self.nwav = nwav
def jwst_dqflags(self):
"""
dqdata is a 2d (32-bit U?)INT array from the DQ extension of the input file.
We ignore all data with a non-zero DQ flag. I copied all values from a 7.5 build jwst...
but we ignore any non-zero flag meaning, and ignore the pixel in fringe-fitting
The refpix are non-zero DQ, btw...
I changed "pixel" to self.pbval and "group" to self.bpgroup. We may use these later,
so here they are but initially we just discriminate between good (zero value) and non-good.
"""
""" JWST Data Quality Flags
The definitions are documented in the JWST RTD:
https://jwst-pipeline.readthedocs.io/en/latest/jwst/references_general/references_general.html#data-quality-flags
"""
""" JWST Data Quality Flags
The definitions are documented in the JWST RTD:
https://jwst-pipeline.readthedocs.io/en/latest/jwst/references_general/references_general.html#data-quality-flags
Implementation
-------------
The flags are implemented as "bit flags": Each flag is assigned a bit position
in a byte, or multi-byte word, of memory. If that bit is set, the flag assigned
to that bit is interpreted as being set or active.
The data structure that stores bit flags is just the standard Python `int`,
which provides 32 bits. Bits of an integer are most easily referred to using
the formula `2**bit_number` where `bit_number` is the 0-index bit of interest.
2**n is gauche but not everyone loves 1<<n
Rachel uses:
from jwst.datamodels import dqflags
DO_NOT_USE = dqflags.pixel["DO_NOT_USE"]
dqmask = pxdq0 & DO_NOT_USE == DO_NOT_USE
pxdq = np.where(dqmask, pxdq0, 0)
"""
# Pixel-specific flags
self.bpval = {
'GOOD': 0, # No bits set, all is good
'DO_NOT_USE': 2**0, # Bad pixel. Do not use.
'SATURATED': 2**1, # Pixel saturated during exposure
'JUMP_DET': 2**2, # Jump detected during exposure
'DROPOUT': 2**3, # Data lost in transmission
'OUTLIER': 2**4, # Flagged by outlier detection. Was RESERVED_1
'RESERVED_2': 2**5, #
'RESERVED_3': 2**6, #
'RESERVED_4': 2**7, #
'UNRELIABLE_ERROR': 2**8, # Uncertainty exceeds quoted error
'NON_SCIENCE': 2**9, # Pixel not on science portion of detector
'DEAD': 2**10, # Dead pixel
'HOT': 2**11, # Hot pixel
'WARM': 2**12, # Warm pixel
'LOW_QE': 2**13, # Low quantum efficiency
'RC': 2**14, # RC pixel
'TELEGRAPH': 2**15, # Telegraph pixel
'NONLINEAR': 2**16, # Pixel highly nonlinear
'BAD_REF_PIXEL': 2**17, # Reference pixel cannot be used
'NO_FLAT_FIELD': 2**18, # Flat field cannot be measured
'NO_GAIN_VALUE': 2**19, # Gain cannot be measured
'NO_LIN_CORR': 2**20, # Linearity correction not available
'NO_SAT_CHECK': 2**21, # Saturation check not available
'UNRELIABLE_BIAS': 2**22, # Bias variance large
'UNRELIABLE_DARK': 2**23, # Dark variance large
'UNRELIABLE_SLOPE': 2**24, # Slope variance large (i.e., noisy pixel)
'UNRELIABLE_FLAT': 2**25, # Flat variance large
'OPEN': 2**26, # Open pixel (counts move to adjacent pixels)
'ADJ_OPEN': 2**27, # Adjacent to open pixel
'UNRELIABLE_RESET': 2**28, # Sensitive to reset anomaly
'MSA_FAILED_OPEN': 2**29, # Pixel sees light from failed-open shutter
'OTHER_BAD_PIXEL': 2**30, # A catch-all flag
'REFERENCE_PIXEL': 2**31, # Pixel is a reference pixel
}
# Group-specific flags. Once groups are combined, these flags
# are equivalent to the pixel-specific flags.
self.bpgroup = {
'GOOD': self.bpval['GOOD'],
'DO_NOT_USE': self.bpval['DO_NOT_USE'],
'SATURATED': self.bpval['SATURATED'],
'JUMP_DET': self.bpval['JUMP_DET'],
'DROPOUT': self.bpval['DROPOUT'],
}
def mast2sky(self):
"""
Rotate hole center coordinates:
Clockwise by the V3 position angle - V3I_YANG from north in degrees if VPARITY = -1
Counterclockwise by the V3 position angle - V3I_YANG from north in degrees if VPARITY = 1
Hole center coords are in the V2, V3 plane in meters.
Return rotated coordinates to be put in info4oif_dict.
implane2oifits.ObservablesFromText uses these to calculate baselines.
"""
pa = self.pa
mask_ctrs = copy.deepcopy(self.mask.ctrs)
# rotate by an extra 90 degrees (RAC 9/21)
# these coords are just used to orient output in OIFITS files
# NOT used for the fringe fitting itself
mask_ctrs = utils.rotate2dccw(mask_ctrs,np.pi/2.)
vpar = self.vparity # Relative sense of rotation between Ideal xy and V2V3
v3iyang = self.v3i_yang
rot_ang = pa - v3iyang # subject to change!
if pa != 0.0:
# Using rotate2sccw, which rotates **vectors** CCW in a fixed coordinate system,
# so to rotate coord system CW instead of the vector, reverse sign of rotation angle. Double-check comment
if vpar == -1:
# rotate clockwise <rotate coords clockwise?>
ctrs_rot = utils.rotate2dccw(mask_ctrs, np.deg2rad(-rot_ang))
print(f'InstrumentData.mast2sky: Rotating mask hole centers clockwise by {rot_ang:.3f} degrees')
else:
# counterclockwise <rotate coords counterclockwise?>
ctrs_rot = utils.rotate2dccw(mask_ctrs, np.deg2rad(rot_ang))
print('InstrumentData.mast2sky: Rotating mask hole centers counterclockwise by {rot_ang:.3f} degrees')
else:
ctrs_rot = mask_ctrs
return ctrs_rot
| [
"numpy.random.normal",
"nrm_analysis.misctools.utils.get_src_spec",
"nrm_analysis.misctools.mask_definitions.NRM_mask_definitions",
"nrm_analysis.misctools.utils.Affine2d",
"nrm_analysis.misctools.utils.get_filt_spec",
"sys.exit",
"nrm_analysis.misctools.utils.combine_src_filt",
"numpy.linalg.norm",
... | [((6375, 6409), 'nrm_analysis.misctools.utils.get_cw_beta', 'utils.get_cw_beta', (['self.throughput'], {}), '(self.throughput)\n', (6392, 6409), False, 'from nrm_analysis.misctools import utils\n'), ((7371, 7469), 'nrm_analysis.misctools.mask_definitions.NRM_mask_definitions', 'NRM_mask_definitions', ([], {'maskname': 'self.arrname', 'chooseholes': 'chooseholes', 'holeshape': 'self.holeshape'}), '(maskname=self.arrname, chooseholes=chooseholes,\n holeshape=self.holeshape)\n', (7391, 7469), False, 'from nrm_analysis.misctools.mask_definitions import NRM_mask_definitions\n'), ((9243, 9273), 'nrm_analysis.misctools.utils.mas2rad', 'utils.mas2rad', (['self.pscale_mas'], {}), '(self.pscale_mas)\n', (9256, 9273), False, 'from nrm_analysis.misctools import utils\n'), ((13596, 13668), 'numpy.array', 'np.array', (['(cd11 * vec[0] + cd12 * vec[1], cd21 * vec[0] + cd22 * vec[1])'], {}), '((cd11 * vec[0] + cd12 * vec[1], cd21 * vec[0] + cd22 * vec[1]))\n', (13604, 13668), True, 'import numpy as np\n'), ((20287, 20317), 'nrm_analysis.misctools.utils.mas2rad', 'utils.mas2rad', (['self.pscale_mas'], {}), '(self.pscale_mas)\n', (20300, 20317), False, 'from nrm_analysis.misctools import utils\n'), ((20386, 20489), 'nrm_analysis.misctools.mask_definitions.NRM_mask_definitions', 'NRM_mask_definitions', ([], {'maskname': 'self.arrname', 'chooseholes': 'self.chooseholes', 'holeshape': 'self.holeshape'}), '(maskname=self.arrname, chooseholes=self.chooseholes,\n holeshape=self.holeshape)\n', (20406, 20489), False, 'from nrm_analysis.misctools.mask_definitions import NRM_mask_definitions\n'), ((21648, 21761), 'numpy.set_printoptions', 'np.set_printoptions', ([], {'precision': '(5)', 'suppress': '(True)', 'linewidth': '(160)', 'formatter': "{'float': lambda x: '%10.5f,' % x}"}), "(precision=5, suppress=True, linewidth=160, formatter={\n 'float': lambda x: '%10.5f,' % x})\n", (21667, 21761), True, 'import numpy as np\n'), ((21982, 22012), 'numpy.zeros', 'np.zeros', (['self.mask.ctrs.shape'], {}), '(self.mask.ctrs.shape)\n', (21990, 22012), True, 'import numpy as np\n'), ((28017, 28046), 'copy.deepcopy', 'copy.deepcopy', (['self.mask.ctrs'], {}), '(self.mask.ctrs)\n', (28030, 28046), False, 'import copy\n'), ((28237, 28278), 'nrm_analysis.misctools.utils.rotate2dccw', 'utils.rotate2dccw', (['mask_ctrs', '(np.pi / 2.0)'], {}), '(mask_ctrs, np.pi / 2.0)\n', (28254, 28278), False, 'from nrm_analysis.misctools import utils\n'), ((5739, 5769), 'nrm_analysis.misctools.utils.get_filt_spec', 'utils.get_filt_spec', (['self.filt'], {}), '(self.filt)\n', (5758, 5769), False, 'from nrm_analysis.misctools import utils\n'), ((5793, 5816), 'nrm_analysis.misctools.utils.get_src_spec', 'utils.get_src_spec', (['src'], {}), '(src)\n', (5811, 5816), False, 'from nrm_analysis.misctools import utils\n'), ((6020, 6130), 'nrm_analysis.misctools.utils.combine_src_filt', 'utils.combine_src_filt', (['filt_spec', 'src_spec'], {'trim': '(0.01)', 'nlambda': 'nspecbin', 'verbose': 'self.verbose', 'plot': '(False)'}), '(filt_spec, src_spec, trim=0.01, nlambda=nspecbin,\n verbose=self.verbose, plot=False)\n', (6042, 6130), False, 'from nrm_analysis.misctools import utils\n'), ((8070, 8146), 'nrm_analysis.misctools.utils.Affine2d', 'utils.Affine2d', ([], {'mx': '(1.0)', 'my': '(1.0)', 'sx': '(0.0)', 'sy': '(0.0)', 'xo': '(0.0)', 'yo': '(0.0)', 'name': '"""Ideal"""'}), "(mx=1.0, my=1.0, sx=0.0, sy=0.0, xo=0.0, yo=0.0, name='Ideal')\n", (8084, 8146), False, 'from nrm_analysis.misctools import utils\n'), ((9844, 9901), 'astropy.io.fits.open', 'fits.open', (['fn'], {'memmap': '(False)', 'do_not_scale_image_data': '(True)'}), '(fn, memmap=False, do_not_scale_image_data=True)\n', (9853, 9901), False, 'from astropy.io import fits\n'), ((10017, 10048), 'copy.deepcopy', 'copy.deepcopy', (['fitsfile[1].data'], {}), '(fitsfile[1].data)\n', (10030, 10048), False, 'import copy\n'), ((16749, 16769), 'numpy.array', 'np.array', (['(1.0, 0.0)'], {}), '((1.0, 0.0))\n', (16757, 16769), True, 'import numpy as np\n'), ((16806, 16826), 'numpy.array', 'np.array', (['(0.0, 1.0)'], {}), '((0.0, 1.0))\n', (16814, 16826), True, 'import numpy as np\n'), ((5659, 5677), 'numpy.array', 'np.array', (['bandpass'], {}), '(bandpass)\n', (5667, 5677), True, 'import numpy as np\n'), ((10099, 10145), 'numpy.random.normal', 'np.random.normal', (['(0)', 'self.noise', 'scidata.shape'], {}), '(0, self.noise, scidata.shape)\n', (10115, 10145), True, 'import numpy as np\n'), ((17128, 17156), 'numpy.linalg.norm', 'np.linalg.norm', (['dxsky'], {'ord': '(2)'}), '(dxsky, ord=2)\n', (17142, 17156), True, 'import numpy as np\n'), ((17158, 17186), 'numpy.linalg.norm', 'np.linalg.norm', (['dysky'], {'ord': '(2)'}), '(dysky, ord=2)\n', (17172, 17186), True, 'import numpy as np\n'), ((5587, 5605), 'numpy.array', 'np.array', (['(wt, wl)'], {}), '((wt, wl))\n', (5595, 5605), True, 'import numpy as np\n'), ((10940, 10980), 'numpy.zeros', 'np.zeros', (['scidata.shape'], {'dtype': 'np.uint32'}), '(scidata.shape, dtype=np.uint32)\n', (10948, 10980), True, 'import numpy as np\n'), ((11894, 11913), 'numpy.array', 'np.array', (['[scidata]'], {}), '([scidata])\n', (11902, 11913), True, 'import numpy as np\n'), ((11940, 11958), 'numpy.array', 'np.array', (['[dqmask]'], {}), '([dqmask])\n', (11948, 11958), True, 'import numpy as np\n'), ((11994, 12127), 'sys.exit', 'sys.exit', (['"""InstrumentData.NIRISS.read_data: invalid data dimensions for NIRISS. \nShould have dimensionality of 2 or 3."""'], {}), '(\n """InstrumentData.NIRISS.read_data: invalid data dimensions for NIRISS. \nShould have dimensionality of 2 or 3."""\n )\n', (12002, 12127), False, 'import os, sys, time\n'), ((28824, 28844), 'numpy.deg2rad', 'np.deg2rad', (['(-rot_ang)'], {}), '(-rot_ang)\n', (28834, 28844), True, 'import numpy as np\n'), ((29103, 29122), 'numpy.deg2rad', 'np.deg2rad', (['rot_ang'], {}), '(rot_ang)\n', (29113, 29122), True, 'import numpy as np\n'), ((10279, 10313), 'copy.deepcopy', 'copy.deepcopy', (["fitsfile['DQ'].data"], {}), "(fitsfile['DQ'].data)\n", (10292, 10313), False, 'import copy\n')] |
from time import sleep
from ina219 import INA219
ina = INA219(shunt_ohms=0.1,
max_expected_amps = 0.2,
address=0x40)
ina.configure(voltage_range=ina.RANGE_32V,
gain=ina.GAIN_AUTO,
bus_adc=ina.ADC_128SAMP,
shunt_adc=ina.ADC_128SAMP)
def get_readings():
v = ina.voltage()
i = ina.current()
p = ina.power()
# print('{0:0.1f}V\n{1:0.1f}mA'.format(v, i))
# print('\n{0:0.1f} Watts'.format(p/1000))
return i, v, p
| [
"ina219.INA219"
] | [((56, 113), 'ina219.INA219', 'INA219', ([], {'shunt_ohms': '(0.1)', 'max_expected_amps': '(0.2)', 'address': '(64)'}), '(shunt_ohms=0.1, max_expected_amps=0.2, address=64)\n', (62, 113), False, 'from ina219 import INA219\n')] |
import pygame, math
pygame.init()
win = pygame.display.set_mode((1000, 600))
pygame.display.set_caption("Plumber")
angle = [pygame.image.load('angl1.png'), pygame.image.load('angl2.png'), pygame.image.load('angl3.png'),
pygame.image.load('angl4.png')]
straight = [pygame.image.load('str1.png'), pygame.image.load('str2.png')]
starts = pygame.image.load('start.png')
ends = pygame.image.load('end.png')
bg = pygame.image.load('bgrd.png')
clickSound = pygame.mixer.Sound('pop.ogg')
mouseposition = (0, 0)
moves = 0
# points == squares on screenplay with (possible) pipes, points[i] = k == square number i is in order k'th
points = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
# isstr[i] == 1 when we have got staright pipe on point i
isstr = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
# isang[i] == 1 if on point i we have pipe-angle
isang = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
# wsp - which picture we are displaying (straight 0-1, angle 0-3) if we are displaying at point by order
# for example : points[4] = 5 with 3 position of angle, points[5] = 0 -> wsp[4] = 3
wsp = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
lev = 1
# level1
points1 = [0, 1, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 3, 4, 5, 6, 9, 10, 0, 0, 0, 0, 0, 7, 8, 11, 12, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
wsp1 = [0, 3, 0, 1, 1, 0, 2, 0, 2, 2, 3, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
isstr1 = [1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
isang1 = [0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
wsptrue1 = [0, 3, 1, 1, 0, 0, 3, 1, 0, 2, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
# level2
points2 = [0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 2, 5, 6, 0, 0, 17, 18, 19, 3, 4, 7, 0, 0, 16, 0, 20, 0, 9, 8, 13,
14, 15, 0, 0, 0, 10, 11, 12, 0, 0, 0, 0]
wsp2 = [3, 1, 0, 2, 1, 0, 0, 0, 1, 1, 1, 1, 1, 3, 0, 0, 0, 2, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
isstr2 = [0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1,
0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0]
isang2 = [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0,
1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0]
wsptrue2 = [3, 1, 1, 1, 0, 2, 3, 1, 0, 2, 1, 0, 0, 2, 0, 0, 1, 2, 0, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
def draw_beginning():
win.blit(bg, (0, 0))
global points, isstr, isang, moves
win.blit(starts, (0, 0))
win.blit(ends, (900, 300))
i = 0
while i < 48:
print(i)
if isstr[i] == 1:
win.blit(straight[wsp[points[i]]], (((i % 8)+1) * 100, (i // 8) * 100))
elif isang[i] == 1:
win.blit(angle[wsp[points[i]]], (((i % 8)+1) * 100, (i // 8) * 100))
i = i + 1
pygame.display.update()
moves = 1
print(wsp)
def redraw():
# p == number of square clicked on
global wsp
p = mouseposition[0] // 100 + 8 * (mouseposition[1] // 100) - 1
# change == number of square in order
change = points[p]
if isstr[p] == 1:
wsp[change] = (wsp[change] + 1) % 2
x = (mouseposition[0] // 100) * 100
y = (mouseposition[1] // 100) * 100
win.blit(straight[wsp[change]], (x, y))
elif isang[p] == 1:
wsp[change] = (wsp[change] + 1) % 4
x = (mouseposition[0] // 100) * 100
y = (mouseposition[1] // 100) * 100
win.blit(angle[wsp[change]], (x, y))
pygame.display.update()
print(wsp)
def nextlevel():
global lev, moves
print("Hurra!")
lev = 2
moves = 0
run = True
while run:
mouseClicked = False
for event in pygame.event.get():
if event.type == pygame.QUIT:
run = False
elif event.type == pygame.MOUSEBUTTONUP:
clickSound.play()
mouseposition = event.pos
mouseClicked = True
moves = 1
if moves == 0:
# for level1
if lev == 1:
points = points1
wsp = wsp1
isstr = isstr1
isang = isang1
wsptrue = wsptrue1
draw_beginning()
if lev == 2:
points = points2
wsp = wsp2
isstr = isstr2
isang = isang2
wsptrue = wsptrue2
draw_beginning()
elif mouseClicked:
redraw()
print(wsp)
k = 0
for i in range(48):
if wsp[i] == wsptrue[i]:
k = k + 1
if k == 48:
nextlevel()
pygame.quit()
| [
"pygame.init",
"pygame.quit",
"pygame.event.get",
"pygame.display.set_mode",
"pygame.mixer.Sound",
"pygame.display.set_caption",
"pygame.image.load",
"pygame.display.update"
] | [((23, 36), 'pygame.init', 'pygame.init', ([], {}), '()\n', (34, 36), False, 'import pygame, math\n'), ((46, 82), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(1000, 600)'], {}), '((1000, 600))\n', (69, 82), False, 'import pygame, math\n'), ((84, 121), 'pygame.display.set_caption', 'pygame.display.set_caption', (['"""Plumber"""'], {}), "('Plumber')\n", (110, 121), False, 'import pygame, math\n'), ((357, 387), 'pygame.image.load', 'pygame.image.load', (['"""start.png"""'], {}), "('start.png')\n", (374, 387), False, 'import pygame, math\n'), ((396, 424), 'pygame.image.load', 'pygame.image.load', (['"""end.png"""'], {}), "('end.png')\n", (413, 424), False, 'import pygame, math\n'), ((431, 460), 'pygame.image.load', 'pygame.image.load', (['"""bgrd.png"""'], {}), "('bgrd.png')\n", (448, 460), False, 'import pygame, math\n'), ((475, 504), 'pygame.mixer.Sound', 'pygame.mixer.Sound', (['"""pop.ogg"""'], {}), "('pop.ogg')\n", (493, 504), False, 'import pygame, math\n'), ((5568, 5581), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (5579, 5581), False, 'import pygame, math\n'), ((134, 164), 'pygame.image.load', 'pygame.image.load', (['"""angl1.png"""'], {}), "('angl1.png')\n", (151, 164), False, 'import pygame, math\n'), ((166, 196), 'pygame.image.load', 'pygame.image.load', (['"""angl2.png"""'], {}), "('angl2.png')\n", (183, 196), False, 'import pygame, math\n'), ((198, 228), 'pygame.image.load', 'pygame.image.load', (['"""angl3.png"""'], {}), "('angl3.png')\n", (215, 228), False, 'import pygame, math\n'), ((240, 270), 'pygame.image.load', 'pygame.image.load', (['"""angl4.png"""'], {}), "('angl4.png')\n", (257, 270), False, 'import pygame, math\n'), ((285, 314), 'pygame.image.load', 'pygame.image.load', (['"""str1.png"""'], {}), "('str1.png')\n", (302, 314), False, 'import pygame, math\n'), ((316, 345), 'pygame.image.load', 'pygame.image.load', (['"""str2.png"""'], {}), "('str2.png')\n", (333, 345), False, 'import pygame, math\n'), ((3764, 3787), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (3785, 3787), False, 'import pygame, math\n'), ((4448, 4471), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (4469, 4471), False, 'import pygame, math\n'), ((4654, 4672), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (4670, 4672), False, 'import pygame, math\n')] |
# -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2020.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Test serializing and deserializing data sent to the server."""
from qiskit.compiler import assemble
from ..decorators import requires_provider
from ..utils import bell_in_qobj, cancel_job
from ..ibmqtestcase import IBMQTestCase
class TestSerialization(IBMQTestCase):
"""Test data serialization."""
@requires_provider
def test_qasm_qobj(self, provider):
"""Test serializing qasm qobj data."""
backend = provider.get_backend('ibmq_qasm_simulator')
qobj = bell_in_qobj(backend=backend)
job = backend.run(qobj, validate_qobj=True)
rqobj = backend.retrieve_job(job.job_id()).qobj()
self.assertEqual(_array_to_list(qobj.to_dict()), rqobj.to_dict())
@requires_provider
def test_pulse_qobj(self, provider):
"""Test serializing pulse qobj data."""
backend = provider.get_backend('ibmq_armonk')
config = backend.configuration()
defaults = backend.defaults()
inst_map = defaults.circuit_instruction_map
x = inst_map.get('x', 0)
measure = inst_map.get('measure', range(config.n_qubits)) << x.duration
schedules = x | measure
qobj = assemble(schedules, backend, meas_level=1, shots=256)
job = backend.run(qobj, validate_qobj=True)
rqobj = backend.retrieve_job(job.job_id()).qobj()
# Convert numpy arrays to lists since they now get converted right
# before being sent to the server.
self.assertEqual(_array_to_list(qobj.to_dict()), rqobj.to_dict())
cancel_job(job)
def _array_to_list(data):
"""Convert numpy arrays to lists."""
for key, value in data.items():
if hasattr(value, 'tolist'):
data[key] = value.tolist()
elif isinstance(value, dict):
_array_to_list(value)
elif isinstance(value, list):
for index, item in enumerate(value):
if isinstance(item, dict):
value[index] = _array_to_list(item)
return data
| [
"qiskit.compiler.assemble"
] | [((1675, 1728), 'qiskit.compiler.assemble', 'assemble', (['schedules', 'backend'], {'meas_level': '(1)', 'shots': '(256)'}), '(schedules, backend, meas_level=1, shots=256)\n', (1683, 1728), False, 'from qiskit.compiler import assemble\n')] |
from pydantic import BaseModel
from tortoise.contrib.pydantic import pydantic_model_creator
from typing import Optional
from src.database.models import Notes
# Creating new notes
NoteInSchema = pydantic_model_creator(
Notes, name="NoteIn", exclude=["author_id"], exclude_readonly=True
)
# retrieving Notes
NoteOutSchema = pydantic_model_creator(
Notes, name="NoteOut", exclude=[
"modified_at", "author.password",
"author.created_at", "author.modified_at"
]
)
class UpdateNote(BaseModel):
title: Optional[str]
content: Optional[str]
| [
"tortoise.contrib.pydantic.pydantic_model_creator"
] | [((196, 290), 'tortoise.contrib.pydantic.pydantic_model_creator', 'pydantic_model_creator', (['Notes'], {'name': '"""NoteIn"""', 'exclude': "['author_id']", 'exclude_readonly': '(True)'}), "(Notes, name='NoteIn', exclude=['author_id'],\n exclude_readonly=True)\n", (218, 290), False, 'from tortoise.contrib.pydantic import pydantic_model_creator\n'), ((330, 466), 'tortoise.contrib.pydantic.pydantic_model_creator', 'pydantic_model_creator', (['Notes'], {'name': '"""NoteOut"""', 'exclude': "['modified_at', 'author.password', 'author.created_at', 'author.modified_at']"}), "(Notes, name='NoteOut', exclude=['modified_at',\n 'author.password', 'author.created_at', 'author.modified_at'])\n", (352, 466), False, 'from tortoise.contrib.pydantic import pydantic_model_creator\n')] |
import os
settings = {
'host': os.environ.get('ACCOUNT_HOST', 'https://hacktest.documents.azure.com:443/'),
'master_key': os.environ.get('ACCOUNT_KEY', '<KEY>'),
'database_id': os.environ.get('COSMOS_DATABASE', 'hackjoblist'),
'container_id': os.environ.get('COSMOS_CONTAINER', 'joblist'),
} | [
"os.environ.get"
] | [((36, 111), 'os.environ.get', 'os.environ.get', (['"""ACCOUNT_HOST"""', '"""https://hacktest.documents.azure.com:443/"""'], {}), "('ACCOUNT_HOST', 'https://hacktest.documents.azure.com:443/')\n", (50, 111), False, 'import os\n'), ((131, 169), 'os.environ.get', 'os.environ.get', (['"""ACCOUNT_KEY"""', '"""<KEY>"""'], {}), "('ACCOUNT_KEY', '<KEY>')\n", (145, 169), False, 'import os\n'), ((190, 238), 'os.environ.get', 'os.environ.get', (['"""COSMOS_DATABASE"""', '"""hackjoblist"""'], {}), "('COSMOS_DATABASE', 'hackjoblist')\n", (204, 238), False, 'import os\n'), ((260, 305), 'os.environ.get', 'os.environ.get', (['"""COSMOS_CONTAINER"""', '"""joblist"""'], {}), "('COSMOS_CONTAINER', 'joblist')\n", (274, 305), False, 'import os\n')] |
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from .target_assigner.proposal_target_layer import ProposalTargetLayer
from ..model_utils.model_nms_utils import class_agnostic_nms
from ...utils import box_coder_utils, common_utils, loss_utils
class RoIHeadTemplate(nn.Module):
def __init__(self, num_class, model_cfg, **kwargs):
super().__init__()
self.model_cfg = model_cfg
self.num_class = num_class
self.box_coder = getattr(box_coder_utils, self.model_cfg.TARGET_CONFIG.BOX_CODER)(
**self.model_cfg.TARGET_CONFIG.get('BOX_CODER_CONFIG', {})
)
self.proposal_target_layer = ProposalTargetLayer(roi_sampler_cfg=self.model_cfg.TARGET_CONFIG)
self.build_losses(self.model_cfg.LOSS_CONFIG)
self.forward_ret_dict = None
def build_losses(self, losses_cfg):
self.add_module(
'reg_loss_func',
loss_utils.WeightedSmoothL1Loss(code_weights=losses_cfg.LOSS_WEIGHTS['code_weights'])
)
def make_fc_layers(self, input_channels, output_channels, fc_list):
fc_layers = []
pre_channel = input_channels
for k in range(0, fc_list.__len__()):
fc_layers.extend([
nn.Conv1d(pre_channel, fc_list[k], kernel_size=1, bias=False),
nn.BatchNorm1d(fc_list[k]),
nn.ReLU(inplace=True)
])
pre_channel = fc_list[k]
if self.model_cfg.DP_RATIO >= 0 and k == 0:
fc_layers.append(nn.Dropout(self.model_cfg.DP_RATIO))
fc_layers.append(nn.Conv1d(pre_channel, output_channels, kernel_size=1, bias=True))
fc_layers = nn.Sequential(*fc_layers)
return fc_layers
@torch.no_grad()
def proposal_layer(self, batch_dict, nms_config):
"""
Args:
batch_dict:
batch_size:
batch_cls_preds: (B, num_boxes, num_classes | 1) or (N1+N2+..., num_classes | 1)
batch_box_preds: (B, num_boxes, 7+C) or (N1+N2+..., 7+C)
cls_preds_normalized: indicate whether batch_cls_preds is normalized
batch_index: optional (N1+N2+...)
nms_config:
Returns:
batch_dict:
rois: (B, num_rois, 7+C)
roi_scores: (B, num_rois)
roi_labels: (B, num_rois)
"""
if batch_dict.get('rois', None) is not None:
return batch_dict
batch_size = batch_dict['batch_size']
batch_box_preds = batch_dict['batch_box_preds']
batch_cls_preds = batch_dict['batch_cls_preds']
batch_feat_preds = batch_dict.get('batch_feat_preds', None)
# rois = batch_box_preds.new_zeros((batch_size, nms_config.NMS_POST_MAXSIZE, batch_box_preds.shape[-1]))
# roi_scores = batch_box_preds.new_zeros((batch_size, nms_config.NMS_POST_MAXSIZE))
# roi_labels = batch_box_preds.new_zeros((batch_size, nms_config.NMS_POST_MAXSIZE), dtype=torch.long)
rois = []
roi_scores = []
roi_labels = []
roi_feats = []
for index in range(batch_size):
if 'batch_index' in batch_dict:
assert batch_cls_preds.shape.__len__() == 2
batch_mask = (batch_dict['batch_index'] == index)
else:
assert batch_dict['batch_cls_preds'].shape.__len__() == 3
batch_mask = index
box_preds = batch_box_preds[batch_mask]
cls_preds = batch_cls_preds[batch_mask]
cur_roi_scores, cur_roi_labels = torch.max(cls_preds, dim=1)
selected, selected_scores = class_agnostic_nms(
box_scores=cur_roi_scores, box_preds=box_preds, nms_config=nms_config
)
pad_len = nms_config.NMS_POST_MAXSIZE - len(selected)
rois.append(F.pad(box_preds[selected], (0, 0, 0, pad_len)))
roi_scores.append(F.pad(cur_roi_scores[selected], (0, pad_len)))
roi_labels.append(F.pad(cur_roi_labels[selected], (0, pad_len)))
if batch_feat_preds is not None:
roi_feats.append(F.pad(batch_feat_preds[batch_mask][selected], (0, 0, 0, pad_len)))
if batch_size == 1:
rois = rois[0].unsqueeze(0)
roi_scores = roi_scores[0].unsqueeze(0)
roi_labels = roi_labels[0].unsqueeze(0)
if batch_feat_preds is not None:
roi_feats = roi_feats[0].unsqueeze(0)
else:
rois = torch.stack(rois)
roi_scores = torch.stack(roi_scores)
roi_labels = torch.stack(roi_labels)
if batch_feat_preds is not None:
roi_feats = torch.stack(roi_feats)
batch_dict['rois'] = rois
batch_dict['roi_scores'] = roi_scores
batch_dict['roi_labels'] = roi_labels + 1
batch_dict['roi_feats'] = roi_feats
batch_dict['has_class_labels'] = True if batch_cls_preds.shape[-1] > 1 else False
batch_dict.pop('batch_index', None)
return batch_dict
def assign_targets(self, batch_dict):
"""
Args:
batch_dict:
batch_size:
rois: (B, num_rois, 7 + C)
roi_scores: (B, num_rois)
gt_boxes: (B, N, 7 + C + 1)
roi_labels: (B, num_rois)
Returns:
batch_dict:
rois: (B, M, 7 + C)
gt_of_rois: (B, M, 7 + C)
gt_iou_of_rois: (B, M)
roi_scores: (B, M)
roi_labels: (B, M)
reg_valid_mask: (B, M)
rcnn_cls_labels: (B, M)
"""
batch_size = batch_dict['batch_size']
with torch.no_grad():
targets_dict = self.proposal_target_layer.forward(batch_dict)
rois = targets_dict['rois'] # (B, N, 7 + C)
gt_of_rois = targets_dict['gt_of_rois'] # (B, N, 7 + C + 1)
targets_dict['gt_of_rois_src'] = gt_of_rois.clone().detach()
# canonical transformation
roi_center = rois[:, :, 0:3]
roi_ry = rois[:, :, 6] % (2 * np.pi)
gt_of_rois[:, :, 0:3] = gt_of_rois[:, :, 0:3] - roi_center
gt_of_rois[:, :, 6] = gt_of_rois[:, :, 6] - roi_ry
# transfer LiDAR coords to local coords
gt_of_rois = common_utils.rotate_points_along_z(
points=gt_of_rois.view(-1, 1, gt_of_rois.shape[-1]), angle=-roi_ry.view(-1)
).view(batch_size, -1, gt_of_rois.shape[-1])
# flip orientation if rois have opposite orientation
heading_label = gt_of_rois[:, :, 6] % (2 * np.pi) # 0 ~ 2pi
opposite_flag = (heading_label > np.pi * 0.5) & (heading_label < np.pi * 1.5)
heading_label[opposite_flag] = (heading_label[opposite_flag] + np.pi) % (2 * np.pi) # (0 ~ pi/2, 3pi/2 ~ 2pi)
flag = heading_label > np.pi
heading_label[flag] = heading_label[flag] - np.pi * 2 # (-pi/2, pi/2)
heading_label = torch.clamp(heading_label, min=-np.pi / 2, max=np.pi / 2)
gt_of_rois[:, :, 6] = heading_label
targets_dict['gt_of_rois'] = gt_of_rois
return targets_dict
def get_box_reg_layer_loss(self, forward_ret_dict):
loss_cfgs = self.model_cfg.LOSS_CONFIG
code_size = self.box_coder.code_size
reg_valid_mask = forward_ret_dict['reg_valid_mask'].view(-1)
gt_boxes3d_ct = forward_ret_dict['gt_of_rois'][..., 0:code_size]
gt_of_rois_src = forward_ret_dict['gt_of_rois_src'][..., 0:code_size].view(-1, code_size)
rcnn_reg = forward_ret_dict['rcnn_reg'] # (rcnn_batch_size, C)
roi_boxes3d = forward_ret_dict['rois']
rcnn_batch_size = gt_boxes3d_ct.view(-1, code_size).shape[0]
fg_mask = (reg_valid_mask > 0)
fg_sum = fg_mask.long().sum().item()
tb_dict = {}
if loss_cfgs.REG_LOSS == 'smooth-l1':
rois_anchor = roi_boxes3d.clone().detach().view(-1, code_size)
rois_anchor[:, 0:3] = 0
rois_anchor[:, 6] = 0
reg_targets = self.box_coder.encode_torch(
gt_boxes3d_ct.view(rcnn_batch_size, code_size), rois_anchor
)
rcnn_loss_reg = self.reg_loss_func(
rcnn_reg.view(rcnn_batch_size, -1).unsqueeze(dim=0),
reg_targets.unsqueeze(dim=0),
) # [B, M, 7]
rcnn_loss_reg = (rcnn_loss_reg.view(rcnn_batch_size, -1) * fg_mask.unsqueeze(dim=-1).float()).sum() / max(
fg_sum, 1)
rcnn_loss_reg = rcnn_loss_reg * loss_cfgs.LOSS_WEIGHTS['rcnn_reg_weight']
tb_dict['rcnn_loss_reg'] = rcnn_loss_reg.item()
if loss_cfgs.CORNER_LOSS_REGULARIZATION and fg_sum > 0:
# TODO: NEED to BE CHECK
fg_rcnn_reg = rcnn_reg.view(rcnn_batch_size, -1)[fg_mask]
fg_roi_boxes3d = roi_boxes3d.view(-1, code_size)[fg_mask]
fg_roi_boxes3d = fg_roi_boxes3d.view(1, -1, code_size)
batch_anchors = fg_roi_boxes3d.clone().detach()
roi_ry = fg_roi_boxes3d[:, :, 6].view(-1)
roi_xyz = fg_roi_boxes3d[:, :, 0:3].view(-1, 3)
batch_anchors[:, :, 0:3] = 0
rcnn_boxes3d = self.box_coder.decode_torch(
fg_rcnn_reg.view(batch_anchors.shape[0], -1, code_size), batch_anchors
).view(-1, code_size)
rcnn_boxes3d = common_utils.rotate_points_along_z(
rcnn_boxes3d.unsqueeze(dim=1), roi_ry
).squeeze(dim=1)
rcnn_boxes3d[:, 0:3] += roi_xyz
loss_corner = loss_utils.get_corner_loss_lidar(
rcnn_boxes3d[:, 0:7],
gt_of_rois_src[fg_mask][:, 0:7]
)
loss_corner = loss_corner.mean()
loss_corner = loss_corner * loss_cfgs.LOSS_WEIGHTS['rcnn_corner_weight']
rcnn_loss_reg += loss_corner
tb_dict['rcnn_loss_corner'] = loss_corner.item()
else:
raise NotImplementedError
return rcnn_loss_reg, tb_dict
def get_box_cls_layer_loss(self, forward_ret_dict):
loss_cfgs = self.model_cfg.LOSS_CONFIG
rcnn_cls = forward_ret_dict['rcnn_cls']
rcnn_cls_labels = forward_ret_dict['rcnn_cls_labels'].view(-1)
if loss_cfgs.CLS_LOSS == 'BinaryCrossEntropy':
rcnn_cls_flat = rcnn_cls.view(-1)
batch_loss_cls = F.binary_cross_entropy(torch.sigmoid(rcnn_cls_flat), rcnn_cls_labels.float(),
reduction='none')
cls_valid_mask = (rcnn_cls_labels >= 0).float()
rcnn_loss_cls = (batch_loss_cls * cls_valid_mask).sum() / torch.clamp(cls_valid_mask.sum(), min=1.0)
elif loss_cfgs.CLS_LOSS == 'CrossEntropy':
batch_loss_cls = F.cross_entropy(rcnn_cls, rcnn_cls_labels, reduction='none', ignore_index=-1)
cls_valid_mask = (rcnn_cls_labels >= 0).float()
rcnn_loss_cls = (batch_loss_cls * cls_valid_mask).sum() / torch.clamp(cls_valid_mask.sum(), min=1.0)
else:
raise NotImplementedError
rcnn_loss_cls = rcnn_loss_cls * loss_cfgs.LOSS_WEIGHTS['rcnn_cls_weight']
tb_dict = {'rcnn_loss_cls': rcnn_loss_cls.item()}
return rcnn_loss_cls, tb_dict
def get_circle_loss(self, forward_ret_dict):
embeddings = forward_ret_dict.get('embeddings', None)
reid_targets = forward_ret_dict.get('reid_targets', None)
loss_cfgs = self.model_cfg.LOSS_CONFIG
circle_losses = []
if embeddings is not None and reid_targets is not None:
m = loss_cfgs.REID_LOSS.RELAXATION_FACTOR
gamma = loss_cfgs.REID_LOSS.SCALE_FACTOR
for embedding, target in zip(embeddings, reid_targets):
label_mat = torch.eq(target.unsqueeze(1), target.unsqueeze(0))
is_pos = torch.triu(label_mat, diagonal=1).view(-1)
is_neg = torch.triu(~label_mat, diagonal=1).view(-1)
if torch.sum(is_neg) == 0 or torch.sum(is_pos) == 0:
continue
norm_feat = F.normalize(embedding, dim=1)
similarity_mat = torch.mm(norm_feat, norm_feat.t()).view(-1)
sp = similarity_mat[is_pos]
sn = similarity_mat[is_neg]
ap = torch.clamp_min(-sp.detach() + 1 + m, min=0.)
an = torch.clamp_min(sn.detach() + m, min=0.)
delta_p = 1 - m
delta_n = m
logit_p = - ap * (sp - delta_p) * gamma
logit_n = an * (sn - delta_n) * gamma
circle_loss = F.softplus(torch.logsumexp(logit_n, dim=0) + torch.logsumexp(logit_p, dim=0))
circle_losses.append(circle_loss)
if len(circle_losses) > 0:
reid_loss = sum(circle_losses) / len(circle_losses)
reid_weight = self.model_cfg.LOSS_CONFIG.LOSS_WEIGHTS.get('reid_weight', 1)
reid_loss *= reid_weight
return reid_loss, {'rcnn_reid_loss': reid_loss.item()}
else:
return 0, {}
def get_loss(self, tb_dict=None):
tb_dict = {} if tb_dict is None else tb_dict
rcnn_loss_cls, cls_tb_dict = self.get_box_cls_layer_loss(self.forward_ret_dict)
tb_dict.update(cls_tb_dict)
rcnn_loss_reg, reg_tb_dict = self.get_box_reg_layer_loss(self.forward_ret_dict)
tb_dict.update(reg_tb_dict)
rcnn_loss_triplet, triplet_tb_dict = self.get_circle_loss(self.forward_ret_dict)
tb_dict.update(triplet_tb_dict)
rcnn_loss = rcnn_loss_cls + rcnn_loss_reg + rcnn_loss_triplet
tb_dict['rcnn_loss'] = rcnn_loss.item()
return rcnn_loss, tb_dict
def generate_predicted_boxes(self, batch_size, rois, cls_preds, box_preds):
"""
Args:
batch_size:
rois: (B, N, 7)
cls_preds: (BN, num_class)
box_preds: (BN, code_size)
Returns:
"""
code_size = self.box_coder.code_size
# batch_cls_preds: (B, N, num_class or 1)
batch_cls_preds = cls_preds.view(batch_size, -1, cls_preds.shape[-1])
batch_box_preds = box_preds.view(batch_size, -1, code_size)
roi_ry = rois[:, :, 6].view(-1)
roi_xyz = rois[:, :, 0:3].view(-1, 3)
local_rois = rois.clone().detach()
local_rois[:, :, 0:3] = 0
batch_box_preds = self.box_coder.decode_torch(batch_box_preds, local_rois).view(-1, code_size)
batch_box_preds = common_utils.rotate_points_along_z(
batch_box_preds.unsqueeze(dim=1), roi_ry
).squeeze(dim=1)
batch_box_preds[:, 0:3] += roi_xyz
batch_box_preds = batch_box_preds.view(batch_size, -1, code_size)
return batch_cls_preds, batch_box_preds
| [
"torch.nn.ReLU",
"torch.nn.Dropout",
"torch.triu",
"torch.nn.Sequential",
"torch.max",
"torch.stack",
"torch.sigmoid",
"torch.nn.functional.normalize",
"torch.nn.BatchNorm1d",
"torch.sum",
"torch.nn.functional.cross_entropy",
"torch.nn.functional.pad",
"torch.no_grad",
"torch.nn.Conv1d",
... | [((1758, 1773), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1771, 1773), False, 'import torch\n'), ((1701, 1726), 'torch.nn.Sequential', 'nn.Sequential', (['*fc_layers'], {}), '(*fc_layers)\n', (1714, 1726), True, 'import torch.nn as nn\n'), ((7016, 7073), 'torch.clamp', 'torch.clamp', (['heading_label'], {'min': '(-np.pi / 2)', 'max': '(np.pi / 2)'}), '(heading_label, min=-np.pi / 2, max=np.pi / 2)\n', (7027, 7073), False, 'import torch\n'), ((1614, 1679), 'torch.nn.Conv1d', 'nn.Conv1d', (['pre_channel', 'output_channels'], {'kernel_size': '(1)', 'bias': '(True)'}), '(pre_channel, output_channels, kernel_size=1, bias=True)\n', (1623, 1679), True, 'import torch.nn as nn\n'), ((3617, 3644), 'torch.max', 'torch.max', (['cls_preds'], {'dim': '(1)'}), '(cls_preds, dim=1)\n', (3626, 3644), False, 'import torch\n'), ((4549, 4566), 'torch.stack', 'torch.stack', (['rois'], {}), '(rois)\n', (4560, 4566), False, 'import torch\n'), ((4592, 4615), 'torch.stack', 'torch.stack', (['roi_scores'], {}), '(roi_scores)\n', (4603, 4615), False, 'import torch\n'), ((4641, 4664), 'torch.stack', 'torch.stack', (['roi_labels'], {}), '(roi_labels)\n', (4652, 4664), False, 'import torch\n'), ((5766, 5781), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (5779, 5781), False, 'import torch\n'), ((3897, 3943), 'torch.nn.functional.pad', 'F.pad', (['box_preds[selected]', '(0, 0, 0, pad_len)'], {}), '(box_preds[selected], (0, 0, 0, pad_len))\n', (3902, 3943), True, 'import torch.nn.functional as F\n'), ((3975, 4020), 'torch.nn.functional.pad', 'F.pad', (['cur_roi_scores[selected]', '(0, pad_len)'], {}), '(cur_roi_scores[selected], (0, pad_len))\n', (3980, 4020), True, 'import torch.nn.functional as F\n'), ((4052, 4097), 'torch.nn.functional.pad', 'F.pad', (['cur_roi_labels[selected]', '(0, pad_len)'], {}), '(cur_roi_labels[selected], (0, pad_len))\n', (4057, 4097), True, 'import torch.nn.functional as F\n'), ((4738, 4760), 'torch.stack', 'torch.stack', (['roi_feats'], {}), '(roi_feats)\n', (4749, 4760), False, 'import torch\n'), ((10549, 10577), 'torch.sigmoid', 'torch.sigmoid', (['rcnn_cls_flat'], {}), '(rcnn_cls_flat)\n', (10562, 10577), False, 'import torch\n'), ((10927, 11004), 'torch.nn.functional.cross_entropy', 'F.cross_entropy', (['rcnn_cls', 'rcnn_cls_labels'], {'reduction': '"""none"""', 'ignore_index': '(-1)'}), "(rcnn_cls, rcnn_cls_labels, reduction='none', ignore_index=-1)\n", (10942, 11004), True, 'import torch.nn.functional as F\n'), ((12242, 12271), 'torch.nn.functional.normalize', 'F.normalize', (['embedding'], {'dim': '(1)'}), '(embedding, dim=1)\n', (12253, 12271), True, 'import torch.nn.functional as F\n'), ((1266, 1327), 'torch.nn.Conv1d', 'nn.Conv1d', (['pre_channel', 'fc_list[k]'], {'kernel_size': '(1)', 'bias': '(False)'}), '(pre_channel, fc_list[k], kernel_size=1, bias=False)\n', (1275, 1327), True, 'import torch.nn as nn\n'), ((1345, 1371), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['fc_list[k]'], {}), '(fc_list[k])\n', (1359, 1371), True, 'import torch.nn as nn\n'), ((1389, 1410), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1396, 1410), True, 'import torch.nn as nn\n'), ((1552, 1587), 'torch.nn.Dropout', 'nn.Dropout', (['self.model_cfg.DP_RATIO'], {}), '(self.model_cfg.DP_RATIO)\n', (1562, 1587), True, 'import torch.nn as nn\n'), ((4177, 4242), 'torch.nn.functional.pad', 'F.pad', (['batch_feat_preds[batch_mask][selected]', '(0, 0, 0, pad_len)'], {}), '(batch_feat_preds[batch_mask][selected], (0, 0, 0, pad_len))\n', (4182, 4242), True, 'import torch.nn.functional as F\n'), ((12004, 12037), 'torch.triu', 'torch.triu', (['label_mat'], {'diagonal': '(1)'}), '(label_mat, diagonal=1)\n', (12014, 12037), False, 'import torch\n'), ((12072, 12106), 'torch.triu', 'torch.triu', (['(~label_mat)'], {'diagonal': '(1)'}), '(~label_mat, diagonal=1)\n', (12082, 12106), False, 'import torch\n'), ((12135, 12152), 'torch.sum', 'torch.sum', (['is_neg'], {}), '(is_neg)\n', (12144, 12152), False, 'import torch\n'), ((12161, 12178), 'torch.sum', 'torch.sum', (['is_pos'], {}), '(is_pos)\n', (12170, 12178), False, 'import torch\n'), ((12777, 12808), 'torch.logsumexp', 'torch.logsumexp', (['logit_n'], {'dim': '(0)'}), '(logit_n, dim=0)\n', (12792, 12808), False, 'import torch\n'), ((12811, 12842), 'torch.logsumexp', 'torch.logsumexp', (['logit_p'], {'dim': '(0)'}), '(logit_p, dim=0)\n', (12826, 12842), False, 'import torch\n')] |
#!/usr/bin/env python3
import sys
import operator
from functools import reduce
from typing import List, Tuple, Iterator
Vector = Tuple[int, int] # x, y
def decode_move(move: str) -> Vector:
d, n = move[0], int(move[1:])
if d == 'U':
return (0, n)
elif d == 'D':
return (0, -n)
elif d == 'L':
return (-n, 0)
elif d == 'R':
return (n, 0)
else:
raise ValueError(f"invalid direction d={d}")
def decode_path(path: str) -> List[Vector]:
return [decode_move(move) for move in path.split(',')]
def unit_vect(move: Vector) -> Iterator[Vector]:
x, y = move
if x != 0:
return ((int(x/abs(x)), 0) for _ in range(abs(x)))
else:
return ((0, int(y/abs(y))) for _ in range(abs(y)))
def vect_add(v1: Vector, v2: Vector) -> Vector:
return tuple(map(operator.add, v1, v2))
def dist(v1: Vector, v2: Vector) -> int:
return abs(v1[0] - v2[0]) + abs(v1[1] - v2[1])
def main(input_file: str):
with open(input_file) as f:
lines = f.readlines()
wires = [decode_path(line) for line in lines]
grids = [{} for _ in wires]
origin = (0, 0)
for i, wire in enumerate(wires):
pos = origin
d = 0
for move in wire:
for vu in unit_vect(move):
pos = vect_add(pos, vu)
d += 1
grids[i].setdefault(pos, d)
intersections = reduce(set.intersection, map(set, grids))
minD = float('inf')
minSteps = float('inf')
for pos in intersections:
minD = min(minD, dist(origin, pos))
minSteps = min(minSteps, sum(g[pos] for g in grids))
print(f'Min distance = {minD}')
print(f'Min steps = {minSteps}')
if __name__ == '__main__':
if len(sys.argv) != 2:
print('usage: ./script.py <input-file>')
sys.exit(1)
main(sys.argv[1])
| [
"sys.exit"
] | [((1839, 1850), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1847, 1850), False, 'import sys\n')] |
"""
A simple python package for scraping and downloading images from Google
Usage:
$ noicesoup.py [-h] -k KEYWORD [-cd CHROMEDRIVER]
NOTE: Default webdriver is Chrome in relative path "chromedriver"
Images will be saved in "downloads/<keyword>"
This package is currently under development...
"""
import threading
import time
import urllib.request
import os
import argparse
from time import sleep
from selenium import webdriver
from bs4 import BeautifulSoup
from pathlib import Path
def get_driver():
path = 'chromedriver'
driver = webdriver.Chrome(executable_path=path)
driver.get(f'https://www.google.com/search?q={keyword}&tbm=isch')
for i in range(0, 7):
driver.execute_script('window.scrollBy(0,document.body.scrollHeight)')
try:
# for clicking show more results button
driver.find_element(
'//*[@id="islmp"]/div/div/div/div/div[2]/div[2]/input').click()
except Exception:
pass
time.sleep(3)
return driver
def download_images(driver):
soup = BeautifulSoup(driver.page_source, 'html.parser')
img_tags = soup.find_all('img', class_='rg_i')
length = len(img_tags)
# get pics and download
for i, v in enumerate(img_tags):
try:
loading_bar(i + 1, length)
urllib.request.urlretrieve(
v['src'], f"{downloads_path}/{keyword}/{str(i + 1)}.jpg")
except Exception:
pass
print()
def loading_bar(n, l):
print("\rDownloading : {} ({:.2f}%)".format(
"█" * round(n / l * 100 / 2), n / l * 100), end="")
def loading_spinner():
msg = "⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏"
len_msg = len(msg)
counter = 0
while True:
displayed = ""
displayed += msg[(counter + 1) % len_msg]
print(f"\r{displayed} Loading {keyword=}", end="")
sleep(0.05)
counter = (counter + 1) % len_msg
if stop_thread:
break
def create_dir():
try:
os.makedirs(f'{downloads_path}/{keyword}')
except Exception:
pass
def main():
global keyword
global driver_path
global downloads_path
global stop_thread
downloads_path = os.path.join(
str(Path.home()), 'Downloads', 'noicesoup_dl')
parser = argparse.ArgumentParser(
description='A simple python package for scraping and downloading images from Google')
parser.add_argument('-k', '--keyword',
help='Input search keyword', required=True)
parser.add_argument('-cd', '--chromedriver',
help='Input ChromeDriver path', default="chromedriver")
args = parser.parse_args()
keyword = args.keyword
driver_path = args.chromedriver
stop_thread = False
thr = threading.Thread(target=loading_spinner)
thr.start()
create_dir()
driver = get_driver()
stop_thread = True
print('\r'+'=' * os.get_terminal_size().columns)
download_images(driver)
print('=' * os.get_terminal_size().columns)
print('Done!')
if "__main__" == __name__:
main()
| [
"os.get_terminal_size",
"argparse.ArgumentParser",
"os.makedirs",
"selenium.webdriver.Chrome",
"pathlib.Path.home",
"time.sleep",
"bs4.BeautifulSoup",
"threading.Thread"
] | [((550, 588), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {'executable_path': 'path'}), '(executable_path=path)\n', (566, 588), False, 'from selenium import webdriver\n'), ((1068, 1116), 'bs4.BeautifulSoup', 'BeautifulSoup', (['driver.page_source', '"""html.parser"""'], {}), "(driver.page_source, 'html.parser')\n", (1081, 1116), False, 'from bs4 import BeautifulSoup\n'), ((2281, 2396), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""A simple python package for scraping and downloading images from Google"""'}), "(description=\n 'A simple python package for scraping and downloading images from Google')\n", (2304, 2396), False, 'import argparse\n'), ((2770, 2810), 'threading.Thread', 'threading.Thread', ([], {'target': 'loading_spinner'}), '(target=loading_spinner)\n', (2786, 2810), False, 'import threading\n'), ((994, 1007), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (1004, 1007), False, 'import time\n'), ((1860, 1871), 'time.sleep', 'sleep', (['(0.05)'], {}), '(0.05)\n', (1865, 1871), False, 'from time import sleep\n'), ((1993, 2035), 'os.makedirs', 'os.makedirs', (['f"""{downloads_path}/{keyword}"""'], {}), "(f'{downloads_path}/{keyword}')\n", (2004, 2035), False, 'import os\n'), ((2224, 2235), 'pathlib.Path.home', 'Path.home', ([], {}), '()\n', (2233, 2235), False, 'from pathlib import Path\n'), ((2991, 3013), 'os.get_terminal_size', 'os.get_terminal_size', ([], {}), '()\n', (3011, 3013), False, 'import os\n'), ((2915, 2937), 'os.get_terminal_size', 'os.get_terminal_size', ([], {}), '()\n', (2935, 2937), False, 'import os\n')] |
from os import getenv
from flask import Flask, render_template, request
from twitoff.twitter import add_or_update_user
from .models import DB, User, Tweet
from .twitter import add_or_update_user, get_all_usernames
from .predict import predict_user
# Create a 'factory' for serving up the app when is launched
def create_app():
# initializes our app
app = Flask(__name__)
# Database configurations
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False # Turn off verification when we request changes to db
app.config["SQLALCHEMY_DATABASE_URI"] = getenv('DATABASE_URI')
# Give our APP access to our database
DB.init_app(app)
# Listen to a "route"
# Make our "Home" "root" route. '/' is the home page route
@app.route('/')
def root():
# what i want to happen when somebody goes to the home page
# Query users to display on the home page
return render_template('base.html', title = "Home", users = User.query.all())
@app.route('/update')
def update():
'''update all users'''
usernames = get_all_usernames()
for username in usernames:
add_or_update_user(username)
return render_template('base.html',title="All users have been updated to include their latest tweets")
@app.route('/reset')
def reset():
# remove everything from database
DB.drop_all()
# Create the database file initially
DB.create_all()
return render_template('base.html', title = 'Database Reset')
# API ENDPOINTS (Quering and manipulating data in a database)
# this routes are NOT just displaying information
# this route is going to change our Database
@app.route('/user', methods=['POST'])
@app.route('/user/<name>', methods=['GET'])
def user(name=None, message=''):
# request.values is pulling data from the html
# use the username from the URL (route)
# or grab it from the dropdown
name = name or request.values['user_name']
# If the user exist in the db already, update it, and query for it
try:
if request.method == 'POST':
add_or_update_user(name)
message = f"User '{name}' Successfully Added!"
# From the user that was just added / Updated
# get their tweets to display on the /user/<name> page
tweets = User.query.filter(User.username == name).one().tweets
except Exception as e:
message = f"Error adding {name}: {e}"
tweets = []
return render_template('user.html', title=name, tweets=tweets, message=message)
@app.route('/compare', methods=['POST'])
def compare():
user0, user1 = sorted([request.values['user0'], request.values['user1']])
if user0 == user1:
message = 'Cannot compare a user to themselves!'
else:
tweet_text = request.values['tweet_text']
prediction = predict_user(user0, user1, tweet_text)
if prediction == 0:
predicted_user = user0
non_predicted_user = user1
else:
predicted_user = user1
non_predicted_user = user0
message = f'"{tweet_text}" is more likely to be said by {predicted_user} than {non_predicted_user}'
return render_template('prediction.html', title='Prediction', message=message)
return app
# # kind of like what jinja2 does to our web pages
# app_title = 'Mytwitoff DS33'
# @app.route('/test')
# def test():
# return f"A page from {app_title} app"
# @app.route('/hola')
# def hola():
# return "Hola, Twitoff!"
# @app.route('/salut')
# def salute():
# return "Salute, Twitoff!" | [
"flask.render_template",
"twitoff.twitter.add_or_update_user",
"os.getenv",
"flask.Flask"
] | [((365, 380), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (370, 380), False, 'from flask import Flask, render_template, request\n'), ((567, 589), 'os.getenv', 'getenv', (['"""DATABASE_URI"""'], {}), "('DATABASE_URI')\n", (573, 589), False, 'from os import getenv\n'), ((1191, 1292), 'flask.render_template', 'render_template', (['"""base.html"""'], {'title': '"""All users have been updated to include their latest tweets"""'}), "('base.html', title=\n 'All users have been updated to include their latest tweets')\n", (1206, 1292), False, 'from flask import Flask, render_template, request\n'), ((1478, 1530), 'flask.render_template', 'render_template', (['"""base.html"""'], {'title': '"""Database Reset"""'}), "('base.html', title='Database Reset')\n", (1493, 1530), False, 'from flask import Flask, render_template, request\n'), ((2583, 2655), 'flask.render_template', 'render_template', (['"""user.html"""'], {'title': 'name', 'tweets': 'tweets', 'message': 'message'}), "('user.html', title=name, tweets=tweets, message=message)\n", (2598, 2655), False, 'from flask import Flask, render_template, request\n'), ((3368, 3439), 'flask.render_template', 'render_template', (['"""prediction.html"""'], {'title': '"""Prediction"""', 'message': 'message'}), "('prediction.html', title='Prediction', message=message)\n", (3383, 3439), False, 'from flask import Flask, render_template, request\n'), ((1147, 1175), 'twitoff.twitter.add_or_update_user', 'add_or_update_user', (['username'], {}), '(username)\n', (1165, 1175), False, 'from twitoff.twitter import add_or_update_user\n'), ((2172, 2196), 'twitoff.twitter.add_or_update_user', 'add_or_update_user', (['name'], {}), '(name)\n', (2190, 2196), False, 'from twitoff.twitter import add_or_update_user\n')] |
from random import randint
gnum = randint(0, 2)
print(gnum)
print("welcome to the game\n\n")
print(
"you will guess a number \nbetween 1 to 100 \nwe have selected for you\n\n"
)
print("hint: WARM! mean yor are close by 10 ")
print("\nlet's start\n")
z = 0
count_num = 0
out = 0
warm = 0
cold = 0
list1 = []
while z == 0:
cnum = int(input("choose a number : "))
#print(cnum)
round_num1 = cnum - gnum
round_num = abs(round_num1)
if cnum > 100:
print("beyond the limits (¬_¬) bi***!!!")
z = 0
out += 1
elif round_num <= 10 and round_num != 0:
print("WARM!!")
z = 0
warm += 1
if gnum > cnum:
print("higher -_-\n\n")
elif gnum < cnum:
print("lower -_-\n\n")
elif round_num > 10:
print("cold!!")
z = 0
cold += 1
if gnum > cnum:
print("higher -_-\n\n")
elif gnum < cnum:
print("lower -_-\n\n")
elif cnum == gnum:
print("yey you got it \n\(^-^)/ \n≧◡≦\n\nGood job\n")
z = 1
list1.append(cnum)
count_num = count_num + 1
print("you took %s tries to get it" % (count_num))
print(
"from %s \nyou achieve %s for warm \nyou achieve %s for cold\nyou achieve %s for beyond"
% (count_num, warm, cold, out))
print("\n\nyour numbers are ", list1)
print("see you agine")
#print(list1)
| [
"random.randint"
] | [((34, 47), 'random.randint', 'randint', (['(0)', '(2)'], {}), '(0, 2)\n', (41, 47), False, 'from random import randint\n')] |
# load .t7 file and save as .pkl data
import torchfile
import cv2
import numpy as np
import scipy.io as sio
import pickle
import time
data_path = './data/test_PC/'
# panoContext
#img_tr = torchfile.load('./data/panoContext_img_train.t7')
#print(img_tr.shape)
#lne_tr = torchfile.load('./data/panoContext_line_train.t7')
#print(lne_tr.shape)
#edg_tr = torchfile.load('./data/panoContext_edge_train.t7')
#print(edg_tr.shape)
#junc_tr = torchfile.load('./data/panoContext_cor_train.t7')
#print(junc_tr.shape)
#print('done')
#img_tr = torchfile.load('./data/panoContext_img_val.t7')
#print(img_tr.shape)
#lne_tr = torchfile.load('./data/panoContext_line_val.t7')
#print(lne_tr.shape)
#edg_tr = torchfile.load('./data/panoContext_edge_val.t7')
#print(edg_tr.shape)
#junc_tr = torchfile.load('./data/panoContext_cor_val.t7')
#print(junc_tr.shape)
#print('done')
img_tr = torchfile.load('./data/panoContext_img_test.t7')
print(img_tr.shape)
lne_tr = torchfile.load('./data/panoContext_line_test.t7')
print(lne_tr.shape)
edg_tr = torchfile.load('./data/panoContext_edge_test.t7')
print(edg_tr.shape)
junc_tr = torchfile.load('./data/panoContext_cor_test.t7')
print(junc_tr.shape)
print('done')
# stanford
#img_tr = torchfile.load('./data/stanford2d-3d_img_area_5.t7')
#print(img_tr.shape)
#lne_tr = torchfile.load('./data/stanford2d-3d_line_area_5.t7')
#print(lne_tr.shape)
#edg_tr = torchfile.load('./data/stanford2d-3d_edge_area_5.t7')
#print(edg_tr.shape)
#junc_tr = torchfile.load('./data/stanford2d-3d_cor_area_5.t7')
#print(junc_tr.shape)
#print('done')
gt_txt_path = './data/panoContext_testmap.txt'
gt_path = './data/layoutnet_dataset/test/label_cor/'
# Load data
namelist = []
id_num = []
with open(gt_txt_path, 'r') as f:
while(True):
line = f.readline().strip()
if not line:
break
id_num0 = line.split()
id_num0 = int(id_num0[1])
id_num.append(id_num0)
namelist.append(line)
id_num = np.array(id_num)
cnt = 0
for num in range(img_tr.shape[0]):
print(num)
image = img_tr[num]
image = np.transpose(image, (1,2,0))#*255.0
line = lne_tr[num]
line = np.transpose(line, (1,2,0))
edge = edg_tr[num]
edge = np.transpose(edge, (1,2,0))
junc = junc_tr[num]
junc = np.transpose(junc, (1,2,0))
# corner gt
idn = np.where(id_num == num)
idn = idn[0][0]
filename = namelist[idn]
filename = filename.split()
filename = gt_path+filename[0][:-4]+'.txt'#'.mat'
cnt+=1
cor = np.loadtxt(filename)
cor_sum = 0
for cor_num in range(cor.shape[0]):
cor_sum+=junc[int(cor[cor_num,1]),int(cor[cor_num,0]),0]
#print(cor_sum)
#time.sleep(0.5)
# pickle.dump({'image':image, 'line':line, 'edge':edge, 'junc':junc, 'cor':cor, 'filename':filename[:-4]}, open(data_path+'PC_'+"{:04d}".format(num)+'.pkl', "wb" ) )
pickle.dump({'image':image, 'line':line, 'edge':edge, 'junc':junc, 'cor':cor, 'filename':filename[:-4]}, open(data_path+'PCts_'+"{:04d}".format(num)+'.pkl', "wb" ) )
# pickle.dump({'image':image, 'line':line, 'edge':edge, 'junc':junc, 'cor':cor, 'filename':filename[:-4]}, open(data_path+'PCval_'+"{:04d}".format(num)+'.pkl', "wb" ) )
# pickle.dump({'image':image, 'line':line, 'edge':edge, 'junc':junc, 'cor':cor, 'filename':filename[:-4]}, open(data_path+'area5_'+"{:04d}".format(num)+'.pkl', "wb" ) )
| [
"numpy.where",
"torchfile.load",
"numpy.array",
"numpy.loadtxt",
"numpy.transpose"
] | [((870, 918), 'torchfile.load', 'torchfile.load', (['"""./data/panoContext_img_test.t7"""'], {}), "('./data/panoContext_img_test.t7')\n", (884, 918), False, 'import torchfile\n'), ((948, 997), 'torchfile.load', 'torchfile.load', (['"""./data/panoContext_line_test.t7"""'], {}), "('./data/panoContext_line_test.t7')\n", (962, 997), False, 'import torchfile\n'), ((1027, 1076), 'torchfile.load', 'torchfile.load', (['"""./data/panoContext_edge_test.t7"""'], {}), "('./data/panoContext_edge_test.t7')\n", (1041, 1076), False, 'import torchfile\n'), ((1107, 1155), 'torchfile.load', 'torchfile.load', (['"""./data/panoContext_cor_test.t7"""'], {}), "('./data/panoContext_cor_test.t7')\n", (1121, 1155), False, 'import torchfile\n'), ((1959, 1975), 'numpy.array', 'np.array', (['id_num'], {}), '(id_num)\n', (1967, 1975), True, 'import numpy as np\n'), ((2079, 2109), 'numpy.transpose', 'np.transpose', (['image', '(1, 2, 0)'], {}), '(image, (1, 2, 0))\n', (2091, 2109), True, 'import numpy as np\n'), ((2149, 2178), 'numpy.transpose', 'np.transpose', (['line', '(1, 2, 0)'], {}), '(line, (1, 2, 0))\n', (2161, 2178), True, 'import numpy as np\n'), ((2211, 2240), 'numpy.transpose', 'np.transpose', (['edge', '(1, 2, 0)'], {}), '(edge, (1, 2, 0))\n', (2223, 2240), True, 'import numpy as np\n'), ((2274, 2303), 'numpy.transpose', 'np.transpose', (['junc', '(1, 2, 0)'], {}), '(junc, (1, 2, 0))\n', (2286, 2303), True, 'import numpy as np\n'), ((2328, 2351), 'numpy.where', 'np.where', (['(id_num == num)'], {}), '(id_num == num)\n', (2336, 2351), True, 'import numpy as np\n'), ((2511, 2531), 'numpy.loadtxt', 'np.loadtxt', (['filename'], {}), '(filename)\n', (2521, 2531), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
#pylint: skip-file
import sys
import numpy as np
import torch
import torch as T
import torch.nn as nn
from torch.autograd import Variable
import copy
from utils_pg import *
from encoder import *
from decoder import *
from transformer.layers import Embeddings, PositionEmbeddings
class Model(nn.Module):
def __init__(self, modules, consts, options):
super(Model, self).__init__()
self.is_predicting = options["is_predicting"]
self.beam_decoding = options["beam_decoding"]
self.device = options["device"]
self.copy = options["copy"]
self.coverage = options["coverage"]
self.avg_nll = options["avg_nll"]
self.dim_x = consts["dim_x"]
self.dim_y = consts["dim_y"]
self.len_x = consts["len_x"]
self.len_y = consts["len_y"]
self.hidden_size = consts["hidden_size"]
self.d_model = self.hidden_size
self.d_ff = consts["d_ff"]
self.num_heads = consts["num_heads"]
self.dropout = consts["dropout"]
self.num_layers = consts["num_layers"]
self.dict_size = consts["dict_size"]
self.pad_token_idx = consts["pad_token_idx"]
self.word_pos_size = consts["word_pos_size"]
#self.sent_pos_size = consts["sent_pos_size"]
self.word_emb = Embeddings(self.dict_size, self.dim_x, self.pad_token_idx)
self.pos_emb_w = PositionEmbeddings(self.dim_x, self.word_pos_size)
self.pos_emb_s = PositionEmbeddings(self.dim_x, self.word_pos_size)
self.encoder = LocalEncoder(self.word_emb, self.pos_emb_w, self.pos_emb_s, \
self.d_model, self.d_ff, self.num_heads,\
self.dropout, self.num_layers)
self.decoder = LocalDecoder(self.device, self.copy, self.word_emb, \
self.pos_emb_w, self.pos_emb_s, self.dict_size, \
self.d_model, self.d_ff, self.num_heads,\
self.dropout, self.num_layers)
self.init_weights()
def init_weights(self):
for p in self.word_emb.parameters():
nn.init.xavier_uniform_(p)
for p in self.encoder.parameters():
if p.dim() > 1:
nn.init.xavier_uniform_(p)
for p in self.decoder.parameters():
if p.dim() > 1:
nn.init.xavier_uniform_(p)
def nll_loss(self, y_pred, y, y_mask, avg=True):
cost = -T.log(T.gather(y_pred, -1, y.unsqueeze(-1)))
cost = cost.view(y.shape)
y_mask = y_mask.view(y.shape)
if avg:
cost = T.sum(cost * y_mask, 1) / T.sum(y_mask, -1)
else:
cost = T.sum(cost * y_mask, 1)
#cost = cost.view((y.size(0), -1))
return T.mean(cost)
def encode(self, x, p, ps, mask_x):
return self.encoder(x, p, ps, mask_x)
def decode(self, y, p, ps, m, mask_x, mask_y, x, max_ext_lent):
return self.decoder(y, p, ps, m, mask_x, mask_y, x, max_ext_lent)
def forward(self, x, px, pxs, mask_x, y, py, pys,\
mask_y_tri, y_tgt, mask_y, x_ext, y_ext, max_ext_lent):
hs = self.encode(x, px, pxs, mask_x)
pred = self.decode(y, py, pys, hs, mask_x, mask_y_tri, x_ext, max_ext_lent)
if self.copy:
nll = self.nll_loss(pred, y_ext, mask_y, self.avg_nll)
else:
nll = self.nll_loss(pred, y_tgt, mask_y, self.avg_nll)
ppl = T.exp(nll)
return pred, ppl, None
| [
"transformer.layers.PositionEmbeddings",
"torch.mean",
"torch.nn.init.xavier_uniform_",
"transformer.layers.Embeddings",
"torch.exp",
"torch.sum"
] | [((1337, 1395), 'transformer.layers.Embeddings', 'Embeddings', (['self.dict_size', 'self.dim_x', 'self.pad_token_idx'], {}), '(self.dict_size, self.dim_x, self.pad_token_idx)\n', (1347, 1395), False, 'from transformer.layers import Embeddings, PositionEmbeddings\n'), ((1421, 1471), 'transformer.layers.PositionEmbeddings', 'PositionEmbeddings', (['self.dim_x', 'self.word_pos_size'], {}), '(self.dim_x, self.word_pos_size)\n', (1439, 1471), False, 'from transformer.layers import Embeddings, PositionEmbeddings\n'), ((1497, 1547), 'transformer.layers.PositionEmbeddings', 'PositionEmbeddings', (['self.dim_x', 'self.word_pos_size'], {}), '(self.dim_x, self.word_pos_size)\n', (1515, 1547), False, 'from transformer.layers import Embeddings, PositionEmbeddings\n'), ((2862, 2874), 'torch.mean', 'T.mean', (['cost'], {}), '(cost)\n', (2868, 2874), True, 'import torch as T\n'), ((3551, 3561), 'torch.exp', 'T.exp', (['nll'], {}), '(nll)\n', (3556, 3561), True, 'import torch as T\n'), ((2202, 2228), 'torch.nn.init.xavier_uniform_', 'nn.init.xavier_uniform_', (['p'], {}), '(p)\n', (2225, 2228), True, 'import torch.nn as nn\n'), ((2780, 2803), 'torch.sum', 'T.sum', (['(cost * y_mask)', '(1)'], {}), '(cost * y_mask, 1)\n', (2785, 2803), True, 'import torch as T\n'), ((2330, 2356), 'torch.nn.init.xavier_uniform_', 'nn.init.xavier_uniform_', (['p'], {}), '(p)\n', (2353, 2356), True, 'import torch.nn as nn\n'), ((2454, 2480), 'torch.nn.init.xavier_uniform_', 'nn.init.xavier_uniform_', (['p'], {}), '(p)\n', (2477, 2480), True, 'import torch.nn as nn\n'), ((2703, 2726), 'torch.sum', 'T.sum', (['(cost * y_mask)', '(1)'], {}), '(cost * y_mask, 1)\n', (2708, 2726), True, 'import torch as T\n'), ((2729, 2746), 'torch.sum', 'T.sum', (['y_mask', '(-1)'], {}), '(y_mask, -1)\n', (2734, 2746), True, 'import torch as T\n')] |
import pytest
import sys
sys.path.append(".")
sys.path.append("../.")
from boxdetect import config
from boxdetect import pipelines
def test_save_load_config(capsys):
cfg = config.PipelinesConfig()
cfg.morph_kernels_thickness = 10
cfg.save_yaml('test_cfg.yaml')
cfg2 = config.PipelinesConfig('test_cfg.yaml')
assert(cfg.__dict__ == cfg2.__dict__)
cfg.new_var = 10
cfg.save_yaml('test_cfg.yaml')
cfg2.load_yaml('test_cfg.yaml')
captured = capsys.readouterr()
assert("WARNING" in captured.out)
def test_update_num_iterations():
cfg = config.PipelinesConfig()
cfg.height_range = (5, 5)
cfg.width_range = [(10, 10), (20, 20)]
cfg.update_num_iterations()
assert(cfg.num_iterations == 2)
assert(len(cfg.height_range) == 2)
assert(len(cfg.width_range) == 2)
def test_autoconfig_simple():
box_sizes = [(42, 44), (41, 47), (41, 44), (41, 44), (125, 54), (92, 103)]
file_path = "tests/data/autoconfig_simple/dummy_example.png"
cfg = config.PipelinesConfig()
cfg.autoconfigure(box_sizes)
checkboxes = pipelines.get_checkboxes(
file_path, cfg=cfg, px_threshold=0.01, plot=False, verbose=False)
assert(len(checkboxes) == 12)
cfg = config.PipelinesConfig()
cfg.autoconfigure(box_sizes)
rects, groups, _, _ = pipelines.get_boxes(
file_path, cfg=cfg, plot=False)
assert(len(rects) == 23)
assert(len(groups) == 14)
def test_autoconfig_from_vott_simple():
vott_dir = "tests/data/autoconfig_simple"
file_path = "tests/data/autoconfig_simple/dummy_example.png"
cfg = config.PipelinesConfig()
cfg.autoconfigure_from_vott(vott_dir, class_tags=['box'])
checkboxes = pipelines.get_checkboxes(
file_path, cfg=cfg, px_threshold=0.01, plot=False, verbose=False)
assert(len(checkboxes) == 12)
cfg = config.PipelinesConfig()
cfg.autoconfigure_from_vott(vott_dir, class_tags=['box'])
rects, groups, _, _ = pipelines.get_boxes(
file_path, cfg=cfg, plot=False)
assert(len(rects) == 23)
assert(len(groups) == 14) | [
"boxdetect.pipelines.get_boxes",
"boxdetect.pipelines.get_checkboxes",
"boxdetect.config.PipelinesConfig",
"sys.path.append"
] | [((25, 45), 'sys.path.append', 'sys.path.append', (['"""."""'], {}), "('.')\n", (40, 45), False, 'import sys\n'), ((46, 69), 'sys.path.append', 'sys.path.append', (['"""../."""'], {}), "('../.')\n", (61, 69), False, 'import sys\n'), ((178, 202), 'boxdetect.config.PipelinesConfig', 'config.PipelinesConfig', ([], {}), '()\n', (200, 202), False, 'from boxdetect import config\n'), ((286, 325), 'boxdetect.config.PipelinesConfig', 'config.PipelinesConfig', (['"""test_cfg.yaml"""'], {}), "('test_cfg.yaml')\n", (308, 325), False, 'from boxdetect import config\n'), ((579, 603), 'boxdetect.config.PipelinesConfig', 'config.PipelinesConfig', ([], {}), '()\n', (601, 603), False, 'from boxdetect import config\n'), ((1009, 1033), 'boxdetect.config.PipelinesConfig', 'config.PipelinesConfig', ([], {}), '()\n', (1031, 1033), False, 'from boxdetect import config\n'), ((1085, 1179), 'boxdetect.pipelines.get_checkboxes', 'pipelines.get_checkboxes', (['file_path'], {'cfg': 'cfg', 'px_threshold': '(0.01)', 'plot': '(False)', 'verbose': '(False)'}), '(file_path, cfg=cfg, px_threshold=0.01, plot=False,\n verbose=False)\n', (1109, 1179), False, 'from boxdetect import pipelines\n'), ((1230, 1254), 'boxdetect.config.PipelinesConfig', 'config.PipelinesConfig', ([], {}), '()\n', (1252, 1254), False, 'from boxdetect import config\n'), ((1315, 1366), 'boxdetect.pipelines.get_boxes', 'pipelines.get_boxes', (['file_path'], {'cfg': 'cfg', 'plot': '(False)'}), '(file_path, cfg=cfg, plot=False)\n', (1334, 1366), False, 'from boxdetect import pipelines\n'), ((1599, 1623), 'boxdetect.config.PipelinesConfig', 'config.PipelinesConfig', ([], {}), '()\n', (1621, 1623), False, 'from boxdetect import config\n'), ((1704, 1798), 'boxdetect.pipelines.get_checkboxes', 'pipelines.get_checkboxes', (['file_path'], {'cfg': 'cfg', 'px_threshold': '(0.01)', 'plot': '(False)', 'verbose': '(False)'}), '(file_path, cfg=cfg, px_threshold=0.01, plot=False,\n verbose=False)\n', (1728, 1798), False, 'from boxdetect import pipelines\n'), ((1849, 1873), 'boxdetect.config.PipelinesConfig', 'config.PipelinesConfig', ([], {}), '()\n', (1871, 1873), False, 'from boxdetect import config\n'), ((1963, 2014), 'boxdetect.pipelines.get_boxes', 'pipelines.get_boxes', (['file_path'], {'cfg': 'cfg', 'plot': '(False)'}), '(file_path, cfg=cfg, plot=False)\n', (1982, 2014), False, 'from boxdetect import pipelines\n')] |
import logging
import os
import time
from backtesting import get_bot
from config.cst import *
class Backtesting:
def __init__(self, config, exchange_simulator, exit_at_end=True):
self.config = config
self.begin_time = time.time()
self.time_delta = 0
self.force_exit_at_end = exit_at_end
self.exchange_simulator = exchange_simulator
self.logger = logging.getLogger(self.__class__.__name__)
def end(self):
self.logger.warning("Current backtesting version has a 2% precision error rate.")
for symbol in self.exchange_simulator.get_symbols():
self.report(symbol)
# make sure to wait the end of threads process
backtesting_time = time.time() - self.begin_time
time.sleep(5)
self.logger.info("Simulation lasted {0} sec".format(backtesting_time))
if self.force_exit_at_end:
os._exit(0)
def report(self, symbol):
market_data = self.exchange_simulator.get_data()[symbol][self.exchange_simulator.MIN_ENABLED_TIME_FRAME.value]
self.time_delta = self.begin_time - market_data[0][PriceIndexes.IND_PRICE_TIME.value] / 1000
# profitability
total_profitability = 0
for trader in get_bot().get_exchange_trader_simulators().values():
_, profitability, _ = trader.get_trades_manager().get_profitability()
total_profitability += profitability
# vs market
market_delta = self.get_market_delta(market_data)
# log
self.logger.info(
"Profitability : Market {0}% | OctoBot : {1}%".format(market_delta * 100, total_profitability))
@staticmethod
def get_market_delta(market_data):
market_begin = market_data[0][PriceIndexes.IND_PRICE_CLOSE.value]
market_end = market_data[-1][PriceIndexes.IND_PRICE_CLOSE.value]
if market_begin and market_end and market_begin > 0:
market_delta = market_end / market_begin - 1 if market_end >= market_begin else market_end / market_begin - 1
else:
market_delta = 0
return market_delta
@staticmethod
def enabled(config):
return CONFIG_BACKTESTING in config and config[CONFIG_BACKTESTING][CONFIG_ENABLED_OPTION]
| [
"logging.getLogger",
"backtesting.get_bot",
"time.sleep",
"os._exit",
"time.time"
] | [((241, 252), 'time.time', 'time.time', ([], {}), '()\n', (250, 252), False, 'import time\n'), ((401, 443), 'logging.getLogger', 'logging.getLogger', (['self.__class__.__name__'], {}), '(self.__class__.__name__)\n', (418, 443), False, 'import logging\n'), ((769, 782), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (779, 782), False, 'import time\n'), ((731, 742), 'time.time', 'time.time', ([], {}), '()\n', (740, 742), False, 'import time\n'), ((909, 920), 'os._exit', 'os._exit', (['(0)'], {}), '(0)\n', (917, 920), False, 'import os\n'), ((1252, 1261), 'backtesting.get_bot', 'get_bot', ([], {}), '()\n', (1259, 1261), False, 'from backtesting import get_bot\n')] |
try:
import cv2
import numpy as np
except ImportError as e:
from pip._internal import main as install
packages = ["numpy", "opencv-python"]
for package in packages:
install(["install", package])
finally:
pass
def detectEyeGlasses():
capture = cv2.VideoCapture(0)
eyesCasecade = cv2.CascadeClassifier('haarcascade_eye_tree_eyeglasses.xml')
while capture.isOpened():
ret, video = capture.read()
# convert the video to gray_scale
image_gray = cv2.cvtColor(video, cv2.COLOR_BGR2GRAY)
eyes = eyesCasecade.detectMultiScale(image_gray, 1.1)
for (x, y, w, h) in eyes:
final_image = cv2.rectangle(video, (x, y), (x+w, y+h), (0, 255, 0), 2)
final_image = cv2.rectangle(final_image, (x-1, y+h+15),(x+30, y+h), (0, 255, 0), -1)
final_image = cv2.putText(final_image, "Eye", (x, y+h+12), cv2.FONT_HERSHEY_PLAIN, 1,(255, 0, 0), 1)
cv2.imshow("Eyes Detector", final_image)
key = cv2.waitKey(1)
if key & 0xFF == ord('q') or key == 27:
capture.release()
return cv2.destroyAllWindows()
return cv2.destroyAllWindows()
detectEyeGlasses() | [
"cv2.rectangle",
"cv2.putText",
"cv2.imshow",
"pip._internal.main",
"cv2.destroyAllWindows",
"cv2.VideoCapture",
"cv2.cvtColor",
"cv2.CascadeClassifier",
"cv2.waitKey"
] | [((280, 299), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (296, 299), False, 'import cv2\n'), ((319, 379), 'cv2.CascadeClassifier', 'cv2.CascadeClassifier', (['"""haarcascade_eye_tree_eyeglasses.xml"""'], {}), "('haarcascade_eye_tree_eyeglasses.xml')\n", (340, 379), False, 'import cv2\n'), ((1153, 1176), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (1174, 1176), False, 'import cv2\n'), ((509, 548), 'cv2.cvtColor', 'cv2.cvtColor', (['video', 'cv2.COLOR_BGR2GRAY'], {}), '(video, cv2.COLOR_BGR2GRAY)\n', (521, 548), False, 'import cv2\n'), ((1006, 1020), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (1017, 1020), False, 'import cv2\n'), ((194, 223), 'pip._internal.main', 'install', (["['install', package]"], {}), "(['install', package])\n", (201, 223), True, 'from pip._internal import main as install\n'), ((672, 732), 'cv2.rectangle', 'cv2.rectangle', (['video', '(x, y)', '(x + w, y + h)', '(0, 255, 0)', '(2)'], {}), '(video, (x, y), (x + w, y + h), (0, 255, 0), 2)\n', (685, 732), False, 'import cv2\n'), ((755, 841), 'cv2.rectangle', 'cv2.rectangle', (['final_image', '(x - 1, y + h + 15)', '(x + 30, y + h)', '(0, 255, 0)', '(-1)'], {}), '(final_image, (x - 1, y + h + 15), (x + 30, y + h), (0, 255, 0\n ), -1)\n', (768, 841), False, 'import cv2\n'), ((852, 947), 'cv2.putText', 'cv2.putText', (['final_image', '"""Eye"""', '(x, y + h + 12)', 'cv2.FONT_HERSHEY_PLAIN', '(1)', '(255, 0, 0)', '(1)'], {}), "(final_image, 'Eye', (x, y + h + 12), cv2.FONT_HERSHEY_PLAIN, 1,\n (255, 0, 0), 1)\n", (863, 947), False, 'import cv2\n'), ((951, 991), 'cv2.imshow', 'cv2.imshow', (['"""Eyes Detector"""', 'final_image'], {}), "('Eyes Detector', final_image)\n", (961, 991), False, 'import cv2\n'), ((1118, 1141), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (1139, 1141), False, 'import cv2\n')] |
#
# BSD 3-Clause License
#
# Copyright (c) 2022 University of Wisconsin - Madison
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.#
import rclpy
from rclpy.node import Node
from art_msgs.msg import VehicleState
from art_perception_msgs.msg import ObjectArray, Object
from sensor_msgs.msg import Image
from ament_index_python.packages import get_package_share_directory
from geometry_msgs.msg import PoseStamped
from nav_msgs.msg import Path
from rclpy.qos import QoSHistoryPolicy
from rclpy.qos import QoSProfile
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.patches as patches
from scipy.interpolate import interp1d,splev,splprep
import os
import json
class PathPlanningNode(Node):
def __init__(self):
super().__init__('path_planning_node')
#update frequency of this node
self.freq = 10.0
# READ IN SHARE DIRECTORY LOCATION
package_share_directory = get_package_share_directory('path_planning')
# READ IN PARAMETERS
self.declare_parameter('vis', False)
self.vis = self.get_parameter('vis').get_parameter_value().bool_value
self.declare_parameter('lookahead', 2.0)
self.lookahead = self.get_parameter('lookahead').get_parameter_value().double_value
#data that will be used by this class
self.state = VehicleState()
self.path = Path()
# self.objects = ObjectArray()
self.green_cones = np.array([])
self.red_cones = np.array([])
self.go = False
#subscribers
qos_profile = QoSProfile(depth=1)
qos_profile.history = QoSHistoryPolicy.KEEP_LAST
self.sub_state = self.create_subscription(VehicleState, '~/input/vehicle_state', self.state_callback, qos_profile)
self.sub_objects = self.create_subscription(ObjectArray, '~/input/objects', self.objects_callback, qos_profile)
if self.vis:
matplotlib.use("TKAgg")
self.fig, self.ax = plt.subplots()
plt.title("Path Planning")
self.patches = []
self.ax.set_xlim((-1,11))
self.ax.set_ylim((-6,6))
self.left_boundary = None
self.right_boundary = None
#publishers
self.pub_path = self.create_publisher(Path, '~/output/path', 10)
self.timer = self.create_timer(1/self.freq, self.pub_callback)
#function to process data this class subscribes to
def state_callback(self, msg):
# self.get_logger().info("Received '%s'" % msg)
self.state = msg
def objects_callback(self, msg):
# self.get_logger().info("Received '%s'" % msg)
# self.objects = msg
self.go = True
self.green_cones = []
self.red_cones = []
# self.get_logger().info("Detected cones: %s" % (str(len(msg.objects))))
for obj in msg.objects:
pos = [obj.pose.position.x,obj.pose.position.y,obj.pose.position.z]
id = obj.classification.classification
#calculate position from camera parameters, rect, and distance
if(id == 1):
self.red_cones.append(pos)
elif(id == 2):
self.green_cones.append(pos)
else:
self.get_logger().info("Object with unknown label detected {}".format(id))
def order_cones(self,cones,start):
ordered_cones = [start]
ego = start
for i in range(len(cones)):
dist_2 = np.sum((cones - ego)**2, axis=1)
id = np.argmin(dist_2)
ordered_cones.append(cones[id,:])
cones = np.delete(cones,id,axis=0)
ordered_cones = np.asarray(ordered_cones)
total_dist = 0
for i in range(len(ordered_cones)-1):
total_dist += np.linalg.norm(ordered_cones[i,:] - ordered_cones[i+1,:])
return ordered_cones, total_dist
def plan_path(self):
self.red_cones = np.asarray(self.red_cones)
self.green_cones = np.asarray(self.green_cones)
if(len(self.red_cones) == 0):
self.red_cones = np.asarray([1,-1.5,0]) #phantom cone to right if none are seen
if(len(self.green_cones) == 0):
self.green_cones = np.asarray([1,1.5,0]) #phantom cone to right if none are seen
self.red_cones = self.red_cones.reshape((-1,3))
self.green_cones = self.green_cones.reshape((-1,3))
left, l_dist = self.order_cones(self.green_cones,np.array([0.0,.5,0]))
right, r_dist = self.order_cones(self.red_cones,np.array([0.0,-.5,0]))
max_dist = 4
left_spline,u = splprep(left[:,0:2].transpose(),k=max(1,min(int(len(left)/2),5)))
left_samples = np.linspace(0, max_dist / l_dist, 100)
b_left = splev(left_samples,left_spline)
right_spline,u = splprep(right[:,0:2].transpose(),k=max(1,min(int(len(right)/2),5)))
right_samples = np.linspace(0, max_dist / r_dist, 100)
b_right = splev(right_samples,right_spline)
center_line = np.array([(b_left[0] + b_right[0]) / 2, (b_left[1] + b_right[1]) / 2])
# center_line = center_line[:,min(len(b_right[0]),len(b_left[0]))]
distances = np.sum((center_line)**2, axis=0)
id = np.argmin(np.abs(distances - self.lookahead**2))
target_pt = center_line[:,id]
# self.get_logger().info("B Left Spline: %s" % (str(len(b_left))))
if(self.vis):
[p.remove() for p in self.patches]
self.patches.clear()
if(self.left_boundary == None):
self.left_boundary, = self.ax.plot(b_left[0],b_left[1],c='g')
else:
self.left_boundary.set_data(b_left[0],b_left[1])
if(self.right_boundary == None):
self.right_boundary, = self.ax.plot(b_right[0],b_right[1],c='r')
else:
self.right_boundary.set_data(b_right[0],b_right[1])
for pos in right:
circ = patches.Circle(pos[0:2],radius=.1,color='r')
self.ax.add_patch(circ)
self.patches.append(circ)
for pos in left:
circ = patches.Circle(pos[0:2],radius=.1,color='g')
self.ax.add_patch(circ)
self.patches.append(circ)
circ = patches.Circle(target_pt,radius=.1,color='b')
self.ax.add_patch(circ)
self.patches.append(circ)
return target_pt
#callback to run a loop and publish data this class generates
def pub_callback(self):
if(not self.go):
return
msg = Path()
target_pt = self.plan_path()
#calculate path from current cone locations
if(self.vis):
plt.draw()
plt.pause(0.0001)
pt = PoseStamped()
pt.pose.position.x = target_pt[0]
pt.pose.position.y = target_pt[1]
msg.poses.append(pt)
self.pub_path.publish(msg)
def main(args=None):
# print("=== Starting Path Planning Node ===")
rclpy.init(args=args)
planner = PathPlanningNode()
rclpy.spin(planner)
planner.destroy_node()
rclpy.shutdown()
if __name__ == '__main__':
main()
| [
"numpy.array",
"numpy.linalg.norm",
"rclpy.init",
"numpy.delete",
"numpy.asarray",
"ament_index_python.packages.get_package_share_directory",
"numpy.linspace",
"scipy.interpolate.splev",
"numpy.argmin",
"rclpy.shutdown",
"matplotlib.patches.Circle",
"numpy.abs",
"matplotlib.use",
"matplotl... | [((8571, 8592), 'rclpy.init', 'rclpy.init', ([], {'args': 'args'}), '(args=args)\n', (8581, 8592), False, 'import rclpy\n'), ((8630, 8649), 'rclpy.spin', 'rclpy.spin', (['planner'], {}), '(planner)\n', (8640, 8649), False, 'import rclpy\n'), ((8681, 8697), 'rclpy.shutdown', 'rclpy.shutdown', ([], {}), '()\n', (8695, 8697), False, 'import rclpy\n'), ((2396, 2440), 'ament_index_python.packages.get_package_share_directory', 'get_package_share_directory', (['"""path_planning"""'], {}), "('path_planning')\n", (2423, 2440), False, 'from ament_index_python.packages import get_package_share_directory\n'), ((2812, 2826), 'art_msgs.msg.VehicleState', 'VehicleState', ([], {}), '()\n', (2824, 2826), False, 'from art_msgs.msg import VehicleState\n'), ((2847, 2853), 'nav_msgs.msg.Path', 'Path', ([], {}), '()\n', (2851, 2853), False, 'from nav_msgs.msg import Path\n'), ((2921, 2933), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (2929, 2933), True, 'import numpy as np\n'), ((2959, 2971), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (2967, 2971), True, 'import numpy as np\n'), ((3041, 3060), 'rclpy.qos.QoSProfile', 'QoSProfile', ([], {'depth': '(1)'}), '(depth=1)\n', (3051, 3060), False, 'from rclpy.qos import QoSProfile\n'), ((5170, 5195), 'numpy.asarray', 'np.asarray', (['ordered_cones'], {}), '(ordered_cones)\n', (5180, 5195), True, 'import numpy as np\n'), ((5452, 5478), 'numpy.asarray', 'np.asarray', (['self.red_cones'], {}), '(self.red_cones)\n', (5462, 5478), True, 'import numpy as np\n'), ((5506, 5534), 'numpy.asarray', 'np.asarray', (['self.green_cones'], {}), '(self.green_cones)\n', (5516, 5534), True, 'import numpy as np\n'), ((6219, 6257), 'numpy.linspace', 'np.linspace', (['(0)', '(max_dist / l_dist)', '(100)'], {}), '(0, max_dist / l_dist, 100)\n', (6230, 6257), True, 'import numpy as np\n'), ((6275, 6307), 'scipy.interpolate.splev', 'splev', (['left_samples', 'left_spline'], {}), '(left_samples, left_spline)\n', (6280, 6307), False, 'from scipy.interpolate import interp1d, splev, splprep\n'), ((6425, 6463), 'numpy.linspace', 'np.linspace', (['(0)', '(max_dist / r_dist)', '(100)'], {}), '(0, max_dist / r_dist, 100)\n', (6436, 6463), True, 'import numpy as np\n'), ((6482, 6516), 'scipy.interpolate.splev', 'splev', (['right_samples', 'right_spline'], {}), '(right_samples, right_spline)\n', (6487, 6516), False, 'from scipy.interpolate import interp1d, splev, splprep\n'), ((6539, 6609), 'numpy.array', 'np.array', (['[(b_left[0] + b_right[0]) / 2, (b_left[1] + b_right[1]) / 2]'], {}), '([(b_left[0] + b_right[0]) / 2, (b_left[1] + b_right[1]) / 2])\n', (6547, 6609), True, 'import numpy as np\n'), ((6714, 6746), 'numpy.sum', 'np.sum', (['(center_line ** 2)'], {'axis': '(0)'}), '(center_line ** 2, axis=0)\n', (6720, 6746), True, 'import numpy as np\n'), ((8133, 8139), 'nav_msgs.msg.Path', 'Path', ([], {}), '()\n', (8137, 8139), False, 'from nav_msgs.msg import Path\n'), ((8332, 8345), 'geometry_msgs.msg.PoseStamped', 'PoseStamped', ([], {}), '()\n', (8343, 8345), False, 'from geometry_msgs.msg import PoseStamped\n'), ((3395, 3418), 'matplotlib.use', 'matplotlib.use', (['"""TKAgg"""'], {}), "('TKAgg')\n", (3409, 3418), False, 'import matplotlib\n'), ((3451, 3465), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (3463, 3465), True, 'import matplotlib.pyplot as plt\n'), ((3478, 3504), 'matplotlib.pyplot.title', 'plt.title', (['"""Path Planning"""'], {}), "('Path Planning')\n", (3487, 3504), True, 'import matplotlib.pyplot as plt\n'), ((4976, 5010), 'numpy.sum', 'np.sum', (['((cones - ego) ** 2)'], {'axis': '(1)'}), '((cones - ego) ** 2, axis=1)\n', (4982, 5010), True, 'import numpy as np\n'), ((5026, 5043), 'numpy.argmin', 'np.argmin', (['dist_2'], {}), '(dist_2)\n', (5035, 5043), True, 'import numpy as np\n'), ((5110, 5138), 'numpy.delete', 'np.delete', (['cones', 'id'], {'axis': '(0)'}), '(cones, id, axis=0)\n', (5119, 5138), True, 'import numpy as np\n'), ((5291, 5352), 'numpy.linalg.norm', 'np.linalg.norm', (['(ordered_cones[i, :] - ordered_cones[i + 1, :])'], {}), '(ordered_cones[i, :] - ordered_cones[i + 1, :])\n', (5305, 5352), True, 'import numpy as np\n'), ((5603, 5627), 'numpy.asarray', 'np.asarray', (['[1, -1.5, 0]'], {}), '([1, -1.5, 0])\n', (5613, 5627), True, 'import numpy as np\n'), ((5737, 5760), 'numpy.asarray', 'np.asarray', (['[1, 1.5, 0]'], {}), '([1, 1.5, 0])\n', (5747, 5760), True, 'import numpy as np\n'), ((5974, 5997), 'numpy.array', 'np.array', (['[0.0, 0.5, 0]'], {}), '([0.0, 0.5, 0])\n', (5982, 5997), True, 'import numpy as np\n'), ((6052, 6076), 'numpy.array', 'np.array', (['[0.0, -0.5, 0]'], {}), '([0.0, -0.5, 0])\n', (6060, 6076), True, 'import numpy as np\n'), ((6770, 6809), 'numpy.abs', 'np.abs', (['(distances - self.lookahead ** 2)'], {}), '(distances - self.lookahead ** 2)\n', (6776, 6809), True, 'import numpy as np\n'), ((7834, 7882), 'matplotlib.patches.Circle', 'patches.Circle', (['target_pt'], {'radius': '(0.1)', 'color': '"""b"""'}), "(target_pt, radius=0.1, color='b')\n", (7848, 7882), True, 'import matplotlib.patches as patches\n'), ((8277, 8287), 'matplotlib.pyplot.draw', 'plt.draw', ([], {}), '()\n', (8285, 8287), True, 'import matplotlib.pyplot as plt\n'), ((8300, 8317), 'matplotlib.pyplot.pause', 'plt.pause', (['(0.0001)'], {}), '(0.0001)\n', (8309, 8317), True, 'import matplotlib.pyplot as plt\n'), ((7507, 7554), 'matplotlib.patches.Circle', 'patches.Circle', (['pos[0:2]'], {'radius': '(0.1)', 'color': '"""r"""'}), "(pos[0:2], radius=0.1, color='r')\n", (7521, 7554), True, 'import matplotlib.patches as patches\n'), ((7687, 7734), 'matplotlib.patches.Circle', 'patches.Circle', (['pos[0:2]'], {'radius': '(0.1)', 'color': '"""g"""'}), "(pos[0:2], radius=0.1, color='g')\n", (7701, 7734), True, 'import matplotlib.patches as patches\n')] |