code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
|---|---|---|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack, LLC
# Copyright 2013 Mirantis, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import os
from fuel_health.common.utils.data_utils import rand_name
import fuel_health.common.ssh
import fuel_health.nmanager
import fuel_health.test
# Module-level logger named after this module, shared by all Heat helpers.
LOG = logging.getLogger(__name__)
class HeatBaseTest(fuel_health.nmanager.NovaNetworkScenarioTest,
                   fuel_health.nmanager.SmokeChecksTest):
    """
    Base class for Heat openstack sanity and smoke tests.

    Provides helpers for creating/updating stacks, waiting on stack status
    transitions, and small SSH-based utilities shared by the Heat tests.
    """
    @classmethod
    def setUpClass(cls):
        """Bootstrap the nova-network scenario and verify Heat is reachable."""
        fuel_health.nmanager.NovaNetworkScenarioTest.setUpClass()
        cls.testvm_flavor = None
        cls.flavors = []
        if cls.manager.clients_initialized:
            # Heat is mandatory for this suite; bail out early when absent.
            if cls.heat_client is None:
                cls.fail('Heat is unavailable.')
            cls.wait_interval = cls.config.compute.build_interval
            cls.wait_timeout = cls.config.compute.build_timeout

    @classmethod
    def tearDownClass(cls):
        """Tear down the scenario and remove flavors created in setUp()."""
        fuel_health.nmanager.NovaNetworkScenarioTest.tearDownClass()
        LOG.debug("Deleting flavors created by Heat tests.")
        cls._clean_flavors()

    def setUp(self):
        super(HeatBaseTest, self).setUp()
        self.check_clients_state()
        if not self.testvm_flavor:
            # Lazily create one small flavor (64 MB RAM, 1 vCPU) shared by
            # all Heat tests; tearDownClass cleans it up.
            LOG.debug("Creating a flavor for Heat tests.")
            self.testvm_flavor = self._create_flavors(self.compute_client,
                                                      64, 1)
            self.flavors.append(self.testvm_flavor)

    @staticmethod
    def _list_stacks(client):
        """Return all stacks visible to the given heat client."""
        return client.stacks.list()

    def _find_stack(self, client, key, value):
        """Return the first stack whose attribute `key` equals `value`.

        Returns None when no such stack exists.
        """
        for stack in self._list_stacks(client):
            if hasattr(stack, key) and getattr(stack, key) == value:
                return stack
        return None

    def _create_stack(self, client, template,
                      disable_rollback=True, parameters=None):
        """Create a stack from `template` and return its details.

        FIX: `parameters` previously defaulted to a mutable dict literal,
        which is shared between calls; None is used as the sentinel now.
        """
        if parameters is None:
            parameters = {}
        stack_name = rand_name('ost1_test-')
        client.stacks.create(stack_name=stack_name,
                             template=template,
                             parameters=parameters,
                             disable_rollback=disable_rollback)
        # heat client doesn't return stack details after creation
        # so need to request them:
        stack = self._find_stack(client, 'stack_name', stack_name)
        self.set_resource(stack.id, stack)
        return stack

    def _update_stack(self, client, stack_id, template, parameters=None):
        """Update stack `stack_id` with a new template/parameters.

        FIX: mutable default `parameters={}` replaced by a None sentinel.
        """
        if parameters is None:
            parameters = {}
        client.stacks.update(stack_id=stack_id,
                             template=template,
                             parameters=parameters)
        return self._find_stack(client, 'id', stack_id)

    def _wait_for_stack_status(self, stack_id, expected_status,
                               timeout=None, interval=None):
        """
        The method is a customization of test.status_timeout().
        It addresses `stack_status` instead of `status` field and
        checks for FAILED instead of ERROR status.
        The rest is the same.
        """
        if timeout is None:
            timeout = self.wait_timeout
        if interval is None:
            interval = self.wait_interval

        def check_status():
            # Poll once: fail fast on any *FAIL* state, succeed on the
            # expected state, otherwise keep waiting (implicit None).
            stack = self.heat_client.stacks.get(stack_id)
            new_status = stack.stack_status
            if 'FAIL' in new_status:
                self.fail("Failed to get to expected status. "
                          "In %s state." % new_status)
            elif new_status == expected_status:
                return True  # All good.
            LOG.debug("Waiting for %s to get to %s status. "
                      "Currently in %s status",
                      stack, expected_status, new_status)
        if not fuel_health.test.call_until_true(check_status,
                                                timeout,
                                                interval):
            self.fail("Timed out waiting to become %s"
                      % expected_status)

    def _wait_for_stack_deleted(self, stack_id):
        """Block until the stack disappears or the timeout expires."""
        f = lambda: self._find_stack(self.heat_client, 'id', stack_id) is None
        if not fuel_health.test.call_until_true(f,
                                                self.wait_timeout,
                                                self.wait_interval):
            self.fail("Timed out waiting for stack to be deleted.")

    def _find_heat_image(self, image_name):
        """Return True when `image_name` is present in the compute image list."""
        return image_name in [i.name for i in
                              self.compute_client.images.list()]

    def _wait_for_autoscaling(self, exp_count,
                              timeout, interval, reduced_stack_name):
        """Wait until exactly `exp_count` servers whose names start with
        `reduced_stack_name` exist; returns False on timeout."""
        LOG.info('expected count is {0}'.format(exp_count))

        def count_instances(reduced_stack_name):
            res = []
            _list = self.compute_client.servers.list()
            for server in _list:
                LOG.info('instance name is {0}'.format(server.name))
                if server.name.startswith(reduced_stack_name):
                    res.append(server)
            LOG.info('!!! current res is {0}'.format(res))
            return len(res) == exp_count
        return fuel_health.test.call_until_true(
            count_instances, timeout, interval, reduced_stack_name)

    def _wait_for_cloudinit(self, conn_string, timeout, interval):
        """
        Wait for fake file (described in the stack template) to be created
        on the instance to make sure cloud-init procedure is completed.
        """
        cmd = (conn_string +
               " test -f /tmp/vm_ready.txt && echo -ne YES || echo -ne NO")

        def check():
            return self._run_ssh_cmd(cmd)[0] == "YES"
        return fuel_health.test.call_until_true(
            check, timeout, interval)

    def _save_key_to_file(self, key):
        """Write `key` to a temp file on the remote host; return its path."""
        return self._run_ssh_cmd(
            "KEY=`mktemp`; echo '%s' > $KEY; echo -ne $KEY;" % key)[0]

    def _delete_key_file(self, filepath):
        """Remove a previously saved key file on the remote host."""
        self._run_ssh_cmd("rm -f %s" % filepath)

    def _load_vm_cpu(self, connection_string):
        """Start a background CPU-burning pipeline on the VM over SSH."""
        return self._run_ssh_cmd(
            connection_string + " cat /dev/urandom | gzip -9 > /dev/null &")[0]

    def _release_vm_cpu(self, connection_string):
        """Stop the CPU load started by _load_vm_cpu()."""
        return self._run_ssh_cmd(connection_string + " pkill cat")[0]

    def _get_net_uuid(self):
        """Return a list with the private network id (neutron only).

        NOTE(review): implicitly returns None when the provider is not
        neutron; callers must handle that case.
        """
        if 'neutron' in self.config.network.network_provider:
            network = [net.id for net in
                       self.compute_client.networks.list()
                       if net.label == self.private_net]
            return network

    def _get_subnet_id(self):
        """Return the private network's subnet id (neutron only), falling
        back to the network id when the network has no subnets."""
        if 'neutron' in self.config.network.network_provider:
            neutron_net_list = ("neutron "
                                "--os-username=%s --os-password=%s "
                                "--os-tenant-name=%s --os-auth-url=%s "
                                "net-list" % (
                                    self.config.identity.admin_username,
                                    self.config.identity.admin_password,
                                    self.config.identity.admin_tenant_name,
                                    self.config.identity.uri))
            # net name surrounded with spaces to guarantee strict match
            grep = "%s | grep ' %s ' | grep -v grep | awk '{ print $6 }'" % (
                neutron_net_list, self.private_net)
            cmd = "echo -ne `%s`" % grep
            subnet = self._run_ssh_cmd(cmd)[0]
            if subnet:
                return subnet
            # if network has no subnets
            networks = [net.id for net in
                        self.compute_client.networks.list()
                        if net.label == self.private_net]
            return networks[0]

    @staticmethod
    def _load_template(file_name):
        """
        Load specified template file from etc directory.
        """
        filepath = os.path.join(
            os.path.dirname(os.path.realpath(__file__)), "etc", file_name)
        with open(filepath) as f:
            return f.read()

    @staticmethod
    def _customize_template(template):
        """
        By default, heat templates expect neutron subnets to be available.
        But if nova-network is used instead of neutron then
        subnet usage should be removed from the template.
        """
        return '\n'.join(line for line in template.splitlines()
                         if 'Ref: Subnet' not in line)
|
[
"os.path.realpath",
"fuel_health.common.utils.data_utils.rand_name",
"logging.getLogger"
] |
[((881, 908), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (898, 908), False, 'import logging\n'), ((2553, 2576), 'fuel_health.common.utils.data_utils.rand_name', 'rand_name', (['"""ost1_test-"""'], {}), "('ost1_test-')\n", (2562, 2576), False, 'from fuel_health.common.utils.data_utils import rand_name\n'), ((8579, 8605), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (8595, 8605), False, 'import os\n')]
|
#!/usr/bin/python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import argparse
import cairis.core.BorgFactory
from cairis.core.Borg import Borg
from cairis.mio.ModelExport import exportUserGoalWorkbook
__author__ = '<NAME>'
def main(args=None):
    """CLI entry point: export persona characteristics to an xlsx workbook.

    Args:
        args: optional list of argument strings to parse; defaults to
              sys.argv[1:].  FIX: the previous implementation accepted this
              parameter but ignored it, always parsing sys.argv.
    """
    parser = argparse.ArgumentParser(description='Computer Aided Integration of Requirements and Information Security - Persona characteristics To Workbook converter')
    parser.add_argument('xlsxFile', help='Workbook to create')
    parser.add_argument('--user', dest='userName', help='user name', default='cairis_test')
    parser.add_argument('--database', dest='dbName', help='database name', default='cairis_test')
    args = parser.parse_args(args)
    # Initialise the CAIRIS database connection before exporting.
    cairis.core.BorgFactory.initialise(user=args.userName, db=args.dbName)
    exportUserGoalWorkbook(args.xlsxFile)
# Allow running this module directly as a script.
if __name__ == '__main__':
    main()
|
[
"cairis.mio.ModelExport.exportUserGoalWorkbook",
"argparse.ArgumentParser"
] |
[((1013, 1177), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Computer Aided Integration of Requirements and Information Security - Persona characteristics To Workbook converter"""'}), "(description=\n 'Computer Aided Integration of Requirements and Information Security - Persona characteristics To Workbook converter'\n )\n", (1036, 1177), False, 'import argparse\n'), ((1512, 1549), 'cairis.mio.ModelExport.exportUserGoalWorkbook', 'exportUserGoalWorkbook', (['args.xlsxFile'], {}), '(args.xlsxFile)\n', (1534, 1549), False, 'from cairis.mio.ModelExport import exportUserGoalWorkbook\n')]
|
import argparse
import subprocess
from pprint import pprint
from collections import namedtuple
# Aliases for subprocess.check_output; `srun` presumably marks calls that
# need elevated privileges (cset/sysfs) — it is currently the same helper.
run = subprocess.check_output
srun = run
# One logical CPU as reported by /proc/cpuinfo.
CPUInfo = namedtuple('CPUInfo', ['processor', 'physical_id', 'core_id'])
def get_cpus():
    """Parse /proc/cpuinfo into a list of CPUInfo tuples."""
    with open('/proc/cpuinfo', 'r') as cpuinfo:
        content = cpuinfo.read()
    interesting = []
    for line in content.split('\n'):
        if 'processor' in line or 'physical id' in line or 'core id' in line:
            interesting.append(line)
    # Each CPU contributes exactly one of each of the three fields.
    assert len(interesting) % 3 == 0
    values = [int(line[line.index(':') + 1:].strip()) for line in interesting]
    cpus = []
    for start in range(0, len(values), 3):
        cpus.append(CPUInfo(*values[start:start + 3]))
    # The rest of the script expects an even CPU count (HT pairing).
    assert len(cpus) % 2 == 0
    return cpus
def set_cpu_state(cpu, enabled):
    """Hotplug a logical CPU: write '1' (online) or '0' (offline) to sysfs."""
    state = '1' if enabled else '0'
    path = '/sys/devices/system/cpu/cpu{}/online'.format(cpu.processor)
    with open(path, 'w') as online_file:
        online_file.write(state)
################################################################################
# HyperThreading
################################################################################
def disable_ht():
    """Take every HyperThreading sibling offline.

    Returns the list of CPUs that remain online (the first logical CPU
    seen for each (physical_id, core_id) pair).
    """
    print('> Disabling HyperThreading...')
    cpus = get_cpus()
    primary_cores = set()
    siblings = set()
    for cpu in cpus:
        core = (cpu.physical_id, cpu.core_id)
        if core in primary_cores:
            # Second logical CPU on an already-seen physical core.
            siblings.add(cpu)
        else:
            primary_cores.add(core)
    if not siblings:
        print(' No cores with HyperThreading enabled found.')
    elif len(siblings) == (len(cpus) // 2):
        print(' Disabling {} CPUs.'.format(len(siblings)))
    else:
        raise RuntimeError('Expected to disable either exactly half or no CPUs. This might be a bug.')
    for cpu in siblings:
        set_cpu_state(cpu, False)
    return [cpu for cpu in cpus if cpu not in siblings]
def enable_ht():
    """Bring every CPU reported by /proc/cpuinfo back online.

    NOTE(review): get_cpus() reads /proc/cpuinfo, which may list only
    currently-online CPUs — confirm offlined HT siblings still appear
    there on the target kernel, otherwise this cannot re-enable them.
    """
    for cpu in get_cpus():
        set_cpu_state(cpu, True)
################################################################################
# cpusets
################################################################################
def shield_cpus(bench_cpus, bg_cpus):
    """Create 'bench'/'bg' cpusets and migrate all movable tasks to 'bg'."""
    def cpuspec(cpus):
        # cset expects a comma-separated list of processor numbers.
        return ','.join(str(cpu.processor) for cpu in cpus)
    # Set up our cpusets
    srun(['cset', 'set', '--set=bg', '--cpu=' + cpuspec(bg_cpus), '--mem=1'])
    srun(['cset', 'set', '--set=bench', '--cpu=' + cpuspec(bench_cpus), '--mem=0'])
    # Move as many tasks (both userspace and kernel) as we can to the bg cpuset
    srun(['cset', 'proc', '--move', '--fromset=root', '--toset=bg', '--kthread'])
def remove_shield():
    """Tear down the cpusets created by shield_cpus()."""
    for cpuset in ('bg', 'bench'):
        srun(['cset', 'set', '--destroy', '--set=' + cpuset])
################################################################################
# CPU Turbo Mode
################################################################################
def set_turbo(value):
    """Enable (True) or disable (False) CPU turbo via the intel_pstate driver."""
    # The sysfs knob is inverted: no_turbo=0 means turbo is allowed.
    flag = '0' if value else '1'
    with open('/sys/devices/system/cpu/intel_pstate/no_turbo', 'w') as no_turbo:
        no_turbo.write(flag)
################################################################################
# Helpers
################################################################################
def isolate_bench_subset(cpus):
    """Split CPUs into (NUMA node 0, everything else) for bench/background.

    Raises AssertionError unless both partitions are non-empty.
    """
    bench_cpus, bg_cpus = [], []
    for cpu in cpus:
        if cpu.physical_id == 0:
            bench_cpus.append(cpu)
        else:
            bg_cpus.append(cpu)
    assert len(bench_cpus) > 0, "No CPUs on NUMA node 0!"
    assert len(bg_cpus) > 0, "Expected at least two NUMA nodes!"
    return bench_cpus, bg_cpus
################################################################################
# Setup/Teardown
################################################################################
def setup_benchmark_env():
    """Disable turbo and HT, shield node-0 CPUs, and record them to a file."""
    set_turbo(False)
    remaining_cpus = disable_ht()
    bench_cpus, bg_cpus = isolate_bench_subset(remaining_cpus)
    shield_cpus(bench_cpus, bg_cpus)
    # Persist the benchmark CPU list so callers can pin to these CPUs later.
    spec = ','.join(str(cpu.processor) for cpu in bench_cpus)
    with open('bench_cpus', 'w') as out:
        out.write(spec)
def teardown_benchmark_env():
    """Restore the machine: destroy cpusets, re-enable HT and turbo.

    Inverse of setup_benchmark_env(), applied in reverse order.
    """
    remove_shield()
    enable_ht()
    set_turbo(True)
def main():
    """Parse --setup/--teardown (exactly one required) and apply it."""
    parser = argparse.ArgumentParser(description='Configure benchmarking environment')
    parser.add_argument('--setup', action='store_true')
    parser.add_argument('--teardown', action='store_true')
    options = parser.parse_args()
    # Exactly one of the two mutually exclusive flags must be given.
    assert options.setup ^ options.teardown
    if options.setup:
        setup_benchmark_env()
    else:
        teardown_benchmark_env()
# Script entry point (requires root for sysfs/cset operations).
if __name__ == '__main__':
    main()
|
[
"collections.namedtuple",
"argparse.ArgumentParser"
] |
[((148, 210), 'collections.namedtuple', 'namedtuple', (['"""CPUInfo"""', "['processor', 'physical_id', 'core_id']"], {}), "('CPUInfo', ['processor', 'physical_id', 'core_id'])\n", (158, 210), False, 'from collections import namedtuple\n'), ((4097, 4170), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Configure benchmarking environment"""'}), "(description='Configure benchmarking environment')\n", (4120, 4170), False, 'import argparse\n')]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
from cms.models.fields import PlaceholderField
from parler.managers import TranslatableManager
from shop.money.fields import MoneyField
from .product import Product
class Commodity(Product):
    """
    This Commodity model inherits from polymorphic Product, and therefore has to be redefined.
    """
    # Net unit price; 3 decimal places allow sub-cent pricing.
    unit_price = MoneyField(
        _("Unit price"),
        decimal_places=3,
        help_text=_("Net price for this product"),
    )
    # Unique merchant-facing identifier for this commodity.
    product_code = models.CharField(
        _("Product code"),
        max_length=255,
        unique=True,
    )
    # controlling the catalog
    placeholder = PlaceholderField("Commodity Details")
    show_breadcrumb = True  # hard coded to always show the product's breadcrumb
    # Translation-aware default manager (django-parler).
    default_manager = TranslatableManager()
    class Meta:
        verbose_name = _("Commodity")
        verbose_name_plural = _("Commodities")
    def get_price(self, request):
        """Return the flat unit price; the request argument is ignored."""
        return self.unit_price
|
[
"django.utils.translation.ugettext_lazy",
"cms.models.fields.PlaceholderField",
"parler.managers.TranslatableManager"
] |
[((756, 793), 'cms.models.fields.PlaceholderField', 'PlaceholderField', (['"""Commodity Details"""'], {}), "('Commodity Details')\n", (772, 793), False, 'from cms.models.fields import PlaceholderField\n'), ((898, 919), 'parler.managers.TranslatableManager', 'TranslatableManager', ([], {}), '()\n', (917, 919), False, 'from parler.managers import TranslatableManager\n'), ((491, 506), 'django.utils.translation.ugettext_lazy', '_', (['"""Unit price"""'], {}), "('Unit price')\n", (492, 506), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((637, 654), 'django.utils.translation.ugettext_lazy', '_', (['"""Product code"""'], {}), "('Product code')\n", (638, 654), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((960, 974), 'django.utils.translation.ugettext_lazy', '_', (['"""Commodity"""'], {}), "('Commodity')\n", (961, 974), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1005, 1021), 'django.utils.translation.ugettext_lazy', '_', (['"""Commodities"""'], {}), "('Commodities')\n", (1006, 1021), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((552, 583), 'django.utils.translation.ugettext_lazy', '_', (['"""Net price for this product"""'], {}), "('Net price for this product')\n", (553, 583), True, 'from django.utils.translation import ugettext_lazy as _\n')]
|
from flask import Flask, request, jsonify
from flask import abort, make_response, url_for
from flask.ext.httpauth import HTTPBasicAuth
# Flask application and HTTP Basic Auth handler.
# NOTE(review): `flask.ext.httpauth` is the legacy (Python 2 era) import
# path — modern Flask uses `flask_httpauth`.
app = Flask(__name__)
auth = HTTPBasicAuth()
@auth.get_password
def get_password(username):
    '''
    Return the password for username, or None when the user is unknown.
    '''
    if username != 'promise':
        return None
    return '<PASSWORD>'
# In-memory "database" of orders, mutated by the POST/PUT/DELETE handlers;
# ids are assumed ascending (create_order derives the next id from orders[-1]).
orders = [
    {
        'id':1,
        'title':'order 1',
        'desc':'first order',
    },
    {
        'id':2,
        'title':'order 2',
        'desc':'second order',
    }
]
@app.errorhandler(404)
def not_found(error):
    '''
    Make JSON Error response.
    '''
    body = jsonify({'error':'Not Found.'})
    return make_response(body, 404)
@app.errorhandler(400)
def bad_request(error):
    '''Return a JSON 400 response.'''
    body = jsonify({'error':'Bad Request.'})
    return make_response(body, 400)
@auth.error_handler
def unauthorized():
    '''Return a JSON 403 response for failed basic-auth attempts.'''
    body = jsonify({'error':'unauthorized access'})
    return make_response(body, 403)
@app.route('/')
def index():
    '''Simple liveness endpoint.'''
    greeting = "Hello World!"
    return greeting
def make_public_order(order):
    '''
    Return the order public URI
    '''
    public_order = {}
    for field, value in order.items():
        if field != 'id':
            public_order[field] = value
        else:
            # Replace the internal id with an externally resolvable URI.
            public_order['uri'] = url_for('get_order',
                                          order_id=value,
                                          _external=True)
    return public_order
@app.route('/biz/api/v1.0/orders',methods=['GET'])
@auth.login_required
def get_orders():
    '''
    Get order list.
    Play ground with http auth:
    curl -u promise:pass -i http://localhost:8000/biz/api/v1.0/orders
    '''
    # FIX: use a list comprehension instead of map() — on Python 3 map()
    # returns a lazy iterator, which jsonify cannot serialize.
    return jsonify({'orders': [make_public_order(order) for order in orders]})
@app.route('/biz/api/v1.0/orders/<int:order_id>',methods=['GET'])
def get_order(order_id):
    '''
    Get an order with id.
    '''
    # FIX: filter() is a lazy iterator on Python 3, so len() on it raised
    # TypeError; materialize the matches as a list instead.
    forders = [o for o in orders if o['id'] == order_id]
    if len(forders) == 0:
        abort(404)
    return jsonify({'order':forders[0]})
@app.route('/biz/api/v1.0/orders',methods=['POST'])
def create_order():
    '''
    Create new order.
    Play ground:
    curl -i -H 'Content-Type: application/json' -X POST \
    -d '{"title":"another order"}' \
    http://localhost:8000/biz/api/v1.0/orders
    '''
    payload = request.json
    if not payload or 'title' not in payload:
        abort(400)
    # Next id = last order's id + 1 (ids are kept ascending).
    order = {
        'id': orders[-1]['id'] + 1,
        'title': payload['title'],
        'desc': payload.get('desc',''),
    }
    orders.append(order)
    return jsonify({'order':order}), 201
@app.route('/biz/api/v1.0/orders/<int:order_id>',methods=['PUT'])
def update_order(order_id):
    '''
    Update an existing order.
    Play ground:
    curl -i -H 'Content-Type: application/json' -X PUT
    -d '{"title":"another order modified", "desc":"desc modified"}'
    http://localhost:8000/biz/api/v1.0/orders/3
    '''
    # FIX: materialize the matches — on Python 3 filter() is a lazy
    # iterator and len() on it raised TypeError.
    forders = [o for o in orders if o['id'] == order_id]
    if len(forders) == 0:
        abort(404)
    order = forders[0]
    if not request.json:
        abort(400)
    if 'title' not in request.json:
        abort(400)
    order['title'] = request.json.get('title',order['title'])
    order['desc'] = request.json.get('desc',order['desc'])
    return jsonify({'result':'success'})
@app.route('/biz/api/v1.0/orders/<int:order_id>',methods=['DELETE'])
def delete_order(order_id):
    '''
    Delete an order.
    Play ground:
    curl -i -X DELETE http://localhost:8000/biz/api/v1.0/orders/3
    '''
    # FIX: materialize the matches — on Python 3 filter() is a lazy
    # iterator and len() on it raised TypeError.
    forders = [o for o in orders if o['id'] == order_id]
    if len(forders) == 0:
        abort(404)
    orders.remove(forders[0])
    return jsonify({'result':'success'})
if __name__ == '__main__':
    # Development server only; debug=True must not be used in production.
    app.run(port=8000,debug=True);
|
[
"flask.ext.httpauth.HTTPBasicAuth",
"flask.Flask",
"flask.abort",
"flask.jsonify",
"flask.url_for",
"flask.request.json.get"
] |
[((142, 157), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (147, 157), False, 'from flask import Flask, request, jsonify\n'), ((165, 180), 'flask.ext.httpauth.HTTPBasicAuth', 'HTTPBasicAuth', ([], {}), '()\n', (178, 180), False, 'from flask.ext.httpauth import HTTPBasicAuth\n'), ((1893, 1923), 'flask.jsonify', 'jsonify', (["{'order': forders[0]}"], {}), "({'order': forders[0]})\n", (1900, 1923), False, 'from flask import Flask, request, jsonify\n'), ((3122, 3163), 'flask.request.json.get', 'request.json.get', (['"""title"""', "order['title']"], {}), "('title', order['title'])\n", (3138, 3163), False, 'from flask import Flask, request, jsonify\n'), ((3183, 3222), 'flask.request.json.get', 'request.json.get', (['"""desc"""', "order['desc']"], {}), "('desc', order['desc'])\n", (3199, 3222), False, 'from flask import Flask, request, jsonify\n'), ((3233, 3263), 'flask.jsonify', 'jsonify', (["{'result': 'success'}"], {}), "({'result': 'success'})\n", (3240, 3263), False, 'from flask import Flask, request, jsonify\n'), ((3640, 3670), 'flask.jsonify', 'jsonify', (["{'result': 'success'}"], {}), "({'result': 'success'})\n", (3647, 3670), False, 'from flask import Flask, request, jsonify\n'), ((606, 638), 'flask.jsonify', 'jsonify', (["{'error': 'Not Found.'}"], {}), "({'error': 'Not Found.'})\n", (613, 638), False, 'from flask import Flask, request, jsonify\n'), ((718, 752), 'flask.jsonify', 'jsonify', (["{'error': 'Bad Request.'}"], {}), "({'error': 'Bad Request.'})\n", (725, 752), False, 'from flask import Flask, request, jsonify\n'), ((824, 865), 'flask.jsonify', 'jsonify', (["{'error': 'unauthorized access'}"], {}), "({'error': 'unauthorized access'})\n", (831, 865), False, 'from flask import Flask, request, jsonify\n'), ((1871, 1881), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (1876, 1881), False, 'from flask import abort, make_response, url_for\n'), ((2293, 2303), 'flask.abort', 'abort', (['(400)'], {}), '(400)\n', (2298, 2303), False, 'from 
flask import abort, make_response, url_for\n'), ((2407, 2435), 'flask.request.json.get', 'request.json.get', (['"""desc"""', '""""""'], {}), "('desc', '')\n", (2423, 2435), False, 'from flask import Flask, request, jsonify\n'), ((2478, 2503), 'flask.jsonify', 'jsonify', (["{'order': order}"], {}), "({'order': order})\n", (2485, 2503), False, 'from flask import Flask, request, jsonify\n'), ((2968, 2978), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (2973, 2978), False, 'from flask import abort, make_response, url_for\n'), ((3035, 3045), 'flask.abort', 'abort', (['(400)'], {}), '(400)\n', (3040, 3045), False, 'from flask import abort, make_response, url_for\n'), ((3090, 3100), 'flask.abort', 'abort', (['(400)'], {}), '(400)\n', (3095, 3100), False, 'from flask import abort, make_response, url_for\n'), ((3588, 3598), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (3593, 3598), False, 'from flask import abort, make_response, url_for\n'), ((1114, 1172), 'flask.url_for', 'url_for', (['"""get_order"""'], {'order_id': "order['id']", '_external': '(True)'}), "('get_order', order_id=order['id'], _external=True)\n", (1121, 1172), False, 'from flask import abort, make_response, url_for\n')]
|
#!/usr/bin/env python
import random
import string
from datetime import datetime
import json
def generate_id(length=16):
    """Return a random identifier of `length` lowercase letters and digits.

    NOTE(review): uses random, not secrets — fine for non-security ids only.
    """
    alphabet = string.ascii_lowercase + string.digits
    chars = [random.choice(alphabet) for _ in range(length)]
    return ''.join(chars)
class DateTimeEncoder(json.JSONEncoder):
    """JSON encoder that serialises datetime objects as ISO-like strings."""

    def default(self, obj):
        """Render datetimes as %Y-%m-%dT%H:%M:%S; defer everything else."""
        if not isinstance(obj, datetime):
            return super(DateTimeEncoder, self).default(obj)
        return obj.strftime("%Y-%m-%dT%H:%M:%S")
class DateTimeDecoder(json.JSONDecoder):
    """datetime support in json"""

    def __init__(self, *args, **kargs):
        json.JSONDecoder.__init__(self, object_hook=self.decoder, *args, **kargs)

    def decoder(self, d):
        """Object hook: convert ISO-like string values back to datetime.

        FIX: json passes every decoded JSON object (a dict) to the
        object_hook; the previous implementation called strptime on the
        dict itself, raising TypeError for any document containing an
        object.  Now only string values matching the format are converted,
        everything else is passed through unchanged.
        """
        out = {}
        for key, value in d.items():
            if isinstance(value, str):
                try:
                    out[key] = datetime.strptime(value, "%Y-%m-%dT%H:%M:%S")
                    continue
                except ValueError:
                    pass  # not a datetime string; keep as-is
            out[key] = value
        return out
def pascal_case(str):
    """Convert a snake_case / space-separated string to PascalCase.

    The parameter name shadows the builtin `str`; it is kept for
    backward compatibility with keyword callers, but aliased immediately
    so the shadowing stays contained.
    """
    text = str
    return ''.join(ch for ch in text.replace('_', ' ').title() if not ch.isspace())
|
[
"datetime.datetime.strptime",
"random.choice",
"json.JSONDecoder.__init__"
] |
[((671, 744), 'json.JSONDecoder.__init__', 'json.JSONDecoder.__init__', (['self', '*args'], {'object_hook': 'self.decoder'}), '(self, *args, object_hook=self.decoder, **kargs)\n', (696, 744), False, 'import json\n'), ((809, 850), 'datetime.datetime.strptime', 'datetime.strptime', (['d', '"""%Y-%m-%dT%H:%M:%S"""'], {}), "(d, '%Y-%m-%dT%H:%M:%S')\n", (826, 850), False, 'from datetime import datetime\n'), ((171, 224), 'random.choice', 'random.choice', (['(string.ascii_lowercase + string.digits)'], {}), '(string.ascii_lowercase + string.digits)\n', (184, 224), False, 'import random\n')]
|
import numpy as _np
import pandas as _pd
import matplotlib.pyplot as _plt
from src.plot_helpers.matplotlib_helpers\
import range_axis_ticks as _range_axis_ticks
def plot_value_by_element(df, xaxis, element_col, value_col, ax, cmap,
                          alpha=1.0, lw=1.0,
                          x_intervals=None, x_fmt=None):
    '''
    Plot values by each element in the column element_col in df on ax.
    Can specify linewidth (lw), alpha of lines and colormap to use (cmap).
    The number of x-axis intervals can be specified; if it is, provide x_fmt.
    Will only plot where value > 0 for given datetime
    Args:
        df: DataFrame
        xaxis: xaxis column name in df
        element_col: element column name in df
        value_col: value column name in df
        ax: matplotlib Axes object
        cmap: matplotlib.pyplot.cm colourmap object
        alpha (float, 0-1): plot alpha
        lw (float): linewidth of line plots
        x_intervals (int, optional, need x_fmt): number of x-axis intervals
        x_fmt (matplotlib Formatter, optional, need x_intervals): formatter
    Returns:
        Axis with plot of elements
    '''
    unique_elements = df[element_col].drop_duplicates()
    # One evenly spaced colour per element; `cmap` is rebound to an iterator
    # consumed in lockstep with the elements below.
    cmap = iter(cmap(_np.linspace(0, 1, len(unique_elements))))
    for element, colour in zip(unique_elements, cmap):
        filtered_val = df.loc[df[element_col] == element, [xaxis, value_col]]
        # Skip elements whose values sum to zero (abs() also rejects
        # series with a zero net total of mixed signs).
        if abs(filtered_val[value_col].sum()) > 0:
            ax.plot(filtered_val[xaxis],
                    filtered_val[value_col],
                    label=element,
                    alpha=alpha, linewidth=lw,
                    color=colour)
    if x_intervals is not None:
        # Delegate tick placement to the shared helper, then rotate labels.
        ax = _range_axis_ticks(ax, 'x', x_intervals, fmt=x_fmt)
        ax.tick_params(axis='x', labelrotation=90, labelsize=8)
    return ax
def find_nonzero_category(df, category_col, value_col):
'''
Returns a unique list of categories in category col.
Each category returned has a non-zero sum in the value_col.
Args:
df (pandas DataFrame): DataFrame to use
category_col (str): col within df to use to find non-zero categories
value_col (str): col within df to assess whether category is non-zero
Returns:
Sorted list of categories
'''
cat_types = set(df[category_col])
plot_types = []
for cat in cat_types:
df_cat_val = df.loc[df[category_col] == cat, value_col]
if df_cat_val.sum() > 0:
plot_types.append(cat)
return sorted(plot_types)
def plot_nonzero_elements_by_category(ax, df, xaxis_col, yaxis_col,
                                      element_col, category, category_col,
                                      cmap=None, lw=1.0, alpha=1.0):
    '''
    Plot x and y axis cols on ax for each unique element
    that satisfies a given category value.
    Args:
        ax (matplotlib Axes object): axis to plot on
        df (pandas DataFrame): dataframe to use
        xaxis_col (str): x-axis col name
        yaxis_col (str): y-axis col name
        element_col (str): col name used to identify individual elements
        category (str): isolate elements that are under this category
        category_col (str): col name in df used to filter on category
        cmap (matplotlib.pyplot.cm, optional): colormap
        lw (float, optional): linewidth
        alpha (float, optional): plot alpha
    Returns:
        Axis with plotted elements tha meet category criteria
        Dataframe with categories filtered
    '''
    category_df = df.loc[df[category_col] == category, :]
    elements = set(category_df[element_col])
    if cmap is not None:
        colors = iter(cmap(_np.linspace(0, 1, len(elements))))
    else:
        # FIX: a Colormap must be called with sample values in [0, 1].
        # The previous call `_plt.cm.tab10(0, 1, len(elements))` mapped to
        # Colormap.__call__(X=0, alpha=1, bytes=len(elements)) and returned
        # a single RGBA tuple instead of one colour per element.
        colors = iter(_plt.cm.tab10(_np.linspace(0, 1, len(elements))))
    # plot line for each element where yaxis_col > 0 for datetime range
    for element, color in zip(sorted(elements), colors):
        element_df = category_df.loc[category_df[element_col] == element, :]
        if element_df[yaxis_col].sum() > 0:
            ax.plot(element_df[xaxis_col], element_df[yaxis_col],
                    label=element, color=color, linewidth=lw)
    return ax, category_df
def nofb(df, datetime_col=None):
'''
Creates two series with the limits of the NEM normal operating
frequency band.
Args:
df (pandas DataFrame): DataFrame length to copy for NOFB series
datetime_col (str, optional): default is to assume DatetimeIndex,
if this is not the case, supply
datetime_col
Returns:
Tuple of Series corresponding to (upper NOFB, lower NOFB)
'''
if datetime_col:
lower = _np.ones(df[datetime_col].shape) * 49.85
upper = _np.ones(df[datetime_col].shape) * 50.15
index = df[datetime_col]
else:
lower = _np.ones(df.index.shape) * 49.85
upper = _np.ones(df.index.shape) * 50.15
index = df.index
return (_pd.Series(data=upper, index=index),
_pd.Series(data=lower, index=index))
def nofb_plot(nofb_function, axis, lw=1.0, alpha=1.0, style='y--'):
    '''
    Plots NOFB. Adds label to one of the plotted series.
    Args:
        nofb_function (func): nofb function with args supplied
        axis (matplotlib Axes): Axis to plot on
        lw (float, optional): plot linewidth
        alpha(float, optional): plot alpha
        style (str): matplotlib style, refer to pyplot.plot docs
    Returns:
        Axis with NOFB plotted
    '''
    # `nofb_function` is the (upper, lower) tuple returned by nofb().
    upper_series, lower_series = nofb_function
    # Only the upper series carries the legend label to avoid duplicates.
    axis.plot(upper_series, style, label='NOFB', linewidth=lw, alpha=alpha)
    axis.plot(lower_series, style, linewidth=lw, alpha=alpha)
    return axis
def stacked_bar_subplots(df, figsize, cmap,
                         xaxis_col, values_col,
                         subplots_list, subplot_col,
                         stacked_list, stacked_col,
                         label_add, ax_title_add):
    '''
    Plot stacked bar charts across a range of subplots
    Args:
        df (pd.DataFrame): DataFrame to plot, with releveant *_cols
        figsize (tuple): size in inches for figure
        cmap (plt.cmap): object to determine color palette for stacked elements
        xaxis_col (str): name of col in df to use as xticks
        values_col (str): name of col in df to plot values
        subplots_list (list): list to use as basis for separating subplots
        subplot_col (str); name of col in df to separate data for each subplot
        stacked_list (list): list of elements to stack in each subplot
        stacked_col (str): name of col in df to obtain series to stack
        label_add (str): generic str to add to the end of each stacked el label
        ax_title_add (str): generic str to add to end of ax title
    Returns:
        Fig, Ax
    '''
    # One subplot per entry in subplots_list, sharing the x axis.
    fig, ax = _plt.subplots(len(subplots_list), 1,
                            figsize=figsize, sharex=True)
    # NOTE(review): x_labels is computed but never applied as tick labels;
    # only its length (via x_tix) is used — confirm whether labels should
    # be set on the axes.
    x_labels = df[xaxis_col].drop_duplicates().tolist()
    x_tix = _np.arange(len(x_labels))
    # Fixed colour per stacked element, consistent across all subplots.
    colormap = _plt.get_cmap(cmap)
    colors = colormap(_np.linspace(0, 1, len(stacked_list)))
    plot_colors = {el: colors[i] for i, el in enumerate(stacked_list)}
    for i, subplot_el in enumerate(subplots_list):
        # y_base accumulates the running top of the stack for this subplot.
        y_base = _np.zeros_like(x_tix)
        subplot_df = df.query(f'{subplot_col}==@subplot_el')
        for el in stacked_list:
            rev = subplot_df.query(f'{stacked_col}==@el')
            ax[i].bar(x_tix, rev[values_col], bottom=y_base,
                      color=plot_colors[el],
                      label=f'{el} {label_add}')
            y_base = _np.add(y_base, rev[values_col].tolist())
        ax[i].set_title(f'{subplot_el} {ax_title_add}')
    return fig, ax
|
[
"numpy.zeros_like",
"matplotlib.pyplot.get_cmap",
"numpy.ones",
"src.plot_helpers.matplotlib_helpers.range_axis_ticks",
"pandas.Series"
] |
[((7095, 7114), 'matplotlib.pyplot.get_cmap', '_plt.get_cmap', (['cmap'], {}), '(cmap)\n', (7108, 7114), True, 'import matplotlib.pyplot as _plt\n'), ((1726, 1776), 'src.plot_helpers.matplotlib_helpers.range_axis_ticks', '_range_axis_ticks', (['ax', '"""x"""', 'x_intervals'], {'fmt': 'x_fmt'}), "(ax, 'x', x_intervals, fmt=x_fmt)\n", (1743, 1776), True, 'from src.plot_helpers.matplotlib_helpers import range_axis_ticks as _range_axis_ticks\n'), ((5031, 5066), 'pandas.Series', '_pd.Series', ([], {'data': 'upper', 'index': 'index'}), '(data=upper, index=index)\n', (5041, 5066), True, 'import pandas as _pd\n'), ((5080, 5115), 'pandas.Series', '_pd.Series', ([], {'data': 'lower', 'index': 'index'}), '(data=lower, index=index)\n', (5090, 5115), True, 'import pandas as _pd\n'), ((7316, 7337), 'numpy.zeros_like', '_np.zeros_like', (['x_tix'], {}), '(x_tix)\n', (7330, 7337), True, 'import numpy as _np\n'), ((4754, 4786), 'numpy.ones', '_np.ones', (['df[datetime_col].shape'], {}), '(df[datetime_col].shape)\n', (4762, 4786), True, 'import numpy as _np\n'), ((4811, 4843), 'numpy.ones', '_np.ones', (['df[datetime_col].shape'], {}), '(df[datetime_col].shape)\n', (4819, 4843), True, 'import numpy as _np\n'), ((4911, 4935), 'numpy.ones', '_np.ones', (['df.index.shape'], {}), '(df.index.shape)\n', (4919, 4935), True, 'import numpy as _np\n'), ((4960, 4984), 'numpy.ones', '_np.ones', (['df.index.shape'], {}), '(df.index.shape)\n', (4968, 4984), True, 'import numpy as _np\n')]
|
import torch
from torch import distributed
from mpi4py import MPI
import socket
import os
def init_process_group(backend, master_port='25901'):
    """Initialize ``torch.distributed`` using MPI for rendezvous.

    Rank 0 resolves its own hostname/address and broadcasts it so every
    rank agrees on ``MASTER_ADDR``/``MASTER_PORT`` before calling
    ``torch.distributed.init_process_group``.

    :param backend: torch.distributed backend name (e.g. 'nccl', 'gloo')
    :param master_port: TCP port used for the rendezvous; default
        ``'25901'`` matches the previously hard-coded value, so existing
        callers are unaffected.
    """
    comm = MPI.COMM_WORLD
    world_size = comm.Get_size()
    rank = comm.Get_rank()

    info = dict()
    if rank == 0:
        # Only rank 0 resolves the master address; all other ranks
        # receive it via the broadcast below.
        host = socket.gethostname()
        address = socket.gethostbyname(host)
        info.update(dict(MASTER_ADDR=address, MASTER_PORT=str(master_port)))

    info = comm.bcast(info, root=0)
    info.update(dict(WORLD_SIZE=str(world_size), RANK=str(rank)))
    # torch.distributed reads these environment variables when using the
    # default env:// initialization method.
    os.environ.update(info)
    distributed.init_process_group(backend=backend)
|
[
"socket.gethostname",
"torch.distributed.init_process_group",
"os.environ.update",
"socket.gethostbyname"
] |
[((505, 528), 'os.environ.update', 'os.environ.update', (['info'], {}), '(info)\n', (522, 528), False, 'import os\n'), ((534, 581), 'torch.distributed.init_process_group', 'distributed.init_process_group', ([], {'backend': 'backend'}), '(backend=backend)\n', (564, 581), False, 'from torch import distributed\n'), ((264, 284), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (282, 284), False, 'import socket\n'), ((303, 329), 'socket.gethostbyname', 'socket.gethostbyname', (['host'], {}), '(host)\n', (323, 329), False, 'import socket\n')]
|
import argparse
import logging
import os
import sys
import textwrap
from . import __version__
from .config import (
get_bustools_binary_path,
get_kallisto_binary_path,
is_dry,
PACKAGE_PATH,
REFERENCES_MAPPING,
set_dry,
TECHNOLOGIES,
TEMP_DIR,
)
from .constants import INFO_FILENAME
from .count import count, count_velocity
from .ref import download_reference, ref, ref_kite, ref_lamanno
from .utils import (
get_bustools_version,
get_kallisto_version,
make_directory,
remove_directory,
TqdmLoggingHandler,
)
def display_info():
    """Displays kb, kallisto and bustools version + citation information, along
    with a brief description and examples.
    """
    kallisto_version = '.'.join(str(part) for part in get_kallisto_version())
    bustools_version = '.'.join(str(part) for part in get_bustools_version())
    header = '''kb_python {}
kallisto: {}
bustools: {}
'''.format(__version__, kallisto_version, bustools_version)
    with open(os.path.join(PACKAGE_PATH, INFO_FILENAME), 'r') as info_file:
        # Citation lines (starting with '(') are kept as-is; all other
        # lines are re-wrapped to 80 columns.
        body_lines = []
        for line in info_file.readlines():
            if line.startswith('('):
                body_lines.append(line.strip())
            else:
                body_lines.append(textwrap.fill(line, width=80))
        print('{}\n{}'.format(header, '\n'.join(body_lines)))
    sys.exit(1)
def display_technologies():
    """Displays a list of supported technologies along with whether kb provides
    a whitelist for that technology and the FASTQ argument order for kb count.
    """
    headers = [
        'name', 'whitelist provided', 'barcode (file #, start, stop)',
        'umi (file #, start, stop)', 'read file #'
    ]
    rows = [headers]
    print('List of supported single-cell technologies\n')
    rows.extend([
        t.name,
        'yes' if t.whitelist_archive else '',
        ' '.join(str(tup) for tup in t.barcode_positions),
        ' '.join(str(tup) for tup in t.umi_positions),
        str(t.reads_file),
    ] for t in TECHNOLOGIES)

    # Column width = longest cell in that column (headers included).
    widths = [
        max(len(row[col]) for row in rows) for col in range(len(headers))
    ]
    rows.insert(1, ['-' * width for width in widths])
    for row in rows:
        print(''.join(
            cell.ljust(width + 4) for cell, width in zip(row, widths)
        ))
    sys.exit(1)
def parse_ref(args):
    """Parser for the `ref` command.

    :param args: Command-line arguments dictionary, as parsed by argparse
    :type args: dict
    """
    if args.d is not None:
        # Download a pre-built reference instead of building one.
        # Collect the destination paths the user supplied.
        files = {}
        for option in ('i', 'g', 'c1', 'c2'):
            value = getattr(args, option)
            if value is not None:
                files[option] = value
        download_reference(
            REFERENCES_MAPPING[args.d], files, overwrite=args.overwrite
        )
        return

    if args.workflow in {'lamanno', 'nucleus'} or args.lamanno:
        ref_lamanno(
            args.fasta,
            args.gtf,
            args.f1,
            args.f2,
            args.i,
            args.g,
            args.c1,
            args.c2,
            overwrite=args.overwrite
        )
    elif args.workflow == 'kite':
        ref_kite(
            args.feature, args.f1, args.i, args.g, overwrite=args.overwrite
        )
    else:
        ref(
            args.fasta,
            args.gtf,
            args.f1,
            args.i,
            args.g,
            overwrite=args.overwrite
        )
def parse_count(args):
    """Parser for the `count` command.

    :param args: Command-line arguments dictionary, as parsed by argparse
    :type args: dict
    """
    # Keyword arguments shared by both count variants.
    common = dict(
        tcc=args.tcc,
        filter=args.filter,
        threads=args.t,
        memory=args.m,
        overwrite=args.overwrite,
        loom=args.loom,
        h5ad=args.h5ad,
    )
    if args.workflow in {'lamanno', 'nucleus'} or args.lamanno or args.nucleus:
        count_velocity(
            args.i,
            args.g,
            args.c1,
            args.c2,
            args.x,
            args.o,
            args.fastqs,
            args.w,
            nucleus=args.workflow == 'nucleus' or args.nucleus,
            **common
        )
    else:
        count(
            args.i,
            args.g,
            args.x,
            args.o,
            args.fastqs,
            args.w,
            **common
        )
# Maps each CLI subcommand name to the handler that receives its parsed
# arguments (dispatched in `main`).
COMMAND_TO_FUNCTION = {
    'ref': parse_ref,
    'count': parse_count,
}
def setup_info_args(parser, parent):
    """Helper function to set up a subparser for the `info` command.

    :param parser: argparse parser to add the `info` command to
    :type args: argparse.ArgumentParser
    :param parent: argparse parser parent of the newly added subcommand.
                   used to inherit shared commands/flags
    :type args: argparse.ArgumentParser

    :return: the newly added parser
    :rtype: argparse.ArgumentParser
    """
    # Description and help text are intentionally identical.
    info_text = 'Display package and citation information'
    return parser.add_parser(
        'info',
        description=info_text,
        help=info_text,
        parents=[parent],
        add_help=False,
    )
def setup_ref_args(parser, parent):
    """Helper function to set up a subparser for the `ref` command.

    :param parser: argparse parser to add the `ref` command to
    :type args: argparse.ArgumentParser
    :param parent: argparse parser parent of the newly added subcommand.
                   used to inherit shared commands/flags
    :type args: argparse.ArgumentParser

    :return: the newly added parser
    :rtype: argparse.ArgumentParser
    """
    # Peek at sys.argv before parsing: the selected workflow and the -d
    # flag decide which options are required / which positionals apply.
    workflow = 'standard'
    if '--workflow' in sys.argv:
        workflow = sys.argv[sys.argv.index('--workflow') + 1]
    downloading = '-d' in sys.argv
    velocity = workflow in {'lamanno', 'nucleus'} or any(
        arg in sys.argv for arg in {'--lamanno', '--nucleus'}
    )

    description = 'Build a kallisto index and transcript-to-gene mapping'
    parser_ref = parser.add_parser(
        'ref', description=description, help=description, parents=[parent]
    )
    parser_ref._actions[0].help = parser_ref._actions[0].help.capitalize()

    required_ref = parser_ref.add_argument_group('required arguments')
    required_ref.add_argument(
        '-i', metavar='INDEX', type=str, required=True,
        help='Path to the kallisto index to be constructed'
    )
    required_ref.add_argument(
        '-g', metavar='T2G', type=str, required=True,
        help='Path to transcript-to-gene mapping to be generated'
    )
    required_ref.add_argument(
        '-f1', metavar='FASTA', type=str, required=not downloading,
        help=(
            '[Optional with -d] Path to the cDNA FASTA (lamanno, nucleus) '
            'or mismatch FASTA (kite) to be generated '
        )
    )

    required_lamanno = parser_ref.add_argument_group(
        'required arguments for `lamanno` and `nucleus` workflows'
    )
    required_lamanno.add_argument(
        '-f2', metavar='FASTA', type=str, required=velocity,
        help='Path to the intron FASTA to be generated'
    )
    required_lamanno.add_argument(
        '-c1', metavar='T2C', type=str, required=velocity,
        help='Path to generate cDNA transcripts-to-capture'
    )
    required_lamanno.add_argument(
        '-c2', metavar='T2C', type=str, required=velocity,
        help='Path to generate intron transcripts-to-capture'
    )

    parser_ref.add_argument(
        '-d', type=str, required=False,
        choices=list(REFERENCES_MAPPING.keys()),
        help=(
            'Download a pre-built kallisto index (along with all necessary files) '
            'instead of building it locally'
        )
    )
    parser_ref.add_argument(
        '--lamanno', action='store_true',
        help='Deprecated. Use `--workflow lamanno` instead.'
    )
    parser_ref.add_argument(
        '--overwrite', action='store_true',
        help='Overwrite existing kallisto index'
    )

    # Positionals become optional ('?') when a pre-built index is
    # downloaded or when the workflow does not need them.
    parser_ref.add_argument(
        'fasta', type=str, help='Genomic FASTA file',
        nargs='?' if downloading or workflow == 'kite' else None
    )
    parser_ref.add_argument(
        'gtf', type=str, help='Reference GTF file',
        nargs='?' if downloading or workflow == 'kite' else None
    )
    parser_ref.add_argument(
        'feature', type=str,
        help=(
            '[`kite` workflow only] Path to TSV containing barcodes and feature names.'
        ),
        nargs=None if not downloading and workflow == 'kite' else '?'
    )
    return parser_ref
def setup_count_args(parser, parent):
    """Helper function to set up a subparser for the `count` command.

    :param parser: argparse parser to add the `count` command to
    :type args: argparse.ArgumentParser
    :param parent: argparse parser parent of the newly added subcommand.
                   used to inherit shared commands/flags
    :type args: argparse.ArgumentParser

    :return: the newly added parser
    :rtype: argparse.ArgumentParser
    """
    # Peek at sys.argv before parsing: the selected workflow decides
    # whether the velocity capture files (-c1/-c2) are required.
    workflow = 'standard'
    if '--workflow' in sys.argv:
        workflow = sys.argv[sys.argv.index('--workflow') + 1]
    velocity = workflow in {'lamanno', 'nucleus'} or any(
        arg in sys.argv for arg in {'--lamanno', '--nucleus'}
    )

    # count
    parser_count = parser.add_parser(
        'count',
        description=('Generate count matrices from a set of single-cell FASTQ files. '
                     'Run `kb --list` to view single-cell technology information.'),  # noqa
        help='Generate count matrices from a set of single-cell FASTQ files',
        parents=[parent],
    )
    parser_count._actions[0].help = parser_count._actions[0].help.capitalize()

    required_count = parser_count.add_argument_group('required arguments')
    required_count.add_argument(
        '-i', metavar='INDEX', type=str, required=True,
        help='Path to kallisto index'
    )
    required_count.add_argument(
        '-g', metavar='T2G', type=str, required=True,
        help='Path to transcript-to-gene mapping'
    )
    required_count.add_argument(
        '-x', metavar='TECHNOLOGY', type=str, required=True,
        help='Single-cell technology used (`kb --list` to view)'
    )
    parser_count.add_argument(
        '-o', metavar='OUT', type=str, default='.',
        help='Path to output directory (default: current directory)'
    )
    parser_count.add_argument(
        '-w', metavar='WHITELIST', type=str,
        help=(
            'Path to file of whitelisted barcodes to correct to. '
            'If not provided and bustools supports the technology, '
            'a pre-packaged whitelist is used. If not, the bustools '
            'whitelist command is used. (`kb --list` to view whitelists)'
        )
    )
    parser_count.add_argument(
        '-t', metavar='THREADS', type=int, default=8,
        help='Number of threads to use (default: 8)'
    )
    parser_count.add_argument(
        '-m', metavar='MEMORY', type=str, default='4G',
        help='Maximum memory used (default: 4G)'
    )
    parser_count.add_argument(
        '--tcc', action='store_true',
        help='Generate a TCC matrix instead of a gene count matrix.'
    )

    required_lamanno = parser_count.add_argument_group(
        'required arguments for `lamanno` and `nucleus` workflows'
    )
    required_lamanno.add_argument(
        '-c1', metavar='T2C', type=str, required=velocity,
        help='Path to cDNA transcripts-to-capture'
    )
    required_lamanno.add_argument(
        '-c2', metavar='T2C', type=str, required=velocity,
        help='Path to intron transcripts-to-captured'
    )

    parser_count.add_argument(
        '--overwrite', action='store_true',
        help='Overwrite existing output.bus file'
    )
    parser_count.add_argument('--dry-run', help='Dry run', action='store_true')

    # --lamanno / --nucleus are mutually exclusive (deprecated aliases).
    velocity_group = parser_count.add_mutually_exclusive_group()
    velocity_group.add_argument(
        '--lamanno', action='store_true',
        help='Deprecated. Use `--workflow lamanno` instead.'
    )
    velocity_group.add_argument(
        '--nucleus', action='store_true',
        help='Deprecated. Use `--workflow nucleus` instead.'
    )
    parser_count.add_argument(
        '--filter', type=str, const='bustools', nargs='?',
        choices=['bustools'],
        help='Produce a filtered gene count matrix (default: bustools)'
    )
    # Only one matrix-conversion format may be requested.
    conversion_group = parser_count.add_mutually_exclusive_group()
    conversion_group.add_argument(
        '--loom', action='store_true',
        help='Generate loom file from count matrix'
    )
    conversion_group.add_argument(
        '--h5ad', action='store_true',
        help='Generate h5ad file from count matrix'
    )
    parser_count.add_argument('fastqs', help='FASTQ files', nargs='+')
    return parser_count
def main():
    """Command-line entrypoint.

    Builds the argument parser, handles the `info`/`--list` shortcuts,
    dispatches to the selected subcommand handler and always cleans up
    the temporary directory afterwards (unless --keep-tmp is given).
    """
    # Main parser
    parser = argparse.ArgumentParser(
        description='kb_python {}'.format(__version__)
    )
    parser._actions[0].help = parser._actions[0].help.capitalize()
    parser.add_argument(
        '--list',
        help='Display list of supported single-cell technologies',
        action='store_true'
    )
    subparsers = parser.add_subparsers(
        dest='command',
        metavar='<CMD>',
    )

    # Add common options to this parent parser
    parent = argparse.ArgumentParser(add_help=False)
    parent.add_argument(
        '--workflow',
        help=(
            'Type of workflow. Use `lamanno` to calculate '
            'RNA velocity based on La Manno et al. 2018 logic. Use `nucleus` to '
            'calculate RNA velocity on single-nucleus RNA-seq reads (default: standard)'
        ),
        type=str,
        default='standard',
        choices=['standard', 'lamanno', 'nucleus', 'kite']
    )
    parent.add_argument(
        '--keep-tmp',
        help='Do not delete the tmp directory',
        action='store_true'
    )
    parent.add_argument(
        '--verbose', help='Print debugging information', action='store_true'
    )

    # Command parsers
    setup_info_args(subparsers, argparse.ArgumentParser(add_help=False))
    parser_ref = setup_ref_args(subparsers, parent)
    parser_count = setup_count_args(subparsers, parent)
    command_to_parser = {
        'ref': parser_ref,
        'count': parser_count,
    }

    # `info` and `--list` short-circuit before normal parsing.
    if 'info' in sys.argv:
        display_info()
    elif '--list' in sys.argv:
        display_technologies()

    # Show help when no arguments are given
    if len(sys.argv) == 1:
        parser.print_help(sys.stderr)
        sys.exit(1)
    if len(sys.argv) == 2:
        if sys.argv[1] in command_to_parser:
            command_to_parser[sys.argv[1]].print_help(sys.stderr)
        else:
            parser.print_help(sys.stderr)
        sys.exit(1)

    args = parser.parse_args()

    logging.basicConfig(
        format='[%(asctime)s] %(levelname)7s %(message)s',
        level=logging.DEBUG if args.verbose else logging.INFO,
    )
    logger = logging.getLogger(__name__)
    logger.addHandler(TqdmLoggingHandler())

    if 'dry_run' in args:
        # Dry run can not be specified with matrix conversion.
        if args.dry_run and (args.loom or args.h5ad):
            # parser.error() prints the message and exits; the previous
            # `raise parser.error(...)` was unreachable dead code since
            # error() never returns.
            parser.error('--dry-run can not be used with --loom or --h5ad')
        if args.dry_run:
            # Silence all logging during a dry run.
            logging.disable(level=logging.CRITICAL)
            set_dry()

    if any(arg in sys.argv for arg in {'--lamanno', '--nucleus'}):
        logger.warning((
            'The `--lamanno` and `--nucleus` flags are deprecated. '
            'These options will be removed in a future release. '
            'Please use `--workflow lamanno` or `--workflow nucleus` instead.'
        ))

    logger.debug('Printing verbose output')
    logger.debug(
        'kallisto binary located at {}'.format(get_kallisto_binary_path())
    )
    logger.debug(
        'bustools binary located at {}'.format(get_bustools_binary_path())
    )
    logger.debug('Creating tmp directory')
    make_directory(TEMP_DIR)
    try:
        logger.debug(args)
        COMMAND_TO_FUNCTION[args.command](args)
    except Exception:
        # In dry-run mode errors must surface; otherwise log and fall
        # through to cleanup.
        if is_dry():
            raise
        logger.exception('An exception occurred')
    finally:
        # Always clean temp dir
        if not args.keep_tmp:
            logger.debug('Removing tmp directory')
            remove_directory(TEMP_DIR)
|
[
"textwrap.fill",
"argparse.ArgumentParser",
"logging.basicConfig",
"logging.disable",
"sys.argv.index",
"sys.exit",
"os.path.join",
"logging.getLogger"
] |
[((1334, 1345), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1342, 1345), False, 'import sys\n'), ((2423, 2434), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2431, 2434), False, 'import sys\n'), ((14419, 14458), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'add_help': '(False)'}), '(add_help=False)\n', (14442, 14458), False, 'import argparse\n'), ((15899, 16028), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""[%(asctime)s] %(levelname)7s %(message)s"""', 'level': '(logging.DEBUG if args.verbose else logging.INFO)'}), "(format='[%(asctime)s] %(levelname)7s %(message)s',\n level=logging.DEBUG if args.verbose else logging.INFO)\n", (15918, 16028), False, 'import logging\n'), ((16061, 16088), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (16078, 16088), False, 'import logging\n'), ((15166, 15205), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'add_help': '(False)'}), '(add_help=False)\n', (15189, 15205), False, 'import argparse\n'), ((15636, 15647), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (15644, 15647), False, 'import sys\n'), ((15850, 15861), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (15858, 15861), False, 'import sys\n'), ((997, 1038), 'os.path.join', 'os.path.join', (['PACKAGE_PATH', 'INFO_FILENAME'], {}), '(PACKAGE_PATH, INFO_FILENAME)\n', (1009, 1038), False, 'import os\n'), ((16427, 16466), 'logging.disable', 'logging.disable', ([], {'level': 'logging.CRITICAL'}), '(level=logging.CRITICAL)\n', (16442, 16466), False, 'import logging\n'), ((5954, 5982), 'sys.argv.index', 'sys.argv.index', (['"""--workflow"""'], {}), "('--workflow')\n", (5968, 5982), False, 'import sys\n'), ((9758, 9786), 'sys.argv.index', 'sys.argv.index', (['"""--workflow"""'], {}), "('--workflow')\n", (9772, 9786), False, 'import sys\n'), ((1211, 1240), 'textwrap.fill', 'textwrap.fill', (['line'], {'width': '(80)'}), '(line, width=80)\n', (1224, 1240), False, 'import textwrap\n')]
|
""" Testing code to check time checking mechanisms """
import time
import unittest
import sys
from Session import Session
def test_timeBeforeSession():
    """A timestamp captured shortly before the session was created is
    still reported as within the session window.
    """
    TEST_TIME = time.time()
    time.sleep(1)
    s = Session.createSession()
    s.endSession()
    # Idiomatic truth test instead of `== True`.
    assert s.isSessionWithin(TEST_TIME)
def test_timeDuringSession():
    """A timestamp captured between session start and end is reported as
    within the session window.
    """
    s = Session.createSession()
    time.sleep(1)
    TEST_TIME = time.time()
    time.sleep(1)
    s.endSession()
    # Idiomatic truth test instead of `== True`.
    assert s.isSessionWithin(TEST_TIME)
def test_timeAfterSession():
    """A timestamp captured after the session has ended is reported as
    outside the session window.
    """
    s = Session.createSession()
    time.sleep(1)
    s.endSession()
    TEST_TIME = time.time()
    time.sleep(1)
    # Idiomatic falsity test instead of `== False`.
    assert not s.isSessionWithin(TEST_TIME)
|
[
"time.sleep",
"Session.Session.createSession",
"time.time"
] |
[((175, 186), 'time.time', 'time.time', ([], {}), '()\n', (184, 186), False, 'import time\n'), ((191, 204), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (201, 204), False, 'import time\n'), ((214, 237), 'Session.Session.createSession', 'Session.createSession', ([], {}), '()\n', (235, 237), False, 'from Session import Session\n'), ((348, 371), 'Session.Session.createSession', 'Session.createSession', ([], {}), '()\n', (369, 371), False, 'from Session import Session\n'), ((377, 390), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (387, 390), False, 'import time\n'), ((408, 419), 'time.time', 'time.time', ([], {}), '()\n', (417, 419), False, 'import time\n'), ((424, 437), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (434, 437), False, 'import time\n'), ((547, 570), 'Session.Session.createSession', 'Session.createSession', ([], {}), '()\n', (568, 570), False, 'from Session import Session\n'), ((576, 589), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (586, 589), False, 'import time\n'), ((627, 638), 'time.time', 'time.time', ([], {}), '()\n', (636, 638), False, 'import time\n'), ((643, 656), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (653, 656), False, 'import time\n')]
|
# Generated from antlr4-python3-runtime-4.7.2/src/autogen/Cymbol.g4 by ANTLR 4.7.2
# encoding: utf-8
from antlr4 import *
from io import StringIO
from typing.io import TextIO
import sys
def serializedATN():
    """Return the serialized ATN (augmented transition network) for the
    Cymbol grammar as a single string of escaped code points.

    Machine-generated by ANTLR 4.7.2 — do not edit by hand; regenerate
    from Cymbol.g4 instead.
    """
    with StringIO() as buf:
        buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3#")
        buf.write("\u00af\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
        buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16")
        buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\3\2\3\2")
        buf.write("\6\2\'\n\2\r\2\16\2(\3\2\5\2,\n\2\3\3\3\3\3\3\3\3\5\3")
        buf.write("\62\n\3\3\3\3\3\3\4\3\4\3\5\3\5\3\5\3\5\5\5<\n\5\3\5\3")
        buf.write("\5\3\5\3\6\3\6\3\6\7\6D\n\6\f\6\16\6G\13\6\3\7\3\7\3\7")
        buf.write("\3\b\3\b\7\bN\n\b\f\b\16\bQ\13\b\3\b\3\b\3\t\3\t\3\t\3")
        buf.write("\t\3\t\3\n\3\n\5\n\\\n\n\3\n\3\n\3\13\3\13\5\13b\n\13")
        buf.write("\3\f\3\f\3\f\3\f\3\f\5\fi\n\f\3\r\3\r\3\r\3\r\3\r\3\r")
        buf.write("\3\16\3\16\3\16\3\17\3\17\3\17\3\20\3\20\3\20\7\20z\n")
        buf.write("\20\f\20\16\20}\13\20\3\21\3\21\3\21\3\21\3\21\5\21\u0084")
        buf.write("\n\21\3\22\3\22\3\22\3\22\5\22\u008a\n\22\3\22\3\22\3")
        buf.write("\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22")
        buf.write("\5\22\u0099\n\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3")
        buf.write("\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\7\22\u00aa\n\22")
        buf.write("\f\22\16\22\u00ad\13\22\3\22\2\3\"\23\2\4\6\b\n\f\16\20")
        buf.write("\22\24\26\30\32\34\36 \"\2\b\3\2\3\5\3\2\31\32\3\2\23")
        buf.write("\26\3\2\27\30\3\2\33\34\3\2\20\21\2\u00bc\2&\3\2\2\2\4")
        buf.write("-\3\2\2\2\6\65\3\2\2\2\b\67\3\2\2\2\n@\3\2\2\2\fH\3\2")
        buf.write("\2\2\16K\3\2\2\2\20T\3\2\2\2\22Y\3\2\2\2\24_\3\2\2\2\26")
        buf.write("h\3\2\2\2\30j\3\2\2\2\32p\3\2\2\2\34s\3\2\2\2\36v\3\2")
        buf.write("\2\2 \u0083\3\2\2\2\"\u0098\3\2\2\2$\'\5\b\5\2%\'\5\4")
        buf.write("\3\2&$\3\2\2\2&%\3\2\2\2\'(\3\2\2\2(&\3\2\2\2()\3\2\2")
        buf.write("\2)+\3\2\2\2*,\7\2\2\3+*\3\2\2\2+,\3\2\2\2,\3\3\2\2\2")
        buf.write("-.\5\6\4\2.\61\7\36\2\2/\60\7\17\2\2\60\62\5\"\22\2\61")
        buf.write("/\3\2\2\2\61\62\3\2\2\2\62\63\3\2\2\2\63\64\7\f\2\2\64")
        buf.write("\5\3\2\2\2\65\66\t\2\2\2\66\7\3\2\2\2\678\5\6\4\289\7")
        buf.write("\36\2\29;\7\t\2\2:<\5\n\6\2;:\3\2\2\2;<\3\2\2\2<=\3\2")
        buf.write("\2\2=>\7\n\2\2>?\5\16\b\2?\t\3\2\2\2@E\5\f\7\2AB\7\13")
        buf.write("\2\2BD\5\f\7\2CA\3\2\2\2DG\3\2\2\2EC\3\2\2\2EF\3\2\2\2")
        buf.write("F\13\3\2\2\2GE\3\2\2\2HI\5\6\4\2IJ\7\36\2\2J\r\3\2\2\2")
        buf.write("KO\7\r\2\2LN\5 \21\2ML\3\2\2\2NQ\3\2\2\2OM\3\2\2\2OP\3")
        buf.write("\2\2\2PR\3\2\2\2QO\3\2\2\2RS\7\16\2\2S\17\3\2\2\2TU\7")
        buf.write("\36\2\2UV\7\17\2\2VW\5\"\22\2WX\7\f\2\2X\21\3\2\2\2Y[")
        buf.write("\7\b\2\2Z\\\5\"\22\2[Z\3\2\2\2[\\\3\2\2\2\\]\3\2\2\2]")
        buf.write("^\7\f\2\2^\23\3\2\2\2_a\5\30\r\2`b\5\32\16\2a`\3\2\2\2")
        buf.write("ab\3\2\2\2b\25\3\2\2\2ci\5\16\b\2di\5\24\13\2ei\5\22\n")
        buf.write("\2fi\5\20\t\2gi\5\34\17\2hc\3\2\2\2hd\3\2\2\2he\3\2\2")
        buf.write("\2hf\3\2\2\2hg\3\2\2\2i\27\3\2\2\2jk\7\6\2\2kl\7\t\2\2")
        buf.write("lm\5\"\22\2mn\7\n\2\2no\5\26\f\2o\31\3\2\2\2pq\7\7\2\2")
        buf.write("qr\5\26\f\2r\33\3\2\2\2st\5\"\22\2tu\7\f\2\2u\35\3\2\2")
        buf.write("\2v{\5\"\22\2wx\7\13\2\2xz\5\"\22\2yw\3\2\2\2z}\3\2\2")
        buf.write("\2{y\3\2\2\2{|\3\2\2\2|\37\3\2\2\2}{\3\2\2\2~\u0084\5")
        buf.write("\4\3\2\177\u0084\5\24\13\2\u0080\u0084\5\22\n\2\u0081")
        buf.write("\u0084\5\20\t\2\u0082\u0084\5\34\17\2\u0083~\3\2\2\2\u0083")
        buf.write("\177\3\2\2\2\u0083\u0080\3\2\2\2\u0083\u0081\3\2\2\2\u0083")
        buf.write("\u0082\3\2\2\2\u0084!\3\2\2\2\u0085\u0086\b\22\1\2\u0086")
        buf.write("\u0087\7\36\2\2\u0087\u0089\7\t\2\2\u0088\u008a\5\36\20")
        buf.write("\2\u0089\u0088\3\2\2\2\u0089\u008a\3\2\2\2\u008a\u008b")
        buf.write("\3\2\2\2\u008b\u0099\7\n\2\2\u008c\u008d\t\3\2\2\u008d")
        buf.write("\u0099\5\"\22\16\u008e\u008f\7\22\2\2\u008f\u0099\5\"")
        buf.write("\22\r\u0090\u0099\7\36\2\2\u0091\u0099\7\37\2\2\u0092")
        buf.write("\u0099\7 \2\2\u0093\u0099\7\35\2\2\u0094\u0095\7\t\2\2")
        buf.write("\u0095\u0096\5\"\22\2\u0096\u0097\7\n\2\2\u0097\u0099")
        buf.write("\3\2\2\2\u0098\u0085\3\2\2\2\u0098\u008c\3\2\2\2\u0098")
        buf.write("\u008e\3\2\2\2\u0098\u0090\3\2\2\2\u0098\u0091\3\2\2\2")
        buf.write("\u0098\u0092\3\2\2\2\u0098\u0093\3\2\2\2\u0098\u0094\3")
        buf.write("\2\2\2\u0099\u00ab\3\2\2\2\u009a\u009b\f\f\2\2\u009b\u009c")
        buf.write("\t\4\2\2\u009c\u00aa\5\"\22\r\u009d\u009e\f\13\2\2\u009e")
        buf.write("\u009f\t\5\2\2\u009f\u00aa\5\"\22\f\u00a0\u00a1\f\n\2")
        buf.write("\2\u00a1\u00a2\t\3\2\2\u00a2\u00aa\5\"\22\13\u00a3\u00a4")
        buf.write("\f\t\2\2\u00a4\u00a5\t\6\2\2\u00a5\u00aa\5\"\22\n\u00a6")
        buf.write("\u00a7\f\b\2\2\u00a7\u00a8\t\7\2\2\u00a8\u00aa\5\"\22")
        buf.write("\t\u00a9\u009a\3\2\2\2\u00a9\u009d\3\2\2\2\u00a9\u00a0")
        buf.write("\3\2\2\2\u00a9\u00a3\3\2\2\2\u00a9\u00a6\3\2\2\2\u00aa")
        buf.write("\u00ad\3\2\2\2\u00ab\u00a9\3\2\2\2\u00ab\u00ac\3\2\2\2")
        buf.write("\u00ac#\3\2\2\2\u00ad\u00ab\3\2\2\2\22&(+\61;EO[ah{\u0083")
        buf.write("\u0089\u0098\u00a9\u00ab")
        return buf.getvalue()
class CymbolParser ( Parser ):
grammarFileName = "Cymbol.g4"
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
sharedContextCache = PredictionContextCache()
literalNames = [ "<INVALID>", "'int'", "'float'", "'boolean'", "'if'",
"'else'", "'return'", "'('", "')'", "','", "';'", "'{'",
"'}'", "'='", "'=='", "'!='", "'!'", "'>'", "'<'",
"'>='", "'<='", "'*'", "'/'", "'+'", "'-'", "'&&'",
"'||'" ]
symbolicNames = [ "<INVALID>", "TYPEINT", "TYPEFLOAT", "TYPEBOOLEAN",
"IF", "ELSE", "RETURN", "LP", "RP", "COMMA", "SEMICOLON",
"LB", "RB", "AS", "EQ", "NE", "NOT", "GT", "LT", "GE",
"LE", "MUL", "DIV", "PLUS", "MINUS", "AND", "OR",
"BOOLEAN", "ID", "INT", "FLOAT", "BLOCKCOMMENT", "LINECOMMENT",
"WS" ]
RULE_fiile = 0
RULE_varDecl = 1
RULE_tyype = 2
RULE_funcDecl = 3
RULE_paramTypeList = 4
RULE_paramType = 5
RULE_block = 6
RULE_assignStat = 7
RULE_returnStat = 8
RULE_ifElseStat = 9
RULE_ifElseExprStat = 10
RULE_ifStat = 11
RULE_elseStat = 12
RULE_exprStat = 13
RULE_exprList = 14
RULE_stat = 15
RULE_expr = 16
ruleNames = [ "fiile", "varDecl", "tyype", "funcDecl", "paramTypeList",
"paramType", "block", "assignStat", "returnStat", "ifElseStat",
"ifElseExprStat", "ifStat", "elseStat", "exprStat", "exprList",
"stat", "expr" ]
EOF = Token.EOF
TYPEINT=1
TYPEFLOAT=2
TYPEBOOLEAN=3
IF=4
ELSE=5
RETURN=6
LP=7
RP=8
COMMA=9
SEMICOLON=10
LB=11
RB=12
AS=13
EQ=14
NE=15
NOT=16
GT=17
LT=18
GE=19
LE=20
MUL=21
DIV=22
PLUS=23
MINUS=24
AND=25
OR=26
BOOLEAN=27
ID=28
INT=29
FLOAT=30
BLOCKCOMMENT=31
LINECOMMENT=32
WS=33
def __init__(self, input:TokenStream, output:TextIO = sys.stdout):
super().__init__(input, output)
self.checkVersion("4.7.2")
self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
self._predicates = None
class FiileContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def funcDecl(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(CymbolParser.FuncDeclContext)
else:
return self.getTypedRuleContext(CymbolParser.FuncDeclContext,i)
def varDecl(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(CymbolParser.VarDeclContext)
else:
return self.getTypedRuleContext(CymbolParser.VarDeclContext,i)
def EOF(self):
return self.getToken(CymbolParser.EOF, 0)
def getRuleIndex(self):
return CymbolParser.RULE_fiile
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitFiile" ):
return visitor.visitFiile(self)
else:
return visitor.visitChildren(self)
def fiile(self):
localctx = CymbolParser.FiileContext(self, self._ctx, self.state)
self.enterRule(localctx, 0, self.RULE_fiile)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 36
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 36
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,0,self._ctx)
if la_ == 1:
self.state = 34
self.funcDecl()
pass
elif la_ == 2:
self.state = 35
self.varDecl()
pass
self.state = 38
self._errHandler.sync(self)
_la = self._input.LA(1)
if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CymbolParser.TYPEINT) | (1 << CymbolParser.TYPEFLOAT) | (1 << CymbolParser.TYPEBOOLEAN))) != 0)):
break
self.state = 41
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,2,self._ctx)
if la_ == 1:
self.state = 40
self.match(CymbolParser.EOF)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class VarDeclContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tyype(self):
return self.getTypedRuleContext(CymbolParser.TyypeContext,0)
def ID(self):
return self.getToken(CymbolParser.ID, 0)
def SEMICOLON(self):
return self.getToken(CymbolParser.SEMICOLON, 0)
def AS(self):
return self.getToken(CymbolParser.AS, 0)
def expr(self):
return self.getTypedRuleContext(CymbolParser.ExprContext,0)
def getRuleIndex(self):
return CymbolParser.RULE_varDecl
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitVarDecl" ):
return visitor.visitVarDecl(self)
else:
return visitor.visitChildren(self)
def varDecl(self):
localctx = CymbolParser.VarDeclContext(self, self._ctx, self.state)
self.enterRule(localctx, 2, self.RULE_varDecl)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 43
self.tyype()
self.state = 44
self.match(CymbolParser.ID)
self.state = 47
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==CymbolParser.AS:
self.state = 45
self.match(CymbolParser.AS)
self.state = 46
self.expr(0)
self.state = 49
self.match(CymbolParser.SEMICOLON)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class TyypeContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def TYPEINT(self):
return self.getToken(CymbolParser.TYPEINT, 0)
def TYPEFLOAT(self):
return self.getToken(CymbolParser.TYPEFLOAT, 0)
def TYPEBOOLEAN(self):
return self.getToken(CymbolParser.TYPEBOOLEAN, 0)
def getRuleIndex(self):
return CymbolParser.RULE_tyype
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitTyype" ):
return visitor.visitTyype(self)
else:
return visitor.visitChildren(self)
def tyype(self):
localctx = CymbolParser.TyypeContext(self, self._ctx, self.state)
self.enterRule(localctx, 4, self.RULE_tyype)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 51
_la = self._input.LA(1)
if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CymbolParser.TYPEINT) | (1 << CymbolParser.TYPEFLOAT) | (1 << CymbolParser.TYPEBOOLEAN))) != 0)):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class FuncDeclContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tyype(self):
return self.getTypedRuleContext(CymbolParser.TyypeContext,0)
def ID(self):
return self.getToken(CymbolParser.ID, 0)
def LP(self):
return self.getToken(CymbolParser.LP, 0)
def RP(self):
return self.getToken(CymbolParser.RP, 0)
def block(self):
return self.getTypedRuleContext(CymbolParser.BlockContext,0)
def paramTypeList(self):
return self.getTypedRuleContext(CymbolParser.ParamTypeListContext,0)
def getRuleIndex(self):
return CymbolParser.RULE_funcDecl
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitFuncDecl" ):
return visitor.visitFuncDecl(self)
else:
return visitor.visitChildren(self)
def funcDecl(self):
localctx = CymbolParser.FuncDeclContext(self, self._ctx, self.state)
self.enterRule(localctx, 6, self.RULE_funcDecl)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 53
self.tyype()
self.state = 54
self.match(CymbolParser.ID)
self.state = 55
self.match(CymbolParser.LP)
self.state = 57
self._errHandler.sync(self)
_la = self._input.LA(1)
if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CymbolParser.TYPEINT) | (1 << CymbolParser.TYPEFLOAT) | (1 << CymbolParser.TYPEBOOLEAN))) != 0):
self.state = 56
self.paramTypeList()
self.state = 59
self.match(CymbolParser.RP)
self.state = 60
self.block()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
    class ParamTypeListContext(ParserRuleContext):
        # ANTLR-generated parse-tree node for 'paramTypeList':
        # paramType (COMMA paramType)*.  Machine-generated; do not hand-edit.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def paramType(self, i:int=None):
            # i is None -> all paramType children; otherwise the i-th one.
            if i is None:
                return self.getTypedRuleContexts(CymbolParser.ParamTypeContext)
            else:
                return self.getTypedRuleContext(CymbolParser.ParamTypeContext,i)

        def COMMA(self, i:int=None):
            # i is None -> all separator tokens; otherwise the i-th one.
            if i is None:
                return self.getTokens(CymbolParser.COMMA)
            else:
                return self.getToken(CymbolParser.COMMA, i)

        def getRuleIndex(self):
            return CymbolParser.RULE_paramTypeList

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitParamTypeList" ):
                return visitor.visitParamTypeList(self)
            else:
                return visitor.visitChildren(self)
    def paramTypeList(self):
        # Parse a comma-separated formal-parameter list: paramType (',' paramType)*.
        # ANTLR-generated; self.state numbers index the serialized ATN.
        localctx = CymbolParser.ParamTypeListContext(self, self._ctx, self.state)
        self.enterRule(localctx, 8, self.RULE_paramTypeList)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 62
            self.paramType()
            self.state = 67
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Keep consuming ", paramType" pairs while a comma is next.
            while _la==CymbolParser.COMMA:
                self.state = 63
                self.match(CymbolParser.COMMA)
                self.state = 64
                self.paramType()
                self.state = 69
                self._errHandler.sync(self)
                _la = self._input.LA(1)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class ParamTypeContext(ParserRuleContext):
        # ANTLR-generated parse-tree node for 'paramType': tyype ID
        # (one typed formal parameter).  Machine-generated; do not hand-edit.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def tyype(self):
            # Declared type of the parameter.
            return self.getTypedRuleContext(CymbolParser.TyypeContext,0)

        def ID(self):
            # Parameter name token.
            return self.getToken(CymbolParser.ID, 0)

        def getRuleIndex(self):
            return CymbolParser.RULE_paramType

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitParamType" ):
                return visitor.visitParamType(self)
            else:
                return visitor.visitChildren(self)
    def paramType(self):
        # Parse one formal parameter: tyype ID.
        # ANTLR-generated; self.state numbers index the serialized ATN.
        localctx = CymbolParser.ParamTypeContext(self, self._ctx, self.state)
        self.enterRule(localctx, 10, self.RULE_paramType)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 70
            self.tyype()
            self.state = 71
            self.match(CymbolParser.ID)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class BlockContext(ParserRuleContext):
        # ANTLR-generated parse-tree node for 'block': LB stat* RB
        # (a brace-delimited statement list).  Machine-generated; do not hand-edit.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def LB(self):
            return self.getToken(CymbolParser.LB, 0)

        def RB(self):
            return self.getToken(CymbolParser.RB, 0)

        def stat(self, i:int=None):
            # i is None -> all statement children; otherwise the i-th one.
            if i is None:
                return self.getTypedRuleContexts(CymbolParser.StatContext)
            else:
                return self.getTypedRuleContext(CymbolParser.StatContext,i)

        def getRuleIndex(self):
            return CymbolParser.RULE_block

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitBlock" ):
                return visitor.visitBlock(self)
            else:
                return visitor.visitChildren(self)
    def block(self):
        # Parse a braced statement block: '{' stat* '}'.
        # ANTLR-generated; self.state numbers index the serialized ATN.
        localctx = CymbolParser.BlockContext(self, self._ctx, self.state)
        self.enterRule(localctx, 12, self.RULE_block)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 73
            self.match(CymbolParser.LB)
            self.state = 77
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Bitmask over every token that can begin a statement; loop until RB.
            while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CymbolParser.TYPEINT) | (1 << CymbolParser.TYPEFLOAT) | (1 << CymbolParser.TYPEBOOLEAN) | (1 << CymbolParser.IF) | (1 << CymbolParser.RETURN) | (1 << CymbolParser.LP) | (1 << CymbolParser.NOT) | (1 << CymbolParser.PLUS) | (1 << CymbolParser.MINUS) | (1 << CymbolParser.BOOLEAN) | (1 << CymbolParser.ID) | (1 << CymbolParser.INT) | (1 << CymbolParser.FLOAT))) != 0):
                self.state = 74
                self.stat()
                self.state = 79
                self._errHandler.sync(self)
                _la = self._input.LA(1)
            self.state = 80
            self.match(CymbolParser.RB)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class AssignStatContext(ParserRuleContext):
        # ANTLR-generated parse-tree node for 'assignStat': ID AS expr SEMICOLON
        # (assignment statement).  Machine-generated; do not hand-edit.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def ID(self):
            # Assignment target token.
            return self.getToken(CymbolParser.ID, 0)

        def AS(self):
            # The assignment operator token.
            return self.getToken(CymbolParser.AS, 0)

        def expr(self):
            # Right-hand-side expression.
            return self.getTypedRuleContext(CymbolParser.ExprContext,0)

        def SEMICOLON(self):
            return self.getToken(CymbolParser.SEMICOLON, 0)

        def getRuleIndex(self):
            return CymbolParser.RULE_assignStat

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitAssignStat" ):
                return visitor.visitAssignStat(self)
            else:
                return visitor.visitChildren(self)
    def assignStat(self):
        # Parse an assignment statement: ID '=' expr ';'.
        # ANTLR-generated; self.state numbers index the serialized ATN.
        localctx = CymbolParser.AssignStatContext(self, self._ctx, self.state)
        self.enterRule(localctx, 14, self.RULE_assignStat)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 82
            self.match(CymbolParser.ID)
            self.state = 83
            self.match(CymbolParser.AS)
            self.state = 84
            self.expr(0)
            self.state = 85
            self.match(CymbolParser.SEMICOLON)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class ReturnStatContext(ParserRuleContext):
        # ANTLR-generated parse-tree node for 'returnStat': RETURN expr? SEMICOLON.
        # Machine-generated; do not hand-edit.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def RETURN(self):
            return self.getToken(CymbolParser.RETURN, 0)

        def SEMICOLON(self):
            return self.getToken(CymbolParser.SEMICOLON, 0)

        def expr(self):
            # Returned value; the grammar makes it optional (bare 'return;').
            return self.getTypedRuleContext(CymbolParser.ExprContext,0)

        def getRuleIndex(self):
            return CymbolParser.RULE_returnStat

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitReturnStat" ):
                return visitor.visitReturnStat(self)
            else:
                return visitor.visitChildren(self)
    def returnStat(self):
        # Parse a return statement: 'return' expr? ';'.
        # ANTLR-generated; self.state numbers index the serialized ATN.
        localctx = CymbolParser.ReturnStatContext(self, self._ctx, self.state)
        self.enterRule(localctx, 16, self.RULE_returnStat)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 87
            self.match(CymbolParser.RETURN)
            self.state = 89
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Bitmask lookahead: parse the optional expression only when the next
            # token can start one.
            if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CymbolParser.LP) | (1 << CymbolParser.NOT) | (1 << CymbolParser.PLUS) | (1 << CymbolParser.MINUS) | (1 << CymbolParser.BOOLEAN) | (1 << CymbolParser.ID) | (1 << CymbolParser.INT) | (1 << CymbolParser.FLOAT))) != 0):
                self.state = 88
                self.expr(0)
            self.state = 91
            self.match(CymbolParser.SEMICOLON)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class IfElseStatContext(ParserRuleContext):
        # ANTLR-generated parse-tree node for 'ifElseStat': ifStat elseStat?.
        # Machine-generated; do not hand-edit.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def ifStat(self):
            return self.getTypedRuleContext(CymbolParser.IfStatContext,0)

        def elseStat(self):
            # Optional 'else' branch (see ifElseStat() below).
            return self.getTypedRuleContext(CymbolParser.ElseStatContext,0)

        def getRuleIndex(self):
            return CymbolParser.RULE_ifElseStat

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitIfElseStat" ):
                return visitor.visitIfElseStat(self)
            else:
                return visitor.visitChildren(self)
    def ifElseStat(self):
        # Parse an if statement with optional else: ifStat elseStat?.
        # ANTLR-generated; adaptivePredict resolves the dangling-else ambiguity.
        localctx = CymbolParser.IfElseStatContext(self, self._ctx, self.state)
        self.enterRule(localctx, 18, self.RULE_ifElseStat)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 93
            self.ifStat()
            self.state = 95
            self._errHandler.sync(self)
            # Decision 8 in the ATN: attach a following 'else' to this 'if'.
            la_ = self._interp.adaptivePredict(self._input,8,self._ctx)
            if la_ == 1:
                self.state = 94
                self.elseStat()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class IfElseExprStatContext(ParserRuleContext):
        # ANTLR-generated parse-tree node for 'ifElseExprStat': the body of an
        # if/else branch, which is exactly one of block | ifElseStat | returnStat
        # | assignStat | exprStat.  Machine-generated; do not hand-edit.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def block(self):
            return self.getTypedRuleContext(CymbolParser.BlockContext,0)

        def ifElseStat(self):
            return self.getTypedRuleContext(CymbolParser.IfElseStatContext,0)

        def returnStat(self):
            return self.getTypedRuleContext(CymbolParser.ReturnStatContext,0)

        def assignStat(self):
            return self.getTypedRuleContext(CymbolParser.AssignStatContext,0)

        def exprStat(self):
            return self.getTypedRuleContext(CymbolParser.ExprStatContext,0)

        def getRuleIndex(self):
            return CymbolParser.RULE_ifElseExprStat

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitIfElseExprStat" ):
                return visitor.visitIfElseExprStat(self)
            else:
                return visitor.visitChildren(self)
    def ifElseExprStat(self):
        # Parse the body of an if/else branch; adaptivePredict (decision 9)
        # selects exactly one of the five alternatives below.
        # ANTLR-generated; self.state numbers index the serialized ATN.
        localctx = CymbolParser.IfElseExprStatContext(self, self._ctx, self.state)
        self.enterRule(localctx, 20, self.RULE_ifElseExprStat)
        try:
            self.state = 102
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,9,self._ctx)
            if la_ == 1:
                # Alternative 1: a braced block.
                self.enterOuterAlt(localctx, 1)
                self.state = 97
                self.block()
                pass
            elif la_ == 2:
                # Alternative 2: a nested if/else.
                self.enterOuterAlt(localctx, 2)
                self.state = 98
                self.ifElseStat()
                pass
            elif la_ == 3:
                # Alternative 3: a return statement.
                self.enterOuterAlt(localctx, 3)
                self.state = 99
                self.returnStat()
                pass
            elif la_ == 4:
                # Alternative 4: an assignment.
                self.enterOuterAlt(localctx, 4)
                self.state = 100
                self.assignStat()
                pass
            elif la_ == 5:
                # Alternative 5: a bare expression statement.
                self.enterOuterAlt(localctx, 5)
                self.state = 101
                self.exprStat()
                pass
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class IfStatContext(ParserRuleContext):
        # ANTLR-generated parse-tree node for 'ifStat':
        # IF LP expr RP ifElseExprStat.  Machine-generated; do not hand-edit.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def IF(self):
            return self.getToken(CymbolParser.IF, 0)

        def LP(self):
            return self.getToken(CymbolParser.LP, 0)

        def expr(self):
            # The parenthesized condition.
            return self.getTypedRuleContext(CymbolParser.ExprContext,0)

        def RP(self):
            return self.getToken(CymbolParser.RP, 0)

        def ifElseExprStat(self):
            # The 'then' body.
            return self.getTypedRuleContext(CymbolParser.IfElseExprStatContext,0)

        def getRuleIndex(self):
            return CymbolParser.RULE_ifStat

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitIfStat" ):
                return visitor.visitIfStat(self)
            else:
                return visitor.visitChildren(self)
    def ifStat(self):
        # Parse the 'if' head and its body: 'if' '(' expr ')' ifElseExprStat.
        # ANTLR-generated; self.state numbers index the serialized ATN.
        localctx = CymbolParser.IfStatContext(self, self._ctx, self.state)
        self.enterRule(localctx, 22, self.RULE_ifStat)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 104
            self.match(CymbolParser.IF)
            self.state = 105
            self.match(CymbolParser.LP)
            self.state = 106
            self.expr(0)
            self.state = 107
            self.match(CymbolParser.RP)
            self.state = 108
            self.ifElseExprStat()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class ElseStatContext(ParserRuleContext):
        # ANTLR-generated parse-tree node for 'elseStat': ELSE ifElseExprStat.
        # Machine-generated; do not hand-edit.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def ELSE(self):
            return self.getToken(CymbolParser.ELSE, 0)

        def ifElseExprStat(self):
            # The 'else' body.
            return self.getTypedRuleContext(CymbolParser.IfElseExprStatContext,0)

        def getRuleIndex(self):
            return CymbolParser.RULE_elseStat

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitElseStat" ):
                return visitor.visitElseStat(self)
            else:
                return visitor.visitChildren(self)
    def elseStat(self):
        # Parse an 'else' branch: 'else' ifElseExprStat.
        # ANTLR-generated; self.state numbers index the serialized ATN.
        localctx = CymbolParser.ElseStatContext(self, self._ctx, self.state)
        self.enterRule(localctx, 24, self.RULE_elseStat)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 110
            self.match(CymbolParser.ELSE)
            self.state = 111
            self.ifElseExprStat()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class ExprStatContext(ParserRuleContext):
        # ANTLR-generated parse-tree node for 'exprStat': expr SEMICOLON
        # (expression used as a statement).  Machine-generated; do not hand-edit.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def expr(self):
            return self.getTypedRuleContext(CymbolParser.ExprContext,0)

        def SEMICOLON(self):
            return self.getToken(CymbolParser.SEMICOLON, 0)

        def getRuleIndex(self):
            return CymbolParser.RULE_exprStat

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitExprStat" ):
                return visitor.visitExprStat(self)
            else:
                return visitor.visitChildren(self)
    def exprStat(self):
        # Parse an expression statement: expr ';'.
        # ANTLR-generated; self.state numbers index the serialized ATN.
        localctx = CymbolParser.ExprStatContext(self, self._ctx, self.state)
        self.enterRule(localctx, 26, self.RULE_exprStat)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 113
            self.expr(0)
            self.state = 114
            self.match(CymbolParser.SEMICOLON)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class ExprListContext(ParserRuleContext):
        # ANTLR-generated parse-tree node for 'exprList': expr (COMMA expr)*
        # (call-argument list).  Machine-generated; do not hand-edit.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def expr(self, i:int=None):
            # i is None -> all argument expressions; otherwise the i-th one.
            if i is None:
                return self.getTypedRuleContexts(CymbolParser.ExprContext)
            else:
                return self.getTypedRuleContext(CymbolParser.ExprContext,i)

        def COMMA(self, i:int=None):
            # i is None -> all separator tokens; otherwise the i-th one.
            if i is None:
                return self.getTokens(CymbolParser.COMMA)
            else:
                return self.getToken(CymbolParser.COMMA, i)

        def getRuleIndex(self):
            return CymbolParser.RULE_exprList

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitExprList" ):
                return visitor.visitExprList(self)
            else:
                return visitor.visitChildren(self)
    def exprList(self):
        # Parse a comma-separated expression list: expr (',' expr)*.
        # ANTLR-generated; self.state numbers index the serialized ATN.
        localctx = CymbolParser.ExprListContext(self, self._ctx, self.state)
        self.enterRule(localctx, 28, self.RULE_exprList)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 116
            self.expr(0)
            self.state = 121
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Keep consuming ", expr" pairs while a comma is next.
            while _la==CymbolParser.COMMA:
                self.state = 117
                self.match(CymbolParser.COMMA)
                self.state = 118
                self.expr(0)
                self.state = 123
                self._errHandler.sync(self)
                _la = self._input.LA(1)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class StatContext(ParserRuleContext):
        # ANTLR-generated parse-tree node for 'stat': exactly one of
        # varDecl | ifElseStat | returnStat | assignStat | exprStat.
        # Machine-generated; do not hand-edit.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def varDecl(self):
            return self.getTypedRuleContext(CymbolParser.VarDeclContext,0)

        def ifElseStat(self):
            return self.getTypedRuleContext(CymbolParser.IfElseStatContext,0)

        def returnStat(self):
            return self.getTypedRuleContext(CymbolParser.ReturnStatContext,0)

        def assignStat(self):
            return self.getTypedRuleContext(CymbolParser.AssignStatContext,0)

        def exprStat(self):
            return self.getTypedRuleContext(CymbolParser.ExprStatContext,0)

        def getRuleIndex(self):
            return CymbolParser.RULE_stat

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitStat" ):
                return visitor.visitStat(self)
            else:
                return visitor.visitChildren(self)
    def stat(self):
        # Parse one statement; adaptivePredict (decision 11) selects exactly
        # one of the five alternatives below.
        # ANTLR-generated; self.state numbers index the serialized ATN.
        localctx = CymbolParser.StatContext(self, self._ctx, self.state)
        self.enterRule(localctx, 30, self.RULE_stat)
        try:
            self.state = 129
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,11,self._ctx)
            if la_ == 1:
                # Alternative 1: a variable declaration.
                self.enterOuterAlt(localctx, 1)
                self.state = 124
                self.varDecl()
                pass
            elif la_ == 2:
                # Alternative 2: an if/else statement.
                self.enterOuterAlt(localctx, 2)
                self.state = 125
                self.ifElseStat()
                pass
            elif la_ == 3:
                # Alternative 3: a return statement.
                self.enterOuterAlt(localctx, 3)
                self.state = 126
                self.returnStat()
                pass
            elif la_ == 4:
                # Alternative 4: an assignment.
                self.enterOuterAlt(localctx, 4)
                self.state = 127
                self.assignStat()
                pass
            elif la_ == 5:
                # Alternative 5: a bare expression statement.
                self.enterOuterAlt(localctx, 5)
                self.state = 128
                self.exprStat()
                pass
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class ExprContext(ParserRuleContext):
        # ANTLR-generated parse-tree node for the left-recursive 'expr' rule.
        # Covers calls, unary +/-/!, literals, parenthesized expressions and all
        # binary operators; self.op records the operator token for unary/binary
        # alternatives.  Machine-generated; do not hand-edit.

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
            # Operator token for unary/binary alternatives (None otherwise).
            self.op = None # Token

        def ID(self):
            return self.getToken(CymbolParser.ID, 0)

        def LP(self):
            return self.getToken(CymbolParser.LP, 0)

        def RP(self):
            return self.getToken(CymbolParser.RP, 0)

        def exprList(self):
            # Call-argument list (function-call alternative).
            return self.getTypedRuleContext(CymbolParser.ExprListContext,0)

        def expr(self, i:int=None):
            # Operand subexpressions: all of them, or the i-th one.
            if i is None:
                return self.getTypedRuleContexts(CymbolParser.ExprContext)
            else:
                return self.getTypedRuleContext(CymbolParser.ExprContext,i)

        def PLUS(self):
            return self.getToken(CymbolParser.PLUS, 0)

        def MINUS(self):
            return self.getToken(CymbolParser.MINUS, 0)

        def NOT(self):
            return self.getToken(CymbolParser.NOT, 0)

        def INT(self):
            return self.getToken(CymbolParser.INT, 0)

        def FLOAT(self):
            return self.getToken(CymbolParser.FLOAT, 0)

        def BOOLEAN(self):
            return self.getToken(CymbolParser.BOOLEAN, 0)

        def LT(self):
            return self.getToken(CymbolParser.LT, 0)

        def GT(self):
            return self.getToken(CymbolParser.GT, 0)

        def LE(self):
            return self.getToken(CymbolParser.LE, 0)

        def GE(self):
            return self.getToken(CymbolParser.GE, 0)

        def MUL(self):
            return self.getToken(CymbolParser.MUL, 0)

        def DIV(self):
            return self.getToken(CymbolParser.DIV, 0)

        def AND(self):
            return self.getToken(CymbolParser.AND, 0)

        def OR(self):
            return self.getToken(CymbolParser.OR, 0)

        def EQ(self):
            return self.getToken(CymbolParser.EQ, 0)

        def NE(self):
            return self.getToken(CymbolParser.NE, 0)

        def getRuleIndex(self):
            return CymbolParser.RULE_expr

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitExpr" ):
                return visitor.visitExpr(self)
            else:
                return visitor.visitChildren(self)
    def expr(self, _p:int=0):
        # Left-recursive expression rule, generated with ANTLR's precedence-
        # climbing scheme: parse a primary/unary head, then greedily fold in
        # binary operators whose precedence level passes precpred().
        # _p is the minimum precedence a folded operator must exceed.
        # NOTE(review): the generated precedence order puts relational
        # (< > <= >=, level 10) ABOVE * / (level 9) and + - (level 8), and
        # and/or (level 7) above ==/!= (level 6) — unusual; confirm this is
        # intended in the grammar file before relying on it.
        _parentctx = self._ctx
        _parentState = self.state
        localctx = CymbolParser.ExprContext(self, self._ctx, _parentState)
        _prevctx = localctx
        _startState = 32
        self.enterRecursionRule(localctx, 32, self.RULE_expr, _p)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 150
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,13,self._ctx)
            if la_ == 1:
                # Function call: ID '(' exprList? ')'.
                self.state = 132
                self.match(CymbolParser.ID)
                self.state = 133
                self.match(CymbolParser.LP)
                self.state = 135
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Bitmask lookahead: arguments only when the next token can start an expr.
                if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CymbolParser.LP) | (1 << CymbolParser.NOT) | (1 << CymbolParser.PLUS) | (1 << CymbolParser.MINUS) | (1 << CymbolParser.BOOLEAN) | (1 << CymbolParser.ID) | (1 << CymbolParser.INT) | (1 << CymbolParser.FLOAT))) != 0):
                    self.state = 134
                    self.exprList()
                self.state = 137
                self.match(CymbolParser.RP)
                pass
            elif la_ == 2:
                # Unary plus/minus.
                self.state = 138
                localctx.op = self._input.LT(1)
                _la = self._input.LA(1)
                if not(_la==CymbolParser.PLUS or _la==CymbolParser.MINUS):
                    localctx.op = self._errHandler.recoverInline(self)
                else:
                    self._errHandler.reportMatch(self)
                    self.consume()
                self.state = 139
                self.expr(12)
                pass
            elif la_ == 3:
                # Logical negation.
                self.state = 140
                self.match(CymbolParser.NOT)
                self.state = 141
                self.expr(11)
                pass
            elif la_ == 4:
                # Plain identifier reference.
                self.state = 142
                self.match(CymbolParser.ID)
                pass
            elif la_ == 5:
                # Integer literal.
                self.state = 143
                self.match(CymbolParser.INT)
                pass
            elif la_ == 6:
                # Float literal.
                self.state = 144
                self.match(CymbolParser.FLOAT)
                pass
            elif la_ == 7:
                # Boolean literal.
                self.state = 145
                self.match(CymbolParser.BOOLEAN)
                pass
            elif la_ == 8:
                # Parenthesized expression.
                self.state = 146
                self.match(CymbolParser.LP)
                self.state = 147
                self.expr(0)
                self.state = 148
                self.match(CymbolParser.RP)
                pass

            self._ctx.stop = self._input.LT(-1)
            self.state = 169
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,15,self._ctx)
            # Fold trailing binary operators while lookahead predicts another one.
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    if self._parseListeners is not None:
                        self.triggerExitRuleEvent()
                    _prevctx = localctx
                    self.state = 167
                    self._errHandler.sync(self)
                    la_ = self._interp.adaptivePredict(self._input,14,self._ctx)
                    if la_ == 1:
                        # Relational: < > <= >=  (precedence level 10).
                        localctx = CymbolParser.ExprContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_expr)
                        self.state = 152
                        if not self.precpred(self._ctx, 10):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 10)")
                        self.state = 153
                        localctx.op = self._input.LT(1)
                        _la = self._input.LA(1)
                        if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CymbolParser.GT) | (1 << CymbolParser.LT) | (1 << CymbolParser.GE) | (1 << CymbolParser.LE))) != 0)):
                            localctx.op = self._errHandler.recoverInline(self)
                        else:
                            self._errHandler.reportMatch(self)
                            self.consume()
                        self.state = 154
                        self.expr(11)
                        pass
                    elif la_ == 2:
                        # Multiplicative: * /  (precedence level 9).
                        localctx = CymbolParser.ExprContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_expr)
                        self.state = 155
                        if not self.precpred(self._ctx, 9):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 9)")
                        self.state = 156
                        localctx.op = self._input.LT(1)
                        _la = self._input.LA(1)
                        if not(_la==CymbolParser.MUL or _la==CymbolParser.DIV):
                            localctx.op = self._errHandler.recoverInline(self)
                        else:
                            self._errHandler.reportMatch(self)
                            self.consume()
                        self.state = 157
                        self.expr(10)
                        pass
                    elif la_ == 3:
                        # Additive: + -  (precedence level 8).
                        localctx = CymbolParser.ExprContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_expr)
                        self.state = 158
                        if not self.precpred(self._ctx, 8):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 8)")
                        self.state = 159
                        localctx.op = self._input.LT(1)
                        _la = self._input.LA(1)
                        if not(_la==CymbolParser.PLUS or _la==CymbolParser.MINUS):
                            localctx.op = self._errHandler.recoverInline(self)
                        else:
                            self._errHandler.reportMatch(self)
                            self.consume()
                        self.state = 160
                        self.expr(9)
                        pass
                    elif la_ == 4:
                        # Logical: and or  (precedence level 7).
                        localctx = CymbolParser.ExprContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_expr)
                        self.state = 161
                        if not self.precpred(self._ctx, 7):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 7)")
                        self.state = 162
                        localctx.op = self._input.LT(1)
                        _la = self._input.LA(1)
                        if not(_la==CymbolParser.AND or _la==CymbolParser.OR):
                            localctx.op = self._errHandler.recoverInline(self)
                        else:
                            self._errHandler.reportMatch(self)
                            self.consume()
                        self.state = 163
                        self.expr(8)
                        pass
                    elif la_ == 5:
                        # Equality: == !=  (precedence level 6).
                        localctx = CymbolParser.ExprContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_expr)
                        self.state = 164
                        if not self.precpred(self._ctx, 6):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 6)")
                        self.state = 165
                        localctx.op = self._input.LT(1)
                        _la = self._input.LA(1)
                        if not(_la==CymbolParser.EQ or _la==CymbolParser.NE):
                            localctx.op = self._errHandler.recoverInline(self)
                        else:
                            self._errHandler.reportMatch(self)
                            self.consume()
                        self.state = 166
                        self.expr(7)
                        pass
                self.state = 171
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,15,self._ctx)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            # Recursion rules unwind with unrollRecursionContexts, not exitRule.
            self.unrollRecursionContexts(_parentctx)
        return localctx
    def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int):
        # Dispatch semantic predicates by rule index; only 'expr' (rule 16)
        # defines any.  ANTLR-generated; do not hand-edit.
        if self._predicates == None:
            self._predicates = dict()
        self._predicates[16] = self.expr_sempred
        pred = self._predicates.get(ruleIndex, None)
        if pred is None:
            raise Exception("No predicate with index:" + str(ruleIndex))
        else:
            return pred(localctx, predIndex)
    def expr_sempred(self, localctx:ExprContext, predIndex:int):
        # Precedence predicates for the left-recursive 'expr' rule; predIndex
        # 0..4 map to levels 10..6 (relational, mul/div, add/sub, and/or, eq/ne).
        if predIndex == 0:
            return self.precpred(self._ctx, 10)

        if predIndex == 1:
            return self.precpred(self._ctx, 9)

        if predIndex == 2:
            return self.precpred(self._ctx, 8)

        if predIndex == 3:
            return self.precpred(self._ctx, 7)

        if predIndex == 4:
            return self.precpred(self._ctx, 6)
|
[
"io.StringIO",
"antlr4.error.Errors.FailedPredicateException"
] |
[((218, 228), 'io.StringIO', 'StringIO', ([], {}), '()\n', (226, 228), False, 'from io import StringIO\n'), ((44187, 44249), 'antlr4.error.Errors.FailedPredicateException', 'FailedPredicateException', (['self', '"""self.precpred(self._ctx, 10)"""'], {}), "(self, 'self.precpred(self._ctx, 10)')\n", (44211, 44249), False, 'from antlr4.error.Errors import FailedPredicateException\n'), ((45338, 45399), 'antlr4.error.Errors.FailedPredicateException', 'FailedPredicateException', (['self', '"""self.precpred(self._ctx, 9)"""'], {}), "(self, 'self.precpred(self._ctx, 9)')\n", (45362, 45399), False, 'from antlr4.error.Errors import FailedPredicateException\n'), ((46388, 46449), 'antlr4.error.Errors.FailedPredicateException', 'FailedPredicateException', (['self', '"""self.precpred(self._ctx, 8)"""'], {}), "(self, 'self.precpred(self._ctx, 8)')\n", (46412, 46449), False, 'from antlr4.error.Errors import FailedPredicateException\n'), ((47440, 47501), 'antlr4.error.Errors.FailedPredicateException', 'FailedPredicateException', (['self', '"""self.precpred(self._ctx, 7)"""'], {}), "(self, 'self.precpred(self._ctx, 7)')\n", (47464, 47501), False, 'from antlr4.error.Errors import FailedPredicateException\n'), ((48488, 48549), 'antlr4.error.Errors.FailedPredicateException', 'FailedPredicateException', (['self', '"""self.precpred(self._ctx, 6)"""'], {}), "(self, 'self.precpred(self._ctx, 6)')\n", (48512, 48549), False, 'from antlr4.error.Errors import FailedPredicateException\n')]
|
import torch
import torch.nn.functional as F
from torch import nn
# palabra anterior o <SOS> -->
# [features_imagen] --> attention --> decoder --> [0,0......,1,0,0,0,0,0....0]
class Decoder(nn.Module):
    """Single-step caption decoder.

    At each step the previous word (or <SOS>) is embedded, concatenated with
    an attention context vector built from the image features, and fed through
    a GRU; a linear layer followed by ReLU maps the GRU output to vocabulary
    scores.
    """

    def __init__(self, image_features_dim, vocab_size, embed_size, hidden_size, num_layers=1):
        super(Decoder, self).__init__()
        # Submodules are created in a fixed order so seeded initialisation
        # stays reproducible.
        self.embed = nn.Embedding(vocab_size, embed_size)
        self.hidden_size = hidden_size
        self.num_layers = num_layers
        # GRU input is the attention context concatenated with the embedding.
        self.rnn = nn.GRU(image_features_dim + embed_size, hidden_size, num_layers, batch_first=False)
        self.linear = nn.Linear(hidden_size, vocab_size)
        self.relu = nn.ReLU()

    def init_hidden(self, batch_size:int):
        """Return a zeroed initial hidden state.

        Shape is (num_layers * num_directions, batch, hidden_size), as the
        GRU expects.
        """
        return torch.zeros(self.num_layers, batch_size, self.hidden_size)

    def forward(self, context_vector, word, hidden_state=None):
        """Run a single decoding step.

        :param context_vector: attention context per batch element
            (assumed (batch, image_features_dim) — TODO confirm against caller)
        :param word: previous token index, shape (batch, 1)
        :param hidden_state: GRU hidden state from the previous step, or None
        :return: (vocabulary scores of shape (batch, vocab_size), new hidden state)
        """
        # (batch, 1, embed_size) -> (batch, embed_size)
        token_emb = self.embed(word).squeeze(1)
        # Join context and embedding, then add the seq_len=1 axis the GRU wants.
        step_input = torch.cat((context_vector, token_emb), dim=-1).unsqueeze(0)
        output, hidden_state = self.rnn(step_input, hidden_state)
        # Project to vocabulary scores and drop the seq_len axis.
        scores = self.relu(self.linear(output)).squeeze(0)
        return scores, hidden_state
|
[
"torch.nn.GRU",
"torch.nn.ReLU",
"torch.nn.Embedding",
"torch.cat",
"torch.nn.Linear",
"torch.zeros"
] |
[((382, 418), 'torch.nn.Embedding', 'nn.Embedding', (['vocab_size', 'embed_size'], {}), '(vocab_size, embed_size)\n', (394, 418), False, 'from torch import nn\n'), ((641, 728), 'torch.nn.GRU', 'nn.GRU', (['(image_features_dim + embed_size)', 'hidden_size', 'num_layers'], {'batch_first': '(False)'}), '(image_features_dim + embed_size, hidden_size, num_layers,\n batch_first=False)\n', (647, 728), False, 'from torch import nn\n'), ((747, 781), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'vocab_size'], {}), '(hidden_size, vocab_size)\n', (756, 781), False, 'from torch import nn\n'), ((803, 812), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (810, 812), False, 'from torch import nn\n'), ((932, 990), 'torch.zeros', 'torch.zeros', (['self.num_layers', 'batch_size', 'self.hidden_size'], {}), '(self.num_layers, batch_size, self.hidden_size)\n', (943, 990), False, 'import torch\n'), ((1416, 1463), 'torch.cat', 'torch.cat', (['(context_vector, embeddings)'], {'dim': '(-1)'}), '((context_vector, embeddings), dim=-1)\n', (1425, 1463), False, 'import torch\n')]
|
import re
from resources.element import ShallowQuestion
from db.connection import session
from db.entities import *
from datetime import datetime
import logging
import os
# --- Logging setup ----------------------------------------------------------
# exist_ok avoids the check-then-create race of the former
# `if not os.path.exists(...)` guard when several processes import this
# module concurrently.
os.makedirs('logs', exist_ok=True)

# NOTE(review): the logger *name* is the log-file path, which is unusual —
# loggers are normally named after the module (__name__).  Kept as-is so any
# other code fetching logging.getLogger('logs/sql_manager.log') still shares
# this logger.
logger = logging.getLogger('logs/sql_manager.log')
logger.setLevel(logging.DEBUG)

# Timestamp, severity and code location for every record.
formatter = logging.Formatter('%(asctime)s-%(levelname)s-FILE:%(filename)s-FUNC:%(funcName)s-LINE:%(lineno)d-%(message)s')

# Persist INFO and above to the log file.
file_handler = logging.FileHandler('logs/sql_manager.log')
file_handler.setLevel(logging.INFO)
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
def str_to_time(date, fmt='%Y-%m-%d %H:%M:%SZ', default=None):
    """ Convert a string to datetime.
    If it cannot be created, it returns the default value

    :param str date: The string of a date
    :param str fmt: The format of the date param
    :param str default: A default value in case the date creation fail.
    :return: A datetime object created from the date
    :rtype: datetime or None
    """
    try:
        return datetime.strptime(date, fmt)
    except (ValueError, TypeError):
        # Log the offending value and format (the previous message was the
        # constant 'ValueError, TypeError' and carried no diagnostic value).
        logger.error(f'Could not parse date {date!r} with format {fmt!r}')
        return default
def extract_id(route):
    """ Extract the id from a user's path

    :param str route: The string of the path of the user page
    :return: The id of the user
    :rtype: int or None
    """
    try:
        # Routes look like '/users/<id>/<username>'; the id is the
        # second-to-last path segment.
        return int(route.split('/')[-2])
    except (AttributeError, IndexError, ValueError):
        # Log the route that failed (the previous message was the constant
        # exception-name list and carried no diagnostic value).
        logger.error(f'Could not extract user id from route {route!r}')
        return None
def extract_username(route):
    """ Extract the username from a user's path

    :param str route: The string of the path of the user page
    :return: The username of the user
    :rtype: str or None
    """
    try:
        # Routes look like '/users/<id>/<username>'; the username is the
        # last path segment.
        return route.split('/')[-1]
    except (AttributeError, IndexError):
        # Log the route that failed (the previous message was the constant
        # exception-name list and carried no diagnostic value).
        logger.error(f'Could not extract username from route {route!r}')
        return None
class EntityManager:
    def __init__(self, source):
        """ Constructor method

        :param str source: A domain belonging to the Stack Exchange network;
            stored and later matched against the Source table's name column
            when saving questions.
        """
        # Record construction in the module log for traceability.
        logger.info(f'Session opened. Class initiated: {self}, source: {source}')
        self.source = source
def save(self, shallow_question):
""" Save (create or update) a question in the database.
Create records (Source, User, Tag, Answer) only if they do not already exist in the database.
:param ShallowQuestion shallow_question:
"""
users_to_create = {}
source = session.query(Source).filter_by(name=self.source).first()
if source is None:
source = Source()
source.name = self.source
question = session.query(Question).filter_by(source_id=source.id, stack_exchange_id=shallow_question.id).first()
if question is None:
question = Question()
question.source = source
question.stack_exchange_id = shallow_question.id
else:
question.answers = []
question.title = shallow_question.title
question.asked = shallow_question.asked
question.active = str_to_time(shallow_question.active)
question.viewed = re.findall('\d+', shallow_question.viewed)[0]
question.answer_count = shallow_question.answer_count
owner_stack_exchange_id = extract_id(shallow_question.owner_id)
owner = session.query(User).filter_by(source_id=source.id, stack_exchange_id=owner_stack_exchange_id).first()
if owner is None:
owner = User()
owner.source = source
owner.stack_exchange_id = owner_stack_exchange_id
users_to_create[owner.stack_exchange_id] = owner
owner.username = extract_username(shallow_question.owner_id)
owner.name = shallow_question.owner_name
question.owner = owner
question.vote_count = shallow_question.vote_count
question.bookmark_count = shallow_question.bookmark_count
for tag_name in shallow_question.tags:
tag = session.query(Tag).filter_by(name=tag_name).first()
if tag is None:
tag = Tag()
tag.name = tag_name
for shallow_tag_details in shallow_question.tags_details:
if shallow_tag_details.name == tag.name:
details = TagDetail()
details.definition = shallow_tag_details.definition
details.page = shallow_tag_details.page
details.list_of_tags = shallow_tag_details.list_of_tags
tag.details = details
question.tags.append(tag)
question.edited_time = str_to_time(shallow_question.edited_time)
editor_stack_exchange_id = extract_id(shallow_question.edited_id)
editor = session.query(User).filter_by(source_id=source.id, stack_exchange_id=editor_stack_exchange_id).first()
if editor_stack_exchange_id in users_to_create:
editor = users_to_create[editor_stack_exchange_id]
if editor is None:
editor = User()
editor.source = source
editor.stack_exchange_id = editor_stack_exchange_id
users_to_create[editor.stack_exchange_id] = editor
editor.username = extract_username(shallow_question.edited_id)
editor.name = shallow_question.edited_name
for shallow_answer in shallow_question.answers:
answer = Answer()
answer.answer_time = str_to_time(shallow_answer.user_time)
user_stack_exchange_id = extract_id(shallow_answer.user_id)
user = session.query(User).filter_by(source_id=source.id, stack_exchange_id=user_stack_exchange_id).first()
if user_stack_exchange_id in users_to_create:
user = users_to_create[user_stack_exchange_id]
if user is None:
user = User()
user.source = source
user.stack_exchange_id = user_stack_exchange_id
users_to_create[user.stack_exchange_id] = user
user.username = extract_username(shallow_answer.user_id)
user.name = shallow_answer.user_name
answer.user = user
answer.vote_count = shallow_answer.vote_count
answer.edit_time = str_to_time(shallow_answer.edit_time)
question_editor_stack_exchange_id = extract_id(shallow_answer.edit_id)
question_editor = session.query(User).filter_by(source_id=source.id,
stack_exchange_id=question_editor_stack_exchange_id).first()
if question_editor_stack_exchange_id in users_to_create:
question_editor = users_to_create[question_editor_stack_exchange_id]
if question_editor is None:
question_editor = User()
question_editor.source = source
question_editor.stack_exchange_id = question_editor_stack_exchange_id
users_to_create[question_editor.stack_exchange_id] = question_editor
question_editor.username = extract_username(shallow_answer.edit_id)
question_editor.name = shallow_answer.edit_name
question.answers.append(answer)
if question.id is None:
session.add(question)
logger.info('Session closed')
session.commit()
|
[
"os.makedirs",
"logging.FileHandler",
"os.path.exists",
"db.connection.session.query",
"logging.Formatter",
"datetime.datetime.strptime",
"db.connection.session.commit",
"re.findall",
"db.connection.session.add",
"logging.getLogger"
] |
[((236, 277), 'logging.getLogger', 'logging.getLogger', (['"""logs/sql_manager.log"""'], {}), "('logs/sql_manager.log')\n", (253, 277), False, 'import logging\n'), ((341, 461), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s-%(levelname)s-FILE:%(filename)s-FUNC:%(funcName)s-LINE:%(lineno)d-%(message)s"""'], {}), "(\n '%(asctime)s-%(levelname)s-FILE:%(filename)s-FUNC:%(funcName)s-LINE:%(lineno)d-%(message)s'\n )\n", (358, 461), False, 'import logging\n'), ((513, 556), 'logging.FileHandler', 'logging.FileHandler', (['"""logs/sql_manager.log"""'], {}), "('logs/sql_manager.log')\n", (532, 556), False, 'import logging\n'), ((178, 200), 'os.path.exists', 'os.path.exists', (['"""logs"""'], {}), "('logs')\n", (192, 200), False, 'import os\n'), ((206, 225), 'os.makedirs', 'os.makedirs', (['"""logs"""'], {}), "('logs')\n", (217, 225), False, 'import os\n'), ((1101, 1129), 'datetime.datetime.strptime', 'datetime.strptime', (['date', 'fmt'], {}), '(date, fmt)\n', (1118, 1129), False, 'from datetime import datetime\n'), ((7490, 7506), 'db.connection.session.commit', 'session.commit', ([], {}), '()\n', (7504, 7506), False, 'from db.connection import session\n'), ((3255, 3298), 're.findall', 're.findall', (['"""\\\\d+"""', 'shallow_question.viewed'], {}), "('\\\\d+', shallow_question.viewed)\n", (3265, 3298), False, 'import re\n'), ((7421, 7442), 'db.connection.session.add', 'session.add', (['question'], {}), '(question)\n', (7432, 7442), False, 'from db.connection import session\n'), ((2581, 2602), 'db.connection.session.query', 'session.query', (['Source'], {}), '(Source)\n', (2594, 2602), False, 'from db.connection import session\n'), ((2755, 2778), 'db.connection.session.query', 'session.query', (['Question'], {}), '(Question)\n', (2768, 2778), False, 'from db.connection import session\n'), ((3453, 3472), 'db.connection.session.query', 'session.query', (['User'], {}), '(User)\n', (3466, 3472), False, 'from db.connection import session\n'), ((4906, 4925), 
'db.connection.session.query', 'session.query', (['User'], {}), '(User)\n', (4919, 4925), False, 'from db.connection import session\n'), ((4108, 4126), 'db.connection.session.query', 'session.query', (['Tag'], {}), '(Tag)\n', (4121, 4126), False, 'from db.connection import session\n'), ((5721, 5740), 'db.connection.session.query', 'session.query', (['User'], {}), '(User)\n', (5734, 5740), False, 'from db.connection import session\n'), ((6562, 6581), 'db.connection.session.query', 'session.query', (['User'], {}), '(User)\n', (6575, 6581), False, 'from db.connection import session\n')]
|
#!/usr/bin/env python3
import pandas as pd
import sklearn.neighbors as neighbors
from sklearn.neighbors import KNeighborsClassifier
import sklearn.metrics as metrics
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.ensemble import RandomForestClassifier
import sys
def main():
    """Train a random-forest classifier on attractor profiles and predict
    cluster labels for perturbation profiles.

    Command line arguments:
        argv[1]: comma-separated list of training attractor files
        argv[2]: comma-separated list of perturbation (test) files; also
                 used to derive the output file name
        argv[3]: whitespace-separated file with a 'clusters' column
                 (k-means results), indexed by 'name'

    Writes the predicted labels to 'class_<argv[2]>_RF.txt'.
    """
    train = sys.argv[1].split(',')  # train attractors
    # Read every training frame first, then concatenate once -- concatenating
    # inside the loop is quadratic in the total number of rows.
    attr_frames = [pd.read_csv(path, delim_whitespace=True, index_col=["name"])
                   for path in train]
    df_attr = pd.concat(attr_frames, axis=0)
    # df_attr = df_attr.drop_duplicates()
    datasets = sys.argv[2].split(',')  # test attractors
    perturb_frames = [pd.read_csv(path, delim_whitespace=True, index_col=["name"])
                      for path in datasets]
    df_perturb = pd.concat(perturb_frames, axis=0)
    # k-means cluster assignments for the training attractors
    df_labels = pd.read_csv(sys.argv[3], delim_whitespace=True, index_col=["name"])
    labels = df_labels['clusters'].tolist()
    outfolder = sys.argv[2]
    regressor = RandomForestClassifier(n_estimators=20, random_state=0)
    regressor.fit(df_attr, labels)  # fit on the attractor landscape
    perturb_lab = regressor.predict(df_perturb)  # predict clusters for perturbations
    # NOTE(review): the message says "svm" but a random forest is used; kept
    # verbatim so any downstream log parsing is unaffected.
    print("calculated svm on attractors, predicted perturbations")
    # dataframe mapping perturbation names to predicted clusters
    df2 = pd.DataFrame(index=df_perturb.index)
    df2['clusters'] = perturb_lab
    # write out the classification results
    df2.to_csv('class_'+outfolder+'_RF.txt', sep=' ', index_label="name", chunksize=10000)


if __name__ == '__main__':
    # Guard the entry point so importing this module has no side effects.
    main()
|
[
"pandas.DataFrame",
"sklearn.ensemble.RandomForestClassifier",
"pandas.read_csv",
"pandas.concat"
] |
[((429, 443), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (441, 443), True, 'import pandas as pd\n'), ((700, 714), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (712, 714), True, 'import pandas as pd\n'), ((877, 944), 'pandas.read_csv', 'pd.read_csv', (['sys.argv[3]'], {'delim_whitespace': '(True)', 'index_col': "['name']"}), "(sys.argv[3], delim_whitespace=True, index_col=['name'])\n", (888, 944), True, 'import pandas as pd\n'), ((1052, 1107), 'sklearn.ensemble.RandomForestClassifier', 'RandomForestClassifier', ([], {'n_estimators': '(20)', 'random_state': '(0)'}), '(n_estimators=20, random_state=0)\n', (1074, 1107), False, 'from sklearn.ensemble import RandomForestClassifier\n'), ((1385, 1421), 'pandas.DataFrame', 'pd.DataFrame', ([], {'index': 'df_perturb.index'}), '(index=df_perturb.index)\n', (1397, 1421), True, 'import pandas as pd\n'), ((476, 533), 'pandas.read_csv', 'pd.read_csv', (['j'], {'delim_whitespace': '(True)', 'index_col': "['name']"}), "(j, delim_whitespace=True, index_col=['name'])\n", (487, 533), True, 'import pandas as pd\n'), ((551, 584), 'pandas.concat', 'pd.concat', (['[df_attr, dfj]'], {'axis': '(0)'}), '([df_attr, dfj], axis=0)\n', (560, 584), True, 'import pandas as pd\n'), ((750, 807), 'pandas.read_csv', 'pd.read_csv', (['i'], {'delim_whitespace': '(True)', 'index_col': "['name']"}), "(i, delim_whitespace=True, index_col=['name'])\n", (761, 807), True, 'import pandas as pd\n'), ((828, 864), 'pandas.concat', 'pd.concat', (['[df_perturb, dfi]'], {'axis': '(0)'}), '([df_perturb, dfi], axis=0)\n', (837, 864), True, 'import pandas as pd\n')]
|
"""
AwesomeTkinter, a new tkinter widgets design using custom styles and images
:copyright: (c) 2020-2021 by <NAME>.
"""
import tkinter as tk
from tkinter import ttk
from .utils import *
from .config import *
from .images import *
from .scrollbar import SimpleScrollbar
class ScrollableFrame(tk.Frame):
    """A frame with scrollbars
    inspired by : https://stackoverflow.com/a/3092341
    basically it is a frame inside a canvas inside another frame
    usage:
        frame = ScrollableFrame(root)
        frame.pack(fill='both', expand=True)
        # add your widgets normally
        tk.Label(frame, text=hello).pack()
    """
    def __init__(self, parent, vscroll=True, hscroll=True, autoscroll=False, bg=None, sbar_fg=None, sbar_bg=None,
                 vbar_width=10, hbar_width=10):
        """initialize
        Args:
            parent (tk.Widget): tkinter master widget
            vscroll (bool): use vertical scrollbar
            hscroll (bool): use horizontal scrollbar
            autoscroll (bool): auto scroll to bottom if new items added to frame
            bg (str): background
            sbar_fg (str): color of scrollbars' slider
            sbar_bg (str): color of scrollbars' trough, default to frame's background
            vbar_width (int): vertical scrollbar width
            hbar_width (int): horizontal scrollbar width

        NOTE(review): the ctor params vscroll/hscroll share names with the
        vscroll()/hscroll() methods defined below; the params win only inside
        __init__'s scope.
        """
        self.autoscroll = autoscroll
        # last known frame height, used to avoid redundant scrollregion updates
        self.current_height = None
        sbar_bg = sbar_bg or 'white'
        sbar_fg = sbar_fg or 'blue'
        # create outside frame
        self.outer_frame = tk.Frame(parent, bg=bg)
        # create canvas
        self.canvas = tk.Canvas(self.outer_frame, borderwidth=0, highlightthickness=0, background=bg)
        # initialize super class -- this frame lives *inside* the canvas
        tk.Frame.__init__(self, self.canvas, bg=bg)
        # scrollbars (packed before the canvas so they keep their edge slots)
        if vscroll:
            self.vsb = SimpleScrollbar(self.outer_frame, orient="vertical", command=self.canvas.yview, bg=sbar_bg,
                                        slider_color=sbar_fg, width=vbar_width)
            self.canvas.configure(yscrollcommand=self.vsb.set)
            self.vsb.pack(side="right", fill="y")
        if hscroll:
            self.hsb = SimpleScrollbar(self.outer_frame, orient="horizontal", command=self.canvas.xview, bg=sbar_bg,
                                        slider_color=sbar_fg, width=hbar_width)
            self.canvas.configure(xscrollcommand=self.hsb.set)
            self.hsb.pack(side="bottom", fill="x")
        self.canvas.pack(side="left", fill="both", expand=True)
        # embed this frame in the canvas; keep the item id to resize it later
        self._id = self.canvas.create_window((0, 0), window=self, anchor="nw", tags="self")
        self.bind("<Configure>", self._on_self_configure)
        self.canvas.bind("<Configure>", self._on_canvas_configure)
        # scroll with mousewheel
        scroll_with_mousewheel(self, target=self.canvas)
        # use outer frame geometry managers, so callers pack/grid/place the
        # whole composite widget rather than the inner frame
        self.pack = self.outer_frame.pack
        self.pack_forget = self.outer_frame.pack_forget
        self.grid = self.outer_frame.grid
        self.grid_forget = self.outer_frame.grid_forget
        self.grid_remove = self.outer_frame.grid_remove
        self.place = self.outer_frame.place
        self.place_forget = self.outer_frame.place_forget
        # get scroll methods from canvas
        self.yview_moveto = self.canvas.yview_moveto
        self.xview_moveto = self.canvas.xview_moveto
    def yview_scroll(self, *args):
        # only scroll when the content is actually taller than the viewport
        if self.winfo_height() > self.outer_frame.winfo_height():
            self.canvas.yview_scroll(*args)
    def xview_scroll(self, *args):
        # only scroll when the content is actually wider than the viewport
        if self.winfo_width() > self.outer_frame.winfo_width():
            self.canvas.xview_scroll(*args)
    def _on_self_configure(self, event):
        """Reset the scroll region to match contents"""
        if self.winfo_height() != self.current_height:
            self.canvas.configure(scrollregion=self.canvas.bbox("all"))
            # scroll to bottom, if new widgets added to frame
            if self.autoscroll:
                self.scrolltobottom()
            self.current_height = self.winfo_height()
    def _on_canvas_configure(self, event):
        """expand self to fill canvas"""
        self.canvas.itemconfigure(self._id, width=self.canvas.winfo_width())
    def vscroll(self, fraction):
        """scroll canvas vertically
        Args:
            fraction (float): from 0 "top" to 1.0 "bottom"
        """
        self.canvas.yview_moveto(fraction)
    def scrolltobottom(self):
        """Scroll all the way down."""
        self.vscroll(1.0)
    def scrolltotop(self):
        """Scroll all the way up."""
        self.vscroll(0)
    def hscroll(self, fraction):
        """scroll canvas horizontally
        Args:
            fraction (float): from 0 "left" to 1.0 "right"
        """
        self.canvas.xview_moveto(fraction)
class Frame3d(ttk.Frame):
    """A ttk frame rendered with a 3d-looking background image and shadow."""

    # style names already registered with ttk, shared across all instances
    styles = []

    def __init__(self, parent, bg=None, **options):
        """initialize

        Args:
            parent: tkinter container widget, i.e. root or another frame
            bg (str): color of frame
        """
        self.bg = bg or DEFAULT_COLOR
        parent_color = get_widget_attribute(parent, 'background') or DEFAULT_COLOR
        super().__init__(parent, **options)
        # one ttk style per distinct frame color
        frame_style = f'Frame3d_{generate_unique_name(color_to_rgba(self.bg))}'
        if frame_style not in Frame3d.styles:
            self._register_style(frame_style, parent_color)
        self['style'] = frame_style

    def _register_style(self, frame_style, parent_color):
        """Create and register the ttk style/element pair for this color."""
        self.img = self.create_image()
        style = ttk.Style()
        element_style = f'{frame_style}_element'
        style.element_create(element_style, 'image', self.img, border=15, sticky="nsew")
        style.layout(frame_style, [(element_style, {"sticky": "nsew"})])
        style.map(frame_style, background=[('', parent_color)])
        # remember the style so it is only built once
        Frame3d.styles.append(frame_style)

    def create_image(self):
        """Compose the shadow base with a color-tinted face image."""
        base = create_pil_image(b64=btn_base)
        face = create_pil_image(b64=btn_face, color=self.bg)
        merged = mix_images(base, face)
        return ImageTk.PhotoImage(merged)
|
[
"tkinter.Canvas",
"tkinter.Frame.__init__",
"tkinter.ttk.Style",
"tkinter.ttk.Frame.__init__",
"tkinter.Frame"
] |
[((1591, 1614), 'tkinter.Frame', 'tk.Frame', (['parent'], {'bg': 'bg'}), '(parent, bg=bg)\n', (1599, 1614), True, 'import tkinter as tk\n'), ((1662, 1741), 'tkinter.Canvas', 'tk.Canvas', (['self.outer_frame'], {'borderwidth': '(0)', 'highlightthickness': '(0)', 'background': 'bg'}), '(self.outer_frame, borderwidth=0, highlightthickness=0, background=bg)\n', (1671, 1741), True, 'import tkinter as tk\n'), ((1784, 1827), 'tkinter.Frame.__init__', 'tk.Frame.__init__', (['self', 'self.canvas'], {'bg': 'bg'}), '(self, self.canvas, bg=bg)\n', (1801, 1827), True, 'import tkinter as tk\n'), ((5289, 5332), 'tkinter.ttk.Frame.__init__', 'ttk.Frame.__init__', (['self', 'parent'], {}), '(self, parent, **options)\n', (5307, 5332), False, 'from tkinter import ttk\n'), ((5631, 5642), 'tkinter.ttk.Style', 'ttk.Style', ([], {}), '()\n', (5640, 5642), False, 'from tkinter import ttk\n')]
|
# Copyright (c) 2014-2015 SwiperProxy Team
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish
# distribute, sublicense and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject
# to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import re
import Util
class CSSPage:
    """
    Used for a CSS stylesheet. Uses the reader function to read a
    block, rewrites absolute URLs in that block so they go through the
    proxy, and writes the result to the client using the writer function.
    """

    # chunk size for both reading from the origin and writing to the client
    BLKSIZE = 65536

    def __init__(self, config, ssl, reader, writer, remote_host):
        """
        :param config: proxy configuration object (passed to Util.rewrite_URL)
        :param ssl: whether the current connection is HTTPS
        :param reader: callable(n) returning up to n chars, '' at end of input
        :param writer: callable(s) sending s to the client
        :param remote_host: host the stylesheet was fetched from
        """
        self.config = config
        self.ssl = ssl
        self.reader = reader
        self.writer = writer
        self.input_buffer = ''
        self.output_buffer = ''
        self.remote_host = remote_host

    def rewrite_re(self, m):
        """Rewrite one matched background/@import URL through the proxy."""
        part1 = m.group(1) or ''
        scheme = m.group(6) or ''
        url = m.group(7) or ''
        closer = m.group(9) or ''
        return part1 + Util.rewrite_URL(scheme+"//"+url, self.config, self.ssl,
                                         self.remote_host) + closer

    def rewrite(self):
        """Stream the stylesheet from reader to writer, rewriting URLs."""
        pattern = r"(((background(-image)?\s*:)|@import)\s*(url)?\s*[('\"]+\s*)(https?:)?//([^\"')]+)(:\d+)?([)'\"]+)"
        while True:
            s = self.reader(self.BLKSIZE)
            if not s:
                # End of file, there may be a left-over in the input
                # buffer.
                self.output_buffer += self.input_buffer
                self.write_output(True)
                break
            self.input_buffer += s
            # BUG FIX: re.I|re.M|re.S was previously passed as re.sub's 4th
            # *positional* argument, which is `count`, not `flags` -- that
            # capped substitutions at 26 and silently dropped the
            # case-insensitive/multiline/dotall behaviour.
            news = re.sub(pattern, self.rewrite_re, self.input_buffer,
                          flags=re.I | re.M | re.S)
            # It may be the case that the background image string is
            # divided over two blocks. Keep the last 1024 bytes in the
            # input buffer and write everything up to that point to the
            # output buffer
            if len(news) > 1024:
                self.output_buffer += news[:-1024]
                self.input_buffer = news[-1024:]
                self.write_output(False)
            else:
                self.output_buffer += news
                self.input_buffer = ''
                self.write_output(False)

    def write_output(self, final):
        """Flush the output buffer to the writer in BLKSIZE chunks.

        :param final: when False, the trailing partial chunk is kept in the
            buffer for a later call; when True, everything is flushed.
        """
        length = len(self.output_buffer)
        for beg in range(0, length, self.BLKSIZE):
            end = beg + self.BLKSIZE
            if end > length:
                if not final:
                    # keep the incomplete tail for the next call
                    self.output_buffer = self.output_buffer[beg:]
                    return
                end = length
            self.writer(self.output_buffer[beg:end])
        self.output_buffer = ''
|
[
"Util.rewrite_URL",
"re.sub"
] |
[((2434, 2505), 're.sub', 're.sub', (['pattern', 'self.rewrite_re', 'self.input_buffer', '(re.I | re.M | re.S)'], {}), '(pattern, self.rewrite_re, self.input_buffer, re.I | re.M | re.S)\n', (2440, 2505), False, 'import re\n'), ((1798, 1876), 'Util.rewrite_URL', 'Util.rewrite_URL', (["(scheme + '//' + url)", 'self.config', 'self.ssl', 'self.remote_host'], {}), "(scheme + '//' + url, self.config, self.ssl, self.remote_host)\n", (1814, 1876), False, 'import Util\n')]
|
from setuptools import setup
VERSION = '0.1'
DESCRIPTION = 'get the string'
LONG_DESCRIPTION = 'this is my first package'

# Setting up
setup(
    name="hello_World",
    version=VERSION,
    author='Udhay',
    author_email='<EMAIL>',
    description=DESCRIPTION,
    long_description_content_type="text/markdown",
    long_description=LONG_DESCRIPTION,
    # NOTE(review): ssh-style URL; PyPI expects an https project URL -- confirm.
    url='ssh://git@github.com:Udhayabanu/hello_package.git',
    packages=['helloWorld'],
    license='MIT',
    install_requires=[],
    classifiers=[
        # Classifiers must come from the official trove list; the previous
        # free-text development-status line is not a valid classifier and is
        # rejected on upload.
        "Development Status :: 3 - Alpha",
        "Intended Audience :: Developers",
        "Programming Language :: Python :: 3",
        "Operating System :: Unix",
        "Operating System :: MacOS :: MacOS X",
        "Operating System :: Microsoft :: Windows",
    ]
)
|
[
"setuptools.setup"
] |
[((139, 739), 'setuptools.setup', 'setup', ([], {'name': '"""hello_World"""', 'version': 'VERSION', 'author': '"""Udhay"""', 'author_email': '"""<EMAIL>"""', 'description': 'DESCRIPTION', 'long_description_content_type': '"""text/markdown"""', 'long_description': 'LONG_DESCRIPTION', 'url': '"""ssh://git@github.com:Udhayabanu/hello_package.git"""', 'packages': "['helloWorld']", 'license': '"""MIT"""', 'install_requires': '[]', 'classifiers': "['Development Status :: It is in Development stage',\n 'Intended Audience :: Developers',\n 'Programming Language :: Python :: 3', 'Operating System :: Unix',\n 'Operating System :: MacOS :: MacOS X',\n 'Operating System :: Microsoft :: Windows']"}), "(name='hello_World', version=VERSION, author='Udhay', author_email=\n '<EMAIL>', description=DESCRIPTION, long_description_content_type=\n 'text/markdown', long_description=LONG_DESCRIPTION, url=\n 'ssh://git@github.com:Udhayabanu/hello_package.git', packages=[\n 'helloWorld'], license='MIT', install_requires=[], classifiers=[\n 'Development Status :: It is in Development stage',\n 'Intended Audience :: Developers',\n 'Programming Language :: Python :: 3', 'Operating System :: Unix',\n 'Operating System :: MacOS :: MacOS X',\n 'Operating System :: Microsoft :: Windows'])\n", (144, 739), False, 'from setuptools import setup\n')]
|
# Title: 연산자 끼워넣기
# Link: https://www.acmicpc.net/problem/14888
import sys
from itertools import permutations
from collections import defaultdict
from copy import deepcopy
# Raise the recursion limit for the recursive DFS solutions below.
sys.setrecursionlimit(10 ** 6)
# stdin helpers: one int per line / one space-separated int list per line
read_single_int = lambda: int(sys.stdin.readline().strip())
read_list_int = lambda: list(map(int, sys.stdin.readline().strip().split(' ')))
def solution_bf(n: int, ns: list, ops: list):
    """Brute force: evaluate the expression left-to-right for every distinct
    ordering of the available operators.

    :param n: number of operands
    :param ns: the operands
    :param ops: counts of [+, -, *, //] operators (sum == n - 1)
    :return: 'maximum\\nminimum' of all reachable results
    """
    best_min = 10000000000
    best_max = -best_min
    operators = [0] * ops[0] + [1] * ops[1] + [2] * ops[2] + [3] * ops[3]
    seen = set()  # skip duplicate operator orderings produced by permutations
    for order in permutations(operators):
        if order in seen:
            continue
        seen.add(order)
        acc = ns[0]
        for pos, operand in enumerate(ns[1:]):
            op = order[pos]
            if op == 0:
                acc += operand
            elif op == 1:
                acc -= operand
            elif op == 2:
                acc *= operand
            else:
                # integer division truncating toward zero
                if acc < 0:
                    acc = -(abs(acc) // operand)
                else:
                    acc = abs(acc) // operand
        best_min = min(best_min, acc)
        best_max = max(best_max, acc)
    return '{}\n{}'.format(best_max, best_min)
def dfs_long_time(ns: list, operations: list, res: int, minimum: list, maximum: list, visited: list, idx: int, op_idx: int, call: list):
    """Slower DFS variant: walks the flat operator pool with a visited mask
    (so identical operators are branched on repeatedly). Results are folded
    into minimum[0]/maximum[0]; call[0] counts invocations.

    NOTE(review): the `visited[op_idx] = False` resets (both at the leaf and
    after the loop) implement backtracking of the *caller's* operator slot --
    the semantics look fragile; confirm against solution_dfs before reuse.
    """
    call[0] = call[0] + 1
    ans = 0
    if idx == len(ns):
        # leaf: all operands consumed, record the result
        minimum[0] = min(minimum[0], res)
        maximum[0] = max(maximum[0], res)
        visited[op_idx] = False
        return
    for i, op in enumerate(operations):
        if visited[i]:
            continue
        visited[i] = True
        if op == 0:
            ans = res + ns[idx]
        elif op == 1:
            ans = res - ns[idx]
        elif op == 2:
            ans = res * ns[idx]
        else:
            # division truncating toward zero, built from |res| // |ns[idx]|
            sign_a, sign_b = 1, 1
            if res < 0:
                sign_a = -1
            if ns[idx] < 0:
                sign_b = -1
            ans = (abs(res)//abs(ns[idx])) * (sign_a * sign_b)
        dfs_long_time(ns, operations, ans, minimum, maximum, visited, idx+1, i, call)
    visited[op_idx] = False
def solution_dfs(n: int, ns: list, ops: list):
    """Entry point for the slower, visited-mask DFS (kept for comparison).
    Returns 'maximum\\nminimum\\ncall-count'."""
    counter = [0]
    pool = [0] * ops[0] + [1] * ops[1] + [2] * ops[2] + [3] * ops[3]
    lowest, highest = [10 ** 10], [-10 ** 10]
    used = [False] * (n - 1)
    dfs_long_time(ns, pool, ns[0], lowest, highest, used, 1, 0, counter)
    return '{}\n{}\n{}'.format(highest[0], lowest[0], counter[0])
def dfs(ns: list, idx: int, res: int, plus: int, minus: int, multiples: int, divides: int, minimum: list, maximum: list, call: list):
    """Branch over the remaining operator *counts* (no permutation
    duplicates). Results are folded into minimum[0]/maximum[0]; call[0]
    counts invocations."""
    call[0] += 1
    if idx == len(ns):
        # leaf: every operand consumed, record the result
        if res < minimum[0]:
            minimum[0] = res
        if res > maximum[0]:
            maximum[0] = res
        return
    value = ns[idx]
    if plus:
        dfs(ns, idx + 1, res + value, plus - 1, minus, multiples, divides, minimum, maximum, call)
    if minus:
        dfs(ns, idx + 1, res - value, plus, minus - 1, multiples, divides, minimum, maximum, call)
    if multiples:
        dfs(ns, idx + 1, res * value, plus, minus, multiples - 1, divides, minimum, maximum, call)
    if divides:
        # division truncating toward zero; zero takes the positive branch
        quotient = abs(res) // abs(value)
        if (res > 0) != (value > 0):
            quotient = -quotient
        dfs(ns, idx + 1, quotient, plus, minus, multiples, divides - 1, minimum, maximum, call)
def solution(n: int, ns: list, ops: list):
    """Run the count-based DFS and return 'maximum\\nminimum'."""
    lowest, highest = [10 ** 10], [-10 ** 10]
    counter = [0]
    dfs(ns, 1, ns[0], ops[0], ops[1], ops[2], ops[3], lowest, highest, counter)
    return '{}\n{}'.format(highest[0], lowest[0])
def main():
    """Read the problem input from stdin and print max/min results."""
    count = read_single_int()
    numbers = read_list_int()
    operator_counts = read_list_int()
    print(solution(count, numbers, operator_counts))
# print(solution_dfs(n, ns, ops))
if __name__ == '__main__':
    # script entry point (skipped when imported, e.g. by a judge harness)
    main()
|
[
"collections.defaultdict",
"itertools.permutations",
"sys.setrecursionlimit",
"sys.stdin.readline"
] |
[((182, 212), 'sys.setrecursionlimit', 'sys.setrecursionlimit', (['(10 ** 6)'], {}), '(10 ** 6)\n', (203, 212), False, 'import sys\n'), ((475, 502), 'collections.defaultdict', 'defaultdict', (['(lambda : False)'], {}), '(lambda : False)\n', (486, 502), False, 'from collections import defaultdict\n'), ((591, 614), 'itertools.permutations', 'permutations', (['operators'], {}), '(operators)\n', (603, 614), False, 'from itertools import permutations\n'), ((248, 268), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (266, 268), False, 'import sys\n'), ((317, 337), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (335, 337), False, 'import sys\n')]
|
# -*- coding=utf-8 -*-
import os, sys
sys.path.append(os.getcwd()) # make the current dir importable before pytest collects tests
from Basic.Init_Driver import init_hlj_driver
from Basic.read_data import Read_Data
from Page.search_page import Search_Page
import pytest
import allure
# command used to build the allure html report from raw results:
"""
allure generate report/ -o report/html
"""
def package_param_data():
    """Build the parametrize data set from Search_page.yaml.

    :return: list of (case id, search input) pairs, e.g. [(id1, value1), ...]
    """
    yaml_data = Read_Data("Search_page.yaml").return_data()
    return [(case_id, yaml_data.get(case_id).get("value"))
            for case_id in yaml_data.keys()]
# NOTE(review): debug leftovers -- prints sys.path at import time and calls
# package_param_data() with the result discarded; consider removing.
print(sys.path)
package_param_data()
class Test_Search(object):
    """Search-page UI tests.

    setup_class/teardown_class are used instead of __init__ so the test
    method can run many times without re-initialising and quitting the
    driver on every run.
    """
    # def __init__(self):
    #     self.driver = init_hlj_driver()
    #     sp = Search_Page(self.driver)
    #     sp.jump_guide()
    def setup_class(self):
        # one driver per class: start the app and skip the guide screen
        self.driver = init_hlj_driver()
        sp = Search_Page(self.driver)
        sp.jump_guide()
    @pytest.mark.parametrize('test_id, value', package_param_data())  # runs once per (id, value) pair from the yaml data
    @allure.severity(allure.severity_level.CRITICAL)
    @allure.step("主页:设计搜索步骤001")
    def test_search(self, test_id, value):
        allure.attach('描述', '重复三次搜索步骤')
        # instantiate the page-object wrapper
        sp = Search_Page(self.driver)
        # perform the search action
        print("test_id:", test_id)
        sp.input_search_text(value)
        # self.driver.quit()
    @allure.severity(allure.severity_level.BLOCKER)
    @allure.step("主页:楼盘测试")
    def test_index_lp(self):
        # NOTE(review): always fails -- presumably a deliberate failure to
        # demo allure severity reporting; confirm before keeping in CI.
        assert 0
    def teardown_class(self):
        # quit the driver object
        self.driver.quit()
if __name__ == "__main__":
    # direct invocation for quick local debugging (bypasses pytest)
    Test_Search().test_search(11, "H")
|
[
"Basic.read_data.Read_Data",
"os.getcwd",
"allure.attach",
"allure.step",
"Page.search_page.Search_Page",
"allure.severity",
"Basic.Init_Driver.init_hlj_driver"
] |
[((58, 69), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (67, 69), False, 'import os, sys\n'), ((1372, 1419), 'allure.severity', 'allure.severity', (['allure.severity_level.CRITICAL'], {}), '(allure.severity_level.CRITICAL)\n', (1387, 1419), False, 'import allure\n'), ((1426, 1453), 'allure.step', 'allure.step', (['"""主页:设计搜索步骤001"""'], {}), "('主页:设计搜索步骤001')\n", (1437, 1453), False, 'import allure\n'), ((1728, 1774), 'allure.severity', 'allure.severity', (['allure.severity_level.BLOCKER'], {}), '(allure.severity_level.BLOCKER)\n', (1743, 1774), False, 'import allure\n'), ((1781, 1803), 'allure.step', 'allure.step', (['"""主页:楼盘测试"""'], {}), "('主页:楼盘测试')\n", (1792, 1803), False, 'import allure\n'), ((1194, 1211), 'Basic.Init_Driver.init_hlj_driver', 'init_hlj_driver', ([], {}), '()\n', (1209, 1211), False, 'from Basic.Init_Driver import init_hlj_driver\n'), ((1226, 1250), 'Page.search_page.Search_Page', 'Search_Page', (['self.driver'], {}), '(self.driver)\n', (1237, 1250), False, 'from Page.search_page import Search_Page\n'), ((1507, 1538), 'allure.attach', 'allure.attach', (['"""描述"""', '"""重复三次搜索步骤"""'], {}), "('描述', '重复三次搜索步骤')\n", (1520, 1538), False, 'import allure\n'), ((1575, 1599), 'Page.search_page.Search_Page', 'Search_Page', (['self.driver'], {}), '(self.driver)\n', (1586, 1599), False, 'from Page.search_page import Search_Page\n'), ((442, 471), 'Basic.read_data.Read_Data', 'Read_Data', (['"""Search_page.yaml"""'], {}), "('Search_page.yaml')\n", (451, 471), False, 'from Basic.read_data import Read_Data\n')]
|
"""Main module."""
from itertools import chain
import anytree
import nanoid
import parse
# alphabet and length used when generating random note target ids
NANOID_ALPHABET = '-0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
NANOID_SIZE = 10
# file-stem patterns tried in order by _parse_stem; only the first two
# carry a {group} field
STEM_TEMPLATES = ('{group:l}_{index:d}__{name:w}', '{group:l}__{name:w}',
                  '{index:d}__{name:w}', '{name:w}')
def _get_title(note):
"""Extract title from note.
:param note: path not note file
:type note: class: `pathlib.path`
:return: Note title
:rrtype: str
"""
title = note.stem
with note.open(encoding="utf-8") as fd_in:
found_line = False
for line in fd_in.readlines():
if "=======" in line: # pylint: disable=no-else-continue
found_line = True
continue
elif found_line:
title = line.strip()
break
return title
def _parse_stem(stem):
    """Extract the group from a note file stem.

    Each template in STEM_TEMPLATES is tried in turn; templates without a
    {group} field (KeyError) and non-matching stems (TypeError) are skipped.

    :param stem: Path.stem()
    :type stem: str
    :return: note group, or None when no template yields one
    :rtype: str or None
    """
    for template in STEM_TEMPLATES:
        match = parse.parse(template, stem)
        try:
            return match['group']
        except (KeyError, TypeError):
            pass
    return None
def get_target():
    """Create a random target ID.
    :return: target id
    :rtype: str
    """
    return nanoid.generate(NANOID_ALPHABET, NANOID_SIZE)
def get_tree(root_dir):
    """Build a tree of notes from a notebook directory.

    Intermediate directory nodes are created on demand; each .rst file
    becomes a leaf carrying its group, title and link target.

    :param root_dir: the root directory of the notebook
    :type root_dir: class: `pathlib.Path`
    :return: tree root node
    :rtype: class: anytree.Node
    """
    root_name = root_dir.name
    nodes = {root_name: anytree.Node(root_name)}
    for note in sorted(root_dir.glob('**/*.rst')):
        rel = note.relative_to(root_dir)
        target = f'/{rel.parent}/{rel.stem}'
        prefix = []
        # walk down the directory chain, creating missing parent nodes
        for part in chain([root_name], rel.parts[:-1]):
            prefix.append(part)
            key = '/'.join(prefix)
            if key not in nodes:
                nodes[key] = anytree.Node(part, parent=nodes['/'.join(prefix[:-1])])
        anytree.Node(note.name,
                     group=_parse_stem(note.stem),
                     parent=nodes['/'.join(prefix)],
                     title=_get_title(note),
                     target=target)
    return nodes[root_name]
def prune_tree(root, prune):
    """Prune nodes that shouldn't be rendered on the index page.

    Matching nodes are detached by clearing their parent link.

    :param root: root node of the notes tree
    :type root: anytree.Node
    :param prune: a tuple of node names to be pruned
    :type prune: tuple
    :return: None
    """
    matches = anytree.search.findall(
        root, filter_=lambda node: node.name in prune)
    for node in matches:
        node.parent = None
def render_index(root, template, out):
    """Render the notebook tree into index.rst.

    The root node itself (depth 0) is excluded from the rendered list.

    :param root: notebook tree root node
    :type root: class: anytree.Node
    :param template: a jinja2 template
    :type template: class: Jinja2.Template
    :param out: open file like object
    :type out: File Like Object
    :return: None
    """
    nodes = list(filter(lambda node: node.depth, anytree.PreOrderIter(root)))
    out.write(template.render(nodes=nodes))
def render_note(template, out):
    """Render a single note stub with a freshly generated random id.

    :param template: a jinja2 template
    :type template: class: Jinja2.Template
    :param out: open file like object
    :type out: File Like Object
    :return: None
    """
    out.write(template.render(note_id=get_target()))
|
[
"anytree.Node",
"anytree.PreOrderIter",
"nanoid.generate",
"anytree.search.findall",
"itertools.chain",
"parse.parse"
] |
[((1334, 1379), 'nanoid.generate', 'nanoid.generate', (['NANOID_ALPHABET', 'NANOID_SIZE'], {}), '(NANOID_ALPHABET, NANOID_SIZE)\n', (1349, 1379), False, 'import nanoid\n'), ((2695, 2764), 'anytree.search.findall', 'anytree.search.findall', (['root'], {'filter_': '(lambda node: node.name in prune)'}), '(root, filter_=lambda node: node.name in prune)\n', (2717, 2764), False, 'import anytree\n'), ((1674, 1701), 'anytree.Node', 'anytree.Node', (['root_dir.name'], {}), '(root_dir.name)\n', (1686, 1701), False, 'import anytree\n'), ((1908, 1946), 'itertools.chain', 'chain', (['[root_dir.name]', 'tmp.parts[:-1]'], {}), '([root_dir.name], tmp.parts[:-1])\n', (1913, 1946), False, 'from itertools import chain\n'), ((3187, 3213), 'anytree.PreOrderIter', 'anytree.PreOrderIter', (['root'], {}), '(root)\n', (3207, 3213), False, 'import anytree\n'), ((1110, 1137), 'parse.parse', 'parse.parse', (['template', 'stem'], {}), '(template, stem)\n', (1121, 1137), False, 'import parse\n'), ((2119, 2152), 'anytree.Node', 'anytree.Node', (['part'], {'parent': 'parent'}), '(part, parent=parent)\n', (2131, 2152), False, 'import anytree\n')]
|
"""
This files only purpose is to pretty print the given map.
Input to printer is a map, the path the robot took and a planned path
if there is no path the robot took or planned path, they args can be left
printer(map, rob_path, planned_path)
result is nothing.
is saves the file in this folder. the name is by default test.png
(this two things could be changed if wanted.
"""
import numpy as np
from PIL import Image
def printer(m, rob_path=None, planned_path=None):
    """Pretty print the given map and save it as ``test.png``.

    Occupied cells are drawn white (255), free cells grey (125), the
    robot's travelled path black (0) and the planned path dark grey (50).
    The final image is resized to 350x350 with nearest-neighbour sampling.

    :param m: map object exposing ``maxV()``, ``lowestV()`` and a cell dict
              ``d`` keyed by (x, y) tuples -- assumed from usage; verify
              against the map class.
    :param rob_path: optional iterable of pixel coordinates the robot took.
    :param planned_path: optional iterable of planned pixel coordinates.
    :return: None; the image is written to ``test.png`` in this folder.
    """
    # Avoid mutable default arguments: a shared list default is reused
    # across calls and is a classic Python pitfall.
    rob_path = [] if rob_path is None else rob_path
    planned_path = [] if planned_path is None else planned_path
    max_v = m.maxV()
    min_v = m.lowestV()
    x_range = max_v[0] + abs(min_v[0])
    y_range = max_v[1] + abs(min_v[1])
    # Shift all coordinates so the lowest cell maps to index (0, 0).
    arr = np.zeros([x_range + 1, y_range + 1])
    for cell in m.d:
        shifted = (cell[0] + abs(min_v[0]), cell[1] + abs(min_v[1]))
        arr[shifted] = 255 if m.d[cell] else 125
    image = Image.fromarray(arr).convert("L")
    pixels = image.load()
    for coord in rob_path:
        pixels[coord] = 0
    for coord in planned_path:
        pixels[coord] = 50
    image = image.resize([350, 350], Image.NEAREST)
    image.save("test.png")
|
[
"PIL.Image.fromarray",
"numpy.zeros"
] |
[((600, 636), 'numpy.zeros', 'np.zeros', (['[x_range + 1, y_range + 1]'], {}), '([x_range + 1, y_range + 1])\n', (608, 636), True, 'import numpy as np\n'), ((831, 851), 'PIL.Image.fromarray', 'Image.fromarray', (['arr'], {}), '(arr)\n', (846, 851), False, 'from PIL import Image\n')]
|
import torch
import torch.nn as nn
import lrp_framework.lrp as lrp
from utils import calculate_linear_lrp_fast
class LRP_Classifier(nn.Module):
    """AlexNet-shaped classifier assembled from LRP-aware containers.

    Conv/Linear layers come from the lrp framework (instead of torch.nn) so
    that pretrained AlexNet weights can be loaded directly and later
    explained via layer-wise relevance propagation.
    """
    def __init__(self, num_classes=1000) -> None:
        """Build the feature extractor, average pool and classifier head.

        Args:
            num_classes: size of the final output layer (default 1000).
        """
        super(LRP_Classifier,self).__init__()
        #just the same structure as AlexNet, for easy import
        #using lrp modules, so the AlexNet weights are then directly used by the lrp framework
        self.features = lrp.Sequential(
            lrp.Conv2d(3, 64, kernel_size=11, stride=4, padding=2),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=3, stride=2),
            lrp.Conv2d(64, 192, kernel_size=5, padding=2),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=3, stride=2),
            lrp.Conv2d(192, 384, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            lrp.Conv2d(384, 256, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            lrp.Conv2d(256, 256, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=3, stride=2),
        )
        self.avgpool = nn.AdaptiveAvgPool2d((6, 6))
        self.classifier = lrp.Sequential(
            nn.Dropout(),
            lrp.Linear(256 * 6 * 6, 4096),
            nn.ReLU(inplace=True),
            nn.Dropout(),
            lrp.Linear(4096, 4096),
            nn.ReLU(inplace=True),
            lrp.Linear(4096, num_classes),
        )
    def forward(self, x: torch.Tensor, explain=False, rule="epsilon", pattern=None) -> torch.Tensor:
        """Run the network on ``x``.

        ``explain``, ``rule`` and ``pattern`` are forwarded unchanged to the
        lrp Sequential containers (only meaningful when explaining).
        """
        #x = self.seq(x,explain,rule,pattern)
        x = self.features(x,explain,rule,pattern)
        x = self.avgpool(x)
        # Flatten all dims after batch before the fully-connected head.
        x = torch.flatten(x, 1)
        x = self.classifier(x,explain,rule,pattern)
        return x
|
[
"torch.flatten",
"torch.nn.AdaptiveAvgPool2d",
"torch.nn.Dropout",
"torch.nn.ReLU",
"lrp_framework.lrp.Linear",
"lrp_framework.lrp.Conv2d",
"torch.nn.MaxPool2d"
] |
[((1164, 1192), 'torch.nn.AdaptiveAvgPool2d', 'nn.AdaptiveAvgPool2d', (['(6, 6)'], {}), '((6, 6))\n', (1184, 1192), True, 'import torch.nn as nn\n'), ((1760, 1779), 'torch.flatten', 'torch.flatten', (['x', '(1)'], {}), '(x, 1)\n', (1773, 1779), False, 'import torch\n'), ((456, 510), 'lrp_framework.lrp.Conv2d', 'lrp.Conv2d', (['(3)', '(64)'], {'kernel_size': '(11)', 'stride': '(4)', 'padding': '(2)'}), '(3, 64, kernel_size=11, stride=4, padding=2)\n', (466, 510), True, 'import lrp_framework.lrp as lrp\n'), ((528, 549), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (535, 549), True, 'import torch.nn as nn\n'), ((567, 604), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(3)', 'stride': '(2)'}), '(kernel_size=3, stride=2)\n', (579, 604), True, 'import torch.nn as nn\n'), ((622, 667), 'lrp_framework.lrp.Conv2d', 'lrp.Conv2d', (['(64)', '(192)'], {'kernel_size': '(5)', 'padding': '(2)'}), '(64, 192, kernel_size=5, padding=2)\n', (632, 667), True, 'import lrp_framework.lrp as lrp\n'), ((685, 706), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (692, 706), True, 'import torch.nn as nn\n'), ((724, 761), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(3)', 'stride': '(2)'}), '(kernel_size=3, stride=2)\n', (736, 761), True, 'import torch.nn as nn\n'), ((779, 825), 'lrp_framework.lrp.Conv2d', 'lrp.Conv2d', (['(192)', '(384)'], {'kernel_size': '(3)', 'padding': '(1)'}), '(192, 384, kernel_size=3, padding=1)\n', (789, 825), True, 'import lrp_framework.lrp as lrp\n'), ((843, 864), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (850, 864), True, 'import torch.nn as nn\n'), ((882, 928), 'lrp_framework.lrp.Conv2d', 'lrp.Conv2d', (['(384)', '(256)'], {'kernel_size': '(3)', 'padding': '(1)'}), '(384, 256, kernel_size=3, padding=1)\n', (892, 928), True, 'import lrp_framework.lrp as lrp\n'), ((946, 967), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), 
'(inplace=True)\n', (953, 967), True, 'import torch.nn as nn\n'), ((985, 1031), 'lrp_framework.lrp.Conv2d', 'lrp.Conv2d', (['(256)', '(256)'], {'kernel_size': '(3)', 'padding': '(1)'}), '(256, 256, kernel_size=3, padding=1)\n', (995, 1031), True, 'import lrp_framework.lrp as lrp\n'), ((1049, 1070), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1056, 1070), True, 'import torch.nn as nn\n'), ((1088, 1125), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(3)', 'stride': '(2)'}), '(kernel_size=3, stride=2)\n', (1100, 1125), True, 'import torch.nn as nn\n'), ((1251, 1263), 'torch.nn.Dropout', 'nn.Dropout', ([], {}), '()\n', (1261, 1263), True, 'import torch.nn as nn\n'), ((1281, 1310), 'lrp_framework.lrp.Linear', 'lrp.Linear', (['(256 * 6 * 6)', '(4096)'], {}), '(256 * 6 * 6, 4096)\n', (1291, 1310), True, 'import lrp_framework.lrp as lrp\n'), ((1328, 1349), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1335, 1349), True, 'import torch.nn as nn\n'), ((1367, 1379), 'torch.nn.Dropout', 'nn.Dropout', ([], {}), '()\n', (1377, 1379), True, 'import torch.nn as nn\n'), ((1397, 1419), 'lrp_framework.lrp.Linear', 'lrp.Linear', (['(4096)', '(4096)'], {}), '(4096, 4096)\n', (1407, 1419), True, 'import lrp_framework.lrp as lrp\n'), ((1437, 1458), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1444, 1458), True, 'import torch.nn as nn\n'), ((1476, 1505), 'lrp_framework.lrp.Linear', 'lrp.Linear', (['(4096)', 'num_classes'], {}), '(4096, num_classes)\n', (1486, 1505), True, 'import lrp_framework.lrp as lrp\n')]
|
import re
from pathlib import Path
from typing import Dict, List, Tuple
from pvi._produce.asyn import AsynParameter, AsynProducer
from pvi.device import Grid, Group
from ._asyn_convert import (
Action,
AsynRecord,
Parameter,
Readback,
RecordError,
SettingPair,
)
OVERRIDE_DESC = "# Overriding value in auto-generated template"
class TemplateConverter:
    """Converts EPICS .template files into a PVI AsynProducer definition."""
    def __init__(self, *templates: Path):
        # Paths are kept for naming (label uses the first stem); the raw
        # text of every template is read up front.
        self.templates = templates
        self._text = [t.read_text() for t in templates]
    def top_level_text(self, driver_name: str):
        """Return each template's text with its extracted records stripped."""
        extracted_templates = []
        for text in self._text:
            record_extractor = RecordExtractor(text)
            extracted_templates.append(record_extractor.get_top_level_text(driver_name))
        return extracted_templates
    def convert(self) -> AsynProducer:
        """Build an AsynProducer describing all parameters in the templates."""
        def get_prefix(texts: List[str]) -> str:
            # e.g. from: record(waveform, "$(P)$(R)FilePath")
            # extract: $(P)$(R)
            prefix_extractor = re.compile(
                r'(?:record\()(?:[^,]*)(?:[^"]*)(?:")((?:\$\([^)]\))*)(?:[^"]*)'
            )
            # NOTE(review): `prefixes` from the *last* loop iteration is used
            # after the loop; with zero templates this raises NameError --
            # confirm whether that is acceptable for this call site.
            for text in texts:
                prefixes = re.findall(prefix_extractor, text)
                prefixes = list(set(prefixes))
                if len(prefixes) > 1:
                    raise ValueError("Not all asyn records have the same macro prefix")
            return prefixes.pop()
        def get_asyn_parameters(texts: List[str]) -> Dict[int, str]:
            # e.g. from: field(INP, "@asyn($(PORT),$(ADDR=0),$(TIMEOUT=1))FILE_PATH")
            # extract: $(PORT),$(ADDR=0),$(TIMEOUT=1))FILE_PATH
            asyn_parameter_extractor = r'(?:@asyn\()([^"]*)'
            for text in texts:
                asyn_parameters = re.findall(asyn_parameter_extractor, text)
                # then: remove final close bracket and driver param name
                # $(PORT),$(ADDR=0),$(TIMEOUT=1)
                asyn_parameters = [match[: match.rfind(")")] for match in asyn_parameters]
                if len(set(asyn_parameters)) > 1:
                    print(
                        "More than one set of asyn params found. Taking the first instance"
                    )
            # Map positional index -> macro, e.g. {0: '$(PORT)', 1: '$(ADDR=0)'}.
            return {i: p.strip() for i, p in enumerate(asyn_parameters[0].split(","))}
        asyn_vars = get_asyn_parameters(self._text)
        return AsynProducer(
            prefix=get_prefix(self._text),
            label=self.templates[0].stem,
            asyn_port=asyn_vars.get(0, "$(PORT)"),
            address=asyn_vars.get(1, "$(ADDR=0)"),
            timeout=asyn_vars.get(2, "$(TIMEOUT=1)"),
            parent="asynPortDriver",
            parameters=[
                Group(
                    name="ComponentGroupOne",
                    layout=Grid(),
                    children=self._extract_components(),
                )
            ],
        )
    def _extract_components(self) -> List[AsynParameter]:
        """Extract one UI component per parameter found across all templates."""
        components = []
        for text in self._text:
            record_extractor = RecordExtractor(text)
            asyn_records = record_extractor.get_asyn_records()
            for parameter in RecordRoleSorter.sort_records(asyn_records):
                component = parameter.generate_component()
                components.append(component)
        return components
class RecordExtractor:
    """Pulls record definitions out of one EPICS template's text and parses
    them into AsynRecord objects."""
    def __init__(self, text):
        # Raw text of a single .template file.
        self._text = text
    def _extract_record_strs(self):
        """Return every complete, non-commented record definition string."""
        # extract a whole record definition inc. fields e.g.
        # record(waveform, "$(P)$(R)FilePath")
        # {
        #     field(PINI, "YES")
        #     field(DTYP, "asynOctetWrite")
        #     field(INP, "@asyn($(PORT),$(ADDR=0),$(TIMEOUT=1))FILE_PATH")
        #     field(FTVL, "CHAR")
        #     field(NELM, "256")
        #     info(autosaveFields, "VAL")
        # }
        record_extractor = re.compile(r"^[^#\n]*record\([^{]*{[^}]*}", re.MULTILINE)
        return re.findall(record_extractor, self._text)
    def _parse_record(self, record_str: str) -> Tuple:
        """Split one record string into (type, name-minus-prefix, fields body)."""
        # extract three groups from a record definition e.g.
        # from:
        # record(waveform, "$(P)$(R)FilePath")
        # {
        #     #field(PINI, "YES")
        #     field(DTYP, "asynOctetWrite")
        #     field(INP, "@asyn($(PORT),$(ADDR=0),$(TIMEOUT=1))FILE_PATH")
        #     field(FTVL, "CHAR")
        #     field(NELM, "256")
        #     info(autosaveFields, "VAL")
        # }
        # extract:
        # Group 1 - record type: waveform
        # Group 2 - record name exc. prefix: FilePath
        # Group 3 - all fields:
        # #field(PINI, "YES")
        # field(DTYP, "asynOctetWrite")
        # field(INP, "@asyn($(PORT),$(ADDR=0),$(TIMEOUT=1))FILE_PATH")
        # field(FTVL, "CHAR")
        # field(NELM, "256")
        # info(autosaveFields, "VAL")
        #
        record_parser = re.compile(
            r'(?:record\()([^,]*)(?:[^"]*)(?:")'
            r'(?:(?:\$\([a-zA-Z0-9]\))*)([^"]*)'
            r'(?:")(?:[^{]*)(?:{)([^}]*)(?:})'
        )
        # findall returns one 3-tuple per record; callers pass one record.
        return re.findall(record_parser, record_str)[0]
    def _extract_fields(self, fields_str: str) -> List[Tuple[str, str]]:
        """Return (field, value) pairs for every uncommented field() line."""
        # extract two groups from a field e.g.
        # from: field(PINI, "YES")
        # extract:
        # Group 1 - Field: PINI
        # Group 2 - Value: YES
        field_extractor = re.compile(
            r'^[^#\n]*(?:field\()([^,]*)(?:,)(?:[^"]*)(?:")([^"]*)(?:")', re.MULTILINE
        )
        return re.findall(field_extractor, fields_str)
    def _extract_infos(self, fields_str: str) -> List[Tuple[str, str]]:
        """Return (tag, value) pairs for every uncommented info() line."""
        # extract two groups from an info tag e.g.
        # from: info(autosaveFields, "VAL")
        # extract:
        # Group 1 - Field: autosaveFields
        # Group 2 - Value: VAL
        info_extractor = re.compile(
            r'^[^#\n]*(?:info\()([^,]*)(?:,)(?:[^"]*)(?:")([^"]*)(?:")', re.MULTILINE
        )
        return re.findall(info_extractor, fields_str)
    def _create_asyn_record(self, record_str: str) -> AsynRecord:
        """Parse one record string into an AsynRecord.

        Raises RecordError (from the AsynRecord constructor) when the record
        is not an asyn record -- assumed from usage in get_asyn_records;
        verify against AsynRecord.
        """
        record_type, record_name, record_fields = self._parse_record(record_str)
        fields = dict(self._extract_fields(record_fields))
        info = dict(self._extract_infos(record_fields))
        record = AsynRecord(
            name=record_name, type=record_type, fields=fields, infos=info
        )
        return record
    def get_asyn_records(self) -> List[AsynRecord]:
        """Return all records in the template that parse as asyn records."""
        record_strs = self._extract_record_strs()
        record_list = []
        for record_str in record_strs:
            try:
                record_list.append(self._create_asyn_record(record_str))
            except RecordError:
                # Not an asyn record -- skip it.
                pass
        return record_list
    def _create_stream_record(self, record_str):
        # Stream records are not supported yet: always reject.
        raise RecordError
    def get_stream_records(self):
        # No stream record support yet (see _create_stream_record).
        return []
    def get_top_level_text(self, driver_name: str) -> str:
        """Return the template text with asyn records removed, a ParamSet
        include prepended, and override snippets for clashing setting pairs
        appended."""
        record_strs = self._extract_record_strs()
        top_level_str = self._text
        for record_str in record_strs:
            try:
                self._create_asyn_record(record_str)
            except RecordError:
                try:
                    self._create_stream_record(record_str)
                except RecordError:
                    pass
            else:
                # Only records recognised as asyn records are removed.
                top_level_str = top_level_str.replace(record_str, "")
        # Get override strings for setting pair clashes
        asyn_records = self.get_asyn_records()
        setting_pairs = [
            p
            for p in RecordRoleSorter.sort_records(asyn_records)
            if isinstance(p, SettingPair)
        ]
        overrides = [
            setting_pair.get_naming_overrides()
            for setting_pair in setting_pairs
            if setting_pair.has_clashes()
        ]
        # Map record name -> its original source lines, for override output.
        record_lines = {
            self._parse_record(record_str)[1]: record_str.splitlines()
            for record_str in record_strs
        }
        def keep_line(line: str, clashing_fields: List[str]) -> bool:
            # Keep non-field lines (record header, braces) and only the
            # fields/infos that actually clash.
            extracted_field = self._extract_fields(line) or self._extract_infos(line)
            return not extracted_field or (extracted_field[0][0] in clashing_fields)
        override = [
            "\n".join(
                [OVERRIDE_DESC]
                + [
                    line
                    for line in record_lines[record_name]
                    if keep_line(line, clashing_fields)
                ]
            )
            for record_name, clashing_fields in overrides
        ]
        top_level_str = self._add_param_template_include(top_level_str, driver_name)
        top_level_str += "\n\n".join(override)
        return top_level_str
    def _add_param_template_include(self, top_level_str: str, driver_name: str) -> str:
        """Prepend the include line for the generated ParamSet template."""
        top_level_str = f'include "{driver_name}ParamSet.template"\n' + top_level_str
        return top_level_str
class RecordRoleSorter:
    """Splits asyn records into read/write roles and pairs them up."""
    @staticmethod
    def sort_records(records: List[AsynRecord]) -> List[Parameter]:
        """Classify records as reads/writes, then match them into
        Actions, Readbacks and SettingPairs (in that order)."""
        def _is_output_waveform(record) -> bool:
            # Waveforms written via asynOctetWrite or an *ArrayOut DTYP act
            # as outputs even though they carry an INP field.
            if record.type != "waveform":
                return False
            dtyp = record.fields["DTYP"]
            return dtyp == "asynOctetWrite" or dtyp.endswith("ArrayOut")
        writes = [record for record in records if "OUT" in record.fields]
        reads: List[AsynRecord] = []
        for record in (r for r in records if "INP" in r.fields):
            if _is_output_waveform(record):
                writes.append(record)
            else:
                reads.append(record)
        return (
            ParameterRoleMatcher.get_actions(reads, writes)
            + ParameterRoleMatcher.get_readbacks(reads, writes)
            + ParameterRoleMatcher.get_setting_pairs(reads, writes)
        )
class ParameterRoleMatcher:
    """Matches read/write records into Action, Readback and SettingPair
    parameters.

    A write record with no read counterpart is an Action, a read record
    with no write counterpart is a Readback, and a read/write pair sharing
    a parameter name is a SettingPair.
    """
    @staticmethod
    def get_actions(
        read_records: List[AsynRecord], write_records: List[AsynRecord]
    ) -> List[Action]:
        """Return an Action for each write record with no read counterpart."""
        # Precompute the name set once instead of rebuilding the list per
        # record (the original comprehension was accidentally O(n * m)).
        read_names = {r.get_parameter_name() for r in read_records}
        actions = [
            Action(write_record=w)
            for w in write_records
            if w.get_parameter_name() not in read_names
        ]
        return actions
    @staticmethod
    def get_readbacks(
        read_records: List[AsynRecord], write_records: List[AsynRecord]
    ) -> List[Readback]:
        """Return a Readback for each read record with no write counterpart."""
        write_names = {w.get_parameter_name() for w in write_records}
        readbacks = [
            Readback(read_record=r)
            for r in read_records
            if r.get_parameter_name() not in write_names
        ]
        return readbacks
    @staticmethod
    def get_setting_pairs(
        read_records: List[AsynRecord], write_records: List[AsynRecord]
    ) -> List[SettingPair]:
        """Return a SettingPair for every read/write pair sharing a name."""
        setting_pairs = [
            SettingPair(read_record=r, write_record=w)
            for r in read_records
            for w in write_records
            if r.get_parameter_name() == w.get_parameter_name()
        ]
        return setting_pairs
|
[
"pvi.device.Grid",
"re.findall",
"re.compile"
] |
[((3825, 3883), 're.compile', 're.compile', (['"""^[^#\\\\n]*record\\\\([^{]*{[^}]*}"""', 're.MULTILINE'], {}), "('^[^#\\\\n]*record\\\\([^{]*{[^}]*}', re.MULTILINE)\n", (3835, 3883), False, 'import re\n'), ((3898, 3938), 're.findall', 're.findall', (['record_extractor', 'self._text'], {}), '(record_extractor, self._text)\n', (3908, 3938), False, 'import re\n'), ((4838, 4963), 're.compile', 're.compile', (['"""(?:record\\\\()([^,]*)(?:[^"]*)(?:")(?:(?:\\\\$\\\\([a-zA-Z0-9]\\\\))*)([^"]*)(?:")(?:[^{]*)(?:{)([^}]*)(?:})"""'], {}), '(\n \'(?:record\\\\()([^,]*)(?:[^"]*)(?:")(?:(?:\\\\$\\\\([a-zA-Z0-9]\\\\))*)([^"]*)(?:")(?:[^{]*)(?:{)([^}]*)(?:})\'\n )\n', (4848, 4963), False, 'import re\n'), ((5325, 5416), 're.compile', 're.compile', (['"""^[^#\\\\n]*(?:field\\\\()([^,]*)(?:,)(?:[^"]*)(?:")([^"]*)(?:")"""', 're.MULTILINE'], {}), '(\'^[^#\\\\n]*(?:field\\\\()([^,]*)(?:,)(?:[^"]*)(?:")([^"]*)(?:")\',\n re.MULTILINE)\n', (5335, 5416), False, 'import re\n'), ((5449, 5488), 're.findall', 're.findall', (['field_extractor', 'fields_str'], {}), '(field_extractor, fields_str)\n', (5459, 5488), False, 'import re\n'), ((5774, 5865), 're.compile', 're.compile', (['"""^[^#\\\\n]*(?:info\\\\()([^,]*)(?:,)(?:[^"]*)(?:")([^"]*)(?:")"""', 're.MULTILINE'], {}), '(\'^[^#\\\\n]*(?:info\\\\()([^,]*)(?:,)(?:[^"]*)(?:")([^"]*)(?:")\', re\n .MULTILINE)\n', (5784, 5865), False, 'import re\n'), ((5897, 5935), 're.findall', 're.findall', (['info_extractor', 'fields_str'], {}), '(info_extractor, fields_str)\n', (5907, 5935), False, 'import re\n'), ((1019, 1098), 're.compile', 're.compile', (['"""(?:record\\\\()(?:[^,]*)(?:[^"]*)(?:")((?:\\\\$\\\\([^)]\\\\))*)(?:[^"]*)"""'], {}), '(\'(?:record\\\\()(?:[^,]*)(?:[^"]*)(?:")((?:\\\\$\\\\([^)]\\\\))*)(?:[^"]*)\')\n', (1029, 1098), False, 'import re\n'), ((5020, 5057), 're.findall', 're.findall', (['record_parser', 'record_str'], {}), '(record_parser, record_str)\n', (5030, 5057), False, 'import re\n'), ((1184, 1218), 're.findall', 're.findall', 
(['prefix_extractor', 'text'], {}), '(prefix_extractor, text)\n', (1194, 1218), False, 'import re\n'), ((1761, 1803), 're.findall', 're.findall', (['asyn_parameter_extractor', 'text'], {}), '(asyn_parameter_extractor, text)\n', (1771, 1803), False, 'import re\n'), ((2749, 2755), 'pvi.device.Grid', 'Grid', ([], {}), '()\n', (2753, 2755), False, 'from pvi.device import Grid, Group\n')]
|
from queue import Queue
class Graph:
    """Undirected graph with a BFS-based bipartiteness check."""
    def __init__(self):
        self._vertices: list = []
        self._colors: dict = {}
        self._adjacency_matrix: dict = {}
    def add_vertex(self, label: str):
        """Register a new vertex with no colour and no neighbours."""
        self._vertices.append(label)
        self._colors[label] = None
        self._adjacency_matrix[label] = []
    def add_edge(self, label1: str, label2: str):
        """Add an undirected edge between two existing vertices."""
        self._adjacency_matrix[label1].append(label2)
        self._adjacency_matrix[label2].append(label1)
    def bipartite_check(self) -> bool:
        """Return True if the graph is 2-colourable (bipartite).

        Runs a BFS from every uncoloured vertex, alternating red/blue; a
        neighbour already holding the current vertex's colour proves an odd
        cycle, so the graph is not bipartite.
        """
        for vertex in self._vertices:
            if self._colors[vertex] is not None:
                continue
            self._colors[vertex] = "red"
            q: Queue = Queue()
            # Bug fix: queue.Queue has no enqueue/dequeue/is_empty methods;
            # the stdlib API is put/get/empty.
            q.put(vertex)
            while not q.empty():
                v = q.get()
                for neighbour in self._adjacency_matrix[v]:
                    if self._colors[neighbour] == self._colors[v]:
                        return False
                    if self._colors[neighbour] is None:
                        self._colors[neighbour] = (
                            "blue" if self._colors[v] == "red" else "red"
                        )
                        q.put(neighbour)
        return True
|
[
"queue.Queue"
] |
[((706, 713), 'queue.Queue', 'Queue', ([], {}), '()\n', (711, 713), False, 'from queue import Queue\n')]
|
# Copyright (c) Facebook, Inc. and its affiliates.
import random
from typing import Optional, Tuple
import torch
from densepose.converters import ToChartResultConverterWithConfidences
from .densepose_base import DensePoseBaseSampler
class DensePoseConfidenceBasedSampler(DensePoseBaseSampler):
    """
    Samples DensePose data from DensePose predictions.
    Samples for each class are drawn using confidence value estimates.
    """
    def __init__(
        self,
        confidence_channel: str,
        count_per_class: int = 8,
        search_count_multiplier: Optional[float] = None,
        search_proportion: Optional[float] = None,
    ):
        """
        Constructor
        Args:
          confidence_channel (str): confidence channel to use for sampling;
            possible values:
              "sigma_2": confidences for UV values
              "fine_segm_confidence": confidences for fine segmentation
              "coarse_segm_confidence": confidences for coarse segmentation
            (default: "sigma_2")
          count_per_class (int): the sampler produces at most `count_per_class`
              samples for each category (default: 8)
          search_count_multiplier (float or None): if not None, the total number
              of the most confident estimates of a given class to consider is
              defined as `min(search_count_multiplier * count_per_class, N)`,
              where `N` is the total number of estimates of the class; cannot be
              specified together with `search_proportion` (default: None)
          search_proportion (float or None): if not None, the total number of the
              of the most confident estimates of a given class to consider is
              defined as `min(max(search_proportion * N, count_per_class), N)`,
              where `N` is the total number of estimates of the class; cannot be
              specified together with `search_count_multiplier` (default: None)
        """
        super().__init__(count_per_class)
        self.confidence_channel = confidence_channel
        self.search_count_multiplier = search_count_multiplier
        self.search_proportion = search_proportion
        # The two search-limiting options are mutually exclusive.
        assert (search_count_multiplier is None) or (search_proportion is None), (
            f"Cannot specify both search_count_multiplier (={search_count_multiplier})"
            f"and search_proportion (={search_proportion})"
        )
    def _produce_index_sample(self, values: torch.Tensor, count: int):
        """
        Produce a sample of indices to select data based on confidences
        Args:
            values (torch.Tensor): an array of size [n, k] that contains
                estimated values (U, V, confidences);
                n: number of channels (U, V, confidences)
                k: number of points labeled with part_id
            count (int): number of samples to produce, should be positive and <= k
        Return:
            list(int): indices of values (along axis 1) selected as a sample
        """
        k = values.shape[1]
        if k == count:
            index_sample = list(range(k))
        else:
            # take the best count * search_count_multiplier pixels,
            # sample from them uniformly
            # (here best = smallest variance)
            # torch.sort is ascending, so the smallest-variance (most
            # confident) indices come first.
            _, sorted_confidence_indices = torch.sort(values[2])
            if self.search_count_multiplier is not None:
                search_count = min(int(count * self.search_count_multiplier), k)
            elif self.search_proportion is not None:
                search_count = min(max(int(k * self.search_proportion), count), k)
            else:
                search_count = min(count, k)
            sample_from_top = random.sample(range(search_count), count)
            # NOTE(review): this branch yields a Tensor while the k == count
            # branch yields a plain list -- callers must accept both.
            index_sample = sorted_confidence_indices[:search_count][sample_from_top]
        return index_sample
    def _produce_labels_and_results(self, instance) -> Tuple[torch.Tensor, torch.Tensor]:
        """
        Method to get labels and DensePose results from an instance, with confidences
        Args:
            instance (Instances): an instance of `DensePoseChartPredictorOutputWithConfidences`
        Return:
            labels (torch.Tensor): shape [H, W], DensePose segmentation labels
            dp_result (torch.Tensor): shape [3, H, W], DensePose results u and v
                stacked with the confidence channel
        """
        converter = ToChartResultConverterWithConfidences
        chart_result = converter.convert(instance.pred_densepose, instance.pred_boxes)
        labels, dp_result = chart_result.labels.cpu(), chart_result.uv.cpu()
        # Append the selected confidence channel as a third plane under U, V.
        dp_result = torch.cat(
            (dp_result, getattr(chart_result, self.confidence_channel)[None].cpu())
        )
        return labels, dp_result
|
[
"torch.sort"
] |
[((3335, 3356), 'torch.sort', 'torch.sort', (['values[2]'], {}), '(values[2])\n', (3345, 3356), False, 'import torch\n')]
|
import logging
import multiprocessing
import os
import sys
import flask
# flask app for serving predictions
app = flask.Flask(__name__)
logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.INFO)
# ============================== #
# REQUIRED ENVIRONMENT VARIABLES #
# ============================== #
# Worker count from MODEL_SERVER_WORKERS, defaulting to the host CPU count.
# NOTE(review): not referenced in this module -- presumably consumed by the
# serving stack (e.g. gunicorn config); verify.
model_server_workers = int(os.environ.get('MODEL_SERVER_WORKERS', multiprocessing.cpu_count()))  # dynamic cpu num
# ===================== #
# START OF FIXED INPUTS #
# ===================== #
try:
    from model import load_ctx, predict
except ImportError:
    # Bug fix: the message referred to `load_model`, but the names imported
    # above are `load_ctx` and `predict`.
    print('Could not import load_ctx and predict')
    sys.exit(1)
# TODO -> enforce this structure
ctx = load_ctx("model.pkl")  # load trained model
@app.route('/invocations', methods=['POST'])
def invocations():
    """
    A flask handler for predictions

    Returns:
        A flask response with either a prediction or an error
    """
    # pre-process request
    data = flask.request.get_data()  # raw request body bytes
    # make predictions
    try:
        out = predict(data, ctx)  # extract prediction
        # Lazy %-style args avoid building the message when INFO is disabled.
        logging.info("Predicted digit: %s", out)
        return flask.jsonify(result=out)
    except Exception:
        # logging.exception records the full traceback, not just str(ex).
        logging.exception("Error while processing the request")
        return flask.Response(response='Error while processing the request',
                              status=500,
                              mimetype='text/plain')
|
[
"logging.error",
"model.load_ctx",
"logging.basicConfig",
"flask.Flask",
"model.predict",
"flask.request.get_data",
"flask.jsonify",
"flask.Response",
"sys.exit",
"multiprocessing.cpu_count"
] |
[((116, 137), 'flask.Flask', 'flask.Flask', (['__name__'], {}), '(__name__)\n', (127, 137), False, 'import flask\n'), ((139, 231), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s:%(levelname)s:%(message)s"""', 'level': 'logging.INFO'}), "(format='%(asctime)s:%(levelname)s:%(message)s', level=\n logging.INFO)\n", (158, 231), False, 'import logging\n'), ((701, 722), 'model.load_ctx', 'load_ctx', (['"""model.pkl"""'], {}), "('model.pkl')\n", (709, 722), False, 'from model import load_ctx, predict\n'), ((977, 1001), 'flask.request.get_data', 'flask.request.get_data', ([], {}), '()\n', (999, 1001), False, 'import flask\n'), ((399, 426), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (424, 426), False, 'import multiprocessing\n'), ((649, 660), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (657, 660), False, 'import sys\n'), ((1062, 1080), 'model.predict', 'predict', (['data', 'ctx'], {}), '(data, ctx)\n', (1069, 1080), False, 'from model import load_ctx, predict\n'), ((1174, 1199), 'flask.jsonify', 'flask.jsonify', ([], {'result': 'out'}), '(result=out)\n', (1187, 1199), False, 'import flask\n'), ((1237, 1254), 'logging.error', 'logging.error', (['ex'], {}), '(ex)\n', (1250, 1254), False, 'import logging\n'), ((1270, 1370), 'flask.Response', 'flask.Response', ([], {'response': '"""Error while processing the request"""', 'status': '(500)', 'mimetype': '"""text/plain"""'}), "(response='Error while processing the request', status=500,\n mimetype='text/plain')\n", (1284, 1370), False, 'import flask\n')]
|
"""
Search indexing classes to index into Elasticsearch.
Django settings that should be defined:
`ES_HOSTS`: A list of hosts where Elasticsearch lives. E.g.
['192.168.1.1:9200', '192.168.2.1:9200']
`ES_DEFAULT_NUM_REPLICAS`: An integer of the number of replicas.
`ES_DEFAULT_NUM_SHARDS`: An integer of the number of shards.
TODO: Handle page removal case in Page.
"""
from __future__ import absolute_import
from builtins import object
from django.utils import timezone
from elasticsearch import Elasticsearch, exceptions
from elasticsearch.helpers import bulk_index
from django.conf import settings
class Index(object):
    """Base class to define some common methods across indexes."""
    # The _index and _type define the URL path to Elasticsearch, e.g.:
    #   http://localhost:9200/{_index}/{_type}/_search
    _index = 'readthedocs'
    _type = None
    def __init__(self):
        # One client per Index object; hosts come from Django settings.
        self.es = Elasticsearch(settings.ES_HOSTS)
    def get_settings(self, settings_override=None):
        """
        Returns settings to be passed to ES create_index.
        If `settings_override` is provided, this will use `settings_override`
        to override the defaults defined here.
        """
        default_settings = {
            'number_of_replicas': settings.ES_DEFAULT_NUM_REPLICAS,
            'number_of_shards': settings.ES_DEFAULT_NUM_SHARDS,
            'refresh_interval': '5s',
            'store.compress.tv': True,
            'store.compress.stored': True,
            'analysis': self.get_analysis(),
        }
        if settings_override:
            default_settings.update(settings_override)
        return default_settings
    def get_analysis(self):
        """
        Returns the analysis dict to be used in settings for create_index.
        For languages that ES supports we define either the minimal or light
        stemming, which isn't as aggressive as the snowball stemmer. We also
        define the stopwords for that language.
        For all languages we've customized we're using the ICU plugin.
        """
        analyzers = {}
        filters = {}
        # The default is used for fields that need ICU but are composed of
        # many languages.
        analyzers['default_icu'] = {
            'type': 'custom',
            'tokenizer': 'icu_tokenizer',
            'filter': ['word_delimiter', 'icu_folding', 'icu_normalizer'],
        }
        # Customize the word_delimiter filter to set various options.
        # NOTE(review): the analyzer above references the stock
        # 'word_delimiter' filter, not this 'custom_word_delimiter' one --
        # confirm whether the custom filter should be wired in.
        filters['custom_word_delimiter'] = {
            'type': 'word_delimiter',
            'preserve_original': True,
        }
        return {
            'analyzer': analyzers,
            'filter': filters,
        }
    def timestamped_index(self):
        """Return '<index>-<YYYYmmddHHMMSS>' for use as a unique index name."""
        return '{0}-{1}'.format(
            self._index, timezone.now().strftime('%Y%m%d%H%M%S'))
    def create_index(self, index=None):
        """
        Creates index.
        This uses `get_settings` and `get_mappings` to define the index.
        """
        index = index or self._index
        body = {
            'settings': self.get_settings(),
        }
        self.es.indices.create(index=index, body=body)
    def refresh_index(self, index=None):
        """Force a refresh so recent writes become searchable."""
        index = index or self._index
        self.es.indices.refresh(index=index)
    def put_mapping(self, index=None):
        """Push this type's mapping (from `get_mapping`) to the index."""
        index = index or self._index
        self.es.indices.put_mapping(self._type, self.get_mapping(), index)
    def bulk_index(self, data, index=None, chunk_size=500, parent=None,
                   routing=None):
        """
        Given a list of documents, uses Elasticsearch bulk indexing.
        For each doc this calls `extract_document`, then indexes.
        `chunk_size` defaults to the elasticsearch lib's default. Override per
        your document size as needed.
        """
        index = index or self._index
        docs = []
        for d in data:
            source = self.extract_document(d)
            doc = {
                '_index': index,
                '_type': self._type,
                '_id': source['id'],
                '_source': source,
            }
            if parent:
                doc['_parent'] = parent
            if routing:
                doc['_routing'] = routing
            docs.append(doc)
        # TODO: This doesn't work with the new ES setup.
        # NOTE(review): the method name shadows the imported helper at class
        # scope, but this call still resolves to the module-level
        # `bulk_index` from elasticsearch.helpers.
        bulk_index(self.es, docs, chunk_size=chunk_size)
    def index_document(self, data, index=None, parent=None, routing=None):
        """Index a single document extracted from `data`."""
        doc = self.extract_document(data)
        kwargs = {
            'index': index or self._index,
            'doc_type': self._type,
            'body': doc,
            'id': doc['id']
        }
        if parent:
            kwargs['parent'] = parent
        if routing:
            kwargs['routing'] = routing
        self.es.index(**kwargs)
    def delete_index(self, index_name):
        """Delete the named index entirely."""
        self.es.indices.delete(index=index_name)
    def delete_document(self, body, index=None, parent=None, routing=None):
        """Delete all documents matching the query in `body`."""
        kwargs = {
            'index': index or self._index,
            'doc_type': self._type,
            'body': body,
        }
        if parent:
            kwargs['parent'] = parent
        if routing:
            kwargs['routing'] = routing
        return self.es.delete_by_query(**kwargs)
    def get_mapping(self):
        """Returns the mapping for this _index and _type."""
        raise NotImplementedError()
    def extract_document(self, data):
        """Extracts the Elasticsearch document for this object instance."""
        raise NotImplementedError()
    def update_aliases(self, new_index, delete=True):
        """
        Points `_index` to `new_index` and deletes `_index` if delete=True.
        The ES `update_aliases` is atomic.
        """
        old_index = None
        # Get current alias, if any.
        try:
            aliases = self.es.indices.get_alias(name=self._index)
            if aliases and list(aliases.keys()):
                old_index = list(aliases.keys())[0]
        except exceptions.NotFoundError:
            pass
        actions = []
        if old_index:
            actions.append({'remove': {'index': old_index,
                                       'alias': self._index}})
        actions.append({'add': {'index': new_index, 'alias': self._index}})
        self.es.indices.update_aliases(body={'actions': actions})
        # Delete old index if any and if specified.
        if delete and old_index:
            self.es.indices.delete(index=old_index)
    def search(self, body, **kwargs):
        """Run a search against this index/type and return the raw response."""
        return self.es.search(index=self._index, doc_type=self._type,
                              body=body, **kwargs)
class ProjectIndex(Index):
    """Search index configuration for Projects"""
    _type = 'project'
    def get_mapping(self):
        """Build the Elasticsearch mapping for project documents."""
        def keyword():
            # Exact-match (non-analyzed) string field.
            return {'type': 'string', 'index': 'not_analyzed'}
        def text():
            # Full-text field run through the ICU analyzer.
            return {'type': 'string', 'analyzer': 'default_icu'}
        # Author is searchable full-text but also keeps a raw sub-field
        # for exact matching.
        author = text()
        author['fields'] = {'raw': keyword()}
        properties = {
            'id': {'type': 'long'},
            'name': text(),
            'description': text(),
            'slug': keyword(),
            'lang': keyword(),
            'tags': keyword(),
            'privacy': keyword(),
            'author': author,
            'url': keyword(),
            # Weight field enhances relevancy scoring.
            'weight': {'type': 'float'},
        }
        return {
            self._type: {
                # Disable _all field to reduce index size.
                '_all': {'enabled': False},
                'properties': properties,
            }
        }
    def extract_document(self, data):
        """Build the indexable dict for a project from *data*."""
        names = ('id', 'name', 'slug', 'description', 'lang', 'tags', 'author', 'url')
        doc = {name: data.get(name, '') for name in names}
        # Project boost defaults to a neutral 1.0.
        doc['weight'] = data.get('weight', 1.0)
        return doc
class PageIndex(Index):
    """Search index configuration for Pages"""
    _type = 'page'
    _parent = 'project'
    def get_mapping(self):
        """Build the Elasticsearch mapping for page documents."""
        def keyword():
            # Exact-match (non-analyzed) string field.
            return {'type': 'string', 'index': 'not_analyzed'}
        def text():
            # Full-text field run through the ICU analyzer.
            return {'type': 'string', 'analyzer': 'default_icu'}
        properties = {
            'id': keyword(),
            'sha': keyword(),
            'project': keyword(),
            'version': keyword(),
            'path': keyword(),
            'taxonomy': keyword(),
            'commit': keyword(),
            'title': text(),
            'headers': text(),
            'content': text(),
            # Weight field enhances relevancy scoring.
            'weight': {'type': 'float'},
        }
        return {
            self._type: {
                # Disable _all field to reduce index size.
                '_all': {'enabled': False},
                # Associate a page with a project.
                '_parent': {'type': self._parent},
                'properties': properties,
            }
        }
    def extract_document(self, data):
        """Build the indexable dict for a page from *data*."""
        names = ('id', 'project', 'title', 'headers', 'version', 'path',
                 'content', 'taxonomy', 'commit')
        doc = {name: data.get(name, '') for name in names}
        # Page boost defaults to a neutral 1.0.
        doc['weight'] = data.get('weight', 1.0)
        return doc
class SectionIndex(Index):
    """Search index configuration for Sections"""

    _type = 'section'
    _parent = 'page'

    def get_mapping(self):
        """Return the ES mapping for section documents (children of pages)."""
        keyword = {'type': 'string', 'index': 'not_analyzed'}
        analyzed = {'type': 'string', 'analyzer': 'default_icu'}
        return {
            self._type: {
                # _all disabled to keep the index small.
                '_all': {'enabled': False},
                # Sections are parent/child-joined to their page.
                '_parent': {'type': self._parent},
                # Commenting this out until we need it.
                # 'suggest': {
                #     "type": "completion",
                #     "index_analyzer": "simple",
                #     "search_analyzer": "simple",
                #     "payloads": True,
                # },
                'properties': {
                    'id': dict(keyword),
                    'project': dict(keyword),
                    'version': dict(keyword),
                    'path': dict(keyword),
                    'page_id': dict(keyword),
                    'commit': dict(keyword),
                    'title': dict(analyzed),
                    'content': dict(analyzed),
                    'blocks': {
                        'type': 'object',
                        'properties': {
                            'code': dict(analyzed),
                        },
                    },
                    # Boost field used to tweak relevancy scoring.
                    'weight': {'type': 'float'},
                },
            }
        }

    def extract_document(self, data):
        """Build an indexable dict from *data*, defaulting missing fields to ''."""
        fields = ('id', 'project', 'title', 'page_id', 'version', 'path',
                  'content', 'commit')
        doc = {attr: data.get(attr, '') for attr in fields}
        # Section-level boost, defaulting to a neutral weight.
        doc['weight'] = data.get('weight', 1.0)
        return doc
|
[
"elasticsearch.Elasticsearch",
"django.utils.timezone.now",
"elasticsearch.helpers.bulk_index"
] |
[((942, 974), 'elasticsearch.Elasticsearch', 'Elasticsearch', (['settings.ES_HOSTS'], {}), '(settings.ES_HOSTS)\n', (955, 974), False, 'from elasticsearch import Elasticsearch, exceptions\n'), ((4378, 4426), 'elasticsearch.helpers.bulk_index', 'bulk_index', (['self.es', 'docs'], {'chunk_size': 'chunk_size'}), '(self.es, docs, chunk_size=chunk_size)\n', (4388, 4426), False, 'from elasticsearch.helpers import bulk_index\n'), ((2824, 2838), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (2836, 2838), False, 'from django.utils import timezone\n')]
|
import argparse
import os
from omrdatasettools import Downloader, OmrDataset
from MeasureDetector.ImageConverter import ImageConverter
from MeasureDetector.ImageColorInverter import ImageColorInverter
if __name__ == "__main__":
    # Parse command-line options.
    arg_parser = argparse.ArgumentParser(
        description='Downloads and prepares the MUSCIMA++ dataset')
    arg_parser.add_argument(
        "--dataset_directory", type=str, default="data",
        help="The directory, where the extracted dataset will be copied to")
    options = arg_parser.parse_args()

    target_directory = os.path.join(options.dataset_directory, "muscima_pp")

    # Download both the full MUSCIMA++ v1 dataset and the measure annotations.
    downloader = Downloader()
    downloader.download_and_extract_dataset(
        OmrDataset.MuscimaPlusPlus_V1, target_directory)
    downloader.download_and_extract_dataset(
        OmrDataset.MuscimaPlusPlus_MeasureAnnotations, target_directory)

    # Invert the white-on-black PNGs, then convert grayscale images to RGB.
    inverter = ImageColorInverter()
    inverter.invert_images(target_directory, "*.png")
    converter = ImageConverter()
    converter.convert_grayscale_images_to_rgb_images(target_directory)
|
[
"os.path.join",
"argparse.ArgumentParser",
"MeasureDetector.ImageColorInverter.ImageColorInverter",
"omrdatasettools.Downloader",
"MeasureDetector.ImageConverter.ImageConverter"
] |
[((244, 332), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Downloads and prepares the MUSCIMA++ dataset"""'}), "(description=\n 'Downloads and prepares the MUSCIMA++ dataset')\n", (267, 332), False, 'import argparse\n'), ((551, 602), 'os.path.join', 'os.path.join', (['flags.dataset_directory', '"""muscima_pp"""'], {}), "(flags.dataset_directory, 'muscima_pp')\n", (563, 602), False, 'import os\n'), ((629, 641), 'omrdatasettools.Downloader', 'Downloader', ([], {}), '()\n', (639, 641), False, 'from omrdatasettools import Downloader, OmrDataset\n'), ((884, 904), 'MeasureDetector.ImageColorInverter.ImageColorInverter', 'ImageColorInverter', ([], {}), '()\n', (902, 904), False, 'from MeasureDetector.ImageColorInverter import ImageColorInverter\n'), ((989, 1005), 'MeasureDetector.ImageConverter.ImageConverter', 'ImageConverter', ([], {}), '()\n', (1003, 1005), False, 'from MeasureDetector.ImageConverter import ImageConverter\n')]
|
from flask import Flask
from config import Config
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_login import LoginManager
# Enable foreign key support in sqlite #
from sqlalchemy.engine import Engine
from sqlalchemy import event
@event.listens_for(Engine, "connect")
def set_sqlite_pragma(dbapi_connection, connection_record):
    """Enable SQLite foreign-key enforcement on every new DB connection.

    SQLite disables foreign keys per-connection by default, so this hook
    runs on each SQLAlchemy Engine "connect" event.
    """
    cursor = dbapi_connection.cursor()
    cursor.execute("PRAGMA foreign_keys=ON")
    cursor.close()
# End 'Enable foreign key support in sqlite' block #
# Single module-level Flask application configured from the Config object.
app = Flask(__name__)
app.config.from_object(Config)
# Ensure the log directory exists (LOGS appears to be a pathlib.Path -- TODO confirm).
app.config["LOGS"].mkdir(exist_ok=True)
# Folder under UPLOAD_FOLDER for uploads that are queued for deletion.
scheduled_delete = app.config["UPLOAD_FOLDER"].joinpath("scheduled_delete")
scheduled_delete.mkdir(parents=True, exist_ok=True)
db = SQLAlchemy(app)
migrate = Migrate(app, db)
login = LoginManager(app)
# Endpoint name unauthenticated users are redirected to.
login.login_view = "login"
# Imported last: routes/models import `app`/`db` back from this module,
# so moving this line up would create a circular-import failure.
from app import routes, models
|
[
"flask.Flask",
"flask_sqlalchemy.SQLAlchemy",
"flask_migrate.Migrate",
"flask_login.LoginManager",
"sqlalchemy.event.listens_for"
] |
[((271, 307), 'sqlalchemy.event.listens_for', 'event.listens_for', (['Engine', '"""connect"""'], {}), "(Engine, 'connect')\n", (288, 307), False, 'from sqlalchemy import event\n'), ((531, 546), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (536, 546), False, 'from flask import Flask\n'), ((751, 766), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', (['app'], {}), '(app)\n', (761, 766), False, 'from flask_sqlalchemy import SQLAlchemy\n'), ((777, 793), 'flask_migrate.Migrate', 'Migrate', (['app', 'db'], {}), '(app, db)\n', (784, 793), False, 'from flask_migrate import Migrate\n'), ((802, 819), 'flask_login.LoginManager', 'LoginManager', (['app'], {}), '(app)\n', (814, 819), False, 'from flask_login import LoginManager\n')]
|
import abc
from itertools import chain
class RollingObject(metaclass=abc.ABCMeta):
    """
    Baseclass for rolling iterator objects.
    The __new__ method here sets appropriate magic
    methods for the class (__iter__ and __init__)
    depending on window_type.
    All iteration logic is handled in this class.
    Subclasses just implement methods manipulating
    any attributes needed to compute the value of
    the rolling window as values are added and removed.
    Subclasses *must* implement the following methods
    with the following parameters:
        _init_fixed(self, iterable, window_size, **kwargs)
        _init_variable(self, iterable, window_size, **kwargs)
        _update_window(self, new)
        _add_new(self, new)
        _remove_old(self)
        current_value(self) # this is a @property
    Variable-length instances must also have a self._obs
    attribute returning the current size of the window.
    """
    def __new__(cls, iterable, window_size, window_type="fixed", **kwargs):
        # NOTE(review): these assignments rebind __init__/__next__ on the
        # *class*, not the instance, so the most recently constructed
        # window_type decides the methods for every instance of cls.
        if window_type == "fixed":
            cls.__init__ = cls._init_fixed
            cls.__next__ = cls._next_fixed
        elif window_type == "variable":
            cls.__init__ = cls._init_variable
            cls.__next__ = cls._next_variable
        else:
            raise ValueError("Unknown window_type '{}'".format(window_type))
        self = super().__new__(cls)
        self.window_type = window_type
        # _validate_window_size raises TypeError/ValueError on bad input.
        self.window_size = _validate_window_size(window_size)
        self._iterator = iter(iterable)
        if self.window_type == "variable":
            # _filled becomes True once the window first reaches full size.
            self._filled = False
        return self
    def __repr__(self):
        return "Rolling(operation='{}', window_size={}, window_type='{}')".format(
            self.__class__.__name__, self.window_size, self.window_type
        )
    def __iter__(self):
        # The object is its own iterator; __next__ is bound in __new__.
        return self
    def _next_fixed(self):
        """
        Return the next value for fixed-length windows
        """
        new = next(self._iterator)
        self._update_window(new)
        return self.current_value
    def _next_variable(self):
        """
        Return the next value for variable-length windows
        """
        # while the window size is not reached, add new values
        if not self._filled and self._obs < self.window_size:
            new = next(self._iterator)
            self._add_new(new)
            if self._obs == self.window_size:
                self._filled = True
            return self.current_value
        # once the window size is reached, update window until the iterator finishes
        try:
            new = next(self._iterator)
            self._update_window(new)
            return self.current_value
        # if the iterator finishes, remove the oldest values one at a time
        except StopIteration:
            if self._obs == 1:
                raise
            else:
                self._remove_old()
                return self.current_value
    def extend(self, iterable):
        """
        Extend the iterator being consumed with a new iterable.
        The extend() method may be called at any time (even after
        StopIteration has been raised). The most recent values from
        the current iterator are retained and used in the calculation
        of the next window value.
        For "variable" windows which are decreasing in size, extending
        the iterator means that these windows will grow towards their
        maximum size again.
        """
        self._iterator = chain(self._iterator, iterable)
        if self.window_type == "variable":
            self._filled = False
    @property
    @abc.abstractmethod
    def current_value(self):
        """
        Return the current value of the window
        """
        pass
    @abc.abstractmethod
    def _init_fixed(self):
        """
        Intialise as a fixed-size window
        """
        pass
    @abc.abstractmethod
    def _init_variable(self):
        """
        Intialise as a variable-size window
        """
        pass
    @abc.abstractmethod
    def _remove_old(self):
        """
        Remove the oldest value from the window, decreasing window size by 1
        """
        pass
    @abc.abstractmethod
    def _add_new(self, new):
        """
        Add a new value to the window, increasing window size by 1
        """
        pass
    @abc.abstractmethod
    def _update_window(self, new):
        """
        Add a new value to the window and remove the oldest value from the window
        """
        pass
def _validate_window_size(k):
    """Return *k* unchanged if it is a positive ``int``.

    :raises TypeError: if *k* is not an int
    :raises ValueError: if *k* is an int but not positive
    """
    if isinstance(k, int):
        if k > 0:
            return k
        raise ValueError("window_size must be positive")
    raise TypeError(
        "window_size must be integer type, got {}".format(type(k).__name__)
    )
|
[
"itertools.chain"
] |
[((3534, 3565), 'itertools.chain', 'chain', (['self._iterator', 'iterable'], {}), '(self._iterator, iterable)\n', (3539, 3565), False, 'from itertools import chain\n')]
|
from __future__ import annotations
from typing import Optional, TYPE_CHECKING, Union
from pyspark.sql.types import StructType, DataType
from spark_auto_mapper_fhir.fhir_types.list import FhirList
from spark_auto_mapper_fhir.fhir_types.string import FhirString
from spark_auto_mapper_fhir.fhir_types.uri import FhirUri
from spark_auto_mapper_fhir.extensions.extension_base import ExtensionBase
from spark_auto_mapper_fhir.fhir_types.id import FhirId
from spark_auto_mapper_fhir.base_types.fhir_complex_type_base import FhirComplexTypeBase
from spark_fhir_schemas.r4.complex_types.meta import MetaSchema
if TYPE_CHECKING:
pass
# id_ (string)
# extension (Extension)
# versionId (id)
# lastUpdated (instant)
from spark_auto_mapper_fhir.fhir_types.instant import FhirInstant
# source (uri)
# profile (canonical)
from spark_auto_mapper_fhir.fhir_types.canonical import FhirCanonical
# security (Coding)
from spark_auto_mapper_fhir.complex_types.coding import Coding
# Import for CodeableConcept for security
from spark_auto_mapper_fhir.value_sets.all_security_labels import (
AllSecurityLabelsCode,
)
# End Import for CodeableConcept for security
# tag (Coding)
# Import for CodeableConcept for tag
from spark_auto_mapper_fhir.value_sets.common_tags import CommonTagsCode
# End Import for CodeableConcept for tag
# This file is auto-generated by generate_classes so do not edit manually
# noinspection PyPep8Naming
class Meta(FhirComplexTypeBase):
    """
    Meta
    fhir-base.xsd
        The metadata about a resource. This is content in the resource that is maintained by the infrastructure. Changes to the content might not always be associated with version changes to the resource.
        If the element is present, it must have a value for at least one of the defined elements, an @id referenced from the Narrative, or extensions
    """
    # noinspection PyPep8Naming
    def __init__(
        self,
        *,
        id_: Optional[FhirString] = None,
        extension: Optional[FhirList[ExtensionBase]] = None,
        versionId: Optional[FhirId] = None,
        lastUpdated: Optional[FhirInstant] = None,
        source: Optional[FhirUri] = None,
        profile: Optional[FhirList[FhirCanonical]] = None,
        security: Optional[FhirList[Coding[AllSecurityLabelsCode]]] = None,
        tag: Optional[FhirList[Coding[CommonTagsCode]]] = None,
    ) -> None:
        """
        The metadata about a resource. This is content in the resource that is
        maintained by the infrastructure. Changes to the content might not always be
        associated with version changes to the resource.
        If the element is present, it must have a value for at least one of the
        defined elements, an @id referenced from the Narrative, or extensions
        :param id_: None
        :param extension: May be used to represent additional information that is not part of the basic
        definition of the element. To make the use of extensions safe and manageable,
        there is a strict set of governance applied to the definition and use of
        extensions. Though any implementer can define an extension, there is a set of
        requirements that SHALL be met as part of the definition of the extension.
        :param versionId: The version specific identifier, as it appears in the version portion of the
        URL. This value changes when the resource is created, updated, or deleted.
        :param lastUpdated: When the resource last changed - e.g. when the version changed.
        :param source: A uri that identifies the source system of the resource. This provides a
        minimal amount of [[[Provenance]]] information that can be used to track or
        differentiate the source of information in the resource. The source may
        identify another FHIR server, document, message, database, etc.
        :param profile: A list of profiles (references to [[[StructureDefinition]]] resources) that
        this resource claims to conform to. The URL is a reference to
        [[[StructureDefinition.url]]].
        :param security: Security labels applied to this resource. These tags connect specific
        resources to the overall security policy and infrastructure.
        :param tag: Tags applied to this resource. Tags are intended to be used to identify and
        relate resources to process and workflow, and applications are not required to
        consider the tags when interpreting the meaning of a resource.
        """
        # All fields are forwarded unchanged to the FHIR complex-type base.
        super().__init__(
            id_=id_,
            extension=extension,
            versionId=versionId,
            lastUpdated=lastUpdated,
            source=source,
            profile=profile,
            security=security,
            tag=tag,
        )
    def get_schema(
        self, include_extension: bool
    ) -> Optional[Union[StructType, DataType]]:
        """Return the Spark schema for the FHIR ``Meta`` complex type.

        Delegates to the generated ``MetaSchema``; ``include_extension`` is
        forwarded unchanged (presumably toggling extension fields in the
        schema -- see MetaSchema).
        """
        return MetaSchema.get_schema(include_extension=include_extension)
|
[
"spark_fhir_schemas.r4.complex_types.meta.MetaSchema.get_schema"
] |
[((5004, 5062), 'spark_fhir_schemas.r4.complex_types.meta.MetaSchema.get_schema', 'MetaSchema.get_schema', ([], {'include_extension': 'include_extension'}), '(include_extension=include_extension)\n', (5025, 5062), False, 'from spark_fhir_schemas.r4.complex_types.meta import MetaSchema\n')]
|
import os
from extract.readers import CSVReader, JSONReader, XLSReader, Reader
from typing import Type
class Extractor:
    """Loads records from every supported file in a directory.

    A reader class is selected per file extension; each reader's
    ``get_data`` returns records that are accumulated on ``self.data``.
    """

    path: str
    data: list
    _readers: dict[str, Type[Reader]]

    # Evaluated once at class-creation time, relative to the CWD then.
    default_path = os.path.join(os.getcwd(), 'input_files')

    def __init__(self, path: str = default_path):
        self.path = path
        self.data = []
        # NOTE(review): XLSReader is registered under '.xml' -- this looks
        # like it should be '.xls'; confirm intent before changing.
        self._readers = {
            '.csv': CSVReader,
            '.json': JSONReader,
            '.xml': XLSReader,
        }
        print(self.path)

    def get_data(self, path: str = default_path) -> list[dict]:
        """Read every supported file under *path* and return all records.

        :param path: directory to scan (non-recursive).
        :return: the accumulated list of record dicts.
        """
        # Fixed: annotation was `[dict]` (a runtime list literal, not a type).
        for file in os.listdir(path):
            self._extract_data(file, path)
        return self.data

    def _extract_data(self, file, path):
        """Extract one file's records into ``self.data``.

        Files whose extension has no registered reader are skipped
        (previously this raised KeyError).
        """
        _, file_extension = os.path.splitext(file)
        reader_cls = self._readers.get(file_extension)
        if reader_cls is None:
            # Unknown extension (e.g. a stray .txt) -- ignore it.
            return
        filepath = os.path.join(path, file)
        self.data.extend(reader_cls().get_data(filepath))
|
[
"os.getcwd",
"os.path.splitext",
"os.path.join",
"os.listdir"
] |
[((221, 232), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (230, 232), False, 'import os\n'), ((581, 597), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (591, 597), False, 'import os\n'), ((772, 794), 'os.path.splitext', 'os.path.splitext', (['file'], {}), '(file)\n', (788, 794), False, 'import os\n'), ((814, 838), 'os.path.join', 'os.path.join', (['path', 'file'], {}), '(path, file)\n', (826, 838), False, 'import os\n')]
|
from django.shortcuts import render
from django.http import HttpResponseForbidden
# Create your views here.
def _session_view(request, template, required_type=None):
    """Render *template* with the session's account id in the context.

    :param request: incoming HttpRequest; its session holds "account_id"
        and "type".
    :param template: template path to render.
    :param required_type: when given, the session "type" must equal it or
        a 403 ``HttpResponseForbidden`` is returned.  The type is checked
        before "account_id" is read, so a wrong-typed session without an
        account_id now gets a 403 instead of a KeyError.
    """
    if required_type is not None and request.session["type"] != required_type:
        return HttpResponseForbidden()
    return render(request, template, {"account_id": request.session["account_id"]})


def default(request):
    """Public home page; no session data required."""
    return render(request, 'basicInfo/basic_homepage.html', {})


def signup(request):
    """Public sign-up page."""
    return render(request, "basicInfo/basic_signup.html", {})


def login(request):
    """Public login page."""
    return render(request, "basicInfo/basic_login.html", {})


# ---- Student pages: session type must be 0 ----

def student(request):
    return _session_view(request, "basicInfo/student_personinfo.html", 0)


def exam(request):
    return _session_view(request, "basicInfo/student_examarrange.html", 0)


def calendar(request):
    # NOTE: the template on disk keeps the original "calender" spelling.
    return _session_view(request, "basicInfo/student_calender.html", 0)


def courseplan(request):
    return _session_view(request, "basicInfo/student_courseplan.html", 0)


def personalinfo(request):
    # Same template as student(); both names are kept for URLconf compatibility.
    return _session_view(request, "basicInfo/student_personinfo.html", 0)


def courseregist(request):
    return _session_view(request, "basicInfo/student_courseregist.html", 0)


def mycourse(request):
    return _session_view(request, "basicInfo/student_mycourse.html", 0)


def grade(request):
    return _session_view(request, "basicInfo/student_grade.html", 0)


def coursesearch(request):
    return _session_view(request, "basicInfo/student_coursesearch.html", 0)


# ---- Teacher pages: session type must be 1 ----

def teacher(request):
    return _session_view(request, "basicInfo/teacher_information.html", 1)


def teacher_index(request):
    return _session_view(request, "basicInfo/teacher_information.html", 1)


def teacher_information(request):
    return _session_view(request, "basicInfo/teacher_information.html", 1)


def teacher_comment(request):
    return _session_view(request, "basicInfo/teacher_comment.html", 1)


def teacher_course_regist(request):
    return _session_view(request, "basicInfo/teacher_course_regist.html", 1)


def teacher_course_open(request):
    return _session_view(request, "basicInfo/teacher_course_open.html", 1)


def teacher_course_edit(request):
    return _session_view(request, "basicInfo/teacher_course_edit.html", 1)


def school_forum(request):
    # The forum is open to every logged-in session type.
    return _session_view(request, "basicInfo/school_forum.html")


# ---- Admin pages: session type must be 2 ----

def admin(request):
    return _session_view(request, "basicInfo/admin_information.html", 2)


def admin_index(request):
    return _session_view(request, "basicInfo/admin_information.html", 2)


def admin_information(request):
    return _session_view(request, "basicInfo/admin_information.html", 2)


def admin_comment(request):
    return _session_view(request, "basicInfo/admin_comment.html", 2)


def admin_course_regist(request):
    return _session_view(request, "basicInfo/admin_course_regist.html", 2)


def admin_course_open(request):
    return _session_view(request, "basicInfo/admin_course_open.html", 2)


def admin_course_edit(request):
    return _session_view(request, "basicInfo/admin_course_edit.html", 2)


def admin_course_approve(request):
    return _session_view(request, "basicInfo/admin_course_approve.html", 2)


def admin_teach_approve(request):
    return _session_view(request, "basicInfo/admin_teach_approve.html", 2)


def admin_course_adjust(request):
    return _session_view(request, "basicInfo/admin_course_adjust.html", 2)


def admin_teach_adjust(request):
    return _session_view(request, "basicInfo/admin_teach_adjust.html", 2)


def admin_apply_approve_s(request):
    return _session_view(request, "basicInfo/admin_apply_approve_s.html", 2)


def admin_apply_approve_t(request):
    return _session_view(request, "basicInfo/admin_apply_approve_t.html", 2)


def admin_select_adjust(request):
    return _session_view(request, "basicInfo/admin_select_adjust.html", 2)
|
[
"django.shortcuts.render",
"django.http.HttpResponseForbidden"
] |
[((151, 203), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/basic_homepage.html"""', '{}'], {}), "(request, 'basicInfo/basic_homepage.html', {})\n", (157, 203), False, 'from django.shortcuts import render\n'), ((242, 292), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/basic_signup.html"""', '{}'], {}), "(request, 'basicInfo/basic_signup.html', {})\n", (248, 292), False, 'from django.shortcuts import render\n'), ((330, 379), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/basic_login.html"""', '{}'], {}), "(request, 'basicInfo/basic_login.html', {})\n", (336, 379), False, 'from django.shortcuts import render\n'), ((588, 650), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/student_personinfo.html"""', 'feedDict'], {}), "(request, 'basicInfo/student_personinfo.html', feedDict)\n", (594, 650), False, 'from django.shortcuts import render\n'), ((854, 917), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/student_examarrange.html"""', 'feedDict'], {}), "(request, 'basicInfo/student_examarrange.html', feedDict)\n", (860, 917), False, 'from django.shortcuts import render\n'), ((1125, 1185), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/student_calender.html"""', 'feedDict'], {}), "(request, 'basicInfo/student_calender.html', feedDict)\n", (1131, 1185), False, 'from django.shortcuts import render\n'), ((1395, 1457), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/student_courseplan.html"""', 'feedDict'], {}), "(request, 'basicInfo/student_courseplan.html', feedDict)\n", (1401, 1457), False, 'from django.shortcuts import render\n'), ((1669, 1731), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/student_personinfo.html"""', 'feedDict'], {}), "(request, 'basicInfo/student_personinfo.html', feedDict)\n", (1675, 1731), False, 'from django.shortcuts import render\n'), ((1943, 2007), 'django.shortcuts.render', 'render', (['request', 
'"""basicInfo/student_courseregist.html"""', 'feedDict'], {}), "(request, 'basicInfo/student_courseregist.html', feedDict)\n", (1949, 2007), False, 'from django.shortcuts import render\n'), ((2217, 2277), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/student_mycourse.html"""', 'feedDict'], {}), "(request, 'basicInfo/student_mycourse.html', feedDict)\n", (2223, 2277), False, 'from django.shortcuts import render\n'), ((2482, 2539), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/student_grade.html"""', 'feedDict'], {}), "(request, 'basicInfo/student_grade.html', feedDict)\n", (2488, 2539), False, 'from django.shortcuts import render\n'), ((2751, 2815), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/student_coursesearch.html"""', 'feedDict'], {}), "(request, 'basicInfo/student_coursesearch.html', feedDict)\n", (2757, 2815), False, 'from django.shortcuts import render\n'), ((3022, 3085), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/teacher_information.html"""', 'feedDict'], {}), "(request, 'basicInfo/teacher_information.html', feedDict)\n", (3028, 3085), False, 'from django.shortcuts import render\n'), ((3296, 3359), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/teacher_information.html"""', 'feedDict'], {}), "(request, 'basicInfo/teacher_information.html', feedDict)\n", (3302, 3359), False, 'from django.shortcuts import render\n'), ((3578, 3641), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/teacher_information.html"""', 'feedDict'], {}), "(request, 'basicInfo/teacher_information.html', feedDict)\n", (3584, 3641), False, 'from django.shortcuts import render\n'), ((3856, 3915), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/teacher_comment.html"""', 'feedDict'], {}), "(request, 'basicInfo/teacher_comment.html', feedDict)\n", (3862, 3915), False, 'from django.shortcuts import render\n'), ((4136, 4201), 'django.shortcuts.render', 'render', 
(['request', '"""basicInfo/teacher_course_regist.html"""', 'feedDict'], {}), "(request, 'basicInfo/teacher_course_regist.html', feedDict)\n", (4142, 4201), False, 'from django.shortcuts import render\n'), ((4418, 4481), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/teacher_course_open.html"""', 'feedDict'], {}), "(request, 'basicInfo/teacher_course_open.html', feedDict)\n", (4424, 4481), False, 'from django.shortcuts import render\n'), ((4700, 4763), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/teacher_course_edit.html"""', 'feedDict'], {}), "(request, 'basicInfo/teacher_course_edit.html', feedDict)\n", (4706, 4763), False, 'from django.shortcuts import render\n'), ((4899, 4955), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/school_forum.html"""', 'feedDict'], {}), "(request, 'basicInfo/school_forum.html', feedDict)\n", (4905, 4955), False, 'from django.shortcuts import render\n'), ((5158, 5219), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/admin_information.html"""', 'feedDict'], {}), "(request, 'basicInfo/admin_information.html', feedDict)\n", (5164, 5219), False, 'from django.shortcuts import render\n'), ((5428, 5489), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/admin_information.html"""', 'feedDict'], {}), "(request, 'basicInfo/admin_information.html', feedDict)\n", (5434, 5489), False, 'from django.shortcuts import render\n'), ((5706, 5767), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/admin_information.html"""', 'feedDict'], {}), "(request, 'basicInfo/admin_information.html', feedDict)\n", (5712, 5767), False, 'from django.shortcuts import render\n'), ((5978, 6035), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/admin_comment.html"""', 'feedDict'], {}), "(request, 'basicInfo/admin_comment.html', feedDict)\n", (5984, 6035), False, 'from django.shortcuts import render\n'), ((6254, 6317), 'django.shortcuts.render', 'render', 
(['request', '"""basicInfo/admin_course_regist.html"""', 'feedDict'], {}), "(request, 'basicInfo/admin_course_regist.html', feedDict)\n", (6260, 6317), False, 'from django.shortcuts import render\n'), ((6532, 6593), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/admin_course_open.html"""', 'feedDict'], {}), "(request, 'basicInfo/admin_course_open.html', feedDict)\n", (6538, 6593), False, 'from django.shortcuts import render\n'), ((6810, 6871), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/admin_course_edit.html"""', 'feedDict'], {}), "(request, 'basicInfo/admin_course_edit.html', feedDict)\n", (6816, 6871), False, 'from django.shortcuts import render\n'), ((7089, 7153), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/admin_course_approve.html"""', 'feedDict'], {}), "(request, 'basicInfo/admin_course_approve.html', feedDict)\n", (7095, 7153), False, 'from django.shortcuts import render\n'), ((7370, 7433), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/admin_teach_approve.html"""', 'feedDict'], {}), "(request, 'basicInfo/admin_teach_approve.html', feedDict)\n", (7376, 7433), False, 'from django.shortcuts import render\n'), ((7650, 7713), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/admin_course_adjust.html"""', 'feedDict'], {}), "(request, 'basicInfo/admin_course_adjust.html', feedDict)\n", (7656, 7713), False, 'from django.shortcuts import render\n'), ((7929, 7991), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/admin_teach_adjust.html"""', 'feedDict'], {}), "(request, 'basicInfo/admin_teach_adjust.html', feedDict)\n", (7935, 7991), False, 'from django.shortcuts import render\n'), ((8212, 8277), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/admin_apply_approve_s.html"""', 'feedDict'], {}), "(request, 'basicInfo/admin_apply_approve_s.html', feedDict)\n", (8218, 8277), False, 'from django.shortcuts import render\n'), ((8496, 8561), 
'django.shortcuts.render', 'render', (['request', '"""basicInfo/admin_apply_approve_t.html"""', 'feedDict'], {}), "(request, 'basicInfo/admin_apply_approve_t.html', feedDict)\n", (8502, 8561), False, 'from django.shortcuts import render\n'), ((8780, 8843), 'django.shortcuts.render', 'render', (['request', '"""basicInfo/admin_select_adjust.html"""', 'feedDict'], {}), "(request, 'basicInfo/admin_select_adjust.html', feedDict)\n", (8786, 8843), False, 'from django.shortcuts import render\n'), ((550, 573), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (571, 573), False, 'from django.http import HttpResponseForbidden\n'), ((818, 841), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (839, 841), False, 'from django.http import HttpResponseForbidden\n'), ((1089, 1112), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (1110, 1112), False, 'from django.http import HttpResponseForbidden\n'), ((1359, 1382), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (1380, 1382), False, 'from django.http import HttpResponseForbidden\n'), ((1633, 1656), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (1654, 1656), False, 'from django.http import HttpResponseForbidden\n'), ((1907, 1930), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (1928, 1930), False, 'from django.http import HttpResponseForbidden\n'), ((2181, 2204), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (2202, 2204), False, 'from django.http import HttpResponseForbidden\n'), ((2446, 2469), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (2467, 2469), False, 'from django.http import HttpResponseForbidden\n'), ((2715, 2738), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (2736, 2738), False, 'from django.http import HttpResponseForbidden\n'), 
((2986, 3009), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (3007, 3009), False, 'from django.http import HttpResponseForbidden\n'), ((3260, 3283), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (3281, 3283), False, 'from django.http import HttpResponseForbidden\n'), ((3542, 3565), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (3563, 3565), False, 'from django.http import HttpResponseForbidden\n'), ((3820, 3843), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (3841, 3843), False, 'from django.http import HttpResponseForbidden\n'), ((4100, 4123), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (4121, 4123), False, 'from django.http import HttpResponseForbidden\n'), ((4382, 4405), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (4403, 4405), False, 'from django.http import HttpResponseForbidden\n'), ((4664, 4687), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (4685, 4687), False, 'from django.http import HttpResponseForbidden\n'), ((5122, 5145), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (5143, 5145), False, 'from django.http import HttpResponseForbidden\n'), ((5392, 5415), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (5413, 5415), False, 'from django.http import HttpResponseForbidden\n'), ((5670, 5693), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (5691, 5693), False, 'from django.http import HttpResponseForbidden\n'), ((5942, 5965), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (5963, 5965), False, 'from django.http import HttpResponseForbidden\n'), ((6218, 6241), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (6239, 6241), False, 'from django.http import 
HttpResponseForbidden\n'), ((6496, 6519), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (6517, 6519), False, 'from django.http import HttpResponseForbidden\n'), ((6774, 6797), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (6795, 6797), False, 'from django.http import HttpResponseForbidden\n'), ((7053, 7076), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (7074, 7076), False, 'from django.http import HttpResponseForbidden\n'), ((7334, 7357), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (7355, 7357), False, 'from django.http import HttpResponseForbidden\n'), ((7614, 7637), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (7635, 7637), False, 'from django.http import HttpResponseForbidden\n'), ((7893, 7916), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (7914, 7916), False, 'from django.http import HttpResponseForbidden\n'), ((8176, 8199), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (8197, 8199), False, 'from django.http import HttpResponseForbidden\n'), ((8460, 8483), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (8481, 8483), False, 'from django.http import HttpResponseForbidden\n'), ((8744, 8767), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (8765, 8767), False, 'from django.http import HttpResponseForbidden\n')]
|
"""Test pydeCONZ session class.
pytest --cov-report term-missing --cov=pydeconz tests/test_init.py
"""
import asyncio
from unittest.mock import Mock, patch
from asynctest import CoroutineMock
import pytest
import aiohttp
from pydeconz import DeconzSession
from pydeconz.sensor import GenericStatus
API_KEY = "1234567890"
IP = "127.0.0.1"
PORT = "80"
@pytest.fixture
def session() -> DeconzSession:
    """Build a DeconzSession wired to a mocked aiohttp client session."""
    mocked_aiohttp_session = Mock()
    return DeconzSession(mocked_aiohttp_session, IP, PORT, API_KEY)
async def test_initialize(session) -> None:
    """Test a successful call of load_parameters."""
    # Stub out the REST request so initialize() receives a canned full-state
    # payload instead of performing real HTTP I/O.
    with patch(
        "pydeconz.DeconzSession.request",
        new=CoroutineMock(
            return_value={
                "config": {"bridgeid": "012345"},
                # One group with a single scene and no lights.
                "groups": {
                    "g1": {
                        "id": "gid",
                        "scenes": [{"id": "sc1", "name": "scene1"}],
                        "state": {},
                        "action": {},
                        "lights": [],
                    }
                },
                # One light and one generic-status sensor.
                "lights": {"l1": {"state": {}}},
                "sensors": {
                    "s1": {"type": GenericStatus.ZHATYPE[0], "state": {}, "config": {}}
                },
            }
        ),
    ):
        await session.initialize()
    # Groups, scenes, lights and sensors from the payload must be reachable
    # through the session, each with its deconz_id derived from its key.
    assert "g1" in session.groups
    assert session.groups["g1"].id == "gid"
    assert "sc1" in session.groups["g1"].scenes
    assert session.groups["g1"].deconz_id == "/groups/g1"
    assert session.groups["g1"].scenes["sc1"].id == "sc1"
    assert "l1" in session.lights
    assert session.lights["l1"].deconz_id == "/lights/l1"
    assert "s1" in session.sensors
    assert session.sensors["s1"].deconz_id == "/sensors/s1"
    assert session.sensors["s1"].type == GenericStatus.ZHATYPE[0]
|
[
"pydeconz.DeconzSession",
"asynctest.CoroutineMock",
"unittest.mock.Mock"
] |
[((456, 462), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (460, 462), False, 'from unittest.mock import Mock, patch\n'), ((474, 515), 'pydeconz.DeconzSession', 'DeconzSession', (['session', 'IP', 'PORT', 'API_KEY'], {}), '(session, IP, PORT, API_KEY)\n', (487, 515), False, 'from pydeconz import DeconzSession\n'), ((685, 999), 'asynctest.CoroutineMock', 'CoroutineMock', ([], {'return_value': "{'config': {'bridgeid': '012345'}, 'groups': {'g1': {'id': 'gid', 'scenes':\n [{'id': 'sc1', 'name': 'scene1'}], 'state': {}, 'action': {}, 'lights':\n []}}, 'lights': {'l1': {'state': {}}}, 'sensors': {'s1': {'type':\n GenericStatus.ZHATYPE[0], 'state': {}, 'config': {}}}}"}), "(return_value={'config': {'bridgeid': '012345'}, 'groups': {\n 'g1': {'id': 'gid', 'scenes': [{'id': 'sc1', 'name': 'scene1'}],\n 'state': {}, 'action': {}, 'lights': []}}, 'lights': {'l1': {'state': {\n }}}, 'sensors': {'s1': {'type': GenericStatus.ZHATYPE[0], 'state': {},\n 'config': {}}}})\n", (698, 999), False, 'from asynctest import CoroutineMock\n')]
|
# Yuio project, MIT licence.
#
# https://github.com/taminomara/yuio/
#
# You're free to copy this file to your project and edit it for your needs,
# just keep this copyright line please :3
"""
This module provides basic functionality to interact with git.
It comes in handy when writing deployment scripts.
Interacting with a repository
-----------------------------
All repository interactions are done through the :class:`Repo` class
and its methods. If an interaction fails, a :class:`GitException` is raised.
.. autoclass:: Repo
:members:
.. autoclass:: GitException
:members:
Commit and status objects
-------------------------
Some of :class:`Repo` commands return parsed descriptions of git objects:
.. autoclass:: Commit
:members:
.. autoclass:: Status
:members:
.. autoclass:: FileStatus
:members:
.. autoclass:: Modification
:members:
Parsing git refs
----------------
When you need to query a git ref from a user, :class:`RefParser` will ensure
that the ref points to a valid git object:
.. autoclass:: RefParser
"""
import dataclasses
import enum
import pathlib
import re
import subprocess
import typing as _t
from dataclasses import dataclass
from datetime import datetime
import yuio.parse
class GitException(subprocess.SubprocessError):
    """Raised when git returns a non-zero exit code.

    Also used for setup failures: missing git executable or a path
    that is not a git repository (see :class:`Repo`).
    """
class Repo:
    """A class that allows interactions with a git repository.
    """
    def __init__(self, path: _t.Union[pathlib.Path, str]):
        # Repository root; all git commands run with this as their cwd.
        self._path = pathlib.Path(path)
        # Presence of a `.git` directory is the cheapest repo-root check.
        if not self._path.joinpath('.git').is_dir():
            raise GitException(f'{path} is not a git repository')
        try:
            self.git('--version')
        except FileNotFoundError:
            raise GitException(f'git executable was not found')
        try:
            # `git status` fails for invalid or corrupt repositories.
            self.git('status')
        except GitException:
            raise GitException(f'{path} is not a git repository')
    def git(self, *args: str) -> bytes:
        """Call git and return its stdout.

        Raises :class:`GitException` (with git's stderr in the message)
        if git exits with a non-zero status code.
        """
        res = subprocess.run(
            ['git'] + list(args),
            stderr=subprocess.PIPE,
            stdout=subprocess.PIPE,
            cwd=self._path,
        )
        if res.returncode != 0:
            raise GitException(
                f'git exited with status code {res.returncode}:\n'
                f'{res.stderr.decode()}'
            )
        return res.stdout
    def status(self) -> 'Status':
        """Query the current repository status.
        """
        # `--porcelain=v2 -z` gives a stable, NUL-separated machine format.
        text = self.git('status', '--porcelain=v2', '--branch', '-z')
        lines = iter(text.split(b'\0'))
        status = Status(commit='')
        for line_b in lines:
            line = line_b.decode()
            if line.startswith('# branch.oid'):
                # Current commit hash.
                status.commit = line[13:]
            elif line.startswith('# branch.head'):
                # `(detached)` means no branch is checked out.
                if line[14:] != '(detached)':
                    status.branch = line[14:]
            elif line.startswith('# branch.upstream'):
                status.upstream = line[18:]
            elif line.startswith('# branch.ab'):
                # Ahead/behind counts, e.g. `# branch.ab +1 -2`.
                match = re.match(
                    r'^\+(\d+) -(\d+)$', line[12:])
                assert match is not None
                status.ahead = int(match.group(1))
                status.behind = int(match.group(2))
            elif line.startswith('1'):
                # Ordinary changed entry: `1 XY sub mH mI mW hH hI path`;
                # capture the two XY modification letters and the path.
                match = re.match(
                    r'^(.)(.) .{4} (?:[^ ]+ ){5}(.*)$', line[2:])
                assert match is not None
                file_status = FileStatus(
                    path=pathlib.Path(match.group(3)),
                    staged=Modification(match.group(1)),
                    tree=Modification(match.group(2)),
                )
                status.changes.append(file_status)
                status.has_tracked_changes |= \
                    file_status.staged is not Modification.UNTRACKED
                status.has_untracked_changes |= \
                    file_status.staged is Modification.UNTRACKED
            elif line.startswith('2'):
                # Renamed/copied entry; the origin path follows in the
                # next NUL-separated record, consumed via next(lines).
                match = re.match(
                    r'^(.)(.) .{4} (?:[^ ]+ ){6}(.*)$', line[2:])
                assert match is not None
                file_status = FileStatus(
                    path=pathlib.Path(match.group(3)),
                    path_from=pathlib.Path(next(lines).decode()),
                    staged=Modification(match.group(1)),
                    tree=Modification(match.group(2)),
                )
                status.changes.append(file_status)
                status.has_tracked_changes |= \
                    file_status.staged is not Modification.UNTRACKED
                status.has_untracked_changes |= \
                    file_status.staged is Modification.UNTRACKED
        return status
    # Log format: one field per line (hash, author, committer, dates),
    # then the message indented by one space (`%w(0,0,1)%B`),
    # terminated by a line containing a single `-`.
    _LOG_FMT = '%H%n%aN%n%aE%n%aI%n%cN%n%cE%n%cI%n%w(0,0,1)%B%w(0,0)%n-'
    def log(
        self,
        *refs: str,
        max_entries: _t.Optional[int] = 10
    ) -> _t.List['Commit']:
        """Query the log for given git objects.

        Note that by default log output is limited by ten entries.
        Pass ``max_entries=None`` to remove the limit.
        """
        args = [f'--pretty=format:{self._LOG_FMT}']
        if max_entries is not None:
            args += ['-n', str(max_entries)]
        args += list(refs)
        text = self.git('log', *args)
        lines = iter(text.decode().split('\n'))
        commits = []
        for line in lines:
            # `line` is the hash; the parser consumes the rest of
            # this entry from the shared `lines` iterator.
            commits.append(self._parse_single_log_entry(line, lines))
        return commits
    def show(self, ref) -> _t.Optional['Commit']:
        """Query information for the given git object.

        Return `None` if object is not found.
        """
        try:
            text = self.git(
                'show',
                f'--pretty=format:{self._LOG_FMT}',
                ref,
            )
        except GitException:
            # An unknown ref (or any other git failure) maps to None.
            return None
        if not text:
            return None
        lines = iter(text.decode().split('\n'))
        line = next(lines)
        commit = self._parse_single_log_entry(line, lines)
        # Remember the user's original spelling of the ref.
        commit.orig_ref = ref
        return commit
    @staticmethod
    def _parse_single_log_entry(commit, lines) -> 'Commit':
        # `commit` is the already-consumed hash line; `lines` yields the
        # remaining fields in `_LOG_FMT` order.
        author = next(lines)
        author_email = next(lines)
        author_date = datetime.fromisoformat(next(lines))
        committer = next(lines)
        committer_email = next(lines)
        committer_date = datetime.fromisoformat(next(lines))
        title = next(lines)
        body = ''
        while True:
            line = next(lines)
            if not line or line.startswith(' '):
                # Message lines carry a one-space indent (see _LOG_FMT);
                # strip it while accumulating.
                body += line[1:] + '\n'
            else:
                # The `-` terminator line ends this entry.
                break
        body = body.strip('\n')
        if body:
            body += '\n'
        return Commit(
            commit,
            author,
            author_email,
            author_date,
            committer,
            committer_email,
            committer_date,
            title,
            body
        )
@dataclass
class Commit:
    """Parsed description of a single git commit.
    """
    #: Full commit hash.
    hash: str
    #: Author name.
    author: str
    #: Author email.
    author_email: str
    #: Author time.
    author_date: datetime
    #: Committer name.
    committer: str
    #: Committer email.
    committer_email: str
    #: Committer time.
    committer_date: datetime
    #: First line of the commit message.
    title: str
    #: Rest of the commit message.
    body: str
    #: When this commit was resolved from a user-supplied ref, the
    #: original spelling of that ref (e.g. ``'HEAD'``).
    #:
    #: See also :class:`RefParser`.
    orig_ref: _t.Optional[str] = None
    @property
    def short_hash(self):
        """Abbreviated (seven-character) commit hash."""
        return self.hash[:7]
    def __str__(self):
        return self.orig_ref if self.orig_ref else self.short_hash
class Modification(enum.Enum):
    """For changed file, what modification was applied to it.

    Values are the single-letter codes from the ``XY`` field of
    ``git status --porcelain=v2`` output (see :meth:`Repo.status`).
    """
    #: File wasn't changed.
    UNMODIFIED = '.'
    #: File was changed.
    MODIFIED = 'M'
    #: File was created.
    ADDED = 'A'
    #: File was deleted.
    DELETED = 'D'
    #: File was renamed (and possibly changed).
    RENAMED = 'R'
    #: File was copied (and possibly changed).
    COPIED = 'C'
    #: File with conflicts is unmerged.
    UPDATED = 'U'
    #: File is in ``.gitignore``.
    IGNORED = '?'
    #: File was created but not yet added to git, i.e. not staged.
    UNTRACKED = '!'
@dataclass
class FileStatus:
    """Status of a changed file, as reported by :meth:`Repo.status`.
    """
    #: Path of the file.
    path: pathlib.Path
    #: If file was moved (renamed/copied), path it was moved from.
    path_from: _t.Optional[pathlib.Path] = None
    #: File modification in the index (staged).
    staged: Modification = Modification.UNMODIFIED
    #: File modification in the tree (unstaged).
    tree: Modification = Modification.UNMODIFIED
@dataclass
class Status:
    """Status of a working copy, as reported by :meth:`Repo.status`.
    """
    #: Current commit hash.
    commit: str
    #: Name of the current branch (`None` when HEAD is detached).
    branch: _t.Optional[str] = None
    #: Name of the upstream branch.
    upstream: _t.Optional[str] = None
    #: Number of commits the branch is ahead of upstream.
    ahead: _t.Optional[int] = None
    #: Number of commits the branch is behind of upstream.
    behind: _t.Optional[int] = None
    #: True if any tracked file was changed.
    has_tracked_changes: bool = False
    #: True if any file was added but not tracked.
    has_untracked_changes: bool = False
    #: List of changed files, both tracked and untracked.
    changes: _t.List[FileStatus] = dataclasses.field(default_factory=list)
    @property
    def has_changes(self) -> bool:
        """True if there are any changes in the repository.
        """
        return self.has_tracked_changes or self.has_untracked_changes
class RefParser(yuio.parse.Parser[Commit]):
    """Parser that resolves a user-supplied git ref (commit hash, tag,
    branch, ``HEAD``, and so on) into a :class:`Commit`.
    """
    def __init__(self, repo: Repo):
        super().__init__()
        self._repo = repo
    def parse(self, value: str) -> Commit:
        resolved = self._repo.show(value)
        if resolved is not None:
            return resolved
        raise yuio.parse.ParsingError('invalid git ref')
    def parse_config(self, value: _t.Any) -> Commit:
        if isinstance(value, str):
            return self.parse(value)
        raise yuio.parse.ParsingError('expected a string')
    def validate(self, value: Commit):
        super().validate(value)
|
[
"dataclasses.field",
"pathlib.Path",
"re.match"
] |
[((9792, 9831), 'dataclasses.field', 'dataclasses.field', ([], {'default_factory': 'list'}), '(default_factory=list)\n', (9809, 9831), False, 'import dataclasses\n'), ((1521, 1539), 'pathlib.Path', 'pathlib.Path', (['path'], {}), '(path)\n', (1533, 1539), False, 'import pathlib\n'), ((3145, 3187), 're.match', 're.match', (['"""^\\\\+(\\\\d+) -(\\\\d+)$"""', 'line[12:]'], {}), "('^\\\\+(\\\\d+) -(\\\\d+)$', line[12:])\n", (3153, 3187), False, 'import re\n'), ((3414, 3467), 're.match', 're.match', (['"""^(.)(.) .{4} (?:[^ ]+ ){5}(.*)$"""', 'line[2:]'], {}), "('^(.)(.) .{4} (?:[^ ]+ ){5}(.*)$', line[2:])\n", (3422, 3467), False, 'import re\n'), ((4104, 4157), 're.match', 're.match', (['"""^(.)(.) .{4} (?:[^ ]+ ){6}(.*)$"""', 'line[2:]'], {}), "('^(.)(.) .{4} (?:[^ ]+ ){6}(.*)$', line[2:])\n", (4112, 4157), False, 'import re\n')]
|
"""Fetch the HTML for my personal website and pretty-print it."""
from bs4 import BeautifulSoup
import requests

response = requests.get('http://xuguanzhou.com')
parsed = BeautifulSoup(response.text, "html.parser")
print(type(parsed))
print(parsed.prettify())
|
[
"bs4.BeautifulSoup",
"requests.get"
] |
[((102, 139), 'requests.get', 'requests.get', (['"""http://xuguanzhou.com"""'], {}), "('http://xuguanzhou.com')\n", (114, 139), False, 'import requests\n'), ((148, 189), 'bs4.BeautifulSoup', 'BeautifulSoup', (['markup.text', '"""html.parser"""'], {}), "(markup.text, 'html.parser')\n", (161, 189), False, 'from bs4 import BeautifulSoup\n')]
|
# --------------
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
def visual_summary(type_, df, col):
    """Summarize the Data using Visual Method.

    This function accepts the type of visualization, the data frame
    and the column to be summarized, and displays the matching chart.

    Keyword arguments:
    type_ -- visualization method to be used ('hist', 'scatter' or 'pie')
    df -- the dataframe
    col -- the column in the dataframe to be summarized
           (a two-element list [x, y] for 'scatter')
    """
    if type_ == 'hist':
        df[col].hist(bins=20)
    elif type_ == 'scatter':
        plt.scatter(df[col[0]], df[col[1]])
    elif type_ == 'pie':
        plt.pie(df[col].value_counts())
    else:
        # Unknown chart type: tell the user and draw nothing.
        print("call the function again with proper parameters.")
        return
    plt.show()
def central_tendency(type_, df, col):
    """Calculate the measure of central tendency.

    This function accepts the type of central tendency to be calculated,
    the data frame and the required column, and returns the calculated
    measure.

    Keyword arguments:
    type_ -- type of central tendency: 'mean', 'mode' or 'median'
    df -- the dataframe
    col -- the column in the dataframe to do the calculations

    Returns:
    cent_tend -- the calculated measure of central tendency
                 (a scalar for 'mean'/'median', a Series for 'mode')

    Raises:
    ValueError -- if `type_` is not one of the supported measures
                  (previously this fell through and crashed with an
                  UnboundLocalError on the return statement).
    """
    if type_ == 'mean':
        cent_tend = df[col].mean()
    elif type_ == 'mode':
        cent_tend = df[col].mode()
    elif type_ == 'median':
        cent_tend = df[col].median()
    else:
        raise ValueError(
            f"type_ must be 'mean', 'mode' or 'median', got {type_!r}")
    return cent_tend
def measure_of_dispersion(type_, df, col):
    """Calculate the measure of dispersion.

    This function accepts the measure of dispersion to be calculated, the
    data frame and the required column(s), and returns the calculated
    measure. Note that 'MAD', 'std dev' and 'cov' use the *population*
    formulas (divide by n, not n - 1).

    Keyword arguments:
    type_ -- measure to calculate: 'range', 'MAD', 'std dev', 'CV',
             'iqr' or 'cov'
    df -- the dataframe
    col -- the column(s) in the dataframe to do the calculations; a list
           with 2 elements if we want to calculate covariance

    Returns:
    disp -- the calculated measure of dispersion

    Raises:
    ValueError -- if `type_` is not one of the supported measures
                  (previously this only printed a message and crashed
                  with an UnboundLocalError on the return statement).
    """
    if type_ == 'range':
        maximum = df[col].max()
        minimum = df[col].min()
        disp = maximum - minimum
    elif type_ == 'MAD':
        # Mean absolute deviation around the mean.
        mean = df[col].mean()
        distance = abs(df[col] - mean)
        disp = distance.sum()/len(distance)
    elif type_ == 'std dev':
        # Population standard deviation (divide by n).
        mean = df[col].mean()
        distance = (df[col] - mean)**2
        disp = (distance.sum()/len(distance))**(1/2)
    elif type_ == 'CV':
        # Coefficient of variation, in percent.
        mean = df[col].mean()
        std = measure_of_dispersion('std dev', df, col)
        disp = (std/mean)*100
    elif type_ == 'iqr':
        q1 = df[col].quantile(q=0.25)
        q3 = df[col].quantile(q=0.75)
        disp = q3-q1
    elif type_ == 'cov':
        # Population covariance between the two columns in `col`.
        new = df[col]
        mean_col1 = new[col[0]].mean()
        mean_col2 = new[col[1]].mean()
        diff_col1 = (new[col[0]] - mean_col1)
        diff_col2 = (new[col[1]] - mean_col2)
        summation = (diff_col1 * diff_col2).sum()
        n = new.shape[0]
        disp = summation/n
    else:
        raise ValueError(
            f"type_ must be one of 'range', 'MAD', 'std dev', 'CV', "
            f"'iqr' or 'cov', got {type_!r}")
    return disp
def calculate_correlation(type_, df, col1, col2):
    """Calculate the defined correlation coefficient.

    This function accepts the type of correlation coefficient to be
    calculated, the data frame and the two columns, and returns the
    calculated coefficient.

    Keyword arguments:
    type_ -- 'Pearson' or 'Spearman'
    df -- the dataframe
    col1 -- first column
    col2 -- second column

    Returns:
    corr -- the calculated correlation coefficient

    Raises:
    ValueError -- if `type_` is not 'Pearson' or 'Spearman'
                  (previously this only printed a message and crashed
                  with an UnboundLocalError on the return statement).
    """
    newdf = df[[col1, col2]]
    if type_ == 'Pearson':
        # Pearson r = cov(x, y) / (std(x) * std(y)); pandas cov/std both
        # use the sample (ddof=1) convention, so the ratio is consistent.
        covariance = newdf.cov().iloc[0,1]
        std_col1 = newdf[col1].std()
        std_col2 = newdf[col2].std()
        corr = covariance/(std_col1 * std_col2)
    elif type_ == 'Spearman':
        # Spearman rho = 1 - 6 * sum(d^2) / (n^3 - n), where d is the
        # per-row difference between the two columns' ranks.
        ranks = newdf.rank(axis=0)
        ranks['d^2'] = (ranks[col1] - ranks[col2])**2
        d_square = ranks['d^2'].sum()
        n = ranks.shape[0]
        corr = 1 - ((6*d_square)/(n**3 - n))
    else:
        raise ValueError(
            f"type_ must be 'Pearson' or 'Spearman', got {type_!r}")
    return corr
def calculate_probability_discrete(data, event):
    """Calculates the probability of an event from a discrete distribution.

    This function accepts the distribution of a variable and the event,
    and returns the probability of the event.

    Keyword arguments:
    data -- series that contains the distribution of the discrete variable
    event -- the event for which the probability is to be calculated

    Returns:
    prob -- calculated probability of the event
    """
    # Count each distinct value once, then take the event's share.
    counts = data.value_counts()
    return counts[event] / counts.sum()
def event_independence_check(prob_event1, prob_event2, prob_event1_event2):
    """Checks if two events are independent.

    This function accepts the probability of 2 events and their joint
    probability, and prints whether the events are independent.
    Independence means P(A and B) == P(A) * P(B); since the inputs are
    floats, the comparison is made within a small absolute tolerance
    rather than with exact equality.

    Keyword arguments:
    prob_event1 -- probability of event1
    prob_event2 -- probability of event2
    prob_event1_event2 -- probability of event1 and event2
    """
    # Exact float equality (==) almost never holds for computed
    # probabilities (e.g. 0.1 * 0.3 != 0.03 in binary floating point),
    # so compare within a tolerance.
    if abs(prob_event1_event2 - prob_event1 * prob_event2) < 1e-09:
        print("Event 1 and Event 2 are independent.")
    else:
        print("Event 1 and Event 2 are not independent.")
def bayes_theorem(df, col1, event1, col2, event2):
    """Calculates the conditional probability using Bayes Theorem.

    This function accepts the dataframe and two columns along with two
    conditions, and computes P(event1 | event2) as
    P(A) * P(B|A) / P(B), reusing calculate_probability_discrete() for
    each of the basic probabilities.

    Keyword arguments:
    df -- the dataframe
    col1 -- the first column where the first event is recorded
    event1 -- event to define the first condition
    col2 -- the second column where the second event is recorded
    event2 -- event to define the second condition

    Returns:
    prob -- calculated probability for event1 given event2 has already
            occurred
    """
    prior = calculate_probability_discrete(df[col1], event1)
    evidence = calculate_probability_discrete(df[col2], event2)
    # Likelihood P(B|A): probability of event2 among rows where event1 held.
    likelihood = calculate_probability_discrete(df[df[col1] == event1][col2], event2)
    return (prior * likelihood) / evidence
# Load the dataset
# NOTE(review): `path` is not defined in this file — it is presumably
# injected by the hosting platform; confirm before running standalone.
df = pd.read_csv(path)

# Using the visual_summary(), visualize the distribution of the data provided.
# You can also do it at country level or based on years by passing appropriate arguments to the fuction.
print("Distribution of the countries")
visual_summary('pie',df,'country')
print("Distribution of exch_usd")
visual_summary('hist',df,'exch_usd')
# One scatter of inflation vs. year per country.
for c in list(df['country'].unique()):
    print("country: ", c)
    visual_summary('scatter', df[df['country'] == c], ['year','inflation_annual_cpi'])

# You might also want to see the central tendency of certain variables. Call the central_tendency() to do the same.
# This can also be done at country level or based on years by passing appropriate arguments to the fuction.
print("Mean of exch_usd:", central_tendency('mean',df,'exch_usd'))
print("Mode of country:", central_tendency('mode',df,'country'))
print("Median of exch_usd", central_tendency('median',df,'exch_usd'))

# Measures of dispersion gives a good insight about the distribution of the variable.
# Call the measure_of_dispersion() with desired parameters and see the summary of different variables.
print("Range of inflation_annual_cpi:", measure_of_dispersion('range', df, 'inflation_annual_cpi'))
print("MAD of gdp_weighted_default:", measure_of_dispersion('MAD', df, 'gdp_weighted_default'))
print("CV of gdp_weighted_default:", measure_of_dispersion('CV', df, 'gdp_weighted_default'))
print("Std Dev of gdp_weighted_default:", measure_of_dispersion('std dev', df, 'gdp_weighted_default'))
print("Cov between 'gdp_weighted_default', 'inflation_annual_cpi':", measure_of_dispersion('cov', df, ['gdp_weighted_default', 'inflation_annual_cpi']))

# There might exists a correlation between different variables.
# Call the calculate_correlation() to check the correlation of the variables you desire.
print("Corr between inflation_annual_cpi and systemic_crisis", calculate_correlation('Pearson',df,'inflation_annual_cpi','systemic_crisis'))

# From the given data, let's check the probability of banking_crisis for different countries.
# Call the calculate_probability_discrete() to check the desired probability.
# Also check which country has the maximum probability of facing the crisis.
# You can do it by storing the probabilities in a dictionary, with country name as the key. Or you are free to use any other technique.
prob_crisis = {}
for c in list(df.country.unique()):
    print("Country: ", c)
    prob_crisis[c] = round(calculate_probability_discrete(df[df['country'] == c]['banking_crisis'], 'crisis' ), 4)
    print("Probability of Crisis: ", prob_crisis[c])
print(prob_crisis)

# Next, let us check if banking_crisis is independent of systemic_crisis, currency_crisis & inflation_crisis.
# Calculate the probabilities of these event using calculate_probability_discrete() & joint probabilities as well.
# Then call event_independence_check() with above probabilities to check for independence.
# NOTE(review): b_crisis and s_crisis are computed but never used below.
b_crisis = calculate_probability_discrete(df['banking_crisis'], 'crisis' )
s_crisis = calculate_probability_discrete(df['systemic_crisis'], 1 )
c_crisis = calculate_probability_discrete(df['currency_crises'], 1 )
i_crisis = calculate_probability_discrete(df['inflation_crises'], 1 )
df1 = df[df['currency_crises'] == 1]
# Calculate the P(A|B)
p_i_given_c = df1[df1['inflation_crises'] == 1].shape[0]/df1.shape[0]
# Joint probability P(inflation and currency) via the chain rule.
p_c_i = p_i_given_c * c_crisis
print(p_c_i, c_crisis * i_crisis)
event_independence_check(c_crisis, i_crisis, p_c_i)

# Finally, let us calculate the probability of banking_crisis given that other crises (systemic_crisis, currency_crisis & inflation_crisis one by one) have already occured.
# This can be done by calling the bayes_theorem() you have defined with respective parameters.
prob_ = []
prob_.append(bayes_theorem(df,'banking_crisis','crisis','systemic_crisis', 1))
prob_.append(bayes_theorem(df,'banking_crisis','crisis','currency_crises', 1))
prob_.append(bayes_theorem(df,'banking_crisis','crisis','inflation_crises', 1))
print("Probability of banking_crisis given that other crisis have already occured:", prob_)
# Code ends
|
[
"pandas.read_csv",
"matplotlib.pyplot.scatter",
"matplotlib.pyplot.show"
] |
[((6758, 6775), 'pandas.read_csv', 'pd.read_csv', (['path'], {}), '(path)\n', (6769, 6775), True, 'import pandas as pd\n'), ((561, 571), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (569, 571), True, 'import matplotlib.pyplot as plt\n'), ((609, 644), 'matplotlib.pyplot.scatter', 'plt.scatter', (['df[col[0]]', 'df[col[1]]'], {}), '(df[col[0]], df[col[1]])\n', (620, 644), True, 'import matplotlib.pyplot as plt\n'), ((653, 663), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (661, 663), True, 'import matplotlib.pyplot as plt\n'), ((737, 747), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (745, 747), True, 'import matplotlib.pyplot as plt\n')]
|
#%% Import
import sys
import re
import math
import string
import time
from pathlib import Path
import numpy as np
import pandas as pd
import string
import pickle
from scipy.sparse import hstack
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics.pairwise import cosine_distances
import spacy
from gensim.models import KeyedVectors
from nltk.tokenize import word_tokenize
#%% Constants
# Project-relative paths for data files and serialized model artifacts.
PATH_PROJ = Path(__file__).parent
PATH_DATA = PATH_PROJ / 'lib' / 'data'
PATH_MODELS = PATH_PROJ / 'lib' / 'models'
sys.path.append(str(PATH_PROJ))
# timer
start_time = time.time()
print("Start loading...")
#%% Load
# list of punctuation marks
punctuations = string.punctuation
# Create spacy word2vec and list of stopwords
nlp = spacy.load('en_core_web_sm')
stop_words = spacy.lang.en.stop_words.STOP_WORDS
# load classifier
# NOTE(review): pickle.load can execute arbitrary code from the file —
# only load model artifacts from a trusted source.
with open(str(PATH_MODELS/"RFClassifier.pkl"), 'rb') as f:
    clf = pickle.load(f)
# load vectorizer (one TF-IDF vectorizer per NLP feature column)
with open(str(PATH_MODELS/'TFIDFVectorizer_lemma.pkl'), 'rb') as f:
    v_lemma = pickle.load(f)
with open(str(PATH_MODELS/'TFIDFVectorizer_keyword.pkl'), 'rb') as f:
    v_keyword = pickle.load(f)
with open(str(PATH_MODELS/'TFIDFVectorizer_noun.pkl'), 'rb') as f:
    v_noun = pickle.load(f)
with open(str(PATH_MODELS/'TFIDFVectorizer_verb.pkl'), 'rb') as f:
    v_verb = pickle.load(f)
# load intent list
with open(str(PATH_MODELS/'intent_list.pkl'), 'rb') as f:
    intent_list = pickle.load(f)
# load clustering centres
with open(str(PATH_MODELS/'dict_cluster.pkl'), 'rb') as f:
    dict_cluster = pickle.load(f)
# load idf
with open(str(PATH_MODELS/'idf.pkl'), 'rb') as f:
    idf = pickle.load(f)
# load intent2index
with open(str(PATH_MODELS/'intent2index.pkl'), 'rb') as f:
    intent2index = pickle.load(f)
# load keyword_list_lemma
with open(str(PATH_MODELS/'keyword_list_lemma.pkl'), 'rb') as f:
    keyword_list_lemma = pickle.load(f)
# Pre-trained word embeddings (large file; dominates startup time).
EMBEDDING_FILE = str(PATH_MODELS / 'GoogleNews-vectors-negative300.bin.gz')
word2vec = KeyedVectors.load_word2vec_format(EMBEDDING_FILE, binary=True)
duration= time.time() - start_time
print(f"all pickles loaded, using {duration} sec")
print(f"all pickles loaded, using {duration} sec")
#%% Pipeline
def get_intent_nlp_clustering(query):
    """Predict the top-3 intents for a raw user query.

    Pipeline: clean and lemmatize the query, extract NLP features,
    compute distances to the intent cluster centres, then rank intents
    with the pre-trained random-forest classifier.

    Args:
        query (str): raw user query text.

    Returns:
        list: top-3 intent labels (via process_output), ordered from the
        most probable to the least probable prediction.
    """
    # setup timer
    start = time.time()
    #%% pipeline
    # convert question to a single-row dataframe
    # (the redundant empty-DataFrame assignment was removed: the
    # previous value was immediately overwritten)
    df = pd.DataFrame(columns=['query'])
    df.loc[0] = [query]
    # preprocessing test query: clean, tokenize, lemmatize, lowercase
    df['query'] = df['query'].apply(clean_text)
    df['query'] = df['query'].apply(nltk_tokenize)
    df['query'] = df['query'].apply(lambda x:' '.join([token.lemma_ for token in nlp(x) if token.lemma_ not in stop_words]))
    df['query'] = df['query'].str.lower()
    # get nlp features
    df = get_nlp_features(df, keyword_list_lemma)
    # get clustering matrix
    df_cluster = get_distance_matrix_idf(df, intent_list, dict_cluster, word2vec, idf)
    # get top 3 clusters
    top_3 = get_top_3(df_cluster, intent_list)
    # get inputs for RF classifier
    top_clusters_cols = ['clusters_1', 'clusters_2', 'clusters_3']
    # get input vector for RFClassifier
    X_in = add_nlp_vec(top_3, v_lemma, v_keyword, v_noun, v_verb, top_clusters_cols)
    # get prediction proba
    probs = clf.predict_proba(X_in)
    # indices of the 3 highest-probability classes (ascending order)
    ind = np.argsort(probs, axis=1)[:,-3:]
    # save probability
    proba = probs[0][ind[0]]
    # save predictions as dataframe (columns ordered to match argsort output)
    best_3 = pd.DataFrame(ind,columns=['top3','top2','top1'])
    best_3['top1'] = clf.classes_[best_3['top1']]
    best_3['top2'] = clf.classes_[best_3['top2']]
    best_3['top3'] = clf.classes_[best_3['top3']]
    best_3['top3_prob'] = proba[0]
    best_3['top2_prob'] = proba[1]
    best_3['top1_prob'] = proba[2]
    # invert intent2index to map class indices back to intent names
    index2intent = {y:x for x,y in intent2index.items()}
    # get class name of top predictions
    best_3['top1_name'] = best_3['top1'].apply(get_target_name, index2intent=index2intent)
    best_3['top2_name'] = best_3['top2'].apply(get_target_name, index2intent=index2intent)
    best_3['top3_name'] = best_3['top3'].apply(get_target_name, index2intent=index2intent)
    # output prediction
    top1 = best_3.at[0,'top1_name']
    top2 = best_3.at[0,'top2_name']
    top3 = best_3.at[0,'top3_name']
    top1_prob = best_3.at[0,'top1_prob']
    top2_prob = best_3.at[0,'top2_prob']
    top3_prob = best_3.at[0,'top3_prob']
    top1_class = best_3.at[0,'top1']
    top2_class = best_3.at[0,'top2']
    top3_class = best_3.at[0,'top3']
    # convert to output
    df = pd.DataFrame([
        [1, top1_class, top1, top1_prob],
        [2, top2_class, top2, top2_prob],
        [3, top3_class, top3, top3_prob]
    ], columns=['pred_seq', 'intent_class', 'intent', 'pred_prob'])
    inference_time = time.time() - start
    print("inference_time", inference_time)
    output = process_output(df)
    return output
def process_output(df):
    """Convert the prediction DataFrame into the ordered label list.

    Args:
        df (pd.DataFrame): predictions with columns 'pred_seq' (1..3)
            and 'intent'.

    Returns:
        list: intent labels ordered by prediction rank (1, 2, 3), mapped
        through the Intent -> Label table in intent_index.csv.
    """
    # Load the Intent -> Label mapping that ships with the model artifacts.
    mapping = (
        pd.read_csv(str(PATH_DATA/'intent_index.csv'))
        .set_index('Intent')['Label']
        .to_dict()
    )
    # Look up the label for ranks 1..3, preserving rank order.
    return [
        mapping[df.loc[df['pred_seq'] == rank]['intent'].values[0]]
        for rank in range(1, 4)
    ]
#%% utilities
def get_nlp_features(df, keyword_list_lemma):
    """Derive lemma / keyword / noun / verb text features from 'query'.

    Args:
        df (pd.DataFrame): input frame; must contain a 'query' column of
            raw text.
        keyword_list_lemma: lemmas treated as domain keywords
            (membership-tested per token lemma).

    Returns:
        pd.DataFrame: copy of ``df`` with added space-joined string
        columns 'lemma', 'keyword', 'noun' and 'verb'.

    Notes:
        Relies on the module-level spaCy pipeline ``nlp`` and the global
        ``stop_words`` set. The keyword/noun/verb features are
        deduplicated, so token order within those columns is arbitrary.
    """
    data = df.copy()
    # Lemmatized query with stop words removed.
    data['lemma'] = data['query'].apply(
        lambda x: ' '.join(token.lemma_ for token in nlp(x)
                           if token.lemma_ not in stop_words))
    # Unique lemmas of the (already lemmatized) query that are known keywords.
    data['keyword'] = data['lemma'].apply(
        lambda x: set(token.lemma_ for token in nlp(x)
                      if token.lemma_ in keyword_list_lemma))
    # Unique noun / proper-noun lemmas, excluding stop words.
    data['noun'] = data['query'].apply(
        lambda x: set(token.lemma_ for token in nlp(x)
                      if token.pos_ in ['NOUN', 'PROPN']
                      and token.lemma_ not in stop_words))
    # Unique verb lemmas, excluding stop words.
    data['verb'] = data['query'].apply(
        lambda x: set(token.lemma_ for token in nlp(x)
                      if token.pos_ in ['VERB']
                      and token.lemma_ not in stop_words))
    # Collapse each feature set into a single space-joined string
    # (idiom fix: join accepts any iterable, no list() wrapper needed).
    for col in ('noun', 'verb', 'keyword'):
        data[col] = data[col].apply(' '.join)
    return data
def get_distance_matrix_idf(df_test, intent_list, dict_cluster, word2vec, idf):
    """ Compute the cosine distance from every query to every intent centre
    Args:
        df_test (pd.DataFrame): input test dataframe with intent and query
        intent_list (list): intents to score; one output column per intent
        dict_cluster (dict): dictionary of cluster centres
        word2vec (dict): word embeddings dictionary
        idf (dict): idf of each words
    Returns:
        result (pd.DataFrame): distance matrix for each query; the
            lowest-distance intent ideally matches the label
    """
    result = df_test.copy()
    for intent in intent_list:
        # Centre embedding reshaped to a single-row matrix for sklearn.
        centre = dict_cluster[intent].reshape(1, -1)

        def distance_to_centre(query):
            # cosine_distances(sentence embedding, intent centre) as a scalar
            emb = get_sentence_vec(query, word2vec, idf).reshape(1, -1)
            return cosine_distances(emb, centre).item()

        result[intent] = result['query'].apply(distance_to_centre)
    return result
def get_sentence_vec(sentence, word2vec, idf=None):
    """ Embed a sentence using the word2vec vectors of its known words.

    With ``idf`` given, the sentence embedding is the weighted sum
    SUM( embedding[word] x idf[word] ); otherwise it is the plain mean
    of the word vectors.

    Args:
        sentence (str): input sentence
        word2vec (dict): loaded word2vec model from Gensim
        idf (dict, optional): inverse document frequency of words; words
            missing from ``idf`` get weight 0.0
    Returns:
        emb (np.array): 300-dimensions embedding of sentence (all zeros
            when no word of the sentence is in the word2vec vocabulary)
    """
    known = [w for w in sentence.split() if w in word2vec.vocab]
    # No in-vocabulary word: fall back to a zero embedding.
    if not known:
        return np.zeros((300,), dtype='float32')
    if idf is None:
        # Unweighted average of the word vectors.
        return word2vec[known].mean(axis=0)
    # idf weight per word (0.0 for words unseen in the idf table),
    # shaped 1 x num_words so a single matmul yields the weighted sum.
    weights = np.array([idf.get(w, 0.0) for w in known]).reshape(1, -1)
    return np.matmul(weights, word2vec[known]).reshape(-1)
def clean_text(text):
    """ Normalize raw text for downstream tokenization:
    1. lowercase
    2. drop every character that is not a lowercase letter, a digit,
       or whitespace
    """
    lowered = text.lower()
    return re.sub(r'[^a-z0-9\s]', '', lowered)
def nltk_tokenize(text):
    """ Tokenize ``text`` with NLTK and rejoin the tokens into a single
    space-separated sentence.

    Requires the NLTK 'punkt' tokenizer data:
        import nltk; nltk.download('punkt')
    """
    tokens = word_tokenize(text)
    return ' '.join(tokens)
def get_top_3(data, intent_list):
    """ Replace the per-intent distance columns with the indices of the
    three closest intents.

    Args:
        data (pd.DataFrame): frame holding one distance column per intent
        intent_list (list): names of the distance columns to rank

    Returns:
        pd.DataFrame: ``data`` without the distance columns, extended with
        'clusters_1', 'clusters_2', 'clusters_3' (ascending distance).
    """
    frame = data.copy()
    cluster_cols = intent_list.copy()
    # Per row: indices of the three smallest distances, ascending.
    frame['clusters_top3'] = frame.apply(
        lambda row: np.argsort(row[cluster_cols].values)[:3].tolist(), axis=1)
    top3 = pd.DataFrame(
        frame['clusters_top3'].values.tolist(),
        columns=['clusters_1', 'clusters_2', 'clusters_3'],
    ).reset_index(drop=True)
    frame = pd.concat([frame.reset_index(drop=True), top3], axis=1)
    # The helper column and the raw distance columns are no longer needed.
    return frame.drop(columns=['clusters_top3'] + cluster_cols)
def add_nlp_vec(df, v_lemma, v_keyword, v_noun, v_verb, top_clusters_cols):
""" Transform NLP features to vector for input X using TFIDF """
x_test_lemma = v_lemma.transform(df['lemma'])
x_test_keyword = v_keyword.transform(df['keyword'])
x_test_noun = v_noun.transform(df['noun'])
x_test_verb = v_verb.transform(df['verb'])
# combine all features
x_test_combined = hstack((x_test_lemma,
x_test_keyword,
x_test_noun,
x_test_verb,
df[top_clusters_cols].values),format='csr')
x_test_combined_columns = v_lemma.get_feature_names()+\
v_keyword.get_feature_names()+\
v_noun.get_feature_names()+\
v_verb.get_feature_names()+\
top_clusters_cols
x_test_combined = pd.DataFrame(x_test_combined.toarray())
x_test_combined.columns = x_test_combined_columns
return x_test_combined
def get_target_name(index, index2intent):
    """ Map a numeric class index back to its intent name. """
    name = index2intent[index]
    return name
|
[
"pandas.DataFrame",
"numpy.zeros",
"time.time",
"numpy.argsort",
"spacy.load",
"pathlib.Path",
"pickle.load",
"scipy.sparse.hstack",
"gensim.models.KeyedVectors.load_word2vec_format",
"numpy.matmul",
"re.sub",
"pandas.concat",
"nltk.tokenize.word_tokenize"
] |
[((644, 655), 'time.time', 'time.time', ([], {}), '()\n', (653, 655), False, 'import time\n'), ((806, 834), 'spacy.load', 'spacy.load', (['"""en_core_web_sm"""'], {}), "('en_core_web_sm')\n", (816, 834), False, 'import spacy\n'), ((2050, 2112), 'gensim.models.KeyedVectors.load_word2vec_format', 'KeyedVectors.load_word2vec_format', (['EMBEDDING_FILE'], {'binary': '(True)'}), '(EMBEDDING_FILE, binary=True)\n', (2083, 2112), False, 'from gensim.models import KeyedVectors\n'), ((486, 500), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (490, 500), False, 'from pathlib import Path\n'), ((972, 986), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (983, 986), False, 'import pickle\n'), ((1088, 1102), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1099, 1102), False, 'import pickle\n'), ((1189, 1203), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1200, 1203), False, 'import pickle\n'), ((1284, 1298), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1295, 1298), False, 'import pickle\n'), ((1379, 1393), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1390, 1393), False, 'import pickle\n'), ((1490, 1504), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1501, 1504), False, 'import pickle\n'), ((1610, 1624), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1621, 1624), False, 'import pickle\n'), ((1697, 1711), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1708, 1711), False, 'import pickle\n'), ((1811, 1825), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1822, 1825), False, 'import pickle\n'), ((1943, 1957), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1954, 1957), False, 'import pickle\n'), ((2124, 2135), 'time.time', 'time.time', ([], {}), '()\n', (2133, 2135), False, 'import time\n'), ((2472, 2483), 'time.time', 'time.time', ([], {}), '()\n', (2481, 2483), False, 'import time\n'), ((2547, 2561), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (2559, 2561), True, 'import pandas as pd\n'), ((2571, 
2602), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': "['query']"}), "(columns=['query'])\n", (2583, 2602), True, 'import pandas as pd\n'), ((3696, 3747), 'pandas.DataFrame', 'pd.DataFrame', (['ind'], {'columns': "['top3', 'top2', 'top1']"}), "(ind, columns=['top3', 'top2', 'top1'])\n", (3708, 3747), True, 'import pandas as pd\n'), ((5135, 5320), 'pandas.DataFrame', 'pd.DataFrame', (['[[1, top1_class, top1, top1_prob], [2, top2_class, top2, top2_prob], [3,\n top3_class, top3, top3_prob]]'], {'columns': "['pred_seq', 'intent_class', 'intent', 'pred_prob']"}), "([[1, top1_class, top1, top1_prob], [2, top2_class, top2,\n top2_prob], [3, top3_class, top3, top3_prob]], columns=['pred_seq',\n 'intent_class', 'intent', 'pred_prob'])\n", (5147, 5320), True, 'import pandas as pd\n'), ((9397, 9429), 're.sub', 're.sub', (['"""[^a-z0-9\\\\s]"""', '""""""', 'text'], {}), "('[^a-z0-9\\\\s]', '', text)\n", (9403, 9429), False, 'import re\n'), ((10026, 10070), 'pandas.concat', 'pd.concat', (['[data, top_clusters_cols]'], {'axis': '(1)'}), '([data, top_clusters_cols], axis=1)\n', (10035, 10070), True, 'import pandas as pd\n'), ((10625, 10738), 'scipy.sparse.hstack', 'hstack', (['(x_test_lemma, x_test_keyword, x_test_noun, x_test_verb, df[\n top_clusters_cols].values)'], {'format': '"""csr"""'}), "((x_test_lemma, x_test_keyword, x_test_noun, x_test_verb, df[\n top_clusters_cols].values), format='csr')\n", (10631, 10738), False, 'from scipy.sparse import hstack\n'), ((3560, 3585), 'numpy.argsort', 'np.argsort', (['probs'], {'axis': '(1)'}), '(probs, axis=1)\n', (3570, 3585), True, 'import numpy as np\n'), ((5381, 5392), 'time.time', 'time.time', ([], {}), '()\n', (5390, 5392), False, 'import time\n'), ((8680, 8713), 'numpy.zeros', 'np.zeros', (['(300,)'], {'dtype': '"""float32"""'}), "((300,), dtype='float32')\n", (8688, 8713), True, 'import numpy as np\n'), ((9601, 9620), 'nltk.tokenize.word_tokenize', 'word_tokenize', (['text'], {}), '(text)\n', (9614, 9620), False, 'from 
nltk.tokenize import word_tokenize\n'), ((9170, 9208), 'numpy.matmul', 'np.matmul', (['idf_series', 'word2vec[words]'], {}), '(idf_series, word2vec[words])\n', (9179, 9208), True, 'import numpy as np\n'), ((9768, 9802), 'numpy.argsort', 'np.argsort', (['x[cluster_cols].values'], {}), '(x[cluster_cols].values)\n', (9778, 9802), True, 'import numpy as np\n')]
|
from django.db import models
from django.urls import reverse, NoReverseMatch
from django_nlf.conf import nlf_settings
from django_nlf.functions import FunctionRegistry
from django_nlf.types import FieldFilterSchema, ModelFilterSchema
class NLFModelSchemaBuilder:
    """Builds (and caches) a :class:`ModelFilterSchema` for a Django model.

    The schema lists every filterable field — including fields reached
    through relations — plus the natural-language filter functions
    registered for the model. Schemas are cached per model label in a
    class-level dict shared by all builder instances.
    """

    # Cache of model label -> ModelFilterSchema, shared across instances.
    __cache = {}
    # Configuration pulled once from django_nlf settings.
    field_shortcuts = nlf_settings.FIELD_SHORTCUTS
    path_sep = nlf_settings.PATH_SEPARATOR
    empty_val = nlf_settings.EMPTY_VALUE
    autocomplete_views = nlf_settings.SEARCH_URL_MAP
    autocomplete_param = nlf_settings.SEARCH_PARAM
    # Relations to these models are never followed when collecting fields.
    ignored_model_labels = (
        "auth.Permission",
        "contenttypes.ContentType",
    )
    def get_schema_for(self, model: "django.db.models.Model"):
        """Returns a (cached) filter schema for the given model.

        :param "django.db.models.Model" model: model to build the schema for.
        :return: the filter schema describing the model's fields/functions.
        :rtype: ModelFilterSchema
        """
        label = model._meta.label  # pylint: disable=protected-access
        if label not in self.__cache:
            # Store on the class explicitly so the cache is shared, not
            # shadowed by an instance attribute.
            NLFModelSchemaBuilder.__cache[label] = self._build_schema_for(model)
        return self.__cache[label]
    def _build_schema_for(self, model: "django.db.models.Model") -> ModelFilterSchema:
        """Builds a fresh schema: collects fields (including related ones)
        and the registered filter functions for the model."""
        fields = self._get_fields(model._meta)  # pylint: disable=protected-access
        functions = FunctionRegistry.get_functions_for(model)
        return ModelFilterSchema(
            fields=[self._build_field_schema_for(field, path) for path, field in fields],
            functions={key.name: value for key, value in functions.items()},
            empty_val=self.empty_val,
        )
    def _get_fields(
        self, opts: "django.db.models.options.Options", explored_rels=None, path: str = ""
    ):
        """Recursively collects (path, field) pairs for a model's options.

        ``path`` accumulates the relation traversal (joined with
        ``path_sep``); ``explored_rels`` carries the target fields already
        visited so that reverse relations do not cause infinite recursion.
        """
        fields = [
            (f"{path}{self.path_sep}{field.name}" if path else field.name, field)
            for field in opts.get_fields()
        ]
        related_fields = []
        explored_rels = explored_rels or tuple()
        for new_path, field in fields:
            if not field.is_relation:
                continue
            related_opts = field.related_model._meta  # pylint: disable=protected-access
            if (
                field.target_field not in explored_rels
                and related_opts.label not in self.ignored_model_labels
            ):
                explored_rels = explored_rels + (field.target_field,)
                # if we would explore field directions, we would immediately run into an infinite
                # circle when we encounter a relationship with a reverse relationship, which is the
                # default behaviour. Therefore we skip those paths that we already examined
                related_fields.extend(self._get_fields(related_opts, explored_rels, new_path))
        return fields + related_fields
    def _build_field_schema_for(self, field, path: str) -> FieldFilterSchema:
        """Builds the per-field schema: type, help text, nullability, and —
        for relations — the autocomplete search URL, or — for choice
        fields — the list of human-readable choices."""
        schema = FieldFilterSchema(
            path=path,
            type=self._get_field_schema_type(field),
            help=str(getattr(field, "help_text", "")),  # convert to str to evaluate lazy text
            nullable=field.null,
        )
        if field.is_relation:
            related_model_label = (
                field.related_model._meta.label  # pylint: disable=protected-access
            )
            # Custom search view from settings, else the DRF-style
            # "<modelname>-list" route convention.
            url_name = self.autocomplete_views.get(
                related_model_label, f"{field.related_model.__name__.lower()}-list"
            )
            try:
                schema.search_url = reverse(url_name)
                schema.search_param = self.autocomplete_param
                schema.target_field = field.target_field.name
            except NoReverseMatch:
                # No matching route: the field simply has no autocomplete.
                pass
        elif field.flatchoices:  # flatchoices does not exist on relations
            schema.choices = [display for _, display in field.flatchoices]
        return schema
    def _get_field_schema_type(self, field) -> str:
        """Maps a Django field to a coarse schema type name; relations are
        typed after the field they ultimately point at."""
        if field.is_relation:
            return self._get_field_schema_type(field.target_field)
        if isinstance(field, models.BooleanField):
            return "boolean"
        if isinstance(field, models.IntegerField):
            return "integer"
        if isinstance(field, (models.DecimalField, models.FloatField)):
            return "number"
        return "string"
|
[
"django.urls.reverse",
"django_nlf.functions.FunctionRegistry.get_functions_for"
] |
[((1299, 1340), 'django_nlf.functions.FunctionRegistry.get_functions_for', 'FunctionRegistry.get_functions_for', (['model'], {}), '(model)\n', (1333, 1340), False, 'from django_nlf.functions import FunctionRegistry\n'), ((3537, 3554), 'django.urls.reverse', 'reverse', (['url_name'], {}), '(url_name)\n', (3544, 3554), False, 'from django.urls import reverse, NoReverseMatch\n')]
|
# -*- coding: utf-8 -*-
# =============================================================================
# 2mmn40 week 3 report
# version 2017-12-03 afternoon
# BA
#
#
# for BA: Make sure to run in directory
# C:\Users\20165263\Dropbox\tue\2mmn40\src
#
# =============================================================================
import numpy as np
import matplotlib.pyplot as plt
# Objective: simulate a diatomic bond. So we're just integrating f=ma over t.
# To integrate f=ma, we need f, m, v0 and q0.
# f is obtained from potentials. f = -grad u
# m, v0, x0 are all given.
# Data structure required: molecule geometry. So, a list of lists of molecules.
# Each molecule needs to have a mass, an x0, a v0, and explicitly
### part 1: diatomic molecule
# molecule parameters: one harmonic bond between two atoms
bondList = [[1], [0]]  # adjacency list: atom 0 bonds atom 1 and vice versa
kbond = 1.0            # bond force constant
rbond = 1.0            # equilibrium bond length
m = np.array([1.0, 1.0])  # per-atom masses
# simulation parameters: choice of integrator
#   0 - forward euler
#   1 - verlet
#   2 - velocity verlet
integrator = 0
maxsteps = 1000  # intended number of integration steps (one step taken below)
# take a small enough timestep (a fraction of the bond oscillation timescale)
dt = min(np.sqrt(kbond / m)) / 100
# initial positions (rows = atoms, columns = x, y, z) and velocities
q0 = np.array([[0.0, 0.1, -0.1],
               [1.01, 0.9, 0.95]])
v0 = np.array([[0.0, 0.0, 0.0],
               [0.0, 0.0, 0.0]])
# initialize system state
q = q0.copy()
v = v0.copy()
# pairwise separations dr[i, j] = q[j] - q[i] and distances r[i, j]
dr = q - q[:, np.newaxis]
r = np.linalg.norm(dr, axis=2)
# find bond forces
# Dividing by zero along the diagonal of r raises a RuntimeWarning; those
# entries become nan and nan_to_num maps them to zero, which is physically
# correct: a particle cannot exert a force on itself.
fbond = np.nan_to_num(-kbond * dr * (rbond - r[:, :, np.newaxis]) / r[:, :, np.newaxis])
ftotal = np.sum(fbond, axis=1)
# integrate a single step:
if integrator == 0:
    # forward Euler: position update includes the 2nd-order force term
    q += dt * v + dt**2 / (2 * m[:, np.newaxis]) * ftotal
    v += dt / m[:, np.newaxis] * ftotal
elif integrator == 1:
    # Verlet integration step (not implemented yet)
    q += 0
elif integrator == 2:
    # Velocity Verlet integration step (not implemented yet)
    q += 0
else:
    # BUG FIX: the original code did `raise ('Unkown integrator selected')`,
    # which raises a TypeError in Python 3 (strings are not exceptions).
    raise ValueError('Unknown integrator selected')
|
[
"numpy.sum",
"numpy.nan_to_num",
"numpy.linalg.norm",
"numpy.array",
"numpy.sqrt"
] |
[((836, 856), 'numpy.array', 'np.array', (['[1.0, 1.0]'], {}), '([1.0, 1.0])\n', (844, 856), True, 'import numpy as np\n'), ((1078, 1125), 'numpy.array', 'np.array', (['[[0.0, 0.1, -0.1], [1.01, 0.9, 0.95]]'], {}), '([[0.0, 0.1, -0.1], [1.01, 0.9, 0.95]])\n', (1086, 1125), True, 'import numpy as np\n'), ((1147, 1191), 'numpy.array', 'np.array', (['[[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]]'], {}), '([[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]])\n', (1155, 1191), True, 'import numpy as np\n'), ((1318, 1344), 'numpy.linalg.norm', 'np.linalg.norm', (['dr'], {'axis': '(2)'}), '(dr, axis=2)\n', (1332, 1344), True, 'import numpy as np\n'), ((1615, 1700), 'numpy.nan_to_num', 'np.nan_to_num', (['(-kbond * dr * (rbond - r[:, :, np.newaxis]) / r[:, :, np.newaxis])'], {}), '(-kbond * dr * (rbond - r[:, :, np.newaxis]) / r[:, :, np.newaxis]\n )\n', (1628, 1700), True, 'import numpy as np\n'), ((1702, 1723), 'numpy.sum', 'np.sum', (['fbond'], {'axis': '(1)'}), '(fbond, axis=1)\n', (1708, 1723), True, 'import numpy as np\n'), ((1031, 1049), 'numpy.sqrt', 'np.sqrt', (['(kbond / m)'], {}), '(kbond / m)\n', (1038, 1049), True, 'import numpy as np\n')]
|
from copy import deepcopy
from ldaptor._encoder import get_strings
class LDAPAttributeSet(set):
    def __init__(self, key, *a, **kw):
        """
        Holds every value of a single attribute of an LDAP entry (for
        example all values of "cn", "objectClass" or "uid").

        The attribute name is available as the ``.key`` member variable;
        the values are the set contents (cast to ``list`` to enumerate
        them).

        @param key: the key of the attribute, eg "uid".
        @type key: str
        @param a: set of values for this attribute, eg. "jsmith"
        """
        self.key = key
        super().__init__(*a, **kw)

    def __repr__(self):
        rendered = ', '.join(repr(value) for value in sorted(self))
        return '%s(%r, [%s])' % (type(self).__name__, self.key, rendered)

    def __eq__(self, other):
        """
        Compare for equality. Against another LDAPAttributeSet both the
        key and the values must match; against any other iterable only
        the values are compared (the attributeType is ignored).
        """
        if not isinstance(other, LDAPAttributeSet):
            return sorted(self) == sorted(other)
        return self.key == other.key and super().__eq__(other)

    def __ne__(self, other):
        return not self == other

    def add(self, key):
        """
        Add ``key`` unless it is already present as either its byte or
        its unicode string form.
        """
        if not any(variant in self for variant in get_strings(key)):
            set.add(self, key)

    def remove(self, key):
        """
        Remove ``key``, whichever of its byte or unicode string forms is
        stored; raise KeyError when neither form is present.
        """
        for variant in get_strings(key):
            if variant in self:
                set.remove(self, variant)
                return
        raise KeyError(key)

    def copy(self):
        duplicate = type(self)(self.key)
        duplicate.update(self)
        return duplicate

    __copy__ = copy

    def __deepcopy__(self, memo):
        duplicate = type(self)(self.key)
        memo[id(self)] = duplicate
        duplicate.update(deepcopy(set(self), memo))
        return duplicate
|
[
"ldaptor._encoder.get_strings"
] |
[((1776, 1792), 'ldaptor._encoder.get_strings', 'get_strings', (['key'], {}), '(key)\n', (1787, 1792), False, 'from ldaptor._encoder import get_strings\n'), ((2042, 2058), 'ldaptor._encoder.get_strings', 'get_strings', (['key'], {}), '(key)\n', (2053, 2058), False, 'from ldaptor._encoder import get_strings\n')]
|
"""
Heuristic agents for various OpenAI Gym environments. The agent policies, in
this case, are deterministic functions, and often handcrafted or found by
non-gradient optimization algorithms, such as evolutionary strategies.
Many of the heuristic policies were adapted from the following source:
```
@book{xiao2022,
title = {Reinforcement Learning: Theory and {Python} Implementation},
author = {<NAME>}
publisher = {Springer Nature},
}
```
"""
from typing import Dict
import numpy as np
import torch
from ilpyt.agents.base_agent import BaseAgent
class LunarLanderContinuousHeuristicAgent(BaseAgent):
    """
    Heuristic policy for the OpenAI Gym LunarLanderContinuous-v2 environment.

    Adapted from the OpenAI Gym repository:
    https://github.com/openai/gym/blob/master/gym/envs/box2d/lunar_lander.py
    """

    def initialize(self) -> None:
        """
        Pass. Heuristic agents do not require any initialization.
        """
        pass

    def step(self, state: torch.Tensor) -> np.ndarray:
        """
        Find best action for the given state.

        Parameters
        ----------
        state: torch.Tensor
            state tensor, of size (batch_size, 8) with attributes
            [horizontal coordinate, vertical coordinate, horizontal speed,
            vertical speed, angle, angular speed, first leg contact,
            second leg contact]

        Returns
        -------
        np.ndarray:
            selected actions, of size (batch_size, 2)
        """
        batch_size = len(state)
        # Target angle tilts the lander back toward the pad at x = 0; the
        # horizontal-speed term damps the approach.
        angle_targ = (
            state[:, 0] * 0.5 + state[:, 2] * 1.0
        )  # angle point towards center
        angle_targ = torch.clip(angle_targ, -0.4, 0.4)
        hover_targ = 0.55 * torch.abs(state[:, 0])  # target y proportional to
        # horizontal offset
        # PD-style control errors: proportional term on the target gap,
        # derivative term on the current angular / vertical speed.
        angle = (angle_targ - state[:, 4]) * 0.5 - (state[:, 5]) * 1.0
        hover = (hover_targ - state[:, 1]) * 0.5 - (state[:, 3]) * 0.5
        for i in range(batch_size):
            if state[i, 6] or state[i, 7]:  # legs have contact
                angle[i] = 0
                hover[i] = -(state[i, 3]) * 0.5  # override to reduce fall speed
        # Scale the control errors to engine commands and clamp into the
        # valid continuous action range [-1, 1].
        a = torch.stack([hover * 20 - 1, -angle * 20], dim=-1)
        a = torch.clamp(a, -1, +1)
        return a.cpu().numpy()

    def update(self, batch: Dict[str, torch.Tensor]) -> Dict[str, float]:
        """
        Pass. Heuristic agents do not update their agent policies.
        """
        return {}
class LunarLanderHeuristicAgent(BaseAgent):
    """
    Heuristic policy for the OpenAI Gym LunarLander-v2 environment.

    Adapted from the OpenAI Gym repository:
    https://github.com/openai/gym/blob/master/gym/envs/box2d/lunar_lander.py
    """

    def initialize(self):
        """
        Pass. Heuristic agents do not require any initialization.
        """
        pass

    def step(self, state: torch.Tensor):
        """
        Find best action for the given state.

        Parameters
        ----------
        state (torch.Tensor):
            state tensor, of size (batch_size, 8) with attributes
            [horizontal coordinate, vertical coordinate, horizontal speed,
            vertical speed, angle, angular speed, first leg contact,
            second leg contact]

        Returns
        -------
        np.ndarray:
            selected actions, of size (batch_size, action_shape)
        """
        batch_size = len(state)
        # Target angle tilts the lander back toward the pad at x = 0; the
        # horizontal-speed term damps the approach.
        angle_targ = (
            state[:, 0] * 0.5 + state[:, 2] * 1.0
        )  # angle point towards center
        angle_targ = torch.clip(angle_targ, -0.4, 0.4)
        hover_targ = 0.55 * torch.abs(state[:, 0])  # target y proportional to
        # horizontal offset
        # PD-style control errors: proportional term on the target gap,
        # derivative term on the current angular / vertical speed.
        angle = (angle_targ - state[:, 4]) * 0.5 - (state[:, 5]) * 1.0
        hover = (hover_targ - state[:, 1]) * 0.5 - (state[:, 3]) * 0.5
        for i in range(batch_size):
            if state[i, 6] or state[i, 7]:  # legs have contact
                angle[i] = 0
                hover[i] = -(state[i, 3]) * 0.5  # override to reduce fall speed
        # Discretize: default action 0 (no-op); action 2 when the hover
        # demand dominates, otherwise 3 / 1 depending on the sign of the
        # angle correction (per the LunarLander-v2 discrete action encoding).
        a = np.zeros(batch_size, dtype=np.uint8)
        for i in range(batch_size):
            if hover[i] > torch.abs(angle[i]) and hover[i] > 0.05:
                a[i] = 2
            elif angle[i] < -0.05:
                a[i] = 3
            elif angle[i] > +0.05:
                a[i] = 1
        return a

    def update(self, batch: Dict[str, torch.Tensor]) -> Dict[str, float]:
        """
        Pass. Heuristic agents do not update their agent policies.
        """
        return {}
class CartPoleHeuristicAgent(BaseAgent):
    """
    Heuristic agent for the OpenAI Gym CartPole-v0 environment.

    Adapted from the book 'Reinforcement Learning: Theory and Python Implementation':
    https://github.com/ZhiqingXiao/OpenAIGymSolution
    """

    def initialize(self):
        """
        Pass. Heuristic agents do not require any initialization.
        """
        pass

    def step(self, state: torch.Tensor) -> np.ndarray:
        """
        Find best action for the given state. Policy: push the cart to the
        right whenever 3*angle + angle_velocity > 0, otherwise to the left.

        Parameters
        ----------
        state: torch.Tensor
            state tensor of size (batch_size, 4) with attributes
            [cart position, cart velocity, pole angle, pole velocity at tip]

        Returns
        -------
        np.ndarray:
            action, of shape (batch_size, ) where 0 = push cart to left,
            1 = push cart to right
        """
        pole_angle = state[:, 2]
        pole_velocity = state[:, 3]
        push_right = (3 * pole_angle + pole_velocity) > 0
        return push_right.cpu().long().numpy()

    def update(self, batch: Dict[str, torch.Tensor]) -> Dict[str, float]:
        """
        Pass. Heuristic agents do not update their agent policies.
        """
        return {}
class MountainCarHeuristicAgent(BaseAgent):
    """
    Fixed deterministic policy for the OpenAI gym MountainCar-v0 environment.

    Adapted from the book 'Reinforcement Learning: Theory and Python Implementation':
    https://github.com/ZhiqingXiao/OpenAIGymSolution
    """

    def initialize(self):
        """
        Pass. Heuristic agents do not require any initialization.
        """
        pass

    def step(self, state: torch.Tensor) -> np.ndarray:
        """
        Find best action for the given state. Push right when the velocity
        lies inside a position-dependent band; otherwise push left.

        Parameters
        ----------
        state: torch.Tensor
            state tensor of size (batch_size, 2) with attributes
            [position, velocity]

        Returns
        -------
        np.ndarray:
            discrete action of shape (batch_size, ) where
            0 = push left, 1 = no push, 2 = push right
        """
        actions = []
        for position, velocity in zip(state[:, 0], state[:, 1]):
            # Position-dependent velocity band from the reference policy.
            lower = min(
                -0.09 * (position + 0.25) ** 2 + 0.03,
                0.3 * (position + 0.9) ** 4 - 0.008,
            )
            upper = -0.07 * (position + 0.38) ** 2 + 0.07
            actions.append(2 if lower < velocity < upper else 0)
        return actions

    def update(self, batch: Dict[str, torch.Tensor]) -> Dict[str, float]:
        """
        Pass. Heuristic agents do not update their agent policies.
        """
        return {}
class MountainCarContinuousHeuristicAgent(BaseAgent):
    """
    Heuristic agent for the OpenAI Gym MountainCarContinuous-v0 environment.

    Adapted from the book 'Reinforcement Learning: Theory and Python Implementation':
    https://github.com/ZhiqingXiao/OpenAIGymSolution
    """

    def initialize(self):
        """
        Pass. Heuristic agents do not require any initialization.
        """
        pass

    def step(self, state: torch.Tensor) -> np.ndarray:
        """
        Find best action for the given state. Apply full force to the
        right when a position/velocity condition holds; otherwise full
        force to the left.

        Parameters
        ----------
        state: torch.Tensor
            state tensor of size (batch_size, 2) with attributes
            [position, velocity]

        Returns
        -------
        np.ndarray:
            continuous action of shape (batch_size, ) - pushing the car to
            the left or to the right
        """
        actions = []
        for position, velocity in zip(state[:, 0], state[:, 1]):
            push_right = (
                position > -4 * velocity or position < 13 * velocity - 0.6
            )
            # One-element list per sample: the force along the single axis.
            actions.append([1.0 if push_right else -1.0])
        return actions

    def update(self, batch: Dict[str, torch.Tensor]) -> Dict[str, float]:
        """
        Pass. Heuristic agents do not update their agent policies.
        """
        return {}
|
[
"torch.stack",
"numpy.zeros",
"torch.clip",
"torch.clamp",
"torch.abs"
] |
[((1686, 1719), 'torch.clip', 'torch.clip', (['angle_targ', '(-0.4)', '(0.4)'], {}), '(angle_targ, -0.4, 0.4)\n', (1696, 1719), False, 'import torch\n'), ((2194, 2244), 'torch.stack', 'torch.stack', (['[hover * 20 - 1, -angle * 20]'], {'dim': '(-1)'}), '([hover * 20 - 1, -angle * 20], dim=-1)\n', (2205, 2244), False, 'import torch\n'), ((2257, 2279), 'torch.clamp', 'torch.clamp', (['a', '(-1)', '(+1)'], {}), '(a, -1, +1)\n', (2268, 2279), False, 'import torch\n'), ((3630, 3663), 'torch.clip', 'torch.clip', (['angle_targ', '(-0.4)', '(0.4)'], {}), '(angle_targ, -0.4, 0.4)\n', (3640, 3663), False, 'import torch\n'), ((4138, 4174), 'numpy.zeros', 'np.zeros', (['batch_size'], {'dtype': 'np.uint8'}), '(batch_size, dtype=np.uint8)\n', (4146, 4174), True, 'import numpy as np\n'), ((1748, 1770), 'torch.abs', 'torch.abs', (['state[:, 0]'], {}), '(state[:, 0])\n', (1757, 1770), False, 'import torch\n'), ((3692, 3714), 'torch.abs', 'torch.abs', (['state[:, 0]'], {}), '(state[:, 0])\n', (3701, 3714), False, 'import torch\n'), ((4237, 4256), 'torch.abs', 'torch.abs', (['angle[i]'], {}), '(angle[i])\n', (4246, 4256), False, 'import torch\n')]
|
from django.template.exceptions import TemplateDoesNotExist
from django.template.base import Template, Context
from django.template.engine import Engine
from django.template.loader import render_to_string
from django.core.mail import EmailMultiAlternatives
from django.conf import settings
import re
from inlinestyler.utils import inline_css
import commonmark
import commonmark_extensions.plaintext
# cssutils (used indirectly by inlinestyler) is extremely chatty; raise its
# log level so only genuine errors reach the output.
import cssutils
import logging
cssutils.log.setLevel(logging.ERROR)
def send_mail(template_prefix, from_email, recipient_list, template_context, fail_silently=False, **kwargs):
    """Render a templated HTML email and send it.

    The subject comes from ``<template_prefix>_subject.txt``. The body
    comes either from a single Markdown template (``<template_prefix>.md``)
    or, when that template does not exist, from the pair
    ``<template_prefix>.txt`` / ``<template_prefix>.html``.

    Unrecognized keyword arguments are passed on to Django's
    EmailMultiAlternatives constructor.
    """
    # Merge in site-wide defaults from settings.DEFAULT_TEMPLATE_CONTEXT.
    context = build_template_context(template_context)

    # Render the subject and collapse it onto a single line: squeeze
    # whitespace around line breaks, then strip the ends.
    subject = render_to_string(template_prefix + '_subject.txt', context)
    subject = re.sub(r"\s*[\n\r]+\s*", " ", subject).strip()
    # Expose the subject to the body templates (used in the base HTML
    # template's title tag).
    context['subject'] = subject

    # Prefer a Markdown template when one exists. We need its raw source
    # (not a compiled template), so use the engine's loaders to locate it
    # and read .source.
    try:
        md_source = Engine.get_default().get_template(template_prefix + '.md').source
    except TemplateDoesNotExist:
        md_source = None

    if md_source:
        # Both body parts come from the one Markdown template.
        text_body, html_body = render_from_markdown(md_source, context)
    else:
        # Fall back to separate plain-text and HTML templates.
        text_body = render_to_string(template_prefix + '.txt', context)
        html_body = render_to_string(template_prefix + '.html', context)

    # Some mail clients do not process <style> tags, so inline all CSS.
    html_body = inline_css(html_body)

    # Build the multipart message: text part as the base, HTML alternative.
    message = EmailMultiAlternatives(
        subject=subject,
        body=text_body,
        from_email=from_email,
        to=recipient_list,
        **kwargs
    )
    message.attach_alternative(html_body, "text/html")

    # send!
    message.send(fail_silently=fail_silently)
def build_template_context(user_variables):
    """Merge the optional site-wide default template context with the
    caller-provided variables; user values override the defaults."""
    context = {}
    # settings.DEFAULT_TEMPLATE_CONTEXT is optional; its absence is fine.
    try:
        context.update(settings.DEFAULT_TEMPLATE_CONTEXT)
    except AttributeError:
        pass
    if user_variables:
        context.update(user_variables)
    return context
def render_from_markdown(template, template_context):
    """Render a Markdown email template into ``(text_body, html_body)``.

    The Markdown inside each ``{% block %}...{% endblock %}`` is converted
    first (to HTML for the HTML part, to plain text for the text part),
    and the result is then rendered as a regular Django template.
    """
    # Render the Markdown first. Markdown has different text escaping rules
    # (backslash-escaping of certain symbols only), and we can't add that
    # logic to Django's template auto-escaping. So we render the Markdown
    # first, which gives HTML. That HTML can be treated as a regular Django
    # template (with regular HTML autoescaping).
    #
    # (If we did it in the other order, we'd have to disable Django's
    # HTML autoescaping and then have some other method to prevent the
    # use of variables in the template from generating Markdown tags.)
    #
    # Do this within each {% block %}...{% endblock %} tag, since we
    # don't want to create HTML <p>s around content that doesn't occur
    # within a block. Assumes there are no nested blocks.
    #
    # We turn off CommonMark's safe mode, however, since we trust the
    # template. (Safe mode prohibits HTML inlines and also prevents some
    # unsafe URLs, but that's up to the caller.)

    # CommonMark replaces non-URL-safe characters in link URLs with
    # their %-escaped code. Monkey-patch the CommonMark library to
    # not do that for { and } so that template variables within links
    # remain a template variable and don't turn into %7B%7Bvarname%7D%7D.
    # Do this prior to parsing.
    # NOTE(review): this patches commonmark process-wide, not just for
    # this call.
    from commonmark import common, inlines
    def fixed_normalize_uri(uri):
        return common.normalize_uri(uri).replace("%7B", "{").replace("%7D", "}")
    inlines.normalize_uri = fixed_normalize_uri
    # Build the HTML and text templates.
    def run_renderer(renderer, ext, wrap=lambda x : x):
        # Runs one CommonMark renderer over the template: fixes the
        # {% extends %} file extension, then converts the Markdown inside
        # each block (optionally wrapping the converted text via `wrap`).
        r = template
        # fix the {% extends "..." %} file extension.
        r = re.sub(
            r"^(\s*\{%\s*extends\s+\"[^\"]*)(\"\s*%\})",
            lambda m : m.group(1) + "." + ext + m.group(2),
            r)
        # Run CommonMark on each block separately.
        r = re.sub(
            r"(\{%\s*block [^%]+\s*%\})\s*([\s\S]*?)\s*(\{%\s*endblock\s*%\})",
            lambda m : m.group(1)
              + wrap(renderer.render(commonmark.Parser().parse(m.group(2))))
              + m.group(3),
            r
            )
        return r
    # Render to HTML, put the extends tag back with an .html extension.
    html_body = run_renderer(commonmark.HtmlRenderer({ "safe": False }), 'html')
    # For the text portion, we'll render using a special renderer, and we'll
    # wrap each block in the Django template directive to turn off auto-escaping.
    text_body = run_renderer(commonmark_extensions.plaintext.PlainTextRenderer(), 'txt',
        wrap = lambda block : "{% autoescape off %}" + block + "{% endautoescape %}")
    # Now render as Django templates.
    html_body = Template(html_body).render(Context(template_context)).strip()
    text_body = Template(text_body).render(Context(template_context)).strip()
    return text_body, html_body
|
[
"django.template.engine.Engine.get_default",
"commonmark.common.normalize_uri",
"commonmark.HtmlRenderer",
"cssutils.log.setLevel",
"django.template.loader.render_to_string",
"commonmark.Parser",
"inlinestyler.utils.inline_css",
"django.core.mail.EmailMultiAlternatives",
"django.template.base.Context",
"re.sub",
"django.template.base.Template"
] |
[((488, 524), 'cssutils.log.setLevel', 'cssutils.log.setLevel', (['logging.ERROR'], {}), '(logging.ERROR)\n', (509, 524), False, 'import cssutils\n'), ((948, 1016), 'django.template.loader.render_to_string', 'render_to_string', (["(template_prefix + '_subject.txt')", 'template_context'], {}), "(template_prefix + '_subject.txt', template_context)\n", (964, 1016), False, 'from django.template.loader import render_to_string\n'), ((2159, 2180), 'inlinestyler.utils.inline_css', 'inline_css', (['html_body'], {}), '(html_body)\n', (2169, 2180), False, 'from inlinestyler.utils import inline_css\n'), ((2221, 2333), 'django.core.mail.EmailMultiAlternatives', 'EmailMultiAlternatives', ([], {'subject': 'subject', 'body': 'text_body', 'from_email': 'from_email', 'to': 'recipient_list'}), '(subject=subject, body=text_body, from_email=\n from_email, to=recipient_list, **kwargs)\n', (2243, 2333), False, 'from django.core.mail import EmailMultiAlternatives\n'), ((1919, 1979), 'django.template.loader.render_to_string', 'render_to_string', (["(template_prefix + '.txt')", 'template_context'], {}), "(template_prefix + '.txt', template_context)\n", (1935, 1979), False, 'from django.template.loader import render_to_string\n'), ((2000, 2061), 'django.template.loader.render_to_string', 'render_to_string', (["(template_prefix + '.html')", 'template_context'], {}), "(template_prefix + '.html', template_context)\n", (2016, 2061), False, 'from django.template.loader import render_to_string\n'), ((5240, 5280), 'commonmark.HtmlRenderer', 'commonmark.HtmlRenderer', (["{'safe': False}"], {}), "({'safe': False})\n", (5263, 5280), False, 'import commonmark\n'), ((1031, 1072), 're.sub', 're.sub', (['"""\\\\s*[\\\\n\\\\r]+\\\\s*"""', '""" """', 'subject'], {}), "('\\\\s*[\\\\n\\\\r]+\\\\s*', ' ', subject)\n", (1037, 1072), False, 'import re\n'), ((5709, 5734), 'django.template.base.Context', 'Context', (['template_context'], {}), '(template_context)\n', (5716, 5734), False, 'from django.template.base 
import Template, Context\n'), ((5787, 5812), 'django.template.base.Context', 'Context', (['template_context'], {}), '(template_context)\n', (5794, 5812), False, 'from django.template.base import Template, Context\n'), ((1536, 1556), 'django.template.engine.Engine.get_default', 'Engine.get_default', ([], {}), '()\n', (1554, 1556), False, 'from django.template.engine import Engine\n'), ((5682, 5701), 'django.template.base.Template', 'Template', (['html_body'], {}), '(html_body)\n', (5690, 5701), False, 'from django.template.base import Template, Context\n'), ((5760, 5779), 'django.template.base.Template', 'Template', (['text_body'], {}), '(text_body)\n', (5768, 5779), False, 'from django.template.base import Template, Context\n'), ((4346, 4371), 'commonmark.common.normalize_uri', 'common.normalize_uri', (['uri'], {}), '(uri)\n', (4366, 4371), False, 'from commonmark import common, inlines\n'), ((5017, 5036), 'commonmark.Parser', 'commonmark.Parser', ([], {}), '()\n', (5034, 5036), False, 'import commonmark\n')]
|
from tclCommands.TclCommand import *
from shapely.geometry import Point
class TclCommandAlignDrillGrid(TclCommandSignaled):
    """
    Tcl shell command to create an Excellon object
    with drills for an alignment grid.
    Todo: What is an alignment grid?
    """

    # array of all command aliases, to be able use old names for
    # backward compatibility (add_poly, add_polygon)
    aliases = ['aligndrillgrid']

    # Dictionary of types from Tcl command, needs to be ordered.
    # For positional arguments
    arg_names = collections.OrderedDict([
        ('outname', str)
    ])

    # Dictionary of types from Tcl command, needs to be ordered.
    # For options like -optionname value
    # NOTE(review): these declare 'gridxoffset'/'gridyoffset', but execute()
    # and the help text use 'gridoffsetx'/'gridoffsety' — an option passed
    # under the declared names would be parsed but then ignored. Confirm
    # which spelling the Tcl interface is supposed to accept.
    option_types = collections.OrderedDict([
        ('dia', float),
        ('gridx', float),
        ('gridxoffset', float),
        ('gridy', float),
        ('gridyoffset', float),
        ('columns', int),
        ('rows', int)
    ])

    # array of mandatory options for current Tcl command: required = {'name','outname'}
    required = ['outname', 'gridx', 'gridy', 'columns', 'rows']

    # structured help for current command, args needs to be ordered
    help = {
        'main': "Create excellon with drills for aligment grid.",
        'args': collections.OrderedDict([
            ('outname', 'Name of the object to create.'),
            ('dia', 'Tool diameter.'),
            ('gridx', 'Grid size in X axis.'),
            ('gridoffsetx', 'Move grid from origin.'),
            ('gridy', 'Grid size in Y axis.'),
            ('gridoffsety', 'Move grid from origin.'),
            ('colums', 'Number of grid holes on X axis.'),
            ('rows', 'Number of grid holes on Y axis.'),
        ]),
        'examples': []
    }

    def execute(self, args, unnamed_args):
        """
        Execute the current TCL shell command: build an Excellon object whose
        drills form a columns x rows grid spaced by gridx/gridy, optionally
        shifted by gridoffsetx/gridoffsety.

        :param args: array of known named arguments and options
        :param unnamed_args: array of other values which were passed into command
            without -somename and we do not have them in known arg_names
        :return: None or exception
        """
        # Grid offsets default to the origin when not supplied.
        if 'gridoffsetx' not in args:
            gridoffsetx = 0
        else:
            gridoffsetx = args['gridoffsetx']

        if 'gridoffsety' not in args:
            gridoffsety = 0
        else:
            gridoffsety = args['gridoffsety']

        # Tools: a single tool "1" with the requested diameter.
        tools = {"1": {"C": args['dia']}}

        def aligndrillgrid_init_me(init_obj, app_obj):
            """
            This function is used to initialize the new
            object once it's created: it fills in the tools and the
            grid of drill points, then builds the geometry.

            :param init_obj: The new object.
            :param app_obj: The application (FlatCAMApp)
            :return: None
            """
            drills = []
            currenty = 0

            # Walk the grid row by row, emitting one drill per cell.
            for row in range(args['rows']):
                currentx = 0

                for col in range(args['columns']):
                    point = Point(currentx + gridoffsetx, currenty + gridoffsety)
                    drills.append({"point": point, "tool": "1"})
                    currentx = currentx + args['gridx']

                currenty = currenty + args['gridy']

            init_obj.tools = tools
            init_obj.drills = drills
            init_obj.create_geometry()

        # Create the new object
        self.app.new_object("excellon", args['outname'], aligndrillgrid_init_me)
|
[
"shapely.geometry.Point"
] |
[((2934, 2987), 'shapely.geometry.Point', 'Point', (['(currentx + gridoffsetx)', '(currenty + gridoffsety)'], {}), '(currentx + gridoffsetx, currenty + gridoffsety)\n', (2939, 2987), False, 'from shapely.geometry import Point\n')]
|
#!/usr/bin/env python3
import os.path as path
import time
import toml
import json
import csv
import os
# various utilities used for file io,
# including loading project configuration,
# state, etc...
# generate a list of all project directories.
# ignores directories with a `.` in their name.
def get_projects():
    """Return the names of all project directories under tmp/projects/.

    Directories containing a '.' in their name are ignored. Raises when the
    projects folder itself does not exist.
    """
    base = 'tmp/projects/'
    if not path.isdir(base):
        raise Exception('expected project folder: ' + base)
    return [name for name in list_dirs(base) if '.' not in name]

# load the configuration file for a given project.
def get_config(project):
    """Load the project's config file and recursively expand its
    '-file' references."""
    base = 'tmp/projects/{}/'.format(project)
    return expand(base, load_file(base, 'config'))
# load existent state files if any.
def get_state(project):
    """Load every parseable state file of a project into a dict.

    Returns {} when the project has no state-files directory. Files whose
    extension has no known parser are silently skipped; keys are the file
    names with their final extension removed.
    """
    base = 'tmp/projects/{}/state-files/'.format(project)
    if not path.isdir(base):
        return {}
    state = {}
    for fname in list_files(base):
        parse = get_parser(fname, strict=False)
        if not parse:
            continue
        key = '.'.join(fname.split('.')[:-1])
        with open(base + fname) as fp:
            state[key] = parse(fp)
    return state
# save all elements of the current state object.
def save_state(project, state):
    """Write each truthy entry of `state` to its own TOML file under the
    project's state-files directory, creating the directory on demand."""
    base = 'tmp/projects/{}/state-files/'.format(project)
    if not path.isdir(base):
        os.makedirs(base)
    for key, val in state.items():
        # falsy entries (None, empty dicts, ...) are not persisted.
        if not val:
            continue
        with open(base + '{}.toml'.format(key), 'w') as fp:
            toml.dump(val, fp)
# recursively expand all `-file` fields
# within some existing dict of data.
def expand(directory, data):
    """Return a copy of `data` with every '<name>-file' field replaced by
    the parsed contents of the referenced file, recursing into nested dicts."""
    result = {}
    # First pass: expand '-file' fields, copy everything else through.
    for key, val in data.items():
        if key.endswith('-file'):
            # '<name>-file' becomes '<name>', holding the loaded file data.
            stripped = '-'.join(key.split('-')[:-1])
            result[stripped] = load_file(directory, val)
        else:
            result[key] = val
    # Second pass: recursively expand any dict-valued fields.
    for key, val in result.items():
        if isinstance(val, dict):
            result[key] = expand(directory, val)
    return result
# load a target file.
def load_file(directory, target):
    """Find a file in `directory` whose name starts with `target`, parse it
    with the parser matching its extension, and return the parsed data."""
    candidates = [f for f in list_files(directory) if f.startswith(target)]
    if not candidates:
        raise Exception('no file matching: ' + target)
    # like the original, an arbitrary match (the last listed) wins.
    filename = candidates.pop()
    parse = get_parser(filename)
    with open(directory + filename) as fp:
        return parse(fp)
# load a parser for some config file.
def get_parser(filename, strict=True):
    """Return a parser callable for `filename` based on its extension.

    Supports TOML and JSON. For unknown extensions, returns None when
    strict is False, otherwise raises.
    """
    if filename.endswith('toml'):
        return lambda fp: toml.load(fp)
    if filename.endswith('json'):
        return lambda fp: json.load(fp)
    if strict:
        raise Exception('unknown filetype: ' + filename)
    return None
# list all files in a directory.
def list_files(directory):
    """Return the plain files directly inside `directory` (non-recursive).

    `directory` must carry a trailing separator: entries are joined by
    plain concatenation.
    """
    return [entry for entry in os.listdir(directory)
            if path.isfile(directory + entry)]

# list all directories in a directory.
def list_dirs(directory):
    """Return the sub-directories directly inside `directory` (non-recursive).

    `directory` must carry a trailing separator, as in list_files.
    """
    return [entry for entry in os.listdir(directory)
            if path.isdir(directory + entry)]
# return all elements from a list of files
# which match the specified filetype
def match_filetype(files, filetype):
    """Return the entries of `files` whose extension equals `filetype`
    (case-insensitive). A filetype of '*' matches everything and returns
    the input list unchanged."""
    if filetype == '*':
        return files
    wanted = filetype.lower()
    # an entry without a '.' is compared against its whole (lowercased) name.
    return [name for name in files if name.split('.').pop().lower() == wanted]
# archiving shortcut for steps which archive rows,
# e.g.; when removed during value-based filtering.
def save_archive(project, step, rows):
    """Append `rows` to a '<step>-<unix time>.csv' file under
    tmp/archive/<project>/, creating the directory on demand."""
    archive_dir = 'tmp/archive/{}/'.format(project)
    if not path.isdir(archive_dir):
        os.makedirs(archive_dir)
    fname = str(step) + '-' + str(int(time.time()))
    save_csv(archive_dir + fname + '.csv', rows, append=True)
# saves all rows to the file specified by `filepath`.
# automatically infers headers from the `_fields` method
# of the `namedtuple` object. can be optionally set to
# `append` mode, which appends the data & skips writing
# header names if the target csv already exists.
def save_csv(filepath, rows, append=False):
    """Write `rows` (namedtuple instances) to `filepath` as CSV.

    Headers are inferred from the namedtuples' `_fields`. With append=True
    and an existing target file, rows are appended and the header row is
    skipped; otherwise the file is (re)written starting with a header.
    Does nothing when `rows` is empty.
    """
    if not rows: return
    # get the field names of our data.
    fields = rows[0]._fields
    # if append is true and the file already exists, set
    # mode to `append`, else set mode to `write`.
    mode = 'a' if append and path.isfile(filepath) else 'w'
    # make sure the user is appraised of a file being written.
    print('writing {} rows to {}'.format(len(rows), filepath))
    # open with newline='' as the csv docs require: without it the csv
    # module's own line endings get translated, doubling rows on Windows.
    with open(filepath, mode, newline='') as fp:
        # let the `csv` module handle formatting.
        writer = csv.writer(fp)
        # if we are writing to a new file, we should
        # write the field names as our first row.
        if mode == 'w':
            writer.writerow(fields)
        # write all rows to our file in one call.
        writer.writerows(rows)
|
[
"json.load",
"os.makedirs",
"csv.writer",
"os.path.isdir",
"time.time",
"os.path.isfile",
"toml.load",
"toml.dump",
"os.listdir"
] |
[((3069, 3090), 'os.listdir', 'os.listdir', (['directory'], {}), '(directory)\n', (3079, 3090), False, 'import os\n'), ((3258, 3279), 'os.listdir', 'os.listdir', (['directory'], {}), '(directory)\n', (3268, 3279), False, 'import os\n'), ((359, 380), 'os.path.isdir', 'path.isdir', (['directory'], {}), '(directory)\n', (369, 380), True, 'import os.path as path\n'), ((914, 935), 'os.path.isdir', 'path.isdir', (['directory'], {}), '(directory)\n', (924, 935), True, 'import os.path as path\n'), ((1410, 1431), 'os.path.isdir', 'path.isdir', (['directory'], {}), '(directory)\n', (1420, 1431), True, 'import os.path as path\n'), ((1441, 1463), 'os.makedirs', 'os.makedirs', (['directory'], {}), '(directory)\n', (1452, 1463), False, 'import os\n'), ((3112, 3138), 'os.path.isfile', 'path.isfile', (['(directory + f)'], {}), '(directory + f)\n', (3123, 3138), True, 'import os.path as path\n'), ((3301, 3326), 'os.path.isdir', 'path.isdir', (['(directory + f)'], {}), '(directory + f)\n', (3311, 3326), True, 'import os.path as path\n'), ((3966, 3983), 'os.path.isdir', 'path.isdir', (['fpath'], {}), '(fpath)\n', (3976, 3983), True, 'import os.path as path\n'), ((3993, 4011), 'os.makedirs', 'os.makedirs', (['fpath'], {}), '(fpath)\n', (4004, 4011), False, 'import os\n'), ((4940, 4954), 'csv.writer', 'csv.writer', (['fp'], {}), '(fp)\n', (4950, 4954), False, 'import csv\n'), ((1638, 1656), 'toml.dump', 'toml.dump', (['val', 'fp'], {}), '(val, fp)\n', (1647, 1656), False, 'import toml\n'), ((2793, 2806), 'toml.load', 'toml.load', (['fp'], {}), '(fp)\n', (2802, 2806), False, 'import toml\n'), ((4634, 4655), 'os.path.isfile', 'path.isfile', (['filepath'], {}), '(filepath)\n', (4645, 4655), True, 'import os.path as path\n'), ((2870, 2883), 'json.load', 'json.load', (['fp'], {}), '(fp)\n', (2879, 2883), False, 'import json\n'), ((3895, 3906), 'time.time', 'time.time', ([], {}), '()\n', (3904, 3906), False, 'import time\n')]
|
#!/usr/bin/env python
import os
import sys
from setuptools import find_packages, setup
# Packaging metadata, assembled as a dict so the environment switches
# below can tweak it before the single setup() call.
kwargs = {
    'name': 'rosdistro',
    # same version as in:
    # - src/rosdistro/__init__.py
    # - stdeb.cfg
    'version': '0.8.0',
    'install_requires': ['PyYAML', 'setuptools'],
    'packages': find_packages('src'),
    'package_dir': {'': 'src'},
    'scripts': [
        # 'scripts/rosdistro',
        'scripts/rosdistro_build_cache',
        'scripts/rosdistro_freeze_source',
        # 'scripts/rosdistro_convert',
        # 'scripts/rosdistro_generate_cache',
        'scripts/rosdistro_migrate_to_rep_141',
        'scripts/rosdistro_migrate_to_rep_143',
        'scripts/rosdistro_reformat'
    ],
    'author': '<NAME>, <NAME>',
    'author_email': '<EMAIL>, <EMAIL>',
    'maintainer': '<NAME>',
    'maintainer_email': '<EMAIL>',
    'url': 'http://wiki.ros.org/rosdistro',
    'keywords': ['ROS'],
    'classifiers': [
        'Programming Language :: Python',
        'License :: OSI Approved :: BSD License',
        'License :: OSI Approved :: MIT License'],
    'description': 'A tool to work with rosdistro files',
    'long_description': 'A tool to work with rosdistro files',
    'license': 'BSD, MIT'
}
# argparse only joined the standard library in Python 2.7; vendor it on 2.6.
if sys.version_info[0] == 2 and sys.version_info[1] < 7:
    kwargs['install_requires'].append('argparse')
# Environment switches let packaging jobs build the module portion and the
# scripts portion of the package separately.
if 'SKIP_PYTHON_MODULES' in os.environ:
    # scripts-only build: ship no Python modules.
    kwargs['packages'] = []
    kwargs['package_dir'] = {}
elif 'SKIP_PYTHON_SCRIPTS' in os.environ:
    # modules-only build under an alternate package name, with no scripts.
    kwargs['name'] += '_modules'
    kwargs['scripts'] = []
else:
    # full build: include the runtime dependencies of the scripts.
    kwargs['install_requires'] += ['catkin_pkg', 'rospkg']
setup(**kwargs)
|
[
"setuptools.setup",
"setuptools.find_packages"
] |
[((1598, 1613), 'setuptools.setup', 'setup', ([], {}), '(**kwargs)\n', (1603, 1613), False, 'from setuptools import find_packages, setup\n'), ((294, 314), 'setuptools.find_packages', 'find_packages', (['"""src"""'], {}), "('src')\n", (307, 314), False, 'from setuptools import find_packages, setup\n')]
|
"""Definitions for all core numeric instructions."""
import math
from pyshgp.push.instruction import SimpleInstruction
from pyshgp.utils import Token
def _add(a, b):
return b + a,
def _sub(a, b):
return b - a,
def _mult(a, b):
return b * a,
def _p_div(a, b):
if a == 0:
return Token.revert
return b / a,
def _p_mod(a, b):
if a == 0:
return Token.revert
return b % a,
def _min(a, b):
return min(a, b),
def _max(a, b):
return max(a, b),
def _inc(x):
return x + 1,
def _dec(x):
return x - 1,
def _lt(a, b):
return b < a,
def _gt(a, b):
return b > a,
def _lte(a, b):
return b <= a,
def _gte(a, b):
return b >= a,
def _sin(x):
return math.sin(x),
def _cos(x):
return math.cos(x),
def _tan(x):
return math.tan(x),
def _to_int(x):
return int(x),
def _to_float(x):
return float(x),
def instructions():
    """Return all core numeric instructions."""
    i = []

    # Instructions registered for both numeric push types. Each spec is
    # (name suffix, implementation, arity, output stack or None meaning
    # "same as the numeric type", docstring template). The order of this
    # list is the registration order.
    per_type_specs = [
        ("add", _add, 2, None, "Adds the top two {t}s and pushes the result."),
        ("sub", _sub, 2, None, "Subtracts the top two {t}s and pushes the result."),
        ("mult", _mult, 2, None, "Multiplies the top two {t}s and pushes the result."),
        ("div", _p_div, 2, None, "Divides the top two {t}s and pushes the result."),
        ("mod", _p_mod, 2, None, "Computes the modulus of the top two {t}s and pushes the result."),
        ("min", _min, 2, None, "Pushes the minimum of two {t}."),
        ("max", _max, 2, None, "Pushes the maximum of two {t}."),
        ("inc", _inc, 1, None, "Increments the top {t} by 1."),
        ("dec", _dec, 1, None, "Decrements the top {t} by 1."),
        ("lt", _lt, 2, "bool", "Pushes true if the top {t} is less than the second. Pushes false otherwise."),
        ("lte", _lte, 2, "bool", "Pushes true if the top {t} is less than, or equal to, the second. Pushes false otherwise."),
        ("gt", _gt, 2, "bool", "Pushes true if the top {t} is greater than the second.. Pushes false otherwise."),
        ("gte", _gte, 2, "bool", "Pushes true if the top {t} is greater than, or equal to, the second. Pushes false otherwise."),
    ]
    for push_type in ["int", "float"]:
        for suffix, fn, arity, out_stack, doc in per_type_specs:
            i.append(SimpleInstruction(
                "{t}_{s}".format(t=push_type, s=suffix),
                fn,
                input_stacks=[push_type] * arity,
                output_stacks=[out_stack or push_type],
                code_blocks=0,
                docstring=doc.format(t=push_type)
            ))

    # Trig functions
    for suffix, fn in [("sin", _sin), ("cos", _cos), ("tan", _tan)]:
        i.append(SimpleInstruction(
            "float_{s}".format(s=suffix),
            fn,
            input_stacks=["float"],
            output_stacks=["float"],
            code_blocks=0,
            docstring="Pushes the {s} of the top float.".format(s=suffix)
        ))

    # Type converting
    conversion_specs = [
        ("int_from_bool", _to_int, "bool", "int",
         "Pushes 1 in the top boolean is true. Pushes 0 if the top boolean is false."),
        ("float_from_bool", _to_float, "bool", "float",
         "Pushes 1.0 in the top boolean is true. Pushes 0.0 if the top boolean is false."),
        ("int_from_float", _to_int, "float", "int",
         "Casts the top float to an integer and pushes the result."),
        ("float_from_int", _to_float, "int", "float",
         "Casts the top integer to a float and pushes the result."),
    ]
    for name, fn, in_stack, out_stack, doc in conversion_specs:
        i.append(SimpleInstruction(
            name,
            fn,
            input_stacks=[in_stack],
            output_stacks=[out_stack],
            code_blocks=0,
            docstring=doc
        ))
    return i
|
[
"pyshgp.push.instruction.SimpleInstruction",
"math.tan",
"math.cos",
"math.sin"
] |
[((738, 749), 'math.sin', 'math.sin', (['x'], {}), '(x)\n', (746, 749), False, 'import math\n'), ((777, 788), 'math.cos', 'math.cos', (['x'], {}), '(x)\n', (785, 788), False, 'import math\n'), ((816, 827), 'math.tan', 'math.tan', (['x'], {}), '(x)\n', (824, 827), False, 'import math\n'), ((5232, 5383), 'pyshgp.push.instruction.SimpleInstruction', 'SimpleInstruction', (['"""float_sin"""', '_sin'], {'input_stacks': "['float']", 'output_stacks': "['float']", 'code_blocks': '(0)', 'docstring': '"""Pushes the sin of the top float."""'}), "('float_sin', _sin, input_stacks=['float'], output_stacks=\n ['float'], code_blocks=0, docstring='Pushes the sin of the top float.')\n", (5249, 5383), False, 'from pyshgp.push.instruction import SimpleInstruction\n'), ((5448, 5599), 'pyshgp.push.instruction.SimpleInstruction', 'SimpleInstruction', (['"""float_cos"""', '_cos'], {'input_stacks': "['float']", 'output_stacks': "['float']", 'code_blocks': '(0)', 'docstring': '"""Pushes the cos of the top float."""'}), "('float_cos', _cos, input_stacks=['float'], output_stacks=\n ['float'], code_blocks=0, docstring='Pushes the cos of the top float.')\n", (5465, 5599), False, 'from pyshgp.push.instruction import SimpleInstruction\n'), ((5664, 5815), 'pyshgp.push.instruction.SimpleInstruction', 'SimpleInstruction', (['"""float_tan"""', '_tan'], {'input_stacks': "['float']", 'output_stacks': "['float']", 'code_blocks': '(0)', 'docstring': '"""Pushes the tan of the top float."""'}), "('float_tan', _tan, input_stacks=['float'], output_stacks=\n ['float'], code_blocks=0, docstring='Pushes the tan of the top float.')\n", (5681, 5815), False, 'from pyshgp.push.instruction import SimpleInstruction\n'), ((5903, 6109), 'pyshgp.push.instruction.SimpleInstruction', 'SimpleInstruction', (['"""int_from_bool"""', '_to_int'], {'input_stacks': "['bool']", 'output_stacks': "['int']", 'code_blocks': '(0)', 'docstring': '"""Pushes 1 in the top boolean is true. 
Pushes 0 if the top boolean is false."""'}), "('int_from_bool', _to_int, input_stacks=['bool'],\n output_stacks=['int'], code_blocks=0, docstring=\n 'Pushes 1 in the top boolean is true. Pushes 0 if the top boolean is false.'\n )\n", (5920, 6109), False, 'from pyshgp.push.instruction import SimpleInstruction\n'), ((6165, 6381), 'pyshgp.push.instruction.SimpleInstruction', 'SimpleInstruction', (['"""float_from_bool"""', '_to_float'], {'input_stacks': "['bool']", 'output_stacks': "['float']", 'code_blocks': '(0)', 'docstring': '"""Pushes 1.0 in the top boolean is true. Pushes 0.0 if the top boolean is false."""'}), "('float_from_bool', _to_float, input_stacks=['bool'],\n output_stacks=['float'], code_blocks=0, docstring=\n 'Pushes 1.0 in the top boolean is true. Pushes 0.0 if the top boolean is false.'\n )\n", (6182, 6381), False, 'from pyshgp.push.instruction import SimpleInstruction\n'), ((6437, 6622), 'pyshgp.push.instruction.SimpleInstruction', 'SimpleInstruction', (['"""int_from_float"""', '_to_int'], {'input_stacks': "['float']", 'output_stacks': "['int']", 'code_blocks': '(0)', 'docstring': '"""Casts the top float to an integer and pushes the result."""'}), "('int_from_float', _to_int, input_stacks=['float'],\n output_stacks=['int'], code_blocks=0, docstring=\n 'Casts the top float to an integer and pushes the result.')\n", (6454, 6622), False, 'from pyshgp.push.instruction import SimpleInstruction\n'), ((6683, 6869), 'pyshgp.push.instruction.SimpleInstruction', 'SimpleInstruction', (['"""float_from_int"""', '_to_float'], {'input_stacks': "['int']", 'output_stacks': "['float']", 'code_blocks': '(0)', 'docstring': '"""Casts the top integer to a float and pushes the result."""'}), "('float_from_int', _to_float, input_stacks=['int'],\n output_stacks=['float'], code_blocks=0, docstring=\n 'Casts the top integer to a float and pushes the result.')\n", (6700, 6869), False, 'from pyshgp.push.instruction import SimpleInstruction\n')]
|
# common holds pyhton function to be used in the snakefile
import pandas as pd
import yaml
# map samples to fastqs
def get_samples():
    """Return the list of sample names from samplesheet.tsv."""
    return list(st.index)

def get_marks():
    """Return the list of marks from samplesheet.tsv."""
    return list(st['mark'])

def get_bowtie2_input(wildcards):
    """Return the (R1, R2) fastq paths for the wildcard's sample."""
    row = st.loc[wildcards.sample]
    return row['R1'], row['R2']
def get_reads():
    """
    get list of all reads

    Returns every R1/R2 path from the samplesheet, reduced to the bare
    file name up to its first '.'.
    """
    # NOTE(review): `os` is not imported in this file's visible imports —
    # presumably it is provided by the including Snakefile; confirm.
    rlist=list(st['R1'])+list(st['R2'])
    rlist=[os.path.basename(f).split('.')[0] for f in rlist]
    return rlist
def get_igg(wildcards):
    """
    Returns the igg file for the sample unless
    the sample is IgG then no control file is used.
    """
    # NOTE(review): despite the docstring, both branches below currently
    # return the IgG bam path — the empty-string return for IgG samples is
    # commented out, so IgG samples get their own bam too. Confirm intended.
    if config['USEIGG']:
        igg=st.loc[wildcards.sample]['igg']
        iggbam=f'data/ban/{igg}.ban.sorted.markd.bam'
        isigg="IgG" in wildcards.sample
        if not isigg:
            return f'{iggbam}'
        else:
            # return ""
            return f'{iggbam}'
    else:
        return ""
def macs2_igg(wildcards):
    """Build the macs2 control argument for a sample.

    Returns '-c <igg bam>' for non-IgG samples when IgG usage is enabled;
    an empty string for IgG samples or when IgG usage is disabled.
    """
    if not config['USEIGG']:
        return ""
    # look up the sample's paired IgG control first (matches the original
    # evaluation order, so missing samples still raise here).
    igg = st.loc[wildcards.sample]['igg']
    if "IgG" in wildcards.sample:
        return ""
    return '-c data/ban/{}.ban.sorted.markd.bam'.format(igg)
def macs2_peak(wildcards):
    """Return the macs2 peak-type flag for a sample: '--broad' for broad
    peaks, '' for narrow, and None for any other value."""
    peak_type = st.loc[wildcards.sample]['peak']
    # .get() yields None for unknown peak types, like the original's
    # fall-through.
    return {'broad': "--broad", 'narrow': ""}.get(peak_type)
def seacr_igg(wildcards):
    """Return the SEACR control argument for a sample.

    Non-IgG samples get their IgG bedgraph; IgG samples get the numeric
    SEACR threshold; '' when IgG usage is disabled.
    """
    if not config['USEIGG']:
        return ""
    # keep the lookup before the IgG test to match the original order.
    igg = st.loc[wildcards.sample]['igg']
    if "IgG" in wildcards.sample:
        return config['SEACR_THRESHOLD']
    return 'data/seacr/bedgraph/{}.bedgraph'.format(igg)

def seacr_norm(wildcards):
    """Return the SEACR normalization mode: 'non' when the sample is the
    IgG control itself, otherwise 'norm'."""
    return "non" if config["IGG"] in wildcards.sample else "norm"
def get_callpeaks(wildcards):
    """Return the [bam, bai] input pair for the callpeaks rule of a sample."""
    bam_path = "data/ban/{}.ban.sorted.markd.bam".format(wildcards.sample)
    return [bam_path, bam_path + ".bai"]
def gopeaks_igg(wildcards):
    """Return the gopeaks control argument for a sample.

    Non-IgG samples get '-control <igg bam>' when IgG usage is enabled;
    IgG samples and disabled-IgG runs get an empty string.
    """
    if not config['USEIGG']:
        return ""
    # lookup before the IgG test, matching the original evaluation order.
    igg = st.loc[wildcards.sample]['igg']
    if "IgG" in wildcards.sample:
        return ""
    return '-control data/ban/{}.ban.sorted.markd.bam'.format(igg)
def dynamic_range_input():
    """Pair a consensus peak file with a BAM file sharing its condition/mark.

    Fixes a NameError in the original: the consensus files were globbed but
    never iterated, so `consensus` was undefined on the next line. As the
    original's single `return` implies, the first (consensus_name, bam)
    pair found is returned; None when nothing matches.
    """
    # NOTE(review): relies on `glob`/`os` being importable from the
    # including Snakefile, as elsewhere in this file.
    for consensus in glob.glob("data/consensus/*.bed"):
        consensus_file = os.path.basename(consensus).replace(".bed", "")
        # consensus names look like "<method>_<condition>_<mark>"
        parts = consensus_file.split("_")
        condition = parts[1]
        mark = parts[2]
        input_bams = glob.glob("data/ban/{condition}*{mark}*.bam".format(
            condition=condition, mark=mark))
        for bam in input_bams:
            return (consensus_file, bam)
def get_standard(wildcards):
    """Look up the gold-standard file for the wildcard's sample in
    src/config.yml (under the STANDARDS key)."""
    with open("src/config.yml", "r") as fi:
        cfg = yaml.safe_load(fi)
    return cfg["STANDARDS"][wildcards.sample]
def get_minwidth(wildcards):
    """Return extra min-width arguments for a sample.

    The sample name is split as <condition>_<replicate>_<mark>. Both the
    Kasumi/H3K27Ac branch and the default currently return '' — the branch
    is kept explicit because it is specific to this analysis.
    """
    fields = str(wildcards.sample).split("_")
    condition = fields[0]
    # replicate is unused but the index access is kept so malformed sample
    # names fail the same way as before.
    replicate = fields[1]
    mark = fields[2]
    if condition == "Kasumi" and mark == "H3K27Ac":
        return ""
    return ""
def get_groups():
    """Return a DataFrame of (condition, mark, method) rows: every
    condition/mark combination (IgG excluded, replicates collapsed)
    crossed with every peak-calling method."""
    # condition_mark w/out replicates, IgG excluded.
    cond_mark = st[['condition', 'mark']][st.mark != "IgG"].drop_duplicates().reset_index(drop = True)
    # Collect one frame per method and concatenate once at the end —
    # calling pd.concat inside the loop copies the accumulator every
    # iteration (quadratic in the number of methods).
    frames = []
    for method in all_methods:
        tmpdf = cond_mark.copy()
        tmpdf['method'] = method
        frames.append(tmpdf)
    outdf = pd.concat(frames) if frames else pd.DataFrame()
    outdf = outdf.reset_index(drop = True)
    return(outdf)
def group_reps(wildcards):
    """
    Input: {condition}_{mark} groups
    Output: peak files with {method}_{condition}_{replicate}_{mark};
    None for an unrecognized method, as before.
    """
    mask = (st.condition == wildcards.condition) & (st.mark == wildcards.mark)
    samples = list(st[mask]['sample'])
    # per-method path templates, applied to every replicate in the group.
    templates = {
        "gopeaks": "data/gopeaks/{s}.bed",
        "macs2": "data/macs2/{s}_peaks.narrowPeak",
        "seacr-relaxed": "data/seacr/{s}.relaxed.bed",
        "seacr-stringent": "data/seacr/{s}.stringent.bed",
    }
    template = templates.get(wildcards.method)
    if template is None:
        return None
    return [template.format(s=sample) for sample in samples]
|
[
"pandas.DataFrame",
"yaml.safe_load",
"pandas.concat"
] |
[((4875, 4889), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (4887, 4889), True, 'import pandas as pd\n'), ((4245, 4263), 'yaml.safe_load', 'yaml.safe_load', (['fi'], {}), '(fi)\n', (4259, 4263), False, 'import yaml\n'), ((5140, 5165), 'pandas.concat', 'pd.concat', (['[outdf, tmpdf]'], {}), '([outdf, tmpdf])\n', (5149, 5165), True, 'import pandas as pd\n')]
|
import asyncio
import math
import sys
import boto3
import json
import traceback
from typing import Dict, List, Any
import ccxt.async_support as ccxt
from ccxt import InvalidOrder, OrderNotFound
from ccxt.async_support.base.exchange import Exchange
from Exceptions import OrderCreationError, OrderErrorByExchange
from OrderRequest import OrderRequest, OrderRequestStatus, OrderRequestType, OrderRequestList, \
SegmentedOrderRequestList, CCXT_ORDER_STATUS_OPEN, CCXT_ORDER_STATUS_CANCELED
import time
import logging
from Notifications import sendNotification
from TraderHistory import TraderHistory
from Database import Database
logger = logging.getLogger('Trader')
class Trader:
PHASE_CREATE_TIMEOUT = 5 # sec (Az összes create-nek létre kell jönnie ennyi idő után)
PHASE_FETCH_TIMEOUT = 10 # sec (Az összes order-nek CLOSED-nak kell lennie ennyi idő után, ha ez nem igaz, akkor ABORT ALL)
NOF_CCTX_RETRY = 4
TTL_TRADEORDER_S = 60 * 5
FETCH_ORDER_STATUS_TIMEOUT = 60 * 60 # sec
# EFFICIENCY = 0.9 # Ezzel szorozzuk a beadott amout-okat, hogy elkerüljük a recegést a soros átváltások miatt
#
# @staticmethod
# def applyEfficiencyOnAmounts(segmentedOrderRequestList: SegmentedOrderRequestList) -> SegmentedOrderRequestList:
# for orl in segmentedOrderRequestList.getOrderRequestLists():
# for idx, orderRequest in enumerate(orl.getOrderRequests()):
# orderRequest.amount = orderRequest.amount * pow(Trader.EFFICIENCY, idx * 1)
# return segmentedOrderRequestList
def __init__(self, is_sandbox_mode=True):
self.__balances: Dict[str, Any] = {}
self.__is_sandbox_mode: bool = is_sandbox_mode
self.__exchanges: Dict[str, Exchange] = {}
self.__isBusy = False
logger.debug(f'Trader.__init__(is_sandbox_mode={is_sandbox_mode})')
def getBalances(self):
return self.__balances
def getFreeBalances(self):
free = {}
for name in self.__balances:
exchange = self.__balances[name]
try:
for symbol in exchange['free']:
if exchange['free'][symbol] > 0.00001:
if name not in free:
free[name] = {}
free[name][symbol] = exchange['free'][symbol]
except Exception as e:
logger.error(e)
return free
@staticmethod
def storeFreeBalances(uuid, timing, balances):
db = Database.initDBFromAWSParameterStore()
for exchange in balances:
for symbol in balances[exchange]:
query = "INSERT INTO `balance`" \
"(uuid, timing, exchange, symbol, balance)" \
"VALUES(%s,%s,%s,%s,%s)"
args = (
uuid,
timing,
exchange,
symbol,
balances[exchange][symbol]
)
try:
cursor = db.cursor()
cursor.execute(query, args)
if cursor.lastrowid:
if cursor.lastrowid % 100 == 0:
print('last insert id', cursor.lastrowid)
db.commit()
except Exception as e:
print(e)
db.close()
async def initExchangesFromAWSParameterStore(self):
logger.debug(f'initExchangesFromAWSParameterStore')
with open('./cred/aws-keys.json') as file:
cred = json.load(file)
ssm = boto3.client('ssm',
aws_access_key_id=cred['aws_access_key_id'],
aws_secret_access_key=cred['aws_secret_access_key'],
region_name=cred['region_name'])
def getSSMParam(paramName):
return ssm.get_parameter(Name=paramName, WithDecryption=True)['Parameter']['Value']
enabledExchanges = getSSMParam('/prod/enabledExchanges').split(',')
for exch in enabledExchanges:
path = f'/prod/exchange/{exch}/'
pars = ssm.get_parameters_by_path(
Path=path,
Recursive=True,
WithDecryption=True
)
exchangeCreds = {}
for par in pars['Parameters']:
key = par['Name'].split('/')[-1]
value = par['Value']
exchangeCreds[key] = value
await self.__init_exchange(exch, exchangeCreds)
await self.fetch_balances()
logger.debug(f'Free balances: \n{self.getFreeBalances()}')
Trader.storeFreeBalances(None, None, self.getFreeBalances())
async def initExchangesFromCredFile(self, credfile):
logger.debug(f'initExchangesFromCredFile({credfile})')
with open(credfile) as file:
exchangeCreds = json.load(file)
for exchangeName in exchangeCreds:
await self.__init_exchange(exchangeName, exchangeCreds[exchangeName])
await self.fetch_balances()
async def __init_exchange(self, exchangeName: str, exchangeCreds):
exchange = getattr(ccxt, exchangeName)(exchangeCreds)
await exchange.load_markets()
self.__exchanges[exchangeName.lower().replace(" ", "")] = exchange
async def __close_exchange(self, exchange):
await exchange.close()
async def close_exchanges(self):
tasks = []
for _, exchange in self.__exchanges.items():
tasks.append(asyncio.ensure_future(
self.__close_exchange(exchange)))
await asyncio.gather(*tasks)
logger.debug("Exchanges closed")
async def __cancelOrderRequest(self, orderRequest: OrderRequest):
logger.debug(f'__cancelOrderRequest #{orderRequest.id} ({orderRequest.toString()})')
waitingForCreatingStatusRetries = 0
while orderRequest.getStatus() == OrderRequestStatus.CREATING and waitingForCreatingStatusRetries < 100:
logger.debug(
f"Canceling order request (#{orderRequest.id}) is waiting for status CREATING retrycnt={waitingForCreatingStatusRetries}")
await asyncio.sleep(0.5)
waitingForCreatingStatusRetries = waitingForCreatingStatusRetries + 1
if orderRequest.id is None:
logger.error(f"Canceling order request (#{orderRequest.id}) is not possible id=None, state={orderRequest.getStatus().value}")
return
t1 = time.time()
if orderRequest.id is not None:
for retrycntr in range(Trader.NOF_CCTX_RETRY):
try:
response = await self.__exchanges[
orderRequest.exchange_name_std].cancelOrder(orderRequest.id, orderRequest.market)
logger.debug(f'cancelOrder response={response}')
if 'error' in response:
raise ValueError('Error in exchange response:' +
str(response['error']))
logger.debug(f'Cancelled oder #{orderRequest.id} ({orderRequest})')
orderRequest.setCanceled()
exchange = self.__exchanges[orderRequest.exchange_name_std]
await asyncio.sleep(exchange.rateLimit / 1000)
return
except OrderNotFound as onf:
logger.error(f'Cancel order request (#{orderRequest.id}) failed with OrderNotFound ({onf})')
break
except Exception as e:
logger.debug(f'Cancel order request (#{orderRequest.id}) failed with {e}, retrycntr={retrycntr}')
await asyncio.sleep(self.__exchanges[orderRequest.exchange_name_std].rateLimit / 1000)
dt = (time.time() - t1) * 1000
logger.debug(f'Cancel order request (#{orderRequest.id}) ended in {dt} ms ({orderRequest.toString()})')
async def cancelAllOrderRequests(self, segmentedOrderRequestList: SegmentedOrderRequestList):
tasks = []
for orderRequest in segmentedOrderRequestList.getOrderRequests():
if orderRequest.isPending() is True:
tasks.append(
asyncio.ensure_future(self.__cancelOrderRequest(orderRequest)))
await asyncio.gather(*tasks)
logger.debug("Cancellation of all order requests completed")
async def abortSegmentedOrderRequestList(self, segmentedOrderRequestList: SegmentedOrderRequestList):
logger.debug(f'abortSegmentedOrderRequestList')
try:
await self.cancelAllOrderRequests(segmentedOrderRequestList)
except Exception as e:
logger.error(f'abortSegmentedOrderRequestList failed: {e}')
async def __fetch_order_status(self, orderRequest: OrderRequest):
logger.debug(f'__fetch_order_status #{orderRequest.id} ({orderRequest.toString()})')
try:
t1 = time.time()
response = await self.__exchanges[orderRequest.exchange_name_std].fetchOrder(orderRequest.id)
logger.debug(f'__fetch_order_status #{orderRequest.id} response: {response}')
t2 = time.time()
d = (t2 - t1) * 1000.0
logger.debug(f'Order status fetched #{orderRequest.id} from {orderRequest.exchange_name} in {d} ms')
orderRequest.updateOrderStatusFromCCXT(response)
if response['status'] == CCXT_ORDER_STATUS_CANCELED:
logger.debug(f'Order status CANCELED #{orderRequest.id}')
raise OrderErrorByExchange(orderRequest)
exchange = self.__exchanges[orderRequest.exchange_name_std]
await asyncio.sleep(exchange.rateLimit / 1000)
return
except OrderErrorByExchange as e:
raise e
except InvalidOrder as e:
logger.error(
f'Order status fetching failed for {orderRequest.id} {orderRequest.market} {orderRequest.exchange_name} {e.args}')
raise OrderErrorByExchange(orderRequest)
except Exception as e:
logger.error(f'Order status fetching failed for {orderRequest} with reason {e}')
await asyncio.sleep(
self.__exchanges[orderRequest.exchange_name_std].rateLimit / 1000)
async def __fetch_order_status_until_closed_or_timeout(self, orderRequest: OrderRequest):
t_start = time.time()
while (orderRequest.getStatus() != OrderRequestStatus.CLOSED) and (time.time() < t_start + Trader.FETCH_ORDER_STATUS_TIMEOUT):
await self.__fetch_order_status(orderRequest)
await asyncio.sleep(self.__exchanges[orderRequest.exchange_name_std].rateLimit / 1000)
async def fetch_order_statuses(self, segmentedOrderRequestList: SegmentedOrderRequestList):
tasks = []
try:
for orderRequest in segmentedOrderRequestList.getOrderRequests():
tasks.append(asyncio.ensure_future(
self.__fetch_order_status(orderRequest)))
await asyncio.gather(*tasks)
logger.debug("Order statuses fetching completed")
except OrderErrorByExchange as e:
logger.error(f"Order canceled by exchange: {e}")
raise e
async def fetch_balance(self, exchange):
for retrycntr in range(Trader.NOF_CCTX_RETRY):
t1 = time.time()
try:
balance = await exchange.fetch_balance()
self.__balances[exchange.name.lower().replace(
" ", "")] = balance
d_ms = (time.time() - t1) * 1000.0
logger.debug('Balance fetching completed from ' +
exchange.name + f" in {d_ms} ms")
await asyncio.sleep(exchange.rateLimit / 1000) # wait for rateLimit
return
except (ccxt.ExchangeError, ccxt.NetworkError) as error:
d_ms = (time.time() - t1) * 1000.0
logger.error('Fetch balance failed from ' + exchange.name +
" " + type(error).__name__ + " " +
str(error.args) + " retrycntr:" + str(retrycntr) + f" in {d_ms} ms")
await asyncio.sleep(exchange.rateLimit / 1000)
logger.error(f'Error during fetch balance for {exchange}.')
raise ValueError(f'Error during fetch balance for {exchange}.')
async def fetch_balances(self):
self.__balances = {}
tasks = []
for _, exchange in self.__exchanges.items():
tasks.append(asyncio.ensure_future(self.fetch_balance(exchange)))
await asyncio.gather(*tasks)
def get_free_balance(self, exchangeName, symbol) -> float:
try:
return float(self.__balances[exchangeName][symbol]["free"])
except Exception as e:
# logger.warning()
raise ValueError(
f"No balance available from {exchangeName} {symbol} {self.__balances[exchangeName][symbol]['free']}")
def is_exchange_available(self, exchange_name: str) -> bool:
return exchange_name in self.__exchanges
def get_exchange(self, exchange_name: str) -> Exchange:
if not self.is_exchange_available(exchange_name):
raise ValueError(
f'Exchange ({exchange_name}) does not exists in initialized exchanges'
)
return self.__exchanges[exchange_name]
def get_market(self, exchange_name: str,
market_str: str) -> Dict[str, Any]:
exchange = self.get_exchange(exchange_name)
if market_str not in exchange.markets:
raise ValueError(
f'Symbol ({market_str}) does not exists in exchange ({exchange_name})'
)
return exchange.markets[market_str]
def get_min_trade_amount(self, exchange_name: str, market_str: str):
market = self.get_market(exchange_name, market_str)
return market['limits']['amount']['min']
def isOrderRequestValid(self, orderRequest: OrderRequest) -> bool:
exchange_name = orderRequest.exchange_name_std
market_str = orderRequest.market
amount = orderRequest.volumeBase
type = orderRequest.type
try:
if not self.is_exchange_available(exchange_name):
raise ValueError(f'Exchange is not available: {exchange_name}')
exchange = self.get_exchange(exchange_name)
market = self.get_market(exchange_name, market_str)
if market['limits']['amount']['min']:
if amount < exchange.markets[market_str]['limits']['amount']['min']:
raise ValueError(
'Amount too small, won'
't execute on ' + exchange.name + " " + market_str +
" Amount: " + str(amount) + " Min.amount:" +
str(exchange.markets[market_str]['limits']['amount']['min'])
)
if market['limits']['amount']['max']:
if amount > exchange.markets[market_str]['limits']['amount']['max']:
raise ValueError(
'Amount too big, won'
't execute on ' + exchange.name + " " + market_str +
" Amount: " + str(amount) + " Max.amount:" +
str(exchange.markets[market_str]['limits']['amount']['max'])
)
return True
except Exception as e:
raise ValueError(f"Error during validating OrderRequest: {e}")
def hasSufficientBalanceForOrderRequest(self, orderRequest: OrderRequest):
logger.debug(f'hasSufficientBalanceForOrderRequest({orderRequest})')
exchange_name = orderRequest.exchange_name_std
market_str = orderRequest.market
volumeBase = orderRequest.volumeBase
if orderRequest.type == OrderRequestType.SELL:
base_symbol = market_str.split('/')[0]
free_balance_base = self.get_free_balance(exchange_name, base_symbol)
logger.debug(f'base_symbol={base_symbol}, free_balance_base={free_balance_base}, volumeBase={volumeBase}')
if free_balance_base < volumeBase:
raise ValueError(
f'Insufficient fund on {exchange_name} {orderRequest.market}.' +
f' free_balance_base: {free_balance_base}' +
f' volumeBase: {volumeBase}' +
f' type: {orderRequest.type}')
else:
logger.debug(f'Has sufficient fund on {orderRequest.exchange_name_std} {orderRequest.market}: balance={free_balance_base} needed={volumeBase}')
return True
elif orderRequest.type == OrderRequestType.BUY:
quote_symbol = market_str.split('/')[1]
free_balance_quote = self.get_free_balance(exchange_name, quote_symbol)
logger.debug(f'quote_symbol={quote_symbol}, free_balance_quote={free_balance_quote}, volumeBase={volumeBase}, orderRequest.meanPrice={orderRequest.meanPrice}')
needed_quote = volumeBase * orderRequest.meanPrice
if free_balance_quote < needed_quote:
raise ValueError(
f'Insufficient fund on {orderRequest.exchange_name_std} {orderRequest.market}.' +
f' free_balance_quote: {free_balance_quote}' +
f' volumeBase * orderRequest.meanPrice: {volumeBase * orderRequest.meanPrice}' +
f' type: {orderRequest.type}')
else:
logger.debug(f'Has sufficient fund on {orderRequest.exchange_name_std} {orderRequest.market}: balance={free_balance_quote} needed={needed_quote}')
return True
else:
raise ValueError('Invalid orderRequest.type')
def isOrderRequestListValid(self, orderRequestList: OrderRequestList):
for orderRequest in orderRequestList.getOrderRequests():
if self.isOrderRequestValid(orderRequest) is False:
return False
ors = orderRequestList.getOrderRequests()
return self.hasSufficientBalanceForOrderRequest(ors[0])
def isSegmentedOrderRequestListValid(self, segmentedOrderRequestList: SegmentedOrderRequestList):
ret: bool = True
for orderRequestList in segmentedOrderRequestList.getOrderRequestLists():
ret = ret & self.isOrderRequestListValid(orderRequestList)
return ret
async def __create_limit_order(self, orderRequest: OrderRequest):
logger.debug(f"__create_limit_order ({orderRequest.toString()})")
if orderRequest.shouldAbort:
logger.debug(f"Create limit order is canceled, reason: shouldAbort is True ({orderRequest.toString()})")
return
exchange = self.__exchanges[orderRequest.exchange_name_std]
symbol = orderRequest.market
amount = orderRequest.volumeBase
price = orderRequest.limitPrice
t1 = time.time()
try:
if self.__is_sandbox_mode is True:
raise ValueError('Trader sandbox mode ON')
if orderRequest.type == OrderRequestType.BUY:
orderRequest.setStatus(OrderRequestStatus.CREATING)
response = await exchange.createLimitBuyOrder(symbol, exchange.amountToPrecision(symbol, amount), exchange.priceToPrecision(symbol, price))
logger.debug(f"{orderRequest.exchange_name_std}.createLimitBuyOrder ({orderRequest.toString()}) response: {response}")
elif orderRequest.type == OrderRequestType.SELL:
orderRequest.setStatus(OrderRequestStatus.CREATING)
response = await exchange.createLimitSellOrder(symbol, exchange.amountToPrecision(symbol, amount), exchange.priceToPrecision(symbol, price))
logger.debug(f"{orderRequest.exchange_name_std}.createLimitSellOrder ({orderRequest.toString()}) response: {response}")
else:
raise ValueError('orderRequest.type has an invalid value')
if 'id' not in response:
raise OrderCreationError("Order creation failed: id is not present" +
exchange.name + " " + symbol)
orderRequest.id = response['id']
orderRequest.setStatus(OrderRequestStatus.CREATED)
if 'info' in response:
if 'error' in response['info']:
orderRequest.errorlog = response['info']['error']
if 'info' in response:
if 'error' in response['info']:
if response['info']['error']:
raise ValueError('Error in exchange response:' +
str(response['error']))
d_ms = (time.time() - t1) * 1000.0
logger.debug(f"Create limit order SUCCESS ({orderRequest.toString()}) in {d_ms} ms")
await asyncio.sleep(exchange.rateLimit / 1000)
except Exception as error:
d_ms = (time.time() - t1) * 1000.0
logger.error(f"Create limit order FAILED ({orderRequest.toString()}) in {d_ms} ms. Reason: {error}")
orderRequest.setStatus(OrderRequestStatus.FAILED)
raise error
async def createLimitOrdersOnOrderRequestList(self, orderRequestList: OrderRequestList):
'''
Creates limit order and waits for the order status
:param orderRequestList:
:return:
'''
try:
# Pre-check transactions
if self.isOrderRequestListValid(orderRequestList) is False:
logger.error(f'OrderRequestList is not valid: {orderRequestList} ')
raise ValueError(f'OrderRequestList is not valid: {orderRequestList} ')
# Fire real transactions
for orderRequest in orderRequestList.getOrderRequests():
if orderRequest.shouldAbort is False:
await self.__create_limit_order(orderRequest)
if orderRequest.shouldAbort is True:
await self.__cancelOrderRequest(orderRequest)
else:
await self.__fetch_order_status_until_closed_or_timeout(orderRequest)
if orderRequest.getStatus() != OrderRequestStatus.CLOSED:
raise ValueError(f'OrderRequestStatus is not CLOSED after timeout: {orderRequest.toString()}')
except Exception as e:
logger.error(f"OrderRequestList cannot be created: {e}")
# traceback.print_exc()
raise e
async def createLimitOrdersOnSegmentedOrderRequestList(self, segmentedOrderRequestList: SegmentedOrderRequestList):
orders = []
try:
# Pre-check transactions
if self.isSegmentedOrderRequestListValid(segmentedOrderRequestList) is False:
raise ValueError(
f"segmentedOrderRequestList is not valid: {segmentedOrderRequestList} ")
# Fire real transactions
for orderRequestList in segmentedOrderRequestList.getOrderRequestLists():
orders.append(
asyncio.ensure_future(self.createLimitOrdersOnOrderRequestList(orderRequestList)))
await asyncio.gather(*orders)
except ValueError as ve:
logger.error("Arbitrage deal cannot be executed, " + str(ve.args))
raise ve
def isSandboxMode(self):
return self.__is_sandbox_mode
def input(self, str):
return input(str)
def sendNotification(self, str_text):
sendNotification(str_text)
async def pollTrades(self):
''' TraderHistory-n keresztül menti a trade-ket '''
traderHistory = await TraderHistory.getInstance()
await traderHistory.pollTrades()
await traderHistory.close()
def saveSORLtoDB(self, segmentedOrderRequestList: SegmentedOrderRequestList):
try:
segmentedOrderRequestList.saveToDB()
except Exception as e:
logger.error(e)
async def execute(self, segmentedOrderRequestList: SegmentedOrderRequestList):
if self.__isBusy:
# logger.debug(f"Trader is busy, the execute() call is droped")
return
self.__isBusy = True
try:
logger.debug(f'Start execute the orders:')
logger.debug(f'\n{segmentedOrderRequestList.sorlToString()}\n')
logger.debug(f'Free balances: \n{self.getFreeBalances()}')
isValid = self.isSegmentedOrderRequestListValid(segmentedOrderRequestList)
logger.debug(f'Validating result: {isValid}')
if isValid is False:
self.__isBusy = False
return
if self.__is_sandbox_mode:
logger.debug('Trader is in sandbox mode. Skiping the order requests.')
self.__isBusy = False
return
except ValueError as e:
logger.error(f"execute failed during pre validation. Reason: {e}")
self.__isBusy = False
return
except Exception as e:
logger.error(f"execute failed during pre validation. Reason: {e}", exc_info=True)
self.__isBusy = False
return
# ret = self.input('Write <ok> to authorize the trade:')
# if ret != "ok":
# logger.debug(f'Trader is not authorized to execute the trade.')
# return
# else:
# logger.debug('Trader is authorized.')
try:
# TODO: save SORL into db
self.sendNotification(f"CryptoArb Trader is placing orders, uuid: {segmentedOrderRequestList.uuid}")
t1 = time.time()
Trader.storeFreeBalances(segmentedOrderRequestList.uuid, -1, self.getFreeBalances())
await self.createLimitOrdersOnSegmentedOrderRequestList(segmentedOrderRequestList)
d_s = time.time() - t1
logger.debug(f"createLimitOrdersOnSegmentedOrderRequestList ended in {d_s} s")
logger.debug(f"Waiting for the order requests to complete for {Trader.TTL_TRADEORDER_S} s ")
await asyncio.sleep(Trader.TTL_TRADEORDER_S)
logger.debug(f"Waiting for TTL_TRADEORDER_S is over. ({Trader.TTL_TRADEORDER_S} s) ")
await self.fetch_order_statuses(segmentedOrderRequestList)
logger.debug(f"Canceling all requests")
await self.cancelAllOrderRequests(segmentedOrderRequestList)
except Exception as e:
d_s = time.time() - t1
logger.error(f"execute failed in {d_s} s. Reason: {e}")
for orderRequest in segmentedOrderRequestList.getOrderRequests():
orderRequest.shouldAbort = True
await self.abortSegmentedOrderRequestList(segmentedOrderRequestList)
self.sendNotification(f"CryptoArb Trader failed. Reason: " + f"{e}"[:100])
finally:
logger.debug('SORL after execution:')
logger.debug(f'\n{segmentedOrderRequestList.sorlToString()}\n')
logger.debug('History log after execution:')
logger.debug(f'\n{segmentedOrderRequestList.statusLogToString()}\n')
await self.fetch_balances()
logger.debug(f'Free Balances: {self.getFreeBalances()}')
Trader.storeFreeBalances(segmentedOrderRequestList.uuid, 1, self.getFreeBalances())
# Fetch trades into db
await self.pollTrades()
self.saveSORLtoDB(segmentedOrderRequestList)
# TODO: fetch FIAT into db
self.__isBusy = False
logger.debug('execute(): end.')
# sys.exit("Exit after execute()")
|
[
"asyncio.gather",
"json.load",
"Database.Database.initDBFromAWSParameterStore",
"boto3.client",
"asyncio.sleep",
"Exceptions.OrderErrorByExchange",
"Exceptions.OrderCreationError",
"time.time",
"Notifications.sendNotification",
"logging.getLogger",
"TraderHistory.TraderHistory.getInstance"
] |
[((644, 671), 'logging.getLogger', 'logging.getLogger', (['"""Trader"""'], {}), "('Trader')\n", (661, 671), False, 'import logging\n'), ((2501, 2539), 'Database.Database.initDBFromAWSParameterStore', 'Database.initDBFromAWSParameterStore', ([], {}), '()\n', (2537, 2539), False, 'from Database import Database\n'), ((6605, 6616), 'time.time', 'time.time', ([], {}), '()\n', (6614, 6616), False, 'import time\n'), ((10520, 10531), 'time.time', 'time.time', ([], {}), '()\n', (10529, 10531), False, 'import time\n'), ((19126, 19137), 'time.time', 'time.time', ([], {}), '()\n', (19135, 19137), False, 'import time\n'), ((23776, 23802), 'Notifications.sendNotification', 'sendNotification', (['str_text'], {}), '(str_text)\n', (23792, 23802), False, 'from Notifications import sendNotification\n'), ((3582, 3597), 'json.load', 'json.load', (['file'], {}), '(file)\n', (3591, 3597), False, 'import json\n'), ((3616, 3775), 'boto3.client', 'boto3.client', (['"""ssm"""'], {'aws_access_key_id': "cred['aws_access_key_id']", 'aws_secret_access_key': "cred['aws_secret_access_key']", 'region_name': "cred['region_name']"}), "('ssm', aws_access_key_id=cred['aws_access_key_id'],\n aws_secret_access_key=cred['aws_secret_access_key'], region_name=cred[\n 'region_name'])\n", (3628, 3775), False, 'import boto3\n'), ((4995, 5010), 'json.load', 'json.load', (['file'], {}), '(file)\n', (5004, 5010), False, 'import json\n'), ((5729, 5751), 'asyncio.gather', 'asyncio.gather', (['*tasks'], {}), '(*tasks)\n', (5743, 5751), False, 'import asyncio\n'), ((8427, 8449), 'asyncio.gather', 'asyncio.gather', (['*tasks'], {}), '(*tasks)\n', (8441, 8449), False, 'import asyncio\n'), ((9065, 9076), 'time.time', 'time.time', ([], {}), '()\n', (9074, 9076), False, 'import time\n'), ((9290, 9301), 'time.time', 'time.time', ([], {}), '()\n', (9299, 9301), False, 'import time\n'), ((11489, 11500), 'time.time', 'time.time', ([], {}), '()\n', (11498, 11500), False, 'import time\n'), ((12756, 12778), 'asyncio.gather', 
'asyncio.gather', (['*tasks'], {}), '(*tasks)\n', (12770, 12778), False, 'import asyncio\n'), ((23926, 23953), 'TraderHistory.TraderHistory.getInstance', 'TraderHistory.getInstance', ([], {}), '()\n', (23951, 23953), False, 'from TraderHistory import TraderHistory\n'), ((25897, 25908), 'time.time', 'time.time', ([], {}), '()\n', (25906, 25908), False, 'import time\n'), ((6297, 6315), 'asyncio.sleep', 'asyncio.sleep', (['(0.5)'], {}), '(0.5)\n', (6310, 6315), False, 'import asyncio\n'), ((7921, 7932), 'time.time', 'time.time', ([], {}), '()\n', (7930, 7932), False, 'import time\n'), ((9672, 9706), 'Exceptions.OrderErrorByExchange', 'OrderErrorByExchange', (['orderRequest'], {}), '(orderRequest)\n', (9692, 9706), False, 'from Exceptions import OrderCreationError, OrderErrorByExchange\n'), ((9798, 9838), 'asyncio.sleep', 'asyncio.sleep', (['(exchange.rateLimit / 1000)'], {}), '(exchange.rateLimit / 1000)\n', (9811, 9838), False, 'import asyncio\n'), ((10132, 10166), 'Exceptions.OrderErrorByExchange', 'OrderErrorByExchange', (['orderRequest'], {}), '(orderRequest)\n', (10152, 10166), False, 'from Exceptions import OrderCreationError, OrderErrorByExchange\n'), ((10607, 10618), 'time.time', 'time.time', ([], {}), '()\n', (10616, 10618), False, 'import time\n'), ((10743, 10828), 'asyncio.sleep', 'asyncio.sleep', (['(self.__exchanges[orderRequest.exchange_name_std].rateLimit / 1000)'], {}), '(self.__exchanges[orderRequest.exchange_name_std].rateLimit / 1000\n )\n', (10756, 10828), False, 'import asyncio\n'), ((11163, 11185), 'asyncio.gather', 'asyncio.gather', (['*tasks'], {}), '(*tasks)\n', (11177, 11185), False, 'import asyncio\n'), ((20250, 20348), 'Exceptions.OrderCreationError', 'OrderCreationError', (["('Order creation failed: id is not present' + exchange.name + ' ' + symbol)"], {}), "('Order creation failed: id is not present' + exchange.\n name + ' ' + symbol)\n", (20268, 20348), False, 'from Exceptions import OrderCreationError, OrderErrorByExchange\n'), ((21081, 
21121), 'asyncio.sleep', 'asyncio.sleep', (['(exchange.rateLimit / 1000)'], {}), '(exchange.rateLimit / 1000)\n', (21094, 21121), False, 'import asyncio\n'), ((23446, 23469), 'asyncio.gather', 'asyncio.gather', (['*orders'], {}), '(*orders)\n', (23460, 23469), False, 'import asyncio\n'), ((26119, 26130), 'time.time', 'time.time', ([], {}), '()\n', (26128, 26130), False, 'import time\n'), ((26350, 26388), 'asyncio.sleep', 'asyncio.sleep', (['Trader.TTL_TRADEORDER_S'], {}), '(Trader.TTL_TRADEORDER_S)\n', (26363, 26388), False, 'import asyncio\n'), ((10309, 10394), 'asyncio.sleep', 'asyncio.sleep', (['(self.__exchanges[orderRequest.exchange_name_std].rateLimit / 1000)'], {}), '(self.__exchanges[orderRequest.exchange_name_std].rateLimit / 1000\n )\n', (10322, 10394), False, 'import asyncio\n'), ((11879, 11919), 'asyncio.sleep', 'asyncio.sleep', (['(exchange.rateLimit / 1000)'], {}), '(exchange.rateLimit / 1000)\n', (11892, 11919), False, 'import asyncio\n'), ((20938, 20949), 'time.time', 'time.time', ([], {}), '()\n', (20947, 20949), False, 'import time\n'), ((26732, 26743), 'time.time', 'time.time', ([], {}), '()\n', (26741, 26743), False, 'import time\n'), ((7390, 7430), 'asyncio.sleep', 'asyncio.sleep', (['(exchange.rateLimit / 1000)'], {}), '(exchange.rateLimit / 1000)\n', (7403, 7430), False, 'import asyncio\n'), ((11702, 11713), 'time.time', 'time.time', ([], {}), '()\n', (11711, 11713), False, 'import time\n'), ((12345, 12385), 'asyncio.sleep', 'asyncio.sleep', (['(exchange.rateLimit / 1000)'], {}), '(exchange.rateLimit / 1000)\n', (12358, 12385), False, 'import asyncio\n'), ((21178, 21189), 'time.time', 'time.time', ([], {}), '()\n', (21187, 21189), False, 'import time\n'), ((7826, 7911), 'asyncio.sleep', 'asyncio.sleep', (['(self.__exchanges[orderRequest.exchange_name_std].rateLimit / 1000)'], {}), '(self.__exchanges[orderRequest.exchange_name_std].rateLimit / 1000\n )\n', (7839, 7911), False, 'import asyncio\n'), ((12058, 12069), 'time.time', 'time.time', 
([], {}), '()\n', (12067, 12069), False, 'import time\n')]
|
import datetime
import re
import jwt
from flask import current_app as app
from flask import request, session
from CTFd.cache import cache
from CTFd.constants.users import UserAttrs
from CTFd.constants.teams import TeamAttrs
from CTFd.models import Fails, Users, db, Teams, Tracking
from CTFd.utils import get_config
def get_current_user():
if authed():
user = Users.query.filter_by(id=session["id"]).first()
return user
else:
return None
def get_current_user_attrs():
if authed():
return get_user_attrs(user_id=session["id"])
else:
return None
@cache.memoize(timeout=30)
def get_user_attrs(user_id):
user = Users.query.filter_by(id=user_id).first()
if user:
d = {}
for field in UserAttrs._fields:
d[field] = getattr(user, field)
return UserAttrs(**d)
return None
def get_current_team():
if authed():
user = get_current_user()
return user.team
else:
return None
def get_current_team_attrs():
if authed():
user = get_user_attrs(user_id=session["id"])
if user.team_id:
return get_team_attrs(team_id=user.team_id)
return None
@cache.memoize(timeout=30)
def get_team_attrs(team_id):
team = Teams.query.filter_by(id=team_id).first()
if team:
d = {}
for field in TeamAttrs._fields:
d[field] = getattr(team, field)
return TeamAttrs(**d)
return None
def get_current_user_type(fallback=None):
if authed():
user = get_current_user_attrs()
return user.type
else:
return fallback
def authed():
try:
#if bool(session.get("id", False)):
# return True
pemfile = open("/jwt.pub", 'r')
keystring = pemfile.read()
pemfile.close()
decoded = jwt.decode(request.headers.get('X-CTFProxy-JWT'), keystring, algorithm='EdDSA')
username = decoded['username'].decode('utf-8')
displayname = decoded['displayname'].decode('utf-8')
groups = [x.split("@")[0] for x in decoded['groups']]
user = Users.query.filter_by(email=username).first()
if user is not None:
session["id"] = user.id
user.name = displayname
user.email = username
user.affiliation = ",".join(groups)
user.type = "admin" if "ctfd-admin" in groups else "user"
db.session.commit()
db.session.flush()
else:
user = Users(
name=displayname,
email=username,
verified=True,
affiliation=",".join(groups),
type="admin" if "ctfd-admin" in groups else "user",
)
db.session.add(user)
db.session.commit()
db.session.flush()
session["id"] = user.id
return True
except:
return False
def is_admin():
if authed():
user = get_current_user_attrs()
return user.type == "admin"
else:
return False
def is_verified():
if get_config("verify_emails"):
user = get_current_user_attrs()
if user:
return user.verified
else:
return False
else:
return True
def get_ip(req=None):
if req is None:
req = request
if req.headers.get('X-CTFProxy-Remote-Addr') != "":
return request.headers.get('X-CTFProxy-Remote-Addr')
return req.remote_addr
def get_current_user_recent_ips():
if authed():
return get_user_recent_ips(user_id=session["id"])
else:
return None
@cache.memoize(timeout=60)
def get_user_recent_ips(user_id):
hour_ago = datetime.datetime.now() - datetime.timedelta(hours=1)
addrs = (
Tracking.query.with_entities(Tracking.ip.distinct())
.filter(Tracking.user_id == user_id, Tracking.date >= hour_ago)
.all()
)
return set([ip for (ip,) in addrs])
def get_wrong_submissions_per_minute(account_id):
"""
Get incorrect submissions per minute.
:param account_id:
:return:
"""
one_min_ago = datetime.datetime.utcnow() + datetime.timedelta(minutes=-1)
fails = (
db.session.query(Fails)
.filter(Fails.account_id == account_id, Fails.date >= one_min_ago)
.all()
)
return len(fails)
|
[
"CTFd.models.Users.query.filter_by",
"CTFd.models.db.session.add",
"CTFd.cache.cache.memoize",
"flask.request.headers.get",
"CTFd.models.Tracking.ip.distinct",
"datetime.datetime.now",
"datetime.datetime.utcnow",
"CTFd.constants.teams.TeamAttrs",
"datetime.timedelta",
"CTFd.constants.users.UserAttrs",
"CTFd.models.db.session.commit",
"CTFd.models.db.session.flush",
"CTFd.models.db.session.query",
"CTFd.utils.get_config",
"CTFd.models.Teams.query.filter_by"
] |
[((609, 634), 'CTFd.cache.cache.memoize', 'cache.memoize', ([], {'timeout': '(30)'}), '(timeout=30)\n', (622, 634), False, 'from CTFd.cache import cache\n'), ((1209, 1234), 'CTFd.cache.cache.memoize', 'cache.memoize', ([], {'timeout': '(30)'}), '(timeout=30)\n', (1222, 1234), False, 'from CTFd.cache import cache\n'), ((3644, 3669), 'CTFd.cache.cache.memoize', 'cache.memoize', ([], {'timeout': '(60)'}), '(timeout=60)\n', (3657, 3669), False, 'from CTFd.cache import cache\n'), ((3101, 3128), 'CTFd.utils.get_config', 'get_config', (['"""verify_emails"""'], {}), "('verify_emails')\n", (3111, 3128), False, 'from CTFd.utils import get_config\n'), ((844, 858), 'CTFd.constants.users.UserAttrs', 'UserAttrs', ([], {}), '(**d)\n', (853, 858), False, 'from CTFd.constants.users import UserAttrs\n'), ((1444, 1458), 'CTFd.constants.teams.TeamAttrs', 'TeamAttrs', ([], {}), '(**d)\n', (1453, 1458), False, 'from CTFd.constants.teams import TeamAttrs\n'), ((3426, 3471), 'flask.request.headers.get', 'request.headers.get', (['"""X-CTFProxy-Remote-Addr"""'], {}), "('X-CTFProxy-Remote-Addr')\n", (3445, 3471), False, 'from flask import request, session\n'), ((3719, 3742), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3740, 3742), False, 'import datetime\n'), ((3745, 3772), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (3763, 3772), False, 'import datetime\n'), ((4146, 4172), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (4170, 4172), False, 'import datetime\n'), ((4175, 4205), 'datetime.timedelta', 'datetime.timedelta', ([], {'minutes': '(-1)'}), '(minutes=-1)\n', (4193, 4205), False, 'import datetime\n'), ((675, 708), 'CTFd.models.Users.query.filter_by', 'Users.query.filter_by', ([], {'id': 'user_id'}), '(id=user_id)\n', (696, 708), False, 'from CTFd.models import Fails, Users, db, Teams, Tracking\n'), ((1275, 1308), 'CTFd.models.Teams.query.filter_by', 'Teams.query.filter_by', ([], {'id': 
'team_id'}), '(id=team_id)\n', (1296, 1308), False, 'from CTFd.models import Fails, Users, db, Teams, Tracking\n'), ((1857, 1894), 'flask.request.headers.get', 'request.headers.get', (['"""X-CTFProxy-JWT"""'], {}), "('X-CTFProxy-JWT')\n", (1876, 1894), False, 'from flask import request, session\n'), ((2430, 2449), 'CTFd.models.db.session.commit', 'db.session.commit', ([], {}), '()\n', (2447, 2449), False, 'from CTFd.models import Fails, Users, db, Teams, Tracking\n'), ((2462, 2480), 'CTFd.models.db.session.flush', 'db.session.flush', ([], {}), '()\n', (2478, 2480), False, 'from CTFd.models import Fails, Users, db, Teams, Tracking\n'), ((2758, 2778), 'CTFd.models.db.session.add', 'db.session.add', (['user'], {}), '(user)\n', (2772, 2778), False, 'from CTFd.models import Fails, Users, db, Teams, Tracking\n'), ((2791, 2810), 'CTFd.models.db.session.commit', 'db.session.commit', ([], {}), '()\n', (2808, 2810), False, 'from CTFd.models import Fails, Users, db, Teams, Tracking\n'), ((2823, 2841), 'CTFd.models.db.session.flush', 'db.session.flush', ([], {}), '()\n', (2839, 2841), False, 'from CTFd.models import Fails, Users, db, Teams, Tracking\n'), ((376, 415), 'CTFd.models.Users.query.filter_by', 'Users.query.filter_by', ([], {'id': "session['id']"}), "(id=session['id'])\n", (397, 415), False, 'from CTFd.models import Fails, Users, db, Teams, Tracking\n'), ((2119, 2156), 'CTFd.models.Users.query.filter_by', 'Users.query.filter_by', ([], {'email': 'username'}), '(email=username)\n', (2140, 2156), False, 'from CTFd.models import Fails, Users, db, Teams, Tracking\n'), ((4228, 4251), 'CTFd.models.db.session.query', 'db.session.query', (['Fails'], {}), '(Fails)\n', (4244, 4251), False, 'from CTFd.models import Fails, Users, db, Teams, Tracking\n'), ((3824, 3846), 'CTFd.models.Tracking.ip.distinct', 'Tracking.ip.distinct', ([], {}), '()\n', (3844, 3846), False, 'from CTFd.models import Fails, Users, db, Teams, Tracking\n')]
|
""" to research dataset and event-loop object in ipython
"""
from psana.pyalgos.generic.NDArrUtils import print_ndarr
from psana import DataSource
ds = DataSource(files='/reg/g/psdm/detector/data2_test/xtc/data-amox23616-r0104-e000010-xtcav.xtc2')
orun = next(ds.runs())
det = orun.Detector('xtcav')
print('test_xtcav_data expt: %s runnum: %d\n' % (orun.expt, orun.runnum))
for nev,evt in enumerate(orun.events()):
if nev>10 : break
print('Event %03d'%nev, end='')
#print_ndarr(det.raw.array(evt), ' det.raw.array(evt):')
print_ndarr(det.raw(evt), ' det.raw(evt):')
#print('XXXXX', evt._dgrams[0].xtcav[0].raw.raw)
#----------
|
[
"psana.DataSource"
] |
[((155, 260), 'psana.DataSource', 'DataSource', ([], {'files': '"""/reg/g/psdm/detector/data2_test/xtc/data-amox23616-r0104-e000010-xtcav.xtc2"""'}), "(files=\n '/reg/g/psdm/detector/data2_test/xtc/data-amox23616-r0104-e000010-xtcav.xtc2'\n )\n", (165, 260), False, 'from psana import DataSource\n')]
|
import numpy as np
import tensorflow as tf
from baselines.a2c.utils import conv, fc, conv_to_fc, batch_to_seq, seq_to_batch, lstm, lnlstm, sample
class CnnPolicy(object):
def __init__(self, sess, ob_space, ac_space, nenv, nsteps, nstack, reuse=False):
nbatch = nenv*nsteps
#nh, nw, nc = ob_space.shape
nh,nw,nc = 1,1,2
ob_shape = (nbatch, nh, nw, nc*nstack)
nact = ac_space.n
X = tf.placeholder(tf.uint8, shape=[nbatch,nh,nw,nc*nstack]) #obs
with tf.variable_scope("model", reuse=reuse):
h = conv(tf.cast(X, tf.float32)/255., 'c1', nf=32, rf=1, stride=1, init_scale=np.sqrt(2))
h2 = conv(h, 'c2', nf=64, rf=1, stride=1, init_scale=np.sqrt(2))
h3 = conv(h2, 'c3', nf=64, rf=1, stride=1, init_scale=np.sqrt(2))
h3 = conv_to_fc(h3)
h4 = fc(h3, 'fc1', nh=512, init_scale=np.sqrt(2))
pi = fc(h4, 'pi', nact, act=lambda x:x)
vf = fc(h4, 'v', 1, act=lambda x:x)
""" The part of the model that predicts the progress through the
environment we don't actually have to predict the number of frames.
Could just have a monotonically increasing prediction. This is
enough to enforce that the model learns an order to the states that
corresponds to progress."""
progressf = fc(h4, 'progress', 1, act=lambda x:x) # default act is relu
v0 = vf[:, 0]
progress0 = progressf[:, 0]
a0 = sample(pi)
self.initial_state = [] #not stateful
def step(ob, *_args, **_kwargs):
a, v, p = sess.run([a0, v0, progress0], {X:ob})
return a, v, [], p #[] is a dummy state
def value(ob, *_args, **_kwargs):
return sess.run(v0, {X:ob})
self.X = X
self.pi = pi
self.vf = vf
self.step = step
self.value = value
self.progress = progress0
|
[
"baselines.a2c.utils.sample",
"tensorflow.variable_scope",
"tensorflow.placeholder",
"tensorflow.cast",
"baselines.a2c.utils.conv_to_fc",
"baselines.a2c.utils.fc",
"numpy.sqrt"
] |
[((435, 496), 'tensorflow.placeholder', 'tf.placeholder', (['tf.uint8'], {'shape': '[nbatch, nh, nw, nc * nstack]'}), '(tf.uint8, shape=[nbatch, nh, nw, nc * nstack])\n', (449, 496), True, 'import tensorflow as tf\n'), ((1515, 1525), 'baselines.a2c.utils.sample', 'sample', (['pi'], {}), '(pi)\n', (1521, 1525), False, 'from baselines.a2c.utils import conv, fc, conv_to_fc, batch_to_seq, seq_to_batch, lstm, lnlstm, sample\n'), ((510, 549), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""model"""'], {'reuse': 'reuse'}), "('model', reuse=reuse)\n", (527, 549), True, 'import tensorflow as tf\n'), ((825, 839), 'baselines.a2c.utils.conv_to_fc', 'conv_to_fc', (['h3'], {}), '(h3)\n', (835, 839), False, 'from baselines.a2c.utils import conv, fc, conv_to_fc, batch_to_seq, seq_to_batch, lstm, lnlstm, sample\n'), ((919, 954), 'baselines.a2c.utils.fc', 'fc', (['h4', '"""pi"""', 'nact'], {'act': '(lambda x: x)'}), "(h4, 'pi', nact, act=lambda x: x)\n", (921, 954), False, 'from baselines.a2c.utils import conv, fc, conv_to_fc, batch_to_seq, seq_to_batch, lstm, lnlstm, sample\n'), ((972, 1003), 'baselines.a2c.utils.fc', 'fc', (['h4', '"""v"""', '(1)'], {'act': '(lambda x: x)'}), "(h4, 'v', 1, act=lambda x: x)\n", (974, 1003), False, 'from baselines.a2c.utils import conv, fc, conv_to_fc, batch_to_seq, seq_to_batch, lstm, lnlstm, sample\n'), ((1383, 1421), 'baselines.a2c.utils.fc', 'fc', (['h4', '"""progress"""', '(1)'], {'act': '(lambda x: x)'}), "(h4, 'progress', 1, act=lambda x: x)\n", (1385, 1421), False, 'from baselines.a2c.utils import conv, fc, conv_to_fc, batch_to_seq, seq_to_batch, lstm, lnlstm, sample\n'), ((572, 594), 'tensorflow.cast', 'tf.cast', (['X', 'tf.float32'], {}), '(X, tf.float32)\n', (579, 594), True, 'import tensorflow as tf\n'), ((641, 651), 'numpy.sqrt', 'np.sqrt', (['(2)'], {}), '(2)\n', (648, 651), True, 'import numpy as np\n'), ((718, 728), 'numpy.sqrt', 'np.sqrt', (['(2)'], {}), '(2)\n', (725, 728), True, 'import numpy as np\n'), ((796, 806), 
'numpy.sqrt', 'np.sqrt', (['(2)'], {}), '(2)\n', (803, 806), True, 'import numpy as np\n'), ((890, 900), 'numpy.sqrt', 'np.sqrt', (['(2)'], {}), '(2)\n', (897, 900), True, 'import numpy as np\n')]
|
# Copyright 2022 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for haiku._src.config."""
from concurrent import futures
import inspect
import threading
from absl.testing import absltest
from absl.testing import parameterized
from haiku._src import config
import jax
class ConfigTest(parameterized.TestCase):
def setUp(self):
super().setUp()
self._before = config.main_thread_config
config.tls.config = config.main_thread_config = config.Config.default()
def tearDown(self):
super().tearDown()
config.tls.config = config.main_thread_config = self._before
del self._before
def test_check_jax_usage(self):
cfg = config.get_config()
config.check_jax_usage()
self.assertTrue(cfg.check_jax_usage)
config.check_jax_usage(False)
self.assertFalse(cfg.check_jax_usage)
config.check_jax_usage(True)
self.assertTrue(cfg.check_jax_usage)
@parameterized.parameters(True, False)
def test_inherits_default_from_main_thread(self, default):
e1 = threading.Event()
e2 = threading.Event()
config.get_config().check_jax_usage = default
def f():
self.assertEqual(config.get_config().check_jax_usage, default)
config.get_config().check_jax_usage = True
e1.set()
e2.wait()
self.assertTrue(config.get_config().check_jax_usage)
def g():
e1.wait()
self.assertEqual(config.get_config().check_jax_usage, default)
config.get_config().check_jax_usage = False
e2.set()
self.assertFalse(config.get_config().check_jax_usage)
with futures.ThreadPoolExecutor() as tpe:
f1 = tpe.submit(g)
f2 = tpe.submit(f)
f2.result()
f1.result()
self.assertEqual(config.get_config().check_jax_usage, default)
def test_with_config(self):
ran_f = [False]
@config.with_config(check_jax_usage=False)
def f():
ran_f[0] = True
return config.get_config().check_jax_usage
cfg = config.get_config()
cfg.check_jax_usage = True
self.assertFalse(f())
self.assertTrue(ran_f[0])
self.assertTrue(cfg.check_jax_usage)
def test_assign(self):
cfg = config.get_config()
cfg.check_jax_usage = False
with config.assign(check_jax_usage=True):
self.assertTrue(cfg.check_jax_usage)
self.assertFalse(cfg.check_jax_usage)
def test_assign_with_error(self):
cfg = config.get_config()
cfg.check_jax_usage = False
try:
with config.assign(check_jax_usage=True):
self.assertTrue(cfg.check_jax_usage)
# Raise an exception to test that config is reset on error.
raise ValueError("expected")
except ValueError:
pass
self.assertFalse(cfg.check_jax_usage)
def test_context_matches_set(self):
context_sig = inspect.signature(config.context)
set_sig = inspect.signature(config.set)
self.assertEqual(context_sig.parameters, set_sig.parameters)
def test_context(self):
cfg = config.get_config()
cfg.check_jax_usage = False
with config.context(check_jax_usage=True):
self.assertTrue(cfg.check_jax_usage)
self.assertFalse(cfg.check_jax_usage)
def test_set(self):
cfg = config.get_config()
cfg.check_jax_usage = False
config.set(check_jax_usage=True)
self.assertTrue(cfg.check_jax_usage)
config.set(check_jax_usage=False)
self.assertFalse(cfg.check_jax_usage)
if __name__ == "__main__":
# TODO(tomhennigan): Remove this unused import.
del jax # This is only needed for an internal build test to pass.
absltest.main()
|
[
"absl.testing.absltest.main",
"haiku._src.config.set",
"haiku._src.config.Config.default",
"haiku._src.config.context",
"haiku._src.config.assign",
"absl.testing.parameterized.parameters",
"threading.Event",
"inspect.signature",
"concurrent.futures.ThreadPoolExecutor",
"haiku._src.config.with_config",
"haiku._src.config.get_config",
"haiku._src.config.check_jax_usage"
] |
[((1536, 1573), 'absl.testing.parameterized.parameters', 'parameterized.parameters', (['(True)', '(False)'], {}), '(True, False)\n', (1560, 1573), False, 'from absl.testing import parameterized\n'), ((4143, 4158), 'absl.testing.absltest.main', 'absltest.main', ([], {}), '()\n', (4156, 4158), False, 'from absl.testing import absltest\n'), ((1091, 1114), 'haiku._src.config.Config.default', 'config.Config.default', ([], {}), '()\n', (1112, 1114), False, 'from haiku._src import config\n'), ((1292, 1311), 'haiku._src.config.get_config', 'config.get_config', ([], {}), '()\n', (1309, 1311), False, 'from haiku._src import config\n'), ((1316, 1340), 'haiku._src.config.check_jax_usage', 'config.check_jax_usage', ([], {}), '()\n', (1338, 1340), False, 'from haiku._src import config\n'), ((1386, 1415), 'haiku._src.config.check_jax_usage', 'config.check_jax_usage', (['(False)'], {}), '(False)\n', (1408, 1415), False, 'from haiku._src import config\n'), ((1462, 1490), 'haiku._src.config.check_jax_usage', 'config.check_jax_usage', (['(True)'], {}), '(True)\n', (1484, 1490), False, 'from haiku._src import config\n'), ((1644, 1661), 'threading.Event', 'threading.Event', ([], {}), '()\n', (1659, 1661), False, 'import threading\n'), ((1671, 1688), 'threading.Event', 'threading.Event', ([], {}), '()\n', (1686, 1688), False, 'import threading\n'), ((2444, 2485), 'haiku._src.config.with_config', 'config.with_config', ([], {'check_jax_usage': '(False)'}), '(check_jax_usage=False)\n', (2462, 2485), False, 'from haiku._src import config\n'), ((2581, 2600), 'haiku._src.config.get_config', 'config.get_config', ([], {}), '()\n', (2598, 2600), False, 'from haiku._src import config\n'), ((2765, 2784), 'haiku._src.config.get_config', 'config.get_config', ([], {}), '()\n', (2782, 2784), False, 'from haiku._src import config\n'), ((2995, 3014), 'haiku._src.config.get_config', 'config.get_config', ([], {}), '()\n', (3012, 3014), False, 'from haiku._src import config\n'), ((3387, 3420), 
'inspect.signature', 'inspect.signature', (['config.context'], {}), '(config.context)\n', (3404, 3420), False, 'import inspect\n'), ((3435, 3464), 'inspect.signature', 'inspect.signature', (['config.set'], {}), '(config.set)\n', (3452, 3464), False, 'import inspect\n'), ((3567, 3586), 'haiku._src.config.get_config', 'config.get_config', ([], {}), '()\n', (3584, 3586), False, 'from haiku._src import config\n'), ((3784, 3803), 'haiku._src.config.get_config', 'config.get_config', ([], {}), '()\n', (3801, 3803), False, 'from haiku._src import config\n'), ((3840, 3872), 'haiku._src.config.set', 'config.set', ([], {'check_jax_usage': '(True)'}), '(check_jax_usage=True)\n', (3850, 3872), False, 'from haiku._src import config\n'), ((3918, 3951), 'haiku._src.config.set', 'config.set', ([], {'check_jax_usage': '(False)'}), '(check_jax_usage=False)\n', (3928, 3951), False, 'from haiku._src import config\n'), ((1694, 1713), 'haiku._src.config.get_config', 'config.get_config', ([], {}), '()\n', (1711, 1713), False, 'from haiku._src import config\n'), ((2196, 2224), 'concurrent.futures.ThreadPoolExecutor', 'futures.ThreadPoolExecutor', ([], {}), '()\n', (2222, 2224), False, 'from concurrent import futures\n'), ((2826, 2861), 'haiku._src.config.assign', 'config.assign', ([], {'check_jax_usage': '(True)'}), '(check_jax_usage=True)\n', (2839, 2861), False, 'from haiku._src import config\n'), ((3628, 3664), 'haiku._src.config.context', 'config.context', ([], {'check_jax_usage': '(True)'}), '(check_jax_usage=True)\n', (3642, 3664), False, 'from haiku._src import config\n'), ((1829, 1848), 'haiku._src.config.get_config', 'config.get_config', ([], {}), '()\n', (1846, 1848), False, 'from haiku._src import config\n'), ((2067, 2086), 'haiku._src.config.get_config', 'config.get_config', ([], {}), '()\n', (2084, 2086), False, 'from haiku._src import config\n'), ((2341, 2360), 'haiku._src.config.get_config', 'config.get_config', ([], {}), '()\n', (2358, 2360), False, 'from haiku._src import 
config\n'), ((2534, 2553), 'haiku._src.config.get_config', 'config.get_config', ([], {}), '()\n', (2551, 2553), False, 'from haiku._src import config\n'), ((3067, 3102), 'haiku._src.config.assign', 'config.assign', ([], {'check_jax_usage': '(True)'}), '(check_jax_usage=True)\n', (3080, 3102), False, 'from haiku._src import config\n'), ((1777, 1796), 'haiku._src.config.get_config', 'config.get_config', ([], {}), '()\n', (1794, 1796), False, 'from haiku._src import config\n'), ((1925, 1944), 'haiku._src.config.get_config', 'config.get_config', ([], {}), '()\n', (1942, 1944), False, 'from haiku._src import config\n'), ((2015, 2034), 'haiku._src.config.get_config', 'config.get_config', ([], {}), '()\n', (2032, 2034), False, 'from haiku._src import config\n'), ((2149, 2168), 'haiku._src.config.get_config', 'config.get_config', ([], {}), '()\n', (2166, 2168), False, 'from haiku._src import config\n')]
|
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import unittest
import numpy as np
from openvino.tools.mo.middle.dequantize_linear_resolver import DequantizeLinearResolver
from openvino.tools.mo.front.common.partial_infer.utils import int64_array
from openvino.tools.mo.utils.ir_engine.compare_graphs import compare_graphs
from unit_tests.utils.graph import build_graph
from generator import generator, generate
nodes1_attributes = {
'input': {'kind': 'op', 'op': 'AnyOp'},
'input_data': {'kind': 'data', 'shape': None},
'dequantize': {'kind': 'op', 'op': 'DequantizeLinear', 'axis': 1},
'dequantize_data': {'kind': 'data', 'shape': None},
'scale_param_dq': {'kind': 'op', 'type': 'Const', 'op': 'Const'},
'scale_param_dq_data': {'kind': 'data', 'shape': None},
'zerop_param_dq': {'kind': 'op', 'type': 'Const', 'op': 'Const'},
'zerop_param_dq_data': {'kind': 'data', 'shape': None},
'out': {'kind': 'op', 'op': 'AnyOp'},
'out_data': {'kind': 'data', 'shape': None},
'result': {'kind': 'op', 'op': 'Result'},
}
nodes_ref_attributes = {
'input': {'kind': 'op', 'op': 'AnyOp'},
'input_data': {'kind': 'data', 'shape': None},
'cast': {'kind': 'op', 'op': 'Cast', 'type': 'Convert'},
'cast_data': {'kind': 'data', 'shape': None},
'sub': {'kind': 'op', 'op': 'Sub', 'type': 'Subtract'},
'sub_data': {'kind': 'data', 'shape': None},
'mul': {'kind': 'op', 'op': 'Mul', 'type': 'Multiply'},
'mul_data': {'kind': 'data', 'shape': None},
'scale_param_dq': {'kind': 'op', 'type': 'Const', 'op': 'Const'},
'scale_param_dq_data': {'kind': 'data', 'shape': None},
'zerop_param_dq': {'kind': 'op', 'type': 'Const', 'op': 'Const'},
'zerop_param_dq_data': {'kind': 'data', 'shape': None},
'out': {'kind': 'op', 'op': 'AnyOp'},
'out_data': {'kind': 'data', 'shape': None},
'result': {'kind': 'op', 'op': 'Result'},
'sub_reshape_const': {'kind': 'op', 'type': 'Const', 'op': 'Const'},
'sub_reshape_const_data': {'kind': 'data', 'shape': None},
'sub_reshape': {'kind': 'op', 'type': 'Reshape', 'op': 'Reshape'},
'sub_reshape_data': {'kind': 'data', 'shape': None},
'mul_reshape_const': {'kind': 'op', 'type': 'Const', 'op': 'Const'},
'mul_reshape_const_data': {'kind': 'data', 'shape': None},
'mul_reshape': {'kind': 'op', 'type': 'Reshape', 'op': 'Reshape'},
'mul_reshape_data': {'kind': 'data', 'shape': None},
}
class TestDequantizeLinearResolver(unittest.TestCase):
def test_dequantize(self):
graph = build_graph(nodes1_attributes,
[('input', 'input_data'),
('input_data', 'dequantize'),
('dequantize', 'dequantize_data'),
('scale_param_dq', 'scale_param_dq_data'),
('zerop_param_dq', 'zerop_param_dq_data'),
('scale_param_dq_data', 'dequantize'),
('zerop_param_dq_data', 'dequantize'),
('dequantize_data', 'out'),
('out', 'out_data'),
('out_data', 'result'),
],
{'input_data': {'shape': int64_array([1, 3, 224, 224])},
'scale_param_dq': {'shape': np.array([]), 'value': np.float32(1.0 / 255)},
'scale_param_dq_data': {'shape': np.array([]), 'value': np.float32(1.0 / 255)},
'zerop_param_dq': {'shape': np.array([]), 'value': np.uint8(0)},
'zerop_param_dq_data': {'shape': np.array([]), 'value': np.uint8(0)},
}, nodes_with_edges_only=True)
graph_ref = build_graph(nodes_ref_attributes,
[('input', 'input_data'),
('input_data', 'cast'),
('cast', 'cast_data'),
('cast_data', 'sub'),
('zerop_param_dq', 'zerop_param_dq_data'),
('zerop_param_dq_data', 'sub'),
('sub', 'sub_data'),
('sub_data', 'mul'),
('scale_param_dq', 'scale_param_dq_data'),
('scale_param_dq_data', 'mul'),
('mul', 'mul_data'),
('mul_data', 'out'),
('out', 'out_data'),
('out_data', 'result'),
],
{'input_data': {'shape': int64_array([1, 3, 224, 224])},
'scale_param_dq': {'shape': np.array([]), 'value': np.float32(1.0 / 255)},
'scale_param_dq_data': {'shape': np.array([]), 'value': np.float32(1.0 / 255)},
'zerop_param_dq': {'shape': np.array([]), 'value': np.uint8(0)},
'zerop_param_dq_data': {'shape': np.array([]), 'value': np.uint8(0)},
}, nodes_with_edges_only=True)
graph.stage = 'middle'
DequantizeLinearResolver().find_and_replace_pattern(graph)
(flag, resp) = compare_graphs(graph, graph_ref, 'out', check_op_attrs=True)
self.assertTrue(flag, resp)
def test_dequantize_no_zerop(self):
graph = build_graph(nodes1_attributes,
[('input', 'input_data'),
('input_data', 'dequantize'),
('dequantize', 'dequantize_data'),
('scale_param_dq', 'scale_param_dq_data'),
('scale_param_dq_data', 'dequantize'),
('dequantize', 'dequantize_data'),
('dequantize_data', 'out'),
('out', 'out_data'),
('out_data', 'result'),
],
{'input_data': {'shape': int64_array([1, 3, 224, 224])},
'scale_param_dq': {'shape': np.array([]), 'value': np.float32(1.0 / 255)},
'scale_param_dq_data': {'shape': np.array([]), 'value': np.float32(1.0 / 255)},
}, nodes_with_edges_only=True)
graph_ref = build_graph(nodes_ref_attributes,
[('input', 'input_data'),
('input_data', 'cast'),
('cast', 'cast_data'),
('cast_data', 'mul'),
('scale_param_dq', 'scale_param_dq_data'),
('scale_param_dq_data', 'mul'),
('mul', 'mul_data'),
('mul_data', 'out'),
('out', 'out_data'),
('out_data', 'result'),
],
{'input_data': {'shape': int64_array([1, 3, 224, 224])},
'scale_param_dq': {'shape': np.array([]), 'value': np.float32(1.0 / 255)},
'scale_param_dq_data': {'shape': np.array([]), 'value': np.float32(1.0 / 255)},
}, nodes_with_edges_only=True)
graph.stage = 'middle'
DequantizeLinearResolver().find_and_replace_pattern(graph)
(flag, resp) = compare_graphs(graph, graph_ref, 'out', check_op_attrs=True)
self.assertTrue(flag, resp)
@generator
class TestDequantizeWithAxis(unittest.TestCase):
@generate(*[(int64_array([1, 3, 4, 4]), np.array([2, 3, 4, 5], dtype=np.float32),
np.array([2, 3, 4, 5], dtype=np.uint8), int64_array([1, 1, 4, 1]), 2),
(int64_array([1, 3, 4, 4]), int64_array([2, 3, 4, 5]),
np.array([2, 3, 4, 5], dtype=np.uint8), int64_array([1, 3, 1, 1]), 1),
(int64_array([2, 3, 4, 4]), int64_array([2, 3, 4, 5]),
np.array([2, 3, 4, 5], dtype=np.uint8), int64_array([2, 1, 1, 1]), 0),
(int64_array([1, 3, 4, 4]), int64_array([2, 3, 4, 5]),
np.array([2, 3, 4, 5], dtype=np.uint8), int64_array([1, 1, 4, 1]), -2),
(int64_array([1, 3, 4, 4]), int64_array([2, 3, 4, 5]),
np.array([2, 3, 4, 5], dtype=np.uint8), int64_array([1, 1, 1, 4]), -1),
(int64_array([1, 3, 4, 4]), int64_array([2, 3, 4, 5]),
np.array([2, 3, 4, 5], dtype=np.int32), int64_array([1, 1, 4, 1]), 2),
(int64_array([1, 3, 4, 4]), int64_array([2, 3, 4, 5]),
np.array([2, 3, 4, 5], dtype=np.int32), int64_array([1, 3, 1, 1]), 1),
(int64_array([2, 3, 4, 4]), int64_array([2, 3, 4, 5]),
np.array([2, 3, 4, 5], dtype=np.int32), int64_array([2, 1, 1, 1]), 0),
])
def test_dequantize_with_axis(self, input_shape, scale_param_value, zero_param_value, target_shape, axis):
graph = build_graph(nodes1_attributes,
[('input', 'input_data'),
('input_data', 'dequantize'),
('dequantize', 'dequantize_data'),
('scale_param_dq', 'scale_param_dq_data'),
('zerop_param_dq', 'zerop_param_dq_data'),
('scale_param_dq_data', 'dequantize'),
('zerop_param_dq_data', 'dequantize'),
('dequantize_data', 'out'),
('out', 'out_data'),
('out_data', 'result'),
],
{'input_data': {'shape': input_shape},
'dequantize': {'axis': axis},
'scale_param_dq': {'shape': scale_param_value.shape,
'value': scale_param_value},
'scale_param_dq_data': {'shape': scale_param_value.shape,
'value': scale_param_value},
'zerop_param_dq': {'shape': zero_param_value.shape,
'value': zero_param_value},
'zerop_param_dq_data': {'shape': zero_param_value.shape,
'value': zero_param_value},
}, nodes_with_edges_only=True)
graph_ref = build_graph(nodes_ref_attributes,
[('input', 'input_data'),
('input_data', 'cast'),
('cast', 'cast_data'),
('cast_data', 'sub'),
('zerop_param_dq', 'zerop_param_dq_data'),
('zerop_param_dq_data', 'sub_reshape'),
('sub_reshape_const', 'sub_reshape_const_data'),
('sub_reshape_const_data', 'sub_reshape'),
('sub_reshape', 'sub_reshape_data'),
('sub_reshape_data', 'sub'),
('sub', 'sub_data'),
('sub_data', 'mul'),
('scale_param_dq', 'scale_param_dq_data'),
('scale_param_dq_data', 'mul_reshape'),
('mul_reshape_const', 'mul_reshape_const_data'),
('mul_reshape_const_data', 'mul_reshape'),
('mul_reshape', 'mul_reshape_data'),
('mul_reshape_data', 'mul'),
('mul', 'mul_data'),
('mul_data', 'out'),
('out', 'out_data'),
('out_data', 'result'),
],
{'input_data': {'shape': input_shape},
'scale_param_dq': {'shape': scale_param_value.shape,
'value': scale_param_value},
'scale_param_dq_data': {'shape': scale_param_value.shape,
'value': scale_param_value},
'zerop_param_dq': {'shape': zero_param_value.shape,
'value': zero_param_value},
'zerop_param_dq_data': {'shape': zero_param_value.shape,
'value': zero_param_value},
'sub_reshape_const_data': {'shape': target_shape.shape, 'value': target_shape},
'mul_reshape_const_data': {'shape': target_shape.shape, 'value': target_shape},
}, nodes_with_edges_only=True)
graph.stage = 'middle'
DequantizeLinearResolver().find_and_replace_pattern(graph)
(flag, resp) = compare_graphs(graph, graph_ref, 'out', check_op_attrs=True)
self.assertTrue(flag, resp)
|
[
"openvino.tools.mo.front.common.partial_infer.utils.int64_array",
"numpy.uint8",
"openvino.tools.mo.utils.ir_engine.compare_graphs.compare_graphs",
"unit_tests.utils.graph.build_graph",
"numpy.float32",
"openvino.tools.mo.middle.dequantize_linear_resolver.DequantizeLinearResolver",
"numpy.array"
] |
[((5432, 5492), 'openvino.tools.mo.utils.ir_engine.compare_graphs.compare_graphs', 'compare_graphs', (['graph', 'graph_ref', '"""out"""'], {'check_op_attrs': '(True)'}), "(graph, graph_ref, 'out', check_op_attrs=True)\n", (5446, 5492), False, 'from openvino.tools.mo.utils.ir_engine.compare_graphs import compare_graphs\n'), ((7722, 7782), 'openvino.tools.mo.utils.ir_engine.compare_graphs.compare_graphs', 'compare_graphs', (['graph', 'graph_ref', '"""out"""'], {'check_op_attrs': '(True)'}), "(graph, graph_ref, 'out', check_op_attrs=True)\n", (7736, 7782), False, 'from openvino.tools.mo.utils.ir_engine.compare_graphs import compare_graphs\n'), ((9315, 10156), 'unit_tests.utils.graph.build_graph', 'build_graph', (['nodes1_attributes', "[('input', 'input_data'), ('input_data', 'dequantize'), ('dequantize',\n 'dequantize_data'), ('scale_param_dq', 'scale_param_dq_data'), (\n 'zerop_param_dq', 'zerop_param_dq_data'), ('scale_param_dq_data',\n 'dequantize'), ('zerop_param_dq_data', 'dequantize'), (\n 'dequantize_data', 'out'), ('out', 'out_data'), ('out_data', 'result')]", "{'input_data': {'shape': input_shape}, 'dequantize': {'axis': axis},\n 'scale_param_dq': {'shape': scale_param_value.shape, 'value':\n scale_param_value}, 'scale_param_dq_data': {'shape': scale_param_value.\n shape, 'value': scale_param_value}, 'zerop_param_dq': {'shape':\n zero_param_value.shape, 'value': zero_param_value},\n 'zerop_param_dq_data': {'shape': zero_param_value.shape, 'value':\n zero_param_value}}"], {'nodes_with_edges_only': '(True)'}), "(nodes1_attributes, [('input', 'input_data'), ('input_data',\n 'dequantize'), ('dequantize', 'dequantize_data'), ('scale_param_dq',\n 'scale_param_dq_data'), ('zerop_param_dq', 'zerop_param_dq_data'), (\n 'scale_param_dq_data', 'dequantize'), ('zerop_param_dq_data',\n 'dequantize'), ('dequantize_data', 'out'), ('out', 'out_data'), (\n 'out_data', 'result')], {'input_data': {'shape': input_shape},\n 'dequantize': {'axis': axis}, 'scale_param_dq': 
{'shape':\n scale_param_value.shape, 'value': scale_param_value},\n 'scale_param_dq_data': {'shape': scale_param_value.shape, 'value':\n scale_param_value}, 'zerop_param_dq': {'shape': zero_param_value.shape,\n 'value': zero_param_value}, 'zerop_param_dq_data': {'shape':\n zero_param_value.shape, 'value': zero_param_value}},\n nodes_with_edges_only=True)\n", (9326, 10156), False, 'from unit_tests.utils.graph import build_graph\n'), ((10854, 12237), 'unit_tests.utils.graph.build_graph', 'build_graph', (['nodes_ref_attributes', "[('input', 'input_data'), ('input_data', 'cast'), ('cast', 'cast_data'), (\n 'cast_data', 'sub'), ('zerop_param_dq', 'zerop_param_dq_data'), (\n 'zerop_param_dq_data', 'sub_reshape'), ('sub_reshape_const',\n 'sub_reshape_const_data'), ('sub_reshape_const_data', 'sub_reshape'), (\n 'sub_reshape', 'sub_reshape_data'), ('sub_reshape_data', 'sub'), ('sub',\n 'sub_data'), ('sub_data', 'mul'), ('scale_param_dq',\n 'scale_param_dq_data'), ('scale_param_dq_data', 'mul_reshape'), (\n 'mul_reshape_const', 'mul_reshape_const_data'), (\n 'mul_reshape_const_data', 'mul_reshape'), ('mul_reshape',\n 'mul_reshape_data'), ('mul_reshape_data', 'mul'), ('mul', 'mul_data'),\n ('mul_data', 'out'), ('out', 'out_data'), ('out_data', 'result')]", "{'input_data': {'shape': input_shape}, 'scale_param_dq': {'shape':\n scale_param_value.shape, 'value': scale_param_value},\n 'scale_param_dq_data': {'shape': scale_param_value.shape, 'value':\n scale_param_value}, 'zerop_param_dq': {'shape': zero_param_value.shape,\n 'value': zero_param_value}, 'zerop_param_dq_data': {'shape':\n zero_param_value.shape, 'value': zero_param_value},\n 'sub_reshape_const_data': {'shape': target_shape.shape, 'value':\n target_shape}, 'mul_reshape_const_data': {'shape': target_shape.shape,\n 'value': target_shape}}"], {'nodes_with_edges_only': '(True)'}), "(nodes_ref_attributes, [('input', 'input_data'), ('input_data',\n 'cast'), ('cast', 'cast_data'), ('cast_data', 'sub'), ('zerop_param_dq',\n 
'zerop_param_dq_data'), ('zerop_param_dq_data', 'sub_reshape'), (\n 'sub_reshape_const', 'sub_reshape_const_data'), (\n 'sub_reshape_const_data', 'sub_reshape'), ('sub_reshape',\n 'sub_reshape_data'), ('sub_reshape_data', 'sub'), ('sub', 'sub_data'),\n ('sub_data', 'mul'), ('scale_param_dq', 'scale_param_dq_data'), (\n 'scale_param_dq_data', 'mul_reshape'), ('mul_reshape_const',\n 'mul_reshape_const_data'), ('mul_reshape_const_data', 'mul_reshape'), (\n 'mul_reshape', 'mul_reshape_data'), ('mul_reshape_data', 'mul'), ('mul',\n 'mul_data'), ('mul_data', 'out'), ('out', 'out_data'), ('out_data',\n 'result')], {'input_data': {'shape': input_shape}, 'scale_param_dq': {\n 'shape': scale_param_value.shape, 'value': scale_param_value},\n 'scale_param_dq_data': {'shape': scale_param_value.shape, 'value':\n scale_param_value}, 'zerop_param_dq': {'shape': zero_param_value.shape,\n 'value': zero_param_value}, 'zerop_param_dq_data': {'shape':\n zero_param_value.shape, 'value': zero_param_value},\n 'sub_reshape_const_data': {'shape': target_shape.shape, 'value':\n target_shape}, 'mul_reshape_const_data': {'shape': target_shape.shape,\n 'value': target_shape}}, nodes_with_edges_only=True)\n", (10865, 12237), False, 'from unit_tests.utils.graph import build_graph\n'), ((13527, 13587), 'openvino.tools.mo.utils.ir_engine.compare_graphs.compare_graphs', 'compare_graphs', (['graph', 'graph_ref', '"""out"""'], {'check_op_attrs': '(True)'}), "(graph, graph_ref, 'out', check_op_attrs=True)\n", (13541, 13587), False, 'from openvino.tools.mo.utils.ir_engine.compare_graphs import compare_graphs\n'), ((5349, 5375), 'openvino.tools.mo.middle.dequantize_linear_resolver.DequantizeLinearResolver', 'DequantizeLinearResolver', ([], {}), '()\n', (5373, 5375), False, 'from openvino.tools.mo.middle.dequantize_linear_resolver import DequantizeLinearResolver\n'), ((7639, 7665), 'openvino.tools.mo.middle.dequantize_linear_resolver.DequantizeLinearResolver', 'DequantizeLinearResolver', ([], {}), '()\n', 
(7663, 7665), False, 'from openvino.tools.mo.middle.dequantize_linear_resolver import DequantizeLinearResolver\n'), ((13444, 13470), 'openvino.tools.mo.middle.dequantize_linear_resolver.DequantizeLinearResolver', 'DequantizeLinearResolver', ([], {}), '()\n', (13468, 13470), False, 'from openvino.tools.mo.middle.dequantize_linear_resolver import DequantizeLinearResolver\n'), ((3310, 3339), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[1, 3, 224, 224]'], {}), '([1, 3, 224, 224])\n', (3321, 3339), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((3399, 3411), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (3407, 3411), True, 'import numpy as np\n'), ((3422, 3443), 'numpy.float32', 'np.float32', (['(1.0 / 255)'], {}), '(1.0 / 255)\n', (3432, 3443), True, 'import numpy as np\n'), ((3508, 3520), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (3516, 3520), True, 'import numpy as np\n'), ((3531, 3552), 'numpy.float32', 'np.float32', (['(1.0 / 255)'], {}), '(1.0 / 255)\n', (3541, 3552), True, 'import numpy as np\n'), ((3612, 3624), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (3620, 3624), True, 'import numpy as np\n'), ((3635, 3646), 'numpy.uint8', 'np.uint8', (['(0)'], {}), '(0)\n', (3643, 3646), True, 'import numpy as np\n'), ((3711, 3723), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (3719, 3723), True, 'import numpy as np\n'), ((3734, 3745), 'numpy.uint8', 'np.uint8', (['(0)'], {}), '(0)\n', (3742, 3745), True, 'import numpy as np\n'), ((4791, 4820), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[1, 3, 224, 224]'], {}), '([1, 3, 224, 224])\n', (4802, 4820), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((4884, 4896), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (4892, 4896), True, 'import numpy as np\n'), ((4907, 4928), 'numpy.float32', 'np.float32', (['(1.0 / 255)'], {}), '(1.0 / 
255)\n', (4917, 4928), True, 'import numpy as np\n'), ((4997, 5009), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (5005, 5009), True, 'import numpy as np\n'), ((5020, 5041), 'numpy.float32', 'np.float32', (['(1.0 / 255)'], {}), '(1.0 / 255)\n', (5030, 5041), True, 'import numpy as np\n'), ((5105, 5117), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (5113, 5117), True, 'import numpy as np\n'), ((5128, 5139), 'numpy.uint8', 'np.uint8', (['(0)'], {}), '(0)\n', (5136, 5139), True, 'import numpy as np\n'), ((5208, 5220), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (5216, 5220), True, 'import numpy as np\n'), ((5231, 5242), 'numpy.uint8', 'np.uint8', (['(0)'], {}), '(0)\n', (5239, 5242), True, 'import numpy as np\n'), ((6243, 6272), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[1, 3, 224, 224]'], {}), '([1, 3, 224, 224])\n', (6254, 6272), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((6332, 6344), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (6340, 6344), True, 'import numpy as np\n'), ((6355, 6376), 'numpy.float32', 'np.float32', (['(1.0 / 255)'], {}), '(1.0 / 255)\n', (6365, 6376), True, 'import numpy as np\n'), ((6441, 6453), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (6449, 6453), True, 'import numpy as np\n'), ((6464, 6485), 'numpy.float32', 'np.float32', (['(1.0 / 255)'], {}), '(1.0 / 255)\n', (6474, 6485), True, 'import numpy as np\n'), ((7282, 7311), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[1, 3, 224, 224]'], {}), '([1, 3, 224, 224])\n', (7293, 7311), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((7375, 7387), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (7383, 7387), True, 'import numpy as np\n'), ((7398, 7419), 'numpy.float32', 'np.float32', (['(1.0 / 255)'], {}), '(1.0 / 255)\n', (7408, 7419), True, 'import numpy as np\n'), ((7488, 7500), 'numpy.array', 
'np.array', (['[]'], {}), '([])\n', (7496, 7500), True, 'import numpy as np\n'), ((7511, 7532), 'numpy.float32', 'np.float32', (['(1.0 / 255)'], {}), '(1.0 / 255)\n', (7521, 7532), True, 'import numpy as np\n'), ((7897, 7922), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[1, 3, 4, 4]'], {}), '([1, 3, 4, 4])\n', (7908, 7922), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((7924, 7964), 'numpy.array', 'np.array', (['[2, 3, 4, 5]'], {'dtype': 'np.float32'}), '([2, 3, 4, 5], dtype=np.float32)\n', (7932, 7964), True, 'import numpy as np\n'), ((7983, 8021), 'numpy.array', 'np.array', (['[2, 3, 4, 5]'], {'dtype': 'np.uint8'}), '([2, 3, 4, 5], dtype=np.uint8)\n', (7991, 8021), True, 'import numpy as np\n'), ((8023, 8048), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[1, 1, 4, 1]'], {}), '([1, 1, 4, 1])\n', (8034, 8048), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((8071, 8096), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[1, 3, 4, 4]'], {}), '([1, 3, 4, 4])\n', (8082, 8096), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((8098, 8123), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[2, 3, 4, 5]'], {}), '([2, 3, 4, 5])\n', (8109, 8123), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((8142, 8180), 'numpy.array', 'np.array', (['[2, 3, 4, 5]'], {'dtype': 'np.uint8'}), '([2, 3, 4, 5], dtype=np.uint8)\n', (8150, 8180), True, 'import numpy as np\n'), ((8182, 8207), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[1, 3, 1, 1]'], {}), '([1, 3, 1, 1])\n', (8193, 8207), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((8230, 8255), 
'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[2, 3, 4, 4]'], {}), '([2, 3, 4, 4])\n', (8241, 8255), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((8257, 8282), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[2, 3, 4, 5]'], {}), '([2, 3, 4, 5])\n', (8268, 8282), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((8301, 8339), 'numpy.array', 'np.array', (['[2, 3, 4, 5]'], {'dtype': 'np.uint8'}), '([2, 3, 4, 5], dtype=np.uint8)\n', (8309, 8339), True, 'import numpy as np\n'), ((8341, 8366), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[2, 1, 1, 1]'], {}), '([2, 1, 1, 1])\n', (8352, 8366), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((8389, 8414), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[1, 3, 4, 4]'], {}), '([1, 3, 4, 4])\n', (8400, 8414), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((8416, 8441), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[2, 3, 4, 5]'], {}), '([2, 3, 4, 5])\n', (8427, 8441), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((8460, 8498), 'numpy.array', 'np.array', (['[2, 3, 4, 5]'], {'dtype': 'np.uint8'}), '([2, 3, 4, 5], dtype=np.uint8)\n', (8468, 8498), True, 'import numpy as np\n'), ((8500, 8525), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[1, 1, 4, 1]'], {}), '([1, 1, 4, 1])\n', (8511, 8525), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((8549, 8574), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[1, 3, 4, 4]'], {}), '([1, 3, 4, 4])\n', (8560, 8574), False, 'from openvino.tools.mo.front.common.partial_infer.utils import 
int64_array\n'), ((8576, 8601), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[2, 3, 4, 5]'], {}), '([2, 3, 4, 5])\n', (8587, 8601), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((8620, 8658), 'numpy.array', 'np.array', (['[2, 3, 4, 5]'], {'dtype': 'np.uint8'}), '([2, 3, 4, 5], dtype=np.uint8)\n', (8628, 8658), True, 'import numpy as np\n'), ((8660, 8685), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[1, 1, 1, 4]'], {}), '([1, 1, 1, 4])\n', (8671, 8685), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((8709, 8734), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[1, 3, 4, 4]'], {}), '([1, 3, 4, 4])\n', (8720, 8734), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((8736, 8761), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[2, 3, 4, 5]'], {}), '([2, 3, 4, 5])\n', (8747, 8761), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((8780, 8818), 'numpy.array', 'np.array', (['[2, 3, 4, 5]'], {'dtype': 'np.int32'}), '([2, 3, 4, 5], dtype=np.int32)\n', (8788, 8818), True, 'import numpy as np\n'), ((8820, 8845), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[1, 1, 4, 1]'], {}), '([1, 1, 4, 1])\n', (8831, 8845), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((8868, 8893), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[1, 3, 4, 4]'], {}), '([1, 3, 4, 4])\n', (8879, 8893), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((8895, 8920), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[2, 3, 4, 5]'], {}), '([2, 3, 4, 5])\n', (8906, 8920), False, 'from 
openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((8939, 8977), 'numpy.array', 'np.array', (['[2, 3, 4, 5]'], {'dtype': 'np.int32'}), '([2, 3, 4, 5], dtype=np.int32)\n', (8947, 8977), True, 'import numpy as np\n'), ((8979, 9004), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[1, 3, 1, 1]'], {}), '([1, 3, 1, 1])\n', (8990, 9004), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((9027, 9052), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[2, 3, 4, 4]'], {}), '([2, 3, 4, 4])\n', (9038, 9052), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((9054, 9079), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[2, 3, 4, 5]'], {}), '([2, 3, 4, 5])\n', (9065, 9079), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n'), ((9098, 9136), 'numpy.array', 'np.array', (['[2, 3, 4, 5]'], {'dtype': 'np.int32'}), '([2, 3, 4, 5], dtype=np.int32)\n', (9106, 9136), True, 'import numpy as np\n'), ((9138, 9163), 'openvino.tools.mo.front.common.partial_infer.utils.int64_array', 'int64_array', (['[2, 1, 1, 1]'], {}), '([2, 1, 1, 1])\n', (9149, 9163), False, 'from openvino.tools.mo.front.common.partial_infer.utils import int64_array\n')]
|
# -*- coding: utf-8 -*-
"""
HackerRank - Sock Merchant
https://www.hackerrank.com/challenges/sock-merchant
Created on Mon Nov 12 22:29:31 2018
@author: <NAME>
"""
## REQUIRED MODULES
from collections import Counter
import sys
## MODULE DEFINITIONS
class Solution:
    """
    Counter-based single pass over the sock array.
    Time complexity: O(n)
        - Every sock is visited exactly once
    Space complexity: O(n)
        - The Counter holds one entry per distinct sock colour
    """
    def count_sock_pairs(self, n, a):
        """
        Determine the maximum number of matching sock pairs.
        :param int n: number of individual socks
        :param list[int] a: array of all socks
        :return: maximum number of matching pairs of socks, or -1 for
            empty/missing input
        :rtype: int
        """
        # Preserve the original contract: falsy n or a means no answer.
        if not n or not a:
            return -1
        # Each colour contributes floor(count / 2) complete pairs.
        return sum(count // 2 for count in Counter(a).values())
class Solution2:
    """
    Dictionary-tally single pass over the sock array.
    Time complexity: O(n)
        - Every sock is visited exactly once
    Space complexity: O(n)
        - The tally dict holds one entry per distinct sock colour
    """
    def count_sock_pairs(self, n, a):
        """
        Determine the maximum number of matching sock pairs.
        :param int n: number of individual socks
        :param list[int] a: array of all socks
        :return: maximum number of matching pairs of socks, or -1 for
            empty/missing input
        :rtype: int
        """
        if not n or not a:
            return -1
        tally = {}
        pairs = 0
        for idx in range(n):
            colour = a[idx]
            tally[colour] = tally.get(colour, 0) + 1
            # Every second sock of a given colour completes a new pair.
            if tally[colour] % 2 == 0:
                pairs += 1
        return pairs
class Input:
    """Parser for the HackerRank-formatted problem input."""
    def stdin(self, sys_stdin):
        """
        Import standard input.
        :param _io.TextIOWrapper sys_stdin: standard input (any iterable
            of text lines)
        :return: length of array and input array
        :rtype: tuple[int, list[int]]
        """
        lines = list(sys_stdin)
        n = int(lines[0])
        a = list(map(int, lines[1].split()))
        return n, a
## MAIN MODULE
if __name__ == "__main__":
    # Read the sock inventory from standard input.
    count, socks = Input().stdin(sys.stdin)
    # Count complete pairs and report the result.
    print(Solution().count_sock_pairs(count, socks))
## END OF FILE
|
[
"collections.Counter"
] |
[((894, 904), 'collections.Counter', 'Counter', (['a'], {}), '(a)\n', (901, 904), False, 'from collections import Counter\n')]
|
from web.views.shelter import shelter_bp, shelters_bp
from web.views.user import user_bp
from web.views.administration import admin_bp
from web.views import views
from web.views.page import recommendations
from web.views.admin import *
from web.views.session_mgmt import *
from web.views import api
import conf
from flask import g, request
from flask_login import current_user
@g.babel.localeselector
def get_locale():
    """Select the locale for the current request.

    Prefers the language saved in the authenticated user's settings;
    otherwise falls back to the best match between the browser's
    Accept-Language header and the languages the app supports.

    :return: a language code understood by Flask-Babel
    """
    # if a user is logged in, use the locale from the user settings
    user = getattr(g, "user", None)
    if user is not None and current_user.is_authenticated:
        return user.preferred_language
    # otherwise try to guess the language from the user accept
    # header the browser transmits. The best match wins.
    # NOTE: `request` was previously referenced here without being
    # imported, which raised NameError on every anonymous request;
    # it is now imported at module level.
    return request.accept_languages.best_match(conf.LANGUAGES.keys())
# @g.babel.timezoneselector
# def get_timezone():
# user = getattr(g, 'user', None)
# if user is not None:
# return user.timezone
|
[
"conf.LANGUAGES.keys"
] |
[((782, 803), 'conf.LANGUAGES.keys', 'conf.LANGUAGES.keys', ([], {}), '()\n', (801, 803), False, 'import conf\n')]
|
# -*- encoding: utf-8 -*-
import json
from django.shortcuts import render
from django.views.decorators.http import require_POST
from django.views.decorators.csrf import csrf_exempt
from django.http.response import HttpResponse, JsonResponse
from notification.service import ServiceAgent
from notification.choices import PlatformType
# Keys every notification-test request body must carry.
notification_required_fields = ['token', 'title', 'content']
@csrf_exempt
@require_POST
def test_ios_notification(request):
    """
    view api used to test ios notification
    """
    try:
        payload = json.loads(request.body)
        missing = [f for f in notification_required_fields if f not in payload]
        if missing:
            # Report the first missing field, matching the loop order of
            # notification_required_fields.
            return HttpResponse('required field: {} not given!\n'.format(missing[0]))
        return notify_by_platform(PlatformType.IOS, payload)
    except Exception as e:
        return HttpResponse('Exception: {}'.format(str(e)))
@csrf_exempt
@require_POST
def test_android_notification(request):
    """
    view api used to test android notification
    """
    try:
        payload = json.loads(request.body)
        missing = [f for f in notification_required_fields if f not in payload]
        if missing:
            # Report the first missing field, matching the loop order of
            # notification_required_fields.
            return HttpResponse('required field: {} not given!\n'.format(missing[0]))
        return notify_by_platform(PlatformType.ANDROID, payload)
    except Exception as e:
        return HttpResponse('Exception: {}'.format(str(e)))
def notify_by_platform(platform, data):
    """Try each configured push service for `platform` until one succeeds.

    :param platform: a PlatformType value (IOS or ANDROID)
    :param dict data: validated request body with 'token' (comma-separated
        device tokens), 'title' and 'content'
    :return: HttpResponse whose body is 'Succeed' or 'Failed'
    """
    tokens = data['token'].split(',')
    title = data['title']
    content = data['content']
    delivered = False
    for service in ServiceAgent.get_services_by_platform(platform):
        if platform == PlatformType.IOS:
            delivered = service.ios_push(tokens, title, content)
        elif platform == PlatformType.ANDROID:
            delivered = service.android_push(tokens, title, content)
        # Stop at the first service that reports success.
        if delivered:
            break
    return HttpResponse('Succeed' if delivered else 'Failed')
|
[
"notification.service.ServiceAgent.get_services_by_platform",
"json.loads",
"django.http.response.HttpResponse"
] |
[((1591, 1638), 'notification.service.ServiceAgent.get_services_by_platform', 'ServiceAgent.get_services_by_platform', (['platform'], {}), '(platform)\n', (1628, 1638), False, 'from notification.service import ServiceAgent\n'), ((2052, 2072), 'django.http.response.HttpResponse', 'HttpResponse', (['result'], {}), '(result)\n', (2064, 2072), False, 'from django.http.response import HttpResponse, JsonResponse\n'), ((546, 570), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (556, 570), False, 'import json\n'), ((1081, 1105), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (1091, 1105), False, 'import json\n')]
|
from setuptools import setup, find_packages
package_name = 'lanenet'

# Runtime dependencies needed to train and run the lanenet models.
runtime_deps = [
    'setuptools',
    'torch',
    'torchvision',
    'opencv-python',
    'numpy',
    'tqdm',
]

setup(
    name=package_name,
    version='0.1.0',
    packages=find_packages(),
    py_modules=[],
    zip_safe=True,
    install_requires=runtime_deps,
    author='<NAME>',
    maintainer='<NAME>',
    description='Lanenet implementation in PyTorch',
    license='Apache License, Version 2.0',
    test_suite='pytest',
)
|
[
"setuptools.find_packages"
] |
[((135, 150), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (148, 150), False, 'from setuptools import setup, find_packages\n')]
|
import numpy as np
import imageio
import matplotlib.pyplot as plt
import random
import sys
import argparse
from numba import jit,jitclass,prange
from numba import int64,float64
'''
Suceptible-Infected-Removed (SIR) [012]
'''
def press(event, obj):
    """Matplotlib key handler: on 'q', optionally save the gif, then quit.

    :param event: matplotlib key event (only `.key` is read)
    :param obj: the simulation model; `.save`, `.save_dir`, `.save_name`
        and `.images` are read when saving is enabled
    """
    sys.stdout.flush()
    if event.key != 'q':
        return
    if obj.save:
        # Persist the collected frames as an animated gif before exiting.
        imageio.mimsave(obj.save_dir + obj.save_name, obj.images, fps=30)
    sys.exit(0)
# Numba jitclass attribute type declarations for `model`.
# NOTE(review): several attributes assigned in __init__ (fig, t, sS/sI/sR,
# images, save, save_dir, save_name) are missing from this spec, and
# sizGrid/sizHuman are declared as arrays but assigned scalars --
# presumably this ran without numba active. TODO confirm.
spec = [('sizGrid',int64[:]),('spdConstant',float64),
        ('sizHuman',float64),('simTime',int64),('infR',float64),
        ('infP',float64),('rmT',int64),('rmP',float64),
        ('loc',float64[:]),('spd',float64[:]),('state',int64[:])]
@jitclass(spec)
class model:
    """
    Agent-based SIR (Susceptible-Infected-Removed) simulation.

    `sizHuman` agents bounce inside a `sizGrid` x `sizGrid` box; infected
    agents infect nearby agents within radius `infR` with probability
    `infP`, and may recover after `rmT` iterations with probability `rmP`.
    The state is visualised live with matplotlib.

    NOTE(review): matplotlib calls are not nopython-compatible with
    @jitclass and the spec does not declare the plotting attributes --
    presumably numba was effectively disabled when this ran. TODO confirm.
    """
    def __init__(self,sizGrid=10,spdK=.15,
            sizHuman=100,simT=1000000,
            infR=.2,infP=.05,rmT=50,rmP=.1,
            save=False,save_dir=None,save_name=None):
        """
        :param sizGrid: side length of the square world
        :param spdK: scale of the random per-agent velocities
        :param sizHuman: number of agents
        :param simT: maximum number of iterations to simulate
        :param infR: infection radius around each infected agent
        :param infP: per-contact infection probability
        :param rmT: iterations infected before recovery becomes possible
        :param rmP: per-iteration recovery probability once past rmT
        :param save: when True, rendered frames are kept for gif export
        """
        # Constant
        self.sizGrid = sizGrid
        self.spdConstant = spdK
        self.sizHuman = sizHuman
        self.simTime = simT
        self.infR,self.infP = infR,infP
        self.rmT,self.rmP = rmT,rmP
        # Location and Speed
        self.loc = self.sizGrid*np.random.uniform(0,1,
                (self.sizHuman,2))
        self.spd = self.spdConstant*np.random.normal(0,1,
                (self.sizHuman,2))
        # Infect and Remove Record
        # 1st: Infect (bol), 2nd: Remove (bol), 3rd: Infect Period
        self.state = np.zeros((self.sizHuman,3))
        # Seed the epidemic with one randomly chosen infected agent.
        self.state[np.random.randint(0,self.sizHuman),0] = 1
        # Plot attribute
        self.fig = plt.figure(figsize=(10,5))
        # 'q' key triggers the save-and-exit handler defined above.
        self.fig.canvas.mpl_connect('key_press_event',lambda event:press(event,self))
        plt.ion()
        self.t = 0
        # store SIR
        self.sS,self.sI,self.sR = [],[],[]
        # images
        self.images = []
        self.save,self.save_dir,self.save_name = save,save_dir,save_name
    def updatePlot(self):
        """Redraw both panels and return the rendered frame as an RGB array."""
        ax = plt.gca(); ax.clear()
        # Left panel: agent positions coloured by state
        # (blue=removed, red=infected, green=susceptible).
        ax1 = plt.subplot(1,2,1)
        ax1.clear()
        rmvBol = self.state[:,1]==1
        ax1.scatter(self.loc[rmvBol,0],self.loc[rmvBol,1],c='b')
        # Infected = flagged infected and not already removed.
        infectBol = [False if rmvBol[i]==True else (self.state[i,0]==1) for i in range(self.state.shape[0])]
        ax1.scatter(self.loc[infectBol,0],self.loc[infectBol,1],c='r')
        susceptBol = (self.state[:,0]==0)&(self.state[:,1]==0)
        ax1.scatter(self.loc[susceptBol==1,0],self.loc[susceptBol==1,1],c='g')
        plt.xlim([0,self.sizGrid])
        plt.ylim([0,self.sizGrid])
        plt.title('Iteration {}'.format(self.t))
        # Right panel: S/I/R counts over time.
        ax2 = plt.subplot(1,2,2)
        ax2.clear()
        S,I,R = np.sum(susceptBol),np.sum(infectBol),np.sum(rmvBol)
        self.sS.append(S); self.sI.append(I); self.sR.append(R)
        ax2.plot(self.sS,'g',label='susceptible')
        ax2.annotate('{}'.format(S),(self.t,S))
        ax2.plot(self.sI,'r',label='infectious')
        ax2.annotate('{}'.format(I),(self.t,I))
        ax2.plot(self.sR,'b',label='recovered')
        ax2.annotate('{}'.format(R),(self.t,R))
        plt.ylabel('Number')
        plt.xlabel('Iterations')
        ax2.legend(loc='upper center', bbox_to_anchor=(0.5, -0.05),shadow=True, ncol=3)
        plt.show()
        self.fig.canvas.draw()
        # Capture the canvas pixels so frames can be saved as a gif later.
        image = np.frombuffer(self.fig.canvas.tostring_rgb(),dtype='uint8')
        image = image.reshape(self.fig.canvas.get_width_height()[::-1]+(3,))
        self.fig.canvas.flush_events()
        return image
    def updateLocSpd(self):
        """Advance one time step and bounce agents off the box walls."""
        self.t+=1
        self.loc += self.spd
        # Reverse the velocity component of any agent that hit a wall.
        flipx = (self.loc[:,0]<=0) | (self.loc[:,0]>=self.sizGrid)
        flipy = (self.loc[:,1]<=0) | (self.loc[:,1]>=self.sizGrid)
        self.spd[:,0][flipx] *= -1
        self.spd[:,1][flipy] *= -1
    def updateInfect(self):
        """Spread infection from each infected agent to nearby agents."""
        infect = self.state[:,0]==1
        rmv = self.state[:,1]==1
        infectLoc = self.loc[infect,:]
        for i in range(infectLoc.shape[0]):
            # Agents within infR of this infected agent are candidates.
            tmpdist = np.linalg.norm(self.loc-infectLoc[i,:],axis=1)
            potential = (tmpdist<=self.infR)
            potentialP = np.random.uniform(0,1,self.sizHuman)
            # Candidate becomes infected with probability infP.
            newInfectbol = potential*potentialP >= (1-self.infP)
            infect[newInfectbol] = 1
        # Removed agents cannot be (re)infected.
        for i in range(len(infect)):
            if rmv[i] == True: infect[i] = False
        self.state[:,0] = infect
        # Track how long each currently infected agent has been infected.
        self.state[:,2][infect==1] += 1
    def updateRemove(self):
        """Recover agents infected for at least rmT steps, w.p. rmP."""
        infect = self.state[:,0]==1
        infectPeriodbol = self.state[:,2]*infect >= self.rmT
        potentialP = np.random.uniform(0,1,self.sizHuman)
        newRmvbol = potentialP*infectPeriodbol >= (1-self.rmP)
        self.state[:,0][newRmvbol] = 0
        self.state[:,1][newRmvbol] = 1
    def simulate(self):
        """Run the simulation loop, redrawing (and optionally saving) frames."""
        while self.t < self.simTime:
            if self.t%1 == 0:
                image = self.updatePlot()
                if self.save:
                    self.images.append(image)
            self.updateLocSpd()
            self.updateInfect()
            self.updateRemove()
if __name__ == '__main__':
    # Command-line interface for the SIR simulation.
    parser = argparse.ArgumentParser(description='Suceptible-Infectious-Removed Model')
    for flag, arg_type, default, help_text in (
            ('--infR', float, .2, 'Infectious Radius'),
            ('--infP', float, .05, 'Infectious Probability'),
            ('--rmT', int, 50, 'Recovered Period'),
            ('--rmP', float, .1, 'Recovered probability'),
            ('--simT', int, 100000, 'simulation epochs'),
            ('--save', bool, False, 'save mode'),
            ('--save-dir', str, '/home/yui/Documents/outputs/', 'save path'),
            ('--save-name', str, 'SIR.gif', 'save gif name')):
        parser.add_argument(flag, type=arg_type, default=default, help=help_text)
    args = parser.parse_args()
    # Build and run the model with the epidemic parameters from the CLI.
    modelCov = model(infR=args.infR, infP=args.infP,
                     rmT=args.rmT, rmP=args.rmP, simT=args.simT)
    modelCov.simulate()
|
[
"numpy.sum",
"argparse.ArgumentParser",
"matplotlib.pyplot.figure",
"numpy.random.randint",
"sys.stdout.flush",
"numpy.linalg.norm",
"numpy.random.normal",
"matplotlib.pyplot.gca",
"imageio.mimsave",
"matplotlib.pyplot.show",
"numba.jitclass",
"matplotlib.pyplot.ylim",
"matplotlib.pyplot.ion",
"matplotlib.pyplot.ylabel",
"sys.exit",
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.xlim",
"numpy.random.uniform",
"numpy.zeros",
"matplotlib.pyplot.xlabel"
] |
[((674, 688), 'numba.jitclass', 'jitclass', (['spec'], {}), '(spec)\n', (682, 688), False, 'from numba import jit, jitclass, prange\n'), ((251, 269), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (267, 269), False, 'import sys\n'), ((5019, 5093), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Suceptible-Infectious-Removed Model"""'}), "(description='Suceptible-Infectious-Removed Model')\n", (5042, 5093), False, 'import argparse\n'), ((419, 430), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (427, 430), False, 'import sys\n'), ((1436, 1464), 'numpy.zeros', 'np.zeros', (['(self.sizHuman, 3)'], {}), '((self.sizHuman, 3))\n', (1444, 1464), True, 'import numpy as np\n'), ((1569, 1596), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(10, 5)'}), '(figsize=(10, 5))\n', (1579, 1596), True, 'import matplotlib.pyplot as plt\n'), ((1690, 1699), 'matplotlib.pyplot.ion', 'plt.ion', ([], {}), '()\n', (1697, 1699), True, 'import matplotlib.pyplot as plt\n'), ((1936, 1945), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (1943, 1945), True, 'import matplotlib.pyplot as plt\n'), ((1981, 2001), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(1)'], {}), '(1, 2, 1)\n', (1992, 2001), True, 'import matplotlib.pyplot as plt\n'), ((2451, 2478), 'matplotlib.pyplot.xlim', 'plt.xlim', (['[0, self.sizGrid]'], {}), '([0, self.sizGrid])\n', (2459, 2478), True, 'import matplotlib.pyplot as plt\n'), ((2486, 2513), 'matplotlib.pyplot.ylim', 'plt.ylim', (['[0, self.sizGrid]'], {}), '([0, self.sizGrid])\n', (2494, 2513), True, 'import matplotlib.pyplot as plt\n'), ((2577, 2597), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(2)'], {}), '(1, 2, 2)\n', (2588, 2597), True, 'import matplotlib.pyplot as plt\n'), ((3047, 3067), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Number"""'], {}), "('Number')\n", (3057, 3067), True, 'import matplotlib.pyplot as plt\n'), ((3076, 3100), 'matplotlib.pyplot.xlabel', 
'plt.xlabel', (['"""Iterations"""'], {}), "('Iterations')\n", (3086, 3100), True, 'import matplotlib.pyplot as plt\n'), ((3197, 3207), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3205, 3207), True, 'import matplotlib.pyplot as plt\n'), ((4495, 4533), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(1)', 'self.sizHuman'], {}), '(0, 1, self.sizHuman)\n', (4512, 4533), True, 'import numpy as np\n'), ((328, 393), 'imageio.mimsave', 'imageio.mimsave', (['(obj.save_dir + obj.save_name)', 'obj.images'], {'fps': '(30)'}), '(obj.save_dir + obj.save_name, obj.images, fps=30)\n', (343, 393), False, 'import imageio\n'), ((1162, 1205), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(1)', '(self.sizHuman, 2)'], {}), '(0, 1, (self.sizHuman, 2))\n', (1179, 1205), True, 'import numpy as np\n'), ((1256, 1298), 'numpy.random.normal', 'np.random.normal', (['(0)', '(1)', '(self.sizHuman, 2)'], {}), '(0, 1, (self.sizHuman, 2))\n', (1272, 1298), True, 'import numpy as np\n'), ((2632, 2650), 'numpy.sum', 'np.sum', (['susceptBol'], {}), '(susceptBol)\n', (2638, 2650), True, 'import numpy as np\n'), ((2651, 2668), 'numpy.sum', 'np.sum', (['infectBol'], {}), '(infectBol)\n', (2657, 2668), True, 'import numpy as np\n'), ((2669, 2683), 'numpy.sum', 'np.sum', (['rmvBol'], {}), '(rmvBol)\n', (2675, 2683), True, 'import numpy as np\n'), ((3933, 3983), 'numpy.linalg.norm', 'np.linalg.norm', (['(self.loc - infectLoc[i, :])'], {'axis': '(1)'}), '(self.loc - infectLoc[i, :], axis=1)\n', (3947, 3983), True, 'import numpy as np\n'), ((4050, 4088), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(1)', 'self.sizHuman'], {}), '(0, 1, self.sizHuman)\n', (4067, 4088), True, 'import numpy as np\n'), ((1483, 1518), 'numpy.random.randint', 'np.random.randint', (['(0)', 'self.sizHuman'], {}), '(0, self.sizHuman)\n', (1500, 1518), True, 'import numpy as np\n')]
|
# coding: utf-8
"""
FlashBlade REST API Client
A lightweight client for FlashBlade REST API 2.0, developed by Pure Storage, Inc. (http://www.purestorage.com/).
OpenAPI spec version: 2.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flashblade.FB_2_0 import models
class BucketReplicaLink(object):
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Generated mapping: attribute name -> swagger type string.
    swagger_types = {
        'id': 'str',
        'direction': 'Direction',
        'lag': 'int',
        'status_details': 'str',
        'local_bucket': 'FixedReference',
        'paused': 'bool',
        'recovery_point': 'int',
        'remote': 'FixedReference',
        'remote_bucket': 'FixedReferenceNameOnly',
        'remote_credentials': 'Reference',
        'status': 'str'
    }
    # Generated mapping: attribute name -> JSON key (identity for this model).
    attribute_map = {
        'id': 'id',
        'direction': 'direction',
        'lag': 'lag',
        'status_details': 'status_details',
        'local_bucket': 'local_bucket',
        'paused': 'paused',
        'recovery_point': 'recovery_point',
        'remote': 'remote',
        'remote_bucket': 'remote_bucket',
        'remote_credentials': 'remote_credentials',
        'status': 'status'
    }
    # No constructor arguments are mandatory for this model.
    required_args = {
    }
    def __init__(
        self,
        id=None,  # type: str
        direction=None,  # type: models.Direction
        lag=None,  # type: int
        status_details=None,  # type: str
        local_bucket=None,  # type: models.FixedReference
        paused=None,  # type: bool
        recovery_point=None,  # type: int
        remote=None,  # type: models.FixedReference
        remote_bucket=None,  # type: models.FixedReferenceNameOnly
        remote_credentials=None,  # type: models.Reference
        status=None,  # type: str
    ):
        """
        Keyword args:
            id (str): A non-modifiable, globally unique ID chosen by the system.
            direction (Direction)
            lag (int): Duration in milliseconds that represents how far behind the replication target is from the source. This is the time difference between current time and `recovery_point`.
            status_details (str): Detailed information about the status of the replica link when it is unhealthy.
            local_bucket (FixedReference): Reference to a local bucket.
            paused (bool): Is the replica link paused?
            recovery_point (int): Time, in milliseconds since UNIX epoch, where all object changes before this time are guaranteed to have been replicated. Changes after this time may have been replicated.
            remote (FixedReference): Reference to the associated remote, which can either be a `target` or remote `array`. If it is an `array`, then the `resource-type` field will not be populated.
            remote_bucket (FixedReferenceNameOnly): Reference to a remote bucket.
            remote_credentials (Reference): Reference to a remote-credentials object to access the remote bucket.
            status (str): Status of the replica link. Values include `replicating`, `paused`, and `unhealthy`.
        """
        # Only assign fields that were explicitly provided; unset fields
        # remain class-level Property placeholders (see __getattribute__).
        if id is not None:
            self.id = id
        if direction is not None:
            self.direction = direction
        if lag is not None:
            self.lag = lag
        if status_details is not None:
            self.status_details = status_details
        if local_bucket is not None:
            self.local_bucket = local_bucket
        if paused is not None:
            self.paused = paused
        if recovery_point is not None:
            self.recovery_point = recovery_point
        if remote is not None:
            self.remote = remote
        if remote_bucket is not None:
            self.remote_bucket = remote_bucket
        if remote_credentials is not None:
            self.remote_credentials = remote_credentials
        if status is not None:
            self.status = status
    def __setattr__(self, key, value):
        # Reject attributes outside the generated schema so typos fail loudly.
        if key not in self.attribute_map:
            raise KeyError("Invalid key `{}` for `BucketReplicaLink`".format(key))
        self.__dict__[key] = value
    def __getattribute__(self, item):
        # Unset fields are class-level Property placeholders; surface them
        # to callers as None instead of leaking the placeholder object.
        value = object.__getattribute__(self, item)
        if isinstance(value, Property):
            return None
        else:
            return value
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        # Recursively convert nested models/lists/dicts via their to_dict().
        for attr, _ in six.iteritems(self.swagger_types):
            if hasattr(self, attr):
                value = getattr(self, attr)
                if isinstance(value, list):
                    result[attr] = list(map(
                        lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                        value
                    ))
                elif hasattr(value, "to_dict"):
                    result[attr] = value.to_dict()
                elif isinstance(value, dict):
                    result[attr] = dict(map(
                        lambda item: (item[0], item[1].to_dict())
                        if hasattr(item[1], "to_dict") else item,
                        value.items()
                    ))
                else:
                    result[attr] = value
        if issubclass(BucketReplicaLink, dict):
            for key, value in self.items():
                result[key] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, BucketReplicaLink):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
|
[
"six.iteritems"
] |
[((4759, 4792), 'six.iteritems', 'six.iteritems', (['self.swagger_types'], {}), '(self.swagger_types)\n', (4772, 4792), False, 'import six\n')]
|
#!/usr/bin/env python3
import rospy
import std_msgs.msg
from geometry_msgs.msg import Twist
from sensor_msgs.msg import RegionOfInterest as ROI
import numpy as np
w = 640  # frame width in pixels
h = 640  # frame height in pixels
pid_w = [0.5, 0, 0]  # pid parameters of yaw channel
pid_h = [0.8, 0, 0]  # pid parameters of up channel
pid_f = [0.8, 0, 0]  # pid parameters of forward channel


def callback(roi):
    """Handle an incoming RegionOfInterest message.

    NOTE(review): the original script passed an undefined `callback` to
    rospy.Subscriber, raising NameError at startup. This placeholder fixes
    the crash; the intended PID control (presumably computing a Twist from
    the ROI offset using pid_w/pid_h/pid_f and publishing it on cmd_vel)
    still needs to be implemented -- TODO confirm intended behavior.
    """
    pass


if __name__ == '__main__':
    rospy.init_node('pid', anonymous=True)
    sub = rospy.Subscriber("roi", ROI, callback)
    pub = rospy.Publisher('cmd_vel', Twist, queue_size=1)
    rospy.spin()
|
[
"rospy.spin",
"rospy.Publisher",
"rospy.Subscriber",
"rospy.init_node"
] |
[((375, 413), 'rospy.init_node', 'rospy.init_node', (['"""pid"""'], {'anonymous': '(True)'}), "('pid', anonymous=True)\n", (390, 413), False, 'import rospy\n'), ((424, 462), 'rospy.Subscriber', 'rospy.Subscriber', (['"""roi"""', 'ROI', 'callback'], {}), "('roi', ROI, callback)\n", (440, 462), False, 'import rospy\n'), ((473, 520), 'rospy.Publisher', 'rospy.Publisher', (['"""cmd_vel"""', 'Twist'], {'queue_size': '(1)'}), "('cmd_vel', Twist, queue_size=1)\n", (488, 520), False, 'import rospy\n'), ((525, 537), 'rospy.spin', 'rospy.spin', ([], {}), '()\n', (535, 537), False, 'import rospy\n')]
|
"""
Kubernetes cluster manager module that provides functionality to schedule jobs as well
as manage their state in the cluster.
"""
import shlex
from kubernetes import client as k_client
from kubernetes import config as k_config
from kubernetes.client.rest import ApiException
from .abstractmgr import AbstractManager, ManagerException
class KubernetesManager(AbstractManager):
    def __init__(self, config_dict=None):
        """
        Initialize the manager and the Kubernetes API clients.

        Loads in-cluster configuration, so this must run inside a pod
        with a service account mounted.
        """
        super().__init__(config_dict)
        k_config.load_incluster_config()
        # Core API client (pods, logs) and batch client (jobs).
        self.kube_client = k_client.CoreV1Api()
        self.kube_v1_batch_client = k_client.BatchV1Api()
def schedule_job(self, image, command, name, resources_dict, mountdir=None):
"""
Schedule a new job and return the job object.
"""
job_instance = self.create_job(image, command, name, resources_dict, mountdir)
job = self.submit_job(job_instance)
return job
def get_job(self, name):
"""
Get a previously scheduled job object.
"""
job_namespace = self.config.get('JOB_NAMESPACE')
try:
job = self.kube_v1_batch_client.read_namespaced_job(name, job_namespace)
except ApiException as e:
status_code = 503 if e.status == 500 else e.status
raise ManagerException(str(e), status_code=status_code)
return job
def get_job_logs(self, job):
"""
Get the logs string from a previously scheduled job object.
"""
# TODO: Think of a better way to abstract out logs in case of multiple pods running parallelly
logs = ''
pods = self.get_job_pods(job.metadata.name)
for pod_item in pods.items:
pod_name = pod_item.metadata.name
logs += self.get_pod_log(pod_name)
return logs
    def get_job_info(self, job):
        """
        Get the job's info dictionary for a previously scheduled job object.

        Derives a coarse status for the job:
        'notstarted' -> 'finishedWithError' | 'finishedSuccessfully' |
        'started' | 'undefined', along with a human-readable message.
        """
        info = super().get_job_info(job)
        status = 'notstarted'
        message = 'task not available yet'
        conditions = job.status.conditions
        failed = job.status.failed
        succeeded = job.status.succeeded
        completion_time = job.status.completion_time
        # All three None means the job has not reported any state yet;
        # keep the 'notstarted' defaults in that case.
        if not (conditions is None and failed is None and succeeded is None):
            if conditions:
                # A 'Failed' condition takes precedence over everything else.
                for condition in conditions:
                    if condition.type == 'Failed' and condition.status == 'True':
                        message = condition.message
                        status = 'finishedWithError'
                        break
            if status == 'notstarted':
                # No failure condition found; classify by completion/activity.
                if completion_time and succeeded:
                    message = 'finished'
                    status = 'finishedSuccessfully'
                elif job.status.active:
                    message = 'running'
                    status = 'started'
                else:
                    message = 'inactive'
                    status = 'undefined'
        info['name'] = job.metadata.name
        info['image'] = job.spec.template.spec.containers[0].image
        info['cmd'] = ' '.join(job.spec.template.spec.containers[0].command)
        if completion_time is not None:
            info['timestamp'] = completion_time.isoformat()
        info['message'] = message
        info['status'] = status
        return info
def remove_job(self, job):
"""
Remove a previously scheduled job.
"""
job_namespace = self.config.get('JOB_NAMESPACE')
body = k_client.V1DeleteOptions(propagation_policy='Background')
self.kube_v1_batch_client.delete_namespaced_job(job.metadata.name, body=body,
namespace=job_namespace)
def create_job(self, image, command, name, resources_dict, mountdir=None):
        """
        Create and return a new job instance (not yet submitted).

        Args:
            image: container image to run.
            command: command string executed inside the container.
            name: name given to the job and its container.
            resources_dict: dict with 'number_of_workers', 'cpu_limit'
                (millicores), 'memory_limit' (Mi) and 'gpu_limit' entries.
            mountdir: host or NFS path mounted at /share in the container.
        """
        number_of_workers = resources_dict.get('number_of_workers')
        cpu_limit = str(resources_dict.get('cpu_limit')) + 'm'
        memory_limit = str(resources_dict.get('memory_limit')) + 'Mi'
        gpu_limit = resources_dict.get('gpu_limit')
        # configure pod's containers
        requests = {'memory': '150Mi', 'cpu': '250m'}
        limits = {'memory': memory_limit, 'cpu': cpu_limit}
        env = []
        if gpu_limit > 0:
            # ref: https://kubernetes.io/docs/tasks/manage-gpus/scheduling-gpus/
            limits['nvidia.com/gpu'] = gpu_limit
            # BUG FIX: a stray trailing comma previously turned `env` into a
            # one-element tuple wrapping this list; V1Container expects a
            # plain list of V1EnvVar objects.
            env = [k_client.V1EnvVar(name='NVIDIA_VISIBLE_DEVICES', value='all'),
                   k_client.V1EnvVar(name='NVIDIA_DRIVER_CAPABILITIES',
                                     value='compute,utility'),
                   k_client.V1EnvVar(name='NVIDIA_REQUIRE_CUDA', value='cuda>=9.0')]
        container = k_client.V1Container(
            name=name,
            image=image,
            env=env,
            command=shlex.split(command),
            # Run with a minimal security profile: no privilege escalation,
            # all capabilities dropped.
            security_context=k_client.V1SecurityContext(
                allow_privilege_escalation=False,
                capabilities=k_client.V1Capabilities(drop=['ALL'])
            ),
            resources=k_client.V1ResourceRequirements(limits=limits, requests=requests),
            volume_mounts=[k_client.V1VolumeMount(mount_path='/share',
                                                  name='storebase')]
        )
        # configure pod template's spec: the store base is mounted either
        # from the local host or from an NFS server, per configuration.
        storage_type = self.config.get('STORAGE_TYPE')
        if storage_type == 'host':
            volume = k_client.V1Volume(
                name='storebase',
                host_path=k_client.V1HostPathVolumeSource(path=mountdir)
            )
        else:
            volume = k_client.V1Volume(
                name='storebase',
                nfs=k_client.V1NFSVolumeSource(server=self.config.get('NFS_SERVER'),
                                               path=mountdir)
            )
        template = k_client.V1PodTemplateSpec(
            spec=k_client.V1PodSpec(restart_policy='Never',
                                    containers=[container],
                                    volumes=[volume])
        )
        # configure job's spec
        spec = k_client.V1JobSpec(
            parallelism=number_of_workers,
            completions=number_of_workers,
            backoff_limit=1,
            ttl_seconds_after_finished=86400,  # 24h
            active_deadline_seconds=43200,  # 12h
            template=template
        )
        # instantiate the job object
        job = k_client.V1Job(
            api_version='batch/v1',
            kind='Job',
            metadata=k_client.V1ObjectMeta(name=name),
            spec=spec)
        return job
def submit_job(self, job):
        """Submit *job* to the cluster and return the server-side job object.

        Raises ManagerException (carrying a client-facing status code) when
        the kubernetes API rejects the submission.
        """
        namespace = self.config.get('JOB_NAMESPACE')
        try:
            return self.kube_v1_batch_client.create_namespaced_job(
                body=job, namespace=namespace)
        except ApiException as exc:
            # Map an internal kubernetes 500 onto a 503 for our own API.
            code = exc.status if exc.status != 500 else 503
            raise ManagerException(str(exc), status_code=code)
def get_job_pods(self, name):
        """Return the pods created as part of the job called *name*.

        Pods are matched via the 'job-name' label kubernetes stamps on them.
        """
        return self.kube_client.list_namespaced_pod(
            self.config.get('JOB_NAMESPACE'),
            label_selector='job-name=' + name)
def get_pod_log(self, name, container_name=None):
        """Return the log text of pod *name* ('' when the API call fails).

        When *container_name* is given, only that container's log is fetched.
        """
        kwargs = {'name': name,
                  'namespace': self.config.get('JOB_NAMESPACE')}
        if container_name:
            kwargs['container'] = container_name
        try:
            return self.kube_client.read_namespaced_pod_log(**kwargs)
        except ApiException:
            # Pod (or container) not available: degrade to an empty log.
            return ''
def get_pod_status(self, name):
        """Return the kubernetes status object for pod *name*."""
        return self.kube_client.read_namespaced_pod_status(
            name=name,
            namespace=self.config.get('JOB_NAMESPACE'))
|
[
"kubernetes.client.V1HostPathVolumeSource",
"kubernetes.client.V1JobSpec",
"kubernetes.client.V1Capabilities",
"kubernetes.client.V1PodSpec",
"kubernetes.client.V1DeleteOptions",
"kubernetes.client.V1EnvVar",
"kubernetes.config.load_incluster_config",
"shlex.split",
"kubernetes.client.V1ObjectMeta",
"kubernetes.client.CoreV1Api",
"kubernetes.client.V1ResourceRequirements",
"kubernetes.client.V1VolumeMount",
"kubernetes.client.BatchV1Api"
] |
[((473, 505), 'kubernetes.config.load_incluster_config', 'k_config.load_incluster_config', ([], {}), '()\n', (503, 505), True, 'from kubernetes import config as k_config\n'), ((533, 553), 'kubernetes.client.CoreV1Api', 'k_client.CoreV1Api', ([], {}), '()\n', (551, 553), True, 'from kubernetes import client as k_client\n'), ((590, 611), 'kubernetes.client.BatchV1Api', 'k_client.BatchV1Api', ([], {}), '()\n', (609, 611), True, 'from kubernetes import client as k_client\n'), ((3548, 3605), 'kubernetes.client.V1DeleteOptions', 'k_client.V1DeleteOptions', ([], {'propagation_policy': '"""Background"""'}), "(propagation_policy='Background')\n", (3572, 3605), True, 'from kubernetes import client as k_client\n'), ((6202, 6392), 'kubernetes.client.V1JobSpec', 'k_client.V1JobSpec', ([], {'parallelism': 'number_of_workers', 'completions': 'number_of_workers', 'backoff_limit': '(1)', 'ttl_seconds_after_finished': '(86400)', 'active_deadline_seconds': '(43200)', 'template': 'template'}), '(parallelism=number_of_workers, completions=\n number_of_workers, backoff_limit=1, ttl_seconds_after_finished=86400,\n active_deadline_seconds=43200, template=template)\n', (6220, 6392), True, 'from kubernetes import client as k_client\n'), ((4935, 4955), 'shlex.split', 'shlex.split', (['command'], {}), '(command)\n', (4946, 4955), False, 'import shlex\n'), ((5168, 5233), 'kubernetes.client.V1ResourceRequirements', 'k_client.V1ResourceRequirements', ([], {'limits': 'limits', 'requests': 'requests'}), '(limits=limits, requests=requests)\n', (5199, 5233), True, 'from kubernetes import client as k_client\n'), ((5989, 6078), 'kubernetes.client.V1PodSpec', 'k_client.V1PodSpec', ([], {'restart_policy': '"""Never"""', 'containers': '[container]', 'volumes': '[volume]'}), "(restart_policy='Never', containers=[container], volumes=\n [volume])\n", (6007, 6078), True, 'from kubernetes import client as k_client\n'), ((6628, 6660), 'kubernetes.client.V1ObjectMeta', 'k_client.V1ObjectMeta', ([], {'name': 
'name'}), '(name=name)\n', (6649, 6660), True, 'from kubernetes import client as k_client\n'), ((4520, 4581), 'kubernetes.client.V1EnvVar', 'k_client.V1EnvVar', ([], {'name': '"""NVIDIA_VISIBLE_DEVICES"""', 'value': '"""all"""'}), "(name='NVIDIA_VISIBLE_DEVICES', value='all')\n", (4537, 4581), True, 'from kubernetes import client as k_client\n'), ((4602, 4679), 'kubernetes.client.V1EnvVar', 'k_client.V1EnvVar', ([], {'name': '"""NVIDIA_DRIVER_CAPABILITIES"""', 'value': '"""compute,utility"""'}), "(name='NVIDIA_DRIVER_CAPABILITIES', value='compute,utility')\n", (4619, 4679), True, 'from kubernetes import client as k_client\n'), ((4737, 4801), 'kubernetes.client.V1EnvVar', 'k_client.V1EnvVar', ([], {'name': '"""NVIDIA_REQUIRE_CUDA"""', 'value': '"""cuda>=9.0"""'}), "(name='NVIDIA_REQUIRE_CUDA', value='cuda>=9.0')\n", (4754, 4801), True, 'from kubernetes import client as k_client\n'), ((5262, 5323), 'kubernetes.client.V1VolumeMount', 'k_client.V1VolumeMount', ([], {'mount_path': '"""/share"""', 'name': '"""storebase"""'}), "(mount_path='/share', name='storebase')\n", (5284, 5323), True, 'from kubernetes import client as k_client\n'), ((5615, 5661), 'kubernetes.client.V1HostPathVolumeSource', 'k_client.V1HostPathVolumeSource', ([], {'path': 'mountdir'}), '(path=mountdir)\n', (5646, 5661), True, 'from kubernetes import client as k_client\n'), ((5093, 5130), 'kubernetes.client.V1Capabilities', 'k_client.V1Capabilities', ([], {'drop': "['ALL']"}), "(drop=['ALL'])\n", (5116, 5130), True, 'from kubernetes import client as k_client\n')]
|
# -*- coding: utf-8 -*-
"""Advent of Code 2020 - Day 15 - Rambunctious Recitation."""
import argparse
import pdb
import traceback
from re import findall
def extract_ints(line):
    """Return every (possibly negative) integer found in *line*, in order."""
    return list(map(int, findall(r"-?\d+", line)))
def find_offsets(values, target):
    """Return the indices at which *target* occurs in *values*."""
    return [idx for idx, value in enumerate(values) if value == target]
def solve(numbers, limit):
    """Play the elves' memory game and return the *limit*-th number spoken.

    `last_turn` maps each number to the turn index on which it was most
    recently spoken, excluding the number currently being considered.
    """
    last_turn = {value: turn for turn, value in enumerate(numbers[:-1])}
    current = numbers[-1]
    for turn in range(len(numbers) - 1, limit - 1):
        previous = last_turn.get(current)
        # A number never spoken before is followed by 0; otherwise by the
        # age of its last occurrence.
        next_spoken = 0 if previous is None else turn - previous
        last_turn[current] = turn
        current = next_spoken
    return current
if __name__ == "__main__":
    # CLI: a single optional positional argument naming the puzzle input.
    arg_parser = argparse.ArgumentParser(
        description="Advent of Code - 2020 - Day 15 - Rambunctious Recitation."
    )
    arg_parser.add_argument(
        "input",
        type=str,
        default="input.txt",
        nargs="?",
        help="The puzzle input. (Default %(default)s)",
    )
    options = arg_parser.parse_args()
    try:
        with open(options.input, "rt") as puzzle_file:
            numbers = [n for line in puzzle_file for n in extract_ints(line)]
        # Part 1 is the 2020th number spoken, part 2 the 30-millionth.
        print((solve(numbers, 2020), solve(numbers, 30_000_000)))
    except Exception:
        # On any failure, show the traceback and drop into the debugger.
        traceback.print_exc()
        pdb.post_mortem()
|
[
"traceback.print_exc",
"re.findall",
"pdb.post_mortem",
"argparse.ArgumentParser"
] |
[((730, 831), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Advent of Code - 2020 - Day 15 - Rambunctious Recitation."""'}), "(description=\n 'Advent of Code - 2020 - Day 15 - Rambunctious Recitation.')\n", (753, 831), False, 'import argparse\n'), ((208, 231), 're.findall', 'findall', (['"""-?\\\\d+"""', 'line'], {}), "('-?\\\\d+', line)\n", (215, 231), False, 'from re import findall\n'), ((1290, 1311), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (1309, 1311), False, 'import traceback\n'), ((1320, 1337), 'pdb.post_mortem', 'pdb.post_mortem', ([], {}), '()\n', (1335, 1337), False, 'import pdb\n')]
|
# -*- coding: utf-8 -*-
"""
services.payment
~~~~~~~~~~~~~~~~
Services for payments
"""
import falcon
import arrow
from .mongo import DService
from smpa.models.payment import Payment
class PaymentService(DService):
    """Service layer for creating and checking GOV.UK Pay payments."""

    __model__ = Payment

    def _raise_govpay_error(self, j):
        """Raise a 422 HTTPError built from a failed GovPay JSON body.

        GovPay errors look like::

            {
                "field": "amount",
                "code": "P0102",
                "description": "Invalid attribute value: amount. ..."
            }
        """
        response = {
            "success": False
        }
        if j.get('description', None) is not None:
            response["message"] = j.get('description')
        if j.get('code', None) is not None:
            response["code"] = j.get('code')
        raise falcon.HTTPError(falcon.HTTP_422, response)

    def check(self, id):
        """Checks the status of a payment on GovPay.

        On a 200 response the local payment record is refreshed with
        GovPay's JSON and returned; otherwise a 422 HTTPError is raised
        carrying GovPay's error details.
        """
        from smpa.app import govpay
        payment = _payments.get(str(id))
        # NOTE(review): assumes _payments.get returns a payment object for
        # any known id — confirm DService.get's behaviour for missing ids.
        if payment.payment_id is None:
            raise falcon.HTTPError(falcon.HTTP_404, 'Payment not found')
        rv = govpay.check_payment(payment.payment_id)
        j = rv.json()
        if rv.status_code != 200:
            self._raise_govpay_error(j)
        # Persist GovPay's latest view of the payment locally.
        return _payments.update(id=payment.id, json=j)

    def create(self, req, application_id):
        """Create a payment on GovPay for the given application.

        The method:
            1. Generates a unique reference number for the application.
            2. Creates a local Payment record first, so there is an id to
               hand to GovPay.
            3. Calls govpay.create_payment.
            4. On a 201, stores GovPay's JSON (including next_url) on the
               local Payment and returns it; otherwise raises a 422 with
               GovPay's error details.
        """
        from smpa.app import config, govpay
        from .application import _applications, _application_references
        ref = _application_references.next()
        application = _applications.get(application_id)
        application.reference = ref
        _applications.save(application)
        amount = config.PAYMENT_AMOUNT
        description = config.PAYMENT_DESCRIPTION
        user = req.context['user']
        user.export()
        # Create a local payment object first so we have an ID
        payment = self.new()
        payment.owner_id = str(user.id)
        payment.application_id = str(application_id)
        payment = _payments.save(payment)
        # Create the payment on GovPay
        rv = govpay.create_payment(
            amount=amount,
            description=description,
            reference=ref,
            application_id=application_id,
            payment_id=str(payment.id)
        )
        j = rv.json()
        if rv.status_code != 201:
            self._raise_govpay_error(j)
        payment = _payments.update(id=str(payment.id), json=j)
        # The URL the end user must visit to complete the payment.
        payment.next_url = j['_links']['next_url']['href']
        payment = _payments.save(payment)
        # Update the application with a reference
        application = _applications.get(str(application_id))
        application.reference = ref
        _applications.save(application)
        return payment
# Module-level singleton service instance, shared by the app's resources
# (and used by the service's own methods above).
_payments = PaymentService()
|
[
"smpa.app.govpay.check_payment",
"falcon.HTTPError"
] |
[((549, 589), 'smpa.app.govpay.check_payment', 'govpay.check_payment', (['payment.payment_id'], {}), '(payment.payment_id)\n', (569, 589), False, 'from smpa.app import config, govpay\n'), ((480, 534), 'falcon.HTTPError', 'falcon.HTTPError', (['falcon.HTTP_404', '"""Payment not found"""'], {}), "(falcon.HTTP_404, 'Payment not found')\n", (496, 534), False, 'import falcon\n'), ((1052, 1095), 'falcon.HTTPError', 'falcon.HTTPError', (['falcon.HTTP_422', 'response'], {}), '(falcon.HTTP_422, response)\n', (1068, 1095), False, 'import falcon\n'), ((3342, 3385), 'falcon.HTTPError', 'falcon.HTTPError', (['falcon.HTTP_422', 'response'], {}), '(falcon.HTTP_422, response)\n', (3358, 3385), False, 'import falcon\n')]
|
from math import pi
from bokeh.plotting import figure, show, output_file

# Render three ovals on a small figure and write the result to ovals.html.
output_file('ovals.html')

plot = figure(width=400, height=400)
plot.oval(
    x=[1, 2, 3],
    y=[1, 2, 3],
    width=0.2,
    height=40,
    color="#CAB2D6",
    angle=pi / 3,
    height_units="screen",
)

show(plot)
|
[
"bokeh.plotting.output_file",
"bokeh.plotting.figure",
"bokeh.plotting.show"
] |
[((74, 99), 'bokeh.plotting.output_file', 'output_file', (['"""ovals.html"""'], {}), "('ovals.html')\n", (85, 99), False, 'from bokeh.plotting import figure, show, output_file\n'), ((105, 134), 'bokeh.plotting.figure', 'figure', ([], {'width': '(400)', 'height': '(400)'}), '(width=400, height=400)\n', (111, 134), False, 'from bokeh.plotting import figure, show, output_file\n'), ((250, 257), 'bokeh.plotting.show', 'show', (['p'], {}), '(p)\n', (254, 257), False, 'from bokeh.plotting import figure, show, output_file\n')]
|
import numpy as np
def custom_image_generator(generator, directory, class_names, batch_size=16, target_size=(512, 512),
                           color_mode="grayscale", class_mode="binary", mean=None, std=None, cam=False, verbose=0):
    """
    Wrap a keras-style ImageDataGenerator so it yields normalized image
    batches together with multi-label one-hot targets.

    In paper chap 3.1:
    we downscale the images to 1024x1024 and normalize based
    on the mean and standard deviation of images in the
    ImageNet training set

    Yields:
        (x, [y1 .. yK]) where x is the normalized batch and each yi is the
        (batch,) target vector of class i; when ``cam`` is True the raw,
        un-normalized batch is appended as a third element.
    """
    if mean is None:
        mean = np.mean(np.array([0.485, 0.456, 0.406]))
    if std is None:
        # BUG FIX: ImageNet's per-channel std is [0.229, 0.224, 0.225]; the
        # previous default contained a typo (0.1024) for the green channel.
        std = np.mean(np.array([0.229, 0.224, 0.225]))
    iterator = generator.flow_from_directory(directory=directory,
                                             target_size=target_size,
                                             color_mode=color_mode,
                                             class_mode=class_mode,
                                             batch_size=batch_size)
    # class index -> "xxxx|xxxx" label string
    class_indices_reversed = dict((v, k) for k, v in iterator.class_indices.items())
    stepi = 1
    for batch_x, batch_y in iterator:
        batch_y_multilabel = []
        if verbose > 0:
            print("** Generating batch for step {} {}".format(stepi, batch_y))
        stepi += 1
        for i in range(batch_y.shape[0]):
            # class index -> "xxxx|xxxx" -> one-hot vector
            one_hot_vec = label2vec(class_indices_reversed[batch_y[i]], class_names)
            if verbose > 1:
                print(one_hot_vec)
            batch_y_multilabel.append(one_hot_vec)
        # now shape is (batch#, num_classes)
        batch_y_multilabel = np.array(batch_y_multilabel)
        # make the output [y1, y2, ... yK] where each yi has shape (batch#,)
        if cam:
            yield (batch_x - mean) / std, [np.array(y) for y in batch_y_multilabel.T.tolist()], batch_x
        else:
            yield (batch_x - mean) / std, [np.array(y) for y in batch_y_multilabel.T.tolist()]
def label2vec(label, class_names):
    """One-hot encode a '|'-separated *label* string against *class_names*.

    The special label "No Finding" maps to the all-zeros vector.
    """
    vec = np.zeros(len(class_names))
    if label != "No Finding":
        for part in label.split("|"):
            vec[class_names.index(part)] = 1
    return vec
|
[
"numpy.array"
] |
[((1637, 1665), 'numpy.array', 'np.array', (['batch_y_multilabel'], {}), '(batch_y_multilabel)\n', (1645, 1665), True, 'import numpy as np\n'), ((465, 496), 'numpy.array', 'np.array', (['[0.485, 0.456, 0.406]'], {}), '([0.485, 0.456, 0.406])\n', (473, 496), True, 'import numpy as np\n'), ((540, 572), 'numpy.array', 'np.array', (['[0.229, 0.1024, 0.225]'], {}), '([0.229, 0.1024, 0.225])\n', (548, 572), True, 'import numpy as np\n'), ((1802, 1813), 'numpy.array', 'np.array', (['y'], {}), '(y)\n', (1810, 1813), True, 'import numpy as np\n'), ((1920, 1931), 'numpy.array', 'np.array', (['y'], {}), '(y)\n', (1928, 1931), True, 'import numpy as np\n')]
|
# ----------------------------------------------------------------------------
# Copyright (c) 2021, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
from q2_types.feature_data import DNAFASTAFormat
from qiime2.plugin import model
from ..plugin_setup import plugin
# Directory format holding exactly one FASTA file per MAG
# (metagenome-assembled genome), named e.g. 'mag1.fasta' or 'mag42.fa'.
MAGSequencesDirFmt = model.SingleFileDirectoryFormat(
    'MAGSequencesDirFmt', r'mag[0-9]+\.(fa|fasta)$', DNAFASTAFormat)

# Make the new format known to the QIIME 2 plugin machinery.
plugin.register_formats(
    MAGSequencesDirFmt
)
|
[
"qiime2.plugin.model.SingleFileDirectoryFormat"
] |
[((484, 584), 'qiime2.plugin.model.SingleFileDirectoryFormat', 'model.SingleFileDirectoryFormat', (['"""MAGSequencesDirFmt"""', '"""mag[0-9]+\\\\.(fa|fasta)$"""', 'DNAFASTAFormat'], {}), "('MAGSequencesDirFmt',\n 'mag[0-9]+\\\\.(fa|fasta)$', DNAFASTAFormat)\n", (515, 584), False, 'from qiime2.plugin import model\n')]
|
import sys
import argparse
from typing import Any
from balsa import verbose_arg_string, delete_existing_arg_string, log_dir_arg_string
from pyship import __name__, __version__, DEFAULT_DIST_DIR_NAME
def get_arguments() -> Any:
    """Parse and return pyship's command-line arguments.

    When --version is given, prints the package name and version and exits
    instead of returning.
    """
    parser = argparse.ArgumentParser(prog=__name__, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument("-n", "--name", help="name of target application to ship (if not provided elsewhere such as in pyproject.toml at: [project] name=<name>")
    # Cloud credentials for uploading build artifacts.
    parser.add_argument("-p", "--profile", help="cloud profile")
    parser.add_argument("-i", "--id", help="cloud id")
    parser.add_argument("-s", "--secret", help="cloud secret")
    parser.add_argument("--noupload", default=False, action="store_true", help="do not upload files to the cloud (e.g. installer and clip files)")
    parser.add_argument("-d", "--dist", default=DEFAULT_DIST_DIR_NAME, help="distribution directory for this target application (i.e. directory that contains the wheel)")
    parser.add_argument(
        "-f",
        "--findlinks",
        nargs="+",
        default=[],
        help="one or more directories to pass to pip when creating the clip (similar to pip's --find-links except only specify once for multiple directories)",
    )
    parser.add_argument("--version", action="store_true", help="display version")
    # Logging flags: the long option names are shared with (and come from)
    # the balsa logging package.
    parser.add_argument("-v", f"--{verbose_arg_string}", action="store_true", help="increase output verbosity")
    parser.add_argument(f"--{delete_existing_arg_string}", action="store_true", help="delete log prior to running")
    parser.add_argument(f"--{log_dir_arg_string}", help="force a particular log directory (default is appdir's log directory)")
    args = parser.parse_args()
    if args.version:
        print(f"{__name__} {__version__}")
        sys.exit()
    return args
|
[
"argparse.ArgumentParser",
"sys.exit"
] |
[((245, 344), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '__name__', 'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), '(prog=__name__, formatter_class=argparse.\n ArgumentDefaultsHelpFormatter)\n', (268, 344), False, 'import argparse\n'), ((1816, 1826), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1824, 1826), False, 'import sys\n')]
|
from dataclasses import dataclass
from typing import Callable, Any
from rxbp.observable import Observable
from rxbp.observerinfo import ObserverInfo
from rxbp.observers.mapobserver import MapObserver
@dataclass
class MapObservable(Observable):
    """Observable that applies ``func`` to every element emitted by ``source``."""

    source: Observable            # upstream observable being mapped over
    func: Callable[[Any], Any]    # mapping applied to each emitted element
    # stack: List[FrameSummary]

    def observe(self, observer_info: ObserverInfo):
        # Subscribe to the upstream source, wrapping the downstream observer
        # in a MapObserver so each element is passed through self.func first.
        return self.source.observe(observer_info.copy(
            observer=MapObserver(
                source=observer_info.observer,
                func=self.func,
            ),
        ))

        # except Exception:
        #     raise Exception(to_operator_exception(
        #         message=f'something went wrong when observing {self.source}',
        #         stack=self.stack,
        #     ))
|
[
"rxbp.observers.mapobserver.MapObserver"
] |
[((462, 520), 'rxbp.observers.mapobserver.MapObserver', 'MapObserver', ([], {'source': 'observer_info.observer', 'func': 'self.func'}), '(source=observer_info.observer, func=self.func)\n', (473, 520), False, 'from rxbp.observers.mapobserver import MapObserver\n')]
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from os import path as P

from setuptools import setup

import lxltools

# Runtime dependencies are maintained in requirements.txt next to this file.
with open(P.join(P.dirname(__file__), 'requirements.txt')) as fp:
    requirements = [l.rstrip() for l in fp.readlines()]

# Read the long description up front. (The original inlined an open() call in
# the setup() arguments, leaving the README file handle unclosed.)
with open("README.md") as readme:
    long_description = """
    %s""" % readme.read()

setup(
    name = "lxltools",
    version = lxltools.__version__,
    description = """LIBRISXL Linked Data Database""",
    long_description = long_description,
    classifiers = [
        "Development Status :: 3 - Alpha",
        "Intended Audience :: Developers",
        "Intended Audience :: Information Technology",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Topic :: Software Development :: Libraries",
        "Topic :: Software Development :: Libraries :: Python Modules"
    ],
    keywords = "linkeddata database json rdf",
    platforms = ["any"],
    packages = ["lxltools"],
    include_package_data = True,
    install_requires = requirements,
    test_suite = 'nose.collector'
)
|
[
"os.path.dirname"
] |
[((135, 154), 'os.path.dirname', 'P.dirname', (['__file__'], {}), '(__file__)\n', (144, 154), True, 'from os import path as P\n')]
|
import pandas as pd
import os
from models.T5MultiTask.t5_model import T5Model_MultiTask
import torch
# Share tensors between worker processes via the filesystem instead of file
# descriptors (avoids hitting the per-process open-file-descriptor limit).
torch.multiprocessing.set_sharing_strategy('file_system')
'''
Function to train the T5 model in a multi-task and dataset scenario
Parameters
------------
train_task_dict (dict): dataset dictonary containing every datasets avaliable for each training task
model_name (str): The T5 pre-trained model name (t5-base, t5-small, t5-large)
model_args (dict): The training parameters used while training the T5 model
eval_task_dict (dict): dataset dictonary containing every datasets avaliable for each evaluation task
'''
def T5_Multi_Task_Train(train_task_dict, model_name, model_args, eval_task_dict=None):
    """Train the T5 model in a multi-task and multi-dataset scenario.

    Args:
        train_task_dict (dict): datasets available for each training task.
        model_name (str): T5 pre-trained model name (t5-base, t5-small, t5-large).
        model_args (dict): training parameters used while training the model.
        eval_task_dict (dict, optional): datasets for each evaluation task.
    """
    model = T5Model_MultiTask("t5", model_name, args = model_args)
    model.train_model(train_task_dict, eval_data = eval_task_dict)
def main():
    """Assemble the task/dataset dictionaries and hyper-parameters, then
    train T5 in a multi-task setting."""
    # dataset dictionary containing every dataset available for each training task
    # NOTE(review): the key "ner_dosgae" looks like a typo of "ner_dosage",
    # but it is the key used as-is downstream — confirm before renaming.
    train_task_dict = {"assert_ade": ["assert_ade/train_assert_ade_smm4h_task1.csv",
                                    "assert_ade/train_assert_ade_smm4h_task2.csv",
                                    "assert_ade/train_assert_ade_cadec.csv",
                                    "assert_ade/train_assert_ade_ade_corpus.csv"],
                    "ner_ade": ["ner_ade/train_ner_ade_smm4h_task2.csv",
                                "ner_ade/train_ner_ade_cadec.csv",
                                "ner_ade/train_ner_ade_ade_corpus.csv"],
                    "ner_drug": ["ner_drug/train_ner_drug_smm4h_task2.csv",
                                "ner_drug/train_ner_drug_cadec.csv",
                                "ner_drug/train_ner_drug_ade_corpus.csv"],
                    "ner_dosgae": ["ner_dosage/train_ner_dosage_ade_corpus.csv"]
                    }
    '''
    eval_task_dict = {"assert_ade": ["assert_ade/eval_assert_ade_smm4h_task1.csv",
                                    "assert_ade/eval_assert_ade_smm4h_task2.csv",
                                    "assert_ade/eval_assert_ade_cadec.csv",
                                    "assert_ade/eval_assert_ade_ade_corpus.csv"],
                    "ner_ade": ["ner_ade/eval_ner_ade_smm4h_task2.csv",
                                "ner_ade/eval_ner_ade_cadec.csv",
                                "ner_ade/eval_ner_ade_ade_corpus.csv"],
                    "ner_drug": ["ner_drug/eval_ner_drug_smm4h_task2.csv",
                                "ner_drug/eval_ner_drug_cadec.csv",
                                "ner_drug/eval_ner_drug_ade_corpus.csv"],
                    "ner_dosgae": ["ner_dosage/eval_ner_dosage_ade_corpus.csv"]
                    }
    '''
    '''
    Model hyper-parameters, for detailed list of hyperparamets checkout model_args.py and global_args.py in /models/config
    The paramters description can also be found on https://huggingface.co/transformers/main_classes/trainer.html#transformers.TrainingArguments
    '''
    model_args = {
        "balancing_approach": "TDB",  # choose from TDB or TB
        "mixing_strategy": "TS",  # choose from Proportional Mixing (PM) or Temperature Scaling (TS)
        "temperature_value": 2,  # temperature value if using TS
        "dataset_path": os.getcwd() + '/data/combiner_data/',  # path to where the dataset is located
        "max_seq_length": 500,
        "train_batch_size": 8,
        "eval_batch_size": 8,
        "num_train_epochs": 5,
        "evaluate_during_training": False,
        #"evaluate_during_training_steps": 5000,
        #"evaluate_during_training_verbose": True,
        "max_length": 150,
        "learning_rate": 1e-4,
        "n_gpu": 4,
        "evaluate_generated_text": True,
        "gradient_accumulation_steps": 1,
        "use_multiprocessing": False,
        "fp16": True,  # mixed-precision training
        "save_steps": -1,
        "save_eval_checkpoints": False,
        "save_model_every_epoch": False, "reprocess_input_data": True,
        "overwrite_output_dir": True,
        #"weight_decay": 0.01,
        #"warmup_steps": 600,
        "wandb_project": None
    }
    # Training the T5 model in a multi-task setting using the arguments defined above
    T5_Multi_Task_Train(train_task_dict, "t5-base", model_args)

if __name__ == "__main__":
    main()
|
[
"os.getcwd",
"torch.multiprocessing.set_sharing_strategy",
"models.T5MultiTask.t5_model.T5Model_MultiTask"
] |
[((107, 164), 'torch.multiprocessing.set_sharing_strategy', 'torch.multiprocessing.set_sharing_strategy', (['"""file_system"""'], {}), "('file_system')\n", (149, 164), False, 'import torch\n'), ((736, 788), 'models.T5MultiTask.t5_model.T5Model_MultiTask', 'T5Model_MultiTask', (['"""t5"""', 'model_name'], {'args': 'model_args'}), "('t5', model_name, args=model_args)\n", (753, 788), False, 'from models.T5MultiTask.t5_model import T5Model_MultiTask\n'), ((3403, 3414), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (3412, 3414), False, 'import os\n')]
|
# author: <NAME>, Early October
# version: 6.1
import PySimpleGUI as sg
import cx_Oracle
from Add_New_Classes import run_program as add
from Edit_Classes import run_program as edit
from Grades_Chart import run_program as access
def run_program():  # the function that runs everything
    """Display the class-selection window for the EOM gradebook database and
    dispatch add / edit / delete / access actions until it is closed."""
    con = cx_Oracle.connect('EOM/EOM@127.0.0.1/xe')  # connects to the database
    cur = con.cursor(scrollable=True)  # object, used to execute SQL commands in python
    classes = []  # list, stores the names of the classes
    period = []  # list, stores the period of the classes
    year = []  # list, stores the active year data of the classes
    column = []  # list, stores elements of the gui
    student_numbers = []  # list, stores the student numbers of the students in the class that is going to be deleted

    def get_number_of_students(course_code):  # collects the student numbers of a given class and returns how many there are
        cur.execute("select * from EOM_STUDENTS")
        number_of = 0
        for row in cur:  # for loop that goes through the students data table
            if row[1] == course_code:  # checks if a student belongs to a certain class
                number_of += 1
                student_numbers.append(row[0])
        return number_of

    cur.execute("select * from EOM_CLASS")
    for row in cur:  # goes through the classes data table and puts the values into respective lists
        word = row[0].split('/')
        classes.append(word[0])
        year.append(str(word[1]))
        period.append(str(row[1]))
    for x in range(len(classes)):  # one row of widgets per class so everything is displayed
        if x == len(classes) - 1:  # special gui elements for the last class, set the select to True
            # NOTE(review): this branch is currently identical to the else
            # branch below — presumably only the last class was meant to have
            # its radio button pre-selected.
            column.append([sg.Text(classes[x] + "          ", size=(20, 1), justification='right'),
                           sg.Button('access', button_color=('black', 'orange'), key=str(x)), sg.Radio('select', "RADIO1", default=True)],
                          )
        else:
            column.append([sg.Text(classes[x] + "          ", size=(20, 1), justification='right'),
                           sg.Button('access', button_color=('black', 'orange'), key=str(x)),
                           sg.Radio('select', "RADIO1", default=True)],
                          )
        column.append([sg.Text('Period: ' + period[x]), sg.Text('Year: ' + year[x])], )
        column.append([sg.Text('  ')])
    layout = [  # where the gui is put together, each [] holds one row's content
        [sg.Text('      Class selection', size=(17, 1), font=("Helvetica", 25), text_color='black', justification='center')],
        [sg.Column(column, scrollable=True, size=(400, 300))],
        [sg.Button('Add Class', button_color=('white', 'black'), font=("Helvetica", 15), key='key_add_class'),
         sg.Button('Edit Class', button_color=('white', 'black'), font=("Helvetica", 15), key='key_edit_class'),
         sg.Button('Delete Class', button_color=('white', 'black'), font=("Helvetica", 15), key='key_delete_class')]]
    window = sg.FlexForm('Class selection  ', auto_size_text=True, default_element_size=(40, 1)).Layout(layout)  # opens a window and displays everything

    def reopen():  # closes the window and opens it again, refreshing and updating the gui
        window.Close()
        run_program()

    while True:  # runs as long as the window is open, similar to an action listener
        event, values = window.Read()  # the pysimplegui equivalent of an action listener
        if event == 'key_add_class':  # checks if it was the add classes button that was pressed
            add()
            reopen()
        if event == 'key_edit_class':  # checks if it was the edit class button that was pressed
            for x in range(len(classes)):  # goes through and finds the class that has been selected
                if values[x]:
                    edit(classes[x], period[x], year[x])
            reopen()
        if event == 'key_delete_class':  # checks if it was the delete class button that was pressed
            deleted = ''  # variable, holds the name of the selected class
            for x in range(len(classes)):  # goes through and finds the selected class
                if values[x]:  # check if x has been selected
                    deleted = classes[x]
                    cur.execute("DELETE FROM EOM_CLASS WHERE CLASS = :course_code", course_code=str(classes[x] + '/' + year[x]))
                    con.commit()
                    cur.execute("select * from EOM_STUDENTS")
                    for z in range(get_number_of_students(deleted)):  # runs once per student of the selected class
                        # BUG FIX: the original indexed student_numbers with the
                        # outer loop variable `x`, deleting the wrong entry
                        # repeatedly; `z` walks the collected ids one by one.
                        cur.execute("DELETE FROM EOM_STUDENTS WHERE STUDENT_ID = :v_id", v_id=student_numbers[z])
                    con.commit()  # BUG FIX: persist the student deletions too (they were previously never committed)
            reopen()
        if event is None:  # check if the window should be closed
            break
        for x in range(len(classes)):  # goes through all the access buttons
            if event == str(x):  # check if it was x that was pressed
                access(classes[x] + '/' + year[x])
                reopen()
    window.Close()


run_program()
|
[
"PySimpleGUI.Button",
"Add_New_Classes.run_program",
"PySimpleGUI.FlexForm",
"PySimpleGUI.Text",
"Grades_Chart.run_program",
"PySimpleGUI.Radio",
"Edit_Classes.run_program",
"PySimpleGUI.Column",
"cx_Oracle.connect"
] |
[((297, 338), 'cx_Oracle.connect', 'cx_Oracle.connect', (['"""EOM/EOM@127.0.0.1/xe"""'], {}), "('EOM/EOM@127.0.0.1/xe')\n", (314, 338), False, 'import cx_Oracle\n'), ((2573, 2695), 'PySimpleGUI.Text', 'sg.Text', (['""" Class selection"""'], {'size': '(17, 1)', 'font': "('Helvetica', 25)", 'text_color': '"""black"""', 'justification': '"""center"""'}), "(' Class selection', size=(17, 1), font=('Helvetica', 25),\n text_color='black', justification='center')\n", (2580, 2695), True, 'import PySimpleGUI as sg\n'), ((2703, 2754), 'PySimpleGUI.Column', 'sg.Column', (['column'], {'scrollable': '(True)', 'size': '(400, 300)'}), '(column, scrollable=True, size=(400, 300))\n', (2712, 2754), True, 'import PySimpleGUI as sg\n'), ((2766, 2871), 'PySimpleGUI.Button', 'sg.Button', (['"""Add Class"""'], {'button_color': "('white', 'black')", 'font': "('Helvetica', 15)", 'key': '"""key_add_class"""'}), "('Add Class', button_color=('white', 'black'), font=('Helvetica', \n 15), key='key_add_class')\n", (2775, 2871), True, 'import PySimpleGUI as sg\n'), ((2877, 2983), 'PySimpleGUI.Button', 'sg.Button', (['"""Edit Class"""'], {'button_color': "('white', 'black')", 'font': "('Helvetica', 15)", 'key': '"""key_edit_class"""'}), "('Edit Class', button_color=('white', 'black'), font=('Helvetica',\n 15), key='key_edit_class')\n", (2886, 2983), True, 'import PySimpleGUI as sg\n'), ((2990, 3101), 'PySimpleGUI.Button', 'sg.Button', (['"""Delete Class"""'], {'button_color': "('white', 'black')", 'font': "('Helvetica', 15)", 'key': '"""key_delete_class"""'}), "('Delete Class', button_color=('white', 'black'), font=(\n 'Helvetica', 15), key='key_delete_class')\n", (2999, 3101), True, 'import PySimpleGUI as sg\n'), ((3123, 3210), 'PySimpleGUI.FlexForm', 'sg.FlexForm', (['"""Class selection """'], {'auto_size_text': '(True)', 'default_element_size': '(40, 1)'}), "('Class selection ', auto_size_text=True, default_element_size=(\n 40, 1))\n", (3134, 3210), True, 'import PySimpleGUI as sg\n'), ((3704, 
3709), 'Add_New_Classes.run_program', 'add', ([], {}), '()\n', (3707, 3709), True, 'from Add_New_Classes import run_program as add\n'), ((2371, 2402), 'PySimpleGUI.Text', 'sg.Text', (["('Period: ' + period[x])"], {}), "('Period: ' + period[x])\n", (2378, 2402), True, 'import PySimpleGUI as sg\n'), ((2404, 2431), 'PySimpleGUI.Text', 'sg.Text', (["('Year: ' + year[x])"], {}), "('Year: ' + year[x])\n", (2411, 2431), True, 'import PySimpleGUI as sg\n'), ((2459, 2471), 'PySimpleGUI.Text', 'sg.Text', (['""" """'], {}), "(' ')\n", (2466, 2471), True, 'import PySimpleGUI as sg\n'), ((5172, 5206), 'Grades_Chart.run_program', 'access', (["(classes[x] + '/' + year[x])"], {}), "(classes[x] + '/' + year[x])\n", (5178, 5206), True, 'from Grades_Chart import run_program as access\n'), ((1810, 1876), 'PySimpleGUI.Text', 'sg.Text', (["(classes[x] + ' ')"], {'size': '(20, 1)', 'justification': '"""right"""'}), "(classes[x] + ' ', size=(20, 1), justification='right')\n", (1817, 1876), True, 'import PySimpleGUI as sg\n'), ((1972, 2014), 'PySimpleGUI.Radio', 'sg.Radio', (['"""select"""', '"""RADIO1"""'], {'default': '(True)'}), "('select', 'RADIO1', default=True)\n", (1980, 2014), True, 'import PySimpleGUI as sg\n'), ((2086, 2152), 'PySimpleGUI.Text', 'sg.Text', (["(classes[x] + ' ')"], {'size': '(20, 1)', 'justification': '"""right"""'}), "(classes[x] + ' ', size=(20, 1), justification='right')\n", (2093, 2152), True, 'import PySimpleGUI as sg\n'), ((2275, 2317), 'PySimpleGUI.Radio', 'sg.Radio', (['"""select"""', '"""RADIO1"""'], {'default': '(True)'}), "('select', 'RADIO1', default=True)\n", (2283, 2317), True, 'import PySimpleGUI as sg\n'), ((3980, 4016), 'Edit_Classes.run_program', 'edit', (['classes[x]', 'period[x]', 'year[x]'], {}), '(classes[x], period[x], year[x])\n', (3984, 4016), True, 'from Edit_Classes import run_program as edit\n')]
|
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.data_migration.doctype.data_migration_connector.connectors.base import BaseConnection
import mysql.connector as mariadb
class EmployeeTrainingCertificationConnection(BaseConnection):
	"""Read-only data-migration connector for legacy employee
	training/certification records stored in a MariaDB database.

	Only ``get`` is implemented; ``insert``/``update``/``delete`` are
	intentional no-ops because the remote system is treated as the
	source of truth.
	"""

	def __init__(self, connector):
		"""Open the MariaDB connection using the connector's credentials."""
		self.connector = connector
		try:
			secret = self.get_password()
		except frappe.AuthenticationError:
			# No stored password: fall back to a password-less login attempt.
			secret = None
		try:
			self.connection = mariadb.connect(
				host=self.connector.hostname,
				user=self.connector.username,
				password=secret,
				database=self.connector.database_name
			)
		except mariadb.Error as error:
			print("Error: {}".format(error))
			frappe.throw(("Error: {}".format(error)))
		# Column used by the data-migration framework as the remote primary key.
		self.name_field = 'id'

	def insert(self, doctype, doc):
		"""Remote writes are not supported for this connector."""
		pass

	def update(self, doctype, doc, migration_id):
		"""Remote writes are not supported for this connector."""
		pass

	def delete(self, doctype, migration_id):
		"""Remote deletes are not supported for this connector."""
		pass

	def get(self, remote_objectname, fields=None, filters=None, start=0, page_length=10):
		"""Fetch every active certification row joined to its employee.

		NOTE(review): the fields/filters/start/page_length arguments are
		part of the BaseConnection interface but are ignored here — the
		query always returns all active rows.
		"""
		query = ("""
			SELECT
				ecd.id,
				u.employeeId as parent,
				'training_and_certification_details' as parentfield,
				'Employee' as parenttype,
				ecd.course_name,
				ecd.course_level,
				ecd.course_offered_by,
				ecd.certification_name,
				ecd.description,
				ecd.issued_date
			FROM
				main_empcertificationdetails ecd
			INNER JOIN
				main_users u ON u.id = ecd.user_id
			WHERE
				ecd.isactive = 1;""")
		cur = self.connection.cursor(dictionary=True)
		cur.execute(query)
		rows = list(cur)
		cur.close()
		for row in rows:
			# Legacy employee ids carry an 'EMPG' prefix; strip it so the
			# value matches the target Employee document name.
			row['parent'] = row['parent'].replace('EMPG', '')
		return rows

	def __del__(self):
		"""Close the database connection when the connector is collected."""
		self.connection.close()
|
[
"mysql.connector.connect"
] |
[((460, 597), 'mysql.connector.connect', 'mariadb.connect', ([], {'host': 'self.connector.hostname', 'user': 'self.connector.username', 'password': 'password', 'database': 'self.connector.database_name'}), '(host=self.connector.hostname, user=self.connector.username,\n password=password, database=self.connector.database_name)\n', (475, 597), True, 'import mysql.connector as mariadb\n')]
|
#!/usr/bin/env python
import subprocess
import os
import shutil
import tarfile
from setuptools import setup
def untar(fname, fpath):
    """Extract a tar archive into *fpath* and delete the archive.

    Parameters
    ----------
    fname : str
        Path to the archive. Names ending in ``tar``, ``tar.gz``,
        ``tar.bz`` or ``tar.bz2`` are handled; anything else is ignored.
    fpath : str
        Directory the archive members are extracted into.

    The archive file is removed only when it was actually extracted.
    """
    # '.tar.bz2' added: the original only matched the unusual 'tar.bz'
    # suffix, so real bzip2 archives were silently skipped.
    if fname.endswith(('tar.gz', 'tar.bz', 'tar.bz2', 'tar')):
        # Context manager guarantees the archive handle is closed even if
        # extraction raises (the original leaked it on error).
        with tarfile.open(fname) as tar:
            tar.extractall(path=fpath)
        os.remove(fname)
# --- path setup -------------------------------------------------------------
base = os.getcwd()
# conda-build exposes the install prefix via $PREFIX.
prefix = os.environ.get('PREFIX')
processDi = os.path.abspath(os.path.join(prefix, os.pardir))
# conda-build work directory holding the unpacked RTTOV sources.
processDir = os.path.join(processDi, 'work')
binDir = os.path.join(prefix, 'bin')
libDir = os.path.join(processDir, 'source', 'lib')
srcDir = os.path.join(processDir, 'source', 'src')
# =====write Makefile.local===========
# Point the RTTOV build at the HDF5 installation inside the conda prefix.
makeFilename = os.path.join(processDir, 'source', 'build', "Makefile.local")
fn = open(makeFilename, "w")
fn.write("HDF5_PREFIX = %s\n" % prefix)
# fn.write("HDF5_PREFIX = /usr/local\n")
fn.write("FFLAGS_HDF5 = -D_RTTOV_HDF $(FFLAG_MOD)$(HDF5_PREFIX)/include\n")
fn.write("LDFLAGS_HDF5 = -L$(HDF5_PREFIX)/lib -lhdf5hl_fortran -lhdf5_hl -lhdf5_fortran -lhdf5\n")
fn.write("FFLAGS_EXTERN = $(FFLAGS_NETCDF) $(FFLAGS_HDF5) $(FFLAGS_DRHOOK)\n")
fn.write("LDFLAGS_EXTERN = $(LDFLAGS_NETCDF) $(LDFLAGS_HDF5) $(LDFLAGS_DRHOOK)")
fn.close()
# =====compile rttov=================
# Create the shared data directories RTTOV expects under $PREFIX/share/rttov.
rttovPath = os.path.join(prefix, 'share', 'rttov')
os.makedirs(rttovPath)
rttovEmisPath = os.path.join(rttovPath, 'emis_data')
os.makedirs(rttovEmisPath)
rttovBRDFPath = os.path.join(rttovPath, 'brdf_data')
os.makedirs(rttovBRDFPath)
os.chdir(srcDir)
# Configure and build the Fortran sources with the f2py wrapper enabled.
subprocess.call("../build/Makefile.PL RTTOV_HDF=1 RTTOV_F2PY=1", shell=True)
subprocess.call("make ARCH=gfortran INSTALLDIR=./ clean", shell=True)
subprocess.call("make ARCH=gfortran INSTALLDIR=./", shell=True)
# subprocess.call(["../build/rttov_compile.sh"])
# ====moving shared library to bin ===========
# NOTE(review): condaPath is computed but never used below.
p = subprocess.Popen(["conda", "info", "--root"], stdout=subprocess.PIPE)
out = p.communicate()
condaPath = out[0][:-1]
os.chdir(base)
# Install the compiled f2py extension into the prefix's site-packages.
shutil.copyfile(os.path.join(libDir, 'rttov_wrapper_f2py.so'),
                os.path.join(prefix, 'lib', 'python2.7',
                             'site-packages', 'rttov_wrapper_f2py.so'))
# Ship the Landsat-8 TIRS coefficient file with the package data.
shutil.copyfile(os.path.join(processDir, 'source', 'rtcoef_rttov11',
                             'rttov7pred54L', 'rtcoef_landsat_8_tirs.dat'),
                os.path.join(rttovPath, 'rtcoef_landsat_8_tirs.dat'))
setup(
    name="pyrttov",
    version="0.4.0",
    description="pythonic wrapper for rttov",
    author="<NAME>",
    author_email="<EMAIL>",
    packages=['pyrttov'],
    platforms='Posix; MacOS X; Windows',
    license='BSD 3-Clause',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2',
        # Uses dictionary comprehensions ==> 2.7 only
        'Programming Language :: Python :: 2.7',
        'Topic :: Scientific/Engineering :: GIS',
    ],
)
|
[
"subprocess.Popen",
"os.remove",
"os.makedirs",
"setuptools.setup",
"os.getcwd",
"os.environ.get",
"subprocess.call",
"tarfile.open",
"os.path.join",
"os.chdir"
] |
[((344, 355), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (353, 355), False, 'import os\n'), ((365, 389), 'os.environ.get', 'os.environ.get', (['"""PREFIX"""'], {}), "('PREFIX')\n", (379, 389), False, 'import os\n'), ((464, 495), 'os.path.join', 'os.path.join', (['processDi', '"""work"""'], {}), "(processDi, 'work')\n", (476, 495), False, 'import os\n'), ((505, 532), 'os.path.join', 'os.path.join', (['prefix', '"""bin"""'], {}), "(prefix, 'bin')\n", (517, 532), False, 'import os\n'), ((542, 583), 'os.path.join', 'os.path.join', (['processDir', '"""source"""', '"""lib"""'], {}), "(processDir, 'source', 'lib')\n", (554, 583), False, 'import os\n'), ((593, 634), 'os.path.join', 'os.path.join', (['processDir', '"""source"""', '"""src"""'], {}), "(processDir, 'source', 'src')\n", (605, 634), False, 'import os\n'), ((690, 751), 'os.path.join', 'os.path.join', (['processDir', '"""source"""', '"""build"""', '"""Makefile.local"""'], {}), "(processDir, 'source', 'build', 'Makefile.local')\n", (702, 751), False, 'import os\n'), ((1267, 1305), 'os.path.join', 'os.path.join', (['prefix', '"""share"""', '"""rttov"""'], {}), "(prefix, 'share', 'rttov')\n", (1279, 1305), False, 'import os\n'), ((1306, 1328), 'os.makedirs', 'os.makedirs', (['rttovPath'], {}), '(rttovPath)\n', (1317, 1328), False, 'import os\n'), ((1345, 1381), 'os.path.join', 'os.path.join', (['rttovPath', '"""emis_data"""'], {}), "(rttovPath, 'emis_data')\n", (1357, 1381), False, 'import os\n'), ((1382, 1408), 'os.makedirs', 'os.makedirs', (['rttovEmisPath'], {}), '(rttovEmisPath)\n', (1393, 1408), False, 'import os\n'), ((1425, 1461), 'os.path.join', 'os.path.join', (['rttovPath', '"""brdf_data"""'], {}), "(rttovPath, 'brdf_data')\n", (1437, 1461), False, 'import os\n'), ((1462, 1488), 'os.makedirs', 'os.makedirs', (['rttovBRDFPath'], {}), '(rttovBRDFPath)\n', (1473, 1488), False, 'import os\n'), ((1490, 1506), 'os.chdir', 'os.chdir', (['srcDir'], {}), '(srcDir)\n', (1498, 1506), False, 'import os\n'), ((1507, 
1583), 'subprocess.call', 'subprocess.call', (['"""../build/Makefile.PL RTTOV_HDF=1 RTTOV_F2PY=1"""'], {'shell': '(True)'}), "('../build/Makefile.PL RTTOV_HDF=1 RTTOV_F2PY=1', shell=True)\n", (1522, 1583), False, 'import subprocess\n'), ((1584, 1653), 'subprocess.call', 'subprocess.call', (['"""make ARCH=gfortran INSTALLDIR=./ clean"""'], {'shell': '(True)'}), "('make ARCH=gfortran INSTALLDIR=./ clean', shell=True)\n", (1599, 1653), False, 'import subprocess\n'), ((1654, 1717), 'subprocess.call', 'subprocess.call', (['"""make ARCH=gfortran INSTALLDIR=./"""'], {'shell': '(True)'}), "('make ARCH=gfortran INSTALLDIR=./', shell=True)\n", (1669, 1717), False, 'import subprocess\n'), ((1819, 1888), 'subprocess.Popen', 'subprocess.Popen', (["['conda', 'info', '--root']"], {'stdout': 'subprocess.PIPE'}), "(['conda', 'info', '--root'], stdout=subprocess.PIPE)\n", (1835, 1888), False, 'import subprocess\n'), ((1936, 1950), 'os.chdir', 'os.chdir', (['base'], {}), '(base)\n', (1944, 1950), False, 'import os\n'), ((2361, 2896), 'setuptools.setup', 'setup', ([], {'name': '"""pyrttov"""', 'version': '"""0.4.0"""', 'description': '"""pythonic wrapper for rttov"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'packages': "['pyrttov']", 'platforms': '"""Posix; MacOS X; Windows"""', 'license': '"""BSD 3-Clause"""', 'classifiers': "['Development Status :: 2 - Pre-Alpha', 'Intended Audience :: Developers',\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Topic :: Scientific/Engineering :: GIS']"}), "(name='pyrttov', version='0.4.0', description=\n 'pythonic wrapper for rttov', author='<NAME>', author_email='<EMAIL>',\n packages=['pyrttov'], platforms='Posix; MacOS X; Windows', license=\n 'BSD 3-Clause', classifiers=['Development Status :: 2 - Pre-Alpha',\n 'Intended Audience :: Developers',\n 'Intended Audience :: Science/Research',\n 'License :: 
OSI Approved :: MIT License',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Topic :: Scientific/Engineering :: GIS'])\n", (2366, 2896), False, 'from setuptools import setup\n'), ((418, 449), 'os.path.join', 'os.path.join', (['prefix', 'os.pardir'], {}), '(prefix, os.pardir)\n', (430, 449), False, 'import os\n'), ((1968, 2013), 'os.path.join', 'os.path.join', (['libDir', '"""rttov_wrapper_f2py.so"""'], {}), "(libDir, 'rttov_wrapper_f2py.so')\n", (1980, 2013), False, 'import os\n'), ((2031, 2117), 'os.path.join', 'os.path.join', (['prefix', '"""lib"""', '"""python2.7"""', '"""site-packages"""', '"""rttov_wrapper_f2py.so"""'], {}), "(prefix, 'lib', 'python2.7', 'site-packages',\n 'rttov_wrapper_f2py.so')\n", (2043, 2117), False, 'import os\n'), ((2161, 2263), 'os.path.join', 'os.path.join', (['processDir', '"""source"""', '"""rtcoef_rttov11"""', '"""rttov7pred54L"""', '"""rtcoef_landsat_8_tirs.dat"""'], {}), "(processDir, 'source', 'rtcoef_rttov11', 'rttov7pred54L',\n 'rtcoef_landsat_8_tirs.dat')\n", (2173, 2263), False, 'import os\n'), ((2306, 2358), 'os.path.join', 'os.path.join', (['rttovPath', '"""rtcoef_landsat_8_tirs.dat"""'], {}), "(rttovPath, 'rtcoef_landsat_8_tirs.dat')\n", (2318, 2358), False, 'import os\n'), ((235, 254), 'tarfile.open', 'tarfile.open', (['fname'], {}), '(fname)\n', (247, 254), False, 'import tarfile\n'), ((318, 334), 'os.remove', 'os.remove', (['fname'], {}), '(fname)\n', (327, 334), False, 'import os\n')]
|
import numpy as np
import Levenshtein as Lev
def cer_calculate(s1, s2, no_spaces=False):
    """Return the character error rate between two sentences.

    The Levenshtein edit distance is capped at len(s1) and normalised by
    len(s1), so the result lies in [0, 1].

    Arguments:
        s1 (string): space-separated reference sentence
        s2 (string): space-separated hypothesis sentence
        no_spaces (bool): strip all spaces before comparing

    NOTE(review): raises ZeroDivisionError when s1 is empty (after the
    optional space removal).
    """
    if no_spaces:
        s1 = s1.replace(' ', '')
        s2 = s2.replace(' ', '')
    edit_distance = Lev.distance(s1, s2)
    return min(len(s1), edit_distance) / len(s1)
def compute_accuracy(ground_truth, predictions, mode='full_sequence'):
    """Compute an accuracy (or error) score for a batch of predictions.

    :param ground_truth: list of label strings/sequences
    :param predictions: list of predicted strings/sequences, index-aligned
        with ground_truth
    :param mode:
        'per_char'      -- mean over samples of (matching chars / label length);
                           an empty label scores 1 when its prediction is also
                           empty, else 0
        'full_sequence' -- fraction of predictions exactly equal to their label;
                           an empty batch scores 1 when predictions is empty,
                           else 0
        'CER'           -- mean character error rate via cer_calculate
    :return: the averaged score (numpy float32 for 'per_char'/'CER')
    """
    if mode == 'per_char':
        scores = []
        for idx, label in enumerate(ground_truth):
            prediction = predictions[idx]
            # zip stops at the shorter sequence, which matches the original
            # IndexError-based loop: only positions present in both count.
            matched = sum(1 for a, b in zip(label, prediction) if a == b)
            if label:
                scores.append(matched / len(label))
            elif not prediction:
                scores.append(1)
            else:
                scores.append(0)
        return np.mean(np.array(scores).astype(np.float32), axis=0)
    elif mode == 'full_sequence':
        if not ground_truth:
            return 1 if not predictions else 0
        exact = sum(1 for idx, label in enumerate(ground_truth)
                    if predictions[idx] == label)
        return exact / len(ground_truth)
    elif mode == 'CER':
        cers = [cer_calculate(label, predictions[idx])
                for idx, label in enumerate(ground_truth)]
        return np.mean(np.array(cers).astype(np.float32), axis=0)
    else:
        raise NotImplementedError('Other accuracy compute mode has not been implemented')
|
[
"Levenshtein.distance",
"numpy.array"
] |
[((384, 404), 'Levenshtein.distance', 'Lev.distance', (['s1', 's2'], {}), '(s1, s2)\n', (396, 404), True, 'import Levenshtein as Lev\n'), ((1917, 1935), 'numpy.array', 'np.array', (['accuracy'], {}), '(accuracy)\n', (1925, 1935), True, 'import numpy as np\n'), ((2694, 2712), 'numpy.array', 'np.array', (['cer_list'], {}), '(cer_list)\n', (2702, 2712), True, 'import numpy as np\n')]
|
import airflow
from airflow import DAG
from airflow.operators.dummy import DummyOperator
from airflow.operators.python import PythonOperator
# Cutover date of the ERP migration: executions before this date use the
# old system's fetch/clean implementations, later ones use the new.
ERP_CHANGE_DATE = airflow.utils.dates.days_ago(1)
def _fetch_sales(**context):
    """Dispatch the sales fetch to the old or new ERP implementation,
    depending on whether the run's execution date predates the cutover."""
    use_old_system = context["execution_date"] < ERP_CHANGE_DATE
    if use_old_system:
        _fetch_sales_old(**context)
    else:
        _fetch_sales_new(**context)
def _fetch_sales_old(**context):
print("Fetching sales data (OLD)...")
def _fetch_sales_new(**context):
print("Fetching sales data (NEW)...")
def _clean_sales(**context):
    """Dispatch sales preprocessing to the old or new implementation.

    Consistency fix: compare against the module-level ERP_CHANGE_DATE
    constant (exactly as _fetch_sales does) instead of recomputing
    airflow.utils.dates.days_ago(1) at call time, which could drift from
    the fetch task's cutover if the tasks run on different days.
    """
    if context["execution_date"] < ERP_CHANGE_DATE:
        _clean_sales_old(**context)
    else:
        _clean_sales_new(**context)
def _clean_sales_old(**context):
print("Preprocessing sales data (OLD)...")
def _clean_sales_new(**context):
print("Preprocessing sales data (NEW)...")
with DAG(
    dag_id="02_branch_function",
    start_date=airflow.utils.dates.days_ago(3),
    schedule_interval="@daily",
) as dag:
    # Two parallel fetch/clean branches joined before training; the
    # old-vs-new ERP branching happens inside the Python callables,
    # not in the DAG structure itself.
    start = DummyOperator(task_id="start")
    fetch_sales = PythonOperator(task_id="fetch_sales", python_callable=_fetch_sales)
    clean_sales = PythonOperator(task_id="clean_sales", python_callable=_clean_sales)
    # Weather branch is stubbed out with no-op operators.
    fetch_weather = DummyOperator(task_id="fetch_weather")
    clean_weather = DummyOperator(task_id="clean_weather")
    join_datasets = DummyOperator(task_id="join_datasets")
    train_model = DummyOperator(task_id="train_model")
    deploy_model = DummyOperator(task_id="deploy_model")
    # Task dependencies: fan out, clean each branch, join, then train/deploy.
    start >> [fetch_sales, fetch_weather]
    fetch_sales >> clean_sales
    fetch_weather >> clean_weather
    [clean_sales, clean_weather] >> join_datasets
    join_datasets >> train_model >> deploy_model
|
[
"airflow.utils.dates.days_ago",
"airflow.operators.python.PythonOperator",
"airflow.operators.dummy.DummyOperator"
] |
[((161, 192), 'airflow.utils.dates.days_ago', 'airflow.utils.dates.days_ago', (['(1)'], {}), '(1)\n', (189, 192), False, 'import airflow\n'), ((1004, 1034), 'airflow.operators.dummy.DummyOperator', 'DummyOperator', ([], {'task_id': '"""start"""'}), "(task_id='start')\n", (1017, 1034), False, 'from airflow.operators.dummy import DummyOperator\n'), ((1054, 1121), 'airflow.operators.python.PythonOperator', 'PythonOperator', ([], {'task_id': '"""fetch_sales"""', 'python_callable': '_fetch_sales'}), "(task_id='fetch_sales', python_callable=_fetch_sales)\n", (1068, 1121), False, 'from airflow.operators.python import PythonOperator\n'), ((1140, 1207), 'airflow.operators.python.PythonOperator', 'PythonOperator', ([], {'task_id': '"""clean_sales"""', 'python_callable': '_clean_sales'}), "(task_id='clean_sales', python_callable=_clean_sales)\n", (1154, 1207), False, 'from airflow.operators.python import PythonOperator\n'), ((1229, 1267), 'airflow.operators.dummy.DummyOperator', 'DummyOperator', ([], {'task_id': '"""fetch_weather"""'}), "(task_id='fetch_weather')\n", (1242, 1267), False, 'from airflow.operators.dummy import DummyOperator\n'), ((1288, 1326), 'airflow.operators.dummy.DummyOperator', 'DummyOperator', ([], {'task_id': '"""clean_weather"""'}), "(task_id='clean_weather')\n", (1301, 1326), False, 'from airflow.operators.dummy import DummyOperator\n'), ((1348, 1386), 'airflow.operators.dummy.DummyOperator', 'DummyOperator', ([], {'task_id': '"""join_datasets"""'}), "(task_id='join_datasets')\n", (1361, 1386), False, 'from airflow.operators.dummy import DummyOperator\n'), ((1405, 1441), 'airflow.operators.dummy.DummyOperator', 'DummyOperator', ([], {'task_id': '"""train_model"""'}), "(task_id='train_model')\n", (1418, 1441), False, 'from airflow.operators.dummy import DummyOperator\n'), ((1461, 1498), 'airflow.operators.dummy.DummyOperator', 'DummyOperator', ([], {'task_id': '"""deploy_model"""'}), "(task_id='deploy_model')\n", (1474, 1498), False, 'from 
airflow.operators.dummy import DummyOperator\n'), ((578, 609), 'airflow.utils.dates.days_ago', 'airflow.utils.dates.days_ago', (['(1)'], {}), '(1)\n', (606, 609), False, 'import airflow\n'), ((917, 948), 'airflow.utils.dates.days_ago', 'airflow.utils.dates.days_ago', (['(3)'], {}), '(3)\n', (945, 948), False, 'import airflow\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Dec 10 10:43:18 2019
@author: nevalaio
"""
import ee
import time
import datetime
import satelliteTools as st
import pandas as pd
from geetools import batch, tools
import numpy as np
# Initialize the Earth Engine client (must run before any ee.* call below).
ee.Initialize()
#----------------- Sentinel-2 -------------------------------------
def S2_getBandData_within_bbox_single_feature(S2_timseries_dataframe, aoi_shp, AOI_id_property,AOI_id, bufferdist, datestart, dateend):
    """Extract per-pixel Sentinel-2 band values inside the (optionally
    buffered) bounding box of a single AOI feature.

    Parameters:
        S2_timseries_dataframe: per-AOI tables with 'assetid' and 'crs'
            columns (presumably output of an earlier timeseries query --
            TODO confirm against caller).
        aoi_shp: path to the AOI shapefile.
        AOI_id_property: shapefile attribute used as AOI identifier.
        AOI_id: identifier of the single AOI to process.
        bufferdist: buffer distance applied before taking the bounding box;
            falsy means no buffer (plain bounds).
        datestart, dateend: date range for filterDate().

    Returns:
        getInfo() dict of a FeatureCollection whose features carry per-image
        ids/angles/crs plus pixel coordinate and band value arrays.
    """
    bands= ['B3', 'B4','B5','B6','B7','B8A','B11','B12'] #]
#    properties = ['cos(View_Zenith)', 'cos(Sun_Zenith)', 'cos(Rel_Azimuth)']
    start = time.time()
    image_list = {}
    crs_list = {}
    key = AOI_id
    # Expand stored asset ids to full Earth Engine asset paths.
    full_assetids = "COPERNICUS/S2_SR/" + S2_timseries_dataframe[key]['assetid']
    image_list[key] = [ee.Image(a) for a in full_assetids]
    # All rows are assumed to share one CRS; take the first.
    crs_list[key] = [crs for crs in S2_timseries_dataframe[key]['crs']][0]
    attributes = st.getShapeAtrrtibutesWithIdentifier(aoi_shp, AOI_id_property)
    feature = ee.Feature(ee.Geometry.Polygon(st.wkt2coordinates(attributes[key]['geometry'])),\
                          {'name':key, 'image_list':image_list[key], 'crs':crs_list[key]})
    if bufferdist:
        bbox = ee.Feature(feature.geometry().buffer(bufferdist).bounds(0.1,feature.get("crs")))
    else:
        bbox = ee.Feature(feature.geometry().bounds(0.1,feature.get("crs")))
    imageCollection = ee.ImageCollection.fromImages(feature.get("image_list"))\
                        .filterBounds(bbox.geometry())\
                        .filterDate(datestart,dateend)\
                        .select(bands)
#    imageCollection = imageCollection.map(S2_addNDVI)  # add and compute extra indices here?
    def S2_getBandData_image_single_feature(img):
        # Per-image extraction: clip to the bbox, collect metadata and angles.
        img = img.clip(bbox.geometry())
        productid = img.get('PRODUCT_ID')
        assetid = img.get('assetid')
        tileid = img.get('MGRS_TILE')
        system_index = img.get('system:index')
        sun_azimuth = img.get('MEAN_SOLAR_AZIMUTH_ANGLE')
        sun_zenith = img.get('MEAN_SOLAR_ZENITH_ANGLE')
        # Average the per-band incidence angles into one scalar per image.
        view_azimuth = ee.Array([img.get('MEAN_INCIDENCE_AZIMUTH_ANGLE_%s'%b) for b in bands]).reduce(ee.Reducer.mean(), [0]).get([0])
        view_zenith = ee.Array([img.get('MEAN_INCIDENCE_ZENITH_ANGLE_%s'%b) for b in bands]).reduce(ee.Reducer.mean(), [0]).get([0])
        # Resample all bands to a common 10 m grid in the feature's CRS.
        img = img.resample('bilinear').reproject(crs=feature.get("crs"), scale=10)
        # get the lat lon and add the ndvi
        image_grid = ee.Image.pixelCoordinates(ee.Projection(feature.get("crs")))\
            .addBands([img.select(b) for b in bands])
        # apply reducer to list
        image_grid = image_grid.reduceRegion(
            reducer=ee.Reducer.toList(),
            geometry=bbox.geometry(),
            maxPixels=1e8,
            scale=10)
        # get data into arrays
        x_coords = ee.Array(image_grid.get("x"))
        y_coords = ee.Array(image_grid.get("y"))
#        band_data = []
#        [band_data.extend(b,ee.Array(image_grid.get("%s"%b))) for b in bands[:-1]]
        band_data = {b:ee.Array(image_grid.get("%s"%b)) for b in bands}
#        NDVI_array = ee.Array(image_grid.get("NDVI"))
#        B6_array = ee.Array(image_grid.get("B6"))
#        perform LAI et al. computation possibly here!
        # Pack everything on a dummy point feature for aggregation.
        tmpfeature = ee.Feature(ee.Geometry.Point([0,0]))\
                    .set('productid', productid)\
                    .set('system_index',system_index)\
                    .set('assetid', assetid)\
                    .set('tileid', tileid)\
                    .set('crs', feature.get("crs"))\
                    .set('sun_zenith',sun_zenith)\
                    .set('sun_azimuth',sun_azimuth)\
                    .set('view_zenith',view_zenith)\
                    .set('view_azimuth',view_azimuth)\
                    .set('x_coords', x_coords)\
                    .set('y_coords', y_coords)\
                    .set(band_data)
        return tmpfeature
    # getInfo() pulls the whole mapped collection client-side.
    S2_single_feature_data = imageCollection.map(S2_getBandData_image_single_feature).getInfo()
    end = time.time()
    total_time = end - start
    print ("Processsing time in seconds %s"%total_time)
    return S2_single_feature_data
def S2_getBandData_within_aoi_single_feature(S2_timseries_dataframe, aoi_shp, AOI_id_property,AOI_id, datestart, dateend):
    """Extract per-pixel Sentinel-2 band values inside the exact AOI polygon
    of a single feature.

    Same as S2_getBandData_within_bbox_single_feature, but reduces over the
    AOI geometry itself rather than a (buffered) bounding box.

    Parameters:
        S2_timseries_dataframe: per-AOI tables with 'assetid' and 'crs'
            columns (presumably output of an earlier timeseries query --
            TODO confirm against caller).
        aoi_shp: path to the AOI shapefile.
        AOI_id_property: shapefile attribute used as AOI identifier.
        AOI_id: identifier of the single AOI to process.
        datestart, dateend: date range for filterDate().

    Returns:
        getInfo() dict of a FeatureCollection whose features carry per-image
        ids/angles/crs plus pixel coordinate and band value arrays.
    """
    bands= ['B3', 'B4','B5','B6','B7','B8A','B11','B12'] #]
#    properties = ['cos(View_Zenith)', 'cos(Sun_Zenith)', 'cos(Rel_Azimuth)']
    start = time.time()
    image_list = {}
    crs_list = {}
    key = AOI_id
    # Expand stored asset ids to full Earth Engine asset paths.
    full_assetids = "COPERNICUS/S2_SR/" + S2_timseries_dataframe[key]['assetid']
    image_list[key] = [ee.Image(a) for a in full_assetids]
    # All rows are assumed to share one CRS; take the first.
    crs_list[key] = [crs for crs in S2_timseries_dataframe[key]['crs']][0]
    attributes = st.getShapeAtrrtibutesWithIdentifier(aoi_shp, AOI_id_property)
    feature = ee.Feature(ee.Geometry.Polygon(st.wkt2coordinates(attributes[key]['geometry'])),\
                          {'name':key, 'image_list':image_list[key], 'crs':crs_list[key]})
    geom = feature.geometry(0.1,feature.get("crs"))
    imageCollection = ee.ImageCollection.fromImages(feature.get("image_list"))\
                        .filterBounds(geom)\
                        .filterDate(datestart,dateend)\
                        .select(bands)
#    imageCollection = imageCollection.map(S2_addNDVI)  # add and compute extra indices here?
    def S2_getBandData_image_single_feature(img):
        # Per-image extraction: clip to the AOI, collect metadata and angles.
        img = img.clip(geom)
        productid = img.get('PRODUCT_ID')
        assetid = img.get('assetid')
        tileid = img.get('MGRS_TILE')
        system_index = img.get('system:index')
        sun_azimuth = img.get('MEAN_SOLAR_AZIMUTH_ANGLE')
        sun_zenith = img.get('MEAN_SOLAR_ZENITH_ANGLE')
        # Average the per-band incidence angles into one scalar per image.
        view_azimuth = ee.Array([img.get('MEAN_INCIDENCE_AZIMUTH_ANGLE_%s'%b) for b in bands]).reduce(ee.Reducer.mean(), [0]).get([0])
        view_zenith = ee.Array([img.get('MEAN_INCIDENCE_ZENITH_ANGLE_%s'%b) for b in bands]).reduce(ee.Reducer.mean(), [0]).get([0])
        # Resample all bands to a common 10 m grid in the feature's CRS.
        img = img.resample('bilinear').reproject(crs=feature.get("crs"), scale=10)
        # get the lat lon and add the ndvi
        image_grid = ee.Image.pixelCoordinates(ee.Projection(feature.get("crs")))\
            .addBands([img.select(b) for b in bands])
        # apply reducer to list
        image_grid = image_grid.reduceRegion(
            reducer=ee.Reducer.toList(),
            geometry=geom,
            maxPixels=1e8,
            scale=10)
        # get data into arrays
        x_coords = ee.Array(image_grid.get("x"))
        y_coords = ee.Array(image_grid.get("y"))
#        band_data = []
#        [band_data.extend(b,ee.Array(image_grid.get("%s"%b))) for b in bands[:-1]]
        band_data = {b:ee.Array(image_grid.get("%s"%b)) for b in bands}
#        NDVI_array = ee.Array(image_grid.get("NDVI"))
#        B6_array = ee.Array(image_grid.get("B6"))
#        perform LAI et al. computation possibly here!
        # Pack everything on a dummy point feature for aggregation.
        tmpfeature = ee.Feature(ee.Geometry.Point([0,0]))\
                    .set('productid', productid)\
                    .set('system_index',system_index)\
                    .set('assetid', assetid)\
                    .set('tileid', tileid)\
                    .set('crs', feature.get("crs"))\
                    .set('sun_zenith',sun_zenith)\
                    .set('sun_azimuth',sun_azimuth)\
                    .set('view_zenith',view_zenith)\
                    .set('view_azimuth',view_azimuth)\
                    .set('x_coords', x_coords)\
                    .set('y_coords', y_coords)\
                    .set(band_data)
        return tmpfeature
    # getInfo() pulls the whole mapped collection client-side.
    S2_single_feature_data = imageCollection.map(S2_getBandData_image_single_feature).getInfo()
    end = time.time()
    total_time = end - start
    print ("Processsing time in seconds %s"%total_time)
    return S2_single_feature_data
def S2_getBandData_single_feature_to_dict(featureDict):
    """Convert getInfo() feature collections into plain per-farm dicts.

    Parameters:
        featureDict: mapping of farm id -> featureCollection dict (as
            returned by the S2_getBandData_* functions); every feature's
            'properties' must contain 'productid', from which the
            acquisition date is derived.

    Returns:
        dict: farm id -> {'Date': [...], <property>: [...], ...} with one
        list entry per feature.
    """
    featureCollection_dict = {}
    for farm, featureCollection in featureDict.items():
        # Property keys are assumed identical across features; seed the
        # result lists from the first feature.
        featureCollection_dict[farm] = {'Date': []}
        featureCollection_dict[farm].update({prop: [] for prop in featureCollection['features'][0]['properties'].keys()})
        for featnum in range(len(featureCollection['features'])):
            productid = featureCollection['features'][featnum]['properties']['productid']
            # Acquisition date parsed from the Sentinel product title.
            date = st.sentinelTitle2Datetime(productid)
            featureCollection_dict[farm]['Date'].append(date)
            for prop in featureCollection['features'][featnum]['properties'].keys():
                # BUG FIX: original used "prop is not 'Date'" -- identity
                # comparison with a string literal relies on interning and
                # emits a SyntaxWarning; use value comparison instead.
                if prop != 'Date':
                    featureCollection_dict[farm][prop].append(featureCollection['features'][featnum]['properties'][prop])
    return featureCollection_dict
def featureCollection_dict_to_dataframes(featureCollection_dict,props):
    """Aggregate per-pixel property arrays into per-date mean/std DataFrames.

    Parameters:
        featureCollection_dict: mapping of key -> dict with a 'Date' list and,
            for each property in *props*, a list of per-date pixel-value lists
            (rows = dates, columns = pixels).
        props: property names to aggregate.

    Returns:
        dict: key -> DataFrame with columns 'Date', <prop> (pixel mean per
        date) and <prop>_std (pixel standard deviation per date).
    """
    dataframes = {}
    for key, item in featureCollection_dict.items():
        df = pd.DataFrame({'Date': item['Date']})
        for prop in props:
            values = np.array(item[prop])
            df[prop] = list(np.mean(values, axis=1))
            # BUG FIX: the std column was computed from the hard-coded
            # 'lai' entry regardless of `prop` (KeyError for any other
            # property set); use the same property as the mean.
            df[prop + '_std'] = list(np.std(values, axis=1))
        dataframes[key] = df
    return dataframes
def S2_getBandData(S2_timseries_dataframe, aoi_shp, AOI_id_property, bufferdist, datestart, dateend):
    """Extract per-pixel Sentinel-2 band values for ALL AOI features.

    Multi-feature variant of S2_getBandData_within_bbox_single_feature:
    every key of S2_timseries_dataframe becomes one ee.Feature, and the
    per-image results are aggregated into arrays on each feature.

    Parameters:
        S2_timseries_dataframe: mapping of AOI id -> table with 'assetid'
            and 'crs' columns.
        aoi_shp: path to the AOI shapefile.
        AOI_id_property: shapefile attribute used as AOI identifier.
        bufferdist: buffer distance applied before taking each feature's
            bounding box; falsy means plain bounds.
        datestart, dateend: date range for filterDate().

    Returns:
        getInfo() dict of the mapped FeatureCollection.
    """
    bands= ['B3', 'B4','B5','B6','B7','B8A','B11','B12'] #]
#    properties = ['cos(View_Zenith)', 'cos(Sun_Zenith)', 'cos(Rel_Azimuth)']
    start = time.time()
    image_list = {}
    crs_list = {}
    for key, item in S2_timseries_dataframe.items():
        # Expand stored asset ids to full Earth Engine asset paths.
        full_assetids = "COPERNICUS/S2_SR/" + item['assetid']
        image_list[key] = [ee.Image(a) for a in full_assetids]
        # All rows are assumed to share one CRS; take the first.
        crs_list[key] = [crs for crs in item['crs']][0]
    attributes = st.getShapeAtrrtibutesWithIdentifier(aoi_shp, AOI_id_property)
    features = [ee.Feature(ee.Geometry.Polygon(st.wkt2coordinates(attributes[key]['geometry'])),\
                          {'name':key, 'image_list':image_list[key], 'crs':crs_list[key]}) for key,item in S2_timseries_dataframe.items()]
    featureCollection = ee.FeatureCollection(features)
    def S2_getBandData_feature(feature):
        # Reduce over the (optionally buffered) bounding box of the feature.
        if bufferdist:
            bbox = ee.Feature(feature.geometry().buffer(bufferdist).bounds(0.1,feature.get("crs")))
        else:
            bbox = ee.Feature(feature.geometry().bounds(0.1,feature.get("crs")))
        imageCollection = ee.ImageCollection.fromImages(feature.get("image_list"))\
                            .filterBounds(bbox.geometry())\
                            .filterDate(datestart,dateend)\
                            .select(bands)
#        imageCollection = imageCollection.map(S2_addNDVI)  # add and compute extra indices here?
        def S2_getBandData_image(img):
            img = img.clip(bbox.geometry())
            productid = img.get('PRODUCT_ID')
            assetid = img.get('assetid')
            tileid = img.get('MGRS_TILE')
            system_index = img.get('system:index')
            sun_azimuth = img.get('MEAN_SOLAR_AZIMUTH_ANGLE')
            sun_zenith = img.get('MEAN_SOLAR_ZENITH_ANGLE')
            # NOTE(review): unlike the single-feature variants these keep
            # the reduced ee.Array (no trailing .get([0])).
            view_azimuth = ee.Array([img.get('MEAN_INCIDENCE_AZIMUTH_ANGLE_%s'%b) for b in bands]).reduce(ee.Reducer.mean(), [0])
            view_zenith = ee.Array([img.get('MEAN_INCIDENCE_ZENITH_ANGLE_%s'%b) for b in bands]).reduce(ee.Reducer.mean(), [0])
            # Resample all bands to a common 10 m grid in the feature's CRS.
            img = img.resample('bilinear').reproject(crs=feature.get("crs"), scale=10)
            # get the lat lon and add the ndvi
            image_grid = ee.Image.pixelCoordinates(ee.Projection(feature.get("crs")))\
                .addBands([img.select(b) for b in bands])
            # apply reducer to list
            image_grid = image_grid.reduceRegion(
                reducer=ee.Reducer.toList(),
                geometry=bbox.geometry(),
                maxPixels=1e8,
                scale=10)
            # get data into arrays
            x_coords = ee.Array(image_grid.get("x"))
            y_coords = ee.Array(image_grid.get("y"))
#            band_data = []
#            [band_data.extend(b,ee.Array(image_grid.get("%s"%b))) for b in bands[:-1]]
            band_data = {b:ee.Array(image_grid.get("%s"%b)) for b in bands}
#            NDVI_array = ee.Array(image_grid.get("NDVI"))
#            B6_array = ee.Array(image_grid.get("B6"))
#            perform LAI et al. computation possibly here!
            # Pack everything on a dummy point feature for aggregation.
            tmpfeature = ee.Feature(ee.Geometry.Point([0,0]))\
                        .set('productid', productid)\
                        .set('system_index',system_index)\
                        .set('assetid', assetid)\
                        .set('tileid', tileid)\
                        .set('crs', feature.get("crs"))\
                        .set('sun_zenith',sun_zenith)\
                        .set('sun_azimuth',sun_azimuth)\
                        .set('view_zenith',view_zenith)\
                        .set('view_azimuth',view_azimuth)\
                        .set('x_coords', x_coords)\
                        .set('y_coords', y_coords)\
                        .set(band_data)
            return tmpfeature
        S2_image_data = imageCollection.map(S2_getBandData_image)
        # Collapse the per-image features into array-valued properties.
        return feature.set('productid',S2_image_data.aggregate_array('productid'))\
                .set('system_index', S2_image_data.aggregate_array('system_index'))\
                .set('assetid',S2_image_data.aggregate_array('assetid'))\
                .set('tileid',S2_image_data.aggregate_array('tileid'))\
                .set('crs',S2_image_data.aggregate_array('crs'))\
                .set('x_coords',S2_image_data.aggregate_array('x_coords'))\
                .set('y_coords',S2_image_data.aggregate_array('y_coords'))\
                .set('sun_zenith',S2_image_data.aggregate_array('sun_zenith'))\
                .set('sun_azimuth',S2_image_data.aggregate_array('sun_azimuth'))\
                .set('view_zenith',S2_image_data.aggregate_array('view_zenith'))\
                .set('view_azimuth',S2_image_data.aggregate_array('view_azimuth'))\
                .set({b:S2_image_data.aggregate_array(b) for b in bands})
    featureCollection = featureCollection.map(S2_getBandData_feature).getInfo()
    end = time.time()
    total_time = end - start
    print ("Processsing time in seconds %s"%total_time)
    return featureCollection
def S2_addNDVI(image):
    """Append an 'NDVI' band computed from B8 (NIR) and B4 (red)."""
    return image.addBands(image.normalizedDifference(['B8', 'B4']).rename('NDVI'))
def S2_computeNDVItimeseries(AOI_shp,AOI_id_property,datestart, dateend):
    """Compute a per-AOI NDVI time series with a cloud/quality uncertainty.

    For every feature of the AOI shapefile, maps over the Sentinel-2 SR
    collection in the given date range and derives, per image: mean and std
    of NDVI over the AOI, the number of usable pixels, and the fraction of
    pixels whose SCL class marks them unusable.

    Parameters:
        AOI_shp: path to the AOI shapefile.
        AOI_id_property: shapefile attribute used as AOI identifier.
        datestart, dateend: date range for filterDate().

    Returns:
        getInfo() dict of the mapped FeatureCollection; each feature holds
        array-valued properties (productid, ndvi_mean, ndvi_std, sample_n,
        quality_uncertainty, ...), one entry per image.
    """
    start = time.time()
#    aoi_shp = "/home/nevalaio/Dropbox/Tyoura/FMI/CARBO/analysis/ruukki_blocks_wgs84.shp"
    attributes = st.getShapeAtrrtibutesWithIdentifier(AOI_shp,AOI_id_property)
    features = [ee.Feature(ee.Geometry.Polygon(st.wkt2coordinates(item['geometry'])), {'name':key}) for key,item in attributes.items()]
    featureCollection = ee.FeatureCollection(features)
    def S2_computeNDVItimeseries_feature(feature):
        area = feature.geometry()
        collection = ee.ImageCollection("COPERNICUS/S2_SR").filterBounds(area)\
                        .filterDate(datestart,dateend)\
                        .select(['B8','B4','SCL'])
        collection = collection.map(S2_addNDVI)
        def S2_computeNDVItimeseries_image(img):
#            ndvi = ee.Image(img.select(['NDVI']))
#            scl = ee.Image(img.select(['SCL']))
            productid = img.get('PRODUCT_ID')
            assetid = img.id()
            tileid = img.get('MGRS_TILE')
            system_index = img.get('system:index')
            proj = img.select("B8").projection()
            # get the lat lon and add the ndvi
#            latlon = ee.Image.pixelLonLat().addBands([scl,ndvi])
            # apply reducer to list
            img = img.reduceRegion(
                reducer=ee.Reducer.toList(),
                geometry=area,
                maxPixels=1e8,
                scale=10)
            # get data into arrays; fall back to a sentinel value when the
            # region contained no pixels for the band.
            classdata = ee.Array(ee.Algorithms.If(img.get("SCL"),ee.Array(img.get("SCL")),ee.Array([0])))
            ndvidata = ee.Array(ee.Algorithms.If(img.get("NDVI"),ee.Array(img.get("NDVI")),ee.Array([-9999])))
            # SCL classes 0,1,3,7,8,9,10,11 (no-data, saturated, shadow,
            # clouds, cirrus, snow...) are flagged as bad pixels.
            classmask = classdata.eq(0).add(classdata.eq(1).add(classdata.eq(3).add(classdata.eq(7)\
                        .add(classdata.eq(8).add(classdata.eq(9)\
                        .add(classdata.eq(10).add(classdata.eq(11)\
                        )))))))
            badcount = classmask.reduce(ee.Reducer.sum(),[0])
            totalcount = classmask.length()
            goodcount = totalcount.get([0])
#            ndvidata_masked = ndvidata.mask(classmask.Not())
            mean = ndvidata.reduce(ee.Reducer.mean(),[0]).get([0])
            std = ndvidata.reduce(ee.Reducer.stdDev(),[0]).get([0])
            # Fraction of flagged pixels = quality uncertainty of this date.
            qualityUncertainty = badcount.divide(totalcount).get([0])
            tmpfeature = ee.Feature(ee.Geometry.Point([0,0]))\
                        .set('productid', productid)\
                        .set('system_index',system_index)\
                        .set('assetid', assetid)\
                        .set('tileid', tileid)\
                        .set('projection', proj)\
                        .set('sample_n', goodcount)\
                        .set('ndvi_mean',mean)\
                        .set('ndvi_std',std)\
                        .set('quality_uncertainty',qualityUncertainty)
            return tmpfeature
        ndvi_timeseries = collection.map(S2_computeNDVItimeseries_image)
        # Collapse the per-image features into array-valued properties.
        return feature.set('productid',ndvi_timeseries.aggregate_array('productid'))\
                .set('system_index', ndvi_timeseries.aggregate_array('system_index'))\
                .set('assetid',ndvi_timeseries.aggregate_array('assetid'))\
                .set('tileid',ndvi_timeseries.aggregate_array('tileid'))\
                .set('projection',ndvi_timeseries.aggregate_array('projection'))\
                .set('sample_n',ndvi_timeseries.aggregate_array('sample_n'))\
                .set('ndvi_mean',ndvi_timeseries.aggregate_array('ndvi_mean'))\
                .set('ndvi_std',ndvi_timeseries.aggregate_array('ndvi_std'))\
                .set('quality_uncertainty',ndvi_timeseries.aggregate_array('quality_uncertainty'))
    featureCollection = featureCollection.map(S2_computeNDVItimeseries_feature).getInfo()
    end = time.time()
    total_time = end - start
    print ("Processsing time in seconds %s"%total_time)
    return featureCollection
def S2_getTimeseriesQualityInformation(AOI_shp,AOI_id_property,datestart, dateend):
    """Compute a per-image cloud/quality time series for every polygon in a shapefile.

    For each feature in ``AOI_shp`` (identified by ``AOI_id_property``) and each
    Sentinel-2 SR image intersecting it between ``datestart`` and ``dateend``,
    the SCL (scene classification) band is reduced to a ``quality_uncertainty``
    value (fraction of pixels in bad classes) plus product metadata.
    Returns the ``getInfo()`` dict of the mapped feature collection.
    """
    start = time.time()
    # Read feature geometries (WKT) keyed by the identifier property.
    attributes = st.getShapeAtrrtibutesWithIdentifier(AOI_shp,AOI_id_property)
    features = [ee.Feature(ee.Geometry.Polygon(st.wkt2coordinates(item['geometry'])), {'name':key}) for key,item in attributes.items()]
    featureCollection = ee.FeatureCollection(features)
    def S2_getTimeseriesQualityInformation_feature(feature):
        # Runs server-side for one AOI polygon: restrict the S2 SR collection
        # to the polygon and date window, keeping only the SCL band.
        area = feature.geometry()
        collection = ee.ImageCollection("COPERNICUS/S2_SR").filterBounds(area)\
            .filterDate(datestart,dateend)\
            .select(['SCL'])
        def S2_getTimeseriesQualityInformation_image(img):
            # Per-image metadata carried through to the output feature.
            productid = img.get('PRODUCT_ID')
            assetid = img.id()
            tileid = img.get('MGRS_TILE')
            system_index = img.get('system:index')
            proj = img.select("SCL").projection()
            # apply reducer to list
            img = img.reduceRegion(
                reducer=ee.Reducer.toList(),
                geometry=area,
                maxPixels=1e8,
                scale=10)
            # get data into arrays
            # ee.Algorithms.If guards against images with no SCL pixels in the AOI.
            classdata = ee.Array(ee.Algorithms.If(img.get("SCL"),ee.Array(img.get("SCL")),ee.Array([0])))
            # Indicator sum over SCL classes 0,1,3,7,8,9,10,11 -- presumably the
            # "bad" classes (no-data/saturated/shadow/cloud/cirrus/snow); each
            # eq() yields 0/1 so the nested add() marks bad pixels with 1.
            classmask = classdata.eq(0).add(classdata.eq(1).add(classdata.eq(3).add(classdata.eq(7)\
                .add(classdata.eq(8).add(classdata.eq(9)\
                .add(classdata.eq(10).add(classdata.eq(11)\
                )))))))
            badcount = classmask.reduce(ee.Reducer.sum(),[0])
            totalcount = classmask.length()
            goodcount = totalcount.get([0])
            # Fraction of bad pixels in the AOI for this image.
            qualityUncertainty = badcount.divide(totalcount).get([0])
            # Dummy point feature used purely as a property carrier.
            tmpfeature = ee.Feature(ee.Geometry.Point([0,0]))\
                .set('productid', productid)\
                .set('system_index',system_index)\
                .set('assetid', assetid)\
                .set('tileid', tileid)\
                .set('projection', proj)\
                .set('sample_n', goodcount)\
                .set('quality_uncertainty',qualityUncertainty)
            return tmpfeature
        QI_timeseries = collection.map(S2_getTimeseriesQualityInformation_image)
        # Collapse the per-image features into parallel list properties on the AOI feature.
        return feature.set('productid',QI_timeseries.aggregate_array('productid'))\
            .set('system_index', QI_timeseries.aggregate_array('system_index'))\
            .set('assetid',QI_timeseries.aggregate_array('assetid'))\
            .set('tileid',QI_timeseries.aggregate_array('tileid'))\
            .set('projection',QI_timeseries.aggregate_array('projection'))\
            .set('sample_n',QI_timeseries.aggregate_array('sample_n'))\
            .set('quality_uncertainty',QI_timeseries.aggregate_array('quality_uncertainty'))
    # getInfo() triggers the server-side computation and pulls results client-side.
    featureCollection = featureCollection.map(S2_getTimeseriesQualityInformation_feature).getInfo()
    end = time.time()
    total_time = end - start
    print ("Processsing time in seconds %s"%total_time)
    return featureCollection
def S2_featureCollection2Dataframe(featureCollection):
    """Convert a Sentinel-2 time-series feature collection into pandas DataFrames.

    ``featureCollection`` is the ``getInfo()`` dict of a mapped EE feature
    collection. For each feature, 'productid' is parsed into a 'Date' column,
    'projection' is replaced by a 'crs' column, and every remaining property
    is copied verbatim as a column.

    Returns a dict mapping each feature's 'name' property to its DataFrame.
    """
    dataframes = {}
    # Iterate the features directly instead of indexing by position; this
    # also avoids repeating the deep ['features'][i]['properties'] lookups.
    for feature in featureCollection['features']:
        properties = feature['properties']
        dates = [st.sentinelTitle2Datetime(pid) for pid in properties['productid']]
        crs = [proj['crs'] for proj in properties['projection']]
        frame_data = {'Date': dates, 'crs': crs}
        # Copy every remaining property; 'projection' is already represented
        # by 'crs', and 'Date'/'crs' must not be overwritten.
        for prop, data in properties.items():
            if prop not in ('Date', 'crs', 'projection'):
                frame_data[prop] = data
        dataframes[properties['name']] = pd.DataFrame(frame_data)
    return dataframes
def S2_NDVIfeatureCollection2Dataframe(featureCollection):
    """Convert a Sentinel-2 NDVI time-series feature collection into DataFrames.

    ``featureCollection`` is the ``getInfo()`` dict produced by
    ``S2_computeNDVItimeseries``. Each feature becomes one DataFrame with one
    row per image: acquisition date (parsed from the product id), product /
    asset / tile metadata, CRS, good-pixel count and NDVI statistics.

    Returns a dict mapping each feature's 'name' property to its DataFrame.
    """
    column_order = ['Date', 'productid', 'system_index', 'assetid', 'tileid',
                    'crs', 'sample_n', 'ndvi_mean', 'ndvi_std',
                    'quality_uncertainty']
    dataframes = {}
    # Iterate the features directly instead of indexing by position; this
    # also avoids repeating the deep ['features'][i]['properties'] lookups.
    for feature in featureCollection['features']:
        properties = feature['properties']
        productid = properties['productid']
        frame_data = {
            'Date': [st.sentinelTitle2Datetime(pid) for pid in productid],
            'productid': productid,
            'system_index': properties['system_index'],
            'assetid': properties['assetid'],
            'tileid': properties['tileid'],
            'crs': [proj['crs'] for proj in properties['projection']],
            'sample_n': properties['sample_n'],
            'ndvi_mean': properties['ndvi_mean'],
            'ndvi_std': properties['ndvi_std'],
            'quality_uncertainty': properties['quality_uncertainty'],
        }
        dataframes[properties['name']] = pd.DataFrame(frame_data,
                                                      columns=column_order)
    return dataframes
def S2_exportImageCollection(assetIDs, aoi):
    """Export the given Sentinel-2 SR assets to Google Drive, clipped metadata-wise
    to the region of ``aoi`` (a WKT polygon string), at 10 m resolution."""
    images = [ee.Image("COPERNICUS/S2_SR/" + asset_id) for asset_id in assetIDs]
    collection = ee.ImageCollection(images)
    aoi_feature = ee.Feature(ee.Geometry.Polygon(st.wkt2coordinates(aoi)))
    export_region = tools.geometry.getRegion(aoi_feature)
    batch.imagecollection.toDrive(collection, 'FOLDER', region=export_region,
                                  scale=10, verbose=True)
#----------------- LANDSAT-8 -------------------------------------
def L8_addNDVI(image):
    """Return ``image`` with an extra 'NDVI' band computed from Landsat-8
    bands B5 (NIR) and B4 (red)."""
    ndvi_band = image.normalizedDifference(['B5', 'B4']).rename('NDVI')
    return image.addBands(ndvi_band)
def L8_computeNDVItimeseries(AOI_shp,AOI_id_property,datestart, dateend):
    """Compute a per-image NDVI time series for every polygon in a shapefile
    from Landsat-8 surface reflectance data.

    For each feature in ``AOI_shp`` (identified by ``AOI_id_property``) and
    each LC08 SR image between ``datestart`` and ``dateend``, this aggregates
    NDVI mean/std over the polygon plus a pixel-qa based quality fraction.
    Returns the ``getInfo()`` dict of the mapped feature collection.
    """
    start = time.time()
    # Read feature geometries (WKT) keyed by the identifier property.
    attributes = st.getShapeAtrrtibutesWithIdentifier(AOI_shp,AOI_id_property)
    features = [ee.Feature(ee.Geometry.Polygon(st.wkt2coordinates(item['geometry'])), {'name':key}) for key,item in attributes.items()]
    featureCollection = ee.FeatureCollection(features)
    def L8_comuteNDVItimeseries_feature(feature):
        # Runs server-side for one AOI polygon.
        area = feature.geometry()
        collection = ee.ImageCollection("LANDSAT/LC08/C01/T1_SR").filterBounds(area)\
            .filterDate(datestart,dateend)\
            .select(['B5','B4','pixel_qa'])
        # Add the derived NDVI band to every image before reducing.
        collection = collection.map(L8_addNDVI)
        def L8_computeNDVItimeseries_image(img):
            # ndvi = ee.Image(img.select(['NDVI']))
            dataid = img.get('LANDSAT_ID')
            sensingtime = img.get('SENSING_TIME')
            # qa = ee.Image(img.select(['pixel_qa']))
            # get the lat lon and add the ndvi
            # latlon = ee.Image.pixelLonLat().addBands([qa, ndvi])
            # apply reducer to list
            img = img.reduceRegion(
                reducer=ee.Reducer.toList(),
                geometry=area,
                maxPixels=1e8,
                scale=30);
            # get data into arrays
            # ee.Algorithms.If guards against images with no pixels in the AOI.
            classdata = ee.Array(ee.Algorithms.If(img.get("pixel_qa"),ee.Array(img.get("pixel_qa")),ee.Array([0])))
            ndvidata = ee.Array(ee.Algorithms.If(img.get("NDVI"),ee.Array(img.get("NDVI")),ee.Array([-9999])))
            # classdata = ee.Array(latlon.get("pixel_qa"))
            # ndvidata = ee.Array(latlon.get("NDVI"))
            mean = ndvidata.reduce(ee.Reducer.mean(),[0]).get([0])
            std = ndvidata.reduce(ee.Reducer.stdDev(),[0]).get([0])
            # Pixels whose pixel_qa is neither 322 nor 386 are counted as bad;
            # presumably these two are the "clear" QA codes -- verify against
            # the Landsat-8 Collection 1 pixel_qa documentation.
            classmask = classdata.eq(322).Or(classdata.eq(386)).Not()
            badcount = classmask.reduce(ee.Reducer.sum(),[0])
            totalcount = classmask.length()
            # Fraction of bad pixels in the AOI for this image.
            qualityUncertainty = badcount.divide(totalcount).get([0])
            # Dummy point feature used purely as a property carrier.
            tmpfeature = ee.Feature(ee.Geometry.Point([0,0]))\
                .set('dataid',dataid)\
                .set('sensing_time', sensingtime)\
                .set('ndvi_mean',mean)\
                .set('ndvi_std',std)\
                .set('quality_uncertainty',qualityUncertainty)
            return tmpfeature
        ndvi_timeseries = collection.map(L8_computeNDVItimeseries_image)
        # Collapse the per-image features into parallel list properties on the AOI feature.
        return feature.set('dataid',ndvi_timeseries.aggregate_array('dataid'))\
            .set('sensing_time',ndvi_timeseries.aggregate_array('sensing_time'))\
            .set('ndvi_mean',ndvi_timeseries.aggregate_array('ndvi_mean'))\
            .set('ndvi_std',ndvi_timeseries.aggregate_array('ndvi_std'))\
            .set('quality_uncertainty',ndvi_timeseries.aggregate_array('quality_uncertainty'))
    # getInfo() triggers the server-side computation and pulls results client-side.
    featureCollection = featureCollection.map(L8_comuteNDVItimeseries_feature).getInfo()
    end = time.time()
    total_time = end - start
    print ("Processsing time in seconds %s"%total_time)
    return featureCollection
def L8_featureCollection2Dataframe(L8_featureCollection):
    """Convert a Landsat-8 NDVI time-series feature collection into DataFrames.

    ``L8_featureCollection`` is the ``getInfo()`` dict produced by
    ``L8_computeNDVItimeseries``. Each feature becomes one DataFrame with a
    row per image: acquisition date (parsed from 'sensing_time'), the Landsat
    scene id, NDVI statistics and the quality fraction.

    Returns a dict mapping each feature's 'name' property to its DataFrame.
    """
    dataframes = {}
    # Iterate the features directly instead of indexing by position; this
    # also avoids repeating the deep ['features'][i]['properties'] lookups.
    for feature in L8_featureCollection['features']:
        properties = feature['properties']
        # SENSING_TIME looks like '2017-05-05T17:53:31.1650000Z'; the split
        # drops the fractional seconds / trailing 'Z' before parsing.
        dates = [datetime.datetime.strptime(t.split('.')[0], '%Y-%m-%dT%H:%M:%S')
                 for t in properties['sensing_time']]
        frame_data = {'Date': dates,
                      'dataid': properties['dataid'],
                      'ndvi_mean': properties['ndvi_mean'],
                      'ndvi_std': properties['ndvi_std'],
                      'quality_uncertainty': properties['quality_uncertainty']}
        dataframes[properties['name']] = pd.DataFrame(
            frame_data,
            columns=['Date', 'dataid', 'ndvi_mean', 'ndvi_std',
                     'quality_uncertainty'])
    return dataframes
|
[
"pandas.DataFrame",
"satelliteTools.sentinelTitle2Datetime",
"ee.Reducer.stdDev",
"satelliteTools.getShapeAtrrtibutesWithIdentifier",
"satelliteTools.wkt2coordinates",
"ee.Reducer.toList",
"ee.FeatureCollection",
"ee.ImageCollection",
"ee.Image",
"time.time",
"geetools.tools.geometry.getRegion",
"ee.Reducer.mean",
"ee.Array",
"numpy.array",
"ee.Geometry.Point",
"ee.Reducer.sum",
"ee.Initialize"
] |
[((249, 264), 'ee.Initialize', 'ee.Initialize', ([], {}), '()\n', (262, 264), False, 'import ee\n'), ((625, 636), 'time.time', 'time.time', ([], {}), '()\n', (634, 636), False, 'import time\n'), ((930, 992), 'satelliteTools.getShapeAtrrtibutesWithIdentifier', 'st.getShapeAtrrtibutesWithIdentifier', (['aoi_shp', 'AOI_id_property'], {}), '(aoi_shp, AOI_id_property)\n', (966, 992), True, 'import satelliteTools as st\n'), ((4186, 4197), 'time.time', 'time.time', ([], {}), '()\n', (4195, 4197), False, 'import time\n'), ((4596, 4607), 'time.time', 'time.time', ([], {}), '()\n', (4605, 4607), False, 'import time\n'), ((4901, 4963), 'satelliteTools.getShapeAtrrtibutesWithIdentifier', 'st.getShapeAtrrtibutesWithIdentifier', (['aoi_shp', 'AOI_id_property'], {}), '(aoi_shp, AOI_id_property)\n', (4937, 4963), True, 'import satelliteTools as st\n'), ((7975, 7986), 'time.time', 'time.time', ([], {}), '()\n', (7984, 7986), False, 'import time\n'), ((9929, 9940), 'time.time', 'time.time', ([], {}), '()\n', (9938, 9940), False, 'import time\n'), ((10244, 10306), 'satelliteTools.getShapeAtrrtibutesWithIdentifier', 'st.getShapeAtrrtibutesWithIdentifier', (['aoi_shp', 'AOI_id_property'], {}), '(aoi_shp, AOI_id_property)\n', (10280, 10306), True, 'import satelliteTools as st\n'), ((10569, 10599), 'ee.FeatureCollection', 'ee.FeatureCollection', (['features'], {}), '(features)\n', (10589, 10599), False, 'import ee\n'), ((14843, 14854), 'time.time', 'time.time', ([], {}), '()\n', (14852, 14854), False, 'import time\n'), ((15176, 15187), 'time.time', 'time.time', ([], {}), '()\n', (15185, 15187), False, 'import time\n'), ((15300, 15362), 'satelliteTools.getShapeAtrrtibutesWithIdentifier', 'st.getShapeAtrrtibutesWithIdentifier', (['AOI_shp', 'AOI_id_property'], {}), '(AOI_shp, AOI_id_property)\n', (15336, 15362), True, 'import satelliteTools as st\n'), ((15527, 15557), 'ee.FeatureCollection', 'ee.FeatureCollection', (['features'], {}), '(features)\n', (15547, 15557), False, 'import ee\n'), 
((19171, 19182), 'time.time', 'time.time', ([], {}), '()\n', (19180, 19182), False, 'import time\n'), ((19405, 19416), 'time.time', 'time.time', ([], {}), '()\n', (19414, 19416), False, 'import time\n'), ((19439, 19501), 'satelliteTools.getShapeAtrrtibutesWithIdentifier', 'st.getShapeAtrrtibutesWithIdentifier', (['AOI_shp', 'AOI_id_property'], {}), '(AOI_shp, AOI_id_property)\n', (19475, 19501), True, 'import satelliteTools as st\n'), ((19661, 19691), 'ee.FeatureCollection', 'ee.FeatureCollection', (['features'], {}), '(features)\n', (19681, 19691), False, 'import ee\n'), ((22558, 22569), 'time.time', 'time.time', ([], {}), '()\n', (22567, 22569), False, 'import time\n'), ((25674, 25700), 'ee.ImageCollection', 'ee.ImageCollection', (['images'], {}), '(images)\n', (25692, 25700), False, 'import ee\n'), ((26172, 26183), 'time.time', 'time.time', ([], {}), '()\n', (26181, 26183), False, 'import time\n'), ((26201, 26263), 'satelliteTools.getShapeAtrrtibutesWithIdentifier', 'st.getShapeAtrrtibutesWithIdentifier', (['AOI_shp', 'AOI_id_property'], {}), '(AOI_shp, AOI_id_property)\n', (26237, 26263), True, 'import satelliteTools as st\n'), ((26423, 26453), 'ee.FeatureCollection', 'ee.FeatureCollection', (['features'], {}), '(features)\n', (26443, 26453), False, 'import ee\n'), ((29227, 29238), 'time.time', 'time.time', ([], {}), '()\n', (29236, 29238), False, 'import time\n'), ((796, 807), 'ee.Image', 'ee.Image', (['a'], {}), '(a)\n', (804, 807), False, 'import ee\n'), ((4767, 4778), 'ee.Image', 'ee.Image', (['a'], {}), '(a)\n', (4775, 4778), False, 'import ee\n'), ((9396, 9432), 'pandas.DataFrame', 'pd.DataFrame', (["{'Date': item['Date']}"], {}), "({'Date': item['Date']})\n", (9408, 9432), True, 'import pandas as pd\n'), ((23551, 23587), 'pandas.DataFrame', 'pd.DataFrame', (['featureCollection_dict'], {}), '(featureCollection_dict)\n', (23563, 23587), True, 'import pandas as pd\n'), ((25303, 25482), 'pandas.DataFrame', 'pd.DataFrame', (['featureCollection_dict'], 
{'columns': "['Date', 'productid', 'system_index', 'assetid', 'tileid', 'crs',\n 'sample_n', 'ndvi_mean', 'ndvi_std', 'quality_uncertainty']"}), "(featureCollection_dict, columns=['Date', 'productid',\n 'system_index', 'assetid', 'tileid', 'crs', 'sample_n', 'ndvi_mean',\n 'ndvi_std', 'quality_uncertainty'])\n", (25315, 25482), True, 'import pandas as pd\n'), ((25609, 25626), 'ee.Image', 'ee.Image', (['assetid'], {}), '(assetid)\n', (25617, 25626), False, 'import ee\n'), ((30415, 30531), 'pandas.DataFrame', 'pd.DataFrame', (['featureCollection_dict'], {'columns': "['Date', 'dataid', 'ndvi_mean', 'ndvi_std', 'quality_uncertainty']"}), "(featureCollection_dict, columns=['Date', 'dataid', 'ndvi_mean',\n 'ndvi_std', 'quality_uncertainty'])\n", (30427, 30531), True, 'import pandas as pd\n'), ((1038, 1085), 'satelliteTools.wkt2coordinates', 'st.wkt2coordinates', (["attributes[key]['geometry']"], {}), "(attributes[key]['geometry'])\n", (1056, 1085), True, 'import satelliteTools as st\n'), ((5009, 5056), 'satelliteTools.wkt2coordinates', 'st.wkt2coordinates', (["attributes[key]['geometry']"], {}), "(attributes[key]['geometry'])\n", (5027, 5056), True, 'import satelliteTools as st\n'), ((8609, 8645), 'satelliteTools.sentinelTitle2Datetime', 'st.sentinelTitle2Datetime', (['productid'], {}), '(productid)\n', (8634, 8645), True, 'import satelliteTools as st\n'), ((10126, 10137), 'ee.Image', 'ee.Image', (['a'], {}), '(a)\n', (10134, 10137), False, 'import ee\n'), ((23188, 23216), 'satelliteTools.sentinelTitle2Datetime', 'st.sentinelTitle2Datetime', (['d'], {}), '(d)\n', (23213, 23216), True, 'import satelliteTools as st\n'), ((24175, 24203), 'satelliteTools.sentinelTitle2Datetime', 'st.sentinelTitle2Datetime', (['d'], {}), '(d)\n', (24200, 24203), True, 'import satelliteTools as st\n'), ((25742, 25765), 'satelliteTools.wkt2coordinates', 'st.wkt2coordinates', (['aoi'], {}), '(aoi)\n', (25760, 25765), True, 'import satelliteTools as st\n'), ((25836, 25865), 
'geetools.tools.geometry.getRegion', 'tools.geometry.getRegion', (['aoi'], {}), '(aoi)\n', (25860, 25865), False, 'from geetools import batch, tools\n'), ((2818, 2837), 'ee.Reducer.toList', 'ee.Reducer.toList', ([], {}), '()\n', (2835, 2837), False, 'import ee\n'), ((6618, 6637), 'ee.Reducer.toList', 'ee.Reducer.toList', ([], {}), '()\n', (6635, 6637), False, 'import ee\n'), ((10354, 10401), 'satelliteTools.wkt2coordinates', 'st.wkt2coordinates', (["attributes[key]['geometry']"], {}), "(attributes[key]['geometry'])\n", (10372, 10401), True, 'import satelliteTools as st\n'), ((11785, 11802), 'ee.Reducer.mean', 'ee.Reducer.mean', ([], {}), '()\n', (11800, 11802), False, 'import ee\n'), ((11913, 11930), 'ee.Reducer.mean', 'ee.Reducer.mean', ([], {}), '()\n', (11928, 11930), False, 'import ee\n'), ((15414, 15450), 'satelliteTools.wkt2coordinates', 'st.wkt2coordinates', (["item['geometry']"], {}), "(item['geometry'])\n", (15432, 15450), True, 'import satelliteTools as st\n'), ((17340, 17356), 'ee.Reducer.sum', 'ee.Reducer.sum', ([], {}), '()\n', (17354, 17356), False, 'import ee\n'), ((19548, 19584), 'satelliteTools.wkt2coordinates', 'st.wkt2coordinates', (["item['geometry']"], {}), "(item['geometry'])\n", (19566, 19584), True, 'import satelliteTools as st\n'), ((21113, 21129), 'ee.Reducer.sum', 'ee.Reducer.sum', ([], {}), '()\n', (21127, 21129), False, 'import ee\n'), ((26310, 26346), 'satelliteTools.wkt2coordinates', 'st.wkt2coordinates', (["item['geometry']"], {}), "(item['geometry'])\n", (26328, 26346), True, 'import satelliteTools as st\n'), ((28132, 28148), 'ee.Reducer.sum', 'ee.Reducer.sum', ([], {}), '()\n', (28146, 28148), False, 'import ee\n'), ((2261, 2278), 'ee.Reducer.mean', 'ee.Reducer.mean', ([], {}), '()\n', (2276, 2278), False, 'import ee\n'), ((2394, 2411), 'ee.Reducer.mean', 'ee.Reducer.mean', ([], {}), '()\n', (2409, 2411), False, 'import ee\n'), ((6061, 6078), 'ee.Reducer.mean', 'ee.Reducer.mean', ([], {}), '()\n', (6076, 6078), False, 'import 
ee\n'), ((6194, 6211), 'ee.Reducer.mean', 'ee.Reducer.mean', ([], {}), '()\n', (6209, 6211), False, 'import ee\n'), ((9509, 9529), 'numpy.array', 'np.array', (['item[prop]'], {}), '(item[prop])\n', (9517, 9529), True, 'import numpy as np\n'), ((9597, 9618), 'numpy.array', 'np.array', (["item['lai']"], {}), "(item['lai'])\n", (9605, 9618), True, 'import numpy as np\n'), ((12364, 12383), 'ee.Reducer.toList', 'ee.Reducer.toList', ([], {}), '()\n', (12381, 12383), False, 'import ee\n'), ((16530, 16549), 'ee.Reducer.toList', 'ee.Reducer.toList', ([], {}), '()\n', (16547, 16549), False, 'import ee\n'), ((16779, 16792), 'ee.Array', 'ee.Array', (['[0]'], {}), '([0])\n', (16787, 16792), False, 'import ee\n'), ((16886, 16903), 'ee.Array', 'ee.Array', (['[-9999]'], {}), '([-9999])\n', (16894, 16903), False, 'import ee\n'), ((20414, 20433), 'ee.Reducer.toList', 'ee.Reducer.toList', ([], {}), '()\n', (20431, 20433), False, 'import ee\n'), ((20663, 20676), 'ee.Array', 'ee.Array', (['[0]'], {}), '([0])\n', (20671, 20676), False, 'import ee\n'), ((27384, 27403), 'ee.Reducer.toList', 'ee.Reducer.toList', ([], {}), '()\n', (27401, 27403), False, 'import ee\n'), ((27623, 27636), 'ee.Array', 'ee.Array', (['[0]'], {}), '([0])\n', (27631, 27636), False, 'import ee\n'), ((27730, 27747), 'ee.Array', 'ee.Array', (['[-9999]'], {}), '([-9999])\n', (27738, 27747), False, 'import ee\n'), ((17548, 17565), 'ee.Reducer.mean', 'ee.Reducer.mean', ([], {}), '()\n', (17563, 17565), False, 'import ee\n'), ((17614, 17633), 'ee.Reducer.stdDev', 'ee.Reducer.stdDev', ([], {}), '()\n', (17631, 17633), False, 'import ee\n'), ((27922, 27939), 'ee.Reducer.mean', 'ee.Reducer.mean', ([], {}), '()\n', (27937, 27939), False, 'import ee\n'), ((27988, 28007), 'ee.Reducer.stdDev', 'ee.Reducer.stdDev', ([], {}), '()\n', (28005, 28007), False, 'import ee\n'), ((15674, 15712), 'ee.ImageCollection', 'ee.ImageCollection', (['"""COPERNICUS/S2_SR"""'], {}), "('COPERNICUS/S2_SR')\n", (15692, 15712), False, 'import ee\n'), 
((19818, 19856), 'ee.ImageCollection', 'ee.ImageCollection', (['"""COPERNICUS/S2_SR"""'], {}), "('COPERNICUS/S2_SR')\n", (19836, 19856), False, 'import ee\n'), ((26578, 26622), 'ee.ImageCollection', 'ee.ImageCollection', (['"""LANDSAT/LC08/C01/T1_SR"""'], {}), "('LANDSAT/LC08/C01/T1_SR')\n", (26596, 26622), False, 'import ee\n'), ((28317, 28342), 'ee.Geometry.Point', 'ee.Geometry.Point', (['[0, 0]'], {}), '([0, 0])\n', (28334, 28342), False, 'import ee\n'), ((21343, 21368), 'ee.Geometry.Point', 'ee.Geometry.Point', (['[0, 0]'], {}), '([0, 0])\n', (21360, 21368), False, 'import ee\n'), ((17767, 17792), 'ee.Geometry.Point', 'ee.Geometry.Point', (['[0, 0]'], {}), '([0, 0])\n', (17784, 17792), False, 'import ee\n'), ((3522, 3547), 'ee.Geometry.Point', 'ee.Geometry.Point', (['[0, 0]'], {}), '([0, 0])\n', (3539, 3547), False, 'import ee\n'), ((7311, 7336), 'ee.Geometry.Point', 'ee.Geometry.Point', (['[0, 0]'], {}), '([0, 0])\n', (7328, 7336), False, 'import ee\n'), ((13116, 13141), 'ee.Geometry.Point', 'ee.Geometry.Point', (['[0, 0]'], {}), '([0, 0])\n', (13133, 13141), False, 'import ee\n')]
|
"""
General-purpose tags for working with various aspects of Snippets --
whether a user has bookmarked/rated a given Snippet, etc.
"""
from django import template
from cab.models import Bookmark, Rating, Snippet
# Tag library instance; the ``register.tag`` calls at the bottom of this
# module attach the compilation functions to it.
register = template.Library()
class IfBookmarkedNode(template.Node):
    """Renders one of two nodelists depending on whether a given User has
    bookmarked a given Snippet."""
    def __init__(self, user_id, snippet_id, nodelist_true, nodelist_false):
        self.user_id = user_id
        self.snippet_id = snippet_id
        self.nodelist_true, self.nodelist_false = nodelist_true, nodelist_false

    def render(self, context):
        # Resolve into locals rather than overwriting self.*: a Node instance
        # is reused across renders, so mutating it would make every later
        # render see the first render's resolved values instead of the
        # original variable names.
        try:
            user_id = template.resolve_variable(self.user_id, context)
            snippet_id = template.resolve_variable(self.snippet_id, context)
        except template.VariableDoesNotExist:
            return ''
        if Bookmark.objects.already_bookmarked(user_id, snippet_id):
            return self.nodelist_true.render(context)
        return self.nodelist_false.render(context)
class IfRatedNode(template.Node):
    """Renders one of two nodelists depending on whether a given User has
    rated a given Snippet."""
    def __init__(self, user_id, snippet_id, nodelist_true, nodelist_false):
        self.user_id = user_id
        self.snippet_id = snippet_id
        self.nodelist_true, self.nodelist_false = nodelist_true, nodelist_false

    def render(self, context):
        # Resolve into locals rather than overwriting self.*: a Node instance
        # is reused across renders, so mutating it would make every later
        # render see the first render's resolved values instead of the
        # original variable names.
        try:
            user_id = template.resolve_variable(self.user_id, context)
            snippet_id = template.resolve_variable(self.snippet_id, context)
        except template.VariableDoesNotExist:
            return ''
        if Rating.objects.already_rated(user_id, snippet_id):
            return self.nodelist_true.render(context)
        return self.nodelist_false.render(context)
class LatestSnippetsNode(template.Node):
    """Stores the latest ``num`` Snippets in the context under ``varname``."""
    def __init__(self, num, varname):
        # ``num`` arrives as the raw token string (e.g. "5"); convert it once
        # here so the queryset slice below uses an integer bound instead of a
        # string.
        self.num, self.varname = int(num), varname

    def render(self, context):
        context[self.varname] = Snippet.objects.all().select_related()[:self.num]
        return ''
class RatingForSnippetNode(template.Node):
    """Stores the rating score of a given Snippet in the context under
    ``context_var``."""
    def __init__(self, snippet_id, context_var):
        self.snippet_id = snippet_id
        self.context_var = context_var

    def render(self, context):
        # Resolve into a local rather than overwriting self.snippet_id: a
        # Node instance is reused across renders, so mutating it would make
        # later renders resolve an already-resolved value.
        try:
            snippet_id = template.resolve_variable(self.snippet_id, context)
        except template.VariableDoesNotExist:
            return ''
        context[self.context_var] = Rating.objects.score_for_snippet(snippet_id)
        return ''
class RatingByUserNode(template.Node):
    """Stores a given User's Rating of a given Snippet (if any) in the
    context under ``context_var``."""
    def __init__(self, user_id, snippet_id, context_var):
        self.user_id, self.snippet_id = user_id, snippet_id
        self.context_var = context_var

    def render(self, context):
        # Resolve into locals rather than overwriting self.*: a Node instance
        # is reused across renders, so mutating it would make every later
        # render see the first render's resolved values instead of the
        # original variable names.
        try:
            user_id = template.resolve_variable(self.user_id, context)
            snippet_id = template.resolve_variable(self.snippet_id, context)
            rating = Rating.objects.get(user__pk=user_id, snippet__pk=snippet_id)
        except template.VariableDoesNotExist:
            return ''
        except Rating.DoesNotExist:
            return ''
        context[self.context_var] = rating
        return ''
def do_if_bookmarked(parser, token):
    """
    Returns either of two blocks of content depending on whether a
    given User has bookmarked a given Snippet.

    Example::

        {% if_bookmarked user.id object.id %}
            <p>This Snippet is in your bookmarks.</p>
        {% else %}
            <p>You haven't bookmarked this Snippet.</p>
        {% endif_bookmarked %}

    """
    # NOTE: the docstring previously showed ``if_already_bookmarked`` /
    # ``endif_already_bookmarked``, but the tag is registered as
    # ``if_bookmarked`` and this parser only stops at ``endif_bookmarked``.
    bits = token.contents.split()
    if len(bits) != 3:
        raise template.TemplateSyntaxError("'%s' tag takes exactly two arguments" % bits[0])
    nodelist_true = parser.parse(('else', 'endif_bookmarked'))
    token = parser.next_token()
    if token.contents == 'else':
        nodelist_false = parser.parse(('endif_bookmarked',))
        parser.delete_first_token()
    else:
        nodelist_false = template.NodeList()
    return IfBookmarkedNode(bits[1], bits[2], nodelist_true, nodelist_false)
def do_if_rated(parser, token):
    """
    Renders one of two template blocks depending on whether a given User
    has already rated a given Snippet.

    Example::

        {% if_rated user.id object.id %}
            <p>You have already rated this Snippet.</p>
        {% else %}
            <p>You haven't rated this Snippet yet.</p>
        {% endif_rated %}

    """
    args = token.contents.split()
    if len(args) != 3:
        raise template.TemplateSyntaxError("'%s' tag takes exactly two arguments" % args[0])
    nodelist_true = parser.parse(('else', 'endif_rated'))
    # If an {% else %} branch is present, consume it; otherwise the false
    # branch renders nothing.
    if parser.next_token().contents == 'else':
        nodelist_false = parser.parse(('endif_rated',))
        parser.delete_first_token()
    else:
        nodelist_false = template.NodeList()
    return IfRatedNode(args[1], args[2], nodelist_true, nodelist_false)
def do_latest_snippets(parser, token):
    """
    Retrieves the latest ``num`` Snippets and stores them in a specified
    context variable.

    Example::

        {% get_latest_snippets 5 as latest_snippets %}

    """
    bits = token.contents.split()
    if len(bits) != 4:
        raise template.TemplateSyntaxError("'%s' tag takes exactly three arguments" % bits[0])
    # Consistency with the other tags in this module: the third token must be
    # the literal 'as' (previously any word was silently accepted).
    if bits[2] != 'as':
        raise template.TemplateSyntaxError("second argument to '%s' tag must be 'as'" % bits[0])
    return LatestSnippetsNode(bits[1], bits[3])
def do_rating_for_snippet(parser, token):
    """
    Looks up the total score and number of Ratings for a Snippet and
    stores the result in a specified context variable.

    Example usage::

        {% get_rating_for_snippet object.id as score %}

    """
    args = token.contents.split()
    if len(args) != 4:
        raise template.TemplateSyntaxError("'%s' tag takes exactly three arguments" % args[0])
    if args[2] != 'as':
        raise template.TemplateSyntaxError("second argument to '%s' tag must be 'as'" % args[0])
    return RatingForSnippetNode(args[1], args[3])
def do_rating_by_user(parser, token):
    """
    Looks up a User's Rating of a Snippet, if any, and stores it in a
    specified context variable.

    Example::

        {% get_rating_by_user user.id object.id as rating %}

    """
    args = token.contents.split()
    if len(args) != 5:
        raise template.TemplateSyntaxError("'%s' tag takes exactly four arguments" % args[0])
    if args[3] != 'as':
        raise template.TemplateSyntaxError("third argument to '%s' tag must be 'as'" % args[0])
    return RatingByUserNode(args[1], args[2], args[4])
# Wire each compilation function to its template tag name.
register.tag('get_latest_snippets', do_latest_snippets)
register.tag('get_rating_for_snippet', do_rating_for_snippet)
register.tag('get_rating_by_user', do_rating_by_user)
register.tag('if_bookmarked', do_if_bookmarked)
register.tag('if_rated', do_if_rated)
|
[
"django.template.Library",
"django.template.resolve_variable",
"cab.models.Snippet.objects.all",
"cab.models.Rating.objects.already_rated",
"cab.models.Rating.objects.score_for_snippet",
"cab.models.Rating.objects.get",
"django.template.TemplateSyntaxError",
"cab.models.Bookmark.objects.already_bookmarked",
"django.template.NodeList"
] |
[((226, 244), 'django.template.Library', 'template.Library', ([], {}), '()\n', (242, 244), False, 'from django import template\n'), ((796, 862), 'cab.models.Bookmark.objects.already_bookmarked', 'Bookmark.objects.already_bookmarked', (['self.user_id', 'self.snippet_id'], {}), '(self.user_id, self.snippet_id)\n', (831, 862), False, 'from cab.models import Bookmark, Rating, Snippet\n'), ((1515, 1574), 'cab.models.Rating.objects.already_rated', 'Rating.objects.already_rated', (['self.user_id', 'self.snippet_id'], {}), '(self.user_id, self.snippet_id)\n', (1543, 1574), False, 'from cab.models import Bookmark, Rating, Snippet\n'), ((2349, 2398), 'cab.models.Rating.objects.score_for_snippet', 'Rating.objects.score_for_snippet', (['self.snippet_id'], {}), '(self.snippet_id)\n', (2381, 2398), False, 'from cab.models import Bookmark, Rating, Snippet\n'), ((3595, 3673), 'django.template.TemplateSyntaxError', 'template.TemplateSyntaxError', (['("\'%s\' tag takes exactly two arguments" % bits[0])'], {}), '("\'%s\' tag takes exactly two arguments" % bits[0])\n', (3623, 3673), False, 'from django import template\n'), ((3934, 3953), 'django.template.NodeList', 'template.NodeList', ([], {}), '()\n', (3951, 3953), False, 'from django import template\n'), ((4481, 4559), 'django.template.TemplateSyntaxError', 'template.TemplateSyntaxError', (['("\'%s\' tag takes exactly two arguments" % bits[0])'], {}), '("\'%s\' tag takes exactly two arguments" % bits[0])\n', (4509, 4559), False, 'from django import template\n'), ((4810, 4829), 'django.template.NodeList', 'template.NodeList', ([], {}), '()\n', (4827, 4829), False, 'from django import template\n'), ((5149, 5234), 'django.template.TemplateSyntaxError', 'template.TemplateSyntaxError', (['("\'%s\' tag takes exactly three arguments" % bits[0])'], {}), '("\'%s\' tag takes exactly three arguments" % bits[0]\n )\n', (5177, 5234), False, 'from django import template\n'), ((5658, 5743), 'django.template.TemplateSyntaxError', 
'template.TemplateSyntaxError', (['("\'%s\' tag takes exactly three arguments" % bits[0])'], {}), '("\'%s\' tag takes exactly three arguments" % bits[0]\n )\n', (5686, 5743), False, 'from django import template\n'), ((5777, 5863), 'django.template.TemplateSyntaxError', 'template.TemplateSyntaxError', (['("second argument to \'%s\' tag must be \'as\'" % bits[0])'], {}), '("second argument to \'%s\' tag must be \'as\'" %\n bits[0])\n', (5805, 5863), False, 'from django import template\n'), ((6171, 6250), 'django.template.TemplateSyntaxError', 'template.TemplateSyntaxError', (['("\'%s\' tag takes exactly four arguments" % bits[0])'], {}), '("\'%s\' tag takes exactly four arguments" % bits[0])\n', (6199, 6250), False, 'from django import template\n'), ((6289, 6374), 'django.template.TemplateSyntaxError', 'template.TemplateSyntaxError', (['("third argument to \'%s\' tag must be \'as\'" % bits[0])'], {}), '("third argument to \'%s\' tag must be \'as\'" %\n bits[0])\n', (6317, 6374), False, 'from django import template\n'), ((586, 634), 'django.template.resolve_variable', 'template.resolve_variable', (['self.user_id', 'context'], {}), '(self.user_id, context)\n', (611, 634), False, 'from django import template\n'), ((665, 716), 'django.template.resolve_variable', 'template.resolve_variable', (['self.snippet_id', 'context'], {}), '(self.snippet_id, context)\n', (690, 716), False, 'from django import template\n'), ((1305, 1353), 'django.template.resolve_variable', 'template.resolve_variable', (['self.user_id', 'context'], {}), '(self.user_id, context)\n', (1330, 1353), False, 'from django import template\n'), ((1384, 1435), 'django.template.resolve_variable', 'template.resolve_variable', (['self.snippet_id', 'context'], {}), '(self.snippet_id, context)\n', (1409, 1435), False, 'from django import template\n'), ((2193, 2244), 'django.template.resolve_variable', 'template.resolve_variable', (['self.snippet_id', 'context'], {}), '(self.snippet_id, context)\n', (2218, 2244), 
False, 'from django import template\n'), ((2691, 2739), 'django.template.resolve_variable', 'template.resolve_variable', (['self.user_id', 'context'], {}), '(self.user_id, context)\n', (2716, 2739), False, 'from django import template\n'), ((2770, 2821), 'django.template.resolve_variable', 'template.resolve_variable', (['self.snippet_id', 'context'], {}), '(self.snippet_id, context)\n', (2795, 2821), False, 'from django import template\n'), ((2843, 2913), 'cab.models.Rating.objects.get', 'Rating.objects.get', ([], {'user__pk': 'self.user_id', 'snippet__pk': 'self.snippet_id'}), '(user__pk=self.user_id, snippet__pk=self.snippet_id)\n', (2861, 2913), False, 'from cab.models import Bookmark, Rating, Snippet\n'), ((1876, 1897), 'cab.models.Snippet.objects.all', 'Snippet.objects.all', ([], {}), '()\n', (1895, 1897), False, 'from cab.models import Bookmark, Rating, Snippet\n')]
|
import ipaddress
from typing import Dict, List, Optional
from pydantic import BaseModel, validator
from common_osint_model.models import ShodanDataHandler, CensysDataHandler, Logger
class AutonomousSystem(BaseModel, ShodanDataHandler, CensysDataHandler, Logger):
    """Autonomous system (AS) metadata, built from Shodan or Censys host data."""
    number: int
    name: str
    country: Optional[str]
    prefix: Optional[str]
    source: str

    @validator("prefix")
    def validate_prefix(cls, v):
        """Accept only prefixes the ``ipaddress`` module can parse (or falsy values)."""
        if not v:
            return v
        try:
            ipaddress.ip_network(v)
        except Exception as e:
            raise ValueError(f"Prefix given could not be parsed by ipaddress module. Likely \"{v}\" has a "
                             f"wrong format: {e}")
        return v

    @classmethod
    def from_shodan(cls, d: Dict):
        """Creates an instance of this class using a typical Shodan dictionary."""
        if isinstance(d, List):
            cls.debug("Got a list instead of a dictionary. Usually multiple services of the same host are represented"
                      " as multiple list items by shodan, so this should not be a problem as the AS is the same for all."
                      " Using the first item.")
            d = d[0]
        # Shodan reports the ASN as e.g. "AS12345"; strip the prefix.
        asn = int(d.get("asn").replace("AS", ""))
        return AutonomousSystem(
            number=asn,
            name=d.get("isp"),
            country=d.get("location", {}).get("country_code", None),
            prefix=None,  # Not available in Shodan data
            source="shodan"
        )

    @classmethod
    def from_censys(cls, d: Dict):
        """Creates an instance of this class using a typical Censys dictionary."""
        as_data = d["autonomous_system"]
        return AutonomousSystem(
            number=as_data["asn"],
            name=as_data["name"],
            country=as_data["country_code"],
            prefix=as_data["bgp_prefix"],
            source="censys"
        )
|
[
"ipaddress.ip_network",
"pydantic.validator"
] |
[((414, 433), 'pydantic.validator', 'validator', (['"""prefix"""'], {}), "('prefix')\n", (423, 433), False, 'from pydantic import BaseModel, validator\n'), ((531, 554), 'ipaddress.ip_network', 'ipaddress.ip_network', (['v'], {}), '(v)\n', (551, 554), False, 'import ipaddress\n')]
|
from enum import Enum, auto
class ArjunaOption(Enum):
    """Configuration option keys for the Arjuna test framework.

    Each member is an opaque key (values come from ``auto()`` and carry no
    meaning); the members are grouped below by the subsystem they configure.
    """
    # --- Arjuna installation paths ---
    ARJUNA_ROOT_DIR = auto()
    ARJUNA_EXTERNAL_TOOLS_DIR = auto()
    ARJUNA_EXTERNAL_IMPORTS_DIR = auto()
    # --- Logging ---
    PYTHON_LOG_NAME = auto()
    LOG_NAME = auto()
    LOG_DIR = auto()
    LOG_CONSOLE_LEVEL = auto()
    LOG_FILE_LEVEL = auto()
    # --- Project layout ---
    PROJECT_NAME = auto()
    PROJECT_ROOT_DIR = auto()
    PROJECT_CONF_FILE = auto()
    DATA_DIR = auto()
    DATA_SOURCES_DIR = auto()
    DATA_REFERENCES_DIR = auto()
    SCREENSHOTS_DIR = auto()
    CONFIG_DIR = auto()
    SETU_PROJECT_DIRS_FILES = auto()
    REPORT_DIR = auto()
    ARCHIVES_DIR = auto()
    # --- Test run environment ---
    AUT_URL = auto()
    TESTRUN_ENVIRONMENT = auto()
    TESTRUN_HOST_OS = auto()
    SETU_GUIAUTO_ACTOR_MODE = auto()
    SETU_GUIAUTO_ACTOR_URL = auto()
    # --- GUI automation / browser ---
    AUTOMATOR_NAME = auto()
    BROWSER_NAME = auto()
    BROWSER_VERSION = auto()
    BROWSER_MAXIMIZE = auto()
    BROWSER_DIM_HEIGHT = auto()
    BROWSER_DIM_WIDTH = auto()
    BROWSER_BIN_PATH = auto()
    BROWSER_PROXY_ON = auto()
    GUIAUTO_INPUT_DIR = auto()
    GUIAUTO_NAMESPACE_DIR = auto()
    GUIAUTO_CONTEXT = auto()
    SCROLL_PIXELS = auto()
    SWIPE_TOP = auto()
    SWIPE_BOTTOM = auto()
    SWIPE_MAX_WAIT = auto()
    GUIAUTO_MAX_WAIT = auto()
    GUIAUTO_SLOMO_ON = auto()
    GUIAUTO_SLOMO_INTERVAL = auto()
    # --- Mobile automation ---
    MOBILE_OS_NAME = auto()
    MOBILE_OS_VERSION = auto()
    MOBILE_DEVICE_NAME = auto()
    MOBILE_DEVICE_UDID = auto()
    MOBILE_APP_FILE_PATH = auto()
    # --- Selenium / Appium ---
    SELENIUM_DRIVER_PROP = auto()
    SELENIUM_DRIVERS_DIR = auto()
    SELENIUM_DRIVER_PATH = auto()
    APPIUM_HUB_URL = auto()
    APPIUM_AUTO_LAUNCH = auto()
    IMAGE_COMPARISON_MIN_SCORE = auto()
    # --- Unitee project layout and databases ---
    UNITEE_PROJECT_DIRS_FILES = auto()
    UNITEE_PROJECT_SESSIONS_DIR = auto()
    UNITEE_PROJECT_GROUPS_DIR = auto()
    UNITEE_PROJECT_TESTS_DIR = auto()
    UNITEE_PROJECT_TEST_MODULE_IMPORT_PREFIX = auto()
    UNITEE_PROJECT_FIXTURES_IMPORT_PREFIX = auto()
    UNITEE_PROJECT_CORE_DIR = auto()
    UNITEE_PROJECT_CORE_DB_CENTRAL_DIR = auto()
    UNITEE_PROJECT_CORE_DB_CENTRAL_DBFILE = auto()
    UNITEE_PROJECT_CORE_DB_RUN_DBFILE = auto()
    UNITEE_PROJECT_CORE_DB_ALLRUN_DIR = auto()
    UNITEE_PROJECT_CORE_DB_TEMPLATE_DIR = auto()
    UNITEE_PROJECT_CORE_DB_TEMPLATE_CENTRAL_DBFILE = auto()
    UNITEE_PROJECT_CORE_DB_TEMPLATE_RUN_DBFILE = auto()
    # --- Unitee reporting ---
    UNITEE_PROJECT_REPORTER_MODE = auto()
    UNITEE_PROJECT_ACTIVE_REPORTERS = auto()
    UNITEE_PROJECT_DEFERRED_REPORTERS = auto()
    UNITEE_PROJECT_FAILFAST = auto()
    UNITEE_PROJECT_REPORT_NAME_FORMAT = auto()
    UNITEE_PROJECT_REPORT_HEADERS_TMETA = auto()
    UNITEE_PROJECT_REPORT_HEADERS_IGMETA = auto()
    UNITEE_PROJECT_REPORT_HEADERS_PROPS = auto()
    UNITEE_PROJECT_REPORT_HEADERS_REPORTABLE_TEST = auto()
    UNITEE_PROJECT_REPORT_HEADERS_REPORTABLE_STEP = auto()
    UNITEE_PROJECT_REPORT_HEADERS_REPORTABLE_ISSUE = auto()
    UNITEE_PROJECT_REPORT_HEADERS_REPORTABLE_IGNORED = auto()
    UNITEE_PROJECT_REPORT_HEADERS_REPORTABLE_FIXTURE = auto()
    UNITEE_PROJECT_REPORT_HEADERS_REPORTABLE_EVENT = auto()
    # --- Unitee run identity and output dirs ---
    UNITEE_PROJECT_RUNID = auto()
    UNITEE_PROJECT_IRUNID = auto()
    UNITEE_PROJECT_SESSION_NAME = auto()
    UNITEE_PROJECT_CORE = auto()
    UNITEE_PROJECT_SCREENSHOTS_RUN_DIR = auto()
    UNITEE_PROJECT_RUN_REPORT_DIR = auto()
    UNITEE_PROJECT_RUN_REPORT_JDB_DIR = auto()
    UNITEE_PROJECT_RUN_REPORT_JSON_DIR = auto()
    UNITEE_PROJECT_RUN_REPORT_JSON_TESTS_DIR = auto()
    UNITEE_PROJECT_RUN_REPORT_JSON_IGNOREDTESTS_DIR = auto()
    UNITEE_PROJECT_RUN_REPORT_JSON_ISSUES_DIR = auto()
    UNITEE_PROJECT_RUN_REPORT_JSON_EVENTS_DIR = auto()
    UNITEE_PROJECT_RUN_REPORT_JSON_FIXTURES_DIR = auto()
class Key(Enum):
    """Keyboard keys usable in GUI automation.

    Values are opaque ``auto()`` ints. NOTE(review): pairs such as
    BACKSPACE/BACK_SPACE, RETURN/ENTER, LEFT/ARROW_LEFT etc. are distinct
    members with *different* values here, not aliases — confirm whether the
    consuming layer treats them as equivalent.
    """
    NULL = auto()
    CANCEL = auto()
    HELP = auto()
    BACKSPACE = auto()
    BACK_SPACE = auto()
    TAB = auto()
    CLEAR = auto()
    RETURN = auto()
    ENTER = auto()
    SHIFT = auto()
    LEFT_SHIFT = auto()
    CONTROL = auto()
    LEFT_CONTROL = auto()
    ALT = auto()
    LEFT_ALT = auto()
    PAUSE = auto()
    ESCAPE = auto()
    SPACE = auto()
    PAGE_UP = auto()
    PAGE_DOWN = auto()
    END = auto()
    HOME = auto()
    # Arrow keys (plain and ARROW_-prefixed spellings).
    LEFT = auto()
    ARROW_LEFT = auto()
    UP = auto()
    ARROW_UP = auto()
    RIGHT = auto()
    ARROW_RIGHT = auto()
    DOWN = auto()
    ARROW_DOWN = auto()
    INSERT = auto()
    DELETE = auto()
    SEMICOLON = auto()
    EQUALS = auto()
    # Numeric keypad.
    NUMPAD0 = auto()
    NUMPAD1 = auto()
    NUMPAD2 = auto()
    NUMPAD3 = auto()
    NUMPAD4 = auto()
    NUMPAD5 = auto()
    NUMPAD6 = auto()
    NUMPAD7 = auto()
    NUMPAD8 = auto()
    NUMPAD9 = auto()
    MULTIPLY = auto()
    ADD = auto()
    SEPARATOR = auto()
    SUBTRACT = auto()
    DECIMAL = auto()
    DIVIDE = auto()
    # Function keys.
    F1 = auto()
    F2 = auto()
    F3 = auto()
    F4 = auto()
    F5 = auto()
    F6 = auto()
    F7 = auto()
    F8 = auto()
    F9 = auto()
    F10 = auto()
    F11 = auto()
    F12 = auto()
    META = auto()
    COMMAND = auto()
class ModifierKey(Enum):
    """Modifier keys that can be held while sending other keys."""
    CTRL = auto()
    CMD = auto()
    XCTRL = auto()
    ALT = auto()
    SHIFT = auto()
class TimeUnit(Enum):
    """Units for specifying wait/timeout durations."""
    MILLI_SECONDS = auto()
    SECONDS = auto()
    MINUTES = auto()
class DesktopOS(Enum):
    """Supported desktop operating systems."""
    WINDOWS = auto()
    MAC = auto()
class SetuActorMode(Enum):
    """Where the Setu GUI-automation actor runs relative to the test process."""
    LOCAL = auto()
    REMOTE = auto()
class MobileOsName(Enum):
    """Supported mobile operating systems."""
    ANDROID = auto()
    IOS = auto()
class BrowserName(Enum):
    """Supported browsers for GUI automation."""
    CHROME = auto()
    FIREFOX = auto()
    SAFARI = auto()
    IE = auto()
    OPERA = auto()
    HTML = auto()
|
[
"enum.auto"
] |
[((77, 83), 'enum.auto', 'auto', ([], {}), '()\n', (81, 83), False, 'from enum import Enum, auto\n'), ((116, 122), 'enum.auto', 'auto', ([], {}), '()\n', (120, 122), False, 'from enum import Enum, auto\n'), ((157, 163), 'enum.auto', 'auto', ([], {}), '()\n', (161, 163), False, 'from enum import Enum, auto\n'), ((186, 192), 'enum.auto', 'auto', ([], {}), '()\n', (190, 192), False, 'from enum import Enum, auto\n'), ((208, 214), 'enum.auto', 'auto', ([], {}), '()\n', (212, 214), False, 'from enum import Enum, auto\n'), ((230, 236), 'enum.auto', 'auto', ([], {}), '()\n', (234, 236), False, 'from enum import Enum, auto\n'), ((262, 268), 'enum.auto', 'auto', ([], {}), '()\n', (266, 268), False, 'from enum import Enum, auto\n'), ((290, 296), 'enum.auto', 'auto', ([], {}), '()\n', (294, 296), False, 'from enum import Enum, auto\n'), ((317, 323), 'enum.auto', 'auto', ([], {}), '()\n', (321, 323), False, 'from enum import Enum, auto\n'), ((347, 353), 'enum.auto', 'auto', ([], {}), '()\n', (351, 353), False, 'from enum import Enum, auto\n'), ((378, 384), 'enum.auto', 'auto', ([], {}), '()\n', (382, 384), False, 'from enum import Enum, auto\n'), ((401, 407), 'enum.auto', 'auto', ([], {}), '()\n', (405, 407), False, 'from enum import Enum, auto\n'), ((431, 437), 'enum.auto', 'auto', ([], {}), '()\n', (435, 437), False, 'from enum import Enum, auto\n'), ((464, 470), 'enum.auto', 'auto', ([], {}), '()\n', (468, 470), False, 'from enum import Enum, auto\n'), ((493, 499), 'enum.auto', 'auto', ([], {}), '()\n', (497, 499), False, 'from enum import Enum, auto\n'), ((517, 523), 'enum.auto', 'auto', ([], {}), '()\n', (521, 523), False, 'from enum import Enum, auto\n'), ((555, 561), 'enum.auto', 'auto', ([], {}), '()\n', (559, 561), False, 'from enum import Enum, auto\n'), ((579, 585), 'enum.auto', 'auto', ([], {}), '()\n', (583, 585), False, 'from enum import Enum, auto\n'), ((605, 611), 'enum.auto', 'auto', ([], {}), '()\n', (609, 611), False, 'from enum import Enum, auto\n'), ((627, 
633), 'enum.auto', 'auto', ([], {}), '()\n', (631, 633), False, 'from enum import Enum, auto\n'), ((661, 667), 'enum.auto', 'auto', ([], {}), '()\n', (665, 667), False, 'from enum import Enum, auto\n'), ((690, 696), 'enum.auto', 'auto', ([], {}), '()\n', (694, 696), False, 'from enum import Enum, auto\n'), ((728, 734), 'enum.auto', 'auto', ([], {}), '()\n', (732, 734), False, 'from enum import Enum, auto\n'), ((764, 770), 'enum.auto', 'auto', ([], {}), '()\n', (768, 770), False, 'from enum import Enum, auto\n'), ((793, 799), 'enum.auto', 'auto', ([], {}), '()\n', (797, 799), False, 'from enum import Enum, auto\n'), ((820, 826), 'enum.auto', 'auto', ([], {}), '()\n', (824, 826), False, 'from enum import Enum, auto\n'), ((849, 855), 'enum.auto', 'auto', ([], {}), '()\n', (853, 855), False, 'from enum import Enum, auto\n'), ((879, 885), 'enum.auto', 'auto', ([], {}), '()\n', (883, 885), False, 'from enum import Enum, auto\n'), ((911, 917), 'enum.auto', 'auto', ([], {}), '()\n', (915, 917), False, 'from enum import Enum, auto\n'), ((942, 948), 'enum.auto', 'auto', ([], {}), '()\n', (946, 948), False, 'from enum import Enum, auto\n'), ((972, 978), 'enum.auto', 'auto', ([], {}), '()\n', (976, 978), False, 'from enum import Enum, auto\n'), ((1002, 1008), 'enum.auto', 'auto', ([], {}), '()\n', (1006, 1008), False, 'from enum import Enum, auto\n'), ((1034, 1040), 'enum.auto', 'auto', ([], {}), '()\n', (1038, 1040), False, 'from enum import Enum, auto\n'), ((1069, 1075), 'enum.auto', 'auto', ([], {}), '()\n', (1073, 1075), False, 'from enum import Enum, auto\n'), ((1098, 1104), 'enum.auto', 'auto', ([], {}), '()\n', (1102, 1104), False, 'from enum import Enum, auto\n'), ((1125, 1131), 'enum.auto', 'auto', ([], {}), '()\n', (1129, 1131), False, 'from enum import Enum, auto\n'), ((1148, 1154), 'enum.auto', 'auto', ([], {}), '()\n', (1152, 1154), False, 'from enum import Enum, auto\n'), ((1174, 1180), 'enum.auto', 'auto', ([], {}), '()\n', (1178, 1180), False, 'from enum import 
Enum, auto\n'), ((1202, 1208), 'enum.auto', 'auto', ([], {}), '()\n', (1206, 1208), False, 'from enum import Enum, auto\n'), ((1232, 1238), 'enum.auto', 'auto', ([], {}), '()\n', (1236, 1238), False, 'from enum import Enum, auto\n'), ((1262, 1268), 'enum.auto', 'auto', ([], {}), '()\n', (1266, 1268), False, 'from enum import Enum, auto\n'), ((1298, 1304), 'enum.auto', 'auto', ([], {}), '()\n', (1302, 1304), False, 'from enum import Enum, auto\n'), ((1327, 1333), 'enum.auto', 'auto', ([], {}), '()\n', (1331, 1333), False, 'from enum import Enum, auto\n'), ((1358, 1364), 'enum.auto', 'auto', ([], {}), '()\n', (1362, 1364), False, 'from enum import Enum, auto\n'), ((1390, 1396), 'enum.auto', 'auto', ([], {}), '()\n', (1394, 1396), False, 'from enum import Enum, auto\n'), ((1422, 1428), 'enum.auto', 'auto', ([], {}), '()\n', (1426, 1428), False, 'from enum import Enum, auto\n'), ((1456, 1462), 'enum.auto', 'auto', ([], {}), '()\n', (1460, 1462), False, 'from enum import Enum, auto\n'), ((1491, 1497), 'enum.auto', 'auto', ([], {}), '()\n', (1495, 1497), False, 'from enum import Enum, auto\n'), ((1525, 1531), 'enum.auto', 'auto', ([], {}), '()\n', (1529, 1531), False, 'from enum import Enum, auto\n'), ((1559, 1565), 'enum.auto', 'auto', ([], {}), '()\n', (1563, 1565), False, 'from enum import Enum, auto\n'), ((1588, 1594), 'enum.auto', 'auto', ([], {}), '()\n', (1592, 1594), False, 'from enum import Enum, auto\n'), ((1620, 1626), 'enum.auto', 'auto', ([], {}), '()\n', (1624, 1626), False, 'from enum import Enum, auto\n'), ((1661, 1667), 'enum.auto', 'auto', ([], {}), '()\n', (1665, 1667), False, 'from enum import Enum, auto\n'), ((1701, 1707), 'enum.auto', 'auto', ([], {}), '()\n', (1705, 1707), False, 'from enum import Enum, auto\n'), ((1742, 1748), 'enum.auto', 'auto', ([], {}), '()\n', (1746, 1748), False, 'from enum import Enum, auto\n'), ((1781, 1787), 'enum.auto', 'auto', ([], {}), '()\n', (1785, 1787), False, 'from enum import Enum, auto\n'), ((1819, 1825), 
'enum.auto', 'auto', ([], {}), '()\n', (1823, 1825), False, 'from enum import Enum, auto\n'), ((1873, 1879), 'enum.auto', 'auto', ([], {}), '()\n', (1877, 1879), False, 'from enum import Enum, auto\n'), ((1924, 1930), 'enum.auto', 'auto', ([], {}), '()\n', (1928, 1930), False, 'from enum import Enum, auto\n'), ((1961, 1967), 'enum.auto', 'auto', ([], {}), '()\n', (1965, 1967), False, 'from enum import Enum, auto\n'), ((2009, 2015), 'enum.auto', 'auto', ([], {}), '()\n', (2013, 2015), False, 'from enum import Enum, auto\n'), ((2060, 2066), 'enum.auto', 'auto', ([], {}), '()\n', (2064, 2066), False, 'from enum import Enum, auto\n'), ((2107, 2113), 'enum.auto', 'auto', ([], {}), '()\n', (2111, 2113), False, 'from enum import Enum, auto\n'), ((2154, 2160), 'enum.auto', 'auto', ([], {}), '()\n', (2158, 2160), False, 'from enum import Enum, auto\n'), ((2203, 2209), 'enum.auto', 'auto', ([], {}), '()\n', (2207, 2209), False, 'from enum import Enum, auto\n'), ((2263, 2269), 'enum.auto', 'auto', ([], {}), '()\n', (2267, 2269), False, 'from enum import Enum, auto\n'), ((2319, 2325), 'enum.auto', 'auto', ([], {}), '()\n', (2323, 2325), False, 'from enum import Enum, auto\n'), ((2361, 2367), 'enum.auto', 'auto', ([], {}), '()\n', (2365, 2367), False, 'from enum import Enum, auto\n'), ((2406, 2412), 'enum.auto', 'auto', ([], {}), '()\n', (2410, 2412), False, 'from enum import Enum, auto\n'), ((2453, 2459), 'enum.auto', 'auto', ([], {}), '()\n', (2457, 2459), False, 'from enum import Enum, auto\n'), ((2490, 2496), 'enum.auto', 'auto', ([], {}), '()\n', (2494, 2496), False, 'from enum import Enum, auto\n'), ((2537, 2543), 'enum.auto', 'auto', ([], {}), '()\n', (2541, 2543), False, 'from enum import Enum, auto\n'), ((2586, 2592), 'enum.auto', 'auto', ([], {}), '()\n', (2590, 2592), False, 'from enum import Enum, auto\n'), ((2636, 2642), 'enum.auto', 'auto', ([], {}), '()\n', (2640, 2642), False, 'from enum import Enum, auto\n'), ((2685, 2691), 'enum.auto', 'auto', ([], {}), 
'()\n', (2689, 2691), False, 'from enum import Enum, auto\n'), ((2744, 2750), 'enum.auto', 'auto', ([], {}), '()\n', (2748, 2750), False, 'from enum import Enum, auto\n'), ((2803, 2809), 'enum.auto', 'auto', ([], {}), '()\n', (2807, 2809), False, 'from enum import Enum, auto\n'), ((2863, 2869), 'enum.auto', 'auto', ([], {}), '()\n', (2867, 2869), False, 'from enum import Enum, auto\n'), ((2925, 2931), 'enum.auto', 'auto', ([], {}), '()\n', (2929, 2931), False, 'from enum import Enum, auto\n'), ((2987, 2993), 'enum.auto', 'auto', ([], {}), '()\n', (2991, 2993), False, 'from enum import Enum, auto\n'), ((3047, 3053), 'enum.auto', 'auto', ([], {}), '()\n', (3051, 3053), False, 'from enum import Enum, auto\n'), ((3081, 3087), 'enum.auto', 'auto', ([], {}), '()\n', (3085, 3087), False, 'from enum import Enum, auto\n'), ((3116, 3122), 'enum.auto', 'auto', ([], {}), '()\n', (3120, 3122), False, 'from enum import Enum, auto\n'), ((3157, 3163), 'enum.auto', 'auto', ([], {}), '()\n', (3161, 3163), False, 'from enum import Enum, auto\n'), ((3190, 3196), 'enum.auto', 'auto', ([], {}), '()\n', (3194, 3196), False, 'from enum import Enum, auto\n'), ((3238, 3244), 'enum.auto', 'auto', ([], {}), '()\n', (3242, 3244), False, 'from enum import Enum, auto\n'), ((3281, 3287), 'enum.auto', 'auto', ([], {}), '()\n', (3285, 3287), False, 'from enum import Enum, auto\n'), ((3328, 3334), 'enum.auto', 'auto', ([], {}), '()\n', (3332, 3334), False, 'from enum import Enum, auto\n'), ((3376, 3382), 'enum.auto', 'auto', ([], {}), '()\n', (3380, 3382), False, 'from enum import Enum, auto\n'), ((3430, 3436), 'enum.auto', 'auto', ([], {}), '()\n', (3434, 3436), False, 'from enum import Enum, auto\n'), ((3491, 3497), 'enum.auto', 'auto', ([], {}), '()\n', (3495, 3497), False, 'from enum import Enum, auto\n'), ((3546, 3552), 'enum.auto', 'auto', ([], {}), '()\n', (3550, 3552), False, 'from enum import Enum, auto\n'), ((3601, 3607), 'enum.auto', 'auto', ([], {}), '()\n', (3605, 3607), False, 'from 
enum import Enum, auto\n'), ((3658, 3664), 'enum.auto', 'auto', ([], {}), '()\n', (3662, 3664), False, 'from enum import Enum, auto\n'), ((3692, 3698), 'enum.auto', 'auto', ([], {}), '()\n', (3696, 3698), False, 'from enum import Enum, auto\n'), ((3709, 3715), 'enum.auto', 'auto', ([], {}), '()\n', (3713, 3715), False, 'from enum import Enum, auto\n'), ((3724, 3730), 'enum.auto', 'auto', ([], {}), '()\n', (3728, 3730), False, 'from enum import Enum, auto\n'), ((3744, 3750), 'enum.auto', 'auto', ([], {}), '()\n', (3748, 3750), False, 'from enum import Enum, auto\n'), ((3765, 3771), 'enum.auto', 'auto', ([], {}), '()\n', (3769, 3771), False, 'from enum import Enum, auto\n'), ((3779, 3785), 'enum.auto', 'auto', ([], {}), '()\n', (3783, 3785), False, 'from enum import Enum, auto\n'), ((3795, 3801), 'enum.auto', 'auto', ([], {}), '()\n', (3799, 3801), False, 'from enum import Enum, auto\n'), ((3812, 3818), 'enum.auto', 'auto', ([], {}), '()\n', (3816, 3818), False, 'from enum import Enum, auto\n'), ((3828, 3834), 'enum.auto', 'auto', ([], {}), '()\n', (3832, 3834), False, 'from enum import Enum, auto\n'), ((3844, 3850), 'enum.auto', 'auto', ([], {}), '()\n', (3848, 3850), False, 'from enum import Enum, auto\n'), ((3865, 3871), 'enum.auto', 'auto', ([], {}), '()\n', (3869, 3871), False, 'from enum import Enum, auto\n'), ((3883, 3889), 'enum.auto', 'auto', ([], {}), '()\n', (3887, 3889), False, 'from enum import Enum, auto\n'), ((3906, 3912), 'enum.auto', 'auto', ([], {}), '()\n', (3910, 3912), False, 'from enum import Enum, auto\n'), ((3920, 3926), 'enum.auto', 'auto', ([], {}), '()\n', (3924, 3926), False, 'from enum import Enum, auto\n'), ((3939, 3945), 'enum.auto', 'auto', ([], {}), '()\n', (3943, 3945), False, 'from enum import Enum, auto\n'), ((3955, 3961), 'enum.auto', 'auto', ([], {}), '()\n', (3959, 3961), False, 'from enum import Enum, auto\n'), ((3972, 3978), 'enum.auto', 'auto', ([], {}), '()\n', (3976, 3978), False, 'from enum import Enum, auto\n'), ((3988, 
3994), 'enum.auto', 'auto', ([], {}), '()\n', (3992, 3994), False, 'from enum import Enum, auto\n'), ((4006, 4012), 'enum.auto', 'auto', ([], {}), '()\n', (4010, 4012), False, 'from enum import Enum, auto\n'), ((4026, 4032), 'enum.auto', 'auto', ([], {}), '()\n', (4030, 4032), False, 'from enum import Enum, auto\n'), ((4040, 4046), 'enum.auto', 'auto', ([], {}), '()\n', (4044, 4046), False, 'from enum import Enum, auto\n'), ((4055, 4061), 'enum.auto', 'auto', ([], {}), '()\n', (4059, 4061), False, 'from enum import Enum, auto\n'), ((4070, 4076), 'enum.auto', 'auto', ([], {}), '()\n', (4074, 4076), False, 'from enum import Enum, auto\n'), ((4091, 4097), 'enum.auto', 'auto', ([], {}), '()\n', (4095, 4097), False, 'from enum import Enum, auto\n'), ((4104, 4110), 'enum.auto', 'auto', ([], {}), '()\n', (4108, 4110), False, 'from enum import Enum, auto\n'), ((4123, 4129), 'enum.auto', 'auto', ([], {}), '()\n', (4127, 4129), False, 'from enum import Enum, auto\n'), ((4139, 4145), 'enum.auto', 'auto', ([], {}), '()\n', (4143, 4145), False, 'from enum import Enum, auto\n'), ((4161, 4167), 'enum.auto', 'auto', ([], {}), '()\n', (4165, 4167), False, 'from enum import Enum, auto\n'), ((4176, 4182), 'enum.auto', 'auto', ([], {}), '()\n', (4180, 4182), False, 'from enum import Enum, auto\n'), ((4197, 4203), 'enum.auto', 'auto', ([], {}), '()\n', (4201, 4203), False, 'from enum import Enum, auto\n'), ((4214, 4220), 'enum.auto', 'auto', ([], {}), '()\n', (4218, 4220), False, 'from enum import Enum, auto\n'), ((4231, 4237), 'enum.auto', 'auto', ([], {}), '()\n', (4235, 4237), False, 'from enum import Enum, auto\n'), ((4251, 4257), 'enum.auto', 'auto', ([], {}), '()\n', (4255, 4257), False, 'from enum import Enum, auto\n'), ((4268, 4274), 'enum.auto', 'auto', ([], {}), '()\n', (4272, 4274), False, 'from enum import Enum, auto\n'), ((4287, 4293), 'enum.auto', 'auto', ([], {}), '()\n', (4291, 4293), False, 'from enum import Enum, auto\n'), ((4305, 4311), 'enum.auto', 'auto', ([], {}), 
'()\n', (4309, 4311), False, 'from enum import Enum, auto\n'), ((4323, 4329), 'enum.auto', 'auto', ([], {}), '()\n', (4327, 4329), False, 'from enum import Enum, auto\n'), ((4341, 4347), 'enum.auto', 'auto', ([], {}), '()\n', (4345, 4347), False, 'from enum import Enum, auto\n'), ((4359, 4365), 'enum.auto', 'auto', ([], {}), '()\n', (4363, 4365), False, 'from enum import Enum, auto\n'), ((4377, 4383), 'enum.auto', 'auto', ([], {}), '()\n', (4381, 4383), False, 'from enum import Enum, auto\n'), ((4395, 4401), 'enum.auto', 'auto', ([], {}), '()\n', (4399, 4401), False, 'from enum import Enum, auto\n'), ((4413, 4419), 'enum.auto', 'auto', ([], {}), '()\n', (4417, 4419), False, 'from enum import Enum, auto\n'), ((4431, 4437), 'enum.auto', 'auto', ([], {}), '()\n', (4435, 4437), False, 'from enum import Enum, auto\n'), ((4449, 4455), 'enum.auto', 'auto', ([], {}), '()\n', (4453, 4455), False, 'from enum import Enum, auto\n'), ((4468, 4474), 'enum.auto', 'auto', ([], {}), '()\n', (4472, 4474), False, 'from enum import Enum, auto\n'), ((4482, 4488), 'enum.auto', 'auto', ([], {}), '()\n', (4486, 4488), False, 'from enum import Enum, auto\n'), ((4502, 4508), 'enum.auto', 'auto', ([], {}), '()\n', (4506, 4508), False, 'from enum import Enum, auto\n'), ((4521, 4527), 'enum.auto', 'auto', ([], {}), '()\n', (4525, 4527), False, 'from enum import Enum, auto\n'), ((4539, 4545), 'enum.auto', 'auto', ([], {}), '()\n', (4543, 4545), False, 'from enum import Enum, auto\n'), ((4556, 4562), 'enum.auto', 'auto', ([], {}), '()\n', (4560, 4562), False, 'from enum import Enum, auto\n'), ((4570, 4576), 'enum.auto', 'auto', ([], {}), '()\n', (4574, 4576), False, 'from enum import Enum, auto\n'), ((4583, 4589), 'enum.auto', 'auto', ([], {}), '()\n', (4587, 4589), False, 'from enum import Enum, auto\n'), ((4596, 4602), 'enum.auto', 'auto', ([], {}), '()\n', (4600, 4602), False, 'from enum import Enum, auto\n'), ((4609, 4615), 'enum.auto', 'auto', ([], {}), '()\n', (4613, 4615), False, 'from 
enum import Enum, auto\n'), ((4622, 4628), 'enum.auto', 'auto', ([], {}), '()\n', (4626, 4628), False, 'from enum import Enum, auto\n'), ((4635, 4641), 'enum.auto', 'auto', ([], {}), '()\n', (4639, 4641), False, 'from enum import Enum, auto\n'), ((4648, 4654), 'enum.auto', 'auto', ([], {}), '()\n', (4652, 4654), False, 'from enum import Enum, auto\n'), ((4661, 4667), 'enum.auto', 'auto', ([], {}), '()\n', (4665, 4667), False, 'from enum import Enum, auto\n'), ((4674, 4680), 'enum.auto', 'auto', ([], {}), '()\n', (4678, 4680), False, 'from enum import Enum, auto\n'), ((4688, 4694), 'enum.auto', 'auto', ([], {}), '()\n', (4692, 4694), False, 'from enum import Enum, auto\n'), ((4702, 4708), 'enum.auto', 'auto', ([], {}), '()\n', (4706, 4708), False, 'from enum import Enum, auto\n'), ((4716, 4722), 'enum.auto', 'auto', ([], {}), '()\n', (4720, 4722), False, 'from enum import Enum, auto\n'), ((4732, 4738), 'enum.auto', 'auto', ([], {}), '()\n', (4736, 4738), False, 'from enum import Enum, auto\n'), ((4750, 4756), 'enum.auto', 'auto', ([], {}), '()\n', (4754, 4756), False, 'from enum import Enum, auto\n'), ((4794, 4800), 'enum.auto', 'auto', ([], {}), '()\n', (4798, 4800), False, 'from enum import Enum, auto\n'), ((4811, 4817), 'enum.auto', 'auto', ([], {}), '()\n', (4815, 4817), False, 'from enum import Enum, auto\n'), ((4830, 4836), 'enum.auto', 'auto', ([], {}), '()\n', (4834, 4836), False, 'from enum import Enum, auto\n'), ((4847, 4853), 'enum.auto', 'auto', ([], {}), '()\n', (4851, 4853), False, 'from enum import Enum, auto\n'), ((4866, 4872), 'enum.auto', 'auto', ([], {}), '()\n', (4870, 4872), False, 'from enum import Enum, auto\n'), ((4916, 4922), 'enum.auto', 'auto', ([], {}), '()\n', (4920, 4922), False, 'from enum import Enum, auto\n'), ((4937, 4943), 'enum.auto', 'auto', ([], {}), '()\n', (4941, 4943), False, 'from enum import Enum, auto\n'), ((4958, 4964), 'enum.auto', 'auto', ([], {}), '()\n', (4962, 4964), False, 'from enum import Enum, auto\n'), ((5003, 
5009), 'enum.auto', 'auto', ([], {}), '()\n', (5007, 5009), False, 'from enum import Enum, auto\n'), ((5020, 5026), 'enum.auto', 'auto', ([], {}), '()\n', (5024, 5026), False, 'from enum import Enum, auto\n'), ((5067, 5073), 'enum.auto', 'auto', ([], {}), '()\n', (5071, 5073), False, 'from enum import Enum, auto\n'), ((5087, 5093), 'enum.auto', 'auto', ([], {}), '()\n', (5091, 5093), False, 'from enum import Enum, auto\n'), ((5135, 5141), 'enum.auto', 'auto', ([], {}), '()\n', (5139, 5141), False, 'from enum import Enum, auto\n'), ((5152, 5158), 'enum.auto', 'auto', ([], {}), '()\n', (5156, 5158), False, 'from enum import Enum, auto\n'), ((5198, 5204), 'enum.auto', 'auto', ([], {}), '()\n', (5202, 5204), False, 'from enum import Enum, auto\n'), ((5219, 5225), 'enum.auto', 'auto', ([], {}), '()\n', (5223, 5225), False, 'from enum import Enum, auto\n'), ((5239, 5245), 'enum.auto', 'auto', ([], {}), '()\n', (5243, 5245), False, 'from enum import Enum, auto\n'), ((5255, 5261), 'enum.auto', 'auto', ([], {}), '()\n', (5259, 5261), False, 'from enum import Enum, auto\n'), ((5274, 5280), 'enum.auto', 'auto', ([], {}), '()\n', (5278, 5280), False, 'from enum import Enum, auto\n'), ((5292, 5298), 'enum.auto', 'auto', ([], {}), '()\n', (5296, 5298), False, 'from enum import Enum, auto\n')]
|
#!/usr/bin/env python
#
# Public Domain 2014-2017 MongoDB, Inc.
# Public Domain 2008-2014 WiredTiger, Inc.
#
# This is free and unencumbered software released into the public domain.
#
# Anyone is free to copy, modify, publish, use, compile, sell, or
# distribute this software, either in source code form or as a compiled
# binary, for any purpose, commercial or non-commercial, and by any
# means.
#
# In jurisdictions that recognize copyright laws, the author or authors
# of this software dedicate any and all copyright interest in the
# software to the public domain. We make this dedication for the benefit
# of the public at large and to the detriment of our heirs and
# successors. We intend this dedication to be an overt act of
# relinquishment in perpetuity of all present and future rights to this
# software under copyright law.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
import wiredtiger, wttest
from wtscenario import make_scenarios
# test_index02.py
# test search_near in indices
class test_index02(wttest.WiredTigerTestCase):
    '''Test search_near in indices.

    Each test runs under two scenarios: an index on the value column only
    ('columns=(v)') and an index that also embeds the key ('columns=(v,k)').
    '''
    scenarios = make_scenarios([
        ('index', dict(indexconfig='columns=(v)', ncol=1)),
        ('index-with-key', dict(indexconfig='columns=(v,k)', ncol=2)),
    ])
    basename = 'test_index02'
    tablename = 'table:' + basename
    indexname = 'index:' + basename + ":inverse"

    def test_search_near_exists(self):
        '''Create a table, look for an existing key'''
        self.session.create(self.tablename, 'key_format=r,value_format=Q,columns=(k,v)')
        self.session.create(self.indexname, self.indexconfig)
        cur = self.session.open_cursor(self.tablename, None, "append")
        # Value 5 is inserted three times so the index contains duplicates.
        for value in (1, 5, 5, 5, 10):
            cur.set_value(value)
            cur.insert()
        cur.close()
        # Retry after reopening.
        # range() instead of the Python-2-only xrange() so this also runs on Python 3.
        for runs in range(2):
            # search near should find a match
            cur = self.session.open_cursor(self.indexname, None, None)
            if self.ncol == 1:
                cur.set_key(5)
            else:
                cur.set_key(5, 3)
            self.assertEqual(cur.search_near(), 0)
            # Retry after reopening
            self.reopen_conn()

    def test_search_near_between(self):
        '''Create a table, look for a non-existing key'''
        self.session.create(self.tablename, 'key_format=i,value_format=i,columns=(k,v)')
        self.session.create(self.indexname, self.indexconfig)
        cur = self.session.open_cursor(self.tablename)
        for k in range(3):
            cur[k] = 5 * k + 10   # stored values: 10, 15, 20
        cur.close()
        search_keys = [ 1, 11, 15, 19, 21 ]
        # search near should find a match
        for runs in range(2):
            cur = self.session.open_cursor(self.indexname, None, None)
            for k in search_keys:
                if self.ncol == 1:
                    cur.set_key(k)
                else:
                    cur.set_key(k, 1) # [15,1] will completely match
                exact = cur.search_near()
                if self.ncol == 1:
                    found_key = cur.get_key()
                else:
                    [ found_key, index ] = cur.get_key()
                self.pr("search_near for " + str(k) + " found " + str(found_key) + " with exact " + str(exact))
                # (a > b) - (a < b) is the portable spelling of Python 2's cmp(),
                # which was removed in Python 3.
                self.assertEqual(exact, (found_key > k) - (found_key < k), "for key " + str(k))
            self.reopen_conn()

    def test_search_near_empty(self):
        '''Create an empty table, look for a key'''
        self.session.create(self.tablename, 'key_format=i,value_format=i,columns=(k,v)')
        self.session.create(self.indexname, self.indexconfig)
        cur = self.session.open_cursor(self.indexname, None, None)
        if self.ncol == 1:
            cur.set_key(3)
        else:
            cur.set_key(3, 1)
        # An empty index yields WT_NOTFOUND rather than any exact indication.
        self.assertEqual(cur.search_near(), wiredtiger.WT_NOTFOUND)

if __name__ == '__main__':
    wttest.run()
|
[
"wttest.run"
] |
[((4476, 4488), 'wttest.run', 'wttest.run', ([], {}), '()\n', (4486, 4488), False, 'import wiredtiger, wttest\n')]
|
# Copyright (c) 2007-2018 UShareSoft, All rights reserved
#
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
import pyxb
from mock import patch
from uforge.application import Api
from uforge.objects import uforge
from hammr.utils import constants
from hammr.utils import migration_utils
class TestMigrationTable(unittest.TestCase):
@patch('texttable.Texttable.add_row')
def test_migration_table_add_all_migration_in_progress_in_the_table(self, mock_table_add_row):
# given
migrations = uforge.migrations()
migrations.migrations = pyxb.BIND()
migration1 = self.create_migration(1, "a migration", 50, "In Progress", False, False, False)
migrations.migrations.append(migration1)
migration2 = self.create_migration(2, "a second migration", 55, "In Progress", False, False, False)
migrations.migrations.append(migration2)
# when
migration_utils.migration_table(migrations.migrations.migration)
# then
self.assertEquals(mock_table_add_row.call_count, 2)
mock_table_add_row.assert_any_call([1, "a migration", "In Progress (50%)"])
mock_table_add_row.assert_any_call([2, "a second migration", "In Progress (55%)"])
@patch('texttable.Texttable.add_row')
def test_migration_table_add_the_migration_done_in_the_table(self, mock_table_add_row):
# given
migrations = uforge.migrations()
migrations.migrations = pyxb.BIND()
migration1 = self.create_migration(1, "a migration", 50, "Done", True, False, False)
migrations.migrations.append(migration1)
# when
migration_utils.migration_table(migrations.migrations.migration)
# then
self.assertEquals(mock_table_add_row.call_count, 1)
mock_table_add_row.assert_any_call([1, "a migration", "Done"])
@patch('texttable.Texttable.add_row')
def test_migration_table_add_the_migration_failed_in_the_table(self, mock_table_add_row):
# given
migrations = uforge.migrations()
migrations.migrations = pyxb.BIND()
migration1 = self.create_migration(1, "a migration", 50, "Failed", False, True, False)
migrations.migrations.append(migration1)
# when
migration_utils.migration_table(migrations.migrations.migration)
# then
self.assertEquals(mock_table_add_row.call_count, 1)
mock_table_add_row.assert_any_call([1, "a migration", "Failed"])
@patch('texttable.Texttable.add_row')
def test_migration_table_add_the_migration_cancelled_in_the_table(self, mock_table_add_row):
# given
migrations = uforge.migrations()
migrations.migrations = pyxb.BIND()
migration1 = self.create_migration(1, "a migration", 50, "Cancelled", False, False, True)
migrations.migrations.append(migration1)
# when
migration_utils.migration_table(migrations.migrations.migration)
# then
self.assertEquals(mock_table_add_row.call_count, 1)
mock_table_add_row.assert_any_call([1, "a migration", "Cancelled"])
@patch("ussclicore.utils.generics_utils.get_file")
def test_retrieve_migration_configuration_raise_exception_if_no_file_is_retrieved(self, mock_get_file):
# given
args_file = "file_no_present.json"
mock_get_file.return_value = None
# when
with self.assertRaises(Exception) as e:
migration_utils.retrieve_migration_configuration(args_file)
# then
self.assertTrue("No such file or directory: " + args_file in e.exception)
@patch("ussclicore.utils.generics_utils.get_file")
@patch("hammr.utils.hammr_utils.load_data")
def test_retrieve_migration_configuration_raise_exception_if_file_contain_no_migration(self, mock_load_data, mock_get_file):
# given
args_file = "file_present.json"
mock_get_file.return_value = "a file"
mock_load_data.return_value = self.get_migration_config(migration_key="noMigration")
# when
with self.assertRaises(Exception) as e:
migration_utils.retrieve_migration_configuration(args_file)
# then
self.assertTrue("no migration section found" in e.exception)
@patch("ussclicore.utils.generics_utils.get_file")
@patch("hammr.utils.hammr_utils.load_data")
@patch("hammr.utils.migration_utils.check_mandatory_migrate")
def test_retrieve_migration_configuration_check_mandatory_migrate_if_file_contain_migration(self, mock_check_mandatory_migrate, mock_load_data, mock_get_file):
# given
args_file = "file_present.json"
mock_get_file.return_value = "a file"
data = self.get_migration_config()
mock_load_data.return_value = data
# when
migration_utils.retrieve_migration_configuration(args_file)
# then
mock_check_mandatory_migrate.assert_called_with(data["migration"])
def test_check_mandatory_migrate_raise_exception_if_not_contain_name(self):
        """Validation fails when the migration section lacks a [name] attribute."""
        # given
        data = self.get_migration_config(name_key="noName")
        # when
        with self.assertRaises(Exception) as e:
            migration_utils.check_mandatory_migrate(data["migration"])
        # then
        # str(e.exception): membership on the exception object itself is a
        # Python-2-only idiom and raises TypeError on Python 3
        self.assertIn("check yours parameters in file, no attribute [name] for [migration]", str(e.exception))
def test_check_mandatory_migrate_raise_exception_if_not_contain_os(self):
        """Validation fails when the migration section lacks an [os] attribute."""
        # given
        data = self.get_migration_config(os_key="noOS")
        # when
        with self.assertRaises(Exception) as e:
            migration_utils.check_mandatory_migrate(data["migration"])
        # then
        self.assertIn("check yours parameters in file, no attribute [os] for [migration]", str(e.exception))
def test_check_mandatory_migrate_raise_exception_if_os_value_not_valid(self):
        """Validation fails when [os] is anything other than 'linux'."""
        # given
        data = self.get_migration_config(os_value="Windows")
        # when
        with self.assertRaises(Exception) as e:
            migration_utils.check_mandatory_migrate(data["migration"])
        # then
        self.assertIn("check yours parameters in file, attribute [os] for [migration] is not correct. Only 'linux' is supported", str(e.exception))
def test_check_mandatory_migrate_raise_exception_if_not_contain_source(self):
        """Validation fails when the migration section lacks a [source] attribute."""
        # given
        data = self.get_migration_config(source_key="noSource")
        # when
        with self.assertRaises(Exception) as e:
            migration_utils.check_mandatory_migrate(data["migration"])
        # then
        self.assertIn("check yours parameters in file, no attribute [source] for [migration]", str(e.exception))
def test_check_mandatory_migrate_raise_exception_if_not_contain_target(self):
        """Validation fails when the migration section lacks a [target] attribute."""
        # given
        data = self.get_migration_config(target_key="noTarget")
        # when
        with self.assertRaises(Exception) as e:
            migration_utils.check_mandatory_migrate(data["migration"])
        # then
        self.assertIn("check yours parameters in file, no attribute [target] for [migration]", str(e.exception))
@patch("hammr.utils.migration_utils.check_mandatory_target")
@patch("hammr.utils.migration_utils.check_mandatory_source")
def test_check_mandatory_migrate_check_mandatory_source_target_if_contain_source_target(self, mock_check_mandatory_source, mock_check_mandatory_target):
        """A complete config delegates [source]/[target] checks to the sub-validators."""
        # given
        data = self.get_migration_config()
        # when
        migration_utils.check_mandatory_migrate(data["migration"])
        # then
        mock_check_mandatory_source.assert_called_with(data["migration"]["source"])
        mock_check_mandatory_target.assert_called_with(data["migration"]["target"])
def test_check_mandatory_source_raise_exception_if_not_contain_host(self):
        """Source validation fails when [host] is missing."""
        # given
        data = self.get_migration_config(host_key="noHost")
        # when
        with self.assertRaises(Exception) as e:
            migration_utils.check_mandatory_source(data["migration"]["source"])
        # then
        # str(e.exception): membership on the exception object breaks on Python 3
        self.assertIn("check yours parameters in file, no attribute [host] for [migration][source]", str(e.exception))
def test_check_mandatory_source_raise_exception_if_not_contain_user(self):
        """Source validation fails when [user] is missing."""
        # given
        data = self.get_migration_config(user_key="noUser")
        # when
        with self.assertRaises(Exception) as e:
            migration_utils.check_mandatory_source(data["migration"]["source"])
        # then
        self.assertIn("check yours parameters in file, no attribute [user] for [migration][source]", str(e.exception))
def test_check_mandatory_target_raise_exception_if_not_contain_builder(self):
        """Target validation fails when [builder] is missing."""
        # given
        data = self.get_migration_config(builder_key="noBuilder")
        # when
        with self.assertRaises(Exception) as e:
            migration_utils.check_mandatory_target(data["migration"]["target"])
        # then
        # str(e.exception): membership on the exception object breaks on Python 3
        self.assertIn("check yours parameters in file, no attribute [builder] for [migration][target]", str(e.exception))
@patch("hammr.utils.migration_utils.check_mandatory_builder")
def test_check_mandatory_target_check_mandatory_builder_if_contain_builder(self, mock_check_mandatory_builder):
        """A target with a builder delegates to the builder validator."""
        # given
        data = self.get_migration_config()
        # when
        migration_utils.check_mandatory_target(data["migration"]["target"])
        # then
        mock_check_mandatory_builder.assert_called_with(data["migration"]["target"]["builder"])
def test_check_mandatory_builder_raise_exception_if_not_contain_type(self):
        """Builder validation fails when [type] is missing."""
        # given
        data = self.get_migration_config(type_key="noType")
        # when
        with self.assertRaises(Exception) as e:
            migration_utils.check_mandatory_builder(data["migration"]["target"]["builder"])
        # then
        # str(e.exception): membership on the exception object breaks on Python 3
        self.assertIn("check yours parameters in file, no attribute [type] for [migration][target][builder]", str(e.exception))
def test_check_mandatory_builder_raise_exception_if_not_contain_account(self):
        """Builder validation fails when [account] is missing."""
        # given
        data = self.get_migration_config(account_key="noAccount")
        # when
        with self.assertRaises(Exception) as e:
            migration_utils.check_mandatory_builder(data["migration"]["target"]["builder"])
        # then
        self.assertIn("check yours parameters in file, no attribute [account] for [migration][target][builder]", str(e.exception))
@patch("hammr.utils.migration_utils.check_mandatory_account")
def test_check_mandatory_builder_check_mandatory_account_if_contain_account(self, mock_check_mandatory_account):
        """A builder with an account delegates to the account validator."""
        # given
        data = self.get_migration_config()
        # when
        migration_utils.check_mandatory_builder(data["migration"]["target"]["builder"])
        # then
        mock_check_mandatory_account.assert_called_with(data["migration"]["target"]["builder"]["account"])
def test_check_mandatory_account_raise_exception_if_not_contain_name(self):
        """Account validation fails when [name] is missing."""
        # given
        data = self.get_migration_config(account_name_key="noAccountName")
        # when
        with self.assertRaises(Exception) as e:
            migration_utils.check_mandatory_account(data["migration"]["target"]["builder"]["account"])
        # then
        # str(e.exception): membership on the exception object breaks on Python 3
        self.assertIn("check yours parameters in file, no attribute [name] for [migration][target][builder][account]", str(e.exception))
@patch("hammr.utils.generate_utils.get_target_format_object")
def test_retrieve_target_format_return_the_target_format_found(self, mock_get_target_format_object):
# given
api = Api("url", username="username", password="password", headers=None,
disable_ssl_certificate_validation=False, timeout=constants.HTTP_TIMEOUT)
target_format = uforge.TargetFormat()
mock_get_target_format_object.return_value = target_format
# when
target_format_retrieved = migration_utils.retrieve_target_format(api, "login", "targetFormatName")
# then
self.assertEqual(target_format_retrieved, target_format)
@patch("hammr.utils.generate_utils.get_target_format_object")
def test_retrieve_target_format_raise_exception_when_the_target_format_not_found(self, mock_get_target_format_object):
# given
api = Api("url", username="username", password="password", headers=None,
disable_ssl_certificate_validation=False, timeout=constants.HTTP_TIMEOUT)
mock_get_target_format_object.return_value = None
# when
with self.assertRaises(Exception) as e:
migration_utils.retrieve_target_format(api, "login", "targetFormatName")
# then
self.assertTrue("TargetFormat type unknown: targetFormatName" in e.exception)
def test_retrieve_image_return_the_image_created(self):
        """retrieve_image maps the builder hardware settings onto the image profile."""
        # given
        api = Api("url", username="username", password="password", headers=None,
                  disable_ssl_certificate_validation=False, timeout=constants.HTTP_TIMEOUT)
        target_format = uforge.TargetFormat()
        image_format = uforge.ImageFormat()
        image_format.name = "vcenter"
        target_format.format = image_format
        builder = {
            "hardwareSettings": {
                "memory": 512,
                "hwType": 4
            }
        }
        # when
        image_retrieved = migration_utils.retrieve_image(builder, target_format, api, "login")
        # then
        self.assertEqual(image_retrieved.installProfile.memorySize, 512)
        self.assertEqual(image_retrieved.installProfile.hwType, "4")
        self.assertFalse(image_retrieved.compress)
@patch("ussclicore.utils.generics_utils.remove_special_chars")
def test_retrieve_image_raise_exception_when_format_name_not_found(self, mock_remove_special_chars):
        """retrieve_image fails when the normalized format name is unsupported."""
        # given
        api = Api("url", username="username", password="password", headers=None,
                  disable_ssl_certificate_validation=False, timeout=constants.HTTP_TIMEOUT)
        target_format = uforge.TargetFormat()
        image_format = uforge.ImageFormat()
        image_format.name = "vcenter"
        target_format.format = image_format
        builder = {
            "hardwareSettings": {
                "memory": 512,
                "hwType": 4
            }
        }
        mock_remove_special_chars.return_value = "vcenternotfound"
        # when
        with self.assertRaises(Exception) as e:
            migration_utils.retrieve_image(builder, target_format, api, "login")
        # then
        # str(e.exception): membership on the exception object breaks on Python 3
        self.assertIn("TargetFormat type is unsupported: vcenter", str(e.exception))
def test_check_mandatory_installation_raise_exception_when_no_installation_for_format_aws(self):
        """AWS migrations must declare an [installation] section in the builder."""
        # given
        # (the previously-built InstallProfile was dead code: the check only
        # inspects the builder dict)
        builder = {
            "type": "Amazon AWS"
        }
        # when
        with self.assertRaises(Exception) as e:
            migration_utils.check_mandatory_installation("aws", builder)
        # then
        # str(e.exception): membership on the exception object breaks on Python 3
        self.assertIn("check yours parameters in file, no attribute [installation] for [migration][target][builder], mandatory to migrate to [Amazon AWS]", str(e.exception))
def test_set_install_profile_disk_size_set_disk_size_when_format_aws(self):
        """For the aws format, diskSize is copied from the builder installation."""
        # given
        install_profile = uforge.InstallProfile()
        install_profile.diskSize = 0
        builder = {
            "installation": {
                "diskSize": 12
            }
        }
        # when
        install_profile = migration_utils.set_install_profile_disk_size(install_profile, builder, "aws")
        # then
        self.assertEqual(install_profile.diskSize, 12)
def test_set_install_profile_disk_size_not_set_disk_size_when_format_vcenter(self):
        """For the vcenter format, the builder diskSize is ignored."""
        # given
        install_profile = uforge.InstallProfile()
        install_profile.diskSize = 0
        builder = {
            "installation": {
                "diskSize": 12
            }
        }
        # when
        install_profile = migration_utils.set_install_profile_disk_size(install_profile, builder, "vcenter")
        # then
        self.assertEqual(install_profile.diskSize, 0)
def test_set_install_profile_disk_size_raise_exception_when_no_diskSize(self):
        """AWS migrations must declare [disksize] inside [installation]."""
        # given
        install_profile = uforge.InstallProfile()
        install_profile.diskSize = 0
        builder = {
            "type": "Amazon AWS",
            "installation": {}
        }
        # when
        with self.assertRaises(Exception) as e:
            migration_utils.set_install_profile_disk_size(install_profile, builder, "aws")
        # then
        # str(e.exception): membership on the exception object breaks on Python 3
        self.assertIn("check yours parameters in file, no attribute [disksize] for [migration][target][builder][installation], mandatory to migrate to [Amazon AWS]", str(e.exception))
def test_retrieve_publish_image_return_the_publish_image_created(self):
        """retrieve_publish_image copies the vCenter builder fields onto the publish image."""
        # given
        target_format = uforge.TargetFormat()
        image_format = uforge.ImageFormat()
        image_format.name = "vcenter"
        target_format.format = image_format
        builder = {
            "displayName": "vcenter-vm-name",
            "esxHost": "esxhost_vcenter",
            "datastore": "datastore_vcenter",
            "network": "network_vcenter"
        }
        # when
        publish_image_retrieved = migration_utils.retrieve_publish_image(builder, target_format)
        # then
        self.assertEqual(publish_image_retrieved.displayName, builder["displayName"])
        self.assertEqual(publish_image_retrieved.esxHost, builder["esxHost"])
        self.assertEqual(publish_image_retrieved.datastore, builder["datastore"])
        self.assertEqual(publish_image_retrieved.network, builder["network"])
@patch("ussclicore.utils.generics_utils.remove_special_chars")
def test_retrieve_publish_image_raise_exception_when_format_name_not_found(self, mock_remove_special_chars):
        """retrieve_publish_image fails when the normalized format name is unsupported."""
        # given
        target_format = uforge.TargetFormat()
        image_format = uforge.ImageFormat()
        image_format.name = "vcenter"
        target_format.format = image_format
        builder = {
            "displayName": "vcenter-vm-name",
            "esxHost": "esxhost_vcenter",
            "datastore": "datastore_vcenter",
            "network": "network_vcenter"
        }
        mock_remove_special_chars.return_value = "vcenternotfound"
        # when
        with self.assertRaises(Exception) as e:
            migration_utils.retrieve_publish_image(builder, target_format)
        # then
        # str(e.exception): membership on the exception object breaks on Python 3
        self.assertIn("TargetFormat type is unsupported: vcenter", str(e.exception))
@patch("uforge.application.Api._Users._Accounts.Getall")
def test_retrieve_account_return_the_cred_account_found(self, mock_api_get_all):
# given
api = Api("url", username="username", password="password", headers=None, disable_ssl_certificate_validation=False, timeout=constants.HTTP_TIMEOUT)
cred_account = uforge.CredAccountVSphere()
cred_account.name = "accountName"
cred_account.uri = "/uri/credAccount"
cred_accounts = self.create_accounts(cred_account, "vsphere")
mock_api_get_all.return_value = cred_accounts
# when
cred_account_retrieved = migration_utils.retrieve_account(api, "login", cred_account.name)
# then
self.assertEqual(cred_account_retrieved.name, cred_account.name)
self.assertEqual(cred_account_retrieved.uri, cred_account.uri)
@patch("uforge.application.Api._Users._Accounts.Getall")
def test_retrieve_account_from_platform_raise_exception_when_no_accounts(self, mock_api_get_all):
# given
api = Api("url", username="username", password="password", headers=None,
disable_ssl_certificate_validation=False, timeout=constants.HTTP_TIMEOUT)
accounts = uforge.CredAccounts()
accounts.credAccounts = pyxb.BIND()
mock_api_get_all.return_value = accounts
# when
with self.assertRaises(Exception) as e:
migration_utils.retrieve_account(api, "login", "account")
# then
self.assertTrue("No CredAccounts available.\n You can use the command 'hammr account create' to create an account." in e.exception)
@patch("uforge.application.Api._Users._Accounts.Getall")
def test_retrieve_account_from_platform_raise_exception_when_account_not_found(self, mock_api_get_all):
# given
api = Api("url", username="username", password="password", headers=None, disable_ssl_certificate_validation=False, timeout=constants.HTTP_TIMEOUT)
cred_account = uforge.CredAccountVSphere()
cred_account.name = "accountName"
cred_account.uri = "/uri/credAccount"
cred_accounts = self.create_accounts(cred_account, "vsphere")
mock_api_get_all.return_value = cred_accounts
# when
with self.assertRaises(Exception) as e:
migration_utils.retrieve_account(api, "login", "accountNotFound")
# then
self.assertTrue("CredAccount unknown: accountNotFound\n You can use the command 'hammr account create' to create an account." in e.exception)
def create_migration(self, id, name, percentage, statusMessage, complete, error, cancelled):
        """Build a uforge migration fixture with the given identity and status flags."""
        # Assemble the status sub-object first, then attach it to the migration.
        migration_status = uforge.status()
        migration_status.message = statusMessage
        migration_status.percentage = percentage
        migration_status.complete = complete
        migration_status.error = error
        migration_status.cancelled = cancelled
        fixture = uforge.migration()
        fixture.dbId = id
        fixture.name = name
        fixture.status = migration_status
        return fixture
def create_accounts(self, account, target_platform_type):
        """Wrap a single credential account in a CredAccounts collection.

        The account is tagged with a target platform of the given type before
        being appended to the collection.
        """
        platform = uforge.TargetPlatform()
        platform.name = "targetPlatformName"
        platform.type = target_platform_type
        account.targetPlatform = platform
        collection = uforge.CredAccounts()
        collection.credAccounts = pyxb.BIND()
        collection.credAccounts.append(account)
        return collection
def get_migration_config(self, migration_key="migration", name_key="name", os_key="os", os_value="linux",
source_key="source", host_key="host", user_key="user",
target_key="target", builder_key="builder", type_key="type", account_key="account", account_name_key="name"):
migration_config = {
migration_key: {
name_key: "myMigration",
os_key: os_value,
source_key: {
host_key: "127.0.0.1",
user_key: "root",
},
target_key: {
builder_key: {
type_key: "VMware vCenter format",
account_key: {
account_name_key: "credAccountTest"
}
}
}
}
}
return migration_config
|
[
"pyxb.BIND",
"uforge.objects.uforge.TargetPlatform",
"hammr.utils.migration_utils.check_mandatory_target",
"uforge.objects.uforge.CredAccountVSphere",
"hammr.utils.migration_utils.check_mandatory_builder",
"hammr.utils.migration_utils.migration_table",
"hammr.utils.migration_utils.retrieve_target_format",
"hammr.utils.migration_utils.check_mandatory_account",
"uforge.objects.uforge.migrations",
"hammr.utils.migration_utils.check_mandatory_source",
"uforge.objects.uforge.ImageFormat",
"uforge.objects.uforge.migration",
"mock.patch",
"hammr.utils.migration_utils.retrieve_account",
"hammr.utils.migration_utils.retrieve_publish_image",
"uforge.objects.uforge.status",
"hammr.utils.migration_utils.check_mandatory_installation",
"hammr.utils.migration_utils.set_install_profile_disk_size",
"uforge.application.Api",
"hammr.utils.migration_utils.check_mandatory_migrate",
"uforge.objects.uforge.TargetFormat",
"hammr.utils.migration_utils.retrieve_migration_configuration",
"hammr.utils.migration_utils.retrieve_image",
"uforge.objects.uforge.InstallProfile",
"uforge.objects.uforge.CredAccounts"
] |
[((883, 919), 'mock.patch', 'patch', (['"""texttable.Texttable.add_row"""'], {}), "('texttable.Texttable.add_row')\n", (888, 919), False, 'from mock import patch\n'), ((1773, 1809), 'mock.patch', 'patch', (['"""texttable.Texttable.add_row"""'], {}), "('texttable.Texttable.add_row')\n", (1778, 1809), False, 'from mock import patch\n'), ((2387, 2423), 'mock.patch', 'patch', (['"""texttable.Texttable.add_row"""'], {}), "('texttable.Texttable.add_row')\n", (2392, 2423), False, 'from mock import patch\n'), ((3007, 3043), 'mock.patch', 'patch', (['"""texttable.Texttable.add_row"""'], {}), "('texttable.Texttable.add_row')\n", (3012, 3043), False, 'from mock import patch\n'), ((3636, 3685), 'mock.patch', 'patch', (['"""ussclicore.utils.generics_utils.get_file"""'], {}), "('ussclicore.utils.generics_utils.get_file')\n", (3641, 3685), False, 'from mock import patch\n'), ((4135, 4184), 'mock.patch', 'patch', (['"""ussclicore.utils.generics_utils.get_file"""'], {}), "('ussclicore.utils.generics_utils.get_file')\n", (4140, 4184), False, 'from mock import patch\n'), ((4190, 4232), 'mock.patch', 'patch', (['"""hammr.utils.hammr_utils.load_data"""'], {}), "('hammr.utils.hammr_utils.load_data')\n", (4195, 4232), False, 'from mock import patch\n'), ((4784, 4833), 'mock.patch', 'patch', (['"""ussclicore.utils.generics_utils.get_file"""'], {}), "('ussclicore.utils.generics_utils.get_file')\n", (4789, 4833), False, 'from mock import patch\n'), ((4839, 4881), 'mock.patch', 'patch', (['"""hammr.utils.hammr_utils.load_data"""'], {}), "('hammr.utils.hammr_utils.load_data')\n", (4844, 4881), False, 'from mock import patch\n'), ((4887, 4947), 'mock.patch', 'patch', (['"""hammr.utils.migration_utils.check_mandatory_migrate"""'], {}), "('hammr.utils.migration_utils.check_mandatory_migrate')\n", (4892, 4947), False, 'from mock import patch\n'), ((7620, 7679), 'mock.patch', 'patch', (['"""hammr.utils.migration_utils.check_mandatory_target"""'], {}), 
"('hammr.utils.migration_utils.check_mandatory_target')\n", (7625, 7679), False, 'from mock import patch\n'), ((7685, 7744), 'mock.patch', 'patch', (['"""hammr.utils.migration_utils.check_mandatory_source"""'], {}), "('hammr.utils.migration_utils.check_mandatory_source')\n", (7690, 7744), False, 'from mock import patch\n'), ((9549, 9609), 'mock.patch', 'patch', (['"""hammr.utils.migration_utils.check_mandatory_builder"""'], {}), "('hammr.utils.migration_utils.check_mandatory_builder')\n", (9554, 9609), False, 'from mock import patch\n'), ((10920, 10980), 'mock.patch', 'patch', (['"""hammr.utils.migration_utils.check_mandatory_account"""'], {}), "('hammr.utils.migration_utils.check_mandatory_account')\n", (10925, 10980), False, 'from mock import patch\n'), ((11882, 11942), 'mock.patch', 'patch', (['"""hammr.utils.generate_utils.get_target_format_object"""'], {}), "('hammr.utils.generate_utils.get_target_format_object')\n", (11887, 11942), False, 'from mock import patch\n'), ((12561, 12621), 'mock.patch', 'patch', (['"""hammr.utils.generate_utils.get_target_format_object"""'], {}), "('hammr.utils.generate_utils.get_target_format_object')\n", (12566, 12621), False, 'from mock import patch\n'), ((14131, 14192), 'mock.patch', 'patch', (['"""ussclicore.utils.generics_utils.remove_special_chars"""'], {}), "('ussclicore.utils.generics_utils.remove_special_chars')\n", (14136, 14192), False, 'from mock import patch\n'), ((18247, 18308), 'mock.patch', 'patch', (['"""ussclicore.utils.generics_utils.remove_special_chars"""'], {}), "('ussclicore.utils.generics_utils.remove_special_chars')\n", (18252, 18308), False, 'from mock import patch\n'), ((19129, 19184), 'mock.patch', 'patch', (['"""uforge.application.Api._Users._Accounts.Getall"""'], {}), "('uforge.application.Api._Users._Accounts.Getall')\n", (19134, 19184), False, 'from mock import patch\n'), ((19985, 20040), 'mock.patch', 'patch', (['"""uforge.application.Api._Users._Accounts.Getall"""'], {}), 
"('uforge.application.Api._Users._Accounts.Getall')\n", (19990, 20040), False, 'from mock import patch\n'), ((20762, 20817), 'mock.patch', 'patch', (['"""uforge.application.Api._Users._Accounts.Getall"""'], {}), "('uforge.application.Api._Users._Accounts.Getall')\n", (20767, 20817), False, 'from mock import patch\n'), ((1056, 1075), 'uforge.objects.uforge.migrations', 'uforge.migrations', ([], {}), '()\n', (1073, 1075), False, 'from uforge.objects import uforge\n'), ((1108, 1119), 'pyxb.BIND', 'pyxb.BIND', ([], {}), '()\n', (1117, 1119), False, 'import pyxb\n'), ((1451, 1515), 'hammr.utils.migration_utils.migration_table', 'migration_utils.migration_table', (['migrations.migrations.migration'], {}), '(migrations.migrations.migration)\n', (1482, 1515), False, 'from hammr.utils import migration_utils\n'), ((1939, 1958), 'uforge.objects.uforge.migrations', 'uforge.migrations', ([], {}), '()\n', (1956, 1958), False, 'from uforge.objects import uforge\n'), ((1991, 2002), 'pyxb.BIND', 'pyxb.BIND', ([], {}), '()\n', (2000, 2002), False, 'import pyxb\n'), ((2169, 2233), 'hammr.utils.migration_utils.migration_table', 'migration_utils.migration_table', (['migrations.migrations.migration'], {}), '(migrations.migrations.migration)\n', (2200, 2233), False, 'from hammr.utils import migration_utils\n'), ((2555, 2574), 'uforge.objects.uforge.migrations', 'uforge.migrations', ([], {}), '()\n', (2572, 2574), False, 'from uforge.objects import uforge\n'), ((2607, 2618), 'pyxb.BIND', 'pyxb.BIND', ([], {}), '()\n', (2616, 2618), False, 'import pyxb\n'), ((2787, 2851), 'hammr.utils.migration_utils.migration_table', 'migration_utils.migration_table', (['migrations.migrations.migration'], {}), '(migrations.migrations.migration)\n', (2818, 2851), False, 'from hammr.utils import migration_utils\n'), ((3178, 3197), 'uforge.objects.uforge.migrations', 'uforge.migrations', ([], {}), '()\n', (3195, 3197), False, 'from uforge.objects import uforge\n'), ((3230, 3241), 'pyxb.BIND', 'pyxb.BIND', 
([], {}), '()\n', (3239, 3241), False, 'import pyxb\n'), ((3413, 3477), 'hammr.utils.migration_utils.migration_table', 'migration_utils.migration_table', (['migrations.migrations.migration'], {}), '(migrations.migrations.migration)\n', (3444, 3477), False, 'from hammr.utils import migration_utils\n'), ((5324, 5383), 'hammr.utils.migration_utils.retrieve_migration_configuration', 'migration_utils.retrieve_migration_configuration', (['args_file'], {}), '(args_file)\n', (5372, 5383), False, 'from hammr.utils import migration_utils\n'), ((7985, 8043), 'hammr.utils.migration_utils.check_mandatory_migrate', 'migration_utils.check_mandatory_migrate', (["data['migration']"], {}), "(data['migration'])\n", (8024, 8043), False, 'from hammr.utils import migration_utils\n'), ((9809, 9876), 'hammr.utils.migration_utils.check_mandatory_target', 'migration_utils.check_mandatory_target', (["data['migration']['target']"], {}), "(data['migration']['target'])\n", (9847, 9876), False, 'from hammr.utils import migration_utils\n'), ((11181, 11260), 'hammr.utils.migration_utils.check_mandatory_builder', 'migration_utils.check_mandatory_builder', (["data['migration']['target']['builder']"], {}), "(data['migration']['target']['builder'])\n", (11220, 11260), False, 'from hammr.utils import migration_utils\n'), ((12078, 12222), 'uforge.application.Api', 'Api', (['"""url"""'], {'username': '"""username"""', 'password': '"""password"""', 'headers': 'None', 'disable_ssl_certificate_validation': '(False)', 'timeout': 'constants.HTTP_TIMEOUT'}), "('url', username='username', password='password', headers=None,\n disable_ssl_certificate_validation=False, timeout=constants.HTTP_TIMEOUT)\n", (12081, 12222), False, 'from uforge.application import Api\n'), ((12261, 12282), 'uforge.objects.uforge.TargetFormat', 'uforge.TargetFormat', ([], {}), '()\n', (12280, 12282), False, 'from uforge.objects import uforge\n'), ((12401, 12473), 'hammr.utils.migration_utils.retrieve_target_format', 
'migration_utils.retrieve_target_format', (['api', '"""login"""', '"""targetFormatName"""'], {}), "(api, 'login', 'targetFormatName')\n", (12439, 12473), False, 'from hammr.utils import migration_utils\n'), ((12775, 12919), 'uforge.application.Api', 'Api', (['"""url"""'], {'username': '"""username"""', 'password': '"""password"""', 'headers': 'None', 'disable_ssl_certificate_validation': '(False)', 'timeout': 'constants.HTTP_TIMEOUT'}), "('url', username='username', password='password', headers=None,\n disable_ssl_certificate_validation=False, timeout=constants.HTTP_TIMEOUT)\n", (12778, 12919), False, 'from uforge.application import Api\n'), ((13334, 13478), 'uforge.application.Api', 'Api', (['"""url"""'], {'username': '"""username"""', 'password': '"""password"""', 'headers': 'None', 'disable_ssl_certificate_validation': '(False)', 'timeout': 'constants.HTTP_TIMEOUT'}), "('url', username='username', password='password', headers=None,\n disable_ssl_certificate_validation=False, timeout=constants.HTTP_TIMEOUT)\n", (13337, 13478), False, 'from uforge.application import Api\n'), ((13519, 13540), 'uforge.objects.uforge.TargetFormat', 'uforge.TargetFormat', ([], {}), '()\n', (13538, 13540), False, 'from uforge.objects import uforge\n'), ((13564, 13584), 'uforge.objects.uforge.ImageFormat', 'uforge.ImageFormat', ([], {}), '()\n', (13582, 13584), False, 'from uforge.objects import uforge\n'), ((13847, 13915), 'hammr.utils.migration_utils.retrieve_image', 'migration_utils.retrieve_image', (['builder', 'target_format', 'api', '"""login"""'], {}), "(builder, target_format, api, 'login')\n", (13877, 13915), False, 'from hammr.utils import migration_utils\n'), ((14328, 14472), 'uforge.application.Api', 'Api', (['"""url"""'], {'username': '"""username"""', 'password': '"""password"""', 'headers': 'None', 'disable_ssl_certificate_validation': '(False)', 'timeout': 'constants.HTTP_TIMEOUT'}), "('url', username='username', password='password', headers=None,\n 
disable_ssl_certificate_validation=False, timeout=constants.HTTP_TIMEOUT)\n", (14331, 14472), False, 'from uforge.application import Api\n'), ((14511, 14532), 'uforge.objects.uforge.TargetFormat', 'uforge.TargetFormat', ([], {}), '()\n', (14530, 14532), False, 'from uforge.objects import uforge\n'), ((14556, 14576), 'uforge.objects.uforge.ImageFormat', 'uforge.ImageFormat', ([], {}), '()\n', (14574, 14576), False, 'from uforge.objects import uforge\n'), ((15254, 15277), 'uforge.objects.uforge.InstallProfile', 'uforge.InstallProfile', ([], {}), '()\n', (15275, 15277), False, 'from uforge.objects import uforge\n'), ((15828, 15851), 'uforge.objects.uforge.InstallProfile', 'uforge.InstallProfile', ([], {}), '()\n', (15849, 15851), False, 'from uforge.objects import uforge\n'), ((16037, 16115), 'hammr.utils.migration_utils.set_install_profile_disk_size', 'migration_utils.set_install_profile_disk_size', (['install_profile', 'builder', '"""aws"""'], {}), "(install_profile, builder, 'aws')\n", (16082, 16115), False, 'from hammr.utils import migration_utils\n'), ((16318, 16341), 'uforge.objects.uforge.InstallProfile', 'uforge.InstallProfile', ([], {}), '()\n', (16339, 16341), False, 'from uforge.objects import uforge\n'), ((16527, 16613), 'hammr.utils.migration_utils.set_install_profile_disk_size', 'migration_utils.set_install_profile_disk_size', (['install_profile', 'builder', '"""vcenter"""'], {}), "(install_profile, builder,\n 'vcenter')\n", (16572, 16613), False, 'from hammr.utils import migration_utils\n'), ((16806, 16829), 'uforge.objects.uforge.InstallProfile', 'uforge.InstallProfile', ([], {}), '()\n', (16827, 16829), False, 'from uforge.objects import uforge\n'), ((17434, 17455), 'uforge.objects.uforge.TargetFormat', 'uforge.TargetFormat', ([], {}), '()\n', (17453, 17455), False, 'from uforge.objects import uforge\n'), ((17479, 17499), 'uforge.objects.uforge.ImageFormat', 'uforge.ImageFormat', ([], {}), '()\n', (17497, 17499), False, 'from uforge.objects import 
uforge\n'), ((17838, 17900), 'hammr.utils.migration_utils.retrieve_publish_image', 'migration_utils.retrieve_publish_image', (['builder', 'target_format'], {}), '(builder, target_format)\n', (17876, 17900), False, 'from hammr.utils import migration_utils\n'), ((18462, 18483), 'uforge.objects.uforge.TargetFormat', 'uforge.TargetFormat', ([], {}), '()\n', (18481, 18483), False, 'from uforge.objects import uforge\n'), ((18507, 18527), 'uforge.objects.uforge.ImageFormat', 'uforge.ImageFormat', ([], {}), '()\n', (18525, 18527), False, 'from uforge.objects import uforge\n'), ((19300, 19444), 'uforge.application.Api', 'Api', (['"""url"""'], {'username': '"""username"""', 'password': '"""password"""', 'headers': 'None', 'disable_ssl_certificate_validation': '(False)', 'timeout': 'constants.HTTP_TIMEOUT'}), "('url', username='username', password='password', headers=None,\n disable_ssl_certificate_validation=False, timeout=constants.HTTP_TIMEOUT)\n", (19303, 19444), False, 'from uforge.application import Api\n'), ((19464, 19491), 'uforge.objects.uforge.CredAccountVSphere', 'uforge.CredAccountVSphere', ([], {}), '()\n', (19489, 19491), False, 'from uforge.objects import uforge\n'), ((19753, 19818), 'hammr.utils.migration_utils.retrieve_account', 'migration_utils.retrieve_account', (['api', '"""login"""', 'cred_account.name'], {}), "(api, 'login', cred_account.name)\n", (19785, 19818), False, 'from hammr.utils import migration_utils\n'), ((20173, 20317), 'uforge.application.Api', 'Api', (['"""url"""'], {'username': '"""username"""', 'password': '"""password"""', 'headers': 'None', 'disable_ssl_certificate_validation': '(False)', 'timeout': 'constants.HTTP_TIMEOUT'}), "('url', username='username', password='password', headers=None,\n disable_ssl_certificate_validation=False, timeout=constants.HTTP_TIMEOUT)\n", (20176, 20317), False, 'from uforge.application import Api\n'), ((20351, 20372), 'uforge.objects.uforge.CredAccounts', 'uforge.CredAccounts', ([], {}), '()\n', (20370, 
20372), False, 'from uforge.objects import uforge\n'), ((20405, 20416), 'pyxb.BIND', 'pyxb.BIND', ([], {}), '()\n', (20414, 20416), False, 'import pyxb\n'), ((20956, 21100), 'uforge.application.Api', 'Api', (['"""url"""'], {'username': '"""username"""', 'password': '"""password"""', 'headers': 'None', 'disable_ssl_certificate_validation': '(False)', 'timeout': 'constants.HTTP_TIMEOUT'}), "('url', username='username', password='password', headers=None,\n disable_ssl_certificate_validation=False, timeout=constants.HTTP_TIMEOUT)\n", (20959, 21100), False, 'from uforge.application import Api\n'), ((21120, 21147), 'uforge.objects.uforge.CredAccountVSphere', 'uforge.CredAccountVSphere', ([], {}), '()\n', (21145, 21147), False, 'from uforge.objects import uforge\n'), ((21789, 21807), 'uforge.objects.uforge.migration', 'uforge.migration', ([], {}), '()\n', (21805, 21807), False, 'from uforge.objects import uforge\n'), ((21890, 21905), 'uforge.objects.uforge.status', 'uforge.status', ([], {}), '()\n', (21903, 21905), False, 'from uforge.objects import uforge\n'), ((22240, 22263), 'uforge.objects.uforge.TargetPlatform', 'uforge.TargetPlatform', ([], {}), '()\n', (22261, 22263), False, 'from uforge.objects import uforge\n'), ((22438, 22459), 'uforge.objects.uforge.CredAccounts', 'uforge.CredAccounts', ([], {}), '()\n', (22457, 22459), False, 'from uforge.objects import uforge\n'), ((22492, 22503), 'pyxb.BIND', 'pyxb.BIND', ([], {}), '()\n', (22501, 22503), False, 'import pyxb\n'), ((3971, 4030), 'hammr.utils.migration_utils.retrieve_migration_configuration', 'migration_utils.retrieve_migration_configuration', (['args_file'], {}), '(args_file)\n', (4019, 4030), False, 'from hammr.utils import migration_utils\n'), ((4633, 4692), 'hammr.utils.migration_utils.retrieve_migration_configuration', 'migration_utils.retrieve_migration_configuration', (['args_file'], {}), '(args_file)\n', (4681, 4692), False, 'from hammr.utils import migration_utils\n'), ((5709, 5767), 
'hammr.utils.migration_utils.check_mandatory_migrate', 'migration_utils.check_mandatory_migrate', (["data['migration']"], {}), "(data['migration'])\n", (5748, 5767), False, 'from hammr.utils import migration_utils\n'), ((6121, 6179), 'hammr.utils.migration_utils.check_mandatory_migrate', 'migration_utils.check_mandatory_migrate', (["data['migration']"], {}), "(data['migration'])\n", (6160, 6179), False, 'from hammr.utils import migration_utils\n'), ((6540, 6598), 'hammr.utils.migration_utils.check_mandatory_migrate', 'migration_utils.check_mandatory_migrate', (["data['migration']"], {}), "(data['migration'])\n", (6579, 6598), False, 'from hammr.utils import migration_utils\n'), ((7001, 7059), 'hammr.utils.migration_utils.check_mandatory_migrate', 'migration_utils.check_mandatory_migrate', (["data['migration']"], {}), "(data['migration'])\n", (7040, 7059), False, 'from hammr.utils import migration_utils\n'), ((7427, 7485), 'hammr.utils.migration_utils.check_mandatory_migrate', 'migration_utils.check_mandatory_migrate', (["data['migration']"], {}), "(data['migration'])\n", (7466, 7485), False, 'from hammr.utils import migration_utils\n'), ((8461, 8528), 'hammr.utils.migration_utils.check_mandatory_source', 'migration_utils.check_mandatory_source', (["data['migration']['source']"], {}), "(data['migration']['source'])\n", (8499, 8528), False, 'from hammr.utils import migration_utils\n'), ((8895, 8962), 'hammr.utils.migration_utils.check_mandatory_source', 'migration_utils.check_mandatory_source', (["data['migration']['source']"], {}), "(data['migration']['source'])\n", (8933, 8962), False, 'from hammr.utils import migration_utils\n'), ((9338, 9405), 'hammr.utils.migration_utils.check_mandatory_target', 'migration_utils.check_mandatory_target', (["data['migration']['target']"], {}), "(data['migration']['target'])\n", (9376, 9405), False, 'from hammr.utils import migration_utils\n'), ((10223, 10302), 'hammr.utils.migration_utils.check_mandatory_builder', 
'migration_utils.check_mandatory_builder', (["data['migration']['target']['builder']"], {}), "(data['migration']['target']['builder'])\n", (10262, 10302), False, 'from hammr.utils import migration_utils\n'), ((10688, 10767), 'hammr.utils.migration_utils.check_mandatory_builder', 'migration_utils.check_mandatory_builder', (["data['migration']['target']['builder']"], {}), "(data['migration']['target']['builder'])\n", (10727, 10767), False, 'from hammr.utils import migration_utils\n'), ((11633, 11728), 'hammr.utils.migration_utils.check_mandatory_account', 'migration_utils.check_mandatory_account', (["data['migration']['target']['builder']['account']"], {}), "(data['migration']['target'][\n 'builder']['account'])\n", (11672, 11728), False, 'from hammr.utils import migration_utils\n'), ((13068, 13140), 'hammr.utils.migration_utils.retrieve_target_format', 'migration_utils.retrieve_target_format', (['api', '"""login"""', '"""targetFormatName"""'], {}), "(api, 'login', 'targetFormatName')\n", (13106, 13140), False, 'from hammr.utils import migration_utils\n'), ((14941, 15009), 'hammr.utils.migration_utils.retrieve_image', 'migration_utils.retrieve_image', (['builder', 'target_format', 'api', '"""login"""'], {}), "(builder, target_format, api, 'login')\n", (14971, 15009), False, 'from hammr.utils import migration_utils\n'), ((15455, 15515), 'hammr.utils.migration_utils.check_mandatory_installation', 'migration_utils.check_mandatory_installation', (['"""aws"""', 'builder'], {}), "('aws', builder)\n", (15499, 15515), False, 'from hammr.utils import migration_utils\n'), ((17039, 17117), 'hammr.utils.migration_utils.set_install_profile_disk_size', 'migration_utils.set_install_profile_disk_size', (['install_profile', 'builder', '"""aws"""'], {}), "(install_profile, builder, 'aws')\n", (17084, 17117), False, 'from hammr.utils import migration_utils\n'), ((18960, 19022), 'hammr.utils.migration_utils.retrieve_publish_image', 'migration_utils.retrieve_publish_image', (['builder', 
'target_format'], {}), '(builder, target_format)\n', (18998, 19022), False, 'from hammr.utils import migration_utils\n'), ((20542, 20599), 'hammr.utils.migration_utils.retrieve_account', 'migration_utils.retrieve_account', (['api', '"""login"""', '"""account"""'], {}), "(api, 'login', 'account')\n", (20574, 20599), False, 'from hammr.utils import migration_utils\n'), ((21436, 21501), 'hammr.utils.migration_utils.retrieve_account', 'migration_utils.retrieve_account', (['api', '"""login"""', '"""accountNotFound"""'], {}), "(api, 'login', 'accountNotFound')\n", (21468, 21501), False, 'from hammr.utils import migration_utils\n')]
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'mwindow.ui'
#
# Created: Sun Mar 05 22:07:45 2017
# by: pyside-uic 0.2.15 running on PySide 1.2.4
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_MainWindow(object):
    """Generated UI for the Cydia Repo Manager main window (pyside-uic output).

    Call ``setupUi(MainWindow)`` on a ``QMainWindow`` instance to build the
    widget tree; edits belong in ``mwindow.ui``, not here — this file is
    regenerated and manual changes will be lost.
    """
    def setupUi(self, MainWindow):
        """Build all widgets, layouts, menus and actions on *MainWindow*."""
        # Fixed-size 600x400 window (min == max), non-modal.
        MainWindow.setObjectName("MainWindow")
        MainWindow.setWindowModality(QtCore.Qt.NonModal)
        MainWindow.resize(600, 400)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())
        MainWindow.setSizePolicy(sizePolicy)
        MainWindow.setMinimumSize(QtCore.QSize(600, 400))
        MainWindow.setMaximumSize(QtCore.QSize(600, 400))
        MainWindow.setDockOptions(QtGui.QMainWindow.AllowTabbedDocks|QtGui.QMainWindow.AnimatedDocks)
        self.centralwidget = QtGui.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        # Right-hand panel (x=280..591): detail form for the selected tweak.
        self.verticalLayoutWidget = QtGui.QWidget(self.centralwidget)
        self.verticalLayoutWidget.setGeometry(QtCore.QRect(280, 10, 311, 341))
        self.verticalLayoutWidget.setObjectName("verticalLayoutWidget")
        self.TweakInfo = QtGui.QVBoxLayout(self.verticalLayoutWidget)
        self.TweakInfo.setContentsMargins(0, 0, 0, 0)
        self.TweakInfo.setObjectName("TweakInfo")
        # Bold header label showing the tweak's name.
        self.nameLabel = QtGui.QLabel(self.verticalLayoutWidget)
        font = QtGui.QFont()
        font.setFamily("Arial Black")
        font.setPointSize(12)
        self.nameLabel.setFont(font)
        self.nameLabel.setAlignment(QtCore.Qt.AlignCenter)
        self.nameLabel.setWordWrap(False)
        self.nameLabel.setObjectName("nameLabel")
        self.TweakInfo.addWidget(self.nameLabel)
        # One label + line-edit row per Debian control field
        # (Name, Version, Package, Section, Author, File Path).
        self.fieldsLayout = QtGui.QVBoxLayout()
        self.fieldsLayout.setObjectName("fieldsLayout")
        self.nameLayout = QtGui.QHBoxLayout()
        self.nameLayout.setObjectName("nameLayout")
        self.label_2 = QtGui.QLabel(self.verticalLayoutWidget)
        self.label_2.setObjectName("label_2")
        self.nameLayout.addWidget(self.label_2)
        self.nameInput = QtGui.QLineEdit(self.verticalLayoutWidget)
        self.nameInput.setObjectName("nameInput")
        self.nameLayout.addWidget(self.nameInput)
        self.fieldsLayout.addLayout(self.nameLayout)
        self.versionLayout = QtGui.QHBoxLayout()
        self.versionLayout.setObjectName("versionLayout")
        self.label_5 = QtGui.QLabel(self.verticalLayoutWidget)
        self.label_5.setObjectName("label_5")
        self.versionLayout.addWidget(self.label_5)
        self.versionInput = QtGui.QLineEdit(self.verticalLayoutWidget)
        self.versionInput.setObjectName("versionInput")
        self.versionLayout.addWidget(self.versionInput)
        self.fieldsLayout.addLayout(self.versionLayout)
        self.packageLayout = QtGui.QHBoxLayout()
        self.packageLayout.setObjectName("packageLayout")
        self.label_3 = QtGui.QLabel(self.verticalLayoutWidget)
        self.label_3.setObjectName("label_3")
        self.packageLayout.addWidget(self.label_3)
        self.packageInput = QtGui.QLineEdit(self.verticalLayoutWidget)
        self.packageInput.setObjectName("packageInput")
        self.packageLayout.addWidget(self.packageInput)
        self.fieldsLayout.addLayout(self.packageLayout)
        self.sectionLayout = QtGui.QHBoxLayout()
        self.sectionLayout.setObjectName("sectionLayout")
        self.label_6 = QtGui.QLabel(self.verticalLayoutWidget)
        self.label_6.setObjectName("label_6")
        self.sectionLayout.addWidget(self.label_6)
        self.sectionInput = QtGui.QLineEdit(self.verticalLayoutWidget)
        self.sectionInput.setObjectName("sectionInput")
        self.sectionLayout.addWidget(self.sectionInput)
        self.fieldsLayout.addLayout(self.sectionLayout)
        self.authorLayout = QtGui.QHBoxLayout()
        self.authorLayout.setObjectName("authorLayout")
        self.label_7 = QtGui.QLabel(self.verticalLayoutWidget)
        self.label_7.setObjectName("label_7")
        self.authorLayout.addWidget(self.label_7)
        self.authorInput = QtGui.QLineEdit(self.verticalLayoutWidget)
        self.authorInput.setObjectName("authorInput")
        self.authorLayout.addWidget(self.authorInput)
        self.fieldsLayout.addLayout(self.authorLayout)
        self.filePathLayout = QtGui.QHBoxLayout()
        self.filePathLayout.setObjectName("filePathLayout")
        self.label_8 = QtGui.QLabel(self.verticalLayoutWidget)
        self.label_8.setObjectName("label_8")
        self.filePathLayout.addWidget(self.label_8)
        self.filePathInput = QtGui.QLineEdit(self.verticalLayoutWidget)
        self.filePathInput.setObjectName("filePathInput")
        self.filePathLayout.addWidget(self.filePathInput)
        self.fieldsLayout.addLayout(self.filePathLayout)
        # Free-form description editor below the field rows.
        self.descriptionTextInput = QtGui.QPlainTextEdit(self.verticalLayoutWidget)
        self.descriptionTextInput.setObjectName("descriptionTextInput")
        self.fieldsLayout.addWidget(self.descriptionTextInput)
        self.TweakInfo.addLayout(self.fieldsLayout)
        # Delete / Save buttons at the bottom of the detail panel.
        self.horizontalLayout = QtGui.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.deleteButton = QtGui.QPushButton(self.verticalLayoutWidget)
        self.deleteButton.setObjectName("deleteButton")
        self.horizontalLayout.addWidget(self.deleteButton)
        self.saveButton = QtGui.QPushButton(self.verticalLayoutWidget)
        self.saveButton.setObjectName("saveButton")
        self.horizontalLayout.addWidget(self.saveButton)
        self.TweakInfo.addLayout(self.horizontalLayout)
        # NOTE(review): this "Name" row sits at y=370 in a fixed 400px-high
        # window, so it is likely clipped/invisible at runtime — confirm in
        # Qt Designer before relying on lineEdit_3.
        self.horizontalLayoutWidget_4 = QtGui.QWidget(self.centralwidget)
        self.horizontalLayoutWidget_4.setGeometry(QtCore.QRect(200, 370, 160, 80))
        self.horizontalLayoutWidget_4.setObjectName("horizontalLayoutWidget_4")
        self.horizontalLayout_4 = QtGui.QHBoxLayout(self.horizontalLayoutWidget_4)
        self.horizontalLayout_4.setContentsMargins(0, 0, 0, 0)
        self.horizontalLayout_4.setObjectName("horizontalLayout_4")
        self.label_4 = QtGui.QLabel(self.horizontalLayoutWidget_4)
        self.label_4.setObjectName("label_4")
        self.horizontalLayout_4.addWidget(self.label_4)
        self.lineEdit_3 = QtGui.QLineEdit(self.horizontalLayoutWidget_4)
        self.lineEdit_3.setObjectName("lineEdit_3")
        self.horizontalLayout_4.addWidget(self.lineEdit_3)
        # Left-hand panel (x=10..271): list of packages plus an "Add tweak" button.
        self.verticalLayoutWidget_2 = QtGui.QWidget(self.centralwidget)
        self.verticalLayoutWidget_2.setGeometry(QtCore.QRect(10, 10, 261, 341))
        self.verticalLayoutWidget_2.setObjectName("verticalLayoutWidget_2")
        self.TweaksList = QtGui.QVBoxLayout(self.verticalLayoutWidget_2)
        self.TweaksList.setContentsMargins(0, 0, 0, 0)
        self.TweaksList.setObjectName("TweaksList")
        self.packagesListWidget = QtGui.QListWidget(self.verticalLayoutWidget_2)
        self.packagesListWidget.setObjectName("packagesListWidget")
        self.TweaksList.addWidget(self.packagesListWidget)
        self.addtweakButton = QtGui.QPushButton(self.verticalLayoutWidget_2)
        self.addtweakButton.setObjectName("addtweakButton")
        self.TweaksList.addWidget(self.addtweakButton)
        MainWindow.setCentralWidget(self.centralwidget)
        # Menu bar with File > Open / Save actions, plus a status bar.
        self.menubar = QtGui.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 600, 21))
        self.menubar.setObjectName("menubar")
        self.menuFile = QtGui.QMenu(self.menubar)
        self.menuFile.setObjectName("menuFile")
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtGui.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        self.actionOpen = QtGui.QAction(MainWindow)
        self.actionOpen.setObjectName("actionOpen")
        self.actionSave = QtGui.QAction(MainWindow)
        self.actionSave.setObjectName("actionSave")
        self.menuFile.addAction(self.actionOpen)
        self.menuFile.addSeparator()
        self.menuFile.addAction(self.actionSave)
        self.menubar.addAction(self.menuFile.menuAction())
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        """Assign all user-visible (translatable) strings to the widgets."""
        MainWindow.setWindowTitle(QtGui.QApplication.translate("MainWindow", "Cydia Repo Manager", None, QtGui.QApplication.UnicodeUTF8))
        self.nameLabel.setText(QtGui.QApplication.translate("MainWindow", "TextLabel", None, QtGui.QApplication.UnicodeUTF8))
        self.label_2.setText(QtGui.QApplication.translate("MainWindow", "Name", None, QtGui.QApplication.UnicodeUTF8))
        self.label_5.setText(QtGui.QApplication.translate("MainWindow", "Version", None, QtGui.QApplication.UnicodeUTF8))
        self.label_3.setText(QtGui.QApplication.translate("MainWindow", "Package", None, QtGui.QApplication.UnicodeUTF8))
        self.label_6.setText(QtGui.QApplication.translate("MainWindow", "Section", None, QtGui.QApplication.UnicodeUTF8))
        self.label_7.setText(QtGui.QApplication.translate("MainWindow", "Author", None, QtGui.QApplication.UnicodeUTF8))
        self.label_8.setText(QtGui.QApplication.translate("MainWindow", "File Path", None, QtGui.QApplication.UnicodeUTF8))
        self.deleteButton.setText(QtGui.QApplication.translate("MainWindow", "Delete", None, QtGui.QApplication.UnicodeUTF8))
        self.saveButton.setText(QtGui.QApplication.translate("MainWindow", "Save", None, QtGui.QApplication.UnicodeUTF8))
        self.label_4.setText(QtGui.QApplication.translate("MainWindow", "Name", None, QtGui.QApplication.UnicodeUTF8))
        self.addtweakButton.setText(QtGui.QApplication.translate("MainWindow", "Add tweak", None, QtGui.QApplication.UnicodeUTF8))
        self.menuFile.setTitle(QtGui.QApplication.translate("MainWindow", "File", None, QtGui.QApplication.UnicodeUTF8))
        self.actionOpen.setText(QtGui.QApplication.translate("MainWindow", "Open", None, QtGui.QApplication.UnicodeUTF8))
        self.actionSave.setText(QtGui.QApplication.translate("MainWindow", "Save", None, QtGui.QApplication.UnicodeUTF8))
|
[
"PySide.QtGui.QHBoxLayout",
"PySide.QtCore.QMetaObject.connectSlotsByName",
"PySide.QtGui.QListWidget",
"PySide.QtGui.QStatusBar",
"PySide.QtGui.QPushButton",
"PySide.QtGui.QVBoxLayout",
"PySide.QtCore.QSize",
"PySide.QtGui.QMenu",
"PySide.QtGui.QLineEdit",
"PySide.QtGui.QPlainTextEdit",
"PySide.QtCore.QRect",
"PySide.QtGui.QSizePolicy",
"PySide.QtGui.QApplication.translate",
"PySide.QtGui.QMenuBar",
"PySide.QtGui.QLabel",
"PySide.QtGui.QFont",
"PySide.QtGui.QAction",
"PySide.QtGui.QWidget"
] |
[((499, 574), 'PySide.QtGui.QSizePolicy', 'QtGui.QSizePolicy', (['QtGui.QSizePolicy.Preferred', 'QtGui.QSizePolicy.Preferred'], {}), '(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)\n', (516, 574), False, 'from PySide import QtCore, QtGui\n'), ((1033, 1058), 'PySide.QtGui.QWidget', 'QtGui.QWidget', (['MainWindow'], {}), '(MainWindow)\n', (1046, 1058), False, 'from PySide import QtCore, QtGui\n'), ((1153, 1186), 'PySide.QtGui.QWidget', 'QtGui.QWidget', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1166, 1186), False, 'from PySide import QtCore, QtGui\n'), ((1363, 1407), 'PySide.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', (['self.verticalLayoutWidget'], {}), '(self.verticalLayoutWidget)\n', (1380, 1407), False, 'from PySide import QtCore, QtGui\n'), ((1537, 1576), 'PySide.QtGui.QLabel', 'QtGui.QLabel', (['self.verticalLayoutWidget'], {}), '(self.verticalLayoutWidget)\n', (1549, 1576), False, 'from PySide import QtCore, QtGui\n'), ((1592, 1605), 'PySide.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (1603, 1605), False, 'from PySide import QtCore, QtGui\n'), ((1939, 1958), 'PySide.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', ([], {}), '()\n', (1956, 1958), False, 'from PySide import QtCore, QtGui\n'), ((2041, 2060), 'PySide.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', ([], {}), '()\n', (2058, 2060), False, 'from PySide import QtCore, QtGui\n'), ((2136, 2175), 'PySide.QtGui.QLabel', 'QtGui.QLabel', (['self.verticalLayoutWidget'], {}), '(self.verticalLayoutWidget)\n', (2148, 2175), False, 'from PySide import QtCore, QtGui\n'), ((2295, 2337), 'PySide.QtGui.QLineEdit', 'QtGui.QLineEdit', (['self.verticalLayoutWidget'], {}), '(self.verticalLayoutWidget)\n', (2310, 2337), False, 'from PySide import QtCore, QtGui\n'), ((2520, 2539), 'PySide.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', ([], {}), '()\n', (2537, 2539), False, 'from PySide import QtCore, QtGui\n'), ((2621, 2660), 'PySide.QtGui.QLabel', 'QtGui.QLabel', (['self.verticalLayoutWidget'], {}), 
'(self.verticalLayoutWidget)\n', (2633, 2660), False, 'from PySide import QtCore, QtGui\n'), ((2786, 2828), 'PySide.QtGui.QLineEdit', 'QtGui.QLineEdit', (['self.verticalLayoutWidget'], {}), '(self.verticalLayoutWidget)\n', (2801, 2828), False, 'from PySide import QtCore, QtGui\n'), ((3026, 3045), 'PySide.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', ([], {}), '()\n', (3043, 3045), False, 'from PySide import QtCore, QtGui\n'), ((3127, 3166), 'PySide.QtGui.QLabel', 'QtGui.QLabel', (['self.verticalLayoutWidget'], {}), '(self.verticalLayoutWidget)\n', (3139, 3166), False, 'from PySide import QtCore, QtGui\n'), ((3292, 3334), 'PySide.QtGui.QLineEdit', 'QtGui.QLineEdit', (['self.verticalLayoutWidget'], {}), '(self.verticalLayoutWidget)\n', (3307, 3334), False, 'from PySide import QtCore, QtGui\n'), ((3532, 3551), 'PySide.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', ([], {}), '()\n', (3549, 3551), False, 'from PySide import QtCore, QtGui\n'), ((3633, 3672), 'PySide.QtGui.QLabel', 'QtGui.QLabel', (['self.verticalLayoutWidget'], {}), '(self.verticalLayoutWidget)\n', (3645, 3672), False, 'from PySide import QtCore, QtGui\n'), ((3798, 3840), 'PySide.QtGui.QLineEdit', 'QtGui.QLineEdit', (['self.verticalLayoutWidget'], {}), '(self.verticalLayoutWidget)\n', (3813, 3840), False, 'from PySide import QtCore, QtGui\n'), ((4037, 4056), 'PySide.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', ([], {}), '()\n', (4054, 4056), False, 'from PySide import QtCore, QtGui\n'), ((4136, 4175), 'PySide.QtGui.QLabel', 'QtGui.QLabel', (['self.verticalLayoutWidget'], {}), '(self.verticalLayoutWidget)\n', (4148, 4175), False, 'from PySide import QtCore, QtGui\n'), ((4299, 4341), 'PySide.QtGui.QLineEdit', 'QtGui.QLineEdit', (['self.verticalLayoutWidget'], {}), '(self.verticalLayoutWidget)\n', (4314, 4341), False, 'from PySide import QtCore, QtGui\n'), ((4535, 4554), 'PySide.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', ([], {}), '()\n', (4552, 4554), False, 'from PySide import QtCore, QtGui\n'), ((4638, 4677), 
'PySide.QtGui.QLabel', 'QtGui.QLabel', (['self.verticalLayoutWidget'], {}), '(self.verticalLayoutWidget)\n', (4650, 4677), False, 'from PySide import QtCore, QtGui\n'), ((4805, 4847), 'PySide.QtGui.QLineEdit', 'QtGui.QLineEdit', (['self.verticalLayoutWidget'], {}), '(self.verticalLayoutWidget)\n', (4820, 4847), False, 'from PySide import QtCore, QtGui\n'), ((5057, 5104), 'PySide.QtGui.QPlainTextEdit', 'QtGui.QPlainTextEdit', (['self.verticalLayoutWidget'], {}), '(self.verticalLayoutWidget)\n', (5077, 5104), False, 'from PySide import QtCore, QtGui\n'), ((5324, 5343), 'PySide.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', ([], {}), '()\n', (5341, 5343), False, 'from PySide import QtCore, QtGui\n'), ((5436, 5480), 'PySide.QtGui.QPushButton', 'QtGui.QPushButton', (['self.verticalLayoutWidget'], {}), '(self.verticalLayoutWidget)\n', (5453, 5480), False, 'from PySide import QtCore, QtGui\n'), ((5622, 5666), 'PySide.QtGui.QPushButton', 'QtGui.QPushButton', (['self.verticalLayoutWidget'], {}), '(self.verticalLayoutWidget)\n', (5639, 5666), False, 'from PySide import QtCore, QtGui\n'), ((5872, 5905), 'PySide.QtGui.QWidget', 'QtGui.QWidget', (['self.centralwidget'], {}), '(self.centralwidget)\n', (5885, 5905), False, 'from PySide import QtCore, QtGui\n'), ((6103, 6151), 'PySide.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', (['self.horizontalLayoutWidget_4'], {}), '(self.horizontalLayoutWidget_4)\n', (6120, 6151), False, 'from PySide import QtCore, QtGui\n'), ((6306, 6349), 'PySide.QtGui.QLabel', 'QtGui.QLabel', (['self.horizontalLayoutWidget_4'], {}), '(self.horizontalLayoutWidget_4)\n', (6318, 6349), False, 'from PySide import QtCore, QtGui\n'), ((6478, 6524), 'PySide.QtGui.QLineEdit', 'QtGui.QLineEdit', (['self.horizontalLayoutWidget_4'], {}), '(self.horizontalLayoutWidget_4)\n', (6493, 6524), False, 'from PySide import QtCore, QtGui\n'), ((6674, 6707), 'PySide.QtGui.QWidget', 'QtGui.QWidget', (['self.centralwidget'], {}), '(self.centralwidget)\n', (6687, 6707), False, 'from PySide 
import QtCore, QtGui\n'), ((6890, 6936), 'PySide.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', (['self.verticalLayoutWidget_2'], {}), '(self.verticalLayoutWidget_2)\n', (6907, 6936), False, 'from PySide import QtCore, QtGui\n'), ((7078, 7124), 'PySide.QtGui.QListWidget', 'QtGui.QListWidget', (['self.verticalLayoutWidget_2'], {}), '(self.verticalLayoutWidget_2)\n', (7095, 7124), False, 'from PySide import QtCore, QtGui\n'), ((7282, 7328), 'PySide.QtGui.QPushButton', 'QtGui.QPushButton', (['self.verticalLayoutWidget_2'], {}), '(self.verticalLayoutWidget_2)\n', (7299, 7328), False, 'from PySide import QtCore, QtGui\n'), ((7523, 7549), 'PySide.QtGui.QMenuBar', 'QtGui.QMenuBar', (['MainWindow'], {}), '(MainWindow)\n', (7537, 7549), False, 'from PySide import QtCore, QtGui\n'), ((7682, 7707), 'PySide.QtGui.QMenu', 'QtGui.QMenu', (['self.menubar'], {}), '(self.menubar)\n', (7693, 7707), False, 'from PySide import QtCore, QtGui\n'), ((7825, 7853), 'PySide.QtGui.QStatusBar', 'QtGui.QStatusBar', (['MainWindow'], {}), '(MainWindow)\n', (7841, 7853), False, 'from PySide import QtCore, QtGui\n'), ((7978, 8003), 'PySide.QtGui.QAction', 'QtGui.QAction', (['MainWindow'], {}), '(MainWindow)\n', (7991, 8003), False, 'from PySide import QtCore, QtGui\n'), ((8082, 8107), 'PySide.QtGui.QAction', 'QtGui.QAction', (['MainWindow'], {}), '(MainWindow)\n', (8095, 8107), False, 'from PySide import QtCore, QtGui\n'), ((8402, 8451), 'PySide.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['MainWindow'], {}), '(MainWindow)\n', (8439, 8451), False, 'from PySide import QtCore, QtGui\n'), ((820, 842), 'PySide.QtCore.QSize', 'QtCore.QSize', (['(600)', '(400)'], {}), '(600, 400)\n', (832, 842), False, 'from PySide import QtCore, QtGui\n'), ((878, 900), 'PySide.QtCore.QSize', 'QtCore.QSize', (['(600)', '(400)'], {}), '(600, 400)\n', (890, 900), False, 'from PySide import QtCore, QtGui\n'), ((1233, 1264), 'PySide.QtCore.QRect', 'QtCore.QRect', (['(280)', '(10)', '(311)', 
'(341)'], {}), '(280, 10, 311, 341)\n', (1245, 1264), False, 'from PySide import QtCore, QtGui\n'), ((5956, 5987), 'PySide.QtCore.QRect', 'QtCore.QRect', (['(200)', '(370)', '(160)', '(80)'], {}), '(200, 370, 160, 80)\n', (5968, 5987), False, 'from PySide import QtCore, QtGui\n'), ((6756, 6786), 'PySide.QtCore.QRect', 'QtCore.QRect', (['(10)', '(10)', '(261)', '(341)'], {}), '(10, 10, 261, 341)\n', (6768, 6786), False, 'from PySide import QtCore, QtGui\n'), ((7583, 7610), 'PySide.QtCore.QRect', 'QtCore.QRect', (['(0)', '(0)', '(600)', '(21)'], {}), '(0, 0, 600, 21)\n', (7595, 7610), False, 'from PySide import QtCore, QtGui\n'), ((8528, 8634), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Cydia Repo Manager"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Cydia Repo Manager', None,\n QtGui.QApplication.UnicodeUTF8)\n", (8556, 8634), False, 'from PySide import QtCore, QtGui\n'), ((8663, 8761), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""TextLabel"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'TextLabel', None, QtGui.\n QApplication.UnicodeUTF8)\n", (8691, 8761), False, 'from PySide import QtCore, QtGui\n'), ((8787, 8880), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Name"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Name', None, QtGui.QApplication\n .UnicodeUTF8)\n", (8815, 8880), False, 'from PySide import QtCore, QtGui\n'), ((8906, 9002), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Version"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Version', None, QtGui.\n QApplication.UnicodeUTF8)\n", (8934, 9002), False, 'from PySide import QtCore, QtGui\n'), ((9028, 9124), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Package"""', 
'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Package', None, QtGui.\n QApplication.UnicodeUTF8)\n", (9056, 9124), False, 'from PySide import QtCore, QtGui\n'), ((9150, 9246), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Section"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Section', None, QtGui.\n QApplication.UnicodeUTF8)\n", (9178, 9246), False, 'from PySide import QtCore, QtGui\n'), ((9272, 9367), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Author"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Author', None, QtGui.\n QApplication.UnicodeUTF8)\n", (9300, 9367), False, 'from PySide import QtCore, QtGui\n'), ((9393, 9491), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""File Path"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'File Path', None, QtGui.\n QApplication.UnicodeUTF8)\n", (9421, 9491), False, 'from PySide import QtCore, QtGui\n'), ((9522, 9617), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Delete"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Delete', None, QtGui.\n QApplication.UnicodeUTF8)\n", (9550, 9617), False, 'from PySide import QtCore, QtGui\n'), ((9646, 9739), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Save"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Save', None, QtGui.QApplication\n .UnicodeUTF8)\n", (9674, 9739), False, 'from PySide import QtCore, QtGui\n'), ((9765, 9858), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Name"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Name', None, QtGui.QApplication\n .UnicodeUTF8)\n", (9793, 9858), False, 'from PySide import QtCore, QtGui\n'), 
((9891, 9989), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Add tweak"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Add tweak', None, QtGui.\n QApplication.UnicodeUTF8)\n", (9919, 9989), False, 'from PySide import QtCore, QtGui\n'), ((10017, 10110), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""File"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'File', None, QtGui.QApplication\n .UnicodeUTF8)\n", (10045, 10110), False, 'from PySide import QtCore, QtGui\n'), ((10139, 10232), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Open"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Open', None, QtGui.QApplication\n .UnicodeUTF8)\n", (10167, 10232), False, 'from PySide import QtCore, QtGui\n'), ((10261, 10354), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Save"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Save', None, QtGui.QApplication\n .UnicodeUTF8)\n", (10289, 10354), False, 'from PySide import QtCore, QtGui\n')]
|
from django import forms
from django.core.exceptions import ImproperlyConfigured
from mayan.apps.acls.models import AccessControlList
class FilteredModelFieldMixin:
    """
    Mixin that restricts a model field's queryset via access control.

    Accepts the extra keyword arguments ``source_model``, ``permission``
    and ``source_queryset``. The field starts out with an empty queryset;
    ``reload()`` (which reads ``self.user``, expected to be assigned by
    the form before the call) repopulates ``self.queryset``.
    """

    def __init__(self, *args, **kwargs):
        self.source_model = kwargs.pop('source_model', None)
        self.permission = kwargs.pop('permission', None)
        self.source_queryset = kwargs.pop('source_queryset', None)

        if self.source_queryset is None:
            if not self.source_model:
                raise ImproperlyConfigured(
                    '{} requires a source_queryset or a source_model to be '
                    'specified as keyword argument.'.format(
                        self.__class__.__name__
                    )
                )
            # No explicit queryset given; fall back to the model's
            # default manager.
            self.source_queryset = self.source_model._meta.default_manager.all()

        # Begin empty — reload() fills the queryset once the user is known.
        kwargs['queryset'] = self.source_queryset.none()
        super().__init__(*args, **kwargs)

    def reload(self):
        """Repopulate ``self.queryset``, filtered by ACLs when applicable."""
        if not (self.permission and self.user):
            # No permission to enforce (or anonymous context): expose the
            # unfiltered source queryset.
            self.queryset = self.source_queryset
            return
        self.queryset = AccessControlList.objects.restrict_queryset(
            permission=self.permission, queryset=self.source_queryset,
            user=self.user
        )
class FilteredModelChoiceField(
    FilteredModelFieldMixin, forms.ModelChoiceField
):
    """Single selection model choice field filtered by the mixin's ACL-aware queryset."""
class FilteredModelMultipleChoiceField(
    FilteredModelFieldMixin, forms.ModelMultipleChoiceField
):
    """Multiple selection model choice field filtered by the mixin's ACL-aware queryset."""
|
[
"mayan.apps.acls.models.AccessControlList.objects.restrict_queryset"
] |
[((1037, 1159), 'mayan.apps.acls.models.AccessControlList.objects.restrict_queryset', 'AccessControlList.objects.restrict_queryset', ([], {'permission': 'self.permission', 'queryset': 'self.source_queryset', 'user': 'self.user'}), '(permission=self.permission,\n queryset=self.source_queryset, user=self.user)\n', (1080, 1159), False, 'from mayan.apps.acls.models import AccessControlList\n')]
|
import os
import configparser
class Config(configparser.ConfigParser):
    """
    ConfigParser subclass that creates the config file <filepath> on first
    use, seeded with the structure contained in <default_dict>.

    <default_dict> should look like this:
        default_dict = {
            "GENERAL": {
                "theme": 1,
                "splash_screen": True,
                ...
            },
            "NETWORK": {
                "server_addr": "127.0.0.1",
                "server_port": "1337",
                ...
            },
            ...
        }

    The config file is created upon instantiation (only if it does not
    already exist). Use get_option() to retrieve values in real time from
    the file: if the file is changed during runtime, e.g. by the user,
    the program will fetch the new values.
    """

    def __init__(self, filepath, default_dict=None):
        """Create the parser and write <filepath> with defaults if missing.

        default_dict defaults to None (treated as empty) rather than a
        mutable {} default.
        """
        super().__init__()
        self.filepath = filepath
        if not os.path.isfile(filepath):
            try:
                for section, options in (default_dict or {}).items():
                    self.add_section(section)
                    for item, value in options.items():
                        # Values are stored as strings, as configparser requires.
                        self[section][item] = str(value)
                with open(filepath, "w") as configfile:
                    self.write(configfile)
            except Exception as e:
                raise Exception(e.__class__, "Parameter default_dict likely did not have the required structure.") from e

    def get_option(self, itemname, cast=str, fallback=None):
        """Return the value of <itemname> from the file, cast to <cast>.

        The file is re-read on every call so external edits are picked up.
        Sections are searched in order; the first section containing
        <itemname> wins. Returns <fallback> if the item is missing or the
        value cannot be cast.
        """
        assert isinstance(itemname, str)
        assert cast in (str, int, float, bool)
        # Re-read the file so values changed at runtime are seen.
        self.clear()
        self.read(self.filepath)
        value = None
        found = False
        for section in self.sections():
            if itemname in self.options(section):
                value = self[section][itemname]
                found = True
                break  # first matching section wins
        if not found:
            return fallback
        if cast is bool:
            # bool("False") would be True; use configparser's boolean
            # vocabulary ("true"/"false", "yes"/"no", "on"/"off", "1"/"0").
            return self.BOOLEAN_STATES.get(value.lower(), fallback)
        if cast is str:
            return value  # stored values are already strings
        try:
            return cast(value)
        except (TypeError, ValueError):
            return fallback
|
[
"os.path.isfile"
] |
[((967, 991), 'os.path.isfile', 'os.path.isfile', (['filepath'], {}), '(filepath)\n', (981, 991), False, 'import os\n')]
|
"""Accumulate basic statistics over a Surfer grid with a Geosoft GXST instance."""
import geosoft.gxapi as gxapi
import geosoft.gxpy.gx as gx
import geosoft.gxpy.grid as gxgrid
import geosoft.gxpy.utility as gxu
# this example requires version 9.2.1, which adds iteration support
gxu.check_version('9.2.1')
# create context
gxc = gx.GXpy()
# create a gxapi.GXST instance to accumulate statistics
stats = gxapi.GXST.create()
# add each data to stats point-by-point (slow, better to use numpy or vector approach)
number_of_dummies = 0
with gxgrid.Grid.open('elevation_surfer.grd(SRF;VER=V7)') as grid:
    # grid iteration yields (x, y, z, value); dummy (no-data) cells come back as None
    for x, y, z, v in grid:
        if v is None:
            number_of_dummies += 1
        else:
            stats.data(v)
    total_points = grid.nx * grid.ny
# print statistical properties
print('minimum: ', stats.get_info(gxapi.ST_MIN))
print('maximum: ', stats.get_info(gxapi.ST_MAX))
print('mean: ', stats.get_info(gxapi.ST_MEAN))
print('standard deviation:', stats.get_info(gxapi.ST_STDDEV))
print('number of dummies: ', number_of_dummies)
print('number of valid data points: ', total_points - number_of_dummies)
|
[
"geosoft.gxpy.grid.Grid.open",
"geosoft.gxpy.gx.GXpy",
"geosoft.gxapi.GXST.create",
"geosoft.gxpy.utility.check_version"
] |
[((198, 224), 'geosoft.gxpy.utility.check_version', 'gxu.check_version', (['"""9.2.1"""'], {}), "('9.2.1')\n", (215, 224), True, 'import geosoft.gxpy.utility as gxu\n'), ((249, 258), 'geosoft.gxpy.gx.GXpy', 'gx.GXpy', ([], {}), '()\n', (256, 258), True, 'import geosoft.gxpy.gx as gx\n'), ((324, 343), 'geosoft.gxapi.GXST.create', 'gxapi.GXST.create', ([], {}), '()\n', (341, 343), True, 'import geosoft.gxapi as gxapi\n'), ((459, 511), 'geosoft.gxpy.grid.Grid.open', 'gxgrid.Grid.open', (['"""elevation_surfer.grd(SRF;VER=V7)"""'], {}), "('elevation_surfer.grd(SRF;VER=V7)')\n", (475, 511), True, 'import geosoft.gxpy.grid as gxgrid\n')]
|
import torch
import torch.nn as nn
import numpy as np
import torch.nn.functional as F
"""In this script are all modules required for the generator and discriminator"""
### Helper Functions ###
def make_mlp(dim_list, activation_list, batch_norm=False, dropout=0):
"""
Generates MLP network:
Parameters
----------
dim_list : list, list of number for each layer
activation_list : list, list containing activation function for each layer
batch_norm : boolean, use batchnorm at each layer, default: False
dropout : float [0, 1], dropout probability applied on each layer (except last layer)
Returns
-------
nn.Sequential with layers
"""
layers = []
index = 0
for dim_in, dim_out in zip(dim_list[:-1], dim_list[1:]):
activation = activation_list[index]
layers.append(nn.Linear(dim_in, dim_out))
if batch_norm:
layers.append(nn.BatchNorm1d(dim_out))
if activation == 'relu':
layers.append(nn.ReLU())
elif activation == 'tanh':
layers.append(nn.Tanh())
elif activation == 'leakyrelu':
layers.append(nn.LeakyReLU())
elif activation == 'sigmoid':
layers.append(nn.Sigmoid())
if dropout > 0 and index < len(dim_list) - 2:
layers.append(nn.Dropout(p=dropout))
index += 1
return nn.Sequential(*layers)
### Convolutional Blocks and U-NET CNN ###
class Conv_Blocks(nn.Module):
def __init__(self, input_dim, output_dim, filter_size=3, batch_norm=False, non_lin="tanh", dropout=0.,
first_block=False, last_block=False, skip_connection=False):
super(Conv_Blocks, self).__init__()
self.skip_connection = skip_connection
self.last_block = last_block
self.first_block = first_block
self.Block = nn.Sequential()
self.Block.add_module("Conv_1", nn.Conv2d(input_dim, output_dim, filter_size, 1, 1))
if batch_norm:
self.Block.add_module("BN_1", nn.BatchNorm2d(output_dim))
if non_lin == "tanh":
self.Block.add_module("NonLin_1", nn.Tanh())
elif non_lin == "relu":
self.Block.add_module("NonLin_1", nn.ReLU())
elif non_lin == "leakyrelu":
self.Block.add_module("NonLin_1", nn.LeakyReLU())
else:
assert False, "non_lin = {} not valid: 'tanh', 'relu', 'leakyrelu'".format(non_lin)
self.Block.add_module("Pool", nn.MaxPool2d(kernel_size=(2, 2), stride=(2, 2), dilation=(1, 1), ceil_mode=False))
if dropout > 0:
self.Block.add_module("Drop", nn.Dropout2d(dropout))
def forward(self, x, ):
if self.skip_connection:
if not self.first_block:
x, skip_con_list = x
else:
skip_con_list = []
x = self.Block(x)
if self.skip_connection:
if not self.last_block:
skip_con_list.append(x)
x = [x, skip_con_list]
return x
class UpConv_Blocks(nn.Module):
def __init__(self, input_dim, output_dim, filter=4, padding=1, first_block=False, last_block=False,
batch_norm=False, non_lin="relu", dropout=0, skip_connection=False):
super(UpConv_Blocks, self).__init__()
self.Block = nn.Sequential()
self.skip_connection = skip_connection
self.first_block = first_block
self.last_block = last_block
if self.skip_connection and not self.first_block:
ouput_dim_conv = input_dim
input_dim *= 2
else:
ouput_dim_conv = output_dim
self.Block.add_module("UpConv", nn.ConvTranspose2d(input_dim, output_dim, filter, 2, padding))
if not last_block:
if batch_norm:
self.Block.add_module("BN_up", nn.BatchNorm2d(output_dim))
if non_lin == "tanh":
self.Block.add_module("NonLin_up", nn.Tanh())
elif non_lin == "relu":
self.Block.add_module("NonLin_up", nn.ReLU())
elif non_lin == "leakyrelu":
self.Block.add_module("NonLin_up", nn.LeakyReLU())
if dropout > 0:
self.Block.add_module("Drop_up", nn.Dropout2d(dropout))
def forward(self, x, ):
if self.skip_connection:
x, skip_con_list = x
if not self.first_block:
x = torch.cat((x, skip_con_list.pop(-1)), -3)
x = self.Block(x)
if self.skip_connection and not self.last_block:
x = [x, skip_con_list]
return x
class CNN(nn.Module):
def __init__(self,
social_pooling=False,
channels_cnn=4,
mlp=32,
encoder_h_dim=16,
insert_trajectory=False,
need_decoder=False,
PhysFeature=False,
grid_size_in=32,
grid_size_out=32,
num_layers=3,
dropout=0.,
batch_norm=False,
non_lin_cnn="tanh",
in_channels=3,
skip_connection=False,
):
super(CNN, self).__init__()
self.__dict__.update(locals())
self.bottleneck_dim = int(grid_size_in / 2 ** (num_layers - 1)) ** 2
num_layers_dec = int(num_layers + ((grid_size_out - grid_size_in) / grid_size_out))
self.encoder = nn.Sequential()
layer_out = channels_cnn
self.encoder.add_module("ConvBlock_1", Conv_Blocks(in_channels, channels_cnn,
dropout=dropout,
batch_norm=batch_norm,
non_lin=self.non_lin_cnn,
first_block=True,
skip_connection=self.skip_connection
))
layer_in = layer_out
for layer in np.arange(2, num_layers + 1):
if layer != num_layers:
layer_out = layer_in * 2
last_block = False
else:
layer_out = layer_in
last_block = True
self.encoder.add_module("ConvBlock_%s" % layer,
Conv_Blocks(layer_in, layer_out,
dropout=dropout,
batch_norm=batch_norm,
non_lin=self.non_lin_cnn,
skip_connection=self.skip_connection,
last_block=last_block
))
layer_in = layer_out
self.bootleneck_channel = layer_out
if self.need_decoder:
self.decoder = nn.Sequential()
layer_in = layer_out
for layer in range(1, num_layers_dec + 1):
first_block = False
extra_d = 0
layer_in = layer_out
last_block = False
filter = 4
padding = 1
if layer == 1:
if self.insert_trajectory:
extra_d = 1
first_block = True
layer_out = layer_in
else:
layer_out = int(layer_in / 2.)
if layer == num_layers_dec:
layer_out = 1
last_block = True
padding = 0
filter = 3
self.decoder.add_module("UpConv_%s" % layer,
UpConv_Blocks(int(layer_in + extra_d),
layer_out,
first_block=first_block,
filter=filter,
padding=padding,
dropout=dropout,
batch_norm=batch_norm,
non_lin=self.non_lin_cnn,
skip_connection=self.skip_connection,
last_block=last_block))
if self.insert_trajectory:
self.traj2cnn = make_mlp(
dim_list=[encoder_h_dim, mlp, self.bottleneck_dim],
activation_list=["tanh", "tanh"],
)
self.init_weights()
def init_weights(self):
def init_kaiming(m):
if type(m) in [nn.Conv2d, nn.ConvTranspose2d]:
torch.nn.init.kaiming_normal_(m.weight, mode='fan_in')
m.bias.data.fill_(0.01)
# if type(m) in [nn.ConvTranspose2d]:
# torch.nn.init.kaiming_normal_(m.weight, mode='fan_in')
# m.bias.data.fill_(50)
def init_xavier(m):
if type(m) == [nn.Conv2d, nn.ConvTranspose2d]:
torch.nn.init.xavier_uniform(m.weight)
m.bias.data.fill_(0.01)
if self.non_lin_cnn in ['relu', 'leakyrelu']:
self.apply(init_kaiming)
elif self.non_lin_cnn == "tanh":
self.apply(init_xavier)
else:
assert False, "non_lin not valid for initialisation"
def forward(self, image, traj_h=torch.empty(1), pool_h=torch.empty(1)):
output = {}
enc = self.encoder(image)
if self.PhysFeature:
# enc_out = self.leakyrelu(self.encoder_out(enc))
# enc_out = enc_out.permute(1, 0, 2, 3).view(1, enc_out.size(0), -1)
output.update(Features=enc)
if self.need_decoder:
if self.skip_connection:
batch, c, w, h = enc[0].size()
in_decoder, skip_con_list = enc
else:
batch, c, w, h = enc.size()
in_decoder = enc
if self.insert_trajectory:
traj_enc = self.traj2cnn(traj_h)
traj_enc = traj_enc.view(batch, 1, w, h)
in_decoder = torch.cat((traj_enc, in_decoder), 1)
if self.social_pooling:
social_enc = self.social_states(pool_h)
social_enc = social_enc.view(batch, 1, w, h)
in_decoder = torch.cat((social_enc, in_decoder), 1)
if self.skip_connection: in_decoder = [in_decoder, skip_con_list]
dec = self.decoder(in_decoder)
output.update(PosMap=dec)
return output
class MotionEncoder(nn.Module):
"""MotionEncoder extracts dynamic features of the past trajectory and consists of an encoding LSTM network"""
def __init__(self,
encoder_h_dim=64,
input_dim=2,
embedding_dim=16,
dropout=0.0):
""" Initialize MotionEncoder.
Parameters.
encoder_h_dim (int) - - dimensionality of hidden state
input_dim (int) - - input dimensionality of spatial coordinates
embedding_dim (int) - - dimensionality spatial embedding
dropout (float) - - dropout in LSTM layer
"""
super(MotionEncoder, self).__init__()
self.encoder_h_dim = encoder_h_dim
self.embedding_dim = embedding_dim
self.input_dim = input_dim
if embedding_dim:
self.spatial_embedding = nn.Linear(input_dim, embedding_dim)
self.encoder = nn.LSTM(embedding_dim, encoder_h_dim)
else:
self.encoder = nn.LSTM(input_dim, encoder_h_dim)
def init_hidden(self, batch, obs_traj):
return (
torch.zeros(1, batch, self.encoder_h_dim).to(obs_traj),
torch.zeros(1, batch, self.encoder_h_dim).to(obs_traj)
)
def forward(self, obs_traj, state_tuple=None):
""" Calculates forward pass of MotionEncoder
Parameters:
obs_traj (tensor) - - Tensor of shape (obs_len, batch, 2)
state_tuple (tuple of tensors) - - Tuple with hidden state (1, batch, encoder_h_dim) and cell state tensor (1, batch, encoder_h_dim)
Returns:
output (tensor) - - Output of LSTM netwok for all time steps (obs_len, batch, encoder_h_dim)
final_h (tensor) - - Final hidden state of LSTM network (1, batch, encoder_h_dim)
"""
# Encode observed Trajectory
batch = obs_traj.size(1)
if not state_tuple:
state_tuple = self.init_hidden(batch, obs_traj)
if self.embedding_dim:
obs_traj = self.spatial_embedding(obs_traj)
output, state = self.encoder(obs_traj, state_tuple)
final_h = state[0]
return output, final_h
class VisualNetwork(nn.Module):
"""VisualNetwork is the parent class for the attention and goal networks generating the CNN"""
def __init__(self,
decoder_h_dim=128,
dropout=0.0,
batch_norm=False,
mlp_dim=32,
img_scaling=0.25,
final_embedding_dim=4,
grid_size_in=16,
grid_size_out=16,
num_layers=1,
batch_norm_cnn=True,
non_lin_cnn="relu",
img_type="local_image",
skip_connection=False,
channels_cnn=4,
social_pooling=False,
**kwargs):
super(VisualNetwork, self).__init__()
self.__dict__.update(locals())
def init_cnn(self):
self.CNN = CNN(social_pooling=self.social_pooling,
channels_cnn=self.channels_cnn,
encoder_h_dim=self.decoder_h_dim,
mlp=self.mlp_dim,
insert_trajectory=True,
need_decoder=self.need_decoder,
PhysFeature=self.PhysFeature,
grid_size_in=self.grid_size_in,
grid_size_out=self.grid_size_out,
dropout=self.dropout,
batch_norm=self.batch_norm_cnn,
non_lin_cnn=self.non_lin_cnn,
num_layers=self.num_layers,
in_channels=4,
skip_connection=self.skip_connection
)
### Visual Attention ###
class AttentionNetwork(VisualNetwork):
def __init__(self,
noise_attention_dim=8,
**kwargs
):
super(AttentionNetwork, self).__init__()
VisualNetwork.__init__(self, **kwargs)
self.__dict__.update(locals())
self.PhysFeature = True
self.skip_connection = False
self.need_decoder = False
self.init_cnn()
self.final_embedding = self.CNN.bottleneck_dim + self.noise_attention_dim
attention_dims = [self.CNN.bootleneck_channel, self.mlp_dim, 1]
activation = ['leakyrelu', None]
self.cnn_attention = make_mlp(
attention_dims,
activation_list=activation, )
def get_noise(self, batch_size, type="gauss"):
"""
Create noise vector:
Parameters
----------
batchsize : int, length of noise vector
noise_type: str, 'uniform' or 'gaussian' noise
Returns
-------
Random noise vector
"""
if type == "gauss":
return torch.randn((1, batch_size, self.noise_attention_dim))
elif type == "uniform":
rand_num = torch.rand((1, batch_size, self.noise_attention_dim))
return rand_num
else:
raise ValueError('Unrecognized noise type "%s"' % noise_type)
class AttentionRoutingModule(AttentionNetwork):
def __init__(self,
**kwargs):
super(AttentionNetwork, self).__init__()
AttentionNetwork.__init__(self, **kwargs)
self.__dict__.update(locals())
self.img_patch = Patch_gen(img_scaling=self.img_scaling,
grid_size=self.grid_size_in,
type_img=self.img_type)
self.init_cnn()
def forward(self, scene_img, last_pos, h, noise=torch.Tensor()):
img_patch = self.img_patch.get_patch(scene_img, last_pos)
visual_features = self.CNN(img_patch, h)["Features"].permute(0, 2, 3, 1)
batch_size, hh, w, c = visual_features.size()
visual_features = visual_features.view(batch_size, -1, c)
attention_scores = self.cnn_attention(visual_features)
attention_vec = attention_scores.softmax(dim=1).squeeze(2).unsqueeze(0)
if self.noise_attention_dim > 0:
if len(noise) == 0:
noise = self.get_noise(batch_size)
else:
assert noise.size(-1) != self.noise_attention_dim, "dimension of noise {} not valid".format(
noise.size())
x = torch.cat((attention_vec, noise.to(attention_vec)), dim=2)
return x, attention_vec, img_patch, noise
class AttentionGlobal(AttentionNetwork):
"""Alternative Visual Attention to GoalModule"""
def __init__(self, **kwargs):
super(AttentionNetwork, self).__init__()
AttentionNetwork.__init__(self, **kwargs)
self.__dict__.update(locals())
self.init_cnn()
def forward(self, features, h, noise=torch.Tensor()):
visual_features = self.CNN(features, h)["Features"].permute(0, 2, 3, 1)
batch_size, hh, w, c = visual_features.size()
visual_features = visual_features.view(batch_size, -1, c)
attention_scores = self.cnn_attention(visual_features)
attention_vec = attention_scores.softmax(dim=1).squeeze(2).unsqueeze(0)
if self.noise_attention_dim > 0:
if len(noise) == 0:
noise = self.get_noise(batch_size)
else:
assert noise.size(-1) != self.noise_attention_dim, "dimension of noise {} not valid".format(
noise.size())
x = torch.cat((attention_vec, noise.to(attention_vec)), dim=2)
return x, attention_vec, noise
### GOAL Module ###
class GoalGlobal(VisualNetwork):
def __init__(self,
temperature=1, # temperature of the gumbel sampling
force_hard=True, # mode of the gumbel sampling
**kwargs):
super(GoalGlobal, self).__init__()
VisualNetwork.__init__(self, **kwargs)
self.__dict__.update(locals())
self.PhysFeature = False
self.need_decoder = True
self.init_cnn()
self.gumbelsampler = GumbelSampler(
temp=self.temperature,
grid_size_out=self.grid_size_out,
force_hard=force_hard,
scaling=self.img_scaling)
def forward(self, features, h, pool_h=torch.empty(1)):
cnn_out = self.CNN(features, h, pool_h)
final_pos, final_pos_map_decoder, final_pos_map, y_softmax, y_scores = self.gumbelsampler(cnn_out)
return final_pos, final_pos_map_decoder, final_pos_map, y_softmax, y_scores
class RoutingModule(nn.Module):
"""RoutingModule is part of TrajectoryGenerator and generates the prediction for each time step.
The MotionDecoder consists of a LSTM network and a local goal network or attention network"""
def __init__(
self,
seq_len=12,
input_dim=2,
decoder_h_dim=128,
embedding_dim=64,
dropout=0.0,
batch_norm=False,
mlp_dim=32,
img_scaling_local=0.25,
final_embedding_dim_rm=4,
rm_vis_type="attention",
grid_size_rm=8,
dropout_cnn_rm=0.0,
num_layers_rm=3,
non_lin_cnn_rm="relu",
force_hard_rm=True,
temperature_rm=1,
batch_norm_cnn_rm=False,
noise_attention_dim_rm=True,
skip_connection_rm=False,
channels_cnn_rm=4,
global_vis_type="goal"):
"""Initialise Motion Decoder network
Parameters.
seq_len (int) - - Prediction length of trajectory
input_dim (int) - - input / output dimensionality of spatial coordinates
decoder_h_dim (int) - - hidden state dimenstion of decoder LSTM
embedding_dim (int) - - dimensionality spatial embedding
dropout (float) - - dropout
final_embedding_dim (int) - - embedding for final position estimate
mlp_dim (int) - - bottleneck dimensionality of mlp networks
PhysAtt (bool) - - depreciated. should not be used
device (torch.device) - - Choose device: cpu or gpu (cuda)
batch_norm (bool) - - if true, applies batch norm in mlp networks
img_scaling (float) - - ratio [m/px] between real and pixel space
grid_size (int) - - defines size of image path in goal / attention network (grid size is 2xgrid_size +1 )
decoder_type ("goal", "attention", none) - -
"""
super(RoutingModule, self).__init__()
self.__dict__.update(locals())
if self.rm_vis_type:
if self.rm_vis_type == "attention":
self.rm_attention = AttentionRoutingModule(
channels_cnn=self.channels_cnn_rm,
decoder_h_dim=self.decoder_h_dim,
dropout=self.dropout_cnn_rm,
mlp_dim=self.mlp_dim,
img_scaling=self.img_scaling_local,
grid_size_in=self.grid_size_rm,
grid_size_out=self.grid_size_rm,
num_layers=self.num_layers_rm,
batch_norm_cnn=self.batch_norm_cnn_rm,
non_lin_cnn=self.non_lin_cnn_rm,
final_embedding_dim=final_embedding_dim_rm,
noise_attention_dim=self.noise_attention_dim_rm,
skip_connection=self.skip_connection_rm)
self.final_embedding_dim_rm = self.rm_attention.final_embedding
self.output_dim = self.decoder_h_dim + self.final_embedding_dim_rm
elif not self.rm_vis_type:
self.output_dim = self.decoder_h_dim
else:
assert False, "`{}` not valid for `decoder_type`: Choose `goal`, 'attention`, or none".format(decoder_type)
self.final_output = make_mlp(
[self.output_dim, self.mlp_dim, self.input_dim],
activation_list=["relu", None],
dropout=dropout,
batch_norm=self.batch_norm)
self.spatial_embedding = nn.Linear(self.input_dim, self.embedding_dim)
if self.global_vis_type == "goal":
self.input_dim_decoder = self.self.embedding_dim * 2 + 1
else:
self.input_dim_decoder = self.embedding_dim
self.decoder = nn.LSTM(self.input_dim_decoder, self.decoder_h_dim)
def forward(self, last_pos, rel_pos, state_tuple, dist_to_goal=0, scene_img=None):
""" Calculates forward pass of MotionDecoder
Parameters:
obs_traj (tensor) - - Tensor of shape (obs_len, batch, 2)
state_tuple (tuple of tensors) - - Tuple with hidden state (1, batch, encoder_h_dim) and cell state tensor (1, batch, encoder_h_dim)
Returns:
output (tensor) - - Output of LSTM netwok for all time steps (obs_len, batch, encoder_h_dim)
final_h (tensor) - - Final hidden state of LSTM network (1, batch, encoder_h_dim)
"""
batch_size = rel_pos.size(0)
pred_traj_fake_rel = []
pred_traj_fake = []
softmax_list = []
final_pos_list = []
img_patch_list = []
final_pos_map_decoder_list = []
for t in range(self.seq_len):
decoder_input = self.spatial_embedding(rel_pos)
decoder_input = decoder_input.view(1, batch_size, self.embedding_dim)
if self.global_vis_type != "none":
distance_embeding = self.spatial_embedding(dist_to_goal)
time_tensor = -1 + 2 * torch.ones(1, decoder_input.size(1), 1) * t / self.seq_len
time_tensor = time_tensor.to(decoder_input)
decoder_input = torch.cat((decoder_input, distance_embeding, time_tensor), -1)
output, state_tuple = self.decoder(decoder_input, state_tuple)
if self.rm_vis_type == "attention":
final_emb, y_softmax, img_patch, noise = self.rm_attention(scene_img, last_pos, state_tuple[0])
else:
final_emb = torch.Tensor([]).to(state_tuple[0])
img_patch = []
input_final = torch.cat((state_tuple[0], final_emb), 2)
img_patch_list.append(img_patch)
# rel_pos = final_pos[0]
rel_pos = self.final_output(input_final)
rel_pos = rel_pos.squeeze(0)
curr_pos = rel_pos + last_pos
dist_to_goal = dist_to_goal - rel_pos
pred_traj_fake_rel.append(rel_pos.clone().view(batch_size, -1))
pred_traj_fake.append(curr_pos.clone().view(batch_size, -1))
last_pos = curr_pos
pred_traj_fake_rel = torch.stack(pred_traj_fake_rel, dim=0)
pred_traj_fake = torch.stack(pred_traj_fake, dim=0)
output = {"out_xy": pred_traj_fake,
"out_dxdy": pred_traj_fake_rel,
"h": state_tuple[0]}
if self.rm_vis_type == "attention":
output.update({"image_patches": torch.stack(img_patch_list, dim=0)})
return output
class EncoderPrediction(nn.Module):
"""Part of Discriminator"""
def __init__(
self, input_dim=2,
encoder_h_dim_d=128,
embedding_dim=64,
dropout=0.0,
channels_cnn=4,
grid_size=16,
num_layers_cnn=2,
batch_norm_cnn=True,
batch_norm=False,
dropout_cnn=0,
mlp_dim=32,
image_scaling=0.5,
non_lin_cnn='tanh',
visual_features = False):
super().__init__()
self.__dict__.update(locals())
del self.self
self.bottleneck_dim = int(grid_size / 2 ** (num_layers_cnn - 1)) ** 2 * channels_cnn * 2 ** (num_layers_cnn - 2)
activation = ['leakyrelu', None]
in_channels = 4
self.bottleneck_dim = int(grid_size / 2 ** (num_layers_cnn - 1)) ** 2
self.encoder_out = nn.Conv2d(channels_cnn * 2 ** (num_layers_cnn - 2), 1, kernel_size=(1, 1), stride=1)
self.leakyrelu = nn.LeakyReLU()
self.inputFeatures = make_mlp(
[self.embedding_dim + self.bottleneck_dim, mlp_dim, self.embedding_dim],
activation_list=['leakyrelu', None],
dropout=dropout)
self.encoder = nn.LSTM(self.embedding_dim, self.encoder_h_dim_d, dropout=dropout)
if self.visual_features:
self.CNN = CNN(channels_cnn=self.channels_cnn,
encoder_h_dim=self.encoder_h_dim_d,
mlp=self.mlp_dim,
need_decoder=False,
PhysFeature=True,
insert_trajectory=False,
grid_size_in=self.grid_size,
num_layers=self.num_layers_cnn,
dropout=self.dropout_cnn,
batch_norm=batch_norm_cnn,
non_lin_cnn=self.non_lin_cnn,
in_channels=in_channels,
)
self.spatial_embedding = nn.Linear(2, self.embedding_dim)
real_classifier_dims = [self.encoder_h_dim_d, mlp_dim, 1]
self.real_classifier = make_mlp(
real_classifier_dims,
activation_list=activation,
dropout=dropout)
def init_hidden(self, batch, obs_traj):
return (torch.zeros(1, batch, self.encoder_h_dim_d).to(obs_traj),
torch.zeros(1, batch, self.encoder_h_dim_d).to(obs_traj))
def forward(self, dxdy, img_patch, state_tuple):
"""
Inputs:
- last_pos: Tensor of shape (batch, 2)
- last_pos_rel: Tensor of shape (batch, 2)
- state_tuple: (hh, ch) each tensor of shape (num_layers, batch, h_dim)
Output:
- pred_traj_fake_rel: tensor of shape (self.seq_len, batch, 2)
- pred_traj_fake: tensor of shape (self.seq_len, batch, 2)
- state_tuple[0]: final hidden state
"""
embedded_pos = self.spatial_embedding(dxdy)
if self.visual_features:
l, batch, c, x, y = img_patch.size()
img_patch = img_patch.reshape(l * batch, c, x, y)
cnn_out = self.CNN(img_patch)
visual_features = self.leakyrelu(self.encoder_out(cnn_out["Features"]))
visual_features = visual_features.view(l, batch, -1)
encoder_input = torch.cat((embedded_pos, visual_features), -1)
encoder_input = self.inputFeatures(encoder_input)
else:
encoder_input = embedded_pos
output, input_classifier = self.encoder(encoder_input, state_tuple)
dynamic_score = self.real_classifier(input_classifier[0])
return dynamic_score
class get_gumbel_map(nn.Module):
def __init__(self, grid_size):
super(get_gumbel_map, self).__init__()
x = torch.arange(0, grid_size * 2 + 1)
x = x.unsqueeze(1)
X = x.repeat(1, grid_size * 2 + 1)
x1 = X - grid_size
x2 = x1.T
x1 = x1.unsqueeze(2)
x2 = x2.unsqueeze(2)
self.gumbel_map = torch.cat((x2, x1), 2).view(1, -1, 2)
def forward(self, batch_size):
gumbel_map = self.gumbel_map.repeat(batch_size, 1, 1).float()
gumbel_map = gumbel_map + torch.rand_like(gumbel_map)
return gumbel_map
### Gumbel Sampling ###
class GumbelSampler(nn.Module):
def __init__(self,
temp=1,
grid_size_out=16,
scaling=0.5,
force_hard=True,
):
super(GumbelSampler, self).__init__()
self.temp = temp
self.grid_size_out = grid_size_out
self.scaling = scaling
self.gumbelsoftmax = GumbelSoftmax(temp=self.temp)
self.gumbel_map = get_gumbel_map(grid_size=self.grid_size_out)
self.force_hard = force_hard
def forward(self, cnn_out):
"""
:param cnn_out:
:type cnn_out:
:return:
final_pos: Tensor with probability for each position
final_pos_map: final_pos tensor reshaped
y_softmax_gumbel: tensor with gumbel probabilities
y_softmax: tensor with probabilites
:rtype:
"""
batch_size, c, hh, w = cnn_out["PosMap"].size()
gumbel_map = self.gumbel_map(batch_size).to(cnn_out["PosMap"])
y_scores = cnn_out["PosMap"].view(batch_size, -1)
final_pos_map, y_softmax_gumbel, y_softmax = self.gumbelsoftmax(y_scores, force_hard=self.force_hard)
final_pos = torch.sum(gumbel_map * final_pos_map.unsqueeze(2), 1).unsqueeze(0)
final_pos_map = final_pos_map.view(batch_size, c, hh, w)
y_softmax_gumbel = y_softmax_gumbel.view(batch_size, c, hh, w)
y_softmax = y_softmax.view(batch_size, c, hh, w)
final_pos = final_pos * self.scaling
return final_pos, final_pos_map, y_softmax_gumbel, y_softmax, y_scores
class Patch_gen():
def __init__(self, img_scaling=0.5,
grid_size=16,
type_img="small_image",
):
self.__dict__.update(locals())
def get_patch(self, scene_image, last_pos):
scale = 1. / self.img_scaling
last_pos_np = last_pos.detach().cpu().numpy()
image_list = []
for k in range(len(scene_image)):
image = scene_image[k][self.type_img]
center = last_pos_np[k] * scale
x_center, y_center = center.astype(int)
cropped_img = image.crop(
(int(x_center - self.grid_size), int(y_center - self.grid_size), int(x_center + self.grid_size + 1),
int(y_center + self.grid_size + 1)))
cropped_img = -1 + torch.from_numpy(np.array(cropped_img) * 1.) * 2. / 256
position = torch.zeros((1, self.grid_size * 2 + 1, self.grid_size * 2 + 1, 1))
position[0, self.grid_size, self.grid_size, 0] = 1.
image = torch.cat((cropped_img.float().unsqueeze(0), position), dim=3)
image = image.permute(0, 3, 1, 2)
image_list.append(image.clone())
img = torch.cat(image_list)
img = img.to(last_pos)
return img
"""
Gumbel Softmax Sampler
Requires 2D input [batchsize, number of categories]
Does not support sinlge binary category. Use two dimensions with softmax instead.
"""
class GumbelSoftmax(nn.Module):
def __init__(self, hard=False, temp=None):
super(GumbelSoftmax, self).__init__()
self.hard = hard
self.gpu = False
self.temp = temp
def cuda(self):
self.gpu = True
def cpu(self):
self.gpu = False
def sample_gumbel(self, shape, eps=1e-10):
"""Sample from Gumbel(0, 1)"""
noise = torch.rand(shape)
noise.add_(eps).log_().neg_()
noise.add_(eps).log_().neg_()
if self.gpu:
return Variable(noise).cuda()
else:
return Variable(noise)
def sample_gumbel_like(self, template_tensor, eps=1e-10):
uniform_samples_tensor = template_tensor.clone().uniform_()
gumble_samples_tensor = - torch.log(eps - torch.log(uniform_samples_tensor + eps))
return gumble_samples_tensor
def gumbel_softmax_sample(self, alpha, temperature, eps=1e-10):
""" Draw a sample from the Gumbel-Softmax distribution"""
dim = len(alpha.size()) - 1
gumble_samples_tensor = self.sample_gumbel_like(alpha.data)
gumble_trick_log_prob_samples = alpha + gumble_samples_tensor
gumble_log_temp = gumble_trick_log_prob_samples / temperature
max_gumble, _ = gumble_log_temp.max(1)
soft_samples_gumble = F.softmax(gumble_log_temp - max_gumble.unsqueeze(1), dim)
soft_samples_gumble = torch.max(soft_samples_gumble, torch.ones_like(soft_samples_gumble).to(alpha) * eps)
soft_samples = F.softmax(alpha, dim)
return soft_samples_gumble, soft_samples
def gumbel_softmax(self, logits, temperature, hard=False):
"""Sample from the Gumbel-Softmax distribution and optionally discretize.
Args:
logits: [batch_size, n_class] unnormalized log-probs
temperature: non-negative scalar
hard: if True, take argmax, but differentiate w.r.t. soft sample y
Returns:
[batch_size, n_class] sample from the Gumbel-Softmax distribution.
If hard=True, then the returned sample will be one-hot, otherwise it will
be a probabilitiy distribution that sums to 1 across classes
"""
soft_samples_gumble, soft_samples = self.gumbel_softmax_sample(logits, temperature)
if hard:
_, max_value_indexes = soft_samples_gumble.data.max(1, keepdim=True)
y_hard = logits.data.clone().zero_().scatter_(1, max_value_indexes, 1)
y = y_hard - soft_samples_gumble.data + soft_samples_gumble
else:
y = soft_samples_gumble
return y, soft_samples_gumble, soft_samples
def forward(self, alpha, temp=None, force_hard=False):
if not temp:
if self.temp:
temp = self.temp
else:
temp = 1
if self.training and not force_hard:
return self.gumbel_softmax(alpha, temperature=temp, hard=False)
else:
return self.gumbel_softmax(alpha, temperature=temp, hard=True)
if __name__ == "__main__":
print("Test Encoder")
print(MotionEncoder())
print("Test Decoder")
print(RoutingModule())
print("Test AttentionRoutingModule")
print(AttentionRoutingModule())
print("Test AttentionGlobal")
print(AttentionGlobal())
print("Test GoalGlobal")
print(GoalGlobal() )
print("Test Encoder Discriminator")
print(EncoderPrediction())
|
[
"torch.nn.Dropout",
"torch.empty",
"torch.cat",
"torch.randn",
"torch.rand_like",
"numpy.arange",
"torch.arange",
"torch.nn.init.kaiming_normal_",
"torch.nn.init.xavier_uniform",
"torch.Tensor",
"torch.nn.Linear",
"torch.zeros",
"torch.nn.LSTM",
"torch.log",
"torch.nn.Dropout2d",
"torch.nn.Tanh",
"torch.nn.Conv2d",
"torch.nn.BatchNorm1d",
"torch.nn.BatchNorm2d",
"torch.rand",
"torch.nn.MaxPool2d",
"torch.nn.LeakyReLU",
"torch.nn.Sigmoid",
"torch.ones_like",
"torch.nn.ReLU",
"torch.stack",
"torch.nn.ConvTranspose2d",
"torch.nn.Sequential",
"torch.nn.functional.softmax",
"numpy.array"
] |
[((1379, 1401), 'torch.nn.Sequential', 'nn.Sequential', (['*layers'], {}), '(*layers)\n', (1392, 1401), True, 'import torch.nn as nn\n'), ((1850, 1865), 'torch.nn.Sequential', 'nn.Sequential', ([], {}), '()\n', (1863, 1865), True, 'import torch.nn as nn\n'), ((3321, 3336), 'torch.nn.Sequential', 'nn.Sequential', ([], {}), '()\n', (3334, 3336), True, 'import torch.nn as nn\n'), ((5461, 5476), 'torch.nn.Sequential', 'nn.Sequential', ([], {}), '()\n', (5474, 5476), True, 'import torch.nn as nn\n'), ((6126, 6154), 'numpy.arange', 'np.arange', (['(2)', '(num_layers + 1)'], {}), '(2, num_layers + 1)\n', (6135, 6154), True, 'import numpy as np\n'), ((9693, 9707), 'torch.empty', 'torch.empty', (['(1)'], {}), '(1)\n', (9704, 9707), False, 'import torch\n'), ((9716, 9730), 'torch.empty', 'torch.empty', (['(1)'], {}), '(1)\n', (9727, 9730), False, 'import torch\n'), ((16600, 16614), 'torch.Tensor', 'torch.Tensor', ([], {}), '()\n', (16612, 16614), False, 'import torch\n'), ((17771, 17785), 'torch.Tensor', 'torch.Tensor', ([], {}), '()\n', (17783, 17785), False, 'import torch\n'), ((19230, 19244), 'torch.empty', 'torch.empty', (['(1)'], {}), '(1)\n', (19241, 19244), False, 'import torch\n'), ((23135, 23180), 'torch.nn.Linear', 'nn.Linear', (['self.input_dim', 'self.embedding_dim'], {}), '(self.input_dim, self.embedding_dim)\n', (23144, 23180), True, 'import torch.nn as nn\n'), ((23389, 23440), 'torch.nn.LSTM', 'nn.LSTM', (['self.input_dim_decoder', 'self.decoder_h_dim'], {}), '(self.input_dim_decoder, self.decoder_h_dim)\n', (23396, 23440), True, 'import torch.nn as nn\n'), ((25747, 25785), 'torch.stack', 'torch.stack', (['pred_traj_fake_rel'], {'dim': '(0)'}), '(pred_traj_fake_rel, dim=0)\n', (25758, 25785), False, 'import torch\n'), ((25811, 25845), 'torch.stack', 'torch.stack', (['pred_traj_fake'], {'dim': '(0)'}), '(pred_traj_fake, dim=0)\n', (25822, 25845), False, 'import torch\n'), ((27024, 27112), 'torch.nn.Conv2d', 'nn.Conv2d', (['(channels_cnn * 2 ** (num_layers_cnn - 
2))', '(1)'], {'kernel_size': '(1, 1)', 'stride': '(1)'}), '(channels_cnn * 2 ** (num_layers_cnn - 2), 1, kernel_size=(1, 1),\n stride=1)\n', (27033, 27112), True, 'import torch.nn as nn\n'), ((27134, 27148), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {}), '()\n', (27146, 27148), True, 'import torch.nn as nn\n'), ((27376, 27442), 'torch.nn.LSTM', 'nn.LSTM', (['self.embedding_dim', 'self.encoder_h_dim_d'], {'dropout': 'dropout'}), '(self.embedding_dim, self.encoder_h_dim_d, dropout=dropout)\n', (27383, 27442), True, 'import torch.nn as nn\n'), ((28181, 28213), 'torch.nn.Linear', 'nn.Linear', (['(2)', 'self.embedding_dim'], {}), '(2, self.embedding_dim)\n', (28190, 28213), True, 'import torch.nn as nn\n'), ((29977, 30011), 'torch.arange', 'torch.arange', (['(0)', '(grid_size * 2 + 1)'], {}), '(0, grid_size * 2 + 1)\n', (29989, 30011), False, 'import torch\n'), ((33247, 33268), 'torch.cat', 'torch.cat', (['image_list'], {}), '(image_list)\n', (33256, 33268), False, 'import torch\n'), ((33884, 33901), 'torch.rand', 'torch.rand', (['shape'], {}), '(shape)\n', (33894, 33901), False, 'import torch\n'), ((35002, 35023), 'torch.nn.functional.softmax', 'F.softmax', (['alpha', 'dim'], {}), '(alpha, dim)\n', (35011, 35023), True, 'import torch.nn.functional as F\n'), ((842, 868), 'torch.nn.Linear', 'nn.Linear', (['dim_in', 'dim_out'], {}), '(dim_in, dim_out)\n', (851, 868), True, 'import torch.nn as nn\n'), ((1906, 1957), 'torch.nn.Conv2d', 'nn.Conv2d', (['input_dim', 'output_dim', 'filter_size', '(1)', '(1)'], {}), '(input_dim, output_dim, filter_size, 1, 1)\n', (1915, 1957), True, 'import torch.nn as nn\n'), ((2477, 2563), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(2, 2)', 'stride': '(2, 2)', 'dilation': '(1, 1)', 'ceil_mode': '(False)'}), '(kernel_size=(2, 2), stride=(2, 2), dilation=(1, 1), ceil_mode=\n False)\n', (2489, 2563), True, 'import torch.nn as nn\n'), ((3679, 3740), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['input_dim', 'output_dim', 
'filter', '(2)', 'padding'], {}), '(input_dim, output_dim, filter, 2, padding)\n', (3697, 3740), True, 'import torch.nn as nn\n'), ((7040, 7055), 'torch.nn.Sequential', 'nn.Sequential', ([], {}), '()\n', (7053, 7055), True, 'import torch.nn as nn\n'), ((11713, 11748), 'torch.nn.Linear', 'nn.Linear', (['input_dim', 'embedding_dim'], {}), '(input_dim, embedding_dim)\n', (11722, 11748), True, 'import torch.nn as nn\n'), ((11776, 11813), 'torch.nn.LSTM', 'nn.LSTM', (['embedding_dim', 'encoder_h_dim'], {}), '(embedding_dim, encoder_h_dim)\n', (11783, 11813), True, 'import torch.nn as nn\n'), ((11855, 11888), 'torch.nn.LSTM', 'nn.LSTM', (['input_dim', 'encoder_h_dim'], {}), '(input_dim, encoder_h_dim)\n', (11862, 11888), True, 'import torch.nn as nn\n'), ((15813, 15867), 'torch.randn', 'torch.randn', (['(1, batch_size, self.noise_attention_dim)'], {}), '((1, batch_size, self.noise_attention_dim))\n', (15824, 15867), False, 'import torch\n'), ((25222, 25263), 'torch.cat', 'torch.cat', (['(state_tuple[0], final_emb)', '(2)'], {}), '((state_tuple[0], final_emb), 2)\n', (25231, 25263), False, 'import torch\n'), ((29511, 29557), 'torch.cat', 'torch.cat', (['(embedded_pos, visual_features)', '(-1)'], {}), '((embedded_pos, visual_features), -1)\n', (29520, 29557), False, 'import torch\n'), ((30392, 30419), 'torch.rand_like', 'torch.rand_like', (['gumbel_map'], {}), '(gumbel_map)\n', (30407, 30419), False, 'import torch\n'), ((32925, 32992), 'torch.zeros', 'torch.zeros', (['(1, self.grid_size * 2 + 1, self.grid_size * 2 + 1, 1)'], {}), '((1, self.grid_size * 2 + 1, self.grid_size * 2 + 1, 1))\n', (32936, 32992), False, 'import torch\n'), ((919, 942), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['dim_out'], {}), '(dim_out)\n', (933, 942), True, 'import torch.nn as nn\n'), ((1003, 1012), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (1010, 1012), True, 'import torch.nn as nn\n'), ((1326, 1347), 'torch.nn.Dropout', 'nn.Dropout', ([], {'p': 'dropout'}), '(p=dropout)\n', (1336, 1347), 
True, 'import torch.nn as nn\n'), ((2024, 2050), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['output_dim'], {}), '(output_dim)\n', (2038, 2050), True, 'import torch.nn as nn\n'), ((2128, 2137), 'torch.nn.Tanh', 'nn.Tanh', ([], {}), '()\n', (2135, 2137), True, 'import torch.nn as nn\n'), ((2626, 2647), 'torch.nn.Dropout2d', 'nn.Dropout2d', (['dropout'], {}), '(dropout)\n', (2638, 2647), True, 'import torch.nn as nn\n'), ((8975, 9029), 'torch.nn.init.kaiming_normal_', 'torch.nn.init.kaiming_normal_', (['m.weight'], {'mode': '"""fan_in"""'}), "(m.weight, mode='fan_in')\n", (9004, 9029), False, 'import torch\n'), ((9329, 9367), 'torch.nn.init.xavier_uniform', 'torch.nn.init.xavier_uniform', (['m.weight'], {}), '(m.weight)\n', (9357, 9367), False, 'import torch\n'), ((10438, 10474), 'torch.cat', 'torch.cat', (['(traj_enc, in_decoder)', '(1)'], {}), '((traj_enc, in_decoder), 1)\n', (10447, 10474), False, 'import torch\n'), ((10659, 10697), 'torch.cat', 'torch.cat', (['(social_enc, in_decoder)', '(1)'], {}), '((social_enc, in_decoder), 1)\n', (10668, 10697), False, 'import torch\n'), ((15924, 15977), 'torch.rand', 'torch.rand', (['(1, batch_size, self.noise_attention_dim)'], {}), '((1, batch_size, self.noise_attention_dim))\n', (15934, 15977), False, 'import torch\n'), ((24782, 24844), 'torch.cat', 'torch.cat', (['(decoder_input, distance_embeding, time_tensor)', '(-1)'], {}), '((decoder_input, distance_embeding, time_tensor), -1)\n', (24791, 24844), False, 'import torch\n'), ((30214, 30236), 'torch.cat', 'torch.cat', (['(x2, x1)', '(2)'], {}), '((x2, x1), 2)\n', (30223, 30236), False, 'import torch\n'), ((1075, 1084), 'torch.nn.Tanh', 'nn.Tanh', ([], {}), '()\n', (1082, 1084), True, 'import torch.nn as nn\n'), ((2217, 2226), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (2224, 2226), True, 'import torch.nn as nn\n'), ((3843, 3869), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['output_dim'], {}), '(output_dim)\n', (3857, 3869), True, 'import torch.nn as nn\n'), ((3956, 
3965), 'torch.nn.Tanh', 'nn.Tanh', ([], {}), '()\n', (3963, 3965), True, 'import torch.nn as nn\n'), ((4250, 4271), 'torch.nn.Dropout2d', 'nn.Dropout2d', (['dropout'], {}), '(dropout)\n', (4262, 4271), True, 'import torch.nn as nn\n'), ((11964, 12005), 'torch.zeros', 'torch.zeros', (['(1)', 'batch', 'self.encoder_h_dim'], {}), '(1, batch, self.encoder_h_dim)\n', (11975, 12005), False, 'import torch\n'), ((12032, 12073), 'torch.zeros', 'torch.zeros', (['(1)', 'batch', 'self.encoder_h_dim'], {}), '(1, batch, self.encoder_h_dim)\n', (12043, 12073), False, 'import torch\n'), ((26069, 26103), 'torch.stack', 'torch.stack', (['img_patch_list'], {'dim': '(0)'}), '(img_patch_list, dim=0)\n', (26080, 26103), False, 'import torch\n'), ((28486, 28529), 'torch.zeros', 'torch.zeros', (['(1)', 'batch', 'self.encoder_h_dim_d'], {}), '(1, batch, self.encoder_h_dim_d)\n', (28497, 28529), False, 'import torch\n'), ((28560, 28603), 'torch.zeros', 'torch.zeros', (['(1)', 'batch', 'self.encoder_h_dim_d'], {}), '(1, batch, self.encoder_h_dim_d)\n', (28571, 28603), False, 'import torch\n'), ((34271, 34310), 'torch.log', 'torch.log', (['(uniform_samples_tensor + eps)'], {}), '(uniform_samples_tensor + eps)\n', (34280, 34310), False, 'import torch\n'), ((1152, 1166), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {}), '()\n', (1164, 1166), True, 'import torch.nn as nn\n'), ((2311, 2325), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {}), '()\n', (2323, 2325), True, 'import torch.nn as nn\n'), ((4054, 4063), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (4061, 4063), True, 'import torch.nn as nn\n'), ((25128, 25144), 'torch.Tensor', 'torch.Tensor', (['[]'], {}), '([])\n', (25140, 25144), False, 'import torch\n'), ((34925, 34961), 'torch.ones_like', 'torch.ones_like', (['soft_samples_gumble'], {}), '(soft_samples_gumble)\n', (34940, 34961), False, 'import torch\n'), ((1232, 1244), 'torch.nn.Sigmoid', 'nn.Sigmoid', ([], {}), '()\n', (1242, 1244), True, 'import torch.nn as nn\n'), ((4157, 4171), 
'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {}), '()\n', (4169, 4171), True, 'import torch.nn as nn\n'), ((32862, 32883), 'numpy.array', 'np.array', (['cropped_img'], {}), '(cropped_img)\n', (32870, 32883), True, 'import numpy as np\n')]
|
import pytest
from pps import logic
from pps import config
import flexmock
import os
@pytest.mark.parametrize(
    ['width', 'height', 'expected_format'],
    [
        (420, 595, "A5"),
        (595, 420, "A5"),
        (595, 842, "A4"),
        (842, 595, "A4"),
        (842, 1191, "A3"),
        (200, 200, config.PPS_CONFIG.UNKNOWN_PAPER_FORMAT),
    ],
)
def test_get_format_from_size(width, height, expected_format):
    """Paper formats are detected in both orientations; unknown sizes fall back."""
    assert logic.get_format_from_size(width, height) == expected_format
@pytest.mark.parametrize(
    ['filename', 'expected_name'],
    [
        ("not_existing_", config.PPS_CONFIG.UNKNOWN_PRINT_JOB_NAME),
        ("A4.test", 'Document'),
    ],
)
def test_get_job_name(filename, expected_name):
    """Job names are read from fixture files; missing files yield the fallback."""
    fake_logger = flexmock(critical=lambda message: None)
    fixture_dir = os.path.dirname(os.path.abspath(__file__)) + "/fixtures/"
    result = logic.get_print_job_name(fixture_dir + filename, fake_logger)
    assert result == expected_name
def test_get_print_job_name():
    """A plain, non-existing log file yields the UNKNOWN_PRINT_JOB_NAME fallback."""
    fake_logger = flexmock(critical=lambda message: None)
    result = logic.get_print_job_name('jj.log', fake_logger)
    assert result == config.PPS_CONFIG.UNKNOWN_PRINT_JOB_NAME
def test_get_file_format_unknown():
    """Invalid path/file arguments yield the UNKNOWN_PAPER_FORMAT fallback.

    BUGFIX: this test used to be named ``test_get_file_format`` and was
    silently shadowed (never collected by pytest) by the parametrized test
    of the same name defined right below it; renamed so it actually runs.
    """
    fake = flexmock(critical=lambda message: None)
    out = logic.get_file_format("aa", "aa", fake)
    assert out == config.PPS_CONFIG.UNKNOWN_PAPER_FORMAT
@pytest.mark.parametrize(
    ['filename', 'expected_format'],
    [
        ("A4.test", "A4"),
    ],
)
def test_get_file_format(filename, expected_format):
    """The paper format is extracted from a real fixture file."""
    fixture_dir = os.path.dirname(os.path.abspath(__file__)) + "/fixtures/"
    fake_logger = flexmock(critical=lambda message: None, warning=lambda message: None)
    result = logic.get_file_format(fixture_dir, filename, fake_logger)
    assert result == expected_format
@pytest.mark.parametrize(
    ['filename', 'expected_pages'],
    [
        ("A4_4.test", "4"),
        ("A4.test", "1"),
        ("Not_existing_file", config.PPS_CONFIG.UNKNOWN_PAGE_COUNT),
    ],
)
def test_get_number_of_pages(filename, expected_pages):
    """Page counts come from fixture files; missing files yield the fallback."""
    fixture_dir = os.path.dirname(os.path.abspath(__file__)) + "/fixtures/"
    fake_logger = flexmock(critical=lambda message: None)
    result = logic.get_number_of_pages(fixture_dir + filename, fake_logger)
    assert result == expected_pages
@pytest.mark.parametrize(
    ['filename', 'expected_job_id'],
    [
        ("Not_existing_file", config.PPS_CONFIG.UNKNOWN_PRINT_JOB_ID),
    ],
)
def test_get_print_job_id(filename, expected_job_id):
    """A missing spool file yields the UNKNOWN_PRINT_JOB_ID fallback."""
    fixture_dir = os.path.dirname(os.path.abspath(__file__)) + "/fixtures/"
    fake_logger = flexmock(critical=lambda message: None)
    result = logic.get_print_job_id(fixture_dir + filename, fake_logger)
    assert result == expected_job_id
|
[
"pps.logic.get_format_from_size",
"os.path.abspath",
"pps.logic.get_print_job_name",
"pps.logic.get_file_format",
"flexmock",
"pps.logic.get_number_of_pages",
"pps.logic.get_print_job_id",
"pytest.mark.parametrize"
] |
[((88, 282), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["['a', 'b', 'f']", "[(420, 595, 'A5'), (595, 420, 'A5'), (595, 842, 'A4'), (842, 595, 'A4'), (\n 842, 1191, 'A3'), (200, 200, config.PPS_CONFIG.UNKNOWN_PAPER_FORMAT)]"], {}), "(['a', 'b', 'f'], [(420, 595, 'A5'), (595, 420, 'A5'\n ), (595, 842, 'A4'), (842, 595, 'A4'), (842, 1191, 'A3'), (200, 200,\n config.PPS_CONFIG.UNKNOWN_PAPER_FORMAT)])\n", (111, 282), False, 'import pytest\n'), ((414, 557), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["['file', 'output_to_test']", "[('not_existing_', config.PPS_CONFIG.UNKNOWN_PRINT_JOB_NAME), ('A4.test',\n 'Document')]"], {}), "(['file', 'output_to_test'], [('not_existing_',\n config.PPS_CONFIG.UNKNOWN_PRINT_JOB_NAME), ('A4.test', 'Document')])\n", (437, 557), False, 'import pytest\n'), ((1259, 1331), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["['file', 'format_to_test']", "[('A4.test', 'A4')]"], {}), "(['file', 'format_to_test'], [('A4.test', 'A4')])\n", (1282, 1331), False, 'import pytest\n'), ((1626, 1791), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["['file', 'page_count_to_test']", "[('A4_4.test', '4'), ('A4.test', '1'), ('Not_existing_file', config.\n PPS_CONFIG.UNKNOWN_PAGE_COUNT)]"], {}), "(['file', 'page_count_to_test'], [('A4_4.test', '4'),\n ('A4.test', '1'), ('Not_existing_file', config.PPS_CONFIG.\n UNKNOWN_PAGE_COUNT)])\n", (1649, 1791), False, 'import pytest\n'), ((2100, 2224), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["['file', 'print_job_to_test']", "[('Not_existing_file', config.PPS_CONFIG.UNKNOWN_PRINT_JOB_ID)]"], {}), "(['file', 'print_job_to_test'], [(\n 'Not_existing_file', config.PPS_CONFIG.UNKNOWN_PRINT_JOB_ID)])\n", (2123, 2224), False, 'import pytest\n'), ((359, 391), 'pps.logic.get_format_from_size', 'logic.get_format_from_size', (['a', 'b'], {}), '(a, b)\n', (385, 391), False, 'from pps import logic\n'), ((626, 665), 'flexmock', 'flexmock', ([], {'critical': '(lambda message: 
None)'}), '(critical=lambda message: None)\n', (634, 665), False, 'import flexmock\n'), ((778, 824), 'pps.logic.get_print_job_name', 'logic.get_print_job_name', (['full_path_file', 'fake'], {}), '(full_path_file, fake)\n', (802, 824), False, 'from pps import logic\n'), ((902, 941), 'flexmock', 'flexmock', ([], {'critical': '(lambda message: None)'}), '(critical=lambda message: None)\n', (910, 941), False, 'import flexmock\n'), ((972, 1008), 'pps.logic.get_print_job_name', 'logic.get_print_job_name', (['file', 'fake'], {}), '(file, fake)\n', (996, 1008), False, 'from pps import logic\n'), ((1109, 1148), 'flexmock', 'flexmock', ([], {'critical': '(lambda message: None)'}), '(critical=lambda message: None)\n', (1117, 1148), False, 'import flexmock\n'), ((1159, 1198), 'pps.logic.get_file_format', 'logic.get_file_format', (['"""aa"""', '"""aa"""', 'fake'], {}), "('aa', 'aa', fake)\n", (1180, 1198), False, 'from pps import logic\n'), ((1470, 1539), 'flexmock', 'flexmock', ([], {'critical': '(lambda message: None)', 'warning': '(lambda message: None)'}), '(critical=lambda message: None, warning=lambda message: None)\n', (1478, 1539), False, 'import flexmock\n'), ((1550, 1589), 'pps.logic.get_file_format', 'logic.get_file_format', (['path', 'file', 'fake'], {}), '(path, file, fake)\n', (1571, 1589), False, 'from pps import logic\n'), ((1967, 2006), 'flexmock', 'flexmock', ([], {'critical': '(lambda message: None)'}), '(critical=lambda message: None)\n', (1975, 2006), False, 'import flexmock\n'), ((2017, 2059), 'pps.logic.get_number_of_pages', 'logic.get_number_of_pages', (['full_path', 'fake'], {}), '(full_path, fake)\n', (2042, 2059), False, 'from pps import logic\n'), ((2390, 2429), 'flexmock', 'flexmock', ([], {'critical': '(lambda message: None)'}), '(critical=lambda message: None)\n', (2398, 2429), False, 'import flexmock\n'), ((2440, 2479), 'pps.logic.get_print_job_id', 'logic.get_print_job_id', (['full_path', 'fake'], {}), '(full_path, fake)\n', (2462, 2479), False, 
'from pps import logic\n'), ((693, 718), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (708, 718), False, 'import os\n'), ((1417, 1442), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1432, 1442), False, 'import os\n'), ((1886, 1911), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1901, 1911), False, 'import os\n'), ((2309, 2334), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (2324, 2334), False, 'import os\n')]
|
"""GENIE SP/BPC cBioPortal exporter CLI"""
import argparse
import synapseclient
from .bpc_config import Brca, Crc, Nsclc
from .sp_config import Akt1, Erbb2, Fgfr4
# Maps the sponsored-project code accepted on the command line ("sp"
# argument) to the exporter class that implements it.
# NOTE(review): 'ERRB2' looks like a typo of the gene symbol 'ERBB2'
# (the mapped class is Erbb2) -- confirm before renaming, since the key
# is part of the public CLI interface.
BPC_MAPPING = {"NSCLC": Nsclc,
               'CRC': Crc,
               'BrCa': Brca,
               'AKT1': Akt1,
               'ERRB2': Erbb2,
               'FGFR4': Fgfr4}
def main():
    """Parse CLI arguments, log in to Synapse, and run the selected exporter."""
    arg_parser = argparse.ArgumentParser(description='Run GENIE sponsored projects')
    arg_parser.add_argument(
        "sp",
        type=str,
        help='Specify sponsored project to run',
        choices=BPC_MAPPING.keys(),
    )
    arg_parser.add_argument(
        "cBioPath",
        type=str,
        help='Specify path to cbio: must do '
             '`git clone https://github.com/cBioPortal/cbioportal.git`',
    )
    arg_parser.add_argument("release", type=str, help='Specify bpc release')
    arg_parser.add_argument(
        "--staging",
        action='store_true',
        help="If true, files aren't uploaded onto synapse",
    )
    cli_args = arg_parser.parse_args()
    syn = synapseclient.login()
    # Instantiate the exporter for the chosen project and run it.
    exporter_cls = BPC_MAPPING[cli_args.sp]
    exporter = exporter_cls(syn, cli_args.cBioPath, release=cli_args.release,
                             staging=cli_args.staging)
    exporter.run()
# Script entry point: only run the exporter when invoked directly.
if __name__ == '__main__':
    main()
|
[
"synapseclient.login",
"argparse.ArgumentParser"
] |
[((386, 453), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Run GENIE sponsored projects"""'}), "(description='Run GENIE sponsored projects')\n", (409, 453), False, 'import argparse\n'), ((1076, 1097), 'synapseclient.login', 'synapseclient.login', ([], {}), '()\n', (1095, 1097), False, 'import synapseclient\n')]
|
"""Core Classes for preprocessing"""
from typing import Callable, Iterable
import logging
import tensorflow as tf
from deepr.utils.field import Field
from deepr.prepros import base
from deepr.layers import Layer
LOGGER = logging.getLogger(__name__)
class Map(base.Prepro):
    """Map a function on each element of a tf.data.Dataset.

    A :class:`~Map` instance applies a ``map_func`` to all elements of a
    dataset. By default, elements are expected to be dictionaries. You
    can set ``on_dict=False`` if your dataset does not yield
    dictionaries.

    If elements are dictionaries, you can use the additional argument
    ``update`` to choose to update dictionaries instead of overriding
    them.

    NOTE: If ``map_func`` is a :class:`~deepr.layers.Layer`, it directly uses ``forward``
    or ``forward_as_dict`` to avoid inspection overhead from the
    ``Layer.__call__`` method.

    WARNING: if ``map_func`` is a ``Layer``, the ``mode`` will not be
    forwarded by the ``Map.apply()`` call, and the default ``None`` will
    always be used. This is intended to keep the signature of the
    generic ``map_func`` in line with the ``tf.Dataset.map`` method.

    If you wish to use a :class:`~deepr.layers.Layer` with a given ``mode``, you can do

    >>> from functools import partial
    >>> from deepr import readers
    >>> from deepr.layers import Sum
    >>> from deepr.prepros import Map
    >>> layer = Sum()
    >>> prepro_fn = Map(partial(layer.forward_as_dict, mode=tf.estimator.ModeKeys.TRAIN))

    For example, by setting `update=True` (DEFAULT behavior)

    >>> def gen():
    ...     yield {"a": 0}
    >>> dataset = tf.data.Dataset.from_generator(gen, {"a": tf.int32}, {"a": tf.TensorShape([])})
    >>> list(readers.from_dataset(dataset))
    [{'a': 0}]
    >>> def map_func(x):
    ...     return {"b": x["a"] + 1}
    >>> prepro_fn = Map(map_func, update=True)
    >>> list(readers.from_dataset(prepro_fn(dataset)))
    [{'a': 0, 'b': 1}]

    On the other hand, ``update=False`` yields the output of the
    ``map_func``

    >>> prepro_fn = Map(map_func, update=False)
    >>> list(readers.from_dataset(prepro_fn(dataset)))
    [{'b': 1}]

    Because some preprocessing pipelines behave differently depending
    on the mode (TRAIN, EVAL, PREDICT), an optional argument can be
    provided. By setting modes, you select the modes on which the map
    transformation should apply. For example:

    >>> prepro_fn = Map(map_func, modes=[tf.estimator.ModeKeys.TRAIN])
    >>> list(readers.from_dataset(prepro_fn(dataset, tf.estimator.ModeKeys.TRAIN)))
    [{'a': 0, 'b': 1}]
    >>> list(readers.from_dataset(prepro_fn(dataset, tf.estimator.ModeKeys.PREDICT)))
    [{'a': 0}]

    If the mode is not given at runtime, the preprocessing is applied.

    >>> list(readers.from_dataset(prepro_fn(dataset)))
    [{'a': 0, 'b': 1}]

    Attributes
    ----------
    map_func : Callable[[Any], Any]
        Function to map to each element
    modes : Iterable[str], Optional
        Active modes for the map (will skip modes not in modes).
        Default is None (all modes are considered active modes).
    num_parallel_calls : int
        Number of threads.
    on_dict : bool
        If True (default), assumes dataset yields dictionaries
    update : bool
        If True (default), combine element and map_func(element)
    """

    def __init__(
        self,
        map_func: Callable,
        on_dict: bool = True,
        update: bool = True,
        modes: Iterable[str] = None,
        num_parallel_calls: int = None,
    ):
        super().__init__()
        self.map_func = map_func
        self.on_dict = on_dict
        self.update = update
        self.modes = modes
        self.num_parallel_calls = num_parallel_calls
        # ``update`` merges dictionaries, which makes no sense for non-dict elements.
        if self.update and not self.on_dict:
            raise ValueError("update=True but on_dict=False (incoherent)")

    def __repr__(self) -> str:
        return f"{type(self).__name__}({self.map_func})"

    @property
    def tf_map_func(self):
        """Return final map function."""
        func = self.map_func
        if isinstance(func, Layer):
            # Use forward / forward_as_dict directly to skip the inspection
            # overhead of Layer.__call__.
            func = func.forward_as_dict if self.on_dict else func.forward
        if not self.update:
            return func

        def _merge(element):
            # Keep the incoming keys and overlay the mapped output.
            return {**element, **func(element)}

        return _merge

    def apply(self, dataset: tf.data.Dataset, mode: str = None):
        skipped = mode is not None and self.modes is not None and mode not in self.modes
        if skipped:
            LOGGER.info(f"Not applying {self} (mode={mode})")
            return dataset
        return dataset.map(self.tf_map_func, num_parallel_calls=self.num_parallel_calls)
class Filter(base.Prepro):
    """Filter a dataset keeping only elements on which predicate is True

    A :class:`~Filter` instance applies a ``predicate`` to all elements of a
    dataset and keeps only element for which predicate returns True.

    By default, elements are expected to be dictionaries. You can set
    ``on_dict=False`` if your dataset does not yield dictionaries.

    Because some preprocessing pipelines behave differently depending
    on the mode (TRAIN, EVAL, PREDICT), an optional argument can be
    provided. By setting modes, you select the modes on which the map
    transformation should apply. For example:

    >>> from deepr import readers
    >>> from deepr.prepros import Filter
    >>> def gen():
    ...     yield {"a": 0}
    ...     yield {"a": 1}
    >>> raw_dataset = tf.data.Dataset.from_generator(gen, {"a": tf.int32}, {"a": tf.TensorShape([])})
    >>> list(readers.from_dataset(raw_dataset))
    [{'a': 0}, {'a': 1}]
    >>> def predicate(x):
    ...     return {"b": tf.equal(x["a"], 0)}
    >>> prepro_fn = Filter(predicate, modes=[tf.estimator.ModeKeys.TRAIN])
    >>> raw_dataset = tf.data.Dataset.from_generator(gen, {"a": tf.int32}, {"a": tf.TensorShape([])})
    >>> dataset = prepro_fn(raw_dataset, tf.estimator.ModeKeys.TRAIN)
    >>> list(readers.from_dataset(dataset))
    [{'a': 0}]
    >>> dataset = prepro_fn(raw_dataset, tf.estimator.ModeKeys.PREDICT)
    >>> list(readers.from_dataset(dataset))
    [{'a': 0}, {'a': 1}]

    If the mode is not given at runtime, the preprocessing is applied.

    >>> dataset = prepro_fn(raw_dataset)
    >>> list(readers.from_dataset(dataset))
    [{'a': 0}]

    Attributes
    ----------
    predicate : Callable
        Predicate function, returns either a tf.bool or a dictionary
        with one key.
    on_dict : bool, Optional
        If True (default), assumes dataset yields dictionaries
    modes : Iterable[str], Optional
        Active modes for the map (will skip modes not in modes).
        Default is None (all modes are considered active modes).
    """

    def __init__(self, predicate: Callable, on_dict: bool = True, modes: Iterable[str] = None):
        super().__init__()
        self.predicate = predicate
        self.on_dict = on_dict
        self.modes = modes

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self.predicate})"

    @property
    def tf_predicate(self):
        """Return final predicate function."""
        predicate = self.predicate
        if isinstance(predicate, Layer):
            if predicate.n_out != 1:
                msg = f"{predicate} has n_out = {predicate.n_out} (unable to retrieve predicate from layer outputs)"
                raise ValueError(msg)
            # BUGFIX: the previous code was
            #   return lambda x: forward_as_dict(x)[outputs] if self.on_dict else predicate.forward
            # The conditional expression binds *inside* the lambda body, so for
            # on_dict=False the lambda returned the (always truthy) bound method
            # ``predicate.forward`` itself instead of calling it. Split the
            # branches so the correct callable is returned in both cases.
            if self.on_dict:
                return lambda x: predicate.forward_as_dict(x)[predicate.outputs]
            return predicate.forward
        if self.on_dict:
            # Dict-yielding dataset with a plain callable: the predicate returns
            # a one-key dict; extract that single value as the boolean.
            return lambda x: list(predicate(x).values())[0]
        return predicate

    def apply(self, dataset: tf.data.Dataset, mode: str = None):
        if mode is not None and self.modes is not None and mode not in self.modes:
            LOGGER.info(f"Not applying {self} (mode={mode})")
            return dataset
        return dataset.filter(self.tf_predicate)
class Shuffle(base.Prepro):
    """Randomly shuffles the elements of a dataset.

    Attributes
    ----------
    buffer_size : int
        Buffer size for the shuffle buffer
    modes : Iterable[str], Optional
        Active modes for the map (will skip modes not in modes).
        Default is None (all modes are considered active modes).
    seed : tf.int64, Optional
        Seed forwarded to ``tf.data.Dataset.shuffle`` for reproducibility.
    reshuffle_each_iteration : bool, Optional
        Forwarded to ``tf.data.Dataset.shuffle``.
    """

    def __init__(
        self,
        buffer_size: int,
        modes: Iterable[str] = None,
        seed: tf.int64 = None,
        reshuffle_each_iteration: bool = None,
    ):
        super().__init__()
        self.buffer_size = buffer_size
        self.modes = modes
        self.seed = seed
        self.reshuffle_each_iteration = reshuffle_each_iteration

    def __repr__(self) -> str:
        return f"{type(self).__name__}({self.buffer_size})"

    def apply(self, dataset: tf.data.Dataset, mode: str = None):
        """Shuffle ``dataset``; no-op when ``mode`` is not an active mode."""
        is_active = mode is None or self.modes is None or mode in self.modes
        if not is_active:
            LOGGER.info(f"Not applying {self} (mode={mode})")
            return dataset
        return dataset.shuffle(
            self.buffer_size, seed=self.seed, reshuffle_each_iteration=self.reshuffle_each_iteration
        )
class Repeat(base.Prepro):
    """Repeats a dataset so each original value is seen count times.

    Attributes
    ----------
    count : int
        Number of dataset repeat, if None or -1, repeat forever.
    modes : Iterable[str], Optional
        Active modes for the map (will skip modes not in modes).
        Default is None (all modes are considered active modes).
    """

    def __init__(self, count: int = None, modes: Iterable[str] = None):
        super().__init__()
        self.count = count
        self.modes = modes

    def __repr__(self) -> str:
        return f"{type(self).__name__}({self.count})"

    def apply(self, dataset: tf.data.Dataset, mode: str = None):
        """Repeat ``dataset``; no-op when ``mode`` is not an active mode."""
        should_skip = mode is not None and self.modes is not None and mode not in self.modes
        if should_skip:
            LOGGER.info(f"Not applying {self} (mode={mode})")
            return dataset
        return dataset.repeat(self.count)
class PaddedBatch(base.Prepro):
    """Combines consecutive elements of a dataset into padded batches.

    NOTE: this applies on dataset yielding dictionaries ONLY.

    If you want to create padded batches from other structures, you
    need to create your own padded batch prepro wrapping the tensorflow
    implementation. For example::

        @deepr.prepros.prepro
        def PaddedBatchDefault(dataset, batch_size, padded_shapes, padding_values):
            return dataset.padded_batch(batch_size, padded_shapes, padding_values)

    Attributes
    ----------
    batch_size : int
        Size of batches
    fields : Iterable[Field]
        Field information for each key of yielded dictionaries; each
        field provides the padded shape and padding value for its key.
    drop_remainder : bool
        If True, drop the final batch when it has fewer than
        ``batch_size`` elements (default False).
    """

    def __init__(self, batch_size: int, fields: Iterable[Field], drop_remainder: bool = False):
        super().__init__()
        self.batch_size = batch_size
        self.fields = fields
        self.drop_remainder = drop_remainder

    def __repr__(self) -> str:
        return f"{type(self).__name__}({self.batch_size})"

    def apply(self, dataset: tf.data.Dataset, mode: str = None):
        # pylint: disable=unused-argument
        # Build per-key padded shapes and padding values from the fields.
        shapes, values = {}, {}
        for field in self.fields:
            shapes[field.name] = field.shape
            values[field.name] = tf.constant(field.default, field.dtype)
        return dataset.padded_batch(
            self.batch_size,
            padded_shapes=shapes,
            padding_values=values,
            drop_remainder=self.drop_remainder,
        )
class Batch(base.Prepro):
    """Combines consecutive elements of a dataset into batches.

    Attributes
    ----------
    batch_size : int
        Size of batches
    drop_remainder : bool
        If True, drop the final batch when it has fewer than
        ``batch_size`` elements (default False).
    """

    def __init__(self, batch_size: int, drop_remainder: bool = False):
        super().__init__()
        self.batch_size = batch_size
        self.drop_remainder = drop_remainder

    def __repr__(self) -> str:
        return f"{type(self).__name__}({self.batch_size})"

    def apply(self, dataset: tf.data.Dataset, mode: str = None):
        # pylint: disable=unused-argument
        return dataset.batch(self.batch_size, drop_remainder=self.drop_remainder)
class Prefetch(base.Prepro):
    """Creates a dataset that prefetch element on CPU / GPU.

    Attributes
    ----------
    buffer_size : int
        Number of element to prefetch.
        High values may lead to high memory consumption, it is
        recommended to use a buffer_size of 1.
    """

    def __init__(self, buffer_size: int):
        super().__init__()
        self.buffer_size = buffer_size

    def __repr__(self) -> str:
        return f"{type(self).__name__}({self.buffer_size})"

    def apply(self, dataset: tf.data.Dataset, mode: str = None):
        # pylint: disable=unused-argument
        return dataset.prefetch(self.buffer_size)
class Take(base.Prepro):
    """Creates a dataset with at most count elements.

    Attributes
    ----------
    count : int
        Cap the number of elements of a dataset to this number. Using
        None means no capping (will not apply the take transformation).
    """

    def __init__(self, count: int = None):
        super().__init__()
        self.count = count

    def __repr__(self) -> str:
        return f"{type(self).__name__}({self.count})"

    def apply(self, dataset: tf.data.Dataset, mode: str = None):
        # pylint: disable=unused-argument
        # No cap requested: pass the dataset through untouched.
        return dataset if self.count is None else dataset.take(self.count)
class Cache(base.Prepro):
    """Cache Dataset in memory, unless a file is provided.

    You must iterate over the dataset completely to cache it (i.e. a
    ``tf.error.OutOfRangeError`` must be raised).

    If caching to file, note that it consumes a lot of disk space (10x
    to 100x compared to tfrecords), and reloading seems brittle.
    Prefer writing preprocessed data to tfrecord instead.

    Attributes
    ----------
    filename : str, Optional
        Cache file path; if None (default), cache in memory.
    modes : Iterable[str], Optional
        Active modes for the cache (will skip modes not in modes).
        Default is None (all modes are considered active modes).
    """

    def __init__(self, filename: str = None, modes: Iterable[str] = None):
        super().__init__()
        self.filename = filename
        self.modes = modes

    def __repr__(self) -> str:
        return f"{type(self).__name__}({self.filename})"

    def apply(self, dataset: tf.data.Dataset, mode: str = None):
        # pylint: disable=unused-argument
        inactive = mode is not None and self.modes is not None and mode not in self.modes
        if inactive:
            LOGGER.info(f"Not applying {self} (mode={mode})")
            return dataset
        # File-backed cache when a filename was given, in-memory otherwise.
        return dataset.cache(self.filename) if self.filename else dataset.cache()
|
[
"tensorflow.constant",
"logging.getLogger"
] |
[((226, 253), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (243, 253), False, 'import logging\n'), ((11425, 11464), 'tensorflow.constant', 'tf.constant', (['field.default', 'field.dtype'], {}), '(field.default, field.dtype)\n', (11436, 11464), True, 'import tensorflow as tf\n')]
|
"""Tools for generating maps from a text search."""
import geopy as gp
import numpy as np
import matplotlib.pyplot as plt
import warnings
from .tile import howmany, bounds2raster, bounds2img, _sm2ll, _calculate_zoom
from .plotting import INTERPOLATION, ZOOM, add_attribution
from . import providers
from ._providers import TileProvider
# Set user ID for Nominatim: a randomized user agent so requests from this
# process are distinguishable (Nominatim asks clients to identify themselves).
_val = np.random.randint(1000000)
_default_user_agent = f"contextily_user_{_val}"
class Place(object):
    """Geocode a place by name and get its map.

    This allows you to search for a name (e.g., city, street, country) and
    grab map and location data from the internet.

    Parameters
    ----------
    search : string
        The location to be searched.
    zoom : int or None
        [Optional. Default: None]
        The level of detail to include in the map. Higher levels mean more
        tiles and thus longer download time. If None, the zoom level will be
        automatically determined.
    path : str or None
        [Optional. Default: None]
        Path to a raster file that will be created after getting the place map.
        If None, no raster file will be downloaded.
    zoom_adjust : int or None
        [Optional. Default: None]
        The amount to adjust a chosen zoom level if it is chosen automatically.
    source : contextily.providers object or str
        [Optional. Default: Stamen Terrain web tiles]
        The tile source: web tile provider or path to local file. The web tile
        provider can be in the form of a `contextily.providers` object or a
        URL. The placeholders for the XYZ in the URL need to be `{x}`, `{y}`,
        `{z}`, respectively. For local file paths, the file is read with
        `rasterio` and all bands are loaded into the basemap.
        IMPORTANT: tiles are assumed to be in the Spherical Mercator
        projection (EPSG:3857), unless the `crs` keyword is specified.
    url : str [DEPRECATED]
        [Optional. Default: 'http://tile.stamen.com/terrain/{z}/{x}/{y}.png']
        Source url for web tiles, or path to local file. If
        local, the file is read with `rasterio` and all
        bands are loaded into the basemap.
    geocoder : geopy.geocoders
        [Optional. Default: None] Geocoder method to process `search`. If
        None, a ``geopy.geocoders.Nominatim`` instance with contextily's
        default user agent is created.

    Attributes
    ----------
    geocode : geopy object
        The result of calling ``geopy.geocoders.Nominatim`` with ``search`` as input.
    s : float
        The southern bbox edge.
    n : float
        The northern bbox edge.
    e : float
        The eastern bbox edge.
    w : float
        The western bbox edge.
    im : ndarray
        The image corresponding to the map of ``search``.
    bbox : list
        The bounding box of the returned image, expressed in lon/lat, with the
        following order: [minX, minY, maxX, maxY]
    bbox_map : tuple
        The bounding box of the returned image, expressed in Web Mercator, with the
        following order: [minX, minY, maxX, maxY]
    """

    def __init__(
        self,
        search,
        zoom=None,
        path=None,
        zoom_adjust=None,
        source=None,
        url=None,
        geocoder=None,
    ):
        self.path = path
        # BUGFIX: the geocoder used to be created in the parameter default,
        # i.e. evaluated once at *import* time. That built (and shared) a
        # Nominatim client even when contextily was merely imported, and any
        # failure in its constructor broke the import. Build it lazily here
        # instead; passing an explicit geocoder keeps working as before.
        if geocoder is None:
            geocoder = gp.geocoders.Nominatim(user_agent=_default_user_agent)
        if url is not None and source is None:
            warnings.warn(
                'The "url" option is deprecated. Please use the "source"'
                " argument instead.",
                FutureWarning,
                stacklevel=2,
            )
            source = url
        elif url is not None and source is not None:
            warnings.warn(
                'The "url" argument is deprecated. Please use the "source"'
                ' argument. Do not supply a "url" argument. It will be ignored.',
                FutureWarning,
                stacklevel=2,
            )
        if source is None:
            source = providers.Stamen.Terrain
        self.source = source
        self.zoom_adjust = zoom_adjust

        # Get geocoded values
        resp = geocoder.geocode(search)
        bbox = np.array([float(ii) for ii in resp.raw["boundingbox"]])

        # Prefer the geocoder's display name, fall back to the raw search.
        if "display_name" in resp.raw.keys():
            place = resp.raw["display_name"]
        elif "address" in resp.raw.keys():
            place = resp.raw["address"]
        else:
            place = search
        self.place = place
        self.search = search
        self.s, self.n, self.w, self.e = bbox
        self.bbox = [self.w, self.s, self.e, self.n]  # So bbox is standard
        self.latitude = resp.latitude
        self.longitude = resp.longitude
        self.geocode = resp

        # Get map params
        self.zoom = (
            _calculate_zoom(self.w, self.s, self.e, self.n) if zoom is None else zoom
        )
        self.zoom = int(self.zoom)
        if self.zoom_adjust is not None:
            self.zoom += zoom_adjust
        self.n_tiles = howmany(self.w, self.s, self.e, self.n, self.zoom, verbose=False)

        # Get the map
        self._get_map()

    def _get_map(self):
        """Download the basemap (optionally writing it to ``self.path``)."""
        kwargs = {"ll": True}
        if self.source is not None:
            kwargs["source"] = self.source

        try:
            if isinstance(self.path, str):
                # Persist the downloaded tiles to a raster file as well.
                im, bbox = bounds2raster(
                    self.w, self.s, self.e, self.n, self.path, zoom=self.zoom, **kwargs
                )
            else:
                im, bbox = bounds2img(
                    self.w, self.s, self.e, self.n, self.zoom, **kwargs
                )
        except Exception as err:
            raise ValueError(
                "Could not retrieve map with parameters: {}, {}, {}, {}, zoom={}\n{}\nError: {}".format(
                    self.w, self.s, self.e, self.n, self.zoom, kwargs, err
                )
            )

        self.im = im
        self.bbox_map = bbox
        return im, bbox

    def plot(self, ax=None, zoom=ZOOM, interpolation=INTERPOLATION, attribution=None):
        """
        Plot a `Place` object
        ...

        Parameters
        ----------
        ax : AxesSubplot
            Matplotlib axis with `x_lim` and `y_lim` set in Web
            Mercator (EPSG=3857). If not provided, a new
            12x12 figure will be set and the name of the place
            will be added as title
        zoom : int/'auto'
            [Optional. Default='auto'] Level of detail for the
            basemap. If 'auto', if calculates it automatically.
            Ignored if `source` is a local file.
        interpolation : str
            [Optional. Default='bilinear'] Interpolation
            algorithm to be passed to `imshow`. See
            `matplotlib.pyplot.imshow` for further details.
        attribution : str
            [Optional. Defaults to attribution specified by the source of the map tiles]
            Text to be added at the bottom of the axis. This
            defaults to the attribution of the provider specified
            in `source` if available. Specify False to not
            automatically add an attribution, or a string to pass
            a custom attribution.

        Returns
        -------
        ax : AxesSubplot
            Matplotlib axis with `x_lim` and `y_lim` set in Web
            Mercator (EPSG=3857) containing the basemap

        Examples
        --------

        >>> lvl = ctx.Place('Liverpool')
        >>> lvl.plot()
        """
        im = self.im
        bbox = self.bbox_map

        title = None
        axisoff = False
        if ax is None:
            # No axis given: create a standalone figure titled with the place name.
            fig, ax = plt.subplots(figsize=(12, 12))
            title = self.place
            axisoff = True
        ax.imshow(im, extent=bbox, interpolation=interpolation)
        ax.set(xlabel="X", ylabel="Y")
        # Fall back to the tile provider's own attribution when none was given.
        if isinstance(self.source, (dict, TileProvider)) and attribution is None:
            attribution = self.source.get("attribution")
        if attribution:
            add_attribution(ax, attribution)
        if title is not None:
            ax.set(title=title)
        if axisoff:
            ax.set_axis_off()
        return ax

    def __repr__(self):
        s = "Place : {} | n_tiles: {} | zoom : {} | im : {}".format(
            self.place, self.n_tiles, self.zoom, self.im.shape[:2]
        )
        return s
def plot_map(
    place, bbox=None, title=None, ax=None, axis_off=True, latlon=True, attribution=None
):
    """Plot a map of the given place.

    .. deprecated::
        Use `add_basemap` or the internal method `Place.plot` instead.

    Parameters
    ----------
    place : instance of Place or ndarray
        The map to plot. If an ndarray, this must be an image corresponding
        to a map. If an instance of ``Place``, the extent of the image and name
        will be inferred from the bounding box.
    bbox : list, optional
        Extent of the image; taken from ``place`` when it is a ``Place``
        and no explicit value is given.
    title : str, optional
        Title for the axis; defaults to the place name for a ``Place``.
    ax : instance of matplotlib Axes object or None
        The axis on which to plot. If None, one will be created.
    axis_off : bool
        Whether to turn off the axis border and ticks before plotting.
    latlon : bool
        Whether to convert an inferred Web Mercator extent to lon/lat.
    attribution : str
        [Optional. Default to standard `ATTRIBUTION`] Text to be added at the
        bottom of the axis.

    Returns
    -------
    ax : instance of matplotlib Axes object or None
        The axis on the map is plotted.
    """
    warnings.warn(
        (
            "The method `plot_map` is deprecated and will be removed from the"
            " library in future versions. Please use either `add_basemap` or"
            " the internal method `Place.plot`"
        ),
        DeprecationWarning,
    )
    if isinstance(place, Place):
        im = place.im
        if bbox is None:
            bbox = place.bbox_map
            if latlon is True:
                # Express the Web Mercator extent as lon/lat corners
                w, e, s, n = bbox
                w, s = _sm2ll(w, s)
                e, n = _sm2ll(e, n)
                bbox = [w, e, s, n]
        if title is None:
            title = place.place
    else:
        # raw image: extent and title come straight from the arguments
        im = place
    if ax is None:
        _, ax = plt.subplots(figsize=(15, 15))
    ax.imshow(im, extent=bbox)
    ax.set(xlabel="X", ylabel="Y")
    if title is not None:
        ax.set(title=title)
    if attribution:
        add_attribution(ax, attribution)
    if axis_off is True:
        ax.set_axis_off()
    return ax
|
[
"warnings.warn",
"numpy.random.randint",
"matplotlib.pyplot.subplots",
"geopy.geocoders.Nominatim"
] |
[((373, 399), 'numpy.random.randint', 'np.random.randint', (['(1000000)'], {}), '(1000000)\n', (390, 399), True, 'import numpy as np\n'), ((9145, 9352), 'warnings.warn', 'warnings.warn', (['"""The method `plot_map` is deprecated and will be removed from the library in future versions. Please use either `add_basemap` or the internal method `Place.plot`"""', 'DeprecationWarning'], {}), "(\n 'The method `plot_map` is deprecated and will be removed from the library in future versions. Please use either `add_basemap` or the internal method `Place.plot`'\n , DeprecationWarning)\n", (9158, 9352), False, 'import warnings\n'), ((3185, 3239), 'geopy.geocoders.Nominatim', 'gp.geocoders.Nominatim', ([], {'user_agent': '_default_user_agent'}), '(user_agent=_default_user_agent)\n', (3207, 3239), True, 'import geopy as gp\n'), ((9927, 9957), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(15, 15)'}), '(figsize=(15, 15))\n', (9939, 9957), True, 'import matplotlib.pyplot as plt\n'), ((3332, 3461), 'warnings.warn', 'warnings.warn', (['"""The "url" option is deprecated. Please use the "source" argument instead."""', 'FutureWarning'], {'stacklevel': '(2)'}), '(\n \'The "url" option is deprecated. Please use the "source" argument instead.\'\n , FutureWarning, stacklevel=2)\n', (3345, 3461), False, 'import warnings\n'), ((7541, 7571), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(12, 12)'}), '(figsize=(12, 12))\n', (7553, 7571), True, 'import matplotlib.pyplot as plt\n'), ((3624, 3799), 'warnings.warn', 'warnings.warn', (['"""The "url" argument is deprecated. Please use the "source" argument. Do not supply a "url" argument. It will be ignored."""', 'FutureWarning'], {'stacklevel': '(2)'}), '(\n \'The "url" argument is deprecated. Please use the "source" argument. Do not supply a "url" argument. It will be ignored.\'\n , FutureWarning, stacklevel=2)\n', (3637, 3799), False, 'import warnings\n')]
|
# Copyright (c) 2013-2015 by California Institute of Technology
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the California Institute of Technology nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL CALTECH
# OR THE CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
"""Finite State Machines Module"""
from __future__ import absolute_import
from __future__ import print_function
import copy
from pprint import pformat
from random import choice
from tulip.transys.labeled_graphs import LabeledDiGraph
# inline imports:
#
# import sys
# from tulip.transys.export import machine2scxml
_hl = 40 * '-'
# port type
pure = {'present', 'absent'}
def is_valuation(ports, valuations):
    """Check that `valuations` assigns an admissible value to every port.

    @param ports: map of port name -> port type, where a type is either
        a container of admissible values or a callable predicate
    @param valuations: map of port name -> assigned value
    @raise TypeError: if some port's value is not admitted by its type
    """
    for port, port_type in ports.items():
        value = valuations[port]
        # a callable type acts as a membership predicate;
        # otherwise test membership in the container directly
        admitted = port_type(value) if callable(port_type) else value in port_type
        if not admitted:
            raise TypeError('Not a valuation.')
def create_machine_ports(spc_vars):
    """Create proper port domains of valuations, given port types.

    @param spc_vars: port names and types inside tulip.
        For arbitrary finite types the type can be a list of strings,
        instead of a range of integers.
        These are as originally defined by the user or synth.
    """
    ports = dict()
    for name, var_type in spc_vars.items():
        if var_type == 'boolean':
            dom = {0, 1}
        elif isinstance(var_type, tuple):
            # inclusive integer interval (start, end)
            lo, hi = var_type
            dom = set(range(lo, hi + 1))
        elif isinstance(var_type, list):
            # arbitrary finite domain given explicitly as a list
            dom = set(var_type)
        ports[name] = dom
    return ports
class Transducer(LabeledDiGraph):
    """Sequential Transducer, i.e., a letter-to-letter function.

    Inputs
    ======
    P = {p1, p2,...} is the set of input ports.
    An input port p takes values in a set Vp.
    Set Vp is called the "type" of input port p.
    A "valuation" is an assignment of values to the input ports in P.

    We call "inputs" the set of pairs::

        {(p_i, Vp_i),...}

    of input ports p_i and their corresponding types Vp_i.

    A guard is a predicate (bool-valued) used as sub-label for a transition.
    A guard is defined by a set and evaluated using set membership.
    So given an input port value p=x, then if::

        x \in guard_set

    then the guard is True, otherwise it is False.

    The "inputs" are defined by an OrderedDict::

        {'p1':explicit, 'p2':check, 'p3':None, ...}

    where:
      - C{explicit}:
        is an iterable representation of Vp,
        possible only for discrete Vp.
        If 'p1' is explicitly typed, then guards are evaluated directly::

            input_port_value == guard_value ?

      - C{check}:
        is a class with methods:

          - C{__contains__(x) }:
            check if guard value given to input port 'p1' is
            in the set of possible values Vp.

          - C{__call__(guard_set, input_port_value) }:
            check if C{input_port_value} \\in C{guard_set}
            This allows symbolic type definitions.

            For example, C{input_port_value} might be assigned
            int values, but the C{guard_set} be defined by
            a symbolic expression as the str: 'x<=5'.

            Then the user is responsible for providing
            the appropriate method to the Mealy Machine,
            using the custom C{check} class described here.

            Note that we could provide a rudimentary library
            for the basic types of checks, e.g., for
            the above simple symbolic case, where using
            function eval() is sufficient.

      - C{None}:
        signifies that no type is currently defined for
        this input port, so input type checking and guard
        evaluation are disabled.
        This can be used to skip type definitions when
        they are not needed by the user.
        However, since Machines are in general the output
        of synthesis, it follows that they are constructed
        by code, so the benefits of typedefs will be
        considerable compared to the required coding effort.

    Guards annotate transitions::

        Guards: States x States ---> Input_Predicates

    Outputs
    =======
    Similarly defined to inputs, but:
      - for Mealy Machines they annotate transitions
      - for Moore Machines they annotate states

    State Variables
    ===============
    Similarly defined to inputs, they annotate states,
    for both Mealy and Moore machines::

        States ---> State_Variables

    Update Function
    ===============
    The transition relation:
      - for Mealy Machines::

        States x Input_Valuations ---> Output_Valuations x States

        Note that in the range Output_Valuations are ordered before States
        to emphasize that an output_valuation is produced
        during the transition, NOT at the next state.
        The data structure representation of the update function is
        by storage of the Guards function and definition of Guard
        evaluation for each input port via the OrderedDict discussed above.

      - for Moore Machines::

        States x Input_Valuations ---> States
        States ---> Output_valuations

    Note
    ====
    A transducer may operate on either finite or infinite words, i.e.,
    it is not equipped with interpretation semantics on the words,
    so it does not "care" about word length.
    It continues as long as its input is fed with letters.

    For Machines, each state label consists of (possibly multiple) sublabels,
    each of which is either a variable, or, only for Moore machines,
    may be an output.

    See Also
    ========
    FSM, MealyMachine, MooreMachine
    """

    def __init__(self):
        """Initialize empty port/variable registries, then the base digraph."""
        # values will point to values of _*_label_def below
        self.state_vars = dict()
        self.inputs = dict()
        self.outputs = dict()
        # self.set_actions = {}
        # state labeling: sublabel types and their dot rendering format
        self._state_label_def = dict()
        self._state_dot_label_format = {'type?label': ':',
                                        'separator': r'\\n'}
        # edge labeling: sublabel types and their dot rendering format
        self._transition_label_def = dict()
        self._transition_dot_label_format = {'type?label': ':',
                                             'separator': r'\\n'}
        # per-port mask functions deciding whether a sublabel is drawn
        self._transition_dot_mask = dict()
        self._state_dot_mask = dict()
        self.default_export_fname = 'fsm'
        LabeledDiGraph.__init__(self)
        # NOTE(review): `default_export_fname` is assigned both before and
        # after the base-class constructor; presumably the second assignment
        # guards against `LabeledDiGraph.__init__` resetting it -- confirm
        # before removing either line.
        self.dot_node_shape = {'normal': 'ellipse'}
        self.default_export_fname = 'fsm'

    def add_inputs(self, new_inputs, masks=None):
        """Create new inputs.

        @param new_inputs: C{dict} of pairs {port_name : port_type}
          where:
            - port_name: str
            - port_type: Iterable | check class
        @type new_inputs: dict

        @param masks: custom mask functions, for each sublabel
            based on its current value
            each such function returns:
              - True, if the sublabel should be shown
              - False, otherwise (to hide it)
        @type masks: C{dict} of functions C{{port_name : mask_function}}
            each C{mask_function} returns bool
        """
        for port_name, port_type in new_inputs.items():
            # append
            self._transition_label_def[port_name] = port_type
            # inform inputs
            self.inputs[port_name] = port_type
            # printing format
            self._transition_dot_label_format[port_name] = str(port_name)
            if masks is None:
                continue
            if port_name in masks:
                mask_func = masks[port_name]
                self._transition_dot_mask[port_name] = mask_func

    def add_state_vars(self, new_state_vars):
        """Append state variables, which annotate states.

        @param new_state_vars: C{dict} of pairs {var_name : var_type},
            analogous to the port dicts accepted by L{add_inputs}
        """
        for var_name, var_type in new_state_vars.items():
            # append
            self._state_label_def[var_name] = var_type
            # inform state vars
            self.state_vars[var_name] = self._state_label_def[var_name]
            # printing format
            self._state_dot_label_format[var_name] = str(var_name)
class MooreMachine(Transducer):
    """Moore machine.

    A Moore machine implements the discrete dynamics::
        x[k+1] = f(x[k], u[k] )
        y[k] = g(x[k] )
    where:
      - k: discrete time = sequence index
      - x: state = valuation of state variables
      - X: set of states = S
      - u: inputs = valuation of input ports
      - y: output actions = valuation of output ports
      - f: X-> 2^X, transition function
      - g: X-> Out, output function

    Observe that the output depends only on the state.

    Note
    ====
    valuation: assignment of values to each port

    Reference
    =========
    U{[M56]
    <https://tulip-control.sourceforge.io/doc/bibliography.html#m56>}
    """

    def __init__(self):
        """Instantiate a Moore state machine."""
        Transducer.__init__(self)
        self.dot_node_shape = {'normal': 'ellipse'}
        self.default_export_fname = 'moore'

    def __str__(self):
        """Get informal string representation."""
        chunks = [
            _hl, '\nMoore Machine: ', self.name, '\n', _hl, '\n',
            'State Variables:\n\t(name : type)\n',
            _print_ports(self.state_vars),
            'Input Ports:\n\t(name : type)\n',
            _print_ports(self.inputs),
            'Output Ports:\n\t(name : type)\n',
            _print_ports(self.outputs),
            'States & State Var Values: (state : outputs : vars)\n']
        for state, attrs in self.states(data=True):
            chunks.append('\t' + str(state) + ' :\n')
            # split the state label into state-variable and output valuations
            var_vals = {k: v for k, v in attrs.items()
                        if k in self.state_vars}
            out_vals = {k: v for k, v in attrs.items()
                        if k in self.outputs}
            chunks.append(_print_label(var_vals) + ' : ' +
                          _print_label(out_vals))
        chunks.append('Initial States:\n')
        chunks.append(pformat(self.states.initial, indent=3) + 2 * '\n')
        chunks.append('Transitions & Labels: (from --> to : label)\n')
        for src, dst, attrs in self.transitions(data=True):
            chunks.append(
                '\t' + str(src) + ' ---> ' +
                str(dst) + ' :\n' +
                _print_label(attrs))
        chunks.append(_hl + '\n')
        return ''.join(chunks)

    def add_outputs(self, new_outputs, masks=None):
        """Add new output ports; Moore outputs annotate states.

        @param new_outputs: C{dict} of pairs {port_name : port_type}
        @param masks: optional C{dict} {port_name : mask_function};
            a mask decides whether the sublabel is rendered
        """
        for name, port_type in new_outputs.items():
            # outputs of a Moore machine label the states
            self._state_label_def[name] = port_type
            self.outputs[name] = port_type
            # rendered as "/name" to distinguish outputs from state vars
            self._state_dot_label_format[name] = '/' + str(name)
            if masks is not None and name in masks:
                self._state_dot_mask[name] = masks[name]
class MealyMachine(Transducer):
    """Mealy machine.

    Examples
    ========
    Traffic Light: Fig. 3.14, p.72 U{[LS11]
    <https://tulip-control.sourceforge.io/doc/bibliography.html#ls11>}

    >>> m = MealyMachine()
    >>> pure_signal = {'present', 'absent'}
    >>> m.add_inputs({'tick': pure_signal})
    >>> m.add_outputs({'go': pure_signal, 'stop': pure_signal})
    >>> m.states.add_from(['red', 'green', 'yellow'])
    >>> m.states.initial.add('red')

    For brevity:

    >>> p = 'present'
    >>> a = 'absent'

    The transitions can equivalently be defined with dict().
    So instead of the previous C{m.transitions.add}, we can use:

    >>> label = {'tick':p, 'go':p, 'stop':a}
    >>> m.transitions.add('red', 'green', **label)
    >>> label = {'tick':p, 'go':a, 'stop':p}
    >>> m.transitions.add('green', 'yellow', **label)
    >>> label = {'tick':p, 'go':a, 'stop':p}
    >>> m.transitions.add('yellow', 'red', **label)

    This avoids any ordering issues, i.e., changing the
    order of the sublabels does not matter:

    >>> label = {'go':p, 'tick':p, 'stop':a}
    >>> m.transitions.add('red', 'green', **label)

    Theory
    ======
    A Mealy machine implements the discrete dynamics::
        x[k+1] = f(x[k], u[k] )
        y[k] = g(x[k], u[k] )
    where:
      - k: discrete time = sequence index
      - x: state = valuation of state variables
      - X: set of states = S
      - u: inputs = valuation of input ports
      - y: output actions = valuation of output ports
      - f: X-> 2^X, transition function
      - g: X-> Out, output function

    Observe that the output is defined when a reaction occurs to an input.

    Note
    ====
    valuation: assignment of values to each port

    Reference
    =========
    U{[M55]
    <https://tulip-control.sourceforge.io/doc/bibliography.html#m55>}
    """

    def __init__(self):
        """Instantiate a Mealy state machine."""
        Transducer.__init__(self)
        # will point to selected values of self._transition_label_def
        self.dot_node_shape = {'normal': 'ellipse'}
        self.default_export_fname = 'mealy'

    def __str__(self):
        """Get informal string representation."""
        s = (
            _hl + '\nMealy Machine: ' + self.name + '\n' + _hl + '\n' +
            'State Variables:\n\t(name : type)\n' +
            _print_ports(self.state_vars))
        s += 'States & State Var Values:\n'
        for state, label_dict in self.states(data=True):
            s += ('\t' + str(state) + ' :\n' +
                  _print_label(label_dict))
        s += (
            'Initial States:\n' +
            pformat(self.states.initial, indent=3) + 2 * '\n' +
            'Input Ports:\n\t(name : type)\n' +
            _print_ports(self.inputs) +
            'Output Ports:\n\t(name : type)\n' +
            _print_ports(self.outputs) +
            'Transitions & Labels: (from --> to : label)\n')
        for from_state, to_state, label_dict in self.transitions(data=True):
            s += (
                '\t' + str(from_state) + ' ---> ' +
                str(to_state) + ' :\n' +
                _print_label(label_dict))
        s += _hl + '\n'
        return s

    def _save(self, path, fileformat):
        """Export options available only for Mealy machines.

        @type fileformat: 'scxml'
        @return: True on success, False for unsupported formats.
        """
        if fileformat != 'scxml':
            return False
        from tulip.transys.export import machine2scxml
        s = machine2scxml.mealy2scxml(self)
        # context manager closes the file even if write() raises
        # (the previous open/close pair leaked the handle on error)
        with open(path, 'w') as f:
            f.write(s)
        return True

    def add_outputs(self, new_outputs, masks=None):
        """Add new outputs.

        @param new_outputs: dict of pairs {port_name : port_type}
          where:
            - port_name: str
            - port_type: Iterable | check class
        @type new_outputs: dict

        @param masks: custom mask functions, for each sublabel
            based on its current value
            each such function returns:
              - True, if the sublabel should be shown
              - False, otherwise (to hide it)
        @type masks: dict of functions
            keys are port_names (see arg: new_outputs)
            each function returns bool
        """
        for port_name, port_type in new_outputs.items():
            # append; Mealy outputs label the transitions
            self._transition_label_def[port_name] = port_type
            # inform state vars
            self.outputs[port_name] = \
                self._transition_label_def[port_name]
            # printing format: "/name" marks outputs
            self._transition_dot_label_format[port_name] = \
                '/' + str(port_name)
            if masks is None:
                continue
            if port_name in masks:
                mask_func = masks[port_name]
                self._transition_dot_mask[port_name] = mask_func

    def reaction(self, from_state, inputs, lazy=False):
        """Return next state and output, when reacting to given inputs.

        The machine must be deterministic.
        (for each state and input at most a single transition enabled,
        this notion does not coincide with output-determinism)

        Not exactly a wrapper of L{Transitions.find},
        because it matches only that part of an edge label
        that corresponds to the inputs.

        @param from_state: transition starts from this state.
        @type from_state: element of C{self.states}

        @param inputs: C{dict} assigning a valid value to each input port.
        @type inputs: {'port_name':port_value, ...}

        @param lazy: Lazy evaluation of inputs? If lazy=True, then
            allow an incomplete specification of input if there is
            precisely one enabled transition.
        @type lazy: bool

        @return: next state and output values.
        @rtype: (next_state, outputs)
            where C{outputs}: C{{'port_name':port_value, ...}}
        """
        if lazy:
            # only match on the input ports the caller actually specified
            restricted_inputs = set(self.inputs).intersection(inputs.keys())
        else:
            restricted_inputs = self.inputs
        # match only inputs (explicit valuations, not symbolic)
        enabled_trans = [
            (i, j, d)
            for i, j, d in self.edges_iter([from_state], data=True)
            if project_dict(d, restricted_inputs) == inputs]
        if len(enabled_trans) == 0:
            # collect a few candidate input valuations for the error message
            some_possibilities = []
            for i, j, d in self.edges_iter([from_state], data=True):
                # The number of possible inputs to suggest here is
                # arbitrary. Consider making it a function parameter.
                if len(some_possibilities) >= 5:
                    break
                possible_inputs = project_dict(d, restricted_inputs)
                if possible_inputs not in some_possibilities:
                    some_possibilities.append(possible_inputs)
        # must be deterministic: unpacking succeeds iff exactly one enabled
        try:
            ((_, next_state, attr_dict), ) = enabled_trans
        except ValueError:
            if len(enabled_trans) == 0:
                if len(some_possibilities) == 0:
                    raise Exception(
                        'state {from_state} is a dead-end. '
                        'There are no possible inputs from '
                        'it.'.format(from_state=from_state))
                else:
                    raise Exception(
                        'not a valid input, '
                        'some possible inputs include: '
                        '{t}'.format(t=some_possibilities))
            else:
                raise Exception(
                    'must be input-deterministic, '
                    'found enabled transitions: '
                    '{t}'.format(t=enabled_trans))
        outputs = project_dict(attr_dict, self.outputs)
        return (next_state, outputs)

    def reactionpart(self, from_state, inputs):
        """Wraps reaction() with lazy=True
        """
        return self.reaction(from_state, inputs, lazy=True)

    def run(self, from_state=None, input_sequences=None):
        """Guided or interactive run.

        @param input_sequences: if C{None}, then call L{interactive_run},
            otherwise call L{guided_run}.

        @return: output of L{guided_run}, otherwise C{None}.
        """
        if input_sequences is None:
            interactive_run(self, from_state=from_state)
        else:
            return guided_run(self, from_state=from_state,
                              input_sequences=input_sequences)
def guided_run(mealy, from_state=None, input_sequences=None):
    """Run deterministic machine reacting to given inputs.

    @param mealy: input-deterministic Mealy machine
    @type mealy: L{MealyMachine}

    @param from_state: start simulation at this state.
        If C{None}, then use the unique initial state C{Sinit}.

    @param input_sequences: one sequence of values for each input port
    @type input_sequences: C{dict} of C{lists}

    @return: sequence of states and sequence of output valuations
    @rtype: (states, output_sequences)
        where:
          - C{states} is a C{list} of states excluding C{from_state}
          - C{output_sequences} is a C{dict} of C{lists}
    """
    seqs = input_sequences  # abbrv
    missing_ports = set(mealy.inputs).difference(seqs)
    if missing_ports:
        # str() the set: concatenating str + set raised TypeError,
        # masking the intended ValueError
        raise ValueError('missing input port(s): ' + str(missing_ports))
    # dict of lists ?
    non_lists = {k: v for k, v in seqs.items() if not isinstance(v, list)}
    if non_lists:
        raise TypeError('Values must be lists, for: ' + str(non_lists))
    # uniform list len ?
    if len(set(len(x) for x in seqs.values())) > 1:
        raise ValueError('All input sequences must be of equal length.')
    # note: initial sys state non-determinism not checked
    # initial sys edge non-determinism checked instead (more restrictive)
    if from_state is None:
        state = next(iter(mealy.states.initial))
    else:
        state = from_state
    n = len(next(iter(seqs.values())))
    states_seq = []
    output_seqs = {k: list() for k in mealy.outputs}
    for i in range(n):
        # i-th input valuation across all ports
        inputs = {k: v[i] for k, v in seqs.items()}
        state, outputs = mealy.reaction(state, inputs)
        states_seq.append(state)
        for k in output_seqs:
            output_seqs[k].append(outputs[k])
    return (states_seq, output_seqs)
def random_run(mealy, from_state=None, N=10):
    """Return run from given state for N random inputs.

    Inputs are selected randomly in a way that does not block the
    machine, so they are not arbitrarily random. If the machine is a
    valid synthesis solution, all safe environment inputs can be
    generated this way; liveness assumptions may still be violated.

    @param mealy: input-deterministic Mealy machine
    @type mealy: C{MealyMachine}

    @param N: number of reactions (inputs)
    @type N: int

    @return: same as L{guided_run}
    """
    state = next(iter(mealy.states.initial)) if from_state is None else from_state
    states_seq = []
    output_seqs = {port: [] for port in mealy.outputs}
    for _ in range(N):
        candidates = mealy.transitions.find([state])
        # pick the next transition uniformly at random
        _, new_state, attr_dict = choice(list(candidates))
        # extend execution trace
        states_seq.append(new_state)
        # extend output traces
        outputs = project_dict(attr_dict, mealy.outputs)
        for port in output_seqs:
            output_seqs[port].append(outputs[port])
        # update and report the step
        old_state, state = state, new_state
        inputs = project_dict(attr_dict, mealy.inputs)
        print(
            'move from\n\t state: ' + str(old_state) +
            '\n\t with input:' + str(inputs) +
            '\n\t to state: ' + str(new_state) +
            '\n\t reacting by producing output: ' + str(outputs))
    return (states_seq, output_seqs)
def interactive_run(mealy, from_state=None):
    """Run input-deterministic Mealy machine using user input.

    @param mealy: input-deterministic Mealy machine
    @type mealy: L{MealyMachine}
    """
    state = next(iter(mealy.states.initial)) if from_state is None else from_state
    while True:
        print('\n Current state: ' + str(state))
        # stop once the step function signals termination
        if _interactive_run_step(mealy, state) is None:
            break
def _interactive_run_step(mealy, state):
    """Let the user pick one transition from `state` and report the move.

    @return: True if a transition was taken, None to stop the run.
    @raise Exception: if `state` is None.
    """
    if state is None:
        raise Exception('Current state is None')
    # note: the spaghettiness of previous version was caused
    # by interactive simulation allowing both output-non-determinism
    # and implementing spawning (which makes sense only for generators,
    # *not* for transducers)
    trans = mealy.transitions.find([state])
    if not trans:
        print('Stop: no outgoing transitions.')
        return None
    while True:
        try:
            selected_trans = _select_transition(mealy, trans)
        except (ValueError, IndexError):
            # Catch only parsing/selection errors and retry.
            # The previous bare `except:` fell through with
            # `selected_trans` unbound (NameError on first failure)
            # or stale from a prior iteration.
            print('Selection not recognized. Please try again.')
            continue
        break
    if selected_trans is None:
        return None
    (from_, to_state, attr_dict) = selected_trans
    inputs = project_dict(attr_dict, mealy.inputs)
    outputs = project_dict(attr_dict, mealy.outputs)
    print(
        'Moving from state: ' + str(state) +
        ', to state: ' + str(to_state) + '\n' +
        'given inputs: ' + str(inputs) + '\n' +
        'reacting with outputs: ' + str(outputs))
    return True
def _select_transition(mealy, trans):
    """Prompt the user to pick a transition from `trans` by index.

    Pressing "Enter" without an index stops the simulation.

    @return: the chosen transition triple, or None to stop.
    """
    import sys
    pieces = ['Found more than 1 outgoing transitions:' + 2 * '\n']
    for idx, (src, dst, attr_dict) in enumerate(trans):
        ins = project_dict(attr_dict, mealy.inputs)
        outs = project_dict(attr_dict, mealy.outputs)
        pieces.append(
            '\t' + str(idx) + ' : ' +
            str(src) + ' ---> ' + str(dst) + '\n' +
            '\t inputs:' + str(ins) +
            '\t outputs:' + str(outs) +
            '\n\n')
    pieces.append(
        '\n' +
        'Select from the available transitions above\n' +
        'by giving its integer,\n' +
        'Press "Enter" to stop the simulation:\n' +
        '\t int = ')
    print(''.join(pieces))
    # empty line means: stop the simulation
    answer = sys.stdin.readline().rstrip('\r\n')
    if not answer:
        return None
    return trans[int(answer)]
def moore2mealy(moore):
    """Convert Moore machine to equivalent Mealy machine.

    Each state's output valuation is copied onto all of its
    outgoing transitions.

    Reference
    =========
    U{[LS11]
    <https://tulip-control.sourceforge.io/doc/bibliography.html#ls11>}

    @type moore: L{MooreMachine}
    @rtype: L{MealyMachine}
    """
    if not isinstance(moore, MooreMachine):
        raise TypeError('moore must be a MooreMachine')
    mealy = MealyMachine()
    # replicate input ports, together with any dot mask functions
    for name, port_type in moore.inputs.items():
        mask = moore._transition_dot_mask.get(name)
        mealy.add_inputs(
            {name: port_type},
            masks=None if mask is None else {name: mask})
    # replicate output ports (state masks in Moore, edge masks in Mealy)
    for name, port_type in moore.outputs.items():
        mask = moore._state_dot_mask.get(name)
        mealy.add_outputs(
            {name: port_type},
            masks=None if mask is None else {name: mask})
    # replicate states
    mealy.states.add_from(moore.states())
    mealy.states.initial.add_from(moore.states.initial)
    # replicate transitions, attaching the source state's outputs
    for si in moore:
        out_vals = copy.deepcopy({
            k: v for k, v in moore.states[si].items()
            if k in moore.outputs})
        for _, sj, attr_dict in moore.transitions.find(si):
            # keep any non-input edge annotation, then add the outputs
            edge_attr = copy.deepcopy(attr_dict)
            edge_attr.update(out_vals)
            mealy.transitions.add(si, sj, edge_attr)
    return mealy
def mealy2moore(mealy):
    """Convert Mealy machine to almost equivalent Moore machine.

    A Mealy machine cannot be transformed to an equivalent Moore machine.
    It can be converted to a Moore machine with an arbitrary initial output,
    which outputs the Mealy output at its next reaction.

    Reference
    =========
    U{[LS11]
    <https://tulip-control.sourceforge.io/doc/bibliography.html#ls11>}

    @type mealy: L{MealyMachine}
    @rtype: L{MooreMachine}
    """
    # TODO: check for when Mealy is exactly convertible to Moore
    if not isinstance(mealy, MealyMachine):
        raise TypeError('moore must be a MealyMachine')
    moore = MooreMachine()
    # cp inputs
    for port_name, port_type in mealy.inputs.items():
        mask_func = mealy._transition_dot_mask.get(port_name)
        if mask_func is None:
            masks = None
        else:
            masks = {port_name: mask_func}
        moore.add_inputs({port_name: port_type}, masks=masks)
    # cp outputs
    # NOTE(review): masks for outputs are read from
    # `_transition_dot_mask` -- consistent with Mealy outputs living
    # on transitions, but they end up as Moore *state* masks; confirm.
    for port_name, port_type in mealy.outputs.items():
        mask_func = mealy._transition_dot_mask.get(port_name)
        if mask_func is None:
            masks = None
        else:
            masks = {port_name: mask_func}
        moore.add_outputs({port_name: port_type}, masks=masks)
    # initial state with arbitrary label: first value of each output type
    out = {k: list(v)[0] for k, v in mealy.outputs.items()}
    s0 = list(mealy.states.initial)[0]
    # create maps between Moore and Mealy states
    moore2mealy_states = dict()  # {qj : si} (function)
    mealy2moore_states = dict()  # {si : {qj, qk, ...} } (relation)
    new_s0 = _create_state_str(
        s0, out, moore, moore2mealy_states,
        mealy2moore_states)
    moore.states.add(new_s0, out)
    moore.states.initial.add(new_s0)
    # cp transitions and create appropriate states
    # worklist search: Q = Moore states left to expand, S = already created
    Q = set()
    S = set()
    Q.add(new_s0)
    S.add(new_s0)
    while Q:
        new_si = Q.pop()
        si = moore2mealy_states[new_si]
        for si_, sj, attr_dict in mealy.transitions.find(si):
            # edge label splits into inputs (stay on the edge)
            # and outputs (move onto the target Moore state)
            in_values, out_values = _split_io(attr_dict, mealy)
            new_sj = _create_state_str(
                sj, out_values, moore, moore2mealy_states,
                mealy2moore_states)
            moore.transitions.add(new_si, new_sj, in_values)
            if new_sj not in S:
                Q.add(new_sj)
                S.add(new_sj)
    return moore
def _print_ports(port_dict):
s = ''
for port_name, port_type in port_dict.items():
s += '\t' + str(port_name) + ' : '
s += pformat(port_type) + '\n'
s += '\n'
return s
def _print_label(label_dict):
s = ''
for name, value in label_dict.items():
s += '\t\t' + str(name) + ' : ' + str(value) + '\n'
s += '\n'
return s
def _create_state_str(mealy_state, output, moore,
                      moore2mealy_states,
                      mealy2moore_states):
    """Used to create Moore states when converting Mealy -> Moore.

    Reuse an existing Moore state mapped to `mealy_state` whose stored
    output valuation equals `output`; otherwise mint a fresh state 's<n>'
    and record it in both direction maps.
    """
    candidates = mealy2moore_states.setdefault(mealy_state, set())
    for existing in candidates:
        # an equivalent Moore state already exists: same output valuation
        if moore.states[existing] == output:
            return existing
    # create new
    new_state = 's' + str(len(moore))
    moore.states.add(new_state, output)
    moore2mealy_states[new_state] = mealy_state
    candidates.add(new_state)
    return new_state
def _split_io(attr_dict, machine):
"""Split into inputs and outputs."""
input_values = {k: v for k, v in attr_dict.items()
if k in machine.inputs}
output_values = {k: v for k, v in attr_dict.items()
if k in machine.outputs}
return input_values, output_values
def project_dict(x, y):
    """Restrict dict `x` to the keys that are also in `y`."""
    return {k: x[k] for k in x if k in y}


def trim_dict(x, y):
    """Restrict dict `x` to the keys that are NOT in `y`."""
    return {k: x[k] for k in x if k not in y}
def strip_ports(mealy, names):
    """Remove ports in C{names}.

    For example, to remove the atomic propositions
    labeling the transition system C{ts} used
    (so they are dependent variables), call it as:

    >>> strip_ports(mealy, ts.atomic_propositions)

    @type mealy: L{MealyMachine}
    @type names: iterable container of C{str}
    """
    stripped = MealyMachine()
    # keep only the ports that are not being removed
    stripped.add_inputs(trim_dict(mealy.inputs, names))
    stripped.add_outputs(trim_dict(mealy.outputs, names))
    stripped.add_nodes_from(mealy)
    stripped.states.initial.add_from(mealy.states.initial)
    for src, dst, attr_dict in mealy.edges_iter(data=True):
        # drop the stripped ports from each edge label as well
        stripped.add_edge(src, dst, **trim_dict(attr_dict, names))
    return stripped
|
[
"copy.deepcopy",
"pprint.pformat",
"tulip.transys.labeled_graphs.LabeledDiGraph.__init__",
"tulip.transys.export.machine2scxml.mealy2scxml",
"sys.stdin.readline"
] |
[((7969, 7998), 'tulip.transys.labeled_graphs.LabeledDiGraph.__init__', 'LabeledDiGraph.__init__', (['self'], {}), '(self)\n', (7992, 7998), False, 'from tulip.transys.labeled_graphs import LabeledDiGraph\n'), ((16016, 16047), 'tulip.transys.export.machine2scxml.mealy2scxml', 'machine2scxml.mealy2scxml', (['self'], {}), '(self)\n', (16041, 16047), False, 'from tulip.transys.export import machine2scxml\n'), ((28289, 28317), 'copy.deepcopy', 'copy.deepcopy', (['output_values'], {}), '(output_values)\n', (28302, 28317), False, 'import copy\n'), ((26874, 26894), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (26892, 26894), False, 'import sys\n'), ((28514, 28538), 'copy.deepcopy', 'copy.deepcopy', (['attr_dict'], {}), '(attr_dict)\n', (28527, 28538), False, 'import copy\n'), ((31205, 31223), 'pprint.pformat', 'pformat', (['port_type'], {}), '(port_type)\n', (31212, 31223), False, 'from pprint import pformat\n'), ((11613, 11651), 'pprint.pformat', 'pformat', (['self.states.initial'], {'indent': '(3)'}), '(self.states.initial, indent=3)\n', (11620, 11651), False, 'from pprint import pformat\n'), ((15179, 15217), 'pprint.pformat', 'pformat', (['self.states.initial'], {'indent': '(3)'}), '(self.states.initial, indent=3)\n', (15186, 15217), False, 'from pprint import pformat\n')]
|
import pickle
from propy.PyPro import GetProDes
import argparse
import os
parser = argparse.ArgumentParser(description='extract features')
parser.add_argument('--file', type=str, default='VFG-2706') # VFG-2706/VFG-740/VFG-2706-1066/VFG-564/COG-755
parser.add_argument('--feature', type=str, default='aac') # aac, dpc, ctd, pseaac1, pseaac2, label
args = parser.parse_args()
VFs_data_dir = os.getcwd() + "/data/"
f_file = open(VFs_data_dir + str(args.file), 'r')
all_data = pickle.load(f_file)
features_dir = VFs_data_dir + args.file + "_features/"
if not os.path.exists(features_dir):
os.makedirs(features_dir)
if args.feature == 'aac':
f_save = open(features_dir + "propy_AAC.txt", 'a')
elif args.feature == 'dpc':
f_save = open(features_dir + "propy_DPC.txt", 'a')
elif args.feature == 'ctd':
f_save = open(features_dir + "propy_CTD.txt", 'a')
elif args.feature == 'pseaac1':
f_save = open(features_dir + "propy_pseaac1.txt", 'a')
elif args.feature == 'pseaac2':
f_save = open(features_dir + "propy_pseaac2.txt", 'a')
elif args.feature == 'label':
f_save = open(features_dir + "labels.txt", 'a')
for i, each_compid in enumerate(all_data.keys()):
for j, each_sequence in enumerate(all_data[each_compid]):
if args.feature == 'label':
f_save.write('{}\n'.format(i))
else:
Des = GetProDes(each_sequence)
# print(j)
if args.feature == 'aac': # group 1 AAC:20, # DC:400
each_value = Des.GetAAComp().values()
elif args.feature == 'dpc':
each_value = Des.GetDPComp().values()
elif args.feature == 'ctd':
each_value = Des.GetCTD().values()
elif args.feature == 'pseaac1': # pseaac type1
each_value = Des.GetPAAC(lamda=10, weight=0.05).values()
elif args.feature == 'pseaac2': # pseaac type2
each_value = Des.GetAPAAC(lamda=20, weight=0.05).values()
each_value_p = '\t'.join(str(n) for n in each_value)
f_save.write('{}\n'.format(each_value_p))
f_save.close()
|
[
"os.makedirs",
"argparse.ArgumentParser",
"os.getcwd",
"os.path.exists",
"pickle.load",
"propy.PyPro.GetProDes"
] |
[((84, 139), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""extract features"""'}), "(description='extract features')\n", (107, 139), False, 'import argparse\n'), ((478, 497), 'pickle.load', 'pickle.load', (['f_file'], {}), '(f_file)\n', (489, 497), False, 'import pickle\n'), ((394, 405), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (403, 405), False, 'import os\n'), ((561, 589), 'os.path.exists', 'os.path.exists', (['features_dir'], {}), '(features_dir)\n', (575, 589), False, 'import os\n'), ((595, 620), 'os.makedirs', 'os.makedirs', (['features_dir'], {}), '(features_dir)\n', (606, 620), False, 'import os\n'), ((1357, 1381), 'propy.PyPro.GetProDes', 'GetProDes', (['each_sequence'], {}), '(each_sequence)\n', (1366, 1381), False, 'from propy.PyPro import GetProDes\n')]
|
from conans import ConanFile, CMake, tools
import re
from os import path
class StructuredConcurrencyExampleRecipe(ConanFile):
name = "structured_concurrency_example"
description = "example code for using structure concurrency with senders/receivers"
author = "<NAME>"
topics = ("C++", "concurrency")
homepage = "https://github.com/lucteo/structured_concurrency_example"
url = "https://github.com/lucteo/structured_concurrency_example"
license = "MIT License"
settings = "os", "compiler", "build_type", "arch"
generators = "cmake"
build_policy = "missing" # Some of the dependencies don't have builds for all our targets
options = {"shared": [True, False], "fPIC": [True, False], "with_profiling": [True, False]}
default_options = {"shared": False, "fPIC": True, "with_profiling": False}
exports_sources = ("include/*", "CMakeLists.txt")
def set_version(self):
self.version = "0.1.0"
def build_requirements(self):
# TODO: for some reason these doesn't work
# self.build_requires("libcurl/7.80.0")
# self.build_requires("opencv/4.5.3")
if self.options.with_profiling:
self.build_requires("tracy-interface/0.1.0")
def config_options(self):
if self.settings.os == "Windows":
del self.options.fPIC
def build(self):
# Note: options "shared" and "fPIC" are automatically handled in CMake
cmake = self._configure_cmake()
cmake.build()
def package(self):
cmake = self._configure_cmake()
cmake.install()
def package_info(self):
self.cpp_info.libs = self.collect_libs()
def _configure_cmake(self):
cmake = CMake(self)
cmake.definitions["structured_concurrency_example.with_profiling"] = self.options.with_profiling
if self.settings.compiler == "Visual Studio" and self.options.shared:
cmake.definitions["CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS"] = True
cmake.configure(source_folder=None)
return cmake
|
[
"conans.CMake"
] |
[((1667, 1678), 'conans.CMake', 'CMake', (['self'], {}), '(self)\n', (1672, 1678), False, 'from conans import ConanFile, CMake, tools\n')]
|
from datetime import timedelta
from hashlib import md5
from unittest.mock import patch
from django.contrib.auth import get_user_model
from django.http import JsonResponse, HttpResponseRedirect, HttpResponse, HttpRequest
from django.test import override_settings, RequestFactory
from axes.apps import AppConfig
from axes.helpers import (
get_cache_timeout,
get_client_str,
get_client_username,
get_client_cache_key,
get_client_parameters,
get_cool_off,
get_cool_off_iso8601,
get_lockout_response,
is_client_ip_address_blacklisted,
is_client_ip_address_whitelisted,
is_client_method_whitelisted,
is_ip_address_in_blacklist,
is_ip_address_in_whitelist,
is_user_attempt_whitelisted,
toggleable,
cleanse_parameters,
)
from axes.models import AccessAttempt
from tests.base import AxesTestCase
@override_settings(AXES_ENABLED=False)
class AxesDisabledTestCase(AxesTestCase):
def test_initialize(self):
AppConfig.logging_initialized = False
AppConfig.initialize()
self.assertFalse(AppConfig.logging_initialized)
def test_toggleable(self):
def is_true():
return True
self.assertTrue(is_true())
self.assertIsNone(toggleable(is_true)())
class CacheTestCase(AxesTestCase):
@override_settings(AXES_COOLOFF_TIME=3) # hours
def test_get_cache_timeout_integer(self):
timeout_seconds = float(60 * 60 * 3)
self.assertEqual(get_cache_timeout(), timeout_seconds)
@override_settings(AXES_COOLOFF_TIME=timedelta(seconds=420))
def test_get_cache_timeout_timedelta(self):
self.assertEqual(get_cache_timeout(), 420)
@override_settings(AXES_COOLOFF_TIME=None)
def test_get_cache_timeout_none(self):
self.assertEqual(get_cache_timeout(), None)
class TimestampTestCase(AxesTestCase):
def test_iso8601(self):
"""
Test get_cool_off_iso8601 correctly translates datetime.timedelta to ISO 8601 formatted duration.
"""
expected = {
timedelta(days=1, hours=25, minutes=42, seconds=8): "P2DT1H42M8S",
timedelta(days=7, seconds=342): "P7DT5M42S",
timedelta(days=0, hours=2, minutes=42): "PT2H42M",
timedelta(hours=20, seconds=42): "PT20H42S",
timedelta(seconds=300): "PT5M",
timedelta(seconds=9005): "PT2H30M5S",
timedelta(minutes=9005): "P6DT6H5M",
timedelta(days=15): "P15D",
}
for delta, iso_duration in expected.items():
with self.subTest(iso_duration):
self.assertEqual(get_cool_off_iso8601(delta), iso_duration)
class ClientStringTestCase(AxesTestCase):
@staticmethod
def get_expected_client_str(*args, **kwargs):
client_str_template = '{{username: "{0}", ip_address: "{1}", user_agent: "{2}", path_info: "{3}"}}'
return client_str_template.format(*args, **kwargs)
@override_settings(AXES_VERBOSE=True)
def test_verbose_ip_only_client_details(self):
username = "<EMAIL>"
ip_address = "127.0.0.1"
user_agent = "Googlebot/2.1 (+http://www.googlebot.com/bot.html)"
path_info = "/admin/"
expected = self.get_expected_client_str(
username, ip_address, user_agent, path_info
)
actual = get_client_str(username, ip_address, user_agent, path_info)
self.assertEqual(expected, actual)
@override_settings(AXES_VERBOSE=True)
def test_imbalanced_quotes(self):
username = "butterfly.. },,,"
ip_address = "127.0.0.1"
user_agent = "Googlebot/2.1 (+http://www.googlebot.com/bot.html)"
path_info = "/admin/"
expected = self.get_expected_client_str(
username, ip_address, user_agent, path_info
)
actual = get_client_str(username, ip_address, user_agent, path_info)
self.assertEqual(expected, actual)
@override_settings(AXES_VERBOSE=True)
def test_verbose_ip_only_client_details_tuple(self):
username = "<EMAIL>"
ip_address = "127.0.0.1"
user_agent = "Googlebot/2.1 (+http://www.googlebot.com/bot.html)"
path_info = ("admin", "login")
expected = self.get_expected_client_str(
username, ip_address, user_agent, path_info[0]
)
actual = get_client_str(username, ip_address, user_agent, path_info)
self.assertEqual(expected, actual)
@override_settings(AXES_VERBOSE=False)
def test_non_verbose_ip_only_client_details(self):
username = "<EMAIL>"
ip_address = "127.0.0.1"
user_agent = "Googlebot/2.1 (+http://www.googlebot.com/bot.html)"
path_info = "/admin/"
expected = '{ip_address: "127.0.0.1", path_info: "/admin/"}'
actual = get_client_str(username, ip_address, user_agent, path_info)
self.assertEqual(expected, actual)
@override_settings(AXES_ONLY_USER_FAILURES=True)
@override_settings(AXES_VERBOSE=True)
def test_verbose_user_only_client_details(self):
username = "<EMAIL>"
ip_address = "127.0.0.1"
user_agent = "Googlebot/2.1 (+http://www.googlebot.com/bot.html)"
path_info = "/admin/"
expected = self.get_expected_client_str(
username, ip_address, user_agent, path_info
)
actual = get_client_str(username, ip_address, user_agent, path_info)
self.assertEqual(expected, actual)
@override_settings(AXES_ONLY_USER_FAILURES=True)
@override_settings(AXES_VERBOSE=False)
def test_non_verbose_user_only_client_details(self):
username = "<EMAIL>"
ip_address = "127.0.0.1"
user_agent = "Googlebot/2.1 (+http://www.googlebot.com/bot.html)"
path_info = "/admin/"
expected = '{username: "<EMAIL>", path_info: "/admin/"}'
actual = get_client_str(username, ip_address, user_agent, path_info)
self.assertEqual(expected, actual)
@override_settings(AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP=True)
@override_settings(AXES_VERBOSE=True)
def test_verbose_user_ip_combo_client_details(self):
username = "<EMAIL>"
ip_address = "127.0.0.1"
user_agent = "Googlebot/2.1 (+http://www.googlebot.com/bot.html)"
path_info = "/admin/"
expected = self.get_expected_client_str(
username, ip_address, user_agent, path_info
)
actual = get_client_str(username, ip_address, user_agent, path_info)
self.assertEqual(expected, actual)
@override_settings(AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP=True)
@override_settings(AXES_VERBOSE=False)
def test_non_verbose_user_ip_combo_client_details(self):
username = "<EMAIL>"
ip_address = "127.0.0.1"
user_agent = "Googlebot/2.1 (+http://www.googlebot.com/bot.html)"
path_info = "/admin/"
expected = '{username: "<EMAIL>", ip_address: "127.0.0.1", path_info: "/admin/"}'
actual = get_client_str(username, ip_address, user_agent, path_info)
self.assertEqual(expected, actual)
@override_settings(AXES_USE_USER_AGENT=True)
@override_settings(AXES_VERBOSE=True)
def test_verbose_user_agent_client_details(self):
username = "<EMAIL>"
ip_address = "127.0.0.1"
user_agent = "Googlebot/2.1 (+http://www.googlebot.com/bot.html)"
path_info = "/admin/"
expected = self.get_expected_client_str(
username, ip_address, user_agent, path_info
)
actual = get_client_str(username, ip_address, user_agent, path_info)
self.assertEqual(expected, actual)
@override_settings(AXES_USE_USER_AGENT=True)
@override_settings(AXES_VERBOSE=False)
def test_non_verbose_user_agent_client_details(self):
username = "<EMAIL>"
ip_address = "127.0.0.1"
user_agent = "Googlebot/2.1 (+http://www.googlebot.com/bot.html)"
path_info = "/admin/"
expected = '{ip_address: "127.0.0.1", user_agent: "Googlebot/2.1 (+http://www.googlebot.com/bot.html)", path_info: "/admin/"}'
actual = get_client_str(username, ip_address, user_agent, path_info)
self.assertEqual(expected, actual)
@override_settings(AXES_CLIENT_STR_CALLABLE="tests.test_helpers.get_dummy_client_str")
def test_get_client_str_callable(self):
self.assertEqual(
get_client_str("username", "ip_address", "user_agent", "path_info"),
"client string"
)
def get_dummy_client_str(username, ip_address, user_agent, path_info):
return "client string"
class ClientParametersTestCase(AxesTestCase):
@override_settings(AXES_ONLY_USER_FAILURES=True)
def test_get_filter_kwargs_user(self):
self.assertEqual(
get_client_parameters(self.username, self.ip_address, self.user_agent),
[{"username": self.username}],
)
@override_settings(
AXES_ONLY_USER_FAILURES=False,
AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP=False,
AXES_USE_USER_AGENT=False,
)
def test_get_filter_kwargs_ip(self):
self.assertEqual(
get_client_parameters(self.username, self.ip_address, self.user_agent),
[{"ip_address": self.ip_address}],
)
@override_settings(
AXES_ONLY_USER_FAILURES=False,
AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP=True,
AXES_USE_USER_AGENT=False,
)
def test_get_filter_kwargs_user_and_ip(self):
self.assertEqual(
get_client_parameters(self.username, self.ip_address, self.user_agent),
[{"username": self.username, "ip_address": self.ip_address}],
)
@override_settings(
AXES_ONLY_USER_FAILURES=False,
AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP=False,
AXES_LOCK_OUT_BY_USER_OR_IP=True,
AXES_USE_USER_AGENT=False,
)
def test_get_filter_kwargs_user_or_ip(self):
self.assertEqual(
get_client_parameters(self.username, self.ip_address, self.user_agent),
[{"username": self.username}, {"ip_address": self.ip_address}],
)
@override_settings(
AXES_ONLY_USER_FAILURES=False,
AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP=False,
AXES_USE_USER_AGENT=True,
)
def test_get_filter_kwargs_ip_and_agent(self):
self.assertEqual(
get_client_parameters(self.username, self.ip_address, self.user_agent),
[{"ip_address": self.ip_address}, {"user_agent": self.user_agent}],
)
@override_settings(
AXES_ONLY_USER_FAILURES=False,
AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP=True,
AXES_USE_USER_AGENT=True,
)
def test_get_filter_kwargs_user_ip_agent(self):
self.assertEqual(
get_client_parameters(self.username, self.ip_address, self.user_agent),
[
{"username": self.username, "ip_address": self.ip_address},
{"user_agent": self.user_agent},
],
)
class ClientCacheKeyTestCase(AxesTestCase):
def test_get_cache_key(self):
"""
Test the cache key format.
"""
cache_hash_digest = md5(self.ip_address.encode()).hexdigest()
cache_hash_key = f"axes-{cache_hash_digest}"
# Getting cache key from request
request_factory = RequestFactory()
request = request_factory.post(
"/admin/login/", data={"username": self.username, "password": "<PASSWORD>"}
)
self.assertEqual([cache_hash_key], get_client_cache_key(request))
# Getting cache key from AccessAttempt Object
attempt = AccessAttempt(
user_agent="<unknown>",
ip_address=self.ip_address,
username=self.username,
get_data="",
post_data="",
http_accept=request.META.get("HTTP_ACCEPT", "<unknown>"),
path_info=request.META.get("PATH_INFO", "<unknown>"),
failures_since_start=0,
)
self.assertEqual([cache_hash_key], get_client_cache_key(attempt))
def test_get_cache_key_empty_ip_address(self):
"""
Simulate an empty IP address in the request.
"""
empty_ip_address = ""
cache_hash_digest = md5(empty_ip_address.encode()).hexdigest()
cache_hash_key = f"axes-{cache_hash_digest}"
# Getting cache key from request
request_factory = RequestFactory()
request = request_factory.post(
"/admin/login/",
data={"username": self.username, "password": "<PASSWORD>"},
REMOTE_ADDR=empty_ip_address,
)
self.assertEqual([cache_hash_key], get_client_cache_key(request))
# Getting cache key from AccessAttempt Object
attempt = AccessAttempt(
user_agent="<unknown>",
ip_address=empty_ip_address,
username=self.username,
get_data="",
post_data="",
http_accept=request.META.get("HTTP_ACCEPT", "<unknown>"),
path_info=request.META.get("PATH_INFO", "<unknown>"),
failures_since_start=0,
)
self.assertEqual([cache_hash_key], get_client_cache_key(attempt))
def test_get_cache_key_credentials(self):
"""
Test the cache key format.
"""
ip_address = self.ip_address
cache_hash_digest = md5(ip_address.encode()).hexdigest()
cache_hash_key = f"axes-{cache_hash_digest}"
# Getting cache key from request
request_factory = RequestFactory()
request = request_factory.post(
"/admin/login/", data={"username": self.username, "password": "<PASSWORD>"}
)
# Difference between the upper test: new call signature with credentials
credentials = {"username": self.username}
self.assertEqual([cache_hash_key], get_client_cache_key(request, credentials))
# Getting cache key from AccessAttempt Object
attempt = AccessAttempt(
user_agent="<unknown>",
ip_address=ip_address,
username=self.username,
get_data="",
post_data="",
http_accept=request.META.get("HTTP_ACCEPT", "<unknown>"),
path_info=request.META.get("PATH_INFO", "<unknown>"),
failures_since_start=0,
)
self.assertEqual([cache_hash_key], get_client_cache_key(attempt))
class UsernameTestCase(AxesTestCase):
@override_settings(AXES_USERNAME_FORM_FIELD="username")
def test_default_get_client_username(self):
expected = "test-username"
request = HttpRequest()
request.POST["username"] = expected
actual = get_client_username(request)
self.assertEqual(expected, actual)
def test_default_get_client_username_drf(self):
class DRFRequest:
def __init__(self):
self.data = {}
self.POST = {}
expected = "test-username"
request = DRFRequest()
request.data["username"] = expected
actual = get_client_username(request)
self.assertEqual(expected, actual)
@override_settings(AXES_USERNAME_FORM_FIELD="username")
def test_default_get_client_username_credentials(self):
expected = "test-username"
expected_in_credentials = "test-credentials-username"
request = HttpRequest()
request.POST["username"] = expected
credentials = {"username": expected_in_credentials}
actual = get_client_username(request, credentials)
self.assertEqual(expected_in_credentials, actual)
def sample_customize_username(request, credentials):
return "prefixed-" + request.POST.get("username")
@override_settings(AXES_USERNAME_FORM_FIELD="username")
@override_settings(AXES_USERNAME_CALLABLE=sample_customize_username)
def test_custom_get_client_username_from_request(self):
provided = "test-username"
expected = "prefixed-" + provided
provided_in_credentials = "test-credentials-username"
request = HttpRequest()
request.POST["username"] = provided
credentials = {"username": provided_in_credentials}
actual = get_client_username(request, credentials)
self.assertEqual(expected, actual)
def sample_customize_username_credentials(request, credentials):
return "prefixed-" + credentials.get("username")
@override_settings(AXES_USERNAME_FORM_FIELD="username")
@override_settings(AXES_USERNAME_CALLABLE=sample_customize_username_credentials)
def test_custom_get_client_username_from_credentials(self):
provided = "test-username"
provided_in_credentials = "test-credentials-username"
expected_in_credentials = "prefixed-" + provided_in_credentials
request = HttpRequest()
request.POST["username"] = provided
credentials = {"username": provided_in_credentials}
actual = get_client_username(request, credentials)
self.assertEqual(expected_in_credentials, actual)
@override_settings(
AXES_USERNAME_CALLABLE=lambda request, credentials: "example"
) # pragma: no cover
def test_get_client_username(self):
self.assertEqual(get_client_username(HttpRequest(), {}), "example")
@override_settings(AXES_USERNAME_CALLABLE=lambda request: None) # pragma: no cover
def test_get_client_username_invalid_callable_too_few_arguments(self):
with self.assertRaises(TypeError):
get_client_username(HttpRequest(), {})
@override_settings(
AXES_USERNAME_CALLABLE=lambda request, credentials, extra: None
) # pragma: no cover
def test_get_client_username_invalid_callable_too_many_arguments(self):
with self.assertRaises(TypeError):
get_client_username(HttpRequest(), {})
@override_settings(AXES_USERNAME_CALLABLE=True)
def test_get_client_username_not_callable(self):
with self.assertRaises(TypeError):
get_client_username(HttpRequest(), {})
@override_settings(AXES_USERNAME_CALLABLE="tests.test_helpers.get_username")
def test_get_client_username_str(self):
self.assertEqual(get_client_username(HttpRequest(), {}), "username")
def get_username(request, credentials: dict) -> str:
return "username"
class IPWhitelistTestCase(AxesTestCase):
def setUp(self):
self.request = HttpRequest()
self.request.method = "POST"
self.request.META["REMOTE_ADDR"] = "127.0.0.1"
self.request.axes_ip_address = "127.0.0.1"
@override_settings(AXES_IP_WHITELIST=None)
def test_ip_in_whitelist_none(self):
self.assertFalse(is_ip_address_in_whitelist("127.0.0.2"))
@override_settings(AXES_IP_WHITELIST=["127.0.0.1"])
def test_ip_in_whitelist(self):
self.assertTrue(is_ip_address_in_whitelist("127.0.0.1"))
self.assertFalse(is_ip_address_in_whitelist("127.0.0.2"))
@override_settings(AXES_IP_BLACKLIST=None)
def test_ip_in_blacklist_none(self):
self.assertFalse(is_ip_address_in_blacklist("127.0.0.2"))
@override_settings(AXES_IP_BLACKLIST=["127.0.0.1"])
def test_ip_in_blacklist(self):
self.assertTrue(is_ip_address_in_blacklist("127.0.0.1"))
self.assertFalse(is_ip_address_in_blacklist("127.0.0.2"))
@override_settings(AXES_IP_BLACKLIST=["127.0.0.1"])
def test_is_client_ip_address_blacklisted_ip_in_blacklist(self):
self.assertTrue(is_client_ip_address_blacklisted(self.request))
@override_settings(AXES_IP_BLACKLIST=["127.0.0.2"])
def test_is_is_client_ip_address_blacklisted_ip_not_in_blacklist(self):
self.assertFalse(is_client_ip_address_blacklisted(self.request))
@override_settings(AXES_NEVER_LOCKOUT_WHITELIST=True)
@override_settings(AXES_IP_WHITELIST=["127.0.0.1"])
def test_is_client_ip_address_blacklisted_ip_in_whitelist(self):
self.assertFalse(is_client_ip_address_blacklisted(self.request))
@override_settings(AXES_ONLY_WHITELIST=True)
@override_settings(AXES_IP_WHITELIST=["127.0.0.2"])
def test_is_already_locked_ip_not_in_whitelist(self):
self.assertTrue(is_client_ip_address_blacklisted(self.request))
@override_settings(AXES_NEVER_LOCKOUT_WHITELIST=True)
@override_settings(AXES_IP_WHITELIST=["127.0.0.1"])
def test_is_client_ip_address_whitelisted_never_lockout(self):
self.assertTrue(is_client_ip_address_whitelisted(self.request))
@override_settings(AXES_ONLY_WHITELIST=True)
@override_settings(AXES_IP_WHITELIST=["127.0.0.1"])
def test_is_client_ip_address_whitelisted_only_allow(self):
self.assertTrue(is_client_ip_address_whitelisted(self.request))
@override_settings(AXES_ONLY_WHITELIST=True)
@override_settings(AXES_IP_WHITELIST=["127.0.0.2"])
def test_is_client_ip_address_whitelisted_not(self):
self.assertFalse(is_client_ip_address_whitelisted(self.request))
class MethodWhitelistTestCase(AxesTestCase):
def setUp(self):
self.request = HttpRequest()
self.request.method = "GET"
@override_settings(AXES_NEVER_LOCKOUT_GET=True)
def test_is_client_method_whitelisted(self):
self.assertTrue(is_client_method_whitelisted(self.request))
@override_settings(AXES_NEVER_LOCKOUT_GET=False)
def test_is_client_method_whitelisted_not(self):
self.assertFalse(is_client_method_whitelisted(self.request))
class LockoutResponseTestCase(AxesTestCase):
def setUp(self):
self.request = HttpRequest()
@override_settings(AXES_COOLOFF_TIME=42)
def test_get_lockout_response_cool_off(self):
get_lockout_response(request=self.request)
@override_settings(AXES_LOCKOUT_TEMPLATE="example.html")
@patch("axes.helpers.render")
def test_get_lockout_response_lockout_template(self, render):
self.assertFalse(render.called)
get_lockout_response(request=self.request)
self.assertTrue(render.called)
@override_settings(AXES_LOCKOUT_URL="https://example.com")
def test_get_lockout_response_lockout_url(self):
response = get_lockout_response(request=self.request)
self.assertEqual(type(response), HttpResponseRedirect)
def test_get_lockout_response_lockout_json(self):
self.request.META["HTTP_X_REQUESTED_WITH"] = "XMLHttpRequest"
response = get_lockout_response(request=self.request)
self.assertEqual(type(response), JsonResponse)
def test_get_lockout_response_lockout_response(self):
response = get_lockout_response(request=self.request)
self.assertEqual(type(response), HttpResponse)
def mock_get_cool_off_str():
return timedelta(seconds=30)
class AxesCoolOffTestCase(AxesTestCase):
@override_settings(AXES_COOLOFF_TIME=None)
def test_get_cool_off_none(self):
self.assertIsNone(get_cool_off())
@override_settings(AXES_COOLOFF_TIME=2)
def test_get_cool_off_int(self):
self.assertEqual(get_cool_off(), timedelta(hours=2))
@override_settings(AXES_COOLOFF_TIME=lambda: timedelta(seconds=30))
def test_get_cool_off_callable(self):
self.assertEqual(get_cool_off(), timedelta(seconds=30))
@override_settings(AXES_COOLOFF_TIME="tests.test_helpers.mock_get_cool_off_str")
def test_get_cool_off_path(self):
self.assertEqual(get_cool_off(), timedelta(seconds=30))
def mock_is_whitelisted(request, credentials):
return True
class AxesWhitelistTestCase(AxesTestCase):
def setUp(self):
self.user_model = get_user_model()
self.user = self.user_model.objects.create(username="jane.doe")
self.request = HttpRequest()
self.credentials = dict()
def test_is_whitelisted(self):
self.assertFalse(is_user_attempt_whitelisted(self.request, self.credentials))
@override_settings(AXES_WHITELIST_CALLABLE=mock_is_whitelisted)
def test_is_whitelisted_override_callable(self):
self.assertTrue(is_user_attempt_whitelisted(self.request, self.credentials))
@override_settings(AXES_WHITELIST_CALLABLE="tests.test_helpers.mock_is_whitelisted")
def test_is_whitelisted_override_path(self):
self.assertTrue(is_user_attempt_whitelisted(self.request, self.credentials))
@override_settings(AXES_WHITELIST_CALLABLE=42)
def test_is_whitelisted_override_invalid(self):
with self.assertRaises(TypeError):
is_user_attempt_whitelisted(self.request, self.credentials)
def mock_get_lockout_response(request, credentials):
return HttpResponse(status=400)
class AxesLockoutTestCase(AxesTestCase):
def setUp(self):
self.request = HttpRequest()
self.credentials = dict()
def test_get_lockout_response(self):
response = get_lockout_response(self.request, self.credentials)
self.assertEqual(403, response.status_code)
@override_settings(AXES_LOCKOUT_CALLABLE=mock_get_lockout_response)
def test_get_lockout_response_override_callable(self):
response = get_lockout_response(self.request, self.credentials)
self.assertEqual(400, response.status_code)
@override_settings(
AXES_LOCKOUT_CALLABLE="tests.test_helpers.mock_get_lockout_response"
)
def test_get_lockout_response_override_path(self):
response = get_lockout_response(self.request, self.credentials)
self.assertEqual(400, response.status_code)
@override_settings(AXES_LOCKOUT_CALLABLE=42)
def test_get_lockout_response_override_invalid(self):
with self.assertRaises(TypeError):
get_lockout_response(self.request, self.credentials)
class AxesCleanseParamsTestCase(AxesTestCase):
def setUp(self):
self.parameters = {
"username": "test_user",
"password": "<PASSWORD>",
"other_sensitive_data": "sensitive",
}
def test_cleanse_parameters(self):
cleansed = cleanse_parameters(self.parameters)
self.assertEqual("test_user", cleansed["username"])
self.assertEqual("********************", cleansed["password"])
self.assertEqual("sensitive", cleansed["other_sensitive_data"])
@override_settings(AXES_SENSITIVE_PARAMETERS=["other_sensitive_data"])
def test_cleanse_parameters_override_sensitive(self):
cleansed = cleanse_parameters(self.parameters)
self.assertEqual("test_user", cleansed["username"])
self.assertEqual("********************", cleansed["password"])
self.assertEqual("********************", cleansed["other_sensitive_data"])
@override_settings(AXES_SENSITIVE_PARAMETERS=["other_sensitive_data"])
@override_settings(AXES_PASSWORD_FORM_FIELD="username")
def test_cleanse_parameters_override_both(self):
cleansed = cleanse_parameters(self.parameters)
self.assertEqual("********************", cleansed["username"])
self.assertEqual("********************", cleansed["password"])
self.assertEqual("********************", cleansed["other_sensitive_data"])
@override_settings(AXES_PASSWORD_FORM_FIELD=None)
def test_cleanse_parameters_override_empty(self):
cleansed = cleanse_parameters(self.parameters)
self.assertEqual("test_user", cleansed["username"])
self.assertEqual("********************", cleansed["password"])
self.assertEqual("sensitive", cleansed["other_sensitive_data"])
|
[
"axes.helpers.is_client_ip_address_whitelisted",
"axes.helpers.get_cool_off_iso8601",
"axes.helpers.get_client_str",
"axes.helpers.is_ip_address_in_whitelist",
"axes.helpers.toggleable",
"axes.helpers.get_cool_off",
"axes.helpers.get_cache_timeout",
"django.http.HttpResponse",
"datetime.timedelta",
"axes.helpers.get_client_username",
"axes.helpers.is_client_ip_address_blacklisted",
"django.test.RequestFactory",
"axes.helpers.get_lockout_response",
"unittest.mock.patch",
"axes.helpers.cleanse_parameters",
"axes.helpers.is_ip_address_in_blacklist",
"django.test.override_settings",
"axes.helpers.is_user_attempt_whitelisted",
"django.http.HttpRequest",
"django.contrib.auth.get_user_model",
"axes.apps.AppConfig.initialize",
"axes.helpers.get_client_cache_key",
"axes.helpers.is_client_method_whitelisted",
"axes.helpers.get_client_parameters"
] |
[((856, 893), 'django.test.override_settings', 'override_settings', ([], {'AXES_ENABLED': '(False)'}), '(AXES_ENABLED=False)\n', (873, 893), False, 'from django.test import override_settings, RequestFactory\n'), ((1306, 1344), 'django.test.override_settings', 'override_settings', ([], {'AXES_COOLOFF_TIME': '(3)'}), '(AXES_COOLOFF_TIME=3)\n', (1323, 1344), False, 'from django.test import override_settings, RequestFactory\n'), ((1679, 1720), 'django.test.override_settings', 'override_settings', ([], {'AXES_COOLOFF_TIME': 'None'}), '(AXES_COOLOFF_TIME=None)\n', (1696, 1720), False, 'from django.test import override_settings, RequestFactory\n'), ((2946, 2982), 'django.test.override_settings', 'override_settings', ([], {'AXES_VERBOSE': '(True)'}), '(AXES_VERBOSE=True)\n', (2963, 2982), False, 'from django.test import override_settings, RequestFactory\n'), ((3443, 3479), 'django.test.override_settings', 'override_settings', ([], {'AXES_VERBOSE': '(True)'}), '(AXES_VERBOSE=True)\n', (3460, 3479), False, 'from django.test import override_settings, RequestFactory\n'), ((3936, 3972), 'django.test.override_settings', 'override_settings', ([], {'AXES_VERBOSE': '(True)'}), '(AXES_VERBOSE=True)\n', (3953, 3972), False, 'from django.test import override_settings, RequestFactory\n'), ((4451, 4488), 'django.test.override_settings', 'override_settings', ([], {'AXES_VERBOSE': '(False)'}), '(AXES_VERBOSE=False)\n', (4468, 4488), False, 'from django.test import override_settings, RequestFactory\n'), ((4907, 4954), 'django.test.override_settings', 'override_settings', ([], {'AXES_ONLY_USER_FAILURES': '(True)'}), '(AXES_ONLY_USER_FAILURES=True)\n', (4924, 4954), False, 'from django.test import override_settings, RequestFactory\n'), ((4960, 4996), 'django.test.override_settings', 'override_settings', ([], {'AXES_VERBOSE': '(True)'}), '(AXES_VERBOSE=True)\n', (4977, 4996), False, 'from django.test import override_settings, RequestFactory\n'), ((5459, 5506), 'django.test.override_settings', 
'override_settings', ([], {'AXES_ONLY_USER_FAILURES': '(True)'}), '(AXES_ONLY_USER_FAILURES=True)\n', (5476, 5506), False, 'from django.test import override_settings, RequestFactory\n'), ((5512, 5549), 'django.test.override_settings', 'override_settings', ([], {'AXES_VERBOSE': '(False)'}), '(AXES_VERBOSE=False)\n', (5529, 5549), False, 'from django.test import override_settings, RequestFactory\n'), ((5966, 6030), 'django.test.override_settings', 'override_settings', ([], {'AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP': '(True)'}), '(AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP=True)\n', (5983, 6030), False, 'from django.test import override_settings, RequestFactory\n'), ((6036, 6072), 'django.test.override_settings', 'override_settings', ([], {'AXES_VERBOSE': '(True)'}), '(AXES_VERBOSE=True)\n', (6053, 6072), False, 'from django.test import override_settings, RequestFactory\n'), ((6539, 6603), 'django.test.override_settings', 'override_settings', ([], {'AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP': '(True)'}), '(AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP=True)\n', (6556, 6603), False, 'from django.test import override_settings, RequestFactory\n'), ((6609, 6646), 'django.test.override_settings', 'override_settings', ([], {'AXES_VERBOSE': '(False)'}), '(AXES_VERBOSE=False)\n', (6626, 6646), False, 'from django.test import override_settings, RequestFactory\n'), ((7092, 7135), 'django.test.override_settings', 'override_settings', ([], {'AXES_USE_USER_AGENT': '(True)'}), '(AXES_USE_USER_AGENT=True)\n', (7109, 7135), False, 'from django.test import override_settings, RequestFactory\n'), ((7141, 7177), 'django.test.override_settings', 'override_settings', ([], {'AXES_VERBOSE': '(True)'}), '(AXES_VERBOSE=True)\n', (7158, 7177), False, 'from django.test import override_settings, RequestFactory\n'), ((7641, 7684), 'django.test.override_settings', 'override_settings', ([], {'AXES_USE_USER_AGENT': '(True)'}), '(AXES_USE_USER_AGENT=True)\n', (7658, 7684), False, 'from django.test import 
override_settings, RequestFactory\n'), ((7690, 7727), 'django.test.override_settings', 'override_settings', ([], {'AXES_VERBOSE': '(False)'}), '(AXES_VERBOSE=False)\n', (7707, 7727), False, 'from django.test import override_settings, RequestFactory\n'), ((8215, 8305), 'django.test.override_settings', 'override_settings', ([], {'AXES_CLIENT_STR_CALLABLE': '"""tests.test_helpers.get_dummy_client_str"""'}), "(AXES_CLIENT_STR_CALLABLE=\n 'tests.test_helpers.get_dummy_client_str')\n", (8232, 8305), False, 'from django.test import override_settings, RequestFactory\n'), ((8642, 8689), 'django.test.override_settings', 'override_settings', ([], {'AXES_ONLY_USER_FAILURES': '(True)'}), '(AXES_ONLY_USER_FAILURES=True)\n', (8659, 8689), False, 'from django.test import override_settings, RequestFactory\n'), ((8902, 9029), 'django.test.override_settings', 'override_settings', ([], {'AXES_ONLY_USER_FAILURES': '(False)', 'AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP': '(False)', 'AXES_USE_USER_AGENT': '(False)'}), '(AXES_ONLY_USER_FAILURES=False,\n AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP=False, AXES_USE_USER_AGENT=False)\n', (8919, 9029), False, 'from django.test import override_settings, RequestFactory\n'), ((9271, 9397), 'django.test.override_settings', 'override_settings', ([], {'AXES_ONLY_USER_FAILURES': '(False)', 'AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP': '(True)', 'AXES_USE_USER_AGENT': '(False)'}), '(AXES_ONLY_USER_FAILURES=False,\n AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP=True, AXES_USE_USER_AGENT=False)\n', (9288, 9397), False, 'from django.test import override_settings, RequestFactory\n'), ((9675, 9840), 'django.test.override_settings', 'override_settings', ([], {'AXES_ONLY_USER_FAILURES': '(False)', 'AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP': '(False)', 'AXES_LOCK_OUT_BY_USER_OR_IP': '(True)', 'AXES_USE_USER_AGENT': '(False)'}), '(AXES_ONLY_USER_FAILURES=False,\n AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP=False,\n AXES_LOCK_OUT_BY_USER_OR_IP=True, 
AXES_USE_USER_AGENT=False)\n', (9692, 9840), False, 'from django.test import override_settings, RequestFactory\n'), ((10123, 10249), 'django.test.override_settings', 'override_settings', ([], {'AXES_ONLY_USER_FAILURES': '(False)', 'AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP': '(False)', 'AXES_USE_USER_AGENT': '(True)'}), '(AXES_ONLY_USER_FAILURES=False,\n AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP=False, AXES_USE_USER_AGENT=True)\n', (10140, 10249), False, 'from django.test import override_settings, RequestFactory\n'), ((10534, 10659), 'django.test.override_settings', 'override_settings', ([], {'AXES_ONLY_USER_FAILURES': '(False)', 'AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP': '(True)', 'AXES_USE_USER_AGENT': '(True)'}), '(AXES_ONLY_USER_FAILURES=False,\n AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP=True, AXES_USE_USER_AGENT=True)\n', (10551, 10659), False, 'from django.test import override_settings, RequestFactory\n'), ((14481, 14535), 'django.test.override_settings', 'override_settings', ([], {'AXES_USERNAME_FORM_FIELD': '"""username"""'}), "(AXES_USERNAME_FORM_FIELD='username')\n", (14498, 14535), False, 'from django.test import override_settings, RequestFactory\n'), ((15169, 15223), 'django.test.override_settings', 'override_settings', ([], {'AXES_USERNAME_FORM_FIELD': '"""username"""'}), "(AXES_USERNAME_FORM_FIELD='username')\n", (15186, 15223), False, 'from django.test import override_settings, RequestFactory\n'), ((15759, 15813), 'django.test.override_settings', 'override_settings', ([], {'AXES_USERNAME_FORM_FIELD': '"""username"""'}), "(AXES_USERNAME_FORM_FIELD='username')\n", (15776, 15813), False, 'from django.test import override_settings, RequestFactory\n'), ((15819, 15886), 'django.test.override_settings', 'override_settings', ([], {'AXES_USERNAME_CALLABLE': 'sample_customize_username'}), '(AXES_USERNAME_CALLABLE=sample_customize_username)\n', (15836, 15886), False, 'from django.test import override_settings, RequestFactory\n'), ((16460, 16514), 
'django.test.override_settings', 'override_settings', ([], {'AXES_USERNAME_FORM_FIELD': '"""username"""'}), "(AXES_USERNAME_FORM_FIELD='username')\n", (16477, 16514), False, 'from django.test import override_settings, RequestFactory\n'), ((16520, 16599), 'django.test.override_settings', 'override_settings', ([], {'AXES_USERNAME_CALLABLE': 'sample_customize_username_credentials'}), '(AXES_USERNAME_CALLABLE=sample_customize_username_credentials)\n', (16537, 16599), False, 'from django.test import override_settings, RequestFactory\n'), ((17095, 17180), 'django.test.override_settings', 'override_settings', ([], {'AXES_USERNAME_CALLABLE': "(lambda request, credentials: 'example')"}), "(AXES_USERNAME_CALLABLE=lambda request, credentials: 'example'\n )\n", (17112, 17180), False, 'from django.test import override_settings, RequestFactory\n'), ((17332, 17394), 'django.test.override_settings', 'override_settings', ([], {'AXES_USERNAME_CALLABLE': '(lambda request: None)'}), '(AXES_USERNAME_CALLABLE=lambda request: None)\n', (17349, 17394), False, 'from django.test import override_settings, RequestFactory\n'), ((17590, 17676), 'django.test.override_settings', 'override_settings', ([], {'AXES_USERNAME_CALLABLE': '(lambda request, credentials, extra: None)'}), '(AXES_USERNAME_CALLABLE=lambda request, credentials, extra:\n None)\n', (17607, 17676), False, 'from django.test import override_settings, RequestFactory\n'), ((17883, 17929), 'django.test.override_settings', 'override_settings', ([], {'AXES_USERNAME_CALLABLE': '(True)'}), '(AXES_USERNAME_CALLABLE=True)\n', (17900, 17929), False, 'from django.test import override_settings, RequestFactory\n'), ((18083, 18158), 'django.test.override_settings', 'override_settings', ([], {'AXES_USERNAME_CALLABLE': '"""tests.test_helpers.get_username"""'}), "(AXES_USERNAME_CALLABLE='tests.test_helpers.get_username')\n", (18100, 18158), False, 'from django.test import override_settings, RequestFactory\n'), ((18607, 18648), 
'django.test.override_settings', 'override_settings', ([], {'AXES_IP_WHITELIST': 'None'}), '(AXES_IP_WHITELIST=None)\n', (18624, 18648), False, 'from django.test import override_settings, RequestFactory\n'), ((18762, 18812), 'django.test.override_settings', 'override_settings', ([], {'AXES_IP_WHITELIST': "['127.0.0.1']"}), "(AXES_IP_WHITELIST=['127.0.0.1'])\n", (18779, 18812), False, 'from django.test import override_settings, RequestFactory\n'), ((18986, 19027), 'django.test.override_settings', 'override_settings', ([], {'AXES_IP_BLACKLIST': 'None'}), '(AXES_IP_BLACKLIST=None)\n', (19003, 19027), False, 'from django.test import override_settings, RequestFactory\n'), ((19141, 19191), 'django.test.override_settings', 'override_settings', ([], {'AXES_IP_BLACKLIST': "['127.0.0.1']"}), "(AXES_IP_BLACKLIST=['127.0.0.1'])\n", (19158, 19191), False, 'from django.test import override_settings, RequestFactory\n'), ((19365, 19415), 'django.test.override_settings', 'override_settings', ([], {'AXES_IP_BLACKLIST': "['127.0.0.1']"}), "(AXES_IP_BLACKLIST=['127.0.0.1'])\n", (19382, 19415), False, 'from django.test import override_settings, RequestFactory\n'), ((19563, 19613), 'django.test.override_settings', 'override_settings', ([], {'AXES_IP_BLACKLIST': "['127.0.0.2']"}), "(AXES_IP_BLACKLIST=['127.0.0.2'])\n", (19580, 19613), False, 'from django.test import override_settings, RequestFactory\n'), ((19769, 19821), 'django.test.override_settings', 'override_settings', ([], {'AXES_NEVER_LOCKOUT_WHITELIST': '(True)'}), '(AXES_NEVER_LOCKOUT_WHITELIST=True)\n', (19786, 19821), False, 'from django.test import override_settings, RequestFactory\n'), ((19827, 19877), 'django.test.override_settings', 'override_settings', ([], {'AXES_IP_WHITELIST': "['127.0.0.1']"}), "(AXES_IP_WHITELIST=['127.0.0.1'])\n", (19844, 19877), False, 'from django.test import override_settings, RequestFactory\n'), ((20026, 20069), 'django.test.override_settings', 'override_settings', ([], {'AXES_ONLY_WHITELIST': 
'(True)'}), '(AXES_ONLY_WHITELIST=True)\n', (20043, 20069), False, 'from django.test import override_settings, RequestFactory\n'), ((20075, 20125), 'django.test.override_settings', 'override_settings', ([], {'AXES_IP_WHITELIST': "['127.0.0.2']"}), "(AXES_IP_WHITELIST=['127.0.0.2'])\n", (20092, 20125), False, 'from django.test import override_settings, RequestFactory\n'), ((20262, 20314), 'django.test.override_settings', 'override_settings', ([], {'AXES_NEVER_LOCKOUT_WHITELIST': '(True)'}), '(AXES_NEVER_LOCKOUT_WHITELIST=True)\n', (20279, 20314), False, 'from django.test import override_settings, RequestFactory\n'), ((20320, 20370), 'django.test.override_settings', 'override_settings', ([], {'AXES_IP_WHITELIST': "['127.0.0.1']"}), "(AXES_IP_WHITELIST=['127.0.0.1'])\n", (20337, 20370), False, 'from django.test import override_settings, RequestFactory\n'), ((20516, 20559), 'django.test.override_settings', 'override_settings', ([], {'AXES_ONLY_WHITELIST': '(True)'}), '(AXES_ONLY_WHITELIST=True)\n', (20533, 20559), False, 'from django.test import override_settings, RequestFactory\n'), ((20565, 20615), 'django.test.override_settings', 'override_settings', ([], {'AXES_IP_WHITELIST': "['127.0.0.1']"}), "(AXES_IP_WHITELIST=['127.0.0.1'])\n", (20582, 20615), False, 'from django.test import override_settings, RequestFactory\n'), ((20758, 20801), 'django.test.override_settings', 'override_settings', ([], {'AXES_ONLY_WHITELIST': '(True)'}), '(AXES_ONLY_WHITELIST=True)\n', (20775, 20801), False, 'from django.test import override_settings, RequestFactory\n'), ((20807, 20857), 'django.test.override_settings', 'override_settings', ([], {'AXES_IP_WHITELIST': "['127.0.0.2']"}), "(AXES_IP_WHITELIST=['127.0.0.2'])\n", (20824, 20857), False, 'from django.test import override_settings, RequestFactory\n'), ((21135, 21181), 'django.test.override_settings', 'override_settings', ([], {'AXES_NEVER_LOCKOUT_GET': '(True)'}), '(AXES_NEVER_LOCKOUT_GET=True)\n', (21152, 21181), False, 'from 
django.test import override_settings, RequestFactory\n'), ((21305, 21352), 'django.test.override_settings', 'override_settings', ([], {'AXES_NEVER_LOCKOUT_GET': '(False)'}), '(AXES_NEVER_LOCKOUT_GET=False)\n', (21322, 21352), False, 'from django.test import override_settings, RequestFactory\n'), ((21586, 21625), 'django.test.override_settings', 'override_settings', ([], {'AXES_COOLOFF_TIME': '(42)'}), '(AXES_COOLOFF_TIME=42)\n', (21603, 21625), False, 'from django.test import override_settings, RequestFactory\n'), ((21733, 21788), 'django.test.override_settings', 'override_settings', ([], {'AXES_LOCKOUT_TEMPLATE': '"""example.html"""'}), "(AXES_LOCKOUT_TEMPLATE='example.html')\n", (21750, 21788), False, 'from django.test import override_settings, RequestFactory\n'), ((21794, 21822), 'unittest.mock.patch', 'patch', (['"""axes.helpers.render"""'], {}), "('axes.helpers.render')\n", (21799, 21822), False, 'from unittest.mock import patch\n'), ((22025, 22082), 'django.test.override_settings', 'override_settings', ([], {'AXES_LOCKOUT_URL': '"""https://example.com"""'}), "(AXES_LOCKOUT_URL='https://example.com')\n", (22042, 22082), False, 'from django.test import override_settings, RequestFactory\n'), ((22721, 22742), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(30)'}), '(seconds=30)\n', (22730, 22742), False, 'from datetime import timedelta\n'), ((22791, 22832), 'django.test.override_settings', 'override_settings', ([], {'AXES_COOLOFF_TIME': 'None'}), '(AXES_COOLOFF_TIME=None)\n', (22808, 22832), False, 'from django.test import override_settings, RequestFactory\n'), ((22919, 22957), 'django.test.override_settings', 'override_settings', ([], {'AXES_COOLOFF_TIME': '(2)'}), '(AXES_COOLOFF_TIME=2)\n', (22936, 22957), False, 'from django.test import override_settings, RequestFactory\n'), ((23241, 23320), 'django.test.override_settings', 'override_settings', ([], {'AXES_COOLOFF_TIME': '"""tests.test_helpers.mock_get_cool_off_str"""'}), 
"(AXES_COOLOFF_TIME='tests.test_helpers.mock_get_cool_off_str')\n", (23258, 23320), False, 'from django.test import override_settings, RequestFactory\n'), ((23868, 23930), 'django.test.override_settings', 'override_settings', ([], {'AXES_WHITELIST_CALLABLE': 'mock_is_whitelisted'}), '(AXES_WHITELIST_CALLABLE=mock_is_whitelisted)\n', (23885, 23930), False, 'from django.test import override_settings, RequestFactory\n'), ((24075, 24163), 'django.test.override_settings', 'override_settings', ([], {'AXES_WHITELIST_CALLABLE': '"""tests.test_helpers.mock_is_whitelisted"""'}), "(AXES_WHITELIST_CALLABLE=\n 'tests.test_helpers.mock_is_whitelisted')\n", (24092, 24163), False, 'from django.test import override_settings, RequestFactory\n'), ((24299, 24344), 'django.test.override_settings', 'override_settings', ([], {'AXES_WHITELIST_CALLABLE': '(42)'}), '(AXES_WHITELIST_CALLABLE=42)\n', (24316, 24344), False, 'from django.test import override_settings, RequestFactory\n'), ((24578, 24602), 'django.http.HttpResponse', 'HttpResponse', ([], {'status': '(400)'}), '(status=400)\n', (24590, 24602), False, 'from django.http import JsonResponse, HttpResponseRedirect, HttpResponse, HttpRequest\n'), ((24910, 24976), 'django.test.override_settings', 'override_settings', ([], {'AXES_LOCKOUT_CALLABLE': 'mock_get_lockout_response'}), '(AXES_LOCKOUT_CALLABLE=mock_get_lockout_response)\n', (24927, 24976), False, 'from django.test import override_settings, RequestFactory\n'), ((25166, 25258), 'django.test.override_settings', 'override_settings', ([], {'AXES_LOCKOUT_CALLABLE': '"""tests.test_helpers.mock_get_lockout_response"""'}), "(AXES_LOCKOUT_CALLABLE=\n 'tests.test_helpers.mock_get_lockout_response')\n", (25183, 25258), False, 'from django.test import override_settings, RequestFactory\n'), ((25453, 25496), 'django.test.override_settings', 'override_settings', ([], {'AXES_LOCKOUT_CALLABLE': '(42)'}), '(AXES_LOCKOUT_CALLABLE=42)\n', (25470, 25496), False, 'from django.test import 
override_settings, RequestFactory\n'), ((26199, 26268), 'django.test.override_settings', 'override_settings', ([], {'AXES_SENSITIVE_PARAMETERS': "['other_sensitive_data']"}), "(AXES_SENSITIVE_PARAMETERS=['other_sensitive_data'])\n", (26216, 26268), False, 'from django.test import override_settings, RequestFactory\n'), ((26602, 26671), 'django.test.override_settings', 'override_settings', ([], {'AXES_SENSITIVE_PARAMETERS': "['other_sensitive_data']"}), "(AXES_SENSITIVE_PARAMETERS=['other_sensitive_data'])\n", (26619, 26671), False, 'from django.test import override_settings, RequestFactory\n'), ((26677, 26731), 'django.test.override_settings', 'override_settings', ([], {'AXES_PASSWORD_FORM_FIELD': '"""username"""'}), "(AXES_PASSWORD_FORM_FIELD='username')\n", (26694, 26731), False, 'from django.test import override_settings, RequestFactory\n'), ((27071, 27119), 'django.test.override_settings', 'override_settings', ([], {'AXES_PASSWORD_FORM_FIELD': 'None'}), '(AXES_PASSWORD_FORM_FIELD=None)\n', (27088, 27119), False, 'from django.test import override_settings, RequestFactory\n'), ((1021, 1043), 'axes.apps.AppConfig.initialize', 'AppConfig.initialize', ([], {}), '()\n', (1041, 1043), False, 'from axes.apps import AppConfig\n'), ((3333, 3392), 'axes.helpers.get_client_str', 'get_client_str', (['username', 'ip_address', 'user_agent', 'path_info'], {}), '(username, ip_address, user_agent, path_info)\n', (3347, 3392), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((3826, 3885), 'axes.helpers.get_client_str', 'get_client_str', (['username', 'ip_address', 'user_agent', 'path_info'], {}), '(username, ip_address, 
user_agent, path_info)\n', (3840, 3885), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((4341, 4400), 'axes.helpers.get_client_str', 'get_client_str', (['username', 'ip_address', 'user_agent', 'path_info'], {}), '(username, ip_address, user_agent, path_info)\n', (4355, 4400), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((4797, 4856), 'axes.helpers.get_client_str', 'get_client_str', (['username', 'ip_address', 'user_agent', 'path_info'], {}), '(username, ip_address, user_agent, path_info)\n', (4811, 4856), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((5349, 5408), 'axes.helpers.get_client_str', 'get_client_str', (['username', 'ip_address', 'user_agent', 'path_info'], {}), '(username, ip_address, user_agent, path_info)\n', (5363, 5408), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, 
get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((5856, 5915), 'axes.helpers.get_client_str', 'get_client_str', (['username', 'ip_address', 'user_agent', 'path_info'], {}), '(username, ip_address, user_agent, path_info)\n', (5870, 5915), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((6429, 6488), 'axes.helpers.get_client_str', 'get_client_str', (['username', 'ip_address', 'user_agent', 'path_info'], {}), '(username, ip_address, user_agent, path_info)\n', (6443, 6488), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((6982, 7041), 'axes.helpers.get_client_str', 'get_client_str', (['username', 'ip_address', 'user_agent', 'path_info'], {}), '(username, ip_address, user_agent, path_info)\n', (6996, 7041), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, 
is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((7531, 7590), 'axes.helpers.get_client_str', 'get_client_str', (['username', 'ip_address', 'user_agent', 'path_info'], {}), '(username, ip_address, user_agent, path_info)\n', (7545, 7590), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((8105, 8164), 'axes.helpers.get_client_str', 'get_client_str', (['username', 'ip_address', 'user_agent', 'path_info'], {}), '(username, ip_address, user_agent, path_info)\n', (8119, 8164), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((11344, 11360), 'django.test.RequestFactory', 'RequestFactory', ([], {}), '()\n', (11358, 11360), False, 'from django.test import override_settings, RequestFactory\n'), ((12435, 12451), 'django.test.RequestFactory', 'RequestFactory', ([], {}), '()\n', (12449, 12451), False, 'from django.test import override_settings, RequestFactory\n'), ((13559, 13575), 'django.test.RequestFactory', 'RequestFactory', ([], {}), '()\n', (13573, 13575), False, 'from django.test import override_settings, RequestFactory\n'), ((14638, 14651), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (14649, 14651), False, 'from django.http import JsonResponse, HttpResponseRedirect, HttpResponse, HttpRequest\n'), 
((14714, 14742), 'axes.helpers.get_client_username', 'get_client_username', (['request'], {}), '(request)\n', (14733, 14742), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((15090, 15118), 'axes.helpers.get_client_username', 'get_client_username', (['request'], {}), '(request)\n', (15109, 15118), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((15400, 15413), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (15411, 15413), False, 'from django.http import JsonResponse, HttpResponseRedirect, HttpResponse, HttpRequest\n'), ((15536, 15577), 'axes.helpers.get_client_username', 'get_client_username', (['request', 'credentials'], {}), '(request, credentials)\n', (15555, 15577), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((16105, 16118), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (16116, 16118), False, 'from django.http import JsonResponse, HttpResponseRedirect, 
HttpResponse, HttpRequest\n'), ((16241, 16282), 'axes.helpers.get_client_username', 'get_client_username', (['request', 'credentials'], {}), '(request, credentials)\n', (16260, 16282), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((16852, 16865), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (16863, 16865), False, 'from django.http import JsonResponse, HttpResponseRedirect, HttpResponse, HttpRequest\n'), ((16988, 17029), 'axes.helpers.get_client_username', 'get_client_username', (['request', 'credentials'], {}), '(request, credentials)\n', (17007, 17029), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((18444, 18457), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (18455, 18457), False, 'from django.http import JsonResponse, HttpResponseRedirect, HttpResponse, HttpRequest\n'), ((21079, 21092), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (21090, 21092), False, 'from django.http import JsonResponse, HttpResponseRedirect, HttpResponse, HttpRequest\n'), ((21566, 21579), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (21577, 21579), False, 'from django.http import JsonResponse, HttpResponseRedirect, HttpResponse, HttpRequest\n'), ((21684, 21726), 'axes.helpers.get_lockout_response', 
'get_lockout_response', ([], {'request': 'self.request'}), '(request=self.request)\n', (21704, 21726), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((21937, 21979), 'axes.helpers.get_lockout_response', 'get_lockout_response', ([], {'request': 'self.request'}), '(request=self.request)\n', (21957, 21979), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((22155, 22197), 'axes.helpers.get_lockout_response', 'get_lockout_response', ([], {'request': 'self.request'}), '(request=self.request)\n', (22175, 22197), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((22405, 22447), 'axes.helpers.get_lockout_response', 'get_lockout_response', ([], {'request': 'self.request'}), '(request=self.request)\n', (22425, 22447), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, 
is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((22581, 22623), 'axes.helpers.get_lockout_response', 'get_lockout_response', ([], {'request': 'self.request'}), '(request=self.request)\n', (22601, 22623), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((23580, 23596), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (23594, 23596), False, 'from django.contrib.auth import get_user_model\n'), ((23692, 23705), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (23703, 23705), False, 'from django.http import JsonResponse, HttpResponseRedirect, HttpResponse, HttpRequest\n'), ((24690, 24703), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (24701, 24703), False, 'from django.http import JsonResponse, HttpResponseRedirect, HttpResponse, HttpRequest\n'), ((24799, 24851), 'axes.helpers.get_lockout_response', 'get_lockout_response', (['self.request', 'self.credentials'], {}), '(self.request, self.credentials)\n', (24819, 24851), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((25055, 25107), 'axes.helpers.get_lockout_response', 
'get_lockout_response', (['self.request', 'self.credentials'], {}), '(self.request, self.credentials)\n', (25075, 25107), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((25342, 25394), 'axes.helpers.get_lockout_response', 'get_lockout_response', (['self.request', 'self.credentials'], {}), '(self.request, self.credentials)\n', (25362, 25394), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((25954, 25989), 'axes.helpers.cleanse_parameters', 'cleanse_parameters', (['self.parameters'], {}), '(self.parameters)\n', (25972, 25989), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((26346, 26381), 'axes.helpers.cleanse_parameters', 'cleanse_parameters', (['self.parameters'], {}), '(self.parameters)\n', (26364, 26381), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, 
get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((26804, 26839), 'axes.helpers.cleanse_parameters', 'cleanse_parameters', (['self.parameters'], {}), '(self.parameters)\n', (26822, 26839), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((27193, 27228), 'axes.helpers.cleanse_parameters', 'cleanse_parameters', (['self.parameters'], {}), '(self.parameters)\n', (27211, 27228), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((1470, 1489), 'axes.helpers.get_cache_timeout', 'get_cache_timeout', ([], {}), '()\n', (1487, 1489), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((1647, 1666), 'axes.helpers.get_cache_timeout', 'get_cache_timeout', ([], {}), '()\n', (1664, 1666), False, 'from axes.helpers import 
get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((1550, 1572), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(420)'}), '(seconds=420)\n', (1559, 1572), False, 'from datetime import timedelta\n'), ((1789, 1808), 'axes.helpers.get_cache_timeout', 'get_cache_timeout', ([], {}), '()\n', (1806, 1808), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((2049, 2099), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)', 'hours': '(25)', 'minutes': '(42)', 'seconds': '(8)'}), '(days=1, hours=25, minutes=42, seconds=8)\n', (2058, 2099), False, 'from datetime import timedelta\n'), ((2128, 2158), 'datetime.timedelta', 'timedelta', ([], {'days': '(7)', 'seconds': '(342)'}), '(days=7, seconds=342)\n', (2137, 2158), False, 'from datetime import timedelta\n'), ((2185, 2223), 'datetime.timedelta', 'timedelta', ([], {'days': '(0)', 'hours': '(2)', 'minutes': '(42)'}), '(days=0, hours=2, minutes=42)\n', (2194, 2223), False, 'from datetime import timedelta\n'), ((2248, 2279), 'datetime.timedelta', 'timedelta', ([], {'hours': '(20)', 'seconds': '(42)'}), '(hours=20, seconds=42)\n', (2257, 2279), False, 'from datetime import timedelta\n'), ((2305, 2327), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(300)'}), '(seconds=300)\n', (2314, 2327), False, 'from datetime import timedelta\n'), ((2349, 
2372), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(9005)'}), '(seconds=9005)\n', (2358, 2372), False, 'from datetime import timedelta\n'), ((2399, 2422), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(9005)'}), '(minutes=9005)\n', (2408, 2422), False, 'from datetime import timedelta\n'), ((2448, 2466), 'datetime.timedelta', 'timedelta', ([], {'days': '(15)'}), '(days=15)\n', (2457, 2466), False, 'from datetime import timedelta\n'), ((8383, 8450), 'axes.helpers.get_client_str', 'get_client_str', (['"""username"""', '"""ip_address"""', '"""user_agent"""', '"""path_info"""'], {}), "('username', 'ip_address', 'user_agent', 'path_info')\n", (8397, 8450), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((8771, 8841), 'axes.helpers.get_client_parameters', 'get_client_parameters', (['self.username', 'self.ip_address', 'self.user_agent'], {}), '(self.username, self.ip_address, self.user_agent)\n', (8792, 8841), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((9136, 9206), 'axes.helpers.get_client_parameters', 'get_client_parameters', (['self.username', 'self.ip_address', 'self.user_agent'], {}), '(self.username, self.ip_address, self.user_agent)\n', (9157, 9206), False, 'from axes.helpers import get_cache_timeout, get_client_str, 
get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((9513, 9583), 'axes.helpers.get_client_parameters', 'get_client_parameters', (['self.username', 'self.ip_address', 'self.user_agent'], {}), '(self.username, self.ip_address, self.user_agent)\n', (9534, 9583), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((9959, 10029), 'axes.helpers.get_client_parameters', 'get_client_parameters', (['self.username', 'self.ip_address', 'self.user_agent'], {}), '(self.username, self.ip_address, self.user_agent)\n', (9980, 10029), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((10366, 10436), 'axes.helpers.get_client_parameters', 'get_client_parameters', (['self.username', 'self.ip_address', 'self.user_agent'], {}), '(self.username, self.ip_address, self.user_agent)\n', (10387, 10436), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, 
is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((10777, 10847), 'axes.helpers.get_client_parameters', 'get_client_parameters', (['self.username', 'self.ip_address', 'self.user_agent'], {}), '(self.username, self.ip_address, self.user_agent)\n', (10798, 10847), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((11543, 11572), 'axes.helpers.get_client_cache_key', 'get_client_cache_key', (['request'], {}), '(request)\n', (11563, 11572), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((12051, 12080), 'axes.helpers.get_client_cache_key', 'get_client_cache_key', (['attempt'], {}), '(attempt)\n', (12071, 12080), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((12689, 12718), 'axes.helpers.get_client_cache_key', 'get_client_cache_key', 
(['request'], {}), '(request)\n', (12709, 12718), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((13198, 13227), 'axes.helpers.get_client_cache_key', 'get_client_cache_key', (['attempt'], {}), '(attempt)\n', (13218, 13227), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((13890, 13932), 'axes.helpers.get_client_cache_key', 'get_client_cache_key', (['request', 'credentials'], {}), '(request, credentials)\n', (13910, 13932), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((14405, 14434), 'axes.helpers.get_client_cache_key', 'get_client_cache_key', (['attempt'], {}), '(attempt)\n', (14425, 14434), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, 
is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((18715, 18754), 'axes.helpers.is_ip_address_in_whitelist', 'is_ip_address_in_whitelist', (['"""127.0.0.2"""'], {}), "('127.0.0.2')\n", (18741, 18754), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((18873, 18912), 'axes.helpers.is_ip_address_in_whitelist', 'is_ip_address_in_whitelist', (['"""127.0.0.1"""'], {}), "('127.0.0.1')\n", (18899, 18912), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((18939, 18978), 'axes.helpers.is_ip_address_in_whitelist', 'is_ip_address_in_whitelist', (['"""127.0.0.2"""'], {}), "('127.0.0.2')\n", (18965, 18978), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((19094, 19133), 'axes.helpers.is_ip_address_in_blacklist', 'is_ip_address_in_blacklist', (['"""127.0.0.2"""'], {}), "('127.0.0.2')\n", (19120, 19133), False, 'from axes.helpers import 
get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((19252, 19291), 'axes.helpers.is_ip_address_in_blacklist', 'is_ip_address_in_blacklist', (['"""127.0.0.1"""'], {}), "('127.0.0.1')\n", (19278, 19291), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((19318, 19357), 'axes.helpers.is_ip_address_in_blacklist', 'is_ip_address_in_blacklist', (['"""127.0.0.2"""'], {}), "('127.0.0.2')\n", (19344, 19357), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((19509, 19555), 'axes.helpers.is_client_ip_address_blacklisted', 'is_client_ip_address_blacklisted', (['self.request'], {}), '(self.request)\n', (19541, 19555), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, 
is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((19715, 19761), 'axes.helpers.is_client_ip_address_blacklisted', 'is_client_ip_address_blacklisted', (['self.request'], {}), '(self.request)\n', (19747, 19761), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((19972, 20018), 'axes.helpers.is_client_ip_address_blacklisted', 'is_client_ip_address_blacklisted', (['self.request'], {}), '(self.request)\n', (20004, 20018), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((20208, 20254), 'axes.helpers.is_client_ip_address_blacklisted', 'is_client_ip_address_blacklisted', (['self.request'], {}), '(self.request)\n', (20240, 20254), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((20462, 20508), 'axes.helpers.is_client_ip_address_whitelisted', 'is_client_ip_address_whitelisted', (['self.request'], {}), '(self.request)\n', (20494, 20508), False, 'from axes.helpers 
import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((20704, 20750), 'axes.helpers.is_client_ip_address_whitelisted', 'is_client_ip_address_whitelisted', (['self.request'], {}), '(self.request)\n', (20736, 20750), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((20940, 20986), 'axes.helpers.is_client_ip_address_whitelisted', 'is_client_ip_address_whitelisted', (['self.request'], {}), '(self.request)\n', (20972, 20986), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((21255, 21297), 'axes.helpers.is_client_method_whitelisted', 'is_client_method_whitelisted', (['self.request'], {}), '(self.request)\n', (21283, 21297), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, 
is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((21431, 21473), 'axes.helpers.is_client_method_whitelisted', 'is_client_method_whitelisted', (['self.request'], {}), '(self.request)\n', (21459, 21473), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((22897, 22911), 'axes.helpers.get_cool_off', 'get_cool_off', ([], {}), '()\n', (22909, 22911), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((23020, 23034), 'axes.helpers.get_cool_off', 'get_cool_off', ([], {}), '()\n', (23032, 23034), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((23036, 23054), 'datetime.timedelta', 'timedelta', ([], {'hours': '(2)'}), '(hours=2)\n', (23045, 23054), False, 'from datetime import timedelta\n'), ((23196, 23210), 'axes.helpers.get_cool_off', 'get_cool_off', ([], {}), '()\n', (23208, 23210), False, 'from axes.helpers import get_cache_timeout, 
get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((23212, 23233), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(30)'}), '(seconds=30)\n', (23221, 23233), False, 'from datetime import timedelta\n'), ((23384, 23398), 'axes.helpers.get_cool_off', 'get_cool_off', ([], {}), '()\n', (23396, 23398), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((23400, 23421), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(30)'}), '(seconds=30)\n', (23409, 23421), False, 'from datetime import timedelta\n'), ((23801, 23860), 'axes.helpers.is_user_attempt_whitelisted', 'is_user_attempt_whitelisted', (['self.request', 'self.credentials'], {}), '(self.request, self.credentials)\n', (23828, 23860), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((24008, 24067), 'axes.helpers.is_user_attempt_whitelisted', 'is_user_attempt_whitelisted', (['self.request', 'self.credentials'], {}), '(self.request, self.credentials)\n', (24035, 24067), False, 'from axes.helpers 
import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((24232, 24291), 'axes.helpers.is_user_attempt_whitelisted', 'is_user_attempt_whitelisted', (['self.request', 'self.credentials'], {}), '(self.request, self.credentials)\n', (24259, 24291), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((24452, 24511), 'axes.helpers.is_user_attempt_whitelisted', 'is_user_attempt_whitelisted', (['self.request', 'self.credentials'], {}), '(self.request, self.credentials)\n', (24479, 24511), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((25610, 25662), 'axes.helpers.get_lockout_response', 'get_lockout_response', (['self.request', 'self.credentials'], {}), '(self.request, self.credentials)\n', (25630, 25662), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, 
is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((1241, 1260), 'axes.helpers.toggleable', 'toggleable', (['is_true'], {}), '(is_true)\n', (1251, 1260), False, 'from axes.helpers import get_cache_timeout, get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n'), ((17295, 17308), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (17306, 17308), False, 'from django.http import JsonResponse, HttpResponseRedirect, HttpResponse, HttpRequest\n'), ((17565, 17578), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (17576, 17578), False, 'from django.http import JsonResponse, HttpResponseRedirect, HttpResponse, HttpRequest\n'), ((17858, 17871), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (17869, 17871), False, 'from django.http import JsonResponse, HttpResponseRedirect, HttpResponse, HttpRequest\n'), ((18058, 18071), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (18069, 18071), False, 'from django.http import JsonResponse, HttpResponseRedirect, HttpResponse, HttpRequest\n'), ((18248, 18261), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (18259, 18261), False, 'from django.http import JsonResponse, HttpResponseRedirect, HttpResponse, HttpRequest\n'), ((23106, 23127), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(30)'}), '(seconds=30)\n', (23115, 23127), False, 'from datetime import timedelta\n'), ((2618, 2645), 'axes.helpers.get_cool_off_iso8601', 'get_cool_off_iso8601', (['delta'], {}), '(delta)\n', (2638, 2645), False, 'from axes.helpers import get_cache_timeout, 
get_client_str, get_client_username, get_client_cache_key, get_client_parameters, get_cool_off, get_cool_off_iso8601, get_lockout_response, is_client_ip_address_blacklisted, is_client_ip_address_whitelisted, is_client_method_whitelisted, is_ip_address_in_blacklist, is_ip_address_in_whitelist, is_user_attempt_whitelisted, toggleable, cleanse_parameters\n')]
|
#!/share/bin/python
# Filename: initiate_tracing_updated.py
from __future__ import division
import os
import glob
i=int(os.getenv('PBS_ARRAYID'))-1 # The first index of a list is zero
input_filenames=glob.glob('/data/mat/data_processing/input_data/*.v3dpbd')
mkdir_command='mkdir /data/mat/data_processing/'+str(i)
os.system(mkdir_command)
copyfolder_command='cp -rp /data/mat/data_processing/Vaa3D_v3.200 /data/mat/data_processing/'+str(i)
os.system(copyfolder_command)
copyinputdata_command='cp '+input_filenames[i]+' /data/mat/data_processing/'+str(i)
os.system(copyinputdata_command)
working_dir='/data/mat/data_processing/'+str(i)+'/Vaa3D_v3.200'
os.chdir(working_dir)
filename_path='/data/mat/data_processing/'+str(i)+'/*.v3dpbd'
data_filenames=glob.glob(filename_path)
conversion_command='./start_vaa3d.sh -x convert_file_format -f convert_format -i '+data_filenames[0]+' -o '+data_filenames[0]+'.v3draw'
os.system(conversion_command)
filename_path='/data/mat/data_processing/'+str(i)+'/*.v3draw'
data_filenames=glob.glob(filename_path)
print('APP1')
tracing_command='timeout 3600s ./start_vaa3d.sh -x vn2 -f app1 -i '+data_filenames[0]+' -p NULL 0 40 1'
os.system(tracing_command)
print('APP2')
tracing_command='timeout 3600s ./start_vaa3d.sh -x vn2 -f app2 -i '+data_filenames[0]+' -p NULL 0 10 1 1 0 0 5 0 0 0'
os.system(tracing_command)
print('MOST')
tracing_command='timeout 3600s ./start_vaa3d.sh -x MOST -f MOST_trace -i '+data_filenames[0]+' -p 1 40'
os.system(tracing_command)
print('NEUTUBE')
tracing_command='timeout 3600s ./start_vaa3d.sh -x neuTube -f neutube_trace -i '+data_filenames[0]
os.system(tracing_command)
print('FARSIGHT Snake')
tracing_command='timeout 3600s ./start_vaa3d.sh -x snake -f snake_trace -i '+data_filenames[0]+' -p 1'
os.system(tracing_command)
print('3 from SimpleTracing')
tracing_command='timeout 3600s ./start_vaa3d.sh -x SimpleTracing -f tracing -i '+data_filenames[0]+' -o '+data_filenames[0]+'_simple.swc -p 1'
os.system(tracing_command)
tracing_command='timeout 3600s ./start_vaa3d.sh -x SimpleTracing -f ray_shooting -i '+data_filenames[0]+' -o '+data_filenames[0]+'_Rayshooting.swc'
os.system(tracing_command)
tracing_command='timeout 3600s ./start_vaa3d.sh -x SimpleTracing -f dfs -i '+data_filenames[0]+' -o '+data_filenames[0]+'_Rollerball.swc'
os.system(tracing_command)
print('TreMap')
tracing_command='timeout 3600s ./start_vaa3d.sh -x TReMap -f trace_mip -i '+data_filenames[0]+' -p 0 1 10 0 1 0 5'
os.system(tracing_command)
print('MST')
tracing_command='timeout 3600s ./start_vaa3d.sh -x MST_tracing -f trace_mst -i '+data_filenames[0]+' -p 1 5'
os.system(tracing_command)
print('NeuroGPSTree')
tracing_command='timeout 3600s ./start_vaa3d.sh -x NeuroGPSTree -f tracing_func -i '+data_filenames[0]+' -p 0.5 0.5 1 15 10 150'
os.system(tracing_command)
print('fastmarching_spanningtree')
tracing_command='timeout 3600s ./start_vaa3d.sh -x fastmarching_spanningtree -f tracing_func -i '+data_filenames[0]
os.system(tracing_command)
#print('meanshift')
#tracing_command='./start_vaa3d.sh -x BJUT_meanshift -f meanshift -i '+data_filenames[0]+' -p 1 3.0 10 0.6'
#os.system(tracing_command)
print('CWlab_method1_version1')
tracing_command='timeout 3600s ./start_vaa3d.sh -x CWlab_method1_version1 -f tracing_func -i '+data_filenames[0]+' -p 1'
os.system(tracing_command)
#print('LCM_boost')
#tracing_command='./start_vaa3d.sh -x LCM_boost -f LCM_boost -i '+data_filenames[0]+' -o '+data_filenames[0]+'_LCMboost.swc'
#os.system(tracing_command)
print('NeuroStalker')
tracing_command='timeout 3600s ./start_vaa3d.sh -x NeuroStalker -f tracing_func -i '+data_filenames[0]+' -p 1 1 1 5 5 30'
os.system(tracing_command)
print('nctuTW')
tracing_command='timeout 3600s ./start_vaa3d.sh -x nctuTW -f tracing_func -i '+data_filenames[0]+' -p NULL'
os.system(tracing_command)
print('tips_GD')
tracing_command='timeout 3600s ./start_vaa3d.sh -x tips_GD -f tracing_func -i '+data_filenames[0]
os.system(tracing_command)
print('SimpleAxisAnalyzer')
tracing_command='timeout 3600s ./start_vaa3d.sh -x SimpleAxisAnalyzer -f medial_axis_analysis -i '+data_filenames[0]
os.system(tracing_command)
print('NeuronChaser')
tracing_command='timeout 3600s ./start_vaa3d.sh -x NeuronChaser -f nc_func -i '+data_filenames[0]+' -p 1 10 0.7 20 60 10 5 1 0'
os.system(tracing_command)
print('smartTracing')
tracing_command='timeout 3600s ./start_vaa3d.sh -x smartTrace -f smartTrace -i '+data_filenames[0]
os.system(tracing_command)
print('neutu_autotrace')
tracing_command='timeout 3600s ./start_vaa3d.sh -x neutu_autotrace -f tracing -i '+data_filenames[0]
os.system(tracing_command)
print('Advantra')
tracing_command='timeout 3600s ./start_vaa3d.sh -x Advantra -f advantra_func -i '+data_filenames[0]+' -p 10 0.5 0.7 20 60 10 5 1'
os.system(tracing_command)
print('RegMST')
tracing_command='timeout 3600s ./start_vaa3d.sh -x RegMST -f tracing_func -i '+data_filenames[0]+' -p ./filter_banks/oof_fb_3d_scale_1_2_3_5_size_13_sep_cpd_rank_49.txt ./filter_banks/oof_fb_3d_scale_1_2_3_5_size_13_weigths_cpd_rank_49.txt ./filter_banks/proto_filter_AC_lap_633_822_sep_cpd_rank_49.txt ./filter_banks/proto_filter_AC_lap_633_822_weigths_cpd_rank_49.txt 1 2 ./trained_models/model_S/Regressor_ac_0.cfg ./trained_models/model_S/Regressor_ac_1.cfg 21 170'
os.system(tracing_command)
print('EnsembleNeuronTracer')
tracing_command='timeout 3600s ./start_vaa3d.sh -x EnsembleNeuronTracerBasic -f tracing_func -i '+data_filenames[0]
os.system(tracing_command)
print('EnsembleNeuronTracerV2n')
tracing_command='timeout 3600s ./start_vaa3d.sh -x EnsembleNeuronTracerV2n -f tracing_func -i '+data_filenames[0]
os.system(tracing_command)
print('EnsembleNeuronTracerV2s')
tracing_command='timeout 3600s ./start_vaa3d.sh -x EnsembleNeuronTracerV2s -f tracing_func -i '+data_filenames[0]
os.system(tracing_command)
#print('3DTraceSWC')
#tracing_command='./start_vaa3d.sh -x aVaaTrace3D -f func1 -i '+data_filenames[0]+' -p 50 5 2.5'
#os.system(tracing_command)
print('Rivulet')
tracing_command='timeout 3600s ./start_vaa3d.sh -x Rivulet -f tracing_func -i '+data_filenames[0]+' -p 1 2 1 10 0 2 0.95 3 0.5 1'
os.system(tracing_command)
print('ENT')
tracing_command='timeout 3600s ./start_vaa3d.sh -x ENT -f tracing_func -i '+data_filenames[0]
os.system(tracing_command)
print('APP2_new1')
tracing_command='timeout 3600s ./start_vaa3d.sh -x vn2 -f app2 -i '+data_filenames[0]+' -o '+data_filenames[0]+'_app2new1.swc -p NULL 0 AUTO 1 1 1 1 5 0 0 0'
os.system(tracing_command)
print('APP2_new2')
tracing_command='timeout 3600s ./start_vaa3d.sh -x vn2 -f app2 -i '+data_filenames[0]+' -o '+data_filenames[0]+'_app2new2.swc -p NULL 0 AUTO 1 1 0 1 5 0 0 0'
os.system(tracing_command)
print('APP2_new3')
tracing_command='timeout 3600s ./start_vaa3d.sh -x vn2 -f app2 -i '+data_filenames[0]+' -o '+data_filenames[0]+'_app2new3.swc -p NULL 0 10 1 1 1 1 5 0 0 0'
os.system(tracing_command)
print('LCM_boost_2')
tracing_command='timeout 3600s ./start_vaa3d.sh -x LCM_boost -f LCM_boost_2 -i '+data_filenames[0]+' -o '+data_filenames[0]+'_LCMboost_2.swc'
os.system(tracing_command)
#print('LCM_boost_3')
#tracing_command='./start_vaa3d.sh -x LCM_boost -f LCM_boost_3 -i '+data_filenames[0]+' -o '+data_filenames[0]+'_LCMboost_3.swc'
#os.system(tracing_command)
|
[
"os.getenv",
"os.system",
"os.chdir",
"glob.glob"
] |
[((202, 260), 'glob.glob', 'glob.glob', (['"""/data/mat/data_processing/input_data/*.v3dpbd"""'], {}), "('/data/mat/data_processing/input_data/*.v3dpbd')\n", (211, 260), False, 'import glob\n'), ((317, 341), 'os.system', 'os.system', (['mkdir_command'], {}), '(mkdir_command)\n', (326, 341), False, 'import os\n'), ((443, 472), 'os.system', 'os.system', (['copyfolder_command'], {}), '(copyfolder_command)\n', (452, 472), False, 'import os\n'), ((557, 589), 'os.system', 'os.system', (['copyinputdata_command'], {}), '(copyinputdata_command)\n', (566, 589), False, 'import os\n'), ((654, 675), 'os.chdir', 'os.chdir', (['working_dir'], {}), '(working_dir)\n', (662, 675), False, 'import os\n'), ((753, 777), 'glob.glob', 'glob.glob', (['filename_path'], {}), '(filename_path)\n', (762, 777), False, 'import glob\n'), ((914, 943), 'os.system', 'os.system', (['conversion_command'], {}), '(conversion_command)\n', (923, 943), False, 'import os\n'), ((1021, 1045), 'glob.glob', 'glob.glob', (['filename_path'], {}), '(filename_path)\n', (1030, 1045), False, 'import glob\n'), ((1165, 1191), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (1174, 1191), False, 'import os\n'), ((1329, 1355), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (1338, 1355), False, 'import os\n'), ((1475, 1501), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (1484, 1501), False, 'import os\n'), ((1623, 1649), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (1632, 1649), False, 'import os\n'), ((1782, 1808), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (1791, 1808), False, 'import os\n'), ((1987, 2013), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (1996, 2013), False, 'import os\n'), ((2163, 2189), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (2172, 2189), False, 'import os\n'), ((2333, 2359), 'os.system', 
'os.system', (['tracing_command'], {}), '(tracing_command)\n', (2342, 2359), False, 'import os\n'), ((2496, 2522), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (2505, 2522), False, 'import os\n'), ((2650, 2676), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (2659, 2676), False, 'import os\n'), ((2833, 2859), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (2842, 2859), False, 'import os\n'), ((3016, 3042), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (3025, 3042), False, 'import os\n'), ((3360, 3386), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (3369, 3386), False, 'import os\n'), ((3715, 3741), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (3724, 3741), False, 'import os\n'), ((3871, 3897), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (3880, 3897), False, 'import os\n'), ((4018, 4044), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (4027, 4044), False, 'import os\n'), ((4195, 4221), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (4204, 4221), False, 'import os\n'), ((4377, 4403), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (4386, 4403), False, 'import os\n'), ((4530, 4556), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (4539, 4556), False, 'import os\n'), ((4688, 4714), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (4697, 4714), False, 'import os\n'), ((4868, 4894), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (4877, 4894), False, 'import os\n'), ((5386, 5412), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (5395, 5412), False, 'import os\n'), ((5564, 5590), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (5573, 5590), 
False, 'import os\n'), ((5743, 5769), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (5752, 5769), False, 'import os\n'), ((5918, 5944), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (5927, 5944), False, 'import os\n'), ((6240, 6266), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (6249, 6266), False, 'import os\n'), ((6375, 6401), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (6384, 6401), False, 'import os\n'), ((6581, 6607), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (6590, 6607), False, 'import os\n'), ((6787, 6813), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (6796, 6813), False, 'import os\n'), ((6991, 7017), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (7000, 7017), False, 'import os\n'), ((7182, 7208), 'os.system', 'os.system', (['tracing_command'], {}), '(tracing_command)\n', (7191, 7208), False, 'import os\n'), ((121, 145), 'os.getenv', 'os.getenv', (['"""PBS_ARRAYID"""'], {}), "('PBS_ARRAYID')\n", (130, 145), False, 'import os\n')]
|
import json
import os
from typing import Callable, Dict
import PIL.Image
import torch
import torch.utils.data
class SarcasmDataset(torch.utils.data.Dataset):
    """Dataset of Sarcasm videos.

    Each item is a dict ``{'id': video_id, 'frames': tensor}`` where the tensor
    stacks the (optionally transformed) frames of one utterance video.
    """

    # Root directory containing one folder of extracted frames per video id.
    FRAMES_DIR_PATH = '../data/frames/utterances_final'

    def __init__(self, transform: Callable = None, videos_data_path: str = '../data/sarcasm_data.json',
                 check_missing_videos: bool = True) -> None:
        """
        :param transform: optional callable applied to every frame (e.g. torchvision transforms).
        :param videos_data_path: JSON file whose top-level keys are the video ids.
        :param check_missing_videos: if True, raise FileNotFoundError when a referenced
            frame folder is missing; if False, silently drop such video ids.
        """
        self.transform = transform

        with open(videos_data_path) as file:
            videos_data_dict = json.load(file)
        # Convert to list so entries can be removed from the dict while iterating.
        for video_id in list(videos_data_dict.keys()):
            video_folder_path = self._video_folder_path(video_id)
            if not os.path.exists(video_folder_path):
                if check_missing_videos:
                    raise FileNotFoundError(f"Directory {video_folder_path} not found, which was referenced in"
                                            f" {videos_data_path}")
                else:
                    del videos_data_dict[video_id]

        self.video_ids = list(videos_data_dict.keys())
        # Pre-count frames per video so __getitem__ can allocate the tensor up front.
        self.frame_count_by_video_id = {video_id: len(os.listdir(self._video_folder_path(video_id)))
                                        for video_id in self.video_ids}

    @staticmethod
    def _video_folder_path(video_id: str) -> str:
        """Return the folder that holds the extracted frames of *video_id*."""
        return os.path.join(SarcasmDataset.FRAMES_DIR_PATH, video_id)

    @staticmethod
    def features_file_path(model_name: str, layer_name: str) -> str:
        """Return the HDF5 path where features for the given model/layer are stored."""
        return f'../data/features/utterances_final/{model_name}_{layer_name}.hdf5'

    def __getitem__(self, index) -> Dict[str, object]:
        video_id = self.video_ids[index]

        frames = None
        video_folder_path = self._video_folder_path(video_id)
        # FIX: sort the frame filenames so the temporal order of the video is
        # deterministic — os.listdir returns entries in arbitrary order.
        for i, frame_file_name in enumerate(sorted(os.listdir(video_folder_path))):
            frame = PIL.Image.open(os.path.join(video_folder_path, frame_file_name))
            if self.transform:
                frame = self.transform(frame)
            if frames is None:
                # Allocate once, shaped after the first transformed frame.
                # noinspection PyUnresolvedReferences
                frames = torch.empty((self.frame_count_by_video_id[video_id], *frame.size()))
            frames[i] = frame
        return {'id': video_id, 'frames': frames}

    def __len__(self) -> int:
        return len(self.video_ids)
|
[
"json.load",
"os.path.join",
"os.listdir",
"os.path.exists"
] |
[((1377, 1431), 'os.path.join', 'os.path.join', (['SarcasmDataset.FRAMES_DIR_PATH', 'video_id'], {}), '(SarcasmDataset.FRAMES_DIR_PATH, video_id)\n', (1389, 1431), False, 'import os\n'), ((532, 547), 'json.load', 'json.load', (['file'], {}), '(file)\n', (541, 547), False, 'import json\n'), ((1830, 1859), 'os.listdir', 'os.listdir', (['video_folder_path'], {}), '(video_folder_path)\n', (1840, 1859), False, 'import os\n'), ((734, 767), 'os.path.exists', 'os.path.exists', (['video_folder_path'], {}), '(video_folder_path)\n', (748, 767), False, 'import os\n'), ((1897, 1945), 'os.path.join', 'os.path.join', (['video_folder_path', 'frame_file_name'], {}), '(video_folder_path, frame_file_name)\n', (1909, 1945), False, 'import os\n')]
|
#!/usr/bin/env python
# Copyright (c) 2017, DIANA-HEP
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import copy
import numbers
import functools
import numpy
import oamap.generator
import oamap.operations
import oamap.proxy
import oamap.schema
import oamap.util
class SingleThreadExecutor(object):
    """Executor-like object that evaluates submitted calls immediately on the
    caller's thread, wrapping each result in an already-resolved future."""

    class PseudoFuture(object):
        """Already-resolved stand-in for a concurrent future."""

        def __init__(self, result):
            self._result = result

        def result(self, timeout=None):
            # The value is already computed; timeout is accepted for API parity.
            return self._result

        def done(self):
            return True

        def exception(self, timeout=None):
            raise NotImplementedError

        def traceback(self, timeout=None):
            raise NotImplementedError

    def submit(self, fcn, *args, **kwargs):
        """Run *fcn* right now, unwrapping any PseudoFuture arguments first."""
        unwrapped_args = []
        for value in args:
            unwrapped_args.append(value.result() if isinstance(value, self.PseudoFuture) else value)
        unwrapped_kwargs = {}
        for key, value in kwargs.items():
            unwrapped_kwargs[key] = value.result() if isinstance(value, self.PseudoFuture) else value
        return self.PseudoFuture(fcn(*unwrapped_args, **unwrapped_kwargs))
class Operation(object):
    """A named, deferred operation: a function plus the arguments it will
    eventually be applied with (the data itself is supplied later)."""

    def __init__(self, name, args, kwargs, function):
        self._name = name
        self._args = args
        self._kwargs = kwargs
        self._function = function

    def __repr__(self):
        return "<{0} {1} {2} {3}>".format(self.__class__.__name__, self._name, repr(self._args), repr(self._kwargs))

    def __str__(self):
        positional = ", ".join(repr(a) for a in self._args)
        keywords = "".join(", {0}={1}".format(k, repr(v)) for k, v in self._kwargs.items())
        return ".{0}({1}{2})".format(self._name, positional, keywords)

    @property
    def name(self):
        return self._name

    @property
    def args(self):
        return self._args

    @property
    def kwargs(self):
        return self._kwargs

    @property
    def function(self):
        return self._function

    def apply(self, data):
        """Call the wrapped function with *data* prepended to the stored arguments."""
        return self._function(*((data,) + self._args), **self._kwargs)
# Marker subclasses distinguishing the three kinds of Operation; see
# Operable.update_operations, which wraps functions differently per kind,
# and Operable._notransformations, which tests for Recasting.
class Recasting(Operation): pass
class Transformation(Operation): pass
class Action(Operation): pass
class Operable(object):
    """Mixin providing a chainable pipeline of pending operations.

    The concrete recasting/transformation/action methods are injected onto this
    class at import time by ``update_operations`` (called at module scope below),
    one per entry in the ``oamap.operations`` registries. Each injected method
    returns a copy of ``self`` with one more operation appended.
    """

    def __init__(self):
        # Pending operations (Recasting/Transformation/Action), applied in order.
        self._operations = ()

    @staticmethod
    def update_operations():
        # Wrap a recasting function as a chainable method: clone the object and
        # append a Recasting to its pending pipeline.
        def newrecasting(name, function):
            @functools.wraps(function)
            def recasting(self, *args, **kwargs):
                out = self.__class__.__new__(self.__class__)
                Operable.__init__(out)
                out.__dict__ = self.__dict__.copy()
                out._operations = self._operations + (Recasting(name, args, kwargs, function),)
                return out
            return recasting

        # Same cloning pattern, but the pending entry is a Transformation.
        def newtransformation(name, function):
            @functools.wraps(function)
            def transformation(self, *args, **kwargs):
                out = self.__class__.__new__(self.__class__)
                Operable.__init__(out)
                out.__dict__ = self.__dict__.copy()
                out._operations = self._operations + (Transformation(name, args, kwargs, function),)
                return out
            return transformation

        # Actions terminate a pipeline: append the Action, then evaluate the
        # clone immediately through act() with the chosen combiner (an explicit
        # combiner= keyword overrides the function's default).
        def newaction(name, function):
            @functools.wraps(function)
            def action(self, *args, **kwargs):
                try:
                    combiner = kwargs.pop("combiner")
                except KeyError:
                    combiner = function.combiner
                out = self.__class__.__new__(self.__class__)
                Operable.__init__(out)
                out.__dict__ = self.__dict__.copy()
                out._operations = self._operations + (Action(name, args, kwargs, function),)
                return out.act(combiner)
            return action

        # Attach one bound method per registered operation.
        for n, x in oamap.operations.recastings.items():
            setattr(Operable, n, oamap.util.MethodType(newrecasting(n, x), None, Operable))
        for n, x in oamap.operations.transformations.items():
            setattr(Operable, n, oamap.util.MethodType(newtransformation(n, x), None, Operable))
        for n, x in oamap.operations.actions.items():
            setattr(Operable, n, oamap.util.MethodType(newaction(n, x), None, Operable))

    def _nooperations(self):
        # True when no operations are pending at all.
        return len(self._operations) == 0

    def _notransformations(self):
        # True when every pending operation is a Recasting (no data needs to be
        # materialized through a backend).
        return all(isinstance(x, Recasting) for x in self._operations)

# Inject the dynamically generated methods as soon as the module is defined.
Operable.update_operations()
class _Data(Operable):
    """Common base for Data (unpartitioned) and Dataset (partitioned) handles.

    Holds a name, an oamap schema, per-namespace backends, and an executor used
    to evaluate the pending operation pipeline accumulated via Operable.
    """

    def __init__(self, name, schema, backends, executor, extension=None, packing=None, doc=None, metadata=None):
        super(_Data, self).__init__()
        self._name = name
        self._schema = schema
        self._backends = backends      # mapping: namespace -> backend
        self._executor = executor
        self._extension = extension    # module name(s) providing proxy extensions
        self._packing = packing
        self._doc = doc
        self._metadata = metadata
        self._cachedobject = None      # memoized proxy built on first __call__

    def __repr__(self):
        return "<Data {0}>{1}".format(repr(self._name), "".join(str(x) for x in self._operations))

    def __str__(self):
        return "<Data {0}>{1}".format(repr(self._name), "".join("\n    " + str(x) for x in self._operations))

    @property
    def name(self):
        return self._name

    @property
    def schema(self):
        # Deep copy so callers cannot mutate the dataset's schema in place.
        return self._schema.deepcopy()

    @property
    def extension(self):
        return self._extension

    @property
    def packing(self):
        return self._packing

    @property
    def doc(self):
        return self._doc

    @property
    def metadata(self):
        return self._metadata

    def arrays(self):
        # Array source that lazily instantiates one active backend per namespace.
        return DataArrays(self._backends)

    def transform(self, name, namespace, update):
        """Evaluate the pending pipeline into a new named dataset; returns a list of futures."""
        if self._nooperations():
            # Nothing pending: just hand the current object to the updater.
            return [SingleThreadExecutor.PseudoFuture(update(self))]
        elif self._notransformations():
            # Only recastings pending: cheap, so apply them synchronously and wrap
            # the result without writing anything to a backend.
            result = self()
            for operation in self._operations:
                result = operation.apply(result)
            if isinstance(result, oamap.proxy.ListProxy):
                out = Dataset(name, result._generator.schema, self._backends, self._executor, [0, len(result)], extension=self._extension, packing=None, doc=self._doc, metadata=self._metadata)
            else:
                out = Data(name, result._generator.schema, self._backends, self._executor, extension=self._extension, packing=None, doc=self._doc, metadata=self._metadata)
            return [SingleThreadExecutor.PseudoFuture(update(out))]
        else:
            # Real transformations: run the pipeline on the executor, write the
            # produced arrays into the target namespace's backend, and wrap the
            # resulting schema in a new Data/Dataset.
            def task(name, dataset, namespace, update):
                result = dataset()
                for operation in dataset._operations:
                    result = operation.apply(result)
                backend = dataset._backends[namespace]
                schema, roles2arrays = oamap.operations._DualSource.collect(result._generator.namedschema(), result._arrays, namespace, backend.prefix(name), backend.delimiter())
                active = backend.instantiate(0)
                if hasattr(active, "putall"):
                    active.putall(roles2arrays)
                else:
                    for n, x in roles2arrays.items():
                        active[str(n)] = x
                if isinstance(result, oamap.proxy.ListProxy):
                    out = Dataset(name, schema, dataset._backends, dataset._executor, [0, len(result)], extension=dataset._extension, packing=None, doc=dataset._doc, metadata=dataset._metadata)
                else:
                    out = Data(name, schema, dataset._backends, dataset._executor, extension=dataset._extension, packing=None, doc=dataset._doc, metadata=dataset._metadata)
                return update(out)
            return [self._executor.submit(task, name, self, namespace, update)]

    def act(self, combiner):
        """Evaluate the full pipeline on the executor and combine the future(s)."""
        def task(dataset):
            result = dataset()
            for operation in dataset._operations:
                result = operation.apply(result)
            return result
        return combiner([self._executor.submit(task, self)])
class Data(_Data):
    """A non-partitioned dataset: calling it materializes the schema as one proxy object."""

    def __call__(self):
        # Materialize lazily and cache the resulting proxy for reuse.
        if self._cachedobject is None:
            if self._extension is None:
                extension = oamap.util.import_module("oamap.extension.common")
            # NOTE(review): `basestring` exists only on Python 2; on Python 3 this
            # branch raises NameError for a string extension — confirm target interpreter.
            elif isinstance(self._extension, basestring):
                extension = oamap.util.import_module(self._extension)
            else:
                extension = [oamap.util.import_module(x) for x in self._extension]
            self._cachedobject = self._schema(self.arrays(), extension=extension, packing=self._packing)
        return self._cachedobject
class DataArrays(object):
    """Array source for a Data object: fetches arrays for requested roles from
    per-namespace backends, lazily instantiating and caching one active
    connection per namespace (always partition 0 for unpartitioned data)."""

    def __init__(self, backends):
        self._backends = backends   # mapping: namespace -> backend
        self._active = {}           # mapping: namespace -> instantiated backend (lazy)
        self._partitionid = 0       # unpartitioned data always reads partition 0

    def _toplevel(self, out, filtered):
        # Hook for subclasses to satisfy some roles directly (see DatasetArrays);
        # returns the roles still to be fetched from the backend.
        return filtered

    def getall(self, roles):
        """Return a dict mapping each requested role to its array."""
        out = {}
        for namespace, backend in self._backends.items():
            filtered = self._toplevel(out, [x for x in roles if x.namespace == namespace])
            if len(filtered) > 0:
                active = self._active.get(namespace, None)
                if active is None:
                    active = self._active[namespace] = backend.instantiate(self._partitionid)
                if hasattr(active, "getall"):
                    out.update(active.getall(filtered))
                else:
                    # FIX: iterate only the roles that belong to this namespace and
                    # were not already satisfied by _toplevel. The previous code
                    # iterated all of `roles`, asking every backend for every role
                    # (KeyError for roles of other namespaces, and it clobbered the
                    # entries _toplevel had placed in `out`).
                    for x in filtered:
                        out[x] = active[str(x)]
        return out

    def close(self):
        # Close every instantiated backend (if it supports closing) and forget it.
        for namespace, active in self._active.items():
            if hasattr(active, "close"):
                active.close()
            self._active[namespace] = None
class Dataset(_Data):
    """A partitioned dataset: a list schema whose entries are split into
    contiguous partitions described by monotonically increasing ``offsets``."""

    def __init__(self, name, schema, backends, executor, offsets, extension=None, packing=None, doc=None, metadata=None):
        if not isinstance(schema, oamap.schema.List):
            raise TypeError("Dataset must have a list schema, not\n\n    {0}".format(schema.__repr__(indent="    ")))
        super(Dataset, self).__init__(name, schema, backends, executor, extension=extension, packing=packing, doc=doc, metadata=metadata)

        # Normalize and validate the partition offsets: one-dimensional int64,
        # starting at zero, monotonically increasing.
        if not isinstance(offsets, numpy.ndarray):
            try:
                if not all(isinstance(x, (numbers.Integral, numpy.integer)) and x >= 0 for x in offsets):
                    raise TypeError
            except TypeError:
                raise TypeError("offsets must be an iterable of non-negative integers")
            offsets = numpy.array(offsets, dtype=numpy.int64)
        if len(offsets.shape) != 1:
            raise ValueError("offsets must be one-dimensional")
        if len(offsets) < 2 or offsets[0] != 0:
            raise ValueError("offsets must have at least two items, and the first one must be zero")
        if not numpy.all(offsets[:-1] <= offsets[1:]):
            raise ValueError("offsets must be monotonically increasing")
        self._offsets = offsets
        self._cachedpartition = None   # id of the partition currently held in _cachedobject

    def __repr__(self):
        return "<Dataset {0} {1} partitions {2} entries>{3}".format(repr(self._name), self.numpartitions, self.numentries, "".join(str(x) for x in self._operations))

    def __str__(self):
        return "<Dataset {0} {1} partitions {2} entries>{3}".format(repr(self._name), self.numpartitions, self.numentries, "".join("\n    " + str(x) for x in self._operations))

    @property
    def offsets(self):
        return self._offsets.tolist()

    @property
    def starts(self):
        # Global entry index at which each partition begins.
        return self._offsets[:-1].tolist()

    @property
    def stops(self):
        # Global entry index just past the end of each partition.
        return self._offsets[1:].tolist()

    @property
    def partitions(self):
        """(start, stop) pairs, one per partition."""
        # FIX: was zip(self.start, self.stop) — those attributes do not exist
        # (the properties are named `starts` and `stops`), raising AttributeError.
        return zip(self.starts, self.stops)

    @property
    def numpartitions(self):
        return len(self._offsets) - 1

    @property
    def numentries(self):
        return int(self._offsets[-1])

    def partition(self, partitionid):
        """Materialize (and cache) the proxy for one partition."""
        if self._cachedpartition != partitionid:
            self._cachedpartition = partitionid
            if self._extension is None:
                extension = oamap.util.import_module("oamap.extension.common")
            # NOTE(review): `basestring` is Python 2 only — on Python 3 a string
            # extension raises NameError here; confirm target interpreter.
            elif isinstance(self._extension, basestring):
                extension = oamap.util.import_module(self._extension)
            else:
                extension = [oamap.util.import_module(x) for x in self._extension]
            self._cachedobject = self._schema(self.arrays(partitionid), extension=extension, packing=self._packing)
        return self._cachedobject

    def __iter__(self):
        # Iterate entries partition by partition, in global order.
        for partitionid in range(self.numpartitions):
            for i in range(self._offsets[partitionid], self._offsets[partitionid + 1]):
                yield self[i]

    def __getitem__(self, index):
        if isinstance(index, slice):
            start, stop, step = oamap.util.slice2sss(index, self.numentries)
            # Locate the partition containing the slice's start.
            partitionid = max(0, min(numpy.searchsorted(self._offsets, start, side="right") - 1, self.numpartitions - 1))
            localstart = start - self._offsets[partitionid]
            localstop = stop - self._offsets[partitionid]
            if localstop < -1 or localstop > (self._offsets[partitionid + 1] - self._offsets[partitionid]):
                raise IndexError("slice spans multiple partitions")
            # Return a view onto the partition proxy with adjusted bounds/stride.
            out = self.partition(partitionid)
            out._whence = localstart
            out._stride = step
            # out._length = int(math.ceil(float(abs(localstop - localstart)) / abs(step)))
            d, m = divmod(abs(localstart - localstop), abs(step))
            out._length = d + (1 if m != 0 else 0)
            return out
        else:
            # Scalar index: normalize negatives, find the partition, index locally.
            normindex = index if index >= 0 else index + self.numentries
            if not 0 <= normindex < self.numentries:
                raise IndexError("index {0} out of range for {1} entries".format(index, self.numentries))
            partitionid = numpy.searchsorted(self._offsets, normindex, side="right") - 1
            localindex = normindex - self._offsets[partitionid]
            return self.partition(partitionid)[localindex]

    def arrays(self, partitionid):
        """Build the array source for one partition, serving the top-level
        list's starts/stops roles locally (see DatasetArrays)."""
        normid = partitionid if partitionid >= 0 else partitionid + self.numpartitions
        if not 0 <= normid < self.numpartitions:
            raise IndexError("partitionid {0} out of range for {1} partitions".format(partitionid, self.numpartitions))
        startsrole = oamap.generator.StartsRole(self._schema._get_starts("object", "-"), self._schema.namespace, None)
        stopsrole = oamap.generator.StopsRole(self._schema._get_stops("object", "-"), self._schema.namespace, None)
        startsrole.stops = stopsrole
        stopsrole.starts = startsrole
        return DatasetArrays(normid, startsrole, stopsrole, self._offsets[normid + 1] - self._offsets[normid], self._backends)

    def transform(self, name, namespace, update):
        """Evaluate the pending pipeline per partition; returns a list of futures."""
        if self._nooperations():
            return [SingleThreadExecutor.PseudoFuture(update(self))]
        elif self._notransformations():
            # Recastings only change the schema, so applying them to one
            # partition suffices to derive the new schema.
            result = self.partition(0)
            for operation in self._operations:
                result = operation.apply(result)
            if isinstance(result, oamap.proxy.ListProxy):
                out = Dataset(name, result._generator.schema, self._backends, self._executor, self._offsets, extension=self._extension, packing=None, doc=self._doc, metadata=self._metadata)
            else:
                out = Data(name, result._generator.schema, self._backends, self._executor, extension=self._extension, packing=None, doc=self._doc, metadata=self._metadata)
            return [SingleThreadExecutor.PseudoFuture(update(out))]
        else:
            # One task per partition writes its arrays into the backend and
            # reports (schema, numentries); a final collect task assembles the
            # new offsets and wraps the result.
            def task(name, dataset, namespace, partitionid):
                result = dataset.partition(partitionid)
                for operation in dataset._operations:
                    result = operation.apply(result)
                backend = dataset._backends[namespace]
                schema, roles2arrays = oamap.operations._DualSource.collect(result._generator.namedschema(), result._arrays, namespace, backend.prefix(name), backend.delimiter())
                active = backend.instantiate(partitionid)
                if hasattr(active, "putall"):
                    active.putall(roles2arrays)
                else:
                    for n, x in roles2arrays.items():
                        active[str(n)] = x
                if isinstance(result, oamap.proxy.ListProxy):
                    return schema, len(result)
                else:
                    return schema, 1

            tasks = [self._executor.submit(task, name, self, namespace, i) for i in range(self.numpartitions)]

            def collect(name, dataset, results, update):
                # Results may arrive as plain (schema, numentries) tuples (e.g.
                # from SingleThreadExecutor.PseudoFuture unwrapping) or as futures.
                if isinstance(results[0], tuple) and len(results[0]) == 2 and isinstance(results[0][0], oamap.schema.Schema):
                    offsets = numpy.cumsum([0] + [numentries for schema, numentries in results], dtype=numpy.int64)
                    schema = results[0][0]
                else:
                    offsets = numpy.cumsum([0] + [x.result()[1] for x in results], dtype=numpy.int64)
                    schema = results[0].result()[0]
                if isinstance(schema, oamap.schema.List):
                    out = Dataset(name, schema, dataset._backends, dataset._executor, offsets, extension=dataset._extension, packing=None, doc=dataset._doc, metadata=dataset._metadata)
                else:
                    out = Data(name, schema, dataset._backends, dataset._executor, extension=dataset._extension, packing=None, doc=dataset._doc, metadata=dataset._metadata)
                return update(out)

            tasks.append(self._executor.submit(collect, name, self, tuple(tasks), update))
            return tasks

    def act(self, combiner):
        """Evaluate the pipeline on every partition and combine the futures."""
        def task(dataset, partitionid):
            result = dataset.partition(partitionid)
            for operation in dataset._operations:
                result = operation.apply(result)
            return result
        return combiner([self._executor.submit(task, self, i) for i in range(self.numpartitions)])
class DatasetArrays(DataArrays):
    """Array source for one partition of a Dataset; serves the top-level list's
    starts/stops arrays itself instead of asking a backend for them."""

    def __init__(self, partitionid, startsrole, stopsrole, numentries, backends):
        super(DatasetArrays, self).__init__(backends)
        self._partitionid = partitionid
        self._startsrole = startsrole
        self._stopsrole = stopsrole
        self._numentries = numentries

    def _toplevel(self, out, filtered):
        # The partition's outermost list structure is fully determined by its
        # entry count, so satisfy those two roles here and drop them from the
        # backend request.
        if self._startsrole in filtered:
            filtered.remove(self._startsrole)
            out[self._startsrole] = numpy.array([0], dtype=oamap.generator.ListGenerator.posdtype)
        if self._stopsrole in filtered:
            filtered.remove(self._stopsrole)
            out[self._stopsrole] = numpy.array([self._numentries], dtype=oamap.generator.ListGenerator.posdtype)
        return filtered
|
[
"numpy.searchsorted",
"numpy.cumsum",
"numpy.array",
"functools.wraps",
"numpy.all"
] |
[((3620, 3645), 'functools.wraps', 'functools.wraps', (['function'], {}), '(function)\n', (3635, 3645), False, 'import functools\n'), ((4061, 4086), 'functools.wraps', 'functools.wraps', (['function'], {}), '(function)\n', (4076, 4086), False, 'import functools\n'), ((4509, 4534), 'functools.wraps', 'functools.wraps', (['function'], {}), '(function)\n', (4524, 4534), False, 'import functools\n'), ((11761, 11800), 'numpy.array', 'numpy.array', (['offsets'], {'dtype': 'numpy.int64'}), '(offsets, dtype=numpy.int64)\n', (11772, 11800), False, 'import numpy\n'), ((12065, 12103), 'numpy.all', 'numpy.all', (['(offsets[:-1] <= offsets[1:])'], {}), '(offsets[:-1] <= offsets[1:])\n', (12074, 12103), False, 'import numpy\n'), ((19925, 19987), 'numpy.array', 'numpy.array', (['[0]'], {'dtype': 'oamap.generator.ListGenerator.posdtype'}), '([0], dtype=oamap.generator.ListGenerator.posdtype)\n', (19936, 19987), False, 'import numpy\n'), ((20179, 20256), 'numpy.array', 'numpy.array', (['[self._numentries]'], {'dtype': 'oamap.generator.ListGenerator.posdtype'}), '([self._numentries], dtype=oamap.generator.ListGenerator.posdtype)\n', (20190, 20256), False, 'import numpy\n'), ((15136, 15194), 'numpy.searchsorted', 'numpy.searchsorted', (['self._offsets', 'normindex'], {'side': '"""right"""'}), "(self._offsets, normindex, side='right')\n", (15154, 15194), False, 'import numpy\n'), ((14137, 14191), 'numpy.searchsorted', 'numpy.searchsorted', (['self._offsets', 'start'], {'side': '"""right"""'}), "(self._offsets, start, side='right')\n", (14155, 14191), False, 'import numpy\n'), ((18127, 18217), 'numpy.cumsum', 'numpy.cumsum', (['([0] + [numentries for schema, numentries in results])'], {'dtype': 'numpy.int64'}), '([0] + [numentries for schema, numentries in results], dtype=\n numpy.int64)\n', (18139, 18217), False, 'import numpy\n')]
|
"""This module contains several helper functions which can be used to
find an address of the submitting system, for example to use as the
address parameter for HighThroughputExecutor.
The helper to use depends on the network environment around the submitter,
so some experimentation will probably be needed to choose the correct one.
"""
import logging
import os
import platform
import requests
import socket
import fcntl
import struct
import psutil
from typing import Set
logger = logging.getLogger(__name__)
def address_by_route() -> str:
    """Finds an address for the local host by querying the local routing table
    for the route to Google DNS.

    This will return an unusable value when the internet-facing address is
    not reachable from workers.
    """
    logger.debug("Finding address by querying local routing table")

    # Ask the kernel which source address would be used to reach 8.8.8.8;
    # awk picks the last field of the first output line (the address).
    route_cmd = "/sbin/ip route get 8.8.8.8 | awk '{print $NF;exit}'"
    raw_output = os.popen(route_cmd).read()
    addr = raw_output.strip()

    logger.debug("Address found: {}".format(addr))
    return addr
def address_by_query() -> str:
    """Finds an address for the local host by querying ipify. This may
    return an unusable value when the host is behind NAT, or when the
    internet-facing address is not reachable from workers.
    """
    logger.debug("Finding address by querying remote service")

    response = requests.get('https://api.ipify.org')
    # Guard clause: anything other than 200 means the service did not give us an address.
    if response.status_code != 200:
        raise RuntimeError("Remote service returned unexpected HTTP status code {}".format(response.status_code))

    addr = response.text
    logger.debug("Address found: {}".format(addr))
    return addr
def address_by_hostname() -> str:
    """Returns the hostname of the local host.

    This will return an unusable value when the hostname cannot be
    resolved from workers.
    """
    logger.debug("Finding address by using local hostname")

    hostname = platform.node()

    logger.debug("Address found: {}".format(hostname))
    return hostname
def address_by_interface(ifname: str) -> str:
    """Returns the IP address of the given interface name, e.g. 'eth0'

    This is taken from a Stack Overflow answer:
    https://stackoverflow.com/questions/24196932/how-can-i-get-the-ip-address-of-eth0-in-python#24196955

    Parameters
    ----------
    ifname : str
        Name of the interface whose address is to be returned. Required.
    """
    SIOCGIFADDR = 0x8915  # ioctl request: get interface address
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    # The ioctl wants a 256-byte buffer holding the (truncated) interface name.
    packed_ifname = struct.pack('256s', bytes(ifname[:15], 'utf-8'))
    ifreq = fcntl.ioctl(sock.fileno(), SIOCGIFADDR, packed_ifname)
    # Bytes 20..24 of the returned ifreq structure hold the IPv4 address.
    return socket.inet_ntoa(ifreq[20:24])
def get_all_addresses() -> Set[str]:
    """ Uses a combination of methods to determine possible addresses.

    Returns:
        list of addresses as strings
    """
    found = set()

    # Every interface's address is a candidate; failures are logged and ignored.
    for interface in psutil.net_if_addrs():
        try:
            found.add(address_by_interface(interface))
        except Exception:
            logger.exception("Ignoring failure to fetch address from interface {}".format(interface))

    # Add whatever the generic resolvers come up with, again best-effort.
    for resolver in (address_by_hostname, address_by_route, address_by_query):
        try:
            found.add(resolver())
        except Exception:
            logger.exception("Ignoring an address finder exception")

    return found
|
[
"platform.node",
"socket.socket",
"os.popen",
"requests.get",
"psutil.net_if_addrs",
"logging.getLogger"
] |
[((486, 513), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (503, 513), False, 'import logging\n'), ((1334, 1371), 'requests.get', 'requests.get', (['"""https://api.ipify.org"""'], {}), "('https://api.ipify.org')\n", (1346, 1371), False, 'import requests\n'), ((1900, 1915), 'platform.node', 'platform.node', ([], {}), '()\n', (1913, 1915), False, 'import platform\n'), ((2390, 2438), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (2403, 2438), False, 'import socket\n'), ((2793, 2814), 'psutil.net_if_addrs', 'psutil.net_if_addrs', ([], {}), '()\n', (2812, 2814), False, 'import psutil\n'), ((863, 926), 'os.popen', 'os.popen', (['"""/sbin/ip route get 8.8.8.8 | awk \'{print $NF;exit}\'"""'], {}), '("/sbin/ip route get 8.8.8.8 | awk \'{print $NF;exit}\'")\n', (871, 926), False, 'import os\n')]
|
#!/usr/bin env python
from tests.unit import AWSMockServiceTestCase
from boto.cloudsearch.domain import Domain
from boto.cloudsearch.layer1 import Layer1
import json
class TestCloudSearchCreateDomain(AWSMockServiceTestCase):
    """Tests for Layer1.create_domain and the Domain objects parsed from it."""

    connection_class = Layer1

    def default_body(self):
        # Canned CreateDomain response body served by the mock HTTP layer.
        return """
<CreateDomainResponse xmlns="http://cloudsearch.amazonaws.com/doc/2011-02-01">
  <CreateDomainResult>
    <DomainStatus>
      <SearchPartitionCount>0</SearchPartitionCount>
      <SearchService>
        <Arn>arn:aws:cs:us-east-1:1234567890:search/demo</Arn>
        <Endpoint>search-demo-userdomain.us-east-1.cloudsearch.amazonaws.com</Endpoint>
      </SearchService>
      <NumSearchableDocs>0</NumSearchableDocs>
      <Created>true</Created>
      <DomainId>1234567890/demo</DomainId>
      <Processing>false</Processing>
      <SearchInstanceCount>0</SearchInstanceCount>
      <DomainName>demo</DomainName>
      <RequiresIndexDocuments>false</RequiresIndexDocuments>
      <Deleted>false</Deleted>
      <DocService>
        <Arn>arn:aws:cs:us-east-1:1234567890:doc/demo</Arn>
        <Endpoint>doc-demo-userdomain.us-east-1.cloudsearch.amazonaws.com</Endpoint>
      </DocService>
    </DomainStatus>
  </CreateDomainResult>
  <ResponseMetadata>
    <RequestId>00000000-0000-0000-0000-000000000000</RequestId>
  </ResponseMetadata>
</CreateDomainResponse>
"""

    def _create_demo_domain(self):
        """Mock a 200 reply and return the Domain built from create_domain('demo').

        Shared by every test that inspects the parsed Domain, replacing the
        previously repeated three-line setup sequence.
        """
        self.set_http_response(status_code=200)
        api_response = self.service_connection.create_domain('demo')
        return Domain(self, api_response)

    def test_create_domain(self):
        """The CreateDomain request carries the expected query parameters."""
        self.set_http_response(status_code=200)
        # Return value is irrelevant here; only the outgoing request matters.
        self.service_connection.create_domain('demo')
        self.assert_request_parameters({
            'Action': 'CreateDomain',
            'DomainName': 'demo',
            'AWSAccessKeyId': 'aws_access_key_id',
            'SignatureMethod': 'HmacSHA256',
            'SignatureVersion': 2,
            'Version': '2011-02-01',
        }, ignore_params_values=['Timestamp'])

    def test_cloudsearch_connect_result_endpoints(self):
        """Check that endpoints & ARNs are correctly returned from AWS"""
        domain = self._create_demo_domain()
        self.assertEqual(domain.doc_service_arn,
                         "arn:aws:cs:us-east-1:1234567890:doc/demo")
        self.assertEqual(
            domain.doc_service_endpoint,
            "doc-demo-userdomain.us-east-1.cloudsearch.amazonaws.com")
        self.assertEqual(domain.search_service_arn,
                         "arn:aws:cs:us-east-1:1234567890:search/demo")
        self.assertEqual(
            domain.search_service_endpoint,
            "search-demo-userdomain.us-east-1.cloudsearch.amazonaws.com")

    def test_cloudsearch_connect_result_statuses(self):
        """Check that domain statuses are correctly returned from AWS"""
        domain = self._create_demo_domain()
        self.assertEqual(domain.created, True)
        self.assertEqual(domain.processing, False)
        self.assertEqual(domain.requires_index_documents, False)
        self.assertEqual(domain.deleted, False)

    def test_cloudsearch_connect_result_details(self):
        """Check that the domain information is correctly returned from AWS"""
        domain = self._create_demo_domain()
        self.assertEqual(domain.id, "1234567890/demo")
        self.assertEqual(domain.name, "demo")

    def test_cloudsearch_documentservice_creation(self):
        """get_document_service() exposes the document-service endpoint."""
        domain = self._create_demo_domain()
        document = domain.get_document_service()
        self.assertEqual(
            document.endpoint,
            "doc-demo-userdomain.us-east-1.cloudsearch.amazonaws.com")

    def test_cloudsearch_searchservice_creation(self):
        """get_search_service() exposes the search-service endpoint."""
        domain = self._create_demo_domain()
        search = domain.get_search_service()
        self.assertEqual(
            search.endpoint,
            "search-demo-userdomain.us-east-1.cloudsearch.amazonaws.com")
class CloudSearchConnectionDeletionTest(AWSMockServiceTestCase):
    """Tests for the request parameters sent by Layer1.delete_domain."""
    connection_class = Layer1
    def default_body(self):
        # Canned DeleteDomain response body served by the mock HTTP layer.
        return """
<DeleteDomainResponse xmlns="http://cloudsearch.amazonaws.com/doc/2011-02-01">
  <DeleteDomainResult>
    <DomainStatus>
      <SearchPartitionCount>0</SearchPartitionCount>
      <SearchService>
        <Arn>arn:aws:cs:us-east-1:1234567890:search/demo</Arn>
        <Endpoint>search-demo-userdomain.us-east-1.cloudsearch.amazonaws.com</Endpoint>
      </SearchService>
      <NumSearchableDocs>0</NumSearchableDocs>
      <Created>true</Created>
      <DomainId>1234567890/demo</DomainId>
      <Processing>false</Processing>
      <SearchInstanceCount>0</SearchInstanceCount>
      <DomainName>demo</DomainName>
      <RequiresIndexDocuments>false</RequiresIndexDocuments>
      <Deleted>false</Deleted>
      <DocService>
        <Arn>arn:aws:cs:us-east-1:1234567890:doc/demo</Arn>
        <Endpoint>doc-demo-userdomain.us-east-1.cloudsearch.amazonaws.com</Endpoint>
      </DocService>
    </DomainStatus>
  </DeleteDomainResult>
  <ResponseMetadata>
    <RequestId>00000000-0000-0000-0000-000000000000</RequestId>
  </ResponseMetadata>
</DeleteDomainResponse>
"""
    def test_cloudsearch_deletion(self):
        """
        Check that the correct arguments are sent to AWS when deleting a
        cloudsearch domain.
        """
        self.set_http_response(status_code=200)
        api_response = self.service_connection.delete_domain('demo')
        self.assert_request_parameters({
            'Action': 'DeleteDomain',
            'DomainName': 'demo',
            'AWSAccessKeyId': 'aws_access_key_id',
            'SignatureMethod': 'HmacSHA256',
            'SignatureVersion': 2,
            'Version': '2011-02-01',
        }, ignore_params_values=['Timestamp'])
class CloudSearchConnectionIndexDocumentTest(AWSMockServiceTestCase):
    """Tests for Layer1.index_documents: request construction and parsing."""

    connection_class = Layer1

    def default_body(self):
        # Canned IndexDocuments response body served by the mock HTTP layer.
        return """
<IndexDocumentsResponse xmlns="http://cloudsearch.amazonaws.com/doc/2011-02-01">
  <IndexDocumentsResult>
    <FieldNames>
      <member>average_score</member>
      <member>brand_id</member>
      <member>colors</member>
      <member>context</member>
      <member>context_owner</member>
      <member>created_at</member>
      <member>creator_id</member>
      <member>description</member>
      <member>file_size</member>
      <member>format</member>
      <member>has_logo</member>
      <member>has_messaging</member>
      <member>height</member>
      <member>image_id</member>
      <member>ingested_from</member>
      <member>is_advertising</member>
      <member>is_photo</member>
      <member>is_reviewed</member>
      <member>modified_at</member>
      <member>subject_date</member>
      <member>tags</member>
      <member>title</member>
      <member>width</member>
    </FieldNames>
  </IndexDocumentsResult>
  <ResponseMetadata>
    <RequestId>eb2b2390-6bbd-11e2-ab66-93f3a90dcf2a</RequestId>
  </ResponseMetadata>
</IndexDocumentsResponse>
"""

    def test_cloudsearch_index_documents(self):
        """
        Check that the correct arguments are sent to AWS when indexing a
        domain.
        """
        self.set_http_response(status_code=200)
        api_response = self.service_connection.index_documents('demo')
        expected_params = {
            'Action': 'IndexDocuments',
            'DomainName': 'demo',
            'AWSAccessKeyId': 'aws_access_key_id',
            'SignatureMethod': 'HmacSHA256',
            'SignatureVersion': 2,
            'Version': '2011-02-01',
        }
        self.assert_request_parameters(expected_params,
                                       ignore_params_values=['Timestamp'])

    def test_cloudsearch_index_documents_resp(self):
        """
        Check that the AWS response is being parsed correctly when indexing a
        domain.
        """
        self.set_http_response(status_code=200)
        api_response = self.service_connection.index_documents('demo')
        expected_fields = [
            'average_score', 'brand_id', 'colors', 'context',
            'context_owner', 'created_at', 'creator_id', 'description',
            'file_size', 'format', 'has_logo', 'has_messaging', 'height',
            'image_id', 'ingested_from', 'is_advertising', 'is_photo',
            'is_reviewed', 'modified_at', 'subject_date', 'tags', 'title',
            'width']
        self.assertEqual(api_response, expected_fields)
|
[
"boto.cloudsearch.domain.Domain"
] |
[((2124, 2150), 'boto.cloudsearch.domain.Domain', 'Domain', (['self', 'api_response'], {}), '(self, api_response)\n', (2130, 2150), False, 'from boto.cloudsearch.domain import Domain\n'), ((2940, 2966), 'boto.cloudsearch.domain.Domain', 'Domain', (['self', 'api_response'], {}), '(self, api_response)\n', (2946, 2966), False, 'from boto.cloudsearch.domain import Domain\n'), ((3448, 3474), 'boto.cloudsearch.domain.Domain', 'Domain', (['self', 'api_response'], {}), '(self, api_response)\n', (3454, 3474), False, 'from boto.cloudsearch.domain import Domain\n'), ((3769, 3795), 'boto.cloudsearch.domain.Domain', 'Domain', (['self', 'api_response'], {}), '(self, api_response)\n', (3775, 3795), False, 'from boto.cloudsearch.domain import Domain\n'), ((4165, 4191), 'boto.cloudsearch.domain.Domain', 'Domain', (['self', 'api_response'], {}), '(self, api_response)\n', (4171, 4191), False, 'from boto.cloudsearch.domain import Domain\n')]
|