Dataset schema - each record has three fields:

    code         string (length 22 to 1.05M characters): Python source file contents.
    apis         list (1 to 3.31k entries): fully-qualified names of the APIs called in the source.
    extract_api  string (length 75 to 3.25M characters): per-call extraction tuples giving character
                 offsets, the qualified API name, the name as written in the source, the arguments and
                 keywords, and the originating import statement.
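For orientation, the sketch below shows one way a record's code string could be mined for the call names that the apis field summarizes. It is only an illustration under assumptions, not the pipeline that produced this dataset: the helper name list_called_apis is hypothetical, it reports calls exactly as written in the source (e.g. np.array, plt.plot, and also plain method calls such as demand.append), and it does not resolve import aliases into the fully-qualified module paths stored in apis.

# Illustrative only: not the extraction pipeline behind this dataset.
# list_called_apis is a hypothetical helper name.
import ast

def list_called_apis(code):
    """Return dotted call names (e.g. 'np.array', 'plt.plot') found in a code string."""
    names = []
    for node in ast.walk(ast.parse(code)):
        if not isinstance(node, ast.Call):
            continue
        parts = []
        func = node.func
        # Unwind attribute chains so that a call written as a.b.c(...) is reported as 'a.b.c'.
        while isinstance(func, ast.Attribute):
            parts.append(func.attr)
            func = func.value
        if isinstance(func, ast.Name):
            parts.append(func.id)
            names.append(".".join(reversed(parts)))
    return names

The sample records follow, in the order: code, apis, extract_api.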
import numpy as np
import random
import matplotlib.pyplot as plt

n = 10
s = 0.5
S = 2
demand = []
replenish = []
x = [0]
y = [-s]
lambdas = np.array([1,2])
p = np.array([0.5,0.5])

for i in range(n):
    demand.append(random.uniform(0,1))
    if x[-1] < s:
        y.append(S - s)
        replenish.append(S - x[-1])
        x.append(max(S - demand[-1],0))
    else:
        y.append(x[-1] - s)
        replenish.append(0)
        x.append(max(x[-1] - demand[-1],0))

plt.plot(x)
plt.plot(y)
plt.plot(replenish)
plt.legend(['inventory','excess','replenish'])
plt.show()
[ "matplotlib.pyplot.show", "matplotlib.pyplot.plot", "random.uniform", "matplotlib.pyplot.legend", "numpy.array" ]
[((141, 157), 'numpy.array', 'np.array', (['[1, 2]'], {}), '([1, 2])\n', (149, 157), True, 'import numpy as np\n'), ((161, 181), 'numpy.array', 'np.array', (['[0.5, 0.5]'], {}), '([0.5, 0.5])\n', (169, 181), True, 'import numpy as np\n'), ((467, 478), 'matplotlib.pyplot.plot', 'plt.plot', (['x'], {}), '(x)\n', (475, 478), True, 'import matplotlib.pyplot as plt\n'), ((479, 490), 'matplotlib.pyplot.plot', 'plt.plot', (['y'], {}), '(y)\n', (487, 490), True, 'import matplotlib.pyplot as plt\n'), ((491, 510), 'matplotlib.pyplot.plot', 'plt.plot', (['replenish'], {}), '(replenish)\n', (499, 510), True, 'import matplotlib.pyplot as plt\n'), ((511, 559), 'matplotlib.pyplot.legend', 'plt.legend', (["['inventory', 'excess', 'replenish']"], {}), "(['inventory', 'excess', 'replenish'])\n", (521, 559), True, 'import matplotlib.pyplot as plt\n'), ((558, 568), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (566, 568), True, 'import matplotlib.pyplot as plt\n'), ((218, 238), 'random.uniform', 'random.uniform', (['(0)', '(1)'], {}), '(0, 1)\n', (232, 238), False, 'import random\n')]
# emacs: -*- mode: python; py-indent-offset: 4; tab-width: 4; indent-tabs-mode: nil -*-
# ex: set sts=4 ts=4 sw=4 noet:
# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
#
# See COPYING file distributed along with the datalad package for the
# copyright and license terms.
#
# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
"""Downloader tests helper utils"""

from unittest import SkipTest

from datalad.downloaders.providers import Providers


def get_test_providers(url=None, reload=False):
    """Return reusable instance of our global providers + verify credentials for url"""
    _test_providers = Providers.from_config_files(reload=reload)
    if url is not None:
        # check if we have credentials for the url
        provider = _test_providers.get_provider(url, only_nondefault=True)
        if provider is None or provider.credential is None:
            # no registered provider, or no credential needed,must be all kosher to access
            pass
        elif not provider.credential.is_known:
            raise SkipTest("This test requires known credentials for %s" % provider.credential.name)
    return _test_providers

get_test_providers.__test__ = False
[ "datalad.downloaders.providers.Providers.from_config_files", "unittest.SkipTest" ]
[((669, 711), 'datalad.downloaders.providers.Providers.from_config_files', 'Providers.from_config_files', ([], {'reload': 'reload'}), '(reload=reload)\n', (696, 711), False, 'from datalad.downloaders.providers import Providers\n'), ((1095, 1182), 'unittest.SkipTest', 'SkipTest', (["('This test requires known credentials for %s' % provider.credential.name)"], {}), "('This test requires known credentials for %s' % provider.\n credential.name)\n", (1103, 1182), False, 'from unittest import SkipTest\n')]
import sqlite3
import datetime
import sys
import csv

# Parses notes from the com.example.android.notepad app, can export lines from a subset of these to a csv file
# In solid explorer, navigate to /data/data/com.example.android.notepad, put the note_pad.db next to this script

db_file = 'note_pad.db'
connection = sqlite3.connect(db_file)
cursor = connection.cursor()

if len(sys.argv) == 1:
    data = [row for row in cursor.execute("SELECT * FROM notes ORDER BY _id")]
    for row in data:
        created = datetime.datetime.fromtimestamp(row[3]/1000)
        print("ID " + str(row[0]) + " " + created.strftime("%Y-%m-%d") + " " + row[2].split('\n', 1)[0])
    print("\nChoose range of IDs to include (argument in the format X-Y)")
else:
    r = [int(v) for v in sys.argv[1].split("-")]
    data = [row for row in cursor.execute("SELECT * FROM notes WHERE _id >= ? AND _id <= ?", r)]
    print("Exporting rows:")
    for row in data:
        created = datetime.datetime.fromtimestamp(row[3]/1000)
        print("ID " + str(row[0]) + " " + created.strftime("%Y-%m-%d") + " " + row[2].split('\n', 1)[0])
    print("Remove first line of note (shown above) in export? Y/N")
    remove_title = input().lower() == 'y'
    print("Filename for CSV export?")
    filename = input() + ".csv"
    with open(filename, 'w', newline='', encoding="utf-8") as csvfile:
        csvwriter = csv.writer(csvfile, delimiter=',', quotechar='"')
        for row in data:
            created = datetime.datetime.fromtimestamp(row[3]/1000).strftime("%Y-%m-%d")
            lines = [line for index, line in enumerate(row[2].split("\n")) if line.strip() != '' and (index > 0 or not remove_title)]
            csv_rows = [[created, line.strip()] for line in lines]
            csvwriter.writerows(csv_rows)
    print("Written " + filename)
connection.close()
[ "sqlite3.connect", "csv.writer", "datetime.datetime.fromtimestamp" ]
[((327, 351), 'sqlite3.connect', 'sqlite3.connect', (['db_file'], {}), '(db_file)\n', (342, 351), False, 'import sqlite3\n'), ((531, 577), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['(row[3] / 1000)'], {}), '(row[3] / 1000)\n', (562, 577), False, 'import datetime\n'), ((988, 1034), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['(row[3] / 1000)'], {}), '(row[3] / 1000)\n', (1019, 1034), False, 'import datetime\n'), ((1420, 1469), 'csv.writer', 'csv.writer', (['csvfile'], {'delimiter': '""","""', 'quotechar': '"""\\""""'}), '(csvfile, delimiter=\',\', quotechar=\'"\')\n', (1430, 1469), False, 'import csv\n'), ((1519, 1565), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['(row[3] / 1000)'], {}), '(row[3] / 1000)\n', (1550, 1565), False, 'import datetime\n')]
from django.conf import settings
from django.views.generic import TemplateView

from product.views.extra import picture_carousel


class HomePageView(TemplateView):
    template_name = 'home.html'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        if hasattr(settings, 'HOMEPAGE_PICTURE_CAROUSEL'):
            context['carousel'] = picture_carousel(settings.HOMEPAGE_PICTURE_CAROUSEL, 'carousel')
        return context
[ "product.views.extra.picture_carousel" ]
[((386, 450), 'product.views.extra.picture_carousel', 'picture_carousel', (['settings.HOMEPAGE_PICTURE_CAROUSEL', '"""carousel"""'], {}), "(settings.HOMEPAGE_PICTURE_CAROUSEL, 'carousel')\n", (402, 450), False, 'from product.views.extra import picture_carousel\n')]
import hashlib
import os
import json
import requests

from pymongo import MongoClient
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
db = SQLAlchemy(app)

from models import People

app.config['CONFIG_SHA1'] = ''
app.config['PEOPLE_NAMES'] = list()


def _check_sha1(file):
    BLOCKSIZE = 65536
    sha1 = hashlib.sha1()
    with open(file, 'rb') as f:
        buf = f.read(BLOCKSIZE)
        while len(buf) > 0:
            sha1.update(buf)
            buf = f.read(BLOCKSIZE)
    return sha1.hexdigest()


def _check_config_file(file):
    sha1 = _check_sha1(file)
    if sha1 != app.config['CONFIG_SHA1']:
        app.config['CONFIG_SHA1'] = sha1
        update_config()


def _get_names():
    names_url = 'http://uinames.com/api/?amount=10&ext&region=United%20States'
    resp = requests.get(names_url)
    return [{'first_name': entry['name'], 'last_name': entry['surname'], 'age': entry['age']} for entry in resp.json()]


def _create_db():
    db.create_all()
    db.session.commit()


def update_config():
    app.config['PEOPLE_NAMES'] = _get_names()
    app.config.from_envvar('PFAE_CONFIG', silent=True)


@app.route('/')
def slash():
    _check_config_file(os.environ['PFAE_CONFIG'])
    return json.dumps({'app_name': 'pfae'}), 200


@app.route('/test')
def add_to_sql():
    _check_config_file(os.environ['PFAE_CONFIG'])
    _create_db()
    db.engine.execute(People.__table__.insert(), app.config['PEOPLE_NAMES'])
    return '', 200


@app.route('/transfer')
def transfer_to_nosql():
    _check_config_file(os.environ['PFAE_CONFIG'])
    mongo = MongoClient('mongodb://{}:{}@{}'.format(
        app.config['MONGO_USER'], app.config['MONGO_PASSWORD'], app.config['MONGO_HOST']))
    db = mongo.pfae
    mongo_people = db.people
    people = ([{'first_name': row.first_name, 'last_name': row.last_name, 'age': row.age} for row in People.query.all()])
    mongo_people.insert_many(people)
    return '', 200


@app.route('/count')
def count_records():
    _check_config_file(os.environ['PFAE_CONFIG'])
    mongo = MongoClient('mongodb://{}:{}@{}'.format(
        app.config['MONGO_USER'], app.config['MONGO_PASSWORD'], app.config['MONGO_HOST']))
    db = mongo.pfae
    people = db.people
    return json.dumps({'people_count': people.count()}), 200


if __name__ == '__main__':
    update_config()
    app.run()
[ "models.People.__table__.insert", "hashlib.sha1", "flask.Flask", "json.dumps", "flask_sqlalchemy.SQLAlchemy", "requests.get", "models.People.query.all" ]
[((156, 171), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (161, 171), False, 'from flask import Flask\n'), ((177, 192), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', (['app'], {}), '(app)\n', (187, 192), False, 'from flask_sqlalchemy import SQLAlchemy\n'), ((346, 360), 'hashlib.sha1', 'hashlib.sha1', ([], {}), '()\n', (358, 360), False, 'import hashlib\n'), ((824, 847), 'requests.get', 'requests.get', (['names_url'], {}), '(names_url)\n', (836, 847), False, 'import requests\n'), ((1260, 1292), 'json.dumps', 'json.dumps', (["{'app_name': 'pfae'}"], {}), "({'app_name': 'pfae'})\n", (1270, 1292), False, 'import json\n'), ((1427, 1452), 'models.People.__table__.insert', 'People.__table__.insert', ([], {}), '()\n', (1450, 1452), False, 'from models import People\n'), ((1944, 1962), 'models.People.query.all', 'People.query.all', ([], {}), '()\n', (1960, 1962), False, 'from models import People\n')]
"""Setup file for sopel-remind. See ``setup.cfg`` for setup config.""" from setuptools import setup setup()
[ "setuptools.setup" ]
[((101, 108), 'setuptools.setup', 'setup', ([], {}), '()\n', (106, 108), False, 'from setuptools import setup\n')]
#!/usr/bin/env python3 # exprs.py --- # # Filename: exprs.py # Author: <NAME> # Created: Wed Aug 19 15:47:31 2015 (-0400) # # # Copyright (c) 2015, <NAME>, University of Pennsylvania # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # 3. All advertising materials mentioning features or use of this software # must display the following acknowledgement: # This product includes software developed by The University of Pennsylvania # 4. Neither the name of the University of Pennsylvania nor the # names of its contributors may be used to endorse or promote products # derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER ''AS IS'' AND ANY # EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # # Code: """Implements an expression type, along with a manager to create expressions as needed """ from utils import basetypes import collections from enum import IntEnum from exprs import exprtypes from semantics import semantics_types from utils import utils if __name__ == '__main__': utils.print_module_misuse_and_exit() class ExpressionKinds(IntEnum): """Expression Kinds variable_expression: An expression representing a typed variable. constant_expression: An expression representing a typed constant. function_expression: An expression representing a function application. 
""" variable_expression = 1 formal_parameter_expression = 2 constant_expression = 3 function_expression = 4 class _VariableExpression(collections.namedtuple('VariableExpression', ['expr_kind', 'variable_info', 'expr_id'])): def __str__(self): return expression_to_string(self) class _FormalParameterExpression(collections.namedtuple('FormalParameterExpression', ['expr_kind', 'unknown_function_info', 'parameter_type', 'parameter_position', 'expr_id'])): def __str__(self): return expression_to_string(self) _ConstantExpression = collections.namedtuple('ConstantExpression', ['expr_kind', 'value_object', 'expr_id']) class _FunctionExpression(collections.namedtuple('FunctionExpression', ['expr_kind', 'function_info', 'children', 'expr_id'])): def __str__(self): return expression_to_string(self) Value = collections.namedtuple('Value', ['value_object', 'value_type']) _variable_expression = ExpressionKinds.variable_expression _constant_expression = ExpressionKinds.constant_expression _function_expression = ExpressionKinds.function_expression _formal_parameter_expression = ExpressionKinds.formal_parameter_expression def get_expr_with_id(expr_object, expr_id): kind = expr_object.expr_kind if (kind == _variable_expression): (a, b, c) = expr_object return _VariableExpression(a, b, expr_id) elif (kind == _constant_expression): (a, b, c) = expr_object return _ConstantExpression(a, b, expr_id) elif (kind == _formal_parameter_expression): (a, b, c, d, e) = expr_object return _FormalParameterExpression(a, b, c, d, expr_id) elif (kind == _function_expression): (a, b, c, d) = expr_object return _FunctionExpression(a, b, c, expr_id) else: assert False def VariableExpression(variable_info): return _VariableExpression(_variable_expression, variable_info, None) def ConstantExpression(value_object): return _ConstantExpression(_constant_expression, value_object, None) def FunctionExpression(function_info, children): assert function_info is not None assert type(children) is tuple return _FunctionExpression(_function_expression, function_info, children, None) def FormalParameterExpression(unknown_function_info, parameter_type, parameter_position): return _FormalParameterExpression(_formal_parameter_expression, unknown_function_info, parameter_type, parameter_position, None) def value_to_string(the_value): if (the_value.value_type.type_code == exprtypes.TypeCodes.boolean_type): if (the_value.value_object == True): return 'true' else: return 'false' elif (the_value.value_type.type_code == exprtypes.TypeCodes.integer_type): return str(the_value.value_object) elif (the_value.value_type.type_code == exprtypes.TypeCodes.bit_vector_type): return utils.bitvector_to_string(the_value.value_object, the_value.value_type.size) class VariableInfo(object): __slots__ = ['variable_type', 'variable_eval_offset', 'variable_name', 'synthesis_ctx'] _undefined_offset = 1000000000 def __init__(self, variable_type, variable_name, variable_eval_offset = _undefined_offset, synthesis_ctx = None): self.variable_name = variable_name self.variable_type = variable_type self.variable_eval_offset = variable_eval_offset self.synthesis_ctx = None def __str__(self): return ('VariableInfo(%s, %s, %s)' % (str(self.variable_type), self.variable_name, str(self.variable_eval_offset))) def _constant_to_string(constant_type, constant_value): if constant_type == exprtypes.BoolType(): return str(constant_value).lower() elif constant_type == exprtypes.IntType(): return str(constant_value) elif constant_type == exprtypes.StringType(): return '"%s"' % constant_value 
else: return utils.bitvector_to_string(constant_value, constant_type.size) # non-recursive # def expression_to_string(expr): # """Returns a string representation of an expression""" # stack = [expr] # retval = '' # while len(stack) > 0: # curr = stack.pop(0) # if curr == None: # # retval = retval[:-1] + ') ' # retval += ') ' # continue # kind = curr.expr_kind # if (kind == _variable_expression): # retval += curr.variable_info.variable_name # elif (kind == _formal_parameter_expression): # retval += '_arg_%d' % curr.parameter_position # elif (kind == _constant_expression): # retval += _constant_to_string(curr.value_object.value_type, curr.value_object.value_object) # else: # if curr.function_info.function_name != 'let' \ # and curr.function_info.function_name != 'ne': # retval += '(' + curr.function_info.function_name # stack[0:0] = curr.children + (None,) # else: # retval += curr.function_info.to_string(curr) # retval += ' ' # # return retval[:-1] # recursive def expression_to_string(expr): """Returns a string representation of an expression""" kind = expr.expr_kind if (kind == _variable_expression): return expr.variable_info.variable_name elif (kind == _formal_parameter_expression): return '_arg_%d' % expr.parameter_position elif (kind == _constant_expression): return _constant_to_string(expr.value_object.value_type, expr.value_object.value_object) else: if expr.function_info.function_name != 'let' \ and expr.function_info.function_name != 'ne': retval = '(' + expr.function_info.function_name for child in expr.children: retval += ' ' retval += expression_to_string(child) retval += ')' return retval else: return expr.function_info.to_string(expr) def get_expression_type(expr): """Returns the type of the expression.""" kind = expr.expr_kind if (kind == _variable_expression): return expr.variable_info.variable_type elif (kind == _formal_parameter_expression): return expr.parameter_type elif (kind == _constant_expression): return expr.value_object.value_type elif (kind == _function_expression): return expr.function_info.range_type else: raise basetypes.UnhandledCaseError('Odd expression kind: %s' % expr.expr_kind) def get_expression_size(expr): """Returns the (syntactic) size of the expression.""" kind = expr.expr_kind if (kind == _variable_expression or kind == _constant_expression or kind == _formal_parameter_expression): return 1 elif (expr.expr_kind == ExpressionKinds.function_expression): retval = 1 for child in expr.children: retval += get_expression_size(child) return retval else: raise basetypes.UnhandledCaseError('Odd expression kind: %s' % expr.expr_kind) def substitute(expr, old_term, new_term): ret = substitute_all(expr, [(old_term, new_term)]) return ret def substitute_all(expr, substitute_pairs): for old,new in substitute_pairs: if expr == old: ret = new break else: if (expr.expr_kind == _function_expression): subst_children = [substitute_all(x, substitute_pairs) for x in expr.children] ret = FunctionExpression(expr.function_info, tuple(subst_children)) else: ret = expr return ret def find_all_applications(expr, function_name): ret = [] if (isinstance(expr, _FunctionExpression)): if (expr.function_info.function_name == function_name): ret.append(expr) for child in expr.children: ret.extend(find_all_applications(child, function_name)) return ret # Returns only the first application of a function it finds def find_application(expr, function_name): if isinstance(expr, _FunctionExpression): if expr.function_info.function_name == function_name: return expr else: for child in 
expr.children: ret = find_application(child, function_name) if ret is not None: return ret return None def find_all_synth_fun_apps(expr): if not is_function_expression(expr): return set() ret = set() for child in expr.children: ret = ret | find_all_synth_fun_apps(child) if expr.function_info.function_kind == semantics_types.FunctionKinds.synth_function: ret.add(expr) return ret def parent_of(expr, sub_expr): if not is_function_expression(expr): return None for child in expr.children: if child == sub_expr: return expr sub = parent_of(child, sub_expr) if sub is not None: return sub return None def get_all_constants(expr): if is_function_expression(expr): ret = set() for child in expr.children: ret = ret.union(get_all_constants(child)) return ret elif is_constant_expression(expr): return set([expr]) elif is_formal_parameter_expression(expr): return set() elif is_variable_expression(expr): return set() else: raise Exception def get_all_variables(expr): if is_function_expression(expr): ret = set() for child in expr.children: ret = ret.union(get_all_variables(child)) return ret elif is_constant_expression(expr): return set() elif is_formal_parameter_expression(expr): return set() elif is_variable_expression(expr): return set([expr]) else: raise Exception def get_all_formal_parameters(expr): if is_function_expression(expr): ret = set() for child in expr.children: ret = ret.union(get_all_formal_parameters(child)) return ret elif is_constant_expression(expr): return set() elif is_formal_parameter_expression(expr): return set([expr]) elif is_variable_expression(expr): return set() else: raise Exception def is_expression(obj): return (isinstance(obj, _VariableExpression) or isinstance(obj, _ConstantExpression) or isinstance(obj, _FormalParameterExpression) or isinstance(obj, _FunctionExpression)) def is_function_expression(obj): return isinstance(obj, _FunctionExpression) def is_constant_expression(obj): return isinstance(obj, _ConstantExpression) def is_formal_parameter_expression(obj): return isinstance(obj, _FormalParameterExpression) def is_variable_expression(obj): return isinstance(obj, _VariableExpression) def is_application_of(obj, func_name_or_info): assert is_expression(obj) if not isinstance(obj, _FunctionExpression): return False if func_name_or_info == obj.function_info or func_name_or_info == obj.function_info.function_name: return True return False def _check_equivalence_under_constraint(expr1, expr2, smt_ctx, arg_vars, constraint, random): import z3 expr1_smt = semantics_types.expression_to_smt(expr1, smt_ctx, arg_vars) expr2_smt = semantics_types.expression_to_smt(expr2, smt_ctx, arg_vars) if constraint is not None: constraint_smt = semantics_types.expression_to_smt(constraint, smt_ctx, arg_vars) else: constraint_smt = z3.BoolVal(True, ctx=smt_ctx.ctx()) condition = z3.And(constraint_smt, (expr1_smt != expr2_smt), smt_ctx.ctx()) if random: return random_sample(condition, smt_ctx.ctx(), arg_vars) else: return _z3_solve(condition, arg_vars) def check_equivalence_under_constraint(expr1, expr2, smt_ctx, arg_vars, constraint, random=False): return _check_equivalence_under_constraint(expr1, expr2, smt_ctx, arg_vars, constraint, random) def check_equivalence(expr1, expr2, smt_ctx, arg_vars, random=False): return _check_equivalence_under_constraint(expr1, expr2, smt_ctx, arg_vars, None, random) def _z3_solve(z3_expr, arg_vars): import z3 smt_solver = z3.Solver(ctx=z3_expr.ctx) smt_solver.push() smt_solver.add(z3_expr) r = smt_solver.check() smt_solver.pop() if r == z3.sat: point = [ 
smt_solver.model().evaluate(arg_var, True) for arg_var in arg_vars ] return point else: return None def sample(pred_or_pred_smt, smt_ctx, arg_vars): if is_expression(pred_or_pred_smt): pred_smt = semantics_types.expression_to_smt(pred_or_pred_smt, smt_ctx, arg_vars) else: pred_smt = pred_or_pred_smt return _z3_solve(pred_smt, arg_vars) # Is not really uniform random # The purpose is to make the pattern opaque to human eye def random_sample(pred_or_pred_smt, smt_ctx, arg_vars): import z3 import random if len(arg_vars) != 1 or type(arg_vars[0]) != z3.BitVecRef: raise NotImplementedError arg = arg_vars[0] bit_vec_size = arg.size() positions = list(range(bit_vec_size)) random.shuffle(positions) if is_expression(pred_or_pred_smt): pred_smt = semantics_types.expression_to_smt(pred_or_pred_smt, smt_ctx, arg_vars) else: pred_smt = pred_or_pred_smt orig_sample = _z3_solve(pred_smt, arg_vars) if orig_sample is None: return None zero = z3.BitVecVal(0, bit_vec_size, pred_smt.ctx) for position in positions: mask = z3.BitVecVal((1 << position), bit_vec_size, pred_smt.ctx) with_one = z3.And(pred_smt, (arg & mask == mask), pred_smt.ctx) with_zero = z3.And(pred_smt, (arg & mask == zero), pred_smt.ctx) with_one_sat = _z3_solve(with_one, arg_vars) with_zero_sat = _z3_solve(with_zero, arg_vars) assert with_one_sat is not None or with_zero_sat is not None if with_one_sat == None: pred_smt = with_zero elif with_zero_sat == None: pred_smt = with_one else: # Choose randomly pred_smt = random.choice([with_one, with_zero]) result = _z3_solve(pred_smt, arg_vars) assert result is not None return result def match(expr_template, expr): if expr_template == expr: return {} elif is_variable_expression(expr_template): return { expr_template:expr } elif (not is_function_expression(expr_template) or \ not is_function_expression(expr) or \ expr_template.function_info != expr.function_info): return None d = {} for child1, child2 in zip(expr_template.children, expr.children): dd = match(child1, child2) if dd is None: return None for v, e in dd.items(): if v in d: return None d[v] = e return d def equals(e1, e2): # print("1:", expression_to_string(e1)) # print("2:", expression_to_string(e2)) if e1.expr_kind != e2.expr_kind: ret = False else: kind = e1.expr_kind if (kind == _variable_expression): ret = e1.variable_info == e2.variable_info elif (kind == _formal_parameter_expression): ret = ((e1.unknown_function_info == e2.unknown_function_info) and (e1.parameter_position == e2.parameter_position)) elif (kind == _constant_expression): ret = (e1.value_object == e2.value_object) elif (kind == _function_expression): if e1.function_info.function_name != e2.function_info.function_name: ret = False else: ret = all(map(lambda ec1, ec2: equals(ec1,ec2), e1.children, e2.children)) else: assert False # print(ret) return ret def get_all_exprs(expr): result = set([expr]) if is_function_expression(expr): for child in expr.children: result.update(get_all_exprs(child)) return result def print_expr_as_solution(expr): params = list(get_all_formal_parameters(expr)) params.sort(key = lambda x: expression_to_string(x)) fp_string = '' for param in params: fp_string = fp_string + ' (' fp_string = fp_string + expression_to_string(param) fp_string = fp_string + ' ' + get_expression_type(param).print_string() + ') ' print('(define-fun f (%s) %s %s)' % ( fp_string, get_expression_type(expr).print_string(), expression_to_string(expr) ), flush=True) # # exprs.py ends here
[ "exprs.exprtypes.IntType", "utils.basetypes.UnhandledCaseError", "z3.And", "random.shuffle", "utils.utils.print_module_misuse_and_exit", "random.choice", "exprs.exprtypes.StringType", "exprs.exprtypes.BoolType", "z3.Solver", "collections.namedtuple", "z3.BitVecVal", "utils.utils.bitvector_to_string", "semantics.semantics_types.expression_to_smt" ]
[((2637, 2728), 'collections.namedtuple', 'collections.namedtuple', (['"""VariableExpression"""', "['expr_kind', 'variable_info', 'expr_id']"], {}), "('VariableExpression', ['expr_kind', 'variable_info',\n 'expr_id'])\n", (2659, 2728), False, 'import collections\n'), ((2872, 3023), 'collections.namedtuple', 'collections.namedtuple', (['"""FormalParameterExpression"""', "['expr_kind', 'unknown_function_info', 'parameter_type',\n 'parameter_position', 'expr_id']"], {}), "('FormalParameterExpression', ['expr_kind',\n 'unknown_function_info', 'parameter_type', 'parameter_position', 'expr_id']\n )\n", (2894, 3023), False, 'import collections\n'), ((3369, 3459), 'collections.namedtuple', 'collections.namedtuple', (['"""ConstantExpression"""', "['expr_kind', 'value_object', 'expr_id']"], {}), "('ConstantExpression', ['expr_kind', 'value_object',\n 'expr_id'])\n", (3391, 3459), False, 'import collections\n'), ((3529, 3632), 'collections.namedtuple', 'collections.namedtuple', (['"""FunctionExpression"""', "['expr_kind', 'function_info', 'children', 'expr_id']"], {}), "('FunctionExpression', ['expr_kind', 'function_info',\n 'children', 'expr_id'])\n", (3551, 3632), False, 'import collections\n'), ((3705, 3768), 'collections.namedtuple', 'collections.namedtuple', (['"""Value"""', "['value_object', 'value_type']"], {}), "('Value', ['value_object', 'value_type'])\n", (3727, 3768), False, 'import collections\n'), ((2171, 2207), 'utils.utils.print_module_misuse_and_exit', 'utils.print_module_misuse_and_exit', ([], {}), '()\n', (2205, 2207), False, 'from utils import utils\n'), ((14448, 14507), 'semantics.semantics_types.expression_to_smt', 'semantics_types.expression_to_smt', (['expr1', 'smt_ctx', 'arg_vars'], {}), '(expr1, smt_ctx, arg_vars)\n', (14481, 14507), False, 'from semantics import semantics_types\n'), ((14524, 14583), 'semantics.semantics_types.expression_to_smt', 'semantics_types.expression_to_smt', (['expr2', 'smt_ctx', 'arg_vars'], {}), '(expr2, smt_ctx, arg_vars)\n', (14557, 14583), False, 'from semantics import semantics_types\n'), ((15423, 15449), 'z3.Solver', 'z3.Solver', ([], {'ctx': 'z3_expr.ctx'}), '(ctx=z3_expr.ctx)\n', (15432, 15449), False, 'import z3\n'), ((16351, 16376), 'random.shuffle', 'random.shuffle', (['positions'], {}), '(positions)\n', (16365, 16376), False, 'import random\n'), ((16663, 16706), 'z3.BitVecVal', 'z3.BitVecVal', (['(0)', 'bit_vec_size', 'pred_smt.ctx'], {}), '(0, bit_vec_size, pred_smt.ctx)\n', (16675, 16706), False, 'import z3\n'), ((6762, 6782), 'exprs.exprtypes.BoolType', 'exprtypes.BoolType', ([], {}), '()\n', (6780, 6782), False, 'from exprs import exprtypes\n'), ((14640, 14704), 'semantics.semantics_types.expression_to_smt', 'semantics_types.expression_to_smt', (['constraint', 'smt_ctx', 'arg_vars'], {}), '(constraint, smt_ctx, arg_vars)\n', (14673, 14704), False, 'from semantics import semantics_types\n'), ((15817, 15887), 'semantics.semantics_types.expression_to_smt', 'semantics_types.expression_to_smt', (['pred_or_pred_smt', 'smt_ctx', 'arg_vars'], {}), '(pred_or_pred_smt, smt_ctx, arg_vars)\n', (15850, 15887), False, 'from semantics import semantics_types\n'), ((16437, 16507), 'semantics.semantics_types.expression_to_smt', 'semantics_types.expression_to_smt', (['pred_or_pred_smt', 'smt_ctx', 'arg_vars'], {}), '(pred_or_pred_smt, smt_ctx, arg_vars)\n', (16470, 16507), False, 'from semantics import semantics_types\n'), ((16753, 16808), 'z3.BitVecVal', 'z3.BitVecVal', (['(1 << position)', 'bit_vec_size', 'pred_smt.ctx'], {}), '(1 << position, 
bit_vec_size, pred_smt.ctx)\n', (16765, 16808), False, 'import z3\n'), ((16831, 16881), 'z3.And', 'z3.And', (['pred_smt', '(arg & mask == mask)', 'pred_smt.ctx'], {}), '(pred_smt, arg & mask == mask, pred_smt.ctx)\n', (16837, 16881), False, 'import z3\n'), ((16904, 16954), 'z3.And', 'z3.And', (['pred_smt', '(arg & mask == zero)', 'pred_smt.ctx'], {}), '(pred_smt, arg & mask == zero, pred_smt.ctx)\n', (16910, 16954), False, 'import z3\n'), ((6853, 6872), 'exprs.exprtypes.IntType', 'exprtypes.IntType', ([], {}), '()\n', (6870, 6872), False, 'from exprs import exprtypes\n'), ((10161, 10233), 'utils.basetypes.UnhandledCaseError', 'basetypes.UnhandledCaseError', (["('Odd expression kind: %s' % expr.expr_kind)"], {}), "('Odd expression kind: %s' % expr.expr_kind)\n", (10189, 10233), False, 'from utils import basetypes\n'), ((5859, 5935), 'utils.utils.bitvector_to_string', 'utils.bitvector_to_string', (['the_value.value_object', 'the_value.value_type.size'], {}), '(the_value.value_object, the_value.value_type.size)\n', (5884, 5935), False, 'from utils import utils\n'), ((6935, 6957), 'exprs.exprtypes.StringType', 'exprtypes.StringType', ([], {}), '()\n', (6955, 6957), False, 'from exprs import exprtypes\n'), ((7023, 7084), 'utils.utils.bitvector_to_string', 'utils.bitvector_to_string', (['constant_value', 'constant_type.size'], {}), '(constant_value, constant_type.size)\n', (7048, 7084), False, 'from utils import utils\n'), ((17326, 17362), 'random.choice', 'random.choice', (['[with_one, with_zero]'], {}), '([with_one, with_zero])\n', (17339, 17362), False, 'import random\n'), ((9612, 9684), 'utils.basetypes.UnhandledCaseError', 'basetypes.UnhandledCaseError', (["('Odd expression kind: %s' % expr.expr_kind)"], {}), "('Odd expression kind: %s' % expr.expr_kind)\n", (9640, 9684), False, 'from utils import basetypes\n')]
import os import string import time import vim RConsole = 0 Rterm = False try: import win32api import win32clipboard import win32com.client import win32con import win32gui except ImportError: import platform myPyVersion = platform.python_version() myArch = platform.architecture() vim.command("call RWarningMsgInp('Please install PyWin32. The Python version being used is: " + myPyVersion + " (" + myArch[0] + ")')") vim.command("let rplugin_pywin32 = 0") def RightClick(): global RConsole myHandle = win32gui.GetForegroundWindow() RaiseRConsole() time.sleep(0.05) lParam = (100 << 16) | 100 win32gui.SendMessage(RConsole, win32con.WM_RBUTTONDOWN, 0, lParam) win32gui.SendMessage(RConsole, win32con.WM_RBUTTONUP, 0, lParam) time.sleep(0.05) try: win32gui.SetForegroundWindow(myHandle) except: vim.command("call RWarningMsg('Could not put itself on foreground.')") def CntrlV(): global RConsole win32api.keybd_event(0x11, 0, 0, 0) try: win32api.PostMessage(RConsole, 0x100, 0x56, 0x002F0001) except: vim.command("call RWarningMsg('R Console window not found [1].')") RConsole = 0 pass if RConsole: time.sleep(0.05) try: win32api.PostMessage(RConsole, 0x101, 0x56, 0xC02F0001) except: vim.command("call RWarningMsg('R Console window not found [2].')") pass win32api.keybd_event(0x11, 0, 2, 0) def FindRConsole(): global RConsole Rttl = vim.eval("g:vimrplugin_R_window_title") Rtitle = Rttl RConsole = win32gui.FindWindow(None, Rtitle) if RConsole == 0: Rtitle = Rttl + " (64-bit)" RConsole = win32gui.FindWindow(None, Rtitle) if RConsole == 0: Rtitle = Rttl + " (32-bit)" RConsole = win32gui.FindWindow(None, Rtitle) if RConsole == 0: vim.command("call RWarningMsg('Could not find R Console.')") if RConsole: vim.command("let g:rplugin_R_window_ttl = '" + Rtitle + "'") def SendToRConsole(aString): global RConsole global Rterm SendToVimCom("\003Set R as busy [SendToRConsole()]") if sys.hexversion < 0x03000000: finalString = aString.decode("latin-1") + "\n" else: finalString = aString win32clipboard.OpenClipboard(0) win32clipboard.EmptyClipboard() win32clipboard.SetClipboardText(finalString) win32clipboard.CloseClipboard() if RConsole == 0: FindRConsole() if RConsole: if Rterm: RightClick() else: CntrlV() def RClearConsolePy(): global RConsole global Rterm if Rterm: return if RConsole == 0: FindRConsole() if RConsole: win32api.keybd_event(0x11, 0, 0, 0) try: win32api.PostMessage(RConsole, 0x100, 0x4C, 0x002F0001) except: vim.command("call RWarningMsg('R Console window not found [1].')") RConsole = 0 pass if RConsole: time.sleep(0.05) try: win32api.PostMessage(RConsole, 0x101, 0x4C, 0xC02F0001) except: vim.command("call RWarningMsg('R Console window not found [2].')") pass win32api.keybd_event(0x11, 0, 2, 0) def RaiseRConsole(): global RConsole FindRConsole() if RConsole: win32gui.SetForegroundWindow(RConsole) time.sleep(0.1) def SendQuitMsg(aString): global RConsole global Rterm SendToVimCom("\003Set R as busy [SendQuitMsg()]") if sys.hexversion < 0x03000000: finalString = aString.decode("latin-1") + "\n" else: finalString = aString + "\n" win32clipboard.OpenClipboard(0) win32clipboard.EmptyClipboard() win32clipboard.SetClipboardText(finalString) win32clipboard.CloseClipboard() sleepTime = float(vim.eval("g:vimrplugin_sleeptime")) RaiseRConsole() if RConsole and not Rterm: time.sleep(sleepTime) win32api.keybd_event(win32con.VK_CONTROL, 0, 0, 0) win32api.keybd_event(ord('V'), 0, win32con.KEYEVENTF_EXTENDEDKEY | 0, 0) time.sleep(0.05) win32api.keybd_event(ord('V'), 0, win32con.KEYEVENTF_EXTENDEDKEY | 
win32con.KEYEVENTF_KEYUP, 0) win32api.keybd_event(win32con.VK_CONTROL, 0, win32con.KEYEVENTF_KEYUP, 0) time.sleep(0.05) RConsole = 0 if RConsole and Rterm: RightClick() RConsole = 0 def GetRPath(): keyName = "SOFTWARE\\R-core\\R" kHandle = None try: kHandle = win32api.RegOpenKeyEx(win32con.HKEY_LOCAL_MACHINE, keyName, 0, win32con.KEY_READ) rVersion, reserved, kclass, lastwrite = win32api.RegEnumKeyEx(kHandle)[-1] win32api.RegCloseKey(kHandle) kHandle = None keyName = keyName + "\\" + rVersion kHandle = win32api.RegOpenKeyEx(win32con.HKEY_LOCAL_MACHINE, keyName, 0, win32con.KEY_READ) except: try: kHandle = win32api.RegOpenKeyEx(win32con.HKEY_CURRENT_USER, keyName, 0, win32con.KEY_READ) rVersion, reserved, kclass, lastwrite = win32api.RegEnumKeyEx(kHandle)[-1] win32api.RegCloseKey(kHandle) kHandle = None keyName = keyName + "\\" + rVersion kHandle = win32api.RegOpenKeyEx(win32con.HKEY_CURRENT_USER, keyName, 0, win32con.KEY_READ) except: vim.command("let s:rinstallpath = 'Key not found'") if kHandle: (kname, rpath, vtype) = win32api.RegEnumValue(kHandle, 0) win32api.RegCloseKey(kHandle) if kname == 'InstallPath': vim.command("let s:rinstallpath = '" + rpath + "'") else: vim.command("let s:rinstallpath = 'Path not found'") def StartRPy(): global Rterm if vim.eval("g:vimrplugin_Rterm") == "1": Rterm = True else: Rterm = False rpath = vim.eval("g:rplugin_Rgui") rpath = rpath.replace("\\", "\\\\") rargs = ['"' + rpath + '"'] r_args = vim.eval("b:rplugin_r_args") if r_args != " ": r_args = r_args.split(' ') i = 0 alen = len(r_args) while i < alen: rargs.append(r_args[i]) i = i + 1 kHandle = None keyName = "Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders" try: kHandle = win32api.RegOpenKeyEx(win32con.HKEY_CURRENT_USER, keyName, 0, win32con.KEY_READ) except: vim.command("RWarningMsg('Personal folder not found in registry')") if kHandle: i = 0 folder = "none" while folder != "Personal": try: (folder, fpath, vtype) = win32api.RegEnumValue(kHandle, i) except: break i = i + 1 win32api.RegCloseKey(kHandle) if folder == "Personal": rargs.append('HOME="' + fpath + '"') else: vim.command("RWarningMsg('Personal folder not found in registry')") if os.path.isfile(rpath): os.spawnv(os.P_NOWAIT, rpath, rargs) else: vim.command("echoerr 'File ' . g:rplugin_Rgui . ' not found.'") def OpenPDF(fn): try: os.startfile(fn) except Exception as errmsg: errstr = str(errmsg) errstr = errstr.replace("'", '"') vim.command("call RWarningMsg('" + errstr + "')") pass
[ "platform.python_version", "win32api.PostMessage", "os.path.isfile", "win32gui.GetForegroundWindow", "win32api.RegCloseKey", "win32gui.SetForegroundWindow", "platform.architecture", "os.spawnv", "win32api.RegOpenKeyEx", "vim.command", "win32api.RegEnumValue", "os.startfile", "win32clipboard.CloseClipboard", "vim.eval", "win32gui.SendMessage", "win32api.keybd_event", "time.sleep", "win32clipboard.SetClipboardText", "win32clipboard.OpenClipboard", "win32clipboard.EmptyClipboard", "win32gui.FindWindow", "win32api.RegEnumKeyEx" ]
[((552, 582), 'win32gui.GetForegroundWindow', 'win32gui.GetForegroundWindow', ([], {}), '()\n', (580, 582), False, 'import win32gui\n'), ((607, 623), 'time.sleep', 'time.sleep', (['(0.05)'], {}), '(0.05)\n', (617, 623), False, 'import time\n'), ((659, 725), 'win32gui.SendMessage', 'win32gui.SendMessage', (['RConsole', 'win32con.WM_RBUTTONDOWN', '(0)', 'lParam'], {}), '(RConsole, win32con.WM_RBUTTONDOWN, 0, lParam)\n', (679, 725), False, 'import win32gui\n'), ((730, 794), 'win32gui.SendMessage', 'win32gui.SendMessage', (['RConsole', 'win32con.WM_RBUTTONUP', '(0)', 'lParam'], {}), '(RConsole, win32con.WM_RBUTTONUP, 0, lParam)\n', (750, 794), False, 'import win32gui\n'), ((799, 815), 'time.sleep', 'time.sleep', (['(0.05)'], {}), '(0.05)\n', (809, 815), False, 'import time\n'), ((1002, 1035), 'win32api.keybd_event', 'win32api.keybd_event', (['(17)', '(0)', '(0)', '(0)'], {}), '(17, 0, 0, 0)\n', (1022, 1035), False, 'import win32api\n'), ((1471, 1504), 'win32api.keybd_event', 'win32api.keybd_event', (['(17)', '(0)', '(2)', '(0)'], {}), '(17, 0, 2, 0)\n', (1491, 1504), False, 'import win32api\n'), ((1559, 1598), 'vim.eval', 'vim.eval', (['"""g:vimrplugin_R_window_title"""'], {}), "('g:vimrplugin_R_window_title')\n", (1567, 1598), False, 'import vim\n'), ((1632, 1665), 'win32gui.FindWindow', 'win32gui.FindWindow', (['None', 'Rtitle'], {}), '(None, Rtitle)\n', (1651, 1665), False, 'import win32gui\n'), ((2352, 2383), 'win32clipboard.OpenClipboard', 'win32clipboard.OpenClipboard', (['(0)'], {}), '(0)\n', (2380, 2383), False, 'import win32clipboard\n'), ((2388, 2419), 'win32clipboard.EmptyClipboard', 'win32clipboard.EmptyClipboard', ([], {}), '()\n', (2417, 2419), False, 'import win32clipboard\n'), ((2424, 2468), 'win32clipboard.SetClipboardText', 'win32clipboard.SetClipboardText', (['finalString'], {}), '(finalString)\n', (2455, 2468), False, 'import win32clipboard\n'), ((2473, 2504), 'win32clipboard.CloseClipboard', 'win32clipboard.CloseClipboard', ([], {}), '()\n', (2502, 2504), False, 'import win32clipboard\n'), ((3775, 3806), 'win32clipboard.OpenClipboard', 'win32clipboard.OpenClipboard', (['(0)'], {}), '(0)\n', (3803, 3806), False, 'import win32clipboard\n'), ((3811, 3842), 'win32clipboard.EmptyClipboard', 'win32clipboard.EmptyClipboard', ([], {}), '()\n', (3840, 3842), False, 'import win32clipboard\n'), ((3847, 3891), 'win32clipboard.SetClipboardText', 'win32clipboard.SetClipboardText', (['finalString'], {}), '(finalString)\n', (3878, 3891), False, 'import win32clipboard\n'), ((3896, 3927), 'win32clipboard.CloseClipboard', 'win32clipboard.CloseClipboard', ([], {}), '()\n', (3925, 3927), False, 'import win32clipboard\n'), ((5962, 5988), 'vim.eval', 'vim.eval', (['"""g:rplugin_Rgui"""'], {}), "('g:rplugin_Rgui')\n", (5970, 5988), False, 'import vim\n'), ((6074, 6102), 'vim.eval', 'vim.eval', (['"""b:rplugin_r_args"""'], {}), "('b:rplugin_r_args')\n", (6082, 6102), False, 'import vim\n'), ((7054, 7075), 'os.path.isfile', 'os.path.isfile', (['rpath'], {}), '(rpath)\n', (7068, 7075), False, 'import os\n'), ((252, 277), 'platform.python_version', 'platform.python_version', ([], {}), '()\n', (275, 277), False, 'import platform\n'), ((291, 314), 'platform.architecture', 'platform.architecture', ([], {}), '()\n', (312, 314), False, 'import platform\n'), ((319, 464), 'vim.command', 'vim.command', (['(\n "call RWarningMsgInp(\'Please install PyWin32. The Python version being used is: "\n + myPyVersion + \' (\' + myArch[0] + ")\')")'], {}), '(\n "call RWarningMsgInp(\'Please install PyWin32. 
The Python version being used is: "\n + myPyVersion + \' (\' + myArch[0] + ")\')")\n', (330, 464), False, 'import vim\n'), ((459, 497), 'vim.command', 'vim.command', (['"""let rplugin_pywin32 = 0"""'], {}), "('let rplugin_pywin32 = 0')\n", (470, 497), False, 'import vim\n'), ((833, 871), 'win32gui.SetForegroundWindow', 'win32gui.SetForegroundWindow', (['myHandle'], {}), '(myHandle)\n', (861, 871), False, 'import win32gui\n'), ((1055, 1103), 'win32api.PostMessage', 'win32api.PostMessage', (['RConsole', '(256)', '(86)', '(3080193)'], {}), '(RConsole, 256, 86, 3080193)\n', (1075, 1103), False, 'import win32api\n'), ((1257, 1273), 'time.sleep', 'time.sleep', (['(0.05)'], {}), '(0.05)\n', (1267, 1273), False, 'import time\n'), ((1743, 1776), 'win32gui.FindWindow', 'win32gui.FindWindow', (['None', 'Rtitle'], {}), '(None, Rtitle)\n', (1762, 1776), False, 'import win32gui\n'), ((2032, 2092), 'vim.command', 'vim.command', (['("let g:rplugin_R_window_ttl = \'" + Rtitle + "\'")'], {}), '("let g:rplugin_R_window_ttl = \'" + Rtitle + "\'")\n', (2043, 2092), False, 'import vim\n'), ((2805, 2838), 'win32api.keybd_event', 'win32api.keybd_event', (['(17)', '(0)', '(0)', '(0)'], {}), '(17, 0, 0, 0)\n', (2825, 2838), False, 'import win32api\n'), ((3330, 3363), 'win32api.keybd_event', 'win32api.keybd_event', (['(17)', '(0)', '(2)', '(0)'], {}), '(17, 0, 2, 0)\n', (3350, 3363), False, 'import win32api\n'), ((3452, 3490), 'win32gui.SetForegroundWindow', 'win32gui.SetForegroundWindow', (['RConsole'], {}), '(RConsole)\n', (3480, 3490), False, 'import win32gui\n'), ((3499, 3514), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (3509, 3514), False, 'import time\n'), ((3950, 3984), 'vim.eval', 'vim.eval', (['"""g:vimrplugin_sleeptime"""'], {}), "('g:vimrplugin_sleeptime')\n", (3958, 3984), False, 'import vim\n'), ((4045, 4066), 'time.sleep', 'time.sleep', (['sleepTime'], {}), '(sleepTime)\n', (4055, 4066), False, 'import time\n'), ((4075, 4125), 'win32api.keybd_event', 'win32api.keybd_event', (['win32con.VK_CONTROL', '(0)', '(0)', '(0)'], {}), '(win32con.VK_CONTROL, 0, 0, 0)\n', (4095, 4125), False, 'import win32api\n'), ((4215, 4231), 'time.sleep', 'time.sleep', (['(0.05)'], {}), '(0.05)\n', (4225, 4231), False, 'import time\n'), ((4344, 4417), 'win32api.keybd_event', 'win32api.keybd_event', (['win32con.VK_CONTROL', '(0)', 'win32con.KEYEVENTF_KEYUP', '(0)'], {}), '(win32con.VK_CONTROL, 0, win32con.KEYEVENTF_KEYUP, 0)\n', (4364, 4417), False, 'import win32api\n'), ((4426, 4442), 'time.sleep', 'time.sleep', (['(0.05)'], {}), '(0.05)\n', (4436, 4442), False, 'import time\n'), ((4632, 4718), 'win32api.RegOpenKeyEx', 'win32api.RegOpenKeyEx', (['win32con.HKEY_LOCAL_MACHINE', 'keyName', '(0)', 'win32con.KEY_READ'], {}), '(win32con.HKEY_LOCAL_MACHINE, keyName, 0, win32con.\n KEY_READ)\n', (4653, 4718), False, 'import win32api\n'), ((4805, 4834), 'win32api.RegCloseKey', 'win32api.RegCloseKey', (['kHandle'], {}), '(kHandle)\n', (4825, 4834), False, 'import win32api\n'), ((4920, 5006), 'win32api.RegOpenKeyEx', 'win32api.RegOpenKeyEx', (['win32con.HKEY_LOCAL_MACHINE', 'keyName', '(0)', 'win32con.KEY_READ'], {}), '(win32con.HKEY_LOCAL_MACHINE, keyName, 0, win32con.\n KEY_READ)\n', (4941, 5006), False, 'import win32api\n'), ((5566, 5599), 'win32api.RegEnumValue', 'win32api.RegEnumValue', (['kHandle', '(0)'], {}), '(kHandle, 0)\n', (5587, 5599), False, 'import win32api\n'), ((5608, 5637), 'win32api.RegCloseKey', 'win32api.RegCloseKey', (['kHandle'], {}), '(kHandle)\n', (5628, 5637), False, 'import win32api\n'), ((5858, 
5888), 'vim.eval', 'vim.eval', (['"""g:vimrplugin_Rterm"""'], {}), "('g:vimrplugin_Rterm')\n", (5866, 5888), False, 'import vim\n'), ((6416, 6501), 'win32api.RegOpenKeyEx', 'win32api.RegOpenKeyEx', (['win32con.HKEY_CURRENT_USER', 'keyName', '(0)', 'win32con.KEY_READ'], {}), '(win32con.HKEY_CURRENT_USER, keyName, 0, win32con.KEY_READ\n )\n', (6437, 6501), False, 'import win32api\n'), ((6840, 6869), 'win32api.RegCloseKey', 'win32api.RegCloseKey', (['kHandle'], {}), '(kHandle)\n', (6860, 6869), False, 'import win32api\n'), ((7085, 7121), 'os.spawnv', 'os.spawnv', (['os.P_NOWAIT', 'rpath', 'rargs'], {}), '(os.P_NOWAIT, rpath, rargs)\n', (7094, 7121), False, 'import os\n'), ((7140, 7203), 'vim.command', 'vim.command', (['"""echoerr \'File \' . g:rplugin_Rgui . \' not found.\'"""'], {}), '("echoerr \'File \' . g:rplugin_Rgui . \' not found.\'")\n', (7151, 7203), False, 'import vim\n'), ((7239, 7255), 'os.startfile', 'os.startfile', (['fn'], {}), '(fn)\n', (7251, 7255), False, 'import os\n'), ((892, 962), 'vim.command', 'vim.command', (['"""call RWarningMsg(\'Could not put itself on foreground.\')"""'], {}), '("call RWarningMsg(\'Could not put itself on foreground.\')")\n', (903, 962), False, 'import vim\n'), ((1131, 1197), 'vim.command', 'vim.command', (['"""call RWarningMsg(\'R Console window not found [1].\')"""'], {}), '("call RWarningMsg(\'R Console window not found [1].\')")\n', (1142, 1197), False, 'import vim\n'), ((1299, 1350), 'win32api.PostMessage', 'win32api.PostMessage', (['RConsole', '(257)', '(86)', '(3224305665)'], {}), '(RConsole, 257, 86, 3224305665)\n', (1319, 1350), False, 'import win32api\n'), ((1866, 1899), 'win32gui.FindWindow', 'win32gui.FindWindow', (['None', 'Rtitle'], {}), '(None, Rtitle)\n', (1885, 1899), False, 'import win32gui\n'), ((2866, 2914), 'win32api.PostMessage', 'win32api.PostMessage', (['RConsole', '(256)', '(76)', '(3080193)'], {}), '(RConsole, 256, 76, 3080193)\n', (2886, 2914), False, 'import win32api\n'), ((3092, 3108), 'time.sleep', 'time.sleep', (['(0.05)'], {}), '(0.05)\n', (3102, 3108), False, 'import time\n'), ((4762, 4792), 'win32api.RegEnumKeyEx', 'win32api.RegEnumKeyEx', (['kHandle'], {}), '(kHandle)\n', (4783, 4792), False, 'import win32api\n'), ((5685, 5736), 'vim.command', 'vim.command', (['("let s:rinstallpath = \'" + rpath + "\'")'], {}), '("let s:rinstallpath = \'" + rpath + "\'")\n', (5696, 5736), False, 'import vim\n'), ((5763, 5816), 'vim.command', 'vim.command', (['"""let s:rinstallpath = \'Path not found\'"""'], {}), '("let s:rinstallpath = \'Path not found\'")\n', (5774, 5816), False, 'import vim\n'), ((6517, 6584), 'vim.command', 'vim.command', (['"""RWarningMsg(\'Personal folder not found in registry\')"""'], {}), '("RWarningMsg(\'Personal folder not found in registry\')")\n', (6528, 6584), False, 'import vim\n'), ((6978, 7045), 'vim.command', 'vim.command', (['"""RWarningMsg(\'Personal folder not found in registry\')"""'], {}), '("RWarningMsg(\'Personal folder not found in registry\')")\n', (6989, 7045), False, 'import vim\n'), ((7367, 7416), 'vim.command', 'vim.command', (['("call RWarningMsg(\'" + errstr + "\')")'], {}), '("call RWarningMsg(\'" + errstr + "\')")\n', (7378, 7416), False, 'import vim\n'), ((1383, 1449), 'vim.command', 'vim.command', (['"""call RWarningMsg(\'R Console window not found [2].\')"""'], {}), '("call RWarningMsg(\'R Console window not found [2].\')")\n', (1394, 1449), False, 'import vim\n'), ((1946, 2006), 'vim.command', 'vim.command', (['"""call RWarningMsg(\'Could not find R Console.\')"""'], {}), 
'("call RWarningMsg(\'Could not find R Console.\')")\n', (1957, 2006), False, 'import vim\n'), ((2950, 3016), 'vim.command', 'vim.command', (['"""call RWarningMsg(\'R Console window not found [1].\')"""'], {}), '("call RWarningMsg(\'R Console window not found [1].\')")\n', (2961, 3016), False, 'import vim\n'), ((3142, 3193), 'win32api.PostMessage', 'win32api.PostMessage', (['RConsole', '(257)', '(76)', '(3224305665)'], {}), '(RConsole, 257, 76, 3224305665)\n', (3162, 3193), False, 'import win32api\n'), ((5049, 5134), 'win32api.RegOpenKeyEx', 'win32api.RegOpenKeyEx', (['win32con.HKEY_CURRENT_USER', 'keyName', '(0)', 'win32con.KEY_READ'], {}), '(win32con.HKEY_CURRENT_USER, keyName, 0, win32con.KEY_READ\n )\n', (5070, 5134), False, 'import win32api\n'), ((5229, 5258), 'win32api.RegCloseKey', 'win32api.RegCloseKey', (['kHandle'], {}), '(kHandle)\n', (5249, 5258), False, 'import win32api\n'), ((5356, 5441), 'win32api.RegOpenKeyEx', 'win32api.RegOpenKeyEx', (['win32con.HKEY_CURRENT_USER', 'keyName', '(0)', 'win32con.KEY_READ'], {}), '(win32con.HKEY_CURRENT_USER, keyName, 0, win32con.KEY_READ\n )\n', (5377, 5441), False, 'import win32api\n'), ((6734, 6767), 'win32api.RegEnumValue', 'win32api.RegEnumValue', (['kHandle', 'i'], {}), '(kHandle, i)\n', (6755, 6767), False, 'import win32api\n'), ((3234, 3300), 'vim.command', 'vim.command', (['"""call RWarningMsg(\'R Console window not found [2].\')"""'], {}), '("call RWarningMsg(\'R Console window not found [2].\')")\n', (3245, 3300), False, 'import vim\n'), ((5182, 5212), 'win32api.RegEnumKeyEx', 'win32api.RegEnumKeyEx', (['kHandle'], {}), '(kHandle)\n', (5203, 5212), False, 'import win32api\n'), ((5465, 5517), 'vim.command', 'vim.command', (['"""let s:rinstallpath = \'Key not found\'"""'], {}), '("let s:rinstallpath = \'Key not found\'")\n', (5476, 5517), False, 'import vim\n')]
from dateutil import parser import re import shutil import subprocess as sp import time import pytest from .utils import ( gen_basic_wf, gen_basic_wf_with_threadcount, gen_basic_wf_with_threadcount_concurrent, ) from ..core import Workflow from ..task import ShellCommandTask from ..submitter import Submitter from ... import mark from pathlib import Path import uuid from datetime import datetime slurm_available = bool(shutil.which("sbatch")) sge_available = bool(shutil.which("qsub")) @mark.task def sleep_add_one(x): time.sleep(1) return x + 1 def test_callable_wf(plugin, tmpdir): wf = gen_basic_wf() res = wf() assert res.output.out == 9 del wf, res # providing plugin wf = gen_basic_wf() res = wf(plugin="cf") assert res.output.out == 9 del wf, res # providing plugin_kwargs wf = gen_basic_wf() res = wf(plugin="cf", plugin_kwargs={"n_procs": 2}) assert res.output.out == 9 del wf, res # providing wrong plugin_kwargs wf = gen_basic_wf() with pytest.raises(TypeError, match="an unexpected keyword argument"): wf(plugin="cf", plugin_kwargs={"sbatch_args": "-N2"}) # providing submitter wf = gen_basic_wf() wf.cache_dir = tmpdir sub = Submitter(plugin) res = wf(submitter=sub) assert res.output.out == 9 def test_concurrent_wf(plugin, tmpdir): # concurrent workflow # A --> C # B --> D wf = Workflow("new_wf", input_spec=["x", "y"]) wf.inputs.x = 5 wf.inputs.y = 10 wf.add(sleep_add_one(name="taska", x=wf.lzin.x)) wf.add(sleep_add_one(name="taskb", x=wf.lzin.y)) wf.add(sleep_add_one(name="taskc", x=wf.taska.lzout.out)) wf.add(sleep_add_one(name="taskd", x=wf.taskb.lzout.out)) wf.set_output([("out1", wf.taskc.lzout.out), ("out2", wf.taskd.lzout.out)]) wf.cache_dir = tmpdir with Submitter(plugin) as sub: sub(wf) res = wf.result() assert res.output.out1 == 7 assert res.output.out2 == 12 def test_concurrent_wf_nprocs(tmpdir): # concurrent workflow # setting n_procs in Submitter that is passed to the worker # A --> C # B --> D wf = Workflow("new_wf", input_spec=["x", "y"]) wf.inputs.x = 5 wf.inputs.y = 10 wf.add(sleep_add_one(name="taska", x=wf.lzin.x)) wf.add(sleep_add_one(name="taskb", x=wf.lzin.y)) wf.add(sleep_add_one(name="taskc", x=wf.taska.lzout.out)) wf.add(sleep_add_one(name="taskd", x=wf.taskb.lzout.out)) wf.set_output([("out1", wf.taskc.lzout.out), ("out2", wf.taskd.lzout.out)]) wf.cache_dir = tmpdir with Submitter("cf", n_procs=2) as sub: sub(wf) res = wf.result() assert res.output.out1 == 7 assert res.output.out2 == 12 def test_wf_in_wf(plugin, tmpdir): """WF(A --> SUBWF(A --> B) --> B)""" wf = Workflow(name="wf_in_wf", input_spec=["x"]) wf.inputs.x = 3 wf.add(sleep_add_one(name="wf_a", x=wf.lzin.x)) # workflow task subwf = Workflow(name="sub_wf", input_spec=["x"]) subwf.add(sleep_add_one(name="sub_a", x=subwf.lzin.x)) subwf.add(sleep_add_one(name="sub_b", x=subwf.sub_a.lzout.out)) subwf.set_output([("out", subwf.sub_b.lzout.out)]) # connect, then add subwf.inputs.x = wf.wf_a.lzout.out subwf.cache_dir = tmpdir wf.add(subwf) wf.add(sleep_add_one(name="wf_b", x=wf.sub_wf.lzout.out)) wf.set_output([("out", wf.wf_b.lzout.out)]) wf.cache_dir = tmpdir with Submitter(plugin) as sub: sub(wf) res = wf.result() assert res.output.out == 7 @pytest.mark.flaky(reruns=2) # when dask def test_wf2(plugin_dask_opt, tmpdir): """workflow as a node workflow-node with one task and no splitter """ wfnd = Workflow(name="wfnd", input_spec=["x"]) wfnd.add(sleep_add_one(name="add2", x=wfnd.lzin.x)) wfnd.set_output([("out", wfnd.add2.lzout.out)]) wfnd.inputs.x = 2 wf = Workflow(name="wf", input_spec=["x"]) wf.add(wfnd) 
wf.set_output([("out", wf.wfnd.lzout.out)]) wf.cache_dir = tmpdir with Submitter(plugin=plugin_dask_opt) as sub: sub(wf) res = wf.result() assert res.output.out == 3 @pytest.mark.flaky(reruns=2) # when dask def test_wf_with_state(plugin_dask_opt, tmpdir): wf = Workflow(name="wf_with_state", input_spec=["x"]) wf.add(sleep_add_one(name="taska", x=wf.lzin.x)) wf.add(sleep_add_one(name="taskb", x=wf.taska.lzout.out)) wf.inputs.x = [1, 2, 3] wf.split("x") wf.set_output([("out", wf.taskb.lzout.out)]) wf.cache_dir = tmpdir with Submitter(plugin=plugin_dask_opt) as sub: sub(wf) res = wf.result() assert res[0].output.out == 3 assert res[1].output.out == 4 assert res[2].output.out == 5 def test_serial_wf(): # Use serial plugin to execute workflow instead of CF wf = gen_basic_wf() res = wf(plugin="serial") assert res.output.out == 9 @pytest.mark.skipif(not slurm_available, reason="slurm not installed") def test_slurm_wf(tmpdir): wf = gen_basic_wf() wf.cache_dir = tmpdir # submit workflow and every task as slurm job with Submitter("slurm") as sub: sub(wf) res = wf.result() assert res.output.out == 9 script_dir = tmpdir / "SlurmWorker_scripts" assert script_dir.exists() # ensure each task was executed with slurm assert len([sd for sd in script_dir.listdir() if sd.isdir()]) == 2 @pytest.mark.skipif(not slurm_available, reason="slurm not installed") def test_slurm_wf_cf(tmpdir): # submit entire workflow as single job executing with cf worker wf = gen_basic_wf() wf.cache_dir = tmpdir wf.plugin = "cf" with Submitter("slurm") as sub: sub(wf) res = wf.result() assert res.output.out == 9 script_dir = tmpdir / "SlurmWorker_scripts" assert script_dir.exists() # ensure only workflow was executed with slurm sdirs = [sd for sd in script_dir.listdir() if sd.isdir()] assert len(sdirs) == 1 # slurm scripts should be in the dirs that are using uid in the name assert sdirs[0].basename == wf.uid @pytest.mark.skipif(not slurm_available, reason="slurm not installed") def test_slurm_wf_state(tmpdir): wf = gen_basic_wf() wf.split("x") wf.inputs.x = [5, 6] wf.cache_dir = tmpdir with Submitter("slurm") as sub: sub(wf) res = wf.result() assert res[0].output.out == 9 assert res[1].output.out == 10 script_dir = tmpdir / "SlurmWorker_scripts" assert script_dir.exists() sdirs = [sd for sd in script_dir.listdir() if sd.isdir()] assert len(sdirs) == 2 * len(wf.inputs.x) @pytest.mark.skipif(not slurm_available, reason="slurm not installed") @pytest.mark.flaky(reruns=3) def test_slurm_max_jobs(tmpdir): wf = Workflow("new_wf", input_spec=["x", "y"], cache_dir=tmpdir) wf.inputs.x = 5 wf.inputs.y = 10 wf.add(sleep_add_one(name="taska", x=wf.lzin.x)) wf.add(sleep_add_one(name="taskb", x=wf.lzin.y)) wf.add(sleep_add_one(name="taskc", x=wf.taska.lzout.out)) wf.add(sleep_add_one(name="taskd", x=wf.taskb.lzout.out)) wf.set_output([("out1", wf.taskc.lzout.out), ("out2", wf.taskd.lzout.out)]) with Submitter("slurm", max_jobs=1) as sub: sub(wf) jobids = [] time.sleep(0.5) # allow time for sacct to collect itself for fl in (tmpdir / "SlurmWorker_scripts").visit("slurm-*.out"): jid = re.search(r"(?<=slurm-)\d+", fl.strpath) assert jid.group() jobids.append(jid.group()) time.sleep(0.2) del jid # query sacct for job eligibility timings queued = [] for jid in sorted(jobids): out = sp.run(["sacct", "-Xnj", jid, "-o", "Eligible"], capture_output=True) et = out.stdout.decode().strip() queued.append(parser.parse(et)) del out, et # compare timing between queued jobs prev = None for et in sorted(queued, reverse=True): if prev is None: prev = et continue 
assert (prev - et).seconds >= 2 @pytest.mark.skipif(not slurm_available, reason="slurm not installed") def test_slurm_args_1(tmpdir): """testing sbatch_args provided to the submitter""" task = sleep_add_one(x=1) task.cache_dir = tmpdir # submit workflow and every task as slurm job with Submitter("slurm", sbatch_args="-N1") as sub: sub(task) res = task.result() assert res.output.out == 2 script_dir = tmpdir / "SlurmWorker_scripts" assert script_dir.exists() @pytest.mark.skipif(not slurm_available, reason="slurm not installed") def test_slurm_args_2(tmpdir): """testing sbatch_args provided to the submitter exception should be raised for invalid options """ task = sleep_add_one(x=1) task.cache_dir = tmpdir # submit workflow and every task as slurm job with pytest.raises(RuntimeError, match="Error returned from sbatch:"): with Submitter("slurm", sbatch_args="-N1 --invalid") as sub: sub(task) @mark.task def sleep(x, job_name_part): time.sleep(x) import subprocess as sp # getting the job_id of the first job that sleeps job_id = 999 while job_id != "": time.sleep(3) id_p1 = sp.Popen(["squeue"], stdout=sp.PIPE) id_p2 = sp.Popen(["grep", job_name_part], stdin=id_p1.stdout, stdout=sp.PIPE) id_p3 = sp.Popen(["awk", "{print $1}"], stdin=id_p2.stdout, stdout=sp.PIPE) job_id = id_p3.communicate()[0].decode("utf-8").strip() return x @mark.task def cancel(job_name_part): import subprocess as sp # getting the job_id of the first job that sleeps job_id = "" while job_id == "": time.sleep(1) id_p1 = sp.Popen(["squeue"], stdout=sp.PIPE) id_p2 = sp.Popen(["grep", job_name_part], stdin=id_p1.stdout, stdout=sp.PIPE) id_p3 = sp.Popen(["awk", "{print $1}"], stdin=id_p2.stdout, stdout=sp.PIPE) job_id = id_p3.communicate()[0].decode("utf-8").strip() # # canceling the job proc = sp.run(["scancel", job_id, "--verbose"], stdout=sp.PIPE, stderr=sp.PIPE) # cancelling the job returns message in the sterr return proc.stderr.decode("utf-8").strip() @pytest.mark.flaky(reruns=1) @pytest.mark.skipif(not slurm_available, reason="slurm not installed") def test_slurm_cancel_rerun_1(tmpdir): """testing that tasks run with slurm is re-queue Running wf with 2 tasks, one sleeps and the other trying to get job_id of the first task and cancel it. The first job should be re-queue and finish without problem. (possibly has to be improved, in theory cancel job might finish before cancel) """ wf = Workflow( name="wf", input_spec=["x", "job_name_cancel", "job_name_resqueue"], cache_dir=tmpdir, ) wf.add(sleep(name="sleep1", x=wf.lzin.x, job_name_part=wf.lzin.job_name_cancel)) wf.add(cancel(name="cancel1", job_name_part=wf.lzin.job_name_resqueue)) wf.inputs.x = 10 wf.inputs.job_name_resqueue = "sleep1" wf.inputs.job_name_cancel = "cancel1" wf.set_output([("out", wf.sleep1.lzout.out), ("canc_out", wf.cancel1.lzout.out)]) with Submitter("slurm") as sub: sub(wf) res = wf.result() assert res.output.out == 10 # checking if indeed the sleep-task job was cancelled by cancel-task assert "Terminating" in res.output.canc_out assert "Invalid" not in res.output.canc_out script_dir = tmpdir / "SlurmWorker_scripts" assert script_dir.exists() @pytest.mark.flaky(reruns=1) @pytest.mark.skipif(not slurm_available, reason="slurm not installed") def test_slurm_cancel_rerun_2(tmpdir): """testing that tasks run with slurm that has --no-requeue Running wf with 2 tasks, one sleeps and the other gets job_id of the first task and cancel it. The first job is not able t be rescheduled and the error is returned. 
""" wf = Workflow(name="wf", input_spec=["x", "job_name"], cache_dir=tmpdir) wf.add(sleep(name="sleep2", x=wf.lzin.x)) wf.add(cancel(name="cancel2", job_name_part=wf.lzin.job_name)) wf.inputs.x = 10 wf.inputs.job_name = "sleep2" wf.set_output([("out", wf.sleep2.lzout.out), ("canc_out", wf.cancel2.lzout.out)]) with pytest.raises(Exception): with Submitter("slurm", sbatch_args="--no-requeue") as sub: sub(wf) @pytest.mark.skipif(not sge_available, reason="sge not installed") def test_sge_wf(tmpdir): """testing that a basic workflow can be run with the SGEWorker""" wf = gen_basic_wf() wf.cache_dir = tmpdir # submit workflow and every task as sge job with Submitter( "sge", ) as sub: sub(wf) res = wf.result() assert res.output.out == 9 script_dir = tmpdir / "SGEWorker_scripts" assert script_dir.exists() sdirs = [sd for sd in script_dir.listdir() if sd.isdir()] # ensure each task was executed with sge assert len([sd for sd in script_dir.listdir() if sd.isdir()]) == 2 @pytest.mark.skipif(not sge_available, reason="sge not installed") def test_sge_wf_cf(tmpdir): """testing the SGEWorker can submit SGE tasks while the workflow uses the concurrent futures plugin""" # submit entire workflow as single job executing with cf worker wf = gen_basic_wf() wf.cache_dir = tmpdir wf.plugin = "cf" with Submitter("sge") as sub: sub(wf) res = wf.result() assert res.output.out == 9 script_dir = tmpdir / "SGEWorker_scripts" assert script_dir.exists() # ensure only workflow was executed with slurm sdirs = [sd for sd in script_dir.listdir() if sd.isdir()] assert len(sdirs) == 1 # sge scripts should be in the dirs that are using uid in the name assert Path(sdirs[0]).name == wf.uid @pytest.mark.skipif(not sge_available, reason="sge not installed") def test_sge_wf_state(tmpdir): """testing the SGEWorker can be used with a workflow with state""" wf = gen_basic_wf() wf.split("x") wf.inputs.x = [5, 6] wf.cache_dir = tmpdir with Submitter("sge") as sub: sub(wf) res = wf.result() assert res[0].output.out == 9 assert res[1].output.out == 10 script_dir = tmpdir / "SGEWorker_scripts" assert script_dir.exists() sdirs = [sd for sd in script_dir.listdir() if sd.isdir()] assert len(sdirs) == 2 * len(wf.inputs.x) def qacct_output_to_dict(qacct_output): stdout_dict = {} for line in qacct_output.splitlines(): key_value = line.split(None, 1) if key_value[0] not in stdout_dict: stdout_dict[key_value[0]] = [] if len(key_value) > 1: stdout_dict[key_value[0]].append(key_value[1]) else: stdout_dict[key_value[0]].append(None) print(stdout_dict) return stdout_dict @pytest.mark.skipif(not sge_available, reason="sge not installed") def test_sge_set_threadcount(tmpdir): """testing the number of threads for an SGEWorker task can be set using the input_spec variable sgeThreads""" wf = gen_basic_wf_with_threadcount() wf.inputs.x = 5 wf.cache_dir = tmpdir jobids = [] with Submitter("sge") as sub: sub(wf) jobids = list(sub.worker.jobid_by_task_uid.values()) jobids.sort() print(f"jobids: {jobids}") out_job0 = ( sp.run(["qacct", "-j", jobids[0]], capture_output=True).stdout.decode().strip() ) out_job1 = ( sp.run(["qacct", "-j", jobids[1]], capture_output=True).stdout.decode().strip() ) out_job0_dict = qacct_output_to_dict(out_job0) out_job1_dict = qacct_output_to_dict(out_job1) assert int(out_job0_dict["slots"][0]) == 4 assert int(out_job1_dict["slots"][0]) == 1 @pytest.mark.skipif(not sge_available, reason="sge not installed") def test_sge_limit_maxthreads(tmpdir): """testing the ability to limit the number of threads used by the SGE at one time with 
the max_threads argument to SGEWorker""" wf = gen_basic_wf_with_threadcount_concurrent() wf.inputs.x = [5, 6] wf.split("x") wf.cache_dir = tmpdir jobids = [] with Submitter("sge", max_threads=8) as sub: sub(wf) jobids = list(sub.worker.jobid_by_task_uid.values()) jobids.sort() out_job0 = ( sp.run(["qacct", "-j", jobids[0]], capture_output=True).stdout.decode().strip() ) out_job1 = ( sp.run(["qacct", "-j", jobids[1]], capture_output=True).stdout.decode().strip() ) out_job2 = ( sp.run(["qacct", "-j", jobids[2]], capture_output=True).stdout.decode().strip() ) out_job3 = ( sp.run(["qacct", "-j", jobids[3]], capture_output=True).stdout.decode().strip() ) out_job0_dict = qacct_output_to_dict(out_job0) out_job1_dict = qacct_output_to_dict(out_job1) out_job2_dict = qacct_output_to_dict(out_job2) out_job3_dict = qacct_output_to_dict(out_job3) job_1_endtime = datetime.strptime( out_job1_dict["end_time"][0], f"%a %b %d %H:%M:%S %Y" ) # Running both task_1_1 and task_1_2 at once would exceed max_threads, # so task_1_2 waits for task_1_1 to complete job_2_starttime = datetime.strptime( out_job2_dict["start_time"][0], f"%a %b %d %H:%M:%S %Y" ) assert job_1_endtime < job_2_starttime @pytest.mark.skipif(not sge_available, reason="sge not installed") def test_sge_no_limit_maxthreads(tmpdir): """testing unlimited threads can be used at once by SGE when max_threads is not set""" wf = gen_basic_wf_with_threadcount_concurrent() wf.inputs.x = [5, 6] wf.split("x") wf.cache_dir = tmpdir jobids = [] with Submitter("sge", max_threads=None) as sub: sub(wf) jobids = list(sub.worker.jobid_by_task_uid.values()) jobids.sort() out_job0 = ( sp.run(["qacct", "-j", jobids[0]], capture_output=True).stdout.decode().strip() ) out_job1 = ( sp.run(["qacct", "-j", jobids[1]], capture_output=True).stdout.decode().strip() ) out_job2 = ( sp.run(["qacct", "-j", jobids[2]], capture_output=True).stdout.decode().strip() ) out_job0_dict = qacct_output_to_dict(out_job0) out_job1_dict = qacct_output_to_dict(out_job1) out_job2_dict = qacct_output_to_dict(out_job2) job_1_endtime = datetime.strptime( out_job1_dict["end_time"][0], f"%a %b %d %H:%M:%S %Y" ) # Running both task_1_1 and task_1_2 at once would not exceed max_threads, # so task_1_2 does not wait for task_1_1 to complete job_2_starttime = datetime.strptime( out_job2_dict["start_time"][0], f"%a %b %d %H:%M:%S %Y" ) assert job_1_endtime > job_2_starttime
[ "subprocess.run", "subprocess.Popen", "dateutil.parser.parse", "shutil.which", "time.sleep", "datetime.datetime.strptime", "pytest.mark.skipif", "pytest.mark.flaky", "pytest.raises", "pathlib.Path", "re.search" ]
[((3556, 3583), 'pytest.mark.flaky', 'pytest.mark.flaky', ([], {'reruns': '(2)'}), '(reruns=2)\n', (3573, 3583), False, 'import pytest\n'), ((4163, 4190), 'pytest.mark.flaky', 'pytest.mark.flaky', ([], {'reruns': '(2)'}), '(reruns=2)\n', (4180, 4190), False, 'import pytest\n'), ((4912, 4981), 'pytest.mark.skipif', 'pytest.mark.skipif', (['(not slurm_available)'], {'reason': '"""slurm not installed"""'}), "(not slurm_available, reason='slurm not installed')\n", (4930, 4981), False, 'import pytest\n'), ((5415, 5484), 'pytest.mark.skipif', 'pytest.mark.skipif', (['(not slurm_available)'], {'reason': '"""slurm not installed"""'}), "(not slurm_available, reason='slurm not installed')\n", (5433, 5484), False, 'import pytest\n'), ((6093, 6162), 'pytest.mark.skipif', 'pytest.mark.skipif', (['(not slurm_available)'], {'reason': '"""slurm not installed"""'}), "(not slurm_available, reason='slurm not installed')\n", (6111, 6162), False, 'import pytest\n'), ((6622, 6691), 'pytest.mark.skipif', 'pytest.mark.skipif', (['(not slurm_available)'], {'reason': '"""slurm not installed"""'}), "(not slurm_available, reason='slurm not installed')\n", (6640, 6691), False, 'import pytest\n'), ((6693, 6720), 'pytest.mark.flaky', 'pytest.mark.flaky', ([], {'reruns': '(3)'}), '(reruns=3)\n', (6710, 6720), False, 'import pytest\n'), ((8035, 8104), 'pytest.mark.skipif', 'pytest.mark.skipif', (['(not slurm_available)'], {'reason': '"""slurm not installed"""'}), "(not slurm_available, reason='slurm not installed')\n", (8053, 8104), False, 'import pytest\n'), ((8511, 8580), 'pytest.mark.skipif', 'pytest.mark.skipif', (['(not slurm_available)'], {'reason': '"""slurm not installed"""'}), "(not slurm_available, reason='slurm not installed')\n", (8529, 8580), False, 'import pytest\n'), ((10192, 10219), 'pytest.mark.flaky', 'pytest.mark.flaky', ([], {'reruns': '(1)'}), '(reruns=1)\n', (10209, 10219), False, 'import pytest\n'), ((10221, 10290), 'pytest.mark.skipif', 'pytest.mark.skipif', (['(not slurm_available)'], {'reason': '"""slurm not installed"""'}), "(not slurm_available, reason='slurm not installed')\n", (10239, 10290), False, 'import pytest\n'), ((11499, 11526), 'pytest.mark.flaky', 'pytest.mark.flaky', ([], {'reruns': '(1)'}), '(reruns=1)\n', (11516, 11526), False, 'import pytest\n'), ((11528, 11597), 'pytest.mark.skipif', 'pytest.mark.skipif', (['(not slurm_available)'], {'reason': '"""slurm not installed"""'}), "(not slurm_available, reason='slurm not installed')\n", (11546, 11597), False, 'import pytest\n'), ((12344, 12409), 'pytest.mark.skipif', 'pytest.mark.skipif', (['(not sge_available)'], {'reason': '"""sge not installed"""'}), "(not sge_available, reason='sge not installed')\n", (12362, 12409), False, 'import pytest\n'), ((12980, 13045), 'pytest.mark.skipif', 'pytest.mark.skipif', (['(not sge_available)'], {'reason': '"""sge not installed"""'}), "(not sge_available, reason='sge not installed')\n", (12998, 13045), False, 'import pytest\n'), ((13759, 13824), 'pytest.mark.skipif', 'pytest.mark.skipif', (['(not sge_available)'], {'reason': '"""sge not installed"""'}), "(not sge_available, reason='sge not installed')\n", (13777, 13824), False, 'import pytest\n'), ((14784, 14849), 'pytest.mark.skipif', 'pytest.mark.skipif', (['(not sge_available)'], {'reason': '"""sge not installed"""'}), "(not sge_available, reason='sge not installed')\n", (14802, 14849), False, 'import pytest\n'), ((15699, 15764), 'pytest.mark.skipif', 'pytest.mark.skipif', (['(not sge_available)'], {'reason': '"""sge not installed"""'}), "(not 
sge_available, reason='sge not installed')\n", (15717, 15764), False, 'import pytest\n'), ((17265, 17330), 'pytest.mark.skipif', 'pytest.mark.skipif', (['(not sge_available)'], {'reason': '"""sge not installed"""'}), "(not sge_available, reason='sge not installed')\n", (17283, 17330), False, 'import pytest\n'), ((436, 458), 'shutil.which', 'shutil.which', (['"""sbatch"""'], {}), "('sbatch')\n", (448, 458), False, 'import shutil\n'), ((481, 501), 'shutil.which', 'shutil.which', (['"""qsub"""'], {}), "('qsub')\n", (493, 501), False, 'import shutil\n'), ((542, 555), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (552, 555), False, 'import time\n'), ((7259, 7274), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (7269, 7274), False, 'import time\n'), ((9044, 9057), 'time.sleep', 'time.sleep', (['x'], {}), '(x)\n', (9054, 9057), False, 'import time\n'), ((10015, 10087), 'subprocess.run', 'sp.run', (["['scancel', job_id, '--verbose']"], {'stdout': 'sp.PIPE', 'stderr': 'sp.PIPE'}), "(['scancel', job_id, '--verbose'], stdout=sp.PIPE, stderr=sp.PIPE)\n", (10021, 10087), True, 'import subprocess as sp\n'), ((16897, 16969), 'datetime.datetime.strptime', 'datetime.strptime', (["out_job1_dict['end_time'][0]", 'f"""%a %b %d %H:%M:%S %Y"""'], {}), "(out_job1_dict['end_time'][0], f'%a %b %d %H:%M:%S %Y')\n", (16914, 16969), False, 'from datetime import datetime\n'), ((17130, 17204), 'datetime.datetime.strptime', 'datetime.strptime', (["out_job2_dict['start_time'][0]", 'f"""%a %b %d %H:%M:%S %Y"""'], {}), "(out_job2_dict['start_time'][0], f'%a %b %d %H:%M:%S %Y')\n", (17147, 17204), False, 'from datetime import datetime\n'), ((18266, 18338), 'datetime.datetime.strptime', 'datetime.strptime', (["out_job1_dict['end_time'][0]", 'f"""%a %b %d %H:%M:%S %Y"""'], {}), "(out_job1_dict['end_time'][0], f'%a %b %d %H:%M:%S %Y')\n", (18283, 18338), False, 'from datetime import datetime\n'), ((18511, 18585), 'datetime.datetime.strptime', 'datetime.strptime', (["out_job2_dict['start_time'][0]", 'f"""%a %b %d %H:%M:%S %Y"""'], {}), "(out_job2_dict['start_time'][0], f'%a %b %d %H:%M:%S %Y')\n", (18528, 18585), False, 'from datetime import datetime\n'), ((1048, 1112), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': '"""an unexpected keyword argument"""'}), "(TypeError, match='an unexpected keyword argument')\n", (1061, 1112), False, 'import pytest\n'), ((7400, 7440), 're.search', 're.search', (['"""(?<=slurm-)\\\\d+"""', 'fl.strpath'], {}), "('(?<=slurm-)\\\\d+', fl.strpath)\n", (7409, 7440), False, 'import re\n'), ((7511, 7526), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (7521, 7526), False, 'import time\n'), ((7651, 7720), 'subprocess.run', 'sp.run', (["['sacct', '-Xnj', jid, '-o', 'Eligible']"], {'capture_output': '(True)'}), "(['sacct', '-Xnj', jid, '-o', 'Eligible'], capture_output=True)\n", (7657, 7720), True, 'import subprocess as sp\n'), ((8841, 8905), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {'match': '"""Error returned from sbatch:"""'}), "(RuntimeError, match='Error returned from sbatch:')\n", (8854, 8905), False, 'import pytest\n'), ((9190, 9203), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (9200, 9203), False, 'import time\n'), ((9220, 9256), 'subprocess.Popen', 'sp.Popen', (["['squeue']"], {'stdout': 'sp.PIPE'}), "(['squeue'], stdout=sp.PIPE)\n", (9228, 9256), True, 'import subprocess as sp\n'), ((9273, 9342), 'subprocess.Popen', 'sp.Popen', (["['grep', job_name_part]"], {'stdin': 'id_p1.stdout', 'stdout': 'sp.PIPE'}), "(['grep', job_name_part], 
stdin=id_p1.stdout, stdout=sp.PIPE)\n", (9281, 9342), True, 'import subprocess as sp\n'), ((9359, 9426), 'subprocess.Popen', 'sp.Popen', (["['awk', '{print $1}']"], {'stdin': 'id_p2.stdout', 'stdout': 'sp.PIPE'}), "(['awk', '{print $1}'], stdin=id_p2.stdout, stdout=sp.PIPE)\n", (9367, 9426), True, 'import subprocess as sp\n'), ((9676, 9689), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (9686, 9689), False, 'import time\n'), ((9706, 9742), 'subprocess.Popen', 'sp.Popen', (["['squeue']"], {'stdout': 'sp.PIPE'}), "(['squeue'], stdout=sp.PIPE)\n", (9714, 9742), True, 'import subprocess as sp\n'), ((9759, 9828), 'subprocess.Popen', 'sp.Popen', (["['grep', job_name_part]"], {'stdin': 'id_p1.stdout', 'stdout': 'sp.PIPE'}), "(['grep', job_name_part], stdin=id_p1.stdout, stdout=sp.PIPE)\n", (9767, 9828), True, 'import subprocess as sp\n'), ((9845, 9912), 'subprocess.Popen', 'sp.Popen', (["['awk', '{print $1}']"], {'stdin': 'id_p2.stdout', 'stdout': 'sp.PIPE'}), "(['awk', '{print $1}'], stdin=id_p2.stdout, stdout=sp.PIPE)\n", (9853, 9912), True, 'import subprocess as sp\n'), ((12227, 12251), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (12240, 12251), False, 'import pytest\n'), ((7784, 7800), 'dateutil.parser.parse', 'parser.parse', (['et'], {}), '(et)\n', (7796, 7800), False, 'from dateutil import parser\n'), ((13726, 13740), 'pathlib.Path', 'Path', (['sdirs[0]'], {}), '(sdirs[0])\n', (13730, 13740), False, 'from pathlib import Path\n'), ((15301, 15356), 'subprocess.run', 'sp.run', (["['qacct', '-j', jobids[0]]"], {'capture_output': '(True)'}), "(['qacct', '-j', jobids[0]], capture_output=True)\n", (15307, 15356), True, 'import subprocess as sp\n'), ((15412, 15467), 'subprocess.run', 'sp.run', (["['qacct', '-j', jobids[1]]"], {'capture_output': '(True)'}), "(['qacct', '-j', jobids[1]], capture_output=True)\n", (15418, 15467), True, 'import subprocess as sp\n'), ((16252, 16307), 'subprocess.run', 'sp.run', (["['qacct', '-j', jobids[0]]"], {'capture_output': '(True)'}), "(['qacct', '-j', jobids[0]], capture_output=True)\n", (16258, 16307), True, 'import subprocess as sp\n'), ((16363, 16418), 'subprocess.run', 'sp.run', (["['qacct', '-j', jobids[1]]"], {'capture_output': '(True)'}), "(['qacct', '-j', jobids[1]], capture_output=True)\n", (16369, 16418), True, 'import subprocess as sp\n'), ((16474, 16529), 'subprocess.run', 'sp.run', (["['qacct', '-j', jobids[2]]"], {'capture_output': '(True)'}), "(['qacct', '-j', jobids[2]], capture_output=True)\n", (16480, 16529), True, 'import subprocess as sp\n'), ((16585, 16640), 'subprocess.run', 'sp.run', (["['qacct', '-j', jobids[3]]"], {'capture_output': '(True)'}), "(['qacct', '-j', jobids[3]], capture_output=True)\n", (16591, 16640), True, 'import subprocess as sp\n'), ((17783, 17838), 'subprocess.run', 'sp.run', (["['qacct', '-j', jobids[0]]"], {'capture_output': '(True)'}), "(['qacct', '-j', jobids[0]], capture_output=True)\n", (17789, 17838), True, 'import subprocess as sp\n'), ((17894, 17949), 'subprocess.run', 'sp.run', (["['qacct', '-j', jobids[1]]"], {'capture_output': '(True)'}), "(['qacct', '-j', jobids[1]], capture_output=True)\n", (17900, 17949), True, 'import subprocess as sp\n'), ((18005, 18060), 'subprocess.run', 'sp.run', (["['qacct', '-j', jobids[2]]"], {'capture_output': '(True)'}), "(['qacct', '-j', jobids[2]], capture_output=True)\n", (18011, 18060), True, 'import subprocess as sp\n')]
# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # Copyright (c) 2015 <NAME>. All rights reserved. # Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Handles all requests relating to shares. """ from oslo_config import cfg from oslo_log import log from oslo_utils import excutils from oslo_utils import strutils from oslo_utils import timeutils import six from manila.common import constants from manila.data import rpcapi as data_rpcapi from manila.db import base from manila import exception from manila.i18n import _ from manila import policy from manila import quota from manila.scheduler import rpcapi as scheduler_rpcapi from manila.share import access from manila.share import rpcapi as share_rpcapi from manila.share import share_types from manila.share import utils as share_utils from manila import utils share_api_opts = [ cfg.BoolOpt('use_scheduler_creating_share_from_snapshot', default=False, help='If set to False, then share creation from snapshot will ' 'be performed on the same host. ' 'If set to True, then scheduler will be used.' 'When enabling this option make sure that filter ' 'CreateShareFromSnapshot is enabled and to have hosts ' 'reporting replication_domain option.' 
) ] CONF = cfg.CONF CONF.register_opts(share_api_opts) LOG = log.getLogger(__name__) GB = 1048576 * 1024 QUOTAS = quota.QUOTAS class API(base.Base): """API for interacting with the share manager.""" def __init__(self, db_driver=None): super(API, self).__init__(db_driver) self.scheduler_rpcapi = scheduler_rpcapi.SchedulerAPI() self.share_rpcapi = share_rpcapi.ShareAPI() self.access_helper = access.ShareInstanceAccess(self.db, None) def _get_all_availability_zones_with_subnets(self, context, share_network_id): compatible_azs = [] for az in self.db.availability_zone_get_all(context): if self.db.share_network_subnet_get_by_availability_zone_id( context, share_network_id=share_network_id, availability_zone_id=az['id']): compatible_azs.append(az['name']) return compatible_azs def _check_if_share_quotas_exceeded(self, context, quota_exception, share_size, operation='create'): overs = quota_exception.kwargs['overs'] usages = quota_exception.kwargs['usages'] quotas = quota_exception.kwargs['quotas'] def _consumed(name): return (usages[name]['reserved'] + usages[name]['in_use']) if 'gigabytes' in overs: LOG.warning("Quota exceeded for %(s_pid)s, " "tried to %(operation)s " "%(s_size)sG share (%(d_consumed)dG of " "%(d_quota)dG already consumed).", { 's_pid': context.project_id, 's_size': share_size, 'd_consumed': _consumed('gigabytes'), 'd_quota': quotas['gigabytes'], 'operation': operation}) raise exception.ShareSizeExceedsAvailableQuota() elif 'shares' in overs: LOG.warning("Quota exceeded for %(s_pid)s, " "tried to %(operation)s " "share (%(d_consumed)d shares " "already consumed).", { 's_pid': context.project_id, 'd_consumed': _consumed('shares'), 'operation': operation}) raise exception.ShareLimitExceeded(allowed=quotas['shares']) def _check_if_replica_quotas_exceeded(self, context, quota_exception, replica_size, resource_type='share_replica'): overs = quota_exception.kwargs['overs'] usages = quota_exception.kwargs['usages'] quotas = quota_exception.kwargs['quotas'] def _consumed(name): return (usages[name]['reserved'] + usages[name]['in_use']) if 'share_replicas' in overs: LOG.warning("Quota exceeded for %(s_pid)s, " "unable to create share-replica (%(d_consumed)d " "of %(d_quota)d already consumed).", { 's_pid': context.project_id, 'd_consumed': _consumed('share_replicas'), 'd_quota': quotas['share_replicas']}) exception_kwargs = {} if resource_type != 'share_replica': msg = _("Failed while creating a share with replication " "support. Maximum number of allowed share-replicas " "is exceeded.") exception_kwargs['message'] = msg raise exception.ShareReplicasLimitExceeded(**exception_kwargs) elif 'replica_gigabytes' in overs: LOG.warning("Quota exceeded for %(s_pid)s, " "unable to create a share replica size of " "%(s_size)sG (%(d_consumed)dG of " "%(d_quota)dG already consumed).", { 's_pid': context.project_id, 's_size': replica_size, 'd_consumed': _consumed('replica_gigabytes'), 'd_quota': quotas['replica_gigabytes']}) exception_kwargs = {} if resource_type != 'share_replica': msg = _("Failed while creating a share with replication " "support. 
Requested share replica exceeds allowed " "project/user or share type gigabytes quota.") exception_kwargs['message'] = msg raise exception.ShareReplicaSizeExceedsAvailableQuota( **exception_kwargs) def create(self, context, share_proto, size, name, description, snapshot_id=None, availability_zone=None, metadata=None, share_network_id=None, share_type=None, is_public=False, share_group_id=None, share_group_snapshot_member=None, availability_zones=None): """Create new share.""" self._check_metadata_properties(metadata) if snapshot_id is not None: snapshot = self.get_snapshot(context, snapshot_id) if snapshot['aggregate_status'] != constants.STATUS_AVAILABLE: msg = _("status must be '%s'") % constants.STATUS_AVAILABLE raise exception.InvalidShareSnapshot(reason=msg) if not size: size = snapshot['size'] else: snapshot = None def as_int(s): try: return int(s) except (ValueError, TypeError): return s # tolerate size as stringified int size = as_int(size) if not isinstance(size, int) or size <= 0: msg = (_("Share size '%s' must be an integer and greater than 0") % size) raise exception.InvalidInput(reason=msg) if snapshot and size < snapshot['size']: msg = (_("Share size '%s' must be equal or greater " "than snapshot size") % size) raise exception.InvalidInput(reason=msg) if snapshot is None: share_type_id = share_type['id'] if share_type else None else: source_share = self.db.share_get(context, snapshot['share_id']) source_share_az = source_share['instance']['availability_zone'] if availability_zone is None: availability_zone = source_share_az elif (availability_zone != source_share_az and not CONF.use_scheduler_creating_share_from_snapshot): LOG.error("The specified availability zone must be the same " "as parent share when you have the configuration " "option 'use_scheduler_creating_share_from_snapshot'" " set to False.") msg = _("The specified availability zone must be the same " "as the parent share when creating from snapshot.") raise exception.InvalidInput(reason=msg) if share_type is None: # Grab the source share's share_type if no new share type # has been provided. share_type_id = source_share['instance']['share_type_id'] share_type = share_types.get_share_type(context, share_type_id) else: share_type_id = share_type['id'] if share_type_id != source_share['instance']['share_type_id']: msg = _("Invalid share type specified: the requested " "share type must match the type of the source " "share. If a share type is not specified when " "requesting a new share from a snapshot, the " "share type of the source share will be applied " "to the new share.") raise exception.InvalidInput(reason=msg) supported_share_protocols = ( proto.upper() for proto in CONF.enabled_share_protocols) if not (share_proto and share_proto.upper() in supported_share_protocols): msg = (_("Invalid share protocol provided: %(provided)s. " "It is either disabled or unsupported. 
Available " "protocols: %(supported)s") % dict( provided=share_proto, supported=CONF.enabled_share_protocols)) raise exception.InvalidInput(reason=msg) deltas = {'shares': 1, 'gigabytes': size} share_type_attributes = self.get_share_attributes_from_share_type( share_type) share_type_supports_replication = share_type_attributes.get( 'replication_type', None) if share_type_supports_replication: deltas.update( {'share_replicas': 1, 'replica_gigabytes': size}) try: reservations = QUOTAS.reserve( context, share_type_id=share_type_id, **deltas) except exception.OverQuota as e: self._check_if_share_quotas_exceeded(context, e, size) if share_type_supports_replication: self._check_if_replica_quotas_exceeded(context, e, size, resource_type='share') share_group = None if share_group_id: try: share_group = self.db.share_group_get(context, share_group_id) except exception.NotFound as e: raise exception.InvalidParameterValue(six.text_type(e)) if (not share_group_snapshot_member and not (share_group['status'] == constants.STATUS_AVAILABLE)): params = { 'avail': constants.STATUS_AVAILABLE, 'status': share_group['status'], } msg = _("Share group status must be %(avail)s, got " "%(status)s.") % params raise exception.InvalidShareGroup(message=msg) if share_type_id: share_group_st_ids = [ st['share_type_id'] for st in share_group.get('share_types', [])] if share_type_id not in share_group_st_ids: params = { 'type': share_type_id, 'group': share_group_id, } msg = _("The specified share type (%(type)s) is not " "supported by the specified share group " "(%(group)s).") % params raise exception.InvalidParameterValue(msg) if not share_group.get('share_network_id') == share_network_id: params = { 'net': share_network_id, 'group': share_group_id } msg = _("The specified share network (%(net)s) is not " "supported by the specified share group " "(%(group)s).") % params raise exception.InvalidParameterValue(msg) options = { 'size': size, 'user_id': context.user_id, 'project_id': context.project_id, 'snapshot_id': snapshot_id, 'metadata': metadata, 'display_name': name, 'display_description': description, 'share_proto': share_proto, 'is_public': is_public, 'share_group_id': share_group_id, } options.update(share_type_attributes) if share_group_snapshot_member: options['source_share_group_snapshot_member_id'] = ( share_group_snapshot_member['id']) # NOTE(dviroel): If a target availability zone was not provided, the # scheduler will receive a list with all availability zones that # contains a subnet within the selected share network. if share_network_id and not availability_zone: azs_with_subnet = self._get_all_availability_zones_with_subnets( context, share_network_id) if not availability_zones: availability_zones = azs_with_subnet else: availability_zones = ( [az for az in availability_zones if az in azs_with_subnet]) if not availability_zones: msg = _( "The share network is not supported within any requested " "availability zone. 
Check the share type's " "'availability_zones' extra-spec and the availability " "zones of the share network subnets") raise exception.InvalidInput(message=msg) try: share = self.db.share_create(context, options, create_share_instance=False) QUOTAS.commit(context, reservations, share_type_id=share_type_id) except Exception: with excutils.save_and_reraise_exception(): try: self.db.share_delete(context, share['id']) finally: QUOTAS.rollback( context, reservations, share_type_id=share_type_id) host = None snapshot_host = None if snapshot: snapshot_host = snapshot['share']['instance']['host'] if not CONF.use_scheduler_creating_share_from_snapshot: # Shares from snapshots with restriction - source host only. # It is common situation for different types of backends. host = snapshot['share']['instance']['host'] if share_group and host is None: host = share_group['host'] self.create_instance( context, share, share_network_id=share_network_id, host=host, availability_zone=availability_zone, share_group=share_group, share_group_snapshot_member=share_group_snapshot_member, share_type_id=share_type_id, availability_zones=availability_zones, snapshot_host=snapshot_host) # Retrieve the share with instance details share = self.db.share_get(context, share['id']) return share def get_share_attributes_from_share_type(self, share_type): """Determine share attributes from the share type. The share type can change any time after shares of that type are created, so we copy some share type attributes to the share to consistently govern the behavior of that share over its lifespan. """ inferred_map = constants.ExtraSpecs.INFERRED_OPTIONAL_MAP snapshot_support_key = constants.ExtraSpecs.SNAPSHOT_SUPPORT create_share_from_snapshot_key = ( constants.ExtraSpecs.CREATE_SHARE_FROM_SNAPSHOT_SUPPORT) revert_to_snapshot_key = ( constants.ExtraSpecs.REVERT_TO_SNAPSHOT_SUPPORT) mount_snapshot_support_key = ( constants.ExtraSpecs.MOUNT_SNAPSHOT_SUPPORT) snapshot_support_default = inferred_map.get(snapshot_support_key) create_share_from_snapshot_support_default = inferred_map.get( create_share_from_snapshot_key) revert_to_snapshot_support_default = inferred_map.get( revert_to_snapshot_key) mount_snapshot_support_default = inferred_map.get( constants.ExtraSpecs.MOUNT_SNAPSHOT_SUPPORT) if share_type: snapshot_support = share_types.parse_boolean_extra_spec( snapshot_support_key, share_type.get('extra_specs', {}).get( snapshot_support_key, snapshot_support_default)) create_share_from_snapshot_support = ( share_types.parse_boolean_extra_spec( create_share_from_snapshot_key, share_type.get('extra_specs', {}).get( create_share_from_snapshot_key, create_share_from_snapshot_support_default))) revert_to_snapshot_support = ( share_types.parse_boolean_extra_spec( revert_to_snapshot_key, share_type.get('extra_specs', {}).get( revert_to_snapshot_key, revert_to_snapshot_support_default))) mount_snapshot_support = share_types.parse_boolean_extra_spec( mount_snapshot_support_key, share_type.get( 'extra_specs', {}).get( mount_snapshot_support_key, mount_snapshot_support_default)) replication_type = share_type.get('extra_specs', {}).get( 'replication_type') else: snapshot_support = snapshot_support_default create_share_from_snapshot_support = ( create_share_from_snapshot_support_default) revert_to_snapshot_support = revert_to_snapshot_support_default mount_snapshot_support = mount_snapshot_support_default replication_type = None return { 'snapshot_support': snapshot_support, 'create_share_from_snapshot_support': 
create_share_from_snapshot_support, 'revert_to_snapshot_support': revert_to_snapshot_support, 'replication_type': replication_type, 'mount_snapshot_support': mount_snapshot_support, } def create_instance(self, context, share, share_network_id=None, host=None, availability_zone=None, share_group=None, share_group_snapshot_member=None, share_type_id=None, availability_zones=None, snapshot_host=None): request_spec, share_instance = ( self.create_share_instance_and_get_request_spec( context, share, availability_zone=availability_zone, share_group=share_group, host=host, share_network_id=share_network_id, share_type_id=share_type_id, availability_zones=availability_zones, snapshot_host=snapshot_host)) if share_group_snapshot_member: # Inherit properties from the share_group_snapshot_member member_share_instance = share_group_snapshot_member[ 'share_instance'] updates = { 'host': member_share_instance['host'], 'share_network_id': member_share_instance['share_network_id'], 'share_server_id': member_share_instance['share_server_id'], } share = self.db.share_instance_update(context, share_instance['id'], updates) # NOTE(ameade): Do not cast to driver if creating from share group # snapshot return if host: self.share_rpcapi.create_share_instance( context, share_instance, host, request_spec=request_spec, filter_properties={}, snapshot_id=share['snapshot_id'], ) else: # Create share instance from scratch or from snapshot could happen # on hosts other than the source host. self.scheduler_rpcapi.create_share_instance( context, request_spec=request_spec, filter_properties={}) return share_instance def create_share_instance_and_get_request_spec( self, context, share, availability_zone=None, share_group=None, host=None, share_network_id=None, share_type_id=None, cast_rules_to_readonly=False, availability_zones=None, snapshot_host=None): availability_zone_id = None if availability_zone: availability_zone_id = self.db.availability_zone_get( context, availability_zone).id # TODO(u_glide): Add here validation that provided share network # doesn't conflict with provided availability_zone when Neutron # will have AZ support. 
share_instance = self.db.share_instance_create( context, share['id'], { 'share_network_id': share_network_id, 'status': constants.STATUS_CREATING, 'scheduled_at': timeutils.utcnow(), 'host': host if host else '', 'availability_zone_id': availability_zone_id, 'share_type_id': share_type_id, 'cast_rules_to_readonly': cast_rules_to_readonly, } ) share_properties = { 'id': share['id'], 'size': share['size'], 'user_id': share['user_id'], 'project_id': share['project_id'], 'metadata': self.db.share_metadata_get(context, share['id']), 'share_server_id': share_instance['share_server_id'], 'snapshot_support': share['snapshot_support'], 'create_share_from_snapshot_support': share['create_share_from_snapshot_support'], 'revert_to_snapshot_support': share['revert_to_snapshot_support'], 'mount_snapshot_support': share['mount_snapshot_support'], 'share_proto': share['share_proto'], 'share_type_id': share_type_id, 'is_public': share['is_public'], 'share_group_id': share['share_group_id'], 'source_share_group_snapshot_member_id': share[ 'source_share_group_snapshot_member_id'], 'snapshot_id': share['snapshot_id'], 'replication_type': share['replication_type'], } share_instance_properties = { 'id': share_instance['id'], 'availability_zone_id': share_instance['availability_zone_id'], 'share_network_id': share_instance['share_network_id'], 'share_server_id': share_instance['share_server_id'], 'share_id': share_instance['share_id'], 'host': share_instance['host'], 'status': share_instance['status'], 'replica_state': share_instance['replica_state'], 'share_type_id': share_instance['share_type_id'], } share_type = None if share_instance['share_type_id']: share_type = self.db.share_type_get( context, share_instance['share_type_id']) request_spec = { 'share_properties': share_properties, 'share_instance_properties': share_instance_properties, 'share_proto': share['share_proto'], 'share_id': share['id'], 'snapshot_id': share['snapshot_id'], 'snapshot_host': snapshot_host, 'share_type': share_type, 'share_group': share_group, 'availability_zone_id': availability_zone_id, 'availability_zones': availability_zones, } return request_spec, share_instance def create_share_replica(self, context, share, availability_zone=None, share_network_id=None): if not share.get('replication_type'): msg = _("Replication not supported for share %s.") raise exception.InvalidShare(message=msg % share['id']) if share.get('share_group_id'): msg = _("Replication not supported for shares in a group.") raise exception.InvalidShare(message=msg) self._check_is_share_busy(share) active_replica = self.db.share_replicas_get_available_active_replica( context, share['id']) if not active_replica: msg = _("Share %s does not have any active replica in available " "state.") raise exception.ReplicationException(reason=msg % share['id']) share_type = share_types.get_share_type( context, share.instance['share_type_id']) type_azs = share_type['extra_specs'].get('availability_zones', '') type_azs = [t for t in type_azs.split(',') if type_azs] if (availability_zone and type_azs and availability_zone not in type_azs): msg = _("Share replica cannot be created since the share type " "%(type)s is not supported within the availability zone " "chosen %(az)s.") type_name = '%s' % (share_type['name'] or '') type_id = '(ID: %s)' % share_type['id'] payload = {'type': '%s%s' % (type_name, type_id), 'az': availability_zone} raise exception.InvalidShare(message=msg % payload) try: reservations = QUOTAS.reserve( context, share_replicas=1, replica_gigabytes=share['size'], 
share_type_id=share_type['id'] ) except exception.OverQuota as e: self._check_if_replica_quotas_exceeded(context, e, share['size']) if share_network_id: if availability_zone: try: az = self.db.availability_zone_get(context, availability_zone) except exception.AvailabilityZoneNotFound: msg = _("Share replica cannot be created because the " "specified availability zone does not exist.") raise exception.InvalidInput(message=msg) if self.db.share_network_subnet_get_by_availability_zone_id( context, share_network_id, az.get('id')) is None: msg = _("Share replica cannot be created because the " "share network is not available within the " "specified availability zone.") raise exception.InvalidShare(message=msg) else: # NOTE(dviroel): If a target availability zone was not # provided, the scheduler will receive a list with all # availability zones that contains subnets within the # selected share network. azs_subnet = self._get_all_availability_zones_with_subnets( context, share_network_id) if not type_azs: type_azs = azs_subnet else: type_azs = ( [az for az in type_azs if az in azs_subnet]) if not type_azs: msg = _( "The share network is not supported within any " "requested availability zone. Check the share type's " "'availability_zones' extra-spec and the availability " "zones of the share network subnets") raise exception.InvalidInput(message=msg) if share['replication_type'] == constants.REPLICATION_TYPE_READABLE: cast_rules_to_readonly = True else: cast_rules_to_readonly = False try: request_spec, share_replica = ( self.create_share_instance_and_get_request_spec( context, share, availability_zone=availability_zone, share_network_id=share_network_id, share_type_id=share['instance']['share_type_id'], cast_rules_to_readonly=cast_rules_to_readonly, availability_zones=type_azs) ) QUOTAS.commit( context, reservations, project_id=share['project_id'], share_type_id=share_type['id'], ) except Exception: with excutils.save_and_reraise_exception(): try: self.db.share_replica_delete( context, share_replica['id'], need_to_update_usages=False) finally: QUOTAS.rollback( context, reservations, share_type_id=share_type['id']) all_replicas = self.db.share_replicas_get_all_by_share( context, share['id']) all_hosts = [r['host'] for r in all_replicas] request_spec['active_replica_host'] = active_replica['host'] request_spec['all_replica_hosts'] = ','.join(all_hosts) self.db.share_replica_update( context, share_replica['id'], {'replica_state': constants.REPLICA_STATE_OUT_OF_SYNC}) existing_snapshots = ( self.db.share_snapshot_get_all_for_share( context, share_replica['share_id']) ) snapshot_instance = { 'status': constants.STATUS_CREATING, 'progress': '0%', 'share_instance_id': share_replica['id'], } for snapshot in existing_snapshots: self.db.share_snapshot_instance_create( context, snapshot['id'], snapshot_instance) self.scheduler_rpcapi.create_share_replica( context, request_spec=request_spec, filter_properties={}) return share_replica def delete_share_replica(self, context, share_replica, force=False): # Disallow deletion of ONLY active replica, *even* when this # operation is forced. 
replicas = self.db.share_replicas_get_all_by_share( context, share_replica['share_id']) active_replicas = list(filter( lambda x: x['replica_state'] == constants.REPLICA_STATE_ACTIVE, replicas)) if (share_replica.get('replica_state') == constants.REPLICA_STATE_ACTIVE and len(active_replicas) == 1): msg = _("Cannot delete last active replica.") raise exception.ReplicationException(reason=msg) LOG.info("Deleting replica %s.", share_replica['id']) self.db.share_replica_update( context, share_replica['id'], { 'status': constants.STATUS_DELETING, 'terminated_at': timeutils.utcnow(), } ) if not share_replica['host']: # Delete any snapshot instances created on the database replica_snapshots = ( self.db.share_snapshot_instance_get_all_with_filters( context, {'share_instance_ids': share_replica['id']}) ) for snapshot in replica_snapshots: self.db.share_snapshot_instance_delete(context, snapshot['id']) # Delete the replica from the database self.db.share_replica_delete(context, share_replica['id']) else: self.share_rpcapi.delete_share_replica(context, share_replica, force=force) def promote_share_replica(self, context, share_replica): if share_replica.get('status') != constants.STATUS_AVAILABLE: msg = _("Replica %(replica_id)s must be in %(status)s state to be " "promoted.") raise exception.ReplicationException( reason=msg % {'replica_id': share_replica['id'], 'status': constants.STATUS_AVAILABLE}) replica_state = share_replica['replica_state'] if (replica_state in (constants.REPLICA_STATE_OUT_OF_SYNC, constants.STATUS_ERROR) and not context.is_admin): msg = _("Promoting a replica with 'replica_state': %s requires " "administrator privileges.") raise exception.AdminRequired( message=msg % replica_state) self.db.share_replica_update( context, share_replica['id'], {'status': constants.STATUS_REPLICATION_CHANGE}) self.share_rpcapi.promote_share_replica(context, share_replica) return self.db.share_replica_get(context, share_replica['id']) def update_share_replica(self, context, share_replica): if not share_replica['host']: msg = _("Share replica does not have a valid host.") raise exception.InvalidHost(reason=msg) self.share_rpcapi.update_share_replica(context, share_replica) def manage(self, context, share_data, driver_options): # Check whether there's a share already with the provided options: filters = { 'export_location_path': share_data['export_location_path'], 'host': share_data['host'], } share_server_id = share_data.get('share_server_id') if share_server_id: filters['share_server_id'] = share_data['share_server_id'] already_managed = self.db.share_instances_get_all(context, filters=filters) if already_managed: LOG.error("Found an existing share with export location %s!", share_data['export_location_path']) msg = _("A share already exists with the export path specified.") raise exception.InvalidShare(reason=msg) share_type_id = share_data['share_type_id'] share_type = share_types.get_share_type(context, share_type_id) dhss = share_types.parse_boolean_extra_spec( 'driver_handles_share_servers', share_type['extra_specs']['driver_handles_share_servers']) if dhss and not share_server_id: msg = _("Share Server ID parameter is required when managing a " "share using a share type with " "driver_handles_share_servers extra-spec set to True.") raise exception.InvalidInput(reason=msg) if not dhss and share_server_id: msg = _("Share Server ID parameter is not expected when managing a" " share using a share type with " "driver_handles_share_servers extra-spec set to False.") raise 
exception.InvalidInput(reason=msg) if share_server_id: try: share_server = self.db.share_server_get( context, share_data['share_server_id']) except exception.ShareServerNotFound: msg = _("Share Server specified was not found.") raise exception.InvalidInput(reason=msg) if share_server['status'] != constants.STATUS_ACTIVE: msg = _("The provided share server is not active.") raise exception.InvalidShareServer(reason=msg) subnet = self.db.share_network_subnet_get( context, share_server['share_network_subnet_id']) share_data['share_network_id'] = subnet['share_network_id'] share_data.update({ 'user_id': context.user_id, 'project_id': context.project_id, 'status': constants.STATUS_MANAGING, 'scheduled_at': timeutils.utcnow(), }) share_data.update( self.get_share_attributes_from_share_type(share_type)) share = self.db.share_create(context, share_data) export_location_path = share_data.pop('export_location_path') self.db.share_export_locations_update(context, share.instance['id'], export_location_path) request_spec = self._get_request_spec_dict( share, share_type, size=0, share_proto=share_data['share_proto'], host=share_data['host']) # NOTE(ganso): Scheduler is called to validate if share type # provided can fit in host provided. It will invoke manage upon # successful validation. self.scheduler_rpcapi.manage_share(context, share['id'], driver_options, request_spec) return self.db.share_get(context, share['id']) def _get_request_spec_dict(self, share, share_type, **kwargs): if share is None: share = {'instance': {}} # NOTE(dviroel): The share object can be a share instance object with # share data. share_instance = share.get('instance', share) share_properties = { 'size': kwargs.get('size', share.get('size')), 'user_id': kwargs.get('user_id', share.get('user_id')), 'project_id': kwargs.get('project_id', share.get('project_id')), 'snapshot_support': kwargs.get( 'snapshot_support', share_type.get('extra_specs', {}).get('snapshot_support') ), 'create_share_from_snapshot_support': kwargs.get( 'create_share_from_snapshot_support', share_type.get('extra_specs', {}).get( 'create_share_from_snapshot_support') ), 'revert_to_snapshot_support': kwargs.get( 'revert_to_snapshot_support', share_type.get('extra_specs', {}).get( 'revert_to_snapshot_support') ), 'mount_snapshot_support': kwargs.get( 'mount_snapshot_support', share_type.get('extra_specs', {}).get( 'mount_snapshot_support') ), 'share_proto': kwargs.get('share_proto', share.get('share_proto')), 'share_type_id': share_type['id'], 'is_public': kwargs.get('is_public', share.get('is_public')), 'share_group_id': kwargs.get( 'share_group_id', share.get('share_group_id')), 'source_share_group_snapshot_member_id': kwargs.get( 'source_share_group_snapshot_member_id', share.get('source_share_group_snapshot_member_id')), 'snapshot_id': kwargs.get('snapshot_id', share.get('snapshot_id')), } share_instance_properties = { 'availability_zone_id': kwargs.get( 'availability_zone_id', share_instance.get('availability_zone_id')), 'share_network_id': kwargs.get( 'share_network_id', share_instance.get('share_network_id')), 'share_server_id': kwargs.get( 'share_server_id', share_instance.get('share_server_id')), 'share_id': kwargs.get('share_id', share_instance.get('share_id')), 'host': kwargs.get('host', share_instance.get('host')), 'status': kwargs.get('status', share_instance.get('status')), } request_spec = { 'share_properties': share_properties, 'share_instance_properties': share_instance_properties, 'share_type': share_type, 'share_id': share.get('id'), } return 
request_spec def unmanage(self, context, share): policy.check_policy(context, 'share', 'unmanage') self._check_is_share_busy(share) update_data = {'status': constants.STATUS_UNMANAGING, 'terminated_at': timeutils.utcnow()} share_ref = self.db.share_update(context, share['id'], update_data) self.share_rpcapi.unmanage_share(context, share_ref) # NOTE(u_glide): We should update 'updated_at' timestamp of # share server here, when manage/unmanage operations will be supported # for driver_handles_share_servers=True mode def manage_snapshot(self, context, snapshot_data, driver_options): try: share = self.db.share_get(context, snapshot_data['share_id']) except exception.NotFound: raise exception.ShareNotFound(share_id=snapshot_data['share_id']) if share['has_replicas']: msg = (_("Share %s has replicas. Snapshots of this share cannot " "currently be managed until all replicas are removed.") % share['id']) raise exception.InvalidShare(reason=msg) existing_snapshots = self.db.share_snapshot_get_all_for_share( context, snapshot_data['share_id']) for existing_snap in existing_snapshots: for inst in existing_snap.get('instances'): if (snapshot_data['provider_location'] == inst['provider_location']): msg = _("A share snapshot %(share_snapshot_id)s is " "already managed for provider location " "%(provider_location)s.") % { 'share_snapshot_id': existing_snap['id'], 'provider_location': snapshot_data['provider_location'], } raise exception.ManageInvalidShareSnapshot( reason=msg) snapshot_data.update({ 'user_id': context.user_id, 'project_id': context.project_id, 'status': constants.STATUS_MANAGING, 'share_size': share['size'], 'progress': '0%', 'share_proto': share['share_proto'] }) snapshot = self.db.share_snapshot_create(context, snapshot_data) self.share_rpcapi.manage_snapshot(context, snapshot, share['host'], driver_options) return snapshot def unmanage_snapshot(self, context, snapshot, host): update_data = {'status': constants.STATUS_UNMANAGING, 'terminated_at': timeutils.utcnow()} snapshot_ref = self.db.share_snapshot_update(context, snapshot['id'], update_data) self.share_rpcapi.unmanage_snapshot(context, snapshot_ref, host) def revert_to_snapshot(self, context, share, snapshot): """Revert a share to a snapshot.""" reservations = self._handle_revert_to_snapshot_quotas( context, share, snapshot) try: if share.get('has_replicas'): self._revert_to_replicated_snapshot( context, share, snapshot, reservations) else: self._revert_to_snapshot( context, share, snapshot, reservations) except Exception: with excutils.save_and_reraise_exception(): if reservations: QUOTAS.rollback( context, reservations, share_type_id=share['instance']['share_type_id']) def _handle_revert_to_snapshot_quotas(self, context, share, snapshot): """Reserve extra quota if a revert will result in a larger share.""" # Note(cknight): This value may be positive or negative. size_increase = snapshot['size'] - share['size'] if not size_increase: return None try: return QUOTAS.reserve( context, project_id=share['project_id'], gigabytes=size_increase, user_id=share['user_id'], share_type_id=share['instance']['share_type_id']) except exception.OverQuota as exc: usages = exc.kwargs['usages'] quotas = exc.kwargs['quotas'] consumed_gb = (usages['gigabytes']['reserved'] + usages['gigabytes']['in_use']) msg = _("Quota exceeded for %(s_pid)s. 
Reverting share " "%(s_sid)s to snapshot %(s_ssid)s will increase the " "share's size by %(s_size)sG, " "(%(d_consumed)dG of %(d_quota)dG already consumed).") msg_args = { 's_pid': context.project_id, 's_sid': share['id'], 's_ssid': snapshot['id'], 's_size': size_increase, 'd_consumed': consumed_gb, 'd_quota': quotas['gigabytes'], } message = msg % msg_args LOG.error(message) raise exception.ShareSizeExceedsAvailableQuota(message=message) def _revert_to_snapshot(self, context, share, snapshot, reservations): """Revert a non-replicated share to a snapshot.""" # Set status of share to 'reverting' self.db.share_update( context, snapshot['share_id'], {'status': constants.STATUS_REVERTING}) # Set status of snapshot to 'restoring' self.db.share_snapshot_update( context, snapshot['id'], {'status': constants.STATUS_RESTORING}) # Send revert API to share host self.share_rpcapi.revert_to_snapshot( context, share, snapshot, share['instance']['host'], reservations) def _revert_to_replicated_snapshot(self, context, share, snapshot, reservations): """Revert a replicated share to a snapshot.""" # Get active replica active_replica = self.db.share_replicas_get_available_active_replica( context, share['id']) if not active_replica: msg = _('Share %s has no active replica in available state.') raise exception.ReplicationException(reason=msg % share['id']) # Get snapshot instance on active replica snapshot_instance_filters = { 'share_instance_ids': active_replica['id'], 'snapshot_ids': snapshot['id'], } snapshot_instances = ( self.db.share_snapshot_instance_get_all_with_filters( context, snapshot_instance_filters)) active_snapshot_instance = ( snapshot_instances[0] if snapshot_instances else None) if not active_snapshot_instance: msg = _('Share %(share)s has no snapshot %(snap)s associated with ' 'its active replica.') msg_args = {'share': share['id'], 'snap': snapshot['id']} raise exception.ReplicationException(reason=msg % msg_args) # Set active replica to 'reverting' self.db.share_replica_update( context, active_replica['id'], {'status': constants.STATUS_REVERTING}) # Set snapshot instance on active replica to 'restoring' self.db.share_snapshot_instance_update( context, active_snapshot_instance['id'], {'status': constants.STATUS_RESTORING}) # Send revert API to active replica host self.share_rpcapi.revert_to_snapshot( context, share, snapshot, active_replica['host'], reservations) @policy.wrap_check_policy('share') def delete(self, context, share, force=False): """Delete share.""" share = self.db.share_get(context, share['id']) share_id = share['id'] statuses = (constants.STATUS_AVAILABLE, constants.STATUS_ERROR, constants.STATUS_INACTIVE) if not (force or share['status'] in statuses): msg = _("Share status must be one of %(statuses)s") % { "statuses": statuses} raise exception.InvalidShare(reason=msg) # NOTE(gouthamr): If the share has more than one replica, # it can't be deleted until the additional replicas are removed. if share.has_replicas: msg = _("Share %s has replicas. 
Remove the replicas before " "deleting the share.") % share_id raise exception.Conflict(err=msg) snapshots = self.db.share_snapshot_get_all_for_share(context, share_id) if len(snapshots): msg = _("Share still has %d dependent snapshots.") % len(snapshots) raise exception.InvalidShare(reason=msg) share_group_snapshot_members_count = ( self.db.count_share_group_snapshot_members_in_share( context, share_id)) if share_group_snapshot_members_count: msg = ( _("Share still has %d dependent share group snapshot " "members.") % share_group_snapshot_members_count) raise exception.InvalidShare(reason=msg) self._check_is_share_busy(share) for share_instance in share.instances: if share_instance['host']: self.delete_instance(context, share_instance, force=force) else: self.db.share_instance_delete( context, share_instance['id'], need_to_update_usages=True) def delete_instance(self, context, share_instance, force=False): policy.check_policy(context, 'share', 'delete') statuses = (constants.STATUS_AVAILABLE, constants.STATUS_ERROR, constants.STATUS_INACTIVE) if not (force or share_instance['status'] in statuses): msg = _("Share instance status must be one of %(statuses)s") % { "statuses": statuses} raise exception.InvalidShareInstance(reason=msg) share_instance = self.db.share_instance_update( context, share_instance['id'], {'status': constants.STATUS_DELETING, 'terminated_at': timeutils.utcnow()} ) self.share_rpcapi.delete_share_instance(context, share_instance, force=force) # NOTE(u_glide): 'updated_at' timestamp is used to track last usage of # share server. This is required for automatic share servers cleanup # because we should track somehow period of time when share server # doesn't have shares (unused). We do this update only on share # deletion because share server with shares cannot be deleted, so no # need to do this update on share creation or any other share operation if share_instance['share_server_id']: self.db.share_server_update( context, share_instance['share_server_id'], {'updated_at': timeutils.utcnow()}) def delete_share_server(self, context, server): """Delete share server.""" policy.check_policy(context, 'share_server', 'delete', server) shares = self.db.share_instances_get_all_by_share_server(context, server['id']) if shares: raise exception.ShareServerInUse(share_server_id=server['id']) share_groups = self.db.share_group_get_all_by_share_server( context, server['id']) if share_groups: LOG.error("share server '%(ssid)s' in use by share groups.", {'ssid': server['id']}) raise exception.ShareServerInUse(share_server_id=server['id']) # NOTE(vponomaryov): There is no share_server status update here, # it is intentional. # Status will be changed in manila.share.manager after verification # for race condition between share creation on server # and server deletion. 
self.share_rpcapi.delete_share_server(context, server) def manage_share_server( self, context, identifier, host, share_net_subnet, driver_opts): """Manage a share server.""" try: matched_servers = self.db.share_server_search_by_identifier( context, identifier) except exception.ShareServerNotFound: pass else: msg = _("Identifier %(identifier)s specified matches existing " "share servers: %(servers)s.") % { 'identifier': identifier, 'servers': ', '.join(s['identifier'] for s in matched_servers) } raise exception.InvalidInput(reason=msg) values = { 'host': host, 'share_network_subnet_id': share_net_subnet['id'], 'status': constants.STATUS_MANAGING, 'is_auto_deletable': False, 'identifier': identifier, } server = self.db.share_server_create(context, values) self.share_rpcapi.manage_share_server( context, server, identifier, driver_opts) return self.db.share_server_get(context, server['id']) def unmanage_share_server(self, context, share_server, force=False): """Unmanage a share server.""" shares = self.db.share_instances_get_all_by_share_server( context, share_server['id']) if shares: raise exception.ShareServerInUse( share_server_id=share_server['id']) share_groups = self.db.share_group_get_all_by_share_server( context, share_server['id']) if share_groups: LOG.error("share server '%(ssid)s' in use by share groups.", {'ssid': share_server['id']}) raise exception.ShareServerInUse( share_server_id=share_server['id']) update_data = {'status': constants.STATUS_UNMANAGING, 'terminated_at': timeutils.utcnow()} share_server = self.db.share_server_update( context, share_server['id'], update_data) self.share_rpcapi.unmanage_share_server( context, share_server, force=force) def create_snapshot(self, context, share, name, description, force=False): policy.check_policy(context, 'share', 'create_snapshot', share) if ((not force) and (share['status'] != constants.STATUS_AVAILABLE)): msg = _("Source share status must be " "%s") % constants.STATUS_AVAILABLE raise exception.InvalidShare(reason=msg) size = share['size'] self._check_is_share_busy(share) try: reservations = QUOTAS.reserve( context, snapshots=1, snapshot_gigabytes=size, share_type_id=share['instance']['share_type_id']) except exception.OverQuota as e: overs = e.kwargs['overs'] usages = e.kwargs['usages'] quotas = e.kwargs['quotas'] def _consumed(name): return (usages[name]['reserved'] + usages[name]['in_use']) if 'snapshot_gigabytes' in overs: msg = ("Quota exceeded for %(s_pid)s, tried to create " "%(s_size)sG snapshot (%(d_consumed)dG of " "%(d_quota)dG already consumed).") LOG.warning(msg, { 's_pid': context.project_id, 's_size': size, 'd_consumed': _consumed('snapshot_gigabytes'), 'd_quota': quotas['snapshot_gigabytes']}) raise exception.SnapshotSizeExceedsAvailableQuota() elif 'snapshots' in overs: msg = ("Quota exceeded for %(s_pid)s, tried to create " "snapshot (%(d_consumed)d snapshots " "already consumed).") LOG.warning(msg, {'s_pid': context.project_id, 'd_consumed': _consumed('snapshots')}) raise exception.SnapshotLimitExceeded( allowed=quotas['snapshots']) options = {'share_id': share['id'], 'size': share['size'], 'user_id': context.user_id, 'project_id': context.project_id, 'status': constants.STATUS_CREATING, 'progress': '0%', 'share_size': share['size'], 'display_name': name, 'display_description': description, 'share_proto': share['share_proto']} try: snapshot = self.db.share_snapshot_create(context, options) QUOTAS.commit( context, reservations, share_type_id=share['instance']['share_type_id']) except Exception: with 
excutils.save_and_reraise_exception(): try: self.db.snapshot_delete(context, share['id']) finally: QUOTAS.rollback( context, reservations, share_type_id=share['instance']['share_type_id']) # If replicated share, create snapshot instances for each replica if share.get('has_replicas'): snapshot = self.db.share_snapshot_get(context, snapshot['id']) share_instance_id = snapshot['instance']['share_instance_id'] replicas = self.db.share_replicas_get_all_by_share( context, share['id']) replicas = [r for r in replicas if r['id'] != share_instance_id] snapshot_instance = { 'status': constants.STATUS_CREATING, 'progress': '0%', } for replica in replicas: snapshot_instance.update({'share_instance_id': replica['id']}) self.db.share_snapshot_instance_create( context, snapshot['id'], snapshot_instance) self.share_rpcapi.create_replicated_snapshot( context, share, snapshot) else: self.share_rpcapi.create_snapshot(context, share, snapshot) return snapshot def migration_start( self, context, share, dest_host, force_host_assisted_migration, preserve_metadata, writable, nondisruptive, preserve_snapshots, new_share_network=None, new_share_type=None): """Migrates share to a new host.""" if force_host_assisted_migration and ( preserve_metadata or writable or nondisruptive or preserve_snapshots): msg = _('Invalid parameter combination. Cannot set parameters ' '"nondisruptive", "writable", "preserve_snapshots" or ' '"preserve_metadata" to True when enabling the ' '"force_host_assisted_migration" option.') LOG.error(msg) raise exception.InvalidInput(reason=msg) share_instance = share.instance # NOTE(gouthamr): Ensure share does not have replicas. # Currently share migrations are disallowed for replicated shares. if share.has_replicas: msg = _('Share %s has replicas. Remove the replicas before ' 'attempting to migrate the share.') % share['id'] LOG.error(msg) raise exception.Conflict(err=msg) # TODO(ganso): We do not support migrating shares in or out of groups # for now. if share.get('share_group_id'): msg = _('Share %s is a member of a group. 
This operation is not ' 'currently supported for shares that are members of ' 'groups.') % share['id'] LOG.error(msg) raise exception.InvalidShare(reason=msg) # We only handle "available" share for now if share_instance['status'] != constants.STATUS_AVAILABLE: msg = _('Share instance %(instance_id)s status must be available, ' 'but current status is: %(instance_status)s.') % { 'instance_id': share_instance['id'], 'instance_status': share_instance['status']} raise exception.InvalidShare(reason=msg) # Access rules status must not be error if share_instance['access_rules_status'] == constants.STATUS_ERROR: msg = _('Share instance %(instance_id)s access rules status must ' 'not be in %(error)s when attempting to start a ' 'migration.') % { 'instance_id': share_instance['id'], 'error': constants.STATUS_ERROR} raise exception.InvalidShare(reason=msg) self._check_is_share_busy(share) if force_host_assisted_migration: # We only handle shares without snapshots for # host-assisted migration snaps = self.db.share_snapshot_get_all_for_share(context, share['id']) if snaps: msg = _("Share %s must not have snapshots when using " "host-assisted migration.") % share['id'] raise exception.Conflict(err=msg) dest_host_host = share_utils.extract_host(dest_host) # Make sure the host is in the list of available hosts utils.validate_service_host(context, dest_host_host) if new_share_type: share_type = new_share_type new_share_type_id = new_share_type['id'] dhss = share_type['extra_specs']['driver_handles_share_servers'] dhss = strutils.bool_from_string(dhss, strict=True) if (dhss and not new_share_network and not share_instance['share_network_id']): msg = _( "New share network must be provided when share type of" " given share %s has extra_spec " "'driver_handles_share_servers' as True.") % share['id'] raise exception.InvalidInput(reason=msg) else: share_type = {} share_type_id = share_instance['share_type_id'] if share_type_id: share_type = share_types.get_share_type(context, share_type_id) new_share_type_id = share_instance['share_type_id'] dhss = share_type['extra_specs']['driver_handles_share_servers'] dhss = strutils.bool_from_string(dhss, strict=True) if dhss: if new_share_network: new_share_network_id = new_share_network['id'] else: new_share_network_id = share_instance['share_network_id'] else: if new_share_network: msg = _( "New share network must not be provided when share type of" " given share %s has extra_spec " "'driver_handles_share_servers' as False.") % share['id'] raise exception.InvalidInput(reason=msg) new_share_network_id = None # Make sure the destination is different than the source if (new_share_network_id == share_instance['share_network_id'] and new_share_type_id == share_instance['share_type_id'] and dest_host == share_instance['host']): msg = ("Destination host (%(dest_host)s), share network " "(%(dest_sn)s) or share type (%(dest_st)s) are the same " "as the current host's '%(src_host)s', '%(src_sn)s' and " "'%(src_st)s' respectively. 
Nothing to be done.") % { 'dest_host': dest_host, 'dest_sn': new_share_network_id, 'dest_st': new_share_type_id, 'src_host': share_instance['host'], 'src_sn': share_instance['share_network_id'], 'src_st': share_instance['share_type_id'], } LOG.info(msg) self.db.share_update( context, share['id'], {'task_state': constants.TASK_STATE_MIGRATION_SUCCESS}) return 200 service = self.db.service_get_by_args( context, dest_host_host, 'manila-share') type_azs = share_type['extra_specs'].get('availability_zones', '') type_azs = [t for t in type_azs.split(',') if type_azs] if type_azs and service['availability_zone']['name'] not in type_azs: msg = _("Share %(shr)s cannot be migrated to host %(dest)s " "because share type %(type)s is not supported within the " "availability zone (%(az)s) that the host is in.") type_name = '%s' % (share_type['name'] or '') type_id = '(ID: %s)' % share_type['id'] payload = {'type': '%s%s' % (type_name, type_id), 'az': service['availability_zone']['name'], 'shr': share['id'], 'dest': dest_host} raise exception.InvalidShare(reason=msg % payload) request_spec = self._get_request_spec_dict( share, share_type, availability_zone_id=service['availability_zone_id'], share_network_id=new_share_network_id) self.db.share_update( context, share['id'], {'task_state': constants.TASK_STATE_MIGRATION_STARTING}) self.db.share_instance_update(context, share_instance['id'], {'status': constants.STATUS_MIGRATING}) self.scheduler_rpcapi.migrate_share_to_host( context, share['id'], dest_host, force_host_assisted_migration, preserve_metadata, writable, nondisruptive, preserve_snapshots, new_share_network_id, new_share_type_id, request_spec) return 202 def migration_complete(self, context, share): if share['task_state'] not in ( constants.TASK_STATE_DATA_COPYING_COMPLETED, constants.TASK_STATE_MIGRATION_DRIVER_PHASE1_DONE): msg = self._migration_validate_error_message(share) if msg is None: msg = _("First migration phase of share %s not completed" " yet.") % share['id'] LOG.error(msg) raise exception.InvalidShare(reason=msg) share_instance_id, new_share_instance_id = ( self.get_migrating_instances(share)) share_instance_ref = self.db.share_instance_get( context, share_instance_id, with_share_data=True) self.share_rpcapi.migration_complete(context, share_instance_ref, new_share_instance_id) def get_migrating_instances(self, share): share_instance_id = None new_share_instance_id = None for instance in share.instances: if instance['status'] == constants.STATUS_MIGRATING: share_instance_id = instance['id'] if instance['status'] == constants.STATUS_MIGRATING_TO: new_share_instance_id = instance['id'] if None in (share_instance_id, new_share_instance_id): msg = _("Share instances %(instance_id)s and " "%(new_instance_id)s in inconsistent states, cannot" " continue share migration for share %(share_id)s" ".") % {'instance_id': share_instance_id, 'new_instance_id': new_share_instance_id, 'share_id': share['id']} raise exception.ShareMigrationFailed(reason=msg) return share_instance_id, new_share_instance_id def migration_get_progress(self, context, share): if share['task_state'] == ( constants.TASK_STATE_MIGRATION_DRIVER_IN_PROGRESS): share_instance_id, migrating_instance_id = ( self.get_migrating_instances(share)) share_instance_ref = self.db.share_instance_get( context, share_instance_id, with_share_data=True) service_host = share_utils.extract_host(share_instance_ref['host']) service = self.db.service_get_by_args( context, service_host, 'manila-share') if utils.service_is_up(service): try: result = 
self.share_rpcapi.migration_get_progress( context, share_instance_ref, migrating_instance_id) except exception.InvalidShare: # reload to get the latest task_state share = self.db.share_get(context, share['id']) result = self._migration_get_progress_state(share) except Exception: msg = _("Failed to obtain migration progress of share " "%s.") % share['id'] LOG.exception(msg) raise exception.ShareMigrationError(reason=msg) else: result = None elif share['task_state'] == ( constants.TASK_STATE_DATA_COPYING_IN_PROGRESS): data_rpc = data_rpcapi.DataAPI() LOG.info("Sending request to get share migration information" " of share %s.", share['id']) services = self.db.service_get_all_by_topic(context, 'manila-data') if len(services) > 0 and utils.service_is_up(services[0]): try: result = data_rpc.data_copy_get_progress( context, share['id']) except Exception: msg = _("Failed to obtain migration progress of share " "%s.") % share['id'] LOG.exception(msg) raise exception.ShareMigrationError(reason=msg) else: result = None else: result = self._migration_get_progress_state(share) if not (result and result.get('total_progress') is not None): msg = self._migration_validate_error_message(share) if msg is None: msg = _("Migration progress of share %s cannot be obtained at " "this moment.") % share['id'] LOG.error(msg) raise exception.InvalidShare(reason=msg) return result def _migration_get_progress_state(self, share): task_state = share['task_state'] if task_state in (constants.TASK_STATE_MIGRATION_SUCCESS, constants.TASK_STATE_DATA_COPYING_ERROR, constants.TASK_STATE_MIGRATION_CANCELLED, constants.TASK_STATE_MIGRATION_CANCEL_IN_PROGRESS, constants.TASK_STATE_MIGRATION_COMPLETING, constants.TASK_STATE_MIGRATION_DRIVER_PHASE1_DONE, constants.TASK_STATE_DATA_COPYING_COMPLETED, constants.TASK_STATE_DATA_COPYING_COMPLETING, constants.TASK_STATE_DATA_COPYING_CANCELLED, constants.TASK_STATE_MIGRATION_ERROR): return {'total_progress': 100} elif task_state in (constants.TASK_STATE_MIGRATION_STARTING, constants.TASK_STATE_MIGRATION_DRIVER_STARTING, constants.TASK_STATE_DATA_COPYING_STARTING, constants.TASK_STATE_MIGRATION_IN_PROGRESS): return {'total_progress': 0} else: return None def _migration_validate_error_message(self, resource, resource_type='share'): task_state = resource['task_state'] if task_state == constants.TASK_STATE_MIGRATION_SUCCESS: msg = _("Migration of %(resource_type)s %(resource_id)s has " "already completed.") % { 'resource_id': resource['id'], 'resource_type': resource_type} elif task_state in (None, constants.TASK_STATE_MIGRATION_ERROR): msg = _("There is no migration being performed for " "%(resource_type)s %(resource_id)s at this moment.") % { 'resource_id': resource['id'], 'resource_type': resource_type} elif task_state == constants.TASK_STATE_MIGRATION_CANCELLED: msg = _("Migration of %(resource_type)s %(resource_id)s was " "already cancelled.") % { 'resource_id': resource['id'], 'resource_type': resource_type} elif task_state in (constants.TASK_STATE_MIGRATION_DRIVER_PHASE1_DONE, constants.TASK_STATE_DATA_COPYING_COMPLETED): msg = _("Migration of %(resource_type)s %(resource_id)s has " "already completed first phase.") % { 'resource_id': resource['id'], 'resource_type': resource_type} else: return None return msg def migration_cancel(self, context, share): migrating = True if share['task_state'] in ( constants.TASK_STATE_DATA_COPYING_COMPLETED, constants.TASK_STATE_MIGRATION_DRIVER_PHASE1_DONE, constants.TASK_STATE_MIGRATION_DRIVER_IN_PROGRESS): share_instance_id, migrating_instance_id = 
( self.get_migrating_instances(share)) share_instance_ref = self.db.share_instance_get( context, share_instance_id, with_share_data=True) service_host = share_utils.extract_host(share_instance_ref['host']) service = self.db.service_get_by_args( context, service_host, 'manila-share') if utils.service_is_up(service): self.share_rpcapi.migration_cancel( context, share_instance_ref, migrating_instance_id) else: migrating = False elif share['task_state'] == ( constants.TASK_STATE_DATA_COPYING_IN_PROGRESS): data_rpc = data_rpcapi.DataAPI() LOG.info("Sending request to cancel migration of " "share %s.", share['id']) services = self.db.service_get_all_by_topic(context, 'manila-data') if len(services) > 0 and utils.service_is_up(services[0]): try: data_rpc.data_copy_cancel(context, share['id']) except Exception: msg = _("Failed to cancel migration of share " "%s.") % share['id'] LOG.exception(msg) raise exception.ShareMigrationError(reason=msg) else: migrating = False else: migrating = False if not migrating: msg = self._migration_validate_error_message(share) if msg is None: msg = _("Migration of share %s cannot be cancelled at this " "moment.") % share['id'] LOG.error(msg) raise exception.InvalidShare(reason=msg) @policy.wrap_check_policy('share') def delete_snapshot(self, context, snapshot, force=False): statuses = (constants.STATUS_AVAILABLE, constants.STATUS_ERROR) if not (force or snapshot['aggregate_status'] in statuses): msg = _("Share Snapshot status must be one of %(statuses)s.") % { "statuses": statuses} raise exception.InvalidShareSnapshot(reason=msg) share = self.db.share_get(context, snapshot['share_id']) snapshot_instances = ( self.db.share_snapshot_instance_get_all_with_filters( context, {'snapshot_ids': snapshot['id']}) ) for snapshot_instance in snapshot_instances: self.db.share_snapshot_instance_update( context, snapshot_instance['id'], {'status': constants.STATUS_DELETING}) if share['has_replicas']: self.share_rpcapi.delete_replicated_snapshot( context, snapshot, share['instance']['host'], share_id=share['id'], force=force) else: self.share_rpcapi.delete_snapshot( context, snapshot, share['instance']['host'], force=force) @policy.wrap_check_policy('share') def update(self, context, share, fields): return self.db.share_update(context, share['id'], fields) @policy.wrap_check_policy('share') def snapshot_update(self, context, snapshot, fields): return self.db.share_snapshot_update(context, snapshot['id'], fields) def get(self, context, share_id): rv = self.db.share_get(context, share_id) if not rv['is_public']: policy.check_policy(context, 'share', 'get', rv) return rv def get_all(self, context, search_opts=None, sort_key='created_at', sort_dir='desc'): policy.check_policy(context, 'share', 'get_all') if search_opts is None: search_opts = {} LOG.debug("Searching for shares by: %s", search_opts) # Prepare filters filters = {} filter_keys = [ 'display_name', 'share_group_id', 'display_name~', 'display_description', 'display_description~', 'snapshot_id', 'status', 'share_type_id', 'project_id', 'export_location_id', 'export_location_path', 'limit', 'offset', 'host', 'share_network_id'] for key in filter_keys: if key in search_opts: filters[key] = search_opts.pop(key) if 'metadata' in search_opts: filters['metadata'] = search_opts.pop('metadata') if not isinstance(filters['metadata'], dict): msg = _("Wrong metadata filter provided: " "%s.") % six.text_type(filters['metadata']) raise exception.InvalidInput(reason=msg) if 'extra_specs' in search_opts: # Verify policy for extra-specs 
access policy.check_policy(context, 'share_types_extra_spec', 'index') filters['extra_specs'] = search_opts.pop('extra_specs') if not isinstance(filters['extra_specs'], dict): msg = _("Wrong extra specs filter provided: " "%s.") % six.text_type(filters['extra_specs']) raise exception.InvalidInput(reason=msg) if not (isinstance(sort_key, six.string_types) and sort_key): msg = _("Wrong sort_key filter provided: " "'%s'.") % six.text_type(sort_key) raise exception.InvalidInput(reason=msg) if not (isinstance(sort_dir, six.string_types) and sort_dir): msg = _("Wrong sort_dir filter provided: " "'%s'.") % six.text_type(sort_dir) raise exception.InvalidInput(reason=msg) is_public = search_opts.pop('is_public', False) is_public = strutils.bool_from_string(is_public, strict=True) # Get filtered list of shares if 'host' in filters: policy.check_policy(context, 'share', 'list_by_host') if 'share_server_id' in search_opts: # NOTE(vponomaryov): this is project_id independent policy.check_policy(context, 'share', 'list_by_share_server_id') shares = self.db.share_get_all_by_share_server( context, search_opts.pop('share_server_id'), filters=filters, sort_key=sort_key, sort_dir=sort_dir) elif (context.is_admin and utils.is_all_tenants(search_opts)): shares = self.db.share_get_all( context, filters=filters, sort_key=sort_key, sort_dir=sort_dir) else: shares = self.db.share_get_all_by_project( context, project_id=context.project_id, filters=filters, is_public=is_public, sort_key=sort_key, sort_dir=sort_dir) # NOTE(vponomaryov): we do not need 'all_tenants' opt anymore search_opts.pop('all_tenants', None) if search_opts: results = [] for s in shares: # values in search_opts can be only strings if (all(s.get(k, None) == v or (v in (s.get(k.rstrip('~')) if k.endswith('~') and s.get(k.rstrip('~')) else ())) for k, v in search_opts.items())): results.append(s) shares = results return shares def get_snapshot(self, context, snapshot_id): policy.check_policy(context, 'share_snapshot', 'get_snapshot') return self.db.share_snapshot_get(context, snapshot_id) def get_all_snapshots(self, context, search_opts=None, sort_key='share_id', sort_dir='desc'): policy.check_policy(context, 'share_snapshot', 'get_all_snapshots') search_opts = search_opts or {} LOG.debug("Searching for snapshots by: %s", search_opts) # Read and remove key 'all_tenants' if was provided all_tenants = search_opts.pop('all_tenants', None) string_args = {'sort_key': sort_key, 'sort_dir': sort_dir} string_args.update(search_opts) for k, v in string_args.items(): if not (isinstance(v, six.string_types) and v): msg = _("Wrong '%(k)s' filter provided: " "'%(v)s'.") % {'k': k, 'v': string_args[k]} raise exception.InvalidInput(reason=msg) if (context.is_admin and all_tenants): snapshots = self.db.share_snapshot_get_all( context, filters=search_opts, sort_key=sort_key, sort_dir=sort_dir) else: snapshots = self.db.share_snapshot_get_all_by_project( context, context.project_id, filters=search_opts, sort_key=sort_key, sort_dir=sort_dir) # Remove key 'usage' if provided search_opts.pop('usage', None) if search_opts: results = [] not_found = object() for snapshot in snapshots: if (all(snapshot.get(k, not_found) == v or (v in snapshot.get(k.rstrip('~')) if k.endswith('~') and snapshot.get(k.rstrip('~')) else ()) for k, v in search_opts.items())): results.append(snapshot) snapshots = results return snapshots def get_latest_snapshot_for_share(self, context, share_id): """Get the newest snapshot of a share.""" return self.db.share_snapshot_get_latest_for_share(context, 
share_id) @staticmethod def _is_invalid_share_instance(instance): return (instance['host'] is None or instance['status'] in constants. INVALID_SHARE_INSTANCE_STATUSES_FOR_ACCESS_RULE_UPDATES) def allow_access(self, ctx, share, access_type, access_to, access_level=None, metadata=None): """Allow access to share.""" # Access rule validation: if access_level not in constants.ACCESS_LEVELS + (None, ): msg = _("Invalid share access level: %s.") % access_level raise exception.InvalidShareAccess(reason=msg) self._check_metadata_properties(metadata) access_exists = self.db.share_access_check_for_existing_access( ctx, share['id'], access_type, access_to) if access_exists: raise exception.ShareAccessExists(access_type=access_type, access=access_to) # Share instance validation if any(instance for instance in share.instances if self._is_invalid_share_instance(instance)): msg = _("New access rules cannot be applied while the share or " "any of its replicas or migration copies lacks a valid " "host or is in an invalid state.") raise exception.InvalidShare(message=msg) values = { 'share_id': share['id'], 'access_type': access_type, 'access_to': access_to, 'access_level': access_level, 'metadata': metadata, } access = self.db.share_access_create(ctx, values) for share_instance in share.instances: self.allow_access_to_instance(ctx, share_instance) return access def allow_access_to_instance(self, context, share_instance): self._conditionally_transition_share_instance_access_rules_status( context, share_instance) self.share_rpcapi.update_access(context, share_instance) def _conditionally_transition_share_instance_access_rules_status( self, context, share_instance): conditionally_change = { constants.STATUS_ACTIVE: constants.SHARE_INSTANCE_RULES_SYNCING, } self.access_helper.get_and_update_share_instance_access_rules_status( context, conditionally_change=conditionally_change, share_instance_id=share_instance['id']) def deny_access(self, ctx, share, access): """Deny access to share.""" if any(instance for instance in share.instances if self._is_invalid_share_instance(instance)): msg = _("Access rules cannot be denied while the share, " "any of its replicas or migration copies lacks a valid " "host or is in an invalid state.") raise exception.InvalidShare(message=msg) for share_instance in share.instances: self.deny_access_to_instance(ctx, share_instance, access) def deny_access_to_instance(self, context, share_instance, access): self._conditionally_transition_share_instance_access_rules_status( context, share_instance) updates = {'state': constants.ACCESS_STATE_QUEUED_TO_DENY} self.access_helper.get_and_update_share_instance_access_rule( context, access['id'], updates=updates, share_instance_id=share_instance['id']) self.share_rpcapi.update_access(context, share_instance) def access_get_all(self, context, share, filters=None): """Returns all access rules for share.""" policy.check_policy(context, 'share', 'access_get_all') rules = self.db.share_access_get_all_for_share( context, share['id'], filters=filters) return rules def access_get(self, context, access_id): """Returns access rule with the id.""" policy.check_policy(context, 'share', 'access_get') rule = self.db.share_access_get(context, access_id) return rule @policy.wrap_check_policy('share') def get_share_metadata(self, context, share): """Get all metadata associated with a share.""" rv = self.db.share_metadata_get(context, share['id']) return dict(rv.items()) @policy.wrap_check_policy('share') def delete_share_metadata(self, context, share, key): 
"""Delete the given metadata item from a share.""" self.db.share_metadata_delete(context, share['id'], key) def _check_is_share_busy(self, share): """Raises an exception if share is busy with an active task.""" if share.is_busy: msg = _("Share %(share_id)s is busy as part of an active " "task: %(task)s.") % { 'share_id': share['id'], 'task': share['task_state'] } raise exception.ShareBusyException(reason=msg) def _check_metadata_properties(self, metadata=None): if not metadata: metadata = {} for k, v in metadata.items(): if not k: msg = _("Metadata property key is blank.") LOG.warning(msg) raise exception.InvalidMetadata(message=msg) if len(k) > 255: msg = _("Metadata property key is " "greater than 255 characters.") LOG.warning(msg) raise exception.InvalidMetadataSize(message=msg) if not v: msg = _("Metadata property value is blank.") LOG.warning(msg) raise exception.InvalidMetadata(message=msg) if len(v) > 1023: msg = _("Metadata property value is " "greater than 1023 characters.") LOG.warning(msg) raise exception.InvalidMetadataSize(message=msg) def update_share_access_metadata(self, context, access_id, metadata): """Updates share access metadata.""" self._check_metadata_properties(metadata) return self.db.share_access_metadata_update( context, access_id, metadata) @policy.wrap_check_policy('share') def update_share_metadata(self, context, share, metadata, delete=False): """Updates or creates share metadata. If delete is True, metadata items that are not specified in the `metadata` argument will be deleted. """ orig_meta = self.get_share_metadata(context, share) if delete: _metadata = metadata else: _metadata = orig_meta.copy() _metadata.update(metadata) self._check_metadata_properties(_metadata) self.db.share_metadata_update(context, share['id'], _metadata, delete) return _metadata def get_share_network(self, context, share_net_id): return self.db.share_network_get(context, share_net_id) def extend(self, context, share, new_size): policy.check_policy(context, 'share', 'extend') if share['status'] != constants.STATUS_AVAILABLE: msg_params = { 'valid_status': constants.STATUS_AVAILABLE, 'share_id': share['id'], 'status': share['status'], } msg = _("Share %(share_id)s status must be '%(valid_status)s' " "to extend, but current status is: " "%(status)s.") % msg_params raise exception.InvalidShare(reason=msg) self._check_is_share_busy(share) size_increase = int(new_size) - share['size'] if size_increase <= 0: msg = (_("New size for extend must be greater " "than current size. (current: %(size)s, " "extended: %(new_size)s).") % {'new_size': new_size, 'size': share['size']}) raise exception.InvalidInput(reason=msg) replicas = self.db.share_replicas_get_all_by_share( context, share['id']) supports_replication = len(replicas) > 0 deltas = { 'project_id': share['project_id'], 'gigabytes': size_increase, 'user_id': share['user_id'], 'share_type_id': share['instance']['share_type_id'] } # NOTE(carloss): If the share type supports replication, we must get # all the replicas that pertain to the share and calculate the final # size (size to increase * amount of replicas), since all the replicas # are going to be extended when the driver sync them. 
if supports_replication: replica_gigs_to_increase = len(replicas) * size_increase deltas.update({'replica_gigabytes': replica_gigs_to_increase}) try: # we give the user_id of the share, to update the quota usage # for the user, who created the share, because on share delete # only this quota will be decreased reservations = QUOTAS.reserve(context, **deltas) except exception.OverQuota as exc: # Check if the exceeded quota was 'gigabytes' self._check_if_share_quotas_exceeded(context, exc, share['size'], operation='extend') # NOTE(carloss): Check if the exceeded quota is # 'replica_gigabytes'. If so the failure could be caused due to # lack of quotas to extend the share's replicas, then the # '_check_if_replica_quotas_exceeded' method can't be reused here # since the error message must be different from the default one. if supports_replication: overs = exc.kwargs['overs'] usages = exc.kwargs['usages'] quotas = exc.kwargs['quotas'] def _consumed(name): return (usages[name]['reserved'] + usages[name]['in_use']) if 'replica_gigabytes' in overs: LOG.warning("Replica gigabytes quota exceeded " "for %(s_pid)s, tried to extend " "%(s_size)sG share (%(d_consumed)dG of " "%(d_quota)dG already consumed).", { 's_pid': context.project_id, 's_size': share['size'], 'd_consumed': _consumed( 'replica_gigabytes'), 'd_quota': quotas['replica_gigabytes']}) msg = _("Failed while extending a share with replication " "support. There is no available quota to extend " "the share and its %(count)d replicas. Maximum " "number of allowed replica_gigabytes is " "exceeded.") % {'count': len(replicas)} raise exception.ShareReplicaSizeExceedsAvailableQuota( message=msg) self.update(context, share, {'status': constants.STATUS_EXTENDING}) self.share_rpcapi.extend_share(context, share, new_size, reservations) LOG.info("Extend share request issued successfully.", resource=share) def shrink(self, context, share, new_size): policy.check_policy(context, 'share', 'shrink') status = six.text_type(share['status']).lower() valid_statuses = (constants.STATUS_AVAILABLE, constants.STATUS_SHRINKING_POSSIBLE_DATA_LOSS_ERROR) if status not in valid_statuses: msg_params = { 'valid_status': ", ".join(valid_statuses), 'share_id': share['id'], 'status': status, } msg = _("Share %(share_id)s status must in (%(valid_status)s) " "to shrink, but current status is: " "%(status)s.") % msg_params raise exception.InvalidShare(reason=msg) self._check_is_share_busy(share) size_decrease = int(share['size']) - int(new_size) if size_decrease <= 0 or new_size <= 0: msg = (_("New size for shrink must be less " "than current size and greater than 0 (current: %(size)s," " new: %(new_size)s)") % {'new_size': new_size, 'size': share['size']}) raise exception.InvalidInput(reason=msg) self.update(context, share, {'status': constants.STATUS_SHRINKING}) self.share_rpcapi.shrink_share(context, share, new_size) LOG.info("Shrink share (id=%(id)s) request issued successfully." 
" New size: %(size)s", {'id': share['id'], 'size': new_size}) def snapshot_allow_access(self, context, snapshot, access_type, access_to): """Allow access to a share snapshot.""" access_exists = self.db.share_snapshot_check_for_existing_access( context, snapshot['id'], access_type, access_to) if access_exists: raise exception.ShareSnapshotAccessExists(access_type=access_type, access=access_to) values = { 'share_snapshot_id': snapshot['id'], 'access_type': access_type, 'access_to': access_to, } if any((instance['status'] != constants.STATUS_AVAILABLE) or (instance['share_instance']['host'] is None) for instance in snapshot.instances): msg = _("New access rules cannot be applied while the snapshot or " "any of its replicas or migration copies lacks a valid " "host or is not in %s state.") % constants.STATUS_AVAILABLE raise exception.InvalidShareSnapshotInstance(reason=msg) access = self.db.share_snapshot_access_create(context, values) for snapshot_instance in snapshot.instances: self.share_rpcapi.snapshot_update_access( context, snapshot_instance) return access def snapshot_deny_access(self, context, snapshot, access): """Deny access to a share snapshot.""" if any((instance['status'] != constants.STATUS_AVAILABLE) or (instance['share_instance']['host'] is None) for instance in snapshot.instances): msg = _("Access rules cannot be denied while the snapshot or " "any of its replicas or migration copies lacks a valid " "host or is not in %s state.") % constants.STATUS_AVAILABLE raise exception.InvalidShareSnapshotInstance(reason=msg) for snapshot_instance in snapshot.instances: rule = self.db.share_snapshot_instance_access_get( context, access['id'], snapshot_instance['id']) self.db.share_snapshot_instance_access_update( context, rule['access_id'], snapshot_instance['id'], {'state': constants.ACCESS_STATE_QUEUED_TO_DENY}) self.share_rpcapi.snapshot_update_access( context, snapshot_instance) def snapshot_access_get_all(self, context, snapshot): """Returns all access rules for share snapshot.""" rules = self.db.share_snapshot_access_get_all_for_share_snapshot( context, snapshot['id'], {}) return rules def snapshot_access_get(self, context, access_id): """Returns snapshot access rule with the id.""" rule = self.db.share_snapshot_access_get(context, access_id) return rule def snapshot_export_locations_get(self, context, snapshot): return self.db.share_snapshot_export_locations_get(context, snapshot) def snapshot_export_location_get(self, context, el_id): return self.db.share_snapshot_instance_export_location_get(context, el_id) def share_server_migration_get_destination(self, context, source_server_id, status=None): filters = {'source_share_server_id': source_server_id} if status: filters.update({'status': status}) dest_share_servers = self.db.share_server_get_all_with_filters( context, filters=filters) if not dest_share_servers: msg = _("A destination share server wasn't found for source " "share server %s.") % source_server_id raise exception.InvalidShareServer(reason=msg) if len(dest_share_servers) > 1: msg = _("More than one destination share server was found for " "source share server %s. 
Aborting...") % source_server_id raise exception.InvalidShareServer(reason=msg) return dest_share_servers[0] def get_share_server_migration_request_spec_dict( self, context, share_instances, snapshot_instances, **kwargs): """Returns request specs related to share server and all its shares.""" shares_total_size = sum([instance.get('size', 0) for instance in share_instances]) snapshots_total_size = sum([instance.get('size', 0) for instance in snapshot_instances]) shares_req_spec = [] for share_instance in share_instances: share_type_id = share_instance['share_type_id'] share_type = share_types.get_share_type(context, share_type_id) req_spec = self._get_request_spec_dict(share_instance, share_type, **kwargs) shares_req_spec.append(req_spec) server_request_spec = { 'shares_size': shares_total_size, 'snapshots_size': snapshots_total_size, 'shares_req_spec': shares_req_spec, } return server_request_spec def _migration_initial_checks(self, context, share_server, dest_host, new_share_network): shares = self.db.share_get_all_by_share_server( context, share_server['id']) if len(shares) == 0: msg = _("Share server %s does not have shares." % share_server['id']) raise exception.InvalidShareServer(reason=msg) # We only handle "active" share servers for now if share_server['status'] != constants.STATUS_ACTIVE: msg = _('Share server %(server_id)s status must be active, ' 'but current status is: %(server_status)s.') % { 'server_id': share_server['id'], 'server_status': share_server['status']} raise exception.InvalidShareServer(reason=msg) share_groups_related_to_share_server = ( self.db.share_group_get_all_by_share_server( context, share_server['id'])) if share_groups_related_to_share_server: msg = _("The share server %s can not be migrated because it is " "related to a share group.") % share_server['id'] raise exception.InvalidShareServer(reason=msg) # Same backend and same network, nothing changes src_backend = share_utils.extract_host(share_server['host'], level='backend_name') dest_backend = share_utils.extract_host(dest_host, level='backend_name') current_share_network_id = shares[0]['instance']['share_network_id'] if (src_backend == dest_backend and (new_share_network is None or new_share_network['id'] == current_share_network_id)): msg = _('There is no difference between source and destination ' 'backends and between source and destination share ' 'networks. Share server migration will not proceed.') raise exception.InvalidShareServer(reason=msg) filters = {'source_share_server_id': share_server['id'], 'status': constants.STATUS_SERVER_MIGRATING_TO} dest_share_servers = self.db.share_server_get_all_with_filters( context, filters=filters) if len(dest_share_servers): msg = _("There is at least one destination share server pointing " "to this source share server. 
Clean up your environment " "before starting a new migration.") raise exception.InvalidShareServer(reason=msg) dest_service_host = share_utils.extract_host(dest_host) # Make sure the host is in the list of available hosts utils.validate_service_host(context, dest_service_host) service = self.db.service_get_by_args( context, dest_service_host, 'manila-share') # Get all share types type_ids = set([share['instance']['share_type_id'] for share in shares]) types = [share_types.get_share_type(context, type_id) for type_id in type_ids] # Check if share type azs are supported by the destination host for share_type in types: azs = share_type['extra_specs'].get('availability_zones', '') if azs and service['availability_zone']['name'] not in azs: msg = _("Share server %(server)s cannot be migrated to host " "%(dest)s because the share type %(type)s is used by " "one of the shares, and this share type is not " "supported within the availability zone (%(az)s) that " "the host is in.") type_name = '%s' % (share_type['name'] or '') type_id = '(ID: %s)' % share_type['id'] payload = {'type': '%s%s' % (type_name, type_id), 'az': service['availability_zone']['name'], 'server': share_server['id'], 'dest': dest_host} raise exception.InvalidShareServer(reason=msg % payload) if new_share_network: new_share_network_id = new_share_network['id'] else: new_share_network_id = shares[0]['instance']['share_network_id'] # NOTE(carloss): check if the new or old share network has a subnet # that spans the availability zone of the destination host, otherwise # we should deny this operation. dest_az = self.db.availability_zone_get( context, service['availability_zone']['name']) compatible_subnet = ( self.db.share_network_subnet_get_by_availability_zone_id( context, new_share_network_id, dest_az['id'])) if not compatible_subnet: msg = _("The share network %(network)s does not have a subnet " "that spans the destination host availability zone.") payload = {'network': new_share_network_id} raise exception.InvalidShareServer(reason=msg % payload) # NOTE(carloss): Refreshing the list of shares since something could've # changed from the initial list. shares = self.db.share_get_all_by_share_server( context, share_server['id']) for share in shares: if share['status'] != constants.STATUS_AVAILABLE: msg = _('Share %(share_id)s status must be available, ' 'but current status is: %(share_status)s.') % { 'share_id': share['id'], 'share_status': share['status']} raise exception.InvalidShareServer(reason=msg) if share.has_replicas: msg = _('Share %s has replicas. Remove the replicas of all ' 'shares in the share server before attempting to ' 'migrate it.') % share['id'] LOG.error(msg) raise exception.InvalidShareServer(reason=msg) # NOTE(carloss): Not validating the flag preserve_snapshots at this # point, considering that even if the admin set the value to False, # the driver can still support preserving snapshots and the # snapshots would be copied anyway. So the share/manager will be # responsible for checking if the driver does not support snapshot # preservation, and if there are snapshots in the share server. 
share_snapshots = self.db.share_snapshot_get_all_for_share( context, share['id']) all_snapshots_are_available = all( [snapshot['status'] == constants.STATUS_AVAILABLE for snapshot in share_snapshots]) if not all_snapshots_are_available: msg = _( "All snapshots must have '%(status)s' status to be " "migrated by the driver along with share " "%(resource_id)s.") % { 'resource_id': share['id'], 'status': constants.STATUS_AVAILABLE, } LOG.error(msg) raise exception.InvalidShareServer(reason=msg) if share.get('share_group_id'): msg = _('Share %s is a member of a group. This operation is ' 'not currently supported for share servers that ' 'contain shares members of groups.') % share['id'] LOG.error(msg) raise exception.InvalidShareServer(reason=msg) share_instance = share['instance'] # Access rules status must not be error if share_instance['access_rules_status'] == constants.STATUS_ERROR: msg = _( 'Share instance %(instance_id)s access rules status must ' 'not be in %(error)s when attempting to start a share ' 'server migration.') % { 'instance_id': share_instance['id'], 'error': constants.STATUS_ERROR} raise exception.InvalidShareServer(reason=msg) try: self._check_is_share_busy(share) except exception.ShareBusyException as e: raise exception.InvalidShareServer(reason=e.msg) return shares, types, service, new_share_network_id def share_server_migration_check(self, context, share_server, dest_host, writable, nondisruptive, preserve_snapshots, new_share_network=None): """Migrates share server to a new host.""" shares, types, service, new_share_network_id = ( self._migration_initial_checks(context, share_server, dest_host, new_share_network)) # NOTE(dviroel): Service is up according to validations made on initial # checks result = self.share_rpcapi.share_server_migration_check( context, share_server['id'], dest_host, writable, nondisruptive, preserve_snapshots, new_share_network_id) return result def share_server_migration_start( self, context, share_server, dest_host, writable, nondisruptive, preserve_snapshots, new_share_network=None): """Migrates share server to a new host.""" shares, types, dest_service, new_share_network_id = ( self._migration_initial_checks(context, share_server, dest_host, new_share_network)) # Updates the share server status to migration starting self.db.share_server_update( context, share_server['id'], {'task_state': constants.TASK_STATE_MIGRATION_STARTING, 'status': constants.STATUS_SERVER_MIGRATING}) share_snapshots = [ self.db.share_snapshot_get_all_for_share(context, share['id']) for share in shares] snapshot_instance_ids = [] for snapshot_list in share_snapshots: for snapshot in snapshot_list: snapshot_instance_ids.append(snapshot['instance']['id']) share_instance_ids = [share['instance']['id'] for share in shares] # Updates all shares and snapshot instances self.db.share_and_snapshot_instances_status_update( context, {'status': constants.STATUS_SERVER_MIGRATING}, share_instance_ids=share_instance_ids, snapshot_instance_ids=snapshot_instance_ids, current_expected_status=constants.STATUS_AVAILABLE ) # NOTE(dviroel): Service is up according to validations made on initial # checks self.share_rpcapi.share_server_migration_start( context, share_server, dest_host, writable, nondisruptive, preserve_snapshots, new_share_network_id) def share_server_migration_complete(self, context, share_server): """Invokes 2nd phase of share server migration.""" if share_server['status'] != constants.STATUS_SERVER_MIGRATING: msg = _("Share server %s is not migrating") % share_server['id'] 
LOG.error(msg) raise exception.InvalidShareServer(reason=msg) if (share_server['task_state'] != constants.TASK_STATE_MIGRATION_DRIVER_PHASE1_DONE): msg = _("The first phase of migration has to finish to " "request the completion of server %s's " "migration.") % share_server['id'] LOG.error(msg) raise exception.InvalidShareServer(reason=msg) dest_share_server = self.share_server_migration_get_destination( context, share_server['id'], status=constants.STATUS_SERVER_MIGRATING_TO ) dest_host = share_utils.extract_host(dest_share_server['host']) utils.validate_service_host(context, dest_host) self.share_rpcapi.share_server_migration_complete( context, dest_share_server['host'], share_server, dest_share_server) return { 'destination_share_server_id': dest_share_server['id'] } def share_server_migration_cancel(self, context, share_server): """Attempts to cancel share server migration.""" if share_server['status'] != constants.STATUS_SERVER_MIGRATING: msg = _("Migration of share server %s cannot be cancelled because " "the provided share server is not being migrated.") LOG.error(msg) raise exception.InvalidShareServer(reason=msg) if share_server['task_state'] in ( constants.TASK_STATE_MIGRATION_DRIVER_PHASE1_DONE, constants.TASK_STATE_MIGRATION_DRIVER_IN_PROGRESS): dest_share_server = self.share_server_migration_get_destination( context, share_server['id'], status=constants.STATUS_SERVER_MIGRATING_TO ) dest_host = share_utils.extract_host(dest_share_server['host']) utils.validate_service_host(context, dest_host) self.share_rpcapi.share_server_migration_cancel( context, dest_share_server['host'], share_server, dest_share_server) else: msg = self._migration_validate_error_message( share_server, resource_type='share_server') if msg is None: msg = _("Migration of share server %s can be cancelled only " "after the driver already started the migration, or " "when the first phase of the migration gets " "completed.") % share_server['id'] LOG.error(msg) raise exception.InvalidShareServer(reason=msg) def share_server_migration_get_progress(self, context, src_share_server_id): """Retrieve migration progress for a given share server.""" try: share_server = self.db.share_server_get(context, src_share_server_id) except exception.ShareServerNotFound: msg = _('Share server %s was not found. We will search for a ' 'successful migration') % src_share_server_id LOG.debug(msg) # Search for a successful migration, raise an error if not found dest_share_server = self.share_server_migration_get_destination( context, src_share_server_id, status=constants.STATUS_ACTIVE ) return { 'total_progress': 100, 'destination_share_server_id': dest_share_server['id'], 'task_state': dest_share_server['task_state'], } # Source server still exists so it must be in 'server_migrating' status if share_server['status'] != constants.STATUS_SERVER_MIGRATING: msg = _("Migration progress of share server %s cannot be " "obtained. 
The provided share server is not being " "migrated.") % share_server['id'] LOG.error(msg) raise exception.InvalidShareServer(reason=msg) dest_share_server = self.share_server_migration_get_destination( context, share_server['id'], status=constants.STATUS_SERVER_MIGRATING_TO ) if (share_server['task_state'] == constants.TASK_STATE_MIGRATION_DRIVER_IN_PROGRESS): dest_host = share_utils.extract_host(dest_share_server['host']) utils.validate_service_host(context, dest_host) try: result = ( self.share_rpcapi.share_server_migration_get_progress( context, dest_share_server['host'], share_server, dest_share_server)) except Exception: msg = _("Failed to obtain migration progress of share " "server %s.") % share_server['id'] LOG.exception(msg) raise exception.ShareServerMigrationError(reason=msg) else: result = self._migration_get_progress_state(share_server) if not (result and result.get('total_progress') is not None): msg = self._migration_validate_error_message( share_server, resource_type='share_server') if msg is None: msg = _("Migration progress of share server %s cannot be " "obtained at this moment.") % share_server['id'] LOG.error(msg) raise exception.InvalidShareServer(reason=msg) result.update({ 'destination_share_server_id': dest_share_server['id'], 'task_state': dest_share_server['task_state'] }) return result
[ "oslo_log.log.getLogger", "manila.scheduler.rpcapi.SchedulerAPI", "manila.data.rpcapi.DataAPI", "manila.exception.InvalidShare", "manila.exception.InvalidShareAccess", "manila.share.share_types.parse_boolean_extra_spec", "manila.exception.ShareBusyException", "manila.share.utils.extract_host", "manila.exception.InvalidShareSnapshot", "manila.exception.ShareNotFound", "manila.exception.ShareReplicasLimitExceeded", "manila.i18n._", "manila.exception.ShareLimitExceeded", "manila.exception.ShareServerMigrationError", "oslo_utils.timeutils.utcnow", "manila.exception.InvalidHost", "manila.exception.InvalidMetadataSize", "manila.exception.InvalidInput", "manila.exception.ShareAccessExists", "manila.exception.ShareReplicaSizeExceedsAvailableQuota", "manila.exception.Conflict", "manila.exception.InvalidParameterValue", "manila.share.rpcapi.ShareAPI", "manila.exception.InvalidShareGroup", "manila.exception.ReplicationException", "six.text_type", "manila.exception.AdminRequired", "manila.utils.validate_service_host", "oslo_utils.excutils.save_and_reraise_exception", "manila.exception.InvalidShareServer", "manila.exception.ShareMigrationFailed", "manila.utils.service_is_up", "manila.exception.ShareSizeExceedsAvailableQuota", "manila.exception.SnapshotSizeExceedsAvailableQuota", "manila.exception.ShareMigrationError", "oslo_config.cfg.BoolOpt", "manila.exception.SnapshotLimitExceeded", "manila.exception.ShareServerInUse", "manila.share.share_types.get_share_type", "manila.utils.is_all_tenants", "manila.exception.ShareSnapshotAccessExists", "manila.exception.InvalidMetadata", "oslo_utils.strutils.bool_from_string", "manila.policy.wrap_check_policy", "manila.exception.InvalidShareInstance", "manila.share.access.ShareInstanceAccess", "manila.policy.check_policy", "manila.exception.InvalidShareSnapshotInstance", "manila.exception.ManageInvalidShareSnapshot" ]
[((2093, 2116), 'oslo_log.log.getLogger', 'log.getLogger', (['__name__'], {}), '(__name__)\n', (2106, 2116), False, 'from oslo_log import log\n'), ((1513, 1876), 'oslo_config.cfg.BoolOpt', 'cfg.BoolOpt', (['"""use_scheduler_creating_share_from_snapshot"""'], {'default': '(False)', 'help': '"""If set to False, then share creation from snapshot will be performed on the same host. If set to True, then scheduler will be used.When enabling this option make sure that filter CreateShareFromSnapshot is enabled and to have hosts reporting replication_domain option."""'}), "('use_scheduler_creating_share_from_snapshot', default=False,\n help=\n 'If set to False, then share creation from snapshot will be performed on the same host. If set to True, then scheduler will be used.When enabling this option make sure that filter CreateShareFromSnapshot is enabled and to have hosts reporting replication_domain option.'\n )\n", (1524, 1876), False, 'from oslo_config import cfg\n'), ((49017, 49050), 'manila.policy.wrap_check_policy', 'policy.wrap_check_policy', (['"""share"""'], {}), "('share')\n", (49041, 49050), False, 'from manila import policy\n'), ((76820, 76853), 'manila.policy.wrap_check_policy', 'policy.wrap_check_policy', (['"""share"""'], {}), "('share')\n", (76844, 76853), False, 'from manila import policy\n'), ((78026, 78059), 'manila.policy.wrap_check_policy', 'policy.wrap_check_policy', (['"""share"""'], {}), "('share')\n", (78050, 78059), False, 'from manila import policy\n'), ((78178, 78211), 'manila.policy.wrap_check_policy', 'policy.wrap_check_policy', (['"""share"""'], {}), "('share')\n", (78202, 78211), False, 'from manila import policy\n'), ((88506, 88539), 'manila.policy.wrap_check_policy', 'policy.wrap_check_policy', (['"""share"""'], {}), "('share')\n", (88530, 88539), False, 'from manila import policy\n'), ((88746, 88779), 'manila.policy.wrap_check_policy', 'policy.wrap_check_policy', (['"""share"""'], {}), "('share')\n", (88770, 88779), False, 'from manila import policy\n'), ((90621, 90654), 'manila.policy.wrap_check_policy', 'policy.wrap_check_policy', (['"""share"""'], {}), "('share')\n", (90645, 90654), False, 'from manila import policy\n'), ((2355, 2386), 'manila.scheduler.rpcapi.SchedulerAPI', 'scheduler_rpcapi.SchedulerAPI', ([], {}), '()\n', (2384, 2386), True, 'from manila.scheduler import rpcapi as scheduler_rpcapi\n'), ((2415, 2438), 'manila.share.rpcapi.ShareAPI', 'share_rpcapi.ShareAPI', ([], {}), '()\n', (2436, 2438), True, 'from manila.share import rpcapi as share_rpcapi\n'), ((2468, 2509), 'manila.share.access.ShareInstanceAccess', 'access.ShareInstanceAccess', (['self.db', 'None'], {}), '(self.db, None)\n', (2494, 2509), False, 'from manila.share import access\n'), ((26296, 26364), 'manila.share.share_types.get_share_type', 'share_types.get_share_type', (['context', "share.instance['share_type_id']"], {}), "(context, share.instance['share_type_id'])\n", (26322, 26364), False, 'from manila.share import share_types\n'), ((35685, 35735), 'manila.share.share_types.get_share_type', 'share_types.get_share_type', (['context', 'share_type_id'], {}), '(context, share_type_id)\n', (35711, 35735), False, 'from manila.share import share_types\n'), ((35752, 35883), 'manila.share.share_types.parse_boolean_extra_spec', 'share_types.parse_boolean_extra_spec', (['"""driver_handles_share_servers"""', "share_type['extra_specs']['driver_handles_share_servers']"], {}), "('driver_handles_share_servers',\n share_type['extra_specs']['driver_handles_share_servers'])\n", (35788, 35883), False, 
'from manila.share import share_types\n'), ((41284, 41333), 'manila.policy.check_policy', 'policy.check_policy', (['context', '"""share"""', '"""unmanage"""'], {}), "(context, 'share', 'unmanage')\n", (41303, 41333), False, 'from manila import policy\n'), ((50968, 51015), 'manila.policy.check_policy', 'policy.check_policy', (['context', '"""share"""', '"""delete"""'], {}), "(context, 'share', 'delete')\n", (50987, 51015), False, 'from manila import policy\n'), ((52494, 52556), 'manila.policy.check_policy', 'policy.check_policy', (['context', '"""share_server"""', '"""delete"""', 'server'], {}), "(context, 'share_server', 'delete', server)\n", (52513, 52556), False, 'from manila import policy\n'), ((55740, 55803), 'manila.policy.check_policy', 'policy.check_policy', (['context', '"""share"""', '"""create_snapshot"""', 'share'], {}), "(context, 'share', 'create_snapshot', share)\n", (55759, 55803), False, 'from manila import policy\n'), ((62759, 62794), 'manila.share.utils.extract_host', 'share_utils.extract_host', (['dest_host'], {}), '(dest_host)\n', (62783, 62794), True, 'from manila.share import utils as share_utils\n'), ((62867, 62919), 'manila.utils.validate_service_host', 'utils.validate_service_host', (['context', 'dest_host_host'], {}), '(context, dest_host_host)\n', (62894, 62919), False, 'from manila import utils\n'), ((63948, 63992), 'oslo_utils.strutils.bool_from_string', 'strutils.bool_from_string', (['dhss'], {'strict': '(True)'}), '(dhss, strict=True)\n', (63973, 63992), False, 'from oslo_utils import strutils\n'), ((78663, 78711), 'manila.policy.check_policy', 'policy.check_policy', (['context', '"""share"""', '"""get_all"""'], {}), "(context, 'share', 'get_all')\n", (78682, 78711), False, 'from manila import policy\n'), ((80711, 80760), 'oslo_utils.strutils.bool_from_string', 'strutils.bool_from_string', (['is_public'], {'strict': '(True)'}), '(is_public, strict=True)\n', (80736, 80760), False, 'from oslo_utils import strutils\n'), ((82301, 82363), 'manila.policy.check_policy', 'policy.check_policy', (['context', '"""share_snapshot"""', '"""get_snapshot"""'], {}), "(context, 'share_snapshot', 'get_snapshot')\n", (82320, 82363), False, 'from manila import policy\n'), ((82561, 82628), 'manila.policy.check_policy', 'policy.check_policy', (['context', '"""share_snapshot"""', '"""get_all_snapshots"""'], {}), "(context, 'share_snapshot', 'get_all_snapshots')\n", (82580, 82628), False, 'from manila import policy\n'), ((88082, 88137), 'manila.policy.check_policy', 'policy.check_policy', (['context', '"""share"""', '"""access_get_all"""'], {}), "(context, 'share', 'access_get_all')\n", (88101, 88137), False, 'from manila import policy\n'), ((88368, 88419), 'manila.policy.check_policy', 'policy.check_policy', (['context', '"""share"""', '"""access_get"""'], {}), "(context, 'share', 'access_get')\n", (88387, 88419), False, 'from manila import policy\n'), ((91488, 91535), 'manila.policy.check_policy', 'policy.check_policy', (['context', '"""share"""', '"""extend"""'], {}), "(context, 'share', 'extend')\n", (91507, 91535), False, 'from manila import policy\n'), ((95923, 95970), 'manila.policy.check_policy', 'policy.check_policy', (['context', '"""share"""', '"""shrink"""'], {}), "(context, 'share', 'shrink')\n", (95942, 95970), False, 'from manila import policy\n'), ((104025, 104093), 'manila.share.utils.extract_host', 'share_utils.extract_host', (["share_server['host']"], {'level': '"""backend_name"""'}), "(share_server['host'], level='backend_name')\n", (104049, 104093), True, 'from 
manila.share import utils as share_utils\n'), ((104164, 104221), 'manila.share.utils.extract_host', 'share_utils.extract_host', (['dest_host'], {'level': '"""backend_name"""'}), "(dest_host, level='backend_name')\n", (104188, 104221), True, 'from manila.share import utils as share_utils\n'), ((105372, 105407), 'manila.share.utils.extract_host', 'share_utils.extract_host', (['dest_host'], {}), '(dest_host)\n', (105396, 105407), True, 'from manila.share import utils as share_utils\n'), ((105479, 105534), 'manila.utils.validate_service_host', 'utils.validate_service_host', (['context', 'dest_service_host'], {}), '(context, dest_service_host)\n', (105506, 105534), False, 'from manila import utils\n'), ((114817, 114868), 'manila.share.utils.extract_host', 'share_utils.extract_host', (["dest_share_server['host']"], {}), "(dest_share_server['host'])\n", (114841, 114868), True, 'from manila.share import utils as share_utils\n'), ((114877, 114924), 'manila.utils.validate_service_host', 'utils.validate_service_host', (['context', 'dest_host'], {}), '(context, dest_host)\n', (114904, 114924), False, 'from manila import utils\n'), ((3968, 4010), 'manila.exception.ShareSizeExceedsAvailableQuota', 'exception.ShareSizeExceedsAvailableQuota', ([], {}), '()\n', (4008, 4010), False, 'from manila import exception\n'), ((5723, 5779), 'manila.exception.ShareReplicasLimitExceeded', 'exception.ShareReplicasLimitExceeded', ([], {}), '(**exception_kwargs)\n', (5759, 5779), False, 'from manila import exception\n'), ((7994, 8028), 'manila.exception.InvalidInput', 'exception.InvalidInput', ([], {'reason': 'msg'}), '(reason=msg)\n', (8016, 8028), False, 'from manila import exception\n'), ((8213, 8247), 'manila.exception.InvalidInput', 'exception.InvalidInput', ([], {'reason': 'msg'}), '(reason=msg)\n', (8235, 8247), False, 'from manila import exception\n'), ((10700, 10734), 'manila.exception.InvalidInput', 'exception.InvalidInput', ([], {'reason': 'msg'}), '(reason=msg)\n', (10722, 10734), False, 'from manila import exception\n'), ((25624, 25668), 'manila.i18n._', '_', (['"""Replication not supported for share %s."""'], {}), "('Replication not supported for share %s.')\n", (25625, 25668), False, 'from manila.i18n import _\n'), ((25687, 25736), 'manila.exception.InvalidShare', 'exception.InvalidShare', ([], {'message': "(msg % share['id'])"}), "(message=msg % share['id'])\n", (25709, 25736), False, 'from manila import exception\n'), ((25796, 25849), 'manila.i18n._', '_', (['"""Replication not supported for shares in a group."""'], {}), "('Replication not supported for shares in a group.')\n", (25797, 25849), False, 'from manila.i18n import _\n'), ((25868, 25903), 'manila.exception.InvalidShare', 'exception.InvalidShare', ([], {'message': 'msg'}), '(message=msg)\n', (25890, 25903), False, 'from manila import exception\n'), ((26109, 26175), 'manila.i18n._', '_', (['"""Share %s does not have any active replica in available state."""'], {}), "('Share %s does not have any active replica in available state.')\n", (26110, 26175), False, 'from manila.i18n import _\n'), ((26217, 26273), 'manila.exception.ReplicationException', 'exception.ReplicationException', ([], {'reason': "(msg % share['id'])"}), "(reason=msg % share['id'])\n", (26247, 26273), False, 'from manila import exception\n'), ((26634, 26766), 'manila.i18n._', '_', (['"""Share replica cannot be created since the share type %(type)s is not supported within the availability zone chosen %(az)s."""'], {}), "('Share replica cannot be created since the share type %(type)s 
is not supported within the availability zone chosen %(az)s.'\n )\n", (26635, 26766), False, 'from manila.i18n import _\n'), ((27046, 27091), 'manila.exception.InvalidShare', 'exception.InvalidShare', ([], {'message': '(msg % payload)'}), '(message=msg % payload)\n', (27068, 27091), False, 'from manila import exception\n'), ((32155, 32194), 'manila.i18n._', '_', (['"""Cannot delete last active replica."""'], {}), "('Cannot delete last active replica.')\n", (32156, 32194), False, 'from manila.i18n import _\n'), ((32213, 32255), 'manila.exception.ReplicationException', 'exception.ReplicationException', ([], {'reason': 'msg'}), '(reason=msg)\n', (32243, 32255), False, 'from manila import exception\n'), ((33449, 33520), 'manila.i18n._', '_', (['"""Replica %(replica_id)s must be in %(status)s state to be promoted."""'], {}), "('Replica %(replica_id)s must be in %(status)s state to be promoted.')\n", (33450, 33520), False, 'from manila.i18n import _\n'), ((33562, 33685), 'manila.exception.ReplicationException', 'exception.ReplicationException', ([], {'reason': "(msg % {'replica_id': share_replica['id'], 'status': constants.\n STATUS_AVAILABLE})"}), "(reason=msg % {'replica_id': share_replica[\n 'id'], 'status': constants.STATUS_AVAILABLE})\n", (33592, 33685), False, 'from manila import exception\n'), ((33967, 34056), 'manila.i18n._', '_', (['"""Promoting a replica with \'replica_state\': %s requires administrator privileges."""'], {}), '("Promoting a replica with \'replica_state\': %s requires administrator privileges."\n )\n', (33968, 34056), False, 'from manila.i18n import _\n'), ((34093, 34145), 'manila.exception.AdminRequired', 'exception.AdminRequired', ([], {'message': '(msg % replica_state)'}), '(message=msg % replica_state)\n', (34116, 34145), False, 'from manila import exception\n'), ((34568, 34614), 'manila.i18n._', '_', (['"""Share replica does not have a valid host."""'], {}), "('Share replica does not have a valid host.')\n", (34569, 34614), False, 'from manila.i18n import _\n'), ((34633, 34666), 'manila.exception.InvalidHost', 'exception.InvalidHost', ([], {'reason': 'msg'}), '(reason=msg)\n', (34654, 34666), False, 'from manila import exception\n'), ((35498, 35557), 'manila.i18n._', '_', (['"""A share already exists with the export path specified."""'], {}), "('A share already exists with the export path specified.')\n", (35499, 35557), False, 'from manila.i18n import _\n'), ((35576, 35610), 'manila.exception.InvalidShare', 'exception.InvalidShare', ([], {'reason': 'msg'}), '(reason=msg)\n', (35598, 35610), False, 'from manila import exception\n'), ((35965, 36111), 'manila.i18n._', '_', (['"""Share Server ID parameter is required when managing a share using a share type with driver_handles_share_servers extra-spec set to True."""'], {}), "('Share Server ID parameter is required when managing a share using a share type with driver_handles_share_servers extra-spec set to True.'\n )\n", (35966, 36111), False, 'from manila.i18n import _\n'), ((36171, 36205), 'manila.exception.InvalidInput', 'exception.InvalidInput', ([], {'reason': 'msg'}), '(reason=msg)\n', (36193, 36205), False, 'from manila import exception\n'), ((36265, 36416), 'manila.i18n._', '_', (['"""Share Server ID parameter is not expected when managing a share using a share type with driver_handles_share_servers extra-spec set to False."""'], {}), "('Share Server ID parameter is not expected when managing a share using a share type with driver_handles_share_servers extra-spec set to False.'\n )\n", (36266, 36416), False, 
'from manila.i18n import _\n'), ((36476, 36510), 'manila.exception.InvalidInput', 'exception.InvalidInput', ([], {'reason': 'msg'}), '(reason=msg)\n', (36498, 36510), False, 'from manila import exception\n'), ((41479, 41497), 'oslo_utils.timeutils.utcnow', 'timeutils.utcnow', ([], {}), '()\n', (41495, 41497), False, 'from oslo_utils import timeutils\n'), ((42352, 42386), 'manila.exception.InvalidShare', 'exception.InvalidShare', ([], {'reason': 'msg'}), '(reason=msg)\n', (42374, 42386), False, 'from manila import exception\n'), ((43911, 43929), 'oslo_utils.timeutils.utcnow', 'timeutils.utcnow', ([], {}), '()\n', (43927, 43929), False, 'from oslo_utils import timeutils\n'), ((47551, 47606), 'manila.i18n._', '_', (['"""Share %s has no active replica in available state."""'], {}), "('Share %s has no active replica in available state.')\n", (47552, 47606), False, 'from manila.i18n import _\n'), ((47625, 47681), 'manila.exception.ReplicationException', 'exception.ReplicationException', ([], {'reason': "(msg % share['id'])"}), "(reason=msg % share['id'])\n", (47655, 47681), False, 'from manila import exception\n'), ((48195, 48281), 'manila.i18n._', '_', (['"""Share %(share)s has no snapshot %(snap)s associated with its active replica."""'], {}), "('Share %(share)s has no snapshot %(snap)s associated with its active replica.'\n )\n", (48196, 48281), False, 'from manila.i18n import _\n'), ((48388, 48441), 'manila.exception.ReplicationException', 'exception.ReplicationException', ([], {'reason': '(msg % msg_args)'}), '(reason=msg % msg_args)\n', (48418, 48441), False, 'from manila import exception\n'), ((49515, 49549), 'manila.exception.InvalidShare', 'exception.InvalidShare', ([], {'reason': 'msg'}), '(reason=msg)\n', (49537, 49549), False, 'from manila import exception\n'), ((49866, 49893), 'manila.exception.Conflict', 'exception.Conflict', ([], {'err': 'msg'}), '(err=msg)\n', (49884, 49893), False, 'from manila import exception\n'), ((50100, 50134), 'manila.exception.InvalidShare', 'exception.InvalidShare', ([], {'reason': 'msg'}), '(reason=msg)\n', (50122, 50134), False, 'from manila import exception\n'), ((50508, 50542), 'manila.exception.InvalidShare', 'exception.InvalidShare', ([], {'reason': 'msg'}), '(reason=msg)\n', (50530, 50542), False, 'from manila import exception\n'), ((51334, 51376), 'manila.exception.InvalidShareInstance', 'exception.InvalidShareInstance', ([], {'reason': 'msg'}), '(reason=msg)\n', (51364, 51376), False, 'from manila import exception\n'), ((52748, 52804), 'manila.exception.ShareServerInUse', 'exception.ShareServerInUse', ([], {'share_server_id': "server['id']"}), "(share_server_id=server['id'])\n", (52774, 52804), False, 'from manila import exception\n'), ((53071, 53127), 'manila.exception.ShareServerInUse', 'exception.ShareServerInUse', ([], {'share_server_id': "server['id']"}), "(share_server_id=server['id'])\n", (53097, 53127), False, 'from manila import exception\n'), ((54093, 54127), 'manila.exception.InvalidInput', 'exception.InvalidInput', ([], {'reason': 'msg'}), '(reason=msg)\n', (54115, 54127), False, 'from manila import exception\n'), ((54862, 54924), 'manila.exception.ShareServerInUse', 'exception.ShareServerInUse', ([], {'share_server_id': "share_server['id']"}), "(share_server_id=share_server['id'])\n", (54888, 54924), False, 'from manila import exception\n'), ((55220, 55282), 'manila.exception.ShareServerInUse', 'exception.ShareServerInUse', ([], {'share_server_id': "share_server['id']"}), "(share_server_id=share_server['id'])\n", (55246, 55282), 
False, 'from manila import exception\n'), ((55403, 55421), 'oslo_utils.timeutils.utcnow', 'timeutils.utcnow', ([], {}), '()\n', (55419, 55421), False, 'from oslo_utils import timeutils\n'), ((56007, 56041), 'manila.exception.InvalidShare', 'exception.InvalidShare', ([], {'reason': 'msg'}), '(reason=msg)\n', (56029, 56041), False, 'from manila import exception\n'), ((60107, 60308), 'manila.i18n._', '_', (['"""Invalid parameter combination. Cannot set parameters "nondisruptive", "writable", "preserve_snapshots" or "preserve_metadata" to True when enabling the "force_host_assisted_migration" option."""'], {}), '(\'Invalid parameter combination. Cannot set parameters "nondisruptive", "writable", "preserve_snapshots" or "preserve_metadata" to True when enabling the "force_host_assisted_migration" option.\'\n )\n', (60108, 60308), False, 'from manila.i18n import _\n'), ((60418, 60452), 'manila.exception.InvalidInput', 'exception.InvalidInput', ([], {'reason': 'msg'}), '(reason=msg)\n', (60440, 60452), False, 'from manila import exception\n'), ((60852, 60879), 'manila.exception.Conflict', 'exception.Conflict', ([], {'err': 'msg'}), '(err=msg)\n', (60870, 60879), False, 'from manila import exception\n'), ((61260, 61294), 'manila.exception.InvalidShare', 'exception.InvalidShare', ([], {'reason': 'msg'}), '(reason=msg)\n', (61282, 61294), False, 'from manila import exception\n'), ((61697, 61731), 'manila.exception.InvalidShare', 'exception.InvalidShare', ([], {'reason': 'msg'}), '(reason=msg)\n', (61719, 61731), False, 'from manila import exception\n'), ((62164, 62198), 'manila.exception.InvalidShare', 'exception.InvalidShare', ([], {'reason': 'msg'}), '(reason=msg)\n', (62186, 62198), False, 'from manila import exception\n'), ((63137, 63181), 'oslo_utils.strutils.bool_from_string', 'strutils.bool_from_string', (['dhss'], {'strict': '(True)'}), '(dhss, strict=True)\n', (63162, 63181), False, 'from oslo_utils import strutils\n'), ((66053, 66216), 'manila.i18n._', '_', (['"""Share %(shr)s cannot be migrated to host %(dest)s because share type %(type)s is not supported within the availability zone (%(az)s) that the host is in."""'], {}), "('Share %(shr)s cannot be migrated to host %(dest)s because share type %(type)s is not supported within the availability zone (%(az)s) that the host is in.'\n )\n", (66054, 66216), False, 'from manila.i18n import _\n'), ((66600, 66644), 'manila.exception.InvalidShare', 'exception.InvalidShare', ([], {'reason': '(msg % payload)'}), '(reason=msg % payload)\n', (66622, 66644), False, 'from manila import exception\n'), ((67912, 67946), 'manila.exception.InvalidShare', 'exception.InvalidShare', ([], {'reason': 'msg'}), '(reason=msg)\n', (67934, 67946), False, 'from manila import exception\n'), ((69182, 69224), 'manila.exception.ShareMigrationFailed', 'exception.ShareMigrationFailed', ([], {'reason': 'msg'}), '(reason=msg)\n', (69212, 69224), False, 'from manila import exception\n'), ((69709, 69761), 'manila.share.utils.extract_host', 'share_utils.extract_host', (["share_instance_ref['host']"], {}), "(share_instance_ref['host'])\n", (69733, 69761), True, 'from manila.share import utils as share_utils\n'), ((69885, 69913), 'manila.utils.service_is_up', 'utils.service_is_up', (['service'], {}), '(service)\n', (69904, 69913), False, 'from manila import utils\n'), ((71930, 71964), 'manila.exception.InvalidShare', 'exception.InvalidShare', ([], {'reason': 'msg'}), '(reason=msg)\n', (71952, 71964), False, 'from manila import exception\n'), ((75252, 75304), 
'manila.share.utils.extract_host', 'share_utils.extract_host', (["share_instance_ref['host']"], {}), "(share_instance_ref['host'])\n", (75276, 75304), True, 'from manila.share import utils as share_utils\n'), ((75428, 75456), 'manila.utils.service_is_up', 'utils.service_is_up', (['service'], {}), '(service)\n', (75447, 75456), False, 'from manila import utils\n'), ((76779, 76813), 'manila.exception.InvalidShare', 'exception.InvalidShare', ([], {'reason': 'msg'}), '(reason=msg)\n', (76801, 76813), False, 'from manila import exception\n'), ((77191, 77233), 'manila.exception.InvalidShareSnapshot', 'exception.InvalidShareSnapshot', ([], {'reason': 'msg'}), '(reason=msg)\n', (77221, 77233), False, 'from manila import exception\n'), ((78481, 78529), 'manila.policy.check_policy', 'policy.check_policy', (['context', '"""share"""', '"""get"""', 'rv'], {}), "(context, 'share', 'get', rv)\n", (78500, 78529), False, 'from manila import policy\n'), ((79784, 79847), 'manila.policy.check_policy', 'policy.check_policy', (['context', '"""share_types_extra_spec"""', '"""index"""'], {}), "(context, 'share_types_extra_spec', 'index')\n", (79803, 79847), False, 'from manila import policy\n'), ((80366, 80400), 'manila.exception.InvalidInput', 'exception.InvalidInput', ([], {'reason': 'msg'}), '(reason=msg)\n', (80388, 80400), False, 'from manila import exception\n'), ((80599, 80633), 'manila.exception.InvalidInput', 'exception.InvalidInput', ([], {'reason': 'msg'}), '(reason=msg)\n', (80621, 80633), False, 'from manila import exception\n'), ((80842, 80895), 'manila.policy.check_policy', 'policy.check_policy', (['context', '"""share"""', '"""list_by_host"""'], {}), "(context, 'share', 'list_by_host')\n", (80861, 80895), False, 'from manila import policy\n'), ((81017, 81081), 'manila.policy.check_policy', 'policy.check_policy', (['context', '"""share"""', '"""list_by_share_server_id"""'], {}), "(context, 'share', 'list_by_share_server_id')\n", (81036, 81081), False, 'from manila import policy\n'), ((85012, 85052), 'manila.exception.InvalidShareAccess', 'exception.InvalidShareAccess', ([], {'reason': 'msg'}), '(reason=msg)\n', (85040, 85052), False, 'from manila import exception\n'), ((85275, 85345), 'manila.exception.ShareAccessExists', 'exception.ShareAccessExists', ([], {'access_type': 'access_type', 'access': 'access_to'}), '(access_type=access_type, access=access_to)\n', (85302, 85345), False, 'from manila import exception\n'), ((85565, 85714), 'manila.i18n._', '_', (['"""New access rules cannot be applied while the share or any of its replicas or migration copies lacks a valid host or is in an invalid state."""'], {}), "('New access rules cannot be applied while the share or any of its replicas or migration copies lacks a valid host or is in an invalid state.'\n )\n", (85566, 85714), False, 'from manila.i18n import _\n'), ((85774, 85809), 'manila.exception.InvalidShare', 'exception.InvalidShare', ([], {'message': 'msg'}), '(message=msg)\n', (85796, 85809), False, 'from manila import exception\n'), ((87115, 87257), 'manila.i18n._', '_', (['"""Access rules cannot be denied while the share, any of its replicas or migration copies lacks a valid host or is in an invalid state."""'], {}), "('Access rules cannot be denied while the share, any of its replicas or migration copies lacks a valid host or is in an invalid state.'\n )\n", (87116, 87257), False, 'from manila.i18n import _\n'), ((87317, 87352), 'manila.exception.InvalidShare', 'exception.InvalidShare', ([], {'message': 'msg'}), '(message=msg)\n', (87339, 
87352), False, 'from manila import exception\n'), ((89335, 89375), 'manila.exception.ShareBusyException', 'exception.ShareBusyException', ([], {'reason': 'msg'}), '(reason=msg)\n', (89363, 89375), False, 'from manila import exception\n'), ((91979, 92013), 'manila.exception.InvalidShare', 'exception.InvalidShare', ([], {'reason': 'msg'}), '(reason=msg)\n', (92001, 92013), False, 'from manila import exception\n'), ((92433, 92467), 'manila.exception.InvalidInput', 'exception.InvalidInput', ([], {'reason': 'msg'}), '(reason=msg)\n', (92455, 92467), False, 'from manila import exception\n'), ((96577, 96611), 'manila.exception.InvalidShare', 'exception.InvalidShare', ([], {'reason': 'msg'}), '(reason=msg)\n', (96599, 96611), False, 'from manila import exception\n'), ((97057, 97091), 'manila.exception.InvalidInput', 'exception.InvalidInput', ([], {'reason': 'msg'}), '(reason=msg)\n', (97079, 97091), False, 'from manila import exception\n'), ((97736, 97814), 'manila.exception.ShareSnapshotAccessExists', 'exception.ShareSnapshotAccessExists', ([], {'access_type': 'access_type', 'access': 'access_to'}), '(access_type=access_type, access=access_to)\n', (97771, 97814), False, 'from manila import exception\n'), ((98462, 98512), 'manila.exception.InvalidShareSnapshotInstance', 'exception.InvalidShareSnapshotInstance', ([], {'reason': 'msg'}), '(reason=msg)\n', (98500, 98512), False, 'from manila import exception\n'), ((99303, 99353), 'manila.exception.InvalidShareSnapshotInstance', 'exception.InvalidShareSnapshotInstance', ([], {'reason': 'msg'}), '(reason=msg)\n', (99341, 99353), False, 'from manila import exception\n'), ((101204, 101244), 'manila.exception.InvalidShareServer', 'exception.InvalidShareServer', ([], {'reason': 'msg'}), '(reason=msg)\n', (101232, 101244), False, 'from manila import exception\n'), ((101457, 101497), 'manila.exception.InvalidShareServer', 'exception.InvalidShareServer', ([], {'reason': 'msg'}), '(reason=msg)\n', (101485, 101497), False, 'from manila import exception\n'), ((102166, 102216), 'manila.share.share_types.get_share_type', 'share_types.get_share_type', (['context', 'share_type_id'], {}), '(context, share_type_id)\n', (102192, 102216), False, 'from manila.share import share_types\n'), ((102951, 103014), 'manila.i18n._', '_', (["('Share server %s does not have shares.' % share_server['id'])"], {}), "('Share server %s does not have shares.' % share_server['id'])\n", (102952, 103014), False, 'from manila.i18n import _\n'), ((103053, 103093), 'manila.exception.InvalidShareServer', 'exception.InvalidShareServer', ([], {'reason': 'msg'}), '(reason=msg)\n', (103081, 103093), False, 'from manila import exception\n'), ((103495, 103535), 'manila.exception.InvalidShareServer', 'exception.InvalidShareServer', ([], {'reason': 'msg'}), '(reason=msg)\n', (103523, 103535), False, 'from manila import exception\n'), ((103904, 103944), 'manila.exception.InvalidShareServer', 'exception.InvalidShareServer', ([], {'reason': 'msg'}), '(reason=msg)\n', (103932, 103944), False, 'from manila import exception\n'), ((104527, 104691), 'manila.i18n._', '_', (['"""There is no difference between source and destination backends and between source and destination share networks. Share server migration will not proceed."""'], {}), "('There is no difference between source and destination backends and between source and destination share networks. 
Share server migration will not proceed.'\n )\n", (104528, 104691), False, 'from manila.i18n import _\n'), ((104751, 104791), 'manila.exception.InvalidShareServer', 'exception.InvalidShareServer', ([], {'reason': 'msg'}), '(reason=msg)\n', (104779, 104791), False, 'from manila import exception\n'), ((105089, 105242), 'manila.i18n._', '_', (['"""There is at least one destination share server pointing to this source share server. Clean up your environment before starting a new migration."""'], {}), "('There is at least one destination share server pointing to this source share server. Clean up your environment before starting a new migration.'\n )\n", (105090, 105242), False, 'from manila.i18n import _\n'), ((105302, 105342), 'manila.exception.InvalidShareServer', 'exception.InvalidShareServer', ([], {'reason': 'msg'}), '(reason=msg)\n', (105330, 105342), False, 'from manila import exception\n'), ((105792, 105836), 'manila.share.share_types.get_share_type', 'share_types.get_share_type', (['context', 'type_id'], {}), '(context, type_id)\n', (105818, 105836), False, 'from manila.share import share_types\n'), ((107615, 107728), 'manila.i18n._', '_', (['"""The share network %(network)s does not have a subnet that spans the destination host availability zone."""'], {}), "('The share network %(network)s does not have a subnet that spans the destination host availability zone.'\n )\n", (107616, 107728), False, 'from manila.i18n import _\n'), ((107821, 107871), 'manila.exception.InvalidShareServer', 'exception.InvalidShareServer', ([], {'reason': '(msg % payload)'}), '(reason=msg % payload)\n', (107849, 107871), False, 'from manila import exception\n'), ((114193, 114233), 'manila.exception.InvalidShareServer', 'exception.InvalidShareServer', ([], {'reason': 'msg'}), '(reason=msg)\n', (114221, 114233), False, 'from manila import exception\n'), ((114574, 114614), 'manila.exception.InvalidShareServer', 'exception.InvalidShareServer', ([], {'reason': 'msg'}), '(reason=msg)\n', (114602, 114614), False, 'from manila import exception\n'), ((115389, 115504), 'manila.i18n._', '_', (['"""Migration of share server %s cannot be cancelled because the provided share server is not being migrated."""'], {}), "('Migration of share server %s cannot be cancelled because the provided share server is not being migrated.'\n )\n", (115390, 115504), False, 'from manila.i18n import _\n'), ((115568, 115608), 'manila.exception.InvalidShareServer', 'exception.InvalidShareServer', ([], {'reason': 'msg'}), '(reason=msg)\n', (115596, 115608), False, 'from manila import exception\n'), ((116010, 116061), 'manila.share.utils.extract_host', 'share_utils.extract_host', (["dest_share_server['host']"], {}), "(dest_share_server['host'])\n", (116034, 116061), True, 'from manila.share import utils as share_utils\n'), ((116074, 116121), 'manila.utils.validate_service_host', 'utils.validate_service_host', (['context', 'dest_host'], {}), '(context, dest_host)\n', (116101, 116121), False, 'from manila import utils\n'), ((116775, 116815), 'manila.exception.InvalidShareServer', 'exception.InvalidShareServer', ([], {'reason': 'msg'}), '(reason=msg)\n', (116803, 116815), False, 'from manila import exception\n'), ((118235, 118275), 'manila.exception.InvalidShareServer', 'exception.InvalidShareServer', ([], {'reason': 'msg'}), '(reason=msg)\n', (118263, 118275), False, 'from manila import exception\n'), ((118593, 118644), 'manila.share.utils.extract_host', 'share_utils.extract_host', (["dest_share_server['host']"], {}), 
"(dest_share_server['host'])\n", (118617, 118644), True, 'from manila.share import utils as share_utils\n'), ((118657, 118704), 'manila.utils.validate_service_host', 'utils.validate_service_host', (['context', 'dest_host'], {}), '(context, dest_host)\n', (118684, 118704), False, 'from manila import utils\n'), ((119704, 119744), 'manila.exception.InvalidShareServer', 'exception.InvalidShareServer', ([], {'reason': 'msg'}), '(reason=msg)\n', (119732, 119744), False, 'from manila import exception\n'), ((4445, 4499), 'manila.exception.ShareLimitExceeded', 'exception.ShareLimitExceeded', ([], {'allowed': "quotas['shares']"}), "(allowed=quotas['shares'])\n", (4473, 4499), False, 'from manila import exception\n'), ((5486, 5605), 'manila.i18n._', '_', (['"""Failed while creating a share with replication support. Maximum number of allowed share-replicas is exceeded."""'], {}), "('Failed while creating a share with replication support. Maximum number of allowed share-replicas is exceeded.'\n )\n", (5487, 5605), False, 'from manila.i18n import _\n'), ((6692, 6759), 'manila.exception.ShareReplicaSizeExceedsAvailableQuota', 'exception.ShareReplicaSizeExceedsAvailableQuota', ([], {}), '(**exception_kwargs)\n', (6739, 6759), False, 'from manila import exception\n'), ((7457, 7499), 'manila.exception.InvalidShareSnapshot', 'exception.InvalidShareSnapshot', ([], {'reason': 'msg'}), '(reason=msg)\n', (7487, 7499), False, 'from manila import exception\n'), ((7890, 7948), 'manila.i18n._', '_', (['"""Share size \'%s\' must be an integer and greater than 0"""'], {}), '("Share size \'%s\' must be an integer and greater than 0")\n', (7891, 7948), False, 'from manila.i18n import _\n'), ((8098, 8162), 'manila.i18n._', '_', (['"""Share size \'%s\' must be equal or greater than snapshot size"""'], {}), '("Share size \'%s\' must be equal or greater than snapshot size")\n', (8099, 8162), False, 'from manila.i18n import _\n'), ((9475, 9525), 'manila.share.share_types.get_share_type', 'share_types.get_share_type', (['context', 'share_type_id'], {}), '(context, share_type_id)\n', (9501, 9525), False, 'from manila.share import share_types\n'), ((10388, 10517), 'manila.i18n._', '_', (['"""Invalid share protocol provided: %(provided)s. It is either disabled or unsupported. Available protocols: %(supported)s"""'], {}), "('Invalid share protocol provided: %(provided)s. It is either disabled or unsupported. Available protocols: %(supported)s'\n )\n", (10389, 10517), False, 'from manila.i18n import _\n'), ((12251, 12291), 'manila.exception.InvalidShareGroup', 'exception.InvalidShareGroup', ([], {'message': 'msg'}), '(message=msg)\n', (12278, 12291), False, 'from manila import exception\n'), ((13357, 13393), 'manila.exception.InvalidParameterValue', 'exception.InvalidParameterValue', (['msg'], {}), '(msg)\n', (13388, 13393), False, 'from manila import exception\n'), ((14697, 14892), 'manila.i18n._', '_', (['"""The share network is not supported within any requested availability zone. Check the share type\'s \'availability_zones\' extra-spec and the availability zones of the share network subnets"""'], {}), '("The share network is not supported within any requested availability zone. 
Check the share type\'s \'availability_zones\' extra-spec and the availability zones of the share network subnets"\n )\n', (14698, 14892), False, 'from manila.i18n import _\n'), ((15000, 15035), 'manila.exception.InvalidInput', 'exception.InvalidInput', ([], {'message': 'msg'}), '(message=msg)\n', (15022, 15035), False, 'from manila import exception\n'), ((22780, 22798), 'oslo_utils.timeutils.utcnow', 'timeutils.utcnow', ([], {}), '()\n', (22796, 22798), False, 'from oslo_utils import timeutils\n'), ((32500, 32518), 'oslo_utils.timeutils.utcnow', 'timeutils.utcnow', ([], {}), '()\n', (32516, 32518), False, 'from oslo_utils import timeutils\n'), ((36935, 36980), 'manila.i18n._', '_', (['"""The provided share server is not active."""'], {}), "('The provided share server is not active.')\n", (36936, 36980), False, 'from manila.i18n import _\n'), ((37003, 37043), 'manila.exception.InvalidShareServer', 'exception.InvalidShareServer', ([], {'reason': 'msg'}), '(reason=msg)\n', (37031, 37043), False, 'from manila import exception\n'), ((37429, 37447), 'oslo_utils.timeutils.utcnow', 'timeutils.utcnow', ([], {}), '()\n', (37445, 37447), False, 'from oslo_utils import timeutils\n'), ((42050, 42109), 'manila.exception.ShareNotFound', 'exception.ShareNotFound', ([], {'share_id': "snapshot_data['share_id']"}), "(share_id=snapshot_data['share_id'])\n", (42073, 42109), False, 'from manila import exception\n'), ((42164, 42280), 'manila.i18n._', '_', (['"""Share %s has replicas. Snapshots of this share cannot currently be managed until all replicas are removed."""'], {}), "('Share %s has replicas. Snapshots of this share cannot currently be managed until all replicas are removed.'\n )\n", (42165, 42280), False, 'from manila.i18n import _\n'), ((45837, 46024), 'manila.i18n._', '_', (['"""Quota exceeded for %(s_pid)s. Reverting share %(s_sid)s to snapshot %(s_ssid)s will increase the share\'s size by %(s_size)sG, (%(d_consumed)dG of %(d_quota)dG already consumed)."""'], {}), '("Quota exceeded for %(s_pid)s. Reverting share %(s_sid)s to snapshot %(s_ssid)s will increase the share\'s size by %(s_size)sG, (%(d_consumed)dG of %(d_quota)dG already consumed)."\n )\n', (45838, 46024), False, 'from manila.i18n import _\n'), ((46471, 46528), 'manila.exception.ShareSizeExceedsAvailableQuota', 'exception.ShareSizeExceedsAvailableQuota', ([], {'message': 'message'}), '(message=message)\n', (46511, 46528), False, 'from manila import exception\n'), ((49409, 49454), 'manila.i18n._', '_', (['"""Share status must be one of %(statuses)s"""'], {}), "('Share status must be one of %(statuses)s')\n", (49410, 49454), False, 'from manila.i18n import _\n'), ((49739, 49813), 'manila.i18n._', '_', (['"""Share %s has replicas. Remove the replicas before deleting the share."""'], {}), "('Share %s has replicas. 
Remove the replicas before deleting the share.')\n", (49740, 49813), False, 'from manila.i18n import _\n'), ((50020, 50064), 'manila.i18n._', '_', (['"""Share still has %d dependent snapshots."""'], {}), "('Share still has %d dependent snapshots.')\n", (50021, 50064), False, 'from manila.i18n import _\n'), ((50367, 50430), 'manila.i18n._', '_', (['"""Share still has %d dependent share group snapshot members."""'], {}), "('Share still has %d dependent share group snapshot members.')\n", (50368, 50430), False, 'from manila.i18n import _\n'), ((51218, 51273), 'manila.i18n._', '_', (['"""Share instance status must be one of %(statuses)s"""'], {}), "('Share instance status must be one of %(statuses)s')\n", (51219, 51273), False, 'from manila.i18n import _\n'), ((51557, 51575), 'oslo_utils.timeutils.utcnow', 'timeutils.utcnow', ([], {}), '()\n', (51573, 51575), False, 'from oslo_utils import timeutils\n'), ((53827, 53917), 'manila.i18n._', '_', (['"""Identifier %(identifier)s specified matches existing share servers: %(servers)s."""'], {}), "('Identifier %(identifier)s specified matches existing share servers: %(servers)s.'\n )\n", (53828, 53917), False, 'from manila.i18n import _\n'), ((55901, 55936), 'manila.i18n._', '_', (['"""Source share status must be %s"""'], {}), "('Source share status must be %s')\n", (55902, 55936), False, 'from manila.i18n import _\n'), ((60682, 60774), 'manila.i18n._', '_', (['"""Share %s has replicas. Remove the replicas before attempting to migrate the share."""'], {}), "('Share %s has replicas. Remove the replicas before attempting to migrate the share.'\n )\n", (60683, 60774), False, 'from manila.i18n import _\n'), ((61036, 61159), 'manila.i18n._', '_', (['"""Share %s is a member of a group. This operation is not currently supported for shares that are members of groups."""'], {}), "('Share %s is a member of a group. 
This operation is not currently supported for shares that are members of groups.'\n )\n", (61037, 61159), False, 'from manila.i18n import _\n'), ((61432, 61542), 'manila.i18n._', '_', (['"""Share instance %(instance_id)s status must be available, but current status is: %(instance_status)s."""'], {}), "('Share instance %(instance_id)s status must be available, but current status is: %(instance_status)s.'\n )\n", (61433, 61542), False, 'from manila.i18n import _\n'), ((61875, 61998), 'manila.i18n._', '_', (['"""Share instance %(instance_id)s access rules status must not be in %(error)s when attempting to start a migration."""'], {}), "('Share instance %(instance_id)s access rules status must not be in %(error)s when attempting to start a migration.'\n )\n", (61876, 61998), False, 'from manila.i18n import _\n'), ((62705, 62732), 'manila.exception.Conflict', 'exception.Conflict', ([], {'err': 'msg'}), '(err=msg)\n', (62723, 62732), False, 'from manila import exception\n'), ((63548, 63582), 'manila.exception.InvalidInput', 'exception.InvalidInput', ([], {'reason': 'msg'}), '(reason=msg)\n', (63570, 63582), False, 'from manila import exception\n'), ((63744, 63794), 'manila.share.share_types.get_share_type', 'share_types.get_share_type', (['context', 'share_type_id'], {}), '(context, share_type_id)\n', (63770, 63794), False, 'from manila.share import share_types\n'), ((64507, 64541), 'manila.exception.InvalidInput', 'exception.InvalidInput', ([], {'reason': 'msg'}), '(reason=msg)\n', (64529, 64541), False, 'from manila import exception\n'), ((68794, 68939), 'manila.i18n._', '_', (['"""Share instances %(instance_id)s and %(new_instance_id)s in inconsistent states, cannot continue share migration for share %(share_id)s."""'], {}), "('Share instances %(instance_id)s and %(new_instance_id)s in inconsistent states, cannot continue share migration for share %(share_id)s.'\n )\n", (68795, 68939), False, 'from manila.i18n import _\n'), ((70767, 70788), 'manila.data.rpcapi.DataAPI', 'data_rpcapi.DataAPI', ([], {}), '()\n', (70786, 70788), True, 'from manila.data import rpcapi as data_rpcapi\n'), ((73450, 73524), 'manila.i18n._', '_', (['"""Migration of %(resource_type)s %(resource_id)s has already completed."""'], {}), "('Migration of %(resource_type)s %(resource_id)s has already completed.')\n", (73451, 73524), False, 'from manila.i18n import _\n'), ((75761, 75782), 'manila.data.rpcapi.DataAPI', 'data_rpcapi.DataAPI', ([], {}), '()\n', (75780, 75782), True, 'from manila.data import rpcapi as data_rpcapi\n'), ((77075, 77130), 'manila.i18n._', '_', (['"""Share Snapshot status must be one of %(statuses)s."""'], {}), "('Share Snapshot status must be one of %(statuses)s.')\n", (77076, 77130), False, 'from manila.i18n import _\n'), ((79645, 79679), 'manila.exception.InvalidInput', 'exception.InvalidInput', ([], {'reason': 'msg'}), '(reason=msg)\n', (79667, 79679), False, 'from manila import exception\n'), ((80132, 80166), 'manila.exception.InvalidInput', 'exception.InvalidInput', ([], {'reason': 'msg'}), '(reason=msg)\n', (80154, 80166), False, 'from manila import exception\n'), ((80256, 80298), 'manila.i18n._', '_', (['"""Wrong sort_key filter provided: \'%s\'."""'], {}), '("Wrong sort_key filter provided: \'%s\'.")\n', (80257, 80298), False, 'from manila.i18n import _\n'), ((80324, 80347), 'six.text_type', 'six.text_type', (['sort_key'], {}), '(sort_key)\n', (80337, 80347), False, 'import six\n'), ((80489, 80531), 'manila.i18n._', '_', (['"""Wrong sort_dir filter provided: \'%s\'."""'], {}), '("Wrong 
sort_dir filter provided: \'%s\'.")\n', (80490, 80531), False, 'from manila.i18n import _\n'), ((80557, 80580), 'six.text_type', 'six.text_type', (['sort_dir'], {}), '(sort_dir)\n', (80570, 80580), False, 'import six\n'), ((81309, 81342), 'manila.utils.is_all_tenants', 'utils.is_all_tenants', (['search_opts'], {}), '(search_opts)\n', (81329, 81342), False, 'from manila import utils\n'), ((83212, 83246), 'manila.exception.InvalidInput', 'exception.InvalidInput', ([], {'reason': 'msg'}), '(reason=msg)\n', (83234, 83246), False, 'from manila import exception\n'), ((84942, 84978), 'manila.i18n._', '_', (['"""Invalid share access level: %s."""'], {}), "('Invalid share access level: %s.')\n", (84943, 84978), False, 'from manila.i18n import _\n'), ((89122, 89190), 'manila.i18n._', '_', (['"""Share %(share_id)s is busy as part of an active task: %(task)s."""'], {}), "('Share %(share_id)s is busy as part of an active task: %(task)s.')\n", (89123, 89190), False, 'from manila.i18n import _\n'), ((89568, 89604), 'manila.i18n._', '_', (['"""Metadata property key is blank."""'], {}), "('Metadata property key is blank.')\n", (89569, 89604), False, 'from manila.i18n import _\n'), ((89660, 89698), 'manila.exception.InvalidMetadata', 'exception.InvalidMetadata', ([], {'message': 'msg'}), '(message=msg)\n', (89685, 89698), False, 'from manila import exception\n'), ((89750, 89808), 'manila.i18n._', '_', (['"""Metadata property key is greater than 255 characters."""'], {}), "('Metadata property key is greater than 255 characters.')\n", (89751, 89808), False, 'from manila.i18n import _\n'), ((89891, 89933), 'manila.exception.InvalidMetadataSize', 'exception.InvalidMetadataSize', ([], {'message': 'msg'}), '(message=msg)\n', (89920, 89933), False, 'from manila import exception\n'), ((89978, 90016), 'manila.i18n._', '_', (['"""Metadata property value is blank."""'], {}), "('Metadata property value is blank.')\n", (89979, 90016), False, 'from manila.i18n import _\n'), ((90072, 90110), 'manila.exception.InvalidMetadata', 'exception.InvalidMetadata', ([], {'message': 'msg'}), '(message=msg)\n', (90097, 90110), False, 'from manila import exception\n'), ((90163, 90224), 'manila.i18n._', '_', (['"""Metadata property value is greater than 1023 characters."""'], {}), "('Metadata property value is greater than 1023 characters.')\n", (90164, 90224), False, 'from manila.i18n import _\n'), ((90307, 90349), 'manila.exception.InvalidMetadataSize', 'exception.InvalidMetadataSize', ([], {'message': 'msg'}), '(message=msg)\n', (90336, 90349), False, 'from manila import exception\n'), ((91798, 91906), 'manila.i18n._', '_', (['"""Share %(share_id)s status must be \'%(valid_status)s\' to extend, but current status is: %(status)s."""'], {}), '("Share %(share_id)s status must be \'%(valid_status)s\' to extend, but current status is: %(status)s."\n )\n', (91799, 91906), False, 'from manila.i18n import _\n'), ((92161, 92270), 'manila.i18n._', '_', (['"""New size for extend must be greater than current size. (current: %(size)s, extended: %(new_size)s)."""'], {}), "('New size for extend must be greater than current size. 
(current: %(size)s, extended: %(new_size)s).'\n )\n", (92162, 92270), False, 'from manila.i18n import _\n'), ((95989, 96019), 'six.text_type', 'six.text_type', (["share['status']"], {}), "(share['status'])\n", (96002, 96019), False, 'import six\n'), ((96396, 96504), 'manila.i18n._', '_', (['"""Share %(share_id)s status must in (%(valid_status)s) to shrink, but current status is: %(status)s."""'], {}), "('Share %(share_id)s status must in (%(valid_status)s) to shrink, but current status is: %(status)s.'\n )\n", (96397, 96504), False, 'from manila.i18n import _\n'), ((96781, 96899), 'manila.i18n._', '_', (['"""New size for shrink must be less than current size and greater than 0 (current: %(size)s, new: %(new_size)s)"""'], {}), "('New size for shrink must be less than current size and greater than 0 (current: %(size)s, new: %(new_size)s)'\n )\n", (96782, 96899), False, 'from manila.i18n import _\n'), ((98224, 98372), 'manila.i18n._', '_', (['"""New access rules cannot be applied while the snapshot or any of its replicas or migration copies lacks a valid host or is not in %s state."""'], {}), "('New access rules cannot be applied while the snapshot or any of its replicas or migration copies lacks a valid host or is not in %s state.'\n )\n", (98225, 98372), False, 'from manila.i18n import _\n'), ((99070, 99213), 'manila.i18n._', '_', (['"""Access rules cannot be denied while the snapshot or any of its replicas or migration copies lacks a valid host or is not in %s state."""'], {}), "('Access rules cannot be denied while the snapshot or any of its replicas or migration copies lacks a valid host or is not in %s state.'\n )\n", (99071, 99213), False, 'from manila.i18n import _\n'), ((101071, 101143), 'manila.i18n._', '_', (['"""A destination share server wasn\'t found for source share server %s."""'], {}), '("A destination share server wasn\'t found for source share server %s.")\n', (101072, 101143), False, 'from manila.i18n import _\n'), ((101303, 101401), 'manila.i18n._', '_', (['"""More than one destination share server was found for source share server %s. Aborting..."""'], {}), "('More than one destination share server was found for source share server %s. 
Aborting...'\n )\n", (101304, 101401), False, 'from manila.i18n import _\n'), ((103231, 103332), 'manila.i18n._', '_', (['"""Share server %(server_id)s status must be active, but current status is: %(server_status)s."""'], {}), "('Share server %(server_id)s status must be active, but current status is: %(server_status)s.'\n )\n", (103232, 103332), False, 'from manila.i18n import _\n'), ((103757, 103846), 'manila.i18n._', '_', (['"""The share server %s can not be migrated because it is related to a share group."""'], {}), "('The share server %s can not be migrated because it is related to a share group.'\n )\n", (103758, 103846), False, 'from manila.i18n import _\n'), ((106153, 106380), 'manila.i18n._', '_', (['"""Share server %(server)s cannot be migrated to host %(dest)s because the share type %(type)s is used by one of the shares, and this share type is not supported within the availability zone (%(az)s) that the host is in."""'], {}), "('Share server %(server)s cannot be migrated to host %(dest)s because the share type %(type)s is used by one of the shares, and this share type is not supported within the availability zone (%(az)s) that the host is in.'\n )\n", (106154, 106380), False, 'from manila.i18n import _\n'), ((106864, 106914), 'manila.exception.InvalidShareServer', 'exception.InvalidShareServer', ([], {'reason': '(msg % payload)'}), '(reason=msg % payload)\n', (106892, 106914), False, 'from manila import exception\n'), ((108462, 108502), 'manila.exception.InvalidShareServer', 'exception.InvalidShareServer', ([], {'reason': 'msg'}), '(reason=msg)\n', (108490, 108502), False, 'from manila import exception\n'), ((108797, 108837), 'manila.exception.InvalidShareServer', 'exception.InvalidShareServer', ([], {'reason': 'msg'}), '(reason=msg)\n', (108825, 108837), False, 'from manila import exception\n'), ((110015, 110055), 'manila.exception.InvalidShareServer', 'exception.InvalidShareServer', ([], {'reason': 'msg'}), '(reason=msg)\n', (110043, 110055), False, 'from manila import exception\n'), ((110382, 110422), 'manila.exception.InvalidShareServer', 'exception.InvalidShareServer', ([], {'reason': 'msg'}), '(reason=msg)\n', (110410, 110422), False, 'from manila import exception\n'), ((110968, 111008), 'manila.exception.InvalidShareServer', 'exception.InvalidShareServer', ([], {'reason': 'msg'}), '(reason=msg)\n', (110996, 111008), False, 'from manila import exception\n'), ((114089, 114126), 'manila.i18n._', '_', (['"""Share server %s is not migrating"""'], {}), "('Share server %s is not migrating')\n", (114090, 114126), False, 'from manila.i18n import _\n'), ((114362, 114466), 'manila.i18n._', '_', (['"""The first phase of migration has to finish to request the completion of server %s\'s migration."""'], {}), '("The first phase of migration has to finish to request the completion of server %s\'s migration."\n )\n', (114363, 114466), False, 'from manila.i18n import _\n'), ((118011, 118127), 'manila.i18n._', '_', (['"""Migration progress of share server %s cannot be obtained. The provided share server is not being migrated."""'], {}), "('Migration progress of share server %s cannot be obtained. The provided share server is not being migrated.'\n )\n", (118012, 118127), False, 'from manila.i18n import _\n'), ((6425, 6574), 'manila.i18n._', '_', (['"""Failed while creating a share with replication support. Requested share replica exceeds allowed project/user or share type gigabytes quota."""'], {}), "('Failed while creating a share with replication support. 
Requested share replica exceeds allowed project/user or share type gigabytes quota.'\n )\n", (6426, 6574), False, 'from manila.i18n import _\n'), ((7381, 7405), 'manila.i18n._', '_', (['"""status must be \'%s\'"""'], {}), '("status must be \'%s\'")\n', (7382, 7405), False, 'from manila.i18n import _\n'), ((9039, 9146), 'manila.i18n._', '_', (['"""The specified availability zone must be the same as the parent share when creating from snapshot."""'], {}), "('The specified availability zone must be the same as the parent share when creating from snapshot.'\n )\n", (9040, 9146), False, 'from manila.i18n import _\n'), ((9191, 9225), 'manila.exception.InvalidInput', 'exception.InvalidInput', ([], {'reason': 'msg'}), '(reason=msg)\n', (9213, 9225), False, 'from manila import exception\n'), ((9698, 9950), 'manila.i18n._', '_', (['"""Invalid share type specified: the requested share type must match the type of the source share. If a share type is not specified when requesting a new share from a snapshot, the share type of the source share will be applied to the new share."""'], {}), "('Invalid share type specified: the requested share type must match the type of the source share. If a share type is not specified when requesting a new share from a snapshot, the share type of the source share will be applied to the new share.'\n )\n", (9699, 9950), False, 'from manila.i18n import _\n'), ((10127, 10161), 'manila.exception.InvalidInput', 'exception.InvalidInput', ([], {'reason': 'msg'}), '(reason=msg)\n', (10149, 10161), False, 'from manila import exception\n'), ((12134, 12192), 'manila.i18n._', '_', (['"""Share group status must be %(avail)s, got %(status)s."""'], {}), "('Share group status must be %(avail)s, got %(status)s.')\n", (12135, 12192), False, 'from manila.i18n import _\n'), ((12900, 12936), 'manila.exception.InvalidParameterValue', 'exception.InvalidParameterValue', (['msg'], {}), '(msg)\n', (12931, 12936), False, 'from manila import exception\n'), ((13170, 13276), 'manila.i18n._', '_', (['"""The specified share network (%(net)s) is not supported by the specified share group (%(group)s)."""'], {}), "('The specified share network (%(net)s) is not supported by the specified share group (%(group)s).'\n )\n", (13171, 13276), False, 'from manila.i18n import _\n'), ((15300, 15337), 'oslo_utils.excutils.save_and_reraise_exception', 'excutils.save_and_reraise_exception', ([], {}), '()\n', (15335, 15337), False, 'from oslo_utils import excutils\n'), ((28076, 28200), 'manila.i18n._', '_', (['"""Share replica cannot be created because the share network is not available within the specified availability zone."""'], {}), "('Share replica cannot be created because the share network is not available within the specified availability zone.'\n )\n", (28077, 28200), False, 'from manila.i18n import _\n'), ((28284, 28319), 'manila.exception.InvalidShare', 'exception.InvalidShare', ([], {'message': 'msg'}), '(message=msg)\n', (28306, 28319), False, 'from manila import exception\n'), ((28973, 29169), 'manila.i18n._', '_', (['"""The share network is not supported within any requested availability zone. Check the share type\'s \'availability_zones\' extra-spec and the availability zones of the share network subnets"""'], {}), '("The share network is not supported within any requested availability zone. 
Check the share type\'s \'availability_zones\' extra-spec and the availability zones of the share network subnets"\n )\n', (28974, 29169), False, 'from manila.i18n import _\n'), ((29297, 29332), 'manila.exception.InvalidInput', 'exception.InvalidInput', ([], {'message': 'msg'}), '(message=msg)\n', (29319, 29332), False, 'from manila import exception\n'), ((30164, 30201), 'oslo_utils.excutils.save_and_reraise_exception', 'excutils.save_and_reraise_exception', ([], {}), '()\n', (30199, 30201), False, 'from oslo_utils import excutils\n'), ((36746, 36788), 'manila.i18n._', '_', (['"""Share Server specified was not found."""'], {}), "('Share Server specified was not found.')\n", (36747, 36788), False, 'from manila.i18n import _\n'), ((36811, 36845), 'manila.exception.InvalidInput', 'exception.InvalidInput', ([], {'reason': 'msg'}), '(reason=msg)\n', (36833, 36845), False, 'from manila import exception\n'), ((43146, 43194), 'manila.exception.ManageInvalidShareSnapshot', 'exception.ManageInvalidShareSnapshot', ([], {'reason': 'msg'}), '(reason=msg)\n', (43182, 43194), False, 'from manila import exception\n'), ((44741, 44778), 'oslo_utils.excutils.save_and_reraise_exception', 'excutils.save_and_reraise_exception', ([], {}), '()\n', (44776, 44778), False, 'from oslo_utils import excutils\n'), ((52377, 52395), 'oslo_utils.timeutils.utcnow', 'timeutils.utcnow', ([], {}), '()\n', (52393, 52395), False, 'from oslo_utils import timeutils\n'), ((57083, 57128), 'manila.exception.SnapshotSizeExceedsAvailableQuota', 'exception.SnapshotSizeExceedsAvailableQuota', ([], {}), '()\n', (57126, 57128), False, 'from manila import exception\n'), ((58325, 58362), 'oslo_utils.excutils.save_and_reraise_exception', 'excutils.save_and_reraise_exception', ([], {}), '()\n', (58360, 58362), False, 'from oslo_utils import excutils\n'), ((62568, 62641), 'manila.i18n._', '_', (['"""Share %s must not have snapshots when using host-assisted migration."""'], {}), "('Share %s must not have snapshots when using host-assisted migration.')\n", (62569, 62641), False, 'from manila.i18n import _\n'), ((63316, 63449), 'manila.i18n._', '_', (['"""New share network must be provided when share type of given share %s has extra_spec \'driver_handles_share_servers\' as True."""'], {}), '("New share network must be provided when share type of given share %s has extra_spec \'driver_handles_share_servers\' as True."\n )\n', (63317, 63449), False, 'from manila.i18n import _\n'), ((64270, 64408), 'manila.i18n._', '_', (['"""New share network must not be provided when share type of given share %s has extra_spec \'driver_handles_share_servers\' as False."""'], {}), '("New share network must not be provided when share type of given share %s has extra_spec \'driver_handles_share_servers\' as False."\n )\n', (64271, 64408), False, 'from manila.i18n import _\n'), ((67768, 67825), 'manila.i18n._', '_', (['"""First migration phase of share %s not completed yet."""'], {}), "('First migration phase of share %s not completed yet.')\n", (67769, 67825), False, 'from manila.i18n import _\n'), ((71033, 71065), 'manila.utils.service_is_up', 'utils.service_is_up', (['services[0]'], {}), '(services[0])\n', (71052, 71065), False, 'from manila import utils\n'), ((71773, 71843), 'manila.i18n._', '_', (['"""Migration progress of share %s cannot be obtained at this moment."""'], {}), "('Migration progress of share %s cannot be obtained at this moment.')\n", (71774, 71843), False, 'from manila.i18n import _\n'), ((73738, 73839), 'manila.i18n._', '_', (['"""There is no 
migration being performed for %(resource_type)s %(resource_id)s at this moment."""'], {}), "('There is no migration being performed for %(resource_type)s %(resource_id)s at this moment.'\n )\n", (73739, 73839), False, 'from manila.i18n import _\n'), ((76012, 76044), 'manila.utils.service_is_up', 'utils.service_is_up', (['services[0]'], {}), '(services[0])\n', (76031, 76044), False, 'from manila import utils\n'), ((76630, 76692), 'manila.i18n._', '_', (['"""Migration of share %s cannot be cancelled at this moment."""'], {}), "('Migration of share %s cannot be cancelled at this moment.')\n", (76631, 76692), False, 'from manila.i18n import _\n'), ((79518, 79558), 'manila.i18n._', '_', (['"""Wrong metadata filter provided: %s."""'], {}), "('Wrong metadata filter provided: %s.')\n", (79519, 79558), False, 'from manila.i18n import _\n'), ((79588, 79622), 'six.text_type', 'six.text_type', (["filters['metadata']"], {}), "(filters['metadata'])\n", (79601, 79622), False, 'import six\n'), ((79999, 80042), 'manila.i18n._', '_', (['"""Wrong extra specs filter provided: %s."""'], {}), "('Wrong extra specs filter provided: %s.')\n", (80000, 80042), False, 'from manila.i18n import _\n'), ((80072, 80109), 'six.text_type', 'six.text_type', (["filters['extra_specs']"], {}), "(filters['extra_specs'])\n", (80085, 80109), False, 'import six\n'), ((83086, 83130), 'manila.i18n._', '_', (['"""Wrong \'%(k)s\' filter provided: \'%(v)s\'."""'], {}), '("Wrong \'%(k)s\' filter provided: \'%(v)s\'.")\n', (83087, 83130), False, 'from manila.i18n import _\n'), ((108204, 108299), 'manila.i18n._', '_', (['"""Share %(share_id)s status must be available, but current status is: %(share_status)s."""'], {}), "('Share %(share_id)s status must be available, but current status is: %(share_status)s.'\n )\n", (108205, 108299), False, 'from manila.i18n import _\n'), ((108561, 108680), 'manila.i18n._', '_', (['"""Share %s has replicas. Remove the replicas of all shares in the share server before attempting to migrate it."""'], {}), "('Share %s has replicas. Remove the replicas of all shares in the share server before attempting to migrate it.'\n )\n", (108562, 108680), False, 'from manila.i18n import _\n'), ((109647, 109763), 'manila.i18n._', '_', (['"""All snapshots must have \'%(status)s\' status to be migrated by the driver along with share %(resource_id)s."""'], {}), '("All snapshots must have \'%(status)s\' status to be migrated by the driver along with share %(resource_id)s."\n )\n', (109648, 109763), False, 'from manila.i18n import _\n'), ((110123, 110265), 'manila.i18n._', '_', (['"""Share %s is a member of a group. This operation is not currently supported for share servers that contain shares members of groups."""'], {}), "('Share %s is a member of a group. 
This operation is not currently supported for share servers that contain shares members of groups.'\n )\n", (110124, 110265), False, 'from manila.i18n import _\n'), ((110625, 110761), 'manila.i18n._', '_', (['"""Share instance %(instance_id)s access rules status must not be in %(error)s when attempting to start a share server migration."""'], {}), "('Share instance %(instance_id)s access rules status must not be in %(error)s when attempting to start a share server migration.'\n )\n", (110626, 110761), False, 'from manila.i18n import _\n'), ((111151, 111193), 'manila.exception.InvalidShareServer', 'exception.InvalidShareServer', ([], {'reason': 'e.msg'}), '(reason=e.msg)\n', (111179, 111193), False, 'from manila import exception\n'), ((116467, 116632), 'manila.i18n._', '_', (['"""Migration of share server %s can be cancelled only after the driver already started the migration, or when the first phase of the migration gets completed."""'], {}), "('Migration of share server %s can be cancelled only after the driver already started the migration, or when the first phase of the migration gets completed.'\n )\n", (116468, 116632), False, 'from manila.i18n import _\n'), ((117221, 117298), 'manila.i18n._', '_', (['"""Share server %s was not found. We will search for a successful migration"""'], {}), "('Share server %s was not found. We will search for a successful migration')\n", (117222, 117298), False, 'from manila.i18n import _\n'), ((119161, 119208), 'manila.exception.ShareServerMigrationError', 'exception.ShareServerMigrationError', ([], {'reason': 'msg'}), '(reason=msg)\n', (119196, 119208), False, 'from manila import exception\n'), ((119533, 119610), 'manila.i18n._', '_', (['"""Migration progress of share server %s cannot be obtained at this moment."""'], {}), "('Migration progress of share server %s cannot be obtained at this moment.')\n", (119534, 119610), False, 'from manila.i18n import _\n'), ((11806, 11822), 'six.text_type', 'six.text_type', (['e'], {}), '(e)\n', (11819, 11822), False, 'import six\n'), ((12703, 12807), 'manila.i18n._', '_', (['"""The specified share type (%(type)s) is not supported by the specified share group (%(group)s)."""'], {}), "('The specified share type (%(type)s) is not supported by the specified share group (%(group)s).'\n )\n", (12704, 12807), False, 'from manila.i18n import _\n'), ((27713, 27810), 'manila.i18n._', '_', (['"""Share replica cannot be created because the specified availability zone does not exist."""'], {}), "('Share replica cannot be created because the specified availability zone does not exist.'\n )\n", (27714, 27810), False, 'from manila.i18n import _\n'), ((27863, 27898), 'manila.exception.InvalidInput', 'exception.InvalidInput', ([], {'message': 'msg'}), '(message=msg)\n', (27885, 27898), False, 'from manila import exception\n'), ((42749, 42861), 'manila.i18n._', '_', (['"""A share snapshot %(share_snapshot_id)s is already managed for provider location %(provider_location)s."""'], {}), "('A share snapshot %(share_snapshot_id)s is already managed for provider location %(provider_location)s.'\n )\n", (42750, 42861), False, 'from manila.i18n import _\n'), ((57504, 57564), 'manila.exception.SnapshotLimitExceeded', 'exception.SnapshotLimitExceeded', ([], {'allowed': "quotas['snapshots']"}), "(allowed=quotas['snapshots'])\n", (57535, 57564), False, 'from manila import exception\n'), ((70551, 70592), 'manila.exception.ShareMigrationError', 'exception.ShareMigrationError', ([], {'reason': 'msg'}), '(reason=msg)\n', (70580, 70592), False, 'from 
manila import exception\n'), ((74044, 74118), 'manila.i18n._', '_', (['"""Migration of %(resource_type)s %(resource_id)s was already cancelled."""'], {}), "('Migration of %(resource_type)s %(resource_id)s was already cancelled.')\n", (74045, 74118), False, 'from manila.i18n import _\n'), ((95529, 95589), 'manila.exception.ShareReplicaSizeExceedsAvailableQuota', 'exception.ShareReplicaSizeExceedsAvailableQuota', ([], {'message': 'msg'}), '(message=msg)\n', (95576, 95589), False, 'from manila import exception\n'), ((118995, 119055), 'manila.i18n._', '_', (['"""Failed to obtain migration progress of share server %s."""'], {}), "('Failed to obtain migration progress of share server %s.')\n", (118996, 119055), False, 'from manila.i18n import _\n'), ((70387, 70440), 'manila.i18n._', '_', (['"""Failed to obtain migration progress of share %s."""'], {}), "('Failed to obtain migration progress of share %s.')\n", (70388, 70440), False, 'from manila.i18n import _\n'), ((71421, 71462), 'manila.exception.ShareMigrationError', 'exception.ShareMigrationError', ([], {'reason': 'msg'}), '(reason=msg)\n', (71450, 71462), False, 'from manila import exception\n'), ((74412, 74503), 'manila.i18n._', '_', (['"""Migration of %(resource_type)s %(resource_id)s has already completed first phase."""'], {}), "('Migration of %(resource_type)s %(resource_id)s has already completed first phase.'\n )\n", (74413, 74503), False, 'from manila.i18n import _\n'), ((76350, 76391), 'manila.exception.ShareMigrationError', 'exception.ShareMigrationError', ([], {'reason': 'msg'}), '(reason=msg)\n', (76379, 76391), False, 'from manila import exception\n'), ((95157, 95356), 'manila.i18n._', '_', (['"""Failed while extending a share with replication support. There is no available quota to extend the share and its %(count)d replicas. Maximum number of allowed replica_gigabytes is exceeded."""'], {}), "('Failed while extending a share with replication support. There is no available quota to extend the share and its %(count)d replicas. Maximum number of allowed replica_gigabytes is exceeded.'\n )\n", (95158, 95356), False, 'from manila.i18n import _\n'), ((71257, 71310), 'manila.i18n._', '_', (['"""Failed to obtain migration progress of share %s."""'], {}), "('Failed to obtain migration progress of share %s.')\n", (71258, 71310), False, 'from manila.i18n import _\n'), ((76195, 76239), 'manila.i18n._', '_', (['"""Failed to cancel migration of share %s."""'], {}), "('Failed to cancel migration of share %s.')\n", (76196, 76239), False, 'from manila.i18n import _\n')]
# Copyright (c) 2006-2013 Regents of the University of Minnesota. # For licensing terms, see the file LICENSE. import sys import conf import g from item import item_base from item.util import revision from item.util.item_type import Item_Type from util_ import db_glue from util_ import misc __all__ = ['One', 'Many'] log = g.log.getLogger('item_helper') class One(item_base.One): ''' Represents information managed by a versioned item but itself is outside of the versioned and revisioned item system. ''' item_type_id = None item_type_table = None # 'item_helper' item_gwis_abbrev = 'itmh' child_item_types = None local_defns = [ ] attr_defns = item_base.One.attr_defns + local_defns psql_defns = item_base.One.psql_defns + local_defns gwis_defns = item_base.One.attr_defns_reduce_for_gwis(attr_defns) __slots__ = [ ] + [attr_defn[0] for attr_defn in local_defns] # *** Constructor def __init__(self, qb=None, row=None, req=None, copy_from=None): item_base.One.__init__(self, qb, row, req, copy_from) # def save_core(self, qb): # Don't call base class. # Also, we never called validize. g.assurt(not self.valid) class Many(item_base.Many): one_class = One __slots__ = () # *** Constructor def __init__(self): item_base.Many.__init__(self) # *** # ***
[ "item.item_base.One.__init__", "item.item_base.Many.__init__", "g.log.getLogger", "item.item_base.One.attr_defns_reduce_for_gwis", "g.assurt" ]
[((329, 359), 'g.log.getLogger', 'g.log.getLogger', (['"""item_helper"""'], {}), "('item_helper')\n", (344, 359), False, 'import g\n'), ((801, 853), 'item.item_base.One.attr_defns_reduce_for_gwis', 'item_base.One.attr_defns_reduce_for_gwis', (['attr_defns'], {}), '(attr_defns)\n', (841, 853), False, 'from item import item_base\n'), ((1023, 1076), 'item.item_base.One.__init__', 'item_base.One.__init__', (['self', 'qb', 'row', 'req', 'copy_from'], {}), '(self, qb, row, req, copy_from)\n', (1045, 1076), False, 'from item import item_base\n'), ((1188, 1212), 'g.assurt', 'g.assurt', (['(not self.valid)'], {}), '(not self.valid)\n', (1196, 1212), False, 'import g\n'), ((1333, 1362), 'item.item_base.Many.__init__', 'item_base.Many.__init__', (['self'], {}), '(self)\n', (1356, 1362), False, 'from item import item_base\n')]
# -*- coding: utf-8 -*- from qgis.core import QgsProject, QgsField from PyQt5.QtWidgets import QFileDialog from PyQt5.QtCore import QVariant def node_has_child_name(node, child_name): for c in node.children(): if child_name == c.name(): return True return False def add_group(group_name, parent_group=None): root = QgsProject.instance().layerTreeRoot() if parent_group is not None: node = root.findGroup(parent_group) if node is None: raise ValueError("Unable to find parent group named: '%s'" % str(parent_group)) else: node = root if not node_has_child_name(node, group_name): node.addGroup(group_name) def add_layer_to_group(layer, group_name, add_to_layer_list=True): if layer is None: return if add_to_layer_list: QgsProject.instance().addMapLayer(layer, False) root = QgsProject.instance().layerTreeRoot() group = root.findGroup(group_name) if group is None: group = root.insertGroup(0, group_name) group.addLayer(layer) def hide_group(group_name): root = QgsProject.instance().layerTreeRoot() group = root.findGroup(group_name) group.setItemVisibilityChecked(False) group.setExpanded(False) def hide_layers(layers_prefix_to_hide: list): layers = QgsProject.instance().mapLayers() for layer in layers: for layer_prefix in layers_prefix_to_hide: if layer_prefix in layer: ltl = QgsProject.instance().layerTreeRoot().findLayer(layers[layer].id()) ltl.setItemVisibilityChecked(False) def remove_layers(layers_prefix_to_remove: list): layers = QgsProject.instance().mapLayers() for layer in layers: for layer_prefix in layers_prefix_to_remove: if layer_prefix in layer: QgsProject.instance().removeMapLayer(layers[layer]) def has_layer(layer_prefix_to_find: list): layers = QgsProject.instance().mapLayers() for layer in layers: if layer_prefix_to_find in layer: return True return False def browse_and_set_file_path(lineEdit, **kwargs): file_path = QFileDialog.getOpenFileName(**kwargs) lineEdit.clear() if isinstance(file_path, tuple): file_path = file_path[0] lineEdit.insert(file_path) def browse_and_set_directory_path(lineEdit): file_path = QFileDialog.getExistingDirectory() lineEdit.clear() if isinstance(file_path, tuple): file_path = file_path[0] lineEdit.insert(file_path) def set_marked_field(layer, AttributeName, value): if AttributeName not in layer.fields().names(): print(AttributeName + ' not in the attributes at the beginning') layer.startEditing() layer.addAttribute(QgsField(AttributeName, QVariant.Int)) layer.commitChanges() print('Initialized ' + AttributeName + ': ', AttributeName in layer.fields().names()) # Set all AttributeName attributes to value : layer.startEditing() index = layer.fields().indexFromName(AttributeName) for f in layer.getFeatures(): layer.changeAttributeValue(f.id(), index, value) layer.commitChanges() def refresh_layers(iface): for layer in iface.mapCanvas().layers(): layer.triggerRepaint()
[ "PyQt5.QtWidgets.QFileDialog.getOpenFileName", "qgis.core.QgsField", "qgis.core.QgsProject.instance", "PyQt5.QtWidgets.QFileDialog.getExistingDirectory" ]
[((2158, 2195), 'PyQt5.QtWidgets.QFileDialog.getOpenFileName', 'QFileDialog.getOpenFileName', ([], {}), '(**kwargs)\n', (2185, 2195), False, 'from PyQt5.QtWidgets import QFileDialog\n'), ((2381, 2415), 'PyQt5.QtWidgets.QFileDialog.getExistingDirectory', 'QFileDialog.getExistingDirectory', ([], {}), '()\n', (2413, 2415), False, 'from PyQt5.QtWidgets import QFileDialog\n'), ((351, 372), 'qgis.core.QgsProject.instance', 'QgsProject.instance', ([], {}), '()\n', (370, 372), False, 'from qgis.core import QgsProject, QgsField\n'), ((896, 917), 'qgis.core.QgsProject.instance', 'QgsProject.instance', ([], {}), '()\n', (915, 917), False, 'from qgis.core import QgsProject, QgsField\n'), ((1109, 1130), 'qgis.core.QgsProject.instance', 'QgsProject.instance', ([], {}), '()\n', (1128, 1130), False, 'from qgis.core import QgsProject, QgsField\n'), ((1317, 1338), 'qgis.core.QgsProject.instance', 'QgsProject.instance', ([], {}), '()\n', (1336, 1338), False, 'from qgis.core import QgsProject, QgsField\n'), ((1672, 1693), 'qgis.core.QgsProject.instance', 'QgsProject.instance', ([], {}), '()\n', (1691, 1693), False, 'from qgis.core import QgsProject, QgsField\n'), ((1948, 1969), 'qgis.core.QgsProject.instance', 'QgsProject.instance', ([], {}), '()\n', (1967, 1969), False, 'from qgis.core import QgsProject, QgsField\n'), ((2772, 2809), 'qgis.core.QgsField', 'QgsField', (['AttributeName', 'QVariant.Int'], {}), '(AttributeName, QVariant.Int)\n', (2780, 2809), False, 'from qgis.core import QgsProject, QgsField\n'), ((837, 858), 'qgis.core.QgsProject.instance', 'QgsProject.instance', ([], {}), '()\n', (856, 858), False, 'from qgis.core import QgsProject, QgsField\n'), ((1838, 1859), 'qgis.core.QgsProject.instance', 'QgsProject.instance', ([], {}), '()\n', (1857, 1859), False, 'from qgis.core import QgsProject, QgsField\n'), ((1487, 1508), 'qgis.core.QgsProject.instance', 'QgsProject.instance', ([], {}), '()\n', (1506, 1508), False, 'from qgis.core import QgsProject, QgsField\n')]
import asyncio import requests, time, aiohttp, json, pprint,mydb import lime_torrent, glodls_torrent, thepiratebay_torrent,x1337_torrent import nyaasi_torrent, anidex_torrent, nyaapantsu_torrent, evztv_torrent import galaxy_torrent from random import randint torrent_services = [ 'thepiratebay_torrent', 'nyaasi_torrent', 'nyaapantsu_torrent', 'evztv_torrent', 'glodls_torrent', 'galaxy_torrent', 'lime_torrent', 'x1337_torrent', 'anidex_torrent' ] http_proxy = ['http://192.168.3.11','http://192.168.127.12','http://95.170.220.172'] proxyDict = { "http" : 'http://192.168.3.11', } async def call_url(url, types, count): print('Starting {}'.format(url)) nap = randint(0,3) await asyncio.sleep(nap) resp = requests.get(url, proxies=proxyDict) data = resp.text details = eval(types).single_page_details(data, count) pprint.pprint(details) return details def fetch(type): try: urls = eval(type).get_links() start = time.time() count = 0 while True: futures = [] for url in urls[:500]: count+=1 futures.append(call_url(url, type, count)) loop = asyncio.get_event_loop() loop.run_until_complete(asyncio.wait(futures)) del urls[0:500] if len(urls) == 0: insert = mydb.readFile(type.split('_torrent')[0]) print("*" * 100) print("Database inserted successfully") print("*" * 100) break # loop.close() except ValueError: pass for torrent in torrent_services: fetch(torrent)
[ "asyncio.get_event_loop", "random.randint", "asyncio.sleep", "time.time", "pprint.pprint", "requests.get", "asyncio.wait" ]
[((741, 754), 'random.randint', 'randint', (['(0)', '(3)'], {}), '(0, 3)\n', (748, 754), False, 'from random import randint\n'), ((794, 830), 'requests.get', 'requests.get', (['url'], {'proxies': 'proxyDict'}), '(url, proxies=proxyDict)\n', (806, 830), False, 'import requests, time, aiohttp, json, pprint, mydb\n'), ((915, 937), 'pprint.pprint', 'pprint.pprint', (['details'], {}), '(details)\n', (928, 937), False, 'import requests, time, aiohttp, json, pprint, mydb\n'), ((764, 782), 'asyncio.sleep', 'asyncio.sleep', (['nap'], {}), '(nap)\n', (777, 782), False, 'import asyncio\n'), ((1039, 1050), 'time.time', 'time.time', ([], {}), '()\n', (1048, 1050), False, 'import requests, time, aiohttp, json, pprint, mydb\n'), ((1253, 1277), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (1275, 1277), False, 'import asyncio\n'), ((1314, 1335), 'asyncio.wait', 'asyncio.wait', (['futures'], {}), '(futures)\n', (1326, 1335), False, 'import asyncio\n')]
# Here we can add multiple files with a single Add button
from tkinter.filedialog import *
from tkinter import messagebox
import os
import PyPDF2
from PDFDragDrop import *

# NOTE: We can move the below CLASS and load_pdf function to another python file and import the things
# NOTE2: I have imported the Drag and Drop functions from the Python file PDFDragDrop.py

class pdf_doc():
    def __init__(self, filename):
        self.filename = filename
        self.display = filename.split('/')[-1]
        self.pdf = load_pdf(filename)
        self.pages = self.pdf.getNumPages()
        self.start = int(1)
        self.end = int(self.pages)

    def add_to_writer(self, writer):
        for i in range(self.start-1, self.end):
            writer.addPage(self.pdf.getPage(i))

def load_pdf(filename):
    f = open(filename, 'rb')
    return PyPDF2.PdfFileReader(f)

def load():
    fs = askopenfilenames(filetypes=(('PDF File', '*.pdf'), ('All Files', '*.*')))
    for f in fs:
        pdf = pdf_doc(f)
        pdf_list.append(pdf)
        listbox.insert(END, pdf.display)
    print(pdf_list)

def remove():
    index = int(listbox.curselection()[0])
    pdf_list.pop(index)
    listbox.delete(ANCHOR)
    print(pdf_list)
    if not pdf_list:
        # Nothing left to display once the last PDF is removed
        filename.set('')
        pages.set('')
        start.set('')
        end.set('')
        return
    firstindex = 0
    value = listbox.get(firstindex)
    filename.set(value)
    pages.set(pdf_list[firstindex].pages)
    start.set(pdf_list[firstindex].start)
    end.set(pdf_list[firstindex].end)

def save_pdf():
    # Get the writer
    writer = PyPDF2.PdfFileWriter()
    if pdf_list:
        output_filename = asksaveasfilename(filetypes=(('PDF File', '*.pdf'), ('All files', '*.*')))
        if not output_filename:
            return
        if not output_filename.lower().endswith('.pdf'):
            output_filename = output_filename + '.pdf'
        print(output_filename)
        output_file = open(output_filename, 'wb')
        for doc in pdf_list:
            doc.add_to_writer(writer)
        writer.write(output_file)
        output_file.close()
        openmergedpdf = messagebox.askyesno("Merge Completed", "Do you want to open the merged PDF?")
        if openmergedpdf:
            os.startfile(output_filename)
        root.quit()

def display(*args):
    index = int(listbox.curselection()[0])
    value = listbox.get(index)
    filename.set(value)
    pages.set(pdf_list[index].pages)
    start.set(pdf_list[index].start)
    end.set(pdf_list[index].end)

def set_start(*args):
    index = int(listbox.curselection()[0])
    if start.get():
        try:
            pdf_list[index].start = int(start.get())
            if (pdf_list[index].start <= 0) or (pdf_list[index].start > pdf_list[index].end):
                pdf_list[index].start = 1
        except ValueError:
            messagebox.showerror(title='ValueError', message='Enter Valid Integer')

def set_end(*args):
    index = int(listbox.curselection()[0])
    if end.get():
        try:
            pdf_list[index].end = int(end.get())
            if (pdf_list[index].end <= 0) or (pdf_list[index].end < pdf_list[index].start) or (pdf_list[index].end > pdf_list[index].pages):
                pdf_list[index].end = int(pdf_list[index].pages)
        except ValueError:
            messagebox.showerror(title='ValueError', message='Enter Valid Integer')

# List to hold the PDF objects of type pdf_doc class
pdf_list = []

root = Tk()
root.title('PDF Merger')
icon = PhotoImage(file="pdfIcon.png")
root.iconphoto(False, icon)

filename = StringVar()
pages = StringVar()
start = StringVar()
end = StringVar()

Label(root, text='PDF Merger', foreground='blue').grid(row=0, column=0, columnspan=4)

Button(root, text='Add PDFs', command=load).grid(row=2, column=0)
Button(root, text='Remove PDF', command=remove).grid(row=3, column=0)

# Listbox from the DragDrop library; an extra parameter is passed here.
listbox = DragDropListbox(root, pdf_list)
listbox.bind('<<ListboxSelect>>', display)
listbox.grid(row=1, rowspan=4, column=1)

Label(root, text='Selected File: ').grid(row=1, column=2)
Label(root, textvariable=filename, width=20).grid(row=1, column=3, sticky=(N, S, E, W))

Label(root, text='Total Pages: ').grid(row=2, column=2)
Label(root, textvariable=pages).grid(row=2, column=3)

Label(root, text='Start Page: ').grid(row=3, column=2)
s = Entry(root, textvariable=start, width=3)
s.grid(row=3, column=3)

Label(root, text='End Page: ').grid(row=4, column=2)
e = Entry(root, textvariable=end, width=3)
e.grid(row=4, column=3)

Button(root, text='Merge and Save PDF: ', command=save_pdf, width=20, fg="red").grid(row=5, column=0, columnspan=4)

for child in root.winfo_children():
    child.grid_configure(padx=10, pady=10)

# To update the displayed object after selecting the required pages from the particular PDF.
# It shall update the written variable in start and end.
start.trace('w', set_start)
end.trace('w', set_end)

root.mainloop()
[ "PyPDF2.PdfFileReader", "tkinter.messagebox.askyesno", "tkinter.messagebox.showerror", "PyPDF2.PdfFileWriter" ]
[((844, 867), 'PyPDF2.PdfFileReader', 'PyPDF2.PdfFileReader', (['f'], {}), '(f)\n', (864, 867), False, 'import PyPDF2\n'), ((1508, 1530), 'PyPDF2.PdfFileWriter', 'PyPDF2.PdfFileWriter', ([], {}), '()\n', (1528, 1530), False, 'import PyPDF2\n'), ((1950, 2027), 'tkinter.messagebox.askyesno', 'messagebox.askyesno', (['"""Merge Completed"""', '"""Do you want to open the merged PDF?"""'], {}), "('Merge Completed', 'Do you want to open the merged PDF?')\n", (1969, 2027), False, 'from tkinter import messagebox\n'), ((2691, 2762), 'tkinter.messagebox.showerror', 'messagebox.showerror', ([], {'title': '"""ValueError"""', 'message': '"""Enter Valid Integer"""'}), "(title='ValueError', message='Enter Valid Integer')\n", (2711, 2762), False, 'from tkinter import messagebox\n'), ((3160, 3231), 'tkinter.messagebox.showerror', 'messagebox.showerror', ([], {'title': '"""ValueError"""', 'message': '"""Enter Valid Integer"""'}), "(title='ValueError', message='Enter Valid Integer')\n", (3180, 3231), False, 'from tkinter import messagebox\n')]
""" Copyright 2014 Sotera Defense Solutions, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import mysql.connector from datawakeio.data_connector import ExtractedDataConnector class MySqlEntityDataConnector(ExtractedDataConnector): """Provides connection to local mysql database for extracted entity data""" def __init__(self, config): """ :param config: database connection info and table names { user:..., database:..., password:..., host:...., } :return: a new EntityDataConnector for a mysql database """ ExtractedDataConnector.__init__(self) self.config = config self.cnx = None def open(self): """ Open a new database connection. """ self.close() user = self.config['user'] db = self.config['database'] pw = self.config['password'] host = self.config['host'] self.cnx = mysql.connector.connect(user=user, password=pw, host=host, database=db) def close(self): """Close any existing database connection. """ if self.cnx is not None: try: self.cnx.close() except: pass finally: self.cnx = None def _checkConn(self): """Open a new database conneciton if one does not currently exist.""" if self.cnx is None: self.open() def insert_entities(self, url, entity_type, entity_values): self._checkConn() cursor = self.cnx.cursor() try: for entity_value in entity_values: sql = "select count(1) from general_extractor_web_index where url = %s and entity_type = %s and entity_value = %s" params = [url,entity_type,entity_value] cursor.execute(sql,params) count = cursor.fetchall()[0][0] if count == 0: sql = "INSERT INTO general_extractor_web_index (url,entity_type,entity_value) VALUES (%s,%s,%s)" cursor.execute(sql,params) self.cnx.commit() cursor.close() except: self.close() raise def insert_domain_entities(self, domain,url, entity_type, entity_values): self._checkConn() cursor = self.cnx.cursor() try: for entity_value in entity_values: sql = "select count(1) from domain_extractor_web_index where domain = %s and url = %s and entity_type = %s and entity_value = %s" params = [domain,url,entity_type,entity_value] cursor.execute(sql,params) count = cursor.fetchall()[0][0] if count == 0: sql = "INSERT INTO domain_extractor_web_index (domain,url,entity_type,entity_value) VALUES (%s,%s,%s,%s)" cursor.execute(sql,params) self.cnx.commit() cursor.close() except: self.close() raise # # DOMAINS #### def get_domain_entity_matches(self, domain, type, values): self._checkConn() cursor = self.cnx.cursor() sql = "" params = [] max = len(values) - 1 for i in range(len(values)): params.append(domain + '\0' + type + '\0' + values[i]) sql = sql + "select rowkey from datawake_domain_entities where rowkey = %s" if i < max: sql = sql + " union all " try: cursor.execute(sql, params) rows = cursor.fetchall() cursor.close() return map(lambda x: x[0].split('\0')[2], rows) except: self.close() raise
[ "datawakeio.data_connector.ExtractedDataConnector.__init__" ]
[((1173, 1210), 'datawakeio.data_connector.ExtractedDataConnector.__init__', 'ExtractedDataConnector.__init__', (['self'], {}), '(self)\n', (1204, 1210), False, 'from datawakeio.data_connector import ExtractedDataConnector\n')]
# # Copyright (c) 2017, Massachusetts Institute of Technology All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # # Redistributions in binary form must reproduce the above copyright notice, this # list of conditions and the following disclaimer in the documentation and/or # other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # import struct import select from subprocess import Popen from tempfile import mkdtemp import os import sys class DisconnectException(Exception): pass def getheader(fd): header = fd.read(48) #print "getheader got %d bytes from %s" % (len(header),str(fd)) if header == 'bye'.ljust(48, '-'): raise DisconnectException('Disconnect') if len(header) == 0: #print 'got 0 bytes from %s' % (str(fd),) return ('', 0, 0, 0) else: hdr = struct.unpack('Iihbbbbbbiiiiiiii', header) nbytes = hdr[0]-48 nargs = hdr[3] idx = hdr[4] return (header, nbytes, nargs, idx) def getbytes(fd, num): bytes = '' while len(bytes) < num: b = fd.read(num-len(bytes)) if len(b) == 0: raise Exception('short read when getting %d bytes' % (num-len(bytes),)) else: bytes = bytes+b return bytes def getmsg(fd, timeout=None): if timeout is None: msg = '' nargs = 1000 idx = -1 while idx <= (nargs-1): header, numbytes, nargs, idx = getheader(fd) msg = msg+header+getbytes(fd, numbytes) else: s = select.select([fd], [], [], timeout) if fd in s[0]: header, numbytes, nargs, idx = getheader(fd) msg = header+getbytes(fd, numbytes) else: raise Exception('Getmsg timeout') return msg def disconnectMdsip(fifodir): #print "Disconnecting" try: sys.stdout.flush() fifo_out = open(fifodir+'/in', 'r+b') fifo_out.write('bye'.ljust(48, '-')) fifo_out.flush() fifo_out.close() except Exception: print('got exception shutting down:', sys.exec_info()) sys.stdout.flush() raise Exception('mdsip disconnect') def doMdsip(self): ans = ('500 BAD_REQUEST', [('Content-type', 'text/text')], 'unknown error') op = self.path_parts[1].lower() if op == 'connect': tmpdir = mkdtemp(prefix='mdsip_http_') if 'MDSPLUS_DIR' in os.environ: mdsplus_dir = os.environ['MDSPLUS_DIR'] else: mdsplus_dir = '/usr/local/mdsplus' p = Popen('setsid %s/bin/mdsip-server-http %s' % (mdsplus_dir, tmpdir), preexec_fn=os.setsid, shell=True) if p.wait() == 0: ans = ('200 OK', [('Content-type', 'text/text')], tmpdir) else: raise Exception('Error running mdsip-server-http process') elif op == 'msg': if 'HTTP_TMPDIR' in self.environ: fifodir = self.environ['HTTP_TMPDIR'] else: raise Exception('No temp dir provided in request') sys.stdout = open('%s/mdsip-wsgi.log' % 
(fifodir,), 'a') if 'HTTP_FINISHED' in self.environ: disconnectMdsip(fifodir) msg = '' #print 'getting msg' sys.stdout.flush() sys.stdout.flush() try: msg = getmsg(self.environ['wsgi.input']) except DisconnectException: disconnectMdsip(fifodir) #print 'got message of %d bytes' % (len(msg),) # sys.stdout.flush() nofifo = True tries = 0 #print 'opening fifo in' # sys.stdout.flush() while nofifo: try: fifo_out = open(fifodir+'/in', 'r+b') nofifo = False except: from time import sleep tries = tries+1 if tries > 10: raise Exception('Cannot access server fifo file') sleep(1) #print 'opened input fifo' #print 'writing msg' # sys.stdout.flush() fifo_out.write(msg) #print 'done writing msg' fifo_out.flush() fifo_out.close() #print 'getting response' # sys.stdout.flush() msg = '' while len(msg) == 0: fifo_in = os.fdopen( os.open(fifodir+'/out', os.O_RDONLY | os.O_NONBLOCK)) msg = getmsg(fifo_in, 3600) #print 'got %d byte response' % (len(msg),) sys.stdout.flush() if len(msg) == 0: from time import sleep sleep(1) fifo_in.close() ans = ('200 OK', [('Content-type', 'application/octet-stream')], msg) #print 'Done transaction' # sys.stdout.flush() sys.stdout.close() else: raise Exception('Invalid mdsip operation specified') return ans
[ "subprocess.Popen", "sys.exec_info", "os.open", "sys.stdout.close", "struct.unpack", "time.sleep", "select.select", "tempfile.mkdtemp", "sys.stdout.flush" ]
[((1840, 1882), 'struct.unpack', 'struct.unpack', (['"""Iihbbbbbbiiiiiiii"""', 'header'], {}), "('Iihbbbbbbiiiiiiii', header)\n", (1853, 1882), False, 'import struct\n'), ((2567, 2603), 'select.select', 'select.select', (['[fd]', '[]', '[]', 'timeout'], {}), '([fd], [], [], timeout)\n', (2580, 2603), False, 'import select\n'), ((2883, 2901), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (2899, 2901), False, 'import sys\n'), ((3373, 3402), 'tempfile.mkdtemp', 'mkdtemp', ([], {'prefix': '"""mdsip_http_"""'}), "(prefix='mdsip_http_')\n", (3380, 3402), False, 'from tempfile import mkdtemp\n'), ((3568, 3673), 'subprocess.Popen', 'Popen', (["('setsid %s/bin/mdsip-server-http %s' % (mdsplus_dir, tmpdir))"], {'preexec_fn': 'os.setsid', 'shell': '(True)'}), "('setsid %s/bin/mdsip-server-http %s' % (mdsplus_dir, tmpdir),\n preexec_fn=os.setsid, shell=True)\n", (3573, 3673), False, 'from subprocess import Popen\n'), ((3136, 3154), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (3152, 3154), False, 'import sys\n'), ((4260, 4278), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (4276, 4278), False, 'import sys\n'), ((4287, 4305), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (4303, 4305), False, 'import sys\n'), ((5791, 5809), 'sys.stdout.close', 'sys.stdout.close', ([], {}), '()\n', (5807, 5809), False, 'import sys\n'), ((3111, 3126), 'sys.exec_info', 'sys.exec_info', ([], {}), '()\n', (3124, 3126), False, 'import sys\n'), ((5497, 5515), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (5513, 5515), False, 'import sys\n'), ((5335, 5389), 'os.open', 'os.open', (["(fifodir + '/out')", '(os.O_RDONLY | os.O_NONBLOCK)'], {}), "(fifodir + '/out', os.O_RDONLY | os.O_NONBLOCK)\n", (5342, 5389), False, 'import os\n'), ((5601, 5609), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (5606, 5609), False, 'from time import sleep\n'), ((4963, 4971), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (4968, 4971), False, 'from time import sleep\n')]
# Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import absolute_import, division, print_function, unicode_literals from numpy.testing import assert_allclose import pytest from astropy import units as u from astropy.coordinates import SkyCoord from astropy.tests.helper import assert_quantity_allclose from astropy.utils.data import get_pkg_data_filename from astropy.io import fits from astropy.wcs import WCS from ...tests.helpers import make_simple_wcs from ...core import PixCoord from ..line import LinePixelRegion, LineSkyRegion from .utils import ASTROPY_LT_13, HAS_MATPLOTLIB # noqa from .test_common import BaseTestPixelRegion, BaseTestSkyRegion @pytest.fixture(scope='session') def wcs(): filename = get_pkg_data_filename('data/example_header.fits') header = fits.getheader(filename) return WCS(header) class TestLinePixelRegion(BaseTestPixelRegion): reg = LinePixelRegion(PixCoord(3, 4), PixCoord(4, 4)) sample_box = [-2, 8, -1, 9] inside = [] outside = [(3.1, 4.2), (5, 4)] expected_area = 0 expected_repr = '<LinePixelRegion(start=PixCoord(x=3, y=4), end=PixCoord(x=4, y=4))>' expected_str = 'Region: LinePixelRegion\nstart: PixCoord(x=3, y=4)\nend: PixCoord(x=4, y=4)' def test_pix_sky_roundtrip(self): wcs = make_simple_wcs(SkyCoord(2 * u.deg, 3 * u.deg), 0.1 * u.deg, 20) reg_new = self.reg.to_sky(wcs).to_pixel(wcs) assert_allclose(reg_new.start.x, self.reg.start.x) assert_allclose(reg_new.start.y, self.reg.start.y) assert_allclose(reg_new.end.x, self.reg.end.x) assert_allclose(reg_new.end.y, self.reg.end.y) @pytest.mark.skipif('not HAS_MATPLOTLIB') def test_as_patch(self): patch = self.reg.as_patch() assert 'Arrow' in str(patch) class TestLineSkyRegion(BaseTestSkyRegion): start = SkyCoord(3 * u.deg, 4 * u.deg, frame='galactic') end = SkyCoord(3 * u.deg, 5 * u.deg, frame='galactic') reg = LineSkyRegion(start, end) if ASTROPY_LT_13: expected_repr = ('<LineSkyRegion(start=<SkyCoord (Galactic): (l, b) in deg\n' ' (3.0, 4.0)>, end=<SkyCoord (Galactic): (l, b) in deg\n' ' (3.0, 5.0)>)>') expected_str = ('Region: LineSkyRegion\nstart: <SkyCoord (Galactic): (l, b) in deg\n' ' (3.0, 4.0)>\nend: <SkyCoord (Galactic): (l, b) in deg\n' ' (3.0, 5.0)>') else: expected_repr = ('<LineSkyRegion(start=<SkyCoord (Galactic): (l, b) in deg\n' ' ( 3., 4.)>, end=<SkyCoord (Galactic): (l, b) in deg\n' ' ( 3., 5.)>)>') expected_str = ('Region: LineSkyRegion\nstart: <SkyCoord (Galactic): (l, b) in deg\n' ' ( 3., 4.)>\nend: <SkyCoord (Galactic): (l, b) in deg\n' ' ( 3., 5.)>') def test_transformation(self, wcs): pixline = self.reg.to_pixel(wcs) assert_allclose(pixline.start.x, -50.5) assert_allclose(pixline.start.y, 299.5) assert_allclose(pixline.end.x, -50.5) assert_allclose(pixline.end.y, 349.5) skyline = pixline.to_sky(wcs) assert_quantity_allclose(skyline.start.data.lon, self.reg.start.data.lon) assert_quantity_allclose(skyline.start.data.lat, self.reg.start.data.lat) assert_quantity_allclose(skyline.end.data.lon, self.reg.end.data.lon) assert_quantity_allclose(skyline.end.data.lat, self.reg.end.data.lat)
[ "astropy.tests.helper.assert_quantity_allclose", "pytest.fixture", "astropy.utils.data.get_pkg_data_filename", "astropy.io.fits.getheader", "astropy.wcs.WCS", "pytest.mark.skipif", "numpy.testing.assert_allclose", "astropy.coordinates.SkyCoord" ]
[((694, 725), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (708, 725), False, 'import pytest\n'), ((752, 801), 'astropy.utils.data.get_pkg_data_filename', 'get_pkg_data_filename', (['"""data/example_header.fits"""'], {}), "('data/example_header.fits')\n", (773, 801), False, 'from astropy.utils.data import get_pkg_data_filename\n'), ((815, 839), 'astropy.io.fits.getheader', 'fits.getheader', (['filename'], {}), '(filename)\n', (829, 839), False, 'from astropy.io import fits\n'), ((851, 862), 'astropy.wcs.WCS', 'WCS', (['header'], {}), '(header)\n', (854, 862), False, 'from astropy.wcs import WCS\n'), ((1669, 1709), 'pytest.mark.skipif', 'pytest.mark.skipif', (['"""not HAS_MATPLOTLIB"""'], {}), "('not HAS_MATPLOTLIB')\n", (1687, 1709), False, 'import pytest\n'), ((1871, 1919), 'astropy.coordinates.SkyCoord', 'SkyCoord', (['(3 * u.deg)', '(4 * u.deg)'], {'frame': '"""galactic"""'}), "(3 * u.deg, 4 * u.deg, frame='galactic')\n", (1879, 1919), False, 'from astropy.coordinates import SkyCoord\n'), ((1930, 1978), 'astropy.coordinates.SkyCoord', 'SkyCoord', (['(3 * u.deg)', '(5 * u.deg)'], {'frame': '"""galactic"""'}), "(3 * u.deg, 5 * u.deg, frame='galactic')\n", (1938, 1978), False, 'from astropy.coordinates import SkyCoord\n'), ((1443, 1493), 'numpy.testing.assert_allclose', 'assert_allclose', (['reg_new.start.x', 'self.reg.start.x'], {}), '(reg_new.start.x, self.reg.start.x)\n', (1458, 1493), False, 'from numpy.testing import assert_allclose\n'), ((1502, 1552), 'numpy.testing.assert_allclose', 'assert_allclose', (['reg_new.start.y', 'self.reg.start.y'], {}), '(reg_new.start.y, self.reg.start.y)\n', (1517, 1552), False, 'from numpy.testing import assert_allclose\n'), ((1561, 1607), 'numpy.testing.assert_allclose', 'assert_allclose', (['reg_new.end.x', 'self.reg.end.x'], {}), '(reg_new.end.x, self.reg.end.x)\n', (1576, 1607), False, 'from numpy.testing import assert_allclose\n'), ((1616, 1662), 'numpy.testing.assert_allclose', 'assert_allclose', (['reg_new.end.y', 'self.reg.end.y'], {}), '(reg_new.end.y, self.reg.end.y)\n', (1631, 1662), False, 'from numpy.testing import assert_allclose\n'), ((3025, 3064), 'numpy.testing.assert_allclose', 'assert_allclose', (['pixline.start.x', '(-50.5)'], {}), '(pixline.start.x, -50.5)\n', (3040, 3064), False, 'from numpy.testing import assert_allclose\n'), ((3073, 3112), 'numpy.testing.assert_allclose', 'assert_allclose', (['pixline.start.y', '(299.5)'], {}), '(pixline.start.y, 299.5)\n', (3088, 3112), False, 'from numpy.testing import assert_allclose\n'), ((3121, 3158), 'numpy.testing.assert_allclose', 'assert_allclose', (['pixline.end.x', '(-50.5)'], {}), '(pixline.end.x, -50.5)\n', (3136, 3158), False, 'from numpy.testing import assert_allclose\n'), ((3167, 3204), 'numpy.testing.assert_allclose', 'assert_allclose', (['pixline.end.y', '(349.5)'], {}), '(pixline.end.y, 349.5)\n', (3182, 3204), False, 'from numpy.testing import assert_allclose\n'), ((3253, 3326), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['skyline.start.data.lon', 'self.reg.start.data.lon'], {}), '(skyline.start.data.lon, self.reg.start.data.lon)\n', (3277, 3326), False, 'from astropy.tests.helper import assert_quantity_allclose\n'), ((3335, 3408), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['skyline.start.data.lat', 'self.reg.start.data.lat'], {}), '(skyline.start.data.lat, self.reg.start.data.lat)\n', (3359, 3408), False, 'from astropy.tests.helper import 
assert_quantity_allclose\n'), ((3417, 3486), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['skyline.end.data.lon', 'self.reg.end.data.lon'], {}), '(skyline.end.data.lon, self.reg.end.data.lon)\n', (3441, 3486), False, 'from astropy.tests.helper import assert_quantity_allclose\n'), ((3495, 3564), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['skyline.end.data.lat', 'self.reg.end.data.lat'], {}), '(skyline.end.data.lat, self.reg.end.data.lat)\n', (3519, 3564), False, 'from astropy.tests.helper import assert_quantity_allclose\n'), ((1333, 1363), 'astropy.coordinates.SkyCoord', 'SkyCoord', (['(2 * u.deg)', '(3 * u.deg)'], {}), '(2 * u.deg, 3 * u.deg)\n', (1341, 1363), False, 'from astropy.coordinates import SkyCoord\n')]
import fresnel import rowan from ... import draw from .FresnelPrimitive import FresnelPrimitiveSolid class Spheropolygons(FresnelPrimitiveSolid, draw.Spheropolygons): __doc__ = draw.Spheropolygons.__doc__ def render(self, scene): geometry = fresnel.geometry.Polygon( scene=scene, vertices=self.vertices, position=self.positions, angle=rowan.geometry.angle(rowan.normalize(self.orientations)), color=fresnel.color.linear(self.colors), rounding_radius=self.radius, material=self._material, outline_width=self.outline) return geometry
[ "rowan.normalize", "fresnel.color.linear" ]
[((478, 511), 'fresnel.color.linear', 'fresnel.color.linear', (['self.colors'], {}), '(self.colors)\n', (498, 511), False, 'import fresnel\n'), ((423, 457), 'rowan.normalize', 'rowan.normalize', (['self.orientations'], {}), '(self.orientations)\n', (438, 457), False, 'import rowan\n')]
from __future__ import division, absolute_import, print_function import sys import numpy as np from numpy.testing import ( TestCase, run_module_suite, assert_, assert_raises, assert_array_equal ) class TestTake(TestCase): def test_simple(self): a = [[1, 2], [3, 4]] a_str = [[b'1', b'2'], [b'3', b'4']] modes = ['raise', 'wrap', 'clip'] indices = [-1, 4] index_arrays = [np.empty(0, dtype=np.intp), np.empty(tuple(), dtype=np.intp), np.empty((1, 1), dtype=np.intp)] real_indices = {} real_indices['raise'] = {-1:1, 4:IndexError} real_indices['wrap'] = {-1:1, 4:0} real_indices['clip'] = {-1:0, 4:1} # Currently all types but object, use the same function generation. # So it should not be necessary to test all. However test also a non # refcounted struct on top of object. types = np.int, np.object, np.dtype([('', 'i', 2)]) for t in types: # ta works, even if the array may be odd if buffer interface is used ta = np.array(a if np.issubdtype(t, np.number) else a_str, dtype=t) tresult = list(ta.T.copy()) for index_array in index_arrays: if index_array.size != 0: tresult[0].shape = (2,) + index_array.shape tresult[1].shape = (2,) + index_array.shape for mode in modes: for index in indices: real_index = real_indices[mode][index] if real_index is IndexError and index_array.size != 0: index_array.put(0, index) assert_raises(IndexError, ta.take, index_array, mode=mode, axis=1) elif index_array.size != 0: index_array.put(0, index) res = ta.take(index_array, mode=mode, axis=1) assert_array_equal(res, tresult[real_index]) else: res = ta.take(index_array, mode=mode, axis=1) assert_(res.shape == (2,) + index_array.shape) def test_refcounting(self): objects = [object() for i in range(10)] for mode in ('raise', 'clip', 'wrap'): a = np.array(objects) b = np.array([2, 2, 4, 5, 3, 5]) a.take(b, out=a[:6]) del a if hasattr(sys, 'getrefcount'): assert_(all(sys.getrefcount(o) == 3 for o in objects)) # not contiguous, example: a = np.array(objects * 2)[::2] a.take(b, out=a[:6]) del a if hasattr(sys, 'getrefcount'): assert_(all(sys.getrefcount(o) == 3 for o in objects)) def test_unicode_mode(self): d = np.arange(10) k = b'\xc3\xa4'.decode("UTF8") assert_raises(ValueError, d.take, 5, mode=k) def test_empty_partition(self): # In reference to github issue #6530 a_original = np.array([0, 2, 4, 6, 8, 10]) a = a_original.copy() # An empty partition should be a successful no-op a.partition(np.array([], dtype=np.int16)) assert_array_equal(a, a_original) def test_empty_argpartition(self): # In reference to github issue #6530 a = np.array([0, 2, 4, 6, 8, 10]) a = a.argpartition(np.array([], dtype=np.int16)) b = np.array([0, 1, 2, 3, 4, 5]) assert_array_equal(a, b) if __name__ == "__main__": run_module_suite()
[ "numpy.testing.run_module_suite", "numpy.testing.assert_raises", "numpy.testing.assert_array_equal", "numpy.empty", "numpy.dtype", "sys.getrefcount", "numpy.testing.assert_", "numpy.arange", "numpy.array", "numpy.issubdtype" ]
[((3676, 3694), 'numpy.testing.run_module_suite', 'run_module_suite', ([], {}), '()\n', (3692, 3694), False, 'from numpy.testing import TestCase, run_module_suite, assert_, assert_raises, assert_array_equal\n'), ((2943, 2956), 'numpy.arange', 'np.arange', (['(10)'], {}), '(10)\n', (2952, 2956), True, 'import numpy as np\n'), ((3004, 3048), 'numpy.testing.assert_raises', 'assert_raises', (['ValueError', 'd.take', '(5)'], {'mode': 'k'}), '(ValueError, d.take, 5, mode=k)\n', (3017, 3048), False, 'from numpy.testing import TestCase, run_module_suite, assert_, assert_raises, assert_array_equal\n'), ((3152, 3181), 'numpy.array', 'np.array', (['[0, 2, 4, 6, 8, 10]'], {}), '([0, 2, 4, 6, 8, 10])\n', (3160, 3181), True, 'import numpy as np\n'), ((3330, 3363), 'numpy.testing.assert_array_equal', 'assert_array_equal', (['a', 'a_original'], {}), '(a, a_original)\n', (3348, 3363), False, 'from numpy.testing import TestCase, run_module_suite, assert_, assert_raises, assert_array_equal\n'), ((3469, 3498), 'numpy.array', 'np.array', (['[0, 2, 4, 6, 8, 10]'], {}), '([0, 2, 4, 6, 8, 10])\n', (3477, 3498), True, 'import numpy as np\n'), ((3577, 3605), 'numpy.array', 'np.array', (['[0, 1, 2, 3, 4, 5]'], {}), '([0, 1, 2, 3, 4, 5])\n', (3585, 3605), True, 'import numpy as np\n'), ((3618, 3642), 'numpy.testing.assert_array_equal', 'assert_array_equal', (['a', 'b'], {}), '(a, b)\n', (3636, 3642), False, 'from numpy.testing import TestCase, run_module_suite, assert_, assert_raises, assert_array_equal\n'), ((427, 453), 'numpy.empty', 'np.empty', (['(0)'], {'dtype': 'np.intp'}), '(0, dtype=np.intp)\n', (435, 453), True, 'import numpy as np\n'), ((537, 568), 'numpy.empty', 'np.empty', (['(1, 1)'], {'dtype': 'np.intp'}), '((1, 1), dtype=np.intp)\n', (545, 568), True, 'import numpy as np\n'), ((969, 993), 'numpy.dtype', 'np.dtype', (["[('', 'i', 2)]"], {}), "([('', 'i', 2)])\n", (977, 993), True, 'import numpy as np\n'), ((2420, 2437), 'numpy.array', 'np.array', (['objects'], {}), '(objects)\n', (2428, 2437), True, 'import numpy as np\n'), ((2454, 2482), 'numpy.array', 'np.array', (['[2, 2, 4, 5, 3, 5]'], {}), '([2, 2, 4, 5, 3, 5])\n', (2462, 2482), True, 'import numpy as np\n'), ((3291, 3319), 'numpy.array', 'np.array', (['[]'], {'dtype': 'np.int16'}), '([], dtype=np.int16)\n', (3299, 3319), True, 'import numpy as np\n'), ((3530, 3558), 'numpy.array', 'np.array', (['[]'], {'dtype': 'np.int16'}), '([], dtype=np.int16)\n', (3538, 3558), True, 'import numpy as np\n'), ((2704, 2725), 'numpy.array', 'np.array', (['(objects * 2)'], {}), '(objects * 2)\n', (2712, 2725), True, 'import numpy as np\n'), ((1130, 1157), 'numpy.issubdtype', 'np.issubdtype', (['t', 'np.number'], {}), '(t, np.number)\n', (1143, 1157), True, 'import numpy as np\n'), ((1735, 1801), 'numpy.testing.assert_raises', 'assert_raises', (['IndexError', 'ta.take', 'index_array'], {'mode': 'mode', 'axis': '(1)'}), '(IndexError, ta.take, index_array, mode=mode, axis=1)\n', (1748, 1801), False, 'from numpy.testing import TestCase, run_module_suite, assert_, assert_raises, assert_array_equal\n'), ((2052, 2096), 'numpy.testing.assert_array_equal', 'assert_array_equal', (['res', 'tresult[real_index]'], {}), '(res, tresult[real_index])\n', (2070, 2096), False, 'from numpy.testing import TestCase, run_module_suite, assert_, assert_raises, assert_array_equal\n'), ((2229, 2275), 'numpy.testing.assert_', 'assert_', (['(res.shape == (2,) + index_array.shape)'], {}), '(res.shape == (2,) + index_array.shape)\n', (2236, 2275), False, 'from numpy.testing import TestCase, 
run_module_suite, assert_, assert_raises, assert_array_equal\n'), ((2606, 2624), 'sys.getrefcount', 'sys.getrefcount', (['o'], {}), '(o)\n', (2621, 2624), False, 'import sys\n'), ((2854, 2872), 'sys.getrefcount', 'sys.getrefcount', (['o'], {}), '(o)\n', (2869, 2872), False, 'import sys\n')]
"""Magnetic Module engage command request, result, and implementation models.""" from __future__ import annotations from typing import Optional, TYPE_CHECKING from typing_extensions import Literal, Type from pydantic import BaseModel, Field from ..command import AbstractCommandImpl, BaseCommand, BaseCommandCreate from opentrons.hardware_control import HardwareControlAPI from opentrons.hardware_control.modules import MagDeck if TYPE_CHECKING: from opentrons.protocol_engine.state import StateView EngageCommandType = Literal["magneticModule/engageMagnet"] class EngageParams(BaseModel): """Input data to engage a Magnetic Module.""" moduleId: str = Field( ..., description=( "The ID of the Magnetic Module whose magnets you want to raise," " from a prior `loadModule` command." ), ) # todo(mm, 2022-02-17): Using true millimeters differs from the current JSON # protocol schema v6 draft. Ideally, change the v6 draft to match this. engageHeight: float = Field( ..., description=( "How high, in millimeters, to raise the magnets." "\n\n" "Zero is level with the bottom of the labware." " This will be a few millimeters above the magnets' hardware home position." "\n\n" "Units are always true millimeters." " This is unlike certain labware definitions," " engage commands in the Python Protocol API," " and engage commands in older versions of the JSON protocol schema." " Take care to convert properly." ), ) class EngageResult(BaseModel): """The result of a Magnetic Module engage command.""" pass class EngageImplementation(AbstractCommandImpl[EngageParams, EngageResult]): """The implementation of a Magnetic Module engage command.""" def __init__( self, state_view: StateView, hardware_api: HardwareControlAPI, **unused_dependencies: object, ) -> None: self._state_view = state_view self._hardware_api = hardware_api async def execute(self, params: EngageParams) -> EngageResult: """Execute a Magnetic Module engage command.""" await self._engage_magnets( magnetic_module_id=params.moduleId, mm_from_base=params.engageHeight, ) return EngageResult() async def _engage_magnets( self, magnetic_module_id: str, mm_from_base: float, ) -> None: """Engage a loaded Magnetic Module's magnets. Raises: ModuleDoesNotExistError: If the given module ID doesn't point to a module that's already been loaded. WrongModuleTypeError: If the given module ID points to a non-Magnetic module. ModuleNotAttachedError: If the given module ID points to a valid loaded Magnetic Module, but that module's hardware wasn't found attached. EngageHeightOutOfRangeError: If the given height is unreachable. """ # Allow propagation of ModuleDoesNotExistError. model = self._state_view.modules.get_model(module_id=magnetic_module_id) # Allow propagation of WrongModuleTypeError and EngageHeightOutOfRangeError. hardware_height = self._state_view.modules.calculate_magnet_hardware_height( magnetic_module_model=model, mm_from_base=mm_from_base, ) if not self._state_view.get_configs().use_virtual_modules: # Allow propagation of ModuleNotAttachedError. 
hardware_module = self._state_view.modules.find_loaded_hardware_module( module_id=magnetic_module_id, attached_modules=self._hardware_api.attached_modules, expected_type=MagDeck, ) await hardware_module.engage(height=hardware_height) class Engage(BaseCommand[EngageParams, EngageResult]): """A command to engage a Magnetic Module's magnets.""" commandType: EngageCommandType = "magneticModule/engageMagnet" params: EngageParams result: Optional[EngageResult] _ImplementationCls: Type[EngageImplementation] = EngageImplementation class EngageCreate(BaseCommandCreate[EngageParams]): """A request to create a Magnetic Module engage command.""" commandType: EngageCommandType = "magneticModule/engageMagnet" params: EngageParams _CommandCls: Type[Engage] = Engage
[ "pydantic.Field" ]
[((676, 809), 'pydantic.Field', 'Field', (['...'], {'description': '"""The ID of the Magnetic Module whose magnets you want to raise, from a prior `loadModule` command."""'}), "(..., description=\n 'The ID of the Magnetic Module whose magnets you want to raise, from a prior `loadModule` command.'\n )\n", (681, 809), False, 'from pydantic import BaseModel, Field\n'), ((1046, 1476), 'pydantic.Field', 'Field', (['...'], {'description': '"""How high, in millimeters, to raise the magnets.\n\nZero is level with the bottom of the labware. This will be a few millimeters above the magnets\' hardware home position.\n\nUnits are always true millimeters. This is unlike certain labware definitions, engage commands in the Python Protocol API, and engage commands in older versions of the JSON protocol schema. Take care to convert properly."""'}), '(..., description=\n """How high, in millimeters, to raise the magnets.\n\nZero is level with the bottom of the labware. This will be a few millimeters above the magnets\' hardware home position.\n\nUnits are always true millimeters. This is unlike certain labware definitions, engage commands in the Python Protocol API, and engage commands in older versions of the JSON protocol schema. Take care to convert properly."""\n )\n', (1051, 1476), False, 'from pydantic import BaseModel, Field\n')]
#!/usr/bin/env python

from __future__ import print_function

import time
import numpy as np
import numpy.linalg as la

import roslib; roslib.load_manifest('team_wpi')
import rospy
from std_msgs.msg import Header

import heapq
import math

from other_toolbox import *


# Simple priority queue wrapper around heapq, ordered by priority
class PriorityQueue:
    def __init__(self):
        self.elements = []

    def empty(self):
        return len(self.elements) == 0

    def put(self, item, priority):
        heapq.heappush(self.elements, (priority, item))

    def get(self):
        return heapq.heappop(self.elements)[1]


# Neighbors in 4 connected space: 2n neighbours for an n-dimensional state
def neighbors(home, bounds):
    nbr = []
    step = 0.01
    in_bounds = True
    for j in range(0, len(home)):
        # No neighbours for states whose coordinates fall outside [-bounds, bounds]
        if not (-bounds <= home[j] <= bounds):
            in_bounds = False
            break
    if in_bounds:
        for j in range(0, len(home)):
            n1 = list(home)
            n1[j] = round(home[j] + step, 2)
            n2 = list(home)
            n2[j] = round(home[j] - step, 2)
            nbr.append(tuple(n1))
            nbr.append(tuple(n2))
    nbr = tuple(nbr)
    return nbr


def heuristic(pos1, pos2):
    heur = distance(pos1, pos2)
    return heur


def cost(pos1, pos2):
    cost = distance(pos1, pos2)
    return cost


# Implement the A* algorithm to compute a path for the robot
def goto(start, index, bounds, targets, goals, obstacles):
    frontier = PriorityQueue()
    frontier.put(start, 0)
    n = len(start)
    came_from = {}
    cost_so_far = {}
    came_from[start] = None
    cost_so_far[start] = 0
    path = []
    obstacle_threshold = 0.3
    goal_threshold = -0.2
    target = targets[index]
    path_search_state_time = time.time()

    while not frontier.empty():
        current_state = frontier.get()

        if in_goal(current_state, index, goals, goal_threshold):
            break

        for next in neighbors(current_state, bounds):
            new_cost = cost_so_far[current_state] + cost(current_state, next)
            if next not in cost_so_far or new_cost < cost_so_far[next]:
                cost_so_far[next] = new_cost
                priority = heuristic(target, next)
                if not in_collision(next, obstacles, obstacle_threshold):
                    frontier.put(next, priority)
                came_from[next] = current_state

        # Give up if the search runs longer than 2*n seconds
        path_plan_time = time.time() - path_search_state_time
        if (2*n < path_plan_time):
            path = []
            return path

    #print('Path planning time: ' + str(time.time() - path_search_state_time))

    if in_goal(current_state, index, goals, goal_threshold):
        path = [current_state]
        while current_state != start:
            current_state = came_from[current_state]
            path.append(current_state)
        path.reverse()

    return path


def planner(start, bounds, targets, goals, obstacles):
    # Without waypoints
    first = True
    goal_counter = 0
    path = []
    for goal in goals:
        if first:
            first = False
            new_path = goto(start, goal_counter, bounds, targets, goals, obstacles)
        else:
            new_path = goto(cur_path, goal_counter, bounds, targets, goals, obstacles)
        path.append(new_path)
        # Continue the next search from the end of the path just found
        cur_path = new_path[len(new_path) - 1]
        goal_counter += 1
    return path


def planner_waypoint(start, samples, bounds, targets, goals, obstacles):
    # With waypoints
    first = True
    goal_counter = 0
    waypoints = []
    for goal in goals:
        if first:
            first = False
            new_path = goto(start, goal_counter, bounds, targets, goals, obstacles)
        else:
            new_path = goto(cur_path, goal_counter, bounds, targets, goals, obstacles)

        path_length = len(new_path)
        if path_length == 0:  # and len(start) == 1:
            waypoints = []
            return waypoints
        cur_path = new_path[path_length - 1]

        # Sample waypoints along the path
        sample_size = math.ceil(path_length / samples)
        for sample in range(0, samples):
            waypoints.append(new_path[min(int(sample * sample_size), path_length - 1)])
        waypoints.append(new_path[path_length - 1])
        goal_counter += 1
    return waypoints
[ "heapq.heappush", "math.ceil", "heapq.heappop", "time.time", "roslib.load_manifest" ]
[((133, 165), 'roslib.load_manifest', 'roslib.load_manifest', (['"""team_wpi"""'], {}), "('team_wpi')\n", (153, 165), False, 'import roslib\n'), ((1758, 1769), 'time.time', 'time.time', ([], {}), '()\n', (1767, 1769), False, 'import time\n'), ((497, 544), 'heapq.heappush', 'heapq.heappush', (['self.elements', '(priority, item)'], {}), '(self.elements, (priority, item))\n', (511, 544), False, 'import heapq\n'), ((4036, 4068), 'math.ceil', 'math.ceil', (['(path_length / samples)'], {}), '(path_length / samples)\n', (4045, 4068), False, 'import math\n'), ((584, 612), 'heapq.heappop', 'heapq.heappop', (['self.elements'], {}), '(self.elements)\n', (597, 612), False, 'import heapq\n'), ((2422, 2433), 'time.time', 'time.time', ([], {}), '()\n', (2431, 2433), False, 'import time\n')]
import tensorflow as tf import numpy as np, h5py import scipy.io as sio import sys import random import kNN import re import os from numpy import * def weight_variable(shape): initial = tf.truncated_normal(shape, stddev=0.1) return tf.Variable(initial) def bias_variable(shape): initial = tf.constant(0.1, shape=shape) return tf.Variable(initial) def compute_accuracy(test_att, test_visual, test_id, test_label): global left_a2 att_pre = sess.run(left_a2, feed_dict={att_features: test_att}) test_id = np.squeeze(np.asarray(test_id)) outpre = [0]*2933 test_label = np.squeeze(np.asarray(test_label)) test_label = test_label.astype("float32") for i in range(2933): outputLabel = kNN.kNNClassify(test_visual[i,:], att_pre, test_id, 1) outpre[i] = outputLabel correct_prediction = tf.equal(outpre, test_label) accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32)) result = sess.run(accuracy, feed_dict={att_features: test_att, visual_features: test_visual}) return result f=sio.loadmat('./data/CUB_data/train_attr.mat') att=np.array(f['train_attr']) att.shape f=sio.loadmat('./data/CUB_data/train_cub_googlenet_bn.mat') x=np.array(f['train_cub_googlenet_bn']) x.shape f=sio.loadmat('./data/CUB_data/test_cub_googlenet_bn.mat') x_test=np.array(f['test_cub_googlenet_bn']) x_test.shape f=sio.loadmat('./data/CUB_data/test_labels_cub.mat') test_label=np.array(f['test_labels_cub']) test_label.shape f=sio.loadmat('./data/CUB_data/testclasses_id.mat') test_id=np.array(f['testclasses_id']) f=sio.loadmat('./data/CUB_data/test_proto.mat') att_pro=np.array(f['test_proto']) # # data shuffle def data_iterator(): """ A simple data iterator """ batch_idx = 0 while True: # shuffle labels and features idxs = np.arange(0, len(x)) np.random.shuffle(idxs) shuf_visual = x[idxs] shuf_att = att[idxs] batch_size = 100 for batch_idx in range(0, len(x), batch_size): visual_batch = shuf_visual[batch_idx:batch_idx+batch_size] visual_batch = visual_batch.astype("float32") att_batch = shuf_att[batch_idx:batch_idx+batch_size] yield att_batch, visual_batch # # Placeholder # define placeholder for inputs to network att_features = tf.placeholder(tf.float32, [None, 312]) visual_features = tf.placeholder(tf.float32, [None, 1024]) # # Network # CUB 312 700 1024 ReLu, 1e-2 * regularisers, 100 batch, 0.00001 Adam W_left_a1 = weight_variable([312, 700]) b_left_a1 = bias_variable([700]) left_a1 = tf.nn.relu(tf.matmul(att_features, W_left_a1) + b_left_a1) W_left_a2 = weight_variable([700, 1024]) b_left_a2 = bias_variable([1024]) left_a2 = tf.nn.relu(tf.matmul(left_a1, W_left_a2) + b_left_a2) # # loss loss_a = tf.reduce_mean(tf.square(left_a2 - visual_features)) # L2 regularisation for the fully connected parameters. regularizers_a = (tf.nn.l2_loss(W_left_a1) + tf.nn.l2_loss(b_left_a1) + tf.nn.l2_loss(W_left_a2) + tf.nn.l2_loss(b_left_a2)) # Add the regularization term to the loss. loss_a += 1e-2 * regularizers_a train_step = tf.train.AdamOptimizer(0.00001).minimize(loss_a) sess = tf.Session() sess.run(tf.global_variables_initializer()) # # Run iter_ = data_iterator() for i in range(1000000): att_batch_val, visual_batch_val = iter_.next() sess.run(train_step, feed_dict={att_features: att_batch_val, visual_features: visual_batch_val}) if i % 1000 == 0: print(compute_accuracy(att_pro, x_test, test_id, test_label))
[ "numpy.random.shuffle", "scipy.io.loadmat", "tensorflow.global_variables_initializer", "numpy.asarray", "tensorflow.Session", "tensorflow.constant", "tensorflow.placeholder", "tensorflow.cast", "tensorflow.Variable", "numpy.array", "tensorflow.matmul", "tensorflow.square", "tensorflow.nn.l2_loss", "kNN.kNNClassify", "tensorflow.train.AdamOptimizer", "tensorflow.truncated_normal", "tensorflow.equal" ]
[((1074, 1119), 'scipy.io.loadmat', 'sio.loadmat', (['"""./data/CUB_data/train_attr.mat"""'], {}), "('./data/CUB_data/train_attr.mat')\n", (1085, 1119), True, 'import scipy.io as sio\n'), ((1124, 1149), 'numpy.array', 'np.array', (["f['train_attr']"], {}), "(f['train_attr'])\n", (1132, 1149), True, 'import numpy as np, h5py\n'), ((1163, 1220), 'scipy.io.loadmat', 'sio.loadmat', (['"""./data/CUB_data/train_cub_googlenet_bn.mat"""'], {}), "('./data/CUB_data/train_cub_googlenet_bn.mat')\n", (1174, 1220), True, 'import scipy.io as sio\n'), ((1223, 1260), 'numpy.array', 'np.array', (["f['train_cub_googlenet_bn']"], {}), "(f['train_cub_googlenet_bn'])\n", (1231, 1260), True, 'import numpy as np, h5py\n'), ((1272, 1328), 'scipy.io.loadmat', 'sio.loadmat', (['"""./data/CUB_data/test_cub_googlenet_bn.mat"""'], {}), "('./data/CUB_data/test_cub_googlenet_bn.mat')\n", (1283, 1328), True, 'import scipy.io as sio\n'), ((1336, 1372), 'numpy.array', 'np.array', (["f['test_cub_googlenet_bn']"], {}), "(f['test_cub_googlenet_bn'])\n", (1344, 1372), True, 'import numpy as np, h5py\n'), ((1389, 1439), 'scipy.io.loadmat', 'sio.loadmat', (['"""./data/CUB_data/test_labels_cub.mat"""'], {}), "('./data/CUB_data/test_labels_cub.mat')\n", (1400, 1439), True, 'import scipy.io as sio\n'), ((1451, 1481), 'numpy.array', 'np.array', (["f['test_labels_cub']"], {}), "(f['test_labels_cub'])\n", (1459, 1481), True, 'import numpy as np, h5py\n'), ((1502, 1551), 'scipy.io.loadmat', 'sio.loadmat', (['"""./data/CUB_data/testclasses_id.mat"""'], {}), "('./data/CUB_data/testclasses_id.mat')\n", (1513, 1551), True, 'import scipy.io as sio\n'), ((1560, 1589), 'numpy.array', 'np.array', (["f['testclasses_id']"], {}), "(f['testclasses_id'])\n", (1568, 1589), True, 'import numpy as np, h5py\n'), ((1593, 1638), 'scipy.io.loadmat', 'sio.loadmat', (['"""./data/CUB_data/test_proto.mat"""'], {}), "('./data/CUB_data/test_proto.mat')\n", (1604, 1638), True, 'import scipy.io as sio\n'), ((1647, 1672), 'numpy.array', 'np.array', (["f['test_proto']"], {}), "(f['test_proto'])\n", (1655, 1672), True, 'import numpy as np, h5py\n'), ((2353, 2392), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[None, 312]'], {}), '(tf.float32, [None, 312])\n', (2367, 2392), True, 'import tensorflow as tf\n'), ((2411, 2451), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[None, 1024]'], {}), '(tf.float32, [None, 1024])\n', (2425, 2451), True, 'import tensorflow as tf\n'), ((3275, 3287), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (3285, 3287), True, 'import tensorflow as tf\n'), ((195, 233), 'tensorflow.truncated_normal', 'tf.truncated_normal', (['shape'], {'stddev': '(0.1)'}), '(shape, stddev=0.1)\n', (214, 233), True, 'import tensorflow as tf\n'), ((245, 265), 'tensorflow.Variable', 'tf.Variable', (['initial'], {}), '(initial)\n', (256, 265), True, 'import tensorflow as tf\n'), ((307, 336), 'tensorflow.constant', 'tf.constant', (['(0.1)'], {'shape': 'shape'}), '(0.1, shape=shape)\n', (318, 336), True, 'import tensorflow as tf\n'), ((348, 368), 'tensorflow.Variable', 'tf.Variable', (['initial'], {}), '(initial)\n', (359, 368), True, 'import tensorflow as tf\n'), ((854, 882), 'tensorflow.equal', 'tf.equal', (['outpre', 'test_label'], {}), '(outpre, test_label)\n', (862, 882), True, 'import tensorflow as tf\n'), ((2855, 2891), 'tensorflow.square', 'tf.square', (['(left_a2 - visual_features)'], {}), '(left_a2 - visual_features)\n', (2864, 2891), True, 'import tensorflow as tf\n'), ((3065, 3089), 'tensorflow.nn.l2_loss', 
'tf.nn.l2_loss', (['b_left_a2'], {}), '(b_left_a2)\n', (3078, 3089), True, 'import tensorflow as tf\n'), ((3297, 3330), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (3328, 3330), True, 'import tensorflow as tf\n'), ((550, 569), 'numpy.asarray', 'np.asarray', (['test_id'], {}), '(test_id)\n', (560, 569), True, 'import numpy as np, h5py\n'), ((622, 644), 'numpy.asarray', 'np.asarray', (['test_label'], {}), '(test_label)\n', (632, 644), True, 'import numpy as np, h5py\n'), ((741, 796), 'kNN.kNNClassify', 'kNN.kNNClassify', (['test_visual[i, :]', 'att_pre', 'test_id', '(1)'], {}), '(test_visual[i, :], att_pre, test_id, 1)\n', (756, 796), False, 'import kNN\n'), ((913, 952), 'tensorflow.cast', 'tf.cast', (['correct_prediction', 'tf.float32'], {}), '(correct_prediction, tf.float32)\n', (920, 952), True, 'import tensorflow as tf\n'), ((1864, 1887), 'numpy.random.shuffle', 'np.random.shuffle', (['idxs'], {}), '(idxs)\n', (1881, 1887), True, 'import numpy as np, h5py\n'), ((2631, 2665), 'tensorflow.matmul', 'tf.matmul', (['att_features', 'W_left_a1'], {}), '(att_features, W_left_a1)\n', (2640, 2665), True, 'import tensorflow as tf\n'), ((2776, 2805), 'tensorflow.matmul', 'tf.matmul', (['left_a1', 'W_left_a2'], {}), '(left_a1, W_left_a2)\n', (2785, 2805), True, 'import tensorflow as tf\n'), ((3038, 3062), 'tensorflow.nn.l2_loss', 'tf.nn.l2_loss', (['W_left_a2'], {}), '(W_left_a2)\n', (3051, 3062), True, 'import tensorflow as tf\n'), ((3218, 3247), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', (['(1e-05)'], {}), '(1e-05)\n', (3240, 3247), True, 'import tensorflow as tf\n'), ((2968, 2992), 'tensorflow.nn.l2_loss', 'tf.nn.l2_loss', (['W_left_a1'], {}), '(W_left_a1)\n', (2981, 2992), True, 'import tensorflow as tf\n'), ((2995, 3019), 'tensorflow.nn.l2_loss', 'tf.nn.l2_loss', (['b_left_a1'], {}), '(b_left_a1)\n', (3008, 3019), True, 'import tensorflow as tf\n')]
from flask import Flask, jsonify
server = Flask(__name__) #flask 객체

movies = [
    {
        "name": "The Shawshank Redemption",
        "casts": ["<NAME>", "<NAME>", "<NAME>", "<NAME>"],
        "genres": ["Drama"]
    },
    {
        "name": "The Godfather ",
        "casts": ["<NAME>", "<NAME>", "<NAME>", "<NAME>"],
        "genres": ["Crime", "Drama"]
    }
]

@server.route('/') # 접속하는 url
def index():
    return {'flask':'test'}

@server.route('/movies') # 접속하는 url
def hello_world():
    return jsonify(movies)

if __name__ == "__main__":
    server.run(host="0.0.0.0", port="5000", debug=True)
[ "flask.jsonify", "flask.Flask" ]
[((43, 58), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (48, 58), False, 'from flask import Flask, jsonify\n'), ((496, 511), 'flask.jsonify', 'jsonify', (['movies'], {}), '(movies)\n', (503, 511), False, 'from flask import Flask, jsonify\n')]
import os import random import numpy as np from torch.utils.data import Dataset from PIL import Image from utils.cartoongan import smooth_image_edges class CartoonDataset(Dataset): def __init__(self, data_dir, src_style='real', tar_style='gongqijun', src_transform=None, tar_transform=None): self.data_dir = data_dir self.src_data, self.tar_data = self._load_data(data_dir, src_style, tar_style) print("total {} {} images for training".format(len(self.src_data), src_style)) print("total {} {} images for training".format(len(self.tar_data), tar_style)) self.src_transform = src_transform self.tar_transform = tar_transform def _load_data(self, data_dir, src_style, tar_style): src_data = [] with open(os.path.join(data_dir, '{}_train.txt'.format(src_style)), 'r') as f: lines = f.readlines() for line in lines: path = line.strip() src_data.append(path) tar_data = [] with open(os.path.join(data_dir, '{}_train.txt'.format(tar_style)), 'r') as f: lines = f.readlines() for line in lines: path = line.strip() tar_data.append(path) return src_data, tar_data def _shuffle_data(self): np.random.shuffle(self.src_data) np.random.shuffle(self.tar_data) def __len__(self): return len(self.src_data) def __getitem__(self, index): src_path = self.src_data[index] tar_path = self.tar_data[index] src_img = Image.open(os.path.join(self.data_dir, src_path)) tar_img = Image.open(os.path.join(self.data_dir, tar_path)) src_img = src_img.convert('RGB') tar_img = tar_img.convert('RGB') # transform src img if self.src_transform is not None: src_img = self.src_transform(src_img) # transform tar img if self.tar_transform is not None: tar_img = self.tar_transform(tar_img) return src_img, tar_img class CartoonDefaultDataset(Dataset): def __init__(self, data_dir, style='real', transform=None): self.data_dir = data_dir self.data = self._load_data(data_dir, style) print("total {} {} images for testing".format(len(self.data), style)) self.transform = transform def _load_data(self, data_dir, style): data = [] with open(os.path.join(data_dir, '{}_test.txt'.format(style)), 'r') as f: lines = f.readlines() for line in lines: path = line.strip() data.append(path) return data def __len__(self): return len(self.data) def __getitem__(self, index): path = self.data[index] img = Image.open(os.path.join(self.data_dir, path)) img = img.convert('RGB') # transform src img if self.transform is not None: img = self.transform(img) return img class CartoonGANDataset(CartoonDataset): def __init__(self, data_dir, src_style='real', tar_style='gongqijun', src_transform=None, tar_transform=None): super(CartoonGANDataset, self).__init__(data_dir, src_style, tar_style, src_transform, tar_transform) def __getitem__(self, index): src_path = self.src_data[index] tar_path = self.tar_data[index] src_img = Image.open(os.path.join(self.data_dir, src_path)) tar_img = Image.open(os.path.join(self.data_dir, tar_path)) src_img = src_img.convert('RGB') tar_img = tar_img.convert('RGB') # get edge smoothed transform smooth_tar_img = smooth_image_edges(np.asarray(tar_img)) smooth_tar_img = Image.fromarray(smooth_tar_img) # transform src img if self.src_transform is not None: src_img = self.src_transform(src_img) # transform tar img if self.tar_transform is not None: tar_img = self.tar_transform(tar_img) smooth_tar_img = self.tar_transform(smooth_tar_img) return src_img, tar_img, smooth_tar_img class StarCartoonDataset(Dataset): def __init__(self, data_dir, src_transform=None, tar_transform=None): self.data_dir = data_dir self.src_data, self.tar_data = 
self._load_data(data_dir) self.src_transform = src_transform self.tar_transform = tar_transform def _load_data(self, data_dir): src_data = [] with open(os.path.join(data_dir, 'real_train.txt'), 'r') as f: lines = f.readlines() for line in lines: path = line.strip() src_data.append(path) styles = ['gongqijun', 'xinhaicheng', 'disney', 'tangqian'] tar_data = {} for i, style in enumerate(styles): tar_data[i] = [] with open(os.path.join(data_dir, '{}_train.txt'.format(style)), 'r') as f: lines = f.readlines() for line in lines: path = line.strip() tar_data[i].append(path) return src_data, tar_data def _shuffle_data(self): for key, item in self.tar_data.items(): np.random.shuffle(item) self.tar_data[key] = item def __len__(self): return len(self.src_data) def __getitem__(self, index): # sample a target tar_label = random.randint(0, 3) src_path = self.src_data[index] tar_path = self.tar_data[tar_label][index] src_img = Image.open(os.path.join(self.data_dir, src_path)) tar_img = Image.open(os.path.join(self.data_dir, tar_path)) src_img = src_img.convert('RGB') tar_img = tar_img.convert('RGB') if self.src_transform: src_img = self.src_transform(src_img) if self.tar_transform: tar_img = self.tar_transform(tar_img) return src_img, tar_img, tar_label class ClassifierDataset(Dataset): def __init__(self, data_dir, split, transform=None): self.data_dir = data_dir self.data, self.labels = self._load_data(data_dir, split) self.transform = transform def _load_data(self, data_dir, split): styles = ['disney', 'gongqijun','tangqian','xinhaicheng'] class_dict = { "disney": 0, "gongqijun": 1, "tangqian": 2, "xinhaicheng": 3, } data = [] labels = [] for i, style in enumerate(styles): cls = class_dict[style] with open(os.path.join(data_dir, '{}_{}.txt'.format(style, split)), 'r') as f: lines = f.readlines() for line in lines: path = line.strip() data.append(path) labels.append(int(cls)) return data, labels def __len__(self): return len(self.data) def __getitem__(self, index): path = self.data[index] label = np.asarray(self.labels[index], dtype=np.int64) img = Image.open(os.path.join(self.data_dir, path)) img = img.convert('RGB') if self.transform: img = self.transform(img) return img, label if __name__ == '__main__': from tqdm import tqdm data_dir = '/home/zhaobin/cartoon/' style = 'gongqijun' dataset = StarCartoonDataset(data_dir) import matplotlib.pyplot as plt for i in tqdm(range(len(dataset)), total=len(dataset)): src_img, tar_img, tar_label = dataset.__getitem__(i)
[ "random.randint", "numpy.asarray", "PIL.Image.fromarray", "os.path.join", "numpy.random.shuffle" ]
[((1307, 1339), 'numpy.random.shuffle', 'np.random.shuffle', (['self.src_data'], {}), '(self.src_data)\n', (1324, 1339), True, 'import numpy as np\n'), ((1348, 1380), 'numpy.random.shuffle', 'np.random.shuffle', (['self.tar_data'], {}), '(self.tar_data)\n', (1365, 1380), True, 'import numpy as np\n'), ((3718, 3749), 'PIL.Image.fromarray', 'Image.fromarray', (['smooth_tar_img'], {}), '(smooth_tar_img)\n', (3733, 3749), False, 'from PIL import Image\n'), ((5402, 5422), 'random.randint', 'random.randint', (['(0)', '(3)'], {}), '(0, 3)\n', (5416, 5422), False, 'import random\n'), ((6988, 7034), 'numpy.asarray', 'np.asarray', (['self.labels[index]'], {'dtype': 'np.int64'}), '(self.labels[index], dtype=np.int64)\n', (6998, 7034), True, 'import numpy as np\n'), ((1583, 1620), 'os.path.join', 'os.path.join', (['self.data_dir', 'src_path'], {}), '(self.data_dir, src_path)\n', (1595, 1620), False, 'import os\n'), ((1651, 1688), 'os.path.join', 'os.path.join', (['self.data_dir', 'tar_path'], {}), '(self.data_dir, tar_path)\n', (1663, 1688), False, 'import os\n'), ((2795, 2828), 'os.path.join', 'os.path.join', (['self.data_dir', 'path'], {}), '(self.data_dir, path)\n', (2807, 2828), False, 'import os\n'), ((3400, 3437), 'os.path.join', 'os.path.join', (['self.data_dir', 'src_path'], {}), '(self.data_dir, src_path)\n', (3412, 3437), False, 'import os\n'), ((3468, 3505), 'os.path.join', 'os.path.join', (['self.data_dir', 'tar_path'], {}), '(self.data_dir, tar_path)\n', (3480, 3505), False, 'import os\n'), ((3672, 3691), 'numpy.asarray', 'np.asarray', (['tar_img'], {}), '(tar_img)\n', (3682, 3691), True, 'import numpy as np\n'), ((5201, 5224), 'numpy.random.shuffle', 'np.random.shuffle', (['item'], {}), '(item)\n', (5218, 5224), True, 'import numpy as np\n'), ((5543, 5580), 'os.path.join', 'os.path.join', (['self.data_dir', 'src_path'], {}), '(self.data_dir, src_path)\n', (5555, 5580), False, 'import os\n'), ((5611, 5648), 'os.path.join', 'os.path.join', (['self.data_dir', 'tar_path'], {}), '(self.data_dir, tar_path)\n', (5623, 5648), False, 'import os\n'), ((7060, 7093), 'os.path.join', 'os.path.join', (['self.data_dir', 'path'], {}), '(self.data_dir, path)\n', (7072, 7093), False, 'import os\n'), ((4477, 4517), 'os.path.join', 'os.path.join', (['data_dir', '"""real_train.txt"""'], {}), "(data_dir, 'real_train.txt')\n", (4489, 4517), False, 'import os\n')]
from mhcgnomes import Allele from nose.tools import eq_ def test_allele_get_A0201(): allele = Allele.get("HLA", "A", "02", "01") assert allele is not None assert type(allele) is Allele eq_(allele.species_prefix, "HLA") eq_(allele.gene_name, "A") eq_(list(allele.allele_fields), ["02", "01"]) eq_(allele.mhc_class, "Ia") def test_restrict_num_allele_fields_A02010101(): allele_eight_digit = Allele.get("HLA", "A", "02", "01", "01", "01") assert allele_eight_digit is not None assert type(allele_eight_digit) is Allele eq_(allele_eight_digit.num_allele_fields, 4) allele_four_digit = allele_eight_digit.restrict_allele_fields(2) eq_(allele_four_digit.num_allele_fields, 2) def test_no_annotations(): allele = Allele.get("HLA", "A", "02", "01", "01", "01") assert not allele.annotation_null assert not allele.annotation_cystosolic assert not allele.annotation_aberrant_expression assert not allele.annotation_secreted assert not allele.annotation_pseudogene assert not allele.annotation_questionable assert not allele.annotation_low_expression assert not allele.annotation_group assert not allele.annotation_splice_variant def test_annotation_null(): allele = Allele.get("HLA", "A", "02", "01", "01", "01", annotation="N") assert allele.annotation_null assert not allele.annotation_cystosolic assert not allele.annotation_aberrant_expression assert not allele.annotation_secreted assert not allele.annotation_pseudogene assert not allele.annotation_questionable assert not allele.annotation_low_expression assert not allele.annotation_group assert not allele.annotation_splice_variant def test_annotation_cytosolic(): allele = Allele.get("HLA", "A", "02", "01", "01", "01", annotation="C") assert not allele.annotation_null assert allele.annotation_cystosolic assert not allele.annotation_aberrant_expression assert not allele.annotation_secreted assert not allele.annotation_pseudogene assert not allele.annotation_questionable assert not allele.annotation_low_expression assert not allele.annotation_group assert not allele.annotation_splice_variant def test_annotation_secreted(): allele = Allele.get("HLA", "A", "02", "01", "01", "01", annotation="S") assert not allele.annotation_null assert not allele.annotation_cystosolic assert not allele.annotation_aberrant_expression assert allele.annotation_secreted assert not allele.annotation_pseudogene assert not allele.annotation_questionable assert not allele.annotation_low_expression assert not allele.annotation_group assert not allele.annotation_splice_variant def test_annotation_questionable(): allele = Allele.get("HLA", "A", "02", "01", "01", "01", annotation="Q") assert not allele.annotation_null assert not allele.annotation_cystosolic assert not allele.annotation_aberrant_expression assert not allele.annotation_secreted assert not allele.annotation_pseudogene assert allele.annotation_questionable assert not allele.annotation_low_expression assert not allele.annotation_group assert not allele.annotation_splice_variant def test_annotation_group(): allele = Allele.get("HLA", "A", "02", "01", "01", "01", annotation="G") assert not allele.annotation_null assert not allele.annotation_cystosolic assert not allele.annotation_aberrant_expression assert not allele.annotation_secreted assert not allele.annotation_pseudogene assert not allele.annotation_questionable assert not allele.annotation_low_expression assert allele.annotation_group assert not allele.annotation_splice_variant def test_annotation_splice_variant(): allele = Allele.get("HLA", "A", "02", "01", "01", "01", 
annotation="Sp") assert not allele.annotation_null assert not allele.annotation_cystosolic assert not allele.annotation_aberrant_expression assert not allele.annotation_secreted assert not allele.annotation_pseudogene assert not allele.annotation_questionable assert not allele.annotation_low_expression assert not allele.annotation_group assert allele.annotation_splice_variant def test_annotation_pseudogene(): allele = Allele.get("HLA", "A", "02", "01", "01", "01", annotation="Ps") assert not allele.annotation_null assert not allele.annotation_cystosolic assert not allele.annotation_aberrant_expression assert not allele.annotation_secreted assert allele.annotation_pseudogene assert not allele.annotation_questionable assert not allele.annotation_low_expression assert not allele.annotation_group assert not allele.annotation_splice_variant
[ "mhcgnomes.Allele.get", "nose.tools.eq_" ]
[((99, 133), 'mhcgnomes.Allele.get', 'Allele.get', (['"""HLA"""', '"""A"""', '"""02"""', '"""01"""'], {}), "('HLA', 'A', '02', '01')\n", (109, 133), False, 'from mhcgnomes import Allele\n'), ((202, 235), 'nose.tools.eq_', 'eq_', (['allele.species_prefix', '"""HLA"""'], {}), "(allele.species_prefix, 'HLA')\n", (205, 235), False, 'from nose.tools import eq_\n'), ((240, 266), 'nose.tools.eq_', 'eq_', (['allele.gene_name', '"""A"""'], {}), "(allele.gene_name, 'A')\n", (243, 266), False, 'from nose.tools import eq_\n'), ((321, 348), 'nose.tools.eq_', 'eq_', (['allele.mhc_class', '"""Ia"""'], {}), "(allele.mhc_class, 'Ia')\n", (324, 348), False, 'from nose.tools import eq_\n'), ((425, 471), 'mhcgnomes.Allele.get', 'Allele.get', (['"""HLA"""', '"""A"""', '"""02"""', '"""01"""', '"""01"""', '"""01"""'], {}), "('HLA', 'A', '02', '01', '01', '01')\n", (435, 471), False, 'from mhcgnomes import Allele\n'), ((564, 608), 'nose.tools.eq_', 'eq_', (['allele_eight_digit.num_allele_fields', '(4)'], {}), '(allele_eight_digit.num_allele_fields, 4)\n', (567, 608), False, 'from nose.tools import eq_\n'), ((682, 725), 'nose.tools.eq_', 'eq_', (['allele_four_digit.num_allele_fields', '(2)'], {}), '(allele_four_digit.num_allele_fields, 2)\n', (685, 725), False, 'from nose.tools import eq_\n'), ((768, 814), 'mhcgnomes.Allele.get', 'Allele.get', (['"""HLA"""', '"""A"""', '"""02"""', '"""01"""', '"""01"""', '"""01"""'], {}), "('HLA', 'A', '02', '01', '01', '01')\n", (778, 814), False, 'from mhcgnomes import Allele\n'), ((1259, 1321), 'mhcgnomes.Allele.get', 'Allele.get', (['"""HLA"""', '"""A"""', '"""02"""', '"""01"""', '"""01"""', '"""01"""'], {'annotation': '"""N"""'}), "('HLA', 'A', '02', '01', '01', '01', annotation='N')\n", (1269, 1321), False, 'from mhcgnomes import Allele\n'), ((1768, 1830), 'mhcgnomes.Allele.get', 'Allele.get', (['"""HLA"""', '"""A"""', '"""02"""', '"""01"""', '"""01"""', '"""01"""'], {'annotation': '"""C"""'}), "('HLA', 'A', '02', '01', '01', '01', annotation='C')\n", (1778, 1830), False, 'from mhcgnomes import Allele\n'), ((2275, 2337), 'mhcgnomes.Allele.get', 'Allele.get', (['"""HLA"""', '"""A"""', '"""02"""', '"""01"""', '"""01"""', '"""01"""'], {'annotation': '"""S"""'}), "('HLA', 'A', '02', '01', '01', '01', annotation='S')\n", (2285, 2337), False, 'from mhcgnomes import Allele\n'), ((2786, 2848), 'mhcgnomes.Allele.get', 'Allele.get', (['"""HLA"""', '"""A"""', '"""02"""', '"""01"""', '"""01"""', '"""01"""'], {'annotation': '"""Q"""'}), "('HLA', 'A', '02', '01', '01', '01', annotation='Q')\n", (2796, 2848), False, 'from mhcgnomes import Allele\n'), ((3291, 3353), 'mhcgnomes.Allele.get', 'Allele.get', (['"""HLA"""', '"""A"""', '"""02"""', '"""01"""', '"""01"""', '"""01"""'], {'annotation': '"""G"""'}), "('HLA', 'A', '02', '01', '01', '01', annotation='G')\n", (3301, 3353), False, 'from mhcgnomes import Allele\n'), ((3804, 3867), 'mhcgnomes.Allele.get', 'Allele.get', (['"""HLA"""', '"""A"""', '"""02"""', '"""01"""', '"""01"""', '"""01"""'], {'annotation': '"""Sp"""'}), "('HLA', 'A', '02', '01', '01', '01', annotation='Sp')\n", (3814, 3867), False, 'from mhcgnomes import Allele\n'), ((4314, 4377), 'mhcgnomes.Allele.get', 'Allele.get', (['"""HLA"""', '"""A"""', '"""02"""', '"""01"""', '"""01"""', '"""01"""'], {'annotation': '"""Ps"""'}), "('HLA', 'A', '02', '01', '01', '01', annotation='Ps')\n", (4324, 4377), False, 'from mhcgnomes import Allele\n')]
"""Generic utils.""" import os import json import shutil import numpy as np from artifice.log import logger def divup(a, b): return (a + b - 1) // b def listwrap(val): """Wrap `val` as a list. :param val: iterable or constant :returns: `list(val)` if `val` is iterable, else [val] """ if isinstance(val, list): return val if isinstance(val, tuple): return list(val) return [val] def listify(val, length): """Ensure `val` is a list of size `length`. :param val: iterable or constant :param length: integer length :returns: listified `val`. :raises: RuntimeError if 1 < len(val) != length """ if not isinstance(val, str) and hasattr(val, '__iter__'): val = list(val) if len(val) == 1: return val * length if len(val) != length: raise RuntimeError("mismatched length") return val return [val] * length def jsonable(hist): """Make a history dictionary json-serializable. :param hist: dictionary of lists of float-like numbers. """ out = {} for k, v in hist.items(): out[k] = list(map(float, v)) return out def json_save(fname, obj): """Saves obj to fname as JSON.""" with open(fname, 'w') as f: f.write(json.dumps(obj)) def json_load(fname): with open(fname, 'r') as f: obj = json.loads(f.read()) return obj def atleast_4d(image): """Expand a numpy array (typically an image) to 4d. Inserts batch dim, then channel dim. :param image: :returns: :rtype: """ if image.ndim >= 4: return image if image.ndim == 3: return image[np.newaxis, :, :, :] if image.ndim == 2: return image[np.newaxis, :, :, np.newaxis] if image.ndim == 1: return image[np.newaxis, :, np.newaxis, np.newaxis] raise ValueError(f"incompatible image dimension: {image.ndim}") def rm(path): if not os.path.exists(path): return if os.path.isfile(path): os.remove(path) elif os.path.isdir(path): shutil.rmtree(path) else: raise RuntimeError(f"bad path: {path}") logger.info(f"removed {path}.")
[ "os.remove", "artifice.log.logger.info", "os.path.isdir", "os.path.exists", "json.dumps", "os.path.isfile", "shutil.rmtree" ]
[((1864, 1884), 'os.path.isfile', 'os.path.isfile', (['path'], {}), '(path)\n', (1878, 1884), False, 'import os\n'), ((2012, 2043), 'artifice.log.logger.info', 'logger.info', (['f"""removed {path}."""'], {}), "(f'removed {path}.')\n", (2023, 2043), False, 'from artifice.log import logger\n'), ((1826, 1846), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (1840, 1846), False, 'import os\n'), ((1890, 1905), 'os.remove', 'os.remove', (['path'], {}), '(path)\n', (1899, 1905), False, 'import os\n'), ((1913, 1932), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (1926, 1932), False, 'import os\n'), ((1209, 1224), 'json.dumps', 'json.dumps', (['obj'], {}), '(obj)\n', (1219, 1224), False, 'import json\n'), ((1938, 1957), 'shutil.rmtree', 'shutil.rmtree', (['path'], {}), '(path)\n', (1951, 1957), False, 'import shutil\n')]
import asyncio
import logging
from time import sleep

from learn_asyncio import configure_logging


def non_awaitable_io_bound_function(task_id: int, seconds: int):
    logging.info("Task %d started", task_id)
    sleep(seconds)
    logging.info("Task %d done", task_id)
    return f"result from Task {task_id}"


async def main():
    """
    This is of course a bad idea. The functions do run, synchronously,
    because they are evaluated before `gather` is called. That function,
    however, expects awaitables - instead, it receives strings.
    """
    try:
        await asyncio.gather(
            non_awaitable_io_bound_function(1, 2),
            non_awaitable_io_bound_function(2, 4),
        )
    except TypeError as e:
        logging.error("Caught a TypeError: %s", e)

    """
    Luckily, we can cheat. Asyncio can run run a function in a thread, and
    await not the function, but the thread as a whole. In Python, only one
    thread can run at a time. So, this mechanism cannot be used to speed up
    calculations. But to complete an IO bound task, a thread does not need
    to run all the time: it only needs to check if the task (sleeping, in
    this case) is done every now and then.
    """
    logging.info("\n\nEXAMPLE 2 - TO_THREAD\n")
    await asyncio.gather(
        asyncio.to_thread(non_awaitable_io_bound_function, 3, 1),
        asyncio.to_thread(non_awaitable_io_bound_function, 4, 2),
        asyncio.to_thread(non_awaitable_io_bound_function, 5, 3),
        asyncio.to_thread(non_awaitable_io_bound_function, 6, 4),
    )


configure_logging()
asyncio.run(main())
[ "logging.error", "learn_asyncio.configure_logging", "time.sleep", "logging.info", "asyncio.to_thread" ]
[((1566, 1585), 'learn_asyncio.configure_logging', 'configure_logging', ([], {}), '()\n', (1583, 1585), False, 'from learn_asyncio import configure_logging\n'), ((169, 209), 'logging.info', 'logging.info', (['"""Task %d started"""', 'task_id'], {}), "('Task %d started', task_id)\n", (181, 209), False, 'import logging\n'), ((214, 228), 'time.sleep', 'sleep', (['seconds'], {}), '(seconds)\n', (219, 228), False, 'from time import sleep\n'), ((233, 270), 'logging.info', 'logging.info', (['"""Task %d done"""', 'task_id'], {}), "('Task %d done', task_id)\n", (245, 270), False, 'import logging\n'), ((1223, 1267), 'logging.info', 'logging.info', (['"""\n\nEXAMPLE 2 - TO_THREAD\n"""'], {}), '("""\n\nEXAMPLE 2 - TO_THREAD\n""")\n', (1235, 1267), False, 'import logging\n'), ((743, 785), 'logging.error', 'logging.error', (['"""Caught a TypeError: %s"""', 'e'], {}), "('Caught a TypeError: %s', e)\n", (756, 785), False, 'import logging\n'), ((1302, 1358), 'asyncio.to_thread', 'asyncio.to_thread', (['non_awaitable_io_bound_function', '(3)', '(1)'], {}), '(non_awaitable_io_bound_function, 3, 1)\n', (1319, 1358), False, 'import asyncio\n'), ((1368, 1424), 'asyncio.to_thread', 'asyncio.to_thread', (['non_awaitable_io_bound_function', '(4)', '(2)'], {}), '(non_awaitable_io_bound_function, 4, 2)\n', (1385, 1424), False, 'import asyncio\n'), ((1434, 1490), 'asyncio.to_thread', 'asyncio.to_thread', (['non_awaitable_io_bound_function', '(5)', '(3)'], {}), '(non_awaitable_io_bound_function, 5, 3)\n', (1451, 1490), False, 'import asyncio\n'), ((1500, 1556), 'asyncio.to_thread', 'asyncio.to_thread', (['non_awaitable_io_bound_function', '(6)', '(4)'], {}), '(non_awaitable_io_bound_function, 6, 4)\n', (1517, 1556), False, 'import asyncio\n')]
import numpy as np
import re

from hls4ml.model.optimizer import OptimizerPass
from hls4ml.model.hls_model import Conv1D, Conv2D, register_layer
from hls4ml.templates import templates


class PointwiseConv1D(Conv1D):
    ''' Optimized Conv1D implementation for 1x1 kernels. '''
    # Nothing to do, will pick up function and config from class name
    pass


class PointwiseConv2D(Conv2D):
    ''' Optimized Conv2D implementation for 1x1 kernels. '''
    # Nothing to do, will pick up function and config from class name
    pass


pointwise_conv1d_function_template = 'nnet::pointwise_conv_1d_{data_format}<{input_t}, {output_t}, {config}>({input}, {output}, {w}, {b});'
pointwise_conv2d_function_template = 'nnet::pointwise_conv_2d_{data_format}<{input_t}, {output_t}, {config}>({input}, {output}, {w}, {b});'

sepconv1d_include_list = ['nnet_utils/nnet_conv1d.h', 'nnet_utils/nnet_sepconv1d_stream.h']
sepconv2d_include_list = ['nnet_utils/nnet_conv2d.h', 'nnet_utils/nnet_sepconv2d_stream.h']

# Register the layer types to the layer map
register_layer('PointwiseConv1D', PointwiseConv1D)
register_layer('PointwiseConv2D', PointwiseConv2D)

# Register the templates for config and function
templates.get_backend('Vivado').register_templates(
    'PointwiseConv1D',
    pointwise_conv1d_function_template,
    templates.get_backend('Vivado').get_config_template('Conv1D'),
    sepconv1d_include_list
)
templates.get_backend('Vivado').register_templates(
    'PointwiseConv2D',
    pointwise_conv2d_function_template,
    templates.get_backend('Vivado').get_config_template('Conv2D'),
    sepconv2d_include_list
)


class OptimizePointwiseConv(OptimizerPass):
    def match(self, node):
        return node.__class__.__name__ in ['Conv1D', 'Conv2D'] and \
            node.get_attr('filt_height', 1) == 1 and \
            node.get_attr('filt_width') == 1

    def transform(self, model, node):
        dim = node.__class__.__name__[-2:] # '1D' or '2D'
        pw_node = model.make_node('PointwiseConv' + dim, node.name, node.attributes.copy(), node.inputs.copy())
        model.replace_node(node, pw_node)
        return True
[ "hls4ml.templates.templates.get_backend", "hls4ml.model.hls_model.register_layer" ]
[((1041, 1091), 'hls4ml.model.hls_model.register_layer', 'register_layer', (['"""PointwiseConv1D"""', 'PointwiseConv1D'], {}), "('PointwiseConv1D', PointwiseConv1D)\n", (1055, 1091), False, 'from hls4ml.model.hls_model import Conv1D, Conv2D, register_layer\n'), ((1092, 1142), 'hls4ml.model.hls_model.register_layer', 'register_layer', (['"""PointwiseConv2D"""', 'PointwiseConv2D'], {}), "('PointwiseConv2D', PointwiseConv2D)\n", (1106, 1142), False, 'from hls4ml.model.hls_model import Conv1D, Conv2D, register_layer\n'), ((1193, 1224), 'hls4ml.templates.templates.get_backend', 'templates.get_backend', (['"""Vivado"""'], {}), "('Vivado')\n", (1214, 1224), False, 'from hls4ml.templates import templates\n'), ((1405, 1436), 'hls4ml.templates.templates.get_backend', 'templates.get_backend', (['"""Vivado"""'], {}), "('Vivado')\n", (1426, 1436), False, 'from hls4ml.templates import templates\n'), ((1312, 1343), 'hls4ml.templates.templates.get_backend', 'templates.get_backend', (['"""Vivado"""'], {}), "('Vivado')\n", (1333, 1343), False, 'from hls4ml.templates import templates\n'), ((1524, 1555), 'hls4ml.templates.templates.get_backend', 'templates.get_backend', (['"""Vivado"""'], {}), "('Vivado')\n", (1545, 1555), False, 'from hls4ml.templates import templates\n')]
""" Script calculates the mean January-April sea ice extent for the Bering Sea over the 1850 to 2018 period and 1979-2018 period Notes ----- Author : <NAME> Date : 24 March 2019 """ ### Import modules import numpy as np import matplotlib.pyplot as plt import datetime import scipy.stats as sts ### Define directories directorydata = '/home/zlabe/Documents/Projects/BeringSeaIce2018/BAMS/Data/' ### Define time now = datetime.datetime.now() currentmn = str(now.month) currentdy = str(now.day) currentyr = str(now.year) currenttime = currentmn + '_' + currentdy + '_' + currentyr titletime = currentmn + '/' + currentdy + '/' + currentyr print('\n' '----Calculating Bering SIE - %s----' % titletime) ### Define years years = np.arange(1850,2018+1,1) yearsat = np.arange(1979,2018+1,1) ############################################################################### ############################################################################### ############################################################################### #### Retrieve data from NSIDC regional extent in Bering Sea ### Retrieve data from NSIDC regional extent in Bering Sea beringjan = np.genfromtxt(directorydata + \ 'Bering_SIE_NSIDC_01_1979-2018.txt')/1e6 beringfeb = np.genfromtxt(directorydata + \ 'Bering_SIE_NSIDC_02_1979-2018.txt')/1e6 beringmar = np.genfromtxt(directorydata + \ 'Bering_SIE_NSIDC_03_1979-2018.txt')/1e6 beringapr = np.genfromtxt(directorydata + \ 'Bering_SIE_NSIDC_04_1979-2018.txt')/1e6 meansat = (beringjan + beringfeb + beringmar + beringapr)/4. ### Save sea ice extent data from NSIDC np.savetxt(directorydata + 'Bering_SIE_NSIDC_Jan-Apr_1979-2018.txt',meansat, delimiter=',',header='File contains mean Jan-Apr SIE from NSIDC' \ '\n Sea Ice Index v3 for years 1979-2018 \n') ############################################################################### ############################################################################### ############################################################################### #### Retrieve data from Sea Ice Atlas atlasjan = np.genfromtxt(directorydata + 'Bering_SIE85_iceatlas_' \ '01_1850-2018.txt',skip_header=1) atlasfeb = np.genfromtxt(directorydata + 'Bering_SIE85_iceatlas_' \ '02_1850-2018.txt',skip_header=1) atlasmar = np.genfromtxt(directorydata + 'Bering_SIE85_iceatlas_' \ '03_1850-2018.txt',skip_header=1) atlasapr = np.genfromtxt(directorydata + 'Bering_SIE85_iceatlas_' \ '04_1850-2018.txt',skip_header=1) meanatlas = (atlasjan + atlasfeb + atlasmar + atlasapr)/4. ### Save sea ice extent data from NSIDC np.savetxt(directorydata + 'Bering_SIE85_iceatlas_Jan-Apr_1850-2018.txt',meanatlas, delimiter=',',header='File contains mean Jan-Apr SIE from historical' \ '\n ice atlas (University of Alaska) for years' \ '\n 1850-2018 \n') ############################################################################### ############################################################################### ############################################################################### #### Compute Correlations satperiod = meanatlas[-40:] ### Mask any nans before correlation mask = ~np.logical_or(np.isnan(satperiod),np.isnan(meansat)) corr,p = sts.pearsonr(satperiod[mask],meansat[mask]) print('\n>>> Correlation between ice atlas and NSIDC is --> %s' % np.round(corr,3)) print('\n>>> P-value between ice atlas and NSIDC is --> %s' % p)
[ "numpy.savetxt", "numpy.genfromtxt", "scipy.stats.pearsonr", "numpy.isnan", "numpy.arange", "numpy.round", "datetime.datetime.now" ]
[((441, 464), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (462, 464), False, 'import datetime\n'), ((749, 777), 'numpy.arange', 'np.arange', (['(1850)', '(2018 + 1)', '(1)'], {}), '(1850, 2018 + 1, 1)\n', (758, 777), True, 'import numpy as np\n'), ((784, 812), 'numpy.arange', 'np.arange', (['(1979)', '(2018 + 1)', '(1)'], {}), '(1979, 2018 + 1, 1)\n', (793, 812), True, 'import numpy as np\n'), ((1781, 1983), 'numpy.savetxt', 'np.savetxt', (["(directorydata + 'Bering_SIE_NSIDC_Jan-Apr_1979-2018.txt')", 'meansat'], {'delimiter': '""","""', 'header': '"""File contains mean Jan-Apr SIE from NSIDC\n Sea Ice Index v3 for years 1979-2018 \n"""'}), '(directorydata + \'Bering_SIE_NSIDC_Jan-Apr_1979-2018.txt\',\n meansat, delimiter=\',\', header=\n """File contains mean Jan-Apr SIE from NSIDC\n Sea Ice Index v3 for years 1979-2018 \n"""\n )\n', (1791, 1983), True, 'import numpy as np\n'), ((2275, 2365), 'numpy.genfromtxt', 'np.genfromtxt', (["(directorydata + 'Bering_SIE85_iceatlas_01_1850-2018.txt')"], {'skip_header': '(1)'}), "(directorydata + 'Bering_SIE85_iceatlas_01_1850-2018.txt',\n skip_header=1)\n", (2288, 2365), True, 'import numpy as np\n'), ((2403, 2493), 'numpy.genfromtxt', 'np.genfromtxt', (["(directorydata + 'Bering_SIE85_iceatlas_02_1850-2018.txt')"], {'skip_header': '(1)'}), "(directorydata + 'Bering_SIE85_iceatlas_02_1850-2018.txt',\n skip_header=1)\n", (2416, 2493), True, 'import numpy as np\n'), ((2531, 2621), 'numpy.genfromtxt', 'np.genfromtxt', (["(directorydata + 'Bering_SIE85_iceatlas_03_1850-2018.txt')"], {'skip_header': '(1)'}), "(directorydata + 'Bering_SIE85_iceatlas_03_1850-2018.txt',\n skip_header=1)\n", (2544, 2621), True, 'import numpy as np\n'), ((2659, 2749), 'numpy.genfromtxt', 'np.genfromtxt', (["(directorydata + 'Bering_SIE85_iceatlas_04_1850-2018.txt')"], {'skip_header': '(1)'}), "(directorydata + 'Bering_SIE85_iceatlas_04_1850-2018.txt',\n skip_header=1)\n", (2672, 2749), True, 'import numpy as np\n'), ((2904, 3135), 'numpy.savetxt', 'np.savetxt', (["(directorydata + 'Bering_SIE85_iceatlas_Jan-Apr_1850-2018.txt')", 'meanatlas'], {'delimiter': '""","""', 'header': '"""File contains mean Jan-Apr SIE from historical\n ice atlas (University of Alaska) for years\n 1850-2018 \n"""'}), '(directorydata + \'Bering_SIE85_iceatlas_Jan-Apr_1850-2018.txt\',\n meanatlas, delimiter=\',\', header=\n """File contains mean Jan-Apr SIE from historical\n ice atlas (University of Alaska) for years\n 1850-2018 \n"""\n )\n', (2914, 3135), True, 'import numpy as np\n'), ((3553, 3597), 'scipy.stats.pearsonr', 'sts.pearsonr', (['satperiod[mask]', 'meansat[mask]'], {}), '(satperiod[mask], meansat[mask])\n', (3565, 3597), True, 'import scipy.stats as sts\n'), ((1182, 1248), 'numpy.genfromtxt', 'np.genfromtxt', (["(directorydata + 'Bering_SIE_NSIDC_01_1979-2018.txt')"], {}), "(directorydata + 'Bering_SIE_NSIDC_01_1979-2018.txt')\n", (1195, 1248), True, 'import numpy as np\n'), ((1294, 1360), 'numpy.genfromtxt', 'np.genfromtxt', (["(directorydata + 'Bering_SIE_NSIDC_02_1979-2018.txt')"], {}), "(directorydata + 'Bering_SIE_NSIDC_02_1979-2018.txt')\n", (1307, 1360), True, 'import numpy as np\n'), ((1406, 1472), 'numpy.genfromtxt', 'np.genfromtxt', (["(directorydata + 'Bering_SIE_NSIDC_03_1979-2018.txt')"], {}), "(directorydata + 'Bering_SIE_NSIDC_03_1979-2018.txt')\n", (1419, 1472), True, 'import numpy as np\n'), ((1518, 1584), 'numpy.genfromtxt', 'np.genfromtxt', (["(directorydata + 'Bering_SIE_NSIDC_04_1979-2018.txt')"], {}), "(directorydata + 
'Bering_SIE_NSIDC_04_1979-2018.txt')\n", (1531, 1584), True, 'import numpy as np\n'), ((3505, 3524), 'numpy.isnan', 'np.isnan', (['satperiod'], {}), '(satperiod)\n', (3513, 3524), True, 'import numpy as np\n'), ((3525, 3542), 'numpy.isnan', 'np.isnan', (['meansat'], {}), '(meansat)\n', (3533, 3542), True, 'import numpy as np\n'), ((3663, 3680), 'numpy.round', 'np.round', (['corr', '(3)'], {}), '(corr, 3)\n', (3671, 3680), True, 'import numpy as np\n')]
"""Configures pytest (beyond the ini file).""" import matplotlib as mpl import numpy import pytest from matplotlib import pyplot as plt from dapper.dpr_config import rc @pytest.fixture(autouse=True) def add_sci(doctest_namespace): """Add numpy as np for doctests.""" doctest_namespace["np"] = numpy doctest_namespace["mpl"] = mpl doctest_namespace["plt"] = plt doctest_namespace["rnd"] = numpy.random doctest_namespace["rc"] = rc
[ "pytest.fixture" ]
[((173, 201), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (187, 201), False, 'import pytest\n')]
import os
import time

import numpy as np

# from IPython import embed

print("perform experiments on amazoncat 13K (multilabel)")

leaf_example_multiplier = 2
lr = 1
bits = 30
alpha = 0.1 # 0.3
passes = 4
learn_at_leaf = True
use_oas = True
# num_queries = 1 #does not really use
dream_at_update = 1
# hal_version = 1 #does not really use
loss = "squared"
dream_repeats = 3
# Precision_at_K = 5

num_examples = 1186239
max_num_labels = 13330

tree_node = int(
    num_examples / (np.log(num_examples) / np.log(2) * leaf_example_multiplier)
)

train_data = "amazoncat_train.mat.mult_label.vw.txt"
test_data = "amazoncat_test.mat.mult_label.vw.txt"

if os.path.exists(train_data) is not True:
    os.system("wget http://kalman.ml.cmu.edu/wen_datasets/{}".format(train_data))
if os.path.exists(test_data) is not True:
    os.system("wget http://kalman.ml.cmu.edu/wen_datasets/{}".format(test_data))

saved_model = "{}.vw".format(train_data)

print("## Training...")
start = time.time()
# train_data = 'tmp_rcv1x.vw.txt'
command_line = f"../../build/vowpalwabbit/vw -d {train_data} --memory_tree {tree_node} {'--learn_at_leaf' if learn_at_leaf else ''} --dream_at_update {dream_at_update}\
 --max_number_of_labels {max_num_labels} --dream_repeats {dream_repeats} {'--oas' if use_oas else ''} \
 --leaf_example_multiplier {leaf_example_multiplier} --alpha {alpha} -l {lr} -b {bits} -c --passes {passes} --loss_function {loss} --holdout_off -f {saved_model}"
os.system(command_line)
train_time = time.time() - start

print("## Testing...")
start = time.time()
os.system(
    "../../build/vowpalwabbit/vw {} --oas {} -i {}".format(
        test_data, use_oas, saved_model
    )
)
test_time = time.time() - start

print("## train time {}, and test time {}".format(train_time, test_time))
[ "numpy.log", "os.path.exists", "os.system", "time.time" ]
[((971, 982), 'time.time', 'time.time', ([], {}), '()\n', (980, 982), False, 'import time\n'), ((1471, 1494), 'os.system', 'os.system', (['command_line'], {}), '(command_line)\n', (1480, 1494), False, 'import os\n'), ((1560, 1571), 'time.time', 'time.time', ([], {}), '()\n', (1569, 1571), False, 'import time\n'), ((651, 677), 'os.path.exists', 'os.path.exists', (['train_data'], {}), '(train_data)\n', (665, 677), False, 'import os\n'), ((776, 801), 'os.path.exists', 'os.path.exists', (['test_data'], {}), '(test_data)\n', (790, 801), False, 'import os\n'), ((1508, 1519), 'time.time', 'time.time', ([], {}), '()\n', (1517, 1519), False, 'import time\n'), ((1703, 1714), 'time.time', 'time.time', ([], {}), '()\n', (1712, 1714), False, 'import time\n'), ((481, 501), 'numpy.log', 'np.log', (['num_examples'], {}), '(num_examples)\n', (487, 501), True, 'import numpy as np\n'), ((504, 513), 'numpy.log', 'np.log', (['(2)'], {}), '(2)\n', (510, 513), True, 'import numpy as np\n')]
""" The things we need to do using SSH """ import os import time from fabric import Connection class ssh: """ ssh connection """ def __init__(self, ip, username, password): self.ip = ip self.username = username self.password = password pass def execute(self, command): while True: try: print(" - run: " + command) result = Connection(self.ip, self.username, connect_kwargs={"password", self.password}).run( command, hide=True) return result except: print(" - an error occured, retry in 3 seconds") time.sleep(3) def upload(self, localpath, remotepath): while True: try: print(" - upload: " + localpath + " > " + remotepath) result = Connection(self.ip, self.username, connect_kwargs={"password", self.password}).put( localpath, remote=remotepath ) return result except: print(" - an error occured, retry in 3 seconds") time.sleep(3)
[ "fabric.Connection", "time.sleep" ]
[((744, 757), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (754, 757), False, 'import time\n'), ((1273, 1286), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (1283, 1286), False, 'import time\n'), ((438, 516), 'fabric.Connection', 'Connection', (['self.ip', 'self.username'], {'connect_kwargs': "{'password', self.password}"}), "(self.ip, self.username, connect_kwargs={'password', self.password})\n", (448, 516), False, 'from fabric import Connection\n'), ((937, 1015), 'fabric.Connection', 'Connection', (['self.ip', 'self.username'], {'connect_kwargs': "{'password', self.password}"}), "(self.ip, self.username, connect_kwargs={'password', self.password})\n", (947, 1015), False, 'from fabric import Connection\n')]
# -*- coding: utf-8 -*-
'''
Author: TJUZQC
Date: 2020-10-25 13:07:30
LastEditors: TJUZQC
LastEditTime: 2020-11-20 19:21:52
Description: None
'''
import torch
import torch.nn as nn

from .modules import *

"""
Recurrent U-Net
"""


class R2U_Net(nn.Module):
    def __init__(self, n_channels=3, n_classes=1, t=2, bilinear=True):
        self.n_channels = n_channels
        self.n_classes = n_classes
        self.bilinear = bilinear
        super(R2U_Net, self).__init__()
        self.Maxpool = nn.MaxPool2d(kernel_size=2, stride=2)
        self.Upsample = nn.Upsample(scale_factor=2)

        self.RRCNN1 = RRCNN_block(ch_in=n_channels, ch_out=64, t=t)
        self.RRCNN2 = RRCNN_block(ch_in=64, ch_out=128, t=t)
        self.RRCNN3 = RRCNN_block(ch_in=128, ch_out=256, t=t)
        self.RRCNN4 = RRCNN_block(ch_in=256, ch_out=512, t=t)
        self.RRCNN5 = RRCNN_block(ch_in=512, ch_out=1024, t=t)

        self.Up5 = up_conv(ch_in=1024, ch_out=512, bilinear=bilinear)
        self.Up_RRCNN5 = RRCNN_block(ch_in=1024, ch_out=512, t=t)

        self.Up4 = up_conv(ch_in=512, ch_out=256, bilinear=bilinear)
        self.Up_RRCNN4 = RRCNN_block(ch_in=512, ch_out=256, t=t)

        self.Up3 = up_conv(ch_in=256, ch_out=128, bilinear=bilinear)
        self.Up_RRCNN3 = RRCNN_block(ch_in=256, ch_out=128, t=t)

        self.Up2 = up_conv(ch_in=128, ch_out=64, bilinear=bilinear)
        self.Up_RRCNN2 = RRCNN_block(ch_in=128, ch_out=64, t=t)

        self.Conv_1x1 = nn.Conv2d(
            64, n_classes, kernel_size=1, stride=1, padding=0)

    def forward(self, x):
        # encoding path
        x1 = self.RRCNN1(x)

        x2 = self.Maxpool(x1)
        x2 = self.RRCNN2(x2)

        x3 = self.Maxpool(x2)
        x3 = self.RRCNN3(x3)

        x4 = self.Maxpool(x3)
        x4 = self.RRCNN4(x4)

        x5 = self.Maxpool(x4)
        x5 = self.RRCNN5(x5)

        # decoding + concat path
        d5 = self.Up5(x5)
        d5 = torch.cat((x4, d5), dim=1)
        d5 = self.Up_RRCNN5(d5)

        d4 = self.Up4(d5)
        d4 = torch.cat((x3, d4), dim=1)
        d4 = self.Up_RRCNN4(d4)

        d3 = self.Up3(d4)
        d3 = torch.cat((x2, d3), dim=1)
        d3 = self.Up_RRCNN3(d3)

        d2 = self.Up2(d3)
        d2 = torch.cat((x1, d2), dim=1)
        d2 = self.Up_RRCNN2(d2)

        d1 = self.Conv_1x1(d2)

        return d1
[ "torch.nn.MaxPool2d", "torch.nn.Upsample", "torch.nn.Conv2d", "torch.cat" ]
[((497, 534), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(2)', 'stride': '(2)'}), '(kernel_size=2, stride=2)\n', (509, 534), True, 'import torch.nn as nn\n'), ((559, 586), 'torch.nn.Upsample', 'nn.Upsample', ([], {'scale_factor': '(2)'}), '(scale_factor=2)\n', (570, 586), True, 'import torch.nn as nn\n'), ((1473, 1533), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', 'n_classes'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)'}), '(64, n_classes, kernel_size=1, stride=1, padding=0)\n', (1482, 1533), True, 'import torch.nn as nn\n'), ((1939, 1965), 'torch.cat', 'torch.cat', (['(x4, d5)'], {'dim': '(1)'}), '((x4, d5), dim=1)\n', (1948, 1965), False, 'import torch\n'), ((2038, 2064), 'torch.cat', 'torch.cat', (['(x3, d4)'], {'dim': '(1)'}), '((x3, d4), dim=1)\n', (2047, 2064), False, 'import torch\n'), ((2137, 2163), 'torch.cat', 'torch.cat', (['(x2, d3)'], {'dim': '(1)'}), '((x2, d3), dim=1)\n', (2146, 2163), False, 'import torch\n'), ((2236, 2262), 'torch.cat', 'torch.cat', (['(x1, d2)'], {'dim': '(1)'}), '((x1, d2), dim=1)\n', (2245, 2262), False, 'import torch\n')]
class Fetcher(object): def fetch_addr(self, ref, app, facilities, auto_extract=False): from caty.util.path import is_mafs_path from caty.core.command import VarStorage from caty.core.command.param import Option, Argument from caty.core.script.interpreter.executor import CommandExecutor from caty.core.script.builder import CommandBuilder from caty.core.facility import TransactionPendingAdaptor from caty.core.exception import throw_caty_exception import caty.jsontools as json import caty.jsontools.selector as selector ref = json.untagged(ref) cmd_class = None for name in (ref['t'] + '.GET', ref[u't'] + '.get'): if app._schema_module.has_command_type(name): cmd_class = app._schema_module.get_command(name) break if not cmd_class: throw_caty_exception(u'CommandNotFound', name) raw_args = [ref[u'a'][0]] if len(ref['a']) == 2 and auto_extract: raw_args.append(ref[u'a'][1]) cmd_class = app._schema_module.get_command(name) opts = [] args = [] for v in raw_args: args.append(Argument(v)) builder = CommandBuilder(facilities, {}) cmd_instance = builder.make_cmd(cmd_class, [], opts, args, (0, 0), app._schema_module) cmd_instance.set_facility(facilities) var_storage = VarStorage(None, None) cmd_instance.set_var_storage(var_storage) executor = TransactionPendingAdaptor(CommandExecutor(cmd_instance, app, facilities), facilities) r = executor(None) return r class TypeQuery(object): type = u'type' def __init__(self, label, value): self.label = label self.value = value self.optional = False self.repeat = False def __repr__(self): return 'type query: %s:%s%s' % (repr(self.label), repr(self.value), '?' if self.optional else '') class TagQuery(object): type = u'tag' def __init__(self, tag, value): self.tag = tag self.value = value self.label = None self.optional = False self.repeat = False class ObjectQuery(object): type = u'object' def __init__(self, queries, wildcard): self.queries = queries self.wildcard = wildcard self.label = None self.optional = False self.repeat = False self.value = queries class ArrayQuery(object): type = u'array' def __init__(self, queries, repeat): self.queries = queries self.repeat = repeat self.label = None self.optional = False self.value = queries class AddressQuery(object): type = u'address' def __init__(self): self.value = None self.label = None self.optional = False self.repeat = False class ReferenceQuery(object): type = u'reference' def __init__(self, subq): self.value = subq self.label = None self.optional = False self.repeat = False
[ "caty.core.exception.throw_caty_exception", "caty.jsontools.untagged", "caty.core.command.VarStorage", "caty.core.script.builder.CommandBuilder", "caty.core.command.param.Argument", "caty.core.script.interpreter.executor.CommandExecutor" ]
[((613, 631), 'caty.jsontools.untagged', 'json.untagged', (['ref'], {}), '(ref)\n', (626, 631), True, 'import caty.jsontools as json\n'), ((1247, 1277), 'caty.core.script.builder.CommandBuilder', 'CommandBuilder', (['facilities', '{}'], {}), '(facilities, {})\n', (1261, 1277), False, 'from caty.core.script.builder import CommandBuilder\n'), ((1441, 1463), 'caty.core.command.VarStorage', 'VarStorage', (['None', 'None'], {}), '(None, None)\n', (1451, 1463), False, 'from caty.core.command import VarStorage\n'), ((901, 947), 'caty.core.exception.throw_caty_exception', 'throw_caty_exception', (['u"""CommandNotFound"""', 'name'], {}), "(u'CommandNotFound', name)\n", (921, 947), False, 'from caty.core.exception import throw_caty_exception\n'), ((1559, 1605), 'caty.core.script.interpreter.executor.CommandExecutor', 'CommandExecutor', (['cmd_instance', 'app', 'facilities'], {}), '(cmd_instance, app, facilities)\n', (1574, 1605), False, 'from caty.core.script.interpreter.executor import CommandExecutor\n'), ((1216, 1227), 'caty.core.command.param.Argument', 'Argument', (['v'], {}), '(v)\n', (1224, 1227), False, 'from caty.core.command.param import Option, Argument\n')]
# Generated by Django 3.1.2 on 2020-11-18 01:33

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('zonas', '0003_grupozona_zonaengrupo'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='zonaengrupo',
            unique_together={('grupo', 'zona')},
        ),
    ]
[ "django.db.migrations.AlterUniqueTogether" ]
[((228, 320), 'django.db.migrations.AlterUniqueTogether', 'migrations.AlterUniqueTogether', ([], {'name': '"""zonaengrupo"""', 'unique_together': "{('grupo', 'zona')}"}), "(name='zonaengrupo', unique_together={(\n 'grupo', 'zona')})\n", (258, 320), False, 'from django.db import migrations\n')]
# Generated by Django 3.1 on 2020-08-08 12:30

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('retro_news', '0002_blogarticle'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='blogarticle',
            name='date_created',
        ),
        migrations.AddField(
            model_name='blogarticle',
            name='created',
            field=models.DateTimeField(auto_now=True),
        ),
        migrations.AlterField(
            model_name='customuser',
            name='first_name',
            field=models.CharField(blank=True, max_length=150, verbose_name='first name'),
        ),
    ]
[ "django.db.migrations.RemoveField", "django.db.models.DateTimeField", "django.db.models.CharField" ]
[((229, 298), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""blogarticle"""', 'name': '"""date_created"""'}), "(model_name='blogarticle', name='date_created')\n", (251, 298), False, 'from django.db import migrations, models\n'), ((448, 483), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (468, 483), False, 'from django.db import migrations, models\n'), ((613, 684), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(150)', 'verbose_name': '"""first name"""'}), "(blank=True, max_length=150, verbose_name='first name')\n", (629, 684), False, 'from django.db import migrations, models\n')]
#-*- coding:utf-8 _*-
"""
@author:charlesXu
@file: urls.py
@desc: 接口url
@time: 2019/05/10
"""

# =============== #
# apis 下面的路由 #
# ===============

from django.urls import path

from Chatbot_Rest.Api.intent_detection.intent_rest_controller import intent_controller
from Chatbot_Rest.Api.info_extraction.entity_extraction_controller import entity_ext_controller
from Chatbot_Rest.Api.bot.bot_controller import get_chat_msg # 聊天
from Chatbot_Rest.Api.time_convert.time_convert_server import time_convert # 时间转换器
from Chatbot_Rest.Api.Sensitive_word.Sensitive_word_controller import sensitive_controller
from Chatbot_Rest.Api.sim_sentence import Sim_sentence_controller

urlpatterns = [
    path('entity', entity_ext_controller), # 实体抽取
    path('intent', intent_controller), # 意图识别
    path('chat', get_chat_msg), # chatbot接口
    path('time', time_convert), # 时间转换器
    path('sensitive', sensitive_controller), # 敏感词检测
    path('sim_sentence', Sim_sentence_controller.sim_sentence_controller), # 短文本相似度
]
[ "django.urls.path" ]
[((705, 742), 'django.urls.path', 'path', (['"""entity"""', 'entity_ext_controller'], {}), "('entity', entity_ext_controller)\n", (709, 742), False, 'from django.urls import path\n'), ((755, 788), 'django.urls.path', 'path', (['"""intent"""', 'intent_controller'], {}), "('intent', intent_controller)\n", (759, 788), False, 'from django.urls import path\n'), ((805, 831), 'django.urls.path', 'path', (['"""chat"""', 'get_chat_msg'], {}), "('chat', get_chat_msg)\n", (809, 831), False, 'from django.urls import path\n'), ((860, 886), 'django.urls.path', 'path', (['"""time"""', 'time_convert'], {}), "('time', time_convert)\n", (864, 886), False, 'from django.urls import path\n'), ((904, 943), 'django.urls.path', 'path', (['"""sensitive"""', 'sensitive_controller'], {}), "('sensitive', sensitive_controller)\n", (908, 943), False, 'from django.urls import path\n'), ((966, 1035), 'django.urls.path', 'path', (['"""sim_sentence"""', 'Sim_sentence_controller.sim_sentence_controller'], {}), "('sim_sentence', Sim_sentence_controller.sim_sentence_controller)\n", (970, 1035), False, 'from django.urls import path\n')]
import os
import re
import subprocess

from mgstest import require_apache_modules, require_match
from unittest import SkipTest


def prepare_env():
    require_apache_modules('mod_http2.so')
    curl = os.environ['HTTP_CLI']
    if curl == 'no':
        raise SkipTest('curl not found!')
    proc = subprocess.run([curl, '-V'], stdout=subprocess.PIPE, check=True, text=True)
    if not re.search(r'\bHTTP2\b', proc.stdout):
        raise SkipTest(f'{curl} does not support HTTP/2!')


def run_connection(testname, conn_log, response_log):
    """Check if HTTP/2 connections using mod_gnutls and mod_http2 work."""
    url = f'https://{os.environ["TEST_HOST"]}:{os.environ["TEST_PORT"]}' \
        '/status?auto'
    command = [os.environ['HTTP_CLI'], '--http2', '--location', '--verbose', '--cacert', 'authority/x509.pem', url]
    proc = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
    print(proc.stderr)
    print(proc.stderr, file=conn_log)
    print(proc.stdout)
    print(proc.stdout, file=response_log)
    proc.check_returncode()


def post_check(conn_log, response_log):
    print('Checking for HTTP/2 in logged header:')
    print(require_match(re.compile(r'\bHTTP/2 200\b'), conn_log).group(0))
    print('Checking for TLS session status:')
    print(require_match(re.compile(r'^Current TLS session:\s\(TLS.*$'), response_log)
          .group(0))
[ "subprocess.run", "mgstest.require_apache_modules", "unittest.SkipTest", "re.search", "re.compile" ]
[((151, 189), 'mgstest.require_apache_modules', 'require_apache_modules', (['"""mod_http2.so"""'], {}), "('mod_http2.so')\n", (173, 189), False, 'from mgstest import require_apache_modules, require_match\n'), ((298, 373), 'subprocess.run', 'subprocess.run', (["[curl, '-V']"], {'stdout': 'subprocess.PIPE', 'check': '(True)', 'text': '(True)'}), "([curl, '-V'], stdout=subprocess.PIPE, check=True, text=True)\n", (312, 373), False, 'import subprocess\n'), ((881, 967), 'subprocess.run', 'subprocess.run', (['command'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'text': '(True)'}), '(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,\n text=True)\n', (895, 967), False, 'import subprocess\n'), ((259, 286), 'unittest.SkipTest', 'SkipTest', (['"""curl not found!"""'], {}), "('curl not found!')\n", (267, 286), False, 'from unittest import SkipTest\n'), ((411, 448), 're.search', 're.search', (['"""\\\\bHTTP2\\\\b"""', 'proc.stdout'], {}), "('\\\\bHTTP2\\\\b', proc.stdout)\n", (420, 448), False, 'import re\n'), ((463, 507), 'unittest.SkipTest', 'SkipTest', (['f"""{curl} does not support HTTP/2!"""'], {}), "(f'{curl} does not support HTTP/2!')\n", (471, 507), False, 'from unittest import SkipTest\n'), ((1287, 1317), 're.compile', 're.compile', (['"""\\\\bHTTP/2 200\\\\b"""'], {}), "('\\\\bHTTP/2 200\\\\b')\n", (1297, 1317), False, 'import re\n'), ((1408, 1455), 're.compile', 're.compile', (['"""^Current TLS session:\\\\s\\\\(TLS.*$"""'], {}), "('^Current TLS session:\\\\s\\\\(TLS.*$')\n", (1418, 1455), False, 'import re\n')]
# Generated by Django 2.1 on 2018-09-11 23:42

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('django_app', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='users',
            name='portfolio_picture',
            field=models.ImageField(blank=True, default='media/images/defaultuser.png', upload_to='media/images/'),
        ),
    ]
[ "django.db.models.ImageField" ]
[((336, 436), 'django.db.models.ImageField', 'models.ImageField', ([], {'blank': '(True)', 'default': '"""media/images/defaultuser.png"""', 'upload_to': '"""media/images/"""'}), "(blank=True, default='media/images/defaultuser.png',\n upload_to='media/images/')\n", (353, 436), False, 'from django.db import migrations, models\n')]
from django.test import TestCase
from django.contrib.auth.models import User
from tastypie.test import ResourceTestCaseMixin
from tastypie.models import ApiKey
import json

# Create your tests here.


class Resourcetest(ResourceTestCaseMixin, TestCase):
    def setUp(self):
        super().setUp()
        user = User.objects.create_user(
            "api_client_1",
            "<EMAIL>",
            "<PASSWORD>",
        )
        user.save()
        api_key = ApiKey.objects.create(
            user=user,
            key="<KEY>"
        )
        api_key.save()

    def get_credentials(self):
        return self.create_apikey(
            "api_client_1",
            "<KEY>"
        )

    def test_get_detail_unauthenticated(self):
        response = self.api_client.get("/api/v1/peoples/", format="json")
        self.assertHttpUnauthorized(response)

    def test_throttling(self):
        count = 0
        for i in range(1, 115):
            response = self.api_client.get(
                "/api/v1/peoples/",
                data={
                    "username": "api_client_1",
                    "api_key": "<KEY>",
                    "name": "c-3po",
                    "birth_year": "112BBY",
                },
                format="json",
                authentication=self.get_credentials(),
            )
        self.assertHttpTooManyRequests(response)

    def test_external_limit(self):
        self.uri = "/api/v1/peoples/"
        count = 0
        for i in range(1, 12):
            t_name = "c-3po{}".format(i)
            response = self.api_client.get(
                self.uri,
                data={
                    "username": "api_client_1",
                    "api_key": "<KEY>",
                    "name": t_name,
                    "birth_year": "112BBY",
                },
                format="json",
            )
        self.assertEqual(response.status_code, 503)
[ "tastypie.models.ApiKey.objects.create", "django.contrib.auth.models.User.objects.create_user" ]
[((312, 377), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', (['"""api_client_1"""', '"""<EMAIL>"""', '"""<PASSWORD>"""'], {}), "('api_client_1', '<EMAIL>', '<PASSWORD>')\n", (336, 377), False, 'from django.contrib.auth.models import User\n'), ((463, 508), 'tastypie.models.ApiKey.objects.create', 'ApiKey.objects.create', ([], {'user': 'user', 'key': '"""<KEY>"""'}), "(user=user, key='<KEY>')\n", (484, 508), False, 'from tastypie.models import ApiKey\n')]
import pytest from pytest import raises from vyper import compiler from vyper.exceptions import SyntaxException, TypeMismatch fail_list = [ (""" @public def foo(): x: bytes[9] = raw_call(0x1234567890123456789012345678901234567890, b"cow", outsize=4, outsize=9) """, SyntaxException), """ @public def foo(): raw_log([b"cow"], b"dog") """, """ @public def foo(): raw_log([], 0x1234567890123456789012345678901234567890) """, """ @public def foo(): # fails because raw_call without outsize does not return a value x: bytes[9] = raw_call(0x1234567890123456789012345678901234567890, b"cow") """, ] @pytest.mark.parametrize('bad_code', fail_list) def test_raw_call_fail(bad_code): if isinstance(bad_code, tuple): with raises(bad_code[1]): compiler.compile_code(bad_code[0]) else: with raises(TypeMismatch): compiler.compile_code(bad_code) valid_list = [ """ @public def foo(): x: bytes[9] = raw_call( 0x1234567890123456789012345678901234567890, b"cow", outsize=4, gas=595757 ) """, """ @public def foo(): x: bytes[9] = raw_call( 0x1234567890123456789012345678901234567890, b"cow", outsize=4, gas=595757, value=as_wei_value(9, "wei") ) """, """ @public def foo(): x: bytes[9] = raw_call( 0x1234567890123456789012345678901234567890, b"cow", outsize=4, gas=595757, value=9 ) """, """ @public def foo(): raw_call(0x1234567890123456789012345678901234567890, b"cow") """, ] @pytest.mark.parametrize('good_code', valid_list) def test_raw_call_success(good_code): assert compiler.compile_code(good_code) is not None
[ "pytest.mark.parametrize", "pytest.raises", "vyper.compiler.compile_code" ]
[((649, 695), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""bad_code"""', 'fail_list'], {}), "('bad_code', fail_list)\n", (672, 695), False, 'import pytest\n'), ((1643, 1691), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""good_code"""', 'valid_list'], {}), "('good_code', valid_list)\n", (1666, 1691), False, 'import pytest\n'), ((1741, 1773), 'vyper.compiler.compile_code', 'compiler.compile_code', (['good_code'], {}), '(good_code)\n', (1762, 1773), False, 'from vyper import compiler\n'), ((780, 799), 'pytest.raises', 'raises', (['bad_code[1]'], {}), '(bad_code[1])\n', (786, 799), False, 'from pytest import raises\n'), ((813, 847), 'vyper.compiler.compile_code', 'compiler.compile_code', (['bad_code[0]'], {}), '(bad_code[0])\n', (834, 847), False, 'from vyper import compiler\n'), ((871, 891), 'pytest.raises', 'raises', (['TypeMismatch'], {}), '(TypeMismatch)\n', (877, 891), False, 'from pytest import raises\n'), ((905, 936), 'vyper.compiler.compile_code', 'compiler.compile_code', (['bad_code'], {}), '(bad_code)\n', (926, 936), False, 'from vyper import compiler\n')]
import numpy as np import pandas as pd from sklearn.ensemble import ExtraTreesRegressor from sklearn.pipeline import make_pipeline, make_union from sklearn.preprocessing import Binarizer, MinMaxScaler from sklearn.tree import DecisionTreeRegressor from tpot.builtins import StackingEstimator, ZeroCount from tpot.export_utils import set_param_recursive from metstab_shap.config import utils_section, csv_section from metstab_shap.config import parse_data_config, parse_representation_config, parse_task_config from metstab_shap.data import load_data # load data (and change to classification if needed) data_cfg = parse_data_config('configs/data/rat.cfg') repr_cfg = parse_representation_config('configs/repr/maccs.cfg') task_cfg = parse_task_config('configs/task/regression.cfg') x, y, _, test_x, test_y, smiles, test_smiles = load_data(data_cfg, **repr_cfg[utils_section]) training_features = x training_target = y testing_features = test_x # Average CV score on the training set was: -0.15289999993179348 exported_pipeline = make_pipeline( ZeroCount(), MinMaxScaler(), StackingEstimator(estimator=DecisionTreeRegressor(max_depth=5, max_features=0.25, min_samples_leaf=3, min_samples_split=14, splitter="best")), StackingEstimator(estimator=ExtraTreesRegressor(bootstrap=False, max_depth=4, max_features=0.7500000000000001, max_samples=None, min_samples_leaf=1, min_samples_split=10, n_estimators=1000)), Binarizer(threshold=0.9), ExtraTreesRegressor(bootstrap=False, max_depth=None, max_features=0.1, max_samples=0.7, min_samples_leaf=1, min_samples_split=4, n_estimators=500) ) # Fix random state for all the steps in exported pipeline set_param_recursive(exported_pipeline.steps, 'random_state', 666) exported_pipeline.fit(training_features, training_target) results = exported_pipeline.predict(testing_features) print('Success.')
[ "tpot.builtins.ZeroCount", "metstab_shap.config.parse_task_config", "metstab_shap.config.parse_data_config", "tpot.export_utils.set_param_recursive", "sklearn.tree.DecisionTreeRegressor", "sklearn.preprocessing.MinMaxScaler", "sklearn.ensemble.ExtraTreesRegressor", "metstab_shap.data.load_data", "sklearn.preprocessing.Binarizer", "metstab_shap.config.parse_representation_config" ]
[((616, 657), 'metstab_shap.config.parse_data_config', 'parse_data_config', (['"""configs/data/rat.cfg"""'], {}), "('configs/data/rat.cfg')\n", (633, 657), False, 'from metstab_shap.config import parse_data_config, parse_representation_config, parse_task_config\n'), ((669, 722), 'metstab_shap.config.parse_representation_config', 'parse_representation_config', (['"""configs/repr/maccs.cfg"""'], {}), "('configs/repr/maccs.cfg')\n", (696, 722), False, 'from metstab_shap.config import parse_data_config, parse_representation_config, parse_task_config\n'), ((734, 782), 'metstab_shap.config.parse_task_config', 'parse_task_config', (['"""configs/task/regression.cfg"""'], {}), "('configs/task/regression.cfg')\n", (751, 782), False, 'from metstab_shap.config import parse_data_config, parse_representation_config, parse_task_config\n'), ((830, 876), 'metstab_shap.data.load_data', 'load_data', (['data_cfg'], {}), '(data_cfg, **repr_cfg[utils_section])\n', (839, 876), False, 'from metstab_shap.data import load_data\n'), ((1668, 1733), 'tpot.export_utils.set_param_recursive', 'set_param_recursive', (['exported_pipeline.steps', '"""random_state"""', '(666)'], {}), "(exported_pipeline.steps, 'random_state', 666)\n", (1687, 1733), False, 'from tpot.export_utils import set_param_recursive\n'), ((1051, 1062), 'tpot.builtins.ZeroCount', 'ZeroCount', ([], {}), '()\n', (1060, 1062), False, 'from tpot.builtins import StackingEstimator, ZeroCount\n'), ((1068, 1082), 'sklearn.preprocessing.MinMaxScaler', 'MinMaxScaler', ([], {}), '()\n', (1080, 1082), False, 'from sklearn.preprocessing import Binarizer, MinMaxScaler\n'), ((1431, 1455), 'sklearn.preprocessing.Binarizer', 'Binarizer', ([], {'threshold': '(0.9)'}), '(threshold=0.9)\n', (1440, 1455), False, 'from sklearn.preprocessing import Binarizer, MinMaxScaler\n'), ((1461, 1611), 'sklearn.ensemble.ExtraTreesRegressor', 'ExtraTreesRegressor', ([], {'bootstrap': '(False)', 'max_depth': 'None', 'max_features': '(0.1)', 'max_samples': '(0.7)', 'min_samples_leaf': '(1)', 'min_samples_split': '(4)', 'n_estimators': '(500)'}), '(bootstrap=False, max_depth=None, max_features=0.1,\n max_samples=0.7, min_samples_leaf=1, min_samples_split=4, n_estimators=500)\n', (1480, 1611), False, 'from sklearn.ensemble import ExtraTreesRegressor\n'), ((1116, 1232), 'sklearn.tree.DecisionTreeRegressor', 'DecisionTreeRegressor', ([], {'max_depth': '(5)', 'max_features': '(0.25)', 'min_samples_leaf': '(3)', 'min_samples_split': '(14)', 'splitter': '"""best"""'}), "(max_depth=5, max_features=0.25, min_samples_leaf=3,\n min_samples_split=14, splitter='best')\n", (1137, 1232), False, 'from sklearn.tree import DecisionTreeRegressor\n'), ((1263, 1433), 'sklearn.ensemble.ExtraTreesRegressor', 'ExtraTreesRegressor', ([], {'bootstrap': '(False)', 'max_depth': '(4)', 'max_features': '(0.7500000000000001)', 'max_samples': 'None', 'min_samples_leaf': '(1)', 'min_samples_split': '(10)', 'n_estimators': '(1000)'}), '(bootstrap=False, max_depth=4, max_features=\n 0.7500000000000001, max_samples=None, min_samples_leaf=1,\n min_samples_split=10, n_estimators=1000)\n', (1282, 1433), False, 'from sklearn.ensemble import ExtraTreesRegressor\n')]
import bs4
import click
import logging
import requests
from utils.now import now
from request_with_fake_headers import request_with_fake_headers

# from crawl_none_category import crawl_none_category_dictionary
from utils.soup_library import (
    crawl_from_internals,
    get_a_soup_of_difference,
    get_external_url_set,
    get_internal_url_set,
    is_xe_based_soup,
)
from utils.db_library import insert_row, select_urls_by_category, select_all_urls
from typing import Set, List, Optional
from urllib.parse import urlparse, parse_qsl, urlencode, urlunparse
from collections import Counter
from utils.url_library import (
    is_internal_url,
    assemble_url,
    validate_url,
    normalize_url,
    is_internal_specific_url,
)

logging.basicConfig(
    filename="crawl.log", level=logging.DEBUG, format="%(asctime)s %(message)s"
)


def classify_tag(text: str) -> str:
    category_keywords_dictionary = {
        "webtoon": ["웹툰", "webtoon", "애니", "만화", "툰", "코믹"],
        "sportslive": ["스포츠라이브", "중계", "sportslive"],
        "torrent": ["토렌트", "torrent", "토렌토", "토렌", "토랜"],
        "streaming": ["다시보기", "영화", "드라마", "TV", "티비"],
        "adult": ["성인", "야동", "19영상", "서양", "동양"],
        "link": ["링크모음", "주소모음"],
    }
    for (category, keywords) in category_keywords_dictionary.items():
        if any(keyword in text for keyword in keywords):
            return category
    return "else"


def is_redirected(url: str, response: requests.Response):
    # TODO: needs more testing
    return url != response.url


def get_category_dictionary(main_url: str, main_soup: bs4.BeautifulSoup):
    # TODO: reconsider whether this is really necessary
    div_soup = bs4.BeautifulSoup(
        "\n".join([str(div_tag) for div_tag in main_soup.find_all("div", limit=5)]),
        "html5lib",
    )
    categories = [
        "webtoon",
        "sportslive",
        "adult",
        "torrent",
        "streaming",
        "link",
    ]
    a_tags = div_soup.find_all("a", {"href": True})
    url_text_tuples = [(a_tag["href"].strip(), a_tag.text) for a_tag in a_tags]
    category_dictionary = {
        # TODO: messy, needs cleanup...
        category: set(
            filter(
                validate_url,
                [
                    href if "http" in href else normalize_url(main_url) + assemble_url(href)
                    for (href, text) in url_text_tuples
                    if (classify_tag(text) == category)
                    and is_internal_url(href, main_url)
                    # filtered so category urls can be extracted from GNUBOARD-based sites
                    and href.find("&wr_id") == -1
                    and href.find("document_srl") == -1
                    and not (
                        # for XE-based sites that hide the index, only reference down to the board level
                        is_xe_based_soup(main_soup) == True
                        and len(urlparse(href).path.split("/")) > 2
                    )
                    and href != main_url
                    and "#" not in href
                ],
            )
        )
        for category in categories
    }
    return category_dictionary


def get_next_page_url(a_soup: bs4.BeautifulSoup, current_url: str) -> Optional[str]:
    parse_result = urlparse(current_url)
    parsed_query = parse_result.query
    parsed_query_string_dictionary = dict(parse_qsl(parsed_query))

    # use the page from the query string if present, otherwise assume page 1
    current_page = (
        int(parsed_query_string_dictionary["page"]) if "page" in parsed_query else 1
    )
    next_page_in_string = str(current_page + 1)
    next_parsed_query_string_dictionary = {
        **parsed_query_string_dictionary,
        "page": next_page_in_string,
    }
    next_page_parsed = [
        # only the query attribute gets its page replaced with the next page;
        # see the table at https://docs.python.org/ko/3/library/urllib.parse.html
        urlencode(next_parsed_query_string_dictionary) if index == 4 else attirbute
        for index, attirbute in enumerate(parse_result)
    ]
    next_page_url = urlunparse(next_page_parsed)

    # return the url only when a link that calls the next page exists in the html
    return (
        next_page_url
        if any(
            [
                a_tag["href"].strip()
                for a_tag in a_soup.find_all("a", {"href": True})
                if "page=" + next_page_in_string in a_tag["href"].strip()
            ]
        )
        else None
    )


def get_external_internal_urls(
    category_url: str, main_url: str, main_soup: bs4.BeautifulSoup
):
    category_response = request_with_fake_headers(category_url)
    category_soup = bs4.BeautifulSoup(category_response.content, "html5lib")
    a_soup_of_category_diff_main = get_a_soup_of_difference(category_soup, main_soup)

    next_page_url = get_next_page_url(a_soup_of_category_diff_main, category_url)

    page_count = 0
    category_soups: List[bs4.BeautifulSoup] = [category_soup]
    while next_page_url is not None and page_count < 5:
        current_page_url = next_page_url
        current_page_response = request_with_fake_headers(current_page_url)
        current_page_soup = bs4.BeautifulSoup(current_page_response.content, "html5lib")
        # must diff against main so the a tags that hold page links survive
        current_diff_a_soup = get_a_soup_of_difference(current_page_soup, main_soup)
        category_soups.append(current_page_soup)
        next_page_url = get_next_page_url(current_diff_a_soup, current_page_url)
        page_count += 1

    diff_soups = (
        len(category_soups) > 1
        and [
            # if there are next pages, diff them against each other here to drop page-related tags
            get_a_soup_of_difference(category_soups[index], category_soups[index - 1])
            for index, _ in enumerate(category_soups)
        ]
        or [a_soup_of_category_diff_main]
    )

    # build a set per page so urls that show up several times (e.g. ads) appear only once
    external_urls: List[Set[str]] = [
        get_external_url_set(diff_soup, main_url) for diff_soup in diff_soups
    ]
    internal_urls: List[Set[str]] = [
        set(
            filter(
                # filtering by category_url should drop the internal_urls built by appending to main_url
                lambda url: is_internal_specific_url(url, category_url),
                get_internal_url_set(diff_soup, main_url),
            )
        )
        for diff_soup in diff_soups
    ]

    def integrate_urls(url_sets: List[Set[str]]) -> List[str]:
        # flatten the 2D list into 1D and count each element with a Counter
        counter = Counter([url for url_set in url_sets for url in url_set])
        # urls that repeat across several pages are probably ads, so filter them out
        urls = [url for url, count in counter.items() if count == 1]
        return urls

    return {
        "external": integrate_urls(external_urls),
        "internal": integrate_urls(internal_urls),
    }


def crawl_link_collection_site(main_urls: List[str], visited: List[str], options):
    limit = options["limit"]
    if limit == 0:
        return 0
    force_crawl = options["force_crawl"]

    next_urls = []
    for main_url in main_urls:
        if validate_url(main_url) == False:
            click.echo(f"{main_url} is invalid. Please check.")
            logging.error(f"{main_url} is invalid.")
            continue
        main_url = normalize_url(main_url)

        if main_url in visited and force_crawl == False:
            click.echo(
                f"{main_url} has been already visited. Please check for illegals.db or set --force-crawl option to True."
            )
            logging.warning(
                f"{main_url} has been already visited. Please check for illegals.db or set --force-crawl option to True."
            )
            continue

        response = request_with_fake_headers(main_url)
        if is_redirected(main_url, response) == True:
            main_url = normalize_url(response.url)
        soup = bs4.BeautifulSoup(response.content, "html5lib")

        category_dictionary = get_category_dictionary(main_url, soup)
        click.echo("Collecting category dictionary is done.")
        # print(category_dictionary)

        specific_url_dict = dict()

        # is_none_category_url = True
        # for category_urls in category_dictionary.values():
        #     if len(category_urls) > 0:
        #         is_none_category_url = False

        # if is_none_category_url == True:
        #     specific_url_dict = crawl_none_category_dictionary(main_url)
        # else:
        for category, category_urls in category_dictionary.items():
            for category_url in category_urls:
                result = get_external_internal_urls(category_url, main_url, soup)
                click.echo(f"Collecting urls for {category} of {main_url} is done.")
                specific_url_dict[category] = list(
                    set(
                        result["external"]
                        if len(result["external"]) > 0
                        else crawl_from_internals(result["internal"], main_url)
                    )
                )

        urls_in_db = select_all_urls()
        # print(urls_in_db)

        def get_default_row(url, expected_category):
            return {
                "main_url": url,
                "expected_category": expected_category,
                "main_html_path": None,
                "captured_url": None,
                "captured_file_path": None,
                "google_analytics_code": None,
                "telegram_url": None,
                "twitter_url": None,
                "similarity_group": None,
                "engine": None,
                "next_url": None,
                "visited": False,
                "site_available": False,
                "ip_address": None,
                "created_at": now(),
                "last_visited_at": None,
            }

        for category, category_urls in specific_url_dict.items():
            for url_from_dict in category_urls:
                normalized_url_from_dict = normalize_url(url_from_dict)
                print(normalized_url_from_dict)
                if normalized_url_from_dict not in urls_in_db:
                    insert_row(get_default_row(normalized_url_from_dict, category))
                    if category == "link":
                        next_urls.append(url_from_dict)

        if main_url not in urls_in_db:
            insert_row(get_default_row(main_url, "link"))

        visited.append(main_url)
        click.echo(f"Crawling for {main_url} is done.")

    crawl_link_collection_site(
        next_urls, visited, {"limit": limit - 1, "force_crawl": force_crawl}
    )
    return 1


@click.command()
@click.argument("url")
@click.option("-l", "--limit", default=1, type=int, help="depth for recursive crawling")
@click.option(
    "-f",
    "--force-crawl",
    default=False,
    type=bool,
    help="bool for force crawl visited site",
)
def main(url, limit: int, force_crawl: bool):
    """Crawl site which collects illegal site urls"""
    logging.info("PROCESS STARTED")
    visited_link_urls = select_urls_by_category("link")
    crawl_link_collection_site(
        [url], visited_link_urls, {"limit": limit, "force_crawl": force_crawl}
    )


if __name__ == "__main__":
    # pylint: disable=no-value-for-parameter
    main()
[ "utils.url_library.is_internal_url", "click.option", "click.echo", "utils.soup_library.get_a_soup_of_difference", "urllib.parse.urlparse", "logging.error", "logging.warning", "request_with_fake_headers.request_with_fake_headers", "click.command", "utils.url_library.validate_url", "utils.soup_library.get_external_url_set", "utils.url_library.assemble_url", "collections.Counter", "utils.now.now", "urllib.parse.urlencode", "utils.soup_library.is_xe_based_soup", "bs4.BeautifulSoup", "urllib.parse.parse_qsl", "utils.soup_library.get_internal_url_set", "utils.soup_library.crawl_from_internals", "utils.db_library.select_urls_by_category", "click.argument", "logging.basicConfig", "utils.url_library.normalize_url", "urllib.parse.urlunparse", "logging.info", "utils.db_library.select_all_urls", "utils.url_library.is_internal_specific_url" ]
[((738, 839), 'logging.basicConfig', 'logging.basicConfig', ([], {'filename': '"""crawl.log"""', 'level': 'logging.DEBUG', 'format': '"""%(asctime)s %(message)s"""'}), "(filename='crawl.log', level=logging.DEBUG, format=\n '%(asctime)s %(message)s')\n", (757, 839), False, 'import logging\n'), ((10442, 10457), 'click.command', 'click.command', ([], {}), '()\n', (10455, 10457), False, 'import click\n'), ((10459, 10480), 'click.argument', 'click.argument', (['"""url"""'], {}), "('url')\n", (10473, 10480), False, 'import click\n'), ((10482, 10574), 'click.option', 'click.option', (['"""-l"""', '"""--limit"""'], {'default': '(1)', 'type': 'int', 'help': '"""depth for recursive crawling"""'}), "('-l', '--limit', default=1, type=int, help=\n 'depth for recursive crawling')\n", (10494, 10574), False, 'import click\n'), ((10571, 10679), 'click.option', 'click.option', (['"""-f"""', '"""--force-crawl"""'], {'default': '(False)', 'type': 'bool', 'help': '"""bool for force crawl visited site"""'}), "('-f', '--force-crawl', default=False, type=bool, help=\n 'bool for force crawl visited site')\n", (10583, 10679), False, 'import click\n'), ((3198, 3219), 'urllib.parse.urlparse', 'urlparse', (['current_url'], {}), '(current_url)\n', (3206, 3219), False, 'from urllib.parse import urlparse, parse_qsl, urlencode, urlunparse\n'), ((3952, 3980), 'urllib.parse.urlunparse', 'urlunparse', (['next_page_parsed'], {}), '(next_page_parsed)\n', (3962, 3980), False, 'from urllib.parse import urlparse, parse_qsl, urlencode, urlunparse\n'), ((4440, 4479), 'request_with_fake_headers.request_with_fake_headers', 'request_with_fake_headers', (['category_url'], {}), '(category_url)\n', (4465, 4479), False, 'from request_with_fake_headers import request_with_fake_headers\n'), ((4500, 4556), 'bs4.BeautifulSoup', 'bs4.BeautifulSoup', (['category_response.content', '"""html5lib"""'], {}), "(category_response.content, 'html5lib')\n", (4517, 4556), False, 'import bs4\n'), ((4593, 4643), 'utils.soup_library.get_a_soup_of_difference', 'get_a_soup_of_difference', (['category_soup', 'main_soup'], {}), '(category_soup, main_soup)\n', (4617, 4643), False, 'from utils.soup_library import crawl_from_internals, get_a_soup_of_difference, get_external_url_set, get_internal_url_set, is_xe_based_soup\n'), ((10802, 10833), 'logging.info', 'logging.info', (['"""PROCESS STARTED"""'], {}), "('PROCESS STARTED')\n", (10814, 10833), False, 'import logging\n'), ((10858, 10889), 'utils.db_library.select_urls_by_category', 'select_urls_by_category', (['"""link"""'], {}), "('link')\n", (10881, 10889), False, 'from utils.db_library import insert_row, select_urls_by_category, select_all_urls\n'), ((3300, 3323), 'urllib.parse.parse_qsl', 'parse_qsl', (['parsed_query'], {}), '(parsed_query)\n', (3309, 3323), False, 'from urllib.parse import urlparse, parse_qsl, urlencode, urlunparse\n'), ((4937, 4980), 'request_with_fake_headers.request_with_fake_headers', 'request_with_fake_headers', (['current_page_url'], {}), '(current_page_url)\n', (4962, 4980), False, 'from request_with_fake_headers import request_with_fake_headers\n'), ((5009, 5069), 'bs4.BeautifulSoup', 'bs4.BeautifulSoup', (['current_page_response.content', '"""html5lib"""'], {}), "(current_page_response.content, 'html5lib')\n", (5026, 5069), False, 'import bs4\n'), ((5143, 5197), 'utils.soup_library.get_a_soup_of_difference', 'get_a_soup_of_difference', (['current_page_soup', 'main_soup'], {}), '(current_page_soup, main_soup)\n', (5167, 5197), False, 'from utils.soup_library import 
crawl_from_internals, get_a_soup_of_difference, get_external_url_set, get_internal_url_set, is_xe_based_soup\n'), ((5779, 5820), 'utils.soup_library.get_external_url_set', 'get_external_url_set', (['diff_soup', 'main_url'], {}), '(diff_soup, main_url)\n', (5799, 5820), False, 'from utils.soup_library import crawl_from_internals, get_a_soup_of_difference, get_external_url_set, get_internal_url_set, is_xe_based_soup\n'), ((6336, 6393), 'collections.Counter', 'Counter', (['[url for url_set in url_sets for url in url_set]'], {}), '([url for url_set in url_sets for url in url_set])\n', (6343, 6393), False, 'from collections import Counter\n'), ((7099, 7122), 'utils.url_library.normalize_url', 'normalize_url', (['main_url'], {}), '(main_url)\n', (7112, 7122), False, 'from utils.url_library import is_internal_url, assemble_url, validate_url, normalize_url, is_internal_specific_url\n'), ((7546, 7581), 'request_with_fake_headers.request_with_fake_headers', 'request_with_fake_headers', (['main_url'], {}), '(main_url)\n', (7571, 7581), False, 'from request_with_fake_headers import request_with_fake_headers\n'), ((7702, 7749), 'bs4.BeautifulSoup', 'bs4.BeautifulSoup', (['response.content', '"""html5lib"""'], {}), "(response.content, 'html5lib')\n", (7719, 7749), False, 'import bs4\n'), ((7829, 7882), 'click.echo', 'click.echo', (['"""Collecting category dictionary is done."""'], {}), "('Collecting category dictionary is done.')\n", (7839, 7882), False, 'import click\n'), ((8876, 8893), 'utils.db_library.select_all_urls', 'select_all_urls', ([], {}), '()\n', (8891, 8893), False, 'from utils.db_library import insert_row, select_urls_by_category, select_all_urls\n'), ((10261, 10308), 'click.echo', 'click.echo', (['f"""Crawling for {main_url} is done."""'], {}), "(f'Crawling for {main_url} is done.')\n", (10271, 10308), False, 'import click\n'), ((3794, 3840), 'urllib.parse.urlencode', 'urlencode', (['next_parsed_query_string_dictionary'], {}), '(next_parsed_query_string_dictionary)\n', (3803, 3840), False, 'from urllib.parse import urlparse, parse_qsl, urlencode, urlunparse\n'), ((6908, 6930), 'utils.url_library.validate_url', 'validate_url', (['main_url'], {}), '(main_url)\n', (6920, 6930), False, 'from utils.url_library import is_internal_url, assemble_url, validate_url, normalize_url, is_internal_specific_url\n'), ((6953, 7004), 'click.echo', 'click.echo', (['f"""{main_url} is invalid. Please check."""'], {}), "(f'{main_url} is invalid. Please check.')\n", (6963, 7004), False, 'import click\n'), ((7017, 7057), 'logging.error', 'logging.error', (['f"""{main_url} is invalid."""'], {}), "(f'{main_url} is invalid.')\n", (7030, 7057), False, 'import logging\n'), ((7192, 7319), 'click.echo', 'click.echo', (['f"""{main_url} has been already visited. Please check for illegals.db or set --force-crawl option to True."""'], {}), "(\n f'{main_url} has been already visited. Please check for illegals.db or set --force-crawl option to True.'\n )\n", (7202, 7319), False, 'import click\n'), ((7352, 7484), 'logging.warning', 'logging.warning', (['f"""{main_url} has been already visited. Please check for illegals.db or set --force-crawl option to True."""'], {}), "(\n f'{main_url} has been already visited. 
Please check for illegals.db or set --force-crawl option to True.'\n )\n", (7367, 7484), False, 'import logging\n'), ((7659, 7686), 'utils.url_library.normalize_url', 'normalize_url', (['response.url'], {}), '(response.url)\n', (7672, 7686), False, 'from utils.url_library import is_internal_url, assemble_url, validate_url, normalize_url, is_internal_specific_url\n'), ((5491, 5565), 'utils.soup_library.get_a_soup_of_difference', 'get_a_soup_of_difference', (['category_soups[index]', 'category_soups[index - 1]'], {}), '(category_soups[index], category_soups[index - 1])\n', (5515, 5565), False, 'from utils.soup_library import crawl_from_internals, get_a_soup_of_difference, get_external_url_set, get_internal_url_set, is_xe_based_soup\n'), ((6092, 6133), 'utils.soup_library.get_internal_url_set', 'get_internal_url_set', (['diff_soup', 'main_url'], {}), '(diff_soup, main_url)\n', (6112, 6133), False, 'from utils.soup_library import crawl_from_internals, get_a_soup_of_difference, get_external_url_set, get_internal_url_set, is_xe_based_soup\n'), ((8490, 8558), 'click.echo', 'click.echo', (['f"""Collecting urls for {category} of {main_url} is done."""'], {}), "(f'Collecting urls for {category} of {main_url} is done.')\n", (8500, 8558), False, 'import click\n'), ((9579, 9584), 'utils.now.now', 'now', ([], {}), '()\n', (9582, 9584), False, 'from utils.now import now\n'), ((9799, 9827), 'utils.url_library.normalize_url', 'normalize_url', (['url_from_dict'], {}), '(url_from_dict)\n', (9812, 9827), False, 'from utils.url_library import is_internal_url, assemble_url, validate_url, normalize_url, is_internal_specific_url\n'), ((6031, 6074), 'utils.url_library.is_internal_specific_url', 'is_internal_specific_url', (['url', 'category_url'], {}), '(url, category_url)\n', (6055, 6074), False, 'from utils.url_library import is_internal_url, assemble_url, validate_url, normalize_url, is_internal_specific_url\n'), ((2273, 2296), 'utils.url_library.normalize_url', 'normalize_url', (['main_url'], {}), '(main_url)\n', (2286, 2296), False, 'from utils.url_library import is_internal_url, assemble_url, validate_url, normalize_url, is_internal_specific_url\n'), ((2299, 2317), 'utils.url_library.assemble_url', 'assemble_url', (['href'], {}), '(href)\n', (2311, 2317), False, 'from utils.url_library import is_internal_url, assemble_url, validate_url, normalize_url, is_internal_specific_url\n'), ((2454, 2485), 'utils.url_library.is_internal_url', 'is_internal_url', (['href', 'main_url'], {}), '(href, main_url)\n', (2469, 2485), False, 'from utils.url_library import is_internal_url, assemble_url, validate_url, normalize_url, is_internal_specific_url\n'), ((8763, 8813), 'utils.soup_library.crawl_from_internals', 'crawl_from_internals', (["result['internal']", 'main_url'], {}), "(result['internal'], main_url)\n", (8783, 8813), False, 'from utils.soup_library import crawl_from_internals, get_a_soup_of_difference, get_external_url_set, get_internal_url_set, is_xe_based_soup\n'), ((2769, 2796), 'utils.soup_library.is_xe_based_soup', 'is_xe_based_soup', (['main_soup'], {}), '(main_soup)\n', (2785, 2796), False, 'from utils.soup_library import crawl_from_internals, get_a_soup_of_difference, get_external_url_set, get_internal_url_set, is_xe_based_soup\n'), ((2837, 2851), 'urllib.parse.urlparse', 'urlparse', (['href'], {}), '(href)\n', (2845, 2851), False, 'from urllib.parse import urlparse, parse_qsl, urlencode, urlunparse\n')]
from django.db import models
from django.contrib.auth.models import User
from ckeditor.fields import RichTextField
import uuid

#This needs to be shareable
class Mumble(models.Model):
    parent = models.ForeignKey("self", on_delete=models.CASCADE, null=True, blank=True)
    #For re-mumble (Share) functionality
    remumble = models.ForeignKey("self", on_delete=models.CASCADE, related_name='remumbles', null=True, blank=True)
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    #content is allowed to be blank for remumbles
    content = RichTextField(null=True, blank=True)
    image = models.ImageField(blank=True, null=True)
    vote_rank = models.IntegerField(blank=True, null=True, default=0)
    comment_count = models.IntegerField(blank=True, null=True, default=0)
    share_count = models.IntegerField(blank=True, null=True, default=0)
    created = models.DateTimeField(auto_now_add=True)
    votes = models.ManyToManyField(User, related_name='mumble_user', blank=True, through='MumbleVote')
    id = models.UUIDField(default=uuid.uuid4, unique=True, primary_key=True, editable=False)

    class Meta:
        ordering = ['-created']

    def __str__(self):
        try:
            content = self.content[0:80]
        except:
            content = 'Remumbled: ' + str(self.remumble.content[0:80])
        return content

    @property
    def shares(self):
        queryset = self.remumbles.all()
        return queryset

    @property
    def comments(self):
        #Still need a way to get all sub elements
        queryset = self.mumble_set.all()
        return queryset


class MumbleVote(models.Model):
    CHOICES = (
        ('upvote', 'upvote'),
        ('downvote', 'downvote'),
    )
    user = models.ForeignKey(User, on_delete=models.CASCADE, null=True, blank=True)
    mumble = models.ForeignKey(Mumble, on_delete=models.CASCADE, null=True, blank=True)
    value = models.CharField(max_length=20, choices=CHOICES)
    id = models.UUIDField(default=uuid.uuid4, unique=True, primary_key=True, editable=False)

    def __str__(self):
        return str(self.user) + ' ' + str(self.value) + '"' + str(self.mumble) + '"'
[ "django.db.models.ManyToManyField", "django.db.models.UUIDField", "django.db.models.ForeignKey", "django.db.models.CharField", "django.db.models.ImageField", "django.db.models.IntegerField", "ckeditor.fields.RichTextField", "django.db.models.DateTimeField" ]
[((197, 271), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""self"""'], {'on_delete': 'models.CASCADE', 'null': '(True)', 'blank': '(True)'}), "('self', on_delete=models.CASCADE, null=True, blank=True)\n", (214, 271), False, 'from django.db import models\n'), ((328, 433), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""self"""'], {'on_delete': 'models.CASCADE', 'related_name': '"""remumbles"""', 'null': '(True)', 'blank': '(True)'}), "('self', on_delete=models.CASCADE, related_name=\n 'remumbles', null=True, blank=True)\n", (345, 433), False, 'from django.db import models\n'), ((440, 489), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE'}), '(User, on_delete=models.CASCADE)\n', (457, 489), False, 'from django.db import models\n'), ((553, 589), 'ckeditor.fields.RichTextField', 'RichTextField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (566, 589), False, 'from ckeditor.fields import RichTextField\n'), ((602, 642), 'django.db.models.ImageField', 'models.ImageField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (619, 642), False, 'from django.db import models\n'), ((659, 712), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'null': '(True)', 'default': '(0)'}), '(blank=True, null=True, default=0)\n', (678, 712), False, 'from django.db import models\n'), ((733, 786), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'null': '(True)', 'default': '(0)'}), '(blank=True, null=True, default=0)\n', (752, 786), False, 'from django.db import models\n'), ((805, 858), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'null': '(True)', 'default': '(0)'}), '(blank=True, null=True, default=0)\n', (824, 858), False, 'from django.db import models\n'), ((873, 912), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (893, 912), False, 'from django.db import models\n'), ((925, 1019), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['User'], {'related_name': '"""mumble_user"""', 'blank': '(True)', 'through': '"""MumbleVote"""'}), "(User, related_name='mumble_user', blank=True,\n through='MumbleVote')\n", (947, 1019), False, 'from django.db import models\n'), ((1025, 1112), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'default': 'uuid.uuid4', 'unique': '(True)', 'primary_key': '(True)', 'editable': '(False)'}), '(default=uuid.uuid4, unique=True, primary_key=True,\n editable=False)\n', (1041, 1112), False, 'from django.db import models\n'), ((1748, 1820), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE', 'null': '(True)', 'blank': '(True)'}), '(User, on_delete=models.CASCADE, null=True, blank=True)\n', (1765, 1820), False, 'from django.db import models\n'), ((1834, 1908), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Mumble'], {'on_delete': 'models.CASCADE', 'null': '(True)', 'blank': '(True)'}), '(Mumble, on_delete=models.CASCADE, null=True, blank=True)\n', (1851, 1908), False, 'from django.db import models\n'), ((1921, 1969), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)', 'choices': 'CHOICES'}), '(max_length=20, choices=CHOICES)\n', (1937, 1969), False, 'from django.db import models\n'), ((1979, 2066), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'default': 'uuid.uuid4', 'unique': '(True)', 'primary_key': 
'(True)', 'editable': '(False)'}), '(default=uuid.uuid4, unique=True, primary_key=True,\n editable=False)\n', (1995, 2066), False, 'from django.db import models\n')]
import random import sys import heapq from typing import Callable, Iterator, List, Tuple, Any, Optional, TYPE_CHECKING import numpy as np if TYPE_CHECKING: import pandas import pyarrow from ray.data.impl.sort import SortKeyT from ray.data.aggregate import AggregateFn from ray.data.block import ( Block, BlockAccessor, BlockMetadata, T, U, KeyType, AggType, BlockExecStats, KeyFn, ) from ray.data.impl.block_builder import BlockBuilder from ray.data.impl.size_estimator import SizeEstimator class SimpleBlockBuilder(BlockBuilder[T]): def __init__(self): self._items = [] self._size_estimator = SizeEstimator() def add(self, item: T) -> None: self._items.append(item) self._size_estimator.add(item) def add_block(self, block: List[T]) -> None: assert isinstance(block, list), block self._items.extend(block) for item in block: self._size_estimator.add(item) def num_rows(self) -> int: return len(self._items) def build(self) -> Block: return list(self._items) def get_estimated_memory_usage(self) -> int: return self._size_estimator.size_bytes() class SimpleBlockAccessor(BlockAccessor): def __init__(self, items: List[T]): self._items = items def num_rows(self) -> int: return len(self._items) def iter_rows(self) -> Iterator[T]: return iter(self._items) def slice(self, start: int, end: int, copy: bool) -> "SimpleBlockAccessor[T]": view = self._items[start:end] if copy: view = view.copy() return view def random_shuffle(self, random_seed: Optional[int]) -> List[T]: random = np.random.RandomState(random_seed) items = self._items.copy() random.shuffle(items) return items def to_pandas(self) -> "pandas.DataFrame": import pandas return pandas.DataFrame({"value": self._items}) def to_numpy(self, column: str = None) -> np.ndarray: if column: raise ValueError("`column` arg not supported for list block") return np.array(self._items) def to_arrow(self) -> "pyarrow.Table": import pyarrow return pyarrow.Table.from_pandas(self.to_pandas()) def to_block(self) -> List[T]: return self._items def size_bytes(self) -> int: return sys.getsizeof(self._items) def schema(self) -> Any: if self._items: return type(self._items[0]) else: return None def zip(self, other: "Block[T]") -> "Block[T]": if not isinstance(other, list): raise ValueError( "Cannot zip {} with block of type {}".format(type(self), type(other)) ) if len(other) != len(self._items): raise ValueError( "Cannot zip self (length {}) with block of length {}".format( len(self), len(other) ) ) return list(zip(self._items, other)) @staticmethod def builder() -> SimpleBlockBuilder[T]: return SimpleBlockBuilder() def sample(self, n_samples: int = 1, key: "SortKeyT" = None) -> List[T]: if not callable(key) and key is not None: raise NotImplementedError( "Python sort key must be either None or a callable " "function, was: {}".format(key) ) k = min(n_samples, len(self._items)) ret = random.sample(self._items, k) if key is None: return ret return [key(x) for x in ret] def count(self, on: KeyFn) -> Optional[U]: if on is not None and not callable(on): raise ValueError( "on must be a callable or None when aggregating on Simple blocks, but " f"got: {type(on)}." 
) if self.num_rows() == 0: return None count = 0 for r in self.iter_rows(): if on is not None: r = on(r) if r is not None: count += 1 return count def _apply_accum( self, init: AggType, accum: Callable[[AggType, T], AggType], on: KeyFn, ignore_nulls: bool, ) -> Optional[U]: """Helper providing null handling around applying an aggregation.""" if on is not None and not callable(on): raise ValueError( "on must be a callable or None when aggregating on Simple blocks, but " f"got: {type(on)}." ) if self.num_rows() == 0: return None has_data = False a = init for r in self.iter_rows(): if on is not None: r = on(r) if r is None: if ignore_nulls: continue else: return None else: has_data = True a = accum(a, r) return a if has_data else None def sum(self, on: KeyFn, ignore_nulls: bool) -> Optional[U]: return self._apply_accum(0, lambda a, r: a + r, on, ignore_nulls) def min(self, on: KeyFn, ignore_nulls: bool) -> Optional[U]: return self._apply_accum(float("inf"), min, on, ignore_nulls) def max(self, on: KeyFn, ignore_nulls: bool) -> Optional[U]: return self._apply_accum(float("-inf"), max, on, ignore_nulls) def mean(self, on: KeyFn, ignore_nulls: bool) -> Optional[U]: return self._apply_accum( [0, 0], lambda a, r: [a[0] + r, a[1] + 1], on, ignore_nulls, ) def std(self, on: KeyFn, ignore_nulls: bool) -> Optional[U]: def accum(a: List[float], r: float) -> List[float]: # Accumulates the current count, the current mean, and the sum of # squared differences from the current mean (M2). M2, mean, count = a count += 1 delta = r - mean mean += delta / count delta2 = r - mean M2 += delta * delta2 return [M2, mean, count] return self._apply_accum([0, 0, 0], accum, on, ignore_nulls) def sum_of_squared_diffs_from_mean( self, on: KeyFn, ignore_nulls: bool, mean: Optional[U] = None, ) -> Optional[U]: if mean is None: # If precomputed mean not given, we compute it ourselves. mean = self.mean(on, ignore_nulls) return self._apply_accum( 0, lambda a, r: a + (r - mean) ** 2, on, ignore_nulls, ) def sort_and_partition( self, boundaries: List[T], key: "SortKeyT", descending: bool ) -> List["Block[T]"]: items = sorted(self._items, key=key, reverse=descending) if len(boundaries) == 0: return [items] # For each boundary value, count the number of items that are less # than it. Since the block is sorted, these counts partition the items # such that boundaries[i] <= x < boundaries[i + 1] for each x in # partition[i]. If `descending` is true, `boundaries` would also be # in descending order and we only need to count the number of items # *greater than* the boundary value instead. key_fn = key if key else lambda x: x comp_fn = ( (lambda x, b: key_fn(x) > b) if descending else (lambda x, b: key_fn(x) < b) ) # noqa E731 # Compute the boundary indices in O(n) time via scan. boundary_indices = [] remaining = boundaries.copy() for i, x in enumerate(items): while remaining and not comp_fn(x, remaining[0]): remaining.pop(0) boundary_indices.append(i) for _ in remaining: boundary_indices.append(len(items)) assert len(boundary_indices) == len(boundaries) ret = [] prev_i = 0 for i in boundary_indices: ret.append(items[prev_i:i]) prev_i = i ret.append(items[prev_i:]) return ret def combine( self, key: KeyFn, aggs: Tuple[AggregateFn] ) -> Block[Tuple[KeyType, AggType]]: """Combine rows with the same key into an accumulator. This assumes the block is already sorted by key in ascending order. Args: key: The key function that returns the key from the row or None for global aggregation. agg: The aggregations to do. 
Returns: A sorted block of (k, v_1, ..., v_n) tuples where k is the groupby key and v_i is the partially combined accumulator for the ith given aggregation. If key is None then the k element of tuple is omitted. """ if key is not None and not callable(key): raise ValueError( "key must be a callable or None when aggregating on Simple blocks, but " f"got: {type(key)}." ) def iter_groups() -> Iterator[Tuple[KeyType, Block]]: """Creates an iterator over zero-copy group views.""" if key is None: # Global aggregation consists of a single "group", so we short-circuit. yield None, self.to_block() return start = end = 0 iter = self.iter_rows() next_row = None # Use a bool to indicate if next_row is valid # instead of checking if next_row is None # since a row can have None value. has_next_row = False while True: try: if not has_next_row: next_row = next(iter) has_next_row = True next_key = key(next_row) while key(next_row) == next_key: end += 1 try: next_row = next(iter) except StopIteration: has_next_row = False next_row = None break yield next_key, self.slice(start, end, copy=False) start = end except StopIteration: break ret = [] for group_key, group_view in iter_groups(): # Aggregate. accumulators = [agg.init(group_key) for agg in aggs] for i in range(len(aggs)): accumulators[i] = aggs[i].accumulate_block(accumulators[i], group_view) # Build the row. if key is None: ret.append(tuple(accumulators)) else: ret.append((group_key,) + tuple(accumulators)) return ret @staticmethod def merge_sorted_blocks( blocks: List[Block[T]], key: "SortKeyT", descending: bool ) -> Tuple[Block[T], BlockMetadata]: stats = BlockExecStats.builder() ret = [x for block in blocks for x in block] ret.sort(key=key, reverse=descending) return ret, SimpleBlockAccessor(ret).get_metadata( None, exec_stats=stats.build() ) @staticmethod def aggregate_combined_blocks( blocks: List[Block[Tuple[KeyType, AggType]]], key: KeyFn, aggs: Tuple[AggregateFn], ) -> Tuple[Block[Tuple[KeyType, U]], BlockMetadata]: """Aggregate sorted, partially combined blocks with the same key range. This assumes blocks are already sorted by key in ascending order, so we can do merge sort to get all the rows with the same key. Args: blocks: A list of partially combined and sorted blocks. key: The key function that returns the key from the row or None for global aggregation. aggs: The aggregations to do. Returns: A block of (k, v_1, ..., v_n) tuples and its metadata where k is the groupby key and v_i is the corresponding aggregation result for the ith given aggregation. If key is None then the k element of tuple is omitted. 
""" stats = BlockExecStats.builder() key_fn = (lambda r: r[0]) if key else (lambda r: 0) iter = heapq.merge( *[SimpleBlockAccessor(block).iter_rows() for block in blocks], key=key_fn ) next_row = None ret = [] while True: try: if next_row is None: next_row = next(iter) next_key = key_fn(next_row) def gen(): nonlocal iter nonlocal next_row while key_fn(next_row) == next_key: yield next_row try: next_row = next(iter) except StopIteration: next_row = None break first = True accumulators = [None] * len(aggs) for r in gen(): if first: for i in range(len(aggs)): accumulators[i] = r[i + 1] if key else r[i] first = False else: for i in range(len(aggs)): accumulators[i] = aggs[i].merge( accumulators[i], r[i + 1] if key else r[i] ) if key is None: ret.append( tuple( agg.finalize(accumulator) for agg, accumulator in zip(aggs, accumulators) ) ) else: ret.append( (next_key,) + tuple( agg.finalize(accumulator) for agg, accumulator in zip(aggs, accumulators) ) ) except StopIteration: break return ret, SimpleBlockAccessor(ret).get_metadata( None, exec_stats=stats.build() )
[ "pandas.DataFrame", "random.sample", "random.shuffle", "numpy.random.RandomState", "ray.data.impl.size_estimator.SizeEstimator", "numpy.array", "sys.getsizeof", "ray.data.block.BlockExecStats.builder" ]
[((667, 682), 'ray.data.impl.size_estimator.SizeEstimator', 'SizeEstimator', ([], {}), '()\n', (680, 682), False, 'from ray.data.impl.size_estimator import SizeEstimator\n'), ((1746, 1780), 'numpy.random.RandomState', 'np.random.RandomState', (['random_seed'], {}), '(random_seed)\n', (1767, 1780), True, 'import numpy as np\n'), ((1824, 1845), 'random.shuffle', 'random.shuffle', (['items'], {}), '(items)\n', (1838, 1845), False, 'import random\n'), ((1953, 1993), 'pandas.DataFrame', 'pandas.DataFrame', (["{'value': self._items}"], {}), "({'value': self._items})\n", (1969, 1993), False, 'import pandas\n'), ((2161, 2182), 'numpy.array', 'np.array', (['self._items'], {}), '(self._items)\n', (2169, 2182), True, 'import numpy as np\n'), ((2422, 2448), 'sys.getsizeof', 'sys.getsizeof', (['self._items'], {}), '(self._items)\n', (2435, 2448), False, 'import sys\n'), ((3530, 3559), 'random.sample', 'random.sample', (['self._items', 'k'], {}), '(self._items, k)\n', (3543, 3559), False, 'import random\n'), ((11113, 11137), 'ray.data.block.BlockExecStats.builder', 'BlockExecStats.builder', ([], {}), '()\n', (11135, 11137), False, 'from ray.data.block import Block, BlockAccessor, BlockMetadata, T, U, KeyType, AggType, BlockExecStats, KeyFn\n'), ((12345, 12369), 'ray.data.block.BlockExecStats.builder', 'BlockExecStats.builder', ([], {}), '()\n', (12367, 12369), False, 'from ray.data.block import Block, BlockAccessor, BlockMetadata, T, U, KeyType, AggType, BlockExecStats, KeyFn\n')]
# -*- coding:utf-8 -*-
from preprocessing import Tokenizer
import random
import csv
import json
import numpy as np
import sentencepiece as spm
from konlpy.tag import Okt

import torch
from torch.utils.data import Dataset, DataLoader


class BertLMDataset(Dataset):
    def __init__(self, dataset, tokenizer: Tokenizer, vocab_size=5000):
        self.tokenizer = tokenizer

        # load the data
        with open(dataset, 'r', encoding='utf-8') as f:
            self.data = json.load(f)

        # preprocess the data (str to int)
        for i, d in enumerate(self.data):
            self.data[i]['content'] = tokenizer.tokens_to_ids(d['content'])

        # load the token vocabulary used for masking
        self.total_tokens = tokenizer.get_tokens(vocab_prefix=f'vocab_{vocab_size}', for_masking=True)

    def __getitem__(self, item):
        tokens = self.data[item]['content']
        masked_tokens, candi_index, answers = self._masking(tokens)
        masked_tokens = torch.LongTensor(masked_tokens)

        mask = np.zeros_like(masked_tokens)
        mask[candi_index] = 1    # ex) [0, 1, 1, 0, 0, 1, ...]
        mask = torch.from_numpy(mask).long()

        sparse_answers = np.zeros_like(masked_tokens)
        sparse_answers[candi_index] = answers    # ex) [0, 32, 5, 0, 0, 12, ...]
        sparse_answers = torch.from_numpy(sparse_answers).long()

        return masked_tokens, mask, sparse_answers

    def _masking(self, tokens):
        sep_idx = tokens.index(self.tokenizer.token_to_id('[SEP]'))
        t_tokens = tokens[1:sep_idx]
        k = int(len(t_tokens) * 0.15)

        candi_index = list(range(1, len(t_tokens)+1))    # +1 because [CLS] is excluded
        random.shuffle(candi_index)
        candi_index = candi_index[:k]

        random_token_index = candi_index[:int(k * 0.1)]    # replace with a random token
        # correct_token_index = candi_index[int(k * 0.1):int(k * 0.2)]    # keep the original (correct) token
        mask_token_index = candi_index[int(k * 0.2):]    # replace with the [MASK] token

        masked_tokens = np.array(tokens)
        answers = masked_tokens[candi_index]    # label tokens for the masked positions

        for idx in random_token_index:
            masked_tokens[idx] = self.tokenizer.token_to_id(random.choice(self.total_tokens))
        masked_tokens[mask_token_index] = self.tokenizer.token_to_id('[MASK]')

        return masked_tokens, candi_index, answers

    def __len__(self):
        return len(self.data)


class BertClsDataset(Dataset):
    def __init__(self, dataset, tokenizer: Tokenizer, max_num_seq=20, inference=False, vocab_size=5000, is_train=True):
        self.max_num_seq = max_num_seq
        self.inference = inference
        self.is_train = is_train
        self.tokenizer = tokenizer
        self.total_tokens = tokenizer.get_tokens(vocab_prefix=f'vocab_{vocab_size}', for_masking=True)

        # load the data
        with open(dataset, 'r', encoding='utf-8') as f:
            self.data = json.load(f)

        # preprocess the data (str to int)
        for i, d in enumerate(self.data):
            doc = d['content']
            n_doc = []
            for sub_doc in doc:
                n_doc.append(self.tokenizer.tokens_to_ids(sub_doc))
                # n_doc.append(list(map(self.tokenizer.PieceToId, sub_doc.split())))
            self.data[i]['content'] = n_doc

    def __getitem__(self, item):
        doc = self.data[item]['content']
        if not self.inference and len(doc) > self.max_num_seq:
            # if there are too many sentences, select only a window of them
            sp = random.choice(list(range(len(doc) - self.max_num_seq)))
            doc = doc[sp:sp + self.max_num_seq]

        if self.is_train:
            for i, sub_doc in enumerate(doc):
                ##
                doc[i] = self._masking(sub_doc, mask_rate=0.3)

        doc = torch.LongTensor(doc)
        label = self.data[item]['label']

        return doc, label

    def _masking(self, tokens, mask_rate=0.1):
        sep_idx = list(tokens).index(self.tokenizer.token_to_id('[SEP]'))
        t_tokens = tokens[1:sep_idx]
        k = int(len(t_tokens) * mask_rate)

        candi_index = list(range(1, len(t_tokens)+1))    # +1 because [CLS] is excluded
        random.shuffle(candi_index)
        candi_index = candi_index[:k]

        random_token_index = candi_index[:int(k * 0.2)]    # replace with a random token
        mask_token_index = candi_index[int(k * 0.8):]    # replace with the [UNK] token

        masked_tokens = np.array(tokens)

        for idx in random_token_index:
            masked_tokens[idx] = self.tokenizer.token_to_id(random.choice(self.total_tokens))
        masked_tokens[mask_token_index] = self.tokenizer.token_to_id('[UNK]')

        return masked_tokens

    def __len__(self):
        return len(self.data)


if __name__ == '__main__':
    dataset = BertClsDataset('bertcls_val_v5000_t128.json')
    data_loader = DataLoader(dataset, batch_size=1, shuffle=False)
    for i, (doc, label) in enumerate(data_loader):
        print(doc.shape)
        print(doc)
        print(label)
        if i > 0:
            break
[ "numpy.zeros_like", "json.load", "torch.utils.data.DataLoader", "torch.LongTensor", "random.shuffle", "random.choice", "numpy.array", "torch.from_numpy" ]
[((4787, 4835), 'torch.utils.data.DataLoader', 'DataLoader', (['dataset'], {'batch_size': '(1)', 'shuffle': '(False)'}), '(dataset, batch_size=1, shuffle=False)\n', (4797, 4835), False, 'from torch.utils.data import Dataset, DataLoader\n'), ((974, 1005), 'torch.LongTensor', 'torch.LongTensor', (['masked_tokens'], {}), '(masked_tokens)\n', (990, 1005), False, 'import torch\n'), ((1024, 1052), 'numpy.zeros_like', 'np.zeros_like', (['masked_tokens'], {}), '(masked_tokens)\n', (1037, 1052), True, 'import numpy as np\n'), ((1189, 1217), 'numpy.zeros_like', 'np.zeros_like', (['masked_tokens'], {}), '(masked_tokens)\n', (1202, 1217), True, 'import numpy as np\n'), ((1685, 1712), 'random.shuffle', 'random.shuffle', (['candi_index'], {}), '(candi_index)\n', (1699, 1712), False, 'import random\n'), ((1998, 2014), 'numpy.array', 'np.array', (['tokens'], {}), '(tokens)\n', (2006, 2014), True, 'import numpy as np\n'), ((3740, 3761), 'torch.LongTensor', 'torch.LongTensor', (['doc'], {}), '(doc)\n', (3756, 3761), False, 'import torch\n'), ((4126, 4153), 'random.shuffle', 'random.shuffle', (['candi_index'], {}), '(candi_index)\n', (4140, 4153), False, 'import random\n'), ((4355, 4371), 'numpy.array', 'np.array', (['tokens'], {}), '(tokens)\n', (4363, 4371), True, 'import numpy as np\n'), ((493, 505), 'json.load', 'json.load', (['f'], {}), '(f)\n', (502, 505), False, 'import json\n'), ((2916, 2928), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2925, 2928), False, 'import json\n'), ((1131, 1153), 'torch.from_numpy', 'torch.from_numpy', (['mask'], {}), '(mask)\n', (1147, 1153), False, 'import torch\n'), ((1324, 1356), 'torch.from_numpy', 'torch.from_numpy', (['sparse_answers'], {}), '(sparse_answers)\n', (1340, 1356), False, 'import torch\n'), ((2182, 2214), 'random.choice', 'random.choice', (['self.total_tokens'], {}), '(self.total_tokens)\n', (2195, 2214), False, 'import random\n'), ((4473, 4505), 'random.choice', 'random.choice', (['self.total_tokens'], {}), '(self.total_tokens)\n', (4486, 4505), False, 'import random\n')]
# Solving a self-made problem
# Question number. 004
# Author: <NAME>
# Github name: zao95

# ========== Question ==========
# Cheolsu is currently at the top-left of the map below.
# From now on he walks toward his house at the bottom-right,
# and, assuming he takes a shortest path, computes with the numbers and operators along the way.
# What are the minimum and maximum values, and which paths produce them?
# route_map = [
#     ['1', '-', '2', '-', '5', ],
#     ['-', '4', '*', '8', '*', ],
#     ['6', '+', '3', '-', '2', ],
#     ['*', '4', '*', '8', '+', ],
#     ['9', '+', '5', '+', '8', ],
# ]
# ==============================

# import
import random
from modules import mq004_module

route_map = [
    [1, '-', 2, '-', 5, ],
    ['+', 4, '*', 8, '*', ],
    [6, '+', 3, '-', 2, ],
    ['*', 4, '*', 8, '+', ],
    [9, '+', 5, '+', 8, ],
]
move_sequence = [[True, True, True, True], [False, False, False, False]]
move_count = 0
res = []

if __name__ == '__main__':
    for i in range(0, 5, 1):
        for j in range(0, len(move_sequence[0])):
            for k in range(0, len(move_sequence[1])):
                res.append(mq004_module.move(route_map, mq004_module.route_change(move_sequence, move_count, j, k)))
        print("최소값 : " + str(min(res)))
        print("최대값 : " + str(max(res)))
        move_count += 1

# ===== Reference =====
# # number of cases
# # like a factorial, but with * replaced by +
# a = (
#     (1)+
#     (2+1)+(1)+
#     (3+2+1)+(2+1)+(1)+
#     (4+3+2+1)+(3+2+1)+(2+1)+1+
#     ((4+3+2+1)+(3+2+1)+(2+1)+1)+((3+2+1)+(2+1)+1)+((2+1)+1)+1
# )
# print(a)
# 1 1
# 5 4
# 15 10
# 35 20
# 70 35
[ "modules.mq004_module.route_change" ]
[((997, 1055), 'modules.mq004_module.route_change', 'mq004_module.route_change', (['move_sequence', 'move_count', 'j', 'k'], {}), '(move_sequence, move_count, j, k)\n', (1022, 1055), False, 'from modules import mq004_module\n')]
from collections import defaultdict from typing import Dict, List, Type from pydantic import BaseModel from typing_extensions import Literal from ...models import DOCUMENT_CLASSIFICATION, SEQ2SEQ, SEQUENCE_LABELING from . import examples encodings = Literal[ 'Auto', 'ascii', 'big5', 'big5hkscs', 'cp037', 'cp273', 'cp424', 'cp437', 'cp500', 'cp720', 'cp737', 'cp775', 'cp850', 'cp852', 'cp855', 'cp856', 'cp857', 'cp858', 'cp860', 'cp861', 'cp862', 'cp863', 'cp864', 'cp865', 'cp866', 'cp869', 'cp874', 'cp875', 'cp932', 'cp949', 'cp950', 'cp1006', 'cp1026', 'cp1125', 'cp1140', 'cp1250', 'cp1251', 'cp1252', 'cp1253', 'cp1254', 'cp1255', 'cp1256', 'cp1257', 'cp1258', 'cp65001', 'euc_jp', 'euc_jis_2004', 'euc_jisx0213', 'euc_kr', 'gb2312', 'gbk', 'gb18030', 'hz', 'iso2022_jp', 'iso2022_jp_1', 'iso2022_jp_2', 'iso2022_jp_2004', 'iso2022_jp_3', 'iso2022_jp_ext', 'iso2022_kr', 'latin_1', 'iso8859_2', 'iso8859_3', 'iso8859_4', 'iso8859_5', 'iso8859_6', 'iso8859_7', 'iso8859_8', 'iso8859_9', 'iso8859_10', 'iso8859_11', 'iso8859_13', 'iso8859_14', 'iso8859_15', 'iso8859_16', 'johab', 'koi8_r', 'koi8_t', 'koi8_u', 'kz1048', 'mac_cyrillic', 'mac_greek', 'mac_iceland', 'mac_latin2', 'mac_roman', 'mac_turkish', 'ptcp154', 'shift_jis', 'shift_jis_2004', 'shift_jisx0213', 'utf_32', 'utf_32_be', 'utf_32_le', 'utf_16', 'utf_16_be', 'utf_16_le', 'utf_7', 'utf_8', 'utf_8_sig' ] class Format: name = '' accept_types = '' @classmethod def dict(cls): return { 'name': cls.name, 'accept_types': cls.accept_types } class CSV(Format): name = 'CSV' accept_types = 'text/csv' class FastText(Format): name = 'fastText' accept_types = 'text/plain' class JSON(Format): name = 'JSON' accept_types = 'application/json' class JSONL(Format): name = 'JSONL' accept_types = '*' class Excel(Format): name = 'Excel' accept_types = 'application/vnd.ms-excel, application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' class TextFile(Format): name = 'TextFile' accept_types = 'text/*' class TextLine(Format): name = 'TextLine' accept_types = 'text/*' class CoNLL(Format): name = 'CoNLL' accept_types = 'text/*' class OptionColumn(BaseModel): encoding: encodings = 'utf_8' column_data: str = 'text' column_label: str = 'label' class OptionDelimiter(OptionColumn): encoding: encodings = 'utf_8' delimiter: Literal[',', '\t', ';', '|', ' '] = ',' class OptionNone(BaseModel): encoding: encodings = 'utf_8' class OptionCoNLL(BaseModel): encoding: encodings = 'utf_8' scheme: Literal['IOB2', 'IOE2', 'IOBES', 'BILOU'] = 'IOB2' delimiter: Literal[' ', ''] = ' ' class Options: options: Dict[str, List] = defaultdict(list) @classmethod def filter_by_task(cls, task_name: str): options = cls.options[task_name] return [ { **format.dict(), **option.schema(), 'example': example } for format, option, example in options ] @classmethod def register(cls, task: str, format: Type[Format], option: Type[BaseModel], example: str): cls.options[task].append((format, option, example)) # Text Classification Options.register(DOCUMENT_CLASSIFICATION, TextFile, OptionNone, examples.Generic_TextFile) Options.register(DOCUMENT_CLASSIFICATION, TextLine, OptionNone, examples.Generic_TextLine) Options.register(DOCUMENT_CLASSIFICATION, CSV, OptionDelimiter, examples.Category_CSV) Options.register(DOCUMENT_CLASSIFICATION, FastText, OptionNone, examples.Category_fastText) Options.register(DOCUMENT_CLASSIFICATION, JSON, OptionColumn, examples.Category_JSON) Options.register(DOCUMENT_CLASSIFICATION, JSONL, OptionColumn, examples.Category_JSONL) Options.register(DOCUMENT_CLASSIFICATION, 
Excel, OptionColumn, examples.Category_CSV) # Sequence Labeling Options.register(SEQUENCE_LABELING, TextFile, OptionNone, examples.Generic_TextFile) Options.register(SEQUENCE_LABELING, TextLine, OptionNone, examples.Generic_TextLine) Options.register(SEQUENCE_LABELING, JSONL, OptionColumn, examples.Offset_JSONL) Options.register(SEQUENCE_LABELING, CoNLL, OptionCoNLL, examples.Offset_CoNLL) # Sequence to sequence Options.register(SEQ2SEQ, TextFile, OptionNone, examples.Generic_TextFile) Options.register(SEQ2SEQ, TextLine, OptionNone, examples.Generic_TextLine) Options.register(SEQ2SEQ, CSV, OptionDelimiter, examples.Text_CSV) Options.register(SEQ2SEQ, JSON, OptionColumn, examples.Text_JSON) Options.register(SEQ2SEQ, JSONL, OptionColumn, examples.Text_JSONL) Options.register(SEQ2SEQ, Excel, OptionColumn, examples.Text_CSV)
[ "collections.defaultdict" ]
[((3173, 3190), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (3184, 3190), False, 'from collections import defaultdict\n')]
from fastapi import FastAPI, Request, HTTPException from fastapi.staticfiles import StaticFiles from fastapi.responses import HTMLResponse from bson import ObjectId from datetime import datetime as dt from .router import router as api_router from .router import templates app = FastAPI() app.mount("/static", StaticFiles(directory="static"), name="static") app.include_router(api_router, tags=["articles"], prefix="/api") @app.get("/", response_class=HTMLResponse) async def get_homepage(request: Request): articles = [] for a in ( await request.app.mongodb["articles"] .find( {"category": "News", "source_short": {"$in": ["nyt", "wsj", "ft", "bbc"]}} ) .sort("_id", -1) # newest-scraped articles .to_list(length=60) ): articles.append(a) articles = sorted( articles, key=lambda d: ( dt.strptime(d["pubdate"], "%Y-%m-%dT%H:%M:%S").date(), 1 / d["pagerank"], ), reverse=True, ) return templates.TemplateResponse( "home.html", {"request": request, "articles": articles} ) # async def get_landing(request: Request): # articles = [] # for a in ( # await request.app.mongodb["articles"] # .find() # .sort("pubdate", -1) # .to_list(length=40) # ): # articles.append(a) # return templates.TemplateResponse( # "home.html", {"request": request, "articles": articles} # ) @app.get("/latest", response_class=HTMLResponse) async def get_homepage(request: Request): articles = [] for a in ( await request.app.mongodb["articles"] .find({"category": "News"}) .sort("pubdate", -1) .to_list(length=100) ): articles.append(a) return templates.TemplateResponse( "home.html", {"request": request, "articles": articles} ) @app.get("/pubs/{pub}", response_class=HTMLResponse) async def get_pub_home(pub: str, request: Request): articles = [] for a in ( await request.app.mongodb["articles"] .find({"source_short": pub}) .sort("pagerank") .to_list(length=100) ): articles.append(a) articles = sorted( articles, key=lambda d: ( dt.strptime(d["pubdate"], "%Y-%m-%dT%H:%M:%S").date(), 1 / d["pagerank"], ), reverse=True, ) return templates.TemplateResponse( "home.html", {"request": request, "articles": articles} ) @app.get("/article/{id}", response_class=HTMLResponse) async def get_article(id: str, request: Request): if ( article := await request.app.mongodb["articles"].find_one({"_id": ObjectId(id)}) ) is not None: return templates.TemplateResponse( "article.html", {"request": request, "a": article} ) raise HTTPException(status_code=404, detail=f"Article {id} not found") @app.get("/{cat}", response_class=HTMLResponse) async def get_pub_home(cat: str, request: Request): articles = [] for a in ( await request.app.mongodb["articles"] .find({"category": str.capitalize(cat)}) .sort("pubdate", -1) .to_list(length=100) ): articles.append(a) articles = sorted( articles, key=lambda d: ( dt.strptime(d["pubdate"], "%Y-%m-%dT%H:%M:%S").date(), 1 / d["pagerank"], ), reverse=True, ) return templates.TemplateResponse( "home.html", {"request": request, "articles": articles} )
[ "fastapi.staticfiles.StaticFiles", "fastapi.HTTPException", "datetime.datetime.strptime", "bson.ObjectId", "fastapi.FastAPI" ]
[((280, 289), 'fastapi.FastAPI', 'FastAPI', ([], {}), '()\n', (287, 289), False, 'from fastapi import FastAPI, Request, HTTPException\n'), ((311, 342), 'fastapi.staticfiles.StaticFiles', 'StaticFiles', ([], {'directory': '"""static"""'}), "(directory='static')\n", (322, 342), False, 'from fastapi.staticfiles import StaticFiles\n'), ((2879, 2943), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': 'f"""Article {id} not found"""'}), "(status_code=404, detail=f'Article {id} not found')\n", (2892, 2943), False, 'from fastapi import FastAPI, Request, HTTPException\n'), ((2717, 2729), 'bson.ObjectId', 'ObjectId', (['id'], {}), '(id)\n', (2725, 2729), False, 'from bson import ObjectId\n'), ((894, 940), 'datetime.datetime.strptime', 'dt.strptime', (["d['pubdate']", '"""%Y-%m-%dT%H:%M:%S"""'], {}), "(d['pubdate'], '%Y-%m-%dT%H:%M:%S')\n", (905, 940), True, 'from datetime import datetime as dt\n'), ((2292, 2338), 'datetime.datetime.strptime', 'dt.strptime', (["d['pubdate']", '"""%Y-%m-%dT%H:%M:%S"""'], {}), "(d['pubdate'], '%Y-%m-%dT%H:%M:%S')\n", (2303, 2338), True, 'from datetime import datetime as dt\n'), ((3344, 3390), 'datetime.datetime.strptime', 'dt.strptime', (["d['pubdate']", '"""%Y-%m-%dT%H:%M:%S"""'], {}), "(d['pubdate'], '%Y-%m-%dT%H:%M:%S')\n", (3355, 3390), True, 'from datetime import datetime as dt\n')]
#! /usr/bin/env python """ This tool combines results from the dna_pipeline.py and/or rna_pipeline.py to create an unified table with all the variants (filtered) and their epitopes (for each effect). The table contains useful information for post-analysis. @author: <NAME> <<EMAIL>> """ import statistics from argparse import ArgumentParser, RawDescriptionHelpFormatter from collections import defaultdict from scipy import stats import os import sys from hlapipeline.variants import * import pandas as pd def main(dna_variants, dna_names, rna_variants, rna_names, rna_counts, cDNA_DICT, AA_DICT, tumor_coverage, tumor_var_depth, tumor_var_freq, normal_var_freq, normal_coverage, t2n_ratio, num_callers, num_callers_indel, tumor_coverage_rna, tumor_var_depth_rna, tumor_var_freq_rna, num_callers_rna, ensembl_version): if not dna_variants and not rna_variants: sys.stderr.write("Error, no variants given as input (DNA or RNA).\n") sys.exit(1) # TODO add sanity check for parameters AA_seq_dict = dict() with open(AA_DICT, "r") as handle: for line in handle.readlines(): tokens = line.split(":") AA_seq_dict[tokens[0]] = tokens[1].strip() cDNA_seq_dict = dict() with open(cDNA_DICT, "r") as handle: for line in handle.readlines(): tokens = line.split(":") cDNA_seq_dict[tokens[0]] = tokens[1].strip() variant_dict = defaultdict(list) if dna_variants and len(dna_variants) > 0 and len(dna_variants) == len(dna_names): print('Loading DNA variants..') for file, name in zip(dna_variants, dna_names): variants = filter_variants_dna(file, normal_coverage, tumor_coverage, tumor_var_depth, tumor_var_freq, normal_var_freq, t2n_ratio, num_callers, num_callers_indel, ensembl_version, cDNA_seq_dict, AA_seq_dict) for variant in variants: variant_dict[variant.key].append((variant, name)) if rna_variants and len(rna_variants) > 0 and len(rna_variants) == len(rna_names): print('Loading RNA variants..') for file, name in zip(rna_variants, rna_names): variants = filter_variants_rna(file, tumor_coverage_rna, tumor_var_depth_rna, tumor_var_freq_rna, num_callers_rna, ensembl_version, cDNA_seq_dict, AA_seq_dict) for variant in variants: variant_dict[variant.key].append((variant, name)) # TODO this could be done more elegantly and efficiently counts_dict = defaultdict(lambda: defaultdict(float)) counts_stats = defaultdict(float) counts_stats_percentile = defaultdict(lambda: defaultdict(float)) if rna_counts and len(rna_counts) > 0 and len(rna_counts) == len(rna_names): print('Loading Gene counts..') for file, name in zip(rna_counts, rna_names): counts_table = pd.read_csv(file, sep='\t', skiprows=1) counts = counts_table.iloc[:, 6].to_numpy() lengths = counts_table['Length'].to_numpy() rpb = counts / lengths counts_table['RPKM'] = (rpb / sum(counts)) * 1e9 counts_table['TPM'] = (rpb / sum(rpb)) * 1e6 gene_counts_tpm = counts_table.iloc[:, [0, 6, 8]].values.tolist() tpm_filtered = list(filter(lambda x: x != 0, counts_table['TPM'])) for gene, expr, tpm in gene_counts_tpm: counts_dict[name][gene] = float(expr) counts_stats_percentile[name][gene] = np.around(stats.percentileofscore(tpm_filtered, float(tpm), kind='strict'), 3) counts_stats[name] = np.around(np.mean(counts), 3) counts_table['Percentile_TPM'] = counts_stats_percentile[name].values() counts_table.to_csv(file + '.final', sep='\t', index=False) print('Creating merged variants..') header_final = 'Variant key\tDBsnp ID\tGnomad MAF\tCosmic ID\tDNA samples (passing)\tNumber of DNA samples (passing)\t' \ 'DNA samples (failing)\tNumber of DNA samples (failing)\t' \ 'RNA samples 
(passing)\tNumber of RNA samples (passing)\t' \ 'RNA samples (failing)\tNumber of RNA samples (failing)\tEffects\t' \ 'cDNA change\tAA change\tEpitope creation flags\tWt Epitope\t' \ 'Mut Epitope\tTranscripts\tDNA Callers Sample(Name:NDP;NAD;NVAF;TDP;TAD;TVAF)\t' \ 'RNA Callers Sample(Name:TDP;TAD;TVAF)\tGeneCount info Sample(gene;exp;mean;percentile)\n' final_file = open('overlap_final.txt', 'w') final_file.write(header_final) final_file_rna = open('overlap_final_rna_unique.txt', 'w') final_file_rna.write(header_final) final_file_discarded = open('overlap_final_discarded.txt', 'w') final_file_discarded.write(header_final) final_file_discarded_rna = open('overlap_final_rna_unique_discarded.txt', 'w') final_file_discarded_rna.write(header_final) for key, value in variant_dict.items(): # key = variant key # value = list of (Variant, sample_name) tuples rna_name_pass = set([name for variant, name in value if variant.type == 'rna' and variant.status]) rna_name_fail = set([name for variant, name in value if variant.type == 'rna' and not variant.status]) rna_callers = ';'.join( set(['{}:({})'.format(name, variant.callers) for variant, name in value if variant.type == 'rna'])) dna_name_pass = set([name for variant, name in value if variant.type == 'dna' and variant.status]) dna_name_fail = set([name for variant, name in value if variant.type == 'dna' and not variant.status]) dna_callers = ';'.join( set(['{}:({})'.format(name, variant.callers) for variant, name in value if variant.type == 'dna'])) num_rna_pass = len(rna_name_pass) num_rna_fail = len(rna_name_fail) num_dna_pass = len(dna_name_pass) num_dna_fail = len(dna_name_fail) # These are the same for all the variants in the same position dbsnp = value[0][0].dbsnp gnomad = value[0][0].gnomad cosmic = value[0][0].cosmic gene = value[0][0].gene # Check that the gene is the correct one for the variant # Create a dictionary of epitopes so to keep unique ones (different mut peptide) epitopes_dict = defaultdict(list) for variant, _ in value: for e in variant.epitopes: epitopes_dict[e.mutseq].append(e) # Iterate epitopes in the variant and write info to output for _, epitopes in epitopes_dict.items(): # all epitopes share the mutated peptide so we can just take the first one epitope = epitopes[0] gene_locus = [] # Get gene exp. 
if any if gene is not None and len(counts_dict) > 0: for name, gene_counts in counts_dict.items(): try: gene_count = gene_counts[gene] gene_mean = counts_stats[name] gene_percentile = counts_stats_percentile[name][gene] gene_locus.append('{}:({})'.format(name, ';'.join([gene, str(gene_count), str(gene_mean), str(gene_percentile)]))) except KeyError: gene_locus.append("{}:-".format(name)) else: gene_locus = ["-"] effect = ';'.join(set(['{}_{}_{}'.format(e.func, e.gene, e.transcript) for e in epitopes])) transcripts = ';'.join(set([e.transcript for e in epitopes])) to_write = '\t'.join(str(x) for x in [key, dbsnp, gnomad, cosmic, ';'.join(dna_name_pass), num_dna_pass, ';'.join(dna_name_fail), num_dna_fail, ';'.join(rna_name_pass), num_rna_pass, ';'.join(rna_name_fail), num_rna_fail, effect, epitope.dnamut, epitope.aamut, epitope.flags, epitope.wtseq, epitope.mutseq, transcripts, dna_callers, rna_callers, ';'.join(gene_locus)]) if num_dna_pass >= 1: final_file.write(to_write + '\n') elif num_dna_fail >= 1: final_file_discarded.write(to_write + '\n') elif num_rna_pass >= 1: final_file_rna.write(to_write + '\n') else: final_file_discarded_rna.write(to_write + '\n') final_file.close() final_file_rna.close() final_file_discarded.close() final_file_discarded_rna.close() if __name__ == '__main__': parser = ArgumentParser(description=__doc__, formatter_class=RawDescriptionHelpFormatter) parser.add_argument('--dna', nargs='+', default=None, required=False, help='List of annotated vcf files with the variants obtained with the DNA pipeline') parser.add_argument('--dna-names', nargs='+', default=None, required=False, help='List of names for each DNA sample/file (to include in the report)') parser.add_argument('--rna', nargs='+', default=None, required=False, help='List of annotated vcf files with the variants obtained with the RNA pipeline') parser.add_argument('--rna-names', nargs='+', default=None, required=False, help='List of names for each RNA sample/file (to include in the report)') parser.add_argument('--rna-counts', nargs='+', default=None, required=False, help='List of gene counts files obtained with the RNA pipeline') parser.add_argument('--dictAA', help='Path to a dictionary of transcript IDs to peptide sequences', required=True) parser.add_argument('--dictcDNA', help='Path to a dictionary of transcript IDs to DNA sequences', required=True) parser.add_argument('--filter-dna-tumor-cov', type=int, default=10, required=False, dest='tumor_coverage', help='Filter for DNA variants tumor number of reads (coverage) (DP). Default=10') parser.add_argument('--filter-dna-tumor-depth', type=int, default=4, required=False, dest='tumor_var_depth', help='Filter for DNA variants tumor number of allelic reads (AD). Default=4') parser.add_argument('--filter-dna-tumor-vaf', type=float, default=7, required=False, dest='tumor_var_freq', help='Filter for DNA variants tumor variant allele frequency (VAF) (minimum value). Default=7') parser.add_argument('--filter-dna-normal-vaf', type=float, default=100, required=False, dest='normal_var_freq', help='Filter for DNA variants normal variant allele frequency (VAF) (maximum value). Default=100') parser.add_argument('--filter-dna-normal-cov', type=int, default=10, required=False, dest='normal_coverage', help='Filter for DNA variants normal number of reads (coverage) (DP) (minimum value). Default=10') parser.add_argument('--filter-dna-tn-ratio', type=int, default=5, required=False, dest='t2n_ratio', help='Filter for DNA variants tumor-normal VAF ratio. 
Default=5') parser.add_argument('--filter-dna-snv-callers', type=int, default=2, required=False, choices=[1, 2, 3, 4], dest='num_callers', help='Filter for DNA SNPs variants number of callers required. Default=2') parser.add_argument('--filter-dna-indel-callers', type=int, default=1, required=False, choices=[1, 2], dest='num_callers_indel', help='Filter for DNA indels variants number of callers required. Default=1') parser.add_argument('--filter-rna-tumor-cov', type=int, default=10, required=False, dest='tumor_coverage_rna', help='Filter for RNA variants tumor number of reads (coverage) (minimum value) (DP). Default=10') parser.add_argument('--filter-rna-tumor-depth', type=int, default=5, required=False, dest='tumor_var_depth_rna', help='Filter for RNA variants tumor number of allelic reads (AD) (minimum value). Default=5') parser.add_argument('--filter-rna-tumor-vaf', type=float, default=3, required=False, dest='tumor_var_freq_rna', help='Filter for RNA variants tumor variant allele frequency (VAF) (minimum value). Default=3') parser.add_argument('--filter-rna-callers', type=int, default=2, required=False, choices=[1, 2], dest='num_callers_rna', help='Filter for RNA variants number of callers required. Default=2') parser.add_argument('--ensembl-version', type=str, required=True, help='Ensembl version number that was used to annotate the variants with VEP') args = parser.parse_args() main(args.dna, args.dna_names, args.rna, args.rna_names, args.rna_counts, os.path.abspath(args.dictcDNA), os.path.abspath(args.dictAA), args.tumor_coverage, args.tumor_var_depth, args.tumor_var_freq, args.normal_var_freq, args.normal_coverage, args.t2n_ratio, args.num_callers, args.num_callers_indel, args.tumor_coverage_rna, args.tumor_var_depth_rna, args.tumor_var_freq_rna, args.num_callers_rna, args.ensembl_version)
[ "os.path.abspath", "argparse.ArgumentParser", "pandas.read_csv", "collections.defaultdict", "sys.stderr.write", "sys.exit" ]
[((1602, 1619), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (1613, 1619), False, 'from collections import defaultdict\n'), ((3499, 3517), 'collections.defaultdict', 'defaultdict', (['float'], {}), '(float)\n', (3510, 3517), False, 'from collections import defaultdict\n'), ((10021, 10106), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '__doc__', 'formatter_class': 'RawDescriptionHelpFormatter'}), '(description=__doc__, formatter_class=RawDescriptionHelpFormatter\n )\n', (10035, 10106), False, 'from argparse import ArgumentParser, RawDescriptionHelpFormatter\n'), ((1049, 1118), 'sys.stderr.write', 'sys.stderr.write', (['"""Error, no variants given as input (DNA or RNA).\n"""'], {}), "('Error, no variants given as input (DNA or RNA).\\n')\n", (1065, 1118), False, 'import sys\n'), ((1127, 1138), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1135, 1138), False, 'import sys\n'), ((7264, 7281), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (7275, 7281), False, 'from collections import defaultdict\n'), ((14474, 14504), 'os.path.abspath', 'os.path.abspath', (['args.dictcDNA'], {}), '(args.dictcDNA)\n', (14489, 14504), False, 'import os\n'), ((14515, 14543), 'os.path.abspath', 'os.path.abspath', (['args.dictAA'], {}), '(args.dictAA)\n', (14530, 14543), False, 'import os\n'), ((3460, 3478), 'collections.defaultdict', 'defaultdict', (['float'], {}), '(float)\n', (3471, 3478), False, 'from collections import defaultdict\n'), ((3568, 3586), 'collections.defaultdict', 'defaultdict', (['float'], {}), '(float)\n', (3579, 3586), False, 'from collections import defaultdict\n'), ((3789, 3828), 'pandas.read_csv', 'pd.read_csv', (['file'], {'sep': '"""\t"""', 'skiprows': '(1)'}), "(file, sep='\\t', skiprows=1)\n", (3800, 3828), True, 'import pandas as pd\n')]
import pytest from scipy.sparse import csr_matrix from FasterSpMV.cuda_spmv import * from FasterSpMV.matrix_tools import * def test_spmv(): # define matrix parameters n_row = n_col = 10 slice_height = 2 # generate a sparse matrix fill with random value sp_matrix, nnz_count, row_max_nnz = random_spmatrix(n_row, n_col, 10) # convert sparse matrix to CSR format csr_rowptr, csr_colidx, csr_val = spmatrix_to_csr(sp_matrix) # convert CSR to Sliced ELLPACK format slice_count, sell_colidx, sell_sliceptr, _, sell_val = \ csr_to_sell(n_row, csr_rowptr, csr_colidx, csr_val, slice_height) # generate a random vector rand = np.random.RandomState(0) x = rand.randn(n_col).astype(np.float32) # get exact y sp_A = csr_matrix((csr_val, csr_colidx, csr_rowptr), shape=(n_row, n_col)) y_exact = sp_A.dot(x) # SciPy SpMV # CSR test nblocks = (n_row,) # global blocks nthreads = (1,) # threads per block, better be a multiple of 32 # CUDA buffer bf_csr_rowptr = cuda.to_device(csr_rowptr) bf_csr_colidx = cuda.to_device(csr_colidx) bf_csr_val = cuda.to_device(csr_val) bf_x = cuda.to_device(x) bf_csr_y = cuda.device_array(n_row, dtype=np.float32) cuda_csr_spmv[nblocks, nthreads](bf_csr_rowptr, bf_csr_colidx, bf_csr_val, bf_x, bf_csr_y) csr_y = bf_csr_y.copy_to_host() # Sliced ELLPACK test nblocks = (slice_count,) # global blocks nthreads = (slice_height,) # threads per block, better be a multiple of 32 # CUDA buffer bf_sell_y = cuda.device_array(slice_height * slice_count, dtype=np.float32) bf_sell_sliceptr = cuda.to_device(sell_sliceptr) bf_sell_colidx = cuda.to_device(sell_colidx) bf_sell_val = cuda.to_device(sell_val) bf_x = cuda.to_device(x) cuda_sell_spmv[nblocks, nthreads](bf_sell_sliceptr, bf_sell_colidx, bf_sell_val, bf_x, slice_height, bf_sell_y) sell_y = bf_sell_y.copy_to_host() # check the result assert y_exact == pytest.approx(csr_y, rel=1e-6, abs=1e-12) assert y_exact == pytest.approx(sell_y, rel=1e-6, abs=1e-12)
[ "scipy.sparse.csr_matrix", "pytest.approx" ]
[((778, 845), 'scipy.sparse.csr_matrix', 'csr_matrix', (['(csr_val, csr_colidx, csr_rowptr)'], {'shape': '(n_row, n_col)'}), '((csr_val, csr_colidx, csr_rowptr), shape=(n_row, n_col))\n', (788, 845), False, 'from scipy.sparse import csr_matrix\n'), ((2200, 2242), 'pytest.approx', 'pytest.approx', (['csr_y'], {'rel': '(1e-06)', 'abs': '(1e-12)'}), '(csr_y, rel=1e-06, abs=1e-12)\n', (2213, 2242), False, 'import pytest\n'), ((2264, 2307), 'pytest.approx', 'pytest.approx', (['sell_y'], {'rel': '(1e-06)', 'abs': '(1e-12)'}), '(sell_y, rel=1e-06, abs=1e-12)\n', (2277, 2307), False, 'import pytest\n')]
import configparser import psycopg2 from sql_queries import create_table_queries, drop_table_queries#, copy_table_queries, insert_table_queries def get_queryName(query, searchTermStart='from', searchTermEnd=' ', toLower=True): ''' Retrieve table name being processed Parameters ---------- query : str SQL like query text for database operation. searchTermStart : str Search term which is before the 'to be extracted' string (e.g. "FROM" mytable ... ) searchTermEnd : str Search term which is afterwards the 'to be extracted' string (e.g. FROM mytable "..." ) toLower : bool Convert all strings to lower cases. Return ------ query_part : str The extracted string value (e.g. table name). ''' if toLower: query = query.lower() searchTermStart = searchTermStart.lower() searchTermEnd = searchTermEnd.lower() if searchTermStart in query: query_part = query[query.find(searchTermStart)+len(searchTermStart)+1:] if searchTermEnd in query_part: if query_part.find(searchTermEnd) > 0: query_part = query_part[0:query_part.find(searchTermEnd)] else: query_part = 'unkown' return query_part def drop_tables(cur, conn): ''' Drops each table using the queries in `drop_table_queries` list. ''' for query in drop_table_queries: try: query_objectName = get_queryName(query, searchTermStart='DROP TABLE IF EXISTS', searchTermEnd=' ') print('executing TABLE DROP command for query: {}'.format(query_objectName)) cur.execute(query) conn.commit() except Exception as error: print('Executing DROP TABLES command failed for query: {}\n\Exception: {}'.format(query, error)) print('\tsuccessfully dropped table!') def create_tables(cur, conn): ''' Creates each table using the queries in `create_table_queries` list. ''' for query in create_table_queries: try: query_objectName = get_queryName(query, searchTermStart='CREATE TABLE IF NOT EXISTS', searchTermEnd='(') print('executing TABLE CREATE command for query: {}'.format(query_objectName)) cur.execute(query) conn.commit() except Exception as error: print('Executing CREATE TABLES command failed for query: {}\n\Exception: {}'.format(query, error)) print('\tsuccessfully created table!') def main(): ''' - Drops (if exists) and Creates the tables on AWS. - Establishes connection with the sparkify database and gets cursor to it. - Drops all the tables. - Creates all tables. - Finally, closes the connection. ''' config = configparser.ConfigParser() config.read('dwh.cfg') print('[SUCCESS] imported AWS config.') try: conn = psycopg2.connect("host={} dbname={} user={} password={} port={}".format(*config['CLUSTER'].values())) cur = conn.cursor() print('[SUCCESS] connected to AWS cluster (host={}:{} DBname={}).'.format(config.get('CLUSTER', 'HOST') , config.get('CLUSTER', 'DB_PORT') , config.get('CLUSTER', 'DB_NAME'))) except Exception as error: print('[FAIL] connecting to AWS cluster.') print('Exception message: {}'.format(error)) print('starting to drop tables ...') drop_tables(cur, conn) print('starting to create tables ...') create_tables(cur, conn) conn.close() print('successfully closed connection.') if __name__ == "__main__": main()
[ "configparser.ConfigParser" ]
[((2816, 2843), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (2841, 2843), False, 'import configparser\n')]
# -*- coding: utf-8 -*- ''' Managing software RAID with mdadm ================================== A state module for creating or destroying software RAID devices. .. code-block:: yaml /dev/md0: raid.present: - level: 5 - devices: - /dev/xvdd - /dev/xvde - /dev/xvdf - chunk: 256 - run: True ''' # Import python libs from __future__ import absolute_import, print_function, unicode_literals import logging # Import salt libs import salt.utils.path # Set up logger log = logging.getLogger(__name__) # Define the module's virtual name __virtualname__ = 'raid' def __virtual__(): ''' mdadm provides raid functions for Linux ''' if __grains__['kernel'] != 'Linux': return False if not salt.utils.path.which('mdadm'): return False return __virtualname__ def present(name, level, devices, **kwargs): ''' Verify that the raid is present .. versionchanged:: 2014.7.0 name The name of raid device to be created level The RAID level to use when creating the raid. devices A list of devices used to build the array. kwargs Optional arguments to be passed to mdadm. Example: .. code-block:: yaml /dev/md0: raid.present: - level: 5 - devices: - /dev/xvdd - /dev/xvde - /dev/xvdf - chunk: 256 - run: True ''' ret = {'changes': {}, 'comment': '', 'name': name, 'result': True} # Device exists raids = __salt__['raid.list']() present = raids.get(name) # Decide whether to create or assemble missing = [] uuid_dict = {} new_devices = [] for dev in devices: if dev == 'missing' or not __salt__['file.access'](dev, 'f'): missing.append(dev) continue superblock = __salt__['raid.examine'](dev) if 'MD_UUID' in superblock: uuid = superblock['MD_UUID'] if uuid not in uuid_dict: uuid_dict[uuid] = [] uuid_dict[uuid].append(dev) else: new_devices.append(dev) if len(uuid_dict) > 1: ret['comment'] = 'Devices are a mix of RAID constituents with multiple MD_UUIDs: {0}.'.format( sorted(uuid_dict.keys())) ret['result'] = False return ret elif len(uuid_dict) == 1: uuid = list(uuid_dict.keys())[0] if present and present['uuid'] != uuid: ret['comment'] = 'Devices MD_UUIDs: {0} differs from present RAID uuid {1}.'.format(uuid, present['uuid']) ret['result'] = False return ret devices_with_superblock = uuid_dict[uuid] else: devices_with_superblock = [] if present: do_assemble = False do_create = False elif len(devices_with_superblock) > 0: do_assemble = True do_create = False verb = 'assembled' else: if len(new_devices) == 0: ret['comment'] = 'All devices are missing: {0}.'.format(missing) ret['result'] = False return ret do_assemble = False do_create = True verb = 'created' # If running with test use the test_mode with create or assemble if __opts__['test']: if do_assemble: res = __salt__['raid.assemble'](name, devices_with_superblock, test_mode=True, **kwargs) elif do_create: res = __salt__['raid.create'](name, level, new_devices + ['missing'] * len(missing), test_mode=True, **kwargs) if present: ret['comment'] = 'Raid {0} already present.'.format(name) if do_assemble or do_create: ret['comment'] = 'Raid will be {0} with: {1}'.format(verb, res) ret['result'] = None if (do_assemble or present) and len(new_devices) > 0: ret['comment'] += ' New devices will be added: {0}'.format(new_devices) ret['result'] = None if len(missing) > 0: ret['comment'] += ' Missing devices: {0}'.format(missing) return ret # Attempt to create or assemble the array if do_assemble: __salt__['raid.assemble'](name, devices_with_superblock, **kwargs) elif do_create: __salt__['raid.create'](name, level, new_devices + ['missing'] * len(missing), **kwargs) if not present: raids = 
__salt__['raid.list']() changes = raids.get(name) if changes: ret['comment'] = 'Raid {0} {1}.'.format(name, verb) ret['changes'] = changes # Saving config __salt__['raid.save_config']() else: ret['comment'] = 'Raid {0} failed to be {1}.'.format(name, verb) ret['result'] = False else: ret['comment'] = 'Raid {0} already present.'.format(name) if (do_assemble or present) and len(new_devices) > 0 and ret['result']: for d in new_devices: res = __salt__['raid.add'](name, d) if not res: ret['comment'] += ' Unable to add {0} to {1}.\n'.format(d, name) ret['result'] = False else: ret['comment'] += ' Added new device {0} to {1}.\n'.format(d, name) if ret['result']: ret['changes']['added'] = new_devices if len(missing) > 0: ret['comment'] += ' Missing devices: {0}'.format(missing) return ret def absent(name): ''' Verify that the raid is absent name The name of raid device to be destroyed .. code-block:: yaml /dev/md0: raid: - absent ''' ret = {'changes': {}, 'comment': '', 'name': name, 'result': True} # Raid does not exist if name not in __salt__['raid.list'](): ret['comment'] = 'Raid {0} already absent'.format(name) return ret elif __opts__['test']: ret['comment'] = 'Raid {0} is set to be destroyed'.format(name) ret['result'] = None return ret else: # Attempt to destroy raid ret['result'] = __salt__['raid.destroy'](name) if ret['result']: ret['comment'] = 'Raid {0} has been destroyed'.format(name) else: ret['comment'] = 'Raid {0} failed to be destroyed'.format(name) return ret
[ "logging.getLogger" ]
[((545, 572), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (562, 572), False, 'import logging\n')]
import os from datetime import datetime from typing import List import redis from rq import Connection, Worker class NBWorker(Worker): """Extensions of the default Worker class to set ip_address""" def set_ip_address(self, ip_address): self.ip_address = ip_address def inactive_time(self): """it calculates in seconds, the total inactive time of a worker""" working = self.total_working_time now = datetime.utcnow() birth_elapsed = (now - self.birth_date).seconds return birth_elapsed - working def start_worker(redis_dsn, queues: List[str], ip_address: str, name: str): rdb = redis.from_url(redis_dsn) pid = os.getpid() with Connection(connection=rdb): print(f"Running in {pid} with ip {ip_address}", pid) # qs = sys.argv[1:] or ['default'] w = NBWorker(queues, name=name) w.set_ip_address(ip_address) w.work()
[ "datetime.datetime.utcnow", "redis.from_url", "rq.Connection", "os.getpid" ]
[((649, 674), 'redis.from_url', 'redis.from_url', (['redis_dsn'], {}), '(redis_dsn)\n', (663, 674), False, 'import redis\n'), ((685, 696), 'os.getpid', 'os.getpid', ([], {}), '()\n', (694, 696), False, 'import os\n'), ((447, 464), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (462, 464), False, 'from datetime import datetime\n'), ((707, 733), 'rq.Connection', 'Connection', ([], {'connection': 'rdb'}), '(connection=rdb)\n', (717, 733), False, 'from rq import Connection, Worker\n')]
import re import csv types = ['CS', 'SS', 'ALLOY', 'PPL', 'TITANIUM', 'HASTELLOY', 'CORTEN', 'ALUMINUM', 'COPPER', 'BRONZE', 'IRON', 'GS', 'BURNING BARS', 'SX', 'COAL', 'BRICK', 'CEMENT', 'MORTAR', 'AL203', 'STEEL'] dimdesc = ['MM', '\"', '/', 'SCH'] def find_type(description): for tp in types: if any(tp in s for s in description): return tp return 'NA' def find_dimensions(description): dimensions = [] for lm in description: if re.search('\d', lm): for dm in dimdesc: if any( dm in lm for dm in dimdesc): dimensions.append(lm) break if len(dimensions) > 0: return ','.join(dimensions).strip() else: return 'NA' def fileFilter(csvfile): try: with open(csvfile, 'r', encoding='utf-8') as f: reader = csv.reader(f, dialect="excel-tab") for row in reader: print(row) except UnicodeDecodeError as exception: print(exception) input_file = open(csvfile, "rb") s = input_file.read() print(s) input_file.close() s = s.replace(b'\xb0', bytes(b'\xc2\xb0')) s = s.replace(b'\xd1', bytes(b'\xc3\x91')) s = s.replace(b'\xba', bytes(b'\xc2\xb0')) output_file = open(csvfile, "wb") output_file.write(s) output_file.close() print('input file corrected')
[ "csv.reader", "re.search" ]
[((654, 674), 're.search', 're.search', (['"""\\\\d"""', 'lm'], {}), "('\\\\d', lm)\n", (663, 674), False, 'import re\n'), ((1042, 1076), 'csv.reader', 'csv.reader', (['f'], {'dialect': '"""excel-tab"""'}), "(f, dialect='excel-tab')\n", (1052, 1076), False, 'import csv\n')]
import numpy as np import tensorflow as tf import pandas as pd import matplotlib.pyplot as plt # First we load the entire CSV file into an m x n matrix D = np.matrix(pd.read_csv("linreg-scaling-synthetic.csv", header=None).values) # Make a convenient variable to remember the number of input columns n = 2 # We extract all rows and the first n columns into X_data # Then we flip it X_data = D[:, 0:n].transpose() # We extract all rows and the last column into y_data # Then we flip it y_data = D[:, n].transpose() # We compute the mean and standard deviation of each feature means = X_data.mean(axis=1) deviations = X_data.std(axis=1) print(means) print(deviations) # Define data placeholders x = tf.placeholder(tf.float32, shape=(n, None)) y = tf.placeholder(tf.float32, shape=(1, None)) # Apply the rescaling x_scaled = (x - means) / deviations # Define trainable variables A = tf.get_variable("A", shape=(1, n)) b = tf.get_variable("b", shape=()) # Define model output, using the scaled features y_predicted = tf.matmul(A, x_scaled) + b # Define the loss function L = tf.reduce_sum((y_predicted - y)**2) # Define optimizer object optimizer = tf.train.AdamOptimizer(learning_rate=0.1).minimize(L) # Create a session and initialize variables session = tf.Session() session.run(tf.global_variables_initializer()) # Main optimization loop for t in range(2000): _, current_loss, current_A, current_b = session.run([optimizer, L, A, b], feed_dict={ x: X_data, y: y_data }) print("t = %g, loss = %g, A = %s, b = %g" % (t, current_loss, str(current_A), current_b))
[ "tensorflow.reduce_sum", "tensorflow.global_variables_initializer", "pandas.read_csv", "tensorflow.Session", "tensorflow.placeholder", "tensorflow.matmul", "tensorflow.train.AdamOptimizer", "tensorflow.get_variable" ]
[((704, 747), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': '(n, None)'}), '(tf.float32, shape=(n, None))\n', (718, 747), True, 'import tensorflow as tf\n'), ((752, 795), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': '(1, None)'}), '(tf.float32, shape=(1, None))\n', (766, 795), True, 'import tensorflow as tf\n'), ((889, 923), 'tensorflow.get_variable', 'tf.get_variable', (['"""A"""'], {'shape': '(1, n)'}), "('A', shape=(1, n))\n", (904, 923), True, 'import tensorflow as tf\n'), ((928, 958), 'tensorflow.get_variable', 'tf.get_variable', (['"""b"""'], {'shape': '()'}), "('b', shape=())\n", (943, 958), True, 'import tensorflow as tf\n'), ((1082, 1119), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['((y_predicted - y) ** 2)'], {}), '((y_predicted - y) ** 2)\n', (1095, 1119), True, 'import tensorflow as tf\n'), ((1266, 1278), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (1276, 1278), True, 'import tensorflow as tf\n'), ((1023, 1045), 'tensorflow.matmul', 'tf.matmul', (['A', 'x_scaled'], {}), '(A, x_scaled)\n', (1032, 1045), True, 'import tensorflow as tf\n'), ((1291, 1324), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (1322, 1324), True, 'import tensorflow as tf\n'), ((167, 223), 'pandas.read_csv', 'pd.read_csv', (['"""linreg-scaling-synthetic.csv"""'], {'header': 'None'}), "('linreg-scaling-synthetic.csv', header=None)\n", (178, 223), True, 'import pandas as pd\n'), ((1157, 1198), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', ([], {'learning_rate': '(0.1)'}), '(learning_rate=0.1)\n', (1179, 1198), True, 'import tensorflow as tf\n')]
import http from typing import List, Optional from fastapi import APIRouter, Depends, Query from app.dependencies import get_current_user from app.models.user import User from app.schemas.stars import StarCreateSchema, StarSchema from app.services.stars import create_star, delete_star, get_stars router = APIRouter() @router.post( "", response_description="Star message, channel or server", response_model=StarSchema, status_code=http.HTTPStatus.CREATED, ) async def post_create_star(star: StarCreateSchema, current_user: User = Depends(get_current_user)): return await create_star(star, current_user=current_user) @router.get("", summary="List all user's stars", response_model=List[StarSchema], status_code=http.HTTPStatus.OK) async def get_fetch_stars( star_type: Optional[str] = Query(None, alias="type"), current_user: User = Depends(get_current_user) ): return await get_stars(current_user=current_user, stars_type=star_type) @router.delete("/{star_id}", summary="Remove star", status_code=http.HTTPStatus.NO_CONTENT) async def delete_remove_star(star_id: str, current_user: User = Depends(get_current_user)): await delete_star(star_id, current_user=current_user)
[ "app.services.stars.delete_star", "app.services.stars.get_stars", "fastapi.Query", "app.services.stars.create_star", "fastapi.Depends", "fastapi.APIRouter" ]
[((309, 320), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (318, 320), False, 'from fastapi import APIRouter, Depends, Query\n'), ((551, 576), 'fastapi.Depends', 'Depends', (['get_current_user'], {}), '(get_current_user)\n', (558, 576), False, 'from fastapi import APIRouter, Depends, Query\n'), ((815, 840), 'fastapi.Query', 'Query', (['None'], {'alias': '"""type"""'}), "(None, alias='type')\n", (820, 840), False, 'from fastapi import APIRouter, Depends, Query\n'), ((863, 888), 'fastapi.Depends', 'Depends', (['get_current_user'], {}), '(get_current_user)\n', (870, 888), False, 'from fastapi import APIRouter, Depends, Query\n'), ((1126, 1151), 'fastapi.Depends', 'Depends', (['get_current_user'], {}), '(get_current_user)\n', (1133, 1151), False, 'from fastapi import APIRouter, Depends, Query\n'), ((596, 640), 'app.services.stars.create_star', 'create_star', (['star'], {'current_user': 'current_user'}), '(star, current_user=current_user)\n', (607, 640), False, 'from app.services.stars import create_star, delete_star, get_stars\n'), ((909, 967), 'app.services.stars.get_stars', 'get_stars', ([], {'current_user': 'current_user', 'stars_type': 'star_type'}), '(current_user=current_user, stars_type=star_type)\n', (918, 967), False, 'from app.services.stars import create_star, delete_star, get_stars\n'), ((1164, 1211), 'app.services.stars.delete_star', 'delete_star', (['star_id'], {'current_user': 'current_user'}), '(star_id, current_user=current_user)\n', (1175, 1211), False, 'from app.services.stars import create_star, delete_star, get_stars\n')]
# -*- coding: utf-8 -*- """Standard sequence-to-sequence model.""" import six import tensorflow as tf from opennmt import constants from opennmt import inputters from opennmt import layers from opennmt.layers import noise from opennmt.layers import reducer from opennmt.models.model import Model from opennmt.utils import compat from opennmt.utils.losses import cross_entropy_sequence_loss from opennmt.utils.misc import print_bytes, format_translation_output, merge_dict, shape_list from opennmt.decoders.decoder import get_sampling_probability def shift_target_sequence(inputter, data): """Prepares shifted target sequences. Given a target sequence ``a b c``, the decoder input should be ``<s> a b c`` and the output should be ``a b c </s>`` for the dynamic decoding to start on ``<s>`` and stop on ``</s>``. Args: inputter: The :class:`opennmt.inputters.inputter.Inputter` that processed :obj:`data`. data: A dict of ``tf.Tensor`` containing ``ids`` and ``length`` keys. Returns: The updated :obj:`data` dictionary with ``ids`` the sequence prefixed with the start token id and ``ids_out`` the sequence suffixed with the end token id. Additionally, the ``length`` is increased by 1 to reflect the added token on both sequences. """ _ = inputter ids = data["ids"] bos = tf.constant([constants.START_OF_SENTENCE_ID], dtype=ids.dtype) eos = tf.constant([constants.END_OF_SENTENCE_ID], dtype=ids.dtype) data["ids_out"] = tf.concat([ids, eos], axis=0) data["ids"] = tf.concat([bos, ids], axis=0) data["length"] += 1 # Increment length accordingly. return data class EmbeddingsSharingLevel(object): """Level of embeddings sharing. Possible values are: * ``NONE``: no sharing (default) * ``SOURCE_TARGET_INPUT``: share source and target word embeddings * ``TARGET``: share target word embeddings and softmax weights * ``ALL``: share words embeddings and softmax weights """ NONE = 0 SOURCE_TARGET_INPUT = 1 TARGET = 2 ALL = 3 @staticmethod def share_input_embeddings(level): """Returns ``True`` if input embeddings should be shared at :obj:`level`.""" return level in (EmbeddingsSharingLevel.SOURCE_TARGET_INPUT, EmbeddingsSharingLevel.ALL) @staticmethod def share_target_embeddings(level): """Returns ``True`` if target embeddings should be shared at :obj:`level`.""" return level in (EmbeddingsSharingLevel.TARGET, EmbeddingsSharingLevel.ALL) class SequenceToSequence(Model): """A sequence to sequence model.""" def __init__(self, source_inputter, target_inputter, encoder, decoder, share_embeddings=EmbeddingsSharingLevel.NONE, alignment_file_key="train_alignments", daisy_chain_variables=False, name="seq2seq"): """Initializes a sequence-to-sequence model. Args: source_inputter: A :class:`opennmt.inputters.inputter.Inputter` to process the source data. target_inputter: A :class:`opennmt.inputters.inputter.Inputter` to process the target data. Currently, only the :class:`opennmt.inputters.text_inputter.WordEmbedder` is supported. encoder: A :class:`opennmt.encoders.encoder.Encoder` to encode the source. decoder: A :class:`opennmt.decoders.decoder.Decoder` to decode the target. share_embeddings: Level of embeddings sharing, see :class:`opennmt.models.sequence_to_sequence.EmbeddingsSharingLevel` for possible values. alignment_file_key: The data configuration key of the training alignment file to support guided alignment. daisy_chain_variables: If ``True``, copy variables in a daisy chain between devices for this model. Not compatible with RNN based models. name: The name of this model. 
Raises: TypeError: if :obj:`target_inputter` is not a :class:`opennmt.inputters.text_inputter.WordEmbedder` (same for :obj:`source_inputter` when embeddings sharing is enabled) or if :obj:`source_inputter` and :obj:`target_inputter` do not have the same ``dtype``. """ if source_inputter.dtype != target_inputter.dtype: raise TypeError( "Source and target inputters must have the same dtype, " "saw: {} and {}".format(source_inputter.dtype, target_inputter.dtype)) if not isinstance(target_inputter, inputters.WordEmbedder): raise TypeError("Target inputter must be a WordEmbedder") if EmbeddingsSharingLevel.share_input_embeddings(share_embeddings): if isinstance(source_inputter, inputters.ParallelInputter): source_inputters = source_inputter.inputters else: source_inputters = [source_inputter] for inputter in source_inputters: if not isinstance(inputter, inputters.WordEmbedder): raise TypeError("Sharing embeddings requires all inputters to be a " "WordEmbedder") examples_inputter = SequenceToSequenceInputter( source_inputter, target_inputter, share_parameters=EmbeddingsSharingLevel.share_input_embeddings(share_embeddings), alignment_file_key=alignment_file_key) super(SequenceToSequence, self).__init__( name, daisy_chain_variables=daisy_chain_variables, examples_inputter=examples_inputter) self.encoder = encoder self.decoder = decoder self.share_embeddings = share_embeddings self.output_layer = None def auto_config(self, num_devices=1): config = super(SequenceToSequence, self).auto_config(num_devices=num_devices) return merge_dict(config, { "params": { "beam_width": 4 }, "train": { "sample_buffer_size": -1, "train_steps": 500000 }, "infer": { "batch_size": 32, "bucket_width": 5 } }) def _build(self): self.examples_inputter.build() if EmbeddingsSharingLevel.share_target_embeddings(self.share_embeddings): self.output_layer = layers.Dense( self.labels_inputter.vocabulary_size, weight=self.labels_inputter.embedding, transpose=True, dtype=self.labels_inputter.dtype) with tf.name_scope(tf.get_variable_scope().name + "/"): self.output_layer.build([None, self.decoder.output_size]) def _call(self, features, labels, params, mode): training = mode == tf.estimator.ModeKeys.TRAIN features_length = self.features_inputter.get_length(features) source_inputs = self.features_inputter.make_inputs(features, training=training) with tf.variable_scope("encoder"): encoder_outputs, encoder_state, encoder_sequence_length = self.encoder.encode( source_inputs, sequence_length=features_length, mode=mode) target_vocab_size = self.labels_inputter.vocabulary_size target_dtype = self.labels_inputter.dtype if labels is not None: target_inputs = self.labels_inputter.make_inputs(labels, training=training) with tf.variable_scope("decoder"): sampling_probability = None if mode == tf.estimator.ModeKeys.TRAIN: sampling_probability = get_sampling_probability( tf.train.get_or_create_global_step(), read_probability=params.get("scheduled_sampling_read_probability"), schedule_type=params.get("scheduled_sampling_type"), k=params.get("scheduled_sampling_k")) logits, _, _, attention = self.decoder.decode( target_inputs, self.labels_inputter.get_length(labels), vocab_size=target_vocab_size, initial_state=encoder_state, sampling_probability=sampling_probability, embedding=self.labels_inputter.embedding, output_layer=self.output_layer, mode=mode, memory=encoder_outputs, memory_sequence_length=encoder_sequence_length, return_alignment_history=True) if "alignment" in labels: outputs = { "logits": logits, "attention": attention } else: outputs = 
logits else: outputs = None if mode != tf.estimator.ModeKeys.TRAIN: with tf.variable_scope("decoder", reuse=labels is not None): batch_size = tf.shape(tf.contrib.framework.nest.flatten(encoder_outputs)[0])[0] beam_width = params.get("beam_width", 1) start_tokens = tf.fill([batch_size], constants.START_OF_SENTENCE_ID) end_token = constants.END_OF_SENTENCE_ID sampled_ids, _, sampled_length, log_probs, alignment = ( self.decoder.dynamic_decode_and_search( self.labels_inputter.embedding, start_tokens, end_token, vocab_size=target_vocab_size, initial_state=encoder_state, output_layer=self.output_layer, beam_width=beam_width, length_penalty=params.get("length_penalty", 0), maximum_iterations=params.get("maximum_iterations", 250), minimum_length=params.get("minimum_decoding_length", 0), mode=mode, memory=encoder_outputs, memory_sequence_length=encoder_sequence_length, dtype=target_dtype, return_alignment_history=True, sample_from=params.get("sampling_topk"), sample_temperature=params.get("sampling_temperature"), coverage_penalty=params.get("coverage_penalty", 0))) target_vocab_rev = self.labels_inputter.vocabulary_lookup_reverse() target_tokens = target_vocab_rev.lookup(tf.cast(sampled_ids, tf.int64)) if params.get("replace_unknown_target", False): if alignment is None: raise TypeError("replace_unknown_target is not compatible with decoders " "that don't return alignment history") if not isinstance(self.features_inputter, inputters.WordEmbedder): raise TypeError("replace_unknown_target is only defined when the source " "inputter is a WordEmbedder") source_tokens = features["tokens"] if beam_width > 1: source_tokens = tf.contrib.seq2seq.tile_batch(source_tokens, multiplier=beam_width) # Merge batch and beam dimensions. original_shape = tf.shape(target_tokens) target_tokens = tf.reshape(target_tokens, [-1, original_shape[-1]]) align_shape = shape_list(alignment) attention = tf.reshape( alignment, [align_shape[0] * align_shape[1], align_shape[2], align_shape[3]]) # We don't have attention for </s> but ensure that the attention time dimension matches # the tokens time dimension. attention = reducer.align_in_time(attention, tf.shape(target_tokens)[1]) replaced_target_tokens = replace_unknown_target(target_tokens, source_tokens, attention) target_tokens = tf.reshape(replaced_target_tokens, original_shape) decoding_noise = params.get("decoding_noise") if decoding_noise: sampled_length -= 1 # Ignore </s> target_tokens, sampled_length = _add_noise( target_tokens, sampled_length, decoding_noise, params.get("decoding_subword_token", "■")) sampled_length += 1 alignment = None # Invalidate alignments. 
predictions = { "tokens": target_tokens, "length": sampled_length, "log_probs": log_probs } if alignment is not None: predictions["alignment"] = alignment num_hypotheses = params.get("num_hypotheses", 1) if num_hypotheses > 0: if num_hypotheses > beam_width: raise ValueError("n_best cannot be greater than beam_width") for key, value in six.iteritems(predictions): predictions[key] = value[:, :num_hypotheses] else: predictions = None return outputs, predictions def compute_loss(self, outputs, labels, training=True, params=None): if params is None: params = {} if isinstance(outputs, dict): logits = outputs["logits"] attention = outputs.get("attention") else: logits = outputs attention = None labels_lengths = self.labels_inputter.get_length(labels) loss, loss_normalizer, loss_token_normalizer = cross_entropy_sequence_loss( logits, labels["ids_out"], labels_lengths, label_smoothing=params.get("label_smoothing", 0.0), average_in_time=params.get("average_loss_in_time", False), training=training) if training: gold_alignments = labels.get("alignment") guided_alignment_type = params.get("guided_alignment_type") if gold_alignments is not None and guided_alignment_type is not None: if attention is None: tf.logging.warning("This model did not return attention vectors; " "guided alignment will not be applied") else: loss += guided_alignment_cost( attention[:, :-1], # Do not constrain last timestep. gold_alignments, labels_lengths - 1, guided_alignment_type, guided_alignment_weight=params.get("guided_alignment_weight", 1)) return loss, loss_normalizer, loss_token_normalizer def print_prediction(self, prediction, params=None, stream=None): if params is None: params = {} num_hypotheses = len(prediction["tokens"]) for i in range(num_hypotheses): target_length = prediction["length"][i] - 1 # Ignore </s>. tokens = prediction["tokens"][i][:target_length] sentence = self.labels_inputter.tokenizer.detokenize(tokens) score = None attention = None alignment_type = None if params.get("with_scores"): score = prediction["log_probs"][i] if params.get("with_alignments"): attention = prediction["alignment"][i][:target_length] alignment_type = params["with_alignments"] sentence = format_translation_output( sentence, score=score, attention=attention, alignment_type=alignment_type) print_bytes(tf.compat.as_bytes(sentence), stream=stream) class SequenceToSequenceInputter(inputters.ExampleInputter): """A custom :class:`opennmt.inputters.inputter.ExampleInputter` that possibly injects alignment information during training. 
""" def __init__(self, features_inputter, labels_inputter, share_parameters=False, alignment_file_key=None): super(SequenceToSequenceInputter, self).__init__( features_inputter, labels_inputter, share_parameters=share_parameters) self.alignment_file_key = alignment_file_key self.alignment_file = None def initialize(self, metadata, asset_dir=None, asset_prefix=""): if self.alignment_file_key is not None and self.alignment_file_key in metadata: self.alignment_file = metadata[self.alignment_file_key] return super(SequenceToSequenceInputter, self).initialize( metadata, asset_dir=asset_dir, asset_prefix=asset_prefix) def make_dataset(self, data_file, training=None): dataset = super(SequenceToSequenceInputter, self).make_dataset( data_file, training=training) if self.alignment_file is None or not training: return dataset return tf.data.Dataset.zip((dataset, tf.data.TextLineDataset(self.alignment_file))) def make_features(self, element=None, features=None, training=None): if self.alignment_file is None or not training: return super(SequenceToSequenceInputter, self).make_features( element=element, features=features, training=training) text, alignment = element features, labels = super(SequenceToSequenceInputter, self).make_features( text, features=features, training=training) labels["alignment"] = alignment_matrix_from_pharaoh( alignment, self.features_inputter.get_length(features), self.labels_inputter.get_length(labels) - 1) # Ignore special token. return features, labels def _get_names(self): return ["encoder", "decoder"] def _get_shared_name(self): return "shared_embeddings" def alignment_matrix_from_pharaoh(alignment_line, source_length, target_length, dtype=tf.float32): """Parse Pharaoh alignments into an alignment matrix. Args: alignment_line: A string ``tf.Tensor`` in the Pharaoh format. source_length: The length of the source sentence, without special symbols. target_length The length of the target sentence, without special symbols. dtype: The output matrix dtype. Defaults to ``tf.float32`` for convenience when computing the guided alignment loss. Returns: The alignment matrix as a 2-D ``tf.Tensor`` of type :obj:`dtype` and shape ``[target_length, source_length]``, where ``[i, j] = 1`` if the ``i`` th target word is aligned with the ``j`` th source word. """ if compat.tf_supports("strings.split"): align_pairs_str = tf.strings.split([alignment_line]).values align_pairs_flat_str = tf.strings.split(align_pairs_str, sep="-").values else: align_pairs_str = tf.string_split([alignment_line], delimiter=" ").values align_pairs_flat_str = tf.string_split(align_pairs_str, delimiter="-").values align_pairs_flat = compat.tf_compat(v2="strings.to_number", v1="string_to_number")( align_pairs_flat_str, out_type=tf.int64) sparse_indices = tf.reshape(align_pairs_flat, [-1, 2]) sparse_values = tf.ones([tf.shape(sparse_indices)[0]], dtype=dtype) source_length = tf.cast(source_length, tf.int64) target_length = tf.cast(target_length, tf.int64) if compat.tf_supports("sparse.to_dense"): alignment_matrix_sparse = tf.sparse.SparseTensor( sparse_indices, sparse_values, [source_length, target_length]) alignment_matrix = tf.sparse.to_dense(alignment_matrix_sparse, validate_indices=False) else: alignment_matrix = tf.sparse_to_dense( sparse_indices, [source_length, target_length], sparse_values, validate_indices=False) return tf.transpose(alignment_matrix) def guided_alignment_cost(attention_probs, gold_alignment, sequence_length, guided_alignment_type, guided_alignment_weight=1): """Computes the guided alignment cost. 
Args: attention_probs: The attention probabilities, a float ``tf.Tensor`` of shape :math:`[B, T_t, T_s]`. gold_alignment: The true alignment matrix, a float ``tf.Tensor`` of shape :math:`[B, T_t, T_s]`. sequence_length: The length of each sequence. guided_alignment_type: The type of guided alignment cost function to compute (can be: ce, mse). guided_alignment_weight: The weight applied to the guided alignment cost. Returns: The guided alignment cost. """ weights = tf.sequence_mask( sequence_length, maxlen=tf.shape(attention_probs)[1], dtype=attention_probs.dtype) if guided_alignment_type == "ce": cross_entropy = -tf.reduce_sum(tf.log(attention_probs + 1e-6) * gold_alignment, axis=-1) loss = tf.reduce_sum(cross_entropy * weights) elif guided_alignment_type == "mse": loss = tf.losses.mean_squared_error( gold_alignment, attention_probs, weights=tf.expand_dims(weights, -1)) else: raise ValueError("invalid guided_alignment_type: %s" % guided_alignment_type) return guided_alignment_weight * loss def align_tokens_from_attention(tokens, attention): """Returns aligned tokens from the attention. Args: tokens: The tokens on which the attention is applied as a string ``tf.Tensor`` of shape :math:`[B, T_s]`. attention: The attention vector of shape :math:`[B, T_t, T_s]`. Returns: The aligned tokens as a string ``tf.Tensor`` of shape :math:`[B, T_t]`. """ alignment = tf.argmax(attention, axis=-1, output_type=tf.int32) batch_size = tf.shape(tokens)[0] max_time = tf.shape(attention)[1] batch_ids = tf.range(batch_size) batch_ids = tf.tile(batch_ids, [max_time]) batch_ids = tf.reshape(batch_ids, [max_time, batch_size]) batch_ids = tf.transpose(batch_ids, perm=[1, 0]) aligned_pos = tf.stack([batch_ids, alignment], axis=-1) aligned_tokens = tf.gather_nd(tokens, aligned_pos) return aligned_tokens def replace_unknown_target(target_tokens, source_tokens, attention, unknown_token=constants.UNKNOWN_TOKEN): """Replaces all target unknown tokens by the source token with the highest attention. Args: target_tokens: A a string ``tf.Tensor`` of shape :math:`[B, T_t]`. source_tokens: A a string ``tf.Tensor`` of shape :math:`[B, T_s]`. attention: The attention vector of shape :math:`[B, T_t, T_s]`. unknown_token: The target token to replace. Returns: A string ``tf.Tensor`` with the same shape and type as :obj:`target_tokens` but will all instances of :obj:`unknown_token` replaced by the aligned source token. """ aligned_source_tokens = align_tokens_from_attention(source_tokens, attention) return tf.where( tf.equal(target_tokens, unknown_token), x=aligned_source_tokens, y=target_tokens) def _add_noise(tokens, lengths, params, subword_token): if not isinstance(params, list): raise ValueError("Expected a list of noise modules") noises = [] for module in params: noise_type, args = six.next(six.iteritems(module)) if not isinstance(args, list): args = [args] noise_type = noise_type.lower() if noise_type == "dropout": noise_class = noise.WordDropout elif noise_type == "replacement": noise_class = noise.WordReplacement elif noise_type == "permutation": noise_class = noise.WordPermutation else: raise ValueError("Invalid noise type: %s" % noise_type) noises.append(noise_class(*args)) noiser = noise.WordNoiser( noises=noises, subword_token=subword_token, is_spacer=subword_token == "▁") return noiser(tokens, lengths, keep_shape=True)
[ "opennmt.utils.misc.shape_list", "tensorflow.contrib.seq2seq.tile_batch", "opennmt.utils.misc.merge_dict", "tensorflow.reduce_sum", "opennmt.layers.noise.WordNoiser", "tensorflow.gather_nd", "tensorflow.logging.warning", "tensorflow.reshape", "tensorflow.get_variable_scope", "tensorflow.string_split", "six.iteritems", "tensorflow.compat.as_bytes", "tensorflow.contrib.framework.nest.flatten", "tensorflow.sparse.to_dense", "tensorflow.train.get_or_create_global_step", "tensorflow.concat", "opennmt.utils.compat.tf_supports", "tensorflow.variable_scope", "tensorflow.stack", "tensorflow.cast", "opennmt.utils.compat.tf_compat", "opennmt.utils.misc.format_translation_output", "tensorflow.equal", "tensorflow.sparse.SparseTensor", "opennmt.layers.Dense", "tensorflow.range", "tensorflow.constant", "tensorflow.transpose", "tensorflow.tile", "tensorflow.log", "tensorflow.expand_dims", "tensorflow.strings.split", "tensorflow.argmax", "tensorflow.fill", "tensorflow.shape", "tensorflow.sparse_to_dense", "tensorflow.data.TextLineDataset" ]
[((1332, 1394), 'tensorflow.constant', 'tf.constant', (['[constants.START_OF_SENTENCE_ID]'], {'dtype': 'ids.dtype'}), '([constants.START_OF_SENTENCE_ID], dtype=ids.dtype)\n', (1343, 1394), True, 'import tensorflow as tf\n'), ((1403, 1463), 'tensorflow.constant', 'tf.constant', (['[constants.END_OF_SENTENCE_ID]'], {'dtype': 'ids.dtype'}), '([constants.END_OF_SENTENCE_ID], dtype=ids.dtype)\n', (1414, 1463), True, 'import tensorflow as tf\n'), ((1484, 1513), 'tensorflow.concat', 'tf.concat', (['[ids, eos]'], {'axis': '(0)'}), '([ids, eos], axis=0)\n', (1493, 1513), True, 'import tensorflow as tf\n'), ((1530, 1559), 'tensorflow.concat', 'tf.concat', (['[bos, ids]'], {'axis': '(0)'}), '([bos, ids], axis=0)\n', (1539, 1559), True, 'import tensorflow as tf\n'), ((17298, 17333), 'opennmt.utils.compat.tf_supports', 'compat.tf_supports', (['"""strings.split"""'], {}), "('strings.split')\n", (17316, 17333), False, 'from opennmt.utils import compat\n'), ((17796, 17833), 'tensorflow.reshape', 'tf.reshape', (['align_pairs_flat', '[-1, 2]'], {}), '(align_pairs_flat, [-1, 2])\n', (17806, 17833), True, 'import tensorflow as tf\n'), ((17922, 17954), 'tensorflow.cast', 'tf.cast', (['source_length', 'tf.int64'], {}), '(source_length, tf.int64)\n', (17929, 17954), True, 'import tensorflow as tf\n'), ((17973, 18005), 'tensorflow.cast', 'tf.cast', (['target_length', 'tf.int64'], {}), '(target_length, tf.int64)\n', (17980, 18005), True, 'import tensorflow as tf\n'), ((18011, 18048), 'opennmt.utils.compat.tf_supports', 'compat.tf_supports', (['"""sparse.to_dense"""'], {}), "('sparse.to_dense')\n", (18029, 18048), False, 'from opennmt.utils import compat\n'), ((18445, 18475), 'tensorflow.transpose', 'tf.transpose', (['alignment_matrix'], {}), '(alignment_matrix)\n', (18457, 18475), True, 'import tensorflow as tf\n'), ((20246, 20297), 'tensorflow.argmax', 'tf.argmax', (['attention'], {'axis': '(-1)', 'output_type': 'tf.int32'}), '(attention, axis=-1, output_type=tf.int32)\n', (20255, 20297), True, 'import tensorflow as tf\n'), ((20383, 20403), 'tensorflow.range', 'tf.range', (['batch_size'], {}), '(batch_size)\n', (20391, 20403), True, 'import tensorflow as tf\n'), ((20418, 20448), 'tensorflow.tile', 'tf.tile', (['batch_ids', '[max_time]'], {}), '(batch_ids, [max_time])\n', (20425, 20448), True, 'import tensorflow as tf\n'), ((20463, 20508), 'tensorflow.reshape', 'tf.reshape', (['batch_ids', '[max_time, batch_size]'], {}), '(batch_ids, [max_time, batch_size])\n', (20473, 20508), True, 'import tensorflow as tf\n'), ((20523, 20559), 'tensorflow.transpose', 'tf.transpose', (['batch_ids'], {'perm': '[1, 0]'}), '(batch_ids, perm=[1, 0])\n', (20535, 20559), True, 'import tensorflow as tf\n'), ((20576, 20617), 'tensorflow.stack', 'tf.stack', (['[batch_ids, alignment]'], {'axis': '(-1)'}), '([batch_ids, alignment], axis=-1)\n', (20584, 20617), True, 'import tensorflow as tf\n'), ((20637, 20670), 'tensorflow.gather_nd', 'tf.gather_nd', (['tokens', 'aligned_pos'], {}), '(tokens, aligned_pos)\n', (20649, 20670), True, 'import tensorflow as tf\n'), ((22316, 22413), 'opennmt.layers.noise.WordNoiser', 'noise.WordNoiser', ([], {'noises': 'noises', 'subword_token': 'subword_token', 'is_spacer': "(subword_token == '▁')"}), "(noises=noises, subword_token=subword_token, is_spacer=\n subword_token == '▁')\n", (22332, 22413), False, 'from opennmt.layers import noise\n'), ((5668, 5835), 'opennmt.utils.misc.merge_dict', 'merge_dict', (['config', "{'params': {'beam_width': 4}, 'train': {'sample_buffer_size': -1,\n 'train_steps': 500000}, 
'infer': {'batch_size': 32, 'bucket_width': 5}}"], {}), "(config, {'params': {'beam_width': 4}, 'train': {\n 'sample_buffer_size': -1, 'train_steps': 500000}, 'infer': {\n 'batch_size': 32, 'bucket_width': 5}})\n", (5678, 5835), False, 'from opennmt.utils.misc import print_bytes, format_translation_output, merge_dict, shape_list\n'), ((17665, 17728), 'opennmt.utils.compat.tf_compat', 'compat.tf_compat', ([], {'v2': '"""strings.to_number"""', 'v1': '"""string_to_number"""'}), "(v2='strings.to_number', v1='string_to_number')\n", (17681, 17728), False, 'from opennmt.utils import compat\n'), ((18080, 18169), 'tensorflow.sparse.SparseTensor', 'tf.sparse.SparseTensor', (['sparse_indices', 'sparse_values', '[source_length, target_length]'], {}), '(sparse_indices, sparse_values, [source_length,\n target_length])\n', (18102, 18169), True, 'import tensorflow as tf\n'), ((18198, 18265), 'tensorflow.sparse.to_dense', 'tf.sparse.to_dense', (['alignment_matrix_sparse'], {'validate_indices': '(False)'}), '(alignment_matrix_sparse, validate_indices=False)\n', (18216, 18265), True, 'import tensorflow as tf\n'), ((18297, 18406), 'tensorflow.sparse_to_dense', 'tf.sparse_to_dense', (['sparse_indices', '[source_length, target_length]', 'sparse_values'], {'validate_indices': '(False)'}), '(sparse_indices, [source_length, target_length],\n sparse_values, validate_indices=False)\n', (18315, 18406), True, 'import tensorflow as tf\n'), ((19517, 19555), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['(cross_entropy * weights)'], {}), '(cross_entropy * weights)\n', (19530, 19555), True, 'import tensorflow as tf\n'), ((20313, 20329), 'tensorflow.shape', 'tf.shape', (['tokens'], {}), '(tokens)\n', (20321, 20329), True, 'import tensorflow as tf\n'), ((20346, 20365), 'tensorflow.shape', 'tf.shape', (['attention'], {}), '(attention)\n', (20354, 20365), True, 'import tensorflow as tf\n'), ((21538, 21576), 'tensorflow.equal', 'tf.equal', (['target_tokens', 'unknown_token'], {}), '(target_tokens, unknown_token)\n', (21546, 21576), True, 'import tensorflow as tf\n'), ((6106, 6255), 'opennmt.layers.Dense', 'layers.Dense', (['self.labels_inputter.vocabulary_size'], {'weight': 'self.labels_inputter.embedding', 'transpose': '(True)', 'dtype': 'self.labels_inputter.dtype'}), '(self.labels_inputter.vocabulary_size, weight=self.\n labels_inputter.embedding, transpose=True, dtype=self.labels_inputter.dtype\n )\n', (6118, 6255), False, 'from opennmt import layers\n'), ((6678, 6706), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""encoder"""'], {}), "('encoder')\n", (6695, 6706), True, 'import tensorflow as tf\n'), ((14223, 14327), 'opennmt.utils.misc.format_translation_output', 'format_translation_output', (['sentence'], {'score': 'score', 'attention': 'attention', 'alignment_type': 'alignment_type'}), '(sentence, score=score, attention=attention,\n alignment_type=alignment_type)\n', (14248, 14327), False, 'from opennmt.utils.misc import print_bytes, format_translation_output, merge_dict, shape_list\n'), ((17357, 17391), 'tensorflow.strings.split', 'tf.strings.split', (['[alignment_line]'], {}), '([alignment_line])\n', (17373, 17391), True, 'import tensorflow as tf\n'), ((17426, 17468), 'tensorflow.strings.split', 'tf.strings.split', (['align_pairs_str'], {'sep': '"""-"""'}), "(align_pairs_str, sep='-')\n", (17442, 17468), True, 'import tensorflow as tf\n'), ((17506, 17554), 'tensorflow.string_split', 'tf.string_split', (['[alignment_line]'], {'delimiter': '""" """'}), "([alignment_line], delimiter=' ')\n", (17521, 17554), True, 
'import tensorflow as tf\n'), ((17589, 17636), 'tensorflow.string_split', 'tf.string_split', (['align_pairs_str'], {'delimiter': '"""-"""'}), "(align_pairs_str, delimiter='-')\n", (17604, 17636), True, 'import tensorflow as tf\n'), ((21851, 21872), 'six.iteritems', 'six.iteritems', (['module'], {}), '(module)\n', (21864, 21872), False, 'import six\n'), ((7110, 7138), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""decoder"""'], {}), "('decoder')\n", (7127, 7138), True, 'import tensorflow as tf\n'), ((8335, 8389), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""decoder"""'], {'reuse': '(labels is not None)'}), "('decoder', reuse=labels is not None)\n", (8352, 8389), True, 'import tensorflow as tf\n'), ((8551, 8604), 'tensorflow.fill', 'tf.fill', (['[batch_size]', 'constants.START_OF_SENTENCE_ID'], {}), '([batch_size], constants.START_OF_SENTENCE_ID)\n', (8558, 8604), True, 'import tensorflow as tf\n'), ((9797, 9827), 'tensorflow.cast', 'tf.cast', (['sampled_ids', 'tf.int64'], {}), '(sampled_ids, tf.int64)\n', (9804, 9827), True, 'import tensorflow as tf\n'), ((10510, 10533), 'tensorflow.shape', 'tf.shape', (['target_tokens'], {}), '(target_tokens)\n', (10518, 10533), True, 'import tensorflow as tf\n'), ((10558, 10609), 'tensorflow.reshape', 'tf.reshape', (['target_tokens', '[-1, original_shape[-1]]'], {}), '(target_tokens, [-1, original_shape[-1]])\n', (10568, 10609), True, 'import tensorflow as tf\n'), ((10632, 10653), 'opennmt.utils.misc.shape_list', 'shape_list', (['alignment'], {}), '(alignment)\n', (10642, 10653), False, 'from opennmt.utils.misc import print_bytes, format_translation_output, merge_dict, shape_list\n'), ((10674, 10766), 'tensorflow.reshape', 'tf.reshape', (['alignment', '[align_shape[0] * align_shape[1], align_shape[2], align_shape[3]]'], {}), '(alignment, [align_shape[0] * align_shape[1], align_shape[2],\n align_shape[3]])\n', (10684, 10766), True, 'import tensorflow as tf\n'), ((11111, 11161), 'tensorflow.reshape', 'tf.reshape', (['replaced_target_tokens', 'original_shape'], {}), '(replaced_target_tokens, original_shape)\n', (11121, 11161), True, 'import tensorflow as tf\n'), ((11986, 12012), 'six.iteritems', 'six.iteritems', (['predictions'], {}), '(predictions)\n', (11999, 12012), False, 'import six\n'), ((14383, 14411), 'tensorflow.compat.as_bytes', 'tf.compat.as_bytes', (['sentence'], {}), '(sentence)\n', (14401, 14411), True, 'import tensorflow as tf\n'), ((15623, 15667), 'tensorflow.data.TextLineDataset', 'tf.data.TextLineDataset', (['self.alignment_file'], {}), '(self.alignment_file)\n', (15646, 15667), True, 'import tensorflow as tf\n'), ((17861, 17885), 'tensorflow.shape', 'tf.shape', (['sparse_indices'], {}), '(sparse_indices)\n', (17869, 17885), True, 'import tensorflow as tf\n'), ((19318, 19343), 'tensorflow.shape', 'tf.shape', (['attention_probs'], {}), '(attention_probs)\n', (19326, 19343), True, 'import tensorflow as tf\n'), ((10374, 10441), 'tensorflow.contrib.seq2seq.tile_batch', 'tf.contrib.seq2seq.tile_batch', (['source_tokens'], {'multiplier': 'beam_width'}), '(source_tokens, multiplier=beam_width)\n', (10403, 10441), True, 'import tensorflow as tf\n'), ((13025, 13138), 'tensorflow.logging.warning', 'tf.logging.warning', (['"""This model did not return attention vectors; guided alignment will not be applied"""'], {}), "(\n 'This model did not return attention vectors; guided alignment will not be applied'\n )\n", (13043, 13138), True, 'import tensorflow as tf\n'), ((19448, 19479), 'tensorflow.log', 'tf.log', (['(attention_probs + 
1e-06)'], {}), '(attention_probs + 1e-06)\n', (19454, 19479), True, 'import tensorflow as tf\n'), ((19685, 19712), 'tensorflow.expand_dims', 'tf.expand_dims', (['weights', '(-1)'], {}), '(weights, -1)\n', (19699, 19712), True, 'import tensorflow as tf\n'), ((7297, 7333), 'tensorflow.train.get_or_create_global_step', 'tf.train.get_or_create_global_step', ([], {}), '()\n', (7331, 7333), True, 'import tensorflow as tf\n'), ((10962, 10985), 'tensorflow.shape', 'tf.shape', (['target_tokens'], {}), '(target_tokens)\n', (10970, 10985), True, 'import tensorflow as tf\n'), ((6312, 6335), 'tensorflow.get_variable_scope', 'tf.get_variable_scope', ([], {}), '()\n', (6333, 6335), True, 'import tensorflow as tf\n'), ((8421, 8471), 'tensorflow.contrib.framework.nest.flatten', 'tf.contrib.framework.nest.flatten', (['encoder_outputs'], {}), '(encoder_outputs)\n', (8454, 8471), True, 'import tensorflow as tf\n')]
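The alignment utilities in the sample above parse Pharaoh-format lines ("i-j" pairs meaning source word i is aligned with target word j) into a dense [target_length, source_length] matrix. A minimal NumPy sketch of that mapping, independent of the graph-mode TensorFlow ops used in the sample (the helper name is made up for illustration):

import numpy as np

def pharaoh_to_matrix(line, source_length, target_length):
    # "i-j" aligns source position i with target position j.
    matrix = np.zeros((target_length, source_length), dtype=np.float32)
    for pair in line.split():
        src, tgt = (int(v) for v in pair.split("-"))
        matrix[tgt, src] = 1.0
    return matrix

print(pharaoh_to_matrix("0-0 1-2 2-1", source_length=3, target_length=3))
# [[1. 0. 0.]
#  [0. 0. 1.]
#  [0. 1. 0.]]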
#!/usr/bin/env python """ setup the disperion database file structure and configuration file """ import os import tempfile import numpy as np from dispersion import Material, Writer, Interpolation, Catalogue from dispersion.config import default_config, write_config def get_root_dir(conf): """ get the root dir from the user """ question = ("path for root directory of the catalogue file" + " system [default: {}]> ") default = conf['Path'] validator = os.path.isabs data_name = "root directory" return ask_and_confirm(question, default, validator, data_name) def get_catalogue_name(conf): """ get the catalogue file name from the user """ question = ("name of the catalogue file" + " [default: {}]> ") default = conf['File'] validator = valid_file_name data_name = "catalogue file name" return ask_and_confirm(question, default, validator, data_name) def ask_and_confirm(question, default, validator, data_name, confirm=True): """ Returns ------- user_input: str the data from the user confirmed_input: bool true if the input was confirmed by the user Parameters ---------- question: str the question to prompt the user input default: str the default value of this value validator: function function to validate the input data_name: str name of the data that is being input """ user_input = ask(question, default, validator) confirmation_question = ("confirm {} as ".format(data_name) + "{}? [y/n]> ".format(user_input)) return [user_input, get_confirmation(confirmation_question)] def ask(question, default, validator): """ ask for user input with default value and then validate """ valid_input = False while not valid_input: user_input = input(question.format(default)) if user_input == "": user_input = default if validator(user_input): valid_input = True else: print("input is not valid") return user_input def get_confirmation(question): """ get a yes/no answer to a question """ confirmed_input = False while not confirmed_input: confirmation1 = input(question) if confirmation1 in {'y', 'yes'}: confirmed_input = True elif confirmation1 in {'n', 'no'}: confirmed_input = False break else: print("input invalid") return confirmed_input def valid_file_name(filename): """ test if filename is valid create a file with the filename in a temporary directory and delete the directory afterwards. """ with tempfile.TemporaryDirectory() as temp_dir: file_path = os.path.join(temp_dir, filename) try: open(file_path, 'r') return True except IOError: try: open(file_path, 'w') return True except IOError: return False def install_modules(conf): """ make a subfolder for each module and ask to download files """ install_funcs = {"UserData":install_userdata, "RefractiveIndexInfo":install_rii} for module in conf['Modules']: if module == "UserData": install = True else: question = "install module {}? 
[y/n]> ".format(module) install = get_confirmation(question) conf['Modules'][module] = install if install: module_dir = os.path.join(conf['Path'], module) if not os.path.isdir(module_dir): os.mkdir(module_dir) install_funcs[module](module_dir, conf) return conf def install_userdata(module_dir, conf): make_example_txt(module_dir) make_example_yaml(module_dir) def make_example_txt(dir_path): test_data = np.array([[400., 1.7, 0.1], [500., 1.6, 0.05], [600., 1.5, 0.0], [700., 1.4, 0.0]]) mat = Material(tabulated_nk=test_data, spectrum_type='wavelength', unit='nanometer') mat.meta_data['Reference'] = "Literature reference to the data" mat.meta_data['Comment'] = "Any additional information goes here" mat.meta_data['Name'] = "Short name of the material" mat.meta_data['FullName'] = "Full name of the material" mat.meta_data['Author'] = "The author of this data file" mat.meta_data['MetaComment'] = " This is a multiline meta-comment\n" + \ " which provides information not\n" + \ " in metadata" filepath = os.path.join(dir_path, "example_file.txt") write = Writer(filepath, mat) write.write_file() def make_example_yaml(dir_path): model_params = {'name': 'Sellmeier', 'specrtrum_type':'wavelength', 'unit':'micrometer', 'valid_range':np.array([0.350, 2.0]), 'parameters': np.array([0, 1.0, 0.05, 2.0, 0.1, 10., 25.])} mat = Material(model_kw=model_params, spectrum_type='wavelength', unit='micrometer') mat.meta_data['Reference'] = "Literature reference to the data" mat.meta_data['Comment'] = "Any additional information goes here" mat.meta_data['Name'] = "Short name of the material" mat.meta_data['FullName'] = "Full name of the material" mat.meta_data['Author'] = "The author of this data file" mat.meta_data['MetaComment'] = " This is a multiline meta-comment\n" + \ " which provides information not\n" + \ " in metadata" k_data = np.array([[400., 0.1], [500., 0.05], [600., 0.0], [700., 0.0]]) interp = Interpolation(k_data, unit='nm') mat.data['imag'] = interp filepath = os.path.join(dir_path, "example_file2.yml") write = Writer(filepath, mat) write.write_file() def install_rii(module_dir, conf): """ download the refractive index info database from github """ question = ("download the refractive index info database from github?" + " (required python package <GitPython>)" + " [y/n]> ") install = get_confirmation(question) if install: from git import Repo git_url = "https://github.com/polyanskiy/refractiveindex.info-database.git" #install_dir = os.path.join(conf['Path'], "RefractiveIndexInfo") Repo.clone_from(git_url, module_dir) def maybe_rebuild_catalogue(conf): question = "rebuild catalogue? [y/n]> " rebuild = get_confirmation(question) if rebuild: cat = Catalogue(config=conf, rebuild= 'All') cat.save_to_file() def main(): conf = default_config() print("This script will provide a default configuration for the \n"+ "dispersion package") confirmed_valid_path = False while not confirmed_valid_path: [path, confirmed_valid_path] = get_root_dir(conf) conf['Path'] = path #print("Path will be se to: {}".format(path)) confirmed_db_nane = False while not confirmed_db_nane: [name, confirmed_db_nane] = get_catalogue_name(conf) conf['File'] = name #print("Filename will be set to {}".format(name)) conf = install_modules(conf) write_config(conf) maybe_rebuild_catalogue(conf) if __name__ == "__main__": main()
[ "os.mkdir", "tempfile.TemporaryDirectory", "dispersion.Material", "dispersion.Writer", "os.path.isdir", "dispersion.config.default_config", "dispersion.config.write_config", "dispersion.Interpolation", "numpy.array", "git.Repo.clone_from", "dispersion.Catalogue", "os.path.join" ]
[((3971, 4062), 'numpy.array', 'np.array', (['[[400.0, 1.7, 0.1], [500.0, 1.6, 0.05], [600.0, 1.5, 0.0], [700.0, 1.4, 0.0]]'], {}), '([[400.0, 1.7, 0.1], [500.0, 1.6, 0.05], [600.0, 1.5, 0.0], [700.0,\n 1.4, 0.0]])\n', (3979, 4062), True, 'import numpy as np\n'), ((4143, 4221), 'dispersion.Material', 'Material', ([], {'tabulated_nk': 'test_data', 'spectrum_type': '"""wavelength"""', 'unit': '"""nanometer"""'}), "(tabulated_nk=test_data, spectrum_type='wavelength', unit='nanometer')\n", (4151, 4221), False, 'from dispersion import Material, Writer, Interpolation, Catalogue\n'), ((4774, 4816), 'os.path.join', 'os.path.join', (['dir_path', '"""example_file.txt"""'], {}), "(dir_path, 'example_file.txt')\n", (4786, 4816), False, 'import os\n'), ((4829, 4850), 'dispersion.Writer', 'Writer', (['filepath', 'mat'], {}), '(filepath, mat)\n', (4835, 4850), False, 'from dispersion import Material, Writer, Interpolation, Catalogue\n'), ((5277, 5355), 'dispersion.Material', 'Material', ([], {'model_kw': 'model_params', 'spectrum_type': '"""wavelength"""', 'unit': '"""micrometer"""'}), "(model_kw=model_params, spectrum_type='wavelength', unit='micrometer')\n", (5285, 5355), False, 'from dispersion import Material, Writer, Interpolation, Catalogue\n'), ((5887, 5954), 'numpy.array', 'np.array', (['[[400.0, 0.1], [500.0, 0.05], [600.0, 0.0], [700.0, 0.0]]'], {}), '([[400.0, 0.1], [500.0, 0.05], [600.0, 0.0], [700.0, 0.0]])\n', (5895, 5954), True, 'import numpy as np\n'), ((6033, 6065), 'dispersion.Interpolation', 'Interpolation', (['k_data'], {'unit': '"""nm"""'}), "(k_data, unit='nm')\n", (6046, 6065), False, 'from dispersion import Material, Writer, Interpolation, Catalogue\n'), ((6111, 6154), 'os.path.join', 'os.path.join', (['dir_path', '"""example_file2.yml"""'], {}), "(dir_path, 'example_file2.yml')\n", (6123, 6154), False, 'import os\n'), ((6167, 6188), 'dispersion.Writer', 'Writer', (['filepath', 'mat'], {}), '(filepath, mat)\n', (6173, 6188), False, 'from dispersion import Material, Writer, Interpolation, Catalogue\n'), ((7017, 7033), 'dispersion.config.default_config', 'default_config', ([], {}), '()\n', (7031, 7033), False, 'from dispersion.config import default_config, write_config\n'), ((7580, 7598), 'dispersion.config.write_config', 'write_config', (['conf'], {}), '(conf)\n', (7592, 7598), False, 'from dispersion.config import default_config, write_config\n'), ((2771, 2800), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (2798, 2800), False, 'import tempfile\n'), ((2834, 2866), 'os.path.join', 'os.path.join', (['temp_dir', 'filename'], {}), '(temp_dir, filename)\n', (2846, 2866), False, 'import os\n'), ((5075, 5096), 'numpy.array', 'np.array', (['[0.35, 2.0]'], {}), '([0.35, 2.0])\n', (5083, 5096), True, 'import numpy as np\n'), ((5133, 5179), 'numpy.array', 'np.array', (['[0, 1.0, 0.05, 2.0, 0.1, 10.0, 25.0]'], {}), '([0, 1.0, 0.05, 2.0, 0.1, 10.0, 25.0])\n', (5141, 5179), True, 'import numpy as np\n'), ((6739, 6775), 'git.Repo.clone_from', 'Repo.clone_from', (['git_url', 'module_dir'], {}), '(git_url, module_dir)\n', (6754, 6775), False, 'from git import Repo\n'), ((6927, 6964), 'dispersion.Catalogue', 'Catalogue', ([], {'config': 'conf', 'rebuild': '"""All"""'}), "(config=conf, rebuild='All')\n", (6936, 6964), False, 'from dispersion import Material, Writer, Interpolation, Catalogue\n'), ((3628, 3662), 'os.path.join', 'os.path.join', (["conf['Path']", 'module'], {}), "(conf['Path'], module)\n", (3640, 3662), False, 'import os\n'), ((3682, 3707), 
'os.path.isdir', 'os.path.isdir', (['module_dir'], {}), '(module_dir)\n', (3695, 3707), False, 'import os\n'), ((3725, 3745), 'os.mkdir', 'os.mkdir', (['module_dir'], {}), '(module_dir)\n', (3733, 3745), False, 'import os\n')]
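The valid_file_name helper in the sample above probes whether a name can be created on the file system, but it opens the probe files without closing them. A minimal standalone variant of the same check, written with context managers so every handle is released before the temporary directory is removed (a sketch of the idea, not the package's implementation):

import os
import tempfile

def valid_file_name(filename):
    # Try to create the file inside a throw-away directory; the directory
    # (and the probe file) is deleted when the context manager exits.
    with tempfile.TemporaryDirectory() as temp_dir:
        file_path = os.path.join(temp_dir, filename)
        try:
            with open(file_path, "w"):
                pass
            return True
        except OSError:
            return False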
import os
import math
import torch
from torch import nn, optim
import logging
import numpy as np
import torch.nn.functional as F
from torch.autograd import Variable
import utils
from contrastqg import (T5ForConditionalGeneration)
logger = logging.getLogger()


class QGenerator(object):
    def __init__(self, args, tokenizer):
        self.network = T5ForConditionalGeneration.from_pretrained(args.pretrain_generator_type)
        self.network.resize_token_embeddings(len(tokenizer))
        self.network.load_state_dict(torch.load(args.generator_load_dir + '/models.pkl'))
        logger.info("success load checkpoint from {} !".format(args.generator_load_dir))
        self.tokenizer = tokenizer
        self.batchify_inputs = utils.select_gen_input_refactor(args)

    def predict(self, inputs):
        self.network.eval()
        outputs = self.network.generate(**inputs)
        pred_tokens = self.tokenizer.convert_outputs_to_tokens(outputs)
        return pred_tokens

    def set_device(self, device):
        self.device = device
        self.network.to(self.device)

    def parallelize(self):
        """Use data parallel to copy the model across several gpus.
        This will take all gpus visible with CUDA_VISIBLE_DEVICES.
        """
        self.parallel = True
        self.network = torch.nn.DataParallel(self.network)
[ "torch.load", "contrastqg.T5ForConditionalGeneration.from_pretrained", "torch.nn.DataParallel", "utils.select_gen_input_refactor", "logging.getLogger" ]
[((240, 259), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (257, 259), False, 'import logging\n'), ((351, 423), 'contrastqg.T5ForConditionalGeneration.from_pretrained', 'T5ForConditionalGeneration.from_pretrained', (['args.pretrain_generator_type'], {}), '(args.pretrain_generator_type)\n', (393, 423), False, 'from contrastqg import T5ForConditionalGeneration\n'), ((730, 767), 'utils.select_gen_input_refactor', 'utils.select_gen_input_refactor', (['args'], {}), '(args)\n', (761, 767), False, 'import utils\n'), ((1333, 1368), 'torch.nn.DataParallel', 'torch.nn.DataParallel', (['self.network'], {}), '(self.network)\n', (1354, 1368), False, 'import torch\n'), ((522, 573), 'torch.load', 'torch.load', (["(args.generator_load_dir + '/models.pkl')"], {}), "(args.generator_load_dir + '/models.pkl')\n", (532, 573), False, 'import torch\n')]
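The QGenerator wrapper above follows a common pattern for loading a fine-tuned seq2seq checkpoint: instantiate a pretrained T5, resize its embeddings to account for tokens added to the tokenizer, then load the fine-tuned state dict. A hedged sketch of that pattern using the Hugging Face transformers classes (the sample imports its model class from contrastqg instead, and the model name, extra tokens, and checkpoint path below are placeholders):

import torch
from transformers import T5ForConditionalGeneration, T5Tokenizer

tokenizer = T5Tokenizer.from_pretrained("t5-base")                 # placeholder model name
tokenizer.add_special_tokens(
    {"additional_special_tokens": ["<answer>", "<context>"]})  # assumed extra tokens

model = T5ForConditionalGeneration.from_pretrained("t5-base")
model.resize_token_embeddings(len(tokenizer))      # match the enlarged vocabulary
state = torch.load("checkpoints/models.pkl", map_location="cpu")  # placeholder path
model.load_state_dict(state)
model.eval()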
import sys sys.path.append("utils") sys.path.append("models") from file_io import * from train_utils import * import numpy as np import pandas as pd import matplotlib as mp import matplotlib.pyplot as plt import time from test import init_test from pathlib import Path import torch from torch.utils.data import Dataset, DataLoader, sampler from torch import nn from random_word import RandomWords import pandas as pd from dataloaders import get_double_scan_v1_loader #from Unet2D import Unet2D import torch.optim as optim import torchvision from tqdm import tqdm import torch.nn.functional as F import time import os from importlib import import_module from torch.utils.tensorboard import SummaryWriter import torchgeometry from logger_utils import * np.random.seed(int(time.time())) PRINT_DEBUG = False def train_step(X_batch, Y_batch, optimizer, model, loss_fn, acc_fn): X_batch = X_batch.cuda() Y_batch = Y_batch.cuda() optimizer.zero_grad() outputs = model(X_batch) loss = loss_fn(outputs, Y_batch) loss.backward() optimizer.step() acc = acc_fn(outputs, Y_batch) return loss, acc, outputs def check_accuracy(valid_dl, model, loss_fn, acc_fn, classes, tb_writer, seen_train_ex, other_logdir): model.eval() running_loss = 0.0 running_acc = 0.0 running_dice = 0.0 running_class_dice = np.zeros(classes) save_batch = True with torch.no_grad(): for X_batch, Y_batch in valid_dl: X_batch = X_batch.cuda() Y_batch = Y_batch.cuda() cur_batch_sz = X_batch.size(0) outputs = model(X_batch) loss = loss_fn(outputs, Y_batch.long()) acc = acc_fn(outputs, Y_batch) dice_score, dice_class_scores = mean_dice_score(outputs, Y_batch, classes) running_acc += acc * cur_batch_sz running_loss += loss * cur_batch_sz running_dice += dice_score * cur_batch_sz running_class_dice += dice_class_scores * cur_batch_sz average_loss = running_loss / len(valid_dl.dataset) average_acc = running_acc / len(valid_dl.dataset) average_dice_sc = running_dice / len(valid_dl.dataset) average_dice_class_sc = running_class_dice / len(valid_dl.dataset) tb_writer.add_scalar("Val CE loss", average_loss, seen_train_ex) tb_writer.add_scalar("Val dice acc", average_dice_sc, seen_train_ex) tb_writer.add_scalar("Val px acc", average_acc, seen_train_ex) #tb_writer.add_custom_scalars("Val class dice acc", numpy_to_class_dict(average_dice_class_sc), seen_train_ex) for i,value in enumerate(average_dice_class_sc): tb_writer.add_scalar(f'Val dice class_{i+1}', value, seen_train_ex) print('{} Loss: {:.4f} PxAcc: {} Dice: {}'.format("Validation", average_loss, average_acc, average_dice_sc)) return average_dice_sc, average_dice_class_sc def numpy_to_class_dict(np_arr): ret_dict = {} for val in np_arr: ret_dict[f'Class {val+1}'] = val return ret_dict def train(model, classes, train_dl, valid_dl, loss_fn, optimizer, scheduler, acc_fn, epochs, tb_writer, hparam_log, other_logdir): print(other_logdir) start = time.time() model.cuda() len_train_ds = len(train_dl.dataset) print("Len train ds") print(len_train_ds) seen_train_ex = 0 avg_dice = 0.0 avg_train_loss = 0.0 best_acc = 0.0 runs_without_improved_dice = 0 highest_dice = 0.0 seen_train_ex_highest_dice = 0 hparam_log["hgst dice"] = 0.0 hparam_log["hgst dice step"] = 0.0 hparam_log["hgst dice tr CE loss"] = 0.0 for epoch in range(epochs): save_batch = True model.train() weight = epoch/epochs print("weight", weight) #loss_fn = weighted_combined_loss(nn.CrossEntropyLoss(), dice_loss, weight) print('Epoch {}/{}'.format(epoch, epochs - 1)) print('-' * 10) running_loss = 0.0 running_acc = 0.0 step = 0 # iterate over data for 
X_batch, Y_batch in train_dl: #print("x batch shape",X_batch.shape) #print("y batch shape",Y_batch.shape) loss, acc, outputs = train_step(X_batch, Y_batch, optimizer, model, loss_fn, acc_fn) running_acc += acc*X_batch.size(0) running_loss += loss*X_batch.size(0) step += 1 seen_train_ex += X_batch.size(0) tb_writer.add_scalar("Train CE loss", loss, seen_train_ex) tb_writer.add_scalar("Train px acc", acc, seen_train_ex) if step % 25 == 0: print('Current step: {} Loss: {} Acc: {} '.format(step, loss, acc)) avg_dice, avg_dice_cl = check_accuracy(valid_dl, model, loss_fn, acc_fn, classes, tb_writer, seen_train_ex, other_logdir) if avg_dice > highest_dice: print("highest_dice", highest_dice) highest_dice = avg_dice highest_dice_cl = avg_dice_cl hparam_log["hgst dice"] = highest_dice for i,dice in enumerate(avg_dice_cl): hparam_log[f'Class {i+1}'] = dice hparam_log["hgst dice step"] = seen_train_ex hparam_log["hgst dice tr CE loss"] = loss.item() runs_without_improved_dice = 0 torch.save(model.state_dict(), os.path.join(other_logdir, "state_dict.pth")) else: runs_without_improved_dice +=1 avg_train_loss = running_loss / len_train_ds avg_train_acc = running_acc / len_train_ds scheduler.step(avg_train_loss) print_epoch_stats(epoch, epochs, avg_train_loss, avg_train_acc) if runs_without_improved_dice > 20: print("Dice not improving for 12 epochs, abort training") break hparam_log["last step"] = seen_train_ex hparam_log["last dice"] = avg_dice hparam_log["last train loss"] = avg_train_loss time_elapsed = time.time() - start print('Training complete in {:.0f}m {:.0f}s'.format(time_elapsed // 60, time_elapsed % 60)) def dict_to_numpy(hparam_dict): hparam_dict["last train loss"] = hparam_dict["last train loss"].item() for key in hparam_dict: try: hparam_dict[key] = hparam_dict[key].item() except: pass try: hparam_dict[key] = hparam_dict[key].detach().cpu().numpy() except: pass def init_test(cfg): hparam_log = {} bs = cfg["batch_size"] epochs_val = cfg["epochs"] learn_rate = cfg["learning_rate"] lr_patience = cfg["lr_patience"] train_transforms = cfg["train_transforms"] val_transforms = cfg["val_transforms"] model_file = cfg["model"] dataset = cfg["dataset"] channel_ratio = cfg["channel_ratio"] cross_entr_weights = cfg["cross_entr_weights"] continue_training = False if "custom_logdir" in cfg: cust_logdir = cfg["custom_logdir"] else: cust_logdir = "" tb_logdir = os.path.join("logdir", "tensorboard", dataset, cust_logdir, model_file) other_logdir = os.path.join("logdir", "other", dataset, cust_logdir, model_file) print("other_logdir", other_logdir) try: try_number = len(os.listdir(tb_logdir)) except: try_number = 0 r = RandomWords() if continue_training: logdir_folder = "N1_None" else: random_word = r.get_random_word() logdir_folder = f'N{try_number}_{random_word}' tb_logdir = os.path.join(tb_logdir, logdir_folder) other_logdir = os.path.join(other_logdir, logdir_folder) os.makedirs(other_logdir, exist_ok=True) print("other_logdir:", other_logdir) print("tb_logdir:", tb_logdir) tb_writer = SummaryWriter(tb_logdir) train_loader, val_loader = get_double_scan_v1_loader(bs, train_transforms) classes = 1 model_path = os.path.join("models",dataset) model_import = import_model_from_path(model_file, model_path) unet = model_import.Unet2D(6,2, channel_ratio) if continue_training: unet.load_state_dict(torch.load(os.path.join(other_logdir, "state_dict.pth"))) unet.cuda() loss_fn = torchgeometry.losses.dice_loss loss_fn = torch.nn.CrossEntropyLoss(weight=torch.tensor(cross_entr_weights).cuda()) #loss_fn2 = dice_loss 
#loss_fn3 = weighted_combined_loss(loss_fn, loss_fn2) opt = torch.optim.Adam(unet.parameters(), lr=learn_rate) scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(opt, patience=3, verbose=True) train(unet, classes, train_loader, val_loader, loss_fn, opt, scheduler, mean_pixel_accuracy, epochs_val, tb_writer, hparam_log, other_logdir) init_test(cfg, logdir_folder, cust_logdir) if __name__ == "__main__": args = add_config_parser() cfg = get_dict(args, print_config=True) init_test(cfg)
[ "sys.path.append", "torch.no_grad", "os.makedirs", "numpy.zeros", "torch.optim.lr_scheduler.ReduceLROnPlateau", "time.time", "dataloaders.get_double_scan_v1_loader", "random_word.RandomWords", "torch.utils.tensorboard.SummaryWriter", "test.init_test", "os.path.join", "os.listdir", "torch.tensor" ]
[((11, 35), 'sys.path.append', 'sys.path.append', (['"""utils"""'], {}), "('utils')\n", (26, 35), False, 'import sys\n'), ((36, 61), 'sys.path.append', 'sys.path.append', (['"""models"""'], {}), "('models')\n", (51, 61), False, 'import sys\n'), ((1356, 1373), 'numpy.zeros', 'np.zeros', (['classes'], {}), '(classes)\n', (1364, 1373), True, 'import numpy as np\n'), ((3180, 3191), 'time.time', 'time.time', ([], {}), '()\n', (3189, 3191), False, 'import time\n'), ((6984, 7055), 'os.path.join', 'os.path.join', (['"""logdir"""', '"""tensorboard"""', 'dataset', 'cust_logdir', 'model_file'], {}), "('logdir', 'tensorboard', dataset, cust_logdir, model_file)\n", (6996, 7055), False, 'import os\n'), ((7075, 7140), 'os.path.join', 'os.path.join', (['"""logdir"""', '"""other"""', 'dataset', 'cust_logdir', 'model_file'], {}), "('logdir', 'other', dataset, cust_logdir, model_file)\n", (7087, 7140), False, 'import os\n'), ((7284, 7297), 'random_word.RandomWords', 'RandomWords', ([], {}), '()\n', (7295, 7297), False, 'from random_word import RandomWords\n'), ((7482, 7520), 'os.path.join', 'os.path.join', (['tb_logdir', 'logdir_folder'], {}), '(tb_logdir, logdir_folder)\n', (7494, 7520), False, 'import os\n'), ((7540, 7581), 'os.path.join', 'os.path.join', (['other_logdir', 'logdir_folder'], {}), '(other_logdir, logdir_folder)\n', (7552, 7581), False, 'import os\n'), ((7586, 7626), 'os.makedirs', 'os.makedirs', (['other_logdir'], {'exist_ok': '(True)'}), '(other_logdir, exist_ok=True)\n', (7597, 7626), False, 'import os\n'), ((7720, 7744), 'torch.utils.tensorboard.SummaryWriter', 'SummaryWriter', (['tb_logdir'], {}), '(tb_logdir)\n', (7733, 7744), False, 'from torch.utils.tensorboard import SummaryWriter\n'), ((7778, 7825), 'dataloaders.get_double_scan_v1_loader', 'get_double_scan_v1_loader', (['bs', 'train_transforms'], {}), '(bs, train_transforms)\n', (7803, 7825), False, 'from dataloaders import get_double_scan_v1_loader\n'), ((7860, 7891), 'os.path.join', 'os.path.join', (['"""models"""', 'dataset'], {}), "('models', dataset)\n", (7872, 7891), False, 'import os\n'), ((8435, 8508), 'torch.optim.lr_scheduler.ReduceLROnPlateau', 'torch.optim.lr_scheduler.ReduceLROnPlateau', (['opt'], {'patience': '(3)', 'verbose': '(True)'}), '(opt, patience=3, verbose=True)\n', (8477, 8508), False, 'import torch\n'), ((8662, 8704), 'test.init_test', 'init_test', (['cfg', 'logdir_folder', 'cust_logdir'], {}), '(cfg, logdir_folder, cust_logdir)\n', (8671, 8704), False, 'from test import init_test\n'), ((8815, 8829), 'test.init_test', 'init_test', (['cfg'], {}), '(cfg)\n', (8824, 8829), False, 'from test import init_test\n'), ((776, 787), 'time.time', 'time.time', ([], {}), '()\n', (785, 787), False, 'import time\n'), ((1405, 1420), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1418, 1420), False, 'import torch\n'), ((5921, 5932), 'time.time', 'time.time', ([], {}), '()\n', (5930, 5932), False, 'import time\n'), ((7216, 7237), 'os.listdir', 'os.listdir', (['tb_logdir'], {}), '(tb_logdir)\n', (7226, 7237), False, 'import os\n'), ((5314, 5358), 'os.path.join', 'os.path.join', (['other_logdir', '"""state_dict.pth"""'], {}), "(other_logdir, 'state_dict.pth')\n", (5326, 5358), False, 'import os\n'), ((8076, 8120), 'os.path.join', 'os.path.join', (['other_logdir', '"""state_dict.pth"""'], {}), "(other_logdir, 'state_dict.pth')\n", (8088, 8120), False, 'import os\n'), ((8233, 8265), 'torch.tensor', 'torch.tensor', (['cross_entr_weights'], {}), '(cross_entr_weights)\n', (8245, 8265), False, 'import torch\n')]
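The validation loop in the training script above averages a Dice score and per-class Dice scores returned by mean_dice_score, which is imported from train_utils and not included in the sample. A minimal sketch of what such a function could look like for logits of shape [B, C, H, W] and integer labels of shape [B, H, W] (the soft-Dice formulation and the epsilon smoothing are assumptions):

import torch
import torch.nn.functional as F

def mean_dice_score(logits, target, num_classes, eps=1e-6):
    # Soft Dice per class, accumulated over the batch and spatial dimensions.
    probs = F.softmax(logits, dim=1)
    one_hot = F.one_hot(target.long(), num_classes=probs.shape[1])
    one_hot = one_hot.permute(0, 3, 1, 2).float()
    dims = (0, 2, 3)
    intersection = torch.sum(probs * one_hot, dims)
    cardinality = torch.sum(probs + one_hot, dims)
    dice_per_class = (2.0 * intersection + eps) / (cardinality + eps)
    # Report only the foreground classes, matching the per-class logging above.
    foreground = dice_per_class[1:num_classes + 1]
    return foreground.mean().item(), foreground.detach().cpu().numpy()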
import logging import datetime import base64 import uuid import os from datetime import datetime, timedelta from django import forms from django.core.files.base import ContentFile from django.forms import modelformset_factory from django.forms.formsets import BaseFormSet from django.contrib.auth.models import User from django.conf import settings from django.core.files.base import ContentFile from django.utils.timezone import localtime, now from django.shortcuts import get_object_or_404 from localflavor.us.us_states import US_STATES from localflavor.ca.ca_provinces import PROVINCE_CHOICES from nadine import email from nadine.utils import mailgun from nadine.models.core import HowHeard, Industry, Neighborhood, URLType, GENDER_CHOICES from nadine.models.profile import UserProfile, MemberNote, user_photo_path from nadine.models.membership import Membership, MembershipPackage, ResourceSubscription, IndividualMembership, SubscriptionDefault from nadine.models.usage import PAYMENT_CHOICES, CoworkingDay from nadine.models.resource import Room, Resource from nadine.models.organization import Organization, OrganizationMember from nadine.models.billing import UserBill, Payment from nadine.utils.payment_api import PaymentAPI from member.models import HelpText, MOTD logger = logging.getLogger(__name__) class DateRangeForm(forms.Form): START_DATE_PARAM = 'start' END_DATE_PARAM = 'end' start = forms.DateField() end = forms.DateField() @staticmethod def from_request(request, days=31): # Pull the start and end parameters start_param = DateRangeForm.START_DATE_PARAM end_param = DateRangeForm.END_DATE_PARAM # Get our start date start_str = request.POST.get(start_param, None) if not start_str: start_str = request.GET.get(start_param, None) if not start_str: start_date = localtime(now()).date() - timedelta(days=days) start_str = start_date.isoformat() # Get our end date end_str = request.POST.get(end_param, None) if not end_str: end_str = request.GET.get(end_param, None) if not end_str: tomorrow = localtime(now()) + timedelta(days=1) end_str = tomorrow.date().isoformat() return DateRangeForm({start_param: start_str, end_param: end_str}) def get_dates(self): if not self.is_valid(): return (None, None) return (self.cleaned_data['start'], self.cleaned_data['end']) class OrganizationForm(forms.Form): def __init__(self, *args, **kwargs): if 'instance' in kwargs: self.instance = kwargs['instance'] del kwargs['instance'] super(OrganizationForm, self).__init__(*args, **kwargs) if hasattr(self, 'instance'): self.initial['org_id'] = self.instance.id self.initial['name'] = self.instance.name self.initial['blurb'] = self.instance.blurb self.initial['bio'] = self.instance.bio self.initial['photo'] = self.instance.photo self.initial['public'] = self.instance.public #self.initial['locked'] = self.instance.locked org_id = forms.IntegerField(required=True, widget=forms.HiddenInput) name = forms.CharField(min_length=1, max_length=128, label="Organization Name", required=True, widget=forms.TextInput(attrs={'autocapitalize': "words"})) blurb = forms.CharField(widget=forms.Textarea, max_length=112, required=False) bio = forms.CharField(widget=forms.Textarea, max_length=512, required=False) photo = forms.FileField(required=False) public = forms.BooleanField(required=False) #locked = forms.BooleanField(required=False) def save(self): org_id = self.cleaned_data['org_id'] org = Organization.objects.get(id=org_id) org.name = self.cleaned_data['name'] org.blurb = self.cleaned_data['blurb'] org.bio = self.cleaned_data['bio'] if 
self.cleaned_data['photo']: # Delete the old one before we save the new one org.photo.delete() org.photo = self.cleaned_data['photo'] if 'public' in self.cleaned_data: org.public = self.cleaned_data['public'] if 'locked' in self.cleaned_data: org.locked = self.cleaned_data['locked'] org.save() class OrganizationSearchForm(forms.Form): terms = forms.CharField(max_length=100) class OrganizationMemberForm(forms.Form): def __init__(self, *args, **kwargs): if 'instance' in kwargs: self.instance = kwargs['instance'] del kwargs['instance'] super(OrganizationMemberForm, self).__init__(*args, **kwargs) if hasattr(self, 'instance'): self.initial['member_id'] = self.instance.id self.initial['org_id'] = self.instance.organization.id self.initial['username'] = self.instance.user.username self.initial['title'] = self.instance.title self.initial['start_date'] = self.instance.start_date self.initial['end_date'] = self.instance.end_date self.initial['admin'] = self.instance.admin org_id = forms.IntegerField(required=True, widget=forms.HiddenInput) member_id = forms.IntegerField(required=False, widget=forms.HiddenInput) username = forms.CharField(required=False, widget=forms.HiddenInput) title = forms.CharField(max_length=128, required=False, widget=forms.TextInput(attrs={'autocapitalize': "words"})) start_date = forms.DateField(widget=forms.DateInput(attrs={'placeholder':'e.g. 12/28/16', 'class':'datepicker'}, format='%m/%d/%Y'), required=True) end_date = forms.DateField(widget=forms.DateInput(attrs={'placeholder':'e.g. 12/28/16', 'class':'datepicker'}, format='%m/%d/%Y'), required=False) admin = forms.BooleanField(required=False) def is_valid(self): # run the parent validation first super_valid = super(OrganizationMemberForm, self).is_valid() has_member_id = 'member_id' in self.cleaned_data and self.cleaned_data['member_id'] has_username = 'username' in self.cleaned_data and self.cleaned_data['username'] if not (has_member_id or has_username): self.add_error('username', 'No user data provided') return super_valid and (has_member_id or has_username) def clean_member(self): self.member = None self.organization = Organization.objects.get(id=self.cleaned_data['org_id']) # Populate our member from either member_id (edit) or username (add) member_id = self.cleaned_data['member_id'] username=self.cleaned_data['username'] if member_id: self.member = OrganizationMember.objects.get(id=member_id) elif username: user = User.objects.get(username=username) self.member = OrganizationMember(organization=self.organization, user=user) else: raise Exception("Form must contain member_id or username!") return self.member def save(self): if not self.is_valid(): raise Exception('The form must be valid in order to save') member = self.clean_member() member.title = self.cleaned_data['title'] member.start_date = self.cleaned_data['start_date'] member.end_date = self.cleaned_data['end_date'] if 'admin' in self.cleaned_data: member.admin = self.cleaned_data['admin'] member.save() class PaymentForm(forms.Form): bill_id = forms.IntegerField(required=True, widget=forms.HiddenInput) username = forms.CharField(required=True, widget=forms.HiddenInput) # created_by = forms.CharField(required=False, widget=forms.HiddenInput) payment_date = forms.DateField(required=True, widget=forms.DateInput(attrs={'placeholder':'e.g. 
12/28/16', 'class':'datepicker'}, format='%m/%d/%Y')) note = forms.CharField(max_length=256, required=False) amount = forms.DecimalField(min_value=0, max_value=10000, required=True, max_digits=7, decimal_places=2) def save(self, created_by=None): bill = UserBill.objects.get(pk=self.cleaned_data['bill_id']) user = User.objects.get(username=self.cleaned_data['username']) amount = self.cleaned_data['amount'] if amount > bill.total_owed: raise Exception("Amount of $%s exceeds amount owed $%s" %(amount, bill.total_owed)) payment = Payment.objects.create(bill=bill, user=user) payment.created_ts = self.cleaned_data['payment_date'] if created_by: payment.created_by = User.objects.get(username=created_by) payment.note = self.cleaned_data['note'] payment.amount = amount payment.save() return payment class MemberSearchForm(forms.Form): terms = forms.CharField(max_length=100) class NewUserForm(forms.Form): first_name = forms.CharField(max_length=100, label="First name *", required=True, widget=forms.TextInput(attrs={'autocapitalize': "words"})) last_name = forms.CharField(max_length=100, label="Last name *", required=True, widget=forms.TextInput(attrs={'autocapitalize': "words"})) email = forms.EmailField(max_length=100, label="Email *", required=True) def clean_first_name(self): return self.cleaned_data['first_name'].strip().title() def clean_last_name(self): return self.cleaned_data['last_name'].strip().title() def clean_email(self): email = self.cleaned_data['email'].strip().lower() if User.objects.filter(email=email).count() > 0: raise forms.ValidationError("Email address '%s' already in use." % email) if not mailgun.validate_address(email): raise forms.ValidationError("Email address '%s' is not valid." % email) return email def create_username(self, suffix=""): clean_first = self.cleaned_data['first_name'].strip().lower() clean_last = self.cleaned_data['last_name'].strip().lower() username = "%s_%s%s" % (clean_first, clean_last, suffix) clean_username = username.replace(" ", "_") clean_username = clean_username.replace(".", "_") clean_username = clean_username.replace("-", "_") clean_username = clean_username.replace("+", "") clean_username = clean_username.replace("@", "") clean_username = clean_username.replace("'", "") return clean_username def save(self): "Creates the User and Profile records with the field data and returns the user" if not self.is_valid(): raise Exception('The form must be valid in order to save') # Generate a unique username tries = 1 username = self.create_username() while User.objects.filter(username=username).count() > 0: tries = tries + 1 username = self.create_username(suffix=tries) first = self.cleaned_data['first_name'] last = self.cleaned_data['last_name'] email = self.cleaned_data['email'] user = User(username=username, first_name=first, last_name=last, email=email) password = User.objects.make_random_password(length=32) user.set_password(password) user.save() return user class Meta: widgets = { 'first_name': forms.TextInput(attrs={'autocapitalize': 'on', 'autocorrect': 'off'}), 'last_name': forms.TextInput(attrs={'autocapitalize': 'on', 'autocorrect': 'off'}), } def get_state_choices(): if settings.COUNTRY == 'US': return US_STATES elif settings.COUNTRY == 'CA': return PROVINCE_CHOICES class ProfileImageForm(forms.Form): username = forms.CharField(required=False, widget=forms.HiddenInput) organization = forms.IntegerField(required=False, widget=forms.HiddenInput) photo = forms.FileField(required=False) cropped_image_data = 
forms.CharField(widget=forms.HiddenInput()) def save(self): raw_img_data = self.cleaned_data['cropped_image_data'] if not raw_img_data or len(raw_img_data) == 0: # Nothing to save here return img_data = base64.b64decode(raw_img_data) if self.cleaned_data['username']: user = get_object_or_404(User, username=self.cleaned_data['username']) filename = "user_photos/%s.jpg" % self.cleaned_data['username'] if user.profile.photo: user.profile.photo.delete() user.profile.photo.save(filename, ContentFile(img_data)) elif self.cleaned_data['organization']: organization = get_object_or_404(Organization, id=self.cleaned_data['organization']) filename = "org_photos/%s.jpg" % self.cleaned_data['username'] if organization.photo: organization.photo.delete() organization.photo.save(filename, ContentFile(img_data)) class BaseLinkFormSet(BaseFormSet): def clean(self): if any(self.errors): return url_types = [] urls = [] for form in self.forms: if form.cleaned_data: username = form.cleaned_data['username'] org_id = form.cleaned_data['org_id'] url_type = form.cleaned_data['url_type'] url = form.cleaned_data['url'] if url_type and url : urls.append(url) if url and not url_type: raise forms.ValidationError(message='All websites must have a URL', code='missing_anchor') if url_type and not url: raise forms.ValidationError(message='All URLS must have a type', code='missing_type') class LinkForm(forms.Form): username = forms.CharField(required=False, widget=forms.HiddenInput) org_id = forms.IntegerField(required=False, widget=forms.HiddenInput) url_type = forms.ModelChoiceField(widget=forms.Select(attrs={'class': 'browser-default'}), label='Website Type', queryset=URLType.objects.all(), required=False) url = forms.URLField(widget=forms.URLInput(attrs={'placeholder': 'http://www.facebook.com/myprofile'}), required=False) def save(self): if not self.is_valid(): raise Exception('The form must be valid in order to save') if self.cleaned_data['username']: user = User.objects.get(username=self.cleaned_data['username']) user.profile.save_url(self.cleaned_data['url_type'], self.cleaned_data['url']) if self.cleaned_data['org_id']: org = Organization.objects.get(id=self.cleaned_data['org_id']) org.save_url(self.cleaned_data['url_type'], self.cleaned_data['url']) class EditProfileForm(forms.Form): username = forms.CharField(required=True, widget=forms.HiddenInput) first_name = forms.CharField(max_length=100, required=True) last_name = forms.CharField(max_length=100, required=True) address1 = forms.CharField(max_length=100, required=False) address2 = forms.CharField(max_length=100, required=False) city = forms.CharField(max_length=100, required=False) state = forms.ChoiceField(widget=forms.Select(attrs={'class': 'browser-default'}), choices=get_state_choices, required=False) zipcode = forms.CharField(max_length=16, required=False) phone = forms.CharField(max_length=20, required=False) phone2 = forms.CharField(max_length=20, required=False) url_personal = forms.URLField(required=False) url_professional = forms.URLField(required=False) url_facebook = forms.URLField(required=False) url_twitter = forms.URLField(required=False) url_linkedin = forms.URLField(required=False) url_github = forms.URLField(required=False) gender = forms.ChoiceField(widget=forms.Select(attrs={'class': 'browser-default'}), choices=GENDER_CHOICES, required=False) howHeard = forms.ModelChoiceField(widget=forms.Select(attrs={'class': 'browser-default'}), label="How heard", queryset=HowHeard.objects.all(), required=False) industry = 
forms.ModelChoiceField(widget=forms.Select(attrs={'class': 'browser-default'}), queryset=Industry.objects.all(), required=False) neighborhood = forms.ModelChoiceField(widget=forms.Select(attrs={'class': 'browser-default'}), queryset=Neighborhood.objects.all(), required=False) bio = forms.CharField(widget=forms.Textarea, max_length=512, required=False) has_kids = forms.NullBooleanField(widget=forms.NullBooleanSelect(attrs={'class':'browser-default'}), required=False) self_employed = forms.NullBooleanField(widget=forms.NullBooleanSelect(attrs={'class':'browser-default'}), required=False) public_profile = forms.BooleanField(required=False) emergency_name = forms.CharField(widget=forms.TextInput(attrs={'size': '50'}), label="Name", required=False) emergency_relationship = forms.CharField(widget=forms.TextInput(attrs={'size': '50'}), label="Relationship", required=False) emergency_phone = forms.CharField(widget=forms.TextInput(attrs={'size': '16'}), label="Phone", required=False) emergency_email = forms.EmailField(widget=forms.TextInput(attrs={'size': '50'}), label="E-mail", required=False) def save(self): if not self.is_valid(): raise Exception('The form must be valid in order to save') user = User.objects.get(username=self.cleaned_data['username']) user.first_name = self.cleaned_data['first_name'] user.last_name = self.cleaned_data['last_name'] user.save() # Profile data user.profile.phone = self.cleaned_data['phone'] user.profile.phone2 = self.cleaned_data['phone2'] user.profile.address1 = self.cleaned_data['address1'] user.profile.address2 = self.cleaned_data['address2'] user.profile.city = self.cleaned_data['city'] user.profile.state = self.cleaned_data['state'] user.profile.zipcode = self.cleaned_data['zipcode'] user.profile.bio = self.cleaned_data['bio'] user.profile.gender = self.cleaned_data['gender'] user.profile.howHeard = self.cleaned_data['howHeard'] user.profile.industry = self.cleaned_data['industry'] user.profile.neighborhood = self.cleaned_data['neighborhood'] user.profile.has_kids = self.cleaned_data['has_kids'] user.profile.self_employed = self.cleaned_data['self_employed'] user.profile.public_profile = self.cleaned_data['public_profile'] user.profile.save() # Save the URLs user.profile.save_url("personal", self.cleaned_data['url_personal']) user.profile.save_url("professional", self.cleaned_data['url_professional']) user.profile.save_url("facebook", self.cleaned_data['url_facebook']) user.profile.save_url("twitter", self.cleaned_data['url_twitter']) user.profile.save_url("linkedin", self.cleaned_data['url_linkedin']) user.profile.save_url("github", self.cleaned_data['url_github']) # Emergency Contact data emergency_contact = user.get_emergency_contact() emergency_contact.name=self.cleaned_data['emergency_name'] emergency_contact.relationship=self.cleaned_data['emergency_relationship'] emergency_contact.phone=self.cleaned_data['emergency_phone'] emergency_contact.email=self.cleaned_data['emergency_email'] emergency_contact.save() return user.profile class EventForm(forms.Form): user = forms.ModelChoiceField(widget=forms.Select(attrs={'class': 'browser-default'}), queryset=User.objects.order_by('first_name')) room = forms.ModelChoiceField(widget=forms.Select(attrs={'class': 'browser-default'}), queryset=Room.objects.all(), required=False) start_time = forms.DateTimeField(widget=forms.DateTimeInput(attrs={'placeholder':'e.g. 12/28/16 14:30'}, format='%m/%d/%Y %H:%M'), required=True) end_time = forms.DateTimeField(widget=forms.DateTimeInput(attrs={'placeholder':'e.g. 
12/28/16 16:30'}, format='%m/%d/%Y %H:%M:%S'), required=True) description = forms.CharField(max_length=100, required=False) charge = forms.DecimalField(decimal_places=2, max_digits=9, required=True) publicly_viewable = forms.ChoiceField(widget=forms.Select(attrs={'class': 'browser-default'}), choices=((True, 'Yes'), (False, 'No')), required=False) def save(self): if not self.is_valid(): raise Exception('The form must be valid in order to save') user = self.cleaned_data['user'] room = self.cleaned_data['room'] start_ts = self.cleaned_data['start_time'] end_ts = self.cleaned_data['end_time'] description = self.cleaned_data['description'] charge = self.cleaned_data['charge'] is_public = self.cleaned_data['publicly_viewable'] event = Event(user=user, room=room, start_ts=start_ts, end_ts=end_ts, description=description, charge=charge, is_public=is_public) event.save() return event class CoworkingDayForm(forms.Form): username = forms.CharField(widget=forms.TextInput(attrs={'readonly':'readonly'})) visit_date = forms.DateField(widget=forms.HiddenInput()) payment = forms.ChoiceField(choices=PAYMENT_CHOICES, required=True) note = forms.CharField(required=False) def save(self): "Creates the Daily Log to track member activity" if not self.is_valid(): raise Exception('The form must be valid in order to save') # Make sure there isn't another log for this member on this day u = User.objects.get(username=self.cleaned_data['username']) v = self.cleaned_data['visit_date'] if CoworkingDay.objects.filter(user=u, visit_date=v).count() > 0: raise Exception('Member already signed in') day = CoworkingDay() day.user = u day.visit_date = v day.payment = self.cleaned_data['payment'] day.note = self.cleaned_data['note'] day.save() return day class SubscriptionForm(forms.Form): username = forms.CharField(required=False, widget=forms.HiddenInput({'class':'username_td'})) created_ts = forms.DateField(required=False, widget=forms.HiddenInput) created_by = forms.CharField(required=False, widget=forms.HiddenInput({'class':'created_by_td'})) s_id = forms.IntegerField(required=False, widget=forms.HiddenInput(attrs={'class':'s_id'})) resource = forms.ModelChoiceField(queryset=Resource.objects.all(), required=False, widget=forms.Select(attrs={'class': 'resource'})) allowance = forms.IntegerField(min_value=0, required=False) start_date = forms.DateField(widget=forms.TextInput(attrs={'class': 'start_date'}), required=False) end_date = forms.DateField(widget=forms.TextInput(attrs={'class': 'end_date'}), required=False) monthly_rate = forms.IntegerField(min_value=0, required=False) overage_rate = forms.IntegerField(required=False, min_value=0) paid_by = forms.CharField(widget=forms.TextInput(attrs={'class': 'paying_user'}), max_length=128, required=False) def save(self): if not self.is_valid(): raise Exception('The form must be valid in order to save') username = self.cleaned_data['username'] user = User.objects.get(username=username) if self.cleaned_data['created_ts']: created_ts = self.cleaned_data['created_ts'] else: created_ts = localtime(now()) if self.cleaned_data['s_id']: s_id = self.cleaned_data['id'] created_by_user = self.cleaned_data['created_by'] created_by = User.objects.get(username=created_by_user) resource = self.cleaned_data['resource'] allowance = self.cleaned_data['allowance'] start_date = self.cleaned_data['start_date'] end_date = self.cleaned_data['end_date'] monthly_rate = self.cleaned_data['monthly_rate'] overage_rate = self.cleaned_data['overage_rate'] if self.cleaned_data['paid_by']: paid_by = 
self.cleaned_data['paid_by'] else: paid_by = None if s_id: sub = ResourceSubscription.objects.get(id=s_id) sub.end_date = end_date else: sub = ResourceSubscription(created_ts=created_ts, created_by=created_by, resource=resource, allowance=allowance, start_date=start_date, end_date=end_date, monthly_rate=monthly_rate, overage_rate=overage_rate, paid_by=paid_by, membership=user.membership) sub.save() return sub class MembershipForm(forms.Form): username = forms.CharField(required=False, widget=forms.HiddenInput) org = forms.CharField(required=False, widget=forms.HiddenInput) package = forms.ModelChoiceField(widget=forms.Select(attrs={'class': 'browser-default'}), label='Choose a Package', queryset=MembershipPackage.objects.filter(enabled=True).order_by('name'), required=True) bill_day = forms.IntegerField(min_value=1, max_value=31, required=False) def save(self): if not self.is_valid(): raise Exception('The form must be valid in order to save') package = self.cleaned_data['package'] bill_day = self.cleaned_data['bill_day'] if self.cleaned_data['username'] and self.cleaned_data['org']: raise Exception('You cannot save a membership for an organization AND a user in the same form.') elif self.cleaned_data['username']: username = self.cleaned_data['username'] to_update = User.objects.get(username=username) elif self.cleaned_data['org']: org = self.cleaned_data['username'] to_update = Organization.objects.get(id=org) else: raise Exception('A user or organization is required to save a membership.') membership = to_update.membership # membership.package = package membership.bill_day = bill_day membership.save() return membership class HelpTextForm(forms.Form): title = forms.CharField(max_length=128, label='Help Text Title', required=True, widget=forms.TextInput(attrs={'autocapitalize': "words", "placeholder":"e.g. Welcome Info"})) template = forms.CharField(widget=forms.Textarea(attrs={'placeholder':'<h1>Hello World</h1>'}), required=True) slug = forms.CharField(widget=forms.TextInput(attrs={"placeholder":"Single Word for URL e.g. 'hello'"}), max_length=16, required=True) order = forms.IntegerField(required=True, widget=forms.HiddenInput) def save(self): if not self.is_valid(): raise Exception('The form must be valid in order to save') title = self.cleaned_data['title'] template = self.cleaned_data['template'] slug = self.cleaned_data['slug'] order = self.cleaned_data['order'] help_text = HelpText(title=title, template=template, slug=slug, order=order) help_text.save() return help_text class MOTDForm(forms.Form): today = localtime(now()).date() start_ts = forms.DateField(initial=today, required=True) end_ts = forms.DateField(required=True) message = forms.CharField(required=True) delay_ms = forms.IntegerField(required=True, widget=forms.HiddenInput) def save(self): if not self.is_valid(): raise Exception('The form must be valid in order to save') start_ts = self.cleaned_data['start_ts'] end_ts = self.cleaned_data['end_ts'] message = self.cleaned_data['message'] delay_ms = self.cleaned_data['delay_ms'] motd = MOTD(start_ts=start_ts, end_ts=end_ts, message=message, delay_ms=delay_ms) motd.save() return motd class RoomForm(forms.Form): name = forms.CharField(required=True, max_length=64) room_id = forms.IntegerField(required=False, widget=forms.HiddenInput) location = forms.CharField(widget=forms.TextInput(attrs={'placeholder':'i.e. 
By the elevator'}), max_length=128, required=False) description = forms.CharField(widget=forms.Textarea, required=False) floor = forms.IntegerField(min_value=1, max_value=100, required=True) seats = forms.IntegerField(min_value=1, max_value=1000, required=True) max_capacity = forms.IntegerField(min_value=1, max_value=1000, required=True) has_av = forms.BooleanField(required=False) has_phone = forms.BooleanField(required=False) default_rate = forms.FloatField(required=True, min_value=0, max_value=None) image = forms.FileField(required=False) members_only = forms.BooleanField(required=False) def save(self): if not self.is_valid(): raise Exception('The form must be valid in order to save') room_id = self.cleaned_data['room_id'] name = self.cleaned_data['name'] location = self.cleaned_data['location'] description = self.cleaned_data['description'] floor = self.cleaned_data['floor'] seats = self.cleaned_data['seats'] max_capacity = self.cleaned_data['max_capacity'] has_av = self.cleaned_data['has_av'] has_phone = self.cleaned_data['has_phone'] default_rate = self.cleaned_data['default_rate'] image = self.cleaned_data['image'] members_only = self.cleaned_data['members_only'] if room_id != None: room = Room.objects.get(id=room_id) room.name = name room.location = location room.description = description room.floor = floor room.seats = seats room.max_capacity = max_capacity room.has_av = has_av room.has_phone = has_phone room.default_rate = default_rate room.members_only = members_only if image: room.image = image room.save() else: room = Room(name=name, location=location, description=description, floor=floor, seats=seats, max_capacity=max_capacity, has_av=has_av, has_phone=has_phone, default_rate=default_rate, image=image, members_only=members_only) room.save() return room # TODO - Not quite ready yet --JLS # class DocUploadForm(forms.Form): # name = forms.CharField(required=True) # document = forms.FileField(required=True) # # def save(self): # name = self.cleaned_data['name'] # document = self.cleaned_data['document'] # doc = Documents(name=name, document=document) # doc.save() # # return doc class MembershipPackageForm(forms.Form): name = forms.CharField(widget=forms.TextInput(attrs={'class': 'name-input'}), max_length=128, required=False) sub_id = forms.IntegerField(required=False, widget=forms.HiddenInput) package = forms.IntegerField(required=False, widget=forms.HiddenInput(attrs={'class':'package-id'})) enabled = forms.ChoiceField(choices=((True, 'Yes'), (False, 'No')), required=False) resource = forms.ModelChoiceField(widget=forms.Select(attrs={'class': 'browser-default'}), label='Choose a Resource', queryset=Resource.objects.all(), required=False) allowance = forms.IntegerField(min_value=0, required=False) monthly_rate = forms.IntegerField(min_value=0, required=False) overage_rate = forms.IntegerField(min_value=0, required=False) delete = forms.BooleanField(widget=forms.CheckboxInput(attrs={'class':'browser-default del-checkbox'}), required=False) def save(self): if not self.is_valid(): raise Exception('The form must be valid in order to save') name = self.cleaned_data['name'] package = self.cleaned_data['package'] resource = self.cleaned_data['resource'] allowance = self.cleaned_data['allowance'] monthly_rate = self.cleaned_data['monthly_rate'] overage_rate = self.cleaned_data['overage_rate'] enabled = self.cleaned_data['enabled'] delete = self.cleaned_data['delete'] if enabled == 'False': enabled = False else: enabled = True if self.cleaned_data['sub_id']: p = 
MembershipPackage.objects.get(id=package) p.enabled = enabled p.save() sub_default = SubscriptionDefault.objects.get(id=self.cleaned_data['sub_id']) if delete == True: sub_default.delete() else: sub_default.allowance = allowance sub_default.monthly_rate = monthly_rate sub_default.overage_rate = overage_rate sub_default.save() else: if MembershipPackage.objects.filter(name=name): raise Exception('A membership package with this name already exists.') if package: p = MembershipPackage.objects.get(id=package) p.enabled = enabled p.save() else: p = MembershipPackage(name=name, enabled=enabled) p.save() sub_default = SubscriptionDefault(package=p, resource=resource, allowance=allowance, monthly_rate=monthly_rate, overage_rate=overage_rate) print(('Default is %s' % sub_default)) sub_default.save() return sub_default # Copyright 2018 Office Nomads LLC (http://www.officenomads.com/) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
[ "nadine.models.organization.OrganizationMember", "nadine.models.membership.SubscriptionDefault", "nadine.models.usage.CoworkingDay.objects.filter", "django.forms.EmailField", "nadine.models.resource.Room.objects.get", "django.forms.FloatField", "django.contrib.auth.models.User", "django.contrib.auth.models.User.objects.filter", "base64.b64decode", "logging.getLogger", "nadine.models.billing.UserBill.objects.get", "django.contrib.auth.models.User.objects.make_random_password", "nadine.models.membership.MembershipPackage.objects.filter", "nadine.models.membership.MembershipPackage.objects.get", "nadine.models.core.HowHeard.objects.all", "nadine.models.resource.Room.objects.all", "django.forms.DecimalField", "nadine.models.resource.Room", "django.forms.IntegerField", "django.contrib.auth.models.User.objects.get", "django.forms.ChoiceField", "django.forms.BooleanField", "django.forms.URLInput", "nadine.models.membership.ResourceSubscription.objects.get", "django.core.files.base.ContentFile", "django.utils.timezone.now", "django.forms.ValidationError", "datetime.timedelta", "django.contrib.auth.models.User.objects.order_by", "django.forms.HiddenInput", "django.forms.DateTimeInput", "nadine.models.core.Neighborhood.objects.all", "django.forms.Textarea", "member.models.MOTD", "django.forms.Select", "django.forms.TextInput", "django.forms.DateInput", "nadine.models.core.Industry.objects.all", "django.shortcuts.get_object_or_404", "member.models.HelpText", "nadine.models.core.URLType.objects.all", "nadine.models.organization.Organization.objects.get", "nadine.utils.mailgun.validate_address", "nadine.models.resource.Resource.objects.all", "django.forms.CheckboxInput", "nadine.models.membership.SubscriptionDefault.objects.get", "nadine.models.organization.OrganizationMember.objects.get", "nadine.models.billing.Payment.objects.create", "django.forms.NullBooleanSelect", "django.forms.URLField", "nadine.models.membership.MembershipPackage", "nadine.models.usage.CoworkingDay", "nadine.models.membership.ResourceSubscription", "django.forms.DateField", "django.forms.CharField", "django.forms.FileField" ]
[((1288, 1315), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1305, 1315), False, 'import logging\n'), ((1422, 1439), 'django.forms.DateField', 'forms.DateField', ([], {}), '()\n', (1437, 1439), False, 'from django import forms\n'), ((1450, 1467), 'django.forms.DateField', 'forms.DateField', ([], {}), '()\n', (1465, 1467), False, 'from django import forms\n'), ((3214, 3273), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'required': '(True)', 'widget': 'forms.HiddenInput'}), '(required=True, widget=forms.HiddenInput)\n', (3232, 3273), False, 'from django import forms\n'), ((3444, 3514), 'django.forms.CharField', 'forms.CharField', ([], {'widget': 'forms.Textarea', 'max_length': '(112)', 'required': '(False)'}), '(widget=forms.Textarea, max_length=112, required=False)\n', (3459, 3514), False, 'from django import forms\n'), ((3525, 3595), 'django.forms.CharField', 'forms.CharField', ([], {'widget': 'forms.Textarea', 'max_length': '(512)', 'required': '(False)'}), '(widget=forms.Textarea, max_length=512, required=False)\n', (3540, 3595), False, 'from django import forms\n'), ((3608, 3639), 'django.forms.FileField', 'forms.FileField', ([], {'required': '(False)'}), '(required=False)\n', (3623, 3639), False, 'from django import forms\n'), ((3653, 3687), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'required': '(False)'}), '(required=False)\n', (3671, 3687), False, 'from django import forms\n'), ((4434, 4465), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (4449, 4465), False, 'from django import forms\n'), ((5219, 5278), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'required': '(True)', 'widget': 'forms.HiddenInput'}), '(required=True, widget=forms.HiddenInput)\n', (5237, 5278), False, 'from django import forms\n'), ((5295, 5355), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'required': '(False)', 'widget': 'forms.HiddenInput'}), '(required=False, widget=forms.HiddenInput)\n', (5313, 5355), False, 'from django import forms\n'), ((5371, 5428), 'django.forms.CharField', 'forms.CharField', ([], {'required': '(False)', 'widget': 'forms.HiddenInput'}), '(required=False, widget=forms.HiddenInput)\n', (5386, 5428), False, 'from django import forms\n'), ((5863, 5897), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'required': '(False)'}), '(required=False)\n', (5881, 5897), False, 'from django import forms\n'), ((7571, 7630), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'required': '(True)', 'widget': 'forms.HiddenInput'}), '(required=True, widget=forms.HiddenInput)\n', (7589, 7630), False, 'from django import forms\n'), ((7646, 7702), 'django.forms.CharField', 'forms.CharField', ([], {'required': '(True)', 'widget': 'forms.HiddenInput'}), '(required=True, widget=forms.HiddenInput)\n', (7661, 7702), False, 'from django import forms\n'), ((7945, 7992), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(256)', 'required': '(False)'}), '(max_length=256, required=False)\n', (7960, 7992), False, 'from django import forms\n'), ((8006, 8106), 'django.forms.DecimalField', 'forms.DecimalField', ([], {'min_value': '(0)', 'max_value': '(10000)', 'required': '(True)', 'max_digits': '(7)', 'decimal_places': '(2)'}), '(min_value=0, max_value=10000, required=True, max_digits=\n 7, decimal_places=2)\n', (8024, 8106), False, 'from django import forms\n'), ((8856, 8887), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)'}), 
'(max_length=100)\n', (8871, 8887), False, 'from django import forms\n'), ((9221, 9285), 'django.forms.EmailField', 'forms.EmailField', ([], {'max_length': '(100)', 'label': '"""Email *"""', 'required': '(True)'}), "(max_length=100, label='Email *', required=True)\n", (9237, 9285), False, 'from django import forms\n'), ((11749, 11806), 'django.forms.CharField', 'forms.CharField', ([], {'required': '(False)', 'widget': 'forms.HiddenInput'}), '(required=False, widget=forms.HiddenInput)\n', (11764, 11806), False, 'from django import forms\n'), ((11826, 11886), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'required': '(False)', 'widget': 'forms.HiddenInput'}), '(required=False, widget=forms.HiddenInput)\n', (11844, 11886), False, 'from django import forms\n'), ((11899, 11930), 'django.forms.FileField', 'forms.FileField', ([], {'required': '(False)'}), '(required=False)\n', (11914, 11930), False, 'from django import forms\n'), ((13812, 13869), 'django.forms.CharField', 'forms.CharField', ([], {'required': '(False)', 'widget': 'forms.HiddenInput'}), '(required=False, widget=forms.HiddenInput)\n', (13827, 13869), False, 'from django import forms\n'), ((13883, 13943), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'required': '(False)', 'widget': 'forms.HiddenInput'}), '(required=False, widget=forms.HiddenInput)\n', (13901, 13943), False, 'from django import forms\n'), ((14816, 14872), 'django.forms.CharField', 'forms.CharField', ([], {'required': '(True)', 'widget': 'forms.HiddenInput'}), '(required=True, widget=forms.HiddenInput)\n', (14831, 14872), False, 'from django import forms\n'), ((14890, 14936), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'required': '(True)'}), '(max_length=100, required=True)\n', (14905, 14936), False, 'from django import forms\n'), ((14953, 14999), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'required': '(True)'}), '(max_length=100, required=True)\n', (14968, 14999), False, 'from django import forms\n'), ((15015, 15062), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'required': '(False)'}), '(max_length=100, required=False)\n', (15030, 15062), False, 'from django import forms\n'), ((15078, 15125), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'required': '(False)'}), '(max_length=100, required=False)\n', (15093, 15125), False, 'from django import forms\n'), ((15137, 15184), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'required': '(False)'}), '(max_length=100, required=False)\n', (15152, 15184), False, 'from django import forms\n'), ((15329, 15375), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(16)', 'required': '(False)'}), '(max_length=16, required=False)\n', (15344, 15375), False, 'from django import forms\n'), ((15388, 15434), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(20)', 'required': '(False)'}), '(max_length=20, required=False)\n', (15403, 15434), False, 'from django import forms\n'), ((15448, 15494), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(20)', 'required': '(False)'}), '(max_length=20, required=False)\n', (15463, 15494), False, 'from django import forms\n'), ((15514, 15544), 'django.forms.URLField', 'forms.URLField', ([], {'required': '(False)'}), '(required=False)\n', (15528, 15544), False, 'from django import forms\n'), ((15568, 15598), 'django.forms.URLField', 'forms.URLField', ([], {'required': '(False)'}), 
'(required=False)\n', (15582, 15598), False, 'from django import forms\n'), ((15618, 15648), 'django.forms.URLField', 'forms.URLField', ([], {'required': '(False)'}), '(required=False)\n', (15632, 15648), False, 'from django import forms\n'), ((15667, 15697), 'django.forms.URLField', 'forms.URLField', ([], {'required': '(False)'}), '(required=False)\n', (15681, 15697), False, 'from django import forms\n'), ((15717, 15747), 'django.forms.URLField', 'forms.URLField', ([], {'required': '(False)'}), '(required=False)\n', (15731, 15747), False, 'from django import forms\n'), ((15765, 15795), 'django.forms.URLField', 'forms.URLField', ([], {'required': '(False)'}), '(required=False)\n', (15779, 15795), False, 'from django import forms\n'), ((16393, 16463), 'django.forms.CharField', 'forms.CharField', ([], {'widget': 'forms.Textarea', 'max_length': '(512)', 'required': '(False)'}), '(widget=forms.Textarea, max_length=512, required=False)\n', (16408, 16463), False, 'from django import forms\n'), ((16732, 16766), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'required': '(False)'}), '(required=False)\n', (16750, 16766), False, 'from django import forms\n'), ((20098, 20145), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'required': '(False)'}), '(max_length=100, required=False)\n', (20113, 20145), False, 'from django import forms\n'), ((20159, 20224), 'django.forms.DecimalField', 'forms.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(9)', 'required': '(True)'}), '(decimal_places=2, max_digits=9, required=True)\n', (20177, 20224), False, 'from django import forms\n'), ((21227, 21284), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'choices': 'PAYMENT_CHOICES', 'required': '(True)'}), '(choices=PAYMENT_CHOICES, required=True)\n', (21244, 21284), False, 'from django import forms\n'), ((21296, 21327), 'django.forms.CharField', 'forms.CharField', ([], {'required': '(False)'}), '(required=False)\n', (21311, 21327), False, 'from django import forms\n'), ((22190, 22247), 'django.forms.DateField', 'forms.DateField', ([], {'required': '(False)', 'widget': 'forms.HiddenInput'}), '(required=False, widget=forms.HiddenInput)\n', (22205, 22247), False, 'from django import forms\n'), ((22599, 22646), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'min_value': '(0)', 'required': '(False)'}), '(min_value=0, required=False)\n', (22617, 22646), False, 'from django import forms\n'), ((22870, 22917), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'min_value': '(0)', 'required': '(False)'}), '(min_value=0, required=False)\n', (22888, 22917), False, 'from django import forms\n'), ((22937, 22984), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'required': '(False)', 'min_value': '(0)'}), '(required=False, min_value=0)\n', (22955, 22984), False, 'from django import forms\n'), ((24622, 24679), 'django.forms.CharField', 'forms.CharField', ([], {'required': '(False)', 'widget': 'forms.HiddenInput'}), '(required=False, widget=forms.HiddenInput)\n', (24637, 24679), False, 'from django import forms\n'), ((24690, 24747), 'django.forms.CharField', 'forms.CharField', ([], {'required': '(False)', 'widget': 'forms.HiddenInput'}), '(required=False, widget=forms.HiddenInput)\n', (24705, 24747), False, 'from django import forms\n'), ((24972, 25033), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'min_value': '(1)', 'max_value': '(31)', 'required': '(False)'}), '(min_value=1, max_value=31, required=False)\n', (24990, 25033), False, 'from 
django import forms\n'), ((26488, 26547), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'required': '(True)', 'widget': 'forms.HiddenInput'}), '(required=True, widget=forms.HiddenInput)\n', (26506, 26547), False, 'from django import forms\n'), ((27065, 27110), 'django.forms.DateField', 'forms.DateField', ([], {'initial': 'today', 'required': '(True)'}), '(initial=today, required=True)\n', (27080, 27110), False, 'from django import forms\n'), ((27124, 27154), 'django.forms.DateField', 'forms.DateField', ([], {'required': '(True)'}), '(required=True)\n', (27139, 27154), False, 'from django import forms\n'), ((27169, 27199), 'django.forms.CharField', 'forms.CharField', ([], {'required': '(True)'}), '(required=True)\n', (27184, 27199), False, 'from django import forms\n'), ((27215, 27274), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'required': '(True)', 'widget': 'forms.HiddenInput'}), '(required=True, widget=forms.HiddenInput)\n', (27233, 27274), False, 'from django import forms\n'), ((27761, 27806), 'django.forms.CharField', 'forms.CharField', ([], {'required': '(True)', 'max_length': '(64)'}), '(required=True, max_length=64)\n', (27776, 27806), False, 'from django import forms\n'), ((27821, 27881), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'required': '(False)', 'widget': 'forms.HiddenInput'}), '(required=False, widget=forms.HiddenInput)\n', (27839, 27881), False, 'from django import forms\n'), ((28033, 28087), 'django.forms.CharField', 'forms.CharField', ([], {'widget': 'forms.Textarea', 'required': '(False)'}), '(widget=forms.Textarea, required=False)\n', (28048, 28087), False, 'from django import forms\n'), ((28100, 28161), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'min_value': '(1)', 'max_value': '(100)', 'required': '(True)'}), '(min_value=1, max_value=100, required=True)\n', (28118, 28161), False, 'from django import forms\n'), ((28174, 28236), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'min_value': '(1)', 'max_value': '(1000)', 'required': '(True)'}), '(min_value=1, max_value=1000, required=True)\n', (28192, 28236), False, 'from django import forms\n'), ((28256, 28318), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'min_value': '(1)', 'max_value': '(1000)', 'required': '(True)'}), '(min_value=1, max_value=1000, required=True)\n', (28274, 28318), False, 'from django import forms\n'), ((28332, 28366), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'required': '(False)'}), '(required=False)\n', (28350, 28366), False, 'from django import forms\n'), ((28383, 28417), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'required': '(False)'}), '(required=False)\n', (28401, 28417), False, 'from django import forms\n'), ((28437, 28497), 'django.forms.FloatField', 'forms.FloatField', ([], {'required': '(True)', 'min_value': '(0)', 'max_value': 'None'}), '(required=True, min_value=0, max_value=None)\n', (28453, 28497), False, 'from django import forms\n'), ((28510, 28541), 'django.forms.FileField', 'forms.FileField', ([], {'required': '(False)'}), '(required=False)\n', (28525, 28541), False, 'from django import forms\n'), ((28561, 28595), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'required': '(False)'}), '(required=False)\n', (28579, 28595), False, 'from django import forms\n'), ((30690, 30750), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'required': '(False)', 'widget': 'forms.HiddenInput'}), '(required=False, widget=forms.HiddenInput)\n', (30708, 30750), False, 'from 
django import forms\n'), ((30870, 30943), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'choices': "((True, 'Yes'), (False, 'No'))", 'required': '(False)'}), "(choices=((True, 'Yes'), (False, 'No')), required=False)\n", (30887, 30943), False, 'from django import forms\n'), ((31131, 31178), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'min_value': '(0)', 'required': '(False)'}), '(min_value=0, required=False)\n', (31149, 31178), False, 'from django import forms\n'), ((31198, 31245), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'min_value': '(0)', 'required': '(False)'}), '(min_value=0, required=False)\n', (31216, 31245), False, 'from django import forms\n'), ((31265, 31312), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'min_value': '(0)', 'required': '(False)'}), '(min_value=0, required=False)\n', (31283, 31312), False, 'from django import forms\n'), ((3817, 3852), 'nadine.models.organization.Organization.objects.get', 'Organization.objects.get', ([], {'id': 'org_id'}), '(id=org_id)\n', (3841, 3852), False, 'from nadine.models.organization import Organization, OrganizationMember\n'), ((6474, 6530), 'nadine.models.organization.Organization.objects.get', 'Organization.objects.get', ([], {'id': "self.cleaned_data['org_id']"}), "(id=self.cleaned_data['org_id'])\n", (6498, 6530), False, 'from nadine.models.organization import Organization, OrganizationMember\n'), ((8155, 8208), 'nadine.models.billing.UserBill.objects.get', 'UserBill.objects.get', ([], {'pk': "self.cleaned_data['bill_id']"}), "(pk=self.cleaned_data['bill_id'])\n", (8175, 8208), False, 'from nadine.models.billing import UserBill, Payment\n'), ((8224, 8280), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'username': "self.cleaned_data['username']"}), "(username=self.cleaned_data['username'])\n", (8240, 8280), False, 'from django.contrib.auth.models import User\n'), ((8477, 8521), 'nadine.models.billing.Payment.objects.create', 'Payment.objects.create', ([], {'bill': 'bill', 'user': 'user'}), '(bill=bill, user=user)\n', (8499, 8521), False, 'from nadine.models.billing import UserBill, Payment\n'), ((11092, 11162), 'django.contrib.auth.models.User', 'User', ([], {'username': 'username', 'first_name': 'first', 'last_name': 'last', 'email': 'email'}), '(username=username, first_name=first, last_name=last, email=email)\n', (11096, 11162), False, 'from django.contrib.auth.models import User\n'), ((11182, 11226), 'django.contrib.auth.models.User.objects.make_random_password', 'User.objects.make_random_password', ([], {'length': '(32)'}), '(length=32)\n', (11215, 11226), False, 'from django.contrib.auth.models import User\n'), ((12212, 12242), 'base64.b64decode', 'base64.b64decode', (['raw_img_data'], {}), '(raw_img_data)\n', (12228, 12242), False, 'import base64\n'), ((17382, 17438), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'username': "self.cleaned_data['username']"}), "(username=self.cleaned_data['username'])\n", (17398, 17438), False, 'from django.contrib.auth.models import User\n'), ((21594, 21650), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'username': "self.cleaned_data['username']"}), "(username=self.cleaned_data['username'])\n", (21610, 21650), False, 'from django.contrib.auth.models import User\n'), ((21840, 21854), 'nadine.models.usage.CoworkingDay', 'CoworkingDay', ([], {}), '()\n', (21852, 21854), False, 'from nadine.models.usage import PAYMENT_CHOICES, CoworkingDay\n'), ((23292, 23327), 
'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'username': 'username'}), '(username=username)\n', (23308, 23327), False, 'from django.contrib.auth.models import User\n'), ((23646, 23688), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'username': 'created_by_user'}), '(username=created_by_user)\n', (23662, 23688), False, 'from django.contrib.auth.models import User\n'), ((26869, 26933), 'member.models.HelpText', 'HelpText', ([], {'title': 'title', 'template': 'template', 'slug': 'slug', 'order': 'order'}), '(title=title, template=template, slug=slug, order=order)\n', (26877, 26933), False, 'from member.models import HelpText, MOTD\n'), ((27605, 27679), 'member.models.MOTD', 'MOTD', ([], {'start_ts': 'start_ts', 'end_ts': 'end_ts', 'message': 'message', 'delay_ms': 'delay_ms'}), '(start_ts=start_ts, end_ts=end_ts, message=message, delay_ms=delay_ms)\n', (27609, 27679), False, 'from member.models import HelpText, MOTD\n'), ((3380, 3430), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'autocapitalize': 'words'}"}), "(attrs={'autocapitalize': 'words'})\n", (3395, 3430), False, 'from django import forms\n'), ((5496, 5546), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'autocapitalize': 'words'}"}), "(attrs={'autocapitalize': 'words'})\n", (5511, 5546), False, 'from django import forms\n'), ((5588, 5689), 'django.forms.DateInput', 'forms.DateInput', ([], {'attrs': "{'placeholder': 'e.g. 12/28/16', 'class': 'datepicker'}", 'format': '"""%m/%d/%Y"""'}), "(attrs={'placeholder': 'e.g. 12/28/16', 'class':\n 'datepicker'}, format='%m/%d/%Y')\n", (5603, 5689), False, 'from django import forms\n'), ((5738, 5839), 'django.forms.DateInput', 'forms.DateInput', ([], {'attrs': "{'placeholder': 'e.g. 12/28/16', 'class': 'datepicker'}", 'format': '"""%m/%d/%Y"""'}), "(attrs={'placeholder': 'e.g. 12/28/16', 'class':\n 'datepicker'}, format='%m/%d/%Y')\n", (5753, 5839), False, 'from django import forms\n'), ((6755, 6799), 'nadine.models.organization.OrganizationMember.objects.get', 'OrganizationMember.objects.get', ([], {'id': 'member_id'}), '(id=member_id)\n', (6785, 6799), False, 'from nadine.models.organization import Organization, OrganizationMember\n'), ((7837, 7938), 'django.forms.DateInput', 'forms.DateInput', ([], {'attrs': "{'placeholder': 'e.g. 12/28/16', 'class': 'datepicker'}", 'format': '"""%m/%d/%Y"""'}), "(attrs={'placeholder': 'e.g. 12/28/16', 'class':\n 'datepicker'}, format='%m/%d/%Y')\n", (7852, 7938), False, 'from django import forms\n'), ((8641, 8678), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'username': 'created_by'}), '(username=created_by)\n', (8657, 8678), False, 'from django.contrib.auth.models import User\n'), ((9014, 9064), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'autocapitalize': 'words'}"}), "(attrs={'autocapitalize': 'words'})\n", (9029, 9064), False, 'from django import forms\n'), ((9157, 9207), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'autocapitalize': 'words'}"}), "(attrs={'autocapitalize': 'words'})\n", (9172, 9207), False, 'from django import forms\n'), ((9638, 9705), 'django.forms.ValidationError', 'forms.ValidationError', (['("Email address \'%s\' already in use." % email)'], {}), '("Email address \'%s\' already in use." 
% email)\n', (9659, 9705), False, 'from django import forms\n'), ((9721, 9752), 'nadine.utils.mailgun.validate_address', 'mailgun.validate_address', (['email'], {}), '(email)\n', (9745, 9752), False, 'from nadine.utils import mailgun\n'), ((9772, 9837), 'django.forms.ValidationError', 'forms.ValidationError', (['("Email address \'%s\' is not valid." % email)'], {}), '("Email address \'%s\' is not valid." % email)\n', (9793, 9837), False, 'from django import forms\n'), ((11367, 11436), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'autocapitalize': 'on', 'autocorrect': 'off'}"}), "(attrs={'autocapitalize': 'on', 'autocorrect': 'off'})\n", (11382, 11436), False, 'from django import forms\n'), ((11463, 11532), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'autocapitalize': 'on', 'autocorrect': 'off'}"}), "(attrs={'autocapitalize': 'on', 'autocorrect': 'off'})\n", (11478, 11532), False, 'from django import forms\n'), ((11979, 11998), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (11996, 11998), False, 'from django import forms\n'), ((12305, 12368), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['User'], {'username': "self.cleaned_data['username']"}), "(User, username=self.cleaned_data['username'])\n", (12322, 12368), False, 'from django.shortcuts import get_object_or_404\n'), ((13989, 14037), 'django.forms.Select', 'forms.Select', ([], {'attrs': "{'class': 'browser-default'}"}), "(attrs={'class': 'browser-default'})\n", (14001, 14037), False, 'from django import forms\n'), ((14070, 14091), 'nadine.models.core.URLType.objects.all', 'URLType.objects.all', ([], {}), '()\n', (14089, 14091), False, 'from nadine.models.core import HowHeard, Industry, Neighborhood, URLType, GENDER_CHOICES\n'), ((14141, 14215), 'django.forms.URLInput', 'forms.URLInput', ([], {'attrs': "{'placeholder': 'http://www.facebook.com/myprofile'}"}), "(attrs={'placeholder': 'http://www.facebook.com/myprofile'})\n", (14155, 14215), False, 'from django import forms\n'), ((14419, 14475), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'username': "self.cleaned_data['username']"}), "(username=self.cleaned_data['username'])\n", (14435, 14475), False, 'from django.contrib.auth.models import User\n'), ((14625, 14681), 'nadine.models.organization.Organization.objects.get', 'Organization.objects.get', ([], {'id': "self.cleaned_data['org_id']"}), "(id=self.cleaned_data['org_id'])\n", (14649, 14681), False, 'from nadine.models.organization import Organization, OrganizationMember\n'), ((15222, 15270), 'django.forms.Select', 'forms.Select', ([], {'attrs': "{'class': 'browser-default'}"}), "(attrs={'class': 'browser-default'})\n", (15234, 15270), False, 'from django import forms\n'), ((15834, 15882), 'django.forms.Select', 'forms.Select', ([], {'attrs': "{'class': 'browser-default'}"}), "(attrs={'class': 'browser-default'})\n", (15846, 15882), False, 'from django import forms\n'), ((15969, 16017), 'django.forms.Select', 'forms.Select', ([], {'attrs': "{'class': 'browser-default'}"}), "(attrs={'class': 'browser-default'})\n", (15981, 16017), False, 'from django import forms\n'), ((16047, 16069), 'nadine.models.core.HowHeard.objects.all', 'HowHeard.objects.all', ([], {}), '()\n', (16067, 16069), False, 'from nadine.models.core import HowHeard, Industry, Neighborhood, URLType, GENDER_CHOICES\n'), ((16132, 16180), 'django.forms.Select', 'forms.Select', ([], {'attrs': "{'class': 'browser-default'}"}), "(attrs={'class': 'browser-default'})\n", (16144, 
16180), False, 'from django import forms\n'), ((16191, 16213), 'nadine.models.core.Industry.objects.all', 'Industry.objects.all', ([], {}), '()\n', (16211, 16213), False, 'from nadine.models.core import HowHeard, Industry, Neighborhood, URLType, GENDER_CHOICES\n'), ((16280, 16328), 'django.forms.Select', 'forms.Select', ([], {'attrs': "{'class': 'browser-default'}"}), "(attrs={'class': 'browser-default'})\n", (16292, 16328), False, 'from django import forms\n'), ((16339, 16365), 'nadine.models.core.Neighborhood.objects.all', 'Neighborhood.objects.all', ([], {}), '()\n', (16363, 16365), False, 'from nadine.models.core import HowHeard, Industry, Neighborhood, URLType, GENDER_CHOICES\n'), ((16509, 16568), 'django.forms.NullBooleanSelect', 'forms.NullBooleanSelect', ([], {'attrs': "{'class': 'browser-default'}"}), "(attrs={'class': 'browser-default'})\n", (16532, 16568), False, 'from django import forms\n'), ((16635, 16694), 'django.forms.NullBooleanSelect', 'forms.NullBooleanSelect', ([], {'attrs': "{'class': 'browser-default'}"}), "(attrs={'class': 'browser-default'})\n", (16658, 16694), False, 'from django import forms\n'), ((16812, 16849), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'size': '50'}"}), "(attrs={'size': '50'})\n", (16827, 16849), False, 'from django import forms\n'), ((16933, 16970), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'size': '50'}"}), "(attrs={'size': '50'})\n", (16948, 16970), False, 'from django import forms\n'), ((17055, 17092), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'size': '16'}"}), "(attrs={'size': '16'})\n", (17070, 17092), False, 'from django import forms\n'), ((17171, 17208), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'size': '50'}"}), "(attrs={'size': '50'})\n", (17186, 17208), False, 'from django import forms\n'), ((19547, 19595), 'django.forms.Select', 'forms.Select', ([], {'attrs': "{'class': 'browser-default'}"}), "(attrs={'class': 'browser-default'})\n", (19559, 19595), False, 'from django import forms\n'), ((19606, 19641), 'django.contrib.auth.models.User.objects.order_by', 'User.objects.order_by', (['"""first_name"""'], {}), "('first_name')\n", (19627, 19641), False, 'from django.contrib.auth.models import User\n'), ((19684, 19732), 'django.forms.Select', 'forms.Select', ([], {'attrs': "{'class': 'browser-default'}"}), "(attrs={'class': 'browser-default'})\n", (19696, 19732), False, 'from django import forms\n'), ((19743, 19761), 'nadine.models.resource.Room.objects.all', 'Room.objects.all', ([], {}), '()\n', (19759, 19761), False, 'from nadine.models.resource import Room, Resource\n'), ((19823, 19918), 'django.forms.DateTimeInput', 'forms.DateTimeInput', ([], {'attrs': "{'placeholder': 'e.g. 12/28/16 14:30'}", 'format': '"""%m/%d/%Y %H:%M"""'}), "(attrs={'placeholder': 'e.g. 12/28/16 14:30'}, format=\n '%m/%d/%Y %H:%M')\n", (19842, 19918), False, 'from django import forms\n'), ((19971, 20069), 'django.forms.DateTimeInput', 'forms.DateTimeInput', ([], {'attrs': "{'placeholder': 'e.g. 12/28/16 16:30'}", 'format': '"""%m/%d/%Y %H:%M:%S"""'}), "(attrs={'placeholder': 'e.g. 
12/28/16 16:30'}, format=\n '%m/%d/%Y %H:%M:%S')\n", (19990, 20069), False, 'from django import forms\n'), ((20274, 20322), 'django.forms.Select', 'forms.Select', ([], {'attrs': "{'class': 'browser-default'}"}), "(attrs={'class': 'browser-default'})\n", (20286, 20322), False, 'from django import forms\n'), ((21104, 21151), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'readonly': 'readonly'}"}), "(attrs={'readonly': 'readonly'})\n", (21119, 21151), False, 'from django import forms\n'), ((21192, 21211), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (21209, 21211), False, 'from django import forms\n'), ((22129, 22172), 'django.forms.HiddenInput', 'forms.HiddenInput', (["{'class': 'username_td'}"], {}), "({'class': 'username_td'})\n", (22146, 22172), False, 'from django import forms\n'), ((22304, 22349), 'django.forms.HiddenInput', 'forms.HiddenInput', (["{'class': 'created_by_td'}"], {}), "({'class': 'created_by_td'})\n", (22321, 22349), False, 'from django import forms\n'), ((22403, 22445), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {'attrs': "{'class': 's_id'}"}), "(attrs={'class': 's_id'})\n", (22420, 22445), False, 'from django import forms\n'), ((22493, 22515), 'nadine.models.resource.Resource.objects.all', 'Resource.objects.all', ([], {}), '()\n', (22513, 22515), False, 'from nadine.models.resource import Room, Resource\n'), ((22540, 22581), 'django.forms.Select', 'forms.Select', ([], {'attrs': "{'class': 'resource'}"}), "(attrs={'class': 'resource'})\n", (22552, 22581), False, 'from django import forms\n'), ((22687, 22733), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'start_date'}"}), "(attrs={'class': 'start_date'})\n", (22702, 22733), False, 'from django import forms\n'), ((22789, 22833), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'end_date'}"}), "(attrs={'class': 'end_date'})\n", (22804, 22833), False, 'from django import forms\n'), ((23022, 23069), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'paying_user'}"}), "(attrs={'class': 'paying_user'})\n", (23037, 23069), False, 'from django import forms\n'), ((24174, 24215), 'nadine.models.membership.ResourceSubscription.objects.get', 'ResourceSubscription.objects.get', ([], {'id': 's_id'}), '(id=s_id)\n', (24206, 24215), False, 'from nadine.models.membership import Membership, MembershipPackage, ResourceSubscription, IndividualMembership, SubscriptionDefault\n'), ((24284, 24546), 'nadine.models.membership.ResourceSubscription', 'ResourceSubscription', ([], {'created_ts': 'created_ts', 'created_by': 'created_by', 'resource': 'resource', 'allowance': 'allowance', 'start_date': 'start_date', 'end_date': 'end_date', 'monthly_rate': 'monthly_rate', 'overage_rate': 'overage_rate', 'paid_by': 'paid_by', 'membership': 'user.membership'}), '(created_ts=created_ts, created_by=created_by, resource\n =resource, allowance=allowance, start_date=start_date, end_date=\n end_date, monthly_rate=monthly_rate, overage_rate=overage_rate, paid_by\n =paid_by, membership=user.membership)\n', (24304, 24546), False, 'from nadine.models.membership import Membership, MembershipPackage, ResourceSubscription, IndividualMembership, SubscriptionDefault\n'), ((24792, 24840), 'django.forms.Select', 'forms.Select', ([], {'attrs': "{'class': 'browser-default'}"}), "(attrs={'class': 'browser-default'})\n", (24804, 24840), False, 'from django import forms\n'), ((26135, 26225), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': 
"{'autocapitalize': 'words', 'placeholder': 'e.g. Welcome Info'}"}), "(attrs={'autocapitalize': 'words', 'placeholder':\n 'e.g. Welcome Info'})\n", (26150, 26225), False, 'from django import forms\n'), ((26260, 26321), 'django.forms.Textarea', 'forms.Textarea', ([], {'attrs': "{'placeholder': '<h1>Hello World</h1>'}"}), "(attrs={'placeholder': '<h1>Hello World</h1>'})\n", (26274, 26321), False, 'from django import forms\n'), ((26371, 26445), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': '{\'placeholder\': "Single Word for URL e.g. \'hello\'"}'}), '(attrs={\'placeholder\': "Single Word for URL e.g. \'hello\'"})\n', (26386, 26445), False, 'from django import forms\n'), ((27920, 27982), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'placeholder': 'i.e. By the elevator'}"}), "(attrs={'placeholder': 'i.e. By the elevator'})\n", (27935, 27982), False, 'from django import forms\n'), ((29356, 29384), 'nadine.models.resource.Room.objects.get', 'Room.objects.get', ([], {'id': 'room_id'}), '(id=room_id)\n', (29372, 29384), False, 'from nadine.models.resource import Room, Resource\n'), ((29877, 30106), 'nadine.models.resource.Room', 'Room', ([], {'name': 'name', 'location': 'location', 'description': 'description', 'floor': 'floor', 'seats': 'seats', 'max_capacity': 'max_capacity', 'has_av': 'has_av', 'has_phone': 'has_phone', 'default_rate': 'default_rate', 'image': 'image', 'members_only': 'members_only'}), '(name=name, location=location, description=description, floor=floor,\n seats=seats, max_capacity=max_capacity, has_av=has_av, has_phone=\n has_phone, default_rate=default_rate, image=image, members_only=\n members_only)\n', (29881, 30106), False, 'from nadine.models.resource import Room, Resource\n'), ((30597, 30643), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'name-input'}"}), "(attrs={'class': 'name-input'})\n", (30612, 30643), False, 'from django import forms\n'), ((30807, 30855), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {'attrs': "{'class': 'package-id'}"}), "(attrs={'class': 'package-id'})\n", (30824, 30855), False, 'from django import forms\n'), ((30989, 31037), 'django.forms.Select', 'forms.Select', ([], {'attrs': "{'class': 'browser-default'}"}), "(attrs={'class': 'browser-default'})\n", (31001, 31037), False, 'from django import forms\n'), ((31075, 31097), 'nadine.models.resource.Resource.objects.all', 'Resource.objects.all', ([], {}), '()\n', (31095, 31097), False, 'from nadine.models.resource import Room, Resource\n'), ((31352, 31420), 'django.forms.CheckboxInput', 'forms.CheckboxInput', ([], {'attrs': "{'class': 'browser-default del-checkbox'}"}), "(attrs={'class': 'browser-default del-checkbox'})\n", (31371, 31420), False, 'from django import forms\n'), ((32114, 32155), 'nadine.models.membership.MembershipPackage.objects.get', 'MembershipPackage.objects.get', ([], {'id': 'package'}), '(id=package)\n', (32143, 32155), False, 'from nadine.models.membership import Membership, MembershipPackage, ResourceSubscription, IndividualMembership, SubscriptionDefault\n'), ((32236, 32299), 'nadine.models.membership.SubscriptionDefault.objects.get', 'SubscriptionDefault.objects.get', ([], {'id': "self.cleaned_data['sub_id']"}), "(id=self.cleaned_data['sub_id'])\n", (32267, 32299), False, 'from nadine.models.membership import Membership, MembershipPackage, ResourceSubscription, IndividualMembership, SubscriptionDefault\n'), ((32612, 32655), 'nadine.models.membership.MembershipPackage.objects.filter', 
'MembershipPackage.objects.filter', ([], {'name': 'name'}), '(name=name)\n', (32644, 32655), False, 'from nadine.models.membership import Membership, MembershipPackage, ResourceSubscription, IndividualMembership, SubscriptionDefault\n'), ((33027, 33155), 'nadine.models.membership.SubscriptionDefault', 'SubscriptionDefault', ([], {'package': 'p', 'resource': 'resource', 'allowance': 'allowance', 'monthly_rate': 'monthly_rate', 'overage_rate': 'overage_rate'}), '(package=p, resource=resource, allowance=allowance,\n monthly_rate=monthly_rate, overage_rate=overage_rate)\n', (33046, 33155), False, 'from nadine.models.membership import Membership, MembershipPackage, ResourceSubscription, IndividualMembership, SubscriptionDefault\n'), ((1920, 1940), 'datetime.timedelta', 'timedelta', ([], {'days': 'days'}), '(days=days)\n', (1929, 1940), False, 'from datetime import datetime, timedelta\n'), ((2212, 2229), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (2221, 2229), False, 'from datetime import datetime, timedelta\n'), ((6842, 6877), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'username': 'username'}), '(username=username)\n', (6858, 6877), False, 'from django.contrib.auth.models import User\n'), ((6904, 6965), 'nadine.models.organization.OrganizationMember', 'OrganizationMember', ([], {'organization': 'self.organization', 'user': 'user'}), '(organization=self.organization, user=user)\n', (6922, 6965), False, 'from nadine.models.organization import Organization, OrganizationMember\n'), ((12571, 12592), 'django.core.files.base.ContentFile', 'ContentFile', (['img_data'], {}), '(img_data)\n', (12582, 12592), False, 'from django.core.files.base import ContentFile\n'), ((12669, 12738), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Organization'], {'id': "self.cleaned_data['organization']"}), "(Organization, id=self.cleaned_data['organization'])\n", (12686, 12738), False, 'from django.shortcuts import get_object_or_404\n'), ((23478, 23483), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (23481, 23483), False, 'from django.utils.timezone import localtime, now\n'), ((25555, 25590), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'username': 'username'}), '(username=username)\n', (25571, 25590), False, 'from django.contrib.auth.models import User\n'), ((27036, 27041), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (27039, 27041), False, 'from django.utils.timezone import localtime, now\n'), ((32788, 32829), 'nadine.models.membership.MembershipPackage.objects.get', 'MembershipPackage.objects.get', ([], {'id': 'package'}), '(id=package)\n', (32817, 32829), False, 'from nadine.models.membership import Membership, MembershipPackage, ResourceSubscription, IndividualMembership, SubscriptionDefault\n'), ((32929, 32974), 'nadine.models.membership.MembershipPackage', 'MembershipPackage', ([], {'name': 'name', 'enabled': 'enabled'}), '(name=name, enabled=enabled)\n', (32946, 32974), False, 'from nadine.models.membership import Membership, MembershipPackage, ResourceSubscription, IndividualMembership, SubscriptionDefault\n'), ((2203, 2208), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (2206, 2208), False, 'from django.utils.timezone import localtime, now\n'), ((9574, 9606), 'django.contrib.auth.models.User.objects.filter', 'User.objects.filter', ([], {'email': 'email'}), '(email=email)\n', (9593, 9606), False, 'from django.contrib.auth.models import User\n'), ((10798, 10836), 
'django.contrib.auth.models.User.objects.filter', 'User.objects.filter', ([], {'username': 'username'}), '(username=username)\n', (10817, 10836), False, 'from django.contrib.auth.models import User\n'), ((12940, 12961), 'django.core.files.base.ContentFile', 'ContentFile', (['img_data'], {}), '(img_data)\n', (12951, 12961), False, 'from django.core.files.base import ContentFile\n'), ((13535, 13624), 'django.forms.ValidationError', 'forms.ValidationError', ([], {'message': '"""All websites must have a URL"""', 'code': '"""missing_anchor"""'}), "(message='All websites must have a URL', code=\n 'missing_anchor')\n", (13556, 13624), False, 'from django import forms\n'), ((13687, 13766), 'django.forms.ValidationError', 'forms.ValidationError', ([], {'message': '"""All URLS must have a type"""', 'code': '"""missing_type"""'}), "(message='All URLS must have a type', code='missing_type')\n", (13708, 13766), False, 'from django import forms\n'), ((21706, 21755), 'nadine.models.usage.CoworkingDay.objects.filter', 'CoworkingDay.objects.filter', ([], {'user': 'u', 'visit_date': 'v'}), '(user=u, visit_date=v)\n', (21733, 21755), False, 'from nadine.models.usage import PAYMENT_CHOICES, CoworkingDay\n'), ((24877, 24923), 'nadine.models.membership.MembershipPackage.objects.filter', 'MembershipPackage.objects.filter', ([], {'enabled': '(True)'}), '(enabled=True)\n', (24909, 24923), False, 'from nadine.models.membership import Membership, MembershipPackage, ResourceSubscription, IndividualMembership, SubscriptionDefault\n'), ((25702, 25734), 'nadine.models.organization.Organization.objects.get', 'Organization.objects.get', ([], {'id': 'org'}), '(id=org)\n', (25726, 25734), False, 'from nadine.models.organization import Organization, OrganizationMember\n'), ((1904, 1909), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (1907, 1909), False, 'from django.utils.timezone import localtime, now\n')]
import pymc as pm
import numpy as np
import matplotlib.pyplot as plt
np.set_printoptions(precision=3, suppress=True)
c_data = np.genfromtxt("d:/data/challenger_data.csv", skip_header=1, usecols=[1,2], missing_values='NA', delimiter=',')
c_data = c_data[~np.isnan(c_data[:,1])]
print("TEMP, O-RING failure?")
print(c_data)
def logistic(x, beta, alpha=0):
    return 1.0 / (1.0 + np.exp(np.dot(beta, x) + alpha))
x = np.linspace(-4, 4, 100)
#plt.plot(x, logistic(x, 1))
#plt.plot(x, logistic(x, 3))
#plt.plot(x, logistic(x, -5))
plt.legend()
plt.plot(x, logistic(x, 1, 1))
plt.plot(x, logistic(x, 3, -2))
plt.plot(x, logistic(x, -5, 7))
temp = c_data[:, 0]
D = c_data[:, 1]
beta = pm.Normal("beta", 0, 0.001, value=0)
alpha = pm.Normal("alpha", 0, 0.001, value=0)
@pm.deterministic
def p(t=temp, alpha=alpha, beta=beta):
    return 1.0 / (1. + np.exp(beta*t+alpha))
p.value
observed = pm.Bernoulli("bernoulli_obs", p, value=D, observed=True)
model = pm.Model([observed, beta, alpha])
map_ = pm.MAP(model)
map_.fit()
mcmc = pm.MCMC(model)
mcmc.sample(120000, 100000, 2)
alpha_samples = mcmc.trace('alpha')[:, None]
beta_samples = mcmc.trace('beta')[:, None]
t = np.linspace(temp.min() - 5, temp.max() + 5, 50)[:, None]
p_t = logistic(t.T, beta_samples, alpha_samples)
mean_prob_t = p_t.mean(axis=0)
plt.plot(t, mean_prob_t, lw=3)
plt.plot(t, p_t[0, :], ls='--')
plt.plot(t, p_t[-2, :], ls="--")
plt.scatter(temp, D, color="k", s=50, alpha=0.5)
[ "pymc.MAP", "pymc.Model", "matplotlib.pyplot.plot", "matplotlib.pyplot.scatter", "matplotlib.pyplot.legend", "pymc.MCMC", "pymc.Bernoulli", "pymc.Normal" ]
[((531, 543), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (541, 543), True, 'import matplotlib.pyplot as plt\n'), ((685, 721), 'pymc.Normal', 'pm.Normal', (['"""beta"""', '(0)', '(0.001)'], {'value': '(0)'}), "('beta', 0, 0.001, value=0)\n", (694, 721), True, 'import pymc as pm\n'), ((730, 767), 'pymc.Normal', 'pm.Normal', (['"""alpha"""', '(0)', '(0.001)'], {'value': '(0)'}), "('alpha', 0, 0.001, value=0)\n", (739, 767), True, 'import pymc as pm\n'), ((892, 948), 'pymc.Bernoulli', 'pm.Bernoulli', (['"""bernoulli_obs"""', 'p'], {'value': 'D', 'observed': '(True)'}), "('bernoulli_obs', p, value=D, observed=True)\n", (904, 948), True, 'import pymc as pm\n'), ((957, 990), 'pymc.Model', 'pm.Model', (['[observed, beta, alpha]'], {}), '([observed, beta, alpha])\n', (965, 990), True, 'import pymc as pm\n'), ((999, 1012), 'pymc.MAP', 'pm.MAP', (['model'], {}), '(model)\n', (1005, 1012), True, 'import pymc as pm\n'), ((1031, 1045), 'pymc.MCMC', 'pm.MCMC', (['model'], {}), '(model)\n', (1038, 1045), True, 'import pymc as pm\n'), ((1309, 1339), 'matplotlib.pyplot.plot', 'plt.plot', (['t', 'mean_prob_t'], {'lw': '(3)'}), '(t, mean_prob_t, lw=3)\n', (1317, 1339), True, 'import matplotlib.pyplot as plt\n'), ((1340, 1371), 'matplotlib.pyplot.plot', 'plt.plot', (['t', 'p_t[0, :]'], {'ls': '"""--"""'}), "(t, p_t[0, :], ls='--')\n", (1348, 1371), True, 'import matplotlib.pyplot as plt\n'), ((1372, 1404), 'matplotlib.pyplot.plot', 'plt.plot', (['t', 'p_t[-2, :]'], {'ls': '"""--"""'}), "(t, p_t[-2, :], ls='--')\n", (1380, 1404), True, 'import matplotlib.pyplot as plt\n'), ((1405, 1453), 'matplotlib.pyplot.scatter', 'plt.scatter', (['temp', 'D'], {'color': '"""k"""', 's': '(50)', 'alpha': '(0.5)'}), "(temp, D, color='k', s=50, alpha=0.5)\n", (1416, 1453), True, 'import matplotlib.pyplot as plt\n')]
from math import cos, sin, tan, radians an = int(input('Digite um ângulo: ')) s = sin(radians(an)) c = cos(radians(an)) t = tan(radians(an)) print('o valor do seno é:{:.2f} \n o valor do cosseno é: {:.2f} \n o valor da tangente é: {:.2f} '.format(s, c, t))
[ "math.radians" ]
[((86, 97), 'math.radians', 'radians', (['an'], {}), '(an)\n', (93, 97), False, 'from math import cos, sin, tan, radians\n'), ((107, 118), 'math.radians', 'radians', (['an'], {}), '(an)\n', (114, 118), False, 'from math import cos, sin, tan, radians\n'), ((128, 139), 'math.radians', 'radians', (['an'], {}), '(an)\n', (135, 139), False, 'from math import cos, sin, tan, radians\n')]
""" """ import os file_name = "exercise10" result_fle_name = "exercise10-result" result = [] # Obtenemos la ruta absoluta del directorio en el que estamos trabajando script_directory = os.path.dirname(__file__) def get_file_content(): try: file_path = f"{script_directory}/{file_name}.txt" return clean_scape_characters(open(file_path).readlines()) except FileNotFoundError: print("No podemos abrir el fichero por no existir") return 0 except Exception as except_message: print(except_message) return 0 # Limpiamos el contenido de caracteres de escape y devolverlo limpio def clean_scape_characters(list): clean_list = [] for i in range(len(list)): list[i] = list[i].strip() elements = list[i].split() clean_list.extend(elements) return clean_list def get_word_appears_count(): word_list = get_file_content() # Usamos la lista de todas las palabras del fichero for word in word_list: # Si no hay nada en el resultado, almacenar palabra con 1 aparición if (len(result) == 0): result.append([word, 1]) else: # Recorrer el resultado para ver si hay apariciones for position in range(len(result)): # Si encuentra algo, sumar + 1 la aparición if (word in result[position]): result[position][1] += 1 break # Si recorre todo los resultados y no encuentra nada, nuevo elemento elif position == len(result) - 1: result.append([word, 1]) def save_result(): file_path = f"{script_directory}/{result_fle_name}.txt" # "w" para sobrescribir lo anterior txt = open(file_path, "w") for element in result: txt.write(f"{element[0]}, {element[1]}\n") txt.close() # Obtener las apariciones get_word_appears_count() # Guardar resultado save_result()
[ "os.path.dirname" ]
[((188, 213), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (203, 213), False, 'import os\n')]
import unittest def return_42(): return 42 def raise_exception(): raise Exception("This is exception!") class TestUnittest(unittest.TestCase): def test_assert_equal(self): self.assertEqual(return_42(), 42) def test_assert_true(self): boolean_list = [False, False, True] self.assertTrue(any(boolean_list)) def test_assert_false(self): boolean_list = [True, True, True, False] self.assertFalse(all(boolean_list)) def test_assert_raises(self): with self.assertRaises(Exception): raise_exception() if __name__ == '__main__': unittest.main()
[ "unittest.main" ]
[((616, 631), 'unittest.main', 'unittest.main', ([], {}), '()\n', (629, 631), False, 'import unittest\n')]
#!/usr/bin/python3 # -*- coding: utf-8 -*- import multiprocessing import numpy as np import time from common.pilot_gloval_variable import MPVariable from planning import pure_pursuit class MPPlanning(): def __init__(self, cfg): self.__m = multiprocessing.Process(target=self.__process, \ args=(cfg['planning_interval'],)) self.__m.start() return def end(self): self.__m.join() print('Finish MPPerception') return def __process(self, interval): """ update planning """ try: previos_work_time = time.time() while True: now_time = time.time() if (now_time - previos_work_time) >= interval: # calc target angle target_z, \ target_angle = pure_pursuit.pure_pursuit(MPVariable.lane_m_leasts_abc_lpf_a.value, \ MPVariable.lane_m_leasts_abc_lpf_b.value, \ MPVariable.lane_m_leasts_abc_lpf_c.value, \ MPVariable.obd_vehicle_speed_kmph.value) # tx update MPVariable.pp_target_z.value = target_z MPVariable.can_tx_target_angle.value = target_angle MPVariable.can_tx_servo_on_flag.value = MPVariable.lane_m_leasts_status.value MPVariable.can_tx_counter_camera_unit.value = MPVariable.can_rx_counter_servo_unit.value previos_work_time = now_time except KeyboardInterrupt: pass except Exception as e: import traceback traceback.print_exc() finally: pass return
[ "multiprocessing.Process", "traceback.print_exc", "planning.pure_pursuit.pure_pursuit", "time.time" ]
[((255, 340), 'multiprocessing.Process', 'multiprocessing.Process', ([], {'target': 'self.__process', 'args': "(cfg['planning_interval'],)"}), "(target=self.__process, args=(cfg['planning_interval'],)\n )\n", (278, 340), False, 'import multiprocessing\n'), ((646, 657), 'time.time', 'time.time', ([], {}), '()\n', (655, 657), False, 'import time\n'), ((709, 720), 'time.time', 'time.time', ([], {}), '()\n', (718, 720), False, 'import time\n'), ((1818, 1839), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (1837, 1839), False, 'import traceback\n'), ((891, 1092), 'planning.pure_pursuit.pure_pursuit', 'pure_pursuit.pure_pursuit', (['MPVariable.lane_m_leasts_abc_lpf_a.value', 'MPVariable.lane_m_leasts_abc_lpf_b.value', 'MPVariable.lane_m_leasts_abc_lpf_c.value', 'MPVariable.obd_vehicle_speed_kmph.value'], {}), '(MPVariable.lane_m_leasts_abc_lpf_a.value,\n MPVariable.lane_m_leasts_abc_lpf_b.value, MPVariable.\n lane_m_leasts_abc_lpf_c.value, MPVariable.obd_vehicle_speed_kmph.value)\n', (916, 1092), False, 'from planning import pure_pursuit\n')]
import time import datetime import sys import getopt, argparse from collections import defaultdict import unicodedata import operator import json import unicodecsv def remove_control_characters(s): return "".join(ch for ch in s if unicodedata.category(ch)[0]!="C") def stripped(x): return "".join([i for i in x if 31 < ord(i) < 127]) parser = argparse.ArgumentParser(prog='combine_item_data_sources.py') parser.add_argument('-movielens-movies', help='Movielens 10 million movies file', required=True) parser.add_argument('-movielens-tags', help='Movielens 10 million tags file', required=True) parser.add_argument('-hetrec-movies', help='Hetrec 2011 movies file', required=True) parser.add_argument('-freebase-movies', help='Freebase movies file', required=True) parser.add_argument('-csv', help='csv file to write to', required=True) parser.add_argument('-item-attr-json', help='json file to write item attributes', required=True) opts = vars(parser.parse_args()) num_freebase_genres = 1 num_top_movielens_tags = 2 tagMap = {} topTags = {} actorMap = {} directorMap = {} imgUrl = {} titleMap = {} attrs = [("string","title",titleMap),("string","img_url",imgUrl),("text","top_tags",topTags),("text","movielens_tags_full",tagMap),("string","actors",actorMap),("string","directors",directorMap)] # write out attribute types with open(opts['item_attr_json'], 'w') as jsonFile: j = {} j["type_id"] = 1 j["type_name"] = "movie" attrList = [] for (atype,name,amap) in attrs: attrJson = {} attrJson['name'] = name attrJson['value_type'] = atype attrList.append(attrJson) j["type_attrs"] = attrList topj = {} tyList = [] tyList.append(j) topj["types"] = tyList jstr = json.dumps(topj,sort_keys=True) jsonFile.write(jstr) jsonFile.close() # # Load Movielens tags and keep top ones # ignoreTags = set() ignoreTags.add("less_than_300_ratings") # add tags to item_map_text with open(opts['movielens_tags']) as textfile1: for line in textfile1: line = line.rstrip() (user,item,tag,time) = line.split('::') item = int(item) tag = tag.replace("\"","'") tag = tag.replace(",","") tag = tag.replace("\\n","") tag = stripped(tag) tag = tag.lower() tag = tag.strip() tag = tag.replace(" ","_") if not (tag in ignoreTags or "nudity" in tag): if item in tagMap: tagMap[item][tag] = tagMap[item][tag] + 1 else: tagMap[item] = defaultdict(int) tagMap[item][tag] = 1 # Get top tags and change map to array of tags for key in tagMap: map = tagMap[key] sorted_x = sorted(map.items(), key=operator.itemgetter(1)) sorted_x = sorted_x[::-1] topList = sorted_x[0:num_top_movielens_tags] tags= [str(i[0]) for i in topList] tagMap[key] = [str(i[0]) for i in sorted_x] topTags[key] = tags # add data from freebase with open(opts['freebase_movies']) as textfile1: for line in textfile1: line = line.rstrip() j = json.loads(line) if not ('result' in j or 'error' in j): id = j["movielens_id"] id = int(id) genres = j['genre'][0:num_freebase_genres] if len(genres) > 0: for genre in genres: if id in topTags: topTags[id].append(genre.lower()) else: topTags[id] = [genre.lower()] starring = j['starring'] if starring: actors = [] c = 0 for star in starring: actor = star['actor'] if actor: actor = actor.replace("'","").replace("\"","").lower().replace(" ","_") actors.append(actor) c += 1 if c >= 3: break if len(actors) > 0: actorMap[id] = actors if id in topTags: topTags[id] = topTags[id] + actors else: topTags[id] = actors directed_by = j['directed_by'] directors = [] if directed_by and len(directed_by) > 0: if not type(directed_by) is list: directed_by = [directed_by] for director in directed_by: directors.append(director.replace("'","").replace("\"","").lower().replace(" ","_")) directorMap[id] = directors if id in topTags: topTags[id] = topTags[id] + directors else: topTags[id] = directors subjects_raw = j['subjects'] subjects = [] if len(subjects_raw) > 0: for subject in subjects_raw: subjects.append(subject.replace("'","").replace("\"","").lower().replace(" ","_")) if id in topTags: topTags[id] = topTags[id] + subjects else: topTags[id] = subjects first = True with open(opts['hetrec_movies']) as textfile1: for line in textfile1: if first: first = False continue line = line.rstrip() (id,title,imdbID,spanishTitle,imdbPictureURL,year,rtID,rtAllCriticsRating,rtAllCriticsNumReviews,rtAllCriticsNumFresh,rtAllCriticsNumRotten,rtAllCriticsScore,rtTopCriticsRating,rtTopCriticsNumReviews,rtTopCriticsNumFresh,rtTopCriticsNumRotten,rtTopCriticsScore,rtAudienceRating,rtAudienceNumRatings,rtAudienceScore,rtPictureURL) = line.split('\t') id = int(id) imgUrl[id] = [imdbPictureURL] with open(opts['movielens_movies']) as textfile1: for line in textfile1: line = line.rstrip() (id,title,tags) = line.split('::') id = int(id) titleMap[id] = [title] def addFeature(fmap,id,name,dmap): if id in dmap: v = ",".join(dmap[id]) fmap[name] = v with open(opts['csv'], 'w') as csvfile: fieldnames = ["id","name"] + [str(i[1]) for i in attrs] writer = unicodecsv.DictWriter(csvfile,encoding='utf-8',fieldnames=fieldnames) writer.writeheader() with open(opts['movielens_movies']) as textfile1: for line in textfile1: line = line.rstrip() (id,title,tags) = line.split('::') id = int(id) features = {} features['id'] = id features['name'] = title for (atype,name,amap) in attrs: addFeature(features,id,name,amap) writer.writerow(features)
[ "json.loads", "argparse.ArgumentParser", "unicodecsv.DictWriter", "unicodedata.category", "json.dumps", "collections.defaultdict", "operator.itemgetter" ]
[((354, 414), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""combine_item_data_sources.py"""'}), "(prog='combine_item_data_sources.py')\n", (377, 414), False, 'import getopt, argparse\n'), ((1752, 1784), 'json.dumps', 'json.dumps', (['topj'], {'sort_keys': '(True)'}), '(topj, sort_keys=True)\n', (1762, 1784), False, 'import json\n'), ((6237, 6308), 'unicodecsv.DictWriter', 'unicodecsv.DictWriter', (['csvfile'], {'encoding': '"""utf-8"""', 'fieldnames': 'fieldnames'}), "(csvfile, encoding='utf-8', fieldnames=fieldnames)\n", (6258, 6308), False, 'import unicodecsv\n'), ((3097, 3113), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (3107, 3113), False, 'import json\n'), ((2740, 2762), 'operator.itemgetter', 'operator.itemgetter', (['(1)'], {}), '(1)\n', (2759, 2762), False, 'import operator\n'), ((2557, 2573), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (2568, 2573), False, 'from collections import defaultdict\n'), ((236, 260), 'unicodedata.category', 'unicodedata.category', (['ch'], {}), '(ch)\n', (256, 260), False, 'import unicodedata\n')]
from ExcelGenerator import ExcelGenerator import os class DirectoryExplore: def __init__(self, directory_path): self.directory_path = directory_path def patse_directory(self, file_name): excel_generator = ExcelGenerator(file_name) browse_count = 0 directory_count = 0 for current_directory, sub_directory, files in os.walk(self.directory_path): browse_count += 1 print(current_directory) if ".git" in current_directory: continue if browse_count != 1: for sub in sub_directory: directory_count += 1 layer_list = os.path.join( current_directory, sub).replace("\\", "/").split("/") for index, layer in enumerate(layer_list): excel_generator.insert_cell_value( directory_count, index + 1, layer) excel_generator.insert_cell_hyperlink( directory_count, len(layer_list) - index, os.path.normpath(os.path.join(current_directory, sub) + (os.path.sep + os.path.pardir)*(index))) for file in files: directory_count += 1 layer_list = os.path.join(current_directory, file).replace("\\", "/").split("/") for index, layer in enumerate(layer_list): excel_generator.insert_cell_value( directory_count, index + 1, layer) excel_generator.insert_cell_hyperlink( directory_count, len(layer_list) - index, os.path.normpath(os.path.join(current_directory, file) + (os.path.sep + os.path.pardir)*(index))) excel_generator.save_workbook() # break if __name__ == '__main__': directory_explore = DirectoryExplore("C:/Users/ASUS/Desktop/2021-Projects/C-Heritage") directory_explore.patse_directory("test/02_test_patse_result.xlsx") # layer_1 = ['A', 'B', "C"] # layer_2 = [['A',"D","D","D","D"], ["D","D","D","D"]] # layer_3 = [[['','R'], ['','R']], [['','R'], ['','R']]] # layer = [] # layer.append(layer_1) # layer.append(layer_2) # layer.append(layer_3) # Base on the number of leyer we will take our collumn apparently # print(layer[2][0][1])
[ "ExcelGenerator.ExcelGenerator", "os.walk", "os.path.join" ]
[((231, 256), 'ExcelGenerator.ExcelGenerator', 'ExcelGenerator', (['file_name'], {}), '(file_name)\n', (245, 256), False, 'from ExcelGenerator import ExcelGenerator\n'), ((365, 393), 'os.walk', 'os.walk', (['self.directory_path'], {}), '(self.directory_path)\n', (372, 393), False, 'import os\n'), ((1296, 1333), 'os.path.join', 'os.path.join', (['current_directory', 'file'], {}), '(current_directory, file)\n', (1308, 1333), False, 'import os\n'), ((1680, 1717), 'os.path.join', 'os.path.join', (['current_directory', 'file'], {}), '(current_directory, file)\n', (1692, 1717), False, 'import os\n'), ((682, 718), 'os.path.join', 'os.path.join', (['current_directory', 'sub'], {}), '(current_directory, sub)\n', (694, 718), False, 'import os\n'), ((1118, 1154), 'os.path.join', 'os.path.join', (['current_directory', 'sub'], {}), '(current_directory, sub)\n', (1130, 1154), False, 'import os\n')]
from utils import get_contract_from_blockchain from compiler import compile_into_ast from analyze import analyze_ast import argparse import json parser = argparse.ArgumentParser(description="Analyze Tacos") parser.add_argument("--get-file", help="Use a local file path to analyze") parser.add_argument("--get-from-hash", help="Use a contract from etherscan.io") args = parser.parse_args() if __name__ == "__main__": # src_path, _ = get_contract_from_blockchain("0x61935CbDd02287B511119DDb11Aeb42F1593b7EF") # src_path, _ = get_contract_from_blockchain("0x5F098176B4f96207b3dc7b257175208112147243") # src_path, _ = get_contract_from_blockchain("0xC39D185eE1256E10D5010722D359ec87301eb647") if args.get_from_hash: print(args) src_path, _ = get_contract_from_blockchain(args.get_from_hash) else: src_path = args.get_file ast = compile_into_ast(src_path) print(analyze_ast(ast))
[ "analyze.analyze_ast", "compiler.compile_into_ast", "argparse.ArgumentParser", "utils.get_contract_from_blockchain" ]
[((163, 215), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Analyze Tacos"""'}), "(description='Analyze Tacos')\n", (186, 215), False, 'import argparse\n'), ((900, 926), 'compiler.compile_into_ast', 'compile_into_ast', (['src_path'], {}), '(src_path)\n', (916, 926), False, 'from compiler import compile_into_ast\n'), ((793, 841), 'utils.get_contract_from_blockchain', 'get_contract_from_blockchain', (['args.get_from_hash'], {}), '(args.get_from_hash)\n', (821, 841), False, 'from utils import get_contract_from_blockchain\n'), ((940, 956), 'analyze.analyze_ast', 'analyze_ast', (['ast'], {}), '(ast)\n', (951, 956), False, 'from analyze import analyze_ast\n')]
import argparse from timeit import default_timer as timer import numpy as np import tensorflow as tf import tbpf_tf parser = argparse.ArgumentParser() parser.add_argument("-d", "--degree", help="Degree of polynomial features", default=2, type=int) parser.add_argument("-i", "--iterations", help="Number of iterations over one number of inputs", default=10000, type=int) parser.add_argument("--start", help="Number of inputs start", default=1, type=int) parser.add_argument("--stop", help="Number of inputs stop", default=1001, type=int) parser.add_argument("--step", help="Number of inputs step", default=100, type=int) args = parser.parse_args() gpus = tf.config.experimental.list_physical_devices('GPU') if gpus: try: # Currently, memory growth needs to be the same across GPUs for gpu in gpus: tf.config.experimental.set_memory_growth(gpu, True) logical_gpus = tf.config.experimental.list_logical_devices('GPU') print(len(gpus), "Physical GPUs,", len(logical_gpus), "Logical GPUs") except RuntimeError as e: # Memory growth must be set before GPUs have been initialized print(e) order = args.degree inputs = range(args.start, args.stop, args.step) iterations = args.iterations times = [] pfgs = [tbpf_tf.PFG2, tbpf_tf.PFG3, tbpf_tf.PFG4, tbpf_tf.PFG5] sub_iterations = 100 if iterations/100 > 0: iterations /= 100 else: sub_iterations = 1 for i in inputs: times_in = [] input_tensor = tf.random.normal([i], 0, 1, tf.float32) x = tf.reshape(tf.concat([[1], input_tensor], 0), [1, -1]) M = [] for it in range(2, order + 1): M.append(tf.convert_to_tensor(tbpf_tf.mask_matrix(i, it, True))) for idx in range(int(iterations)): start = timer() for idx2 in range(sub_iterations): pfg = pfgs[order-2](M, x) end = timer() times_in.append((end-start)/sub_iterations) times.append([order, i, np.sum(times_in), np.min(times_in), np.mean(times_in), np.max(times_in), pfgs[order-2](M, x).numpy().size]) print('Order: {}, Inputs: {}, Overall test time: {}, Test mean time: {}, # of PF: {}'.format( times[-1][0], times[-1][1], times[-1][2], times[-1][4], times[-1][6])) np.savetxt(f'res_tbpf_tf_gpu_d{order}_it{iterations*sub_iterations}.csv', times, header='order,inputs,overall time,min time,mean time,max time,num of PF', delimiter=',')
[ "numpy.sum", "argparse.ArgumentParser", "tensorflow.random.normal", "timeit.default_timer", "numpy.savetxt", "tensorflow.concat", "tensorflow.config.experimental.set_memory_growth", "numpy.min", "numpy.mean", "numpy.max", "tbpf_tf.mask_matrix", "tensorflow.config.experimental.list_logical_devices", "tensorflow.config.experimental.list_physical_devices" ]
[((128, 153), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (151, 153), False, 'import argparse\n'), ((679, 730), 'tensorflow.config.experimental.list_physical_devices', 'tf.config.experimental.list_physical_devices', (['"""GPU"""'], {}), "('GPU')\n", (723, 730), True, 'import tensorflow as tf\n'), ((2243, 2427), 'numpy.savetxt', 'np.savetxt', (['f"""res_tbpf_tf_gpu_d{order}_it{iterations * sub_iterations}.csv"""', 'times'], {'header': '"""order,inputs,overall time,min time,mean time,max time,num of PF"""', 'delimiter': '""","""'}), "(f'res_tbpf_tf_gpu_d{order}_it{iterations * sub_iterations}.csv',\n times, header=\n 'order,inputs,overall time,min time,mean time,max time,num of PF',\n delimiter=',')\n", (2253, 2427), True, 'import numpy as np\n'), ((1468, 1507), 'tensorflow.random.normal', 'tf.random.normal', (['[i]', '(0)', '(1)', 'tf.float32'], {}), '([i], 0, 1, tf.float32)\n', (1484, 1507), True, 'import tensorflow as tf\n'), ((909, 959), 'tensorflow.config.experimental.list_logical_devices', 'tf.config.experimental.list_logical_devices', (['"""GPU"""'], {}), "('GPU')\n", (952, 959), True, 'import tensorflow as tf\n'), ((1527, 1560), 'tensorflow.concat', 'tf.concat', (['[[1], input_tensor]', '(0)'], {}), '([[1], input_tensor], 0)\n', (1536, 1560), True, 'import tensorflow as tf\n'), ((1748, 1755), 'timeit.default_timer', 'timer', ([], {}), '()\n', (1753, 1755), True, 'from timeit import default_timer as timer\n'), ((1851, 1858), 'timeit.default_timer', 'timer', ([], {}), '()\n', (1856, 1858), True, 'from timeit import default_timer as timer\n'), ((838, 889), 'tensorflow.config.experimental.set_memory_growth', 'tf.config.experimental.set_memory_growth', (['gpu', '(True)'], {}), '(gpu, True)\n', (878, 889), True, 'import tensorflow as tf\n'), ((1939, 1955), 'numpy.sum', 'np.sum', (['times_in'], {}), '(times_in)\n', (1945, 1955), True, 'import numpy as np\n'), ((1957, 1973), 'numpy.min', 'np.min', (['times_in'], {}), '(times_in)\n', (1963, 1973), True, 'import numpy as np\n'), ((1975, 1992), 'numpy.mean', 'np.mean', (['times_in'], {}), '(times_in)\n', (1982, 1992), True, 'import numpy as np\n'), ((1994, 2010), 'numpy.max', 'np.max', (['times_in'], {}), '(times_in)\n', (2000, 2010), True, 'import numpy as np\n'), ((1657, 1689), 'tbpf_tf.mask_matrix', 'tbpf_tf.mask_matrix', (['i', 'it', '(True)'], {}), '(i, it, True)\n', (1676, 1689), False, 'import tbpf_tf\n')]
import numpy as np def pdist(source_mtx, target_mtx): distance_matrix = -2 * source_mtx.dot(target_mtx.transpose()) \ + (source_mtx ** 2).sum(axis=1).reshape(-1, 1) \ + (target_mtx ** 2).sum(axis=1).reshape(1, -1) return distance_matrix def get_acc(query_emb, query_idx, gall_emb, gall_idx, labels, except_self=False): dist = pdist(query_emb, gall_emb) if except_self: sort_idx = np.argsort(dist, axis=1)[:, 1:21] else: sort_idx = np.argsort(dist, axis=1)[:, :20] match = np.zeros((len(query_idx), 20)) for i, idx in enumerate(query_idx): match[i] = labels[gall_idx[sort_idx[i].astype(np.int)]] == labels[idx] acc_val = [] for k in [1, 5, 10, 20]: acc = np.sum(np.sum(match[:, :k], axis=1) > 0) / match.shape[0] acc_val.append(acc) return acc_val class Metric: def __init__(self): pass def __call__(self, outputs, target, loss): raise NotImplementedError def reset(self): raise NotImplementedError def value(self): raise NotImplementedError def name(self): raise NotImplementedError class AccumulatedAccuracyMetric(Metric): """ Works with classification model """ def __init__(self): self.correct = 0 self.total = 0 def __call__(self, outputs, target, loss): pred = outputs[0].data.max(1, keepdim=True)[1] self.correct += pred.eq(target[0].data.view_as(pred)).cpu().sum() self.total += target[0].size(0) return self.value() def reset(self): self.correct = 0 self.total = 0 def value(self): return 100 * float(self.correct) / self.total def name(self): return 'Accuracy' class AverageNonzeroTripletsMetric(Metric): ''' Counts average number of nonzero triplets found in minibatches ''' def __init__(self): self.values = [] def __call__(self, outputs, target, loss): self.values.append(loss[1]) return self.value() def reset(self): self.values = [] def value(self): return np.mean(self.values) def name(self): return 'Average nonzero triplets' class RetrivalAccMetric(Metric): def __init__(self, data_num, vec_dim=128): self.data_num = data_num self.vec_dim = vec_dim self.emb = np.zeros((self.data_num, self.vec_dim), dtype=np.float16) self.label = np.zeros(self.data_num) self.source = np.zeros(self.data_num) self.cnt = 0 def __call__(self, outputs, target, source): self.emb[self.cnt:self.cnt + outputs.shape[0]] = outputs.detach().cpu().numpy().astype(np.float16) self.label[self.cnt:self.cnt + outputs.shape[0]] = target.detach().cpu().numpy() self.source[self.cnt:self.cnt + outputs.shape[0]] = source.detach().cpu().numpy() self.cnt += outputs.shape[0] def reset(self): self.emb = np.zeros((self.data_num, self.vec_dim)) self.label = np.zeros(self.data_num) self.source = np.zeros(self.data_num) self.cnt = 0 def value(self): user_idx = np.where(self.source == 0)[0] shop_idx = np.where(self.source == 1)[0] user_emb_mtx = self.emb[user_idx] shop_emb_mtx = self.emb[shop_idx] inshop_acc = get_acc(shop_emb_mtx, shop_idx, shop_emb_mtx, shop_idx, self.label, True) u2shop_acc = get_acc(user_emb_mtx, user_idx, shop_emb_mtx, shop_idx, self.label) return inshop_acc, u2shop_acc def name(self): return 'Retrieval Accuracy'
[ "numpy.sum", "numpy.zeros", "numpy.argsort", "numpy.mean", "numpy.where" ]
[((2160, 2180), 'numpy.mean', 'np.mean', (['self.values'], {}), '(self.values)\n', (2167, 2180), True, 'import numpy as np\n'), ((2409, 2466), 'numpy.zeros', 'np.zeros', (['(self.data_num, self.vec_dim)'], {'dtype': 'np.float16'}), '((self.data_num, self.vec_dim), dtype=np.float16)\n', (2417, 2466), True, 'import numpy as np\n'), ((2488, 2511), 'numpy.zeros', 'np.zeros', (['self.data_num'], {}), '(self.data_num)\n', (2496, 2511), True, 'import numpy as np\n'), ((2534, 2557), 'numpy.zeros', 'np.zeros', (['self.data_num'], {}), '(self.data_num)\n', (2542, 2557), True, 'import numpy as np\n'), ((2993, 3032), 'numpy.zeros', 'np.zeros', (['(self.data_num, self.vec_dim)'], {}), '((self.data_num, self.vec_dim))\n', (3001, 3032), True, 'import numpy as np\n'), ((3054, 3077), 'numpy.zeros', 'np.zeros', (['self.data_num'], {}), '(self.data_num)\n', (3062, 3077), True, 'import numpy as np\n'), ((3100, 3123), 'numpy.zeros', 'np.zeros', (['self.data_num'], {}), '(self.data_num)\n', (3108, 3123), True, 'import numpy as np\n'), ((452, 476), 'numpy.argsort', 'np.argsort', (['dist'], {'axis': '(1)'}), '(dist, axis=1)\n', (462, 476), True, 'import numpy as np\n'), ((515, 539), 'numpy.argsort', 'np.argsort', (['dist'], {'axis': '(1)'}), '(dist, axis=1)\n', (525, 539), True, 'import numpy as np\n'), ((3186, 3212), 'numpy.where', 'np.where', (['(self.source == 0)'], {}), '(self.source == 0)\n', (3194, 3212), True, 'import numpy as np\n'), ((3235, 3261), 'numpy.where', 'np.where', (['(self.source == 1)'], {}), '(self.source == 1)\n', (3243, 3261), True, 'import numpy as np\n'), ((778, 806), 'numpy.sum', 'np.sum', (['match[:, :k]'], {'axis': '(1)'}), '(match[:, :k], axis=1)\n', (784, 806), True, 'import numpy as np\n')]
import sys import geopandas as gpd import pandas as pd SITE_ID = "SiteFunctionalLocation" PROPERTIES_TO_RETAIN_GRID_AND_PRIMARY = [ "SiteName", SITE_ID, "SiteVoltage", "Total_Generation", ] PROPERTIES_TO_RETAIN_HEADROOM = [SITE_ID, "Headroom"] CURRENT_YEAR = 2021 DNO_UKPN = "UKPN" def main(): grid_and_primary_sites_file_name = sys.argv[1] headroom_capacity_file_name = sys.argv[2] grid_and_primary_sites_df: pd.DataFrame = pd.read_csv(grid_and_primary_sites_file_name) headroom_capacity_df: pd.DataFrame = pd.read_csv(headroom_capacity_file_name) # fetch the headroom capacity for the current year (e.g. 2021) or the closest year available preceding the current curr_headroom_capacity_df = headroom_capacity_df[ (headroom_capacity_df["Year"] == CURRENT_YEAR) & (headroom_capacity_df["Scenario"] == "Planning Scenario") ] idx_mask = curr_headroom_capacity_df.groupby(SITE_ID)["Year"].transform(max) == curr_headroom_capacity_df["Year"] curr_headroom_capacity_df = curr_headroom_capacity_df[idx_mask] curr_headroom_capacity_df = curr_headroom_capacity_df[PROPERTIES_TO_RETAIN_HEADROOM].set_index(SITE_ID) grid_and_primary_sites_df = grid_and_primary_sites_df.set_index(SITE_ID) ps_df: pd.DataFrame = grid_and_primary_sites_df.merge( curr_headroom_capacity_df, how="left", left_index=True, right_index=True ) # We are only interested in Primary Substations (filter out Grid Substations) ps_df = ps_df.reset_index() # We are only interested in Primary Substations (filter out Grid Substations) ps_df = ps_df[ps_df["SiteType"] == "Primary Substation"] # remove siteType column drop rows where geospatial information is missing (4 rows) ps_df = ps_df.drop(columns=["SiteType"]).dropna(subset=["Longitude", "Latitude"]) geometry = gpd.points_from_xy(ps_df["Longitude"], ps_df["Latitude"]) # drop duplicates in columns to retain columns_to_retain = list(set(PROPERTIES_TO_RETAIN_GRID_AND_PRIMARY + PROPERTIES_TO_RETAIN_HEADROOM)) # 4) build GeoDataFrame ps_gdf = gpd.GeoDataFrame( ps_df.loc[:, columns_to_retain], geometry=geometry, crs="EPSG:4326", ) # rename columns to align names ps_gdf = ps_gdf.rename( columns={ "SiteName": "Site Name", "SiteVoltage": "Downstream Voltage", "Total_Generation": "Total Generation (MW)", "Headroom": "Demand Headroom (MVA)", } ) # add a column with DNO name (useful after merging all DNO datasets) ps_gdf["DNO"] = DNO_UKPN # save to GeoJSON ps_gdf.to_file("ukpn-primary-substations.geojson", driver="GeoJSON") if __name__ == "__main__": main()
[ "pandas.read_csv", "geopandas.GeoDataFrame", "geopandas.points_from_xy" ]
[((461, 506), 'pandas.read_csv', 'pd.read_csv', (['grid_and_primary_sites_file_name'], {}), '(grid_and_primary_sites_file_name)\n', (472, 506), True, 'import pandas as pd\n'), ((548, 588), 'pandas.read_csv', 'pd.read_csv', (['headroom_capacity_file_name'], {}), '(headroom_capacity_file_name)\n', (559, 588), True, 'import pandas as pd\n'), ((1846, 1903), 'geopandas.points_from_xy', 'gpd.points_from_xy', (["ps_df['Longitude']", "ps_df['Latitude']"], {}), "(ps_df['Longitude'], ps_df['Latitude'])\n", (1864, 1903), True, 'import geopandas as gpd\n'), ((2093, 2183), 'geopandas.GeoDataFrame', 'gpd.GeoDataFrame', (['ps_df.loc[:, columns_to_retain]'], {'geometry': 'geometry', 'crs': '"""EPSG:4326"""'}), "(ps_df.loc[:, columns_to_retain], geometry=geometry, crs=\n 'EPSG:4326')\n", (2109, 2183), True, 'import geopandas as gpd\n')]
#!/pxrpythonsubst # # Copyright 2021 Pixar # # Licensed under the Apache License, Version 2.0 (the "Apache License") # with the following modification; you may not use this file except in # compliance with the Apache License and the following modification to it: # Section 6. Trademarks. is deleted and replaced with: # # 6. Trademarks. This License does not grant permission to use the trade # names, trademarks, service marks, or product names of the Licensor # and its affiliates, except as required to comply with Section 4(c) of # the License and to reproduce the content of the NOTICE file. # # You may obtain a copy of the Apache License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the Apache License with the above modification is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the Apache License for the specific # language governing permissions and limitations under the Apache License. from __future__ import print_function from pxr import Sdf, Pcp, Plug, Vt import os, unittest class TestPcpStreamingLayerReload(unittest.TestCase): @classmethod def setUpClass(cls): testRoot = os.path.join(os.path.dirname(__file__), 'PcpPlugins') testPluginsDso = testRoot + '/lib' testPluginsDsoSearch = testPluginsDso + '/*/Resources/' # Register dso plugins. Discard possible exception due to # TestPlugDsoEmpty. The exception only shows up here if it happens in # the main thread so we can't rely on it. try: Plug.Registry().RegisterPlugins(testPluginsDsoSearch) except RuntimeError: pass def setUp(self): # We expect there to be no layers left loaded when we start each test # case so we can start fresh. By the tearDown completes this needs to # be true. self.assertFalse(Sdf.Layer.GetLoadedLayers()) def _CreatePcpCache(self, rootLayer): return Pcp.Cache(Pcp.LayerStackIdentifier(rootLayer)) def test_StreamingLayerReload(self): # Open a layer with our streaming format. l = Sdf.Layer.FindOrOpen('root.testpcpstreaminglayerreload') self.assertTrue(l) # Build a cache. cache = self._CreatePcpCache(l) # Attempt to compute an index for /torus1/mesh_0 (should not exist). primIndex, errors = cache.ComputePrimIndex('/torus1/mesh_0') self.assertEqual(primIndex.primStack, []) # Load up asset.sdf, and replace l's content with it. This only changes # the sublayer list, which pcp should recognize and blow layer stacks. # Since l's underlying data implementation returns true for # "StreamsData()" this exercises a different code-path in Pcp's change # processing. assetLayer = Sdf.Layer.FindOrOpen('asset.sdf') self.assertTrue(assetLayer) with Pcp._TestChangeProcessor(cache): l.TransferContent(assetLayer) # Now when we compute the index for the mesh, it should have a spec, due # to the added sublayer. primIndex, errors = cache.ComputePrimIndex('/torus1/mesh_0') self.assertTrue(len(primIndex.primStack) > 0) if __name__ == "__main__": unittest.main()
[ "unittest.main", "pxr.Pcp.LayerStackIdentifier", "os.path.dirname", "pxr.Plug.Registry", "pxr.Sdf.Layer.GetLoadedLayers", "pxr.Sdf.Layer.FindOrOpen", "pxr.Pcp._TestChangeProcessor" ]
[((3357, 3372), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3370, 3372), False, 'import os, unittest\n'), ((2231, 2287), 'pxr.Sdf.Layer.FindOrOpen', 'Sdf.Layer.FindOrOpen', (['"""root.testpcpstreaminglayerreload"""'], {}), "('root.testpcpstreaminglayerreload')\n", (2251, 2287), False, 'from pxr import Sdf, Pcp, Plug, Vt\n'), ((2929, 2962), 'pxr.Sdf.Layer.FindOrOpen', 'Sdf.Layer.FindOrOpen', (['"""asset.sdf"""'], {}), "('asset.sdf')\n", (2949, 2962), False, 'from pxr import Sdf, Pcp, Plug, Vt\n'), ((1299, 1324), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1314, 1324), False, 'import os, unittest\n'), ((1992, 2019), 'pxr.Sdf.Layer.GetLoadedLayers', 'Sdf.Layer.GetLoadedLayers', ([], {}), '()\n', (2017, 2019), False, 'from pxr import Sdf, Pcp, Plug, Vt\n'), ((2089, 2124), 'pxr.Pcp.LayerStackIdentifier', 'Pcp.LayerStackIdentifier', (['rootLayer'], {}), '(rootLayer)\n', (2113, 2124), False, 'from pxr import Sdf, Pcp, Plug, Vt\n'), ((3012, 3043), 'pxr.Pcp._TestChangeProcessor', 'Pcp._TestChangeProcessor', (['cache'], {}), '(cache)\n', (3036, 3043), False, 'from pxr import Sdf, Pcp, Plug, Vt\n'), ((1669, 1684), 'pxr.Plug.Registry', 'Plug.Registry', ([], {}), '()\n', (1682, 1684), False, 'from pxr import Sdf, Pcp, Plug, Vt\n')]
from django import template from django.conf import settings from pettycash.models import PettycashBalanceCache from django.core.exceptions import ObjectDoesNotExist from django.utils import timezone from datetime import datetime, timedelta register = template.Library() @register.filter(name="has_group") def has_group(user, group_name): return user.groups.filter(name=group_name).exists() @register.filter(name="isMainsAdmin") def isMainsAdmin(user): return user.groups.filter(name=settings.SENSOR_USER_GROUP).exists() @register.filter(name="isNetAdmin") def isNetAdmin(user): return user.groups.filter(name=settings.NETADMIN_USER_GROUP).exists() @register.filter(name="isPettycashUser") def isPettycashUser(user): if "PETTYCASH_DEMO_USER_GROUP" in globals(): return user.groups.filter(name=settings.PETTYCASH_DEMO_USER_GROUP).exists() # check if non-zero balance or if there was a transaction # in last PETTYCASH_NOUSE_DAYS days # try: b = PettycashBalanceCache.objects.get(owner=user) if b.balance.amount != 0: return True cutoff = datetime.now(tz=timezone.utc) - timedelta( days=settings.PETTYCASH_NOUSE_DAYS ) if b.last and b.last.date > cutoff: return True except ObjectDoesNotExist as e: pass return False @register.filter(name="isTreasurer") def isTreasurer(user): return ( user.is_privileged and user.groups.filter(name=settings.PETTYCASH_TREASURER_GROUP).exists() ) @register.filter(name="isPettycashAdmin") def isInPettycashAdmin(user): if user.is_privileged: return True return user.groups.filter(name=settings.PETTYCASH_ADMIN_GROUP).exists()
[ "django.template.Library", "datetime.datetime.now", "datetime.timedelta", "pettycash.models.PettycashBalanceCache.objects.get" ]
[((254, 272), 'django.template.Library', 'template.Library', ([], {}), '()\n', (270, 272), False, 'from django import template\n'), ((1002, 1047), 'pettycash.models.PettycashBalanceCache.objects.get', 'PettycashBalanceCache.objects.get', ([], {'owner': 'user'}), '(owner=user)\n', (1035, 1047), False, 'from pettycash.models import PettycashBalanceCache\n'), ((1124, 1153), 'datetime.datetime.now', 'datetime.now', ([], {'tz': 'timezone.utc'}), '(tz=timezone.utc)\n', (1136, 1153), False, 'from datetime import datetime, timedelta\n'), ((1156, 1201), 'datetime.timedelta', 'timedelta', ([], {'days': 'settings.PETTYCASH_NOUSE_DAYS'}), '(days=settings.PETTYCASH_NOUSE_DAYS)\n', (1165, 1201), False, 'from datetime import datetime, timedelta\n')]
import logging import numpy as np from bokeh import plotting from bokeh.layouts import gridplot L = logging.getLogger(__name__) def bokeh_plot(data, var_name, results, title, module, test_name): plot = bokeh_plot_var(data, var_name, results, title, module, test_name) return gridplot([[plot]], sizing_mode='fixed') def bokeh_plot_var(time, data, var_name, results, title, module, test_name): """ Method to plot QC results using Bokeh """ if module not in results or test_name not in results[module]: L.warning(f'No results for test {module}.{test_name} found') return qc_test = results[module][test_name] qc_pass = np.ma.masked_where(qc_test != 1, data) qc_suspect = np.ma.masked_where(qc_test != 3, data) qc_fail = np.ma.masked_where(qc_test != 4, data) qc_notrun = np.ma.masked_where(qc_test != 2, data) p1 = plotting.figure(x_axis_type="datetime", title=test_name + ' : ' + title) p1.grid.grid_line_alpha = 0.3 p1.xaxis.axis_label = 'Time' p1.yaxis.axis_label = 'Data' p1.line(time, data, legend_label='data', color='#A6CEE3') p1.circle(time, qc_notrun, size=2, legend_label='qc not run', color='gray', alpha=0.2) p1.circle(time, qc_pass, size=4, legend_label='qc pass', color='green', alpha=0.5) p1.circle(time, qc_suspect, size=4, legend_label='qc suspect', color='orange', alpha=0.7) p1.circle(time, qc_fail, size=6, legend_label='qc fail', color='red', alpha=1.0) p1.circle(time, qc_notrun, size=6, legend_label='qc not eval', color='gray', alpha=1.0) return p1 def bokeh_multi_plot(stream, results, title, **kwargs): kwargs = { **{ 'merge_tools': True, 'toolbar_location': 'below', 'sizing_mode': 'scale_width', 'plot_width': 600, 'plot_height': 200, 'ncols': 2 }, **kwargs } plots = list(bokeh_multi_var(stream, results, title)) return gridplot(plots, **kwargs) def bokeh_multi_var(stream, results, title): for vname, qcobj in results.items(): for modu, tests in qcobj.items(): for testname, testresults in tests.items(): plt = bokeh_plot_var(stream.time(), stream.data(vname), vname, qcobj, title, modu, testname) yield plt def bokeh_plot_collected_results(results, **kwargs): kwargs = { **{ 'merge_tools': True, 'toolbar_location': 'below', 'sizing_mode': 'scale_width', 'plot_width': 600, 'plot_height': 200, 'ncols': 2 }, **kwargs } plots = [] for r in results: if r.data.any() and r.results.any(): plots.append(bokeh_plot_collected_result(r)) return gridplot(plots, **kwargs) def bokeh_plot_collected_result(cr): title = f'{cr.stream_id}: {cr.package}.{cr.test}' p1 = plotting.figure(x_axis_type="datetime", title=title) p1.grid.grid_line_alpha = 0.3 p1.xaxis.axis_label = 'Time' p1.yaxis.axis_label = 'Data' qc_pass = np.ma.masked_where(cr.results != 1, cr.data) qc_suspect = np.ma.masked_where(cr.results != 3, cr.data) qc_fail = np.ma.masked_where(cr.results != 4, cr.data) qc_notrun = np.ma.masked_where(cr.results != 2, cr.data) p1.line(cr.tinp, cr.data, legend_label='data', color='#A6CEE3') p1.circle(cr.tinp, qc_notrun, size=3, legend_label='qc not run', color='gray', alpha=0.2) p1.circle(cr.tinp, qc_pass, size=4, legend_label='qc pass', color='green', alpha=0.5) p1.circle(cr.tinp, qc_suspect, size=4, legend_label='qc suspect', color='orange', alpha=0.7) p1.circle(cr.tinp, qc_fail, size=6, legend_label='qc fail', color='red', alpha=1.0) p1.circle(cr.tinp, qc_notrun, size=3, legend_label='qc not eval', color='gray', alpha=1.0) return p1
[ "bokeh.plotting.figure", "numpy.ma.masked_where", "logging.getLogger", "bokeh.layouts.gridplot" ]
[((102, 129), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (119, 129), False, 'import logging\n'), ((287, 326), 'bokeh.layouts.gridplot', 'gridplot', (['[[plot]]'], {'sizing_mode': '"""fixed"""'}), "([[plot]], sizing_mode='fixed')\n", (295, 326), False, 'from bokeh.layouts import gridplot\n'), ((664, 702), 'numpy.ma.masked_where', 'np.ma.masked_where', (['(qc_test != 1)', 'data'], {}), '(qc_test != 1, data)\n', (682, 702), True, 'import numpy as np\n'), ((720, 758), 'numpy.ma.masked_where', 'np.ma.masked_where', (['(qc_test != 3)', 'data'], {}), '(qc_test != 3, data)\n', (738, 758), True, 'import numpy as np\n'), ((773, 811), 'numpy.ma.masked_where', 'np.ma.masked_where', (['(qc_test != 4)', 'data'], {}), '(qc_test != 4, data)\n', (791, 811), True, 'import numpy as np\n'), ((828, 866), 'numpy.ma.masked_where', 'np.ma.masked_where', (['(qc_test != 2)', 'data'], {}), '(qc_test != 2, data)\n', (846, 866), True, 'import numpy as np\n'), ((877, 949), 'bokeh.plotting.figure', 'plotting.figure', ([], {'x_axis_type': '"""datetime"""', 'title': "(test_name + ' : ' + title)"}), "(x_axis_type='datetime', title=test_name + ' : ' + title)\n", (892, 949), False, 'from bokeh import plotting\n'), ((1970, 1995), 'bokeh.layouts.gridplot', 'gridplot', (['plots'], {}), '(plots, **kwargs)\n', (1978, 1995), False, 'from bokeh.layouts import gridplot\n'), ((2786, 2811), 'bokeh.layouts.gridplot', 'gridplot', (['plots'], {}), '(plots, **kwargs)\n', (2794, 2811), False, 'from bokeh.layouts import gridplot\n'), ((2914, 2966), 'bokeh.plotting.figure', 'plotting.figure', ([], {'x_axis_type': '"""datetime"""', 'title': 'title'}), "(x_axis_type='datetime', title=title)\n", (2929, 2966), False, 'from bokeh import plotting\n'), ((3081, 3125), 'numpy.ma.masked_where', 'np.ma.masked_where', (['(cr.results != 1)', 'cr.data'], {}), '(cr.results != 1, cr.data)\n', (3099, 3125), True, 'import numpy as np\n'), ((3143, 3187), 'numpy.ma.masked_where', 'np.ma.masked_where', (['(cr.results != 3)', 'cr.data'], {}), '(cr.results != 3, cr.data)\n', (3161, 3187), True, 'import numpy as np\n'), ((3202, 3246), 'numpy.ma.masked_where', 'np.ma.masked_where', (['(cr.results != 4)', 'cr.data'], {}), '(cr.results != 4, cr.data)\n', (3220, 3246), True, 'import numpy as np\n'), ((3263, 3307), 'numpy.ma.masked_where', 'np.ma.masked_where', (['(cr.results != 2)', 'cr.data'], {}), '(cr.results != 2, cr.data)\n', (3281, 3307), True, 'import numpy as np\n')]
""" byceps.services.user_group.dbmodels ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2021 <NAME> :License: Revised BSD (see `LICENSE` file for details) """ from datetime import datetime from typing import Optional from sqlalchemy.ext.associationproxy import association_proxy from ...database import db, generate_uuid from ...typing import PartyID, UserID from ...util.instances import ReprBuilder from ..user.dbmodels.user import User class UserGroup(db.Model): """A self-organized group of users.""" __tablename__ = 'user_groups' __table_args__ = ( db.UniqueConstraint('party_id', 'title'), ) id = db.Column(db.Uuid, default=generate_uuid, primary_key=True) party_id = db.Column(db.UnicodeText, db.ForeignKey('parties.id'), index=True, nullable=False) created_at = db.Column(db.DateTime, default=datetime.utcnow) creator_id = db.Column(db.Uuid, db.ForeignKey('users.id'), unique=True, nullable=False) creator = db.relationship(User) title = db.Column(db.UnicodeText, unique=True, nullable=False) description = db.Column(db.UnicodeText, nullable=True) members = association_proxy('memberships', 'user') def __init__( self, party_id: PartyID, creator_id: UserID, title: str, description: Optional[str] = None, ) -> None: self.party_id = party_id self.creator_id = creator_id self.title = title self.description = description @property def member_count(self) -> int: return len(self.members) def __repr__(self) -> str: return ReprBuilder(self) \ .add_with_lookup('party_id') \ .add_with_lookup('title') \ .build() class Membership(db.Model): """The assignment of a user to a user group. A user must not be a member of more than one group per party. """ __tablename__ = 'user_group_memberships' id = db.Column(db.Uuid, default=generate_uuid, primary_key=True) group_id = db.Column(db.Uuid, db.ForeignKey('user_groups.id')) group = db.relationship(UserGroup, collection_class=set, backref='memberships') user_id = db.Column(db.Uuid, db.ForeignKey('users.id')) user = db.relationship(User, backref='group_membership') created_at = db.Column(db.DateTime, default=datetime.utcnow) def __repr__(self) -> str: return ReprBuilder(self) \ .add_with_lookup('id') \ .add_with_lookup('group') \ .add_with_lookup('user') \ .build()
[ "sqlalchemy.ext.associationproxy.association_proxy" ]
[((1137, 1177), 'sqlalchemy.ext.associationproxy.association_proxy', 'association_proxy', (['"""memberships"""', '"""user"""'], {}), "('memberships', 'user')\n", (1154, 1177), False, 'from sqlalchemy.ext.associationproxy import association_proxy\n')]
""" The tool to check the availability or syntax of domain, IP or URL. :: ██████╗ ██╗ ██╗███████╗██╗ ██╗███╗ ██╗ ██████╗███████╗██████╗ ██╗ ███████╗ ██╔══██╗╚██╗ ██╔╝██╔════╝██║ ██║████╗ ██║██╔════╝██╔════╝██╔══██╗██║ ██╔════╝ ██████╔╝ ╚████╔╝ █████╗ ██║ ██║██╔██╗ ██║██║ █████╗ ██████╔╝██║ █████╗ ██╔═══╝ ╚██╔╝ ██╔══╝ ██║ ██║██║╚██╗██║██║ ██╔══╝ ██╔══██╗██║ ██╔══╝ ██║ ██║ ██║ ╚██████╔╝██║ ╚████║╚██████╗███████╗██████╔╝███████╗███████╗ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝ ╚═════╝╚══════╝╚═════╝ ╚══════╝╚══════╝ Provides the base of all CSV file-s migrators. Author: <NAME>, @funilrys, contactTATAfunilrysTODTODcom Special thanks: https://pyfunceble.github.io/#/special-thanks Contributors: https://pyfunceble.github.io/#/contributors Project link: https://github.com/funilrys/PyFunceble Project documentation: https://pyfunceble.readthedocs.io/en/dev/ Project homepage: https://pyfunceble.github.io/ License: :: Copyright 2017, 2018, 2019, 2020, 2021 <NAME> Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import csv import functools import tempfile from typing import List, Optional import PyFunceble.facility from PyFunceble.cli.migrators.base import MigratorBase from PyFunceble.cli.utils.stdout import print_single_line from PyFunceble.helpers.file import FileHelper class CSVFileMigratorBase(MigratorBase): """ Provides the base of all CSV file migrator classes. """ source_file: Optional[str] = None FIELDS: Optional[List[str]] = None TO_DELETE: Optional[List[str]] = None def ensure_source_file_is_given(func): # pylint: disable=no-self-argument """ Ensures that the source file is given before launching the decorated method. :raise RuntimeError: When the:code:`self.source_file` is not given. """ @functools.wraps(func) def wrapper(self, *args, **kwargs): if not isinstance(self.source_file, str): raise RuntimeError("<self.source_file> is not given.") return func(self, *args, **kwargs) # pylint: disable=not-callable return wrapper @ensure_source_file_is_given def migrate(self) -> "MigratorBase": """ Provides the migrator (itself). """ file_helper = FileHelper(self.source_file) if file_helper.exists(): with file_helper.open("r", encoding="utf-8") as file_stream: first_line = next(file_stream) if any(x in first_line for x in self.TO_DELETE): temp_destination = tempfile.NamedTemporaryFile( "a+", newline="", encoding="utf-8", delete=False ) file_handler = file_helper.open(newline="") reader = csv.DictReader(file_handler) writer = csv.DictWriter( temp_destination, fieldnames=[x for x in self.FIELDS if x not in self.TO_DELETE], ) writer.writeheader() keys_found = False for row in reader: row = dict(row) for key in self.TO_DELETE: if key in row: del row[key] keys_found = True if not keys_found: break writer.writerow(row) if self.print_action_to_stdout: print_single_line() temp_destination.seek(0) FileHelper(temp_destination.name).move(self.source_file) self.done = True def start(self) -> "MigratorBase": """ Starts the migration and everything related to it. 
""" PyFunceble.facility.Logger.info("Started migration.") self.migrate() PyFunceble.facility.Logger.info("Finished migration.") return self
[ "tempfile.NamedTemporaryFile", "PyFunceble.cli.utils.stdout.print_single_line", "csv.DictReader", "PyFunceble.helpers.file.FileHelper", "functools.wraps", "csv.DictWriter" ]
[((2457, 2478), 'functools.wraps', 'functools.wraps', (['func'], {}), '(func)\n', (2472, 2478), False, 'import functools\n'), ((2914, 2942), 'PyFunceble.helpers.file.FileHelper', 'FileHelper', (['self.source_file'], {}), '(self.source_file)\n', (2924, 2942), False, 'from PyFunceble.helpers.file import FileHelper\n'), ((3194, 3271), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', (['"""a+"""'], {'newline': '""""""', 'encoding': '"""utf-8"""', 'delete': '(False)'}), "('a+', newline='', encoding='utf-8', delete=False)\n", (3221, 3271), False, 'import tempfile\n'), ((3396, 3424), 'csv.DictReader', 'csv.DictReader', (['file_handler'], {}), '(file_handler)\n', (3410, 3424), False, 'import csv\n'), ((3450, 3550), 'csv.DictWriter', 'csv.DictWriter', (['temp_destination'], {'fieldnames': '[x for x in self.FIELDS if x not in self.TO_DELETE]'}), '(temp_destination, fieldnames=[x for x in self.FIELDS if x not in\n self.TO_DELETE])\n', (3464, 3550), False, 'import csv\n'), ((4112, 4131), 'PyFunceble.cli.utils.stdout.print_single_line', 'print_single_line', ([], {}), '()\n', (4129, 4131), False, 'from PyFunceble.cli.utils.stdout import print_single_line\n'), ((4191, 4224), 'PyFunceble.helpers.file.FileHelper', 'FileHelper', (['temp_destination.name'], {}), '(temp_destination.name)\n', (4201, 4224), False, 'from PyFunceble.helpers.file import FileHelper\n')]
import ctypes import ctypes.util from geneprog.cdefs import (cgp_data) def load_library(): libgp_path = ctypes.util.find_library('libgeneprog.0.dylib') libgp = ctypes.CDLL(str(libgp_path)) return libgp library = load_library() cgp_data.load(library)
[ "ctypes.util.find_library", "geneprog.cdefs.cgp_data.load" ]
[((243, 265), 'geneprog.cdefs.cgp_data.load', 'cgp_data.load', (['library'], {}), '(library)\n', (256, 265), False, 'from geneprog.cdefs import cgp_data\n'), ((110, 157), 'ctypes.util.find_library', 'ctypes.util.find_library', (['"""libgeneprog.0.dylib"""'], {}), "('libgeneprog.0.dylib')\n", (134, 157), False, 'import ctypes\n')]
# Python version of cmdlib.sh """ Houses helper code for python based coreos-assembler commands. """ import hashlib import json import os import shutil import subprocess import sys import tempfile import gi from botocore.exceptions import ( ConnectionClosedError, ConnectTimeoutError, IncompleteReadError, ReadTimeoutError) from tenacity import ( stop_after_delay, stop_after_attempt, retry_if_exception_type) gi.require_version("RpmOstree", "1.0") from gi.repository import RpmOstree from datetime import datetime, timezone retry_stop = (stop_after_delay(10) | stop_after_attempt(5)) retry_boto_exception = (retry_if_exception_type(ConnectionClosedError) | retry_if_exception_type(ConnectTimeoutError) | retry_if_exception_type(IncompleteReadError) | retry_if_exception_type(ReadTimeoutError)) def retry_callback(retry_state): print(f"Retrying after {retry_state.outcome.exception()}") def run_verbose(args, **kwargs): """ Prints out the command being executed before executing a subprocess call. :param args: All non-keyword arguments :type args: list :param kwargs: All keyword arguments :type kwargs: dict :raises: CalledProcessError """ print("+ {}".format(subprocess.list2cmdline(args))) # default to throwing exception if 'check' not in kwargs.keys(): kwargs['check'] = True # capture_output is only on python 3.7+. Provide convenience here # until 3.7 is a baseline: if kwargs.pop('capture_output', False): kwargs['stdout'] = subprocess.PIPE kwargs['stderr'] = subprocess.PIPE try: process = subprocess.run(args, **kwargs) except subprocess.CalledProcessError: fatal("Error running command " + args[0]) return process def write_json(path, data): """ Shortcut for writing a structure as json to the file system. :param path: The full path to the file to write :type: path: str :param data: structure to write out as json :type data: dict or list :raises: ValueError, OSError """ dn = os.path.dirname(path) f = tempfile.NamedTemporaryFile(mode='w', dir=dn, delete=False) json.dump(data, f, indent=4) os.fchmod(f.file.fileno(), 0o644) shutil.move(f.name, path) def load_json(path): """ Shortcut for loading json from a file path. :param path: The full path to the file :type: path: str :returns: loaded json :rtype: dict :raises: IOError, ValueError """ with open(path) as f: return json.load(f) def sha256sum_file(path): """ Calculates the sha256 sum from a path. :param path: The full path to the file :type: path: str :returns: The calculated sha256 sum :type: str """ h = hashlib.sha256() with open(path, 'rb', buffering=0) as f: for b in iter(lambda: f.read(128 * 1024), b''): h.update(b) return h.hexdigest() def fatal(msg): """ Prints fatal error messages and exits execution. :param msg: The message to show to output :type msg: str :raises: SystemExit """ raise SystemExit(msg) def info(msg): """ Prints info messages. :param msg: The message to show to output :type msg: str """ sys.stderr.write(f"info: {msg}") def rfc3339_time(t=None): """ Produces a rfc3339 compliant time string. :param t: The full path to the file :type: t: datetime.datetime :returns: a rfc3339 compliant time string :rtype: str """ if t is None: t = datetime.utcnow() else: # if the need arises, we can convert to UTC, but let's just enforce # this doesn't slip by for now assert t.tzname() == 'UTC', "Timestamp must be in UTC format" return t.strftime("%Y-%m-%dT%H:%M:%SZ") def rm_allow_noent(path): """ Removes a file but doesn't error if the file does not exist. :param path: The full path to the file :type: path: str """ try: os.unlink(path) except FileNotFoundError: pass def import_ostree_commit(repo, commit, tarfile, force=False): # create repo in case e.g. tmp/ was cleared out; idempotent subprocess.check_call(['ostree', 'init', '--repo', repo, '--mode=archive']) # in the common case where we're operating on a recent build, the OSTree # commit should already be in the tmprepo commitpartial = os.path.join(repo, f'state/{commit}.commitpartial') if (subprocess.call(['ostree', 'show', '--repo', repo, commit], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) == 0 and not os.path.isfile(commitpartial) and not force): return # extract in a new tmpdir inside the repo itself so we can still hardlink with tempfile.TemporaryDirectory(dir=repo) as d: subprocess.check_call(['tar', '-C', d, '-xf', tarfile]) subprocess.check_call(['ostree', 'pull-local', '--repo', repo, d, commit]) def get_basearch(): try: return get_basearch.saved except AttributeError: get_basearch.saved = RpmOstree.get_basearch() return get_basearch.saved def parse_date_string(date_string): """ Parses the date strings expected from the build system. Returned datetime instances will be in utc. :param date_string: string to turn into date. Format: %Y-%m-%dT%H:%M:%SZ :type date_string: str :returns: datetime instance from the date string :rtype: datetime.datetime :raises: ValueError, TypeError """ dt = datetime.strptime(date_string, '%Y-%m-%dT%H:%M:%SZ') return dt.replace(tzinfo=timezone.utc) def get_timestamp(entry): # ignore dirs missing meta.json meta_file = os.path.join(entry.path, 'meta.json') if not os.path.isfile(meta_file): print(f"Ignoring directory {entry.name}") return None # collect dirs and timestamps with open(meta_file) as f: j = json.load(f) # Older versions only had ostree-timestamp ts = j.get('coreos-assembler.build-timestamp') or j['ostree-timestamp'] return parse_date_string(ts) def image_info(image): try: out = json.loads(run_verbose( ['qemu-img', 'info', '--output=json', image], capture_output=True).stdout ) # Fixed VPC/VHD v1 disks are really raw images with a VHD footer. # The VHD footer uses 'conectix' as the identify in first 8 bytes # of the last 512 bytes. Sadly, 'qemu-img' does not identify it # properly. if out.get("format") == "raw": with open(image, 'rb') as imgf: imgf.seek(-512, os.SEEK_END) data = imgf.read(8) if data == b"conectix": out['format'] = "vpc" out['submformat'] = "fixed" return out except Exception as e: raise Exception(f"failed to inspect {image} with qemu", e)
[ "tenacity.stop_after_attempt", "os.unlink", "subprocess.list2cmdline", "datetime.datetime.utcnow", "os.path.isfile", "os.path.join", "gi.repository.RpmOstree.get_basearch", "subprocess.check_call", "tempfile.TemporaryDirectory", "os.path.dirname", "hashlib.sha256", "json.dump", "tenacity.retry_if_exception_type", "tenacity.stop_after_delay", "datetime.datetime.strptime", "subprocess.call", "tempfile.NamedTemporaryFile", "gi.require_version", "subprocess.run", "json.load", "shutil.move", "sys.stderr.write" ]
[((433, 471), 'gi.require_version', 'gi.require_version', (['"RpmOstree"', '"1.0"'], {}), "('RpmOstree', '1.0')\n", (451, 471), False, 'import gi\n'), ((564, 584), 'tenacity.stop_after_delay', 'stop_after_delay', (['(10)'], {}), '(10)\n', (580, 584), False, 'from tenacity import stop_after_delay, stop_after_attempt, retry_if_exception_type\n'), ((587, 608), 'tenacity.stop_after_attempt', 'stop_after_attempt', (['(5)'], {}), '(5)\n', (605, 608), False, 'from tenacity import stop_after_delay, stop_after_attempt, retry_if_exception_type\n'), ((843, 884), 'tenacity.retry_if_exception_type', 'retry_if_exception_type', (['ReadTimeoutError'], {}), '(ReadTimeoutError)\n', (866, 884), False, 'from tenacity import stop_after_delay, stop_after_attempt, retry_if_exception_type\n'), ((2141, 2162), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (2156, 2162), False, 'import os\n'), ((2171, 2230), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'mode': '"""w"""', 'dir': 'dn', 'delete': '(False)'}), "(mode='w', dir=dn, delete=False)\n", (2198, 2230), False, 'import tempfile\n'), ((2235, 2263), 'json.dump', 'json.dump', (['data', 'f'], {'indent': '(4)'}), '(data, f, indent=4)\n', (2244, 2263), False, 'import json\n'), ((2306, 2331), 'shutil.move', 'shutil.move', (['f.name', 'path'], {}), '(f.name, path)\n', (2317, 2331), False, 'import shutil\n'), ((2829, 2845), 'hashlib.sha256', 'hashlib.sha256', ([], {}), '()\n', (2843, 2845), False, 'import hashlib\n'), ((3328, 3360), 'sys.stderr.write', 'sys.stderr.write', (['f"""info: {msg}"""'], {}), "(f'info: {msg}')\n", (3344, 3360), False, 'import sys\n'), ((4255, 4330), 'subprocess.check_call', 'subprocess.check_call', (["['ostree', 'init', '--repo', repo, '--mode=archive']"], {}), "(['ostree', 'init', '--repo', repo, '--mode=archive'])\n", (4276, 4330), False, 'import subprocess\n'), ((4475, 4526), 'os.path.join', 'os.path.join', (['repo', 'f"""state/{commit}.commitpartial"""'], {}), "(repo, f'state/{commit}.commitpartial')\n", (4487, 4526), False, 'import os\n'), ((5678, 5730), 'datetime.datetime.strptime', 'datetime.strptime', (['date_string', '"""%Y-%m-%dT%H:%M:%SZ"""'], {}), "(date_string, '%Y-%m-%dT%H:%M:%SZ')\n", (5695, 5730), False, 'from datetime import datetime, timezone\n'), ((5855, 5892), 'os.path.join', 'os.path.join', (['entry.path', '"""meta.json"""'], {}), "(entry.path, 'meta.json')\n", (5867, 5892), False, 'import os\n'), ((774, 818), 'tenacity.retry_if_exception_type', 'retry_if_exception_type', (['IncompleteReadError'], {}), '(IncompleteReadError)\n', (797, 818), False, 'from tenacity import stop_after_delay, stop_after_attempt, retry_if_exception_type\n'), ((1694, 1724), 'subprocess.run', 'subprocess.run', (['args'], {}), '(args, **kwargs)\n', (1708, 1724), False, 'import subprocess\n'), ((2601, 2613), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2610, 2613), False, 'import json\n'), ((3616, 3633), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (3631, 3633), False, 'from datetime import datetime, timezone\n'), ((4064, 4079), 'os.unlink', 'os.unlink', (['path'], {}), '(path)\n', (4073, 4079), False, 'import os\n'), ((4883, 4920), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {'dir': 'repo'}), '(dir=repo)\n', (4910, 4920), False, 'import tempfile\n'), ((4935, 4990), 'subprocess.check_call', 'subprocess.check_call', (["['tar', '-C', d, '-xf', tarfile]"], {}), "(['tar', '-C', d, '-xf', tarfile])\n", (4956, 4990), False, 'import subprocess\n'), ((4999, 5073), 'subprocess.check_call', 'subprocess.check_call', (["['ostree', 'pull-local', '--repo', repo, d, commit]"], {}), "(['ostree', 'pull-local', '--repo', repo, d, commit])\n", (5020, 5073), False, 'import subprocess\n'), ((5904, 5929), 'os.path.isfile', 'os.path.isfile', (['meta_file'], {}), '(meta_file)\n', (5918, 5929), False, 'import os\n'), ((6079, 6091), 'json.load', 'json.load', (['f'], {}), '(f)\n', (6088, 6091), False, 'import json\n'), ((634, 680), 'tenacity.retry_if_exception_type', 'retry_if_exception_type', (['ConnectionClosedError'], {}), '(ConnectionClosedError)\n', (657, 680), False, 'from tenacity import stop_after_delay, stop_after_attempt, retry_if_exception_type\n'), ((705, 749), 'tenacity.retry_if_exception_type', 'retry_if_exception_type', (['ConnectTimeoutError'], {}), '(ConnectTimeoutError)\n', (728, 749), False, 'from tenacity import stop_after_delay, stop_after_attempt, retry_if_exception_type\n'), ((1298, 1327), 'subprocess.list2cmdline', 'subprocess.list2cmdline', (['args'], {}), '(args)\n', (1321, 1327), False, 'import subprocess\n'), ((4535, 4653), 'subprocess.call', 'subprocess.call', (["['ostree', 'show', '--repo', repo, commit]"], {'stdout': 'subprocess.DEVNULL', 'stderr': 'subprocess.DEVNULL'}), "(['ostree', 'show', '--repo', repo, commit], stdout=\n subprocess.DEVNULL, stderr=subprocess.DEVNULL)\n", (4550, 4653), False, 'import subprocess\n'), ((4722, 4751), 'os.path.isfile', 'os.path.isfile', (['commitpartial'], {}), '(commitpartial)\n', (4736, 4751), False, 'import os\n'), ((5226, 5250), 'gi.repository.RpmOstree.get_basearch', 'RpmOstree.get_basearch', ([], {}), '()\n', (5248, 5250), False, 'from gi.repository import RpmOstree\n')]
from sh import riscv64_unknown_elf_gcc as gcc
from sh import riscv64_unknown_elf_objdump as objdump
from sh import rm
import re

gcc("test.S", "-c", "-o", "test.o", "-march=rv32ima", "-mabi=ilp32")

for line in objdump("-d", "test.o"):
    m = re.match(r"^\s+([0-9a-f]+):\s+([0-9a-f]+)\s+(.*)$", line)
    if m:
        offset = int(m.group(1), 16)
        code = int(m.group(2), 16)
        assembly = m.group(3)

        bin_string = ""
        for index, bin_digit in enumerate("{:032b}".format(code)[::-1]):
            if index % 4 == 0 and index != 0:
                bin_string += "_"
            bin_string += bin_digit
        bin_string = bin_string[::-1]

        d = "{:032b}".format(code)[::-1]
        r_type = "_".join((d[0:7], d[7:12], d[12:15], d[15:20], d[20:25], d[25:32]))[::-1]
        u_type = "_".join((d[0:7], d[7:12], d[12:32]))[::-1]

        print("r: 0b{} u: 0b{} {}".format(r_type, u_type, assembly))

rm("test.o")
[ "sh.rm", "sh.riscv64_unknown_elf_gcc", "sh.riscv64_unknown_elf_objdump", "re.match" ]
[((132, 200), 'sh.riscv64_unknown_elf_gcc', 'gcc', (['"""test.S"""', '"""-c"""', '"""-o"""', '"""test.o"""', '"""-march=rv32ima"""', '"""-mabi=ilp32"""'], {}), "('test.S', '-c', '-o', 'test.o', '-march=rv32ima', '-mabi=ilp32')\n", (135, 200), True, 'from sh import riscv64_unknown_elf_gcc as gcc\n'), ((214, 237), 'sh.riscv64_unknown_elf_objdump', 'objdump', (['"""-d"""', '"""test.o"""'], {}), "('-d', 'test.o')\n", (221, 237), True, 'from sh import riscv64_unknown_elf_objdump as objdump\n'), ((939, 951), 'sh.rm', 'rm', (['"""test.o"""'], {}), "('test.o')\n", (941, 951), False, 'from sh import rm\n'), ((247, 306), 're.match', 're.match', (['"""^\\\\s+([0-9a-f]+):\\\\s+([0-9a-f]+)\\\\s+(.*)$"""', 'line'], {}), "('^\\\\s+([0-9a-f]+):\\\\s+([0-9a-f]+)\\\\s+(.*)$', line)\n", (255, 306), False, 'import re\n')]
import os from qtpy import QtCore as QC from qtpy import QtGui as QG from qtpy import QtWidgets as QW from hydrus.core import HydrusConstants as HC from hydrus.core import HydrusGlobals as HG from hydrus.core import HydrusPaths from hydrus.core import HydrusText from hydrus.client import ClientExporting from hydrus.client.gui import ClientGUIFunctions from hydrus.client.gui import QtPorting as QP # we do this because some programs like discord will disallow exports with additional custom mimetypes (like 'application/hydrus-files') # as this is only ever an internal transfer, and as the python mimedata object is preserved through the dnd, we can just tack this info on with a subclass and python variables class QMimeDataHydrusFiles( QC.QMimeData ): def __init__( self ): QC.QMimeData.__init__( self ) self._hydrus_files = None def hydrusFiles( self ): return self._hydrus_files def setHydrusFiles( self, page_key, hashes ): self._hydrus_files = ( page_key, hashes ) def DoFileExportDragDrop( window, page_key, media, alt_down ): drop_source = QG.QDrag( window ) data_object = QMimeDataHydrusFiles() # new_options = HG.client_controller.new_options do_secret_discord_dnd_fix = new_options.GetBoolean( 'secret_discord_dnd_fix' ) and alt_down # client_files_manager = HG.client_controller.client_files_manager original_paths = [] media_and_original_paths = [] total_size = 0 for m in media: hash = m.GetHash() mime = m.GetMime() total_size += m.GetSize() original_path = client_files_manager.GetFilePath( hash, mime, check_file_exists = False ) original_paths.append( original_path ) media_and_original_paths.append( ( m, original_path ) ) # discord_dnd_fix_possible = new_options.GetBoolean( 'discord_dnd_fix' ) and len( original_paths ) <= 50 and total_size < 200 * 1048576 temp_dir = HG.client_controller.temp_dir if do_secret_discord_dnd_fix: dnd_paths = original_paths flags = QC.Qt.MoveAction elif discord_dnd_fix_possible and os.path.exists( temp_dir ): fallback_filename_terms = ClientExporting.ParseExportPhrase( '{hash}' ) try: filename_pattern = new_options.GetString( 'discord_dnd_filename_pattern' ) filename_terms = ClientExporting.ParseExportPhrase( filename_pattern ) if len( filename_terms ) == 0: raise Exception() except: filename_terms = fallback_filename_terms dnd_paths = [] for ( m, original_path ) in media_and_original_paths: filename = ClientExporting.GenerateExportFilename( temp_dir, m, filename_terms ) if filename == HC.mime_ext_lookup[ m.GetMime() ]: filename = ClientExporting.GenerateExportFilename( temp_dir, m, fallback_filename_terms ) dnd_path = os.path.join( temp_dir, filename ) if not os.path.exists( dnd_path ): HydrusPaths.MirrorFile( original_path, dnd_path ) dnd_paths.append( dnd_path ) flags = QC.Qt.MoveAction | QC.Qt.CopyAction else: dnd_paths = original_paths flags = QC.Qt.CopyAction uri_list = [] for path in dnd_paths: uri_list.append( QC.QUrl.fromLocalFile( path ) ) data_object.setUrls( uri_list ) # hashes = [ m.GetHash() for m in media ] data_object.setHydrusFiles( page_key, hashes ) # old way of doing this that makes some external programs (discord) reject it ''' if page_key is None: encoded_page_key = None else: encoded_page_key = page_key.hex() data_obj = ( encoded_page_key, [ hash.hex() for hash in hashes ] ) data_str = json.dumps( data_obj ) data_bytes = bytes( data_str, 'utf-8' ) data_object.setData( 'application/hydrus-media', data_bytes ) ''' # drop_source.setMimeData( data_object ) result = drop_source.exec_( flags, QC.Qt.CopyAction ) return result class FileDropTarget( 
QC.QObject ): def __init__( self, parent, filenames_callable = None, url_callable = None, media_callable = None ): QC.QObject.__init__( self, parent ) self._parent = parent if parent: parent.setAcceptDrops( True ) self._filenames_callable = filenames_callable self._url_callable = url_callable self._media_callable = media_callable def eventFilter( self, object, event ): if event.type() == QC.QEvent.Drop: if self.OnDrop( event.pos().x(), event.pos().y() ): event.setDropAction( self.OnData( event.mimeData(), event.proposedAction() ) ) event.accept() elif event.type() == QC.QEvent.DragEnter: event.accept() return False def OnData( self, mime_data, result ): media_dnd = isinstance( mime_data, QMimeDataHydrusFiles ) urls_dnd = mime_data.hasUrls() text_dnd = mime_data.hasText() if media_dnd and self._media_callable is not None: result = mime_data.hydrusFiles() if result is not None: ( page_key, hashes ) = result if page_key is not None: QP.CallAfter( self._media_callable, page_key, hashes ) # callafter so we can terminate dnd event now result = QC.Qt.MoveAction # old way of doing it that messed up discord et al ''' elif mime_data.formats().count( 'application/hydrus-media' ) and self._media_callable is not None: mview = mime_data.data( 'application/hydrus-media' ) data_bytes = mview.data() data_str = str( data_bytes, 'utf-8' ) (encoded_page_key, encoded_hashes) = json.loads( data_str ) if encoded_page_key is not None: page_key = bytes.fromhex( encoded_page_key ) hashes = [ bytes.fromhex( encoded_hash ) for encoded_hash in encoded_hashes ] QP.CallAfter( self._media_callable, page_key, hashes ) # callafter so we can terminate dnd event now result = QC.Qt.MoveAction ''' elif urls_dnd or text_dnd: paths = [] urls = [] if urls_dnd: dnd_items = mime_data.urls() for dnd_item in dnd_items: if dnd_item.isLocalFile(): paths.append( os.path.normpath( dnd_item.toLocalFile() ) ) else: urls.append( dnd_item.url() ) else: text = mime_data.text() text_lines = HydrusText.DeserialiseNewlinedTexts( text ) for text_line in text_lines: if text_line.startswith( 'http' ): urls.append( text_line ) # ignore 'paths' if self._filenames_callable is not None: if len( paths ) > 0: QP.CallAfter( self._filenames_callable, paths ) # callafter to terminate dnd event now if self._url_callable is not None: if len( urls ) > 0: for url in urls: QP.CallAfter( self._url_callable, url ) # callafter to terminate dnd event now result = QC.Qt.IgnoreAction else: result = QC.Qt.IgnoreAction return result def OnDrop( self, x, y ): screen_position = ClientGUIFunctions.ClientToScreen( self._parent, QC.QPoint( x, y ) ) drop_tlw = QW.QApplication.topLevelAt( screen_position ) my_tlw = self._parent.window() if drop_tlw == my_tlw: return True else: return False # setting OnDragOver to return copy gives Linux trouble with page tab drops with shift held down
[ "hydrus.client.ClientExporting.GenerateExportFilename", "hydrus.core.HydrusText.DeserialiseNewlinedTexts", "hydrus.client.ClientExporting.ParseExportPhrase", "qtpy.QtGui.QDrag", "qtpy.QtCore.QObject.__init__", "qtpy.QtCore.QUrl.fromLocalFile", "os.path.exists", "hydrus.client.gui.QtPorting.CallAfter", "hydrus.core.HydrusPaths.MirrorFile", "qtpy.QtWidgets.QApplication.topLevelAt", "qtpy.QtCore.QPoint", "os.path.join", "qtpy.QtCore.QMimeData.__init__" ]
[((1191, 1207), 'qtpy.QtGui.QDrag', 'QG.QDrag', (['window'], {}), '(window)\n', (1199, 1207), True, 'from qtpy import QtGui as QG\n'), ((809, 836), 'qtpy.QtCore.QMimeData.__init__', 'QC.QMimeData.__init__', (['self'], {}), '(self)\n', (830, 836), True, 'from qtpy import QtCore as QC\n'), ((4863, 4896), 'qtpy.QtCore.QObject.__init__', 'QC.QObject.__init__', (['self', 'parent'], {}), '(self, parent)\n', (4882, 4896), True, 'from qtpy import QtCore as QC\n'), ((9410, 9453), 'qtpy.QtWidgets.QApplication.topLevelAt', 'QW.QApplication.topLevelAt', (['screen_position'], {}), '(screen_position)\n', (9436, 9453), True, 'from qtpy import QtWidgets as QW\n'), ((2339, 2363), 'os.path.exists', 'os.path.exists', (['temp_dir'], {}), '(temp_dir)\n', (2353, 2363), False, 'import os\n'), ((2410, 2453), 'hydrus.client.ClientExporting.ParseExportPhrase', 'ClientExporting.ParseExportPhrase', (['"""{hash}"""'], {}), "('{hash}')\n", (2443, 2453), False, 'from hydrus.client import ClientExporting\n'), ((3870, 3897), 'qtpy.QtCore.QUrl.fromLocalFile', 'QC.QUrl.fromLocalFile', (['path'], {}), '(path)\n', (3891, 3897), True, 'from qtpy import QtCore as QC\n'), ((9362, 9377), 'qtpy.QtCore.QPoint', 'QC.QPoint', (['x', 'y'], {}), '(x, y)\n', (9371, 9377), True, 'from qtpy import QtCore as QC\n'), ((2607, 2658), 'hydrus.client.ClientExporting.ParseExportPhrase', 'ClientExporting.ParseExportPhrase', (['filename_pattern'], {}), '(filename_pattern)\n', (2640, 2658), False, 'from hydrus.client import ClientExporting\n'), ((3032, 3099), 'hydrus.client.ClientExporting.GenerateExportFilename', 'ClientExporting.GenerateExportFilename', (['temp_dir', 'm', 'filename_terms'], {}), '(temp_dir, m, filename_terms)\n', (3070, 3099), False, 'from hydrus.client import ClientExporting\n'), ((3353, 3385), 'os.path.join', 'os.path.join', (['temp_dir', 'filename'], {}), '(temp_dir, filename)\n', (3365, 3385), False, 'import os\n'), ((3221, 3297), 'hydrus.client.ClientExporting.GenerateExportFilename', 'ClientExporting.GenerateExportFilename', (['temp_dir', 'm', 'fallback_filename_terms'], {}), '(temp_dir, m, fallback_filename_terms)\n', (3259, 3297), False, 'from hydrus.client import ClientExporting\n'), ((3420, 3444), 'os.path.exists', 'os.path.exists', (['dnd_path'], {}), '(dnd_path)\n', (3434, 3444), False, 'import os\n'), ((3481, 3528), 'hydrus.core.HydrusPaths.MirrorFile', 'HydrusPaths.MirrorFile', (['original_path', 'dnd_path'], {}), '(original_path, dnd_path)\n', (3503, 3528), False, 'from hydrus.core import HydrusPaths\n'), ((6241, 6293), 'hydrus.client.gui.QtPorting.CallAfter', 'QP.CallAfter', (['self._media_callable', 'page_key', 'hashes'], {}), '(self._media_callable, page_key, hashes)\n', (6253, 6293), True, 'from hydrus.client.gui import QtPorting as QP\n'), ((8024, 8065), 'hydrus.core.HydrusText.DeserialiseNewlinedTexts', 'HydrusText.DeserialiseNewlinedTexts', (['text'], {}), '(text)\n', (8059, 8065), False, 'from hydrus.core import HydrusText\n'), ((8570, 8615), 'hydrus.client.gui.QtPorting.CallAfter', 'QP.CallAfter', (['self._filenames_callable', 'paths'], {}), '(self._filenames_callable, paths)\n', (8582, 8615), True, 'from hydrus.client.gui import QtPorting as QP\n'), ((8915, 8952), 'hydrus.client.gui.QtPorting.CallAfter', 'QP.CallAfter', (['self._url_callable', 'url'], {}), '(self._url_callable, url)\n', (8927, 8952), True, 'from hydrus.client.gui import QtPorting as QP\n')]
#!/usr/bin/env python3

import h5py
import numpy
from numpy import sin, cos, pi, degrees

from ext import hdf5handler
from matplotlib import pyplot as plt

#MKS
G = 6.67384e-11 # m^3 kg^-1 s^-2
MSun = 1.9891e30 # kg^1
AU = 149597870700 # m^1
DAY = 3600*24 # s^1
YEAR = DAY*365.25 # s^1

def rk4(h, f, t, z):
    k1 = h * f(t, z)
    k2 = h * f(t + 0.5*h, z + 0.5*k1)
    k3 = h * f(t + 0.5*h, z + 0.5*k2)
    k4 = h * f(t + h, z + k3)
    return z + (k1 + 2*(k2 + k3) + k4)/6.0

def forward_euler(h, f, t, z):
    return z + h*f(t, z)

def twobody_vmass(t, state):
    a, e, f, w, M = state
    n = (G*M/a**3)**(1/2) #mean motion

    dM = -1e-5*MSun/YEAR #implement here?
    da = -a*(1 + e**2 + 2*e*cos(f)) / (1-e**2) * dM/M
    de = -(e+cos(f)) * dM/M
    dw = -sin(f) / e * dM/M
    df = -dw + n*(1+e*cos(f))**2 / ((1 - e**2)**(3/2))
    return numpy.array([da, de, df, dw, dM])

def main():
    #todo:energy, ang_mom, position, etc.
    dt = 1*YEAR

    M0 = 1*MSun
    a0 = 100*AU
    e0 = 0.9
    f0 = 0
    w0 = 0
    state = numpy.array([a0, e0, f0, w0, M0])

    with hdf5handler.HDF5Handler('test.hdf5') as handle:
        for method in [rk4]:
            handle.prefix = method.__name__
            for t in numpy.arange(0, 5e4*YEAR, dt):
                print("t={:.3f} year".format(t/YEAR))
                state = method(dt, twobody_vmass, t, state)
                handle.put(t, '/time')
                handle.put(state[0], '/a')
                handle.put(state[1], '/e')
                handle.put(state[2], '/f')
                handle.put(state[3], '/w')
                handle.put(state[4], '/mass')

    f = h5py.File('test.hdf5')

    fig = plt.figure()
    ax1 = fig.add_subplot(221)
    ax2 = fig.add_subplot(222)
    ax3 = fig.add_subplot(223)
    ax4 = fig.add_subplot(224)

    for method in ['rk4']:
        ax1.plot(f[method+'/time'].value/YEAR , f[method+'/a'].value / AU)
        ax2.plot(f[method+'/time'].value/YEAR , f[method+'/e'])
        ax3.plot(f[method+'/time'].value/YEAR , f[method+'/f'].value % (2*pi))
        ax4.plot(f[method+'/time'].value/YEAR , f[method+'/mass'].value / MSun)
    plt.savefig('image.png')

    fig = plt.figure()
    ax = fig.add_subplot(111,polar=True)
    for method in ['rk4']:
        ax.plot(f[method+'/f'].value %(2*pi), f[method+'/time'].value /YEAR )
    plt.savefig('image1.png')

    fig = plt.figure()
    ax = fig.add_subplot(111,polar=True)
    for method in ['rk4']:
        ax.plot(f[method+'/w'].value %(2*pi), f[method+'/time'].value /YEAR )
    plt.savefig('image2.png')

if __name__ == "__main__":
    main()
[ "h5py.File", "ext.hdf5handler.HDF5Handler", "matplotlib.pyplot.figure", "numpy.sin", "numpy.array", "numpy.arange", "numpy.cos", "matplotlib.pyplot.savefig" ]
[((881, 914), 'numpy.array', 'numpy.array', (['[da, de, df, dw, dM]'], {}), '([da, de, df, dw, dM])\n', (892, 914), False, 'import numpy\n'), ((1067, 1100), 'numpy.array', 'numpy.array', (['[a0, e0, f0, w0, M0]'], {}), '([a0, e0, f0, w0, M0])\n', (1078, 1100), False, 'import numpy\n'), ((1664, 1686), 'h5py.File', 'h5py.File', (['"""test.hdf5"""'], {}), "('test.hdf5')\n", (1673, 1686), False, 'import h5py\n'), ((1697, 1709), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1707, 1709), True, 'from matplotlib import pyplot as plt\n'), ((2165, 2189), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""image.png"""'], {}), "('image.png')\n", (2176, 2189), True, 'from matplotlib import pyplot as plt\n'), ((2202, 2214), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2212, 2214), True, 'from matplotlib import pyplot as plt\n'), ((2365, 2390), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""image1.png"""'], {}), "('image1.png')\n", (2376, 2390), True, 'from matplotlib import pyplot as plt\n'), ((2403, 2415), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2413, 2415), True, 'from matplotlib import pyplot as plt\n'), ((2566, 2591), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""image2.png"""'], {}), "('image2.png')\n", (2577, 2591), True, 'from matplotlib import pyplot as plt\n'), ((1110, 1146), 'ext.hdf5handler.HDF5Handler', 'hdf5handler.HDF5Handler', (['"""test.hdf5"""'], {}), "('test.hdf5')\n", (1133, 1146), False, 'from ext import hdf5handler\n'), ((1252, 1287), 'numpy.arange', 'numpy.arange', (['(0)', '(50000.0 * YEAR)', 'dt'], {}), '(0, 50000.0 * YEAR, dt)\n', (1264, 1287), False, 'import numpy\n'), ((770, 776), 'numpy.cos', 'cos', (['f'], {}), '(f)\n', (773, 776), False, 'from numpy import sin, cos, pi, degrees\n'), ((796, 802), 'numpy.sin', 'sin', (['f'], {}), '(f)\n', (799, 802), False, 'from numpy import sin, cos, pi, degrees\n'), ((730, 736), 'numpy.cos', 'cos', (['f'], {}), '(f)\n', (733, 736), False, 'from numpy import sin, cos, pi, degrees\n'), ((837, 843), 'numpy.cos', 'cos', (['f'], {}), '(f)\n', (840, 843), False, 'from numpy import sin, cos, pi, degrees\n')]
#!/usr/bin/env python
from nose.tools import assert_equal

from mediagoblin import processing

class TestProcessing(object):
    def run_fill(self, input, format, output=None):
        builder = processing.FilenameBuilder(input)
        result = builder.fill(format)
        if output is None:
            return result
        assert_equal(output, result)

    def test_easy_filename_fill(self):
        self.run_fill('/home/user/foo.TXT', '{basename}bar{ext}', 'foobar.txt')

    def test_long_filename_fill(self):
        self.run_fill('{0}.png'.format('A' * 300),
                      'image-{basename}{ext}',
                      'image-{0}.png'.format('A' * 245))
[ "nose.tools.assert_equal", "mediagoblin.processing.FilenameBuilder" ]
[((196, 229), 'mediagoblin.processing.FilenameBuilder', 'processing.FilenameBuilder', (['input'], {}), '(input)\n', (222, 229), False, 'from mediagoblin import processing\n'), ((329, 357), 'nose.tools.assert_equal', 'assert_equal', (['output', 'result'], {}), '(output, result)\n', (341, 357), False, 'from nose.tools import assert_equal\n')]
#!/usr/bin/env python3

import math

import shm

from mission.framework.movement import Depth, Heading, Pitch, VelocityX, VelocityY
from mission.framework.primitive import Zero, Log, FunctionTask, Fail
from mission.framework.task import Task
from mission.framework.timing import Timer, Timed

'''
Oh no you're using jank.

Usage:
Mini Sub: roughly tracks sub position in undefined units while passed task is run
Main Sub: please dont im begging you. we have a dvl for a reason
'''

class TrackMovementY(Task):
    def on_first_run(self, task, startPos=0, *args, **kwargs):
        self.vel_add = float(startPos)

    def on_run(self, task, *args, **kwargs):
        task()
        self.vel_add += float(shm.desires.sway_speed.get())
        shm.jank_pos.y.set(self.vel_add)
        if task.finished:
            self.finish()

class TrackMovementX(Task):
    def on_first_run(self, task, startPos=0, *args, **kwargs):
        self.vel_add = float(startPos)

    def on_run(self, task, *args, **kwargs):
        task()
        self.vel_add += float(shm.desires.speed.get())
        shm.jank_pos.x.set(self.vel_add)
        if task.finished:
            self.finish()

class TrackMovementXY(Task):
    def on_first_run(self, task, startPosX=0, startPosY=0, *args, **kwargs):
        self.vel_add_y = float(startPosY)
        self.vel_add_x = float(startPosX)

    def on_run(self, task, *args, **kwargs):
        task()
        self.vel_add_x += float(shm.desires.speed.get())
        self.vel_add_y += float(shm.desires.sway_speed.get())
        shm.jank_pos.x.set(self.vel_add_x)
        shm.jank_pos.y.set(self.vel_add_y)
        if task.finished:
            self.finish()

class RestorePosY(Task):
    def on_first_run(self, vel, *args, **kwargs):
        if shm.jank_pos.y.get() < 0:
            self.task = VelocityY(vel)
        else:
            self.task = VelocityY(vel * -1)

    def on_run(self, vel, *args, **kwargs):
        if abs(shm.jank_pos.y.get()) > vel :
            self.task()
        else:
            self.logi("Y pos restored, I hope")
            Zero()
            self.finish()

class RestorePosX(Task):
    def on_first_run(self, vel, *args, **kwargs):
        if shm.jank_pos.x.get() < 0:
            self.task = VelocityX(vel)
        else:
            self.task = VelocityX(vel * -1)

    def on_run(self, vel, *args, **kwargs):
        if abs(shm.jank_pos.x.get()) > vel :
            self.task()
        else:
            self.logi("X pos restored, I hope")
            Zero()
            self.finish()

#just to test stuff
def testRightRestore():
    return TrackMovementY(Sequential(
        Timed(VelocityY(.4), 6),
        Log('Restoring?'),
        RestorePosY(.2),
        Zero(),
    ))

def testLeftRestore():
    return TrackMovementY(Sequential(
        Timed(VelocityY(-.4), 6),
        Log('Restoring?'),
        RestorePosY(.2),
        Zero(),
    ))
[ "shm.desires.sway_speed.get", "mission.framework.primitive.Zero", "mission.framework.movement.VelocityX", "shm.jank_pos.x.get", "shm.jank_pos.y.get", "shm.jank_pos.x.set", "shm.desires.speed.get", "shm.jank_pos.y.set", "mission.framework.primitive.Log", "mission.framework.movement.VelocityY" ]
[((733, 765), 'shm.jank_pos.y.set', 'shm.jank_pos.y.set', (['self.vel_add'], {}), '(self.vel_add)\n', (751, 765), False, 'import shm\n'), ((1059, 1091), 'shm.jank_pos.x.set', 'shm.jank_pos.x.set', (['self.vel_add'], {}), '(self.vel_add)\n', (1077, 1091), False, 'import shm\n'), ((1505, 1539), 'shm.jank_pos.x.set', 'shm.jank_pos.x.set', (['self.vel_add_x'], {}), '(self.vel_add_x)\n', (1523, 1539), False, 'import shm\n'), ((1546, 1580), 'shm.jank_pos.y.set', 'shm.jank_pos.y.set', (['self.vel_add_y'], {}), '(self.vel_add_y)\n', (1564, 1580), False, 'import shm\n'), ((697, 725), 'shm.desires.sway_speed.get', 'shm.desires.sway_speed.get', ([], {}), '()\n', (723, 725), False, 'import shm\n'), ((1028, 1051), 'shm.desires.speed.get', 'shm.desires.speed.get', ([], {}), '()\n', (1049, 1051), False, 'import shm\n'), ((1414, 1437), 'shm.desires.speed.get', 'shm.desires.speed.get', ([], {}), '()\n', (1435, 1437), False, 'import shm\n'), ((1469, 1497), 'shm.desires.sway_speed.get', 'shm.desires.sway_speed.get', ([], {}), '()\n', (1495, 1497), False, 'import shm\n'), ((1708, 1728), 'shm.jank_pos.y.get', 'shm.jank_pos.y.get', ([], {}), '()\n', (1726, 1728), False, 'import shm\n'), ((1752, 1766), 'mission.framework.movement.VelocityY', 'VelocityY', (['vel'], {}), '(vel)\n', (1761, 1766), False, 'from mission.framework.movement import Depth, Heading, Pitch, VelocityX, VelocityY\n'), ((1795, 1814), 'mission.framework.movement.VelocityY', 'VelocityY', (['(vel * -1)'], {}), '(vel * -1)\n', (1804, 1814), False, 'from mission.framework.movement import Depth, Heading, Pitch, VelocityX, VelocityY\n'), ((1975, 1981), 'mission.framework.primitive.Zero', 'Zero', ([], {}), '()\n', (1979, 1981), False, 'from mission.framework.primitive import Zero, Log, FunctionTask, Fail\n'), ((2083, 2103), 'shm.jank_pos.x.get', 'shm.jank_pos.x.get', ([], {}), '()\n', (2101, 2103), False, 'import shm\n'), ((2127, 2141), 'mission.framework.movement.VelocityX', 'VelocityX', (['vel'], {}), '(vel)\n', (2136, 2141), False, 'from mission.framework.movement import Depth, Heading, Pitch, VelocityX, VelocityY\n'), ((2170, 2189), 'mission.framework.movement.VelocityX', 'VelocityX', (['(vel * -1)'], {}), '(vel * -1)\n', (2179, 2189), False, 'from mission.framework.movement import Depth, Heading, Pitch, VelocityX, VelocityY\n'), ((2350, 2356), 'mission.framework.primitive.Zero', 'Zero', ([], {}), '()\n', (2354, 2356), False, 'from mission.framework.primitive import Zero, Log, FunctionTask, Fail\n'), ((2491, 2508), 'mission.framework.primitive.Log', 'Log', (['"""Restoring?"""'], {}), "('Restoring?')\n", (2494, 2508), False, 'from mission.framework.primitive import Zero, Log, FunctionTask, Fail\n'), ((2535, 2541), 'mission.framework.primitive.Zero', 'Zero', ([], {}), '()\n', (2539, 2541), False, 'from mission.framework.primitive import Zero, Log, FunctionTask, Fail\n'), ((2643, 2660), 'mission.framework.primitive.Log', 'Log', (['"""Restoring?"""'], {}), "('Restoring?')\n", (2646, 2660), False, 'from mission.framework.primitive import Zero, Log, FunctionTask, Fail\n'), ((2687, 2693), 'mission.framework.primitive.Zero', 'Zero', ([], {}), '()\n', (2691, 2693), False, 'from mission.framework.primitive import Zero, Log, FunctionTask, Fail\n'), ((1869, 1889), 'shm.jank_pos.y.get', 'shm.jank_pos.y.get', ([], {}), '()\n', (1887, 1889), False, 'import shm\n'), ((2244, 2264), 'shm.jank_pos.x.get', 'shm.jank_pos.x.get', ([], {}), '()\n', (2262, 2264), False, 'import shm\n'), ((2468, 2482), 'mission.framework.movement.VelocityY', 'VelocityY', (['(0.4)'], {}), 
'(0.4)\n', (2477, 2482), False, 'from mission.framework.movement import Depth, Heading, Pitch, VelocityX, VelocityY\n'), ((2619, 2634), 'mission.framework.movement.VelocityY', 'VelocityY', (['(-0.4)'], {}), '(-0.4)\n', (2628, 2634), False, 'from mission.framework.movement import Depth, Heading, Pitch, VelocityX, VelocityY\n')]
import json import re from .exceptions import InvalidParameterException from moto.core.responses import BaseResponse from .models import logs_backends # See http://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/Welcome.html REGEX_LOG_GROUP_NAME = r"[-._\/#A-Za-z0-9]+" def validate_param( param_name, param_value, constraint, constraint_expression, pattern=None ): try: assert constraint_expression(param_value) except (AssertionError, TypeError): raise InvalidParameterException( constraint=constraint, parameter=param_name, value=param_value ) if pattern and param_value: try: assert re.fullmatch(pattern, param_value) except (AssertionError, TypeError): raise InvalidParameterException( constraint=f"Must match pattern: {pattern}", parameter=param_name, value=param_value, ) class LogsResponse(BaseResponse): @property def logs_backend(self): return logs_backends[self.region] @property def request_params(self): try: return json.loads(self.body) except ValueError: return {} def _get_param(self, param_name, if_none=None): return self.request_params.get(param_name, if_none) def _get_validated_param( self, param, constraint, constraint_expression, pattern=None ): param_value = self._get_param(param) validate_param(param, param_value, constraint, constraint_expression, pattern) return param_value def put_metric_filter(self): filter_name = self._get_validated_param( "filterName", "Minimum length of 1. Maximum length of 512.", lambda x: 1 <= len(x) <= 512, pattern="[^:*]*", ) filter_pattern = self._get_validated_param( "filterPattern", "Minimum length of 0. Maximum length of 1024.", lambda x: 0 <= len(x) <= 1024, ) log_group_name = self._get_validated_param( "logGroupName", "Minimum length of 1. Maximum length of 512.", lambda x: 1 <= len(x) <= 512, pattern=REGEX_LOG_GROUP_NAME, ) metric_transformations = self._get_validated_param( "metricTransformations", "Fixed number of 1 item.", lambda x: len(x) == 1 ) self.logs_backend.put_metric_filter( filter_name, filter_pattern, log_group_name, metric_transformations ) return "" def describe_metric_filters(self): filter_name_prefix = self._get_validated_param( "filterNamePrefix", "Minimum length of 1. Maximum length of 512.", lambda x: x is None or 1 <= len(x) <= 512, pattern="[^:*]*", ) log_group_name = self._get_validated_param( "logGroupName", "Minimum length of 1. 
Maximum length of 512", lambda x: x is None or 1 <= len(x) <= 512, pattern=REGEX_LOG_GROUP_NAME, ) metric_name = self._get_validated_param( "metricName", "Maximum length of 255.", lambda x: x is None or len(x) <= 255, pattern="[^:*$]*", ) metric_namespace = self._get_validated_param( "metricNamespace", "Maximum length of 255.", lambda x: x is None or len(x) <= 255, pattern="[^:*$]*", ) next_token = self._get_validated_param( "nextToken", "Minimum length of 1.", lambda x: x is None or 1 <= len(x) ) if metric_name and not metric_namespace: raise InvalidParameterException( constraint=f'{"If you include the metricName parameter in your request, "}' f'{"you must also include the metricNamespace parameter."}', parameter="metricNamespace", value=metric_namespace, ) if metric_namespace and not metric_name: raise InvalidParameterException( constraint=f'{"If you include the metricNamespace parameter in your request, "}' f'{"you must also include the metricName parameter."}', parameter="metricName", value=metric_name, ) filters = self.logs_backend.describe_metric_filters( filter_name_prefix, log_group_name, metric_name, metric_namespace ) return json.dumps({"metricFilters": filters, "nextToken": next_token}) def delete_metric_filter(self): filter_name = self._get_validated_param( "filterName", "Minimum length of 1. Maximum length of 512.", lambda x: 1 <= len(x) <= 512, pattern="[^:*]*$", ) log_group_name = self._get_validated_param( "logGroupName", "Minimum length of 1. Maximum length of 512.", lambda x: 1 <= len(x) <= 512, pattern=REGEX_LOG_GROUP_NAME, ) self.logs_backend.delete_metric_filter(filter_name, log_group_name) return "" def create_log_group(self): log_group_name = self._get_param("logGroupName") tags = self._get_param("tags") kms_key_id = self._get_param("kmsKeyId") self.logs_backend.create_log_group(log_group_name, tags, kmsKeyId=kms_key_id) return "" def delete_log_group(self): log_group_name = self._get_param("logGroupName") self.logs_backend.delete_log_group(log_group_name) return "" def describe_log_groups(self): log_group_name_prefix = self._get_param("logGroupNamePrefix") next_token = self._get_param("nextToken") limit = self._get_param("limit", 50) if limit > 50: raise InvalidParameterException( constraint="Member must have value less than or equal to 50", parameter="limit", value=limit, ) groups, next_token = self.logs_backend.describe_log_groups( limit=limit, log_group_name_prefix=log_group_name_prefix, next_token=next_token, ) result = {"logGroups": groups} if next_token: result["nextToken"] = next_token return json.dumps(result) def create_log_stream(self): log_group_name = self._get_param("logGroupName") log_stream_name = self._get_param("logStreamName") self.logs_backend.create_log_stream(log_group_name, log_stream_name) return "" def delete_log_stream(self): log_group_name = self._get_param("logGroupName") log_stream_name = self._get_param("logStreamName") self.logs_backend.delete_log_stream(log_group_name, log_stream_name) return "" def describe_log_streams(self): log_group_name = self._get_param("logGroupName") log_stream_name_prefix = self._get_param("logStreamNamePrefix", "") descending = self._get_param("descending", False) limit = self._get_param("limit", 50) next_token = self._get_param("nextToken") order_by = self._get_param("orderBy", "LogStreamName") streams, next_token = self.logs_backend.describe_log_streams( descending, limit, log_group_name, log_stream_name_prefix, next_token, order_by, ) return json.dumps({"logStreams": streams, "nextToken": next_token}) def 
put_log_events(self): log_group_name = self._get_param("logGroupName") log_stream_name = self._get_param("logStreamName") log_events = self._get_param("logEvents") next_sequence_token, rejected_info = self.logs_backend.put_log_events( log_group_name, log_stream_name, log_events ) if rejected_info: return json.dumps( { "nextSequenceToken": next_sequence_token, "rejectedLogEventsInfo": rejected_info, } ) else: return json.dumps({"nextSequenceToken": next_sequence_token}) def get_log_events(self): log_group_name = self._get_param("logGroupName") log_stream_name = self._get_param("logStreamName") start_time = self._get_param("startTime") end_time = self._get_param("endTime") limit = self._get_param("limit") next_token = self._get_param("nextToken") start_from_head = self._get_param("startFromHead", False) ( events, next_backward_token, next_forward_token, ) = self.logs_backend.get_log_events( log_group_name, log_stream_name, start_time, end_time, limit, next_token, start_from_head, ) return json.dumps( { "events": events, "nextBackwardToken": next_backward_token, "nextForwardToken": next_forward_token, } ) def filter_log_events(self): log_group_name = self._get_param("logGroupName") log_stream_names = self._get_param("logStreamNames", []) start_time = self._get_param("startTime") # impl, see: http://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/FilterAndPatternSyntax.html filter_pattern = self._get_param("filterPattern") interleaved = self._get_param("interleaved", False) end_time = self._get_param("endTime") limit = self._get_param("limit") next_token = self._get_param("nextToken") events, next_token, searched_streams = self.logs_backend.filter_log_events( log_group_name, log_stream_names, start_time, end_time, limit, next_token, filter_pattern, interleaved, ) return json.dumps( { "events": events, "nextToken": next_token, "searchedLogStreams": searched_streams, } ) def put_retention_policy(self): log_group_name = self._get_param("logGroupName") retention_in_days = self._get_param("retentionInDays") self.logs_backend.put_retention_policy(log_group_name, retention_in_days) return "" def delete_retention_policy(self): log_group_name = self._get_param("logGroupName") self.logs_backend.delete_retention_policy(log_group_name) return "" def describe_resource_policies(self): next_token = self._get_param("nextToken") limit = self._get_param("limit") policies = self.logs_backend.describe_resource_policies(next_token, limit) return json.dumps({"resourcePolicies": [p.describe() for p in policies]}) def put_resource_policy(self): policy_name = self._get_param("policyName") policy_doc = self._get_param("policyDocument") policy = self.logs_backend.put_resource_policy(policy_name, policy_doc) return json.dumps({"resourcePolicy": policy.describe()}) def delete_resource_policy(self): policy_name = self._get_param("policyName") self.logs_backend.delete_resource_policy(policy_name) return "" def list_tags_log_group(self): log_group_name = self._get_param("logGroupName") tags = self.logs_backend.list_tags_log_group(log_group_name) return json.dumps({"tags": tags}) def tag_log_group(self): log_group_name = self._get_param("logGroupName") tags = self._get_param("tags") self.logs_backend.tag_log_group(log_group_name, tags) return "" def untag_log_group(self): log_group_name = self._get_param("logGroupName") tags = self._get_param("tags") self.logs_backend.untag_log_group(log_group_name, tags) return "" def describe_subscription_filters(self): log_group_name = self._get_param("logGroupName") subscription_filters 
= self.logs_backend.describe_subscription_filters( log_group_name ) return json.dumps({"subscriptionFilters": subscription_filters}) def put_subscription_filter(self): log_group_name = self._get_param("logGroupName") filter_name = self._get_param("filterName") filter_pattern = self._get_param("filterPattern") destination_arn = self._get_param("destinationArn") role_arn = self._get_param("roleArn") self.logs_backend.put_subscription_filter( log_group_name, filter_name, filter_pattern, destination_arn, role_arn ) return "" def delete_subscription_filter(self): log_group_name = self._get_param("logGroupName") filter_name = self._get_param("filterName") self.logs_backend.delete_subscription_filter(log_group_name, filter_name) return "" def start_query(self): log_group_name = self._get_param("logGroupName") log_group_names = self._get_param("logGroupNames") start_time = self._get_param("startTime") end_time = self._get_param("endTime") query_string = self._get_param("queryString") if log_group_name and log_group_names: raise InvalidParameterException() if log_group_name: log_group_names = [log_group_name] query_id = self.logs_backend.start_query( log_group_names, start_time, end_time, query_string ) return json.dumps({"queryId": "{0}".format(query_id)}) def create_export_task(self): log_group_name = self._get_param("logGroupName") destination = self._get_param("destination") task_id = self.logs_backend.create_export_task( log_group_name=log_group_name, destination=destination ) return json.dumps(dict(taskId=str(task_id)))
[ "re.fullmatch", "json.loads", "json.dumps" ]
[((4553, 4616), 'json.dumps', 'json.dumps', (["{'metricFilters': filters, 'nextToken': next_token}"], {}), "({'metricFilters': filters, 'nextToken': next_token})\n", (4563, 4616), False, 'import json\n'), ((6391, 6409), 'json.dumps', 'json.dumps', (['result'], {}), '(result)\n', (6401, 6409), False, 'import json\n'), ((7535, 7595), 'json.dumps', 'json.dumps', (["{'logStreams': streams, 'nextToken': next_token}"], {}), "({'logStreams': streams, 'nextToken': next_token})\n", (7545, 7595), False, 'import json\n'), ((8998, 9114), 'json.dumps', 'json.dumps', (["{'events': events, 'nextBackwardToken': next_backward_token,\n 'nextForwardToken': next_forward_token}"], {}), "({'events': events, 'nextBackwardToken': next_backward_token,\n 'nextForwardToken': next_forward_token})\n", (9008, 9114), False, 'import json\n'), ((10072, 10171), 'json.dumps', 'json.dumps', (["{'events': events, 'nextToken': next_token, 'searchedLogStreams':\n searched_streams}"], {}), "({'events': events, 'nextToken': next_token, 'searchedLogStreams':\n searched_streams})\n", (10082, 10171), False, 'import json\n'), ((11626, 11652), 'json.dumps', 'json.dumps', (["{'tags': tags}"], {}), "({'tags': tags})\n", (11636, 11652), False, 'import json\n'), ((12306, 12363), 'json.dumps', 'json.dumps', (["{'subscriptionFilters': subscription_filters}"], {}), "({'subscriptionFilters': subscription_filters})\n", (12316, 12363), False, 'import json\n'), ((679, 713), 're.fullmatch', 're.fullmatch', (['pattern', 'param_value'], {}), '(pattern, param_value)\n', (691, 713), False, 'import re\n'), ((1148, 1169), 'json.loads', 'json.loads', (['self.body'], {}), '(self.body)\n', (1158, 1169), False, 'import json\n'), ((7984, 8082), 'json.dumps', 'json.dumps', (["{'nextSequenceToken': next_sequence_token, 'rejectedLogEventsInfo':\n rejected_info}"], {}), "({'nextSequenceToken': next_sequence_token,\n 'rejectedLogEventsInfo': rejected_info})\n", (7994, 8082), False, 'import json\n'), ((8201, 8255), 'json.dumps', 'json.dumps', (["{'nextSequenceToken': next_sequence_token}"], {}), "({'nextSequenceToken': next_sequence_token})\n", (8211, 8255), False, 'import json\n')]
# -*- coding: utf-8 -*-

import numpy as np

eps = np.finfo(float).eps

def infnorm(x):
    return np.linalg.norm(x, np.inf)

def scaled_tol(n):
    tol = 5e1*eps if n < 20 else np.log(n)**2.5*eps
    return tol

# bespoke test generators
def infNormLessThanTol(a, b, tol):
    def asserter(self):
        self.assertLessEqual(infnorm(a-b), tol)
    return asserter

# test functions
testfunctions = []
fun_details = [
    # (
    #   function,
    #   name for the test printouts,
    #   Matlab chebfun adaptive degree on [-1,1],
    #   Any roots on the real line?
    # )
    (lambda x: x**3 + x**2 + x + 1.1, 'poly3(x)', 4, True),
    (lambda x: np.exp(x), 'exp(x)', 15, False),
    (lambda x: np.sin(x), 'sin(x)', 14, True),
    (lambda x: .2+.1*np.sin(x), '(.2+.1*sin(x))', 14, False),
    (lambda x: np.cos(20*x), 'cos(20x)', 51, True),
    (lambda x: 0.*x+1., 'constfun', 1, False),
    (lambda x: 0.*x, 'zerofun', 1, True),
]

for k, items in enumerate(fun_details):
    fun = items[0]
    fun.__name__ = items[1]
    testfunctions.append((fun, items[2], items[3]))

# TODO: check these lengths against Chebfun
# TODO: more examples
[ "numpy.log", "numpy.finfo", "numpy.sin", "numpy.linalg.norm", "numpy.exp", "numpy.cos" ]
[((51, 66), 'numpy.finfo', 'np.finfo', (['float'], {}), '(float)\n', (59, 66), True, 'import numpy as np\n'), ((99, 124), 'numpy.linalg.norm', 'np.linalg.norm', (['x', 'np.inf'], {}), '(x, np.inf)\n', (113, 124), True, 'import numpy as np\n'), ((654, 663), 'numpy.exp', 'np.exp', (['x'], {}), '(x)\n', (660, 663), True, 'import numpy as np\n'), ((722, 731), 'numpy.sin', 'np.sin', (['x'], {}), '(x)\n', (728, 731), True, 'import numpy as np\n'), ((857, 871), 'numpy.cos', 'np.cos', (['(20 * x)'], {}), '(20 * x)\n', (863, 871), True, 'import numpy as np\n'), ((178, 187), 'numpy.log', 'np.log', (['n'], {}), '(n)\n', (184, 187), True, 'import numpy as np\n'), ((795, 804), 'numpy.sin', 'np.sin', (['x'], {}), '(x)\n', (801, 804), True, 'import numpy as np\n')]
import yaml
import sys
import os
from jinja2 import Template
import argparse

backend = {
    'cpu': {
        'pass1': 'ffmpeg -hide_banner -loglevel error -y -i {{ input_file }} -an -c:v libx264 -preset:v {{ preset }} -threads 0 -r {{ fps }} -g {{ gop }} -keyint_min {{ gop }} -sc_threshold 0 -x264opts bframes=1 -pass 1 -b:v {{ bitrate }} -profile:v {{ profile }} -s {{ size }} -f mp4 {{ output_file }}',
        'pass2': 'ffmpeg -hide_banner -loglevel error -y -i {{ input_file }} -strict -2 -c:a aac -ac 2 -ab {{ audio_bitrate }} -c:v libx264 -preset:v {{ preset }} -threads 0 -r {{ fps }} -g {{ gop }} -keyint_min {{ gop }} -sc_threshold 0 -x264opts bframes=1 -pass 2 -b:v {{ bitrate }} -profile:v {{ profile }} -s {{ size }} -f mp4 {{ output_file }}'
    },
    'nvidia': {
        'pass1': 'ffmpeg -hide_banner -loglevel error -y -hwaccel cuvid -i {{ input_file }} -an -c:v h264_nvenc -preset:v {{ preset }} -pix_fmt yuv420p -movflags faststart -bf 2 -coder 1 -threads 0 -r {{ fps }} -g {{ gop }} -keyint_min {{ gop }} -sc_threshold 0 -pass 1 -b:v {{ bitrate }} -profile:v {{ profile }} -s {{ size }} -f mp4 {{ output_file }}',
        'pass2': 'ffmpeg -hide_banner -loglevel error -y -hwaccel cuvid -i {{ input_file }} -strict -2 -c:a aac -ac 2 -ab {{ audio_bitrate }} -c:v h264_nvenc -preset:v {{ preset }} -pix_fmt yuv420p -movflags faststart -bf 2 -coder 1 -threads 0 -r {{ fps }} -g {{ gop }} -keyint_min {{ gop }} -sc_threshold 0 -pass 2 -b:v {{ bitrate }} -profile:v {{ profile }} -s {{ size }} -f mp4 {{ output_file }}'
    }
}

parser = argparse.ArgumentParser()
parser.add_argument('-c', '--config')
parser.add_argument('-b', '--backend', choices=['cpu', 'nvidia'])
parser.add_argument('files', nargs='+')
args = parser.parse_args()

pass1 = Template(backend[args.backend]['pass1'])
pass2 = Template(backend[args.backend]['pass2'])

if os.path.exists(args.config) == False:
    print('Config file %s not found' % args.config)
    sys.exit(1)

stream = open(args.config, 'r')
config = yaml.load(stream)

qualities = config['qualities']

for file in args.files:
    if os.path.exists(file) == False:
        print('File %s not found' % file)
        continue

    first_pass_done = False
    for q in qualities:
        qualities[q]['input_file'] = file
        qualities[q]['output_file'] = file.replace('.mp4', '') + '_' + q + '.mp4'

        if first_pass_done == False:
            command = pass1.render(qualities[q])
            print(command)
            os.system(command)
            first_pass_done = True

        command = pass2.render(qualities[q])
        print(command)
        os.system(command)
[ "jinja2.Template", "yaml.load", "argparse.ArgumentParser", "os.path.exists", "os.system", "sys.exit" ]
[((1554, 1579), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1577, 1579), False, 'import argparse\n'), ((1760, 1800), 'jinja2.Template', 'Template', (["backend[args.backend]['pass1']"], {}), "(backend[args.backend]['pass1'])\n", (1768, 1800), False, 'from jinja2 import Template\n'), ((1809, 1849), 'jinja2.Template', 'Template', (["backend[args.backend]['pass2']"], {}), "(backend[args.backend]['pass2'])\n", (1817, 1849), False, 'from jinja2 import Template\n'), ((2002, 2019), 'yaml.load', 'yaml.load', (['stream'], {}), '(stream)\n', (2011, 2019), False, 'import yaml\n'), ((1854, 1881), 'os.path.exists', 'os.path.exists', (['args.config'], {}), '(args.config)\n', (1868, 1881), False, 'import os\n'), ((1948, 1959), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1956, 1959), False, 'import sys\n'), ((2085, 2105), 'os.path.exists', 'os.path.exists', (['file'], {}), '(file)\n', (2099, 2105), False, 'import os\n'), ((2618, 2636), 'os.system', 'os.system', (['command'], {}), '(command)\n', (2627, 2636), False, 'import os\n'), ((2479, 2497), 'os.system', 'os.system', (['command'], {}), '(command)\n', (2488, 2497), False, 'import os\n')]
from ..models import Label
import pytest
from mixer.backend.django import mixer

pytestmark = pytest.mark.django_db

class TestBoard:
    def test_model(self):
        board = mixer.blend('boards.Board')
        assert board.pk == 1, 'Should create a Board instance'

    def test_str(self):
        board = mixer.blend('boards.Board')
        assert board.title == str(board), 'Should check the board name'

    def test_labels_signal(self):
        board = mixer.blend('boards.Board')
        assert Label.objects.filter(board=board).count() == 10

class TestList:
    def test_str(self):
        list = mixer.blend('boards.List')
        assert list.title == str(list), 'Should check the List name'

    def test_save(self):
        board = mixer.blend('boards.Board')
        list1 = mixer.blend('boards.List', board=board)
        list2 = mixer.blend('boards.List', board=board)
        assert list1.order == 2 ** 16 - 1
        assert list1.order == list2.order - (2 ** 16 - 1)
        list3 = mixer.blend('boards.List', board=board)
        list4 = mixer.blend('boards.List', board=board)
        list1.delete()
        list5 = mixer.blend('boards.List', board=board)
        assert list5.order == 5 * (2 ** 16 - 1)

class TestItem:
    def test_str(self):
        item = mixer.blend('boards.Item')
        assert item.title == str(item)

    def test_save(self):
        list = mixer.blend('boards.List')
        item1 = mixer.blend('boards.Item', list=list)
        item2 = mixer.blend('boards.Item', list=list)
        assert item1.order == (2 ** 16 -1)
        assert item1.order == item2.order - (2 ** 16 - 1)
        item3 = mixer.blend('boards.Item', list=list)
        item4 = mixer.blend('boards.Item', list=list)
        item1.delete()
        item2.delete()
        item5 = mixer.blend('boards.Item', list=list)
        assert item5.order == 5 * (2 ** 16 -1)

class TestLabel:
    def test_model(self):
        board = mixer.blend('boards.Board')
        assert board.pk == 1, 'Should create a Label instance'

    def test_str(self):
        label = mixer.blend('boards.Label')
        assert label.title == str(label), 'Should check the Label name'

class TestComment:
    def test_model(self):
        comment = mixer.blend('boards.Comment')
        assert comment.pk == 1, 'Should create a Comment instance'

class TestAttachment:
    def test_model(self):
        attachment = mixer.blend('boards.Attachment')
        assert attachment.pk == 1, 'Should create a Attachment instance'
[ "mixer.backend.django.mixer.blend" ]
[((176, 203), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""boards.Board"""'], {}), "('boards.Board')\n", (187, 203), False, 'from mixer.backend.django import mixer\n'), ((308, 335), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""boards.Board"""'], {}), "('boards.Board')\n", (319, 335), False, 'from mixer.backend.django import mixer\n'), ((459, 486), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""boards.Board"""'], {}), "('boards.Board')\n", (470, 486), False, 'from mixer.backend.django import mixer\n'), ((607, 633), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""boards.List"""'], {}), "('boards.List')\n", (618, 633), False, 'from mixer.backend.django import mixer\n'), ((745, 772), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""boards.Board"""'], {}), "('boards.Board')\n", (756, 772), False, 'from mixer.backend.django import mixer\n'), ((789, 828), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""boards.List"""'], {'board': 'board'}), "('boards.List', board=board)\n", (800, 828), False, 'from mixer.backend.django import mixer\n'), ((845, 884), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""boards.List"""'], {'board': 'board'}), "('boards.List', board=board)\n", (856, 884), False, 'from mixer.backend.django import mixer\n'), ((1002, 1041), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""boards.List"""'], {'board': 'board'}), "('boards.List', board=board)\n", (1013, 1041), False, 'from mixer.backend.django import mixer\n'), ((1058, 1097), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""boards.List"""'], {'board': 'board'}), "('boards.List', board=board)\n", (1069, 1097), False, 'from mixer.backend.django import mixer\n'), ((1137, 1176), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""boards.List"""'], {'board': 'board'}), "('boards.List', board=board)\n", (1148, 1176), False, 'from mixer.backend.django import mixer\n'), ((1282, 1308), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""boards.Item"""'], {}), "('boards.Item')\n", (1293, 1308), False, 'from mixer.backend.django import mixer\n'), ((1389, 1415), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""boards.List"""'], {}), "('boards.List')\n", (1400, 1415), False, 'from mixer.backend.django import mixer\n'), ((1432, 1469), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""boards.Item"""'], {'list': 'list'}), "('boards.Item', list=list)\n", (1443, 1469), False, 'from mixer.backend.django import mixer\n'), ((1486, 1523), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""boards.Item"""'], {'list': 'list'}), "('boards.Item', list=list)\n", (1497, 1523), False, 'from mixer.backend.django import mixer\n'), ((1641, 1678), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""boards.Item"""'], {'list': 'list'}), "('boards.Item', list=list)\n", (1652, 1678), False, 'from mixer.backend.django import mixer\n'), ((1695, 1732), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""boards.Item"""'], {'list': 'list'}), "('boards.Item', list=list)\n", (1706, 1732), False, 'from mixer.backend.django import mixer\n'), ((1795, 1832), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""boards.Item"""'], {'list': 'list'}), "('boards.Item', list=list)\n", (1806, 1832), False, 'from mixer.backend.django import mixer\n'), ((1941, 1968), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""boards.Board"""'], {}), "('boards.Board')\n", (1952, 1968), False, 'from mixer.backend.django import mixer\n'), ((2073, 2100), 
'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""boards.Label"""'], {}), "('boards.Label')\n", (2084, 2100), False, 'from mixer.backend.django import mixer\n'), ((2238, 2267), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""boards.Comment"""'], {}), "('boards.Comment')\n", (2249, 2267), False, 'from mixer.backend.django import mixer\n'), ((2406, 2438), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""boards.Attachment"""'], {}), "('boards.Attachment')\n", (2417, 2438), False, 'from mixer.backend.django import mixer\n')]
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2019, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
''' Provide a base class for all Bokeh widget models.

In addition to different kinds of plots, various kinds of UI controls (e.g.
sliders, buttons, inputs, etc.) can be included in Bokeh documents. These
widgets can be used in conjunction with ``CustomJS`` callbacks that execute
in the browser, or with python callbacks that execute on a Bokeh server.

'''

#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
import logging
log = logging.getLogger(__name__)

#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------

# Standard library imports

# External imports

# Bokeh imports
from ...core.has_props import abstract
from ...core.properties import Int, Enum, Override
from ..layouts import LayoutDOM

#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------

__all__ = (
    'Widget',
)

#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------

@abstract
class Widget(LayoutDOM):
    ''' A base class for all interactive widget types.

    '''

    orientation = Enum("horizontal", "vertical", help="""
    Orient the widget either horizontally (default) or vertically.

    Note that not all widgets support vertical orientation.
    """)

    default_size = Int(default=300, help="""
    The default size (width or height) in the dominating dimension.

    The dominating dimension is determined by widget orientation.
    """)

    margin = Override(default=(5, 5, 5, 5))

#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
[ "logging.getLogger" ]
[((885, 912), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (902, 912), False, 'import logging\n')]
#!/usr/bin/env python3

from functools import reduce
from random import randint, choices

def main():
    k, total_genomes, n = list(map(int, input().split()))
    genomes = []
    for _ in range(total_genomes):
        genomes.append(input().upper())

    best_motifs = summarized_gibbs_motif_search(k, genomes, n, 20)
    for motif in best_motifs:
        print(motif)

ACTG_MAP = {
    'A': 0,
    'C': 1,
    'T': 2,
    'G': 3,
}

ACTG_REVERSE_MAP = {
    0: 'A',
    1: 'C',
    2: 'T',
    3: 'G'
}

def _recount_profile(profile: list, genome: str) -> None:
    """ Include 'genome' into given 'profile' counts matrix """
    for i in range(len(genome)):
        profile[i][ACTG_MAP[genome[i]]] += 1

def _profile(genomes: list) -> list:
    """ Generate a profile (counts) of the given 'genomes' """
    result = [[1 for _ in range(4)] for _ in range(len(genomes[0]))]
    for genome in genomes:
        _recount_profile(result, genome)
    return result

def _profile_to_probs(profile: list, total_genomes: int) -> list:
    """ Convert a profile (with counts) to a profile of probabilities """
    result = []
    for bases in profile:
        result.append([float(bases[i]) / (float(total_genomes) + 4.) for i in range(4)])
    return result

def _mer_probability(profile_probs: list, genome: str) -> float:
    """ Return probability of the given 'genome' in the given 'profile_probs' """
    return reduce(
        lambda x, y: x * y,
        [profile_probs[i][ACTG_MAP[genome[i]]] for i in range(len(genome))]
    )

def _profile_consensus_mer(profile: list) -> str:
    """ Get a consensus mer from the given profile """
    result = []
    for bases in profile:
        min_base = max(range(len(bases)), key=bases.__getitem__)
        result.append(ACTG_REVERSE_MAP[min_base])
    return ''.join(result)

def _score_mers(mers: list, consensus: str) -> int:
    """ Calculate the score of a mer list """
    result = 0
    for mer in mers:
        for i in range(len(mer)):
            if mer[i] != consensus[i]:
                result += 1
    return result

def _random_probable_motif(profile_probs: list, k: int, genome: str):
    """ Return a random most-probable by the given profile motif from the given genome """
    mers = []
    probabilities = []
    for k_start in range(len(genome) - k + 1):
        mers.append(genome[k_start:k_start + k])
        probabilities.append(reduce(
            lambda x, y: x * y,
            [profile_probs[i][ACTG_MAP[mers[-1][i]]] for i in range(len(mers[-1]))]
        ))
    return choices(mers, probabilities)[0]

def gibbs_motif_search(k: int, genomes: list, n: int) -> list:
    """ Find profile-best motifs of length 'k' in each of the 'genomes' using a randomized algorithm """
    best_motifs = []
    for genome in genomes:
        r = randint(0, len(genomes[0]) - k)
        best_motifs.append(genome[r:r + k])
    best_motifs_score = _score_mers(best_motifs, _profile_consensus_mer(_profile_to_probs(_profile(best_motifs), len(best_motifs))))

    for j in range(n):
        i_rotate = randint(0, len(genomes) - 1)
        rotated_best_motifs = best_motifs[0:i_rotate]
        rotated_best_motifs.extend(best_motifs[i_rotate + 1:len(best_motifs)])
        profile_probs = _profile_to_probs(_profile(rotated_best_motifs), len(rotated_best_motifs))
        probable_motif = _random_probable_motif(profile_probs, k, genomes[i_rotate])
        rotated_best_motifs.insert(i_rotate, probable_motif)
        rotated_score = _score_mers(rotated_best_motifs, _profile_consensus_mer(_profile_to_probs(_profile(rotated_best_motifs), len(rotated_best_motifs))))
        if rotated_score < best_motifs_score:
            best_motifs = rotated_best_motifs
            best_motifs_score = rotated_score

    return best_motifs

def summarized_gibbs_motif_search(k: int, genomes: list, n: int, samples: int) -> list:
    """ Perform 'gibbs_motif_search()' 'samples' times """
    randomized_search_results = []
    for _ in range(samples):
        randomized_search_results.append(gibbs_motif_search(k, genomes, n))

    result = []
    for i in range(len(genomes)):
        randomized_search_counts = {}
        randomized_search_max_count = 0
        randomized_search_max_motif = None
        for search_result in randomized_search_results:
            count = randomized_search_counts.get(search_result[i], 0) + 1
            if count > randomized_search_max_count:
                randomized_search_max_count = count
                randomized_search_max_motif = search_result[i]
            randomized_search_counts[search_result[i]] = count
        result.append(randomized_search_max_motif)

    return result

if __name__ == '__main__':
    main()
[ "random.choices" ]
[((2612, 2640), 'random.choices', 'choices', (['mers', 'probabilities'], {}), '(mers, probabilities)\n', (2619, 2640), False, 'from random import randint, choices\n')]
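A note on the record above: its Gibbs-sampling search repeatedly redraws one motif at random, weighting each candidate k-mer by the product of its per-position profile probabilities (see `_random_probable_motif`, which relies on `random.choices`). The standalone sketch below isolates just that weighted-sampling step; the profile values and the genome string are invented here for illustration and are not taken from the record.

# Sketch of the weighted k-mer sampling step used by the record above.
# The profile values and genome string below are made up for illustration.
from functools import reduce
from random import choices

ACTG_MAP = {'A': 0, 'C': 1, 'T': 2, 'G': 3}

# Hypothetical per-position probabilities for A, C, T, G (three positions).
profile_probs = [
    [0.7, 0.1, 0.1, 0.1],
    [0.1, 0.7, 0.1, 0.1],
    [0.1, 0.1, 0.1, 0.7],
]
genome = 'ACGTTACG'  # made-up sequence
k = 3

# Score every k-mer as the product of its per-position probabilities.
mers, weights = [], []
for start in range(len(genome) - k + 1):
    mer = genome[start:start + k]
    mers.append(mer)
    weights.append(reduce(lambda x, y: x * y,
                          [profile_probs[i][ACTG_MAP[mer[i]]] for i in range(k)]))

# Draw one k-mer with probability proportional to its score, rather than
# greedily taking the best one; this randomness is what makes it a Gibbs sampler.
sampled = choices(mers, weights)[0]
print(sampled)

Drawing proportionally to the profile score, instead of always taking the argmax, is what lets the sampler escape locally optimal motif sets; that is also why the record wraps the whole search in `summarized_gibbs_motif_search` and takes a majority vote over 20 independent runs.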
import json from django.db import models from django.utils import timezone from django.utils.translation import ugettext_noop as _ from django.template.defaultfilters import slugify from django.core.exceptions import ValidationError from django.core import serializers from positions.fields import PositionField from .settings import USER_MODEL class JoyRideManager(models.Manager): def get_joyrides(self, url_path=None, for_user=None, exclude_viewed=True): try: qs = super(JoyRideManager, self).get_query_set() except AttributeError: qs = super(JoyRideManager, self).get_queryset() if for_user and for_user.is_authenticated(): viewed_qs = JoyRideHistory.objects.filter(user__id=for_user.id) if exclude_viewed: viewed_qs = viewed_qs.filter(viewed=True) viewed_qs_ids = viewed_qs.values_list('joyride__id', flat=True) qs = qs.exclude(id__in=viewed_qs_ids) if url_path is not None: qs = qs.filter(url_path__regex=r'^%s$' % url_path) return qs def get_joyride(self, slug, url_path=None, for_user=None, viewed=False): try: qs = super(JoyRideManager, self).get_query_set() except AttributeError: qs = super(JoyRideManager, self).get_queryset() kw = {'slug__exact': slug} if url_path is not None: kw.update({'url_path__regex': r'^%s$' % url_path}) obj = qs.get(**kw) if for_user and for_user.is_authenticated(): objv = obj.views.filter(user__id=for_user.id) if objv: objv = objv[0] if objv.viewed != viewed: obj = None return obj class JoyRide(models.Model): class Meta: verbose_name = _('Joy Ride') verbose_name_plural = _('Joy Rides') ordering = ['-created'] TIP_LOCATION_TOP = 'top' TIP_LOCATION_BOTTOM = 'bottom' TIP_LOCATION_RIGHT = 'right' TIP_LOCATION_LEFT = 'left' TIP_LOCATION_CHOICES = ( (TIP_LOCATION_TOP, _('top')), (TIP_LOCATION_BOTTOM, _('bottom')), (TIP_LOCATION_RIGHT, _('right')), (TIP_LOCATION_LEFT, _('left')), ) TIP_ANIMATION_POP = 'pop' TIP_ANIMATION_FADE = 'fade' TIP_ANIMATION_CHOICES = ( (TIP_ANIMATION_POP, _('pop')), (TIP_ANIMATION_FADE, _('fade')), ) name = models.CharField( _('<NAME>'), max_length=50, unique=True, help_text=_('This will be slugify automatically and will be used as ID for a joy ride'), ) url_path = models.CharField( _('Page URL'), max_length=255, null=True, blank=True, help_text=_('The url e.g. /about/ or url regex /abc/\d+/ of the page on which this joyride will be activated. 
\ If left blank joyride will be activated on global scope') ) slug = models.SlugField(editable=False) tipLocation = models.CharField( choices=TIP_LOCATION_CHOICES, default=TIP_LOCATION_BOTTOM, max_length=10, help_text=_('"top" or "bottom" in relation to parent'), ) nubPosition = models.CharField( max_length=10, default='auto', help_text=_('Override on a per tooltip bases'), ) scroll = models.BooleanField( default=True, help_text=_('Whether to scroll to tips'), ) scrollSpeed = models.PositiveIntegerField( default=300, help_text=_('Page scrolling speed in milliseconds'), ) timer = models.PositiveIntegerField( default=0, help_text=_('0 = no timer , all other numbers = timer in milliseconds'), ) autoStart = models.BooleanField( default=False, help_text=_('true or false - false tour starts when restart called'), ) startTimerOnClick = models.BooleanField( default=True, help_text=_('true or false - true requires clicking the first button start the timer'), ) startOffset = models.PositiveIntegerField( default=0, help_text=_('the index of the tooltip you want to start on (index of the li)'), ) nextButton = models.BooleanField( default=True, help_text=_('true or false to control whether a next button is used'), ) tipAnimation = models.CharField( choices=TIP_ANIMATION_CHOICES, default=TIP_ANIMATION_FADE, max_length=10, help_text=_('"pop" or "fade" in each tip'), ) tipAnimationFadeSpeed = models.PositiveIntegerField( default=300, help_text=_('when tipAnimation = "fade" this is speed in milliseconds for the transition'), ) cookieMonster = models.BooleanField( default=True, help_text=_('true or false to control whether cookies are used'), ) cookieName = models.CharField( max_length=50, default='joyride', help_text=_('Name the cookie you\'ll use'), ) cookieDomain = models.CharField( max_length=200, null=True, blank=True, help_text=_('Will this cookie be attached to a domain, ie. 
".notableapp.com"'), ) cookiePath = models.CharField( max_length=255, null=True, blank=True, help_text=_('Set to "/" if you want the cookie for the whole website'), ) localStorage = models.BooleanField( default=False, help_text=_('true or false to control whether localstorage is used'), ) localStorageKey = models.CharField( default='joyride', max_length=50, help_text=_('Keyname in localstorage'), ) tipContainer = models.CharField( max_length=100, default='body', help_text=_('Where will the tip be attached'), ) modal = models.BooleanField( default=False, help_text=_('Whether to cover page with modal during the tour'), ) expose = models.BooleanField( default=False, help_text=_('Whether to expose the elements at each step in the tour (requires modal:true)'), ) postExposeCallback = models.CharField( max_length=100, null=True, blank=True, help_text=_('A method to call after an element has been exposed'), ) preRideCallback = models.CharField( max_length=100, null=True, blank=True, help_text=_('A method to call before the tour starts (passed index, tip, and cloned exposed element)'), ) postRideCallback = models.CharField( max_length=100, null=True, blank=True, help_text=_('A method to call once the tour closes (canceled or complete)'), ) preStepCallback = models.CharField( max_length=100, null=True, blank=True, help_text=_('A method to call before each step'), ) postStepCallback = models.CharField( max_length=100, null=True, blank=True, help_text=_('A method to call after each step'), ) showJoyRideElement = models.CharField( max_length=100, null=True, blank=True, help_text=_('A DOM element id or class, a method must be provided in showJoyRideElementOn, \ if this is left blank then JoyRide will be shown on page load'), ) showJoyRideElementOn = models.CharField( max_length=100, null=True, blank=True, help_text=_('When to show JoyRide i.e "fous", "click". This must be set if showJoyRideElement is given'), ) destroy = models.CharField( max_length=255, null=True, blank=True, help_text=_('IDs of joyrides which should be destroyed before invoking this joyride e.g. 
#abc, #cde'), ) created = models.DateTimeField( _('Creation Date'), default=timezone.now, help_text=_('Date and Time of when created'), ) objects = JoyRideManager() @property def properties(self): j = serializers.serialize('json', [self]) j = json.loads(j)[0]['fields'] j.pop('name') j.pop('slug') j.pop('url_path') j.pop('created') cookie_domain = j.pop('cookieDomain') if not cookie_domain: cookie_domain = False cookie_path = j.pop('cookiePath') if not cookie_path: cookie_path = False j.update({'cookieDomain': cookie_domain, 'cookiePath': cookie_path}) d = {} for key, val in j.items(): if val != '': d[key] = val return json.dumps(d) def clean(self): if self.showJoyRideElement and not self.showJoyRideElementOn: raise ValidationError(_('showJoyRideElementOn field is required if showJoyRideElement is given')) super(JoyRide, self).clean() def save(self, *args, **kwargs): if not self.id: self.slug = slugify(self.name) super(JoyRide, self).save(*args, **kwargs) def __unicode__(self): return self.name class JoyRideSteps(models.Model): class Meta: verbose_name = _('Joy Ride Step') verbose_name_plural = _('Joy Ride Steps') ordering = ['position', ] joyride = models.ForeignKey(JoyRide, related_name='steps') header = models.CharField( _('Step Header'), max_length=255, null=True, blank=True, help_text=_('The step header conent'), ) content = models.TextField( _('Step Content'), max_length=255, help_text=_('The content for step, can be valid html'), ) button = models.CharField( max_length=50, default='Next', ) attachId = models.CharField( 'data-id', max_length=100, null=True, blank=True, help_text=_('Attach this step to particular dom element by id') ) attachClass = models.CharField( 'data-class', max_length=100, null=True, blank=True, help_text=_('Attach this step to particular dom element by class') ) options = models.CharField( _('Options'), max_length=255, null=True, blank=True, help_text=_('Custom attributes related to step which will be used in data-options, \ i.e. tipLocation:top;tipAnimation:fade'), ) cssClass = models.CharField( max_length=50, null=True, blank=True, help_text=_('A custom css class name for tip'), ) position = PositionField(collection='joyride', default=0) def clean(self): if (self.attachId and self.attachClass) or (not self.attachId and not self.attachClass): raise ValidationError(_('Either provide data-id or data-class')) super(JoyRideSteps, self).clean() def __unicode__(self): return self.header or self.content[:20] class JoyRideHistory(models.Model): class Meta: verbose_name = _('Joy Ride History') ordering = ['created', ] unique_together = ('joyride', 'user') joyride = models.ForeignKey(JoyRide, related_name='views') user = models.ForeignKey(USER_MODEL, related_name='joyrides') viewed = models.BooleanField(default=True) created = models.DateTimeField(default=timezone.now)
[ "json.loads", "django.db.models.ForeignKey", "django.db.models.CharField", "django.core.serializers.serialize", "django.db.models.BooleanField", "django.db.models.SlugField", "json.dumps", "positions.fields.PositionField", "django.template.defaultfilters.slugify", "django.db.models.DateTimeField", "django.utils.translation.ugettext_noop" ]
[((3036, 3068), 'django.db.models.SlugField', 'models.SlugField', ([], {'editable': '(False)'}), '(editable=False)\n', (3052, 3068), False, 'from django.db import models\n'), ((9575, 9623), 'django.db.models.ForeignKey', 'models.ForeignKey', (['JoyRide'], {'related_name': '"""steps"""'}), "(JoyRide, related_name='steps')\n", (9592, 9623), False, 'from django.db import models\n'), ((9994, 10041), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'default': '"""Next"""'}), "(max_length=50, default='Next')\n", (10010, 10041), False, 'from django.db import models\n'), ((10954, 11000), 'positions.fields.PositionField', 'PositionField', ([], {'collection': '"""joyride"""', 'default': '(0)'}), "(collection='joyride', default=0)\n", (10967, 11000), False, 'from positions.fields import PositionField\n'), ((11537, 11585), 'django.db.models.ForeignKey', 'models.ForeignKey', (['JoyRide'], {'related_name': '"""views"""'}), "(JoyRide, related_name='views')\n", (11554, 11585), False, 'from django.db import models\n'), ((11598, 11652), 'django.db.models.ForeignKey', 'models.ForeignKey', (['USER_MODEL'], {'related_name': '"""joyrides"""'}), "(USER_MODEL, related_name='joyrides')\n", (11615, 11652), False, 'from django.db import models\n'), ((11667, 11700), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (11686, 11700), False, 'from django.db import models\n'), ((11716, 11758), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'timezone.now'}), '(default=timezone.now)\n', (11736, 11758), False, 'from django.db import models\n'), ((1870, 1883), 'django.utils.translation.ugettext_noop', '_', (['"""Joy Ride"""'], {}), "('Joy Ride')\n", (1871, 1883), True, 'from django.utils.translation import ugettext_noop as _\n'), ((1915, 1929), 'django.utils.translation.ugettext_noop', '_', (['"""Joy Rides"""'], {}), "('Joy Rides')\n", (1916, 1929), True, 'from django.utils.translation import ugettext_noop as _\n'), ((2541, 2552), 'django.utils.translation.ugettext_noop', '_', (['"""<NAME>"""'], {}), "('<NAME>')\n", (2542, 2552), True, 'from django.utils.translation import ugettext_noop as _\n'), ((2748, 2761), 'django.utils.translation.ugettext_noop', '_', (['"""Page URL"""'], {}), "('Page URL')\n", (2749, 2761), True, 'from django.utils.translation import ugettext_noop as _\n'), ((8058, 8076), 'django.utils.translation.ugettext_noop', '_', (['"""Creation Date"""'], {}), "('Creation Date')\n", (8059, 8076), True, 'from django.utils.translation import ugettext_noop as _\n'), ((8270, 8307), 'django.core.serializers.serialize', 'serializers.serialize', (['"""json"""', '[self]'], {}), "('json', [self])\n", (8291, 8307), False, 'from django.core import serializers\n'), ((8878, 8891), 'json.dumps', 'json.dumps', (['d'], {}), '(d)\n', (8888, 8891), False, 'import json\n'), ((9449, 9467), 'django.utils.translation.ugettext_noop', '_', (['"""Joy Ride Step"""'], {}), "('Joy Ride Step')\n", (9450, 9467), True, 'from django.utils.translation import ugettext_noop as _\n'), ((9499, 9518), 'django.utils.translation.ugettext_noop', '_', (['"""Joy Ride Steps"""'], {}), "('Joy Ride Steps')\n", (9500, 9518), True, 'from django.utils.translation import ugettext_noop as _\n'), ((9671, 9687), 'django.utils.translation.ugettext_noop', '_', (['"""Step Header"""'], {}), "('Step Header')\n", (9672, 9687), True, 'from django.utils.translation import ugettext_noop as _\n'), ((9858, 9875), 'django.utils.translation.ugettext_noop', '_', 
(['"""Step Content"""'], {}), "('Step Content')\n", (9859, 9875), True, 'from django.utils.translation import ugettext_noop as _\n'), ((10537, 10549), 'django.utils.translation.ugettext_noop', '_', (['"""Options"""'], {}), "('Options')\n", (10538, 10549), True, 'from django.utils.translation import ugettext_noop as _\n'), ((11413, 11434), 'django.utils.translation.ugettext_noop', '_', (['"""Joy Ride History"""'], {}), "('Joy Ride History')\n", (11414, 11434), True, 'from django.utils.translation import ugettext_noop as _\n'), ((2159, 2167), 'django.utils.translation.ugettext_noop', '_', (['"""top"""'], {}), "('top')\n", (2160, 2167), True, 'from django.utils.translation import ugettext_noop as _\n'), ((2201, 2212), 'django.utils.translation.ugettext_noop', '_', (['"""bottom"""'], {}), "('bottom')\n", (2202, 2212), True, 'from django.utils.translation import ugettext_noop as _\n'), ((2245, 2255), 'django.utils.translation.ugettext_noop', '_', (['"""right"""'], {}), "('right')\n", (2246, 2255), True, 'from django.utils.translation import ugettext_noop as _\n'), ((2287, 2296), 'django.utils.translation.ugettext_noop', '_', (['"""left"""'], {}), "('left')\n", (2288, 2296), True, 'from django.utils.translation import ugettext_noop as _\n'), ((2436, 2444), 'django.utils.translation.ugettext_noop', '_', (['"""pop"""'], {}), "('pop')\n", (2437, 2444), True, 'from django.utils.translation import ugettext_noop as _\n'), ((2477, 2486), 'django.utils.translation.ugettext_noop', '_', (['"""fade"""'], {}), "('fade')\n", (2478, 2486), True, 'from django.utils.translation import ugettext_noop as _\n'), ((2619, 2696), 'django.utils.translation.ugettext_noop', '_', (['"""This will be slugify automatically and will be used as ID for a joy ride"""'], {}), "('This will be slugify automatically and will be used as ID for a joy ride')\n", (2620, 2696), True, 'from django.utils.translation import ugettext_noop as _\n'), ((2848, 3019), 'django.utils.translation.ugettext_noop', '_', (['"""The url e.g. /about/ or url regex /abc/\\\\d+/ of the page on which this joyride will be activated. If left blank joyride will be activated on global scope"""'], {}), "('The url e.g. /about/ or url regex /abc/\\\\d+/ of the page on which this joyride will be activated. 
If left blank joyride will be activated on global scope'\n )\n", (2849, 3019), True, 'from django.utils.translation import ugettext_noop as _\n'), ((3226, 3270), 'django.utils.translation.ugettext_noop', '_', (['""""top" or "bottom" in relation to parent"""'], {}), '(\'"top" or "bottom" in relation to parent\')\n', (3227, 3270), True, 'from django.utils.translation import ugettext_noop as _\n'), ((3384, 3420), 'django.utils.translation.ugettext_noop', '_', (['"""Override on a per tooltip bases"""'], {}), "('Override on a per tooltip bases')\n", (3385, 3420), True, 'from django.utils.translation import ugettext_noop as _\n'), ((3506, 3536), 'django.utils.translation.ugettext_noop', '_', (['"""Whether to scroll to tips"""'], {}), "('Whether to scroll to tips')\n", (3507, 3536), True, 'from django.utils.translation import ugettext_noop as _\n'), ((3634, 3675), 'django.utils.translation.ugettext_noop', '_', (['"""Page scrolling speed in milliseconds"""'], {}), "('Page scrolling speed in milliseconds')\n", (3635, 3675), True, 'from django.utils.translation import ugettext_noop as _\n'), ((3765, 3826), 'django.utils.translation.ugettext_noop', '_', (['"""0 = no timer , all other numbers = timer in milliseconds"""'], {}), "('0 = no timer , all other numbers = timer in milliseconds')\n", (3766, 3826), True, 'from django.utils.translation import ugettext_noop as _\n'), ((3916, 3974), 'django.utils.translation.ugettext_noop', '_', (['"""true or false - false tour starts when restart called"""'], {}), "('true or false - false tour starts when restart called')\n", (3917, 3974), True, 'from django.utils.translation import ugettext_noop as _\n'), ((4071, 4147), 'django.utils.translation.ugettext_noop', '_', (['"""true or false - true requires clicking the first button start the timer"""'], {}), "('true or false - true requires clicking the first button start the timer')\n", (4072, 4147), True, 'from django.utils.translation import ugettext_noop as _\n'), ((4243, 4311), 'django.utils.translation.ugettext_noop', '_', (['"""the index of the tooltip you want to start on (index of the li)"""'], {}), "('the index of the tooltip you want to start on (index of the li)')\n", (4244, 4311), True, 'from django.utils.translation import ugettext_noop as _\n'), ((4401, 4460), 'django.utils.translation.ugettext_noop', '_', (['"""true or false to control whether a next button is used"""'], {}), "('true or false to control whether a next button is used')\n", (4402, 4460), True, 'from django.utils.translation import ugettext_noop as _\n'), ((4627, 4659), 'django.utils.translation.ugettext_noop', '_', (['""""pop" or "fade" in each tip"""'], {}), '(\'"pop" or "fade" in each tip\')\n', (4628, 4659), True, 'from django.utils.translation import ugettext_noop as _\n'), ((4767, 4852), 'django.utils.translation.ugettext_noop', '_', (['"""when tipAnimation = "fade" this is speed in milliseconds for the transition"""'], {}), '(\'when tipAnimation = "fade" this is speed in milliseconds for the transition\'\n )\n', (4768, 4852), True, 'from django.utils.translation import ugettext_noop as _\n'), ((4940, 4994), 'django.utils.translation.ugettext_noop', '_', (['"""true or false to control whether cookies are used"""'], {}), "('true or false to control whether cookies are used')\n", (4941, 4994), True, 'from django.utils.translation import ugettext_noop as _\n'), ((5110, 5141), 'django.utils.translation.ugettext_noop', '_', (['"""Name the cookie you\'ll use"""'], {}), '("Name the cookie you\'ll use")\n', (5111, 5141), True, 'from 
django.utils.translation import ugettext_noop as _\n'), ((5274, 5342), 'django.utils.translation.ugettext_noop', '_', (['"""Will this cookie be attached to a domain, ie. ".notableapp.com\\""""'], {}), '(\'Will this cookie be attached to a domain, ie. ".notableapp.com"\')\n', (5275, 5342), True, 'from django.utils.translation import ugettext_noop as _\n'), ((5472, 5532), 'django.utils.translation.ugettext_noop', '_', (['"""Set to "/" if you want the cookie for the whole website"""'], {}), '(\'Set to "/" if you want the cookie for the whole website\')\n', (5473, 5532), True, 'from django.utils.translation import ugettext_noop as _\n'), ((5625, 5683), 'django.utils.translation.ugettext_noop', '_', (['"""true or false to control whether localstorage is used"""'], {}), "('true or false to control whether localstorage is used')\n", (5626, 5683), True, 'from django.utils.translation import ugettext_noop as _\n'), ((5804, 5832), 'django.utils.translation.ugettext_noop', '_', (['"""Keyname in localstorage"""'], {}), "('Keyname in localstorage')\n", (5805, 5832), True, 'from django.utils.translation import ugettext_noop as _\n'), ((5948, 5983), 'django.utils.translation.ugettext_noop', '_', (['"""Where will the tip be attached"""'], {}), "('Where will the tip be attached')\n", (5949, 5983), True, 'from django.utils.translation import ugettext_noop as _\n'), ((6069, 6122), 'django.utils.translation.ugettext_noop', '_', (['"""Whether to cover page with modal during the tour"""'], {}), "('Whether to cover page with modal during the tour')\n", (6070, 6122), True, 'from django.utils.translation import ugettext_noop as _\n'), ((6209, 6296), 'django.utils.translation.ugettext_noop', '_', (['"""Whether to expose the elements at each step in the tour (requires modal:true)"""'], {}), "('Whether to expose the elements at each step in the tour (requires modal:true)'\n )\n", (6210, 6296), True, 'from django.utils.translation import ugettext_noop as _\n'), ((6429, 6484), 'django.utils.translation.ugettext_noop', '_', (['"""A method to call after an element has been exposed"""'], {}), "('A method to call after an element has been exposed')\n", (6430, 6484), True, 'from django.utils.translation import ugettext_noop as _\n'), ((6619, 6716), 'django.utils.translation.ugettext_noop', '_', (['"""A method to call before the tour starts (passed index, tip, and cloned exposed element)"""'], {}), "('A method to call before the tour starts (passed index, tip, and cloned exposed element)'\n )\n", (6620, 6716), True, 'from django.utils.translation import ugettext_noop as _\n'), ((6847, 6912), 'django.utils.translation.ugettext_noop', '_', (['"""A method to call once the tour closes (canceled or complete)"""'], {}), "('A method to call once the tour closes (canceled or complete)')\n", (6848, 6912), True, 'from django.utils.translation import ugettext_noop as _\n'), ((7047, 7085), 'django.utils.translation.ugettext_noop', '_', (['"""A method to call before each step"""'], {}), "('A method to call before each step')\n", (7048, 7085), True, 'from django.utils.translation import ugettext_noop as _\n'), ((7221, 7258), 'django.utils.translation.ugettext_noop', '_', (['"""A method to call after each step"""'], {}), "('A method to call after each step')\n", (7222, 7258), True, 'from django.utils.translation import ugettext_noop as _\n'), ((7396, 7553), 'django.utils.translation.ugettext_noop', '_', (['"""A DOM element id or class, a method must be provided in showJoyRideElementOn, if this is left blank then JoyRide will be shown on 
page load"""'], {}), "('A DOM element id or class, a method must be provided in showJoyRideElementOn, if this is left blank then JoyRide will be shown on page load'\n )\n", (7397, 7553), True, 'from django.utils.translation import ugettext_noop as _\n'), ((7691, 7790), 'django.utils.translation.ugettext_noop', '_', (['"""When to show JoyRide i.e "fous", "click". This must be set if showJoyRideElement is given"""'], {}), '(\'When to show JoyRide i.e "fous", "click". This must be set if showJoyRideElement is given\'\n )\n', (7692, 7790), True, 'from django.utils.translation import ugettext_noop as _\n'), ((7912, 8008), 'django.utils.translation.ugettext_noop', '_', (['"""IDs of joyrides which should be destroyed before invoking this joyride e.g. #abc, #cde"""'], {}), "('IDs of joyrides which should be destroyed before invoking this joyride e.g. #abc, #cde'\n )\n", (7913, 8008), True, 'from django.utils.translation import ugettext_noop as _\n'), ((8128, 8162), 'django.utils.translation.ugettext_noop', '_', (['"""Date and Time of when created"""'], {}), "('Date and Time of when created')\n", (8129, 8162), True, 'from django.utils.translation import ugettext_noop as _\n'), ((9238, 9256), 'django.template.defaultfilters.slugify', 'slugify', (['self.name'], {}), '(self.name)\n', (9245, 9256), False, 'from django.template.defaultfilters import slugify\n'), ((9774, 9801), 'django.utils.translation.ugettext_noop', '_', (['"""The step header conent"""'], {}), "('The step header conent')\n", (9775, 9801), True, 'from django.utils.translation import ugettext_noop as _\n'), ((9921, 9965), 'django.utils.translation.ugettext_noop', '_', (['"""The content for step, can be valid html"""'], {}), "('The content for step, can be valid html')\n", (9922, 9965), True, 'from django.utils.translation import ugettext_noop as _\n'), ((10213, 10266), 'django.utils.translation.ugettext_noop', '_', (['"""Attach this step to particular dom element by id"""'], {}), "('Attach this step to particular dom element by id')\n", (10214, 10266), True, 'from django.utils.translation import ugettext_noop as _\n'), ((10425, 10481), 'django.utils.translation.ugettext_noop', '_', (['"""Attach this step to particular dom element by class"""'], {}), "('Attach this step to particular dom element by class')\n", (10426, 10481), True, 'from django.utils.translation import ugettext_noop as _\n'), ((10636, 10762), 'django.utils.translation.ugettext_noop', '_', (['"""Custom attributes related to step which will be used in data-options, i.e. tipLocation:top;tipAnimation:fade"""'], {}), "('Custom attributes related to step which will be used in data-options, i.e. 
tipLocation:top;tipAnimation:fade'\n )\n", (10637, 10762), True, 'from django.utils.translation import ugettext_noop as _\n'), ((10887, 10923), 'django.utils.translation.ugettext_noop', '_', (['"""A custom css class name for tip"""'], {}), "('A custom css class name for tip')\n", (10888, 10923), True, 'from django.utils.translation import ugettext_noop as _\n'), ((8321, 8334), 'json.loads', 'json.loads', (['j'], {}), '(j)\n', (8331, 8334), False, 'import json\n'), ((9026, 9100), 'django.utils.translation.ugettext_noop', '_', (['"""showJoyRideElementOn field is required if showJoyRideElement is given"""'], {}), "('showJoyRideElementOn field is required if showJoyRideElement is given')\n", (9027, 9100), True, 'from django.utils.translation import ugettext_noop as _\n'), ((11162, 11203), 'django.utils.translation.ugettext_noop', '_', (['"""Either provide data-id or data-class"""'], {}), "('Either provide data-id or data-class')\n", (11163, 11203), True, 'from django.utils.translation import ugettext_noop as _\n')]
import pytest import numpy as np import pandas as pd from pandas import Categorical, Series, CategoricalIndex from pandas.core.dtypes.concat import union_categoricals from pandas.util import testing as tm class TestUnionCategoricals(object): def test_union_categorical(self): # GH 13361 data = [ (list('abc'), list('abd'), list('abcabd')), ([0, 1, 2], [2, 3, 4], [0, 1, 2, 2, 3, 4]), ([0, 1.2, 2], [2, 3.4, 4], [0, 1.2, 2, 2, 3.4, 4]), (['b', 'b', np.nan, 'a'], ['a', np.nan, 'c'], ['b', 'b', np.nan, 'a', 'a', np.nan, 'c']), (pd.date_range('2014-01-01', '2014-01-05'), pd.date_range('2014-01-06', '2014-01-07'), pd.date_range('2014-01-01', '2014-01-07')), (pd.date_range('2014-01-01', '2014-01-05', tz='US/Central'), pd.date_range('2014-01-06', '2014-01-07', tz='US/Central'), pd.date_range('2014-01-01', '2014-01-07', tz='US/Central')), (pd.period_range('2014-01-01', '2014-01-05'), pd.period_range('2014-01-06', '2014-01-07'), pd.period_range('2014-01-01', '2014-01-07')), ] for a, b, combined in data: for box in [Categorical, CategoricalIndex, Series]: result = union_categoricals([box(Categorical(a)), box(Categorical(b))]) expected = Categorical(combined) tm.assert_categorical_equal(result, expected, check_category_order=True) # new categories ordered by appearance s = Categorical(['x', 'y', 'z']) s2 = Categorical(['a', 'b', 'c']) result = union_categoricals([s, s2]) expected = Categorical(['x', 'y', 'z', 'a', 'b', 'c'], categories=['x', 'y', 'z', 'a', 'b', 'c']) tm.assert_categorical_equal(result, expected) s = Categorical([0, 1.2, 2], ordered=True) s2 = Categorical([0, 1.2, 2], ordered=True) result = union_categoricals([s, s2]) expected = Categorical([0, 1.2, 2, 0, 1.2, 2], ordered=True) tm.assert_categorical_equal(result, expected) # must exactly match types s = Categorical([0, 1.2, 2]) s2 = Categorical([2, 3, 4]) msg = 'dtype of categories must be the same' with tm.assert_raises_regex(TypeError, msg): union_categoricals([s, s2]) msg = 'No Categoricals to union' with tm.assert_raises_regex(ValueError, msg): union_categoricals([]) def test_union_categoricals_nan(self): # GH 13759 res = union_categoricals([pd.Categorical([1, 2, np.nan]), pd.Categorical([3, 2, np.nan])]) exp = Categorical([1, 2, np.nan, 3, 2, np.nan]) tm.assert_categorical_equal(res, exp) res = union_categoricals([pd.Categorical(['A', 'B']), pd.Categorical(['B', 'B', np.nan])]) exp = Categorical(['A', 'B', 'B', 'B', np.nan]) tm.assert_categorical_equal(res, exp) val1 = [pd.Timestamp('2011-01-01'), pd.Timestamp('2011-03-01'), pd.NaT] val2 = [pd.NaT, pd.Timestamp('2011-01-01'), pd.Timestamp('2011-02-01')] res = union_categoricals([pd.Categorical(val1), pd.Categorical(val2)]) exp = Categorical(val1 + val2, categories=[pd.Timestamp('2011-01-01'), pd.Timestamp('2011-03-01'), pd.Timestamp('2011-02-01')]) tm.assert_categorical_equal(res, exp) # all NaN res = union_categoricals([pd.Categorical([np.nan, np.nan]), pd.Categorical(['X'])]) exp = Categorical([np.nan, np.nan, 'X']) tm.assert_categorical_equal(res, exp) res = union_categoricals([pd.Categorical([np.nan, np.nan]), pd.Categorical([np.nan, np.nan])]) exp = Categorical([np.nan, np.nan, np.nan, np.nan]) tm.assert_categorical_equal(res, exp) def test_union_categoricals_empty(self): # GH 13759 res = union_categoricals([pd.Categorical([]), pd.Categorical([])]) exp = Categorical([]) tm.assert_categorical_equal(res, exp) res = union_categoricals([pd.Categorical([]), pd.Categorical([1.0])]) exp = Categorical([1.0]) tm.assert_categorical_equal(res, exp) # to make dtype equal nanc = 
pd.Categorical(np.array([np.nan], dtype=np.float64)) res = union_categoricals([nanc, pd.Categorical([])]) tm.assert_categorical_equal(res, nanc) def test_union_categorical_same_category(self): # check fastpath c1 = Categorical([1, 2, 3, 4], categories=[1, 2, 3, 4]) c2 = Categorical([3, 2, 1, np.nan], categories=[1, 2, 3, 4]) res = union_categoricals([c1, c2]) exp = Categorical([1, 2, 3, 4, 3, 2, 1, np.nan], categories=[1, 2, 3, 4]) tm.assert_categorical_equal(res, exp) c1 = Categorical(['z', 'z', 'z'], categories=['x', 'y', 'z']) c2 = Categorical(['x', 'x', 'x'], categories=['x', 'y', 'z']) res = union_categoricals([c1, c2]) exp = Categorical(['z', 'z', 'z', 'x', 'x', 'x'], categories=['x', 'y', 'z']) tm.assert_categorical_equal(res, exp) def test_union_categoricals_ordered(self): c1 = Categorical([1, 2, 3], ordered=True) c2 = Categorical([1, 2, 3], ordered=False) msg = 'Categorical.ordered must be the same' with tm.assert_raises_regex(TypeError, msg): union_categoricals([c1, c2]) res = union_categoricals([c1, c1]) exp = Categorical([1, 2, 3, 1, 2, 3], ordered=True) tm.assert_categorical_equal(res, exp) c1 = Categorical([1, 2, 3, np.nan], ordered=True) c2 = Categorical([3, 2], categories=[1, 2, 3], ordered=True) res = union_categoricals([c1, c2]) exp = Categorical([1, 2, 3, np.nan, 3, 2], ordered=True) tm.assert_categorical_equal(res, exp) c1 = Categorical([1, 2, 3], ordered=True) c2 = Categorical([1, 2, 3], categories=[3, 2, 1], ordered=True) msg = "to union ordered Categoricals, all categories must be the same" with tm.assert_raises_regex(TypeError, msg): union_categoricals([c1, c2]) def test_union_categoricals_ignore_order(self): # GH 15219 c1 = Categorical([1, 2, 3], ordered=True) c2 = Categorical([1, 2, 3], ordered=False) res = union_categoricals([c1, c2], ignore_order=True) exp = Categorical([1, 2, 3, 1, 2, 3]) tm.assert_categorical_equal(res, exp) msg = 'Categorical.ordered must be the same' with tm.assert_raises_regex(TypeError, msg): union_categoricals([c1, c2], ignore_order=False) res = union_categoricals([c1, c1], ignore_order=True) exp = Categorical([1, 2, 3, 1, 2, 3]) tm.assert_categorical_equal(res, exp) res = union_categoricals([c1, c1], ignore_order=False) exp = Categorical([1, 2, 3, 1, 2, 3], categories=[1, 2, 3], ordered=True) tm.assert_categorical_equal(res, exp) c1 = Categorical([1, 2, 3, np.nan], ordered=True) c2 = Categorical([3, 2], categories=[1, 2, 3], ordered=True) res = union_categoricals([c1, c2], ignore_order=True) exp = Categorical([1, 2, 3, np.nan, 3, 2]) tm.assert_categorical_equal(res, exp) c1 = Categorical([1, 2, 3], ordered=True) c2 = Categorical([1, 2, 3], categories=[3, 2, 1], ordered=True) res = union_categoricals([c1, c2], ignore_order=True) exp = Categorical([1, 2, 3, 1, 2, 3]) tm.assert_categorical_equal(res, exp) res = union_categoricals([c2, c1], ignore_order=True, sort_categories=True) exp = Categorical([1, 2, 3, 1, 2, 3], categories=[1, 2, 3]) tm.assert_categorical_equal(res, exp) c1 = Categorical([1, 2, 3], ordered=True) c2 = Categorical([4, 5, 6], ordered=True) result = union_categoricals([c1, c2], ignore_order=True) expected = Categorical([1, 2, 3, 4, 5, 6]) tm.assert_categorical_equal(result, expected) msg = "to union ordered Categoricals, all categories must be the same" with tm.assert_raises_regex(TypeError, msg): union_categoricals([c1, c2], ignore_order=False) with tm.assert_raises_regex(TypeError, msg): union_categoricals([c1, c2]) def test_union_categoricals_sort(self): # GH 13846 c1 = Categorical(['x', 'y', 'z']) c2 = 
Categorical(['a', 'b', 'c']) result = union_categoricals([c1, c2], sort_categories=True) expected = Categorical(['x', 'y', 'z', 'a', 'b', 'c'], categories=['a', 'b', 'c', 'x', 'y', 'z']) tm.assert_categorical_equal(result, expected) # fastpath c1 = Categorical(['a', 'b'], categories=['b', 'a', 'c']) c2 = Categorical(['b', 'c'], categories=['b', 'a', 'c']) result = union_categoricals([c1, c2], sort_categories=True) expected = Categorical(['a', 'b', 'b', 'c'], categories=['a', 'b', 'c']) tm.assert_categorical_equal(result, expected) c1 = Categorical(['a', 'b'], categories=['c', 'a', 'b']) c2 = Categorical(['b', 'c'], categories=['c', 'a', 'b']) result = union_categoricals([c1, c2], sort_categories=True) expected = Categorical(['a', 'b', 'b', 'c'], categories=['a', 'b', 'c']) tm.assert_categorical_equal(result, expected) # fastpath - skip resort c1 = Categorical(['a', 'b'], categories=['a', 'b', 'c']) c2 = Categorical(['b', 'c'], categories=['a', 'b', 'c']) result = union_categoricals([c1, c2], sort_categories=True) expected = Categorical(['a', 'b', 'b', 'c'], categories=['a', 'b', 'c']) tm.assert_categorical_equal(result, expected) c1 = Categorical(['x', np.nan]) c2 = Categorical([np.nan, 'b']) result = union_categoricals([c1, c2], sort_categories=True) expected = Categorical(['x', np.nan, np.nan, 'b'], categories=['b', 'x']) tm.assert_categorical_equal(result, expected) c1 = Categorical([np.nan]) c2 = Categorical([np.nan]) result = union_categoricals([c1, c2], sort_categories=True) expected = Categorical([np.nan, np.nan], categories=[]) tm.assert_categorical_equal(result, expected) c1 = Categorical([]) c2 = Categorical([]) result = union_categoricals([c1, c2], sort_categories=True) expected = Categorical([]) tm.assert_categorical_equal(result, expected) c1 = Categorical(['b', 'a'], categories=['b', 'a', 'c'], ordered=True) c2 = Categorical(['a', 'c'], categories=['b', 'a', 'c'], ordered=True) with pytest.raises(TypeError): union_categoricals([c1, c2], sort_categories=True) def test_union_categoricals_sort_false(self): # GH 13846 c1 = Categorical(['x', 'y', 'z']) c2 = Categorical(['a', 'b', 'c']) result = union_categoricals([c1, c2], sort_categories=False) expected = Categorical(['x', 'y', 'z', 'a', 'b', 'c'], categories=['x', 'y', 'z', 'a', 'b', 'c']) tm.assert_categorical_equal(result, expected) # fastpath c1 = Categorical(['a', 'b'], categories=['b', 'a', 'c']) c2 = Categorical(['b', 'c'], categories=['b', 'a', 'c']) result = union_categoricals([c1, c2], sort_categories=False) expected = Categorical(['a', 'b', 'b', 'c'], categories=['b', 'a', 'c']) tm.assert_categorical_equal(result, expected) # fastpath - skip resort c1 = Categorical(['a', 'b'], categories=['a', 'b', 'c']) c2 = Categorical(['b', 'c'], categories=['a', 'b', 'c']) result = union_categoricals([c1, c2], sort_categories=False) expected = Categorical(['a', 'b', 'b', 'c'], categories=['a', 'b', 'c']) tm.assert_categorical_equal(result, expected) c1 = Categorical(['x', np.nan]) c2 = Categorical([np.nan, 'b']) result = union_categoricals([c1, c2], sort_categories=False) expected = Categorical(['x', np.nan, np.nan, 'b'], categories=['x', 'b']) tm.assert_categorical_equal(result, expected) c1 = Categorical([np.nan]) c2 = Categorical([np.nan]) result = union_categoricals([c1, c2], sort_categories=False) expected = Categorical([np.nan, np.nan], categories=[]) tm.assert_categorical_equal(result, expected) c1 = Categorical([]) c2 = Categorical([]) result = union_categoricals([c1, c2], sort_categories=False) expected = Categorical([]) 
tm.assert_categorical_equal(result, expected) c1 = Categorical(['b', 'a'], categories=['b', 'a', 'c'], ordered=True) c2 = Categorical(['a', 'c'], categories=['b', 'a', 'c'], ordered=True) result = union_categoricals([c1, c2], sort_categories=False) expected = Categorical(['b', 'a', 'a', 'c'], categories=['b', 'a', 'c'], ordered=True) tm.assert_categorical_equal(result, expected) def test_union_categorical_unwrap(self): # GH 14173 c1 = Categorical(['a', 'b']) c2 = pd.Series(['b', 'c'], dtype='category') result = union_categoricals([c1, c2]) expected = Categorical(['a', 'b', 'b', 'c']) tm.assert_categorical_equal(result, expected) c2 = CategoricalIndex(c2) result = union_categoricals([c1, c2]) tm.assert_categorical_equal(result, expected) c1 = Series(c1) result = union_categoricals([c1, c2]) tm.assert_categorical_equal(result, expected) with pytest.raises(TypeError): union_categoricals([c1, ['a', 'b', 'c']])
[ "pandas.core.dtypes.concat.union_categoricals", "pandas.Timestamp", "pandas.date_range", "pandas.period_range", "pandas.util.testing.assert_raises_regex", "pytest.raises", "numpy.array", "pandas.Series", "pandas.util.testing.assert_categorical_equal", "pandas.Categorical", "pandas.CategoricalIndex" ]
[((1665, 1693), 'pandas.Categorical', 'Categorical', (["['x', 'y', 'z']"], {}), "(['x', 'y', 'z'])\n", (1676, 1693), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((1707, 1735), 'pandas.Categorical', 'Categorical', (["['a', 'b', 'c']"], {}), "(['a', 'b', 'c'])\n", (1718, 1735), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((1753, 1780), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[s, s2]'], {}), '([s, s2])\n', (1771, 1780), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((1800, 1890), 'pandas.Categorical', 'Categorical', (["['x', 'y', 'z', 'a', 'b', 'c']"], {'categories': "['x', 'y', 'z', 'a', 'b', 'c']"}), "(['x', 'y', 'z', 'a', 'b', 'c'], categories=['x', 'y', 'z', 'a',\n 'b', 'c'])\n", (1811, 1890), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((1926, 1971), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['result', 'expected'], {}), '(result, expected)\n', (1953, 1971), True, 'from pandas.util import testing as tm\n'), ((1985, 2023), 'pandas.Categorical', 'Categorical', (['[0, 1.2, 2]'], {'ordered': '(True)'}), '([0, 1.2, 2], ordered=True)\n', (1996, 2023), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((2037, 2075), 'pandas.Categorical', 'Categorical', (['[0, 1.2, 2]'], {'ordered': '(True)'}), '([0, 1.2, 2], ordered=True)\n', (2048, 2075), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((2093, 2120), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[s, s2]'], {}), '([s, s2])\n', (2111, 2120), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((2140, 2189), 'pandas.Categorical', 'Categorical', (['[0, 1.2, 2, 0, 1.2, 2]'], {'ordered': '(True)'}), '([0, 1.2, 2, 0, 1.2, 2], ordered=True)\n', (2151, 2189), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((2198, 2243), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['result', 'expected'], {}), '(result, expected)\n', (2225, 2243), True, 'from pandas.util import testing as tm\n'), ((2292, 2316), 'pandas.Categorical', 'Categorical', (['[0, 1.2, 2]'], {}), '([0, 1.2, 2])\n', (2303, 2316), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((2330, 2352), 'pandas.Categorical', 'Categorical', (['[2, 3, 4]'], {}), '([2, 3, 4])\n', (2341, 2352), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((2840, 2881), 'pandas.Categorical', 'Categorical', (['[1, 2, np.nan, 3, 2, np.nan]'], {}), '([1, 2, np.nan, 3, 2, np.nan])\n', (2851, 2881), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((2890, 2927), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['res', 'exp'], {}), '(res, exp)\n', (2917, 2927), True, 'from pandas.util import testing as tm\n'), ((3076, 3117), 'pandas.Categorical', 'Categorical', (["['A', 'B', 'B', 'B', np.nan]"], {}), "(['A', 'B', 'B', 'B', np.nan])\n", (3087, 3117), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((3126, 3163), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['res', 'exp'], {}), '(res, exp)\n', (3153, 3163), True, 'from pandas.util import testing as tm\n'), ((3683, 3720), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['res', 'exp'], {}), '(res, exp)\n', (3710, 3720), True, 'from pandas.util import testing as tm\n'), ((3880, 3914), 
'pandas.Categorical', 'Categorical', (["[np.nan, np.nan, 'X']"], {}), "([np.nan, np.nan, 'X'])\n", (3891, 3914), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((3923, 3960), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['res', 'exp'], {}), '(res, exp)\n', (3950, 3960), True, 'from pandas.util import testing as tm\n'), ((4113, 4158), 'pandas.Categorical', 'Categorical', (['[np.nan, np.nan, np.nan, np.nan]'], {}), '([np.nan, np.nan, np.nan, np.nan])\n', (4124, 4158), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((4167, 4204), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['res', 'exp'], {}), '(res, exp)\n', (4194, 4204), True, 'from pandas.util import testing as tm\n'), ((4393, 4408), 'pandas.Categorical', 'Categorical', (['[]'], {}), '([])\n', (4404, 4408), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((4417, 4454), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['res', 'exp'], {}), '(res, exp)\n', (4444, 4454), True, 'from pandas.util import testing as tm\n'), ((4582, 4600), 'pandas.Categorical', 'Categorical', (['[1.0]'], {}), '([1.0])\n', (4593, 4600), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((4609, 4646), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['res', 'exp'], {}), '(res, exp)\n', (4636, 4646), True, 'from pandas.util import testing as tm\n'), ((4849, 4887), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['res', 'nanc'], {}), '(res, nanc)\n', (4876, 4887), True, 'from pandas.util import testing as tm\n'), ((4979, 5029), 'pandas.Categorical', 'Categorical', (['[1, 2, 3, 4]'], {'categories': '[1, 2, 3, 4]'}), '([1, 2, 3, 4], categories=[1, 2, 3, 4])\n', (4990, 5029), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((5043, 5098), 'pandas.Categorical', 'Categorical', (['[3, 2, 1, np.nan]'], {'categories': '[1, 2, 3, 4]'}), '([3, 2, 1, np.nan], categories=[1, 2, 3, 4])\n', (5054, 5098), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((5113, 5141), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {}), '([c1, c2])\n', (5131, 5141), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((5156, 5223), 'pandas.Categorical', 'Categorical', (['[1, 2, 3, 4, 3, 2, 1, np.nan]'], {'categories': '[1, 2, 3, 4]'}), '([1, 2, 3, 4, 3, 2, 1, np.nan], categories=[1, 2, 3, 4])\n', (5167, 5223), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((5258, 5295), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['res', 'exp'], {}), '(res, exp)\n', (5285, 5295), True, 'from pandas.util import testing as tm\n'), ((5310, 5366), 'pandas.Categorical', 'Categorical', (["['z', 'z', 'z']"], {'categories': "['x', 'y', 'z']"}), "(['z', 'z', 'z'], categories=['x', 'y', 'z'])\n", (5321, 5366), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((5380, 5436), 'pandas.Categorical', 'Categorical', (["['x', 'x', 'x']"], {'categories': "['x', 'y', 'z']"}), "(['x', 'x', 'x'], categories=['x', 'y', 'z'])\n", (5391, 5436), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((5451, 5479), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {}), '([c1, c2])\n', (5469, 5479), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), 
((5494, 5565), 'pandas.Categorical', 'Categorical', (["['z', 'z', 'z', 'x', 'x', 'x']"], {'categories': "['x', 'y', 'z']"}), "(['z', 'z', 'z', 'x', 'x', 'x'], categories=['x', 'y', 'z'])\n", (5505, 5565), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((5600, 5637), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['res', 'exp'], {}), '(res, exp)\n', (5627, 5637), True, 'from pandas.util import testing as tm\n'), ((5699, 5735), 'pandas.Categorical', 'Categorical', (['[1, 2, 3]'], {'ordered': '(True)'}), '([1, 2, 3], ordered=True)\n', (5710, 5735), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((5749, 5786), 'pandas.Categorical', 'Categorical', (['[1, 2, 3]'], {'ordered': '(False)'}), '([1, 2, 3], ordered=False)\n', (5760, 5786), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((5950, 5978), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c1]'], {}), '([c1, c1])\n', (5968, 5978), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((5993, 6038), 'pandas.Categorical', 'Categorical', (['[1, 2, 3, 1, 2, 3]'], {'ordered': '(True)'}), '([1, 2, 3, 1, 2, 3], ordered=True)\n', (6004, 6038), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((6047, 6084), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['res', 'exp'], {}), '(res, exp)\n', (6074, 6084), True, 'from pandas.util import testing as tm\n'), ((6099, 6143), 'pandas.Categorical', 'Categorical', (['[1, 2, 3, np.nan]'], {'ordered': '(True)'}), '([1, 2, 3, np.nan], ordered=True)\n', (6110, 6143), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((6157, 6212), 'pandas.Categorical', 'Categorical', (['[3, 2]'], {'categories': '[1, 2, 3]', 'ordered': '(True)'}), '([3, 2], categories=[1, 2, 3], ordered=True)\n', (6168, 6212), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((6228, 6256), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {}), '([c1, c2])\n', (6246, 6256), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((6271, 6321), 'pandas.Categorical', 'Categorical', (['[1, 2, 3, np.nan, 3, 2]'], {'ordered': '(True)'}), '([1, 2, 3, np.nan, 3, 2], ordered=True)\n', (6282, 6321), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((6330, 6367), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['res', 'exp'], {}), '(res, exp)\n', (6357, 6367), True, 'from pandas.util import testing as tm\n'), ((6382, 6418), 'pandas.Categorical', 'Categorical', (['[1, 2, 3]'], {'ordered': '(True)'}), '([1, 2, 3], ordered=True)\n', (6393, 6418), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((6432, 6490), 'pandas.Categorical', 'Categorical', (['[1, 2, 3]'], {'categories': '[3, 2, 1]', 'ordered': '(True)'}), '([1, 2, 3], categories=[3, 2, 1], ordered=True)\n', (6443, 6490), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((6750, 6786), 'pandas.Categorical', 'Categorical', (['[1, 2, 3]'], {'ordered': '(True)'}), '([1, 2, 3], ordered=True)\n', (6761, 6786), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((6800, 6837), 'pandas.Categorical', 'Categorical', (['[1, 2, 3]'], {'ordered': '(False)'}), '([1, 2, 3], ordered=False)\n', (6811, 6837), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((6853, 6900), 
'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {'ignore_order': '(True)'}), '([c1, c2], ignore_order=True)\n', (6871, 6900), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((6915, 6946), 'pandas.Categorical', 'Categorical', (['[1, 2, 3, 1, 2, 3]'], {}), '([1, 2, 3, 1, 2, 3])\n', (6926, 6946), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((6955, 6992), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['res', 'exp'], {}), '(res, exp)\n', (6982, 6992), True, 'from pandas.util import testing as tm\n'), ((7176, 7223), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c1]'], {'ignore_order': '(True)'}), '([c1, c1], ignore_order=True)\n', (7194, 7223), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((7238, 7269), 'pandas.Categorical', 'Categorical', (['[1, 2, 3, 1, 2, 3]'], {}), '([1, 2, 3, 1, 2, 3])\n', (7249, 7269), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((7278, 7315), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['res', 'exp'], {}), '(res, exp)\n', (7305, 7315), True, 'from pandas.util import testing as tm\n'), ((7331, 7379), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c1]'], {'ignore_order': '(False)'}), '([c1, c1], ignore_order=False)\n', (7349, 7379), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((7394, 7461), 'pandas.Categorical', 'Categorical', (['[1, 2, 3, 1, 2, 3]'], {'categories': '[1, 2, 3]', 'ordered': '(True)'}), '([1, 2, 3, 1, 2, 3], categories=[1, 2, 3], ordered=True)\n', (7405, 7461), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((7496, 7533), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['res', 'exp'], {}), '(res, exp)\n', (7523, 7533), True, 'from pandas.util import testing as tm\n'), ((7548, 7592), 'pandas.Categorical', 'Categorical', (['[1, 2, 3, np.nan]'], {'ordered': '(True)'}), '([1, 2, 3, np.nan], ordered=True)\n', (7559, 7592), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((7606, 7661), 'pandas.Categorical', 'Categorical', (['[3, 2]'], {'categories': '[1, 2, 3]', 'ordered': '(True)'}), '([3, 2], categories=[1, 2, 3], ordered=True)\n', (7617, 7661), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((7677, 7724), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {'ignore_order': '(True)'}), '([c1, c2], ignore_order=True)\n', (7695, 7724), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((7739, 7775), 'pandas.Categorical', 'Categorical', (['[1, 2, 3, np.nan, 3, 2]'], {}), '([1, 2, 3, np.nan, 3, 2])\n', (7750, 7775), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((7784, 7821), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['res', 'exp'], {}), '(res, exp)\n', (7811, 7821), True, 'from pandas.util import testing as tm\n'), ((7836, 7872), 'pandas.Categorical', 'Categorical', (['[1, 2, 3]'], {'ordered': '(True)'}), '([1, 2, 3], ordered=True)\n', (7847, 7872), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((7886, 7944), 'pandas.Categorical', 'Categorical', (['[1, 2, 3]'], {'categories': '[3, 2, 1]', 'ordered': '(True)'}), '([1, 2, 3], categories=[3, 2, 1], ordered=True)\n', (7897, 7944), False, 'from pandas import Categorical, Series, 
CategoricalIndex\n'), ((7960, 8007), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {'ignore_order': '(True)'}), '([c1, c2], ignore_order=True)\n', (7978, 8007), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((8022, 8053), 'pandas.Categorical', 'Categorical', (['[1, 2, 3, 1, 2, 3]'], {}), '([1, 2, 3, 1, 2, 3])\n', (8033, 8053), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((8062, 8099), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['res', 'exp'], {}), '(res, exp)\n', (8089, 8099), True, 'from pandas.util import testing as tm\n'), ((8115, 8184), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c2, c1]'], {'ignore_order': '(True)', 'sort_categories': '(True)'}), '([c2, c1], ignore_order=True, sort_categories=True)\n', (8133, 8184), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((8232, 8285), 'pandas.Categorical', 'Categorical', (['[1, 2, 3, 1, 2, 3]'], {'categories': '[1, 2, 3]'}), '([1, 2, 3, 1, 2, 3], categories=[1, 2, 3])\n', (8243, 8285), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((8294, 8331), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['res', 'exp'], {}), '(res, exp)\n', (8321, 8331), True, 'from pandas.util import testing as tm\n'), ((8346, 8382), 'pandas.Categorical', 'Categorical', (['[1, 2, 3]'], {'ordered': '(True)'}), '([1, 2, 3], ordered=True)\n', (8357, 8382), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((8396, 8432), 'pandas.Categorical', 'Categorical', (['[4, 5, 6]'], {'ordered': '(True)'}), '([4, 5, 6], ordered=True)\n', (8407, 8432), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((8450, 8497), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {'ignore_order': '(True)'}), '([c1, c2], ignore_order=True)\n', (8468, 8497), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((8517, 8548), 'pandas.Categorical', 'Categorical', (['[1, 2, 3, 4, 5, 6]'], {}), '([1, 2, 3, 4, 5, 6])\n', (8528, 8548), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((8557, 8602), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['result', 'expected'], {}), '(result, expected)\n', (8584, 8602), True, 'from pandas.util import testing as tm\n'), ((8969, 8997), 'pandas.Categorical', 'Categorical', (["['x', 'y', 'z']"], {}), "(['x', 'y', 'z'])\n", (8980, 8997), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((9011, 9039), 'pandas.Categorical', 'Categorical', (["['a', 'b', 'c']"], {}), "(['a', 'b', 'c'])\n", (9022, 9039), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((9057, 9107), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {'sort_categories': '(True)'}), '([c1, c2], sort_categories=True)\n', (9075, 9107), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((9127, 9217), 'pandas.Categorical', 'Categorical', (["['x', 'y', 'z', 'a', 'b', 'c']"], {'categories': "['a', 'b', 'c', 'x', 'y', 'z']"}), "(['x', 'y', 'z', 'a', 'b', 'c'], categories=['a', 'b', 'c', 'x',\n 'y', 'z'])\n", (9138, 9217), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((9253, 9298), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['result', 'expected'], {}), '(result, expected)\n', (9280, 9298), 
True, 'from pandas.util import testing as tm\n'), ((9332, 9383), 'pandas.Categorical', 'Categorical', (["['a', 'b']"], {'categories': "['b', 'a', 'c']"}), "(['a', 'b'], categories=['b', 'a', 'c'])\n", (9343, 9383), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((9397, 9448), 'pandas.Categorical', 'Categorical', (["['b', 'c']"], {'categories': "['b', 'a', 'c']"}), "(['b', 'c'], categories=['b', 'a', 'c'])\n", (9408, 9448), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((9466, 9516), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {'sort_categories': '(True)'}), '([c1, c2], sort_categories=True)\n', (9484, 9516), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((9536, 9597), 'pandas.Categorical', 'Categorical', (["['a', 'b', 'b', 'c']"], {'categories': "['a', 'b', 'c']"}), "(['a', 'b', 'b', 'c'], categories=['a', 'b', 'c'])\n", (9547, 9597), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((9637, 9682), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['result', 'expected'], {}), '(result, expected)\n', (9664, 9682), True, 'from pandas.util import testing as tm\n'), ((9697, 9748), 'pandas.Categorical', 'Categorical', (["['a', 'b']"], {'categories': "['c', 'a', 'b']"}), "(['a', 'b'], categories=['c', 'a', 'b'])\n", (9708, 9748), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((9762, 9813), 'pandas.Categorical', 'Categorical', (["['b', 'c']"], {'categories': "['c', 'a', 'b']"}), "(['b', 'c'], categories=['c', 'a', 'b'])\n", (9773, 9813), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((9831, 9881), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {'sort_categories': '(True)'}), '([c1, c2], sort_categories=True)\n', (9849, 9881), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((9901, 9962), 'pandas.Categorical', 'Categorical', (["['a', 'b', 'b', 'c']"], {'categories': "['a', 'b', 'c']"}), "(['a', 'b', 'b', 'c'], categories=['a', 'b', 'c'])\n", (9912, 9962), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((10002, 10047), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['result', 'expected'], {}), '(result, expected)\n', (10029, 10047), True, 'from pandas.util import testing as tm\n'), ((10095, 10146), 'pandas.Categorical', 'Categorical', (["['a', 'b']"], {'categories': "['a', 'b', 'c']"}), "(['a', 'b'], categories=['a', 'b', 'c'])\n", (10106, 10146), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((10160, 10211), 'pandas.Categorical', 'Categorical', (["['b', 'c']"], {'categories': "['a', 'b', 'c']"}), "(['b', 'c'], categories=['a', 'b', 'c'])\n", (10171, 10211), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((10229, 10279), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {'sort_categories': '(True)'}), '([c1, c2], sort_categories=True)\n', (10247, 10279), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((10299, 10360), 'pandas.Categorical', 'Categorical', (["['a', 'b', 'b', 'c']"], {'categories': "['a', 'b', 'c']"}), "(['a', 'b', 'b', 'c'], categories=['a', 'b', 'c'])\n", (10310, 10360), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((10400, 10445), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['result', 'expected'], 
{}), '(result, expected)\n', (10427, 10445), True, 'from pandas.util import testing as tm\n'), ((10460, 10486), 'pandas.Categorical', 'Categorical', (["['x', np.nan]"], {}), "(['x', np.nan])\n", (10471, 10486), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((10500, 10526), 'pandas.Categorical', 'Categorical', (["[np.nan, 'b']"], {}), "([np.nan, 'b'])\n", (10511, 10526), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((10544, 10594), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {'sort_categories': '(True)'}), '([c1, c2], sort_categories=True)\n', (10562, 10594), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((10614, 10676), 'pandas.Categorical', 'Categorical', (["['x', np.nan, np.nan, 'b']"], {'categories': "['b', 'x']"}), "(['x', np.nan, np.nan, 'b'], categories=['b', 'x'])\n", (10625, 10676), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((10716, 10761), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['result', 'expected'], {}), '(result, expected)\n', (10743, 10761), True, 'from pandas.util import testing as tm\n'), ((10776, 10797), 'pandas.Categorical', 'Categorical', (['[np.nan]'], {}), '([np.nan])\n', (10787, 10797), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((10811, 10832), 'pandas.Categorical', 'Categorical', (['[np.nan]'], {}), '([np.nan])\n', (10822, 10832), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((10850, 10900), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {'sort_categories': '(True)'}), '([c1, c2], sort_categories=True)\n', (10868, 10900), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((10920, 10964), 'pandas.Categorical', 'Categorical', (['[np.nan, np.nan]'], {'categories': '[]'}), '([np.nan, np.nan], categories=[])\n', (10931, 10964), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((10973, 11018), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['result', 'expected'], {}), '(result, expected)\n', (11000, 11018), True, 'from pandas.util import testing as tm\n'), ((11033, 11048), 'pandas.Categorical', 'Categorical', (['[]'], {}), '([])\n', (11044, 11048), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((11062, 11077), 'pandas.Categorical', 'Categorical', (['[]'], {}), '([])\n', (11073, 11077), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((11095, 11145), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {'sort_categories': '(True)'}), '([c1, c2], sort_categories=True)\n', (11113, 11145), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((11165, 11180), 'pandas.Categorical', 'Categorical', (['[]'], {}), '([])\n', (11176, 11180), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((11189, 11234), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['result', 'expected'], {}), '(result, expected)\n', (11216, 11234), True, 'from pandas.util import testing as tm\n'), ((11249, 11314), 'pandas.Categorical', 'Categorical', (["['b', 'a']"], {'categories': "['b', 'a', 'c']", 'ordered': '(True)'}), "(['b', 'a'], categories=['b', 'a', 'c'], ordered=True)\n", (11260, 11314), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((11328, 11393), 'pandas.Categorical', 'Categorical', 
(["['a', 'c']"], {'categories': "['b', 'a', 'c']", 'ordered': '(True)'}), "(['a', 'c'], categories=['b', 'a', 'c'], ordered=True)\n", (11339, 11393), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((11579, 11607), 'pandas.Categorical', 'Categorical', (["['x', 'y', 'z']"], {}), "(['x', 'y', 'z'])\n", (11590, 11607), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((11621, 11649), 'pandas.Categorical', 'Categorical', (["['a', 'b', 'c']"], {}), "(['a', 'b', 'c'])\n", (11632, 11649), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((11667, 11718), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {'sort_categories': '(False)'}), '([c1, c2], sort_categories=False)\n', (11685, 11718), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((11738, 11828), 'pandas.Categorical', 'Categorical', (["['x', 'y', 'z', 'a', 'b', 'c']"], {'categories': "['x', 'y', 'z', 'a', 'b', 'c']"}), "(['x', 'y', 'z', 'a', 'b', 'c'], categories=['x', 'y', 'z', 'a',\n 'b', 'c'])\n", (11749, 11828), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((11864, 11909), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['result', 'expected'], {}), '(result, expected)\n', (11891, 11909), True, 'from pandas.util import testing as tm\n'), ((11943, 11994), 'pandas.Categorical', 'Categorical', (["['a', 'b']"], {'categories': "['b', 'a', 'c']"}), "(['a', 'b'], categories=['b', 'a', 'c'])\n", (11954, 11994), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((12008, 12059), 'pandas.Categorical', 'Categorical', (["['b', 'c']"], {'categories': "['b', 'a', 'c']"}), "(['b', 'c'], categories=['b', 'a', 'c'])\n", (12019, 12059), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((12077, 12128), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {'sort_categories': '(False)'}), '([c1, c2], sort_categories=False)\n', (12095, 12128), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((12148, 12209), 'pandas.Categorical', 'Categorical', (["['a', 'b', 'b', 'c']"], {'categories': "['b', 'a', 'c']"}), "(['a', 'b', 'b', 'c'], categories=['b', 'a', 'c'])\n", (12159, 12209), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((12249, 12294), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['result', 'expected'], {}), '(result, expected)\n', (12276, 12294), True, 'from pandas.util import testing as tm\n'), ((12342, 12393), 'pandas.Categorical', 'Categorical', (["['a', 'b']"], {'categories': "['a', 'b', 'c']"}), "(['a', 'b'], categories=['a', 'b', 'c'])\n", (12353, 12393), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((12407, 12458), 'pandas.Categorical', 'Categorical', (["['b', 'c']"], {'categories': "['a', 'b', 'c']"}), "(['b', 'c'], categories=['a', 'b', 'c'])\n", (12418, 12458), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((12476, 12527), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {'sort_categories': '(False)'}), '([c1, c2], sort_categories=False)\n', (12494, 12527), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((12547, 12608), 'pandas.Categorical', 'Categorical', (["['a', 'b', 'b', 'c']"], {'categories': "['a', 'b', 'c']"}), "(['a', 'b', 'b', 'c'], categories=['a', 'b', 'c'])\n", (12558, 12608), False, 'from pandas import 
Categorical, Series, CategoricalIndex\n'), ((12648, 12693), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['result', 'expected'], {}), '(result, expected)\n', (12675, 12693), True, 'from pandas.util import testing as tm\n'), ((12708, 12734), 'pandas.Categorical', 'Categorical', (["['x', np.nan]"], {}), "(['x', np.nan])\n", (12719, 12734), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((12748, 12774), 'pandas.Categorical', 'Categorical', (["[np.nan, 'b']"], {}), "([np.nan, 'b'])\n", (12759, 12774), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((12792, 12843), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {'sort_categories': '(False)'}), '([c1, c2], sort_categories=False)\n', (12810, 12843), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((12863, 12925), 'pandas.Categorical', 'Categorical', (["['x', np.nan, np.nan, 'b']"], {'categories': "['x', 'b']"}), "(['x', np.nan, np.nan, 'b'], categories=['x', 'b'])\n", (12874, 12925), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((12965, 13010), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['result', 'expected'], {}), '(result, expected)\n', (12992, 13010), True, 'from pandas.util import testing as tm\n'), ((13025, 13046), 'pandas.Categorical', 'Categorical', (['[np.nan]'], {}), '([np.nan])\n', (13036, 13046), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((13060, 13081), 'pandas.Categorical', 'Categorical', (['[np.nan]'], {}), '([np.nan])\n', (13071, 13081), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((13099, 13150), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {'sort_categories': '(False)'}), '([c1, c2], sort_categories=False)\n', (13117, 13150), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((13170, 13214), 'pandas.Categorical', 'Categorical', (['[np.nan, np.nan]'], {'categories': '[]'}), '([np.nan, np.nan], categories=[])\n', (13181, 13214), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((13223, 13268), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['result', 'expected'], {}), '(result, expected)\n', (13250, 13268), True, 'from pandas.util import testing as tm\n'), ((13283, 13298), 'pandas.Categorical', 'Categorical', (['[]'], {}), '([])\n', (13294, 13298), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((13312, 13327), 'pandas.Categorical', 'Categorical', (['[]'], {}), '([])\n', (13323, 13327), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((13345, 13396), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {'sort_categories': '(False)'}), '([c1, c2], sort_categories=False)\n', (13363, 13396), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((13416, 13431), 'pandas.Categorical', 'Categorical', (['[]'], {}), '([])\n', (13427, 13431), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((13440, 13485), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['result', 'expected'], {}), '(result, expected)\n', (13467, 13485), True, 'from pandas.util import testing as tm\n'), ((13500, 13565), 'pandas.Categorical', 'Categorical', (["['b', 'a']"], {'categories': "['b', 'a', 'c']", 'ordered': '(True)'}), "(['b', 'a'], categories=['b', 'a', 
'c'], ordered=True)\n", (13511, 13565), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((13579, 13644), 'pandas.Categorical', 'Categorical', (["['a', 'c']"], {'categories': "['b', 'a', 'c']", 'ordered': '(True)'}), "(['a', 'c'], categories=['b', 'a', 'c'], ordered=True)\n", (13590, 13644), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((13662, 13713), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {'sort_categories': '(False)'}), '([c1, c2], sort_categories=False)\n', (13680, 13713), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((13733, 13808), 'pandas.Categorical', 'Categorical', (["['b', 'a', 'a', 'c']"], {'categories': "['b', 'a', 'c']", 'ordered': '(True)'}), "(['b', 'a', 'a', 'c'], categories=['b', 'a', 'c'], ordered=True)\n", (13744, 13808), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((13848, 13893), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['result', 'expected'], {}), '(result, expected)\n', (13875, 13893), True, 'from pandas.util import testing as tm\n'), ((13972, 13995), 'pandas.Categorical', 'Categorical', (["['a', 'b']"], {}), "(['a', 'b'])\n", (13983, 13995), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((14009, 14048), 'pandas.Series', 'pd.Series', (["['b', 'c']"], {'dtype': '"""category"""'}), "(['b', 'c'], dtype='category')\n", (14018, 14048), True, 'import pandas as pd\n'), ((14066, 14094), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {}), '([c1, c2])\n', (14084, 14094), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((14114, 14147), 'pandas.Categorical', 'Categorical', (["['a', 'b', 'b', 'c']"], {}), "(['a', 'b', 'b', 'c'])\n", (14125, 14147), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((14156, 14201), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['result', 'expected'], {}), '(result, expected)\n', (14183, 14201), True, 'from pandas.util import testing as tm\n'), ((14216, 14236), 'pandas.CategoricalIndex', 'CategoricalIndex', (['c2'], {}), '(c2)\n', (14232, 14236), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((14254, 14282), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {}), '([c1, c2])\n', (14272, 14282), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((14291, 14336), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['result', 'expected'], {}), '(result, expected)\n', (14318, 14336), True, 'from pandas.util import testing as tm\n'), ((14351, 14361), 'pandas.Series', 'Series', (['c1'], {}), '(c1)\n', (14357, 14361), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((14379, 14407), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {}), '([c1, c2])\n', (14397, 14407), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((14416, 14461), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['result', 'expected'], {}), '(result, expected)\n', (14443, 14461), True, 'from pandas.util import testing as tm\n'), ((2419, 2457), 'pandas.util.testing.assert_raises_regex', 'tm.assert_raises_regex', (['TypeError', 'msg'], {}), '(TypeError, msg)\n', (2441, 2457), True, 'from pandas.util import testing as tm\n'), ((2471, 2498), 
'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[s, s2]'], {}), '([s, s2])\n', (2489, 2498), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((2554, 2593), 'pandas.util.testing.assert_raises_regex', 'tm.assert_raises_regex', (['ValueError', 'msg'], {}), '(ValueError, msg)\n', (2576, 2593), True, 'from pandas.util import testing as tm\n'), ((2607, 2629), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[]'], {}), '([])\n', (2625, 2629), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((3181, 3207), 'pandas.Timestamp', 'pd.Timestamp', (['"""2011-01-01"""'], {}), "('2011-01-01')\n", (3193, 3207), True, 'import pandas as pd\n'), ((3209, 3235), 'pandas.Timestamp', 'pd.Timestamp', (['"""2011-03-01"""'], {}), "('2011-03-01')\n", (3221, 3235), True, 'import pandas as pd\n'), ((3285, 3311), 'pandas.Timestamp', 'pd.Timestamp', (['"""2011-01-01"""'], {}), "('2011-01-01')\n", (3297, 3311), True, 'import pandas as pd\n'), ((3329, 3355), 'pandas.Timestamp', 'pd.Timestamp', (['"""2011-02-01"""'], {}), "('2011-02-01')\n", (3341, 3355), True, 'import pandas as pd\n'), ((4708, 4744), 'numpy.array', 'np.array', (['[np.nan]'], {'dtype': 'np.float64'}), '([np.nan], dtype=np.float64)\n', (4716, 4744), True, 'import numpy as np\n'), ((5854, 5892), 'pandas.util.testing.assert_raises_regex', 'tm.assert_raises_regex', (['TypeError', 'msg'], {}), '(TypeError, msg)\n', (5876, 5892), True, 'from pandas.util import testing as tm\n'), ((5906, 5934), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {}), '([c1, c2])\n', (5924, 5934), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((6584, 6622), 'pandas.util.testing.assert_raises_regex', 'tm.assert_raises_regex', (['TypeError', 'msg'], {}), '(TypeError, msg)\n', (6606, 6622), True, 'from pandas.util import testing as tm\n'), ((6636, 6664), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {}), '([c1, c2])\n', (6654, 6664), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((7060, 7098), 'pandas.util.testing.assert_raises_regex', 'tm.assert_raises_regex', (['TypeError', 'msg'], {}), '(TypeError, msg)\n', (7082, 7098), True, 'from pandas.util import testing as tm\n'), ((7112, 7160), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {'ignore_order': '(False)'}), '([c1, c2], ignore_order=False)\n', (7130, 7160), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((8696, 8734), 'pandas.util.testing.assert_raises_regex', 'tm.assert_raises_regex', (['TypeError', 'msg'], {}), '(TypeError, msg)\n', (8718, 8734), True, 'from pandas.util import testing as tm\n'), ((8748, 8796), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {'ignore_order': '(False)'}), '([c1, c2], ignore_order=False)\n', (8766, 8796), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((8811, 8849), 'pandas.util.testing.assert_raises_regex', 'tm.assert_raises_regex', (['TypeError', 'msg'], {}), '(TypeError, msg)\n', (8833, 8849), True, 'from pandas.util import testing as tm\n'), ((8863, 8891), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {}), '([c1, c2])\n', (8881, 8891), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((11407, 11431), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (11420, 11431), False, 'import 
pytest\n'), ((11445, 11495), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (['[c1, c2]'], {'sort_categories': '(True)'}), '([c1, c2], sort_categories=True)\n', (11463, 11495), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((14476, 14500), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (14489, 14500), False, 'import pytest\n'), ((14514, 14555), 'pandas.core.dtypes.concat.union_categoricals', 'union_categoricals', (["[c1, ['a', 'b', 'c']]"], {}), "([c1, ['a', 'b', 'c']])\n", (14532, 14555), False, 'from pandas.core.dtypes.concat import union_categoricals\n'), ((626, 667), 'pandas.date_range', 'pd.date_range', (['"""2014-01-01"""', '"""2014-01-05"""'], {}), "('2014-01-01', '2014-01-05')\n", (639, 667), True, 'import pandas as pd\n'), ((682, 723), 'pandas.date_range', 'pd.date_range', (['"""2014-01-06"""', '"""2014-01-07"""'], {}), "('2014-01-06', '2014-01-07')\n", (695, 723), True, 'import pandas as pd\n'), ((738, 779), 'pandas.date_range', 'pd.date_range', (['"""2014-01-01"""', '"""2014-01-07"""'], {}), "('2014-01-01', '2014-01-07')\n", (751, 779), True, 'import pandas as pd\n'), ((796, 854), 'pandas.date_range', 'pd.date_range', (['"""2014-01-01"""', '"""2014-01-05"""'], {'tz': '"""US/Central"""'}), "('2014-01-01', '2014-01-05', tz='US/Central')\n", (809, 854), True, 'import pandas as pd\n'), ((869, 927), 'pandas.date_range', 'pd.date_range', (['"""2014-01-06"""', '"""2014-01-07"""'], {'tz': '"""US/Central"""'}), "('2014-01-06', '2014-01-07', tz='US/Central')\n", (882, 927), True, 'import pandas as pd\n'), ((942, 1000), 'pandas.date_range', 'pd.date_range', (['"""2014-01-01"""', '"""2014-01-07"""'], {'tz': '"""US/Central"""'}), "('2014-01-01', '2014-01-07', tz='US/Central')\n", (955, 1000), True, 'import pandas as pd\n'), ((1017, 1060), 'pandas.period_range', 'pd.period_range', (['"""2014-01-01"""', '"""2014-01-05"""'], {}), "('2014-01-01', '2014-01-05')\n", (1032, 1060), True, 'import pandas as pd\n'), ((1075, 1118), 'pandas.period_range', 'pd.period_range', (['"""2014-01-06"""', '"""2014-01-07"""'], {}), "('2014-01-06', '2014-01-07')\n", (1090, 1118), True, 'import pandas as pd\n'), ((1133, 1176), 'pandas.period_range', 'pd.period_range', (['"""2014-01-01"""', '"""2014-01-07"""'], {}), "('2014-01-01', '2014-01-07')\n", (1148, 1176), True, 'import pandas as pd\n'), ((1450, 1471), 'pandas.Categorical', 'Categorical', (['combined'], {}), '(combined)\n', (1461, 1471), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((1488, 1560), 'pandas.util.testing.assert_categorical_equal', 'tm.assert_categorical_equal', (['result', 'expected'], {'check_category_order': '(True)'}), '(result, expected, check_category_order=True)\n', (1515, 1560), True, 'from pandas.util import testing as tm\n'), ((2727, 2757), 'pandas.Categorical', 'pd.Categorical', (['[1, 2, np.nan]'], {}), '([1, 2, np.nan])\n', (2741, 2757), True, 'import pandas as pd\n'), ((2793, 2823), 'pandas.Categorical', 'pd.Categorical', (['[3, 2, np.nan]'], {}), '([3, 2, np.nan])\n', (2807, 2823), True, 'import pandas as pd\n'), ((2963, 2989), 'pandas.Categorical', 'pd.Categorical', (["['A', 'B']"], {}), "(['A', 'B'])\n", (2977, 2989), True, 'import pandas as pd\n'), ((3025, 3059), 'pandas.Categorical', 'pd.Categorical', (["['B', 'B', np.nan]"], {}), "(['B', 'B', np.nan])\n", (3039, 3059), True, 'import pandas as pd\n'), ((3392, 3412), 'pandas.Categorical', 'pd.Categorical', (['val1'], {}), '(val1)\n', (3406, 3412), True, 'import pandas as pd\n'), ((3414, 
3434), 'pandas.Categorical', 'pd.Categorical', (['val2'], {}), '(val2)\n', (3428, 3434), True, 'import pandas as pd\n'), ((3774, 3806), 'pandas.Categorical', 'pd.Categorical', (['[np.nan, np.nan]'], {}), '([np.nan, np.nan])\n', (3788, 3806), True, 'import pandas as pd\n'), ((3842, 3863), 'pandas.Categorical', 'pd.Categorical', (["['X']"], {}), "(['X'])\n", (3856, 3863), True, 'import pandas as pd\n'), ((3996, 4028), 'pandas.Categorical', 'pd.Categorical', (['[np.nan, np.nan]'], {}), '([np.nan, np.nan])\n', (4010, 4028), True, 'import pandas as pd\n'), ((4064, 4096), 'pandas.Categorical', 'pd.Categorical', (['[np.nan, np.nan]'], {}), '([np.nan, np.nan])\n', (4078, 4096), True, 'import pandas as pd\n'), ((4304, 4322), 'pandas.Categorical', 'pd.Categorical', (['[]'], {}), '([])\n', (4318, 4322), True, 'import pandas as pd\n'), ((4358, 4376), 'pandas.Categorical', 'pd.Categorical', (['[]'], {}), '([])\n', (4372, 4376), True, 'import pandas as pd\n'), ((4490, 4508), 'pandas.Categorical', 'pd.Categorical', (['[]'], {}), '([])\n', (4504, 4508), True, 'import pandas as pd\n'), ((4544, 4565), 'pandas.Categorical', 'pd.Categorical', (['[1.0]'], {}), '([1.0])\n', (4558, 4565), True, 'import pandas as pd\n'), ((4820, 4838), 'pandas.Categorical', 'pd.Categorical', (['[]'], {}), '([])\n', (4834, 4838), True, 'import pandas as pd\n'), ((3514, 3540), 'pandas.Timestamp', 'pd.Timestamp', (['"""2011-01-01"""'], {}), "('2011-01-01')\n", (3526, 3540), True, 'import pandas as pd\n'), ((3580, 3606), 'pandas.Timestamp', 'pd.Timestamp', (['"""2011-03-01"""'], {}), "('2011-03-01')\n", (3592, 3606), True, 'import pandas as pd\n'), ((3646, 3672), 'pandas.Timestamp', 'pd.Timestamp', (['"""2011-02-01"""'], {}), "('2011-02-01')\n", (3658, 3672), True, 'import pandas as pd\n'), ((1339, 1353), 'pandas.Categorical', 'Categorical', (['a'], {}), '(a)\n', (1350, 1353), False, 'from pandas import Categorical, Series, CategoricalIndex\n'), ((1405, 1419), 'pandas.Categorical', 'Categorical', (['b'], {}), '(b)\n', (1416, 1419), False, 'from pandas import Categorical, Series, CategoricalIndex\n')]
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Basic Implementation of a multi layer perceptron.
"""

from __future__ import division, print_function

import ConfigSpace as CS
import ConfigSpace.hyperparameters as CSH
import torch.nn as nn

from autoPyTorch.components.networks.base_net import BaseFeatureNet

__author__ = "<NAME>, <NAME> and <NAME>"
__version__ = "0.0.1"
__license__ = "BSD"


class MlpNet(BaseFeatureNet):
    activations = {
        'relu': nn.ReLU,
        'sigmoid': nn.Sigmoid,
        'tanh': nn.Tanh
    }

    def __init__(self, config, in_features, out_features, embedding, final_activation=None):
        super(MlpNet, self).__init__(config, in_features, out_features, embedding, final_activation)
        self.activation = self.activations[config['activation']]
        self.layers = self._build_net(self.n_feats, self.n_classes)

    def _build_net(self, in_features, out_features):
        layers = list()
        self._add_layer(layers, in_features, self.config["num_units_1"], 1)
        for i in range(2, self.config["num_layers"] + 1):
            self._add_layer(layers, self.config["num_units_%d" % (i-1)], self.config["num_units_%d" % i], i)
        layers.append(nn.Linear(self.config["num_units_%d" % self.config["num_layers"]], out_features))
        return nn.Sequential(*layers)

    def _add_layer(self, layers, in_features, out_features, layer_id):
        layers.append(nn.Linear(in_features, out_features))
        layers.append(self.activation())
        if self.config["use_dropout"]:
            layers.append(nn.Dropout(self.config["dropout_%d" % layer_id]))

    @staticmethod
    def get_config_space(user_updates=None):
        cs = CS.ConfigurationSpace()
        range_num_layers=(1, 15)
        range_num_units=(10, 1024)
        possible_activations=('sigmoid', 'tanh', 'relu')
        range_dropout=(0.0, 0.8)
        if user_updates is not None and 'num_layers' in user_updates:
            range_num_layers = user_updates['num_layers']
        num_layers = CSH.UniformIntegerHyperparameter('num_layers', lower=range_num_layers[0], upper=range_num_layers[1])
        cs.add_hyperparameter(num_layers)
        use_dropout = cs.add_hyperparameter(CS.CategoricalHyperparameter("use_dropout", [True, False], default_value=True))
        for i in range(1, range_num_layers[1] + 1):
            n_units = CSH.UniformIntegerHyperparameter("num_units_%d" % i, lower=range_num_units[0], upper=range_num_units[1], log=True)
            cs.add_hyperparameter(n_units)
            dropout = CSH.UniformFloatHyperparameter("dropout_%d" % i, lower=range_dropout[0], upper=range_dropout[1])
            cs.add_hyperparameter(dropout)
            dropout_condition_1 = CS.EqualsCondition(dropout, use_dropout, True)
            if i > range_num_layers[0]:
                cs.add_condition(CS.GreaterThanCondition(n_units, num_layers, i - 1))
                dropout_condition_2 = CS.GreaterThanCondition(dropout, num_layers, i - 1)
                cs.add_condition(CS.AndConjunction(dropout_condition_1, dropout_condition_2))
            else:
                cs.add_condition(dropout_condition_1)
        cs.add_hyperparameter(CSH.CategoricalHyperparameter('activation', possible_activations))
        return(cs)
[ "torch.nn.Dropout", "ConfigSpace.ConfigurationSpace", "ConfigSpace.hyperparameters.CategoricalHyperparameter", "ConfigSpace.AndConjunction", "torch.nn.Sequential", "ConfigSpace.hyperparameters.UniformIntegerHyperparameter", "ConfigSpace.CategoricalHyperparameter", "ConfigSpace.GreaterThanCondition", "ConfigSpace.EqualsCondition", "ConfigSpace.hyperparameters.UniformFloatHyperparameter", "torch.nn.Linear" ]
[((1305, 1327), 'torch.nn.Sequential', 'nn.Sequential', (['*layers'], {}), '(*layers)\n', (1318, 1327), True, 'import torch.nn as nn\n'), ((1693, 1716), 'ConfigSpace.ConfigurationSpace', 'CS.ConfigurationSpace', ([], {}), '()\n', (1714, 1716), True, 'import ConfigSpace as CS\n'), ((2034, 2138), 'ConfigSpace.hyperparameters.UniformIntegerHyperparameter', 'CSH.UniformIntegerHyperparameter', (['"""num_layers"""'], {'lower': 'range_num_layers[0]', 'upper': 'range_num_layers[1]'}), "('num_layers', lower=range_num_layers[0],\n upper=range_num_layers[1])\n", (2066, 2138), True, 'import ConfigSpace.hyperparameters as CSH\n'), ((1208, 1293), 'torch.nn.Linear', 'nn.Linear', (["self.config['num_units_%d' % self.config['num_layers']]", 'out_features'], {}), "(self.config['num_units_%d' % self.config['num_layers']], out_features\n )\n", (1217, 1293), True, 'import torch.nn as nn\n'), ((1422, 1458), 'torch.nn.Linear', 'nn.Linear', (['in_features', 'out_features'], {}), '(in_features, out_features)\n', (1431, 1458), True, 'import torch.nn as nn\n'), ((2221, 2299), 'ConfigSpace.CategoricalHyperparameter', 'CS.CategoricalHyperparameter', (['"""use_dropout"""', '[True, False]'], {'default_value': '(True)'}), "('use_dropout', [True, False], default_value=True)\n", (2249, 2299), True, 'import ConfigSpace as CS\n'), ((2376, 2495), 'ConfigSpace.hyperparameters.UniformIntegerHyperparameter', 'CSH.UniformIntegerHyperparameter', (["('num_units_%d' % i)"], {'lower': 'range_num_units[0]', 'upper': 'range_num_units[1]', 'log': '(True)'}), "('num_units_%d' % i, lower=range_num_units[\n 0], upper=range_num_units[1], log=True)\n", (2408, 2495), True, 'import ConfigSpace.hyperparameters as CSH\n'), ((2572, 2672), 'ConfigSpace.hyperparameters.UniformFloatHyperparameter', 'CSH.UniformFloatHyperparameter', (["('dropout_%d' % i)"], {'lower': 'range_dropout[0]', 'upper': 'range_dropout[1]'}), "('dropout_%d' % i, lower=range_dropout[0],\n upper=range_dropout[1])\n", (2602, 2672), True, 'import ConfigSpace.hyperparameters as CSH\n'), ((2746, 2792), 'ConfigSpace.EqualsCondition', 'CS.EqualsCondition', (['dropout', 'use_dropout', '(True)'], {}), '(dropout, use_dropout, True)\n', (2764, 2792), True, 'import ConfigSpace as CS\n'), ((3216, 3281), 'ConfigSpace.hyperparameters.CategoricalHyperparameter', 'CSH.CategoricalHyperparameter', (['"""activation"""', 'possible_activations'], {}), "('activation', possible_activations)\n", (3245, 3281), True, 'import ConfigSpace.hyperparameters as CSH\n'), ((1566, 1614), 'torch.nn.Dropout', 'nn.Dropout', (["self.config['dropout_%d' % layer_id]"], {}), "(self.config['dropout_%d' % layer_id])\n", (1576, 1614), True, 'import torch.nn as nn\n'), ((2959, 3010), 'ConfigSpace.GreaterThanCondition', 'CS.GreaterThanCondition', (['dropout', 'num_layers', '(i - 1)'], {}), '(dropout, num_layers, i - 1)\n', (2982, 3010), True, 'import ConfigSpace as CS\n'), ((2867, 2918), 'ConfigSpace.GreaterThanCondition', 'CS.GreaterThanCondition', (['n_units', 'num_layers', '(i - 1)'], {}), '(n_units, num_layers, i - 1)\n', (2890, 2918), True, 'import ConfigSpace as CS\n'), ((3044, 3103), 'ConfigSpace.AndConjunction', 'CS.AndConjunction', (['dropout_condition_1', 'dropout_condition_2'], {}), '(dropout_condition_1, dropout_condition_2)\n', (3061, 3103), True, 'import ConfigSpace as CS\n')]
""" This Module define the main services of the Test Session Coordinator in charge of the testing session. """ ################################################################################# # MIT License # # Copyright (c) 2018, <NAME>, Universitat Oberta de Catalunya (UOC), # Universidad de la Republica Oriental del Uruguay (UdelaR). # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. ################################################################################# import json import logging import lorawan.sessions as device_sessions import lorawan.parsing.configuration as configuration_parser import message_queueing import conformance_testing.test_errors as test_errors from user_interface.ui import ui_publisher import user_interface.ui_reports as ui_reports import parameters.message_broker as message_broker from parameters.message_broker import routing_keys logger = logging.getLogger(__name__) class TestSessionCoordinator(message_queueing.MqInterface): """ This class implements the testing service and is in charge of coordinating the execution of the selected tests. It is a MqInterface, it can communicate with the RMQ broker in order to receive uplink messages and send downlink messages to the agent (using the correct routing key). """ def __init__(self, reset_attemps=3): """ The constructor initializes the downlink counter of the test session (defined in the test application protocol) and declares logging queues in the RMQ Broker to avoid the message loss (in case that the clients haven't initialized the logging queues when the session starts. """ super().__init__() self.device_under_test = None self.current_test = None self.requested_tests = None self._next_test_index = 0 self._reset_dut = False self._reset_count = 0 self._reset_limit = reset_attemps self.downlink_counter = 0 self.testingtool_on = True self.last_deviceid = None # >> Declare log queue to avoid message loss -------------------------------------- self.declare_queue(queue_name='logger_tas', auto_delete=True, exclusive=False) log_tas_routing_key = "log." 
+ message_broker.service_names.test_session_coordinator self.bind_queue(queue_name='logger_tas', routing_key=log_tas_routing_key) self.declare_queue(queue_name='logger_all', auto_delete=True, exclusive=False) log_all_routing_key = "log.#" self.bind_queue(queue_name='logger_all', routing_key=log_all_routing_key) # << End declare log queue ------------------------------------------------------- self.declare_queue(queue_name='display_gui', auto_delete=False, exclusive=False) self.bind_queue(queue_name='display_gui', routing_key=message_broker.routing_keys.ui_all_users + '.display') self.declare_queue(queue_name='configuration_request', auto_delete=False, exclusive=False) self.bind_queue(queue_name='configuration_request', routing_key=message_broker.routing_keys.configuration_request) self.declare_queue(queue_name='request_action_gui', auto_delete=False, exclusive=False) self.bind_queue(queue_name='request_action_gui', routing_key=message_broker.routing_keys.ui_all_users + '.request') @property def reset_dut(self): """ Indicates that the test needs to reset the Device Under Test (DUT) to set it to a known state. """ return self._reset_dut @reset_dut.setter def reset_dut(self, reset_value): """ Indicates that the test needs to reset the Device Under Test (DUT) to set it to a known state. """ if reset_value is True: self._reset_dut = True else: self._reset_dut = False self.downlink_counter = 0 def test_available(self): """ Returns True if there is a new tests to be executed.""" return self.requested_tests and self._next_test_index < len(self.requested_tests) def pop_next_test_name(self): """ Returns the name of the next test to be executed.""" if not self.test_available(): logger.info(f"No test available.") return None if self.reset_dut: if self._reset_count >= self._reset_limit: logger.info( f"Reset attempts exceeded ({self._reset_count}/{self._reset_limit}).") self._next_test_index = len(self.requested_tests) return None logger.info("Resetting device.") self.reset_dut = False self._reset_count += 1 return "td_lorawan_reset" else: logger.info("Returning new test.") self._reset_count = 0 idx = self._next_test_index self._next_test_index += 1 return self.requested_tests[idx] def publish(self, msg, routing_key, exchange_name=message_queueing.DEFAULT_EXCHANGE): """ Sends a message to the broker using the default channel. :param msg: byte sequence of the message to be sent. 
:param routing_key: :param exchange_name: :return: """ if self.current_test and routing_keys.toAgent in routing_key: self.downlink_counter += 1 super().publish(msg, routing_key) def wait_press_start(self): """ Publishes the Start button in the GUI.""" logger.debug("Showing start button.") start_request_body = ui_reports.InputFormBody(title="Start LoRaWAN testing tool") start_request_body.add_field(ui_reports.ButtonInputField(name="START", value=1)) request_start = ui_reports.RPCRequest(request_key=routing_keys.ui_all_users + '.request', channel=self.channel, body=str(start_request_body)) start_reply_json = request_start.wait_response(timeout_seconds=120).decode() self.consume_stop() def start_testing(self): """ Starts the current test.""" self.declare_and_consume(queue_name='testingtool_terminate_tas', routing_key=routing_keys.testing_terminate, callback=self.session_terminate_handler) logger.debug("Starting current test case...") self.current_test.start_test() def session_terminate_handler(self, ch, method, properties, body): """ Handles a Sesstion Termination message.""" logger.debug("SESSION TERMINATED BY THE USER.") self.consume_stop() self.testingtool_on = False raise test_errors.SessionTerminatedError("Terminated by UI request.") def get_device_from_gui(self): """ Requests and validates the Device information using the GUI.""" class InvalidHexStringInFieldError(Exception): pass def validate_bytes(number_of_bytes, field_str): try: field_bytes = bytes.fromhex(field_str) if len(field_bytes) == number_of_bytes: return field_bytes else: raise InvalidHexStringInFieldError(f'The field ({field_str}) must be {number_of_bytes} bytes long.') except ValueError: raise InvalidHexStringInFieldError(f'{field_str} is an invalid field') ########################################################################################### while "The user doesn't enter a valid device information": try: device_id = configuration_parser.DeviceID() device_request_body = ui_reports.InputFormBody(title="Enter device information.") device_request_body.add_field(ui_reports.TextInputField(name="DevEUI", label="Device EUI", value="0004a30b001adbe5")) device_request_body.add_field(ui_reports.TextInputField( name="AppKey", label="Application Key", value='<KEY>')) device_request_body.add_field(ui_reports.TextInputField(name="DevAddr", label="Short address", value="26011cf1")) logger.debug(f"Requesting device credentials: {str(device_request_body)}.") request_device = ui_reports.RPCRequest( request_key=routing_keys.ui_all_users + '.request', channel=self.channel, body=str(device_request_body)) device_reply = device_request_body.get_parsed_reply( request_device.wait_response(timeout_seconds=120)) device_id.deveui = validate_bytes(number_of_bytes=8, field_str=device_reply["DevEUI"]) device_id.appkey = validate_bytes(number_of_bytes=16, field_str=device_reply["AppKey"]) device_id.devaddr = validate_bytes(number_of_bytes=4, field_str=device_reply["DevAddr"]) device_id.appskey = bytes.fromhex("ff") + device_id.appkey[1::] device_id.nwkskey = bytes.fromhex("00") + device_id.appkey[1::] except InvalidHexStringInFieldError: continue else: break device_display = ui_reports.InputFormBody(title="Device Personalization Information.", tag_key="Configuration", tag_value="Information") device_display.add_field(ui_reports.ParagraphField( name=" ", value=device_id.to_print_str())) ui_publisher.display_on_gui( msg_str=str(device_display), key_prefix=message_broker.service_names.test_session_coordinator) return device_id def get_testcases(self): 
""" Request the list of test cases to be run by the test application server. A text box will be displayed in the GUI. """ testcases_request_body = ui_reports.InputFormBody(title="List of test cases.") testnames_template = "" testcases_request_body.add_field(ui_reports.TextInputField( name="TestCases", label="test cases", value=testnames_template)) request_testcases = ui_reports.RPCRequest( request_key=routing_keys.ui_all_users + '.request', channel=self.channel, body=str(testcases_request_body)) device_reply = testcases_request_body.get_parsed_reply( request_testcases.wait_response(timeout_seconds=120)) return device_reply["TestCases"].split() def ask_configuration_register_device(self): """ Use the GUI to request the configuration parameters of the test session: -List of testcases -DUT personalization parameters """ self.channel.start_consuming() logger.debug("Asking the GUI for configuration.") request_config = ui_reports.RPCRequest(request_key=routing_keys.configuration_request, channel=self.channel, body='{"_api_version": "1.0.0"}') session_configuration_bytes = request_config.wait_response(timeout_seconds=10) logger.debug(f"Received configuration from GUI: \n{json.dumps(session_configuration_bytes.decode(), indent=4, sort_keys=True)}") config = ui_reports.SessionConfigurationBody.build_from_json( json_str=session_configuration_bytes.decode()) self.requested_tests = ["td_lorawan_act_01"] if config.testcases: self.requested_tests.extend(config.testcases) else: self.requested_tests.extend(self.get_testcases()) self.requested_tests.append("td_lorawan_deactivate") testcases_display = ui_reports.InputFormBody(title="Test Cases to be excecuted.", tag_key="Configuration", tag_value="Information") testcases_display.add_field(ui_reports.ParagraphField( name="TCs list:", value="\n".join(self.requested_tests))) ui_publisher.display_on_gui(msg_str=str(testcases_display), key_prefix=message_broker.service_names.test_session_coordinator) device_id = self.get_device_from_gui() ######################################################################################### self.device_under_test = device_sessions.EndDevice(ctx_test_tool_service=self, deveui=device_id.deveui, devaddr=device_id.devaddr, appkey=device_id.appkey, nwkskey=device_id.nwkskey, appskey=device_id.appskey) self.last_deviceid = device_id def handle_error(self, raised_exception, test_name, result_report=None): """ Handles a raised exception, setting a flag to reset the DUT in case of a test failure. """ error_name = type(raised_exception).__name__ error_details = str(raised_exception) if isinstance(raised_exception, test_errors.TestFailError): self.reset_dut = True fail_message = f"Test {test_name} failed with {error_name} error." fail_message_paragraph = ui_reports.ParagraphField(name=test_name, value=fail_message) fail_details_paragraphs = [ui_reports.ParagraphField(name=":", value=line) for line in error_details.split("\n")] if result_report: result_report.add_field(fail_message_paragraph) for detail_paragraph in fail_details_paragraphs: result_report.add_field(detail_paragraph) result_report.level = ui_reports.LEVEL_ERR step_error = ui_reports.InputFormBody( title=f"{test_name}: Step Fail", tag_key=test_name, tag_value=" ") step_error.add_field(fail_message_paragraph) for detail_paragraph in fail_details_paragraphs: step_error.add_field(detail_paragraph) step_error.level = ui_reports.LEVEL_ERR ui_publisher.display_on_gui( msg_str=str(step_error), key_prefix=message_broker.service_names.test_session_coordinator)
[ "user_interface.ui_reports.ButtonInputField", "user_interface.ui_reports.RPCRequest", "lorawan.sessions.EndDevice", "user_interface.ui_reports.TextInputField", "user_interface.ui_reports.ParagraphField", "lorawan.parsing.configuration.DeviceID", "user_interface.ui_reports.InputFormBody", "conformance_testing.test_errors.SessionTerminatedError", "logging.getLogger" ]
[((1890, 1917), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1907, 1917), False, 'import logging\n'), ((6650, 6710), 'user_interface.ui_reports.InputFormBody', 'ui_reports.InputFormBody', ([], {'title': '"""Start LoRaWAN testing tool"""'}), "(title='Start LoRaWAN testing tool')\n", (6674, 6710), True, 'import user_interface.ui_reports as ui_reports\n'), ((7804, 7867), 'conformance_testing.test_errors.SessionTerminatedError', 'test_errors.SessionTerminatedError', (['"""Terminated by UI request."""'], {}), "('Terminated by UI request.')\n", (7838, 7867), True, 'import conformance_testing.test_errors as test_errors\n'), ((10840, 10963), 'user_interface.ui_reports.InputFormBody', 'ui_reports.InputFormBody', ([], {'title': '"""Device Personalization Information."""', 'tag_key': '"""Configuration"""', 'tag_value': '"""Information"""'}), "(title='Device Personalization Information.',\n tag_key='Configuration', tag_value='Information')\n", (10864, 10963), True, 'import user_interface.ui_reports as ui_reports\n'), ((11581, 11634), 'user_interface.ui_reports.InputFormBody', 'ui_reports.InputFormBody', ([], {'title': '"""List of test cases."""'}), "(title='List of test cases.')\n", (11605, 11634), True, 'import user_interface.ui_reports as ui_reports\n'), ((12556, 12685), 'user_interface.ui_reports.RPCRequest', 'ui_reports.RPCRequest', ([], {'request_key': 'routing_keys.configuration_request', 'channel': 'self.channel', 'body': '"""{"_api_version": "1.0.0"}"""'}), '(request_key=routing_keys.configuration_request,\n channel=self.channel, body=\'{"_api_version": "1.0.0"}\')\n', (12577, 12685), True, 'import user_interface.ui_reports as ui_reports\n'), ((13437, 13553), 'user_interface.ui_reports.InputFormBody', 'ui_reports.InputFormBody', ([], {'title': '"""Test Cases to be excecuted."""', 'tag_key': '"""Configuration"""', 'tag_value': '"""Information"""'}), "(title='Test Cases to be excecuted.', tag_key=\n 'Configuration', tag_value='Information')\n", (13461, 13553), True, 'import user_interface.ui_reports as ui_reports\n'), ((14148, 14342), 'lorawan.sessions.EndDevice', 'device_sessions.EndDevice', ([], {'ctx_test_tool_service': 'self', 'deveui': 'device_id.deveui', 'devaddr': 'device_id.devaddr', 'appkey': 'device_id.appkey', 'nwkskey': 'device_id.nwkskey', 'appskey': 'device_id.appskey'}), '(ctx_test_tool_service=self, deveui=device_id.\n deveui, devaddr=device_id.devaddr, appkey=device_id.appkey, nwkskey=\n device_id.nwkskey, appskey=device_id.appskey)\n', (14173, 14342), True, 'import lorawan.sessions as device_sessions\n'), ((15174, 15235), 'user_interface.ui_reports.ParagraphField', 'ui_reports.ParagraphField', ([], {'name': 'test_name', 'value': 'fail_message'}), '(name=test_name, value=fail_message)\n', (15199, 15235), True, 'import user_interface.ui_reports as ui_reports\n'), ((15674, 15769), 'user_interface.ui_reports.InputFormBody', 'ui_reports.InputFormBody', ([], {'title': 'f"""{test_name}: Step Fail"""', 'tag_key': 'test_name', 'tag_value': '""" """'}), "(title=f'{test_name}: Step Fail', tag_key=test_name,\n tag_value=' ')\n", (15698, 15769), True, 'import user_interface.ui_reports as ui_reports\n'), ((6748, 6798), 'user_interface.ui_reports.ButtonInputField', 'ui_reports.ButtonInputField', ([], {'name': '"""START"""', 'value': '(1)'}), "(name='START', value=1)\n", (6775, 6798), True, 'import user_interface.ui_reports as ui_reports\n'), ((11709, 11803), 'user_interface.ui_reports.TextInputField', 'ui_reports.TextInputField', ([], {'name': 
'"""TestCases"""', 'label': '"""test cases"""', 'value': 'testnames_template'}), "(name='TestCases', label='test cases', value=\n testnames_template)\n", (11734, 11803), True, 'import user_interface.ui_reports as ui_reports\n'), ((15271, 15318), 'user_interface.ui_reports.ParagraphField', 'ui_reports.ParagraphField', ([], {'name': '""":"""', 'value': 'line'}), "(name=':', value=line)\n", (15296, 15318), True, 'import user_interface.ui_reports as ui_reports\n'), ((8751, 8782), 'lorawan.parsing.configuration.DeviceID', 'configuration_parser.DeviceID', ([], {}), '()\n', (8780, 8782), True, 'import lorawan.parsing.configuration as configuration_parser\n'), ((8822, 8881), 'user_interface.ui_reports.InputFormBody', 'ui_reports.InputFormBody', ([], {'title': '"""Enter device information."""'}), "(title='Enter device information.')\n", (8846, 8881), True, 'import user_interface.ui_reports as ui_reports\n'), ((8928, 9019), 'user_interface.ui_reports.TextInputField', 'ui_reports.TextInputField', ([], {'name': '"""DevEUI"""', 'label': '"""Device EUI"""', 'value': '"""0004a30b001adbe5"""'}), "(name='DevEUI', label='Device EUI', value=\n '0004a30b001adbe5')\n", (8953, 9019), True, 'import user_interface.ui_reports as ui_reports\n'), ((9206, 9291), 'user_interface.ui_reports.TextInputField', 'ui_reports.TextInputField', ([], {'name': '"""AppKey"""', 'label': '"""Application Key"""', 'value': '"""<KEY>"""'}), "(name='AppKey', label='Application Key', value='<KEY>'\n )\n", (9231, 9291), True, 'import user_interface.ui_reports as ui_reports\n'), ((9395, 9482), 'user_interface.ui_reports.TextInputField', 'ui_reports.TextInputField', ([], {'name': '"""DevAddr"""', 'label': '"""Short address"""', 'value': '"""26011cf1"""'}), "(name='DevAddr', label='Short address', value=\n '26011cf1')\n", (9420, 9482), True, 'import user_interface.ui_reports as ui_reports\n')]
#!/usr/bin/python3
import re
from prettytable import PrettyTable

x = PrettyTable(field_names=["Key", "Value", "Operation"],sortby="Operation",reversesort=True)
x.align = "l"
x.add_row(["Adelaide", 1295, 0])
x.add_row(["Brisbane", 5905, 1])
x.add_row(["Darwin", 112, 1])
x.add_row(["Hobart", 1357, 0])
x.add_row(["Sydney", 2058, 0])
x.add_row(["Melbourne", 1566, 0])
x.add_row(["Perth", 5386, 0])
print(x.get_string(sortby="Operation"))

urlReg= re.compile('(?P<provider>(\\w+))://(?P<host>(\\d+.\\d+.\\d+.\\d+)):(?P<port>(\d+))/(?P<db>(\w+))')
url='mysql://10.124.144.98:3306/test_zyzx_dresource'
regMatch = urlReg.search(url)
v = regMatch.group('provider')
print(v)
print(regMatch.group('host'))
print(regMatch.group('port'))

#print(reg.search(url).group())
[ "prettytable.PrettyTable", "re.compile" ]
[((74, 170), 'prettytable.PrettyTable', 'PrettyTable', ([], {'field_names': "['Key', 'Value', 'Operation']", 'sortby': '"""Operation"""', 'reversesort': '(True)'}), "(field_names=['Key', 'Value', 'Operation'], sortby='Operation',\n reversesort=True)\n", (85, 170), False, 'from prettytable import PrettyTable\n'), ((452, 562), 're.compile', 're.compile', (['"""(?P<provider>(\\\\w+))://(?P<host>(\\\\d+.\\\\d+.\\\\d+.\\\\d+)):(?P<port>(\\\\d+))/(?P<db>(\\\\w+))"""'], {}), "(\n '(?P<provider>(\\\\w+))://(?P<host>(\\\\d+.\\\\d+.\\\\d+.\\\\d+)):(?P<port>(\\\\d+))/(?P<db>(\\\\w+))'\n )\n", (462, 562), False, 'import re\n')]
import numpy as np import keras from keras.layers import * from keras.models import Sequential,Model from keras import backend as K from base_networks import * import tensorflow as tf def my_KL_loss(y_true, y_pred): y_pred = K.clip(y_pred, K.epsilon(), 1) return - K.sum(y_true*K.log(y_pred), axis=-1) def my_binary_KL_loss(y_true, y_pred): y_pred = K.clip(y_pred, K.epsilon(), 1) compl_y_pred = 1.0 - y_pred compl_y_pred = K.clip(compl_y_pred, K.epsilon(), 1) return - K.sum(y_true*K.log(y_pred) + (1-y_true)*K.log(compl_y_pred), axis=-1) def my_binary_KL_loss_stable(y_true, y_pred): y_pred = K.clip(y_pred, K.epsilon(), 1-K.epsilon()) logits = K.log(y_pred) - K.log(1-y_pred) # sigmoid inverse neg_abs_logits = -K.abs(logits) relu_logits = K.relu(logits) loss_vec = relu_logits - logits*y_true + K.log(1 + K.exp(neg_abs_logits)) return K.sum(loss_vec) def REC_loss(x_true, x_pred): x_pred = K.clip(x_pred, K.epsilon(), 1) return - K.sum(x_true*K.log(x_pred), axis=-1) #keras.losses.categorical_crossentropy(x_true, x_pred) def traditional_VAE(data_dim,Nb,units,layers_e,layers_d,opt='adam',BN=True, summ=True, beta=0): pre_encoder = define_pre_encoder(data_dim, layers=layers_e,units=units,BN=BN) if summ: print("pre-encoder network:") pre_encoder.summary() generator = define_generator(Nb,data_dim,layers=layers_d,units=units,BN=BN) if summ: print("generator network:") generator.summary() ## Encoder x = Input(shape=(data_dim,)) hidden = pre_encoder(x) z_mean = Dense(Nb,activation='linear', name='z-mean')(hidden) z_log_var = Dense(Nb,activation='linear',name = 'z-log_var')(hidden) encoder = Model(x, z_mean) # build a model to project inputs on the latent space def sampling(args): epsilon_std = 1.0 z_mean, z_log_var = args epsilon = K.random_normal(shape=(K.shape(z_mean)[0], Nb),mean=0., stddev=epsilon_std) return z_mean + K.exp(0.5*z_log_var) * epsilon #+sigma (desvest) ## Decoder z_sampled = Lambda(sampling, output_shape=(Nb,), name='sampled')([z_mean, z_log_var]) output = generator(z_sampled) Recon_loss = REC_loss kl_loss = KL_loss(z_mean,z_log_var) def VAE_loss(y_true, y_pred): return Recon_loss(y_true, y_pred) + beta*kl_loss(y_true, y_pred) traditional_vae = Model(x, output) traditional_vae.compile(optimizer=opt, loss=VAE_loss, metrics = [Recon_loss,kl_loss]) return traditional_vae, encoder,generator def sample_gumbel(shape,eps=K.epsilon()): """Inverse Sample function from Gumbel(0, 1)""" U = K.random_uniform(shape, 0, 1) return K.log(U + eps)- K.log(1-U + eps) def VDSHS(data_dim,n_classes,Nb,units,layers_e,layers_d,opt='adam',BN=True, summ=True,tau_ann=False,beta=0,alpha=1.0,multilabel=False): pre_encoder = define_pre_encoder(data_dim, layers=layers_e,units=units,BN=BN) if summ: print("pre-encoder network:") pre_encoder.summary() generator = define_generator(Nb,data_dim,layers=layers_d,units=units,BN=BN) if summ: print("generator network:") generator.summary() ## Encoder x = Input(shape=(data_dim,)) hidden = pre_encoder(x) z_mean = Dense(Nb,activation='linear', name='z-mean')(hidden) z_log_var = Dense(Nb,activation='linear',name = 'z-log_var')(hidden) encoder = Model(x, z_mean) # build a model to project inputs on the latent space def sampling(args): epsilon_std = 1.0 z_mean, z_log_var = args epsilon = K.random_normal(shape=(K.shape(z_mean)[0], Nb),mean=0., stddev=epsilon_std) return z_mean + K.exp(0.5*z_log_var) * epsilon #+sigma (desvest) ## Decoder z_sampled = Lambda(sampling, output_shape=(Nb,), name='sampled')([z_mean, z_log_var]) output = generator(z_sampled) Recon_loss = REC_loss kl_loss = 
KL_loss(z_mean,z_log_var) def VAE_loss(y_true, y_pred): return Recon_loss(y_true, y_pred) + beta*kl_loss(y_true, y_pred) if multilabel: supervised_layer = Dense(n_classes, activation='sigmoid',name='sup-class')(z_sampled)#req n_classes else: supervised_layer = Dense(n_classes, activation='softmax',name='sup-class')(z_sampled)#req n_classes traditional_vae = Model(inputs=x, outputs=[output,supervised_layer]) if multilabel: traditional_vae.compile(optimizer=opt, loss=[VAE_loss,my_binary_KL_loss],loss_weights=[1., alpha], metrics=[Recon_loss,kl_loss]) else: traditional_vae.compile(optimizer=opt, loss=[VAE_loss,my_KL_loss],loss_weights=[1., alpha], metrics=[Recon_loss,kl_loss]) return traditional_vae, encoder,generator def binary_VAE(data_dim,Nb,units,layers_e,layers_d,opt='adam',BN=True, summ=True,tau_ann=False,beta=0): if tau_ann: tau = K.variable(1.0, name="temperature") else: tau = K.variable(0.67, name="temperature") #o tau fijo en 0.67=2/3 pre_encoder = define_pre_encoder(data_dim, layers=layers_e,units=units,BN=BN) if summ: print("pre-encoder network:") pre_encoder.summary() generator = define_generator(Nb,data_dim,layers=layers_d,units=units,BN=BN) if summ: print("generator network:") generator.summary() x = Input(shape=(data_dim,)) hidden = pre_encoder(x) logits_b = Dense(Nb, activation='linear', name='logits-b')(hidden) #log(B_j/1-B_j) #proba = np.exp(logits_b)/(1+np.exp(logits_b)) = sigmoidal(logits_b) <<<<<<<<<< recupera probabilidad #dist = Dense(Nb, activation='sigmoid')(hidden) #p(b) #otra forma de modelarlo encoder = Model(x, logits_b) def sampling(logits_b): #logits_b = K.log(aux/(1-aux) + K.epsilon() ) b = logits_b + sample_gumbel(K.shape(logits_b)) # logits + gumbel noise return keras.activations.sigmoid( b/tau ) b_sampled = Lambda(sampling, output_shape=(Nb,), name='sampled')(logits_b) output = generator(b_sampled) Recon_loss = REC_loss kl_loss = BKL_loss(logits_b) def BVAE_loss(y_true, y_pred): return Recon_loss(y_true, y_pred) + beta*kl_loss(y_true, y_pred) binary_vae = Model(x, output) binary_vae.compile(optimizer=opt, loss=BVAE_loss, metrics = [Recon_loss,kl_loss]) if tau_ann: return binary_vae, encoder,generator ,tau else: return binary_vae, encoder,generator def PSH_GS(data_dim,n_classes,Nb,units,layers_e,layers_d,opt='adam',BN=True, summ=True,tau_ann=False,beta=0,alpha=1.0,lambda_=1.0,multilabel=False): if tau_ann: tau = K.variable(1.0, name="temperature") else: tau = K.variable(0.67, name="temperature") #o tau fijo en 0.67=2/3 pre_encoder = define_pre_encoder(data_dim, layers=layers_e,units=units,BN=BN) if summ: print("pre-encoder network:") pre_encoder.summary() generator = define_generator(Nb,data_dim,layers=layers_d,units=units,BN=BN) if summ: print("generator network:") generator.summary() x = Input(shape=(data_dim,)) hidden = pre_encoder(x) logits_b = Dense(Nb, activation='linear', name='logits-b')(hidden) if multilabel: supervised_layer = Dense(n_classes, activation='sigmoid',name='sup-class')(hidden)#req n_classes else: supervised_layer = Dense(n_classes, activation='softmax',name='sup-class')(hidden)#req n_classes encoder = Model(x, logits_b) def sampling(logits_b): #logits_b = K.log(aux/(1-aux) + K.epsilon() ) b = logits_b + sample_gumbel(K.shape(logits_b)) # logits + gumbel noise return keras.activations.sigmoid( b/tau ) b_sampled = Lambda(sampling, output_shape=(Nb,), name='sampled')(logits_b) output = generator(b_sampled) Recon_loss = REC_loss kl_loss = BKL_loss(logits_b) def SUP_BAE_loss_pointwise(y_true, y_pred): return Recon_loss(y_true, y_pred) + 
beta*kl_loss(y_true, y_pred) margin = Nb/3.0 if multilabel: pred_loss = my_binary_KL_loss else: pred_loss = my_KL_loss def Hamming_loss(y_true, y_pred): r = tf.reduce_sum(b_sampled*b_sampled, 1) r = tf.reshape(r, [-1, 1]) D = r - 2*tf.matmul(b_sampled, tf.transpose(b_sampled)) + tf.transpose(r) #BXB similar_mask = K.dot(y_true, K.transpose(y_true)) #BXB M_ij = I(y_i = y_j) loss_hamming = (1.0/Nb)*K.sum(similar_mask*D + (1.0-similar_mask)*K.relu(margin-D)) return lambda_*pred_loss(y_true, y_pred) + loss_hamming #binary_vae = Model(inputs=[x,y], outputs=output) #binary_vae.compile(optimizer=opt, loss=SUP_BAE_loss_pointwise, metrics=[Recon_loss,kl_loss]) binary_vae = Model(inputs=x, outputs=[output,supervised_layer]) binary_vae.compile(optimizer=opt, loss=[SUP_BAE_loss_pointwise,Hamming_loss],loss_weights=[1., alpha], metrics=[Recon_loss,kl_loss,pred_loss]) if tau_ann: return binary_vae, encoder,generator ,tau else: return binary_vae, encoder,generator def SSBVAE(data_dim,n_classes,Nb,units,layers_e,layers_d,opt='adam',BN=True, summ=True,tau_ann=False,lambda_=0,alpha=1.0,beta=1.0,multilabel=False): if tau_ann: tau = K.variable(1.0, name="temperature") else: tau = K.variable(0.67, name="temperature") #o tau fijo en 0.67=2/3 pre_encoder = define_pre_encoder(data_dim, layers=layers_e,units=units,BN=BN) if summ: print("pre-encoder network:") pre_encoder.summary() generator = define_generator(Nb,data_dim,layers=layers_d,units=units,BN=BN) if summ: print("generator network:") generator.summary() x = Input(shape=(data_dim,)) hidden = pre_encoder(x) logits_b = Dense(Nb, activation='linear', name='logits-b')(hidden) #log(B_j/1-B_j) if multilabel: supervised_layer = Dense(n_classes, activation='sigmoid',name='sup-class')(hidden)#req n_classes else: supervised_layer = Dense(n_classes, activation='softmax',name='sup-class')(hidden)#req n_classes encoder = Model(x, logits_b) def sampling(logits_b): #logits_b = K.log(aux/(1-aux) + K.epsilon() ) b = logits_b + sample_gumbel(K.shape(logits_b)) # logits + gumbel noise return keras.activations.sigmoid( b/tau ) b_sampled = Lambda(sampling, output_shape=(Nb,), name='sampled')(logits_b) output = generator(b_sampled) Recon_loss = REC_loss kl_loss = BKL_loss(logits_b) def SUP_BAE_loss_pointwise(y_true, y_pred): return Recon_loss(y_true, y_pred) + lambda_*kl_loss(y_true, y_pred) margin = Nb/3.0 if multilabel: pred_loss = my_binary_KL_loss_stable else: pred_loss = my_KL_loss def Hamming_loss(y_true, y_pred): #pred_loss = keras.losses.categorical_crossentropy(y_true, y_pred) r = tf.reduce_sum(b_sampled*b_sampled, 1) r = tf.reshape(r, [-1, 1]) D = r - 2*tf.matmul(b_sampled, tf.transpose(b_sampled)) + tf.transpose(r) #BXB similar_mask = K.dot(y_pred, K.transpose(y_pred)) #BXB M_ij = I(y_i = y_j) loss_hamming = (1.0/Nb)*K.sum(similar_mask*D + (1.0-similar_mask)*K.relu(margin-D)) return beta*pred_loss(y_true, y_pred) + alpha*loss_hamming binary_vae = Model(inputs=x, outputs=[output,supervised_layer]) binary_vae.compile(optimizer=opt, loss=[SUP_BAE_loss_pointwise,Hamming_loss],loss_weights=[1., 1.], metrics=[Recon_loss,kl_loss,pred_loss]) if tau_ann: return binary_vae, encoder,generator ,tau else: return binary_vae, encoder,generator
[ "tensorflow.reduce_sum", "keras.backend.random_uniform", "keras.backend.epsilon", "keras.activations.sigmoid", "tensorflow.reshape", "keras.backend.sum", "keras.backend.exp", "keras.models.Model", "keras.backend.abs", "tensorflow.transpose", "keras.backend.log", "keras.backend.shape", "keras.backend.transpose", "keras.backend.relu", "keras.backend.variable" ]
[((792, 806), 'keras.backend.relu', 'K.relu', (['logits'], {}), '(logits)\n', (798, 806), True, 'from keras import backend as K\n'), ((896, 911), 'keras.backend.sum', 'K.sum', (['loss_vec'], {}), '(loss_vec)\n', (901, 911), True, 'from keras import backend as K\n'), ((1739, 1755), 'keras.models.Model', 'Model', (['x', 'z_mean'], {}), '(x, z_mean)\n', (1744, 1755), False, 'from keras.models import Sequential, Model\n'), ((2411, 2427), 'keras.models.Model', 'Model', (['x', 'output'], {}), '(x, output)\n', (2416, 2427), False, 'from keras.models import Sequential, Model\n'), ((2598, 2609), 'keras.backend.epsilon', 'K.epsilon', ([], {}), '()\n', (2607, 2609), True, 'from keras import backend as K\n'), ((2672, 2701), 'keras.backend.random_uniform', 'K.random_uniform', (['shape', '(0)', '(1)'], {}), '(shape, 0, 1)\n', (2688, 2701), True, 'from keras import backend as K\n'), ((3436, 3452), 'keras.models.Model', 'Model', (['x', 'z_mean'], {}), '(x, z_mean)\n', (3441, 3452), False, 'from keras.models import Sequential, Model\n'), ((4361, 4412), 'keras.models.Model', 'Model', ([], {'inputs': 'x', 'outputs': '[output, supervised_layer]'}), '(inputs=x, outputs=[output, supervised_layer])\n', (4366, 4412), False, 'from keras.models import Sequential, Model\n'), ((5692, 5710), 'keras.models.Model', 'Model', (['x', 'logits_b'], {}), '(x, logits_b)\n', (5697, 5710), False, 'from keras.models import Sequential, Model\n'), ((6233, 6249), 'keras.models.Model', 'Model', (['x', 'output'], {}), '(x, output)\n', (6238, 6249), False, 'from keras.models import Sequential, Model\n'), ((7481, 7499), 'keras.models.Model', 'Model', (['x', 'logits_b'], {}), '(x, logits_b)\n', (7486, 7499), False, 'from keras.models import Sequential, Model\n'), ((8769, 8820), 'keras.models.Model', 'Model', ([], {'inputs': 'x', 'outputs': '[output, supervised_layer]'}), '(inputs=x, outputs=[output, supervised_layer])\n', (8774, 8820), False, 'from keras.models import Sequential, Model\n'), ((10129, 10147), 'keras.models.Model', 'Model', (['x', 'logits_b'], {}), '(x, logits_b)\n', (10134, 10147), False, 'from keras.models import Sequential, Model\n'), ((11362, 11413), 'keras.models.Model', 'Model', ([], {'inputs': 'x', 'outputs': '[output, supervised_layer]'}), '(inputs=x, outputs=[output, supervised_layer])\n', (11367, 11413), False, 'from keras.models import Sequential, Model\n'), ((245, 256), 'keras.backend.epsilon', 'K.epsilon', ([], {}), '()\n', (254, 256), True, 'from keras import backend as K\n'), ((380, 391), 'keras.backend.epsilon', 'K.epsilon', ([], {}), '()\n', (389, 391), True, 'from keras import backend as K\n'), ((468, 479), 'keras.backend.epsilon', 'K.epsilon', ([], {}), '()\n', (477, 479), True, 'from keras import backend as K\n'), ((644, 655), 'keras.backend.epsilon', 'K.epsilon', ([], {}), '()\n', (653, 655), True, 'from keras import backend as K\n'), ((685, 698), 'keras.backend.log', 'K.log', (['y_pred'], {}), '(y_pred)\n', (690, 698), True, 'from keras import backend as K\n'), ((701, 718), 'keras.backend.log', 'K.log', (['(1 - y_pred)'], {}), '(1 - y_pred)\n', (706, 718), True, 'from keras import backend as K\n'), ((757, 770), 'keras.backend.abs', 'K.abs', (['logits'], {}), '(logits)\n', (762, 770), True, 'from keras import backend as K\n'), ((971, 982), 'keras.backend.epsilon', 'K.epsilon', ([], {}), '()\n', (980, 982), True, 'from keras import backend as K\n'), ((2713, 2727), 'keras.backend.log', 'K.log', (['(U + eps)'], {}), '(U + eps)\n', (2718, 2727), True, 'from keras import backend as K\n'), ((2729, 2747), 
'keras.backend.log', 'K.log', (['(1 - U + eps)'], {}), '(1 - U + eps)\n', (2734, 2747), True, 'from keras import backend as K\n'), ((4892, 4927), 'keras.backend.variable', 'K.variable', (['(1.0)'], {'name': '"""temperature"""'}), "(1.0, name='temperature')\n", (4902, 4927), True, 'from keras import backend as K\n'), ((4953, 4989), 'keras.backend.variable', 'K.variable', (['(0.67)'], {'name': '"""temperature"""'}), "(0.67, name='temperature')\n", (4963, 4989), True, 'from keras import backend as K\n'), ((5889, 5923), 'keras.activations.sigmoid', 'keras.activations.sigmoid', (['(b / tau)'], {}), '(b / tau)\n', (5914, 5923), False, 'import keras\n'), ((6638, 6673), 'keras.backend.variable', 'K.variable', (['(1.0)'], {'name': '"""temperature"""'}), "(1.0, name='temperature')\n", (6648, 6673), True, 'from keras import backend as K\n'), ((6699, 6735), 'keras.backend.variable', 'K.variable', (['(0.67)'], {'name': '"""temperature"""'}), "(0.67, name='temperature')\n", (6709, 6735), True, 'from keras import backend as K\n'), ((7678, 7712), 'keras.activations.sigmoid', 'keras.activations.sigmoid', (['(b / tau)'], {}), '(b / tau)\n', (7703, 7712), False, 'import keras\n'), ((8189, 8228), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['(b_sampled * b_sampled)', '(1)'], {}), '(b_sampled * b_sampled, 1)\n', (8202, 8228), True, 'import tensorflow as tf\n'), ((8239, 8261), 'tensorflow.reshape', 'tf.reshape', (['r', '[-1, 1]'], {}), '(r, [-1, 1])\n', (8249, 8261), True, 'import tensorflow as tf\n'), ((9269, 9304), 'keras.backend.variable', 'K.variable', (['(1.0)'], {'name': '"""temperature"""'}), "(1.0, name='temperature')\n", (9279, 9304), True, 'from keras import backend as K\n'), ((9330, 9366), 'keras.backend.variable', 'K.variable', (['(0.67)'], {'name': '"""temperature"""'}), "(0.67, name='temperature')\n", (9340, 9366), True, 'from keras import backend as K\n'), ((10326, 10360), 'keras.activations.sigmoid', 'keras.activations.sigmoid', (['(b / tau)'], {}), '(b / tau)\n', (10351, 10360), False, 'import keras\n'), ((10931, 10970), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['(b_sampled * b_sampled)', '(1)'], {}), '(b_sampled * b_sampled, 1)\n', (10944, 10970), True, 'import tensorflow as tf\n'), ((10981, 11003), 'tensorflow.reshape', 'tf.reshape', (['r', '[-1, 1]'], {}), '(r, [-1, 1])\n', (10991, 11003), True, 'import tensorflow as tf\n'), ((659, 670), 'keras.backend.epsilon', 'K.epsilon', ([], {}), '()\n', (668, 670), True, 'from keras import backend as K\n'), ((8328, 8343), 'tensorflow.transpose', 'tf.transpose', (['r'], {}), '(r)\n', (8340, 8343), True, 'import tensorflow as tf\n'), ((8392, 8411), 'keras.backend.transpose', 'K.transpose', (['y_true'], {}), '(y_true)\n', (8403, 8411), True, 'from keras import backend as K\n'), ((11070, 11085), 'tensorflow.transpose', 'tf.transpose', (['r'], {}), '(r)\n', (11082, 11085), True, 'import tensorflow as tf\n'), ((11134, 11153), 'keras.backend.transpose', 'K.transpose', (['y_pred'], {}), '(y_pred)\n', (11145, 11153), True, 'from keras import backend as K\n'), ((287, 300), 'keras.backend.log', 'K.log', (['y_pred'], {}), '(y_pred)\n', (292, 300), True, 'from keras import backend as K\n'), ((862, 883), 'keras.backend.exp', 'K.exp', (['neg_abs_logits'], {}), '(neg_abs_logits)\n', (867, 883), True, 'from keras import backend as K\n'), ((1013, 1026), 'keras.backend.log', 'K.log', (['x_pred'], {}), '(x_pred)\n', (1018, 1026), True, 'from keras import backend as K\n'), ((2012, 2034), 'keras.backend.exp', 'K.exp', (['(0.5 * z_log_var)'], {}), '(0.5 * z_log_var)\n', 
(2017, 2034), True, 'from keras import backend as K\n'), ((3709, 3731), 'keras.backend.exp', 'K.exp', (['(0.5 * z_log_var)'], {}), '(0.5 * z_log_var)\n', (3714, 3731), True, 'from keras import backend as K\n'), ((5831, 5848), 'keras.backend.shape', 'K.shape', (['logits_b'], {}), '(logits_b)\n', (5838, 5848), True, 'from keras import backend as K\n'), ((7620, 7637), 'keras.backend.shape', 'K.shape', (['logits_b'], {}), '(logits_b)\n', (7627, 7637), True, 'from keras import backend as K\n'), ((10268, 10285), 'keras.backend.shape', 'K.shape', (['logits_b'], {}), '(logits_b)\n', (10275, 10285), True, 'from keras import backend as K\n'), ((510, 523), 'keras.backend.log', 'K.log', (['y_pred'], {}), '(y_pred)\n', (515, 523), True, 'from keras import backend as K\n'), ((537, 556), 'keras.backend.log', 'K.log', (['compl_y_pred'], {}), '(compl_y_pred)\n', (542, 556), True, 'from keras import backend as K\n'), ((1935, 1950), 'keras.backend.shape', 'K.shape', (['z_mean'], {}), '(z_mean)\n', (1942, 1950), True, 'from keras import backend as K\n'), ((3632, 3647), 'keras.backend.shape', 'K.shape', (['z_mean'], {}), '(z_mean)\n', (3639, 3647), True, 'from keras import backend as K\n'), ((8301, 8324), 'tensorflow.transpose', 'tf.transpose', (['b_sampled'], {}), '(b_sampled)\n', (8313, 8324), True, 'import tensorflow as tf\n'), ((8515, 8533), 'keras.backend.relu', 'K.relu', (['(margin - D)'], {}), '(margin - D)\n', (8521, 8533), True, 'from keras import backend as K\n'), ((11043, 11066), 'tensorflow.transpose', 'tf.transpose', (['b_sampled'], {}), '(b_sampled)\n', (11055, 11066), True, 'import tensorflow as tf\n'), ((11257, 11275), 'keras.backend.relu', 'K.relu', (['(margin - D)'], {}), '(margin - D)\n', (11263, 11275), True, 'from keras import backend as K\n')]
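The row above defines several Keras builders for variational and semantic-hashing autoencoders (a Gaussian VAE with a supervised head, a binary VAE, and the PSH_GS / SSBVAE hashing variants). The piece they all share is the relaxed-Bernoulli ("Gumbel-sigmoid") sampling step: logistic noise is added to the encoder logits and the result is squashed by a temperature-scaled sigmoid inside a Lambda layer, so gradients can flow through the soft binary codes. The sketch below reproduces only that sampling step in plain NumPy as an illustration; the shapes, the seed, and the 0.5 hard threshold are assumptions and not part of the row's code, which does the same computation with keras.backend ops.

# Minimal NumPy sketch of the relaxed-Bernoulli sampling used by binary_VAE,
# PSH_GS and SSBVAE above. Shapes, seed and the hard threshold are illustrative assumptions.
import numpy as np

rng = np.random.default_rng(0)

def sample_logistic_noise(shape, eps=1e-20):
    # log(U) - log(1 - U) with U ~ Uniform(0, 1): the same kind of noise the row's
    # sample_gumbel helper adds to the encoder logits.
    u = rng.uniform(0.0, 1.0, size=shape)
    return np.log(u + eps) - np.log(1.0 - u + eps)

def gumbel_sigmoid(logits, tau=0.67):
    # Soft binary code in (0, 1); as tau -> 0 the samples approach hard 0/1 bits.
    return 1.0 / (1.0 + np.exp(-(logits + sample_logistic_noise(logits.shape)) / tau))

logits = rng.normal(size=(4, 16))           # stand-in for the Dense 'logits-b' output
b_soft = gumbel_sigmoid(logits, tau=0.67)   # differentiable codes used during training
b_hard = (b_soft > 0.5).astype(np.int8)    # hard hash codes used at retrieval time
print(b_soft.shape, b_hard[0])

In the row itself this sampling is wrapped in a Lambda layer between the pre-encoder and the generator, which is what lets the reconstruction and Hamming losses backpropagate through the sampled codes.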
from pymatgen.io.vasp.sets import MPRelaxSet from pymatgen.core.structure import Structure #from atomate.vasp.workflows.base.core import get_wf from atomate.vasp.powerups import add_small_gap_multiply, add_stability_check, add_modify_incar, \ add_wf_metadata, add_common_powerups from quantumML.fireworks import StaticFW2D, OptimizeFW2D, NonSCFFW2D from atomate.utils.utils import get_wf_from_spec_dict from atomate.vasp.config import SMALLGAP_KPOINT_MULTIPLY, STABILITY_CHECK, VASP_CMD, DB_FILE, \ ADD_WF_METADATA, VDW_KERNEL_DIR import numpy as np import os from fireworks import Workflow from monty.serialization import loadfn from pathlib import Path module_dir = os.path.join(os.path.dirname(os.path.abspath(__file__))) def _read_user_incar(fname): fpath = os.path.join(module_dir,fname) fil = open(fpath, 'r') lines = fil.readlines() incar = {} for line in lines: key = (line.split('=')[0].strip()) val = line.split('=')[-1].strip() incar[key] = val return incar def wf_bandstructure2D(structure, c=None): c = c or {} vasp_cmd = c.get("VASP_CMD", VASP_CMD) db_file = c.get("DB_FILE", DB_FILE) vdw_kernel = c.get("VDW_KERNEL_DIR", VDW_KERNEL_DIR) incar = _read_user_incar('Relax2D.txt') print(incar) mpr2d = MPRelaxSet(structure, force_gamma=True, user_incar_settings=incar) mpr2dstatic = MPRelaxSet(structure, force_gamma=True, user_incar_settings={"NEDOS": "3001", "EMIN": "-15.0", "EMAX": "15.0"}) #fws = [OptimizeFW2D(structure=structure, vasp_input_set=mpr2d, vasp_cmd=vasp_cmd, db_file=db_file, vdw_kernel_dir=vdw_kernel)] fws = [OptimizeFW2D(structure=structure, vasp_input_set=mpr2d, vasp_cmd=vasp_cmd, vdw_kernel_dir=vdw_kernel)] fws.append(StaticFW2D(parents=fws[0], vasp_input_set= mpr2dstatic)) #fws.append(NonSCFFW2D(parents=fws[1], mode='uniform')) fws.append(NonSCFFW2D(parents=fws[1], mode='line')) wf = Workflow(fws) '''check bandstructure.yaml''' ''' wf = get_wf(structure, "bandstructure.yaml", vis=MPScanRelaxSet2D(structure, force_gamma=True,), \ params=[{'vasp_input_set': mpr2d},{},{},{}], common_params={"vasp_cmd": vasp_cmd, "db_file": db_file,}) #"vdw_kernel_dir": vdw_kernel}) ''' wf = add_common_powerups(wf, c) if c.get("SMALLGAP_KPOINT_MULTIPLY", SMALLGAP_KPOINT_MULTIPLY): wf = add_small_gap_multiply(wf, 0.5, 5, "static") wf = add_small_gap_multiply(wf, 0.5, 5, "nscf") if c.get("STABILITY_CHECK", STABILITY_CHECK): wf = add_stability_check(wf, fw_name_constraint="structure optimization") if c.get("ADD_WF_METADATA", ADD_WF_METADATA): wf = add_wf_metadata(wf, structure) wf.name = "{}:{}".format(structure.composition.reduced_formula, "bandStructure") ''' fws = wf.fws fws[0] = new_firework print(fws) ''' return wf
[ "atomate.vasp.powerups.add_common_powerups", "os.path.abspath", "quantumML.fireworks.StaticFW2D", "fireworks.Workflow", "pymatgen.io.vasp.sets.MPRelaxSet", "atomate.vasp.powerups.add_wf_metadata", "quantumML.fireworks.OptimizeFW2D", "atomate.vasp.powerups.add_small_gap_multiply", "atomate.vasp.powerups.add_stability_check", "quantumML.fireworks.NonSCFFW2D", "os.path.join" ]
[((778, 809), 'os.path.join', 'os.path.join', (['module_dir', 'fname'], {}), '(module_dir, fname)\n', (790, 809), False, 'import os\n'), ((1304, 1370), 'pymatgen.io.vasp.sets.MPRelaxSet', 'MPRelaxSet', (['structure'], {'force_gamma': '(True)', 'user_incar_settings': 'incar'}), '(structure, force_gamma=True, user_incar_settings=incar)\n', (1314, 1370), False, 'from pymatgen.io.vasp.sets import MPRelaxSet\n'), ((1389, 1504), 'pymatgen.io.vasp.sets.MPRelaxSet', 'MPRelaxSet', (['structure'], {'force_gamma': '(True)', 'user_incar_settings': "{'NEDOS': '3001', 'EMIN': '-15.0', 'EMAX': '15.0'}"}), "(structure, force_gamma=True, user_incar_settings={'NEDOS':\n '3001', 'EMIN': '-15.0', 'EMAX': '15.0'})\n", (1399, 1504), False, 'from pymatgen.io.vasp.sets import MPRelaxSet\n'), ((1944, 1957), 'fireworks.Workflow', 'Workflow', (['fws'], {}), '(fws)\n', (1952, 1957), False, 'from fireworks import Workflow\n'), ((2273, 2299), 'atomate.vasp.powerups.add_common_powerups', 'add_common_powerups', (['wf', 'c'], {}), '(wf, c)\n', (2292, 2299), False, 'from atomate.vasp.powerups import add_small_gap_multiply, add_stability_check, add_modify_incar, add_wf_metadata, add_common_powerups\n'), ((708, 733), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (723, 733), False, 'import os\n'), ((1644, 1749), 'quantumML.fireworks.OptimizeFW2D', 'OptimizeFW2D', ([], {'structure': 'structure', 'vasp_input_set': 'mpr2d', 'vasp_cmd': 'vasp_cmd', 'vdw_kernel_dir': 'vdw_kernel'}), '(structure=structure, vasp_input_set=mpr2d, vasp_cmd=vasp_cmd,\n vdw_kernel_dir=vdw_kernel)\n', (1656, 1749), False, 'from quantumML.fireworks import StaticFW2D, OptimizeFW2D, NonSCFFW2D\n'), ((1762, 1816), 'quantumML.fireworks.StaticFW2D', 'StaticFW2D', ([], {'parents': 'fws[0]', 'vasp_input_set': 'mpr2dstatic'}), '(parents=fws[0], vasp_input_set=mpr2dstatic)\n', (1772, 1816), False, 'from quantumML.fireworks import StaticFW2D, OptimizeFW2D, NonSCFFW2D\n'), ((1894, 1933), 'quantumML.fireworks.NonSCFFW2D', 'NonSCFFW2D', ([], {'parents': 'fws[1]', 'mode': '"""line"""'}), "(parents=fws[1], mode='line')\n", (1904, 1933), False, 'from quantumML.fireworks import StaticFW2D, OptimizeFW2D, NonSCFFW2D\n'), ((2382, 2426), 'atomate.vasp.powerups.add_small_gap_multiply', 'add_small_gap_multiply', (['wf', '(0.5)', '(5)', '"""static"""'], {}), "(wf, 0.5, 5, 'static')\n", (2404, 2426), False, 'from atomate.vasp.powerups import add_small_gap_multiply, add_stability_check, add_modify_incar, add_wf_metadata, add_common_powerups\n'), ((2440, 2482), 'atomate.vasp.powerups.add_small_gap_multiply', 'add_small_gap_multiply', (['wf', '(0.5)', '(5)', '"""nscf"""'], {}), "(wf, 0.5, 5, 'nscf')\n", (2462, 2482), False, 'from atomate.vasp.powerups import add_small_gap_multiply, add_stability_check, add_modify_incar, add_wf_metadata, add_common_powerups\n'), ((2547, 2615), 'atomate.vasp.powerups.add_stability_check', 'add_stability_check', (['wf'], {'fw_name_constraint': '"""structure optimization"""'}), "(wf, fw_name_constraint='structure optimization')\n", (2566, 2615), False, 'from atomate.vasp.powerups import add_small_gap_multiply, add_stability_check, add_modify_incar, add_wf_metadata, add_common_powerups\n'), ((2680, 2710), 'atomate.vasp.powerups.add_wf_metadata', 'add_wf_metadata', (['wf', 'structure'], {}), '(wf, structure)\n', (2695, 2710), False, 'from atomate.vasp.powerups import add_small_gap_multiply, add_stability_check, add_modify_incar, add_wf_metadata, add_common_powerups\n')]
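The row above wires a 2D band-structure workflow out of customized FireWorks: a vdW-aware structure optimization, a dense-DOS static run, and a line-mode non-SCF step, followed by the usual atomate powerups. A hedged driver sketch follows; the module name my2d_workflows, the POSCAR path, and the launchpad configuration are placeholders for whatever the surrounding project actually uses.

# Hypothetical driver for wf_bandstructure2D. "my2d_workflows" and "POSCAR" are
# placeholder names; a configured my_launchpad.yaml is assumed for LaunchPad.auto_load().
from fireworks import LaunchPad
from pymatgen.core.structure import Structure

from my2d_workflows import wf_bandstructure2D   # module holding the row's code

structure = Structure.from_file("POSCAR")                # any 2D slab structure
wf = wf_bandstructure2D(structure, c={"VASP_CMD": "srun vasp_std"})

lp = LaunchPad.auto_load()    # reads my_launchpad.yaml
lp.add_wf(wf)                  # queues the relax -> static -> line-mode NSCF chain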
"""Tests of endgame.py """ import unittest import os import time from reversi.board import BitBoard from reversi.strategies.common import Timer, Measure, CPU_TIME from reversi.strategies import _EndGame_, _EndGame, EndGame_, EndGame, _AlphaBeta_, _AlphaBeta, AlphaBeta_, AlphaBeta import reversi.strategies.coordinator as coord class TestEndGame(unittest.TestCase): """endgame """ def test_endgame_init(self): alphabeta_n = [_AlphaBeta_, _AlphaBeta, AlphaBeta_, AlphaBeta] timer = [False, False, True, True] measure = [False, True, False, True] for index, instance in enumerate([_EndGame_, _EndGame, EndGame_, EndGame]): endgame = instance() self.assertEqual(endgame._MIN, -10000000) self.assertEqual(endgame._MAX, 10000000) self.assertTrue(isinstance(endgame.evaluator, coord.Evaluator_N_Fast)) self.assertEqual(endgame.depth, 60) self.assertTrue(isinstance(endgame.alphabeta_n, alphabeta_n[index])) self.assertEqual(endgame.timer, timer[index]) self.assertEqual(endgame.measure, measure[index]) depth = 12 for index, instance in enumerate([_EndGame_, _EndGame, EndGame_, EndGame]): endgame = instance(depth) self.assertEqual(endgame._MIN, -10000000) self.assertEqual(endgame._MAX, 10000000) self.assertTrue(isinstance(endgame.evaluator, coord.Evaluator_N_Fast)) self.assertEqual(endgame.depth, depth) self.assertTrue(isinstance(endgame.alphabeta_n, alphabeta_n[index])) self.assertEqual(endgame.timer, timer[index]) self.assertEqual(endgame.measure, measure[index]) def test_endgame_next_move(self): for instance in [_EndGame_, _EndGame, EndGame_, EndGame]: board = BitBoard() endgame = instance(depth=6) board.put_disc('black', 3, 2) self.assertEqual(endgame.next_move('white', board), (2, 2)) board.put_disc('white', 2, 4) board.put_disc('black', 5, 5) board.put_disc('white', 4, 2) board.put_disc('black', 5, 2) board.put_disc('white', 5, 4) self.assertEqual(endgame.next_move('black', board), (1, 5)) def test_endgame_get_best_move(self): endgame = _EndGame_() # O--OOOOX # -OOOOOOX # OOXXOOOX # OOXOOOXX # OOOOOOXX # ---OOOOX # ----O--X # -------- # X board = BitBoard() board._black_bitboard = 0x0101312303010100 board._white_bitboard = 0x9E7ECEDCFC1E0800 board.update_score() # depth=20 : black : a2 board.put_disc('black', 0, 1) # depth=19 : white : b1 board.put_disc('white', 1, 0) # depth=18 : black : c1 board.put_disc('black', 2, 0) # depth=17 : white : -- # depth=17 : black : b6 board.put_disc('black', 1, 5) #print(board) #print(board._black_score, board._white_score) # depth=16 : white : c7 self.assertEqual(endgame.get_best_move('white', board, board.get_legal_moves('white')), ((1, 6), {(1, 6): -38.0, (2, 6): -38.0})) board.put_disc('white', 2, 6) # depth=15 : black : a7 self.assertEqual(endgame.get_best_move('black', board, board.get_legal_moves('black')), ((0, 6), {(0, 5): 16.0, (0, 6): 38.0, (1, 7): 38.0, (2, 5): 30.0, (3, 6): 38.0, (3, 7): 38.0, (4, 7): 38.0, (5, 6): 38.0, (5, 7): 38.0, (6, 6): 38.0})) board.put_disc('black', 0, 6) # depth=14 : white : b7 self.assertEqual(endgame.get_best_move('white', board, board.get_legal_moves('white')), ((1, 6), {(1, 6): -38.0})) board.put_disc('white', 1, 6) # depth=13 : black : b8 self.assertEqual(endgame.get_best_move('black', board, board.get_legal_moves('black')), ((1, 7), {(0, 5): 8.0, (1, 7): 38.0, (2, 5): 36.0, (3, 6): 36.0, (3, 7): 38.0, (4, 7): 38.0, (5, 6): 36.0, (5, 7): 38.0, (6, 6): 36.0})) board.put_disc('black', 1, 7) # depth=12 : white : d7 self.assertEqual(endgame.get_best_move('white', board, board.get_legal_moves('white')), ((3, 6), {(2, 5): -42.0, (3, 
6): -38.0, (3, 7): -38.0})) board.put_disc('white', 3, 6) # depth=11 : black : f8 self.assertEqual(endgame.get_best_move('black', board, board.get_legal_moves('black')), ((5, 7), {(0, 5): 2.0, (2, 5): 36.0, (2, 7): 36.0, (3, 7): 36.0, (4, 7): 36.0, (5, 6): 36.0, (5, 7): 38.0, (6, 6): 36.0})) board.put_disc('black', 5, 7) # depth=10 : white : c6 self.assertEqual(endgame.get_best_move('white', board, board.get_legal_moves('white')), ((2, 5), {(2, 5): -38.0, (3, 7): -38.0, (4, 7): -38.0, (3, 7): -38.0, (5, 6): -38.0})) board.put_disc('white', 2, 5) # depth=9 : black : f7 self.assertEqual(endgame.get_best_move('black', board, board.get_legal_moves('black')), ((5, 6), {(0, 5): -8.0, (0, 7): 38.0, (2, 7): 38.0, (3, 7): 38.0, (5, 6): 38.0, (6, 6): 38.0})) board.put_disc('black', 5, 6) # depth=8 : white : g7 self.assertEqual(endgame.get_best_move('white', board, board.get_legal_moves('white')), ((6, 6), {(4, 7): -38.0, (6, 6): -38.0, (6, 7): -38.0})) board.put_disc('white', 6, 6) # depth=7 : black : a8 self.assertEqual(endgame.get_best_move('black', board, board.get_legal_moves('black')), ((0, 7), {(0, 5): 6.0, (0, 7): 38.0, (2, 7): 38.0, (3, 7): 38.0, (4, 7): 38.0, (6, 7): 38.0})) board.put_disc('black', 0, 7) # depth=6 : white : -- # depth=6 : black : d8 self.assertEqual(endgame.get_best_move('black', board, board.get_legal_moves('black')), ((3, 7), {(0, 5): 34.0, (2, 7): 34.0, (3, 7): 38.0, (4, 7): 38.0, (6, 7): 38.0})) board.put_disc('black', 3, 7) # depth=5 : white : e8 self.assertEqual(endgame.get_best_move('white', board, board.get_legal_moves('white')), ((4, 7), {(4, 7): -38.0})) board.put_disc('white', 4, 7) # depth=4 : balck : c8 self.assertEqual(endgame.get_best_move('black', board, board.get_legal_moves('black')), ((2, 7), {(0, 5): 22.0, (2, 7): 38.0, (6, 7): 38.0, (7, 7): 38.0})) board.put_disc('black', 2, 7) # depth=3 : white ; g8 self.assertEqual(endgame.get_best_move('white', board, board.get_legal_moves('white')), ((6, 7), {(6, 7): -38.0})) board.put_disc('white', 6, 7) # depth=2 : balck : a6 self.assertEqual(endgame.get_best_move('black', board, board.get_legal_moves('black')), ((0, 5), {(0, 5): 38.0, (7, 7): 38.0})) board.put_disc('black', 0, 5) # depth=1 : white : -- # depth=1 : black : h8 self.assertEqual(endgame.get_best_move('black', board, board.get_legal_moves('black')), ((7, 7), {(7, 7): 38.0})) def test_endgame_timer_timeout(self): board = BitBoard() board.put_disc('black', 3, 2) endgame = EndGame(depth=20) pid = endgame.__class__.__name__ + str(os.getpid()) Measure.elp_time[pid] = {'min': 10000, 'max': 0, 'ave': 0, 'cnt': 0} Measure.count[pid] = 0 endgame.next_move('white', board) self.assertTrue(Timer.timeout_flag[pid]) self.assertLessEqual(Measure.elp_time[pid]['max'], CPU_TIME * 1.1) print('(1000000)', Measure.count[pid]) def test_endgame_remain_12(self): # Windows10 Celeron 1.6GHz 4.00GB board = BitBoard() endgame = _EndGame(depth=12) key = endgame.__class__.__name__ + str(os.getpid()) Measure.elp_time[key] = {'min': 10000, 'max': 0, 'ave': 0, 'cnt': 0} Measure.count[key] = 0 color = 'black' board._black_bitboard = 0xF07DBF650158381C board._white_bitboard = 0x2009A7EA6C4E0 board.update_score() self.assertEqual(endgame.next_move(color, board), (7, 0)) print() print(key, 'remain = 12') print(' min :', Measure.elp_time[key]['min'], '(s)') print(' max :', Measure.elp_time[key]['max'], '(s)') print(' ave :', Measure.elp_time[key]['ave'], '(s)') print('(101890 / 0.03s)', Measure.count[key]) def test_endgame_remain_14(self): board = BitBoard() endgame = _EndGame(depth=14) 
key = endgame.__class__.__name__ + str(os.getpid()) Measure.elp_time[key] = {'min': 10000, 'max': 0, 'ave': 0, 'cnt': 0} Measure.count[key] = 0 color = 'black' board._black_bitboard = 0xE07DBF650158381C board._white_bitboard = 0x0009A7EA6C4E0 board.update_score() self.assertEqual(endgame.next_move(color, board), (7, 5)) print() print(key, 'remain = 14') print(' min :', Measure.elp_time[key]['min'], '(s)') print(' max :', Measure.elp_time[key]['max'], '(s)') print(' ave :', Measure.elp_time[key]['ave'], '(s)') print('(562957 / 0.19s)', Measure.count[key]) def test_endgame_remain_16(self): board = BitBoard() endgame = _EndGame(depth=16) key = endgame.__class__.__name__ + str(os.getpid()) Measure.elp_time[key] = {'min': 10000, 'max': 0, 'ave': 0, 'cnt': 0} Measure.count[key] = 0 color = 'black' board._black_bitboard = 0xC07DBF650158381C board._white_bitboard = 0x0009A7CA6C4E0 board.update_score() self.assertEqual(endgame.next_move(color, board), (6, 6)) print() print(key, 'remain = 16') print(' min :', Measure.elp_time[key]['min'], '(s)') print(' max :', Measure.elp_time[key]['max'], '(s)') print(' ave :', Measure.elp_time[key]['ave'], '(s)') print('(5417116 / 1.94s)', Measure.count[key]) def test_endgame_force_import_error(self): import os import importlib import reversi # ------------------------------- # switch environ and reload module os.environ['FORCE_ENDGAMEMETHODS_IMPORT_ERROR'] = 'RAISE' importlib.reload(reversi.strategies.EndGameMethods) self.assertTrue(reversi.strategies.EndGameMethods.ENDGAME_SIZE8_64BIT_ERROR) # ------------------------------- for instance in [_EndGame_, _EndGame, EndGame_, EndGame]: board = BitBoard() endgame = instance(depth=6) board.put_disc('black', 3, 2) self.assertEqual(endgame.next_move('white', board), (2, 2)) board.put_disc('white', 2, 4) board.put_disc('black', 5, 5) board.put_disc('white', 4, 2) board.put_disc('black', 5, 2) board.put_disc('white', 5, 4) self.assertEqual(endgame.next_move('black', board), (1, 5)) # ------------------------------- # recover environment and reload module del os.environ['FORCE_ENDGAMEMETHODS_IMPORT_ERROR'] importlib.reload(reversi.strategies.EndGameMethods) self.assertFalse(reversi.strategies.EndGameMethods.ENDGAME_SIZE8_64BIT_ERROR) # -------------------------------
[ "reversi.strategies.EndGame", "os.getpid", "importlib.reload", "reversi.board.BitBoard", "reversi.strategies._EndGame_", "reversi.strategies._EndGame" ]
[((2359, 2370), 'reversi.strategies._EndGame_', '_EndGame_', ([], {}), '()\n', (2368, 2370), False, 'from reversi.strategies import _EndGame_, _EndGame, EndGame_, EndGame, _AlphaBeta_, _AlphaBeta, AlphaBeta_, AlphaBeta\n'), ((2552, 2562), 'reversi.board.BitBoard', 'BitBoard', ([], {}), '()\n', (2560, 2562), False, 'from reversi.board import BitBoard\n'), ((6976, 6986), 'reversi.board.BitBoard', 'BitBoard', ([], {}), '()\n', (6984, 6986), False, 'from reversi.board import BitBoard\n'), ((7043, 7060), 'reversi.strategies.EndGame', 'EndGame', ([], {'depth': '(20)'}), '(depth=20)\n', (7050, 7060), False, 'from reversi.strategies import _EndGame_, _EndGame, EndGame_, EndGame, _AlphaBeta_, _AlphaBeta, AlphaBeta_, AlphaBeta\n'), ((7539, 7549), 'reversi.board.BitBoard', 'BitBoard', ([], {}), '()\n', (7547, 7549), False, 'from reversi.board import BitBoard\n'), ((7568, 7586), 'reversi.strategies._EndGame', '_EndGame', ([], {'depth': '(12)'}), '(depth=12)\n', (7576, 7586), False, 'from reversi.strategies import _EndGame_, _EndGame, EndGame_, EndGame, _AlphaBeta_, _AlphaBeta, AlphaBeta_, AlphaBeta\n'), ((8315, 8325), 'reversi.board.BitBoard', 'BitBoard', ([], {}), '()\n', (8323, 8325), False, 'from reversi.board import BitBoard\n'), ((8344, 8362), 'reversi.strategies._EndGame', '_EndGame', ([], {'depth': '(14)'}), '(depth=14)\n', (8352, 8362), False, 'from reversi.strategies import _EndGame_, _EndGame, EndGame_, EndGame, _AlphaBeta_, _AlphaBeta, AlphaBeta_, AlphaBeta\n'), ((9091, 9101), 'reversi.board.BitBoard', 'BitBoard', ([], {}), '()\n', (9099, 9101), False, 'from reversi.board import BitBoard\n'), ((9120, 9138), 'reversi.strategies._EndGame', '_EndGame', ([], {'depth': '(16)'}), '(depth=16)\n', (9128, 9138), False, 'from reversi.strategies import _EndGame_, _EndGame, EndGame_, EndGame, _AlphaBeta_, _AlphaBeta, AlphaBeta_, AlphaBeta\n'), ((10087, 10138), 'importlib.reload', 'importlib.reload', (['reversi.strategies.EndGameMethods'], {}), '(reversi.strategies.EndGameMethods)\n', (10103, 10138), False, 'import importlib\n'), ((10961, 11012), 'importlib.reload', 'importlib.reload', (['reversi.strategies.EndGameMethods'], {}), '(reversi.strategies.EndGameMethods)\n', (10977, 11012), False, 'import importlib\n'), ((1849, 1859), 'reversi.board.BitBoard', 'BitBoard', ([], {}), '()\n', (1857, 1859), False, 'from reversi.board import BitBoard\n'), ((10353, 10363), 'reversi.board.BitBoard', 'BitBoard', ([], {}), '()\n', (10361, 10363), False, 'from reversi.board import BitBoard\n'), ((7108, 7119), 'os.getpid', 'os.getpid', ([], {}), '()\n', (7117, 7119), False, 'import os\n'), ((7634, 7645), 'os.getpid', 'os.getpid', ([], {}), '()\n', (7643, 7645), False, 'import os\n'), ((8410, 8421), 'os.getpid', 'os.getpid', ([], {}), '()\n', (8419, 8421), False, 'import os\n'), ((9186, 9197), 'os.getpid', 'os.getpid', ([], {}), '()\n', (9195, 9197), False, 'import os\n')]
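The row above is a plain unittest module for the reversi endgame strategies; it has no __main__ guard, so it is meant to be picked up by a test runner. A small runner sketch is given below; the file name test_endgame.py and the tests/ directory are assumptions, and the reversi package itself must already be importable.

# Hypothetical runner, roughly equivalent to "python -m unittest tests.test_endgame -v".
# Assumes the row above is saved as tests/test_endgame.py and reversi is installed.
import unittest

suite = unittest.defaultTestLoader.discover("tests", pattern="test_endgame.py")
unittest.TextTestRunner(verbosity=2).run(suite)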
import sys sys.path.append("/home/gaoxiang/data/CuAssembler") from CuAsm.CubinFile import CubinFile binname = sys.argv[1] cf = CubinFile(binname) asmname = binname.replace('.cubin', '.cuasm') cf.saveAsCuAsm(asmname)
[ "sys.path.append", "CuAsm.CubinFile.CubinFile" ]
[((13, 63), 'sys.path.append', 'sys.path.append', (['"""/home/gaoxiang/data/CuAssembler"""'], {}), "('/home/gaoxiang/data/CuAssembler')\n", (28, 63), False, 'import sys\n'), ((130, 148), 'CuAsm.CubinFile.CubinFile', 'CubinFile', (['binname'], {}), '(binname)\n', (139, 148), False, 'from CuAsm.CubinFile import CubinFile\n')]
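The last row is a three-line helper that converts a single .cubin into CuAssembler's editable .cuasm form. A hedged batch variant using the same two CuAsm calls is sketched below; the CuAssembler checkout path and the kernels/ directory are assumptions.

# Hypothetical batch version of the row above. Only CubinFile(...) and
# saveAsCuAsm(...) are taken from the row; both paths are placeholders.
import sys
from pathlib import Path

sys.path.append("/path/to/CuAssembler")        # adjust to your CuAssembler checkout
from CuAsm.CubinFile import CubinFile

for binpath in Path("kernels").glob("*.cubin"):
    asmpath = binpath.with_suffix(".cuasm")
    CubinFile(str(binpath)).saveAsCuAsm(str(asmpath))
    print(binpath, "->", asmpath)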