code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
# ExportSQLite: SQLite export plugin for MySQL Workbench
#
# Copyright (C) 2015 <NAME> (Python version)
# Copyright (C) 2009 <NAME> (Original Lua version)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import re
import StringIO
import grt
import mforms
from grt.modules import Workbench
from wb import DefineModule, wbinputs
from workbench.ui import WizardForm, WizardPage
from mforms import newButton, newCodeEditor, FileChooser
# Plugin registration metadata consumed by MySQL Workbench's module loader.
ModuleInfo = DefineModule(name='ExportSQLite',
                          author='<NAME>',
                          version='0.1.0')
@ModuleInfo.plugin('wb.util.exportSQLite',
caption='Export SQLite CREATE script',
input=[wbinputs.currentCatalog()],
groups=['Catalog/Utilities', 'Menu/Catalog'])
@ModuleInfo.export(grt.INT, grt.classes.db_Catalog)
def exportSQLite(cat):
"""Function to go through all schemata in catalog and rename all FKs
of table-objects
"""
def validate_for_sqlite_export(cat):
"""Check uniqueness of schema, table and index names. Return 0 on
success otherwise return 1 (the export process should abort)
"""
have_errors = False
idt = {}
for i, schema in enumerate(cat.schemata):
if schema.name in idt:
have_errors = True
if Workbench.confirm('Name conflict',
'Schemas %d and %d have the same name "%s".'
' Please rename one of them.\n'
'Search for more such errors?' % (
idt[schema.name], i, schema.name)) == 0:
return False
else:
idt[schema.name] = i
# Do not continue looking for errors on schema name error
if have_errors:
return False
for schema in cat.schemata:
idt = {}
for i, tbl in enumerate(schema.tables):
if tbl.name == '':
have_errors = True
if Workbench.confirm('Name conflict',
'Table %d in schema "%s". has no name.'
' Please rename.\n'
'Search for more such errors?' % (
i, schema.name)) == 0:
return False
if tbl.name in idt:
have_errors = True
if Workbench.confirm('Name conflict',
'Tables %d and %d in schema "%s"'
' have the same name "%s".'
' Please rename one of them.\n'
'Search for more such errors?' % (
idt[tbl.name], i, schema.name, tbl.name)) == 0:
return False
else:
idt[tbl.name] = i
if have_errors:
return False
for schema in cat.schemata:
for tbl in schema.tables:
idt = {}
for i, column in enumerate(tbl.columns):
if column.name == '':
have_errors = True
if Workbench.confirm('Name conflict',
'Column %d in table "%s"."%s". has no name.'
' Please rename.\n'
'Search for more such errors?' % (
i, schema.name, tbl.name)) == 0:
return False
if column.name in idt:
have_errors = True
if Workbench.confirm('Name conflict',
'Columns %d and %d in table "%s"."%s"'
' have the same name "%s".'
' Please rename one of them.\n'
'Search for more such errors?' % (
idt[column.name],
i,
schema.name,
tbl.name,
column.name)) == 0:
return False
else:
idt[column.name] = i
# Now check indices (except primary/unique)
idt = {}
for i, index in enumerate(tbl.indices):
if index.indexType == 'INDEX':
if index.name == '':
have_errors = True
if Workbench.confirm('Name conflict',
'Index %d in table "%s"."%s". has no name.'
' Please rename.\n'
'Search for more such errors?' % (
i, schema.name, tbl.name)) == 0:
return False
if index.name in idt:
have_errors = True
if Workbench.confirm('Name conflict',
'Indices %d and %d in table "%s"."%s"'
' have the same name "%s".'
' Please rename one of them.\n'
'Search for more such errors?' % (
idt[index.name],
i,
schema.name,
tbl.name,
column.name)) == 0:
return False
else:
idt[index.name] = i
if have_errors:
return False
return True
def is_deferred(fkey):
# Hack: if comment starts with "Defer..." we make it a deferred FK could
# use member 'deferability' (WB has it), but there is no GUI for it
return fkey.comment.lstrip().lower()[0:5] == 'defer'
def export_table(out, db_name, schema, tbl):
if len(tbl.columns) == 0:
return
out.write('CREATE TABLE %s%s(\n%s' % (
db_name, dq(tbl.name), schema_comment_format(tbl.comment)))
primary_key = [i for i in tbl.indices if i.isPrimary == 1]
primary_key = primary_key[0] if len(primary_key) > 0 else None
pk_column = None
if primary_key and len(primary_key.columns) == 1:
pk_column = primary_key.columns[0].referencedColumn
col_comment = ''
for i, column in enumerate(tbl.columns):
check, sqlite_type, flags = '', None, None
if column.simpleType:
sqlite_type = column.simpleType.name
flags = column.simpleType.flags
else:
sqlite_type = column.userType.name
flags = column.flags
length = column.length
# For INTEGER PRIMARY KEY column to become an alias for the rowid
# the type needs to be "INTEGER" not "INT"
# we fix it for other columns as well
if 'INT' in sqlite_type or sqlite_type == 'LONG':
sqlite_type = 'INTEGER'
length = -1
# Check flags for "unsigned"
if 'UNSIGNED' in column.flags:
check = dq(column.name) + '>=0'
# We even implement ENUM (because we can)
if sqlite_type == 'ENUM':
sqlite_type = 'TEXT'
if column.datatypeExplicitParams:
check = (dq(column.name) + ' IN' +
column.datatypeExplicitParams)
if i > 0:
out.write(',' + comment_format(col_comment) + '\n')
out.write(' ' + dq(column.name))
# Type is optional in SQLite
if sqlite_type != '':
out.write(' ' + sqlite_type)
# For [VAR]CHAR and such types specify length even though this is
# not used in SQLite
if length > 0:
out.write('(%d)' % length)
# Must specify single-column PKs as column-constraints for AI/rowid
# behaviour
if column == pk_column:
out.write(' PRIMARY KEY')
if primary_key.columns[0].descend == 1:
out.write(' DESC')
# Only PK columns can be AI in SQLite
if column.autoIncrement == 1:
out.write(' AUTOINCREMENT')
# Check for NotNull
if column.isNotNull == 1:
out.write(' NOT NULL')
if check != '':
out.write(' CHECK(' + check + ')')
if column.defaultValue != '':
out.write(' DEFAULT ' + column.defaultValue)
col_comment = column.comment
# For multicolumn PKs
if primary_key and not pk_column:
out.write(',%s\n PRIMARY KEY(%s)' % (
comment_format(col_comment),
print_index_columns(primary_key)))
col_comment = ''
# Put non-primary, UNIQUE Keys in CREATE TABLE as well (because we can)
for index in tbl.indices:
if index != primary_key and index.indexType == 'UNIQUE':
out.write(',%s\n' % comment_format(col_comment))
col_comment = ''
if index.name != '':
out.write(' CONSTRAINT %s\n ' % dq(index.name))
out.write(' UNIQUE(%s)' % print_index_columns(index))
for fkey in tbl.foreignKeys:
have_fkeys = 1
out.write(',%s\n' % comment_format(col_comment))
col_comment = ''
if fkey.name != '':
out.write(' CONSTRAINT %s\n ' % dq(fkey.name))
out.write(' FOREIGN KEY(%s)\n' % print_fk_columns(fkey.columns))
out.write(' REFERENCES %s(%s)' % (
dq(fkey.referencedTable.name),
print_fk_columns(fkey.referencedColumns)))
if fkey.deleteRule in ['RESTRICT', 'CASCADE', 'SET NULL']:
out.write('\n ON DELETE ' + fkey.deleteRule)
if fkey.updateRule in ['RESTRICT', 'CASCADE', 'SET NULL']:
out.write('\n ON UPDATE ' + fkey.updateRule)
if is_deferred(fkey):
out.write(' DEFERRABLE INITIALLY DEFERRED')
out.write(comment_format(col_comment) + '\n);\n')
# CREATE INDEX statements for all non-primary, non-unique, non-foreign
# indexes
for i, index in enumerate(tbl.indices):
if index.indexType == 'INDEX':
index_name = tbl.name + '.' + index.name
if index.name == '':
index_name = tbl.name + '.index' + i
out.write('CREATE INDEX %s%s ON %s (%s);\n' % (
db_name,
dq(index_name),
dq(tbl.name),
print_index_columns(index)))
# Write the INSERTS (currently always)
for insert in tbl.inserts().splitlines():
columns_values = ''
insert_start = 'insert into `%s`.`%s` (' % (schema.name, tbl.name)
if insert[0:len(insert_start)].lower() == insert_start.lower():
columns_values = insert[len(insert_start):]
else:
raise ExportSQLiteError(
'Error', 'Unrecognized command in insert')
last_column = 0
for i, column in enumerate(tbl.columns):
column_name = '`' + column.name + '`'
if columns_values[0:len(column_name)] == column_name:
columns_values = columns_values[len(column_name):]
if columns_values[0:1] == ')':
columns_values = columns_values[1:]
last_column = i
break
else:
if columns_values[0:2] == ', ':
columns_values = columns_values[2:]
else:
raise ExportSQLiteError(
'Error',
'Unrecognized character in column list')
else:
raise ExportSQLiteError(
'Error', 'Unrecognized column in inserts')
out.write('INSERT INTO %s(' % dq(tbl.name))
for i in range(last_column + 1):
if i > 0:
out.write(',')
out.write(dq(tbl.columns[i].name))
if columns_values[0:9].lower() != ' values (':
raise ExportSQLiteError(
'Error', 'Unrecognized SQL in insert')
columns_values = columns_values[9:]
out.write(') VALUES(')
out.write(columns_values.replace("\\'", "''"))
out.write('\n')
    def order_tables(out, db_name, schema, unordered, respect_deferredness):
        """Export tables from `unordered` (a name -> table dict) in an
        order compatible with their FK references, removing each table
        from the dict as it is written.  Tables whose references cannot
        be satisfied stay in `unordered` for the caller to handle.
        When `respect_deferredness` is True, deferred FKs do not count
        as forward references.
        """
        have_ordered = False
        while not have_ordered:
            if len(unordered) == 0:
                have_ordered = True
            # NOTE(review): entries are deleted from `unordered` while
            # iterating unordered.values(); this relies on Python 2
            # semantics where values() returns a list copy.  Under
            # Python 3 this would raise RuntimeError.
            for tbl in unordered.values():
                has_forward_reference = False
                for fkey in tbl.foreignKeys:
                    # A forward reference is an FK to another table that is
                    # still waiting to be exported (self-references are OK).
                    if (fkey.referencedTable.name in unordered and
                            fkey.referencedTable.name != tbl.name and not (
                            respect_deferredness and is_deferred(fkey))):
                        has_forward_reference = True
                        break
                if not has_forward_reference:
                    export_table(out, db_name, schema, tbl)
                    del unordered[tbl.name]
                    # NOTE(review): this makes the while loop stop after the
                    # first pass that exported at least one table; remaining
                    # tables are handled by the caller's follow-up calls.
                    have_ordered = True
    def export_schema(out, schema, is_main_schema):
        """Write one schema: a header comment, an ATTACH statement for
        non-main schemata, and all tables in an FK-compatible order,
        wrapped in a BEGIN/COMMIT transaction.
        """
        if len(schema.tables) == 0:
            return
        out.write('\n-- Schema: %s\n' % schema.name)
        out.write(schema_comment_format(schema.comment))
        db_name = ''
        if not is_main_schema:
            # Non-main schemata live in their own database file and are
            # attached under the (quoted) schema name.
            db_name = dq(schema.name) + '.'
            out.write('ATTACH "%s" AS %s;\n' % (
                safe_file_name(schema.name + '.sdb'),
                dq(schema.name)))
        out.write('BEGIN;\n')
        # Find a valid table order for inserts from FK constraints
        unordered = {t.name: t for t in schema.tables}
        # Try treating deferred keys like non-deferred keys first for ordering
        order_tables(out, db_name, schema, unordered, False)
        # Now try harder (leave out deferred keys from determining an order)
        order_tables(out, db_name, schema, unordered, True)
        # Loop through all remaining tables, if any. Have circular FK refs.
        # How to handle?
        for tbl in unordered.values():
            export_table(out, db_name, schema, tbl)
        out.write('COMMIT;\n')
def print_index_columns(index):
s = ''
for i, column in enumerate(index.columns):
if i > 0:
s += ','
s += dq(column.referencedColumn.name)
if column.descend == 1:
s += ' DESC'
return s
def print_fk_columns(columns):
s = ''
for i, column in enumerate(columns):
if i > 0:
s += ','
s += dq(column.name)
return s
def dq(ident):
"""Double quote identifer, replacing " by "" """
return '"' + re.sub(r'"', '""', ident) + '"'
def safe_file_name(ident):
"""Create safe filename from identifer"""
def repl(c):
return ["%%%02x" % c for c in bytearray(c, 'ascii')]
return re.sub(r'[/\:*?"<>|%]', repl, ident)
def info_format(header, body):
"""Format a info field as SQL comment"""
body = body.strip()
if body == '':
return ''
elif '\n' in body:
# Multiline comment
return '-- %s:\n-- %s\n' % (
header, re.sub(r'\n', '\n-- ', body))
else:
# Single line
return '-- %-14s %s\n' % (header + ':', body)
def schema_comment_format(body):
"""Format a schema or table comment as SQL comment
table comments to be stored in SQLite schema
"""
body = body.strip()
if body == '':
return ''
else:
# Multiline comment
return '-- %s\n' % re.sub(r'\n', '\n-- ', body)
def comment_format(body):
body = body.strip()
if body == '':
return ''
elif '\n' in body:
# Multiline comment
return '\n-- %s' % re.sub(r'\n', '\n-- ', body)
else:
# Single line
return '-- %s' % body
    # Abort (return 1) when the catalog fails the uniqueness checks.
    if not validate_for_sqlite_export(cat):
        return 1
    out = StringIO.StringIO()
    # Header comments: exporting tool/version plus document metadata.
    out.write(info_format(
        'Creator',
        'MySQL Workbench %d.%d.%d/ExportSQLite Plugin %s\n' % (
            grt.root.wb.info.version.majorNumber,
            grt.root.wb.info.version.minorNumber,
            grt.root.wb.info.version.releaseNumber,
            ModuleInfo.version)))
    out.write(info_format('Author', grt.root.wb.doc.info.author))
    out.write(info_format('Caption', grt.root.wb.doc.info.caption))
    out.write(info_format('Project', grt.root.wb.doc.info.project))
    out.write(info_format('Changed', grt.root.wb.doc.info.dateChanged))
    out.write(info_format('Created', grt.root.wb.doc.info.dateCreated))
    out.write(info_format('Description', grt.root.wb.doc.info.description))
    out.write('PRAGMA foreign_keys = OFF;\n')
    # Loop over all catalogs in schema, find main schema main schema is first
    # nonempty schema or nonempty schema named "main"
    # NOTE(review): the code below only treats a schema literally named
    # 'main' as the main schema; the "first nonempty schema" rule from the
    # comment above is not implemented here -- confirm intent.
    try:
        for schema in [(s, s.name == 'main') for s in cat.schemata]:
            export_schema(out, schema[0], schema[1])
    except ExportSQLiteError as e:
        Workbench.confirm(e.typ, e.message)
        return 1
    sql_text = out.getvalue()
    out.close()
    # Show the generated script in a preview wizard (save/copy actions).
    wizard = ExportSQLiteWizard(sql_text)
    wizard.run()
    return 0
class ExportSQLiteError(Exception):
    """Error raised while generating the SQLite script.

    Carries a short type/title (`typ`) and a human-readable `message`,
    both shown to the user via Workbench.confirm().
    """

    def __init__(self, typ, message):
        self.typ = typ
        self.message = message

    def __str__(self):
        return '{0!r}: {1!r}'.format(self.typ, self.message)
class ExportSQLiteWizard_PreviewPage(WizardPage):
    """Wizard page that previews the generated SQL script and offers
    saving it to a file or copying it to the clipboard."""

    def __init__(self, owner, sql_text):
        WizardPage.__init__(self, owner, 'Review Generated Script')
        # "Save to File..." button.
        self.save_button = mforms.newButton()
        self.save_button.enable_internal_padding(True)
        self.save_button.set_text('Save to File...')
        self.save_button.set_tooltip('Save the text to a new file.')
        self.save_button.add_clicked_callback(self.save_clicked)
        # "Copy to Clipboard" button.
        self.copy_button = mforms.newButton()
        self.copy_button.enable_internal_padding(True)
        self.copy_button.set_text('Copy to Clipboard')
        self.copy_button.set_tooltip('Copy the text to the clipboard.')
        self.copy_button.add_clicked_callback(self.copy_clicked)
        # Code editor preloaded with the generated script.
        self.sql_text = mforms.newCodeEditor()
        self.sql_text.set_language(mforms.LanguageMySQL)
        self.sql_text.set_text(sql_text)

    def go_cancel(self):
        # Cancelling simply finishes the wizard.
        self.main.finish()

    def create_ui(self):
        # Action buttons in a horizontal box above the editor.
        button_box = mforms.newBox(True)
        button_box.set_padding(8)
        button_box.add(self.save_button, False, True)
        button_box.add(self.copy_button, False, True)
        self.content.add_end(button_box, False, False)
        self.content.add_end(self.sql_text, True, True)

    def save_clicked(self):
        # Ask for a target .sql path and write the current editor contents.
        file_chooser = mforms.newFileChooser(self.main, mforms.SaveFile)
        file_chooser.set_extensions('SQL Files (*.sql)|*.sql', 'sql')
        if file_chooser.run_modal() == mforms.ResultOk:
            path = file_chooser.get_path()
            text = self.sql_text.get_text(False)
            try:
                with open(path, 'w+') as f:
                    f.write(text)
            except IOError as e:
                # Report I/O problems instead of crashing the wizard.
                mforms.Utilities.show_error(
                    'Save to File',
                    'Could not save to file "%s": %s' % (path, str(e)),
                    'OK')

    def copy_clicked(self):
        # Put the whole script on the system clipboard.
        mforms.Utilities.set_clipboard_text(self.sql_text.get_text(False))
class ExportSQLiteWizard(WizardForm):
    """One-page wizard that wraps the script preview page."""

    def __init__(self, sql_text):
        WizardForm.__init__(self, None)
        self.set_name('sqlite_export_wizard')
        self.set_title('SQLite Export Wizard')
        # The only page: preview of the generated script.
        self.preview_page = ExportSQLiteWizard_PreviewPage(self, sql_text)
        self.add_page(self.preview_page)
| [
"StringIO.StringIO",
"mforms.newBox",
"wb.DefineModule",
"mforms.newFileChooser",
"wb.wbinputs.currentCatalog",
"workbench.ui.WizardForm.__init__",
"workbench.ui.WizardPage.__init__",
"mforms.newCodeEditor",
"re.sub",
"grt.modules.Workbench.confirm",
"mforms.newButton"
] | [((1036, 1103), 'wb.DefineModule', 'DefineModule', ([], {'name': '"""ExportSQLite"""', 'author': '"""<NAME>"""', 'version': '"""0.1.0"""'}), "(name='ExportSQLite', author='<NAME>', version='0.1.0')\n", (1048, 1103), False, 'from wb import DefineModule, wbinputs\n'), ((17713, 17732), 'StringIO.StringIO', 'StringIO.StringIO', ([], {}), '()\n', (17730, 17732), False, 'import StringIO\n'), ((16535, 16571), 're.sub', 're.sub', (['"""[/\\\\:*?"<>|%]"""', 'repl', 'ident'], {}), '(\'[/\\\\:*?"<>|%]\', repl, ident)\n', (16541, 16571), False, 'import re\n'), ((19338, 19397), 'workbench.ui.WizardPage.__init__', 'WizardPage.__init__', (['self', 'owner', '"""Review Generated Script"""'], {}), "(self, owner, 'Review Generated Script')\n", (19357, 19397), False, 'from workbench.ui import WizardForm, WizardPage\n'), ((19426, 19444), 'mforms.newButton', 'mforms.newButton', ([], {}), '()\n', (19442, 19444), False, 'import mforms\n'), ((19715, 19733), 'mforms.newButton', 'mforms.newButton', ([], {}), '()\n', (19731, 19733), False, 'import mforms\n'), ((20006, 20028), 'mforms.newCodeEditor', 'mforms.newCodeEditor', ([], {}), '()\n', (20026, 20028), False, 'import mforms\n'), ((20227, 20246), 'mforms.newBox', 'mforms.newBox', (['(True)'], {}), '(True)\n', (20240, 20246), False, 'import mforms\n'), ((20554, 20603), 'mforms.newFileChooser', 'mforms.newFileChooser', (['self.main', 'mforms.SaveFile'], {}), '(self.main, mforms.SaveFile)\n', (20575, 20603), False, 'import mforms\n'), ((21314, 21345), 'workbench.ui.WizardForm.__init__', 'WizardForm.__init__', (['self', 'None'], {}), '(self, None)\n', (21333, 21345), False, 'from workbench.ui import WizardForm, WizardPage\n'), ((18853, 18888), 'grt.modules.Workbench.confirm', 'Workbench.confirm', (['e.typ', 'e.message'], {}), '(e.typ, e.message)\n', (18870, 18888), False, 'from grt.modules import Workbench\n'), ((1284, 1309), 'wb.wbinputs.currentCatalog', 'wbinputs.currentCatalog', ([], {}), '()\n', (1307, 1309), False, 'from wb import 
DefineModule, wbinputs\n'), ((16318, 16342), 're.sub', 're.sub', (['"""\\""""', '""""\\""""', 'ident'], {}), '(\'"\', \'""\', ident)\n', (16324, 16342), False, 'import re\n'), ((17308, 17338), 're.sub', 're.sub', (['"""\\\\n"""', '"""\n-- """', 'body'], {}), "('\\\\n', '\\n-- ', body)\n", (17314, 17338), False, 'import re\n'), ((1936, 2123), 'grt.modules.Workbench.confirm', 'Workbench.confirm', (['"""Name conflict"""', '("""Schemas %d and %d have the same name "%s". Please rename one of them.\nSearch for more such errors?"""\n % (idt[schema.name], i, schema.name))'], {}), '(\'Name conflict\', \n """Schemas %d and %d have the same name "%s". Please rename one of them.\nSearch for more such errors?"""\n % (idt[schema.name], i, schema.name))\n', (1953, 2123), False, 'from grt.modules import Workbench\n'), ((17535, 17565), 're.sub', 're.sub', (['"""\\\\n"""', '"""\n-- """', 'body'], {}), "('\\\\n', '\\n-- ', body)\n", (17541, 17565), False, 'import re\n'), ((2635, 2787), 'grt.modules.Workbench.confirm', 'Workbench.confirm', (['"""Name conflict"""', '("""Table %d in schema "%s". has no name. Please rename.\nSearch for more such errors?"""\n % (i, schema.name))'], {}), '(\'Name conflict\', \n """Table %d in schema "%s". has no name. Please rename.\nSearch for more such errors?"""\n % (i, schema.name))\n', (2652, 2787), False, 'from grt.modules import Workbench\n'), ((3039, 3247), 'grt.modules.Workbench.confirm', 'Workbench.confirm', (['"""Name conflict"""', '("""Tables %d and %d in schema "%s" have the same name "%s". Please rename one of them.\nSearch for more such errors?"""\n % (idt[tbl.name], i, schema.name, tbl.name))'], {}), '(\'Name conflict\', \n """Tables %d and %d in schema "%s" have the same name "%s". 
Please rename one of them.\nSearch for more such errors?"""\n % (idt[tbl.name], i, schema.name, tbl.name))\n', (3056, 3247), False, 'from grt.modules import Workbench\n'), ((16864, 16894), 're.sub', 're.sub', (['"""\\\\n"""', '"""\n-- """', 'body'], {}), "('\\\\n', '\\n-- ', body)\n", (16870, 16894), False, 'import re\n'), ((3811, 3978), 'grt.modules.Workbench.confirm', 'Workbench.confirm', (['"""Name conflict"""', '("""Column %d in table "%s"."%s". has no name. Please rename.\nSearch for more such errors?"""\n % (i, schema.name, tbl.name))'], {}), '(\'Name conflict\', \n """Column %d in table "%s"."%s". has no name. Please rename.\nSearch for more such errors?"""\n % (i, schema.name, tbl.name))\n', (3828, 3978), False, 'from grt.modules import Workbench\n'), ((4265, 4494), 'grt.modules.Workbench.confirm', 'Workbench.confirm', (['"""Name conflict"""', '("""Columns %d and %d in table "%s"."%s" have the same name "%s". Please rename one of them.\nSearch for more such errors?"""\n % (idt[column.name], i, schema.name, tbl.name, column.name))'], {}), '(\'Name conflict\', \n """Columns %d and %d in table "%s"."%s" have the same name "%s". Please rename one of them.\nSearch for more such errors?"""\n % (idt[column.name], i, schema.name, tbl.name, column.name))\n', (4282, 4494), False, 'from grt.modules import Workbench\n'), ((5234, 5400), 'grt.modules.Workbench.confirm', 'Workbench.confirm', (['"""Name conflict"""', '("""Index %d in table "%s"."%s". has no name. Please rename.\nSearch for more such errors?"""\n % (i, schema.name, tbl.name))'], {}), '(\'Name conflict\', \n """Index %d in table "%s"."%s". has no name. Please rename.\nSearch for more such errors?"""\n % (i, schema.name, tbl.name))\n', (5251, 5400), False, 'from grt.modules import Workbench\n'), ((5718, 5946), 'grt.modules.Workbench.confirm', 'Workbench.confirm', (['"""Name conflict"""', '("""Indices %d and %d in table "%s"."%s" have the same name "%s". 
Please rename one of them.\nSearch for more such errors?"""\n % (idt[index.name], i, schema.name, tbl.name, column.name))'], {}), '(\'Name conflict\', \n """Indices %d and %d in table "%s"."%s" have the same name "%s". Please rename one of them.\nSearch for more such errors?"""\n % (idt[index.name], i, schema.name, tbl.name, column.name))\n', (5735, 5946), False, 'from grt.modules import Workbench\n')] |
import os
from doppel import DoppelProject, destroy_all_projects
from tests.utils import console_logger, get_root_path
# Base path used to locate the example project (see tests.utils.get_root_path).
ROOT_PATH = get_root_path()
def run_aikit():
    """Launch the aikit example: one controller instance and a pool of
    twenty worker instances, wait for the workers, then tear down the
    controller."""
    controller = DoppelProject(
        'aikit-controller',
        path=os.path.join(ROOT_PATH, 'examples', 'aikit-example'),
        entry_point='doppelkit/run_controller.py',
        requirements=['aikit'],
        packages=[ROOT_PATH],
        n_instances=1, min_memory=2, min_cpu=1,
        commands=['mkdir -p doppel/ailogs/mljobmanager_workers']
    )
    controller.start()
    workers = DoppelProject(
        'aikit-worker',
        path=os.path.join(ROOT_PATH, 'examples', 'aikit-example'),
        entry_point='doppelkit/run_worker.py',
        requirements=['aikit'],
        packages=[ROOT_PATH],
        n_instances=20, duration=0.2,
        min_memory=2, min_cpu=16,
        commands=['mkdir -p doppel/ailogs/mljobrunner_workers']
    )
    workers.start()
    # NOTE(review): 'monitore' looks like a typo for 'monitor' -- confirm
    # against the DoppelProject API before changing.
    workers.monitore()
    controller.terminate()
if __name__ == '__main__':
    # Enable console logging and start from a clean slate before running.
    console_logger()
    destroy_all_projects()
    run_aikit()
| [
"os.path.join",
"tests.utils.console_logger",
"tests.utils.get_root_path",
"doppel.destroy_all_projects"
] | [((134, 149), 'tests.utils.get_root_path', 'get_root_path', ([], {}), '()\n', (147, 149), False, 'from tests.utils import console_logger, get_root_path\n'), ((1027, 1043), 'tests.utils.console_logger', 'console_logger', ([], {}), '()\n', (1041, 1043), False, 'from tests.utils import console_logger, get_root_path\n'), ((1048, 1070), 'doppel.destroy_all_projects', 'destroy_all_projects', ([], {}), '()\n', (1068, 1070), False, 'from doppel import DoppelProject, destroy_all_projects\n'), ((242, 294), 'os.path.join', 'os.path.join', (['ROOT_PATH', '"""examples"""', '"""aikit-example"""'], {}), "(ROOT_PATH, 'examples', 'aikit-example')\n", (254, 294), False, 'import os\n'), ((618, 670), 'os.path.join', 'os.path.join', (['ROOT_PATH', '"""examples"""', '"""aikit-example"""'], {}), "(ROOT_PATH, 'examples', 'aikit-example')\n", (630, 670), False, 'import os\n')] |
"""
The script expects the MViT (MDef-DETR or MDETR) detections in .txt format. For example, there should be,
One .txt file for each image and each line in the file represents a detection.
The format of a single detection should be "<label> <confidence> <x1> <y1> <x2> <y2>
Please see the 'mvit_detections' for reference.
"""
import os
import argparse
import xml.etree.ElementTree as ET
from fvcore.common.file_io import PathManager
import numpy as np
import time
import cv2
from nms import nms
# Text files listing the training image names for each incremental task
# (one image name per line); read once at import time below.
TASK1_TRAIN_LIST = "t1_train.txt"
TASK2_TRAIN_LIST = "t2_train.txt"
TASK3_TRAIN_LIST = "t3_train.txt"
TASK4_TRAIN_LIST = "t4_train.txt"
def read_image_list(path):
    """Read a newline-separated list of image names from `path`.

    Returns the non-empty, whitespace-stripped lines.  BUG FIX: the old
    implementation split on '\\n' and dropped the last element, which
    silently discarded the final image name whenever the file did not
    end with a trailing newline.
    """
    with open(path, 'r') as f:
        return [line.strip() for line in f if line.strip()]
# Per-task training image lists, loaded once at import time.
TASK1_TRAIN_IMAGES = read_image_list(TASK1_TRAIN_LIST)
TASK2_TRAIN_IMAGES = read_image_list(TASK2_TRAIN_LIST)
TASK3_TRAIN_IMAGES = read_image_list(TASK3_TRAIN_LIST)
TASK4_TRAIN_IMAGES = read_image_list(TASK4_TRAIN_LIST)
# Cumulative sets of classes that are "known" at each task.  Task 1 covers
# the 20 VOC classes plus their COCO spellings (e.g. "airplane" for
# "aeroplane"); each later task adds 20 further COCO classes.
TASK1_KNOWN_CLASSES = ["aeroplane", "bicycle", "bird", "boat", "bottle", "bus", "car", "cat",
                       "chair", "cow", "diningtable", "dog", "horse", "motorbike", "person",
                       "pottedplant", "sheep", "sofa", "train", "tvmonitor", "airplane", "dining table", "motorcycle",
                       "potted plant", "couch", "tv"]
TASK2_KNOWN_CLASSES = TASK1_KNOWN_CLASSES + ["truck", "traffic light", "fire hydrant", "stop sign", "parking meter",
                                             "bench", "elephant", "bear", "zebra", "giraffe",
                                             "backpack", "umbrella", "handbag", "tie", "suitcase",
                                             "microwave", "oven", "toaster", "sink", "refrigerator"]
TASK3_KNOWN_CLASSES = TASK2_KNOWN_CLASSES + ["frisbee", "skis", "snowboard", "sports ball", "kite",
                                             "baseball bat", "baseball glove", "skateboard", "surfboard",
                                             "tennis racket",
                                             "banana", "apple", "sandwich", "orange", "broccoli",
                                             "carrot", "hot dog", "pizza", "donut", "cake"]
TASK4_KNOWN_CLASSES = TASK3_KNOWN_CLASSES + ["bed", "toilet", "laptop", "mouse",
                                             "remote", "keyboard", "cell phone", "book", "clock",
                                             "vase", "scissors", "teddy bear", "hair drier", "toothbrush",
                                             "wine glass", "cup", "fork", "knife", "spoon", "bowl"]
def parse_arguments():
    """
    Parse the command line arguments.

    Returns a dict with keys: annotations_dir_path, detections_dir_path,
    output_dir_path, detection_confidence_threshold, iou_thresh_unk,
    apply_nms, iou_thresh_nms.
    """
    def _str_to_bool(value):
        # BUG FIX: `type=bool` treats any non-empty string -- including
        # "False" -- as True; parse the usual textual spellings instead.
        return value.strip().lower() in ('true', '1', 'yes', 'y', 't')
    ap = argparse.ArgumentParser()
    ap.add_argument("-ann", "--annotations_dir_path", required=True,
                    help="Path to the directory containing the original annotations in pascal VOC format.")
    ap.add_argument("-det", "--detections_dir_path", required=True,
                    help="Path to the directory containing the detections generated using class agnostic object "
                         "detector. One .txt file for each image where each line in the file represents a detection."
                         "The format of a single detection should be "
                         "<label> <confidence> <x1> <y1> <x2> <y2>")
    ap.add_argument("-o", "--output_dir_path", required=True,
                    help="The output dir path to save the updated annotations.")
    ap.add_argument("-det_conf", "--detection_confidence_threshold", required=False, type=float, default=0.5,
                    help="The confidence threshold to filter potential detections at first step. All detections with "
                         "confidence less than this threshold value will be ignored.")
    ap.add_argument("-iou", "--iou_thresh_unk", required=False, type=float, default=0.5,
                    help="All detections, having an overlap greater than iou_thresh with any of the ground truths, "
                         "will be ignored.")
    ap.add_argument("-nms", "--apply_nms", required=False, type=_str_to_bool, default=False,
                    help="Flag to decide either to apply NMS on detections before assigning them unknown/gt or not.")
    ap.add_argument("-iou_nms", "--iou_thresh_nms", required=False, type=float, default=0.2,
                    help="IOU threshold for NMS.")
    args = vars(ap.parse_args())
    return args
def parse_voc_gt_kn(path):
    """Parse the VOC XML annotation at `path`, keeping only the boxes of
    classes that are "known" for the task this image belongs to.

    Returns (boxes, tree, is_train):
      boxes    -- list of [xmin, ymin, xmax, ymax] floats (0-based x/y min),
      tree     -- the parsed (or synthesized) ElementTree,
      is_train -- False when the image is not in any task's training list.

    When no annotation file exists, an empty <annotation> tree is built
    from the corresponding JPEG's dimensions and (boxes=[], tree, True)
    is returned.
    """
    image_name = os.path.basename(path).split('.')[0]
    if os.path.exists(path):
        with PathManager.open(path) as f:
            tree = ET.parse(f)
        boxes = []
        for obj in tree.findall("object"):
            cls = obj.find("name").text
            # Skip objects whose class is not yet known for this task.
            if image_name in TASK1_TRAIN_IMAGES:
                if cls not in TASK1_KNOWN_CLASSES:
                    continue
            elif image_name in TASK2_TRAIN_IMAGES:
                if cls not in TASK2_KNOWN_CLASSES:
                    continue
            elif image_name in TASK3_TRAIN_IMAGES:
                if cls not in TASK3_KNOWN_CLASSES:
                    continue
            elif image_name in TASK4_TRAIN_IMAGES:
                if cls not in TASK4_KNOWN_CLASSES:
                    continue
            else:
                # Not a training image
                # NOTE(review): this early return only triggers when the
                # annotation has at least one <object>; an object-free
                # non-training image still returns is_train=True -- confirm.
                return boxes, tree, False
            bbox = obj.find("bndbox")
            bbox = [float(bbox.find(x).text) for x in ["xmin", "ymin", "xmax", "ymax"]]
            # Original annotations are integers in the range [1, W or H]
            # Assuming they mean 1-based pixel indices (inclusive),
            # a box with annotation (xmin=1, xmax=W) covers the whole image.
            # In coordinate space this is represented by (xmin=0, xmax=W)
            bbox[0] -= 1.0
            bbox[1] -= 1.0
            boxes.append(bbox)
    else:
        # No annotation file found, create an empty xml node and return
        image_name = f"{os.path.basename(path).split('.')[0]}.jpg"
        image_path = f"{os.path.dirname(os.path.dirname(path))}/JPEGImages/{image_name}"
        # Read the image only to recover its width/height/depth.
        img = cv2.imread(image_path)
        h, w, c = img.shape
        node_root = ET.Element('annotation')
        node_folder = ET.SubElement(node_root, 'folder')
        node_folder.text = 'VOC2007'
        node_filename = ET.SubElement(node_root, 'filename')
        node_filename.text = image_name
        node_size = ET.SubElement(node_root, 'size')
        node_width = ET.SubElement(node_size, 'width')
        node_width.text = str(int(w))
        node_height = ET.SubElement(node_size, 'height')
        node_height.text = str(int(h))
        node_depth = ET.SubElement(node_size, 'depth')
        node_depth.text = str(int(c))
        tree = ET.ElementTree(node_root)
        boxes = []
    return boxes, tree, True
def parse_det_txt(path, conf_thresh=0.5):
    """Read class-agnostic detections from a .txt file.

    Each line has the form "<label> <confidence> <x1> <y1> <x2> <y2>".
    Returns (boxes, scores) for the detections whose confidence is at
    least `conf_thresh`; returns two empty lists when `path` is missing.
    """
    boxes, scores = [], []
    if not os.path.exists(path):
        return boxes, scores
    with open(path, "r") as det_file:
        for raw_line in det_file:
            fields = raw_line.rstrip().split(' ')
            confidence = float(fields[1])
            # Keep only sufficiently confident detections.
            if confidence < conf_thresh:
                continue
            boxes.append([int(coord) for coord in fields[2:]])
            scores.append(confidence)
    return boxes, scores
def class_agnostic_nms(boxes, scores, iou=0.5):
    """Apply class-agnostic non-maximum suppression to the detections and
    return the kept boxes as a plain list (delegates to the imported
    `nms` implementation)."""
    # boxes = non_max_suppression_fast(np.array(boxes), iou)
    boxes = nms(np.array(boxes), np.array(scores), iou)
    return list(boxes)
def get_unk_det(gt, det, iou):
    """Return the detections in `det` whose maximum IoU with every ground
    truth box in `gt` stays below `iou` (i.e. potential unknown objects).

    Boxes are [x1, y1, x2, y2]; areas use the VOC "+1" pixel convention.
    When `gt` is empty all detections are returned unchanged.
    """
    if not gt:
        return det
    gt_arr = np.array(gt)
    # Ground-truth areas are invariant across detections, so compute once.
    gt_areas = ((gt_arr[:, 2] - gt_arr[:, 0] + 1.0) *
                (gt_arr[:, 3] - gt_arr[:, 1] + 1.0))
    unknown = []
    for box in det:
        b = np.array(box)
        # Intersection rectangle of this detection with every GT box.
        ix1 = np.maximum(gt_arr[:, 0], b[0])
        iy1 = np.maximum(gt_arr[:, 1], b[1])
        ix2 = np.minimum(gt_arr[:, 2], b[2])
        iy2 = np.minimum(gt_arr[:, 3], b[3])
        inter = (np.maximum(ix2 - ix1 + 1.0, 0.0) *
                 np.maximum(iy2 - iy1 + 1.0, 0.0))
        union = (b[2] - b[0] + 1.0) * (b[3] - b[1] + 1.0) + gt_areas - inter
        if np.max(inter / union) < iou:
            unknown.append(box)
    return unknown
def main(ann_dir, det_dir, out_dir, det_conf_thesh, iou_thresh, nms=False, iou_thresh_nms=0.7):
    """For every detection file in `det_dir`, add the high-confidence
    class-agnostic detections that do not overlap any known ground truth
    to the matching VOC annotation as "unknown" objects, and write the
    updated XML to `out_dir`.  Non-training images are copied unchanged.

    NOTE(review): the boolean `nms` parameter shadows the imported `nms`
    function inside this scope (the suppression itself runs through
    class_agnostic_nms).
    """
    files = os.listdir(det_dir)
    start = time.time()
    for i, file_name in enumerate(files):
        # Progress/timing output every 100 images.
        if i % 100 == 0:
            print(f"On image no. {i}. Time: {time.time() - start}")
            start = time.time()
        ann_file_path = f"{ann_dir}/{file_name.split('.')[0]}.xml"
        ref_det_file_path = f"{det_dir}/{file_name.split('.')[0]}.txt"
        out_ann_file_path = f"{out_dir}/{file_name.split('.')[0]}.xml"
        gt_boxes, ann_tree, train = parse_voc_gt_kn(ann_file_path)  # Read the ground truth bounding boxes
        # Only add the unknown detections if training image
        if not train:
            # Copy the original annotation file
            ann_tree.write(out_ann_file_path, encoding='latin-1')
            continue
        det_boxes, scores = parse_det_txt(ref_det_file_path, conf_thresh=det_conf_thesh)  # Read the detections
        if nms:
            det_boxes = class_agnostic_nms(det_boxes, scores, iou_thresh_nms)  # Apply NMS if prompted to do so
        det_unk = get_unk_det(gt_boxes, det_boxes, iou_thresh)  # Get the potential unknown detections
        # Create the updated annotation file: one <object> per unknown box.
        for det in det_unk:
            # NOTE(review): `object` shadows the builtin here.
            object = ET.SubElement(ann_tree.getroot(), 'object')
            name = ET.SubElement(object, "name")
            name.text = "unknown"
            pose = ET.SubElement(object, "pose")
            pose.text = "Unspecified"
            truncated = ET.SubElement(object, "truncated")
            truncated.text = "2"
            difficult = ET.SubElement(object, "difficult")
            difficult.text = "0"
            bndbox = ET.SubElement(object, "bndbox")
            xmin = ET.SubElement(bndbox, "xmin")
            xmin.text = str(int(det[0]))
            ymin = ET.SubElement(bndbox, "ymin")
            ymin.text = str(int(det[1]))
            xmax = ET.SubElement(bndbox, "xmax")
            xmax.text = str(int(det[2]))
            ymax = ET.SubElement(bndbox, "ymax")
            ymax.text = str(int(det[3]))
        # Save the updated annotations
        ann_tree.write(out_ann_file_path, encoding='latin-1')
if __name__ == "__main__":
    # Command-line entry point: read paths/thresholds and run main().
    cli_args = parse_arguments()
    ann_path = cli_args["annotations_dir_path"]
    det_path = cli_args["detections_dir_path"]
    out_path = cli_args["output_dir_path"]
    # Create the output directory on first run.
    if not os.path.exists(out_path):
        os.makedirs(out_path)
    main(
        ann_path,
        det_path,
        out_path,
        cli_args["detection_confidence_threshold"],
        cli_args["iou_thresh_unk"],
        cli_args["apply_nms"],
        cli_args["iou_thresh_nms"],
    )
| [
"os.path.exists",
"os.listdir",
"xml.etree.ElementTree.parse",
"numpy.minimum",
"argparse.ArgumentParser",
"os.makedirs",
"fvcore.common.file_io.PathManager.open",
"numpy.max",
"numpy.array",
"xml.etree.ElementTree.Element",
"xml.etree.ElementTree.ElementTree",
"os.path.dirname",
"os.path.ba... | [((2699, 2724), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2722, 2724), False, 'import argparse\n'), ((4540, 4560), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (4554, 4560), False, 'import os\n'), ((6880, 6900), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (6894, 6900), False, 'import os\n'), ((7676, 7688), 'numpy.array', 'np.array', (['gt'], {}), '(gt)\n', (7684, 7688), True, 'import numpy as np\n'), ((8483, 8502), 'os.listdir', 'os.listdir', (['det_dir'], {}), '(det_dir)\n', (8493, 8502), False, 'import os\n'), ((8515, 8526), 'time.time', 'time.time', ([], {}), '()\n', (8524, 8526), False, 'import time\n'), ((6113, 6135), 'cv2.imread', 'cv2.imread', (['image_path'], {}), '(image_path)\n', (6123, 6135), False, 'import cv2\n'), ((6184, 6208), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""annotation"""'], {}), "('annotation')\n", (6194, 6208), True, 'import xml.etree.ElementTree as ET\n'), ((6231, 6265), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['node_root', '"""folder"""'], {}), "(node_root, 'folder')\n", (6244, 6265), True, 'import xml.etree.ElementTree as ET\n'), ((6327, 6363), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['node_root', '"""filename"""'], {}), "(node_root, 'filename')\n", (6340, 6363), True, 'import xml.etree.ElementTree as ET\n'), ((6424, 6456), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['node_root', '"""size"""'], {}), "(node_root, 'size')\n", (6437, 6456), True, 'import xml.etree.ElementTree as ET\n'), ((6478, 6511), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['node_size', '"""width"""'], {}), "(node_size, 'width')\n", (6491, 6511), True, 'import xml.etree.ElementTree as ET\n'), ((6572, 6606), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['node_size', '"""height"""'], {}), "(node_size, 'height')\n", (6585, 6606), True, 'import xml.etree.ElementTree as ET\n'), ((6667, 6700), 
'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['node_size', '"""depth"""'], {}), "(node_size, 'depth')\n", (6680, 6700), True, 'import xml.etree.ElementTree as ET\n'), ((6754, 6779), 'xml.etree.ElementTree.ElementTree', 'ET.ElementTree', (['node_root'], {}), '(node_root)\n', (6768, 6779), True, 'import xml.etree.ElementTree as ET\n'), ((7537, 7552), 'numpy.array', 'np.array', (['boxes'], {}), '(boxes)\n', (7545, 7552), True, 'import numpy as np\n'), ((7554, 7570), 'numpy.array', 'np.array', (['scores'], {}), '(scores)\n', (7562, 7570), True, 'import numpy as np\n'), ((7737, 7749), 'numpy.array', 'np.array', (['dl'], {}), '(dl)\n', (7745, 7749), True, 'import numpy as np\n'), ((7766, 7792), 'numpy.maximum', 'np.maximum', (['gt[:, 0]', 'd[0]'], {}), '(gt[:, 0], d[0])\n', (7776, 7792), True, 'import numpy as np\n'), ((7809, 7835), 'numpy.maximum', 'np.maximum', (['gt[:, 1]', 'd[1]'], {}), '(gt[:, 1], d[1])\n', (7819, 7835), True, 'import numpy as np\n'), ((7852, 7878), 'numpy.minimum', 'np.minimum', (['gt[:, 2]', 'd[2]'], {}), '(gt[:, 2], d[2])\n', (7862, 7878), True, 'import numpy as np\n'), ((7895, 7921), 'numpy.minimum', 'np.minimum', (['gt[:, 3]', 'd[3]'], {}), '(gt[:, 3], d[3])\n', (7905, 7921), True, 'import numpy as np\n'), ((7935, 7971), 'numpy.maximum', 'np.maximum', (['(ixmax - ixmin + 1.0)', '(0.0)'], {}), '(ixmax - ixmin + 1.0, 0.0)\n', (7945, 7971), True, 'import numpy as np\n'), ((7985, 8021), 'numpy.maximum', 'np.maximum', (['(iymax - iymin + 1.0)', '(0.0)'], {}), '(iymax - iymin + 1.0, 0.0)\n', (7995, 8021), True, 'import numpy as np\n'), ((8281, 8297), 'numpy.max', 'np.max', (['overlaps'], {}), '(overlaps)\n', (8287, 8297), True, 'import numpy as np\n'), ((10786, 10812), 'os.path.exists', 'os.path.exists', (['output_dir'], {}), '(output_dir)\n', (10800, 10812), False, 'import os\n'), ((10822, 10845), 'os.makedirs', 'os.makedirs', (['output_dir'], {}), '(output_dir)\n', (10833, 10845), False, 'import os\n'), ((4575, 4597), 
'fvcore.common.file_io.PathManager.open', 'PathManager.open', (['path'], {}), '(path)\n', (4591, 4597), False, 'from fvcore.common.file_io import PathManager\n'), ((4623, 4634), 'xml.etree.ElementTree.parse', 'ET.parse', (['f'], {}), '(f)\n', (4631, 4634), True, 'import xml.etree.ElementTree as ET\n'), ((8682, 8693), 'time.time', 'time.time', ([], {}), '()\n', (8691, 8693), False, 'import time\n'), ((9727, 9756), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['object', '"""name"""'], {}), "(object, 'name')\n", (9740, 9756), True, 'import xml.etree.ElementTree as ET\n'), ((9810, 9839), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['object', '"""pose"""'], {}), "(object, 'pose')\n", (9823, 9839), True, 'import xml.etree.ElementTree as ET\n'), ((9902, 9936), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['object', '"""truncated"""'], {}), "(object, 'truncated')\n", (9915, 9936), True, 'import xml.etree.ElementTree as ET\n'), ((9994, 10028), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['object', '"""difficult"""'], {}), "(object, 'difficult')\n", (10007, 10028), True, 'import xml.etree.ElementTree as ET\n'), ((10083, 10114), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['object', '"""bndbox"""'], {}), "(object, 'bndbox')\n", (10096, 10114), True, 'import xml.etree.ElementTree as ET\n'), ((10134, 10163), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['bndbox', '"""xmin"""'], {}), "(bndbox, 'xmin')\n", (10147, 10163), True, 'import xml.etree.ElementTree as ET\n'), ((10224, 10253), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['bndbox', '"""ymin"""'], {}), "(bndbox, 'ymin')\n", (10237, 10253), True, 'import xml.etree.ElementTree as ET\n'), ((10314, 10343), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['bndbox', '"""xmax"""'], {}), "(bndbox, 'xmax')\n", (10327, 10343), True, 'import xml.etree.ElementTree as ET\n'), ((10404, 10433), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['bndbox', 
'"""ymax"""'], {}), "(bndbox, 'ymax')\n", (10417, 10433), True, 'import xml.etree.ElementTree as ET\n'), ((4496, 4518), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (4512, 4518), False, 'import os\n'), ((6050, 6071), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (6065, 6071), False, 'import os\n'), ((5967, 5989), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (5983, 5989), False, 'import os\n'), ((8639, 8650), 'time.time', 'time.time', ([], {}), '()\n', (8648, 8650), False, 'import time\n')] |
# <NAME>
# Initial version of the webcam red-ball detector; can be used to test HSV
# settings, radius thresholds, etc.  Thresholds a red hue band in HSV space,
# cleans the mask with erode/dilate, then accepts the largest contour as a
# "red ball" when its enclosing circle is big and sufficiently filled.
import cv2
import numpy as np

FRAME_WIDTH = 640
FRAME_HEIGHT = 480

# Open the default camera and request the working resolution.
cap = cv2.VideoCapture(0)
cap.set(cv2.CAP_PROP_FRAME_WIDTH, FRAME_WIDTH)
cap.set(cv2.CAP_PROP_FRAME_HEIGHT, FRAME_HEIGHT)

# Check if camera opened successfully (idiomatic: not `== False`).
if not cap.isOpened():
    print("Error opening video stream or file")

# Read until the stream ends or the user quits.
while cap.isOpened():
    # Capture frame-by-frame; stop on read failure.
    ret, frame = cap.read()
    if not ret:
        break
    redball_detected = False
    # Resize for faster processing, blur to smooth, convert to HSV.
    frame = cv2.resize(frame, (640, 480))
    blurred = cv2.GaussianBlur(frame, (11, 11), 0)
    frameHSV = cv2.cvtColor(blurred, cv2.COLOR_BGR2HSV)
    # HSV band for red; tune these to track other colors.
    redLow = (0, 140, 140)
    redHigh = (255, 255, 255)
    # Mask pixels inside the HSV band, then fill holes with erode/dilate.
    mask = cv2.inRange(frameHSV, redLow, redHigh)
    mask = cv2.erode(mask, None, iterations=4)
    mask = cv2.dilate(mask, None, iterations=8)
    mask = cv2.erode(mask, None, iterations=4)
    # Copy of the mask used later for the circle-fullness check.
    maskg = np.copy(mask)
    imgg = np.zeros(frame.shape[0:2])
    cv2.imshow('mask', mask)
    cnts, hierarchy = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    center = None
    cv2.drawContours(frame, cnts, -1, (0, 255, 0), 3)
    # Checks to make sure there is a red object at all.
    if len(cnts) < 1:
        cv2.imshow('Frame', frame)
    else:
        c = max(cnts, key=cv2.contourArea)
        (x, y), radius = cv2.minEnclosingCircle(c)
        M = cv2.moments(c)
        if int(M["m00"]) != 0:
            center = (int(M["m10"] / M["m00"]), int(M["m01"] / M["m00"]))
        print('radius', radius)
        # Only proceed if a centroid was found and the radius is big enough.
        # (Fix: the original could pass center=None into cv2.circle and crash
        # when the contour's zeroth moment was 0.)
        if center is not None and radius > 10:
            # Check circularity: fraction of the enclosing circle covered by
            # the mask must be high for the blob to count as a ball.
            cv2.circle(imgg, center, int(radius), 255, -1)
            masked = cv2.bitwise_and(maskg.astype(np.uint8), maskg.astype(np.uint8), mask=imgg.astype(np.uint8))
            circle_fullness = np.sum(masked) / (np.pi * radius ** 2 * 255)
            if circle_fullness > 0.8:
                redball_detected = True
                # Draw the circle and centroid on the frame.
                cv2.circle(frame, (int(x), int(y)), int(radius),
                           (0, 0, 255), 2)
                cv2.circle(frame, center, 5, (0, 0, 255), -1)
    # If a large enough red object is detected, report its coordinates.
    if redball_detected:
        print('center coordinates', center)
        print(type(center))
    # Display the resulting frame.
    print('Redball detected:', redball_detected)
    cv2.imshow('Frame', frame)
    cv2.imshow("test", frameHSV)
    # Press Q on keyboard to exit.
    if cv2.waitKey(25) & 0xFF == ord('q'):
        break

# When everything is done, release the video capture object.
cap.release()
# Close all the frames.
cv2.destroyAllWindows()
| [
"numpy.copy",
"cv2.drawContours",
"cv2.dilate",
"cv2.inRange",
"cv2.erode",
"cv2.minEnclosingCircle",
"cv2.imshow",
"numpy.sum",
"numpy.zeros",
"cv2.circle",
"cv2.destroyAllWindows",
"cv2.VideoCapture",
"cv2.cvtColor",
"cv2.moments",
"cv2.findContours",
"cv2.resize",
"cv2.GaussianBlu... | [((246, 265), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (262, 265), False, 'import cv2\n'), ((4141, 4164), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (4162, 4164), False, 'import cv2\n'), ((938, 967), 'cv2.resize', 'cv2.resize', (['frame', '(640, 480)'], {}), '(frame, (640, 480))\n', (948, 967), False, 'import cv2\n'), ((987, 1023), 'cv2.GaussianBlur', 'cv2.GaussianBlur', (['frame', '(11, 11)', '(0)'], {}), '(frame, (11, 11), 0)\n', (1003, 1023), False, 'import cv2\n'), ((1044, 1084), 'cv2.cvtColor', 'cv2.cvtColor', (['blurred', 'cv2.COLOR_BGR2HSV'], {}), '(blurred, cv2.COLOR_BGR2HSV)\n', (1056, 1084), False, 'import cv2\n'), ((1605, 1643), 'cv2.inRange', 'cv2.inRange', (['frameHSV', 'redLow', 'redHigh'], {}), '(frameHSV, redLow, redHigh)\n', (1616, 1643), False, 'import cv2\n'), ((1660, 1695), 'cv2.erode', 'cv2.erode', (['mask', 'None'], {'iterations': '(4)'}), '(mask, None, iterations=4)\n', (1669, 1695), False, 'import cv2\n'), ((1712, 1748), 'cv2.dilate', 'cv2.dilate', (['mask', 'None'], {'iterations': '(8)'}), '(mask, None, iterations=8)\n', (1722, 1748), False, 'import cv2\n'), ((1765, 1800), 'cv2.erode', 'cv2.erode', (['mask', 'None'], {'iterations': '(4)'}), '(mask, None, iterations=4)\n', (1774, 1800), False, 'import cv2\n'), ((1871, 1884), 'numpy.copy', 'np.copy', (['mask'], {}), '(mask)\n', (1878, 1884), True, 'import numpy as np\n'), ((1901, 1927), 'numpy.zeros', 'np.zeros', (['frame.shape[0:2]'], {}), '(frame.shape[0:2])\n', (1909, 1927), True, 'import numpy as np\n'), ((1941, 1965), 'cv2.imshow', 'cv2.imshow', (['"""mask"""', 'mask'], {}), "('mask', mask)\n", (1951, 1965), False, 'import cv2\n'), ((1993, 2055), 'cv2.findContours', 'cv2.findContours', (['mask', 'cv2.RETR_TREE', 'cv2.CHAIN_APPROX_SIMPLE'], {}), '(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\n', (2009, 2055), False, 'import cv2\n'), ((2088, 2137), 'cv2.drawContours', 'cv2.drawContours', (['frame', 'cnts', '(-1)', '(0, 
255, 0)', '(3)'], {}), '(frame, cnts, -1, (0, 255, 0), 3)\n', (2104, 2137), False, 'import cv2\n'), ((3779, 3805), 'cv2.imshow', 'cv2.imshow', (['"""Frame"""', 'frame'], {}), "('Frame', frame)\n", (3789, 3805), False, 'import cv2\n'), ((3815, 3843), 'cv2.imshow', 'cv2.imshow', (['"""test"""', 'frameHSV'], {}), "('test', frameHSV)\n", (3825, 3843), False, 'import cv2\n'), ((2233, 2259), 'cv2.imshow', 'cv2.imshow', (['"""Frame"""', 'frame'], {}), "('Frame', frame)\n", (2243, 2259), False, 'import cv2\n'), ((2416, 2441), 'cv2.minEnclosingCircle', 'cv2.minEnclosingCircle', (['c'], {}), '(c)\n', (2438, 2441), False, 'import cv2\n'), ((2459, 2473), 'cv2.moments', 'cv2.moments', (['c'], {}), '(c)\n', (2470, 2473), False, 'import cv2\n'), ((3921, 3936), 'cv2.waitKey', 'cv2.waitKey', (['(25)'], {}), '(25)\n', (3932, 3936), False, 'import cv2\n'), ((3037, 3051), 'numpy.sum', 'np.sum', (['masked'], {}), '(masked)\n', (3043, 3051), True, 'import numpy as np\n'), ((3377, 3422), 'cv2.circle', 'cv2.circle', (['frame', 'center', '(5)', '(0, 0, 255)', '(-1)'], {}), '(frame, center, 5, (0, 0, 255), -1)\n', (3387, 3422), False, 'import cv2\n')] |
# encoding: utf-8
"""Static configuration for Athena query operations (region, S3 result
path, logging, and the helper-JAR glob)."""
import logging
import os

# AWS region and S3 staging location for Athena query results.
region_name = "us-west-2"
athena_s3_result_tmp_path = "s3://yamibuy-oregon/athena/sql_result"

# Logging destination and verbosity.
LOG_DIR_NAME = "log"
LOG_FILE_NAME = "athena-operation.log"
LOG_LEVEL = logging.INFO

# Glob pattern for the helper JAR(s) located one level above this package.
jar_file_path = "{}/../lib*.jar".format(os.path.dirname(os.path.realpath(__file__)))
| [
"os.path.realpath"
] | [((280, 306), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (296, 306), False, 'import os\n')] |
import rospy
# Unit-conversion constants from the project template; neither is referenced
# in the visible code of this file, but other modules may import them.
GAS_DENSITY = 2.858
ONE_MPH = 0.44704
# When True, Controller.control() logs its inputs/outputs via rospy.
IS_DEBUG = False
from yaw_controller import YawController
from pid import PID
from lowpass import LowPassFilter
class Controller(object):
    """Drive-by-wire controller producing (throttle, brake, steering).

    Steering comes from a YawController, throttle from a clamped PID on
    the velocity error, and brake torque from the required deceleration.
    The measured velocity is low-pass filtered because it is noisy.
    """

    def __init__(self, vehicle_mass, fuel_capacity, brake_deadband, decel_limit, accel_limit,
                 wheel_radius, wheel_base, steer_ratio, max_lat_accel, max_steer_angle):
        """Store vehicle parameters and build the sub-controllers.

        Args mirror the vehicle's physical parameters as published by the
        dbw node (masses in kg, limits in m/s^2, radii/base in m).
        """
        # Converts target linear/angular velocity into a steering angle.
        # 0.1 is the minimum speed below which steering is not computed.
        self.yaw_controller = YawController(wheel_base, steer_ratio, 0.1, max_lat_accel, max_steer_angle)

        # PID gains for the throttle loop; output clamped to [mn, mx].
        kp = 0.3
        ki = 0.1
        kd = 0
        mn = 0.   # minimum throttle value
        mx = 0.2  # maximum throttle value
        self.throttle_controller = PID(kp, ki, kd, mn, mx)

        # Velocity is noisy, so run it through a low-pass filter.
        tau = 0.5  # 1 / (2*pi*tau) = cutoff frequency
        ts = 0.02  # sample time = 50 Hz
        self.vel_lpf = LowPassFilter(tau, ts)

        self.vehicle_mass = vehicle_mass
        self.fuel_capacity = fuel_capacity
        self.brake_deadband = brake_deadband
        self.decel_limit = decel_limit
        self.accel_limit = accel_limit
        self.wheel_radius = wheel_radius

        self.last_time = rospy.get_time()

    def control(self, current_vel, dbw_enabled, linear_vel, angular_vel):
        """Compute one control step.

        Args:
            current_vel: measured linear velocity (m/s).
            dbw_enabled: whether drive-by-wire is active; when False the
                PID is reset so the integral term does not wind up.
            linear_vel: target linear velocity (m/s).
            angular_vel: target angular velocity (rad/s).

        Returns:
            (throttle, brake, steering) — throttle in [0, 0.2], brake as
            torque in Nm, steering as a wheel angle.
        """
        if not dbw_enabled:
            # Manual driving: reset the PID and command nothing.
            self.throttle_controller.reset()
            return 0., 0., 0.

        # Get the low-pass-filtered velocity.
        current_vel = self.vel_lpf.filt(current_vel)

        if IS_DEBUG:
            rospy.loginfo("Angular vel: {0}".format(angular_vel))
            rospy.loginfo("Target vel: {0}".format(linear_vel))
            rospy.loginfo("Target ang vel: {0}".format(angular_vel))
            rospy.loginfo("Current vel: {0}".format(current_vel))
            rospy.loginfo("Filtered vel: {0}".format(self.vel_lpf.get()))

        steering = self.yaw_controller.get_steering(linear_vel, angular_vel, current_vel)

        vel_error = linear_vel - current_vel
        self.last_vel = current_vel

        current_time = rospy.get_time()
        sample_time = current_time - self.last_time
        self.last_time = current_time

        throttle = self.throttle_controller.step(vel_error, sample_time)
        brake = 0

        if linear_vel == 0. and current_vel < 0.1:
            # Stopped at a light: zero throttle and hold the car in place.
            throttle = 0
            brake = 700  # Nm of holding torque
        elif throttle < 0.1 and vel_error < 0:
            # Need to slow down: cut throttle and brake proportionally.
            throttle = 0
            decel = max(vel_error, self.decel_limit)  # limit brake value
            # torque = N*m = acceleration * mass * wheel radius
            brake = abs(decel) * self.vehicle_mass * self.wheel_radius

        if IS_DEBUG:
            rospy.logwarn("vel error: {0}, throttle: {1}, brake: {2}".format(vel_error, throttle, brake))

        # NOTE(review): by the time the car drifts off a waypoint, the
        # waypoint follower sends new commands; consider adding damping on
        # the angular-velocity error in the yaw controller.
        return throttle, brake, steering
"yaw_controller.YawController",
"rospy.get_time",
"lowpass.LowPassFilter",
"pid.PID"
] | [((1620, 1695), 'yaw_controller.YawController', 'YawController', (['wheel_base', 'steer_ratio', '(0.1)', 'max_lat_accel', 'max_steer_angle'], {}), '(wheel_base, steer_ratio, 0.1, max_lat_accel, max_steer_angle)\n', (1633, 1695), False, 'from yaw_controller import YawController\n'), ((1872, 1895), 'pid.PID', 'PID', (['kp', 'ki', 'kd', 'mn', 'mx'], {}), '(kp, ki, kd, mn, mx)\n', (1875, 1895), False, 'from pid import PID\n'), ((2069, 2091), 'lowpass.LowPassFilter', 'LowPassFilter', (['tau', 'ts'], {}), '(tau, ts)\n', (2082, 2091), False, 'from lowpass import LowPassFilter\n'), ((2366, 2382), 'rospy.get_time', 'rospy.get_time', ([], {}), '()\n', (2380, 2382), False, 'import rospy\n'), ((3433, 3449), 'rospy.get_time', 'rospy.get_time', ([], {}), '()\n', (3447, 3449), False, 'import rospy\n')] |
"""
Author : <NAME>
FileName : add_friends.py
Date : 5.5.17
Version : 1.0
"""
from page import *
import socket
from tkinter import messagebox
class AddFriends(Page):
    """Friendship-management page: send a request to a username and
    accept or decline incoming friendship requests."""

    def __init__(self, root, username):
        Page.__init__(self, root)
        self.username = username  # logged-in user's name
        self.root = root

    def add_elements(self, root, title):
        """
        Build the add-friends page: the description text, an entry box for
        the target username, and accept/decline buttons for the pending
        friendship requests.

        Args:
            root (Tk): The tk window.
            title (string): The name of the page.
        """
        super(AddFriends, self).add_elements(root, title)
        label1 = Label(root, font=self.font1, fg=FIREBRICK1,
                       text=ADD_PAGE_TEXT)
        label1.pack()
        label1.place(x=20, y=60)
        global entry_username
        entry_username = Entry(root, bg=CHOCOLATE, fg=WHITE, bd=5,
                               font=self.font1, exportselection=0,
                               insertbackground=GOLD, insertwidth=10,
                               width=20)
        entry_username.pack()
        entry_username.place(x=250, y=250)
        button_username = Button(root, bg=ROYAL_BLUE,
                                 activebackground=ROYAL_BLUE,
                                 font=self.font1, fg=WHITE, text=ADD_FRIENDS,
                                 command=self.add_friend)
        button_username.pack()
        button_username.place(x=650, y=250)
        button_accept = Button(root, bg=ROYAL_BLUE,
                               activebackground=ROYAL_BLUE,
                               font=self.font1, fg=WHITE, text=ACCEPT,
                               command=self.accept_friend)
        button_accept.pack()
        button_accept.place(x=50, y=600)
        button_decline = Button(root, bg=ROYAL_BLUE,
                                activebackground=ROYAL_BLUE,
                                font=self.font1, fg=WHITE, text=DECLINE,
                                command=self.decline_friend)
        button_decline.pack()
        button_decline.place(x=650, y=600)
        self.show_requests()

    def _send_request(self, request, buffer_size=CHECK_BUFFER):
        """
        Send one request to the server and return the decoded reply.

        The socket is always closed via try/finally (the original code
        leaked the socket in the accept/decline/add paths).

        Args:
            request (string): The protocol message to send.
            buffer_size (int): Number of bytes to read for the reply.

        Returns:
            string: The server's decoded answer.
        """
        sock = socket.socket()
        try:
            sock.connect((SERVER, PORT))
            sock.send(request.encode())
            return sock.recv(buffer_size).decode()
        finally:
            sock.close()

    def accept_friend(self):
        """
        Handle the accept button: send an acceptance for the friendship
        request currently selected in the listbox.
        """
        try:
            index = lb.curselection()
            if index:
                user = lb.get(index)
                answer = self._send_request(
                    "addfriend#" + self.username + "#" + user)
                if answer == OK:
                    messagebox.showwarning("SUCCESS!", "Congratulations "
                                                       "for the new "
                                                       "friendship!")
                    self.clear_screen(self.root)
                    self.add_elements(self.root, ADD_FRIENDS)
                else:
                    messagebox.showwarning("ERROR!", "Your accept wasn't "
                                                     "sent. Try again later.")
        except NameError:
            # lb only exists once show_requests() found pending requests.
            pass

    def decline_friend(self):
        """
        Handle the decline button: send a decline (username + "-not") for
        the friendship request currently selected in the listbox.
        """
        try:
            index = lb.curselection()
            if index:
                user = lb.get(index) + "-not"
                answer = self._send_request(
                    "addfriend#" + self.username + "#" + user)
                if answer == OK:
                    messagebox.showwarning("SUCCESS!", "Your decline was "
                                                       "sent "
                                                       "successfully")
                    self.clear_screen(self.root)
                    self.add_elements(self.root, ADD_FRIENDS)
                else:
                    messagebox.showwarning("ERROR!", "Your decline wasn't "
                                                     "sent. Try again later.")
        except NameError:
            # lb only exists once show_requests() found pending requests.
            pass

    def add_friend(self):
        """
        Handle the add-friends button: send a friendship request for the
        username typed into the entry box.
        """
        global entry_username
        username = entry_username.get()
        if self.username != username:
            if username:
                answer = self._send_request(
                    "addfriend#" + self.username + "#" + username)
                if answer == OK:
                    messagebox.showwarning("SUCCESS!", "Your friendship "
                                                       "request was sent "
                                                       "successfully")
                    self.clear_screen(self.root)
                    self.add_elements(self.root, ADD_FRIENDS)
                else:
                    messagebox.showwarning("ERROR!", "Your friendship "
                                                     "request was not sent\n"
                                                     "1) this username is "
                                                     "invalid\n"
                                                     "2) You already sent "
                                                     "friendship request for "
                                                     "this username")
        else:
            messagebox.showwarning("ERROR!", "Your cannot "
                                             "friendship yourself!")

    def show_requests(self):
        """
        Ask the server for all pending friendship requests of this user
        and display them in a scrollable listbox.
        """
        answer = self._send_request("getrequests#" + self.username,
                                    FRIENDS_BUFFER)
        if answer and answer != OK:
            requests = answer.split(",")
            scroll = Scrollbar()
            scroll.pack(side="right", fill="y")
            # Cap the visible rows at 12; the scrollbar covers the rest.
            length = min(len(requests), 12)
            global lb
            lb = Listbox(self.root, bd=10, bg=PEACHPUFF2, font=self.font1,
                         fg=ORANGE_RED, height=length, selectbackground=GREEN,
                         selectmode="single", relief="sunken", width=20,
                         yscrollcommand=scroll.set)
            for i, raw in enumerate(requests, start=1):
                lb.insert(i, raw)
            lb.pack()
            lb.place(y=320, x=250)
            scroll.config(command=lb.yview)
| [
"tkinter.messagebox.showwarning",
"socket.socket"
] | [((6723, 6738), 'socket.socket', 'socket.socket', ([], {}), '()\n', (6736, 6738), False, 'import socket\n'), ((6342, 6410), 'tkinter.messagebox.showwarning', 'messagebox.showwarning', (['"""ERROR!"""', '"""Your cannot friendship yourself!"""'], {}), "('ERROR!', 'Your cannot friendship yourself!')\n", (6364, 6410), False, 'from tkinter import messagebox\n'), ((2779, 2794), 'socket.socket', 'socket.socket', ([], {}), '()\n', (2792, 2794), False, 'import socket\n'), ((4008, 4023), 'socket.socket', 'socket.socket', ([], {}), '()\n', (4021, 4023), False, 'import socket\n'), ((5251, 5266), 'socket.socket', 'socket.socket', ([], {}), '()\n', (5264, 5266), False, 'import socket\n'), ((2999, 3076), 'tkinter.messagebox.showwarning', 'messagebox.showwarning', (['"""SUCCESS!"""', '"""Congratulations for the new friendship!"""'], {}), "('SUCCESS!', 'Congratulations for the new friendship!')\n", (3021, 3076), False, 'from tkinter import messagebox\n'), ((3362, 3439), 'tkinter.messagebox.showwarning', 'messagebox.showwarning', (['"""ERROR!"""', '"""Your accept wasn\'t sent. Try again later."""'], {}), '(\'ERROR!\', "Your accept wasn\'t sent. Try again later.")\n', (3384, 3439), False, 'from tkinter import messagebox\n'), ((4228, 4300), 'tkinter.messagebox.showwarning', 'messagebox.showwarning', (['"""SUCCESS!"""', '"""Your decline was sent successfully"""'], {}), "('SUCCESS!', 'Your decline was sent successfully')\n", (4250, 4300), False, 'from tkinter import messagebox\n'), ((4586, 4664), 'tkinter.messagebox.showwarning', 'messagebox.showwarning', (['"""ERROR!"""', '"""Your decline wasn\'t sent. Try again later."""'], {}), '(\'ERROR!\', "Your decline wasn\'t sent. 
Try again later.")\n', (4608, 4664), False, 'from tkinter import messagebox\n'), ((5467, 5554), 'tkinter.messagebox.showwarning', 'messagebox.showwarning', (['"""SUCCESS!"""', '"""Your friendship request was sent successfully"""'], {}), "('SUCCESS!',\n 'Your friendship request was sent successfully')\n", (5489, 5554), False, 'from tkinter import messagebox\n'), ((5820, 5990), 'tkinter.messagebox.showwarning', 'messagebox.showwarning', (['"""ERROR!"""', '"""Your friendship request was not sent\n1) this username is invalid\n2) You already sent friendship request for this username"""'], {}), '(\'ERROR!\',\n """Your friendship request was not sent\n1) this username is invalid\n2) You already sent friendship request for this username"""\n )\n', (5842, 5990), False, 'from tkinter import messagebox\n')] |
#!/usr/bin/python
# Copyright (c) 2018, Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Standard Ansible module metadata: community-supported preview module.
ANSIBLE_METADATA = {
    "metadata_version": "1.1",
    "status": ["preview"],
    "supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_dynamic_group_facts
short_description: Retrieve facts of dynamic groups
description:
- This module retrieves information of the specified dynamic group or lists all the dynamic groups in a tenancy.
version_added: "2.5"
options:
dynamic_group_id:
description: The OCID of the dynamic group. I(dynamic_group_id) is required to get a specific dynamic group's
information.
required: false
aliases: [ 'id' ]
compartment_id:
description: The OCID of the compartment (remember that the tenancy is simply the root compartment).
Required to list all the dynamic groups in a tenancy.
required: false
author: "<NAME> (@rohitChaware)"
extends_documentation_fragment: [ oracle, oracle_name_option ]
"""
EXAMPLES = """
- name: Get all the dynamic groups in a tenancy
oci_dynamic_group_facts:
compartment_id: ocid1.tenancy.oc1..xxxxxEXAMPLExxxxx
- name: Get information of a specific dynamic group
oci_dynamic_group_facts:
dynamic_group_id: ocid1.dynamicgroup.oc1..xxxxxEXAMPLExxxxx
"""
RETURN = """
dynamic_groups:
description: List of dynamic group details
returned: always
type: complex
contains:
compartment_id:
description: The OCID of the tenancy containing the group.
returned: always
type: string
sample: ocid1.compartment.oc1..xxxxxEXAMPLExxxxx
description:
description: The description you assign to the group. Does not have to be unique, and it's changeable.
returned: always
type: string
sample: "Group for all instances with the tag namespace and tag key operations.department"
id:
description: The OCID of the group.
returned: always
type: string
sample: ocid1.dynamicgroup.oc1..xxxxxEXAMPLExxxxx
inactive_status:
description: The detailed status of INACTIVE lifecycleState.
returned: always
type: int
sample: 1
lifecycle_state:
description: The group's current state. After creating a group, make sure its lifecycleState changes from
CREATING to ACTIVE before using it.
returned: always
type: string
sample: ACTIVE
matching_rule:
description: A rule string that defines which instance certificates will be matched. For syntax, see
U(https://docs.us-phoenix-1.oraclecloud.com/Content/Identity/Tasks/managingdynamicgroups.htm).
returned: always
type: string
sample: tag.operations.department.value
time_created:
description: Date and time the group was created, in the format defined by RFC3339.
returned: always
type: datetime
sample: 2018-03-28T18:37:56.190000+00:00
name:
description: The name you assign to the group during creation. The name must be unique across all groups in
the tenancy and cannot be changed.
returned: always
type: string
sample: Sample dynamic group
sample: [{
"compartment_id": "ocid1.tenancy.oc1..xxxxxEXAMPLExxxxx",
"description": "Group for all instances with the tag namespace and tag key operations.department",
"id": "ocid1.dynamicgroup.oc1..xxxxxEXAMPLExxxxx",
"inactive_status": null,
"lifecycle_state": "ACTIVE",
"matching_rule": "tag.operations.department.value",
"name": "Sample dynamic group",
"time_created": "2018-07-05T09:38:27.176000+00:00"
}]
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.oracle import oci_utils
try:
from oci.identity.identity_client import IdentityClient
from oci.util import to_dict
from oci.exceptions import ServiceError
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
def main():
    """Module entry point: fetch one dynamic group by OCID, or list all
    dynamic groups in a compartment, and exit with the results."""
    module_args = oci_utils.get_facts_module_arg_spec(filter_by_name=True)
    module_args.update(
        dynamic_group_id=dict(type="str", required=False, aliases=["id"]),
        compartment_id=dict(type="str", required=False),
    )

    module = AnsibleModule(argument_spec=module_args, supports_check_mode=False)

    if not HAS_OCI_PY_SDK:
        module.fail_json(msg="oci python sdk required for this module.")

    identity_client = oci_utils.create_service_client(module, IdentityClient)

    dynamic_group_id = module.params["dynamic_group_id"]
    try:
        if dynamic_group_id is None:
            # No OCID given: list every dynamic group in the compartment.
            groups = oci_utils.list_all_resources(
                identity_client.list_dynamic_groups,
                compartment_id=module.params["compartment_id"],
                name=module.params["name"],
            )
            result = to_dict(groups)
        else:
            # A specific group was requested: fetch it with retry/backoff.
            response = oci_utils.call_with_backoff(
                identity_client.get_dynamic_group,
                dynamic_group_id=dynamic_group_id,
            )
            result = [to_dict(response.data)]
    except ServiceError as ex:
        module.fail_json(msg=ex.message)

    module.exit_json(dynamic_groups=result)


if __name__ == "__main__":
    main()
| [
"ansible.module_utils.basic.AnsibleModule",
"ansible.module_utils.oracle.oci_utils.create_service_client",
"ansible.module_utils.oracle.oci_utils.get_facts_module_arg_spec",
"ansible.module_utils.oracle.oci_utils.list_all_resources",
"ansible.module_utils.oracle.oci_utils.call_with_backoff"
] | [((4664, 4720), 'ansible.module_utils.oracle.oci_utils.get_facts_module_arg_spec', 'oci_utils.get_facts_module_arg_spec', ([], {'filter_by_name': '(True)'}), '(filter_by_name=True)\n', (4699, 4720), False, 'from ansible.module_utils.oracle import oci_utils\n'), ((4929, 4996), 'ansible.module_utils.basic.AnsibleModule', 'AnsibleModule', ([], {'argument_spec': 'module_args', 'supports_check_mode': '(False)'}), '(argument_spec=module_args, supports_check_mode=False)\n', (4942, 4996), False, 'from ansible.module_utils.basic import AnsibleModule\n'), ((5121, 5176), 'ansible.module_utils.oracle.oci_utils.create_service_client', 'oci_utils.create_service_client', (['module', 'IdentityClient'], {}), '(module, IdentityClient)\n', (5152, 5176), False, 'from ansible.module_utils.oracle import oci_utils\n'), ((5620, 5765), 'ansible.module_utils.oracle.oci_utils.list_all_resources', 'oci_utils.list_all_resources', (['identity_client.list_dynamic_groups'], {'compartment_id': "module.params['compartment_id']", 'name': "module.params['name']"}), "(identity_client.list_dynamic_groups,\n compartment_id=module.params['compartment_id'], name=module.params['name'])\n", (5648, 5765), False, 'from ansible.module_utils.oracle import oci_utils\n'), ((5354, 5455), 'ansible.module_utils.oracle.oci_utils.call_with_backoff', 'oci_utils.call_with_backoff', (['identity_client.get_dynamic_group'], {'dynamic_group_id': 'dynamic_group_id'}), '(identity_client.get_dynamic_group,\n dynamic_group_id=dynamic_group_id)\n', (5381, 5455), False, 'from ansible.module_utils.oracle import oci_utils\n')] |
#
# -*- coding: utf-8 -*-
#
# Copyright (c) 2019 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.core.framework import graph_pb2
from tensorflow.python.framework import tensor_util
from tensorflow.core.framework import attr_value_pb2
from tensorflow.core.framework import node_def_pb2
from tensorflow.python.framework import dtypes
from intel_quantization.transform_graph.graph_transform_base import GraphTransformBase
class FuseColumnWiseMul(GraphTransformBase):
def __init__(self, input_pb):
super(FuseColumnWiseMul, self).__init__(input_pb)
def get_fuse_index(self, input_node_map, input_name_list):
fuseable_op_list = ['Conv2D', 'DepthwiseConv2dNative', 'MatMul']
fuse_op_name = {}
for node_index, node_name in enumerate(input_name_list):
node_op = input_node_map[node_name].op
if node_op == "Mul" and input_node_map[
input_node_map[node_name].
input[0]].op in fuseable_op_list and input_node_map[
input_node_map[node_name].input[1]].op == "Const":
fuse_op_name[input_node_map[node_name].input[0]] = node_name
return fuse_op_name
def parse_input_graph(self, input_graph_def):
node_type_list = []
node_name_list = []
input_node_map = {}
for node in input_graph_def.node:
node_name_list.append(node.name)
node_type_list.append(node.op)
each_node_input = []
if node.input:
for _, sub_input in enumerate(node.input):
each_node_input.append(sub_input)
if node.name not in input_node_map:
input_node_map[node.name] = node
else:
print('Duplicate node name {}'.format(node.name))
return input_node_map, node_type_list, node_name_list
def generate_output_graph(self, input_graph_def, input_node_map,
fuse_op_name):
output_graph_def = graph_pb2.GraphDef()
skip_list = []
skip_node_name = []
for index, node in enumerate(input_graph_def.node):
if node.name in fuse_op_name:
skip_list.append(index + 1)
original_node = input_node_map[node.name]
mul_node = input_node_map[fuse_op_name[node.name]]
weights_node_name = original_node.input[1]
weights_node = input_node_map[weights_node_name]
mul_value_node_name = mul_node.input[1]
mul_value_node = input_node_map[mul_value_node_name]
new_node = node_def_pb2.NodeDef()
new_node.op = original_node.op
new_node.name = mul_node.name
for _, value in enumerate(node.input):
new_node.input.append(value)
if original_node.op == "DepthwiseConv2dNative":
weights_col = weights_node.attr[
'value'].tensor.tensor_shape.dim[
2].size * weights_node.attr[
'value'].tensor.tensor_shape.dim[3].size
elif original_node.op == "Conv2D":
weights_col = weights_node.attr[
'value'].tensor.tensor_shape.dim[3].size
else:
weights_col = weights_node.attr[
'value'].tensor.tensor_shape.dim[1].size
mul_value_node_tensor = mul_value_node.attr['value'].tensor
weights_node_tensor = weights_node.attr['value'].tensor
if len(mul_value_node_tensor.tensor_shape.dim
) != 1 or mul_value_node_tensor.tensor_shape.dim[
0].size != weights_col:
print ("Invalid Mul OP fusion.")
mul_value_node_list = [
i for i in tensor_util.MakeNdarray(
mul_value_node_tensor).flat
]
new_weights = []
for index, i in enumerate(
tensor_util.MakeNdarray(weights_node_tensor).flat):
new_weights_value = i * mul_value_node_list[
index % len(mul_value_node_list)]
new_weights.append(new_weights_value)
weights_node.attr['value'].CopyFrom(
attr_value_pb2.
AttrValue(tensor=tensor_util.make_tensor_proto(
new_weights, dtypes.float32,
tensor_util.MakeNdarray(weights_node_tensor).shape)))
skip_node_name.append(weights_node.name)
output_graph_def.node.extend([weights_node])
for key in original_node.attr:
new_node.attr[key].CopyFrom(original_node.attr[key])
output_graph_def.node.extend([new_node])
elif index in skip_list or node.name in skip_node_name:
continue
else:
new_node = node_def_pb2.NodeDef()
new_node.CopyFrom(node)
output_graph_def.node.extend([new_node])
return output_graph_def
def do_transformation(self):
"""
Execute the Conv2D/DepthwiseConv2dNative/Matmul + Mul fusion.
:return: Transformed graph
"""
input_node_map, _, node_name_list = self.parse_input_graph(
self.input_graph)
fuse_op_name = self.get_fuse_index(input_node_map, node_name_list)
# print(fuse_op_name)
return self.generate_output_graph(self.input_graph, input_node_map,
fuse_op_name)
| [
"tensorflow.core.framework.graph_pb2.GraphDef",
"tensorflow.core.framework.node_def_pb2.NodeDef",
"tensorflow.python.framework.tensor_util.MakeNdarray"
] | [((2671, 2691), 'tensorflow.core.framework.graph_pb2.GraphDef', 'graph_pb2.GraphDef', ([], {}), '()\n', (2689, 2691), False, 'from tensorflow.core.framework import graph_pb2\n'), ((3293, 3315), 'tensorflow.core.framework.node_def_pb2.NodeDef', 'node_def_pb2.NodeDef', ([], {}), '()\n', (3313, 3315), False, 'from tensorflow.core.framework import node_def_pb2\n'), ((5741, 5763), 'tensorflow.core.framework.node_def_pb2.NodeDef', 'node_def_pb2.NodeDef', ([], {}), '()\n', (5761, 5763), False, 'from tensorflow.core.framework import node_def_pb2\n'), ((4784, 4828), 'tensorflow.python.framework.tensor_util.MakeNdarray', 'tensor_util.MakeNdarray', (['weights_node_tensor'], {}), '(weights_node_tensor)\n', (4807, 4828), False, 'from tensorflow.python.framework import tensor_util\n'), ((4589, 4635), 'tensorflow.python.framework.tensor_util.MakeNdarray', 'tensor_util.MakeNdarray', (['mul_value_node_tensor'], {}), '(mul_value_node_tensor)\n', (4612, 4635), False, 'from tensorflow.python.framework import tensor_util\n'), ((5252, 5296), 'tensorflow.python.framework.tensor_util.MakeNdarray', 'tensor_util.MakeNdarray', (['weights_node_tensor'], {}), '(weights_node_tensor)\n', (5275, 5296), False, 'from tensorflow.python.framework import tensor_util\n')] |
import os
import imutils
import pickle
import time
import cv2
import threading
import numpy as np
from PIL import ImageFont, ImageDraw, Image
import json
import datetime
import requests
from faced import FaceDetector
from faced.utils import annotate_image
from config_reader import read_config
ZM_URL = 'http://18.179.207.49/zm'
ZM_STREAM_URL = f'{ZM_URL}/cgi-bin/nph-zms'
LOGIN_URL = f'{ZM_URL}/api/host/login.json?user=admin&pass=<PASSWORD>'
MAX_RETRY_FRAME = 1000
def connect_stream(monitor, stream_url):
r = requests.post(url=LOGIN_URL)
print('[INFO] openning video stream...')
auth_info = r.json()['credentials']
new_url = f'{ZM_STREAM_URL}?mode=jpeg&maxfps=5&monitor={monitor}&{auth_info}'
# start streaming with zm stream url
cap = cv2.VideoCapture(new_url)
if cap is None or not cap.isOpened():
# try to open alternative url
print('[ERROR] trying to open direct url...')
cap = cv2.VideoCapture(stream_url)
return cap
class Camera(object):
thread_list = {}
json_list = {}
frame_list = {}
last_access = {}
json_data = {}
detector = None
embedder = None
recognizer = None
le = None
max_retry_count = 0
stream_url_list = {}
confidence = 0.90
# is_ended = False
def initialize(self, monitor, stream_url):
if monitor not in Camera.thread_list:
# start background frame thread
thread = threading.Thread(target=self._thread, args=(
stream_url,), kwargs={"monitor": monitor})
thread.start()
Camera.thread_list[str(monitor)] = thread
# wait until frames start to be available
# while monitor not in self.frame_list or self.frame_list[str(monitor)] is None:
# time.sleep(0)
def __init__(self):
file_paths, configs = read_config()
if Camera.detector is None:
print('[INFO] loading face detector...')
Camera.detector = FaceDetector()
if Camera.embedder is None:
# load our serialized face embedding model from disk
print('[INFO] loading embedder from {}'.format(
file_paths['embedder_path']))
Camera.embedder = cv2.dnn.readNetFromTorch(
file_paths['embedder_path'])
if Camera.recognizer is None:
# load the actual face recognition model along with the label encoder
print('[INFO] loading face recognizer from {}'.format(
file_paths['recognizer_path']))
Camera.recognizer = pickle.loads(
open('output/recognizer.pickle', 'rb').read())
if Camera.le is None:
print('[INFO] loading le from {}'.format(file_paths['le_path']))
Camera.le = pickle.loads(open('output/le.pickle', 'rb').read())
print('[INFO] Confidence value is set to {}'.format(
configs['confidence']))
Camera.confidence = float(configs['confidence'])
Camera.max_retry_count = int(configs['max_retry_count'])
# def get_frame(self, monitor):
# try:
# return self.frame_list[str(monitor)]
# except:
# return None
def get_json(self, monitor):
try:
return self.json_list[str(monitor)]
except:
response_data = {}
response_data['detection'] = []
return response_data
def change_stream_url(self, monitor, stream_url):
if monitor in Camera.thread_list:
return None
Camera.stream_url_list[str(monitor)] = stream_url
self.initialize(monitor, stream_url)
@classmethod
def _thread(cls, stream_url, monitor=0):
# login to zm server first
r = requests.post(url=LOGIN_URL)
print('[INFO] openning video stream...')
auth_info = r.json()['credentials']
new_url = f'{ZM_STREAM_URL}?mode=jpeg&maxfps=5&monitor={monitor}&{auth_info}'
retry_count = 0
cap = None
# start trying to connect to streaming resource
while (cap is None or not cap.isOpened) and retry_count < cls.max_retry_count:
cap = connect_stream(monitor, cls.stream_url_list[str(monitor)])
retry_count += 1
if cap is None or not cap.isOpened():
print('[ERROR] unable to open remote stream...')
cls.thread_list[str(monitor)] = None
return
print('[INFO] starting face detection...')
cap_failed_count = 0
while True:
try:
response_data = {}
response_data['detection'] = []
ret, frame = cap.read()
#ret, frame = camera.read()
if not ret:
cap_failed_count += 1
cls.json_list[str(monitor)] = response_data
if (cap_failed_count > cls.max_retry_count):
if cap.isOpened():
cap.release()
retry_count = 0
while (cap is None or not cap.isOpened) and retry_count < cls.max_retry_count:
cap = connect_stream(
monitor, cls.stream_url_list[str(monitor)])
retry_count += 1
if cap is None or not cap.isOpened():
print('[ERROR] unable to open remote stream...')
cls.thread_list[str(monitor)] = None
return
continue
# resize the frame to have a width of 600 pixels (while
# maintaining the aspect ratio), and then grab the image
# dimensions
frame = imutils.resize(frame, width=600)
# (h, w) = frame.shape[:2]
frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
bboxes = cls.detector.predict(frame, cls.confidence)
# ensure at least one face was found
print('[INFO] detected faces: {}'.format(len(bboxes)))
if len(bboxes) > 0:
for xb, yb, wb, hb, pb in bboxes:
startX = int(xb - wb/2)
startY = int(yb - hb/2)
endX = int(xb + wb/2)
endY = int(yb + hb/2)
# extract the face ROI
face = frame[startY:endY, startX:endX]
# (fH, fW) = face.shape[:2]
# ensure the face width and height are sufficiently large
# if fW < 20 or fH < 20:
# continue
# construct a blob for the face ROI, then pass the blob
# through our face embedding model to obtain the 128-d
# quantification of the face
faceBlob = cv2.dnn.blobFromImage(
face, 1.0 / 255, (96, 96), (0, 0, 0), swapRB=True, crop=False)
cls.embedder.setInput(faceBlob)
vec = cls.embedder.forward()
# perform classification to recognize the face
preds = cls.recognizer.predict_proba(vec)[0]
j = np.argmax(preds)
proba = preds[j]
name = cls.le.classes_[j]
# name = 0
# if proba >= 0.6:
# name = cls.le.classes_[j]
json_data = {}
json_data['name'] = '{}'.format(name)
json_data['time'] = datetime.datetime.now().strftime(
'%Y-%m-%d %H:%M:%S')
json_data['confidence'] = str(proba)
response_data['detection'].append(json_data)
cls.json_list[str(monitor)] = response_data
# ret, jpeg = cv2.imencode('.jpg', frame)
# cls.frame_list[str(monitor)] = jpeg.tobytes()
finally:
time.sleep(0.02)
print('[INFO] releasing stream resources...')
if cap.isOpened():
cap.release()
cls.thread_list[str(monitor)] = None
def detect_image(self, frame):
response_data = {}
response_data['detection'] = []
response_list = []
# resize the frame to have a width of 600 pixels (while
# maintaining the aspect ratio), and then grab the image
# dimensions
# frame = imutils.resize(frame, width=600)
try:
frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
bboxes = Camera.detector.predict(frame, Camera.confidence)
# ensure at least one face was found
print('[INFO] detected faces: {}'.format(len(bboxes)))
if len(bboxes) > 0:
for xb, yb, wb, hb, pb in bboxes:
startX = int(xb - wb/2)
startY = int(yb - hb/2)
endX = int(xb + wb/2)
endY = int(yb + hb/2)
# extract the face ROI
face = frame[startY:endY, startX:endX]
# (fH, fW) = face.shape[:2]
# ensure the face width and height are sufficiently large
# if fW < 20 or fH < 20:
# continue
# construct a blob for the face ROI, then pass the blob
# through our face embedding model to obtain the 128-d
# quantification of the face
faceBlob = cv2.dnn.blobFromImage(
face, 1.0 / 255, (96, 96), (0, 0, 0), swapRB=True, crop=False)
Camera.embedder.setInput(faceBlob)
vec = Camera.embedder.forward()
# perform classification to recognize the face
preds = Camera.recognizer.predict_proba(vec)[0]
j = np.argmax(preds)
proba = preds[j]
name = Camera.le.classes_[j]
# name = 0
# if proba >= 0.6:
# name = Camera.le.classes_[j]
if name not in response_list:
response_list.append(name)
json_data = {}
json_data['name'] = '{}'.format(name)
json_data['time'] = datetime.datetime.now().strftime(
'%Y-%m-%d %H:%M:%S')
json_data['confidence'] = str(proba)
response_data['detection'].append(json_data)
finally:
return response_data, response_list
def detect_video(self, event_id, monitor_id, event_date):
response_data = {}
response_data['detection'] = []
# cap = cv2.VideoCapture(0)
print(f'[INFO] starting face detection for event {event_id}...')
result_list = []
start_index = 1
while(True):
print(f'[INFO] checking still image {start_index:05}-analyse.jpg...')
img_path = f'/mnt/zoneminder/events/{monitor_id}/{event_date}/{event_id}/{start_index:05}-analyse.jpg'
if not os.path.isfile(img_path):
if start_index >= MAX_RETRY_FRAME:
break
start_index += 1
time.sleep(0.02)
continue
try:
# print(f'[INFO] parsing {img_path}...')
frame = cv2.imread(img_path)
if frame is not None:
detect_data, detect_list = self.detect_image(frame)
for detect_id in detect_list:
if detect_id not in result_list:
result_list.append(detect_id)
except Exception as e:
# print(e)
print(
f'[INFO] failed to parsing frame {start_index} for event {event_id}...')
finally:
break
print('[INFO] finish video detection...')
response_data['detection'] = result_list
return response_data
| [
"cv2.dnn.blobFromImage",
"requests.post",
"cv2.dnn.readNetFromTorch",
"config_reader.read_config",
"numpy.argmax",
"time.sleep",
"os.path.isfile",
"imutils.resize",
"faced.FaceDetector",
"datetime.datetime.now",
"cv2.VideoCapture",
"cv2.cvtColor",
"threading.Thread",
"cv2.imread"
] | [((521, 549), 'requests.post', 'requests.post', ([], {'url': 'LOGIN_URL'}), '(url=LOGIN_URL)\n', (534, 549), False, 'import requests\n'), ((768, 793), 'cv2.VideoCapture', 'cv2.VideoCapture', (['new_url'], {}), '(new_url)\n', (784, 793), False, 'import cv2\n'), ((942, 970), 'cv2.VideoCapture', 'cv2.VideoCapture', (['stream_url'], {}), '(stream_url)\n', (958, 970), False, 'import cv2\n'), ((1860, 1873), 'config_reader.read_config', 'read_config', ([], {}), '()\n', (1871, 1873), False, 'from config_reader import read_config\n'), ((3768, 3796), 'requests.post', 'requests.post', ([], {'url': 'LOGIN_URL'}), '(url=LOGIN_URL)\n', (3781, 3796), False, 'import requests\n'), ((1440, 1530), 'threading.Thread', 'threading.Thread', ([], {'target': 'self._thread', 'args': '(stream_url,)', 'kwargs': "{'monitor': monitor}"}), "(target=self._thread, args=(stream_url,), kwargs={'monitor':\n monitor})\n", (1456, 1530), False, 'import threading\n'), ((1993, 2007), 'faced.FaceDetector', 'FaceDetector', ([], {}), '()\n', (2005, 2007), False, 'from faced import FaceDetector\n'), ((2246, 2299), 'cv2.dnn.readNetFromTorch', 'cv2.dnn.readNetFromTorch', (["file_paths['embedder_path']"], {}), "(file_paths['embedder_path'])\n", (2270, 2299), False, 'import cv2\n'), ((8736, 8774), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_BGR2RGB'], {}), '(frame, cv2.COLOR_BGR2RGB)\n', (8748, 8774), False, 'import cv2\n'), ((5795, 5827), 'imutils.resize', 'imutils.resize', (['frame'], {'width': '(600)'}), '(frame, width=600)\n', (5809, 5827), False, 'import imutils\n'), ((5895, 5933), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_BGR2RGB'], {}), '(frame, cv2.COLOR_BGR2RGB)\n', (5907, 5933), False, 'import cv2\n'), ((8202, 8218), 'time.sleep', 'time.sleep', (['(0.02)'], {}), '(0.02)\n', (8212, 8218), False, 'import time\n'), ((11390, 11414), 'os.path.isfile', 'os.path.isfile', (['img_path'], {}), '(img_path)\n', (11404, 11414), False, 'import os\n'), ((11542, 11558), 'time.sleep', 
'time.sleep', (['(0.02)'], {}), '(0.02)\n', (11552, 11558), False, 'import time\n'), ((11683, 11703), 'cv2.imread', 'cv2.imread', (['img_path'], {}), '(img_path)\n', (11693, 11703), False, 'import cv2\n'), ((9759, 9847), 'cv2.dnn.blobFromImage', 'cv2.dnn.blobFromImage', (['face', '(1.0 / 255)', '(96, 96)', '(0, 0, 0)'], {'swapRB': '(True)', 'crop': '(False)'}), '(face, 1.0 / 255, (96, 96), (0, 0, 0), swapRB=True,\n crop=False)\n', (9780, 9847), False, 'import cv2\n'), ((10136, 10152), 'numpy.argmax', 'np.argmax', (['preds'], {}), '(preds)\n', (10145, 10152), True, 'import numpy as np\n'), ((6988, 7076), 'cv2.dnn.blobFromImage', 'cv2.dnn.blobFromImage', (['face', '(1.0 / 255)', '(96, 96)', '(0, 0, 0)'], {'swapRB': '(True)', 'crop': '(False)'}), '(face, 1.0 / 255, (96, 96), (0, 0, 0), swapRB=True,\n crop=False)\n', (7009, 7076), False, 'import cv2\n'), ((7380, 7396), 'numpy.argmax', 'np.argmax', (['preds'], {}), '(preds)\n', (7389, 7396), True, 'import numpy as np\n'), ((10599, 10622), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (10620, 10622), False, 'import datetime\n'), ((7768, 7791), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (7789, 7791), False, 'import datetime\n')] |
import asyncio
import textwrap
import unittest
import unittest.mock
import discord
from bot import constants
from bot.cogs import information
from tests.helpers import AsyncMock, MockBot, MockContext, MockGuild, MockMember, MockRole
class InformationCogTests(unittest.TestCase):
"""Tests the Information cog."""
@classmethod
def setUpClass(cls):
cls.moderator_role = MockRole(name="Moderator", role_id=constants.Roles.moderator)
def setUp(self):
"""Sets up fresh objects for each test."""
self.bot = MockBot()
self.cog = information.Information(self.bot)
self.ctx = MockContext()
self.ctx.author.roles.append(self.moderator_role)
def test_roles_command_command(self):
"""Test if the `role_info` command correctly returns the `moderator_role`."""
self.ctx.guild.roles.append(self.moderator_role)
self.cog.roles_info.can_run = AsyncMock()
self.cog.roles_info.can_run.return_value = True
coroutine = self.cog.roles_info.callback(self.cog, self.ctx)
self.assertIsNone(asyncio.run(coroutine))
self.ctx.send.assert_called_once()
_, kwargs = self.ctx.send.call_args
embed = kwargs.pop('embed')
self.assertEqual(embed.title, "Role information")
self.assertEqual(embed.colour, discord.Colour.blurple())
self.assertEqual(embed.description, f"`{self.moderator_role.id}` - {self.moderator_role.mention}\n")
self.assertEqual(embed.footer.text, "Total roles: 1")
def test_role_info_command(self):
"""Tests the `role info` command."""
dummy_role = MockRole(
name="Dummy",
role_id=112233445566778899,
colour=discord.Colour.blurple(),
position=10,
members=[self.ctx.author],
permissions=discord.Permissions(0)
)
admin_role = MockRole(
name="Admins",
role_id=998877665544332211,
colour=discord.Colour.red(),
position=3,
members=[self.ctx.author],
permissions=discord.Permissions(0),
)
self.ctx.guild.roles.append([dummy_role, admin_role])
self.cog.role_info.can_run = AsyncMock()
self.cog.role_info.can_run.return_value = True
coroutine = self.cog.role_info.callback(self.cog, self.ctx, dummy_role, admin_role)
self.assertIsNone(asyncio.run(coroutine))
self.assertEqual(self.ctx.send.call_count, 2)
(_, dummy_kwargs), (_, admin_kwargs) = self.ctx.send.call_args_list
dummy_embed = dummy_kwargs["embed"]
admin_embed = admin_kwargs["embed"]
self.assertEqual(dummy_embed.title, "Dummy info")
self.assertEqual(dummy_embed.colour, discord.Colour.blurple())
self.assertEqual(dummy_embed.fields[0].value, str(dummy_role.id))
self.assertEqual(dummy_embed.fields[1].value, f"#{dummy_role.colour.value:0>6x}")
self.assertEqual(dummy_embed.fields[2].value, "0.63 0.48 218")
self.assertEqual(dummy_embed.fields[3].value, "1")
self.assertEqual(dummy_embed.fields[4].value, "10")
self.assertEqual(dummy_embed.fields[5].value, "0")
self.assertEqual(admin_embed.title, "Admins info")
self.assertEqual(admin_embed.colour, discord.Colour.red())
@unittest.mock.patch('bot.cogs.information.time_since')
def test_server_info_command(self, time_since_patch):
time_since_patch.return_value = '2 days ago'
self.ctx.guild = MockGuild(
features=('lemons', 'apples'),
region="The Moon",
roles=[self.moderator_role],
channels=[
discord.TextChannel(
state={},
guild=self.ctx.guild,
data={'id': 42, 'name': 'lemons-offering', 'position': 22, 'type': 'text'}
),
discord.CategoryChannel(
state={},
guild=self.ctx.guild,
data={'id': 5125, 'name': 'the-lemon-collection', 'position': 22, 'type': 'category'}
),
discord.VoiceChannel(
state={},
guild=self.ctx.guild,
data={'id': 15290, 'name': 'listen-to-lemon', 'position': 22, 'type': 'voice'}
)
],
members=[
*(MockMember(status='online') for _ in range(2)),
*(MockMember(status='idle') for _ in range(1)),
*(MockMember(status='dnd') for _ in range(4)),
*(MockMember(status='offline') for _ in range(3)),
],
member_count=1_234,
icon_url='a-lemon.jpg',
)
coroutine = self.cog.server_info.callback(self.cog, self.ctx)
self.assertIsNone(asyncio.run(coroutine))
time_since_patch.assert_called_once_with(self.ctx.guild.created_at, precision='days')
_, kwargs = self.ctx.send.call_args
embed = kwargs.pop('embed')
self.assertEqual(embed.colour, discord.Colour.blurple())
self.assertEqual(
embed.description,
textwrap.dedent(
f"""
**Server information**
Created: {time_since_patch.return_value}
Voice region: {self.ctx.guild.region}
Features: {', '.join(self.ctx.guild.features)}
**Counts**
Members: {self.ctx.guild.member_count:,}
Roles: {len(self.ctx.guild.roles)}
Text: 1
Voice: 1
Channel categories: 1
**Members**
{constants.Emojis.status_online} 2
{constants.Emojis.status_idle} 1
{constants.Emojis.status_dnd} 4
{constants.Emojis.status_offline} 3
"""
)
)
self.assertEqual(embed.thumbnail.url, 'a-lemon.jpg')
| [
"tests.helpers.MockMember",
"asyncio.run",
"discord.VoiceChannel",
"tests.helpers.MockBot",
"discord.Colour.red",
"discord.Permissions",
"discord.CategoryChannel",
"bot.cogs.information.Information",
"discord.TextChannel",
"tests.helpers.AsyncMock",
"discord.Colour.blurple",
"tests.helpers.Moc... | [((3353, 3407), 'unittest.mock.patch', 'unittest.mock.patch', (['"""bot.cogs.information.time_since"""'], {}), "('bot.cogs.information.time_since')\n", (3372, 3407), False, 'import unittest\n'), ((392, 453), 'tests.helpers.MockRole', 'MockRole', ([], {'name': '"""Moderator"""', 'role_id': 'constants.Roles.moderator'}), "(name='Moderator', role_id=constants.Roles.moderator)\n", (400, 453), False, 'from tests.helpers import AsyncMock, MockBot, MockContext, MockGuild, MockMember, MockRole\n'), ((546, 555), 'tests.helpers.MockBot', 'MockBot', ([], {}), '()\n', (553, 555), False, 'from tests.helpers import AsyncMock, MockBot, MockContext, MockGuild, MockMember, MockRole\n'), ((576, 609), 'bot.cogs.information.Information', 'information.Information', (['self.bot'], {}), '(self.bot)\n', (599, 609), False, 'from bot.cogs import information\n'), ((630, 643), 'tests.helpers.MockContext', 'MockContext', ([], {}), '()\n', (641, 643), False, 'from tests.helpers import AsyncMock, MockBot, MockContext, MockGuild, MockMember, MockRole\n'), ((927, 938), 'tests.helpers.AsyncMock', 'AsyncMock', ([], {}), '()\n', (936, 938), False, 'from tests.helpers import AsyncMock, MockBot, MockContext, MockGuild, MockMember, MockRole\n'), ((2244, 2255), 'tests.helpers.AsyncMock', 'AsyncMock', ([], {}), '()\n', (2253, 2255), False, 'from tests.helpers import AsyncMock, MockBot, MockContext, MockGuild, MockMember, MockRole\n'), ((1092, 1114), 'asyncio.run', 'asyncio.run', (['coroutine'], {}), '(coroutine)\n', (1103, 1114), False, 'import asyncio\n'), ((1338, 1362), 'discord.Colour.blurple', 'discord.Colour.blurple', ([], {}), '()\n', (1360, 1362), False, 'import discord\n'), ((2431, 2453), 'asyncio.run', 'asyncio.run', (['coroutine'], {}), '(coroutine)\n', (2442, 2453), False, 'import asyncio\n'), ((2780, 2804), 'discord.Colour.blurple', 'discord.Colour.blurple', ([], {}), '()\n', (2802, 2804), False, 'import discord\n'), ((3325, 3345), 'discord.Colour.red', 
'discord.Colour.red', ([], {}), '()\n', (3343, 3345), False, 'import discord\n'), ((4869, 4891), 'asyncio.run', 'asyncio.run', (['coroutine'], {}), '(coroutine)\n', (4880, 4891), False, 'import asyncio\n'), ((5107, 5131), 'discord.Colour.blurple', 'discord.Colour.blurple', ([], {}), '()\n', (5129, 5131), False, 'import discord\n'), ((1735, 1759), 'discord.Colour.blurple', 'discord.Colour.blurple', ([], {}), '()\n', (1757, 1759), False, 'import discord\n'), ((1849, 1871), 'discord.Permissions', 'discord.Permissions', (['(0)'], {}), '(0)\n', (1868, 1871), False, 'import discord\n'), ((2000, 2020), 'discord.Colour.red', 'discord.Colour.red', ([], {}), '()\n', (2018, 2020), False, 'import discord\n'), ((2109, 2131), 'discord.Permissions', 'discord.Permissions', (['(0)'], {}), '(0)\n', (2128, 2131), False, 'import discord\n'), ((3710, 3841), 'discord.TextChannel', 'discord.TextChannel', ([], {'state': '{}', 'guild': 'self.ctx.guild', 'data': "{'id': 42, 'name': 'lemons-offering', 'position': 22, 'type': 'text'}"}), "(state={}, guild=self.ctx.guild, data={'id': 42, 'name':\n 'lemons-offering', 'position': 22, 'type': 'text'})\n", (3729, 3841), False, 'import discord\n'), ((3933, 4079), 'discord.CategoryChannel', 'discord.CategoryChannel', ([], {'state': '{}', 'guild': 'self.ctx.guild', 'data': "{'id': 5125, 'name': 'the-lemon-collection', 'position': 22, 'type': 'category'\n }"}), "(state={}, guild=self.ctx.guild, data={'id': 5125,\n 'name': 'the-lemon-collection', 'position': 22, 'type': 'category'})\n", (3956, 4079), False, 'import discord\n'), ((4171, 4307), 'discord.VoiceChannel', 'discord.VoiceChannel', ([], {'state': '{}', 'guild': 'self.ctx.guild', 'data': "{'id': 15290, 'name': 'listen-to-lemon', 'position': 22, 'type': 'voice'}"}), "(state={}, guild=self.ctx.guild, data={'id': 15290,\n 'name': 'listen-to-lemon', 'position': 22, 'type': 'voice'})\n", (4191, 4307), False, 'import discord\n'), ((4437, 4464), 'tests.helpers.MockMember', 'MockMember', ([], {'status': 
'"""online"""'}), "(status='online')\n", (4447, 4464), False, 'from tests.helpers import AsyncMock, MockBot, MockContext, MockGuild, MockMember, MockRole\n'), ((4503, 4528), 'tests.helpers.MockMember', 'MockMember', ([], {'status': '"""idle"""'}), "(status='idle')\n", (4513, 4528), False, 'from tests.helpers import AsyncMock, MockBot, MockContext, MockGuild, MockMember, MockRole\n'), ((4567, 4591), 'tests.helpers.MockMember', 'MockMember', ([], {'status': '"""dnd"""'}), "(status='dnd')\n", (4577, 4591), False, 'from tests.helpers import AsyncMock, MockBot, MockContext, MockGuild, MockMember, MockRole\n'), ((4630, 4658), 'tests.helpers.MockMember', 'MockMember', ([], {'status': '"""offline"""'}), "(status='offline')\n", (4640, 4658), False, 'from tests.helpers import AsyncMock, MockBot, MockContext, MockGuild, MockMember, MockRole\n')] |
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.utils.translation import gettext as _
from core import models
# https://docs.djangoproject.com/en/3.0/topics/auth
# /customizing/#using-a-custom-user-model-when-starting-a-project
class UserAdmin(BaseUserAdmin):
ordering = ['id']
list_display = ['email', 'name']
# there are 4 first brackets pairs in fieldset,
# each pair represent as a section
# each pair first value is section title in user update page
fieldsets = (
(None, {'fields': ('email', 'password')}),
(_('Personal Info'), {'fields': ('name',)}),
(_('Permissions'),
{'fields': ('is_active', 'is_staff', 'is_superuser')}),
(_('Important dates'), {'fields': ('last_login',)}),
)
add_fieldsets = (
(None, {
'classes': ('wide',),
'fields': ('email', '<PASSWORD>', '<PASSWORD>')
}),
)
admin.site.register(models.User, UserAdmin)
admin.site.register(models.Tag)
admin.site.register(models.Ingredient)
admin.site.register(models.Recipe)
| [
"django.contrib.admin.site.register",
"django.utils.translation.gettext"
] | [((974, 1017), 'django.contrib.admin.site.register', 'admin.site.register', (['models.User', 'UserAdmin'], {}), '(models.User, UserAdmin)\n', (993, 1017), False, 'from django.contrib import admin\n'), ((1018, 1049), 'django.contrib.admin.site.register', 'admin.site.register', (['models.Tag'], {}), '(models.Tag)\n', (1037, 1049), False, 'from django.contrib import admin\n'), ((1050, 1088), 'django.contrib.admin.site.register', 'admin.site.register', (['models.Ingredient'], {}), '(models.Ingredient)\n', (1069, 1088), False, 'from django.contrib import admin\n'), ((1089, 1123), 'django.contrib.admin.site.register', 'admin.site.register', (['models.Recipe'], {}), '(models.Recipe)\n', (1108, 1123), False, 'from django.contrib import admin\n'), ((618, 636), 'django.utils.translation.gettext', '_', (['"""Personal Info"""'], {}), "('Personal Info')\n", (619, 636), True, 'from django.utils.translation import gettext as _\n'), ((671, 687), 'django.utils.translation.gettext', '_', (['"""Permissions"""'], {}), "('Permissions')\n", (672, 687), True, 'from django.utils.translation import gettext as _\n'), ((763, 783), 'django.utils.translation.gettext', '_', (['"""Important dates"""'], {}), "('Important dates')\n", (764, 783), True, 'from django.utils.translation import gettext as _\n')] |
from django.core.mail.backends.base import BaseEmailBackend
import threading
import O365
from . import settings
from . import util
import logging
from .o365_logger import SimpleErrorHandler # Handles auth exceptions!
"""
A wrapper that manages the O365 API for sending emails.
Uses an identity (auth_flow_type == 'credentials').
See https://docs.microsoft.com/en-us/graph/auth-v2-service?context=graph%2Fapi%2F1.0&view=graph-rest-1.0 for more details.
"""
class O365EmailBackend(BaseEmailBackend):
    """Django email backend that sends mail through the O365 (Microsoft Graph) API.

    Authenticates as an application identity (auth_flow_type == 'credentials').
    See https://docs.microsoft.com/en-us/graph/auth-v2-service for details.
    """

    def __init__(self, client_id=None, client_secret=None, tenant_id=None,
                 fail_silently=False, **kwargs):
        super().__init__(fail_silently=fail_silently)
        self.client_id = client_id or settings.O365_MAIL_CLIENT_ID
        self.client_secret = client_secret or settings.O365_MAIL_CLIENT_SECRET
        self.tenant_id = tenant_id or settings.O365_MAIL_TENANT_ID
        self.mailbox = None
        # O365 does not raise exceptions on failure, it only logs them, so a
        # dedicated handler captures error records for later inspection.
        self.log_handler = SimpleErrorHandler()
        log = logging.getLogger('O365')
        log.addHandler(self.log_handler)
        self._lock = threading.RLock()

    def open(self):
        """Ensure an open connection to the email server.

        Return whether or not a new connection was required (True or False)
        or None if an exception passed silently.
        """
        if self.mailbox:
            # Nothing to do if the mailbox is already open.
            return False
        credentials = (self.client_id, self.client_secret)
        account = O365.Account(credentials, auth_flow_type='credentials',
                               tenant_id=self.tenant_id)
        # Drop stale error records before attempting to authenticate.
        self.log_handler.flush()
        try:
            if account.authenticate():
                kwargs = settings.O365_MAIL_MAILBOX_KWARGS
                self.mailbox = account.mailbox(**kwargs)
                return True
            else:
                # O365 reported the failure via logging only; surface it.
                msg = self.log_handler.get_message()
                if msg:
                    raise Exception(msg)
        except Exception:
            if not self.fail_silently:
                raise

    def close(self):
        """No-op: the O365 Python API exposes no way to close the connection."""
        pass

    def send_messages(self, email_messages):
        """Send one or more email messages; return the number actually sent."""
        if not email_messages:
            return 0
        with self._lock:
            new_mailbox_created = self.open()
            if not self.mailbox or new_mailbox_created is None:
                # Authentication failed (possibly silently); nothing sent.
                return 0
            num_sent = 0
            for message in email_messages:
                if self._send(message):
                    num_sent += 1
            if new_mailbox_created:
                self.close()
            return num_sent

    def _send(self, email_message):
        """A helper method that does the actual sending.

        Returns True when the message was sent (or skipped in DEBUG),
        False when there were no recipients or sending failed silently.
        """
        if not email_message.recipients():
            return False
        # Basic email information
        m = self.mailbox.new_message()
        m.to.add(email_message.to)
        m.cc.add(email_message.cc)
        m.bcc.add(email_message.bcc)
        m.sender.name, m.sender.address = util.get_name_and_email(
            email_message.from_email)
        m.subject = "".join([settings.O365_SUBJECT_PREFIX,
                             email_message.subject])
        m.body = util.get_message_body(email_message)
        # Attachments: Django hands us either a MIME object or a tuple; the
        # O365 API wants file objects, so convert via the project helpers.
        if email_message.attachments:
            for attachment in email_message.attachments:
                # get_converter returns a callable, thus it's ()()!
                converter = util.get_converter(attachment)(attachment)
                file = converter.get_file()
                filename = converter.get_filename()
                attachment_count = len(m.attachments)
                m.attachments.add([(file, filename)])
                # The freshly added attachment sits at the old-length index.
                att_obj = m.attachments[attachment_count]
                # This is to support inline content (e.g. images)
                att_obj.is_inline = converter.is_inline()
                att_obj.content_id = converter.get_content_id()
        # Send it!
        try:
            if (settings.DEBUG and settings.O365_ACTUALLY_SEND_IN_DEBUG) \
                    or not settings.DEBUG:
                return m.send(save_to_sent_folder=settings.O365_MAIL_SAVE_TO_SENT)
            # In DEBUG without the override flag, pretend success.
            return True
        except Exception:
            if self.fail_silently:
                return False
            raise  # bare raise keeps the original traceback
| [
"logging.getLogger",
"threading.RLock",
"O365.Account"
] | [((1151, 1176), 'logging.getLogger', 'logging.getLogger', (['"""O365"""'], {}), "('O365')\n", (1168, 1176), False, 'import logging\n'), ((1240, 1257), 'threading.RLock', 'threading.RLock', ([], {}), '()\n', (1255, 1257), False, 'import threading\n'), ((1671, 1757), 'O365.Account', 'O365.Account', (['credentials'], {'auth_flow_type': '"""credentials"""', 'tenant_id': 'self.tenant_id'}), "(credentials, auth_flow_type='credentials', tenant_id=self.\n tenant_id)\n", (1683, 1757), False, 'import O365\n')] |
from Cards.models import Question, TestQuestion, Test, QuestionLog
def create_new_test(user, course):
    """Create and persist a new Test for *user* on *course*.

    One TestQuestion row is prepared for every Question attached to the
    course and inserted in a single bulk operation.

    Returns the saved Test instance.
    """
    new_test = Test()
    new_test.user = user
    new_test.course = course
    new_test.save()
    batch = []
    for question in Question.objects.filter(course=course).all():
        test_question = TestQuestion()
        test_question.test = new_test
        test_question.question = question
        batch.append(test_question)
    TestQuestion.objects.bulk_create(batch)
    return new_test
def finish_test_question(user, tq, type):
    """Record the answer outcome *type* for test question *tq*.

    Writes a QuestionLog entry for *user* and stores the outcome on the
    TestQuestion itself.  (The parameter name ``type`` shadows the builtin
    but is kept for caller compatibility.)
    """
    entry = QuestionLog()
    entry.user = user
    entry.question = tq.question
    entry.type = type
    entry.save()
    tq.type = type
    tq.save()
| [
"Cards.models.TestQuestion.objects.bulk_create",
"Cards.models.QuestionLog",
"Cards.models.TestQuestion",
"Cards.models.Test",
"Cards.models.Question.objects.filter"
] | [((115, 121), 'Cards.models.Test', 'Test', ([], {}), '()\n', (119, 121), False, 'from Cards.models import Question, TestQuestion, Test, QuestionLog\n'), ((408, 456), 'Cards.models.TestQuestion.objects.bulk_create', 'TestQuestion.objects.bulk_create', (['test_questions'], {}), '(test_questions)\n', (440, 456), False, 'from Cards.models import Question, TestQuestion, Test, QuestionLog\n'), ((528, 541), 'Cards.models.QuestionLog', 'QuestionLog', ([], {}), '()\n', (539, 541), False, 'from Cards.models import Question, TestQuestion, Test, QuestionLog\n'), ((307, 321), 'Cards.models.TestQuestion', 'TestQuestion', ([], {}), '()\n', (319, 321), False, 'from Cards.models import Question, TestQuestion, Test, QuestionLog\n'), ((201, 239), 'Cards.models.Question.objects.filter', 'Question.objects.filter', ([], {'course': 'course'}), '(course=course)\n', (224, 239), False, 'from Cards.models import Question, TestQuestion, Test, QuestionLog\n')] |
from flask import jsonify
class AppError(Exception):
    """Base class for all application errors.

    Carries an HTTP status code, a machine-readable error code and a
    human-readable message, and can render itself as an HTTP response
    for API calls.
    """

    status_code = 500
    error_code = "INTERNAL_ERROR"
    message = "Request cannot be processed at the moment."

    def __init__(self, status_code=None, error_code=None, message=None):
        super().__init__()
        # Only override the class-level defaults that were actually given.
        overrides = (
            ("status_code", status_code),
            ("error_code", error_code),
            ("message", message),
        )
        for attr, value in overrides:
            if value is not None:
                setattr(self, attr, value)

    def to_api_response(self):
        """Render this error as a JSON HTTP response."""
        payload = {"errorCode": self.error_code, "errorMessage": self.message}
        response = jsonify(payload)
        response.status_code = self.status_code
        return response
class InvalidFieldError(AppError):
    """Raised when a request field fails validation (HTTP 422)."""

    def __init__(self, field_name, message=""):
        AppError.__init__(
            self,
            status_code=422,
            error_code="INVALID_FIELD",
            # Fixed: the template had a stray doubled quote after the
            # field name ("Invalid 'x''.").
            message=f"Invalid '{field_name}'. {message}",
        )
class BadRequestError(AppError):
    """Raised for malformed client requests (HTTP 400)."""

    def __init__(self, message="Malformed request."):
        super().__init__(status_code=400, error_code="BAD_REQUEST",
                         message=message)
class NotFoundError(AppError):
    """Raised when the requested resource does not exist (HTTP 404)."""

    def __init__(self, message="Requested resource not found."):
        super().__init__(status_code=404, error_code="NOT_FOUND",
                         message=message)
| [
"flask.jsonify"
] | [((639, 708), 'flask.jsonify', 'jsonify', (["{'errorCode': self.error_code, 'errorMessage': self.message}"], {}), "({'errorCode': self.error_code, 'errorMessage': self.message})\n", (646, 708), False, 'from flask import jsonify\n')] |
from wx.lib.mixins.listctrl import CheckListCtrlMixin, ListCtrlAutoWidthMixin
import parser
import subprocess
import sys
import wx
from entrypoint2 import entrypoint
class CheckListCtrl(wx.ListCtrl, CheckListCtrlMixin, ListCtrlAutoWidthMixin):
    """Report-style wx list control with per-row checkboxes and an
    auto-sized last column (via the two mixins)."""
    def __init__(self, parent):
        wx.ListCtrl.__init__(
            self, parent, -1, style=wx.LC_REPORT | wx.SUNKEN_BORDER)
        # Each mixin is initialised explicitly (no cooperative super() here).
        CheckListCtrlMixin.__init__(self)
        ListCtrlAutoWidthMixin.__init__(self)
class Repository(wx.Frame):
    """Main window: shows the flags parsed from a man page as a check-list,
    previews the resulting command line, and can run it.

    ``mandoc`` is a dict with at least ``'command'`` (str) and ``'options'``
    (iterable of ``(flag_list, help_text)`` pairs), as produced by the
    project's ``parser`` module.
    """
    def __init__(self, parent, id, title, mandoc):
        self.command = mandoc['command']
        wx.Frame.__init__(self, parent, id, title, size=(600, 400))
        panel = wx.Panel(self, -1)
        vbox = wx.BoxSizer(wx.VERTICAL)
        hbox = wx.BoxSizer(wx.HORIZONTAL)
        leftPanel = wx.Panel(panel, -1)
        rightPanel = wx.Panel(panel, -1)
        # Live preview of the command line that will be executed.
        self.log = wx.TextCtrl(rightPanel, -1, style=wx.TE_MULTILINE)
        self.list = CheckListCtrl(rightPanel)
        self.list.InsertColumn(0, 'flag', width=140)
        self.list.InsertColumn(1, 'short flag')
        self.list.InsertColumn(2, 'help')
        for i in mandoc['options']:
            flags = i[0]
            # Longest spelling first: column 0 gets the long flag.
            flags.sort(key=len, reverse=True)
            # NOTE(review): sys.maxint exists only on Python 2; on Python 3
            # this raises AttributeError (sys.maxsize would be the analogue).
            index = self.list.InsertStringItem(sys.maxint, flags[0])
            self.list.SetStringItem(
                index, 1, flags[1] if len(flags) > 1 else '')
            self.list.SetStringItem(index, 2, i[1])
        vbox2 = wx.BoxSizer(wx.VERTICAL)
        sel = wx.Button(leftPanel, -1, 'Select All', size=(100, -1))
        des = wx.Button(leftPanel, -1, 'Deselect All', size=(100, -1))
        apply = wx.Button(leftPanel, -1, 'Run', size=(100, -1))
        self.cb_close = wx.CheckBox(leftPanel, -1, 'Close', size=(100, -1))
        self.cb_close.SetToolTip(
            wx.ToolTip("close GUI after running the command"))
        self.cb_term = wx.CheckBox(
            leftPanel, -1, 'new terminal', size=(100, -1))
        self.cb_term.SetToolTip(wx.ToolTip("run command in new terminal"))
        bt_exit = wx.Button(leftPanel, -1, 'Exit', size=(100, -1))
        self.Bind(wx.EVT_BUTTON, self.OnSelectAll, id=sel.GetId())
        self.Bind(wx.EVT_BUTTON, self.OnDeselectAll, id=des.GetId())
        self.Bind(wx.EVT_BUTTON, self.OnApply, id=apply.GetId())
        self.Bind(wx.EVT_BUTTON, self.OnExit, id=bt_exit.GetId())
        vbox2.Add(sel, 0, wx.TOP, 5)
        vbox2.Add(des)
        vbox2.Add(apply)
        vbox2.Add(self.cb_close)
        vbox2.Add(self.cb_term)
        vbox2.Add(bt_exit)
        leftPanel.SetSizer(vbox2)
        vbox.Add(self.list, 1, wx.EXPAND | wx.TOP, 3)
        vbox.Add((-1, 10))
        vbox.Add(self.log, 0.5, wx.EXPAND)
        vbox.Add((-1, 10))
        rightPanel.SetSizer(vbox)
        hbox.Add(leftPanel, 0, wx.EXPAND | wx.RIGHT, 5)
        hbox.Add(rightPanel, 1, wx.EXPAND)
        hbox.Add((3, -1))
        panel.SetSizer(hbox)
        self.Centre()
        self.Show(True)
        # Re-render the command preview whenever a checkbox is toggled.
        self.list.OnCheckItem = self.OnCheckItem
        cmd = self.cmd()
        self.log.SetValue(cmd)
    def OnSelectAll(self, event):
        """Check every flag in the list."""
        num = self.list.GetItemCount()
        for i in range(num):
            self.list.CheckItem(i)
    def OnDeselectAll(self, event):
        """Uncheck every flag in the list."""
        num = self.list.GetItemCount()
        for i in range(num):
            self.list.CheckItem(i, False)
    def OnApply(self, event):
        """Run the previewed command, optionally in a new xterm window."""
        # print os.getcwd()
        cmd = self.log.GetValue()
        term = 'xterm'
        if self.cb_term.IsChecked():
            cmd = '%s -hold -e "%s"' % (term, cmd)
        # os.system( cmd )
        # NOTE(review): shell=1 with interpolated text is shell-injection
        # prone; here the text comes from the local GUI user only.
        subprocess.Popen(cmd, shell=1)
        if self.cb_close.IsChecked():
            exit(0)
    def OnExit(self, event):
        """Quit the application."""
        exit(0)
    def cmd(self):
        """Return the command line built from the checked flags."""
        count = self.list.GetItemCount()
        cmd = self.command + ' '
        for row in range(count):
            item = self.list.GetItem(itemId=row, col=0)
            if self.list.IsChecked(row):
                cmd += item.GetText() + ' '
        return cmd
    def OnCheckItem(self, index, flag):
        """Refresh the command preview after a checkbox toggle."""
        cmd = self.cmd()
        self.log.SetValue(cmd)
@entrypoint
def main(command):
    """Parse the man page of *command* and open the flag-picker GUI.

    :param command: name of the command whose man page is parsed.
    """
    mandoc = parser.command_info(command)
    app = wx.App()
    Repository(None, -1, 'mangui', mandoc)
    app.MainLoop()
| [
"wx.Button",
"wx.lib.mixins.listctrl.CheckListCtrlMixin.__init__",
"parser.command_info",
"subprocess.Popen",
"wx.BoxSizer",
"wx.lib.mixins.listctrl.ListCtrlAutoWidthMixin.__init__",
"wx.ToolTip",
"wx.CheckBox",
"wx.TextCtrl",
"wx.ListCtrl.__init__",
"wx.Frame.__init__",
"wx.App",
"wx.Panel"... | [((4166, 4194), 'parser.command_info', 'parser.command_info', (['command'], {}), '(command)\n', (4185, 4194), False, 'import parser\n'), ((4206, 4214), 'wx.App', 'wx.App', ([], {}), '()\n', (4212, 4214), False, 'import wx\n'), ((286, 363), 'wx.ListCtrl.__init__', 'wx.ListCtrl.__init__', (['self', 'parent', '(-1)'], {'style': '(wx.LC_REPORT | wx.SUNKEN_BORDER)'}), '(self, parent, -1, style=wx.LC_REPORT | wx.SUNKEN_BORDER)\n', (306, 363), False, 'import wx\n'), ((385, 418), 'wx.lib.mixins.listctrl.CheckListCtrlMixin.__init__', 'CheckListCtrlMixin.__init__', (['self'], {}), '(self)\n', (412, 418), False, 'from wx.lib.mixins.listctrl import CheckListCtrlMixin, ListCtrlAutoWidthMixin\n'), ((427, 464), 'wx.lib.mixins.listctrl.ListCtrlAutoWidthMixin.__init__', 'ListCtrlAutoWidthMixin.__init__', (['self'], {}), '(self)\n', (458, 464), False, 'from wx.lib.mixins.listctrl import CheckListCtrlMixin, ListCtrlAutoWidthMixin\n'), ((595, 654), 'wx.Frame.__init__', 'wx.Frame.__init__', (['self', 'parent', 'id', 'title'], {'size': '(600, 400)'}), '(self, parent, id, title, size=(600, 400))\n', (612, 654), False, 'import wx\n'), ((672, 690), 'wx.Panel', 'wx.Panel', (['self', '(-1)'], {}), '(self, -1)\n', (680, 690), False, 'import wx\n'), ((707, 731), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.VERTICAL'], {}), '(wx.VERTICAL)\n', (718, 731), False, 'import wx\n'), ((747, 773), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (758, 773), False, 'import wx\n'), ((795, 814), 'wx.Panel', 'wx.Panel', (['panel', '(-1)'], {}), '(panel, -1)\n', (803, 814), False, 'import wx\n'), ((836, 855), 'wx.Panel', 'wx.Panel', (['panel', '(-1)'], {}), '(panel, -1)\n', (844, 855), False, 'import wx\n'), ((876, 926), 'wx.TextCtrl', 'wx.TextCtrl', (['rightPanel', '(-1)'], {'style': 'wx.TE_MULTILINE'}), '(rightPanel, -1, style=wx.TE_MULTILINE)\n', (887, 926), False, 'import wx\n'), ((1461, 1485), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.VERTICAL'], {}), '(wx.VERTICAL)\n', 
(1472, 1485), False, 'import wx\n'), ((1501, 1555), 'wx.Button', 'wx.Button', (['leftPanel', '(-1)', '"""Select All"""'], {'size': '(100, -1)'}), "(leftPanel, -1, 'Select All', size=(100, -1))\n", (1510, 1555), False, 'import wx\n'), ((1570, 1626), 'wx.Button', 'wx.Button', (['leftPanel', '(-1)', '"""Deselect All"""'], {'size': '(100, -1)'}), "(leftPanel, -1, 'Deselect All', size=(100, -1))\n", (1579, 1626), False, 'import wx\n'), ((1643, 1690), 'wx.Button', 'wx.Button', (['leftPanel', '(-1)', '"""Run"""'], {'size': '(100, -1)'}), "(leftPanel, -1, 'Run', size=(100, -1))\n", (1652, 1690), False, 'import wx\n'), ((1715, 1766), 'wx.CheckBox', 'wx.CheckBox', (['leftPanel', '(-1)', '"""Close"""'], {'size': '(100, -1)'}), "(leftPanel, -1, 'Close', size=(100, -1))\n", (1726, 1766), False, 'import wx\n'), ((1887, 1945), 'wx.CheckBox', 'wx.CheckBox', (['leftPanel', '(-1)', '"""new terminal"""'], {'size': '(100, -1)'}), "(leftPanel, -1, 'new terminal', size=(100, -1))\n", (1898, 1945), False, 'import wx\n'), ((2052, 2100), 'wx.Button', 'wx.Button', (['leftPanel', '(-1)', '"""Exit"""'], {'size': '(100, -1)'}), "(leftPanel, -1, 'Exit', size=(100, -1))\n", (2061, 2100), False, 'import wx\n'), ((3601, 3631), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'shell': '(1)'}), '(cmd, shell=1)\n', (3617, 3631), False, 'import subprocess\n'), ((1813, 1862), 'wx.ToolTip', 'wx.ToolTip', (['"""close GUI after running the command"""'], {}), "('close GUI after running the command')\n", (1823, 1862), False, 'import wx\n'), ((1991, 2032), 'wx.ToolTip', 'wx.ToolTip', (['"""run command in new terminal"""'], {}), "('run command in new terminal')\n", (2001, 2032), False, 'import wx\n')] |
# coding: utf-8
# /*##########################################################################
#
# Copyright (c) 2017-2018 European Synchrotron Radiation Facility
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# ###########################################################################*/
"""Helper to transform or extract information from the abstratc model.
"""
from __future__ import division
__authors__ = ["<NAME>"]
__license__ = "MIT"
import numpy
from pyFAI.control_points import ControlPoints
from pyFAI.gui.model.CalibrationModel import CalibrationModel
from pyFAI.gui.model.PeakSelectionModel import PeakSelectionModel
from pyFAI.gui.model.PeakModel import PeakModel
from pyFAI.gui.CalibrationContext import CalibrationContext
def createControlPoints(model):
    """Build a pyFAI ControlPoints object from the calibration model.

    Disabled peak groups are skipped; ring numbers are converted from the
    model's 1-based convention to ControlPoints' 0-based one.

    :rtype: pyFAI.control_points.ControlPoints
    :raises TypeError: if *model* is not a CalibrationModel
    """
    if not isinstance(model, CalibrationModel):
        raise TypeError("Unexpected model type")
    experiment = model.experimentSettingsModel()
    controlPoints = ControlPoints(
        calibrant=experiment.calibrantModel().calibrant(),
        wavelength=experiment.wavelength().value())
    for peak in model.peakSelectionModel():
        if peak.isEnabled():
            controlPoints.append(points=peak.coords().tolist(),
                                 ring=peak.ringNumber() - 1)
    return controlPoints
def createPeaksArray(model):
    """Create a contiguous peak array containing (y, x, ring number).

    Disabled peak groups are skipped entirely.

    :param PeakSelectionModel model: A set of selected peaks
    :rtype: numpy.ndarray
    :raises TypeError: if *model* is not a PeakSelectionModel
    """
    if not isinstance(model, PeakSelectionModel):
        raise TypeError("Unexpected model type")
    # Count only enabled groups.  The previous code counted every group but
    # only filled rows for enabled ones, leaving uninitialized garbage rows
    # (numpy.empty) at the tail of the returned array.
    count = sum(len(group) for group in model if group.isEnabled())
    peaks = numpy.empty(shape=(count, 3), dtype=float)
    pos = 0
    for group in model:
        if not group.isEnabled():
            continue
        end = pos + len(group)
        peaks[pos:end, 0:2] = group.coords()
        peaks[pos:end, 2] = group.ringNumber() - 1
        pos = end
    # The array is already freshly allocated; no extra copy is needed.
    return peaks
def filterControlPoints(filterCallback, peakSelectionModel, removedPeaks=None):
    """Filter every peak of the model through a callback.

    :param Callable[[int,int],bool] filterCallback: returns True when the
        peak has to stay in the result.
    :param PeakSelectionModel peakSelectionModel: Model to filter in place
    :param List[Tuple[int,int]] removedPeaks: Provide a list to feed it with
        removed peaks from the model.
    """
    peakSelectionModel.lockSignals()
    for group in peakSelectionModel:
        kept = []
        dropped = []
        for coord in group.coords():
            if filterCallback(coord[0], coord[1]):
                kept.append(coord)
            else:
                dropped.append(coord)
        if dropped:
            if removedPeaks is not None:
                removedPeaks.extend(dropped)
            if kept:
                group.setCoords(numpy.array(kept))
            else:
                group.setCoords(numpy.empty(shape=(0, 2)))
    peakSelectionModel.unlockSignals()
def _findUnusedName(peakSelectionModel):
    """Return a short lower-case name not yet used by any peak group.

    Existing names are right-padded to 8 characters, sorted, and the
    lexicographically last one is decoded as a base-26 number (a=0);
    the successor of that number is re-encoded as the new name.

    :rtype: str
    """
    padded = sorted("% 8s" % group.name() for group in peakSelectionModel)
    if padded:
        current = 0
        for char in padded[-1].strip():
            current = current * 26 + (ord(char) - ord('a'))
        next_number = current + 1
    else:
        next_number = 0
    if next_number == 0:
        return "a"
    digits = []
    remaining = next_number
    while remaining > 0:
        digits.append(chr(remaining % 26 + ord('a')))
        remaining //= 26
    return "".join(reversed(digits))
def createRing(points, peakSelectionModel, ringNumber=None, context=None):
    """Create a new PeakModel ring holding *points*.

    :param points: coordinates stored in the new ring
    :param peakSelectionModel: model the ring will belong to
    :param int ringNumber: 1-based ring number (defaults to 1)
    :param context: CalibrationContext (defaults to the shared instance)
    :rtype: PeakModel
    """
    if context is None:
        context = CalibrationContext.instance()
    # Pick the name before instantiating the PeakModel, as the original
    # code did, so the name scan only sees pre-existing groups.
    ringName = _findUnusedName(peakSelectionModel)
    if ringNumber is None:
        ringNumber = 1
    ringColor = context.getMarkerColor(ringNumber - 1)
    ring = PeakModel(peakSelectionModel)
    ring.setName(ringName)
    ring.setColor(ringColor)
    ring.setCoords(points)
    ring.setRingNumber(ringNumber)
    return ring
def initPeaksFromControlPoints(peakSelectionModel, controlPoints, context=None):
    """Reset *peakSelectionModel* from a pyFAI ControlPoints object.

    The model is cleared, then one peak group per control-point label is
    recreated (ring numbers converted to the model's 1-based convention).

    :raises TypeError: if either argument has an unexpected type
    """
    if not isinstance(peakSelectionModel, PeakSelectionModel):
        raise TypeError("Unexpected model type")
    if not isinstance(controlPoints, ControlPoints):
        raise TypeError("Unexpected model type")
    if context is None:
        context = CalibrationContext.instance()
    peakSelectionModel.clear()
    for label in controlPoints.get_labels():
        group = controlPoints.get(lbl=label)
        ringColor = context.getMarkerColor(group.ring)
        coords = numpy.array(group.points)
        ring = createRing(coords, peakSelectionModel=peakSelectionModel,
                          context=context)
        ring.setRingNumber(group.ring + 1)
        ring.setColor(ringColor)
        ring.setName(label)
        peakSelectionModel.append(ring)
def geometryModelToGeometry(geometryModel, geometry):
    """Copy the fit parameters from *geometryModel* into *geometry*.

    Each model accessor returns a value holder whose ``value()`` gives the
    number stored on the corresponding pyFAI geometry attribute.
    """
    parameter_map = (
        ("dist", geometryModel.distance()),
        ("poni1", geometryModel.poni1()),
        ("poni2", geometryModel.poni2()),
        ("rot1", geometryModel.rotation1()),
        ("rot2", geometryModel.rotation2()),
        ("rot3", geometryModel.rotation3()),
        ("wavelength", geometryModel.wavelength()),
    )
    for attribute, holder in parameter_map:
        setattr(geometry, attribute, holder.value())
| [
"pyFAI.gui.model.PeakModel.PeakModel",
"pyFAI.control_points.ControlPoints",
"numpy.array",
"numpy.empty",
"pyFAI.gui.CalibrationContext.CalibrationContext.instance"
] | [((2173, 2230), 'pyFAI.control_points.ControlPoints', 'ControlPoints', ([], {'calibrant': 'calibrant', 'wavelength': 'wavelength'}), '(calibrant=calibrant, wavelength=wavelength)\n', (2186, 2230), False, 'from pyFAI.control_points import ControlPoints\n'), ((2904, 2946), 'numpy.empty', 'numpy.empty', ([], {'shape': '(count, 3)', 'dtype': 'float'}), '(shape=(count, 3), dtype=float)\n', (2915, 2946), False, 'import numpy\n'), ((3183, 3201), 'numpy.array', 'numpy.array', (['peaks'], {}), '(peaks)\n', (3194, 3201), False, 'import numpy\n'), ((5331, 5360), 'pyFAI.gui.model.PeakModel.PeakModel', 'PeakModel', (['peakSelectionModel'], {}), '(peakSelectionModel)\n', (5340, 5360), False, 'from pyFAI.gui.model.PeakModel import PeakModel\n'), ((5135, 5164), 'pyFAI.gui.CalibrationContext.CalibrationContext.instance', 'CalibrationContext.instance', ([], {}), '()\n', (5162, 5164), False, 'from pyFAI.gui.CalibrationContext import CalibrationContext\n'), ((5976, 6005), 'pyFAI.gui.CalibrationContext.CalibrationContext.instance', 'CalibrationContext.instance', ([], {}), '()\n', (6003, 6005), False, 'from pyFAI.gui.CalibrationContext import CalibrationContext\n'), ((6196, 6221), 'numpy.array', 'numpy.array', (['group.points'], {}), '(group.points)\n', (6207, 6221), False, 'import numpy\n'), ((4140, 4165), 'numpy.empty', 'numpy.empty', ([], {'shape': '(0, 2)'}), '(shape=(0, 2))\n', (4151, 4165), False, 'import numpy\n'), ((4212, 4234), 'numpy.array', 'numpy.array', (['newCoords'], {}), '(newCoords)\n', (4223, 4234), False, 'import numpy\n')] |
import datetime
from sqlalchemy import (
PrimaryKeyConstraint,
Boolean,
DateTime,
UniqueConstraint,
ForeignKey,
MetaData,
Table,
Column,
ForeignKey,
Integer,
String,
Date,
)
from . import meta
# TODO: type on declarative style
# Association table linking a subscribing user to a subscribed-to user.
friends = Table('friends', meta,
        Column('user_1', Integer, ForeignKey('user.id'), comment='Subscriber'),
        Column('user_2', Integer, ForeignKey('user.id'), comment='User to subscribe'),
        # Creation timestamp, filled in by the application (UTC).
        Column('created_on', DateTime(), default=datetime.datetime.utcnow),
        # presumably the link only counts once the target user approves it
        # — confirm against application logic.
        Column('is_approved', Boolean(), default=False),
        # Composite PK: at most one link per (user_1, user_2) pair.
        PrimaryKeyConstraint('user_1', 'user_2', name='user_1_user_2_pk'),
)
user = Table('user', meta,
        Column('id', Integer, primary_key=True),
        Column('username', String(200), nullable=False, index=True),
        Column('phone', String(14), nullable=True),
        # Defaults to creation time (UTC); presumably refreshed on user
        # activity — verify against the application code.
        Column('last_seen', DateTime(), nullable=False, default=datetime.datetime.utcnow),
        UniqueConstraint('username', 'phone', name='username_phone_uniq_constraint'),
)
| [
"sqlalchemy.DateTime",
"sqlalchemy.Boolean",
"sqlalchemy.PrimaryKeyConstraint",
"sqlalchemy.ForeignKey",
"sqlalchemy.UniqueConstraint",
"sqlalchemy.String",
"sqlalchemy.Column"
] | [((659, 724), 'sqlalchemy.PrimaryKeyConstraint', 'PrimaryKeyConstraint', (['"""user_1"""', '"""user_2"""'], {'name': '"""user_1_user_2_pk"""'}), "('user_1', 'user_2', name='user_1_user_2_pk')\n", (679, 724), False, 'from sqlalchemy import PrimaryKeyConstraint, Boolean, DateTime, UniqueConstraint, ForeignKey, MetaData, Table, Column, ForeignKey, Integer, String, Date\n'), ((785, 824), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (791, 824), False, 'from sqlalchemy import PrimaryKeyConstraint, Boolean, DateTime, UniqueConstraint, ForeignKey, MetaData, Table, Column, ForeignKey, Integer, String, Date\n'), ((1066, 1142), 'sqlalchemy.UniqueConstraint', 'UniqueConstraint', (['"""username"""', '"""phone"""'], {'name': '"""username_phone_uniq_constraint"""'}), "('username', 'phone', name='username_phone_uniq_constraint')\n", (1082, 1142), False, 'from sqlalchemy import PrimaryKeyConstraint, Boolean, DateTime, UniqueConstraint, ForeignKey, MetaData, Table, Column, ForeignKey, Integer, String, Date\n'), ((353, 374), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""user.id"""'], {}), "('user.id')\n", (363, 374), False, 'from sqlalchemy import PrimaryKeyConstraint, Boolean, DateTime, UniqueConstraint, ForeignKey, MetaData, Table, Column, ForeignKey, Integer, String, Date\n'), ((441, 462), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""user.id"""'], {}), "('user.id')\n", (451, 462), False, 'from sqlalchemy import PrimaryKeyConstraint, Boolean, DateTime, UniqueConstraint, ForeignKey, MetaData, Table, Column, ForeignKey, Integer, String, Date\n'), ((531, 541), 'sqlalchemy.DateTime', 'DateTime', ([], {}), '()\n', (539, 541), False, 'from sqlalchemy import PrimaryKeyConstraint, Boolean, DateTime, UniqueConstraint, ForeignKey, MetaData, Table, Column, ForeignKey, Integer, String, Date\n'), ((616, 625), 'sqlalchemy.Boolean', 'Boolean', ([], {}), '()\n', (623, 625), False, 'from sqlalchemy import 
PrimaryKeyConstraint, Boolean, DateTime, UniqueConstraint, ForeignKey, MetaData, Table, Column, ForeignKey, Integer, String, Date\n'), ((858, 869), 'sqlalchemy.String', 'String', (['(200)'], {}), '(200)\n', (864, 869), False, 'from sqlalchemy import PrimaryKeyConstraint, Boolean, DateTime, UniqueConstraint, ForeignKey, MetaData, Table, Column, ForeignKey, Integer, String, Date\n'), ((929, 939), 'sqlalchemy.String', 'String', (['(14)'], {}), '(14)\n', (935, 939), False, 'from sqlalchemy import PrimaryKeyConstraint, Boolean, DateTime, UniqueConstraint, ForeignKey, MetaData, Table, Column, ForeignKey, Integer, String, Date\n'), ((990, 1000), 'sqlalchemy.DateTime', 'DateTime', ([], {}), '()\n', (998, 1000), False, 'from sqlalchemy import PrimaryKeyConstraint, Boolean, DateTime, UniqueConstraint, ForeignKey, MetaData, Table, Column, ForeignKey, Integer, String, Date\n')] |
# %% [markdown]
# ## Imports
# %%
import numpy as np
import scipy
import skimage
import cv2
# %%
class CornerDetector:
    """Corner detector for an image.
    Args:
        img (array-like): matrix representation of input image.
            May be a grayscale or RGB image.
    Attributes:
        img (numpy.ndarray): numpy array of the input image representation.
    """
    def __init__(self, img):
        self.__img = np.array(img)
    def rgb_to_grayscale(self, img):
        """ Converts an RGB image to gray scale.
        Using the ITU-R 601-2 luma transform
        Args:
            img (array-like): array representation of a RGB image.
        Returns:
            numpy.ndarray: Array representation of img, converted to grayscale.
        """
        return np.dot(img[..., :3], [0.2989, 0.5870, 0.1140])
    def image_derivatives(self, arr, x=True, y=True):
        """ Calculates x and y derivatives using the Sobel operator,
        with convolution using Scipy.
        Args:
            arr (array-like): An array representation of a grayscale image.
            x (bool): True to calculate the X-derivative, else False
            y (bool): True to calculate the Y-derivative, else False.
        Returns:
            numpy.ndarray: X-derivative of arr if x = True, else None.
            numpy.ndarray: Y-derivative of arr if y = True, else None.
        """
        # Standard 3x3 Sobel kernels.
        kernel_x = np.array([[-1, 0, 1], [-2, 0, 2], [-1, 0, 1]])
        kernel_y = np.array([[-1, -2, -1], [0, 0, 0], [1, 2, 1]])
        deriv_x, deriv_y = None, None
        if (x):
            deriv_x = scipy.signal.convolve2d(arr, kernel_x, mode='same')
        if (y):
            deriv_y = scipy.signal.convolve2d(arr, kernel_y, mode='same')
        return deriv_x, deriv_y
    def __preprocess(self):
        """
        Pre-processing the image, with denoising using openCV2,
        and thresholding with the Sobel filter and threshold_otsu
        from Scikit-image.
        Returns:
            numpy.ndarray: Pre-processed image for self.corner_detector method.
        """
        # NOTE(review): for inputs that are neither 2-D nor 3-D this returns
        # None, and corner_detector then fails on img_p.shape — consider
        # raising here instead.
        img_p = None
        if (len(self.__img.shape) == 3):
            img_p = cv2.fastNlMeansDenoisingColored(self.__img)
            img_p = skimage.filters.sobel(self.rgb_to_grayscale(img_p))
            img_p = img_p > skimage.filters.threshold_otsu(img_p)
        elif (len(self.__img.shape) == 2):
            img_p = cv2.fastNlMeansDenoising(self.__img)
            img_p = skimage.filters.sobel(img_p)
            img_p = img_p > skimage.filters.threshold_otsu(img_p)
        return img_p
    def corner_detector(self,
                        offset=1,
                        threshold=0,
                        k=0.06,
                        k_mean=False,
                        eps=0.001,
                        mode='shi-tomasi'):
        """ Corner detection method.
        Uses Harris Corner Detector or Shi-Tomasi Corner Detector.
        Note:
            This method calls the self.__preprocess method before applying the
            Harris/Shi-Tomasi corner detector on the resulting image.
        Args:
            offset (int): Offset to center of analyzed regions around a pixel.
                Equals the integer division of the size of the region by two.
            threshold (float): Threshold of corner response measure.
                The higher the limit, the fewer points will be returned.
            k (float): Harris detector parameter
                Should be around 0.04 to 0.06.
            k_mean (bool): Determines if k should be automatically computed.
            eps (float): Small value (around 0.001) for k computation.
                Only relevant if k_mean = True.
            mode (str): 'harris' or 'shi-tomasi'.
                Selector between Harris and Shi-Tomasi Corner Detectors.
        Returns:
            numpy.ndarray: Input image, with marked regions identified
                as corners.
            numpy.ndarray: List of points identified as corners.
                Structure: [x, y, E], where x and y are the coordinates,
                and E is the corner response measure of the point.
        """
        corner_points = []
        ret_img = np.copy(self.__img)
        # Preprocessing image with thresholding
        img_p = self.__preprocess()
        # Find derivatives and tensor setup
        # Create image for return, illustrating corner points
        if (len(img_p.shape) == 3):
            dx, dy = self.image_derivatives(self.rgb_to_grayscale(img_p))
        elif (len(img_p.shape) == 2):
            dx, dy = self.image_derivatives(img_p)
        else:
            raise TypeError("Numpy array with invalid shape")
        # Entries of the structure tensor (second-moment matrix).
        ixx, ixy, iyy = dx**2, dx * dy, dy**2
        # Iterate through windows
        for i in range(offset, self.__img.shape[0] - offset):
            for j in range(offset, self.__img.shape[1] - offset):
                # Calculate sum over the sliding window
                sxx = np.sum(ixx[i - offset:i + offset + 1, j - offset:j +
                                                            offset + 1])
                syy = np.sum(iyy[i - offset:i + offset + 1, j - offset:j +
                                                            offset + 1])
                sxy = np.sum(ixy[i - offset:i + offset + 1, j - offset:j +
                                                            offset + 1])
                # Find determinant and trace,
                # use to get corner response -> r = det - k*(trace**2)
                det = ((sxx * syy) - (sxy**2))
                trace = sxx + syy
                if (k_mean):
                    k = 2 * (det / (trace + eps))
                if (mode == 'harris'):
                    r = det - k * (trace**2)
                elif (mode == 'shi-tomasi'):
                    # NOTE(review): this takes min(Sxx, Syy) rather than the
                    # true minimum eigenvalue of the structure tensor —
                    # confirm this approximation is intended.
                    r = np.minimum(sxx, syy)
                else:
                    raise ValueError("Invalid value for 'mode' variable")
                # Verify if point is a corner with threshold value
                # If true, add to list of corner points and colorize point
                # on returning image
                if (r > threshold):
                    corner_points.append([i, j, r])
                    if (len(ret_img.shape) == 3):
                        ret_img[i, j] = [255, 0, 0]
                    elif (len(ret_img.shape) == 2):
                        ret_img[i, j] = 255
                    else:
                        raise TypeError("Numpy array with invalid shape")
        return ret_img, np.array(corner_points)
    def find_corners4(self,
                      offset=1,
                      threshold=0,
                      k=0.06,
                      k_mean=False,
                      eps=0.001,
                      mode='shi-tomasi'):
        """
        Find the corner points nearest to the corners of the input image,
        using self.corner_detector.
        Args:
            offset (int): Offset to center of analyzed regions around a pixel.
                Equals the integer division of the size of the region by two.
            threshold (float): Threshold of corner response measure.
                The higher the limit, the fewer points will be returned.
            k (float): Harris detector parameter
                Should be around 0.04 to 0.06.
            k_mean (bool): Determines if k should be automatically computed.
            eps (float): Small value (around 0.001) for k computation.
                Only relevant if k_mean = True.
            mode (str): 'harris' or 'shi-tomasi'.
                Selector between Harris and Shi-Tomasi Corner Detectors.
        Returns:
            numpy.ndarray: Array of coordinates of the four identified corners
                of the object.
        """
        img_cd, img_cd_c = self.corner_detector(offset, threshold, k, k_mean,
                                                eps, mode)
        # Getting the four best corners of the business card, after corner
        # detection
        # points holds [x, y, best-distance-so-far] per image corner.
        points = np.array([[0, 0, np.inf], [0, 0, np.inf], [0, 0, np.inf],
                           [0, 0, np.inf]])
        corners = [[0, 0], [0, img_cd.shape[1] - 1], [img_cd.shape[0] - 1, 0],
                   [img_cd.shape[0] - 1, img_cd.shape[1] - 1]]
        for c in img_cd_c:
            # Getting distances from c to the corners of the image
            dist = np.array([
                scipy.spatial.distance.euclidean(c[:2], corners[0]),
                scipy.spatial.distance.euclidean(c[:2], corners[1]),
                scipy.spatial.distance.euclidean(c[:2], corners[2]),
                scipy.spatial.distance.euclidean(c[:2], corners[3]),
            ])
            # Limiting each element from points to a quadrant of the image
            if (dist[0] < points[0][2] and c[0] < img_cd.shape[0] // 2 and
                    c[1] < img_cd.shape[1] // 2):
                points[0] = [(c[0]), c[1], dist[0]]
            if (dist[1] < points[1][2] and c[0] < img_cd.shape[0] // 2 and
                    c[1] > img_cd.shape[1] // 2):
                points[1] = [(c[0]), c[1], dist[1]]
            if (dist[2] < points[2][2] and c[0] > img_cd.shape[0] // 2 and
                    c[1] < img_cd.shape[1] // 2):
                points[2] = [(c[0]), c[1], dist[2]]
            if (dist[3] < points[3][2] and c[0] > img_cd.shape[0] // 2 and
                    c[1] > img_cd.shape[1] // 2):
                points[3] = [(c[0]), c[1], dist[3]]
        return points[:, :2]
# %%
# Running tests on an random image
# ! This segment of the code is used only for testing purposes
if __name__ == "__main__":
    # Manual smoke test: run the detector on a randomly chosen sample image.
    import imageio
    import matplotlib.pyplot as plt
    import os

    image_dir = './images'
    # Collect the example files shipped with the project.
    sample_paths = []
    for entry in os.listdir(image_dir):
        candidate = image_dir + '/' + entry
        if os.path.isfile(os.path.join(image_dir, entry)):
            sample_paths.append(candidate)
    # Selecting a random file for testing.
    chosen = sample_paths[np.random.randint(0, len(sample_paths))]
    img = imageio.imread(chosen)
    # Show the original next to the detected-corner overlay.
    plt.figure(figsize=(20, 20))
    plt.subplot(121)
    plt.imshow(img)
    plt.subplot(122)
    plt.imshow(CornerDetector(img).corner_detector()[0])
    print(CornerDetector(img).find_corners4())
| [
"matplotlib.pyplot.imshow",
"numpy.copy",
"scipy.signal.convolve2d",
"os.listdir",
"numpy.minimum",
"cv2.fastNlMeansDenoising",
"cv2.fastNlMeansDenoisingColored",
"skimage.filters.threshold_otsu",
"skimage.filters.sobel",
"os.path.join",
"numpy.array",
"matplotlib.pyplot.figure",
"numpy.dot"... | [((9931, 9955), 'imageio.imread', 'imageio.imread', (['file_img'], {}), '(file_img)\n', (9945, 9955), False, 'import imageio\n'), ((9960, 9988), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(20, 20)'}), '(figsize=(20, 20))\n', (9970, 9988), True, 'import matplotlib.pyplot as plt\n'), ((9993, 10009), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(121)'], {}), '(121)\n', (10004, 10009), True, 'import matplotlib.pyplot as plt\n'), ((10014, 10029), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img'], {}), '(img)\n', (10024, 10029), True, 'import matplotlib.pyplot as plt\n'), ((10034, 10050), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(122)'], {}), '(122)\n', (10045, 10050), True, 'import matplotlib.pyplot as plt\n'), ((431, 444), 'numpy.array', 'np.array', (['img'], {}), '(img)\n', (439, 444), True, 'import numpy as np\n'), ((783, 827), 'numpy.dot', 'np.dot', (['img[..., :3]', '[0.2989, 0.587, 0.114]'], {}), '(img[..., :3], [0.2989, 0.587, 0.114])\n', (789, 827), True, 'import numpy as np\n'), ((1412, 1458), 'numpy.array', 'np.array', (['[[-1, 0, 1], [-2, 0, 2], [-1, 0, 1]]'], {}), '([[-1, 0, 1], [-2, 0, 2], [-1, 0, 1]])\n', (1420, 1458), True, 'import numpy as np\n'), ((1478, 1524), 'numpy.array', 'np.array', (['[[-1, -2, -1], [0, 0, 0], [1, 2, 1]]'], {}), '([[-1, -2, -1], [0, 0, 0], [1, 2, 1]])\n', (1486, 1524), True, 'import numpy as np\n'), ((4212, 4231), 'numpy.copy', 'np.copy', (['self.__img'], {}), '(self.__img)\n', (4219, 4231), True, 'import numpy as np\n'), ((7971, 8045), 'numpy.array', 'np.array', (['[[0, 0, np.inf], [0, 0, np.inf], [0, 0, np.inf], [0, 0, np.inf]]'], {}), '([[0, 0, np.inf], [0, 0, np.inf], [0, 0, np.inf], [0, 0, np.inf]])\n', (7979, 8045), True, 'import numpy as np\n'), ((1601, 1652), 'scipy.signal.convolve2d', 'scipy.signal.convolve2d', (['arr', 'kernel_x'], {'mode': '"""same"""'}), "(arr, kernel_x, mode='same')\n", (1624, 1652), False, 'import scipy\n'), ((1691, 1742), 'scipy.signal.convolve2d', 
'scipy.signal.convolve2d', (['arr', 'kernel_y'], {'mode': '"""same"""'}), "(arr, kernel_y, mode='same')\n", (1714, 1742), False, 'import scipy\n'), ((2165, 2208), 'cv2.fastNlMeansDenoisingColored', 'cv2.fastNlMeansDenoisingColored', (['self.__img'], {}), '(self.__img)\n', (2196, 2208), False, 'import cv2\n'), ((6465, 6488), 'numpy.array', 'np.array', (['corner_points'], {}), '(corner_points)\n', (6473, 6488), True, 'import numpy as np\n'), ((9726, 9748), 'os.listdir', 'os.listdir', (['"""./images"""'], {}), "('./images')\n", (9736, 9748), False, 'import os\n'), ((2309, 2346), 'skimage.filters.threshold_otsu', 'skimage.filters.threshold_otsu', (['img_p'], {}), '(img_p)\n', (2339, 2346), False, 'import skimage\n'), ((2410, 2446), 'cv2.fastNlMeansDenoising', 'cv2.fastNlMeansDenoising', (['self.__img'], {}), '(self.__img)\n', (2434, 2446), False, 'import cv2\n'), ((2467, 2495), 'skimage.filters.sobel', 'skimage.filters.sobel', (['img_p'], {}), '(img_p)\n', (2488, 2495), False, 'import skimage\n'), ((4986, 5051), 'numpy.sum', 'np.sum', (['ixx[i - offset:i + offset + 1, j - offset:j + offset + 1]'], {}), '(ixx[i - offset:i + offset + 1, j - offset:j + offset + 1])\n', (4992, 5051), True, 'import numpy as np\n'), ((5107, 5172), 'numpy.sum', 'np.sum', (['iyy[i - offset:i + offset + 1, j - offset:j + offset + 1]'], {}), '(iyy[i - offset:i + offset + 1, j - offset:j + offset + 1])\n', (5113, 5172), True, 'import numpy as np\n'), ((5228, 5293), 'numpy.sum', 'np.sum', (['ixy[i - offset:i + offset + 1, j - offset:j + offset + 1]'], {}), '(ixy[i - offset:i + offset + 1, j - offset:j + offset + 1])\n', (5234, 5293), True, 'import numpy as np\n'), ((9775, 9802), 'os.path.join', 'os.path.join', (['"""./images"""', 'f'], {}), "('./images', f)\n", (9787, 9802), False, 'import os\n'), ((2524, 2561), 'skimage.filters.threshold_otsu', 'skimage.filters.threshold_otsu', (['img_p'], {}), '(img_p)\n', (2554, 2561), False, 'import skimage\n'), ((8355, 8406), 
'scipy.spatial.distance.euclidean', 'scipy.spatial.distance.euclidean', (['c[:2]', 'corners[0]'], {}), '(c[:2], corners[0])\n', (8387, 8406), False, 'import scipy\n'), ((8424, 8475), 'scipy.spatial.distance.euclidean', 'scipy.spatial.distance.euclidean', (['c[:2]', 'corners[1]'], {}), '(c[:2], corners[1])\n', (8456, 8475), False, 'import scipy\n'), ((8493, 8544), 'scipy.spatial.distance.euclidean', 'scipy.spatial.distance.euclidean', (['c[:2]', 'corners[2]'], {}), '(c[:2], corners[2])\n', (8525, 8544), False, 'import scipy\n'), ((8562, 8613), 'scipy.spatial.distance.euclidean', 'scipy.spatial.distance.euclidean', (['c[:2]', 'corners[3]'], {}), '(c[:2], corners[3])\n', (8594, 8613), False, 'import scipy\n'), ((5758, 5778), 'numpy.minimum', 'np.minimum', (['sxx', 'syy'], {}), '(sxx, syy)\n', (5768, 5778), True, 'import numpy as np\n')] |
__copyright__ = '2017 <NAME>. All Rights Reserved.'
__author__ = '<NAME>'
""" Mic file geometry and processing.
"""
import numpy as np
from xdm_toolkit import xdm_assert as xassert
def generate_vertices(mic_snp, sidewidth):
    """Build triangle vertex and data arrays from mic snapshot rows.

    Rows are split by triangle direction (column ``T_DIR_IDX``) and the
    per-row side width is ``sidewidth / 2**generation`` (column
    ``T_GEN_IDX``).  Returns a tuple ``(vertices, data)`` where each vertex
    row is (x0, y0, x1, y1, x2, y2); note the rows are reordered so all
    upward-pointing triangles come before the downward ones.
    """
    T_GEN_IDX = 4  # column holding the triangle generation exponent
    T_DIR_IDX = 3  # column holding the triangle direction flag
    generation = 2.0 ** mic_snp[:, T_GEN_IDX]
    down_rows = (mic_snp[:, T_DIR_IDX] > 1).nonzero()
    up_rows = (mic_snp[:, T_DIR_IDX] <= 1).nonzero()
    up_sides = sidewidth / generation[up_rows]
    down_sides = sidewidth / generation[down_rows]
    up_vertices = gen_vertex_helper(
        np.squeeze(mic_snp[up_rows, 0:2]), up_sides, points_up=True)
    down_vertices = gen_vertex_helper(
        np.squeeze(mic_snp[down_rows, 0:2]), down_sides, points_up=False)
    up_payload = np.squeeze(mic_snp[up_rows, 2:])
    down_payload = np.squeeze(mic_snp[down_rows, 2:])
    # Stack ups first, then downs -- the data rows follow the same reorder.
    return np.vstack((up_vertices, down_vertices)), np.vstack((up_payload, down_payload))
def gen_vertex_helper(left_vert, sw_list, points_up):
    """Expand left-corner points into full equilateral-triangle vertices.

    ``left_vert`` is an (N, 2) array of left-corner coordinates and
    ``sw_list`` the per-triangle side widths.  The apex is offset by
    ``sw/2`` in x and ``sw/2 * sqrt(3)`` in y, upward or downward
    depending on ``points_up``.  Returns an (N, 6) hstack of the three
    vertices.
    """
    # Dimension check on the incoming vertex array.
    xassert.runtime_assert(len(left_vert.shape) == 2 and
                           left_vert.shape[1] == 2,
                           'Error: vertex expected to be 2 dimensional.')
    half_side = sw_list / 2.0
    left = np.copy(left_vert)
    right = np.copy(left_vert)
    right[:, 0] += sw_list
    apex = np.copy(left_vert)
    apex[:, 0] += half_side
    apex_rise = half_side * np.sqrt(3)
    if points_up:
        apex[:, 1] += apex_rise
    else:
        apex[:, 1] -= apex_rise
    return np.hstack((left, right, apex))
| [
"numpy.copy",
"numpy.sqrt",
"numpy.hstack",
"numpy.squeeze",
"numpy.vstack"
] | [((807, 838), 'numpy.squeeze', 'np.squeeze', (['mic_snp[up_idx, 2:]'], {}), '(mic_snp[up_idx, 2:])\n', (817, 838), True, 'import numpy as np\n'), ((855, 888), 'numpy.squeeze', 'np.squeeze', (['mic_snp[down_idx, 2:]'], {}), '(mic_snp[down_idx, 2:])\n', (865, 888), True, 'import numpy as np\n'), ((1403, 1421), 'numpy.copy', 'np.copy', (['left_vert'], {}), '(left_vert)\n', (1410, 1421), True, 'import numpy as np\n'), ((1431, 1449), 'numpy.copy', 'np.copy', (['left_vert'], {}), '(left_vert)\n', (1438, 1449), True, 'import numpy as np\n'), ((1484, 1502), 'numpy.copy', 'np.copy', (['left_vert'], {}), '(left_vert)\n', (1491, 1502), True, 'import numpy as np\n'), ((1667, 1690), 'numpy.hstack', 'np.hstack', (['(v1, v2, v3)'], {}), '((v1, v2, v3))\n', (1676, 1690), True, 'import numpy as np\n'), ((605, 637), 'numpy.squeeze', 'np.squeeze', (['mic_snp[up_idx, 0:2]'], {}), '(mic_snp[up_idx, 0:2])\n', (615, 637), True, 'import numpy as np\n'), ((713, 747), 'numpy.squeeze', 'np.squeeze', (['mic_snp[down_idx, 0:2]'], {}), '(mic_snp[down_idx, 0:2])\n', (723, 747), True, 'import numpy as np\n'), ((1009, 1040), 'numpy.vstack', 'np.vstack', (['(up_vert, down_vert)'], {}), '((up_vert, down_vert))\n', (1018, 1040), True, 'import numpy as np\n'), ((1042, 1073), 'numpy.vstack', 'np.vstack', (['(up_data, down_data)'], {}), '((up_data, down_data))\n', (1051, 1073), True, 'import numpy as np\n'), ((1587, 1597), 'numpy.sqrt', 'np.sqrt', (['(3)'], {}), '(3)\n', (1594, 1597), True, 'import numpy as np\n'), ((1644, 1654), 'numpy.sqrt', 'np.sqrt', (['(3)'], {}), '(3)\n', (1651, 1654), True, 'import numpy as np\n')] |
#! /usr/bin/env python
"""Make history files into timeseries"""
import os
import sys
from subprocess import check_call, Popen, PIPE
from glob import glob
import re
import click
import yaml
import tempfile
import logging
import cftime
import xarray as xr
import numpy as np
import globus
from workflow import task_manager as tm
# Module-level logger emitting to stdout.
logger = logging.getLogger(__name__)
logger.setLevel(level=logging.INFO)
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.DEBUG)
logger.addHandler(handler)
# Absolute directory containing this script (used to find cesm_streams.yml
# and the globus.py helper).
script_path = os.path.dirname(os.path.realpath(__file__))
# Default destination root on the campaign-store Globus endpoint.
GLOBUS_CAMPAIGN_PATH = '/gpfs/csfs1/cesm/development/bgcwg/projects/xtFe/cases'
USER = os.environ['USER']
# Default location of CESM short-term archive output for this user.
ARCHIVE_ROOT = f'/glade/scratch/{USER}/archive'
# Batch-system settings for the task manager.
tm.ACCOUNT = '<KEY>'
tm.MAXJOBS = 100
# Open datasets without decoding time/coords so raw time values are kept.
xr_open = dict(decode_times=False, decode_coords=False)
def get_year_filename(file):
    """Return the year encoded in a history file's date segment.

    The date segment is the second-to-last dot-separated piece of the
    filename (e.g. ``0001-01`` in ``case.stream.0001-01.nc``); every
    dash-separated piece must parse as an integer.
    """
    datestr = file.split('.')[-2]
    pieces = [int(piece) for piece in datestr.split('-')]
    return pieces[0]
class file_date(object):
    """Start, end, and midpoint of a history file's time axis.

    Attributes:
        date: cftime datetime at the mean of the file's first/last bounds.
        year, month, day: components of ``date``.
        t0, tf: cftime datetimes at the first and last interval midpoints.
    """

    def __init__(self, file):
        with xr.open_dataset(file, **xr_open) as ds:
            units = ds.time.units
            calendar = ds.time.calendar
            bounds_name = ds.time.bounds
            bounds_dim = ds[bounds_name].dims[-1]
            # First lower bound and last upper bound span the whole file.
            first_edge = ds[bounds_name].isel(**{'time': 0, bounds_dim: 0})
            last_edge = ds[bounds_name].isel(**{'time': -1, bounds_dim: -1})
            self.date = cftime.num2date(np.mean([first_edge, last_edge]),
                                        units=units, calendar=calendar)
            self.year = self.date.year
            self.month = self.date.month
            self.day = self.date.day
            # Midpoint of each time-bounds interval.
            midpoints = cftime.num2date(ds[bounds_name].mean(dim=bounds_dim),
                                        units=units, calendar=calendar)
            self.t0 = midpoints[0]
            self.tf = midpoints[-1]
def get_date_string(files, freq):
    """Return the date-range string for a sorted list of history files.

    The range runs from the first interval midpoint of the first file to
    the last interval midpoint of the last file; the format depends on the
    stream frequency.  Raises ValueError for an unsupported ``freq``.
    """
    start = file_date(files[0])
    end = file_date(files[-1])
    y0, m0, d0 = start.t0.year, start.t0.month, start.t0.day
    y1, m1, d1 = end.tf.year, end.tf.month, end.tf.day
    if freq in ['day_1', 'day_5', 'daily', 'day']:
        return (f'{y0:04d}{m0:02d}{d0:02d}-'
                f'{y1:04d}{m1:02d}{d1:02d}')
    if freq in ['month_1', 'monthly', 'mon']:
        return (f'{y0:04d}{m0:02d}-'
                f'{y1:04d}{m1:02d}')
    if freq in ['year_1', 'yearly', 'year', 'ann']:
        return (f'{y0:04d}-'
                f'{y1:04d}')
    raise ValueError(f'freq: {freq} not implemented')
def get_vars(files):
    """Split a file's variables into time-invariant and time-varying names.

    Inspects only the first file.  The static list always includes 'time'
    and the time-bounds variable so they are carried into every
    single-variable timeseries file.
    """
    with xr.open_dataset(files[0], **xr_open) as ds:
        static_vars = [name for name, var in ds.variables.items()
                       if 'time' not in var.dims]
        static_vars = static_vars + ['time', ds.time.attrs['bounds']]
        time_vars = [name for name, var in ds.variables.items()
                     if 'time' in var.dims and name not in static_vars]
    return static_vars, time_vars
@click.command()
@click.argument('case')
@click.option('--components', default='ocn')
@click.option('--archive-root', default=ARCHIVE_ROOT)
@click.option('--output-root', default=None)
@click.option('--only-streams', default=[])
@click.option('--only-variables', default=None)
@click.option('--campaign-transfer', default=False, is_flag=True)
@click.option('--campaign-path', default=GLOBUS_CAMPAIGN_PATH)
@click.option('--year-groups', default=None)
@click.option('--demo', default=False, is_flag=True)
@click.option('--clobber', default=False, is_flag=True)
def main(case, components=['ocn', 'ice'], archive_root=ARCHIVE_ROOT, output_root=None,
         only_streams=[], only_variables=None, campaign_transfer=False, campaign_path=None,
         year_groups=None, demo=False, clobber=False):
    """Convert CESM history files for CASE into per-variable timeseries.

    For each requested component/stream, concatenates the history files
    into one netCDF file per variable (and per year group) using ncrcat,
    compresses with ncks, and optionally transfers the result to the
    campaign store via Globus.  Jobs are submitted through the
    ``task_manager`` module; with --demo nothing is submitted.

    NOTE(review): the mutable defaults (list literals) in the signature are
    shared across calls; under click they are effectively overridden, but
    this is fragile if main() is called directly.
    """
    droot = os.path.join(archive_root, case)
    # Comma-separated CLI strings are split into lists.
    if isinstance(components, str):
        components = components.split(',')
    if output_root is None:
        droot_out = droot
    else:
        droot_out = os.path.join(output_root, case)
    if campaign_transfer and campaign_path is None:
        raise ValueError('campaign path required')
    # year_groups: None means one all-inclusive group; a string like
    # '1:100,101:200' becomes a list of (start, end) year tuples.
    if year_groups is None:
        year_groups = [(-1e36, 1e36)]
        report_year_groups = False
    elif isinstance(year_groups, str):
        year_groups = year_groups.split(',')
        year_groups = [tuple(int(i) for i in ygi.split(':')) for ygi in year_groups]
        report_year_groups = True
    else:
        raise ValueError('cannot parse year groups')
    if isinstance(only_streams, str):
        only_streams = only_streams.split(',')
    if isinstance(only_variables, str):
        only_variables = only_variables.split(',')
    logger.info('constructing time-series of the following year groups:')
    logger.info(year_groups)
    print()
    # Stream definitions (glob patterns, frequency) live next to the script.
    with open(f'{script_path}/cesm_streams.yml') as f:
        streams = yaml.safe_load(f)
    for component in components:
        print('='*80)
        logger.info(f'working on component: {component}')
        print('='*80)
        for stream, stream_info in streams[component].items():
            if only_streams:
                if stream not in only_streams:
                    continue
            print('-'*80)
            logger.info(f'working on stream: {stream}')
            print('-'*80)
            dateglob = stream_info['dateglob']
            # NOTE(review): dateregex is read but never used below.
            dateregex = stream_info['dateregex']
            freq = stream_info['freq']
            dout = f'{droot_out}/{component}/proc/tseries/{freq}'
            if not os.path.exists(dout):
                os.makedirs(dout, exist_ok=True)
            # set target destination on globus
            globus_file_list = []
            if campaign_transfer:
                campaign_dout = f'{campaign_path}/{case}/{component}/proc/tseries/{freq}'
                globus.makedirs('campaign', campaign_dout)
                globus_file_list = globus.listdir('campaign', campaign_dout)
                logger.info(f'found {len(globus_file_list)} files on campaign.')
            # get input files
            files = sorted(glob(f'{droot}/{component}/hist/{case}.{stream}.{dateglob}.nc'))
            if len(files) == 0:
                logger.warning(f'no files: component={component}, stream={stream}')
                continue
            # get file dates
            files_year = [get_year_filename(f) for f in files]
            # get variable lists
            static_vars, time_vars = get_vars(files)
            if only_variables is not None:
                time_vars = [v for v in time_vars if v in only_variables]
                print(only_variables)
            # NOTE(review): get_vars always appends 'time' and the bounds
            # variable, so static_vars is never empty and this guard looks
            # unreachable -- confirm intent.
            if not static_vars:
                continue
            # make a report
            logger.info(f'found {len(files)} history files')
            logger.info(f'history file years: {min(files_year)}-{max(files_year)}')
            logger.info(f'found {len(time_vars)} variables to process')
            logger.info(f'expecting to generate {len(time_vars) * len(year_groups)} timeseries files')
            for y0, yf in year_groups:
                if report_year_groups:
                    logger.info(f'working on year group {y0}-{yf}')
                # Subset the history files to this year group.
                files_group_i = [f for f, y in zip(files, files_year)
                                 if (y0 <= y) and (y <= yf)]
                # NOTE(review): mkstemp returns an open OS-level file
                # descriptor which is never closed -- 'fid' is immediately
                # rebound by the 'with open(...)' below, leaking one fd per
                # year group.
                fid, tmpfile = tempfile.mkstemp(suffix='.filelist', prefix='tmpfile',
                                                dir=os.environ['TMPDIR'])
                # Write the input-file list consumed by ncrcat via stdin.
                with open(tmpfile,'w') as fid:
                    for i, f in enumerate(files_group_i):
                        fid.write('%s\n'%f)
                # get the date string
                date_cat = get_date_string(files_group_i, freq)
                for i, v in enumerate(time_vars):
                    file_cat_basename = '.'.join([case, stream, v, date_cat, 'nc'])
                    file_cat = os.path.join(dout, file_cat_basename)
                    # Skip outputs that already exist locally or on campaign
                    # unless --clobber was given.
                    if not clobber:
                        if file_cat_basename in globus_file_list:
                            print(f'on campaign: {file_cat_basename}...skipping')
                            continue
                        if os.path.exists(file_cat):
                            print(f'exists: {file_cat_basename}...skipping')
                            continue
                    logger.info(f'creating {file_cat}')
                    # NOTE(review): 'vars' shadows the builtin of the same name.
                    vars = ','.join(static_vars+[v])
                    # Concatenate the listed history files (one variable plus
                    # static fields), then compress to netCDF-4 deflate-1.
                    cat_cmd = [f'cat {tmpfile} | ncrcat -O -h -v {vars} {file_cat}']
                    compress_cmd = [f'ncks -O -4 -L 1 {file_cat} {file_cat}']
                    if not demo:
                        if campaign_transfer:
                            # Transfer to campaign, then remove the local copy
                            # only if the transfer step succeeded.
                            xfr_cmd = [f'{script_path}/globus.py',
                                       '--src-ep=glade --dst-ep=campaign',
                                       '--retry=3',
                                       f'--src-paths={file_cat}',
                                       f'--dst-paths={campaign_dout}/{file_cat_basename}']
                            cleanup_cmd = [f'if [ $? -eq 0 ]; then rm -f {file_cat}; else exit 1; fi']
                        else:
                            xfr_cmd = []
                            cleanup_cmd = []
                        jid = tm.submit([cat_cmd, compress_cmd, xfr_cmd, cleanup_cmd],
                                        modules=['nco'], memory='100GB')
            print()
    # Block until every submitted job chain has finished.
    tm.wait()
if __name__ == '__main__':
main() | [
"logging.getLogger",
"numpy.mean",
"click.argument",
"logging.StreamHandler",
"os.path.exists",
"os.makedirs",
"click.option",
"os.path.join",
"globus.listdir",
"xarray.open_dataset",
"os.path.realpath",
"workflow.task_manager.wait",
"yaml.safe_load",
"workflow.task_manager.submit",
"glo... | [((341, 368), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (358, 368), False, 'import logging\n'), ((415, 448), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stdout'], {}), '(sys.stdout)\n', (436, 448), False, 'import logging\n'), ((3246, 3261), 'click.command', 'click.command', ([], {}), '()\n', (3259, 3261), False, 'import click\n'), ((3263, 3285), 'click.argument', 'click.argument', (['"""case"""'], {}), "('case')\n", (3277, 3285), False, 'import click\n'), ((3287, 3330), 'click.option', 'click.option', (['"""--components"""'], {'default': '"""ocn"""'}), "('--components', default='ocn')\n", (3299, 3330), False, 'import click\n'), ((3332, 3384), 'click.option', 'click.option', (['"""--archive-root"""'], {'default': 'ARCHIVE_ROOT'}), "('--archive-root', default=ARCHIVE_ROOT)\n", (3344, 3384), False, 'import click\n'), ((3386, 3429), 'click.option', 'click.option', (['"""--output-root"""'], {'default': 'None'}), "('--output-root', default=None)\n", (3398, 3429), False, 'import click\n'), ((3431, 3473), 'click.option', 'click.option', (['"""--only-streams"""'], {'default': '[]'}), "('--only-streams', default=[])\n", (3443, 3473), False, 'import click\n'), ((3475, 3521), 'click.option', 'click.option', (['"""--only-variables"""'], {'default': 'None'}), "('--only-variables', default=None)\n", (3487, 3521), False, 'import click\n'), ((3523, 3587), 'click.option', 'click.option', (['"""--campaign-transfer"""'], {'default': '(False)', 'is_flag': '(True)'}), "('--campaign-transfer', default=False, is_flag=True)\n", (3535, 3587), False, 'import click\n'), ((3589, 3650), 'click.option', 'click.option', (['"""--campaign-path"""'], {'default': 'GLOBUS_CAMPAIGN_PATH'}), "('--campaign-path', default=GLOBUS_CAMPAIGN_PATH)\n", (3601, 3650), False, 'import click\n'), ((3652, 3695), 'click.option', 'click.option', (['"""--year-groups"""'], {'default': 'None'}), "('--year-groups', default=None)\n", (3664, 3695), False, 'import 
click\n'), ((3697, 3748), 'click.option', 'click.option', (['"""--demo"""'], {'default': '(False)', 'is_flag': '(True)'}), "('--demo', default=False, is_flag=True)\n", (3709, 3748), False, 'import click\n'), ((3750, 3804), 'click.option', 'click.option', (['"""--clobber"""'], {'default': '(False)', 'is_flag': '(True)'}), "('--clobber', default=False, is_flag=True)\n", (3762, 3804), False, 'import click\n'), ((539, 565), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (555, 565), False, 'import os\n'), ((4053, 4085), 'os.path.join', 'os.path.join', (['archive_root', 'case'], {}), '(archive_root, case)\n', (4065, 4085), False, 'import os\n'), ((9689, 9698), 'workflow.task_manager.wait', 'tm.wait', ([], {}), '()\n', (9696, 9698), True, 'from workflow import task_manager as tm\n'), ((2886, 2922), 'xarray.open_dataset', 'xr.open_dataset', (['files[0]'], {}), '(files[0], **xr_open)\n', (2901, 2922), True, 'import xarray as xr\n'), ((4250, 4281), 'os.path.join', 'os.path.join', (['output_root', 'case'], {}), '(output_root, case)\n', (4262, 4281), False, 'import os\n'), ((5124, 5141), 'yaml.safe_load', 'yaml.safe_load', (['f'], {}), '(f)\n', (5138, 5141), False, 'import yaml\n'), ((1164, 1196), 'xarray.open_dataset', 'xr.open_dataset', (['file'], {}), '(file, **xr_open)\n', (1179, 1196), True, 'import xarray as xr\n'), ((1506, 1523), 'numpy.mean', 'np.mean', (['[t0, tf]'], {}), '([t0, tf])\n', (1513, 1523), True, 'import numpy as np\n'), ((5779, 5799), 'os.path.exists', 'os.path.exists', (['dout'], {}), '(dout)\n', (5793, 5799), False, 'import os\n'), ((5817, 5849), 'os.makedirs', 'os.makedirs', (['dout'], {'exist_ok': '(True)'}), '(dout, exist_ok=True)\n', (5828, 5849), False, 'import os\n'), ((6072, 6114), 'globus.makedirs', 'globus.makedirs', (['"""campaign"""', 'campaign_dout'], {}), "('campaign', campaign_dout)\n", (6087, 6114), False, 'import globus\n'), ((6150, 6191), 'globus.listdir', 'globus.listdir', (['"""campaign"""', 
'campaign_dout'], {}), "('campaign', campaign_dout)\n", (6164, 6191), False, 'import globus\n'), ((6331, 6394), 'glob.glob', 'glob', (['f"""{droot}/{component}/hist/{case}.{stream}.{dateglob}.nc"""'], {}), "(f'{droot}/{component}/hist/{case}.{stream}.{dateglob}.nc')\n", (6335, 6394), False, 'from glob import glob\n'), ((7598, 7683), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {'suffix': '""".filelist"""', 'prefix': '"""tmpfile"""', 'dir': "os.environ['TMPDIR']"}), "(suffix='.filelist', prefix='tmpfile', dir=os.environ['TMPDIR']\n )\n", (7614, 7683), False, 'import tempfile\n'), ((8146, 8183), 'os.path.join', 'os.path.join', (['dout', 'file_cat_basename'], {}), '(dout, file_cat_basename)\n', (8158, 8183), False, 'import os\n'), ((8433, 8457), 'os.path.exists', 'os.path.exists', (['file_cat'], {}), '(file_cat)\n', (8447, 8457), False, 'import os\n'), ((9528, 9621), 'workflow.task_manager.submit', 'tm.submit', (['[cat_cmd, compress_cmd, xfr_cmd, cleanup_cmd]'], {'modules': "['nco']", 'memory': '"""100GB"""'}), "([cat_cmd, compress_cmd, xfr_cmd, cleanup_cmd], modules=['nco'],\n memory='100GB')\n", (9537, 9621), True, 'from workflow import task_manager as tm\n')] |
"""
SimpleMDM Functions
Functions for retrieving device data and assigning device groups in
SimpleMDM using the SimpleMDM API.
Author: <NAME>
Created: 04/05/18
Updated: 10/19/18
"""
import json
try:
import requests
except ModuleNotFoundError:
from botocore.vendored import requests
from utils import *
# Functions
def get_device_info(device_id, api_key, function_log):
    """Fetch a single device record from the SimpleMDM API.

    Records the outcome on ``function_log`` and returns the device's
    'data' payload, or None when the API call does not return 200.
    """
    action_log = ActionLog(
        "get_device_info",
        {"device_id": device_id}
    )
    url = 'https://a.simplemdm.com/api/v1/devices/' + str(device_id)
    response = requests.get(url, auth = (api_key, ''))
    if response.status_code == 200:
        device_info = json.loads(response.text)['data']
        action_log.set_status("success", {"device_info": device_info})
    else:
        device_info = None
        action_log.set_status(
            "failure",
            {
                "action": "api_call",
                "type": "get_devices",
                "code": response.status_code
            }
        )
    function_log.log_action(action_log.output())
    return device_info
def assign_device_group(device_id, group_name, api_key, function_log):
    """Assign a device to a SimpleMDM device group by group name.

    Looks up the device group via the SimpleMDM API and, if found, assigns
    the device to it.  Every outcome (success, failed API call, group not
    found) is recorded on ``function_log``.

    Args:
        device_id: SimpleMDM device id to assign.
        group_name: Display name of the target device group.
        api_key: SimpleMDM API key (HTTP basic auth user, empty password).
        function_log: Log object collecting ActionLog entries.
    """
    action_log = ActionLog(
        "assign_device_group",
        {
            "device_id": device_id,
            "assign_group": group_name
        }
    )
    api_call = requests.get(
        'https://a.simplemdm.com/api/v1/device_groups',
        auth = (api_key, '')
    )
    if api_call.status_code != 200:
        # Could not even list the device groups.
        action_log.set_status(
            "failure",
            {
                "action": "get_device_groups_api_call",
                "code": api_call.status_code
            }
        )
        # Bug fix: this path previously logged via the undefined name
        # 'act_log', raising NameError instead of recording the failure.
        function_log.log_action(action_log.output())
        return
    data = json.loads(api_call.text)['data']
    for group in data:
        if group['attributes']['name'] != group_name:
            continue
        group_id = group['id']
        api_url = ('https://a.simplemdm.com/api/v1/device_groups/' + str(group_id) + '/devices/' + str(device_id))
        assign_device_call = requests.post(api_url, auth = (api_key, ''))
        if assign_device_call.status_code == 204:
            action_log.set_status("success")
        else:
            # Bug fix: log the status code of the failed POST, not the
            # (successful) group-listing GET.
            action_log.set_status(
                "failure",
                {
                    "action": "api_call_set_group",
                    "code": assign_device_call.status_code
                }
            )
        function_log.log_action(action_log.output())
        return
    # No group with the requested name exists in the account.
    action_log.set_status(
        "failure",
        "GroupNotFound"
    )
    function_log.log_action(action_log.output())
    return
if __name__ == "__main__":
    # Library module; no command-line entry point.
    pass
| [
"botocore.vendored.requests.get",
"json.loads",
"botocore.vendored.requests.post"
] | [((1462, 1547), 'botocore.vendored.requests.get', 'requests.get', (['"""https://a.simplemdm.com/api/v1/device_groups"""'], {'auth': "(api_key, '')"}), "('https://a.simplemdm.com/api/v1/device_groups', auth=(api_key, '')\n )\n", (1474, 1547), False, 'from botocore.vendored import requests\n'), ((976, 1001), 'json.loads', 'json.loads', (['api_call.text'], {}), '(api_call.text)\n', (986, 1001), False, 'import json\n'), ((1619, 1644), 'json.loads', 'json.loads', (['api_call.text'], {}), '(api_call.text)\n', (1629, 1644), False, 'import json\n'), ((1938, 1980), 'botocore.vendored.requests.post', 'requests.post', (['api_url'], {'auth': "(api_key, '')"}), "(api_url, auth=(api_key, ''))\n", (1951, 1980), False, 'from botocore.vendored import requests\n')] |
# Generated by Django 3.1 on 2020-10-02 17:52
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: adds the TeacherCourses join model (unmanaged,
    # backed by the existing 'teacher_courses' table) plus two fields.

    dependencies = [
        ('school', '0016_auto_20201002_1315'),
    ]
    operations = [
        migrations.CreateModel(
            name='TeacherCourses',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
            ],
            options={
                # managed=False: Django will not create/alter this table;
                # it maps onto the pre-existing 'teacher_courses' table.
                'db_table': 'teacher_courses',
                'managed': False,
            },
        ),
        # New optional keyword field on Teacher.
        migrations.AddField(
            model_name='teacher',
            name='keyword',
            field=models.CharField(blank=True, db_column='keyword', default='', max_length=255, null=True, verbose_name='关键词'),
        ),
        # Course<->Teacher M2M routed through the unmanaged join model above.
        migrations.AddField(
            model_name='course',
            name='teachers',
            field=models.ManyToManyField(through='school.TeacherCourses', to='school.Teacher'),
        ),
    ]
| [
"django.db.models.AutoField",
"django.db.models.ManyToManyField",
"django.db.models.CharField"
] | [((680, 793), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'db_column': '"""keyword"""', 'default': '""""""', 'max_length': '(255)', 'null': '(True)', 'verbose_name': '"""关键词"""'}), "(blank=True, db_column='keyword', default='', max_length=\n 255, null=True, verbose_name='关键词')\n", (696, 793), False, 'from django.db import migrations, models\n'), ((910, 986), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'through': '"""school.TeacherCourses"""', 'to': '"""school.Teacher"""'}), "(through='school.TeacherCourses', to='school.Teacher')\n", (932, 986), False, 'from django.db import migrations, models\n'), ((335, 428), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (351, 428), False, 'from django.db import migrations, models\n')] |
# -*- coding: utf-8 -*-
from odoo import models, fields, api
import logging
_logger = logging.getLogger(__name__)
class MaintenanceOperation(models.Model):
    """A recurring maintenance operation defined for a piece of equipment."""
    _name = 'maintenance_planning.operation'
    _description = 'Maintenance operation'
    name = fields.Char('Name', required=True)
    # Ordering handle for list views.
    sequence = fields.Integer(string='Sequence', default=10)
    maintenance_duration = fields.Float(help="Maintenance Duration in hours.")
    # Recurrence interval, stored as a string number of days.
    periodicity = fields.Selection([('7', 'Week'), ('15', '2 Weeks'), ('30', 'Month'),
                                    ('60', 'Two months'), ('90', 'Three months'),
                                    ('180', 'Six months'),('365', 'Year')],
                                   string='Periodicity', required=True)
    # Deleting the equipment deletes its operations (ondelete='cascade').
    equipment_id = fields.Many2one('maintenance.equipment', string='Equipment',
                                   ondelete='cascade', index=True, required=True)
    # required_material_ids = fields.One2many('maintenance_planning.required_material',
    #                                         'operation_id', ondelete='cascade',
    #                                         help='Required material for maintenance operation')
    note = fields.Text('Note')
class MaintenancePlanningEquipement(models.Model):
    """Extend maintenance.equipment with its planned maintenance operations."""
    _inherit = 'maintenance.equipment'
    # NOTE(review): 'ondelete' is a Many2one attribute; on a One2many it is
    # not what drives deletion (that comes from the inverse Many2one on
    # maintenance_planning.operation) -- confirm intent.
    maintenance_operation_ids = fields.One2many('maintenance_planning.operation',
                                    'equipment_id', ondelete='cascade',
                                    help='Related maintenance operation')
class MaintenanceRequestMaterialLine(models.Model):
    """Material line required to carry out a maintenance request."""
    _name = 'maintenance_planning.required_material'
    _description = 'Required material for maintenance'
    # Ordering handle for list views.
    sequence = fields.Integer(string='Sequence', default=10)
    request_id = fields.Many2one('maintenance.request', string='Request',
                                 ondelete='cascade', index=True)
    # Only stockable/consumable products may be picked as material.
    product_id = fields.Many2one('product.product', 'Material',
                                 domain="[('type', 'in', ['product', 'consu'])]",
                                 required=True)
    qty_required = fields.Integer('Quantity')
    comment = fields.Char('Comment')
    # Stock on hand minus the required quantity -- the remaining margin,
    # not the raw on-hand quantity, despite the field name.
    qty_available_today = fields.Float(compute='_compute_qty_at_date')
    # True when the remaining margin is zero or negative.
    forecasted_issue = fields.Boolean(compute='_compute_qty_at_date')

    @api.depends('product_id', 'qty_required')
    def _compute_qty_at_date(self):
        """Compute the remaining stock margin and flag forecasted shortages."""
        for line in self:
            line.qty_available_today = line.product_id.qty_available - line.qty_required
            # Idiom fix: assign the boolean expression directly instead of
            # the redundant 'True if ... else False'.
            line.forecasted_issue = line.qty_available_today <= 0
class MaintenancePlanningRequest(models.Model):
    """Extend maintenance.request with operation and required-material links."""
    _inherit = 'maintenance.request'
    maintenance_operation_id = fields.Many2one('maintenance_planning.operation', 'Operation',
                                    help='Related maintenance operation')
    # NOTE(review): 'ondelete' on a One2many does not drive deletion; the
    # cascade comes from request_id's ondelete on the inverse model --
    # confirm intent.
    required_material_ids = fields.One2many('maintenance_planning.required_material',
                                    'request_id', ondelete='cascade',
                                    help='Required material for maintenance operation')
| [
"logging.getLogger",
"odoo.fields.Float",
"odoo.fields.Many2one",
"odoo.api.depends",
"odoo.fields.Integer",
"odoo.fields.One2many",
"odoo.fields.Text",
"odoo.fields.Selection",
"odoo.fields.Char",
"odoo.fields.Boolean"
] | [((88, 115), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (105, 115), False, 'import logging\n'), ((259, 293), 'odoo.fields.Char', 'fields.Char', (['"""Name"""'], {'required': '(True)'}), "('Name', required=True)\n", (270, 293), False, 'from odoo import models, fields, api\n'), ((309, 354), 'odoo.fields.Integer', 'fields.Integer', ([], {'string': '"""Sequence"""', 'default': '(10)'}), "(string='Sequence', default=10)\n", (323, 354), False, 'from odoo import models, fields, api\n'), ((382, 433), 'odoo.fields.Float', 'fields.Float', ([], {'help': '"""Maintenance Duration in hours."""'}), "(help='Maintenance Duration in hours.')\n", (394, 433), False, 'from odoo import models, fields, api\n'), ((453, 653), 'odoo.fields.Selection', 'fields.Selection', (["[('7', 'Week'), ('15', '2 Weeks'), ('30', 'Month'), ('60', 'Two months'), (\n '90', 'Three months'), ('180', 'Six months'), ('365', 'Year')]"], {'string': '"""Periodicity"""', 'required': '(True)'}), "([('7', 'Week'), ('15', '2 Weeks'), ('30', 'Month'), ('60',\n 'Two months'), ('90', 'Three months'), ('180', 'Six months'), ('365',\n 'Year')], string='Periodicity', required=True)\n", (469, 653), False, 'from odoo import models, fields, api\n'), ((776, 888), 'odoo.fields.Many2one', 'fields.Many2one', (['"""maintenance.equipment"""'], {'string': '"""Equipment"""', 'ondelete': '"""cascade"""', 'index': '(True)', 'required': '(True)'}), "('maintenance.equipment', string='Equipment', ondelete=\n 'cascade', index=True, required=True)\n", (791, 888), False, 'from odoo import models, fields, api\n'), ((1202, 1221), 'odoo.fields.Text', 'fields.Text', (['"""Note"""'], {}), "('Note')\n", (1213, 1221), False, 'from odoo import models, fields, api\n'), ((1347, 1475), 'odoo.fields.One2many', 'fields.One2many', (['"""maintenance_planning.operation"""', '"""equipment_id"""'], {'ondelete': '"""cascade"""', 'help': '"""Related maintenance operation"""'}), "('maintenance_planning.operation', 
'equipment_id', ondelete=\n 'cascade', help='Related maintenance operation')\n", (1362, 1475), False, 'from odoo import models, fields, api\n'), ((1739, 1784), 'odoo.fields.Integer', 'fields.Integer', ([], {'string': '"""Sequence"""', 'default': '(10)'}), "(string='Sequence', default=10)\n", (1753, 1784), False, 'from odoo import models, fields, api\n'), ((1802, 1894), 'odoo.fields.Many2one', 'fields.Many2one', (['"""maintenance.request"""'], {'string': '"""Request"""', 'ondelete': '"""cascade"""', 'index': '(True)'}), "('maintenance.request', string='Request', ondelete='cascade',\n index=True)\n", (1817, 1894), False, 'from odoo import models, fields, api\n'), ((1941, 2056), 'odoo.fields.Many2one', 'fields.Many2one', (['"""product.product"""', '"""Material"""'], {'domain': '"""[(\'type\', \'in\', [\'product\', \'consu\'])]"""', 'required': '(True)'}), '(\'product.product\', \'Material\', domain=\n "[(\'type\', \'in\', [\'product\', \'consu\'])]", required=True)\n', (1956, 2056), False, 'from odoo import models, fields, api\n'), ((2137, 2163), 'odoo.fields.Integer', 'fields.Integer', (['"""Quantity"""'], {}), "('Quantity')\n", (2151, 2163), False, 'from odoo import models, fields, api\n'), ((2178, 2200), 'odoo.fields.Char', 'fields.Char', (['"""Comment"""'], {}), "('Comment')\n", (2189, 2200), False, 'from odoo import models, fields, api\n'), ((2229, 2273), 'odoo.fields.Float', 'fields.Float', ([], {'compute': '"""_compute_qty_at_date"""'}), "(compute='_compute_qty_at_date')\n", (2241, 2273), False, 'from odoo import models, fields, api\n'), ((2297, 2343), 'odoo.fields.Boolean', 'fields.Boolean', ([], {'compute': '"""_compute_qty_at_date"""'}), "(compute='_compute_qty_at_date')\n", (2311, 2343), False, 'from odoo import models, fields, api\n'), ((2350, 2391), 'odoo.api.depends', 'api.depends', (['"""product_id"""', '"""qty_required"""'], {}), "('product_id', 'qty_required')\n", (2361, 2391), False, 'from odoo import models, fields, api\n'), ((2746, 2851), 
'odoo.fields.Many2one', 'fields.Many2one', (['"""maintenance_planning.operation"""', '"""Operation"""'], {'help': '"""Related maintenance operation"""'}), "('maintenance_planning.operation', 'Operation', help=\n 'Related maintenance operation')\n", (2761, 2851), False, 'from odoo import models, fields, api\n'), ((2924, 3071), 'odoo.fields.One2many', 'fields.One2many', (['"""maintenance_planning.required_material"""', '"""request_id"""'], {'ondelete': '"""cascade"""', 'help': '"""Required material for maintenance operation"""'}), "('maintenance_planning.required_material', 'request_id',\n ondelete='cascade', help='Required material for maintenance operation')\n", (2939, 3071), False, 'from odoo import models, fields, api\n')] |
import datetime
import logging
from subprocess import Popen
from time import sleep
from unittest import expectedFailure, skip
import os
from django.test import TransactionTestCase
from django.utils.timezone import now
from random import randint
from django_clickhouse.database import connections
from django_clickhouse.migrations import migrate_app
from django_clickhouse.utils import int_ranges
from tests.clickhouse_models import ClickHouseTestModel, ClickHouseCollapseTestModel, ClickHouseMultiTestModel
from tests.models import TestModel
# Module-level logger shared by all test cases in this module.
logger = logging.getLogger('django-clickhouse')
class SyncTest(TransactionTestCase):
    """Checks that changes to relational models are synced into the
    corresponding ClickHouse models via the intermediate storage."""

    def setUp(self):
        # Recreate the ClickHouse database so every test starts from scratch.
        self.db = ClickHouseCollapseTestModel.get_database()
        self.db.drop_database()
        self.db.create_database()
        migrate_app('tests', 'default')
        # Drop operations queued by previous tests from the intermediate storage.
        ClickHouseTestModel.get_storage().flush()

    def _create_obj(self):
        """Create and return a relational TestModel with value=1 and current timestamps."""
        return TestModel.objects.create(value=1, created=datetime.datetime.now(),
                                        created_date=datetime.date.today())

    def test_simple(self):
        obj = self._create_obj()
        ClickHouseTestModel.sync_batch_from_storage()

        synced_data = list(ClickHouseTestModel.objects.all())
        self.assertEqual(1, len(synced_data))
        self.assertEqual(obj.created_date, synced_data[0].created_date)
        self.assertEqual(obj.value, synced_data[0].value)
        self.assertEqual(obj.id, synced_data[0].id)

    def _check_collapsing_update(self):
        """Shared body of the collapsing update tests: an insert plus two updates
        must end up as a single row holding the latest value."""
        obj = self._create_obj()
        obj.value = 2
        obj.save()
        ClickHouseCollapseTestModel.sync_batch_from_storage()

        # Insert and update came before sync. Only one item will be inserted.
        synced_data = list(ClickHouseCollapseTestModel.objects.all())
        self.assertEqual(1, len(synced_data))
        self.assertEqual(obj.value, synced_data[0].value)
        self.assertEqual(obj.id, synced_data[0].id)

        obj.value = 3
        obj.save()
        ClickHouseCollapseTestModel.sync_batch_from_storage()

        # FINAL collapses duplicate rows, leaving the most recent state.
        synced_data = list(self.db.select('SELECT * FROM $table FINAL',
                                          model_class=ClickHouseCollapseTestModel))
        self.assertGreaterEqual(len(synced_data), 1)
        self.assertEqual(obj.value, synced_data[0].value)
        self.assertEqual(obj.id, synced_data[0].id)

    def test_collapsing_update_by_final(self):
        self._check_collapsing_update()

    def test_collapsing_update_by_version(self):
        # BUG FIX: engine.version_col is shared class-level state. The original
        # reset it only after the assertions, so a failing assertion leaked
        # 'version' into all subsequent tests. Save and restore it in a finally
        # block instead of resetting to a hard-coded None.
        old_version_col = ClickHouseCollapseTestModel.engine.version_col
        ClickHouseCollapseTestModel.engine.version_col = 'version'
        try:
            self._check_collapsing_update()
        finally:
            ClickHouseCollapseTestModel.engine.version_col = old_version_col

    @expectedFailure
    def test_collapsing_delete(self):
        obj = TestModel.objects.create(value=1, created_date=datetime.date.today())
        ClickHouseCollapseTestModel.sync_batch_from_storage()
        obj.delete()
        ClickHouseCollapseTestModel.sync_batch_from_storage()

        # sync_batch_from_storage uses FINAL, so data would be collapsed by now.
        synced_data = list(ClickHouseCollapseTestModel.objects.all())
        self.assertEqual(0, len(synced_data))

    def test_multi_model(self):
        obj = self._create_obj()
        obj.value = 2
        obj.save()
        ClickHouseMultiTestModel.sync_batch_from_storage()

        synced_data = list(ClickHouseTestModel.objects.all())
        self.assertEqual(1, len(synced_data))
        self.assertEqual(obj.created_date, synced_data[0].created_date)
        self.assertEqual(obj.value, synced_data[0].value)
        self.assertEqual(obj.id, synced_data[0].id)

        # sync_batch_from_storage uses FINAL, so data would be collapsed by now.
        synced_data = list(ClickHouseCollapseTestModel.objects.all())
        self.assertEqual(1, len(synced_data))
        self.assertEqual(obj.value, synced_data[0].value)
        self.assertEqual(obj.id, synced_data[0].id)

        obj.value = 3
        obj.save()
        ClickHouseMultiTestModel.sync_batch_from_storage()

        synced_data = list(self.db.select('SELECT * FROM $table FINAL',
                                          model_class=ClickHouseCollapseTestModel))
        self.assertGreaterEqual(len(synced_data), 1)
        self.assertEqual(obj.value, synced_data[0].value)
        self.assertEqual(obj.id, synced_data[0].id)
class KillTest(TransactionTestCase):
    """Checks that no data is lost when the sync subprocess is killed at
    random moments while creator/updater subprocesses keep writing.

    After TEST_TIME seconds of concurrent load and random kills, ClickHouse
    must contain exactly the data present in the relational database.
    """
    # Duration (seconds) for which the create/update subprocesses run.
    TEST_TIME = 60
    maxDiff = None

    def setUp(self):
        # Start from an empty intermediate storage and a fresh ClickHouse database.
        ClickHouseTestModel.get_storage().flush()
        connections['default'].drop_database()
        connections['default'].create_database()
        migrate_app('tests', 'default')

        # Disable sync for models this test is not interested in.
        ClickHouseMultiTestModel.sync_enabled = False
        ClickHouseTestModel.sync_enabled = False

    def tearDown(self):
        # Re-enable sync for the models disabled in setUp.
        ClickHouseMultiTestModel.sync_enabled = True
        ClickHouseTestModel.sync_enabled = True

    def _check_data(self):
        """Finish syncing any pending operations, then compare ClickHouse rows
        against the relational database row by row."""
        logger.debug('django-clickhouse: syncing left test data')

        # Sync all data that is not synced.
        # Data is expected to be in test_db, not default. So we need to call subprocess
        # in order everything works correctly.
        import_key = ClickHouseCollapseTestModel.get_import_key()
        storage = ClickHouseCollapseTestModel.get_storage()
        sync_left = storage.operations_count(import_key)
        while sync_left:
            logger.debug('django-clickhouse: final sync (%d left)' % sync_left)
            self.sync_iteration(False)
            sync_left = storage.operations_count(import_key)

        logger.debug('django_clickhouse: sync finished')

        # FINAL collapses duplicate rows before the comparison.
        ch_data = list(connections['default'].select_tuples('SELECT * FROM $table FINAL ORDER BY id',
                                                             model_class=ClickHouseCollapseTestModel))
        logger.debug('django_clickhouse: got clickhouse data')
        pg_data = list(TestModel.objects.all().order_by('id'))
        logger.debug('django_clickhouse: got postgres data')

        if len(pg_data) != len(ch_data):
            # Log which id ranges are missing to ease debugging of lost data.
            absent_ids = set(item.id for item in pg_data) - set(item.id for item in ch_data)
            logger.debug('django_clickhouse: absent ranges: %s (min: %d, max: %d)'
                         % (','.join(('(%d, %d)' % r) for r in int_ranges(absent_ids)),
                            min(item.id for item in pg_data), max(item.id for item in pg_data)))

        self.assertEqual(len(pg_data), len(ch_data))
        for pg_item, ch_item in zip(pg_data, ch_data):
            self.assertEqual(ch_item.id, pg_item.id)
            self.assertEqual(ch_item.value, pg_item.value)

    @classmethod
    def sync_iteration(cls, kill=True):
        """Spawn one sync subprocess.

        :param kill: if True, kill the subprocess after a random 0-5 s delay
            (simulating a crash); otherwise run a single sync and wait for it.
        """
        test_script = os.path.join(os.path.dirname(__file__), 'kill_test_sub_process.py')
        if kill:
            args = ['--test-time', str(cls.TEST_TIME)]
        else:
            args = ['--once', 'true']

        p_sync = Popen(['python3', test_script, 'sync'] + args)

        if kill:
            sleep(randint(0, 5))
            logger.debug('django-clickhouse: test killing: %d' % p_sync.pid)
            p_sync.kill()
        else:
            p_sync.wait()

    def test_kills(self):
        test_script = os.path.join(os.path.dirname(__file__), 'kill_test_sub_process.py')
        p_create = Popen(['python3', test_script, 'create', '--test-time', str(self.TEST_TIME)])

        # Updates must be slower than inserts, or they will do nothing.
        p_update = Popen(['python3', test_script, 'update', '--test-time', str(self.TEST_TIME), '--batch-size', '500'])

        # Keep killing and restarting the sync process while writers run.
        start = now()
        while (now() - start).total_seconds() < self.TEST_TIME:
            self.sync_iteration()

        p_create.wait()
        p_update.wait()

        self._check_data()
# Used to profile sync execution time. Disabled by default
@skip
class ProfileTest(TransactionTestCase):
    """Measures how long syncing a large batch from storage takes.

    Skipped by default; remove the ``skip`` decorator to profile locally.
    """
    BATCH_SIZE = 10000

    def setUp(self):
        ClickHouseTestModel.get_storage().flush()

        connections['default'].drop_database()
        connections['default'].create_database()
        migrate_app('tests', 'default')

        # Keep models we are not profiling out of the sync.
        ClickHouseMultiTestModel.sync_enabled = False
        ClickHouseTestModel.sync_enabled = False

        # Seed BATCH_SIZE relational rows to be synced by the test.
        batch = []
        for value in range(self.BATCH_SIZE):
            batch.append(TestModel(created=datetime.datetime.now(),
                                   created_date='2018-01-01', value=value))
        TestModel.objects.bulk_create(batch)

    def tearDown(self):
        # Restore the sync flags changed in setUp.
        ClickHouseMultiTestModel.sync_enabled = True
        ClickHouseTestModel.sync_enabled = True

    def test_sync(self):
        ClickHouseCollapseTestModel.sync_batch_size = self.BATCH_SIZE
        ClickHouseCollapseTestModel.sync_batch_from_storage()
| [
"logging.getLogger",
"tests.clickhouse_models.ClickHouseCollapseTestModel.get_import_key",
"subprocess.Popen",
"tests.clickhouse_models.ClickHouseTestModel.sync_batch_from_storage",
"django.utils.timezone.now",
"tests.clickhouse_models.ClickHouseMultiTestModel.sync_batch_from_storage",
"random.randint",... | [((554, 592), 'logging.getLogger', 'logging.getLogger', (['"""django-clickhouse"""'], {}), "('django-clickhouse')\n", (571, 592), False, 'import logging\n'), ((671, 713), 'tests.clickhouse_models.ClickHouseCollapseTestModel.get_database', 'ClickHouseCollapseTestModel.get_database', ([], {}), '()\n', (711, 713), False, 'from tests.clickhouse_models import ClickHouseTestModel, ClickHouseCollapseTestModel, ClickHouseMultiTestModel\n'), ((788, 819), 'django_clickhouse.migrations.migrate_app', 'migrate_app', (['"""tests"""', '"""default"""'], {}), "('tests', 'default')\n", (799, 819), False, 'from django_clickhouse.migrations import migrate_app\n'), ((1023, 1068), 'tests.clickhouse_models.ClickHouseTestModel.sync_batch_from_storage', 'ClickHouseTestModel.sync_batch_from_storage', ([], {}), '()\n', (1066, 1068), False, 'from tests.clickhouse_models import ClickHouseTestModel, ClickHouseCollapseTestModel, ClickHouseMultiTestModel\n'), ((1574, 1627), 'tests.clickhouse_models.ClickHouseCollapseTestModel.sync_batch_from_storage', 'ClickHouseCollapseTestModel.sync_batch_from_storage', ([], {}), '()\n', (1625, 1627), False, 'from tests.clickhouse_models import ClickHouseTestModel, ClickHouseCollapseTestModel, ClickHouseMultiTestModel\n'), ((1982, 2035), 'tests.clickhouse_models.ClickHouseCollapseTestModel.sync_batch_from_storage', 'ClickHouseCollapseTestModel.sync_batch_from_storage', ([], {}), '()\n', (2033, 2035), False, 'from tests.clickhouse_models import ClickHouseTestModel, ClickHouseCollapseTestModel, ClickHouseMultiTestModel\n'), ((2598, 2651), 'tests.clickhouse_models.ClickHouseCollapseTestModel.sync_batch_from_storage', 'ClickHouseCollapseTestModel.sync_batch_from_storage', ([], {}), '()\n', (2649, 2651), False, 'from tests.clickhouse_models import ClickHouseTestModel, ClickHouseCollapseTestModel, ClickHouseMultiTestModel\n'), ((3006, 3059), 'tests.clickhouse_models.ClickHouseCollapseTestModel.sync_batch_from_storage', 
'ClickHouseCollapseTestModel.sync_batch_from_storage', ([], {}), '()\n', (3057, 3059), False, 'from tests.clickhouse_models import ClickHouseTestModel, ClickHouseCollapseTestModel, ClickHouseMultiTestModel\n'), ((3553, 3606), 'tests.clickhouse_models.ClickHouseCollapseTestModel.sync_batch_from_storage', 'ClickHouseCollapseTestModel.sync_batch_from_storage', ([], {}), '()\n', (3604, 3606), False, 'from tests.clickhouse_models import ClickHouseTestModel, ClickHouseCollapseTestModel, ClickHouseMultiTestModel\n'), ((3636, 3689), 'tests.clickhouse_models.ClickHouseCollapseTestModel.sync_batch_from_storage', 'ClickHouseCollapseTestModel.sync_batch_from_storage', ([], {}), '()\n', (3687, 3689), False, 'from tests.clickhouse_models import ClickHouseTestModel, ClickHouseCollapseTestModel, ClickHouseMultiTestModel\n'), ((4086, 4136), 'tests.clickhouse_models.ClickHouseMultiTestModel.sync_batch_from_storage', 'ClickHouseMultiTestModel.sync_batch_from_storage', ([], {}), '()\n', (4134, 4136), False, 'from tests.clickhouse_models import ClickHouseTestModel, ClickHouseCollapseTestModel, ClickHouseMultiTestModel\n'), ((4785, 4835), 'tests.clickhouse_models.ClickHouseMultiTestModel.sync_batch_from_storage', 'ClickHouseMultiTestModel.sync_batch_from_storage', ([], {}), '()\n', (4833, 4835), False, 'from tests.clickhouse_models import ClickHouseTestModel, ClickHouseCollapseTestModel, ClickHouseMultiTestModel\n'), ((5367, 5398), 'django_clickhouse.migrations.migrate_app', 'migrate_app', (['"""tests"""', '"""default"""'], {}), "('tests', 'default')\n", (5378, 5398), False, 'from django_clickhouse.migrations import migrate_app\n'), ((6022, 6066), 'tests.clickhouse_models.ClickHouseCollapseTestModel.get_import_key', 'ClickHouseCollapseTestModel.get_import_key', ([], {}), '()\n', (6064, 6066), False, 'from tests.clickhouse_models import ClickHouseTestModel, ClickHouseCollapseTestModel, ClickHouseMultiTestModel\n'), ((6085, 6126), 
'tests.clickhouse_models.ClickHouseCollapseTestModel.get_storage', 'ClickHouseCollapseTestModel.get_storage', ([], {}), '()\n', (6124, 6126), False, 'from tests.clickhouse_models import ClickHouseTestModel, ClickHouseCollapseTestModel, ClickHouseMultiTestModel\n'), ((7753, 7799), 'subprocess.Popen', 'Popen', (["(['python3', test_script, 'sync'] + args)"], {}), "(['python3', test_script, 'sync'] + args)\n", (7758, 7799), False, 'from subprocess import Popen\n'), ((8417, 8422), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (8420, 8422), False, 'from django.utils.timezone import now\n'), ((8904, 8935), 'django_clickhouse.migrations.migrate_app', 'migrate_app', (['"""tests"""', '"""default"""'], {}), "('tests', 'default')\n", (8915, 8935), False, 'from django_clickhouse.migrations import migrate_app\n'), ((9557, 9610), 'tests.clickhouse_models.ClickHouseCollapseTestModel.sync_batch_from_storage', 'ClickHouseCollapseTestModel.sync_batch_from_storage', ([], {}), '()\n', (9608, 9610), False, 'from tests.clickhouse_models import ClickHouseTestModel, ClickHouseCollapseTestModel, ClickHouseMultiTestModel\n'), ((1097, 1130), 'tests.clickhouse_models.ClickHouseTestModel.objects.all', 'ClickHouseTestModel.objects.all', ([], {}), '()\n', (1128, 1130), False, 'from tests.clickhouse_models import ClickHouseTestModel, ClickHouseCollapseTestModel, ClickHouseMultiTestModel\n'), ((1733, 1774), 'tests.clickhouse_models.ClickHouseCollapseTestModel.objects.all', 'ClickHouseCollapseTestModel.objects.all', ([], {}), '()\n', (1772, 1774), False, 'from tests.clickhouse_models import ClickHouseTestModel, ClickHouseCollapseTestModel, ClickHouseMultiTestModel\n'), ((2757, 2798), 'tests.clickhouse_models.ClickHouseCollapseTestModel.objects.all', 'ClickHouseCollapseTestModel.objects.all', ([], {}), '()\n', (2796, 2798), False, 'from tests.clickhouse_models import ClickHouseTestModel, ClickHouseCollapseTestModel, ClickHouseMultiTestModel\n'), ((3798, 3839), 
'tests.clickhouse_models.ClickHouseCollapseTestModel.objects.all', 'ClickHouseCollapseTestModel.objects.all', ([], {}), '()\n', (3837, 3839), False, 'from tests.clickhouse_models import ClickHouseTestModel, ClickHouseCollapseTestModel, ClickHouseMultiTestModel\n'), ((4165, 4198), 'tests.clickhouse_models.ClickHouseTestModel.objects.all', 'ClickHouseTestModel.objects.all', ([], {}), '()\n', (4196, 4198), False, 'from tests.clickhouse_models import ClickHouseTestModel, ClickHouseCollapseTestModel, ClickHouseMultiTestModel\n'), ((4536, 4577), 'tests.clickhouse_models.ClickHouseCollapseTestModel.objects.all', 'ClickHouseCollapseTestModel.objects.all', ([], {}), '()\n', (4575, 4577), False, 'from tests.clickhouse_models import ClickHouseTestModel, ClickHouseCollapseTestModel, ClickHouseMultiTestModel\n'), ((7557, 7582), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (7572, 7582), False, 'import os\n'), ((8056, 8081), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (8071, 8081), False, 'import os\n'), ((828, 861), 'tests.clickhouse_models.ClickHouseTestModel.get_storage', 'ClickHouseTestModel.get_storage', ([], {}), '()\n', (859, 861), False, 'from tests.clickhouse_models import ClickHouseTestModel, ClickHouseCollapseTestModel, ClickHouseMultiTestModel\n'), ((954, 977), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (975, 977), False, 'import datetime\n'), ((992, 1013), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (1011, 1013), False, 'import datetime\n'), ((1464, 1487), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1485, 1487), False, 'import datetime\n'), ((1502, 1523), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (1521, 1523), False, 'import datetime\n'), ((2488, 2511), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2509, 2511), False, 'import datetime\n'), ((2526, 2547), 'datetime.date.today', 
'datetime.date.today', ([], {}), '()\n', (2545, 2547), False, 'import datetime\n'), ((3522, 3543), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (3541, 3543), False, 'import datetime\n'), ((3976, 3999), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3997, 3999), False, 'import datetime\n'), ((4014, 4035), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (4033, 4035), False, 'import datetime\n'), ((5221, 5254), 'tests.clickhouse_models.ClickHouseTestModel.get_storage', 'ClickHouseTestModel.get_storage', ([], {}), '()\n', (5252, 5254), False, 'from tests.clickhouse_models import ClickHouseTestModel, ClickHouseCollapseTestModel, ClickHouseMultiTestModel\n'), ((7836, 7849), 'random.randint', 'randint', (['(0)', '(5)'], {}), '(0, 5)\n', (7843, 7849), False, 'from random import randint\n'), ((8758, 8791), 'tests.clickhouse_models.ClickHouseTestModel.get_storage', 'ClickHouseTestModel.get_storage', ([], {}), '()\n', (8789, 8791), False, 'from tests.clickhouse_models import ClickHouseTestModel, ClickHouseCollapseTestModel, ClickHouseMultiTestModel\n'), ((6739, 6762), 'tests.models.TestModel.objects.all', 'TestModel.objects.all', ([], {}), '()\n', (6760, 6762), False, 'from tests.models import TestModel\n'), ((8438, 8443), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (8441, 8443), False, 'from django.utils.timezone import now\n'), ((9161, 9184), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (9182, 9184), False, 'import datetime\n'), ((7121, 7143), 'django_clickhouse.utils.int_ranges', 'int_ranges', (['absent_ids'], {}), '(absent_ids)\n', (7131, 7143), False, 'from django_clickhouse.utils import int_ranges\n')] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import unittest
import numpy
import quantities
# Configure the quantities package: use SI units by default and register
# kilocalorie/kilojoule, which are not built in.
quantities.set_default_units('si')
quantities.UnitQuantity('kilocalorie', 1000.0*quantities.cal, symbol='kcal')
quantities.UnitQuantity('kilojoule', 1000.0*quantities.J, symbol='kJ')
from rmgpy.chem.molecule import Molecule
from rmgpy.chem.species import Species
from rmgpy.chem.reaction import Reaction
from rmgpy.chem.kinetics import Arrhenius
from rmgpy.chem.thermo import ThermoData
from rmgpy.solver.simple import SimpleReactor
################################################################################
class SimpleReactorCheck(unittest.TestCase):
    """Exercises the SimpleReactor solver on a small hand-built kinetic model."""

    def testSolve(self):
        """
        Test the simple batch reactor with a simple kinetic model. Here we
        choose a kinetic model consisting of the hydrogen abstraction reaction
        CH4 + C2H5 <=> CH3 + C2H6.
        """
        # Species with tabulated thermodynamic data (Cp over 300-1500 K, H298, S298).
        CH4 = Species(
            molecule=[Molecule().fromSMILES("C")],
            thermo=ThermoData(Tdata=([300,400,500,600,800,1000,1500],"K"), Cpdata=([ 8.615, 9.687,10.963,12.301,14.841,16.976,20.528],"cal/mol*K"), H298=(-17.714,"kcal/mol"), S298=(44.472,"cal/mol*K"))
            )
        CH3 = Species(
            molecule=[Molecule().fromSMILES("[CH3]")],
            thermo=ThermoData(Tdata=([300,400,500,600,800,1000,1500],"K"), Cpdata=([ 9.397,10.123,10.856,11.571,12.899,14.055,16.195],"cal/mol*K"), H298=( 9.357,"kcal/mol"), S298=(45.174,"cal/mol*K"))
            )
        C2H6 = Species(
            molecule=[Molecule().fromSMILES("CC")],
            thermo=ThermoData(Tdata=([300,400,500,600,800,1000,1500],"K"), Cpdata=([12.684,15.506,18.326,20.971,25.500,29.016,34.595],"cal/mol*K"), H298=(-19.521,"kcal/mol"), S298=(54.799,"cal/mol*K"))
            )
        C2H5 = Species(
            molecule=[Molecule().fromSMILES("C[CH2]")],
            thermo=ThermoData(Tdata=([300,400,500,600,800,1000,1500],"K"), Cpdata=([11.635,13.744,16.085,18.246,21.885,24.676,29.107],"cal/mol*K"), H298=( 29.496,"kcal/mol"), S298=(56.687,"cal/mol*K"))
            )

        # Single H-abstraction reaction with modified Arrhenius kinetics
        # (Ea given as 7.82799 * 4184, presumably kcal/mol converted to J/mol
        # — confirm against the Arrhenius class units).
        rxn1 = Reaction(reactants=[C2H6,CH3], products=[C2H5,CH4], kinetics=Arrhenius(A=686.375*6, n=4.40721, Ea=7.82799*4184., T0=298.15))

        coreSpecies = [CH4,CH3,C2H6,C2H5]
        edgeSpecies = []
        coreReactions = [rxn1]
        edgeReactions = []

        # Reactor conditions; presumably T in K and P in Pa — confirm against SimpleReactor.
        T = 1000; P = 1.0e5
        rxnSystem = SimpleReactor(T, P, initialMoleFractions={C2H5: 0.1, CH3: 0.1, CH4: 0.4, C2H6: 0.4})
        rxnSystem.initializeModel(coreSpecies, coreReactions, edgeSpecies, edgeReactions)

        # Logarithmically spaced time points: 10**(i/10) for i in [-130, -50],
        # i.e. from 1e-13 s to 1e-5 s.
        tlist = numpy.array([10**(i/10.0) for i in range(-130, -49)], numpy.float64)

        # Integrate to get the solution at each time point
        t = []; y = []; reactionRates = []; speciesRates = []
        for t1 in tlist:
            rxnSystem.advance(t1)
            t.append(rxnSystem.t)
            # You must make a copy of y because it is overwritten by DASSL at
            # each call to advance()
            y.append(rxnSystem.y.copy())
            reactionRates.append(rxnSystem.coreReactionRates.copy())
            speciesRates.append(rxnSystem.coreSpeciesRates.copy())

        # Convert the solution vectors to numpy arrays
        t = numpy.array(t, numpy.float64)
        y = numpy.array(y, numpy.float64)
        reactionRates = numpy.array(reactionRates, numpy.float64)
        speciesRates = numpy.array(speciesRates, numpy.float64)

        # Plot concentrations and species rates for visual inspection.
        # NOTE(review): pylab.show() blocks until the window is closed, which
        # makes this an interactive test rather than an automated one — confirm intended.
        import pylab
        fig = pylab.figure(figsize=(6,6))
        pylab.subplot(2,1,1)
        pylab.semilogx(t, y)
        pylab.ylabel('Concentration (mol/m$^\\mathdefault{3}$)')
        pylab.legend(['CH4', 'CH3', 'C2H6', 'C2H5'], loc=4)
        pylab.subplot(2,1,2)
        pylab.semilogx(t, speciesRates)
        pylab.legend(['CH4', 'CH3', 'C2H6', 'C2H5'], loc=4)
        pylab.xlabel('Time (s)')
        pylab.ylabel('Rate (mol/m$^\\mathdefault{3}$*s)')
        fig.subplots_adjust(left=0.12, bottom=0.10, right=0.95, top=0.95, wspace=0.20, hspace=0.35)
        pylab.show()
################################################################################
if __name__ == '__main__':
    # Run the suite with a verbose text runner when executed directly.
    unittest.main( testRunner = unittest.TextTestRunner(verbosity=2) )
"pylab.ylabel",
"quantities.set_default_units",
"pylab.subplot",
"unittest.TextTestRunner",
"pylab.xlabel",
"pylab.legend",
"rmgpy.chem.kinetics.Arrhenius",
"pylab.figure",
"numpy.array",
"rmgpy.chem.thermo.ThermoData",
"rmgpy.chem.molecule.Molecule",
"pylab.semilogx",
"rmgpy.solver.simple.S... | [((91, 125), 'quantities.set_default_units', 'quantities.set_default_units', (['"""si"""'], {}), "('si')\n", (119, 125), False, 'import quantities\n'), ((126, 204), 'quantities.UnitQuantity', 'quantities.UnitQuantity', (['"""kilocalorie"""', '(1000.0 * quantities.cal)'], {'symbol': '"""kcal"""'}), "('kilocalorie', 1000.0 * quantities.cal, symbol='kcal')\n", (149, 204), False, 'import quantities\n'), ((203, 275), 'quantities.UnitQuantity', 'quantities.UnitQuantity', (['"""kilojoule"""', '(1000.0 * quantities.J)'], {'symbol': '"""kJ"""'}), "('kilojoule', 1000.0 * quantities.J, symbol='kJ')\n", (226, 275), False, 'import quantities\n'), ((2380, 2468), 'rmgpy.solver.simple.SimpleReactor', 'SimpleReactor', (['T', 'P'], {'initialMoleFractions': '{C2H5: 0.1, CH3: 0.1, CH4: 0.4, C2H6: 0.4}'}), '(T, P, initialMoleFractions={C2H5: 0.1, CH3: 0.1, CH4: 0.4,\n C2H6: 0.4})\n', (2393, 2468), False, 'from rmgpy.solver.simple import SimpleReactor\n'), ((3216, 3245), 'numpy.array', 'numpy.array', (['t', 'numpy.float64'], {}), '(t, numpy.float64)\n', (3227, 3245), False, 'import numpy\n'), ((3258, 3287), 'numpy.array', 'numpy.array', (['y', 'numpy.float64'], {}), '(y, numpy.float64)\n', (3269, 3287), False, 'import numpy\n'), ((3312, 3353), 'numpy.array', 'numpy.array', (['reactionRates', 'numpy.float64'], {}), '(reactionRates, numpy.float64)\n', (3323, 3353), False, 'import numpy\n'), ((3377, 3417), 'numpy.array', 'numpy.array', (['speciesRates', 'numpy.float64'], {}), '(speciesRates, numpy.float64)\n', (3388, 3417), False, 'import numpy\n'), ((3454, 3482), 'pylab.figure', 'pylab.figure', ([], {'figsize': '(6, 6)'}), '(figsize=(6, 6))\n', (3466, 3482), False, 'import pylab\n'), ((3490, 3512), 'pylab.subplot', 'pylab.subplot', (['(2)', '(1)', '(1)'], {}), '(2, 1, 1)\n', (3503, 3512), False, 'import pylab\n'), ((3519, 3539), 'pylab.semilogx', 'pylab.semilogx', (['t', 'y'], {}), '(t, y)\n', (3533, 3539), False, 'import pylab\n'), ((3548, 3604), 
'pylab.ylabel', 'pylab.ylabel', (['"""Concentration (mol/m$^\\\\mathdefault{3}$)"""'], {}), "('Concentration (mol/m$^\\\\mathdefault{3}$)')\n", (3560, 3604), False, 'import pylab\n'), ((3613, 3664), 'pylab.legend', 'pylab.legend', (["['CH4', 'CH3', 'C2H6', 'C2H5']"], {'loc': '(4)'}), "(['CH4', 'CH3', 'C2H6', 'C2H5'], loc=4)\n", (3625, 3664), False, 'import pylab\n'), ((3673, 3695), 'pylab.subplot', 'pylab.subplot', (['(2)', '(1)', '(2)'], {}), '(2, 1, 2)\n', (3686, 3695), False, 'import pylab\n'), ((3702, 3733), 'pylab.semilogx', 'pylab.semilogx', (['t', 'speciesRates'], {}), '(t, speciesRates)\n', (3716, 3733), False, 'import pylab\n'), ((3742, 3793), 'pylab.legend', 'pylab.legend', (["['CH4', 'CH3', 'C2H6', 'C2H5']"], {'loc': '(4)'}), "(['CH4', 'CH3', 'C2H6', 'C2H5'], loc=4)\n", (3754, 3793), False, 'import pylab\n'), ((3802, 3826), 'pylab.xlabel', 'pylab.xlabel', (['"""Time (s)"""'], {}), "('Time (s)')\n", (3814, 3826), False, 'import pylab\n'), ((3835, 3884), 'pylab.ylabel', 'pylab.ylabel', (['"""Rate (mol/m$^\\\\mathdefault{3}$*s)"""'], {}), "('Rate (mol/m$^\\\\mathdefault{3}$*s)')\n", (3847, 3884), False, 'import pylab\n'), ((3993, 4005), 'pylab.show', 'pylab.show', ([], {}), '()\n', (4003, 4005), False, 'import pylab\n'), ((4148, 4184), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {'verbosity': '(2)'}), '(verbosity=2)\n', (4171, 4184), False, 'import unittest\n'), ((985, 1190), 'rmgpy.chem.thermo.ThermoData', 'ThermoData', ([], {'Tdata': "([300, 400, 500, 600, 800, 1000, 1500], 'K')", 'Cpdata': "([8.615, 9.687, 10.963, 12.301, 14.841, 16.976, 20.528], 'cal/mol*K')", 'H298': "(-17.714, 'kcal/mol')", 'S298': "(44.472, 'cal/mol*K')"}), "(Tdata=([300, 400, 500, 600, 800, 1000, 1500], 'K'), Cpdata=([\n 8.615, 9.687, 10.963, 12.301, 14.841, 16.976, 20.528], 'cal/mol*K'),\n H298=(-17.714, 'kcal/mol'), S298=(44.472, 'cal/mol*K'))\n", (995, 1190), False, 'from rmgpy.chem.thermo import ThermoData\n'), ((1279, 1483), 'rmgpy.chem.thermo.ThermoData', 
'ThermoData', ([], {'Tdata': "([300, 400, 500, 600, 800, 1000, 1500], 'K')", 'Cpdata': "([9.397, 10.123, 10.856, 11.571, 12.899, 14.055, 16.195], 'cal/mol*K')", 'H298': "(9.357, 'kcal/mol')", 'S298': "(45.174, 'cal/mol*K')"}), "(Tdata=([300, 400, 500, 600, 800, 1000, 1500], 'K'), Cpdata=([\n 9.397, 10.123, 10.856, 11.571, 12.899, 14.055, 16.195], 'cal/mol*K'),\n H298=(9.357, 'kcal/mol'), S298=(45.174, 'cal/mol*K'))\n", (1289, 1483), False, 'from rmgpy.chem.thermo import ThermoData\n'), ((1571, 1776), 'rmgpy.chem.thermo.ThermoData', 'ThermoData', ([], {'Tdata': "([300, 400, 500, 600, 800, 1000, 1500], 'K')", 'Cpdata': "([12.684, 15.506, 18.326, 20.971, 25.5, 29.016, 34.595], 'cal/mol*K')", 'H298': "(-19.521, 'kcal/mol')", 'S298': "(54.799, 'cal/mol*K')"}), "(Tdata=([300, 400, 500, 600, 800, 1000, 1500], 'K'), Cpdata=([\n 12.684, 15.506, 18.326, 20.971, 25.5, 29.016, 34.595], 'cal/mol*K'),\n H298=(-19.521, 'kcal/mol'), S298=(54.799, 'cal/mol*K'))\n", (1581, 1776), False, 'from rmgpy.chem.thermo import ThermoData\n'), ((1867, 2073), 'rmgpy.chem.thermo.ThermoData', 'ThermoData', ([], {'Tdata': "([300, 400, 500, 600, 800, 1000, 1500], 'K')", 'Cpdata': "([11.635, 13.744, 16.085, 18.246, 21.885, 24.676, 29.107], 'cal/mol*K')", 'H298': "(29.496, 'kcal/mol')", 'S298': "(56.687, 'cal/mol*K')"}), "(Tdata=([300, 400, 500, 600, 800, 1000, 1500], 'K'), Cpdata=([\n 11.635, 13.744, 16.085, 18.246, 21.885, 24.676, 29.107], 'cal/mol*K'),\n H298=(29.496, 'kcal/mol'), S298=(56.687, 'cal/mol*K'))\n", (1877, 2073), False, 'from rmgpy.chem.thermo import ThermoData\n'), ((2141, 2208), 'rmgpy.chem.kinetics.Arrhenius', 'Arrhenius', ([], {'A': '(686.375 * 6)', 'n': '(4.40721)', 'Ea': '(7.82799 * 4184.0)', 'T0': '(298.15)'}), '(A=686.375 * 6, n=4.40721, Ea=7.82799 * 4184.0, T0=298.15)\n', (2150, 2208), False, 'from rmgpy.chem.kinetics import Arrhenius\n'), ((937, 947), 'rmgpy.chem.molecule.Molecule', 'Molecule', ([], {}), '()\n', (945, 947), False, 'from rmgpy.chem.molecule import 
Molecule\n'), ((1227, 1237), 'rmgpy.chem.molecule.Molecule', 'Molecule', ([], {}), '()\n', (1235, 1237), False, 'from rmgpy.chem.molecule import Molecule\n'), ((1522, 1532), 'rmgpy.chem.molecule.Molecule', 'Molecule', ([], {}), '()\n', (1530, 1532), False, 'from rmgpy.chem.molecule import Molecule\n'), ((1814, 1824), 'rmgpy.chem.molecule.Molecule', 'Molecule', ([], {}), '()\n', (1822, 1824), False, 'from rmgpy.chem.molecule import Molecule\n')] |
from math import hypot


def calcular_hipotenusa(cateto_oposto, cateto_adjacente):
    """Return the hypotenuse of a right triangle given its two legs.

    :param cateto_oposto: length of the opposite leg.
    :param cateto_adjacente: length of the adjacent leg.
    :return: sqrt(cateto_oposto**2 + cateto_adjacente**2) as a float.
    """
    return hypot(cateto_oposto, cateto_adjacente)


def main():
    """Prompt for the two legs and print the hypotenuse with an emoji."""
    # Imported lazily so the pure math helper above can be imported (and
    # tested) without the third-party 'emoji' package installed.
    import emoji

    cateto_opos = float(input('Cateto oposto: '))
    cateto_adja = float(input('Cateto adjacente: '))
    hipotenusa = calcular_hipotenusa(cateto_opos, cateto_adja)
    print(emoji.emojize(f'De acordo com os dados, a hipotenusa é igual a {hipotenusa:.2f} :nerd_face::thumbs_up:.'))


if __name__ == '__main__':
    # BUG FIX: previously all prompts/prints ran as module-level side effects;
    # guard them so importing this module is safe.
    main()
"math.hypot",
"emoji.emojize"
] | [((145, 176), 'math.hypot', 'hypot', (['cateto_opos', 'cateto_adja'], {}), '(cateto_opos, cateto_adja)\n', (150, 176), False, 'from math import hypot\n'), ((184, 299), 'emoji.emojize', 'emoji.emojize', (['f"""De acordo com os dados, a hipotenusa é igual a {hipotenusa:.2f} :nerd_face::thumbs_up:."""'], {}), "(\n f'De acordo com os dados, a hipotenusa é igual a {hipotenusa:.2f} :nerd_face::thumbs_up:.'\n )\n", (197, 299), False, 'import emoji\n')] |
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import shutil
from textwrap import dedent
from pants.backend.android.targets.android_dependency import AndroidDependency
from pants.backend.android.targets.android_library import AndroidLibrary
from pants.backend.android.targets.android_resources import AndroidResources
from pants.backend.android.tasks.unpack_libraries import UnpackLibraries
from pants.backend.core.targets.dependencies import Dependencies
from pants.backend.jvm.targets.jar_dependency import JarDependency
from pants.backend.jvm.targets.jar_library import JarLibrary
from pants.backend.jvm.tasks.ivy_task_mixin import IvyTaskMixin
from pants.base.build_file_aliases import BuildFileAliases
from pants.fs.archive import ZIP
from pants.util.contextutil import open_zip, temporary_dir, temporary_file
from pants.util.dirutil import safe_mkdir, safe_open, safe_walk, touch
from pants_test.android.test_android_base import TestAndroidBase
class UnpackLibrariesTest(TestAndroidBase):
"""Test the .aar and .jar unpacking methods in pants.backend.android.tasks.unpack_libraries."""
@classmethod
def task_type(cls):
return UnpackLibraries
@classmethod
def _add_ivy_imports_product(cls, foo_target, android_dep, unpack_task):
ivy_imports_product = unpack_task.context.products.get('ivy_imports')
ivy_imports_product.add(foo_target, os.path.dirname(android_dep),
[os.path.basename(android_dep)])
@property
def alias_groups(self):
return BuildFileAliases.create(
targets={
'android_dependency': AndroidDependency,
'android_library': AndroidLibrary,
'jar_library': JarLibrary,
'target': Dependencies
},
objects={
'jar': JarDependency,
},
)
def unpacked_aar_library(self, location, manifest=True, classes_jar=True, resources=True,
filenames=None):
"""Mock the contents of an aar file, with optional components and additional files."""
if manifest:
manifest_file = os.path.join(location, 'AndroidManifest.xml')
touch(manifest_file)
with safe_open(manifest_file, 'w') as fp:
fp.write(self.android_manifest())
fp.close()
if classes_jar:
self.create_jarfile(location, filenames=filenames)
if resources:
safe_mkdir(os.path.join(location, 'res'))
return location
def create_aarfile(self, location, name, filenames=None):
"""Create an aar file, using the contents created by self.unpacked_aar_library."""
with temporary_dir() as temp:
aar_contents = self.unpacked_aar_library(temp, filenames=filenames)
archive = ZIP.create(aar_contents, location, name)
aar = os.path.join(location, '{}.aar'.format(name))
os.rename(archive, aar)
return aar
def create_jarfile(self, location, name=None, filenames=None):
"""Create a sample jar file."""
name = '{}.jar'.format(name or 'classes')
jar_name = os.path.join(location, name)
with open_zip(jar_name, 'w') as library:
library.writestr('a/b/c/Foo.class', '0xCAFEBABE')
library.writestr('a/b/c/Bar.class', '0xCAFEBABE')
if filenames:
for class_file in filenames:
library.writestr(class_file, '0xCAFEBABE')
return jar_name
def test_unpack_smoke(self):
task = self.create_task(self.context())
task.execute()
def test_is_library(self):
with self.android_library() as android_library:
task = self.create_task(self.context())
self.assertTrue(task.is_library(android_library))
def test_detect_nonlibrary(self):
with self.android_target() as android_target:
task = self.create_task(self.context())
self.assertFalse(task.is_library(android_target))
def test_aar_out(self):
task = self.create_task(self.context())
archive = 'org.pantsbuild.example-1.0'
outdir = task.unpacked_aar_location(archive)
self.assertEqual(os.path.join(task.workdir, archive), outdir)
def test_jar_out(self):
task = self.create_task(self.context())
archive = 'org.pantsbuild.example-1.0'
outdir = task.unpacked_jar_location(archive)
self.assertEqual(os.path.join(task.workdir, 'explode-jars', archive), outdir)
def test_create_classes_jar_target(self):
with self.android_library() as android_library:
with temporary_file() as jar:
task = self.create_task(self.context())
archive = 'org.pantsbuild.example-1.0'
created_target = task.create_classes_jar_target(android_library, archive, jar)
self.assertEqual(created_target.derived_from, android_library)
self.assertTrue(created_target.is_synthetic)
self.assertTrue(isinstance(created_target, JarLibrary))
def test_create_resource_target(self):
with self.android_library() as library:
with temporary_file() as manifest:
with temporary_dir() as res:
manifest.write(self.android_manifest())
manifest.close()
task = self.create_task(self.context())
archive = 'org.pantsbuild.example-1.0'
created_target = task.create_resource_target(library, archive, manifest.name, res)
self.assertEqual(created_target.derived_from, library)
self.assertTrue(created_target.is_synthetic)
self.assertTrue(isinstance(created_target, AndroidResources))
self.assertEqual(created_target.resource_dir, res)
self.assertEqual(created_target.manifest.path, manifest.name)
def test_create_android_library_target(self):
with self.android_library(include_patterns=['**/*.class']) as android_library:
with temporary_dir() as temp:
contents = self.unpacked_aar_library(temp)
task = self.create_task(self.context())
archive = 'org.pantsbuild.example-1.0'
created_library = task.create_android_library_target(android_library, archive, contents)
self.assertEqual(created_library.derived_from, android_library)
self.assertTrue(created_library.is_synthetic)
self.assertTrue(isinstance(created_library, AndroidLibrary))
self.assertEqual(android_library.include_patterns, created_library.include_patterns)
self.assertEqual(android_library.exclude_patterns, created_library.exclude_patterns)
self.assertEqual(len(created_library.dependencies), 2)
for dep in created_library.dependencies:
self.assertTrue(isinstance(dep, AndroidResources) or isinstance(dep, JarLibrary))
def test_no_classes_jar(self):
with self.android_library(include_patterns=['**/*.class']) as android_library:
with temporary_dir() as temp:
contents = self.unpacked_aar_library(temp, classes_jar=False)
task = self.create_task(self.context())
archive = 'org.pantsbuild.example-1.0'
created_library = task.create_android_library_target(android_library, archive, contents)
self.assertEqual(len(created_library.dependencies), 1)
for dep in created_library.dependencies:
isinstance(dep, AndroidResources)
def test_no_resources(self):
with self.android_library() as android_library:
with temporary_dir() as temp:
contents = self.unpacked_aar_library(temp, classes_jar=False)
task = self.create_task(self.context())
archive = 'org.pantsbuild.example-1.0'
created_library = task.create_android_library_target(android_library, archive, contents)
self.assertEqual(len(created_library.dependencies), 1)
for dep in created_library.dependencies:
isinstance(dep, JarLibrary)
def test_no_manifest(self):
with self.android_library(include_patterns=['**/*.class']) as android_library:
with temporary_dir() as temp:
contents = self.unpacked_aar_library(temp, manifest=False)
task = self.create_task(self.context())
archive = 'org.pantsbuild.example-1.0'
with self.assertRaises(UnpackLibraries.MissingElementException):
task.create_android_library_target(android_library, archive, contents)
# Test unpacking process.
def create_unpack_build_file(self):
self.add_to_build_file('unpack', dedent('''
android_library(name='test',
libraries=['unpack/libs:test-jar'],
include_patterns=[
'a/b/c/*.class',
],
)
'''))
def test_unpack_jar_library(self):
# Test for when the imported library is a jarfile.
with temporary_dir() as temp:
jar_file = self.create_jarfile(temp, 'org.pantsbuild.android.test',
filenames=['a/b/c/Any.class', 'a/b/d/Thing.class'])
self.create_unpack_build_file()
target_name = 'unpack:test'
self._make_android_dependency('test-jar', jar_file, '1.0')
test_target = self.target(target_name)
files = self.unpack_libraries(target_name, jar_file)
# If the android_library imports a jar, files are unpacked but no new targets are created.
self.assertIn('Thing.class', files)
self.assertEqual(len(test_target.dependencies), 0)
def test_unexpected_archive_type(self):
with temporary_dir() as temp:
aar = self.create_aarfile(temp, 'org.pantsbuild.android.test')
unexpected_archive = os.path.join(temp, 'org.pantsbuild.android.test{}'.format('.other'))
os.rename(aar, unexpected_archive)
self.create_unpack_build_file()
target_name = 'unpack:test'
self._make_android_dependency('test-jar', unexpected_archive, '1.0')
with self.assertRaises(UnpackLibraries.UnexpectedArchiveType):
self.unpack_libraries(target_name, unexpected_archive)
# Test aar unpacking and invalidation
def test_ivy_args(self):
# A regression test for ivy_mixin_task. UnpackLibraries depends on the mapped jar filename
# being unique and including the version number. If you are making a change to
# ivy_task_mixin._get_ivy_args() that maintains both then feel free to update this test.
ivy_args = [
'-retrieve', '{}/[organisation]/[artifact]/[conf]/'
'[organisation]-[artifact]-[revision](-[classifier]).[ext]'.format('foo'),
'-symlink',
]
self.assertEqual(ivy_args, IvyTaskMixin._get_ivy_args('foo'))
# There is a bit of fudging here. In practice, the jar name is transformed by ivy into
# '[organisation]-[artifact]-[revision](-[classifier]).[ext]'. The unpack_libraries task does not
# care about the details of the imported jar name but it does rely on that name being unique and
# including the version number.
def _approximate_ivy_mapjar_name(self, archive, android_archive):
# This basically creates a copy named after the target.id + file extension.
location = os.path.dirname(archive)
ivy_mapjar_name = os.path.join(location,
'{}{}'.format(android_archive, os.path.splitext(archive)[1]))
shutil.copy(archive, ivy_mapjar_name)
return ivy_mapjar_name
def _make_android_dependency(self, name, library_file, version):
build_file = os.path.join(self.build_root, 'unpack', 'libs', 'BUILD')
if os.path.exists(build_file):
os.remove(build_file)
self.add_to_build_file('unpack/libs', dedent('''
android_dependency(name='{name}',
jars=[
jar(org='com.example', name='bar', rev='{version}', url='file:///{filepath}'),
],
)
'''.format(name=name, version=version, filepath=library_file)))
def unpack_libraries(self, target_name, aar_file):
test_target = self.target(target_name)
task = self.create_task(self.context(target_roots=[test_target]))
for android_archive in test_target.imported_jars:
target_jar = self._approximate_ivy_mapjar_name(aar_file, android_archive)
self._add_ivy_imports_product(test_target, target_jar, task)
task.execute()
# Gather classes found when unpacking the aar_file.
aar_name = os.path.basename(target_jar)
files = []
jar_location = task.unpacked_jar_location(aar_name)
for _, _, filename in safe_walk(jar_location):
files.extend(filename)
return files
def test_unpack_aar_files_and_invalidation(self):
with temporary_dir() as temp:
aar = self.create_aarfile(temp, 'org.pantsbuild.android.test')
self.create_unpack_build_file()
target_name = 'unpack:test'
self._make_android_dependency('test-jar', aar, '1.0')
files = self.unpack_libraries(target_name, aar)
self.assertIn('Foo.class', files)
# Reset build graph to dismiss all the created targets.
self.reset_build_graph()
# Create a new copy of the archive- adding a sentinel file but without bumping the version.
new_aar = self.create_aarfile(temp, 'org.pantsbuild.android.test',
filenames=['a/b/c/Baz.class'])
# Call task a 2nd time but the sentinel file is not found because we didn't bump version.
files = self.unpack_libraries(target_name, new_aar)
self.assertNotIn('Baz.class', files)
# Now bump version and this time the aar is unpacked and the sentinel file is found.
self.reset_build_graph()
self._make_android_dependency('test-jar', new_aar, '2.0')
files = self.unpack_libraries(target_name, new_aar)
self.assertIn('Baz.class', files)
| [
"pants.util.dirutil.touch",
"pants.base.build_file_aliases.BuildFileAliases.create",
"os.path.exists",
"pants.fs.archive.ZIP.create",
"textwrap.dedent",
"os.rename",
"os.path.join",
"os.path.splitext",
"pants.util.dirutil.safe_open",
"os.remove",
"os.path.dirname",
"pants.backend.jvm.tasks.ivy... | [((1770, 1970), 'pants.base.build_file_aliases.BuildFileAliases.create', 'BuildFileAliases.create', ([], {'targets': "{'android_dependency': AndroidDependency, 'android_library': AndroidLibrary,\n 'jar_library': JarLibrary, 'target': Dependencies}", 'objects': "{'jar': JarDependency}"}), "(targets={'android_dependency': AndroidDependency,\n 'android_library': AndroidLibrary, 'jar_library': JarLibrary, 'target':\n Dependencies}, objects={'jar': JarDependency})\n", (1793, 1970), False, 'from pants.base.build_file_aliases import BuildFileAliases\n'), ((3232, 3260), 'os.path.join', 'os.path.join', (['location', 'name'], {}), '(location, name)\n', (3244, 3260), False, 'import os\n'), ((11031, 11055), 'os.path.dirname', 'os.path.dirname', (['archive'], {}), '(archive)\n', (11046, 11055), False, 'import os\n'), ((11202, 11239), 'shutil.copy', 'shutil.copy', (['archive', 'ivy_mapjar_name'], {}), '(archive, ivy_mapjar_name)\n', (11213, 11239), False, 'import shutil\n'), ((11352, 11408), 'os.path.join', 'os.path.join', (['self.build_root', '"""unpack"""', '"""libs"""', '"""BUILD"""'], {}), "(self.build_root, 'unpack', 'libs', 'BUILD')\n", (11364, 11408), False, 'import os\n'), ((11416, 11442), 'os.path.exists', 'os.path.exists', (['build_file'], {}), '(build_file)\n', (11430, 11442), False, 'import os\n'), ((12216, 12244), 'os.path.basename', 'os.path.basename', (['target_jar'], {}), '(target_jar)\n', (12232, 12244), False, 'import os\n'), ((12342, 12365), 'pants.util.dirutil.safe_walk', 'safe_walk', (['jar_location'], {}), '(jar_location)\n', (12351, 12365), False, 'from pants.util.dirutil import safe_mkdir, safe_open, safe_walk, touch\n'), ((1629, 1657), 'os.path.dirname', 'os.path.dirname', (['android_dep'], {}), '(android_dep)\n', (1644, 1657), False, 'import os\n'), ((2306, 2351), 'os.path.join', 'os.path.join', (['location', '"""AndroidManifest.xml"""'], {}), "(location, 'AndroidManifest.xml')\n", (2318, 2351), False, 'import os\n'), 
((2358, 2378), 'pants.util.dirutil.touch', 'touch', (['manifest_file'], {}), '(manifest_file)\n', (2363, 2378), False, 'from pants.util.dirutil import safe_mkdir, safe_open, safe_walk, touch\n'), ((2808, 2823), 'pants.util.contextutil.temporary_dir', 'temporary_dir', ([], {}), '()\n', (2821, 2823), False, 'from pants.util.contextutil import open_zip, temporary_dir, temporary_file\n'), ((2923, 2963), 'pants.fs.archive.ZIP.create', 'ZIP.create', (['aar_contents', 'location', 'name'], {}), '(aar_contents, location, name)\n', (2933, 2963), False, 'from pants.fs.archive import ZIP\n'), ((3028, 3051), 'os.rename', 'os.rename', (['archive', 'aar'], {}), '(archive, aar)\n', (3037, 3051), False, 'import os\n'), ((3270, 3293), 'pants.util.contextutil.open_zip', 'open_zip', (['jar_name', '"""w"""'], {}), "(jar_name, 'w')\n", (3278, 3293), False, 'from pants.util.contextutil import open_zip, temporary_dir, temporary_file\n'), ((4200, 4235), 'os.path.join', 'os.path.join', (['task.workdir', 'archive'], {}), '(task.workdir, archive)\n', (4212, 4235), False, 'import os\n'), ((4429, 4480), 'os.path.join', 'os.path.join', (['task.workdir', '"""explode-jars"""', 'archive'], {}), "(task.workdir, 'explode-jars', archive)\n", (4441, 4480), False, 'import os\n'), ((8434, 8660), 'textwrap.dedent', 'dedent', (['"""\n android_library(name=\'test\',\n libraries=[\'unpack/libs:test-jar\'],\n include_patterns=[\n \'a/b/c/*.class\',\n ],\n )\n """'], {}), '(\n """\n android_library(name=\'test\',\n libraries=[\'unpack/libs:test-jar\'],\n include_patterns=[\n \'a/b/c/*.class\',\n ],\n )\n """\n )\n', (8440, 8660), False, 'from textwrap import dedent\n'), ((8754, 8769), 'pants.util.contextutil.temporary_dir', 'temporary_dir', ([], {}), '()\n', (8767, 8769), False, 'from pants.util.contextutil import open_zip, temporary_dir, temporary_file\n'), ((9432, 9447), 'pants.util.contextutil.temporary_dir', 'temporary_dir', ([], {}), '()\n', (9445, 9447), False, 'from pants.util.contextutil import 
open_zip, temporary_dir, temporary_file\n'), ((9628, 9662), 'os.rename', 'os.rename', (['aar', 'unexpected_archive'], {}), '(aar, unexpected_archive)\n', (9637, 9662), False, 'import os\n'), ((10510, 10543), 'pants.backend.jvm.tasks.ivy_task_mixin.IvyTaskMixin._get_ivy_args', 'IvyTaskMixin._get_ivy_args', (['"""foo"""'], {}), "('foo')\n", (10536, 10543), False, 'from pants.backend.jvm.tasks.ivy_task_mixin import IvyTaskMixin\n'), ((11450, 11471), 'os.remove', 'os.remove', (['build_file'], {}), '(build_file)\n', (11459, 11471), False, 'import os\n'), ((12475, 12490), 'pants.util.contextutil.temporary_dir', 'temporary_dir', ([], {}), '()\n', (12488, 12490), False, 'from pants.util.contextutil import open_zip, temporary_dir, temporary_file\n'), ((1688, 1717), 'os.path.basename', 'os.path.basename', (['android_dep'], {}), '(android_dep)\n', (1704, 1717), False, 'import os\n'), ((2390, 2419), 'pants.util.dirutil.safe_open', 'safe_open', (['manifest_file', '"""w"""'], {}), "(manifest_file, 'w')\n", (2399, 2419), False, 'from pants.util.dirutil import safe_mkdir, safe_open, safe_walk, touch\n'), ((2600, 2629), 'os.path.join', 'os.path.join', (['location', '"""res"""'], {}), "(location, 'res')\n", (2612, 2629), False, 'import os\n'), ((4598, 4614), 'pants.util.contextutil.temporary_file', 'temporary_file', ([], {}), '()\n', (4612, 4614), False, 'from pants.util.contextutil import open_zip, temporary_dir, temporary_file\n'), ((5090, 5106), 'pants.util.contextutil.temporary_file', 'temporary_file', ([], {}), '()\n', (5104, 5106), False, 'from pants.util.contextutil import open_zip, temporary_dir, temporary_file\n'), ((5895, 5910), 'pants.util.contextutil.temporary_dir', 'temporary_dir', ([], {}), '()\n', (5908, 5910), False, 'from pants.util.contextutil import open_zip, temporary_dir, temporary_file\n'), ((6877, 6892), 'pants.util.contextutil.temporary_dir', 'temporary_dir', ([], {}), '()\n', (6890, 6892), False, 'from pants.util.contextutil import open_zip, temporary_dir, 
temporary_file\n'), ((7415, 7430), 'pants.util.contextutil.temporary_dir', 'temporary_dir', ([], {}), '()\n', (7428, 7430), False, 'from pants.util.contextutil import open_zip, temporary_dir, temporary_file\n'), ((7977, 7992), 'pants.util.contextutil.temporary_dir', 'temporary_dir', ([], {}), '()\n', (7990, 7992), False, 'from pants.util.contextutil import open_zip, temporary_dir, temporary_file\n'), ((5133, 5148), 'pants.util.contextutil.temporary_dir', 'temporary_dir', ([], {}), '()\n', (5146, 5148), False, 'from pants.util.contextutil import open_zip, temporary_dir, temporary_file\n'), ((11167, 11192), 'os.path.splitext', 'os.path.splitext', (['archive'], {}), '(archive)\n', (11183, 11192), False, 'import os\n')] |
from django.urls import path
from . import views
app_name = 'core'
urlpatterns = [
path('', views.blog, name='blog'),
path('<int:pk>/', views.post_detail, name='post_detail'),
path('<int:pk>/share/', views.post_share, name='post_share'),
path('manage/', views.ManagePostListView.as_view(), name='manage'),
path('create/', views.PostCreateView.as_view(), name='create'),
path('<pk>/edit/', views.PostUpdateView.as_view(), name='post_edit'),
path('<pk>/delete/', views.PostDeleteView.as_view(), name='post_delete'),
] | [
"django.urls.path"
] | [((89, 122), 'django.urls.path', 'path', (['""""""', 'views.blog'], {'name': '"""blog"""'}), "('', views.blog, name='blog')\n", (93, 122), False, 'from django.urls import path\n'), ((128, 184), 'django.urls.path', 'path', (['"""<int:pk>/"""', 'views.post_detail'], {'name': '"""post_detail"""'}), "('<int:pk>/', views.post_detail, name='post_detail')\n", (132, 184), False, 'from django.urls import path\n'), ((190, 250), 'django.urls.path', 'path', (['"""<int:pk>/share/"""', 'views.post_share'], {'name': '"""post_share"""'}), "('<int:pk>/share/', views.post_share, name='post_share')\n", (194, 250), False, 'from django.urls import path\n')] |
"""App utilites tests.
"""
import sys
import pytest
from django.apps import apps
from ..models import Camera
pytestmark = pytest.mark.django_db
@pytest.mark.parametrize(
"testargs, output",
[
[["python", "manage.py", "runserver"], 4],
[["python", "manage.py", "makemigration"], 0],
[["python", "manage.py", "migrate"], 0],
[["python", "manage.py", "test"], 0],
[["pytest"], 0],
],
)
def test_app(monkeypatch, testargs, output):
"""test_create_demo_objects."""
monkeypatch.setattr(sys, "argv", testargs)
app_config = apps.get_app_config("locations")
app_config.ready()
app_config = apps.get_app_config("cameras")
app_config.ready()
assert Camera.objects.count() == output
@pytest.mark.parametrize(
"testenv, output",
[
["true", 4],
["True", 4],
["1", 4],
["false", 0],
["False", 0],
["0", 0],
["random_string", 4],
],
)
def test_app_not_create_demo(monkeypatch, testenv, output):
"""test_create_demo_objects."""
monkeypatch.setenv("CREATE_DEMO", testenv)
testargs = ["python", "manage.py", "runserver"]
monkeypatch.setattr(sys, "argv", testargs)
app_config = apps.get_app_config("locations")
app_config.ready()
app_config = apps.get_app_config("cameras")
app_config.ready()
assert Camera.objects.count() == output
| [
"pytest.mark.parametrize",
"django.apps.apps.get_app_config"
] | [((150, 394), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""testargs, output"""', "[[['python', 'manage.py', 'runserver'], 4], [['python', 'manage.py',\n 'makemigration'], 0], [['python', 'manage.py', 'migrate'], 0], [[\n 'python', 'manage.py', 'test'], 0], [['pytest'], 0]]"], {}), "('testargs, output', [[['python', 'manage.py',\n 'runserver'], 4], [['python', 'manage.py', 'makemigration'], 0], [[\n 'python', 'manage.py', 'migrate'], 0], [['python', 'manage.py', 'test'],\n 0], [['pytest'], 0]])\n", (173, 394), False, 'import pytest\n'), ((759, 903), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""testenv, output"""', "[['true', 4], ['True', 4], ['1', 4], ['false', 0], ['False', 0], ['0', 0],\n ['random_string', 4]]"], {}), "('testenv, output', [['true', 4], ['True', 4], ['1',\n 4], ['false', 0], ['False', 0], ['0', 0], ['random_string', 4]])\n", (782, 903), False, 'import pytest\n'), ((585, 617), 'django.apps.apps.get_app_config', 'apps.get_app_config', (['"""locations"""'], {}), "('locations')\n", (604, 617), False, 'from django.apps import apps\n'), ((658, 688), 'django.apps.apps.get_app_config', 'apps.get_app_config', (['"""cameras"""'], {}), "('cameras')\n", (677, 688), False, 'from django.apps import apps\n'), ((1234, 1266), 'django.apps.apps.get_app_config', 'apps.get_app_config', (['"""locations"""'], {}), "('locations')\n", (1253, 1266), False, 'from django.apps import apps\n'), ((1307, 1337), 'django.apps.apps.get_app_config', 'apps.get_app_config', (['"""cameras"""'], {}), "('cameras')\n", (1326, 1337), False, 'from django.apps import apps\n')] |
from typing import List, Tuple, Optional
import numpy as np
def rmse(x: List[float], y: List[float]) -> float:
r = 0
for (a, b) in zip(x, y):
r += (a - b) ** 2
return r
def lin_reg(data: List[Tuple[float, float]]) -> Tuple[float, float]:
d = np.array(data)
m = d.shape[0]
p = np.sum(d[:, 0])
q = np.sum(d[:, 1])
r = np.sum(d[:, 0] * d[:, 1])
s = np.sum(d[:, 0] ** 2)
d = (m + 1) * s - p ** 2
a = ((m + 1) * r - p * q) / d
b = (s * q - p * r) / d
return (a, b)
class LinearRegressor():
def __init__(self):
self._coeffs = None # type: Optional[Tuple[float, float]]
def fit(self, data: List[Tuple[float, float]]) -> None:
self._coeffs = lin_reg(data)
def predict(self, x: List[float]) -> List[float]:
pass
@property
def coeffs(self) -> Tuple[float, float]:
if self._coeffs is None:
raise Exception('You need to call `fit` on the model first.')
return self._coeffs
class Vertex:
def __init__(self, id: int) -> None:
self.id = id
self.neighbours = set()
self.visited = False
def add_neighbour(self, other_id):
self.neighbours.add(other_id)
def visit(self):
self.visited = True
def __str__(self):
return "Vertex " + str(self.id)
def __repr__(self):
return self.__str__()
class Graph:
def __init__(self, matrix=None):
self.vertices = []
if matrix is None:
return
n = len(matrix)
for i in range(n):
v = Vertex(i)
self.vertices.append(v)
for j in range(n):
if matrix[i][j]:
v.add_neighbour(j)
def __str__(self):
r = ""
for row in self.matrix():
r += str(row) + "\n"
return r
def matrix(self):
n = len(self.vertices)
m = [[0 for i in range(n)] for j in range(n)]
for i in range(n):
for j in range(n):
if j in self.vertices[i].neighbours:
m[i][j] = 1
return m
def add_vertex(self, neighbours):
v = Vertex(len(self.vertices))
for n in neighbours:
v.add_neighbour(n)
self.vertices.append(v)
return self
def add_egde(self, e):
self.vertices[e[0]].add_neighbour(e[1])
return self
def clear_visited(self):
for v in self.vertices:
v.visited = False
def BFS(self, start=None):
q = []
r = []
if start is not None:
q.append(start)
else:
q.append(self.vertices[0])
while q:
c = q.pop(0)
r.append(c)
c.visit()
for n in c.neighbours:
nei = self.vertices[n]
if not nei.visited:
q.append(nei)
self.clear_visited()
return r
g = Graph([
[0, 1, 1, 0, 1],
[0, 0, 0, 0, 0],
[0, 0, 0, 1, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0]
])
print(g.BFS())
| [
"numpy.array",
"numpy.sum"
] | [((270, 284), 'numpy.array', 'np.array', (['data'], {}), '(data)\n', (278, 284), True, 'import numpy as np\n'), ((312, 327), 'numpy.sum', 'np.sum', (['d[:, 0]'], {}), '(d[:, 0])\n', (318, 327), True, 'import numpy as np\n'), ((336, 351), 'numpy.sum', 'np.sum', (['d[:, 1]'], {}), '(d[:, 1])\n', (342, 351), True, 'import numpy as np\n'), ((360, 385), 'numpy.sum', 'np.sum', (['(d[:, 0] * d[:, 1])'], {}), '(d[:, 0] * d[:, 1])\n', (366, 385), True, 'import numpy as np\n'), ((394, 414), 'numpy.sum', 'np.sum', (['(d[:, 0] ** 2)'], {}), '(d[:, 0] ** 2)\n', (400, 414), True, 'import numpy as np\n')] |
""""
Parte 5: Criando Colisões
"""
#Importações necessárias para a criação da janela
import pygame
from pygame.locals import *
from sys import exit
from random import randint
#Inicialização das váriaveis e funções do pygame
pygame.init()
#Criação da tela
width = 640
height = 480
x = width/2
y = height/2
#Criando váriaveis para assumir diferentes valores para cada colisão
x_blue = randint(40, 600)
y_blue = randint(50, 430)
screen = pygame.display.set_mode((width, height))
pygame.display.set_caption('Game')
#Controlando a velocidade da movimentação do objeto
clock = pygame.time.Clock()
#Looping principal do jogo
while True:
clock.tick(30)
screen.fill((0, 0, 0))
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
exit()
#Criando uma condição para mudar a movimentação de acordo com a tecla
if event.type == KEYDOWN:
if event.key == K_a:
x = x - 20
if event.key == K_d:
x = x + 20
if event.key == K_w:
y = y - 20
if event.key == K_s:
y = y + 20
#Criando uma condição caso a tecla continue a ser pressionada
if pygame.key.get_pressed()[K_a]:
x = x - 20
if pygame.key.get_pressed()[K_d]:
x = x + 20
if pygame.key.get_pressed()[K_w]:
y = y - 20
if pygame.key.get_pressed()[K_s]:
y = y + 20
#Desenhando Objetos dentro da Tela e movimentando
ret_red = pygame.draw.rect(screen, (255, 0, 0), (x, y, 40, 50))#
ret_blue = pygame.draw.rect(screen, (0, 0, 255), (x_blue, y_blue, 40, 50))
#Criando Condições para cada colisão
if ret_red.colliderect(ret_blue):
x_blue = randint(40, 600)
y_blue = randint(50, 430)
pygame.display.update()
| [
"pygame.display.set_caption",
"sys.exit",
"pygame.init",
"pygame.quit",
"pygame.event.get",
"pygame.display.set_mode",
"pygame.key.get_pressed",
"pygame.draw.rect",
"pygame.time.Clock",
"pygame.display.update",
"random.randint"
] | [((227, 240), 'pygame.init', 'pygame.init', ([], {}), '()\n', (238, 240), False, 'import pygame\n'), ((389, 405), 'random.randint', 'randint', (['(40)', '(600)'], {}), '(40, 600)\n', (396, 405), False, 'from random import randint\n'), ((415, 431), 'random.randint', 'randint', (['(50)', '(430)'], {}), '(50, 430)\n', (422, 431), False, 'from random import randint\n'), ((443, 483), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(width, height)'], {}), '((width, height))\n', (466, 483), False, 'import pygame\n'), ((484, 518), 'pygame.display.set_caption', 'pygame.display.set_caption', (['"""Game"""'], {}), "('Game')\n", (510, 518), False, 'import pygame\n'), ((580, 599), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (597, 599), False, 'import pygame\n'), ((704, 722), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (720, 722), False, 'import pygame\n'), ((1500, 1553), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', '(255, 0, 0)', '(x, y, 40, 50)'], {}), '(screen, (255, 0, 0), (x, y, 40, 50))\n', (1516, 1553), False, 'import pygame\n'), ((1570, 1633), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', '(0, 0, 255)', '(x_blue, y_blue, 40, 50)'], {}), '(screen, (0, 0, 255), (x_blue, y_blue, 40, 50))\n', (1586, 1633), False, 'import pygame\n'), ((1784, 1807), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (1805, 1807), False, 'import pygame\n'), ((1214, 1238), 'pygame.key.get_pressed', 'pygame.key.get_pressed', ([], {}), '()\n', (1236, 1238), False, 'import pygame\n'), ((1271, 1295), 'pygame.key.get_pressed', 'pygame.key.get_pressed', ([], {}), '()\n', (1293, 1295), False, 'import pygame\n'), ((1328, 1352), 'pygame.key.get_pressed', 'pygame.key.get_pressed', ([], {}), '()\n', (1350, 1352), False, 'import pygame\n'), ((1385, 1409), 'pygame.key.get_pressed', 'pygame.key.get_pressed', ([], {}), '()\n', (1407, 1409), False, 'import pygame\n'), ((1727, 1743), 'random.randint', 'randint', (['(40)', '(600)'], {}), 
'(40, 600)\n', (1734, 1743), False, 'from random import randint\n'), ((1761, 1777), 'random.randint', 'randint', (['(50)', '(430)'], {}), '(50, 430)\n', (1768, 1777), False, 'from random import randint\n'), ((767, 780), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (778, 780), False, 'import pygame\n'), ((793, 799), 'sys.exit', 'exit', ([], {}), '()\n', (797, 799), False, 'from sys import exit\n')] |
# Turtle
import turtle
bob = turtle.Turtle()
print(bob)
bob.fd(100)
bob.lt(120)
bob.fd(100)
bob.lt(120)
bob.fd(100)
bob.lt(120) | [
"turtle.Turtle"
] | [((30, 45), 'turtle.Turtle', 'turtle.Turtle', ([], {}), '()\n', (43, 45), False, 'import turtle\n')] |
#!/usr/bin/env python3
"""Runs kb-stopwatch."""
from kbstopwatch.main import main
# Run it
main()
| [
"kbstopwatch.main.main"
] | [((94, 100), 'kbstopwatch.main.main', 'main', ([], {}), '()\n', (98, 100), False, 'from kbstopwatch.main import main\n')] |
from pathlib import Path
import typer
import beet
from windshader import utils
app = typer.Typer()
@app.command()
def cmd_main(
base_config: Path = typer.Argument(..., exists=True, dir_okay=False)
):
config = beet.load_config(base_config)
minecraft_version = config.meta["texture_atlas"]["minecraft_version"]
cache = beet.Cache(".beet_cache/texture_atlas")
for atlas_variant in map(lambda x: x.name, utils.get_uvs_root(cache, minecraft_version).iterdir()):
config.meta["texture_atlas"]["variant"] = atlas_variant
config.meta["project_variant"] = atlas_variant
with beet.run_beet(config) as ctx:
pass
app() | [
"beet.Cache",
"typer.Typer",
"beet.load_config",
"windshader.utils.get_uvs_root",
"beet.run_beet",
"typer.Argument"
] | [((87, 100), 'typer.Typer', 'typer.Typer', ([], {}), '()\n', (98, 100), False, 'import typer\n'), ((152, 200), 'typer.Argument', 'typer.Argument', (['...'], {'exists': '(True)', 'dir_okay': '(False)'}), '(..., exists=True, dir_okay=False)\n', (166, 200), False, 'import typer\n'), ((214, 243), 'beet.load_config', 'beet.load_config', (['base_config'], {}), '(base_config)\n', (230, 243), False, 'import beet\n'), ((324, 363), 'beet.Cache', 'beet.Cache', (['""".beet_cache/texture_atlas"""'], {}), "('.beet_cache/texture_atlas')\n", (334, 363), False, 'import beet\n'), ((580, 601), 'beet.run_beet', 'beet.run_beet', (['config'], {}), '(config)\n', (593, 601), False, 'import beet\n'), ((409, 453), 'windshader.utils.get_uvs_root', 'utils.get_uvs_root', (['cache', 'minecraft_version'], {}), '(cache, minecraft_version)\n', (427, 453), False, 'from windshader import utils\n')] |
from unittest import TestCase
from tests import abspath
from pytezos.repl.interpreter import Interpreter
from pytezos.michelson.converter import michelson_to_micheline
from pytezos.repl.parser import parse_expression
class OpcodeTestlist_concat_bytes_136(TestCase):
def setUp(self):
self.maxDiff = None
self.i = Interpreter(debug=True)
def test_opcode_list_concat_bytes_136(self):
res = self.i.execute(f'INCLUDE "{abspath("opcodes/contracts/list_concat_bytes.tz")}"')
self.assertTrue(res['success'])
res = self.i.execute('RUN { 0xcd ; 0xef ; 0x00 } 0x00ab')
self.assertTrue(res['success'])
exp_val_expr = michelson_to_micheline('0x00abcdef00')
exp_val = parse_expression(exp_val_expr, res['result']['storage'].type_expr)
self.assertEqual(exp_val, res['result']['storage']._val)
| [
"pytezos.michelson.converter.michelson_to_micheline",
"tests.abspath",
"pytezos.repl.parser.parse_expression",
"pytezos.repl.interpreter.Interpreter"
] | [((337, 360), 'pytezos.repl.interpreter.Interpreter', 'Interpreter', ([], {'debug': '(True)'}), '(debug=True)\n', (348, 360), False, 'from pytezos.repl.interpreter import Interpreter\n'), ((701, 739), 'pytezos.michelson.converter.michelson_to_micheline', 'michelson_to_micheline', (['"""0x00abcdef00"""'], {}), "('0x00abcdef00')\n", (723, 739), False, 'from pytezos.michelson.converter import michelson_to_micheline\n'), ((758, 824), 'pytezos.repl.parser.parse_expression', 'parse_expression', (['exp_val_expr', "res['result']['storage'].type_expr"], {}), "(exp_val_expr, res['result']['storage'].type_expr)\n", (774, 824), False, 'from pytezos.repl.parser import parse_expression\n'), ((460, 509), 'tests.abspath', 'abspath', (['"""opcodes/contracts/list_concat_bytes.tz"""'], {}), "('opcodes/contracts/list_concat_bytes.tz')\n", (467, 509), False, 'from tests import abspath\n')] |
import json
from huobi.utils.http import post
from huobi.host import HOST_FUTURES
class TriggerOrder:
    """Client for Huobi linear-swap trigger / TPSL / track-order endpoints.

    Every public method POSTs a signed request to one REST path and returns
    the parsed JSON response.  Isolated-margin and cross-margin variants are
    exposed as separate methods, mirroring the upstream API layout.

    Fix: the original repeated the identical signed-POST call in all 32
    methods; the request is now funneled through one private helper so the
    signing/transport logic lives in a single place.  Method names,
    signatures and behavior are unchanged.
    """

    def __init__(self, access_key: str, secret_key: str, host: str = HOST_FUTURES):
        self.__access_key = access_key
        self.__secret_key = secret_key
        self.__host = host

    def __post(self, path: str, data: dict) -> json:
        # Single choke point for every endpoint: signs and sends the request.
        return post(self.__host, path, self.__access_key, self.__secret_key, data)

    def isolated_order(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_trigger_order", data)

    def cross_order(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_cross_trigger_order", data)

    def isolated_cancel(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_trigger_cancel", data)

    def cross_cancel(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_cross_trigger_cancel", data)

    def isolated_cancel_all(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_trigger_cancelall", data)

    def cross_cancel_all(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_cross_trigger_cancelall", data)

    def isolated_get_open_orders(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_trigger_openorders", data)

    def cross_get_open_orders(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_cross_trigger_openorders", data)

    def isolated_get_his_orders(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_trigger_hisorders", data)

    def cross_get_his_orders(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_cross_trigger_hisorders", data)

    def isolated_tpsl_order(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_tpsl_order", data)

    def cross_tpsl_order(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_cross_tpsl_order", data)

    def isolated_tpsl_cancel(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_tpsl_cancel", data)

    def cross_tpsl_cancel(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_cross_tpsl_cancel", data)

    def isolated_tpsl_cancel_all(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_tpsl_cancelall", data)

    def cross_tpsl_cancel_all(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_cross_tpsl_cancelall", data)

    def isolated_get_tpsl_open_orders(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_tpsl_openorders", data)

    def cross_get_tpsl_open_orders(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_cross_tpsl_openorders", data)

    def isolated_get_tpsl_his_orders(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_tpsl_hisorders", data)

    def cross_get_tpsl_his_orders(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_cross_tpsl_hisorders", data)

    def isolated_get_relation_tpsl_order(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_relation_tpsl_order", data)

    def cross_get_relation_tpsl_order(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_cross_relation_tpsl_order", data)

    def isolated_track_order(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_track_order", data)

    def cross_track_order(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_cross_track_order", data)

    def isolated_track_cancel(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_track_cancel", data)

    def cross_track_cancel(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_cross_track_cancel", data)

    def isolated_track_cancel_all(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_track_cancelall", data)

    def cross_track_cancel_all(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_cross_track_cancelall", data)

    def isolated_get_track_open_orders(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_track_openorders", data)

    def cross_get_track_open_orders(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_cross_track_openorders", data)

    def isolated_get_track_his_orders(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_track_hisorders", data)

    def cross_get_track_his_orders(self, data: dict = None) -> json:
        return self.__post("/linear-swap-api/v1/swap_cross_track_hisorders", data)
| [
"huobi.utils.http.post"
] | [((423, 490), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (427, 490), False, 'from huobi.utils.http import post\n'), ((623, 690), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (627, 690), False, 'from huobi.utils.http import post\n'), ((822, 889), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (826, 889), False, 'from huobi.utils.http import post\n'), ((1024, 1091), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (1028, 1091), False, 'from huobi.utils.http import post\n'), ((1230, 1297), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (1234, 1297), False, 'from huobi.utils.http import post\n'), ((1439, 1506), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (1443, 1506), False, 'from huobi.utils.http import post\n'), ((1651, 1718), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (1655, 1718), False, 'from huobi.utils.http import post\n'), ((1866, 1933), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, 
data)\n', (1870, 1933), False, 'from huobi.utils.http import post\n'), ((2076, 2143), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (2080, 2143), False, 'from huobi.utils.http import post\n'), ((2289, 2356), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (2293, 2356), False, 'from huobi.utils.http import post\n'), ((2488, 2555), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (2492, 2555), False, 'from huobi.utils.http import post\n'), ((2690, 2757), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (2694, 2757), False, 'from huobi.utils.http import post\n'), ((2891, 2958), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (2895, 2958), False, 'from huobi.utils.http import post\n'), ((3095, 3162), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (3099, 3162), False, 'from huobi.utils.http import post\n'), ((3303, 3370), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (3307, 3370), False, 'from huobi.utils.http import post\n'), ((3514, 3581), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 
'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (3518, 3581), False, 'from huobi.utils.http import post\n'), ((3728, 3795), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (3732, 3795), False, 'from huobi.utils.http import post\n'), ((3945, 4012), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (3949, 4012), False, 'from huobi.utils.http import post\n'), ((4157, 4224), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (4161, 4224), False, 'from huobi.utils.http import post\n'), ((4372, 4439), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (4376, 4439), False, 'from huobi.utils.http import post\n'), ((4593, 4660), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (4597, 4660), False, 'from huobi.utils.http import post\n'), ((4817, 4884), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (4821, 4884), False, 'from huobi.utils.http import post\n'), ((5024, 5091), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (5028, 5091), False, 'from huobi.utils.http import post\n'), ((5228, 5295), 'huobi.utils.http.post', 
'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (5232, 5295), False, 'from huobi.utils.http import post\n'), ((5431, 5498), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (5435, 5498), False, 'from huobi.utils.http import post\n'), ((5637, 5704), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (5641, 5704), False, 'from huobi.utils.http import post\n'), ((5847, 5914), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (5851, 5914), False, 'from huobi.utils.http import post\n'), ((6060, 6127), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (6064, 6127), False, 'from huobi.utils.http import post\n'), ((6276, 6343), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (6280, 6343), False, 'from huobi.utils.http import post\n'), ((6495, 6562), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (6499, 6562), False, 'from huobi.utils.http import post\n'), ((6709, 6776), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (6713, 6776), False, 'from 
huobi.utils.http import post\n'), ((6926, 6993), 'huobi.utils.http.post', 'post', (['self.__host', 'path', 'self.__access_key', 'self.__secret_key', 'data'], {}), '(self.__host, path, self.__access_key, self.__secret_key, data)\n', (6930, 6993), False, 'from huobi.utils.http import post\n')] |
import csv
import gzip
import json
import re
import sys
from ast import literal_eval
from collections import Counter
from math import exp
import numpy as np
from nltk.stem.porter import PorterStemmer
from nltk.tokenize import word_tokenize
# Sentence containing an <o>...</o> opinion tag: captures text before, inside, after.
OPINION_EXP = re.compile(r"(.*)<o>(.*?)</o>(.*)")
# Sentence containing an <f>...</f> aspect tag: captures text before, inside, after.
ASPECT_EXP = re.compile(r"(.*)<f>(.*?)</f>(.*)")
# Any single-letter tag pair; group 1 is the tagged text (used to strip tags).
TAGGED_EXP = re.compile(r"<\w>(.*?)</\w>")
# A bracketed span such as "[...]".
TARGET_EXP = re.compile(r"\[.*\]")
def readline_gzip(path):
    """Yield the lines of a gzip-compressed text file, one at a time."""
    with gzip.open(path, "rt") as handle:
        yield from handle
def readline(path):
    """Yield the lines of a plain-text file, one at a time."""
    with open(path, "r") as handle:
        yield from handle
def unique(sequence):
    """Return the items of `sequence` de-duplicated, first occurrence winning.

    Items must be hashable, as in the original set-based version.
    """
    # dict preserves insertion order, so this keeps the original ordering.
    return list(dict.fromkeys(sequence))
def to_dict(values):
    """Map each distinct value to its first-seen index (0-based)."""
    value2index = {}
    for item in values:
        # only the first occurrence of each value is assigned an index
        if item not in value2index:
            value2index[item] = len(value2index)
    return value2index
def save_dict(value2index, path):
    """Write `value index` pairs (one per line) to `path`; return the mapping."""
    lines = ["%s %d\n" % (value, index) for value, index in value2index.items()]
    with open(path, "w") as out:
        out.writelines(lines)
    return value2index
def load_dict(path, sep=None):
    """Load a `value index` mapping written by save_dict.

    Each line holds a value and an integer index separated by `sep`
    (any whitespace run when `sep` is None).  Malformed lines -- wrong
    field count or a non-integer index -- are reported and skipped.

    Fix: the original bare `except:` swallowed *every* exception,
    including KeyboardInterrupt/SystemExit and real I/O errors; only a
    ValueError marks a malformed line, so that is all we catch now.
    """
    dic = {}
    with open(path, "r") as f:
        for line in f:
            try:
                [item, index] = line.split(sep)
                dic[item] = int(index)
            except ValueError:
                print("WARN - skipping invalid line: {}".format(line), sys.exc_info())
    return dic
def save_count(values, path):
    """Count occurrences in `values`, write `value count` lines (most common
    first) to `path`, and return the Counter."""
    counts = Counter(values)
    with open(path, "w") as out:
        out.writelines("%s %d\n" % pair for pair in counts.most_common())
    return counts
def load_count(path, sep=None, dtypeKey=""):
    """Load `value count` lines written by save_count back into a Counter.

    When dtypeKey == "int" the keys are converted to ints as well.
    Malformed lines (wrong field count, non-numeric text) are reported
    and skipped.

    Fix: the original bare `except:` swallowed *every* exception; only a
    ValueError marks a malformed line, so that is all we catch now.
    """
    counts = Counter()
    with open(path, "r") as f:
        for line in f:
            try:
                [w, count] = line.strip().split(sep)
                if dtypeKey == "int":
                    w = int(w)
                counts[w] = int(count)
            except ValueError:
                print("WARN - skipping invalid line: {}".format(line), sys.exc_info())
    return counts
def reverse_key(key_value):
    """Invert a mapping (values become keys); later duplicates win."""
    return dict(zip(key_value.values(), key_value.keys()))
def parse_sentence(sentence, opinion, aspect):
    """Tag and normalize one opinion/aspect pair inside a review sentence.

    Pipeline (order matters -- each substitution feeds the next):
      1. Wrap the first occurrence of `opinion` in <o>...</o> and of
         `aspect` in <f>...</f>; if the surface form is absent, retry with
         the Porter-stemmed form.
      2. Join multi-word opinion/aspect phrases with underscores so they
         survive tokenization as a single token.
      3. Repair tags split by a hyphenated suffix, strip a trailing "(123)"
         counter, and lowercase.
      4. Recompute the (token-based) positions of the tagged words, then
         strip the tags and re-join the tokens.

    Returns a 6-tuple:
        (sentence, sentence_len, opinion_pos, opinion, aspect_pos, aspect)
    where the positions are None when the corresponding term was not found.

    NOTE(review): `opinion`/`aspect` are interpolated unescaped into regex
    patterns -- terms containing regex metacharacters (e.g. "(", "+") would
    break the match; presumably inputs are plain words. TODO confirm.
    """
    stemmer = PorterStemmer()
    # Tag the first literal occurrence of the opinion term.
    sentence = re.sub(
        re.compile("(^| )({})".format(opinion)), r"\1<o>\2</o>", sentence, 1
    )
    if not OPINION_EXP.match(sentence):
        # Surface form not present: fall back to the stemmed form.
        sentence = re.sub(
            re.compile("(^| )({})".format(stemmer.stem(opinion))),
            r"\1<o>\2</o>",
            sentence,
            1,
        )
    # Same two-step tagging for the aspect term.
    sentence = re.sub(
        re.compile("(^| )({})".format(aspect)), r"\1<f>\2</f>", sentence, 1
    )
    if not ASPECT_EXP.match(sentence):
        sentence = re.sub(
            re.compile("(^| )({})".format(stemmer.stem(aspect))),
            r"\1<f>\2</f>",
            sentence,
            1,
        )
    # Collapse multi-word phrases ("battery life" -> "battery_life") inside the tags.
    sentence = re.sub(
        re.compile("<o>{}</o>".format(opinion)),
        "<o>{}</o>".format("_".join(opinion.split(" "))),
        sentence,
    )
    sentence = re.sub(
        re.compile("<f>{}</f>".format(aspect)),
        "<f>{}</f>".format("_".join(aspect.split(" "))),
        sentence,
    )
    # Pull a hyphenated suffix back inside the tag: "<o>easy</o>-to-use" -> "<o>easy-to-use</o>".
    sentence = re.sub(r"(<\w?>[ \w]+)(</\w?>)([-\w]+)", r"\1\3\2", sentence)
    # Drop a trailing "(NN)" counter, trim, lowercase.
    sentence = re.sub(r"\(\d+\)$", "", sentence).strip().lower()
    opinion_pos = None
    aspect_pos = None
    opinion_segments = OPINION_EXP.match(sentence)
    if opinion_segments is not None:
        # Position = number of tokens before the tag (tags stripped first).
        opinion_pos = len(
            word_tokenize(re.sub(TAGGED_EXP, r"\1", opinion_segments.group(1)))
        )
        opinion = opinion_segments.group(2)
    aspect_segments = ASPECT_EXP.match(sentence)
    if aspect_segments is not None:
        aspect_pos = len(
            word_tokenize(re.sub(TAGGED_EXP, r"\1", aspect_segments.group(1)))
        )
        aspect = aspect_segments.group(2)
    # Final canonical form: tags removed, re-tokenized and space-joined.
    tokens = word_tokenize(re.sub(TAGGED_EXP, r"\1", sentence))
    sentence_len = len(tokens)
    sentence = " ".join(tokens)
    return sentence, sentence_len, opinion_pos, opinion, aspect_pos, aspect
def to_one_hot(idx, size, value=1.0):
    """Return a float32 vector of `size` zeros with `value` at position `idx`.

    `idx` may arrive as a numeric string or float; it is truncated to int.
    """
    vector = np.zeros(size, dtype=np.float32)
    vector[int(float(idx))] = value
    return vector
def flatten_json(json_content):
    """Flatten one level of nesting: {"a": {"b": 1}} -> {"a_b": 1}.

    Non-dict values are copied through unchanged; deeper nesting is not
    flattened further.
    """
    flat = {}
    for key, value in json_content.items():
        if isinstance(value, dict):
            for sub_key, sub_value in value.items():
                flat["{}_{}".format(key, sub_key)] = sub_value
        else:
            flat[key] = value
    return flat
def dict_to_csv(json_content, path):
    """Write a flat dict as a one-row CSV file: keys as header, values as row."""
    fieldnames = list(json_content)
    with open(path, "w") as out:
        writer = csv.DictWriter(out, fieldnames=fieldnames)
        writer.writeheader()
        writer.writerow(json_content)
def dump_json(json_content, path):
    """Serialize `json_content` to a JSON file at `path`."""
    with open(path, "w") as out:
        json.dump(json_content, out)
def load_json(path):
    """Parse and return the JSON document stored at `path`."""
    with open(path, "r") as handle:
        return json.load(handle)
def export_spare_matrix(M, path, sep="\t"):
    """Write a 2-D matrix to `path` in sparse triplet form.

    First line: `rows<sep>cols<sep>nnz`; then one `i<sep>j<sep>value`
    line per non-zero entry, in row-major order.

    Fix: the `sep` parameter was accepted but ignored -- every line was
    hard-coded to tabs.  It is now honoured; the default still produces
    byte-identical tab-separated output.
    """
    assert len(M.shape) == 2
    (d1, d2) = M.shape
    # one shared template so header and triplets always agree on the separator
    line_fmt = sep.join(("{}", "{}", "{}")) + "\n"
    with open(path, "w") as f:
        f.write(line_fmt.format(d1, d2, np.count_nonzero(M)))
        for i in range(d1):
            for j in range(d2):
                if M[i, j] != 0:
                    f.write(line_fmt.format(i, j, M[i, j]))
def export_dense_matrix(M, path):
    """Write a 2-D matrix: a "Dimension: R x C" header, then one bracketed,
    tab-separated row per line."""
    assert len(M.shape) == 2
    n_rows, n_cols = M.shape
    with open(path, "w") as out:
        out.write("Dimension: {} x {}\n".format(n_rows, n_cols))
        for row in M:
            out.write("[{}]\n".format("\t".join(str(cell) for cell in row)))
def load_sparse_matrix(path):
    """Read a triplet file (as written by export_spare_matrix) into a dense
    numpy array."""
    with open(path, "r") as src:
        header = src.readline().strip().split()
        assert len(header) == 3
        rows, cols, nnz = int(header[0]), int(header[1]), int(header[2])
        matrix = np.zeros((rows, cols))
        for _ in range(nnz):
            fields = src.readline().strip().split()
            assert len(fields) == 3
            matrix[int(fields[0])][int(fields[1])] = float(fields[2])
        return matrix
def load_dense_matrix(path):
    """Read a matrix written by export_dense_matrix into a numpy array."""
    rows_out = []
    with open(path, "r") as src:
        dims = src.readline().split(":")[1].split("x")
        assert len(dims) == 2
        n_rows, n_cols = int(dims[0]), int(dims[1])
        for _ in range(n_rows):
            # strip the surrounding brackets, then split on whitespace
            cells = src.readline().strip()[1:-1].split()
            assert len(cells) == n_cols
            rows_out.append([float(cell) for cell in cells])
    return np.array(rows_out)
def export_dense_tensor(T, path):
    """Write a 3-D tensor: a "Dimension: A x B x C" header, then one line per
    outer slice, with bracketed tab-separated rows joined by commas."""
    assert len(T.shape) == 3
    n1, n2, n3 = T.shape
    with open(path, "w") as out:
        out.write("Dimension: {} x {} x {}\n".format(n1, n2, n3))
        for slab in T:
            rows = ["[{}]".format("\t".join(str(v) for v in row)) for row in slab]
            out.write("{}\n".format(",".join(rows)))
def load_dense_tensor(path):
    """Read a tensor written by export_dense_tensor into a numpy array."""
    slabs = []
    with open(path, "r") as src:
        dims = src.readline().split(":")[1].split("x")
        assert len(dims) == 3
        d1, d2, d3 = int(dims[0]), int(dims[1]), int(dims[2])
        for _ in range(d1):
            row_strs = src.readline().strip().split(",")
            # each row looks like "[v\tv...]"; strip brackets then split
            slab = [[float(v) for v in row_strs[j][1:-1].split()] for j in range(d2)]
            slabs.append(slab)
    return np.array(slabs)
def empty_file(path):
    """Truncate (or create) the file at `path` to zero length."""
    # opening in "w" mode already truncates; nothing needs to be written
    with open(path, "w"):
        pass
def frequent_score(cnt, N):
    """Squash a raw count into the range [1, N) with a shifted sigmoid:
    cnt == 0 gives 1; large counts approach N."""
    sigmoid = 1 / (1 + exp(-cnt))
    return 1 + (N - 1) * (2 * sigmoid - 1)
def sentiment_score(sentiment, N):
    """Map a raw sentiment value onto (1, N) with a logistic curve:
    sentiment == 0 gives the midpoint 1 + (N - 1) / 2."""
    logistic = 1 + exp(-sentiment)
    return 1 + (N - 1) / logistic
def lcs(a, b):
    """Return the longest common subsequence of sequences `a` and `b` as a list.

    Classic O(len(a) * len(b)) dynamic program: lengths[i][j] holds the LCS
    length of a[:i] and b[:j]; the subsequence itself is recovered by
    backtracking from the bottom-right corner.  When several subsequences of
    maximal length exist, the tie-breaking order of the backtrack (prefer
    moving up, then left) determines which one is returned.
    """
    lengths = [[0 for j in range(len(b) + 1)] for i in range(len(a) + 1)]
    # row 0 and column 0 are initialized to 0 already
    for i, x in enumerate(a):
        for j, y in enumerate(b):
            if x == y:
                lengths[i + 1][j + 1] = lengths[i][j] + 1
            else:
                lengths[i + 1][j + 1] = max(lengths[i + 1][j], lengths[i][j + 1])
    # read the subsequence out from the matrix
    result = []
    x, y = len(a), len(b)
    while x != 0 and y != 0:
        if lengths[x][y] == lengths[x - 1][y]:
            # same length without a[x-1]: it is not part of this LCS
            x -= 1
        elif lengths[x][y] == lengths[x][y - 1]:
            # same length without b[y-1]: skip it
            y -= 1
        else:
            # both neighbours are strictly smaller: this cell closed a match
            assert a[x - 1] == b[y - 1]
            result.append(a[x - 1])
            x -= 1
            y -= 1
    # matches were collected back-to-front; reverse into original order
    return result[::-1]
def array2string(x):
    """Serialize a 1-D sequence as "a,b,..." or a 2-D one as rows joined by ';'.

    Fix: a 0-d (scalar) input passed the `assert` but fell through both
    branches and silently returned None; it now returns str(x).  The shape
    is also computed once instead of up to three times.
    """
    ndim = len(np.shape(x))
    assert ndim <= 2
    if ndim == 0:
        return str(x)
    if ndim == 1:
        return ",".join(str(item) for item in x)
    return ";".join(array2string(row) for row in x)
def string2array(x):
    """Inverse of array2string: ';' splits rows, ',' splits cells.

    A string without ';' decodes to a flat list.  Cells remain strings;
    no numeric conversion is performed.
    """
    rows = x.split(";")
    if len(rows) > 1:
        return [row.split(",") for row in rows]
    return x.split(",")
def substitute_word(sentence, new_word, position):
    """Replace the whitespace-delimited token at `position` with `new_word`
    and return the rebuilt sentence (single-space joined)."""
    tokens = sentence.split()
    tokens[position] = new_word
    return " ".join(tokens)
def convert_str_to_list(cell):
    """Safely parse a Python-literal string (e.g. "[1, 2]") into the
    corresponding object via ast.literal_eval (no arbitrary code execution)."""
    return literal_eval(cell)
| [
"re.compile",
"gzip.open",
"collections.Counter",
"numpy.array",
"nltk.stem.porter.PorterStemmer",
"ast.literal_eval",
"numpy.zeros",
"numpy.count_nonzero",
"sys.exc_info",
"json.load",
"re.sub",
"numpy.shape",
"json.dump",
"math.exp"
] | [((256, 290), 're.compile', 're.compile', (['"""(.*)<o>(.*?)</o>(.*)"""'], {}), "('(.*)<o>(.*?)</o>(.*)')\n", (266, 290), False, 'import re\n'), ((305, 339), 're.compile', 're.compile', (['"""(.*)<f>(.*?)</f>(.*)"""'], {}), "('(.*)<f>(.*?)</f>(.*)')\n", (315, 339), False, 'import re\n'), ((354, 384), 're.compile', 're.compile', (['"""<\\\\w>(.*?)</\\\\w>"""'], {}), "('<\\\\w>(.*?)</\\\\w>')\n", (364, 384), False, 'import re\n'), ((397, 419), 're.compile', 're.compile', (['"""\\\\[.*\\\\]"""'], {}), "('\\\\[.*\\\\]')\n", (407, 419), False, 'import re\n'), ((1515, 1530), 'collections.Counter', 'Counter', (['values'], {}), '(values)\n', (1522, 1530), False, 'from collections import Counter\n'), ((1730, 1739), 'collections.Counter', 'Counter', ([], {}), '()\n', (1737, 1739), False, 'from collections import Counter\n'), ((2238, 2253), 'nltk.stem.porter.PorterStemmer', 'PorterStemmer', ([], {}), '()\n', (2251, 2253), False, 'from nltk.stem.porter import PorterStemmer\n'), ((3202, 3268), 're.sub', 're.sub', (['"""(<\\\\w?>[ \\\\w]+)(</\\\\w?>)([-\\\\w]+)"""', '"""\\\\1\\\\3\\\\2"""', 'sentence'], {}), "('(<\\\\w?>[ \\\\w]+)(</\\\\w?>)([-\\\\w]+)', '\\\\1\\\\3\\\\2', sentence)\n", (3208, 3268), False, 'import re\n'), ((6437, 6453), 'numpy.array', 'np.array', (['result'], {}), '(result)\n', (6445, 6453), True, 'import numpy as np\n'), ((9018, 9036), 'ast.literal_eval', 'literal_eval', (['cell'], {}), '(cell)\n', (9030, 9036), False, 'from ast import literal_eval\n'), ((455, 476), 'gzip.open', 'gzip.open', (['path', '"""rt"""'], {}), "(path, 'rt')\n", (464, 476), False, 'import gzip\n'), ((3894, 3929), 're.sub', 're.sub', (['TAGGED_EXP', '"""\\\\1"""', 'sentence'], {}), "(TAGGED_EXP, '\\\\1', sentence)\n", (3900, 3929), False, 'import re\n'), ((4790, 4816), 'json.dump', 'json.dump', (['json_content', 'f'], {}), '(json_content, f)\n', (4799, 4816), False, 'import json\n'), ((4886, 4898), 'json.load', 'json.load', (['f'], {}), '(f)\n', (4895, 4898), False, 'import 
json\n'), ((5772, 5788), 'numpy.zeros', 'np.zeros', (['(r, c)'], {}), '((r, c))\n', (5780, 5788), True, 'import numpy as np\n'), ((7402, 7418), 'numpy.array', 'np.array', (['result'], {}), '(result)\n', (7410, 7418), True, 'import numpy as np\n'), ((4124, 4138), 'numpy.zeros', 'np.zeros', (['size'], {}), '(size)\n', (4132, 4138), True, 'import numpy as np\n'), ((8491, 8502), 'numpy.shape', 'np.shape', (['x'], {}), '(x)\n', (8499, 8502), True, 'import numpy as np\n'), ((8520, 8531), 'numpy.shape', 'np.shape', (['x'], {}), '(x)\n', (8528, 8531), True, 'import numpy as np\n'), ((5074, 5093), 'numpy.count_nonzero', 'np.count_nonzero', (['M'], {}), '(M)\n', (5090, 5093), True, 'import numpy as np\n'), ((7642, 7657), 'math.exp', 'exp', (['(-sentiment)'], {}), '(-sentiment)\n', (7645, 7657), False, 'from math import exp\n'), ((8597, 8608), 'numpy.shape', 'np.shape', (['x'], {}), '(x)\n', (8605, 8608), True, 'import numpy as np\n'), ((3279, 3314), 're.sub', 're.sub', (['"""\\\\(\\\\d+\\\\)$"""', '""""""', 'sentence'], {}), "('\\\\(\\\\d+\\\\)$', '', sentence)\n", (3285, 3314), False, 'import re\n'), ((1439, 1453), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (1451, 1453), False, 'import sys\n'), ((2063, 2077), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (2075, 2077), False, 'import sys\n'), ((7559, 7568), 'math.exp', 'exp', (['(-cnt)'], {}), '(-cnt)\n', (7562, 7568), False, 'from math import exp\n')] |
# -*- coding: utf-8 -*-
"""
Created on Thu Dec 20 15:39:17 2018
@author: Stephanie
"""
"""
brief : Remote Call Procedure / Server procedure
Listen to the requests dispatched and send the response back on the specified queue
args :
Return :
Raises :
"""
import pika
import os
import amqp
import msgpack
import msgpack_numpy as m
import sys
### Establish the broker connection and declare the request queue.
# NOTE(review): `amqp` here is a local module holding the CloudAMQP URL in
# `amqp.key` (a credential kept out of source control) -- confirm.
amqp_url = amqp.key
#print(amqp_url);
# Prefer the CLOUDAMQP_URL environment variable; fall back to the bundled key.
url = os.environ.get('CLOUDAMQP_URL',amqp_url)
params = pika.URLParameters(url)
params.socket_timeout = 15
connection = pika.BlockingConnection(params)
channel= connection.channel()
# Idempotent: creates 'rpc_queue' if it does not already exist.
channel.queue_declare(queue='rpc_queue')
# RPC request handler: decode the msgpack payload and publish the reply.
def on_request(ch,method,props,body):
    """Handle one RPC request.

    Logs payload sizes, unpacks the msgpack/numpy-encoded body, publishes a
    fixed reply on the queue named in `props.reply_to` (tagged with the
    caller's correlation id so the client can match it), then acks the
    request so the broker will not redeliver it.
    """
    print("Encoded payload size:",sys.getsizeof(body))
    # msgpack_numpy's decode hook restores numpy arrays embedded in the payload
    decoded_message = str(msgpack.unpackb(body, object_hook=m.decode))
    print("Decoded payload size:",sys.getsizeof(decoded_message))
    print('Request is ', decoded_message);
    # Placeholder response -- real work would be computed from the request here.
    response ='Fine and you ?'
    ch.basic_publish(exchange='',
                     routing_key=props.reply_to,
                     body=str(response),
                     properties=pika.BasicProperties(correlation_id = props.correlation_id)
                     )
    # Ack only after the reply is published, so a crash re-queues the request.
    ch.basic_ack(delivery_tag=method.delivery_tag)
# Run the consumer loop.
channel.basic_qos(prefetch_count=1)#load balancing if multiple servers
# NOTE(review): positional-callback basic_consume is the pika<1.0 signature;
# pika>=1.0 expects basic_consume(queue=..., on_message_callback=...) -- confirm version.
channel.basic_consume(on_request,queue='rpc_queue') #executed when a request is received
print("Waiting for message. To exit press CTRL+C")
channel.start_consuming() ## blocking consume loop; runs until interrupted
"sys.getsizeof",
"pika.URLParameters",
"pika.BlockingConnection",
"os.environ.get",
"msgpack.unpackb",
"pika.BasicProperties"
] | [((455, 496), 'os.environ.get', 'os.environ.get', (['"""CLOUDAMQP_URL"""', 'amqp_url'], {}), "('CLOUDAMQP_URL', amqp_url)\n", (469, 496), False, 'import os\n'), ((505, 528), 'pika.URLParameters', 'pika.URLParameters', (['url'], {}), '(url)\n', (523, 528), False, 'import pika\n'), ((569, 600), 'pika.BlockingConnection', 'pika.BlockingConnection', (['params'], {}), '(params)\n', (592, 600), False, 'import pika\n'), ((769, 788), 'sys.getsizeof', 'sys.getsizeof', (['body'], {}), '(body)\n', (782, 788), False, 'import sys\n'), ((816, 859), 'msgpack.unpackb', 'msgpack.unpackb', (['body'], {'object_hook': 'm.decode'}), '(body, object_hook=m.decode)\n', (831, 859), False, 'import msgpack\n'), ((895, 925), 'sys.getsizeof', 'sys.getsizeof', (['decoded_message'], {}), '(decoded_message)\n', (908, 925), False, 'import sys\n'), ((1163, 1220), 'pika.BasicProperties', 'pika.BasicProperties', ([], {'correlation_id': 'props.correlation_id'}), '(correlation_id=props.correlation_id)\n', (1183, 1220), False, 'import pika\n')] |
# Created by Kelvin_Clark on 2/1/2022, 2:16 PM
from app.utils.security.jwt import create_access_token, get_token_data
def test_create_token():
    """A token is produced for a simple payload."""
    payload = {"payload": "User data"}
    issued = create_access_token(data=payload)
    assert issued is not None
def test_verify_token():
    """Decoding a freshly issued token round-trips the payload."""
    payload = {"payload": "User data"}
    issued = create_access_token(data=payload)
    decoded = get_token_data(token=issued)
    assert decoded is not None
    assert "payload" in decoded
    assert decoded["payload"] == payload["payload"]
def test_tempered_token():
    """A token altered after issue must fail validation."""
    payload = {"payload": "User data"}
    issued = create_access_token(data=payload)
    # tamper: drop the last 5 chars, upper-case the rest, append junk
    tampered = issued[:-5].upper() + "<PASSWORD>"
    assert get_token_data(tampered) is False
| [
"app.utils.security.jwt.create_access_token",
"app.utils.security.jwt.get_token_data"
] | [((193, 223), 'app.utils.security.jwt.create_access_token', 'create_access_token', ([], {'data': 'data'}), '(data=data)\n', (212, 223), False, 'from app.utils.security.jwt import create_access_token, get_token_data\n'), ((328, 358), 'app.utils.security.jwt.create_access_token', 'create_access_token', ([], {'data': 'data'}), '(data=data)\n', (347, 358), False, 'from app.utils.security.jwt import create_access_token, get_token_data\n'), ((376, 403), 'app.utils.security.jwt.get_token_data', 'get_token_data', ([], {'token': 'token'}), '(token=token)\n', (390, 403), False, 'from app.utils.security.jwt import create_access_token, get_token_data\n'), ((620, 650), 'app.utils.security.jwt.create_access_token', 'create_access_token', ([], {'data': 'data'}), '(data=data)\n', (639, 650), False, 'from app.utils.security.jwt import create_access_token, get_token_data\n'), ((755, 776), 'app.utils.security.jwt.get_token_data', 'get_token_data', (['token'], {}), '(token)\n', (769, 776), False, 'from app.utils.security.jwt import create_access_token, get_token_data\n')] |
import boto3
from botocore.config import Config
def s3_client():
    """Build an S3 client routed through the local Secretless proxy.

    Credentials and endpoint are placeholders -- the proxy on port 8099
    injects the real connection details.
    """
    proxy_config = Config(proxies={'http': 'http://localhost:8099'})
    return boto3.client(
        's3',
        aws_access_key_id="secretless",
        region_name="us-east-1",
        aws_secret_access_key="secretless",
        endpoint_url="http://secretless.empty",
        config=proxy_config,
    )
def list_buckets():
    """Print every bucket record returned by ListBuckets (returns None)."""
    response = s3_client().list_buckets()
    for bucket in response["Buckets"]:
        print(bucket)
def create_bucket():
    """Create the hard-coded demo bucket and return the API response."""
    client = s3_client()
    return client.create_bucket(Bucket="polo202214")
if __name__ == '__main__':
    # create_bucket()
    # NOTE(review): list_buckets() prints the buckets and returns None,
    # so this line also prints a trailing "None".
    print(list_buckets())
| [
"botocore.config.Config"
] | [((345, 394), 'botocore.config.Config', 'Config', ([], {'proxies': "{'http': 'http://localhost:8099'}"}), "(proxies={'http': 'http://localhost:8099'})\n", (351, 394), False, 'from botocore.config import Config\n')] |
from hashlib import md5
from project.src.core.entities.address import Address
from project.src.core.entities.contacts import ContactParameters, Contact
from project.src.core.entities.email import Email
from project.src.core.entities.name import Name
from project.src.core.entities.phones import Phone
def _create_id(contact: ContactParameters) -> str:
    """Derive a deterministic contact id: the MD5 hex digest of
    "<first+last>:<email>:<first phone number>"."""
    fingerprint = "{}:{}:{}".format(
        contact.firstName + contact.lastName,
        contact.email,
        contact.phoneList[0].number,
    )
    return md5(fingerprint.encode()).hexdigest()
def transform_parameters_to_contact(contact_parameters: ContactParameters) -> Contact:
    """Map incoming ContactParameters onto a domain Contact entity,
    deriving its id from the name/email/first-phone fingerprint."""
    phones = [
        Phone(type=entry.type, number=entry.number)
        for entry in contact_parameters.phoneList
    ]
    return Contact(
        contactId=_create_id(contact_parameters),
        name=Name(
            lastName=contact_parameters.lastName,
            firstName=contact_parameters.firstName,
        ),
        email=Email(email=contact_parameters.email),
        phoneList=phones,
        address=Address(full_address=contact_parameters.address),
    )
| [
"project.src.core.entities.name.Name",
"project.src.core.entities.address.Address",
"project.src.core.entities.email.Email",
"project.src.core.entities.phones.Phone"
] | [((709, 796), 'project.src.core.entities.name.Name', 'Name', ([], {'lastName': 'contact_parameters.lastName', 'firstName': 'contact_parameters.firstName'}), '(lastName=contact_parameters.lastName, firstName=contact_parameters.\n firstName)\n', (713, 796), False, 'from project.src.core.entities.name import Name\n'), ((842, 879), 'project.src.core.entities.email.Email', 'Email', ([], {'email': 'contact_parameters.email'}), '(email=contact_parameters.email)\n', (847, 879), False, 'from project.src.core.entities.email import Email\n'), ((1060, 1108), 'project.src.core.entities.address.Address', 'Address', ([], {'full_address': 'contact_parameters.address'}), '(full_address=contact_parameters.address)\n', (1067, 1108), False, 'from project.src.core.entities.address import Address\n'), ((922, 965), 'project.src.core.entities.phones.Phone', 'Phone', ([], {'type': 'phone.type', 'number': 'phone.number'}), '(type=phone.type, number=phone.number)\n', (927, 965), False, 'from project.src.core.entities.phones import Phone\n')] |
#! /bin/python3
"""
import_csv_data.py will structure the data from a CBORD CSV dump
data output will be stored into memory, and then uploaded into MongoDB
Created by: <NAME> <EMAIL>
Last Edited: May 10, 2017
"""
import os
from datetime import datetime as dt
from pprint import pprint
from pymongo import MongoClient
# Path to the CBORD CSV dump to import (hard-coded for the kiosk machine).
filepath = "/home/kiosk/Downloads/fulldata_05021517.csv"
# figure out when the file was modified (i.e. how old the data is)
editTime = dt.fromtimestamp(os.path.getmtime(filepath))
print("Edited time:", editTime)
# Parsed user records keyed by 7-digit student ID; filled while reading the CSV.
data = {}
def generate_empty_user(userID):
    """Return a fresh user record dict with default/empty values for userID.

    The 'updated' field is stamped with the current time; all other fields
    start empty/zero and are filled in while parsing the CSV dump.
    """
    empty_plan = {
        "max": 0,
        "isWeekly": False,
        "count": 0,
        "planName": "None",
    }
    return {
        "lastName": "",
        "mealPlan": empty_plan,
        "bonusBucks": 0,
        "uName": "",
        "firstName": "",
        "uID": userID,
        "updated": dt.now(),
        "isLiveData": False,
        "name": ",",
    }
def find_max_meals(meal_plan_name):
    """Return the meal count embedded in a plan name, e.g. "CD-100" -> 100.

    The first contiguous run of digit characters in the name is parsed as an
    int, so a leading '-' is ignored ("CD-100" reads as 100, not -100).

    Returns:
        int: the parsed number, or the string "?" when the name contains no
        digits.

    BUGFIX: the previous implementation never terminated its outer scan loop
    when the name contained no digit at all - the IndexError handler set
    ``maxx = "?"`` but neither returned nor cleared the loop flag, so the
    same IndexError fired forever. This rewrite scans once and always ends.
    """
    digits = ""
    for ch in meal_plan_name:
        if ch.isdigit():
            digits += ch
        elif digits:
            # End of the first digit run - stop scanning.
            break
    return int(digits) if digits else "?"
# Read the CBORD CSV dump line by line and merge each row into `data`,
# keyed by 7-digit student ID. 'Board' rows describe the meal plan,
# 'Bonus Bucks' rows carry the declining-balance amount.
with open(filepath, 'r') as f:
    for line in f:
        # Break up the CSV by field after removing whitespace on both ends.
        parts = line.strip().split(',')
        # Each row must have exactly 7 fields; anything else is malformed.
        if len(parts) != 7:
            print("Skipping line", parts)
            continue
        # Concurrent assignment; every variable is still a string here.
        uID, email, planName, dataType, count, firstName, lastName = [x.strip() for x in parts]
        # CBORD database output trims leading zeros, but IDs must be 7 digits.
        uID = uID.zfill(7)
        if uID not in data:
            data[uID] = generate_empty_user(uID)
        # Extract the username from the email address.
        data[uID]['uName'] = email.split('@')[0]
        data[uID]['lastName'] = lastName
        data[uID]['firstName'] = firstName
        # Somewhat redundant, but our database schema calls for it.
        data[uID]['name'] = lastName + ',' + firstName
        if dataType == 'Board':
            # Row describes a meal plan, not bonus bucks.
            if 'Default' in planName:
                # BUGFIX: `name` was undefined here (NameError on the first
                # 'Default' row); report the stored "last,first" name instead.
                print("No meal plan for", data[uID]['name'])
            else:
                data[uID]['mealPlan'] = {
                    'count': int(count),
                    'isWeekly': "week" in planName or "Any" in planName,
                    'max': find_max_meals(planName),
                    'planName': planName
                }
        # If not Board, probably Bonus Bucks.
        elif dataType == 'Bonus Bucks':
            data[uID]['bonusBucks'] = float(count)
        # editTime is when the file was most recently updated.
        data[uID]['updated'] = editTime
        # Currently always False because we are not getting live data.
        data[uID]['isLiveData'] = False
print("Finished reading values, connecting to mongoDB now")
# Connect to the local MongoDB instance.
client = MongoClient('mongodb://localhost:27017/')
# kiosk database, records collection
coll = client['kiosk']['records']
# Bulk operations go faster than one round trip per document.
bulk = coll.initialize_unordered_bulk_op()
for uID, record in data.items():
    # Replace any existing document with the same uID.
    # Note: this assumes the dump contains all the data we want per user; a
    # record with only Bonus Bucks data will reset meals to 0 for that user.
    # Switching to $set instead of replace_one would avoid that.
    bulk.find({'uID': uID}).upsert().replace_one(record)
result = bulk.execute()
# 'upserted' is a huge list of documents and ObjectIds; drop it before
# printing. pop() with a default replaces the previous bare `except: pass`,
# which silently swallowed every possible error, not just a missing key.
result.pop('upserted', None)
pprint(result)
| [
"pymongo.MongoClient",
"datetime.datetime.now",
"os.path.getmtime",
"pprint.pprint"
] | [((4750, 4791), 'pymongo.MongoClient', 'MongoClient', (['"""mongodb://localhost:27017/"""'], {}), "('mongodb://localhost:27017/')\n", (4761, 4791), False, 'from pymongo import MongoClient\n'), ((5538, 5552), 'pprint.pprint', 'pprint', (['result'], {}), '(result)\n', (5544, 5552), False, 'from pprint import pprint\n'), ((469, 495), 'os.path.getmtime', 'os.path.getmtime', (['filepath'], {}), '(filepath)\n', (485, 495), False, 'import os\n'), ((828, 836), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (834, 836), True, 'from datetime import datetime as dt\n')] |
# Copyright (c) 2012-2020 Jicamarca Radio Observatory
# All rights reserved.
#
# Distributed under the terms of the BSD 3-clause license.
"""Base class to create plot operations
"""
import os
import sys
import zmq
import time
import numpy
import datetime
from collections import deque
from functools import wraps
from threading import Thread
import matplotlib
# Pick a matplotlib backend: explicit env override first, then a sensible
# per-platform interactive default, falling back to the headless Agg backend.
if 'BACKEND' in os.environ:
    matplotlib.use(os.environ['BACKEND'])
elif 'linux' in sys.platform:
    matplotlib.use("TkAgg")
elif 'darwin' in sys.platform:
    matplotlib.use('MacOSX')
else:
    from schainpy.utils import log
    log.warning('Using default Backend="Agg"', 'INFO')
    matplotlib.use('Agg')
import matplotlib.pyplot as plt
from matplotlib.patches import Polygon
from mpl_toolkits.axes_grid1 import make_axes_locatable
from matplotlib.ticker import FuncFormatter, LinearLocator, MultipleLocator
from schainpy.model.data.jrodata import PlotterData
from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
from schainpy.utils import log
# Build the custom 'jro' colormap: a slice of seismic blues prepended to a
# slice of jet, registered so it can be requested by name like any stock map.
jet_values = matplotlib.pyplot.get_cmap('jet', 100)(numpy.arange(100))[10:90]
blu_values = matplotlib.pyplot.get_cmap(
    'seismic_r', 20)(numpy.arange(20))[10:15]
ncmap = matplotlib.colors.LinearSegmentedColormap.from_list(
    'jro', numpy.vstack((blu_values, jet_values)))
matplotlib.pyplot.register_cmap(cmap=ncmap)
# Colormaps selectable by the plot classes (cycled through by index).
CMAPS = [plt.get_cmap(s) for s in ('jro', 'jet', 'viridis',
          'plasma', 'inferno', 'Greys', 'seismic', 'bwr', 'coolwarm')]
# Mean Earth radius in km, used by the lat/lon helpers below.
EARTH_RADIUS = 6.3710e3
def ll2xy(lat1, lon1, lat2, lon2):
    """Project the offset from (lat1, lon1) to (lat2, lon2) into local
    x/y coordinates in km, using the haversine distance and the initial
    bearing between the two points (angles in degrees).
    """
    deg2rad = 0.017453292519943295  # pi / 180
    dlat = (lat2 - lat1) * deg2rad
    dlon = (lon2 - lon1) * deg2rad
    cos_lat1 = numpy.cos(lat1 * deg2rad)
    cos_lat2 = numpy.cos(lat2 * deg2rad)
    # Haversine formula; 12742 km is the Earth's diameter.
    hav = 0.5 - numpy.cos(dlat) / 2 + cos_lat1 * cos_lat2 * (1 - numpy.cos(dlon)) / 2
    dist = 12742 * numpy.arcsin(numpy.sqrt(hav))
    # Initial bearing from point 1 to point 2, rotated so that x points
    # east and y points north.
    y_term = numpy.sin(dlon) * cos_lat2
    x_term = cos_lat1 * numpy.sin(lat2 * deg2rad) - numpy.sin(lat1 * deg2rad) * cos_lat2 * numpy.cos(dlon)
    bearing = -numpy.arctan2(y_term, x_term) + numpy.pi / 2
    return dist * numpy.cos(bearing), dist * numpy.sin(bearing)
def km2deg(km):
    """Convert a distance in km along the Earth's surface to the
    equivalent central angle in degrees.
    """
    return numpy.degrees(km / EARTH_RADIUS)
def figpause(interval):
    """Pause for `interval` seconds while keeping interactive figures
    responsive; a no-op for non-interactive backends.
    """
    if plt.rcParams['backend'] not in matplotlib.rcsetup.interactive_bk:
        return
    manager = matplotlib._pylab_helpers.Gcf.get_active()
    if manager is None:
        return
    canvas = manager.canvas
    if canvas.figure.stale:
        canvas.draw()
    try:
        canvas.start_event_loop(interval)
    except:
        pass
def popup(message):
    """Display `message` in a red full-window figure, one line per
    colon-separated segment, and keep it on screen for 1000 seconds.
    """
    lines = [segment.strip() for segment in message.split(':')]
    fig = plt.figure(figsize=(12, 8), facecolor='r')
    fig.text(0.01, 0.5, '\n'.join(lines), ha='left', va='center',
             size='20', weight='heavy', color='w')
    fig.show()
    figpause(1000)
class Throttle(object):
    '''
    Decorator that prevents a function from being called more than once every
    time period.
    To create a function that cannot be called more than once a minute, but
    will sleep until it can be called:
    @Throttle(minutes=1)
    def foo():
        pass
    for i in range(10):
        foo()
        print "This function has run %s times." % i

    Throttled calls return None. Passing coerce=True to the wrapped function
    forces the call regardless of the elapsed time.
    '''
    def __init__(self, seconds=0, minutes=0, hours=0):
        self.throttle_period = datetime.timedelta(
            seconds=seconds, minutes=minutes, hours=hours
        )
        # datetime.min guarantees the very first call is never throttled.
        self.time_of_last_call = datetime.datetime.min
    def __call__(self, fn):
        @wraps(fn)
        def wrapper(*args, **kwargs):
            force = kwargs.pop('coerce', None)
            now = datetime.datetime.now()
            if not force:
                elapsed = now - self.time_of_last_call
                if self.throttle_period - elapsed > datetime.timedelta(seconds=0):
                    # Still inside the quiet period: drop the call.
                    return
            self.time_of_last_call = now
            return fn(*args, **kwargs)
        return wrapper
def apply_throttle(value):
    """Build a rate-limited invoker: the returned callable takes a
    zero-argument function and runs it at most once every `value` seconds.
    """
    @Throttle(seconds=value)
    def _run_throttled(fn):
        fn()
    return _run_throttled
@MPDecorator
class Plot(Operation):
    """Base class for Schain plotting operations
    This class should never be use directtly you must subclass a new operation,
    children classes must be defined as follow:
    ExamplePlot(Plot):
        CODE = 'code'
        colormap = 'jet'
        plot_type = 'pcolor' # options are ('pcolor', 'pcolorbuffer', 'scatter', 'scatterbuffer')
        def setup(self):
            pass
        def plot(self):
            pass
    """
    CODE = 'Figure'
    colormap = 'jet'
    bgcolor = 'white'
    buffering = True
    # Sentinel value used to mark (and later mask) missing data in fill_gaps.
    __missing = 1E30
    __attrs__ = ['show', 'save', 'ymin', 'ymax', 'zmin', 'zmax', 'title',
                 'showprofile']
    def __init__(self):
        Operation.__init__(self)
        self.isConfig = False
        self.isPlotConfig = False
        self.save_time = 0
        self.sender_time = 0
        self.data = None
        self.firsttime = True
        # Pending timestamps still to be sent to the realtime server.
        self.sender_queue = deque(maxlen=10)
        self.plots_adjust = {'left': 0.125, 'right': 0.9, 'bottom': 0.15, 'top': 0.9, 'wspace': 0.2, 'hspace': 0.2}
    def __fmtTime(self, x, pos):
        '''
        Matplotlib tick formatter: render timestamp `x` as HH:MM.
        '''
        return '{}'.format(self.getDateTime(x).strftime('%H:%M'))
    def __setup(self, **kwargs):
        '''
        Initialize variables from keyword arguments (run once per instance).
        '''
        self.figures = []
        self.axes = []
        self.cb_axes = []
        self.localtime = kwargs.pop('localtime', True)
        self.show = kwargs.get('show', True)
        self.save = kwargs.get('save', False)
        self.save_period = kwargs.get('save_period', 0)
        self.colormap = kwargs.get('colormap', self.colormap)
        self.colormap_coh = kwargs.get('colormap_coh', 'jet')
        self.colormap_phase = kwargs.get('colormap_phase', 'RdBu_r')
        self.colormaps = kwargs.get('colormaps', None)
        self.bgcolor = kwargs.get('bgcolor', self.bgcolor)
        self.showprofile = kwargs.get('showprofile', False)
        self.title = kwargs.get('wintitle', self.CODE.upper())
        self.cb_label = kwargs.get('cb_label', None)
        self.cb_labels = kwargs.get('cb_labels', None)
        self.labels = kwargs.get('labels', None)
        self.xaxis = kwargs.get('xaxis', 'frequency')
        self.zmin = kwargs.get('zmin', None)
        self.zmax = kwargs.get('zmax', None)
        self.zlimits = kwargs.get('zlimits', None)
        self.xmin = kwargs.get('xmin', None)
        self.xmax = kwargs.get('xmax', None)
        self.xrange = kwargs.get('xrange', 12)
        self.xscale = kwargs.get('xscale', None)
        self.ymin = kwargs.get('ymin', None)
        self.ymax = kwargs.get('ymax', None)
        self.yscale = kwargs.get('yscale', None)
        self.xlabel = kwargs.get('xlabel', None)
        self.attr_time = kwargs.get('attr_time', 'utctime')
        self.attr_data = kwargs.get('attr_data', 'data_param')
        self.decimation = kwargs.get('decimation', None)
        self.showSNR = kwargs.get('showSNR', False)
        self.oneFigure = kwargs.get('oneFigure', True)
        self.width = kwargs.get('width', None)
        self.height = kwargs.get('height', None)
        self.colorbar = kwargs.get('colorbar', True)
        self.factors = kwargs.get('factors', [1, 1, 1, 1, 1, 1, 1, 1])
        self.channels = kwargs.get('channels', None)
        self.titles = kwargs.get('titles', [])
        self.polar = False
        self.type = kwargs.get('type', 'iq')
        self.grid = kwargs.get('grid', False)
        self.pause = kwargs.get('pause', False)
        self.save_code = kwargs.get('save_code', self.CODE)
        self.throttle = kwargs.get('throttle', 0)
        self.exp_code = kwargs.get('exp_code', None)
        self.server = kwargs.get('server', False)
        self.sender_period = kwargs.get('sender_period', 60)
        self.tag = kwargs.get('tag', '')
        self.height_index = kwargs.get('height_index', None)
        # Rate-limited wrapper around self.__plot (see run()).
        self.__throttle_plot = apply_throttle(self.throttle)
        # NOTE(review): `code` is computed but never used afterwards.
        code = self.attr_data if self.attr_data else self.CODE
        self.data = PlotterData(self.CODE, self.exp_code, self.localtime)
        if self.server:
            if not self.server.startswith('tcp://'):
                self.server = 'tcp://{}'.format(self.server)
            log.success(
                'Sending to server: {}'.format(self.server),
                self.name
            )
    def __setup_plot(self):
        '''
        Common setup for all figures, here figures and axes are created
        '''
        self.setup()
        self.time_label = 'LT' if self.localtime else 'UTC'
        if self.width is None:
            self.width = 8
        self.figures = []
        self.axes = []
        self.cb_axes = []
        self.pf_axes = []
        self.cmaps = []
        # Profile-axes geometry depends on how many columns of plots we have.
        size = '15%' if self.ncols == 1 else '30%'
        pad = '4%' if self.ncols == 1 else '8%'
        if self.oneFigure:
            # All plots share a single figure.
            if self.height is None:
                self.height = 1.4 * self.nrows + 1
            fig = plt.figure(figsize=(self.width, self.height),
                             edgecolor='k',
                             facecolor='w')
            self.figures.append(fig)
            for n in range(self.nplots):
                ax = fig.add_subplot(self.nrows, self.ncols,
                                     n + 1, polar=self.polar)
                ax.tick_params(labelsize=8)
                ax.firsttime = True
                ax.index = 0
                ax.press = None
                self.axes.append(ax)
                if self.showprofile:
                    cax = self.__add_axes(ax, size=size, pad=pad)
                    cax.tick_params(labelsize=8)
                    self.pf_axes.append(cax)
        else:
            # One figure per plot.
            if self.height is None:
                self.height = 3
            for n in range(self.nplots):
                fig = plt.figure(figsize=(self.width, self.height),
                                 edgecolor='k',
                                 facecolor='w')
                ax = fig.add_subplot(1, 1, 1, polar=self.polar)
                ax.tick_params(labelsize=8)
                ax.firsttime = True
                ax.index = 0
                ax.press = None
                self.figures.append(fig)
                self.axes.append(ax)
                if self.showprofile:
                    cax = self.__add_axes(ax, size=size, pad=pad)
                    cax.tick_params(labelsize=8)
                    self.pf_axes.append(cax)
        for n in range(self.nrows):
            if self.colormaps is not None:
                cmap = plt.get_cmap(self.colormaps[n])
            else:
                cmap = plt.get_cmap(self.colormap)
            # Render masked (missing) cells in the background color.
            cmap.set_bad(self.bgcolor, 1.)
            self.cmaps.append(cmap)
    def __add_axes(self, ax, size='30%', pad='8%'):
        '''
        Add new axes to the given figure
        '''
        divider = make_axes_locatable(ax)
        nax = divider.new_horizontal(size=size, pad=pad)
        ax.figure.add_axes(nax)
        return nax
    def fill_gaps(self, x_buffer, y_buffer, z_buffer):
        '''
        Create a masked array for missing data
        '''
        if x_buffer.shape[0] < 2:
            return x_buffer, y_buffer, z_buffer
        # A time step much larger than the median step means a data gap.
        deltas = x_buffer[1:] - x_buffer[0:-1]
        x_median = numpy.median(deltas)
        index = numpy.where(deltas > 5 * x_median)
        if len(index[0]) != 0:
            z_buffer[::, index[0], ::] = self.__missing
            z_buffer = numpy.ma.masked_inside(z_buffer,
                                               0.99 * self.__missing,
                                               1.01 * self.__missing)
        return x_buffer, y_buffer, z_buffer
    def decimate(self):
        '''
        Return (x, y, z) with the height axis decimated by self.decimation.
        '''
        # dx = int(len(self.x)/self.__MAXNUMX) + 1
        dy = int(len(self.y) / self.decimation) + 1
        # x = self.x[::dx]
        x = self.x
        y = self.y[::dy]
        z = self.z[::, ::, ::dy]
        return x, y, z
    def format(self):
        '''
        Set min and max values, labels, ticks and titles
        '''
        for n, ax in enumerate(self.axes):
            if ax.firsttime:
                # Axis limits: explicit kwargs win, otherwise derive them.
                if self.xaxis != 'time':
                    xmin = self.xmin
                    xmax = self.xmax
                else:
                    xmin = self.tmin
                    xmax = self.tmin + self.xrange*60*60
                    ax.xaxis.set_major_formatter(FuncFormatter(self.__fmtTime))
                    ax.xaxis.set_major_locator(LinearLocator(9))
                ymin = self.ymin if self.ymin is not None else numpy.nanmin(self.y[numpy.isfinite(self.y)])
                ymax = self.ymax if self.ymax is not None else numpy.nanmax(self.y[numpy.isfinite(self.y)])
                ax.set_facecolor(self.bgcolor)
                if self.xscale:
                    ax.xaxis.set_major_formatter(FuncFormatter(
                        lambda x, pos: '{0:g}'.format(x*self.xscale)))
                if self.yscale:
                    ax.yaxis.set_major_formatter(FuncFormatter(
                        lambda x, pos: '{0:g}'.format(x*self.yscale)))
                if self.xlabel is not None:
                    ax.set_xlabel(self.xlabel)
                if self.ylabel is not None:
                    ax.set_ylabel(self.ylabel)
                if self.showprofile:
                    self.pf_axes[n].set_ylim(ymin, ymax)
                    self.pf_axes[n].set_xlim(self.zmin, self.zmax)
                    self.pf_axes[n].set_xlabel('dB')
                    self.pf_axes[n].grid(b=True, axis='x')
                    [tick.set_visible(False)
                     for tick in self.pf_axes[n].get_yticklabels()]
                if self.colorbar:
                    ax.cbar = plt.colorbar(
                        ax.plt, ax=ax, fraction=0.05, pad=0.02, aspect=10)
                    ax.cbar.ax.tick_params(labelsize=8)
                    ax.cbar.ax.press = None
                    if self.cb_label:
                        ax.cbar.set_label(self.cb_label, size=8)
                    elif self.cb_labels:
                        ax.cbar.set_label(self.cb_labels[n], size=8)
                else:
                    ax.cbar = None
                ax.set_xlim(xmin, xmax)
                ax.set_ylim(ymin, ymax)
                ax.firsttime = False
            if self.grid:
                ax.grid(True)
            if not self.polar:
                ax.set_title('{} {} {}'.format(
                    self.titles[n],
                    self.getDateTime(self.data.max_time).strftime(
                        '%Y-%m-%d %H:%M:%S'),
                    self.time_label),
                    size=8)
            else:
                ax.set_title('{}'.format(self.titles[n]), size=8)
                ax.set_ylim(0, 90)
                ax.set_yticks(numpy.arange(0, 90, 20))
                ax.yaxis.labelpad = 40
        if self.firsttime:
            for n, fig in enumerate(self.figures):
                fig.subplots_adjust(**self.plots_adjust)
            self.firsttime = False
    def clear_figures(self):
        '''
        Reset axes for redraw plots
        '''
        for ax in self.axes+self.pf_axes+self.cb_axes:
            ax.clear()
            ax.firsttime = True
            if hasattr(ax, 'cbar') and ax.cbar:
                ax.cbar.remove()
    def __plot(self):
        '''
        Main function to plot, format and save figures
        '''
        self.plot()
        self.format()
        for n, fig in enumerate(self.figures):
            if self.nrows == 0 or self.nplots == 0:
                log.warning('No data', self.name)
                fig.text(0.5, 0.5, 'No Data', fontsize='large', ha='center')
                fig.canvas.manager.set_window_title(self.CODE)
                continue
            fig.canvas.manager.set_window_title('{} - {}'.format(self.title,
                self.getDateTime(self.data.max_time).strftime('%Y/%m/%d')))
            fig.canvas.draw()
            if self.show:
                fig.show()
                figpause(0.01)
            if self.save:
                self.save_figure(n)
        if self.server:
            self.send_to_server()
    def __update(self, dataOut, timestamp):
        '''
        Push new data (plus dataOut metadata) into the PlotterData buffer.
        '''
        metadata = {
            'yrange': dataOut.heightList,
            'interval': dataOut.timeInterval,
            'channels': dataOut.channelList
        }
        data, meta = self.update(dataOut)
        metadata.update(meta)
        self.data.update(data, timestamp, metadata)
    def save_figure(self, n):
        '''
        Save figure `n` to disk, at most once every `save_period` seconds.
        '''
        if (self.data.max_time - self.save_time) <= self.save_period:
            return
        self.save_time = self.data.max_time
        fig = self.figures[n]
        figname = os.path.join(
            self.save,
            self.save_code,
            '{}_{}.png'.format(
                self.save_code,
                self.getDateTime(self.data.max_time).strftime(
                    '%Y%m%d_%H%M%S'
                ),
            )
        )
        log.log('Saving figure: {}'.format(figname), self.name)
        if not os.path.isdir(os.path.dirname(figname)):
            os.makedirs(os.path.dirname(figname))
        fig.savefig(figname)
        if self.throttle == 0:
            # Also keep a continuously-overwritten daily snapshot.
            figname = os.path.join(
                self.save,
                '{}_{}.png'.format(
                    self.save_code,
                    self.getDateTime(self.data.min_time).strftime(
                        '%Y%m%d'
                    ),
                )
            )
            fig.savefig(figname)
    def send_to_server(self):
        '''
        Send the latest plot data to the realtime server over zmq REQ/REP,
        retrying queued timestamps and recreating the socket on timeout.
        '''
        # NOTE(review): should be `self.exp_code is None`; also execution
        # continues after the warning - an early return is probably intended.
        if self.exp_code == None:
            log.warning('Missing `exp_code` skipping sending to server...')
        last_time = self.data.max_time
        interval = last_time - self.sender_time
        if interval < self.sender_period:
            return
        self.sender_time = last_time
        attrs = ['titles', 'zmin', 'zmax', 'tag', 'ymin', 'ymax']
        for attr in attrs:
            value = getattr(self, attr)
            if value:
                if isinstance(value, (numpy.float32, numpy.float64)):
                    value = round(float(value), 2)
                self.data.meta[attr] = value
        if self.colormap == 'jet':
            self.data.meta['colormap'] = 'Jet'
        elif 'RdBu' in self.colormap:
            self.data.meta['colormap'] = 'RdBu'
        else:
            self.data.meta['colormap'] = 'Viridis'
        self.data.meta['interval'] = int(interval)
        self.sender_queue.append(last_time)
        while True:
            try:
                tm = self.sender_queue.popleft()
            except IndexError:
                break
            msg = self.data.jsonify(tm, self.save_code, self.plot_type)
            self.socket.send_string(msg)
            socks = dict(self.poll.poll(2000))
            if socks.get(self.socket) == zmq.POLLIN:
                reply = self.socket.recv_string()
                if reply == 'ok':
                    log.log("Response from server ok", self.name)
                    time.sleep(0.1)
                    continue
                else:
                    log.warning(
                        "Malformed reply from server: {}".format(reply), self.name)
            else:
                # Timeout: requeue the timestamp and rebuild the REQ socket
                # (a REQ socket cannot be reused after a missing reply).
                log.warning(
                    "No response from server, retrying...", self.name)
                self.sender_queue.appendleft(tm)
                self.socket.setsockopt(zmq.LINGER, 0)
                self.socket.close()
                self.poll.unregister(self.socket)
                self.socket = self.context.socket(zmq.REQ)
                self.socket.connect(self.server)
                self.poll.register(self.socket, zmq.POLLIN)
                break
    def setup(self):
        '''
        This method should be implemented in the child class, the following
        attributes should be set:
        self.nrows: number of rows
        self.ncols: number of cols
        self.nplots: number of plots (channels or pairs)
        self.ylabel: label for Y axes
        self.titles: list of axes title
        '''
        raise NotImplementedError
    def plot(self):
        '''
        Must be defined in the child class, the actual plotting method
        '''
        raise NotImplementedError
    def update(self, dataOut):
        '''
        Must be defined in the child class, update self.data with new data
        '''
        data = {
            self.CODE: getattr(dataOut, 'data_{}'.format(self.CODE))
        }
        meta = {}
        return data, meta
    def run(self, dataOut, **kwargs):
        '''
        Main plotting routine
        '''
        if self.isConfig is False:
            self.__setup(**kwargs)
            if self.localtime:
                self.getDateTime = datetime.datetime.fromtimestamp
            else:
                self.getDateTime = datetime.datetime.utcfromtimestamp
            self.data.setup()
            self.isConfig = True
            if self.server:
                self.context = zmq.Context()
                self.socket = self.context.socket(zmq.REQ)
                self.socket.connect(self.server)
                self.poll = zmq.Poller()
                self.poll.register(self.socket, zmq.POLLIN)
        tm = getattr(dataOut, self.attr_time)
        # When the time axis window is exhausted, flush the current plot and
        # start a fresh window.
        if self.data and 'time' in self.xaxis and (tm - self.tmin) >= self.xrange*60*60:
            self.save_time = tm
            self.__plot()
            self.tmin += self.xrange*60*60
            self.data.setup()
            self.clear_figures()
        self.__update(dataOut, tm)
        if self.isPlotConfig is False:
            self.__setup_plot()
            self.isPlotConfig = True
        if self.xaxis == 'time':
            dt = self.getDateTime(tm)
            if self.xmin is None:
                self.tmin = tm
                self.xmin = dt.hour
            # Anchor tmin to the configured start hour (xmin may be
            # fractional, e.g. 8.5 -> 08:30).
            minutes = (self.xmin-int(self.xmin)) * 60
            seconds = (minutes - int(minutes)) * 60
            self.tmin = (dt.replace(hour=int(self.xmin), minute=int(minutes), second=int(seconds)) -
                         datetime.datetime(1970, 1, 1)).total_seconds()
            if self.localtime:
                self.tmin += time.timezone
        if self.xmin is not None and self.xmax is not None:
            self.xrange = self.xmax - self.xmin
        if self.throttle == 0:
            self.__plot()
        else:
            self.__throttle_plot(self.__plot)#, coerce=coerce)
    def close(self):
        # Flush the last (partial) plot before shutting down.
        if self.data and not self.data.flagNoData:
            self.save_time = self.data.max_time
            self.__plot()
        if self.data and not self.data.flagNoData and self.pause:
            figpause(10)
| [
"numpy.sqrt",
"time.sleep",
"zmq.Poller",
"numpy.isfinite",
"numpy.sin",
"datetime.timedelta",
"numpy.arange",
"datetime.datetime",
"collections.deque",
"matplotlib.ticker.FuncFormatter",
"numpy.where",
"functools.wraps",
"matplotlib._pylab_helpers.Gcf.get_active",
"numpy.vstack",
"mpl_t... | [((1323, 1366), 'matplotlib.pyplot.register_cmap', 'matplotlib.pyplot.register_cmap', ([], {'cmap': 'ncmap'}), '(cmap=ncmap)\n', (1354, 1366), False, 'import matplotlib\n'), ((395, 432), 'matplotlib.use', 'matplotlib.use', (["os.environ['BACKEND']"], {}), "(os.environ['BACKEND'])\n", (409, 432), False, 'import matplotlib\n'), ((1283, 1321), 'numpy.vstack', 'numpy.vstack', (['(blu_values, jet_values)'], {}), '((blu_values, jet_values))\n', (1295, 1321), False, 'import numpy\n'), ((1377, 1392), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['s'], {}), '(s)\n', (1389, 1392), True, 'import matplotlib.pyplot as plt\n'), ((2161, 2193), 'numpy.rad2deg', 'numpy.rad2deg', (['(km / EARTH_RADIUS)'], {}), '(km / EARTH_RADIUS)\n', (2174, 2193), False, 'import numpy\n'), ((2688, 2730), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 8)', 'facecolor': '"""r"""'}), "(figsize=(12, 8), facecolor='r')\n", (2698, 2730), True, 'import matplotlib.pyplot as plt\n'), ((467, 490), 'matplotlib.use', 'matplotlib.use', (['"""TkAgg"""'], {}), "('TkAgg')\n", (481, 490), False, 'import matplotlib\n'), ((1059, 1097), 'matplotlib.pyplot.get_cmap', 'matplotlib.pyplot.get_cmap', (['"""jet"""', '(100)'], {}), "('jet', 100)\n", (1085, 1097), False, 'import matplotlib\n'), ((1098, 1115), 'numpy.arange', 'numpy.arange', (['(100)'], {}), '(100)\n', (1110, 1115), False, 'import numpy\n'), ((1137, 1180), 'matplotlib.pyplot.get_cmap', 'matplotlib.pyplot.get_cmap', (['"""seismic_r"""', '(20)'], {}), "('seismic_r', 20)\n", (1163, 1180), False, 'import matplotlib\n'), ((1186, 1202), 'numpy.arange', 'numpy.arange', (['(20)'], {}), '(20)\n', (1198, 1202), False, 'import numpy\n'), ((2330, 2372), 'matplotlib._pylab_helpers.Gcf.get_active', 'matplotlib._pylab_helpers.Gcf.get_active', ([], {}), '()\n', (2370, 2372), False, 'import matplotlib\n'), ((3409, 3474), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': 'seconds', 'minutes': 'minutes', 'hours': 'hours'}), 
'(seconds=seconds, minutes=minutes, hours=hours)\n', (3427, 3474), False, 'import datetime\n'), ((3591, 3600), 'functools.wraps', 'wraps', (['fn'], {}), '(fn)\n', (3596, 3600), False, 'from functools import wraps\n'), ((5084, 5108), 'schainpy.model.proc.jroproc_base.Operation.__init__', 'Operation.__init__', (['self'], {}), '(self)\n', (5102, 5108), False, 'from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator\n'), ((5312, 5328), 'collections.deque', 'deque', ([], {'maxlen': '(10)'}), '(maxlen=10)\n', (5317, 5328), False, 'from collections import deque\n'), ((8384, 8437), 'schainpy.model.data.jrodata.PlotterData', 'PlotterData', (['self.CODE', 'self.exp_code', 'self.localtime'], {}), '(self.CODE, self.exp_code, self.localtime)\n', (8395, 8437), False, 'from schainpy.model.data.jrodata import PlotterData\n'), ((11241, 11264), 'mpl_toolkits.axes_grid1.make_axes_locatable', 'make_axes_locatable', (['ax'], {}), '(ax)\n', (11260, 11264), False, 'from mpl_toolkits.axes_grid1 import make_axes_locatable\n'), ((11649, 11669), 'numpy.median', 'numpy.median', (['deltas'], {}), '(deltas)\n', (11661, 11669), False, 'import numpy\n'), ((11687, 11721), 'numpy.where', 'numpy.where', (['(deltas > 5 * x_median)'], {}), '(deltas > 5 * x_median)\n', (11698, 11721), False, 'import numpy\n'), ((526, 550), 'matplotlib.use', 'matplotlib.use', (['"""MacOSX"""'], {}), "('MacOSX')\n", (540, 550), False, 'import matplotlib\n'), ((596, 646), 'schainpy.utils.log.warning', 'log.warning', (['"""Using default Backend="Agg\\""""', '"""INFO"""'], {}), '(\'Using default Backend="Agg"\', \'INFO\')\n', (607, 646), False, 'from schainpy.utils import log\n'), ((651, 672), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (665, 672), False, 'import matplotlib\n'), ((1784, 1797), 'numpy.sqrt', 'numpy.sqrt', (['a'], {}), '(a)\n', (1794, 1797), False, 'import numpy\n'), ((1825, 1853), 'numpy.sin', 'numpy.sin', (['((lon2 - lon1) * p)'], {}), '((lon2 - lon1) * 
p)\n', (1834, 1853), False, 'import numpy\n'), ((1850, 1869), 'numpy.cos', 'numpy.cos', (['(lat2 * p)'], {}), '(lat2 * p)\n', (1859, 1869), False, 'import numpy\n'), ((2040, 2056), 'numpy.cos', 'numpy.cos', (['theta'], {}), '(theta)\n', (2049, 2056), False, 'import numpy\n'), ((2060, 2076), 'numpy.sin', 'numpy.sin', (['theta'], {}), '(theta)\n', (2069, 2076), False, 'import numpy\n'), ((4150, 4173), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4171, 4173), False, 'import datetime\n'), ((9345, 9420), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(self.width, self.height)', 'edgecolor': '"""k"""', 'facecolor': '"""w"""'}), "(figsize=(self.width, self.height), edgecolor='k', facecolor='w')\n", (9355, 9420), True, 'import matplotlib.pyplot as plt\n'), ((11833, 11911), 'numpy.ma.masked_inside', 'numpy.ma.masked_inside', (['z_buffer', '(0.99 * self.__missing)', '(1.01 * self.__missing)'], {}), '(z_buffer, 0.99 * self.__missing, 1.01 * self.__missing)\n', (11855, 11911), False, 'import numpy\n'), ((18205, 18268), 'schainpy.utils.log.warning', 'log.warning', (['"""Missing `exp_code` skipping sending to server..."""'], {}), "('Missing `exp_code` skipping sending to server...')\n", (18216, 18268), False, 'from schainpy.utils import log\n'), ((1629, 1657), 'numpy.cos', 'numpy.cos', (['((lat2 - lat1) * p)'], {}), '((lat2 - lat1) * p)\n', (1638, 1657), False, 'import numpy\n'), ((1869, 1888), 'numpy.cos', 'numpy.cos', (['(lat1 * p)'], {}), '(lat1 * p)\n', (1878, 1888), False, 'import numpy\n'), ((1915, 1934), 'numpy.sin', 'numpy.sin', (['(lat2 * p)'], {}), '(lat2 * p)\n', (1924, 1934), False, 'import numpy\n'), ((1969, 1997), 'numpy.cos', 'numpy.cos', (['((lon2 - lon1) * p)'], {}), '((lon2 - lon1) * p)\n', (1978, 1997), False, 'import numpy\n'), ((3751, 3774), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3772, 3774), False, 'import datetime\n'), ((3858, 3881), 'datetime.datetime.now', 'datetime.datetime.now', ([], 
{}), '()\n', (3879, 3881), False, 'import datetime\n'), ((10200, 10275), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(self.width, self.height)', 'edgecolor': '"""k"""', 'facecolor': '"""w"""'}), "(figsize=(self.width, self.height), edgecolor='k', facecolor='w')\n", (10210, 10275), True, 'import matplotlib.pyplot as plt\n'), ((10925, 10956), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['self.colormaps[n]'], {}), '(self.colormaps[n])\n', (10937, 10956), True, 'import matplotlib.pyplot as plt\n'), ((10998, 11025), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['self.colormap'], {}), '(self.colormap)\n', (11010, 11025), True, 'import matplotlib.pyplot as plt\n'), ((15970, 16003), 'schainpy.utils.log.warning', 'log.warning', (['"""No data"""', 'self.name'], {}), "('No data', self.name)\n", (15981, 16003), False, 'from schainpy.utils import log\n'), ((17630, 17654), 'os.path.dirname', 'os.path.dirname', (['figname'], {}), '(figname)\n', (17645, 17654), False, 'import os\n'), ((17681, 17705), 'os.path.dirname', 'os.path.dirname', (['figname'], {}), '(figname)\n', (17696, 17705), False, 'import os\n'), ((19872, 19934), 'schainpy.utils.log.warning', 'log.warning', (['"""No response from server, retrying..."""', 'self.name'], {}), "('No response from server, retrying...', self.name)\n", (19883, 19934), False, 'from schainpy.utils import log\n'), ((21610, 21623), 'zmq.Context', 'zmq.Context', ([], {}), '()\n', (21621, 21623), False, 'import zmq\n'), ((21760, 21772), 'zmq.Poller', 'zmq.Poller', ([], {}), '()\n', (21770, 21772), False, 'import zmq\n'), ((1662, 1681), 'numpy.cos', 'numpy.cos', (['(lat1 * p)'], {}), '(lat1 * p)\n', (1671, 1681), False, 'import numpy\n'), ((1694, 1713), 'numpy.cos', 'numpy.cos', (['(lat2 * p)'], {}), '(lat2 * p)\n', (1703, 1713), False, 'import numpy\n'), ((1721, 1749), 'numpy.cos', 'numpy.cos', (['((lon2 - lon1) * p)'], {}), '((lon2 - lon1) * p)\n', (1730, 1749), False, 'import numpy\n'), ((1933, 1952), 'numpy.sin', 
'numpy.sin', (['(lat1 * p)'], {}), '(lat1 * p)\n', (1942, 1952), False, 'import numpy\n'), ((1951, 1970), 'numpy.cos', 'numpy.cos', (['(lat2 * p)'], {}), '(lat2 * p)\n', (1960, 1970), False, 'import numpy\n'), ((4054, 4083), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(0)'}), '(seconds=0)\n', (4072, 4083), False, 'import datetime\n'), ((14101, 14164), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (['ax.plt'], {'ax': 'ax', 'fraction': '(0.05)', 'pad': '(0.02)', 'aspect': '(10)'}), '(ax.plt, ax=ax, fraction=0.05, pad=0.02, aspect=10)\n', (14113, 14164), True, 'import matplotlib.pyplot as plt\n'), ((15184, 15207), 'numpy.arange', 'numpy.arange', (['(0)', '(90)', '(20)'], {}), '(0, 90, 20)\n', (15196, 15207), False, 'import numpy\n'), ((19588, 19633), 'schainpy.utils.log.log', 'log.log', (['"""Response from server ok"""', 'self.name'], {}), "('Response from server ok', self.name)\n", (19595, 19633), False, 'from schainpy.utils import log\n'), ((19654, 19669), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (19664, 19669), False, 'import time\n'), ((12776, 12805), 'matplotlib.ticker.FuncFormatter', 'FuncFormatter', (['self.__fmtTime'], {}), '(self.__fmtTime)\n', (12789, 12805), False, 'from matplotlib.ticker import FuncFormatter, LinearLocator, MultipleLocator\n'), ((12854, 12870), 'matplotlib.ticker.LinearLocator', 'LinearLocator', (['(9)'], {}), '(9)\n', (12867, 12870), False, 'from matplotlib.ticker import FuncFormatter, LinearLocator, MultipleLocator\n'), ((12955, 12977), 'numpy.isfinite', 'numpy.isfinite', (['self.y'], {}), '(self.y)\n', (12969, 12977), False, 'import numpy\n'), ((13063, 13085), 'numpy.isfinite', 'numpy.isfinite', (['self.y'], {}), '(self.y)\n', (13077, 13085), False, 'import numpy\n'), ((22726, 22755), 'datetime.datetime', 'datetime.datetime', (['(1970)', '(1)', '(1)'], {}), '(1970, 1, 1)\n', (22743, 22755), False, 'import datetime\n')] |
from dataclasses import dataclass, field
from typing import Dict, List, Union
import pygamehack.struct_parser as struct_parser
from .struct_parser import tuples_to_classes, classes_to_string, Comment
from .struct_file import PythonStructSourceGenerator
__all__ = ['ReClassNet']
# TODO: ReClassNet all types
#region ReClassNet
class ReClassNet:
    """Loader/converter for ReClass.NET project files (.rcnet).

    Provides a nested ``Class`` record plus static helpers to load a project
    file, convert its class definitions to pygamehack struct classes, and
    emit Python source for them.
    """
    @dataclass
    class Class(object):
        # Struct name as stored in the project file.
        name: str
        # Total byte size; filled in later by Parse.class_sizes_and_offsets.
        size: int = 0
        # field name -> type name (str) or nested Class definition.
        fields: Dict[str, Union[str, 'Class']] = field(default_factory=dict)
        # field name -> offset chain ([offset] direct, [offset, 0] via pointer).
        offsets: Dict[str, List[int]] = field(default_factory=dict)
        # field name -> comment text taken from the project file.
        comments: Dict[str, str] = field(default_factory=dict)
    @staticmethod
    def load_project_file(path: str) -> List[Class]:
        """
        Load a ReClass.Net project file (.rcnet) from the given path into a list of ReClassNet Class definitions
        """
        # .rcnet files are zip archives containing a single Data.xml document.
        import zipfile
        from xml.dom import minidom
        classes = []
        archive = zipfile.ZipFile(path, 'r')
        with archive.open(C.DataFileName) as data_xml:
            doc = minidom.parse(data_xml)
            # platform ('x86'/'x64') decides pointer width downstream.
            platform, version = Parse.platform_version(doc)
            # custom_data = Parse.custom_data(doc)
            type_mapping = Parse.type_mapping(doc)
            # enums = Parse.enums(doc)
            classes.extend(Parse.classes(doc, type_mapping, platform))
        return classes
    @staticmethod
    def convert_structs(classes: List[Class], imported_name: str = 'gh') -> [struct_parser.Class]:
        """
        Convert the given list of ReClassNet Class definitions into a list of pygamehack Class definitions
        """
        tuples = []
        name_to_class = {}
        for class_def in classes:
            name_to_class[class_def.name] = class_def
            # Flatten 'Class.field' -> offset (single int when not a pointer chain).
            tuples.extend((f'{class_def.name}.{k}', v if len(v) > 1 else v[0]) for k, v in class_def.offsets.items())
        struct_classes = tuples_to_classes(tuples)
        Parse.field_type_annotations(struct_classes, classes, name_to_class, imported_name)
        return struct_classes
    @staticmethod
    def generate_struct_src(classes: List[Class], imported_name: str = 'gh') -> str:
        """
        Generate source code for the given list of ReClassNet Class definitions
        """
        return f'import pygamehack as {imported_name}\n\n\n' + classes_to_string(
            ReClassNet.convert_structs(classes, imported_name),
            generator=PythonStructSourceGenerator(imported_name=imported_name)
        )
#endregion
#region ReClassNet Implementation
#region Constants
class C:
    """Constants mirroring the ReClass.NET project-file format (element and
    attribute names used in Data.xml, plus version handling)."""
    DataFileName = 'Data.xml'
    FileVersion = 0x00010001
    # High 16 bits carry the incompatible ("critical") part of the version.
    FileVersionCriticalMask = 0xFFFF0000
    class Elem:
        # XML element (tag) names.
        RootElement = "reclass"
        CustomData = "custom_data"
        TypeMapping = "type_mapping"
        Enums = "enums"
        Enum = "enum"
        Classes = "classes"
        Class = "class"
        Node = "node"
        Method = "method"
        Item = "item"
    class Attr:
        # XML attribute names.
        Version = "version"
        Platform = "type"
        Uuid = "uuid"
        Name = "name"
        Comment = "comment"
        Hidden = "hidden"
        Address = "address"
        Type = "type"
        Reference = "reference"
        Count = "count"
        Bits = "bits"
        Length = "length"
        Size = "size"
        Signature = "signature"
        Flags = "flags"
        Value = "value"
#endregion
#region Parse
class Parse:
    """Internal helpers that walk the Data.xml DOM and build ReClassNet.Class
    definitions, then resolve field offsets and struct sizes."""
    #region Basic
    # Platform/Version
    @staticmethod
    def platform_version(doc):
        """Return (platform, version) from the root element; reject files whose
        critical version bits are newer than what this parser supports."""
        platform = doc.documentElement.attributes[C.Attr.Platform].value
        version = int(doc.documentElement.attributes[C.Attr.Version].value)
        if (version & C.FileVersionCriticalMask) > (C.FileVersion & C.FileVersionCriticalMask):
            raise RuntimeError(f'The file version is unsupported.')
        return platform, version
    # Custom Data
    @staticmethod
    def custom_data(doc):
        # TODO: Parse ReClassNet custom data
        # Currently a stub: the element is located but never read.
        custom_data = doc.documentElement.getElementsByTagName(C.Elem.CustomData)
        if custom_data:
            pass
        return None
    # Type Mapping
    @staticmethod
    def type_mapping(doc):
        """Return {type-mapping element name -> mapped text} from <type_mapping>."""
        import xml.dom
        type_mapping = {}
        type_mapping_elem = doc.documentElement.getElementsByTagName(C.Elem.TypeMapping)
        if type_mapping_elem and type_mapping_elem.length > 0:
            for node in type_mapping_elem.item(0).childNodes:
                # Skip text/whitespace nodes between elements.
                if node.nodeType == xml.dom.Node.ELEMENT_NODE:
                    type_mapping[node.nodeName] = node.childNodes[0].nodeValue
        return type_mapping
    # Enums
    @staticmethod
    def enums(doc):
        # NOTE(review): dead code (the call site is commented out). As written it
        # iterates raw childNodes — which include text nodes without attributes —
        # and `attributes.get` returns attribute nodes, not values; verify before
        # enabling.
        enums = []
        enums_elem = doc.documentElement.getElementsByTagName(C.Elem.Enums)
        if enums_elem and enums_elem.length > 0:
            for node in enums_elem.item(0).childNodes:
                name = node.attributes.get(C.Attr.Name) or ''
                use_flags = node.attributes.get(C.Attr.Flags) or False
                size = node.attributes.get(C.Attr.Size, 4) # TODO: Default enum size
                values = {}
                if node.length > 0:
                    for item in node.item(0).childNodes:
                        item_name = item.attributes.get(C.Attr.Name) or ''
                        value = item.attributes.get(C.Attr.Value) or 0
                        values[item_name] = value
                enums.append((name, use_flags, size, values))
        return enums
    #endregion
    #region Classes
    @staticmethod
    def classes(doc, types, platform):
        """Two-pass parse: collect class shells and fields, then compute
        sizes/offsets (pointer width depends on `platform`)."""
        classes = []
        Parse.classes_without_size_and_offsets(doc, types, classes)
        Parse.class_sizes_and_offsets(classes, platform)
        return classes
    @staticmethod
    def classes_without_size_and_offsets(doc, types, classes):
        """First pass: create a Class per unique uuid, then parse each class's
        child nodes into fields (appends into the `classes` list in place)."""
        classes_elem = doc.documentElement.getElementsByTagName(C.Elem.Classes)
        element_class = []
        seen_classes = {}
        # Parse classes
        if classes_elem and classes_elem.length > 0:
            for node in classes_elem.item(0).childNodes:
                if node.attributes:
                    uuid = node.attributes.get(C.Attr.Uuid, None)
                    if uuid is not None and node.attributes and uuid.value not in seen_classes:
                        name = node.attributes.get(C.Attr.Name, None)
                        # Skip classes that shadow a basic mapped type.
                        if name is not None and node.childNodes and not Parse.in_type_mapping(name.value, types):
                            class_def = ReClassNet.Class(name.value)
                            classes.append(class_def)
                            seen_classes[uuid.value] = class_def
                            element_class.append((node, class_def))
        # Parse properties for each class recursively
        for node, class_def in element_class:
            for child in node.childNodes:
                if child.attributes:
                    Parse.create_class_property_from_node(class_def, child, types, seen_classes, node)
    @staticmethod
    def class_sizes_and_offsets(classes, platform):
        """Second pass: topologically sort classes by field dependencies, then
        fill in each field's offset and each class's total size (in place)."""
        from .struct_meta import StructDependencies
        # Sort classes in reverse dependency order
        dependencies = {}
        for parent in classes:
            dependencies[parent.name] = []
            for child in parent.fields.values():
                if isinstance(child, ReClassNet.Class):
                    dependencies[parent.name].append(child.name)
        StructDependencies.sort(classes, dependencies, lambda t: t.name)
        # Calculate class sizes now that all the classes have been sorted
        calc = {}
        def calculate_size_offsets(class_def, calculated):
            # Memoized recursion: each class is sized once.
            if class_def.name in calculated:
                return class_def.size
            calculated[class_def.name] = True
            offset = 0
            for name, child_def in class_def.fields.items():
                class_def.offsets[name][0] = offset
                property_size = 0
                if isinstance(child_def, ReClassNet.Class):
                    property_size = calculate_size_offsets(child_def, calculated)
                    class_def.size = max(offset + property_size, class_def.size)
                else:
                    # Leaf field: replace the type-name string with a Class shell
                    # named after the pygamehack type.
                    pygamehack_type = Parse.pygamehack_type(child_def)
                    class_def.fields[name] = ReClassNet.Class(pygamehack_type.__name__)
                    if pygamehack_type.__name__ != 'ptr':
                        property_size = pygamehack_type.size
                        class_def.fields[name].size = property_size
                if len(class_def.offsets[name]) == 1:
                    offset += property_size
                else:
                    # Offset chain of length 2 means "behind a pointer":
                    # advance by pointer width for the platform.
                    offset += 4 if platform == 'x86' else 8
                class_def.size = max(offset, class_def.size)
            return class_def.size
        for parent in classes:
            calculate_size_offsets(parent, calc)
    @staticmethod
    def create_class_property_from_node(class_def, node, types, seen_classes, parent=None, comes_from_pointer=False):
        """Turn one field <node> into an entry in class_def.fields/offsets.
        When called for a pointer's target, name/comment come from `parent`."""
        uuid = node.attributes.get(C.Attr.Reference, None)
        node_type = node.attributes.get(C.Attr.Type, None)
        name = (parent if comes_from_pointer else node).attributes.get(C.Attr.Name, None)
        name = name.value if name is not None else ''
        comment = (parent if comes_from_pointer else node).attributes.get(C.Attr.Comment, None)
        class_def.comments[name] = comment.value if comment is not None else ''
        # Existing class
        if uuid is not None and uuid.value in seen_classes:
            field_def = seen_classes[uuid.value]
            class_def.fields[name] = field_def
            class_def.offsets[name] = [0] if not comes_from_pointer else [0, 0]
        # Basic Type
        elif node_type is not None and Parse.in_type_mapping(node_type.value, types):
            class_def.fields[name] = node_type.value
            class_def.offsets[name] = [0]
        # Pointer to existing class
        # NOTE(review): from here on node_type is assumed non-None — a node with
        # neither reference nor type attribute would raise AttributeError.
        elif node_type.value == 'PointerNode':
            for n in node.childNodes:
                if n.attributes:
                    Parse.create_class_property_from_node(class_def, n, types, seen_classes, node, True)
                    break
        # Undefined Class that comes from pointer
        else:
            assert node_type.value == 'ClassInstanceNode', 'We should only be parsing undefined classes here'
            assert comes_from_pointer, 'Inline instance definiton of undefined class is not allowed'
            class_def.fields[name] = 'ptr'
            class_def.offsets[name] = [0, 0]
    @staticmethod
    def in_type_mapping(name, types):
        # e.g. 'UInt32Node' -> 'TypeUInt32'. Returns True or (implicitly) None.
        if 'Type' + name.replace('Node', '') in types:
            return True
    @staticmethod
    def pygamehack_type(reclass_type_name):
        """Map a ReClassNet node type name to a pygamehack type attribute
        (e.g. 'UInt32Node' -> gh.u32); raises RuntimeError if unknown."""
        import pygamehack as gh
        name = reclass_type_name.replace('Node', '') \
            .replace('UInt', 'u') \
            .replace('Int', 'i') \
            .lower()
        if hasattr(gh, name):
            return getattr(gh, name)
        else:
            raise RuntimeError("Unknown type name", reclass_type_name)
    #endregion
    @staticmethod
    def field_type_annotations(classes, class_defs, name_to_class_def, imported_name):
        """Attach source comments and type-annotation strings to the converted
        struct classes; `classes` and `class_defs` must be index-aligned."""
        for c, class_def in zip(classes, class_defs):
            assert c.name == class_def.name, "Something fucked up"
            for name, f in c.fields.items():
                # Comment
                if class_def.comments[name]:
                    f.comments.append(Comment(class_def.comments[name], 0, 0, 0))
                # Annotation src
                field_def = class_def.fields[name]
                type_name = field_def if isinstance(field_def, str) else field_def.name
                if type_name in name_to_class_def:
                    # User-defined struct: forward-reference by quoted name.
                    f.annotation_src = f"'{type_name}'"
                else:
                    f.annotation_src = f'{imported_name}.{type_name}'
#endregion
#endregion
| [
"xml.dom.minidom.parse",
"dataclasses.field",
"zipfile.ZipFile"
] | [((480, 507), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (485, 507), False, 'from dataclasses import dataclass, field\n'), ((548, 575), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (553, 575), False, 'from dataclasses import dataclass, field\n'), ((611, 638), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (616, 638), False, 'from dataclasses import dataclass, field\n'), ((955, 981), 'zipfile.ZipFile', 'zipfile.ZipFile', (['path', '"""r"""'], {}), "(path, 'r')\n", (970, 981), False, 'import zipfile\n'), ((1059, 1082), 'xml.dom.minidom.parse', 'minidom.parse', (['data_xml'], {}), '(data_xml)\n', (1072, 1082), False, 'from xml.dom import minidom\n')] |
from selenium import webdriver
from selenium.webdriver.common.by import By
chrome_driver_path = "C:/Users/st0rmg0d/Desktop/chromedriver.exe"
class scrap:
    """Scrape recent news snippets for a currency from CoinMarketCap."""
    def __init__(self, currency_name):
        # Currency slug as it appears in the CoinMarketCap URL (e.g. 'bitcoin').
        self.currency_name = currency_name
    def scrap_articles(self):
        """Collect the text of news entries 1, 2, 4 and 5 on the currency's
        news page (slot 3 is skipped) and return them as a list of strings."""
        driver = webdriver.Chrome(executable_path=chrome_driver_path)
        link = f"https://coinmarketcap.com/currencies/{self.currency_name}/news"
        driver.get(link)
        paragraph = []
        try:
            for num in range(1, 6):
                if num != 3:
                    # Bug fix: the original rebound `paragraph` to the scraped
                    # string and then called .append() on that string, which
                    # raises AttributeError on the first iteration. Use a
                    # separate variable for the scraped text instead.
                    text = driver.find_element(By.XPATH, f'/html/body/div/div[1]/div/div[2]/div/div[3]/div/div/main/'
                                                         f'div[2]/div[{num}]/a/div[1]/p').text
                    paragraph.append(text)
        finally:
            # Always release the browser, even if a locator lookup fails.
            driver.quit()
return paragraph | [
"selenium.webdriver.Chrome"
] | [((296, 348), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {'executable_path': 'chrome_driver_path'}), '(executable_path=chrome_driver_path)\n', (312, 348), False, 'from selenium import webdriver\n')] |
import json
from app import create_app, db
from app.models import User, UserType
from .base import BaseTest
class TestOrders(BaseTest):
    """Integration tests for the /api/v1/orders endpoints, covering creation
    validation, ownership checks, retrieval, update and deletion."""
    def setUp(self):
        # Fresh app + schema per test; setUpAuth (from BaseTest) provides
        # self.user / self.user_headers / self.admin_headers.
        self.app = create_app(config_name='testing')
        self.client = self.app.test_client()
        with self.app.app_context():
            db.create_all()
        self.setUpAuth()
    def data(self):
        # Valid order payload; creating the menu item is a side effect here.
        return json.dumps({
            'quantity': 2,
            'user_id': self.user['id'],
            'menu_item_id': self.create_menu_item()['menu_item']['id']
        })
    def test_can_create_order(self):
        res = self.client.post(
            'api/v1/orders', data=self.data(), headers=self.user_headers)
        self.assertEqual(res.status_code, 201)
        self.assertIn(b'Successfully saved order', res.data)
    def test_cannot_create_order_without_user_id(self):
        res = self.client.post(
            'api/v1/orders',
            data=self.data_without(['user_id']),
            headers=self.user_headers)
        self.assertEqual(res.status_code, 400)
        self.assertIn(b'user id field is required', res.data)
    def test_cannot_create_order_without_quantity(self):
        res = self.client.post(
            'api/v1/orders',
            data=self.data_without(['quantity']),
            headers=self.user_headers)
        self.assertEqual(res.status_code, 400)
        self.assertIn(b'quantity field is required', res.data)
    def test_cannot_create_order_without_menu_item_id(self):
        res = self.client.post(
            'api/v1/orders',
            data=self.data_without(['menu_item_id']),
            headers=self.user_headers)
        self.assertEqual(res.status_code, 400)
        self.assertIn(b'menu item id field is required', res.data)
    def test_cannot_create_order_with_quantity_than_available(self):
        # create_menu_item stocks 100 units; 1000 must be rejected.
        res = self.client.post(
            'api/v1/orders',
            data=self.data_with({
                'quantity': 1000
            }),
            headers=self.user_headers)
        self.assertEqual(res.status_code, 400)
        self.assertIn(b'meal(s) are available', res.data)
    def test_can_update_order(self):
        json_res = self.create_order()
        res = self.client.put(
            'api/v1/orders/{}'.format(json_res['order']['id']),
            data=json.dumps({
                'quantity': 20,
                'menu_item_id': json_res['order']['menu_item_id'],
            }),
            headers=self.user_headers)
        json_res = self.to_dict(res)
        self.assertEqual(res.status_code, 200)
        self.assertEqual(json_res['order']['quantity'], 20)
        self.assertIn(b'successfully updated', res.data)
    def test_admin_can_update_order(self):
        # Admin may update an order created by a regular user.
        json_res = self.create_order()
        res = self.client.put(
            'api/v1/orders/{}'.format(json_res['order']['id']),
            data=json.dumps({
                'quantity': 20,
                'menu_item_id': json_res['order']['menu_item_id'],
            }),
            headers=self.admin_headers)
        json_res = self.to_dict(res)
        self.assertEqual(res.status_code, 200)
        self.assertEqual(json_res['order']['quantity'], 20)
        self.assertIn(b'successfully updated', res.data)
    def test_cannot_update_another_users_order(self):
        json_res = self.create_order()
        # A second, unrelated user must get 401 on someone else's order.
        user, headers = self.authUser(email='<EMAIL>')
        res = self.client.put(
            'api/v1/orders/{}'.format(json_res['order']['id']),
            data=json.dumps({
                'quantity': 20,
                'menu_item_id': json_res['order']['menu_item_id'],
            }),
            headers=headers)
        self.assertEqual(res.status_code, 401)
        self.assertIn(b'Unauthorized access', res.data)
    def test_can_get_order(self):
        json_res = self.create_order()
        res = self.client.get(
            'api/v1/orders/{}'.format(json_res['order']['id']),
            headers=self.user_headers)
        self.assertEqual(res.status_code, 200)
        self.assertIn(b'successfully retrieved', res.data)
    def test_can_get_many_orders(self):
        json_res = self.create_order()
        res = self.client.get(
            'api/v1/orders',
            headers=self.user_headers)
        self.assertEqual(res.status_code, 200)
        self.assertIn(b'Successfully retrieved orders', res.data)
    def test_can_get_many_orders_history(self):
        json_res = self.create_order()
        res = self.client.get(
            'api/v1/orders?history=1',
            headers=self.user_headers)
        self.assertEqual(res.status_code, 200)
        self.assertIn(b'Successfully retrieved orders', res.data)
    def test_can_delete_order(self):
        json_res = self.create_order()
        res = self.client.delete(
            'api/v1/orders/{}'.format(json_res['order']['id']),
            headers=self.user_headers)
        self.assertEqual(res.status_code, 200)
        self.assertIn(b'successfully deleted', res.data)
        # A follow-up GET must 404 once the order is gone.
        res = self.client.get(
            'api/v1/orders/{}'.format(json_res['order']['id']),
            headers=self.user_headers)
        self.assertEqual(res.status_code, 404)
    def test_cannot_delete_another_users_order(self):
        json_res = self.create_order()
        user, headers = self.authUser(email='<EMAIL>')
        res = self.client.delete(
            'api/v1/orders/{}'.format(json_res['order']['id']),
            headers=headers)
        self.assertEqual(res.status_code, 401)
        self.assertIn(b'Unauthorized access', res.data)
    def create_order(self):
        # Helper: POST a valid order and return the decoded response body.
        res = self.client.post(
            'api/v1/orders', data=self.data(), headers=self.user_headers)
        self.assertEqual(res.status_code, 201)
        self.assertIn(b'Successfully saved order', res.data)
        return self.to_dict(res)
    def create_menu_item(self):
        # Helper: build the meal -> menu -> menu-item chain (admin-only
        # endpoints) and return the decoded menu-item response.
        # create a meal
        res = self.client.post(
            'api/v1/meals',
            data=json.dumps({
                'name': 'ugali',
                'cost': 30,
            }),
            headers=self.admin_headers)
        self.assertEqual(res.status_code, 201)
        self.assertIn(b'Successfully saved meal', res.data)
        meal_id = self.to_dict(res)['meal']['id']
        # now create a menu
        res = self.client.post(
            'api/v1/menus',
            data=json.dumps({
                'name': 'Lunch'
            }),
            headers=self.admin_headers)
        self.assertEqual(res.status_code, 201)
        self.assertIn(b'Successfully saved menu', res.data)
        menu_id = self.to_dict(res)['menu']['id']
        # finally create a menu item
        res = self.client.post(
            'api/v1/menu-items',
            data=json.dumps({
                'quantity': 100,
                'menu_id': menu_id,
                'meal_id': meal_id
            }),
            headers=self.admin_headers)
        self.assertEqual(res.status_code, 201)
        self.assertIn(b'Successfully saved menu item', res.data)
        return self.to_dict(res)
    def tearDown(self):
        # Drop the schema so each test starts from a clean database.
        with self.app.app_context():
            db.drop_all()
| [
"app.db.create_all",
"app.db.drop_all",
"json.dumps",
"app.create_app"
] | [((178, 211), 'app.create_app', 'create_app', ([], {'config_name': '"""testing"""'}), "(config_name='testing')\n", (188, 211), False, 'from app import create_app, db\n'), ((306, 321), 'app.db.create_all', 'db.create_all', ([], {}), '()\n', (319, 321), False, 'from app import create_app, db\n'), ((7131, 7144), 'app.db.drop_all', 'db.drop_all', ([], {}), '()\n', (7142, 7144), False, 'from app import create_app, db\n'), ((2311, 2390), 'json.dumps', 'json.dumps', (["{'quantity': 20, 'menu_item_id': json_res['order']['menu_item_id']}"], {}), "({'quantity': 20, 'menu_item_id': json_res['order']['menu_item_id']})\n", (2321, 2390), False, 'import json\n'), ((2874, 2953), 'json.dumps', 'json.dumps', (["{'quantity': 20, 'menu_item_id': json_res['order']['menu_item_id']}"], {}), "({'quantity': 20, 'menu_item_id': json_res['order']['menu_item_id']})\n", (2884, 2953), False, 'import json\n'), ((3504, 3583), 'json.dumps', 'json.dumps', (["{'quantity': 20, 'menu_item_id': json_res['order']['menu_item_id']}"], {}), "({'quantity': 20, 'menu_item_id': json_res['order']['menu_item_id']})\n", (3514, 3583), False, 'import json\n'), ((5968, 6009), 'json.dumps', 'json.dumps', (["{'name': 'ugali', 'cost': 30}"], {}), "({'name': 'ugali', 'cost': 30})\n", (5978, 6009), False, 'import json\n'), ((6361, 6390), 'json.dumps', 'json.dumps', (["{'name': 'Lunch'}"], {}), "({'name': 'Lunch'})\n", (6371, 6390), False, 'import json\n'), ((6739, 6808), 'json.dumps', 'json.dumps', (["{'quantity': 100, 'menu_id': menu_id, 'meal_id': meal_id}"], {}), "({'quantity': 100, 'menu_id': menu_id, 'meal_id': meal_id})\n", (6749, 6808), False, 'import json\n')] |
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An example using monascaclient via the Python API"""
from monascaclient import client
import monascaclient.exc as exc
import time
# In order to use the python api directly, you must first obtain an
# auth token and identify which endpoint you wish to speak to.
endpoint = 'http://192.168.10.4:8070/v2.0'
# The api version of monasca-api
api_version = '2_0'
# Pass in the keystone authentication kwargs to construct a monasca client.
# The monasca_client will try to authenticate with keystone one time
# when it sees a 401 unauthorized resp, to take care of a stale token.
# In this example no token is input, so it will get a 401 when executing the
# first metrics.create request, and will authenticate and try again.
auth_kwargs = {'username': 'mini-mon',
               'password': 'password',
               'project_name': 'mini-mon',
               'auth_url': 'http://192.168.10.5:35357/v3/'}
monasca_client = client.Client(api_version, endpoint, **auth_kwargs)
# you can reference the monascaclient.v2_0.shell.py
# do_commands for command field initialization.
# post a metric
dimensions = {'instance_id': '12345', 'service': 'nova'}
fields = {}
fields['name'] = 'metric1'
fields['dimensions'] = dimensions
# time in milliseconds
fields['timestamp'] = time.time() * 1000
fields['value'] = 222.333
try:
    resp = monasca_client.metrics.create(**fields)
except exc.HTTPException as he:
    print('HTTPException code=%s message=%s' % (he.code, he.message))
else:
    print(resp)
    print('Successfully created metric')
# post a metric with a unicode service name
dimensions = {'instance_id': '12345', 'service': u'\u76db\u5927'}
fields = {}
fields['name'] = 'metric1'
fields['dimensions'] = dimensions
# time in milliseconds
fields['timestamp'] = time.time() * 1000
fields['value'] = 222.333
try:
    resp = monasca_client.metrics.create(**fields)
except exc.HTTPException as he:
    print('HTTPException code=%s message=%s' % (he.code, he.message))
else:
    print(resp)
    print('Successfully created metric')
# Metrics are ingested asynchronously; wait before listing them back.
print('Giving the DB time to update...')
time.sleep(5)
# metric-list
name = 'metric1'
dimensions = None
fields = {}
if name:
    fields['name'] = name
if dimensions:
    fields['dimensions'] = dimensions
try:
    body = monasca_client.metrics.list(**fields)
except exc.HTTPException as he:
    print('HTTPException code=%s message=%s' % (he.code, he.message))
else:
    print(body)
| [
"time.sleep",
"time.time",
"monascaclient.client.Client"
] | [((1535, 1586), 'monascaclient.client.Client', 'client.Client', (['api_version', 'endpoint'], {}), '(api_version, endpoint, **auth_kwargs)\n', (1548, 1586), False, 'from monascaclient import client\n'), ((2660, 2673), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (2670, 2673), False, 'import time\n'), ((1880, 1891), 'time.time', 'time.time', ([], {}), '()\n', (1889, 1891), False, 'import time\n'), ((2352, 2363), 'time.time', 'time.time', ([], {}), '()\n', (2361, 2363), False, 'import time\n')] |
# coding: utf-8
"""
Utilities to handle mongoengine classes and connections.
"""
import contextlib
from pymatgen.util.serialization import pmg_serialize
from monty.json import MSONable
from mongoengine import connect
from mongoengine.context_managers import switch_collection
from mongoengine.connection import DEFAULT_CONNECTION_NAME
class DatabaseData(MSONable):
    """
    Holds the parameters needed to reach one collection in a MongoDB database
    and exposes thin wrappers around mongoengine's connect and
    switch_collection helpers.
    """
    def __init__(self, database, host=None, port=None, collection=None, username=None, password=None):
        """
        Args:
            database: name of the database
            host: the host name of the mongod instance to connect to
            port: the port that the mongod instance is running on
            collection: name of the collection
            username: username to authenticate with
            password: password to authenticate with
        """
        #TODO handle multiple collections?
        # note: if making collection a list (or a dict), make it safe for mutable default arguments, otherwise there
        # will probably be problems with the switch_collection
        self.database = database
        self.host = host
        self.port = port
        self.collection = collection
        self.username = username
        self.password = password
    @classmethod
    def from_dict(cls, d):
        # Drop the MSONable bookkeeping keys before forwarding to __init__.
        kwargs = {k: v for k, v in d.items() if k not in ("@module", "@class")}
        return cls(**kwargs)
    @pmg_serialize
    def as_dict(self):
        return {"database": self.database, "host": self.host, "port": self.port,
                "collection": self.collection, "username": self.username,
                "password": self.password}
    @pmg_serialize
    def as_dict_no_credentials(self):
        # Same as as_dict but without username/password.
        return {"database": self.database, "host": self.host, "port": self.port,
                "collection": self.collection}
    def connect_mongoengine(self, alias=DEFAULT_CONNECTION_NAME):
        """
        Open the connection to the selected database
        """
        return connect(db=self.database, host=self.host, port=self.port,
                       username=self.username, password=self.password, alias=alias)
    @contextlib.contextmanager
    def switch_collection(self, cls):
        """
        Switches to the chosen collection using Mongoengine's switch_collection.
        """
        # No collection configured: hand back the class unchanged.
        if not self.collection:
            yield cls
            return
        with switch_collection(cls, self.collection) as new_cls:
            yield new_cls
| [
"mongoengine.context_managers.switch_collection",
"mongoengine.connect"
] | [((2112, 2235), 'mongoengine.connect', 'connect', ([], {'db': 'self.database', 'host': 'self.host', 'port': 'self.port', 'username': 'self.username', 'password': 'self.password', 'alias': 'alias'}), '(db=self.database, host=self.host, port=self.port, username=self.\n username, password=self.password, alias=alias)\n', (2119, 2235), False, 'from mongoengine import connect\n'), ((2475, 2514), 'mongoengine.context_managers.switch_collection', 'switch_collection', (['cls', 'self.collection'], {}), '(cls, self.collection)\n', (2492, 2514), False, 'from mongoengine.context_managers import switch_collection\n')] |
import csv
import random
from flask import Flask
from twilio.twiml.messaging_response import MessagingResponse
app = Flask(__name__)
# Load every row of truisms.csv once at import time; each row is a list
# whose first cell is the truism text.
with open('truisms.csv', 'r') as f:
    reader = csv.reader(f)
    truisms = list(reader)
@app.route('/sms', methods=['POST'])
def sms():
    """Reply to an inbound SMS with a randomly chosen truism (TwiML)."""
    reply = MessagingResponse()
    # Each CSV row is a list; the truism text is its first cell.
    reply.message(random.choice(truisms)[0])
    return str(reply)
# Run the development server when executed directly.
if __name__ == '__main__':
    app.run()
| [
"twilio.twiml.messaging_response.MessagingResponse",
"random.choice",
"csv.reader",
"flask.Flask"
] | [((119, 134), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (124, 134), False, 'from flask import Flask\n'), ((185, 198), 'csv.reader', 'csv.reader', (['f'], {}), '(f)\n', (195, 198), False, 'import csv\n'), ((328, 347), 'twilio.twiml.messaging_response.MessagingResponse', 'MessagingResponse', ([], {}), '()\n', (345, 347), False, 'from twilio.twiml.messaging_response import MessagingResponse\n'), ((290, 312), 'random.choice', 'random.choice', (['truisms'], {}), '(truisms)\n', (303, 312), False, 'import random\n')] |
from django.db import models
from django.core.validators import RegexValidator, MinValueValidator,MaxValueValidator
from django.core.urlresolvers import reverse
from django.dispatch import receiver
from django.db.models.signals import post_save, pre_save, pre_delete
from django.utils import timezone
from core.models import BaseEntity
from products.models import Service
from customers.models import Staff, Customer, Vendor
class CommissionStructure(BaseEntity):
    """ commission for staff based on services"""
    staff = models.ForeignKey(
        Staff,
        related_name='commissions'
    )
    service = models.ForeignKey(
        Service,
        related_name='commissions'
    )
    # Commission amount in currency units, bounded to [10, 100000].
    amount = models.IntegerField(
        default=500,
        validators=[
            MinValueValidator(
                10,
                message = 'Amount should be greater than 10'
            ),
            MaxValueValidator(
                100000,
                message = 'Amount should be less than 100000'
            ),
        ]
    )
    class Meta:
        # At most one commission rate per (staff, service) pair.
        unique_together = ("staff", "service")
    def __str__(self):
        return str(self.staff.name)+ ' '+ str(self.service.name)
    def get_absolute_url(self):
        # Detail view lives on the staff page, not on this model.
        return reverse('customers:Staff_Detail', kwargs={'pk': self.staff.id})
    def get_update_url(self):
        return reverse('accounting:CommissionStructure_Update', kwargs={'pk': self.id})
    def get_delete_url(self):
        return reverse('accounting:CommissionStructure_Delete', kwargs={'pk': self.id})
class Payout(BaseEntity):
    """ Payout to all vendors"""
    MODE_CHOICES=(
        ('BANK', 'Bank'),
        ('CHEQUE', 'Cheque'),
        ('DD', 'Demand Draft'),
        ('CASH', 'Cash'),
    )
    vendor = models.ForeignKey(
        Vendor,
        related_name='bill_payouts'
    )
    date = models.DateField(
        verbose_name='payment date'
    )
    time = models.TimeField(
        verbose_name='payment time'
    )
    # Payout amount in currency units, bounded to [10, 10000000].
    amount = models.IntegerField(
        default=500,
        validators=[
            MinValueValidator(
                10,
                message = 'Amount should be greater than 10'
            ),
            MaxValueValidator(
                10000000,
                message = 'Amount should be less than 10000000'
            ),
        ]
    )
    # Payment channel used; see MODE_CHOICES.
    mode = models.CharField(
        max_length =15,
        choices = MODE_CHOICES,
        default = 'CASH',
    )
    def __str__(self):
        return self.vendor.name
    def get_absolute_url(self):
        return reverse('accounting:Payout_Detail', kwargs={'id': self.id})
    def get_update_url(self):
        return reverse('accounting:Payout_Update', kwargs={'id': self.id})
    def get_delete_url(self):
        return reverse('accounting:Payout_Delete', kwargs={'id': self.id})
class PayCommissionOrSalary(BaseEntity):
    """Payment (commission or salary) made to a staff member."""
    MODE_CHOICES=(
        ('BANK', 'Bank'),
        ('CHEQUE', 'Cheque'),
        ('DD', 'Demand Draft'),
        ('CASH', 'Cash'),
    )
    staff = models.ForeignKey(
        Staff,
        related_name= 'commissions_payouts'
    )
    date = models.DateField(
        verbose_name='payment date'
    )
    time = models.TimeField(
        verbose_name='payment time'
    )
    # Payment amount in currency units, bounded to [10, 10000000].
    amount = models.IntegerField(
        default=500,
        validators=[
            MinValueValidator(
                10,
                message = 'Amount should be greater than 10'
            ),
            MaxValueValidator(
                10000000,
                message = 'Amount should be less than 10000000'
            ),
        ]
    )
    # Payment channel used; see MODE_CHOICES.
    mode = models.CharField(
        max_length =15,
        choices = MODE_CHOICES,
        default = 'CASH',
    )
    def __str__(self):
        # Consistency fix: sibling payment models (Payout, Bill) define
        # __str__; without it the admin/shell shows the bare object repr.
        return self.staff.name
    def get_absolute_url(self):
        return reverse('accounting:PayCommissionOrSalary_Detail', kwargs={'id': self.id})
    def get_update_url(self):
        return reverse('accounting:PayCommissionOrSalary_Update', kwargs={'id': self.id})
    def get_delete_url(self):
        return reverse('accounting:PayCommissionOrSalary_Delete', kwargs={'id': self.id})
class Payin(BaseEntity):
    """ Payins from all customers"""
    MODE_CHOICES=(
        ('BANK', 'Bank'),
        ('CHEQUE', 'Cheque'),
        ('DD', 'Demand Draft'),
        ('CASH', 'Cash'),
    )
    # Either customer or event may be set; both are optional.
    customer = models.ForeignKey(
        'customers.Customer',
        related_name='customer_payins',
        blank = True,
        null = True,
    )
    event = models.ForeignKey(
        'booking.Event',
        related_name='event_payins',
        blank = True,
        null =True,
    )
    date = models.DateField(
        verbose_name='payment date'
    )
    time = models.TimeField(
        verbose_name='payment time'
    )
    # Payment amount in currency units, bounded to [10, 10000000].
    amount = models.IntegerField(
        default=500,
        validators=[
            MinValueValidator(
                10,
                message = 'Amount should be greater than 10'
            ),
            MaxValueValidator(
                10000000,
                message = 'Amount should be less than 10000000'
            ),
        ]
    )
    # Payment channel used; see MODE_CHOICES.
    mode = models.CharField(
        max_length =15,
        choices = MODE_CHOICES,
        default = 'CASH',
    )
    def get_absolute_url(self):
        return reverse('accounting:Payin_Detail', kwargs={'id': self.id})
    def get_update_url(self):
        return reverse('accounting:Payin_Update', kwargs={'id': self.id})
    def get_delete_url(self):
        return reverse('accounting:Payin_Delete', kwargs={'id': self.id})
class Invoice(BaseEntity):
    """Invoice issued to a customer for an event; status tracks its payment
    lifecycle and payins records the payments applied against it."""
    STATUS_CHOICES=(
        ('CREATED', 'Created'),
        ('CONFIRMED', 'Confirmed'),
        ('PARTIAL_PAYMENT', 'Partially Paid'),
        ('RECEIVED', 'Received'),
        ('CLOSED', 'Closed')
    )
    customer = models.ForeignKey(
        Customer,
        related_name='invoices',
    )
    event = models.ForeignKey(
        'booking.Event',
        related_name='invoice',
    )
    generated_date = models.DateField(
        verbose_name='date invoice generated'
    )
    due_date = models.DateField(
        verbose_name='date payment is expected'
    )
    paid_date = models.DateField(
        # Fix: verbose_name was copy-pasted from due_date
        # ('date payment is expected'); this field records when payment landed.
        verbose_name='date payment is made',
        blank =True,
        null = True,
    )
    status = models.CharField(
        max_length =15,
        choices = STATUS_CHOICES,
        default = 'CREATED',
    )
    # Total invoiced amount, bounded to [10, 10000000].
    amount = models.IntegerField(
        default=500,
        validators=[
            MinValueValidator(
                10,
                message = 'Amount should be greater than 10'
            ),
            MaxValueValidator(
                10000000,
                message = 'Amount should be less than 10000000'
            ),
        ]
    )
    # Amount received so far against this invoice.
    paid = models.IntegerField(
        default=500,
        validators=[
            MinValueValidator(
                10,
                message = 'Amount should be greater than 10'
            ),
            MaxValueValidator(
                10000000,
                message = 'Amount should be less than 10000000'
            ),
        ]
    )
    # Payments applied to this invoice. Fix: removed null=True — it has no
    # effect on ManyToManyField (Django system check fields.W340).
    payins = models.ManyToManyField(
        Payin,
        related_name='invoices',
        blank = True,
    )
    def get_absolute_url(self):
        return reverse('accounting:Invoice_Detail', kwargs={'id': self.id})
    def get_update_url(self):
        return reverse('accounting:Invoice_Update', kwargs={'id': self.id})
    def get_delete_url(self):
        return reverse('accounting:Invoice_Delete', kwargs={'id': self.id})
class Bill(BaseEntity):
    """Bill owed to a vendor for a booked service.

    Advances through STATUS_CHOICES; ``paid`` tracks how much of
    ``amount`` has been settled via ``payouts``.
    (Docstring fixed: it was copy-pasted from Invoice.)
    """
    STATUS_CHOICES = (
        ('CREATED', 'Created'),
        ('CONFIRMED', 'Confirmed'),
        ('PARTIAL_PAYMENT', 'Partially Paid'),
        ('PAID', 'Paid'),
        ('CLOSED', 'Closed')
    )
    # NOTE(review): related_name='vendor' means Vendor.vendor.all() yields
    # bills -- confusing, but callers may rely on it, so left unchanged.
    vendor = models.ForeignKey(
        Vendor,
        related_name='vendor',
    )
    booked_service = models.ForeignKey(
        'booking.Booked_Service',
        related_name='billed_services',
    )
    generated_date = models.DateField(
        verbose_name='date bill generated'
    )
    due_date = models.DateField(
        verbose_name='date payout is expected'
    )
    paid_date = models.DateField(
        verbose_name='date payout is made',
        null=True,
        blank=True,
    )
    status = models.CharField(
        max_length=15,
        choices=STATUS_CHOICES,
        default='CREATED',
    )
    amount = models.IntegerField(
        default=500,
        validators=[
            MinValueValidator(
                10,
                message='Amount should be greater than 10'
            ),
            MaxValueValidator(
                10000000,
                message='Amount should be less than 10000000'
            ),
        ]
    )
    paid = models.IntegerField(
        default=500,
        validators=[
            MinValueValidator(
                0,
                message='Amount should be greater than 0'
            ),
            MaxValueValidator(
                10000000,
                message='Amount should be less than 10000000'
            ),
        ]
    )
    payouts = models.ManyToManyField(
        Payout,
        related_name='bills',
        # null=True removed: it has no effect on ManyToManyField
        # (Django system check fields.W340).
        blank=True,
    )

    @property
    def due_amount(self):
        """Outstanding balance still owed on this bill."""
        return self.amount - self.paid

    def __str__(self):
        return self.vendor.name

    def get_absolute_url(self):
        return reverse('accounting:Bill_Detail', kwargs={'id': self.id})

    def get_update_url(self):
        return reverse('accounting:Bill_Update', kwargs={'id': self.id})

    def get_delete_url(self):
        return reverse('accounting:Bill_Delete', kwargs={'id': self.id})
class Commission(BaseEntity):
    """Commission owed to a staff member for a booked service.

    Advances through STATUS_CHOICES; ``paid`` tracks how much of
    ``amount`` has been settled via ``payouts``.
    """
    STATUS_CHOICES = (
        ('CREATED', 'Created'),
        ('CONFIRMED', 'Confirmed'),
        ('PARTIAL_PAYMENT', 'Partially Paid'),
        ('PAID', 'paid fully'),
        ('CLOSED', 'Closed')
    )
    staff = models.ForeignKey(
        Staff,
        related_name='staff_commissions',
    )
    event = models.ForeignKey(
        'booking.Event',
        related_name='event_commissions',
        blank=True,
        null=True,
    )
    booked_service = models.ForeignKey(
        'booking.Booked_Service',
        related_name='commissions',
    )
    generated_date = models.DateField(
        verbose_name='date commission generated'
    )
    due_date = models.DateField(
        verbose_name='date commission is expected'
    )
    paid_date = models.DateField(
        verbose_name='date commission is paid',
        null=True,
        blank=True
    )
    status = models.CharField(
        max_length=15,
        choices=STATUS_CHOICES,
        default='CREATED',
    )
    amount = models.IntegerField(
        default=500,
        validators=[
            MinValueValidator(
                10,
                message='Amount should be greater than 10'
            ),
            MaxValueValidator(
                10000000,
                message='Amount should be less than 10000000'
            ),
        ]
    )
    paid = models.IntegerField(
        default=500,
        validators=[
            MinValueValidator(
                0,
                message='Amount should be greater than 0'
            ),
            MaxValueValidator(
                10000000,
                message='Amount should be less than 10000000'
            ),
        ]
    )
    payouts = models.ManyToManyField(
        PayCommissionOrSalary,
        related_name='commissions',
        # null=True removed: it has no effect on ManyToManyField
        # (Django system check fields.W340).
        blank=True,
    )

    @property
    def due_amount(self):
        """Outstanding balance still owed on this commission (mirrors Bill)."""
        return self.amount - self.paid

    def get_absolute_url(self):
        return reverse('accounting:Commission_Detail', kwargs={'id': self.id})

    def get_update_url(self):
        return reverse('accounting:Commission_Update', kwargs={'id': self.id})

    def get_delete_url(self):
        return reverse('accounting:Commission_Delete', kwargs={'id': self.id})
@receiver(post_save, sender=PayCommissionOrSalary)
def update_commissions_salaries_based_on_PayCommissionOrSalary_post_save(sender, instance, created, **kwargs):
    """Distribute a saved staff payout across that staff's commissions.

    Commissions are settled oldest first; each absorbs at most its unpaid
    remainder.  Fixes vs. the original: already-paid commissions are
    skipped (the original re-paid them -- the sibling payout handler
    guards against this), and only the amount actually applied is
    deducted from the running total (the original deducted the full
    commission amount, or the commission's cumulative ``paid``).
    """
    payout = instance
    amount = payout.amount
    commissions = Commission.objects.filter(staff=payout.staff).order_by('generated_date')
    for commission in commissions:
        if amount <= 0:
            break
        # Only the unpaid remainder can absorb payout money.
        due = commission.amount - commission.paid
        if due <= 0:
            continue  # already settled; do not double-pay
        if amount >= due:
            commission.paid = commission.amount
            commission.status = 'PAID'
            commission.paid_date = timezone.now().date()
            amount -= due
        else:
            commission.paid += amount
            commission.status = 'PARTIAL_PAYMENT'
            amount = 0
        commission.payouts.add(payout)
        commission.save()
@receiver(pre_save, sender=PayCommissionOrSalary)
def update_bill_based_on_PayCommissionOrSalary_pre_save(sender, instance, **kwargs):
    """Before an edited payout is re-saved, undo its previous allocation.

    Loads the payout's stored state and removes the money it had applied
    to commissions, so the post_save handler can re-apply the new amount.
    Fixes vs. the original: ``commission -= commission.paid`` raised a
    TypeError on a model instance (silently masked by a bare ``except:``),
    the comparison was inverted relative to the pre_delete sibling, and
    the applied amount is now captured before the rollback.
    """
    print('triggered pre save PayCommissionOrSalary')
    payout = instance
    try:
        past_payout = PayCommissionOrSalary.objects.get(pk=payout.id)
    except PayCommissionOrSalary.DoesNotExist:
        # New payout being created: nothing to roll back.
        return
    amount = past_payout.amount
    for commission in past_payout.commissions.all():
        if amount <= 0:
            break
        # Portion of this payout currently held by the commission.
        applied = min(commission.paid, amount)
        commission.paid -= applied
        commission.status = 'CONFIRMED' if commission.paid == 0 else 'PARTIAL_PAYMENT'
        commission.save()
        amount -= applied
@receiver(pre_delete, sender=PayCommissionOrSalary)
def update_bill_based_on_PayCommissionOrSalary_pre_delete(sender, instance, **kwargs):
    """Before a payout is deleted, remove the money it applied to commissions.

    Fixes vs. the original: ``amount -= commission.paid`` ran *after*
    ``commission.paid = 0``, so it subtracted zero and every commission
    was wrongly reset; the rolled-back portion is now captured first, and
    the bare ``except:`` is narrowed to the lookup that can fail.
    """
    print('triggered pre delete PayCommissionOrSalary' )
    payout = instance
    try:
        deleted_payout = PayCommissionOrSalary.objects.get(pk=payout.id)
    except PayCommissionOrSalary.DoesNotExist:
        print('failed')
        return
    amount = deleted_payout.amount
    commissions = deleted_payout.commissions.all()
    print(commissions)
    for commission in commissions:
        if amount <= 0:
            break
        # Portion of this payout currently held by the commission.
        applied = min(commission.paid, amount)
        commission.paid -= applied
        commission.status = 'CONFIRMED' if commission.paid == 0 else 'PARTIAL_PAYMENT'
        commission.save()
        amount -= applied
@receiver(post_save, sender=Payout)
def update_bill_based_on_payout_post_save(sender, instance, created, **kwargs):
    """Distribute a saved vendor payout across that vendor's bills.

    Bills are settled oldest first; each absorbs at most its unpaid
    remainder.  Fixes vs. the original: only the amount actually applied
    is deducted from the running total (the original deducted the full
    bill amount for a partially-paid bill, or the bill's cumulative
    ``paid`` in the partial branch, driving ``amount`` negative).
    """
    print('triggered post save payout')
    payout = instance
    amount = payout.amount
    bills = Bill.objects.filter(vendor=payout.vendor).order_by('generated_date')
    for bill in bills:
        if amount <= 0:
            break
        # Only the unpaid remainder can absorb payout money.
        due = bill.amount - bill.paid
        if due <= 0:
            continue  # already settled
        if amount >= due:
            bill.paid = bill.amount
            bill.status = 'PAID'
            bill.paid_date = timezone.now().date()
            amount -= due
        else:
            bill.paid += amount
            bill.status = 'PARTIAL_PAYMENT'
            amount = 0
        bill.payouts.add(payout)
        bill.save()
@receiver(pre_save, sender=Payout)
def update_bill_based_on_payout_pre_save(sender, instance, **kwargs):
    """Before an edited payout is re-saved, undo its previous allocation.

    Loads the payout's stored state and removes the money it had applied
    to bills, so the post_save handler can re-apply the new amount.
    Fixes vs. the original: ``amount -= bill.paid`` ran *after*
    ``bill.paid = 0`` (subtracting zero, so ``amount`` never shrank and
    every bill was reset), and the bare ``except:`` is narrowed to the
    lookup that legitimately fails on first save.
    """
    print('triggered pre save payout')
    payout = instance
    try:
        past_payout = Payout.objects.get(pk=payout.id)
    except Payout.DoesNotExist:
        # New payout being created: nothing to roll back.
        return
    amount = past_payout.amount
    print(amount)
    for bill in past_payout.bills.all():
        if amount <= 0:
            break
        # Portion of this payout currently held by the bill.
        applied = min(bill.paid, amount)
        bill.paid -= applied
        bill.status = 'CONFIRMED' if bill.paid == 0 else 'PARTIAL_PAYMENT'
        bill.save()
        amount -= applied
@receiver(pre_delete, sender=Payout)
def update_bill_based_on_payout_pre_delete(sender, instance, **kwargs):
    """Before a payout is deleted, remove the money it applied to bills.

    Fixes vs. the original: ``amount -= bill.paid`` ran *after*
    ``bill.paid = 0`` (subtracting zero, so every bill was wrongly
    reset); the rolled-back portion is now captured first.  The redundant
    re-fetch of the instance and the silencing bare ``except:`` are gone:
    pre_delete always receives a fully-loaded instance.
    """
    print('triggered pre delete payout' )
    payout = instance
    amount = payout.amount
    print(amount)
    for bill in payout.bills.all():
        if amount <= 0:
            break
        # Portion of this payout currently held by the bill.
        applied = min(bill.paid, amount)
        bill.paid -= applied
        if bill.paid == 0:
            bill.status = 'CONFIRMED'
        else:
            bill.status = 'PARTIAL_PAYMENT'
        bill.paid_date = None
        bill.save()
        amount -= applied
| [
"django.db.models.DateField",
"django.core.validators.MaxValueValidator",
"django.db.models.TimeField",
"django.db.models.ForeignKey",
"django.db.models.ManyToManyField",
"django.core.urlresolvers.reverse",
"django.utils.timezone.now",
"django.dispatch.receiver",
"django.core.validators.MinValueVali... | [((13000, 13049), 'django.dispatch.receiver', 'receiver', (['post_save'], {'sender': 'PayCommissionOrSalary'}), '(post_save, sender=PayCommissionOrSalary)\n', (13008, 13049), False, 'from django.dispatch import receiver\n'), ((14042, 14090), 'django.dispatch.receiver', 'receiver', (['pre_save'], {'sender': 'PayCommissionOrSalary'}), '(pre_save, sender=PayCommissionOrSalary)\n', (14050, 14090), False, 'from django.dispatch import receiver\n'), ((14977, 15027), 'django.dispatch.receiver', 'receiver', (['pre_delete'], {'sender': 'PayCommissionOrSalary'}), '(pre_delete, sender=PayCommissionOrSalary)\n', (14985, 15027), False, 'from django.dispatch import receiver\n'), ((15961, 15995), 'django.dispatch.receiver', 'receiver', (['post_save'], {'sender': 'Payout'}), '(post_save, sender=Payout)\n', (15969, 15995), False, 'from django.dispatch import receiver\n'), ((16978, 17011), 'django.dispatch.receiver', 'receiver', (['pre_save'], {'sender': 'Payout'}), '(pre_save, sender=Payout)\n', (16986, 17011), False, 'from django.dispatch import receiver\n'), ((17787, 17822), 'django.dispatch.receiver', 'receiver', (['pre_delete'], {'sender': 'Payout'}), '(pre_delete, sender=Payout)\n', (17795, 17822), False, 'from django.dispatch import receiver\n'), ((531, 583), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Staff'], {'related_name': '"""commissions"""'}), "(Staff, related_name='commissions')\n", (548, 583), False, 'from django.db import models\n'), ((632, 686), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Service'], {'related_name': '"""commissions"""'}), "(Service, related_name='commissions')\n", (649, 686), False, 'from django.db import models\n'), ((1839, 1893), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Vendor'], {'related_name': '"""bill_payouts"""'}), "(Vendor, related_name='bill_payouts')\n", (1856, 1893), False, 'from django.db import models\n'), ((1939, 1984), 'django.db.models.DateField', 
'models.DateField', ([], {'verbose_name': '"""payment date"""'}), "(verbose_name='payment date')\n", (1955, 1984), False, 'from django.db import models\n'), ((2018, 2063), 'django.db.models.TimeField', 'models.TimeField', ([], {'verbose_name': '"""payment time"""'}), "(verbose_name='payment time')\n", (2034, 2063), False, 'from django.db import models\n'), ((2502, 2571), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(15)', 'choices': 'MODE_CHOICES', 'default': '"""CASH"""'}), "(max_length=15, choices=MODE_CHOICES, default='CASH')\n", (2518, 2571), False, 'from django.db import models\n'), ((3216, 3276), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Staff'], {'related_name': '"""commissions_payouts"""'}), "(Staff, related_name='commissions_payouts')\n", (3233, 3276), False, 'from django.db import models\n'), ((3323, 3368), 'django.db.models.DateField', 'models.DateField', ([], {'verbose_name': '"""payment date"""'}), "(verbose_name='payment date')\n", (3339, 3368), False, 'from django.db import models\n'), ((3402, 3447), 'django.db.models.TimeField', 'models.TimeField', ([], {'verbose_name': '"""payment time"""'}), "(verbose_name='payment time')\n", (3418, 3447), False, 'from django.db import models\n'), ((3886, 3955), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(15)', 'choices': 'MODE_CHOICES', 'default': '"""CASH"""'}), "(max_length=15, choices=MODE_CHOICES, default='CASH')\n", (3902, 3955), False, 'from django.db import models\n'), ((4575, 4673), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""customers.Customer"""'], {'related_name': '"""customer_payins"""', 'blank': '(True)', 'null': '(True)'}), "('customers.Customer', related_name='customer_payins',\n blank=True, null=True)\n", (4592, 4673), False, 'from django.db import models\n'), ((4745, 4835), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""booking.Event"""'], {'related_name': '"""event_payins"""', 'blank': '(True)', 'null': 
'(True)'}), "('booking.Event', related_name='event_payins', blank=True,\n null=True)\n", (4762, 4835), False, 'from django.db import models\n'), ((4905, 4950), 'django.db.models.DateField', 'models.DateField', ([], {'verbose_name': '"""payment date"""'}), "(verbose_name='payment date')\n", (4921, 4950), False, 'from django.db import models\n'), ((4984, 5029), 'django.db.models.TimeField', 'models.TimeField', ([], {'verbose_name': '"""payment time"""'}), "(verbose_name='payment time')\n", (5000, 5029), False, 'from django.db import models\n'), ((5468, 5537), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(15)', 'choices': 'MODE_CHOICES', 'default': '"""CASH"""'}), "(max_length=15, choices=MODE_CHOICES, default='CASH')\n", (5484, 5537), False, 'from django.db import models\n'), ((6199, 6251), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Customer'], {'related_name': '"""invoices"""'}), "(Customer, related_name='invoices')\n", (6216, 6251), False, 'from django.db import models\n'), ((6299, 6357), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""booking.Event"""'], {'related_name': '"""invoice"""'}), "('booking.Event', related_name='invoice')\n", (6316, 6357), False, 'from django.db import models\n'), ((6414, 6469), 'django.db.models.DateField', 'models.DateField', ([], {'verbose_name': '"""date invoice generated"""'}), "(verbose_name='date invoice generated')\n", (6430, 6469), False, 'from django.db import models\n'), ((6507, 6564), 'django.db.models.DateField', 'models.DateField', ([], {'verbose_name': '"""date payment is expected"""'}), "(verbose_name='date payment is expected')\n", (6523, 6564), False, 'from django.db import models\n'), ((6604, 6689), 'django.db.models.DateField', 'models.DateField', ([], {'verbose_name': '"""date payment is expected"""', 'blank': '(True)', 'null': '(True)'}), "(verbose_name='date payment is expected', blank=True, null=True\n )\n", (6620, 6689), False, 'from django.db import models\n'), 
((6749, 6823), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(15)', 'choices': 'STATUS_CHOICES', 'default': '"""CREATED"""'}), "(max_length=15, choices=STATUS_CHOICES, default='CREATED')\n", (6765, 6823), False, 'from django.db import models\n'), ((7679, 7756), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['Payin'], {'related_name': '"""invoices"""', 'null': '(True)', 'blank': '(True)'}), "(Payin, related_name='invoices', null=True, blank=True)\n", (7701, 7756), False, 'from django.db import models\n'), ((8435, 8483), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Vendor'], {'related_name': '"""vendor"""'}), "(Vendor, related_name='vendor')\n", (8452, 8483), False, 'from django.db import models\n'), ((8540, 8615), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""booking.Booked_Service"""'], {'related_name': '"""billed_services"""'}), "('booking.Booked_Service', related_name='billed_services')\n", (8557, 8615), False, 'from django.db import models\n'), ((8672, 8724), 'django.db.models.DateField', 'models.DateField', ([], {'verbose_name': '"""date bill generated"""'}), "(verbose_name='date bill generated')\n", (8688, 8724), False, 'from django.db import models\n'), ((8762, 8818), 'django.db.models.DateField', 'models.DateField', ([], {'verbose_name': '"""date payout is expected"""'}), "(verbose_name='date payout is expected')\n", (8778, 8818), False, 'from django.db import models\n'), ((8858, 8933), 'django.db.models.DateField', 'models.DateField', ([], {'verbose_name': '"""date payout is made"""', 'null': '(True)', 'blank': '(True)'}), "(verbose_name='date payout is made', null=True, blank=True)\n", (8874, 8933), False, 'from django.db import models\n'), ((8999, 9073), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(15)', 'choices': 'STATUS_CHOICES', 'default': '"""CREATED"""'}), "(max_length=15, choices=STATUS_CHOICES, default='CREATED')\n", (9015, 9073), False, 'from django.db 
import models\n'), ((9929, 10004), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['Payout'], {'related_name': '"""bills"""', 'null': '(True)', 'blank': '(True)'}), "(Payout, related_name='bills', null=True, blank=True)\n", (9951, 10004), False, 'from django.db import models\n'), ((10824, 10882), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Staff'], {'related_name': '"""staff_commissions"""'}), "(Staff, related_name='staff_commissions')\n", (10841, 10882), False, 'from django.db import models\n'), ((10930, 11026), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""booking.Event"""'], {'related_name': '"""event_commissions"""', 'blank': '(True)', 'null': '(True)'}), "('booking.Event', related_name='event_commissions', blank=\n True, null=True)\n", (10947, 11026), False, 'from django.db import models\n'), ((11105, 11176), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""booking.Booked_Service"""'], {'related_name': '"""commissions"""'}), "('booking.Booked_Service', related_name='commissions')\n", (11122, 11176), False, 'from django.db import models\n'), ((11233, 11291), 'django.db.models.DateField', 'models.DateField', ([], {'verbose_name': '"""date commission generated"""'}), "(verbose_name='date commission generated')\n", (11249, 11291), False, 'from django.db import models\n'), ((11329, 11389), 'django.db.models.DateField', 'models.DateField', ([], {'verbose_name': '"""date commission is expected"""'}), "(verbose_name='date commission is expected')\n", (11345, 11389), False, 'from django.db import models\n'), ((11429, 11508), 'django.db.models.DateField', 'models.DateField', ([], {'verbose_name': '"""date commission is paid"""', 'null': '(True)', 'blank': '(True)'}), "(verbose_name='date commission is paid', null=True, blank=True)\n", (11445, 11508), False, 'from django.db import models\n'), ((11572, 11646), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(15)', 'choices': 'STATUS_CHOICES', 
'default': '"""CREATED"""'}), "(max_length=15, choices=STATUS_CHOICES, default='CREATED')\n", (11588, 11646), False, 'from django.db import models\n'), ((12501, 12601), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['PayCommissionOrSalary'], {'related_name': '"""commissions"""', 'null': '(True)', 'blank': '(True)'}), "(PayCommissionOrSalary, related_name='commissions',\n null=True, blank=True)\n", (12523, 12601), False, 'from django.db import models\n'), ((1324, 1387), 'django.core.urlresolvers.reverse', 'reverse', (['"""customers:Staff_Detail"""'], {'kwargs': "{'pk': self.staff.id}"}), "('customers:Staff_Detail', kwargs={'pk': self.staff.id})\n", (1331, 1387), False, 'from django.core.urlresolvers import reverse\n'), ((1434, 1506), 'django.core.urlresolvers.reverse', 'reverse', (['"""accounting:CommissionStructure_Update"""'], {'kwargs': "{'pk': self.id}"}), "('accounting:CommissionStructure_Update', kwargs={'pk': self.id})\n", (1441, 1506), False, 'from django.core.urlresolvers import reverse\n'), ((1553, 1625), 'django.core.urlresolvers.reverse', 'reverse', (['"""accounting:CommissionStructure_Delete"""'], {'kwargs': "{'pk': self.id}"}), "('accounting:CommissionStructure_Delete', kwargs={'pk': self.id})\n", (1560, 1625), False, 'from django.core.urlresolvers import reverse\n'), ((2712, 2771), 'django.core.urlresolvers.reverse', 'reverse', (['"""accounting:Payout_Detail"""'], {'kwargs': "{'id': self.id}"}), "('accounting:Payout_Detail', kwargs={'id': self.id})\n", (2719, 2771), False, 'from django.core.urlresolvers import reverse\n'), ((2818, 2877), 'django.core.urlresolvers.reverse', 'reverse', (['"""accounting:Payout_Update"""'], {'kwargs': "{'id': self.id}"}), "('accounting:Payout_Update', kwargs={'id': self.id})\n", (2825, 2877), False, 'from django.core.urlresolvers import reverse\n'), ((2924, 2983), 'django.core.urlresolvers.reverse', 'reverse', (['"""accounting:Payout_Delete"""'], {'kwargs': "{'id': self.id}"}), 
"('accounting:Payout_Delete', kwargs={'id': self.id})\n", (2931, 2983), False, 'from django.core.urlresolvers import reverse\n'), ((4040, 4114), 'django.core.urlresolvers.reverse', 'reverse', (['"""accounting:PayCommissionOrSalary_Detail"""'], {'kwargs': "{'id': self.id}"}), "('accounting:PayCommissionOrSalary_Detail', kwargs={'id': self.id})\n", (4047, 4114), False, 'from django.core.urlresolvers import reverse\n'), ((4161, 4235), 'django.core.urlresolvers.reverse', 'reverse', (['"""accounting:PayCommissionOrSalary_Update"""'], {'kwargs': "{'id': self.id}"}), "('accounting:PayCommissionOrSalary_Update', kwargs={'id': self.id})\n", (4168, 4235), False, 'from django.core.urlresolvers import reverse\n'), ((4282, 4356), 'django.core.urlresolvers.reverse', 'reverse', (['"""accounting:PayCommissionOrSalary_Delete"""'], {'kwargs': "{'id': self.id}"}), "('accounting:PayCommissionOrSalary_Delete', kwargs={'id': self.id})\n", (4289, 4356), False, 'from django.core.urlresolvers import reverse\n'), ((5623, 5681), 'django.core.urlresolvers.reverse', 'reverse', (['"""accounting:Payin_Detail"""'], {'kwargs': "{'id': self.id}"}), "('accounting:Payin_Detail', kwargs={'id': self.id})\n", (5630, 5681), False, 'from django.core.urlresolvers import reverse\n'), ((5728, 5786), 'django.core.urlresolvers.reverse', 'reverse', (['"""accounting:Payin_Update"""'], {'kwargs': "{'id': self.id}"}), "('accounting:Payin_Update', kwargs={'id': self.id})\n", (5735, 5786), False, 'from django.core.urlresolvers import reverse\n'), ((5833, 5891), 'django.core.urlresolvers.reverse', 'reverse', (['"""accounting:Payin_Delete"""'], {'kwargs': "{'id': self.id}"}), "('accounting:Payin_Delete', kwargs={'id': self.id})\n", (5840, 5891), False, 'from django.core.urlresolvers import reverse\n'), ((7868, 7928), 'django.core.urlresolvers.reverse', 'reverse', (['"""accounting:Invoice_Detail"""'], {'kwargs': "{'id': self.id}"}), "('accounting:Invoice_Detail', kwargs={'id': self.id})\n", (7875, 7928), False, 'from 
django.core.urlresolvers import reverse\n'), ((7975, 8035), 'django.core.urlresolvers.reverse', 'reverse', (['"""accounting:Invoice_Update"""'], {'kwargs': "{'id': self.id}"}), "('accounting:Invoice_Update', kwargs={'id': self.id})\n", (7982, 8035), False, 'from django.core.urlresolvers import reverse\n'), ((8082, 8142), 'django.core.urlresolvers.reverse', 'reverse', (['"""accounting:Invoice_Delete"""'], {'kwargs': "{'id': self.id}"}), "('accounting:Invoice_Delete', kwargs={'id': self.id})\n", (8089, 8142), False, 'from django.core.urlresolvers import reverse\n'), ((10252, 10309), 'django.core.urlresolvers.reverse', 'reverse', (['"""accounting:Bill_Detail"""'], {'kwargs': "{'id': self.id}"}), "('accounting:Bill_Detail', kwargs={'id': self.id})\n", (10259, 10309), False, 'from django.core.urlresolvers import reverse\n'), ((10356, 10413), 'django.core.urlresolvers.reverse', 'reverse', (['"""accounting:Bill_Update"""'], {'kwargs': "{'id': self.id}"}), "('accounting:Bill_Update', kwargs={'id': self.id})\n", (10363, 10413), False, 'from django.core.urlresolvers import reverse\n'), ((10460, 10517), 'django.core.urlresolvers.reverse', 'reverse', (['"""accounting:Bill_Delete"""'], {'kwargs': "{'id': self.id}"}), "('accounting:Bill_Delete', kwargs={'id': self.id})\n", (10467, 10517), False, 'from django.core.urlresolvers import reverse\n'), ((12710, 12773), 'django.core.urlresolvers.reverse', 'reverse', (['"""accounting:Commission_Detail"""'], {'kwargs': "{'id': self.id}"}), "('accounting:Commission_Detail', kwargs={'id': self.id})\n", (12717, 12773), False, 'from django.core.urlresolvers import reverse\n'), ((12820, 12883), 'django.core.urlresolvers.reverse', 'reverse', (['"""accounting:Commission_Update"""'], {'kwargs': "{'id': self.id}"}), "('accounting:Commission_Update', kwargs={'id': self.id})\n", (12827, 12883), False, 'from django.core.urlresolvers import reverse\n'), ((12930, 12993), 'django.core.urlresolvers.reverse', 'reverse', 
(['"""accounting:Commission_Delete"""'], {'kwargs': "{'id': self.id}"}), "('accounting:Commission_Delete', kwargs={'id': self.id})\n", (12937, 12993), False, 'from django.core.urlresolvers import reverse\n'), ((821, 886), 'django.core.validators.MinValueValidator', 'MinValueValidator', (['(10)'], {'message': '"""Amount should be greater than 10"""'}), "(10, message='Amount should be greater than 10')\n", (838, 886), False, 'from django.core.validators import RegexValidator, MinValueValidator, MaxValueValidator\n'), ((965, 1035), 'django.core.validators.MaxValueValidator', 'MaxValueValidator', (['(100000)'], {'message': '"""Amount should be less than 100000"""'}), "(100000, message='Amount should be less than 100000')\n", (982, 1035), False, 'from django.core.validators import RegexValidator, MinValueValidator, MaxValueValidator\n'), ((2187, 2252), 'django.core.validators.MinValueValidator', 'MinValueValidator', (['(10)'], {'message': '"""Amount should be greater than 10"""'}), "(10, message='Amount should be greater than 10')\n", (2204, 2252), False, 'from django.core.validators import RegexValidator, MinValueValidator, MaxValueValidator\n'), ((2331, 2405), 'django.core.validators.MaxValueValidator', 'MaxValueValidator', (['(10000000)'], {'message': '"""Amount should be less than 10000000"""'}), "(10000000, message='Amount should be less than 10000000')\n", (2348, 2405), False, 'from django.core.validators import RegexValidator, MinValueValidator, MaxValueValidator\n'), ((3571, 3636), 'django.core.validators.MinValueValidator', 'MinValueValidator', (['(10)'], {'message': '"""Amount should be greater than 10"""'}), "(10, message='Amount should be greater than 10')\n", (3588, 3636), False, 'from django.core.validators import RegexValidator, MinValueValidator, MaxValueValidator\n'), ((3715, 3789), 'django.core.validators.MaxValueValidator', 'MaxValueValidator', (['(10000000)'], {'message': '"""Amount should be less than 10000000"""'}), "(10000000, message='Amount 
should be less than 10000000')\n", (3732, 3789), False, 'from django.core.validators import RegexValidator, MinValueValidator, MaxValueValidator\n'), ((5153, 5218), 'django.core.validators.MinValueValidator', 'MinValueValidator', (['(10)'], {'message': '"""Amount should be greater than 10"""'}), "(10, message='Amount should be greater than 10')\n", (5170, 5218), False, 'from django.core.validators import RegexValidator, MinValueValidator, MaxValueValidator\n'), ((5297, 5371), 'django.core.validators.MaxValueValidator', 'MaxValueValidator', (['(10000000)'], {'message': '"""Amount should be less than 10000000"""'}), "(10000000, message='Amount should be less than 10000000')\n", (5314, 5371), False, 'from django.core.validators import RegexValidator, MinValueValidator, MaxValueValidator\n'), ((6960, 7025), 'django.core.validators.MinValueValidator', 'MinValueValidator', (['(10)'], {'message': '"""Amount should be greater than 10"""'}), "(10, message='Amount should be greater than 10')\n", (6977, 7025), False, 'from django.core.validators import RegexValidator, MinValueValidator, MaxValueValidator\n'), ((7104, 7178), 'django.core.validators.MaxValueValidator', 'MaxValueValidator', (['(10000000)'], {'message': '"""Amount should be less than 10000000"""'}), "(10000000, message='Amount should be less than 10000000')\n", (7121, 7178), False, 'from django.core.validators import RegexValidator, MinValueValidator, MaxValueValidator\n'), ((7362, 7427), 'django.core.validators.MinValueValidator', 'MinValueValidator', (['(10)'], {'message': '"""Amount should be greater than 10"""'}), "(10, message='Amount should be greater than 10')\n", (7379, 7427), False, 'from django.core.validators import RegexValidator, MinValueValidator, MaxValueValidator\n'), ((7506, 7580), 'django.core.validators.MaxValueValidator', 'MaxValueValidator', (['(10000000)'], {'message': '"""Amount should be less than 10000000"""'}), "(10000000, message='Amount should be less than 10000000')\n", (7523, 7580), 
False, 'from django.core.validators import RegexValidator, MinValueValidator, MaxValueValidator\n'), ((9210, 9275), 'django.core.validators.MinValueValidator', 'MinValueValidator', (['(10)'], {'message': '"""Amount should be greater than 10"""'}), "(10, message='Amount should be greater than 10')\n", (9227, 9275), False, 'from django.core.validators import RegexValidator, MinValueValidator, MaxValueValidator\n'), ((9354, 9428), 'django.core.validators.MaxValueValidator', 'MaxValueValidator', (['(10000000)'], {'message': '"""Amount should be less than 10000000"""'}), "(10000000, message='Amount should be less than 10000000')\n", (9371, 9428), False, 'from django.core.validators import RegexValidator, MinValueValidator, MaxValueValidator\n'), ((9613, 9676), 'django.core.validators.MinValueValidator', 'MinValueValidator', (['(0)'], {'message': '"""Amount should be greater than 0"""'}), "(0, message='Amount should be greater than 0')\n", (9630, 9676), False, 'from django.core.validators import RegexValidator, MinValueValidator, MaxValueValidator\n'), ((9755, 9829), 'django.core.validators.MaxValueValidator', 'MaxValueValidator', (['(10000000)'], {'message': '"""Amount should be less than 10000000"""'}), "(10000000, message='Amount should be less than 10000000')\n", (9772, 9829), False, 'from django.core.validators import RegexValidator, MinValueValidator, MaxValueValidator\n'), ((11783, 11848), 'django.core.validators.MinValueValidator', 'MinValueValidator', (['(10)'], {'message': '"""Amount should be greater than 10"""'}), "(10, message='Amount should be greater than 10')\n", (11800, 11848), False, 'from django.core.validators import RegexValidator, MinValueValidator, MaxValueValidator\n'), ((11927, 12001), 'django.core.validators.MaxValueValidator', 'MaxValueValidator', (['(10000000)'], {'message': '"""Amount should be less than 10000000"""'}), "(10000000, message='Amount should be less than 10000000')\n", (11944, 12001), False, 'from django.core.validators import 
RegexValidator, MinValueValidator, MaxValueValidator\n'), ((12185, 12248), 'django.core.validators.MinValueValidator', 'MinValueValidator', (['(0)'], {'message': '"""Amount should be greater than 0"""'}), "(0, message='Amount should be greater than 0')\n", (12202, 12248), False, 'from django.core.validators import RegexValidator, MinValueValidator, MaxValueValidator\n'), ((12327, 12401), 'django.core.validators.MaxValueValidator', 'MaxValueValidator', (['(10000000)'], {'message': '"""Amount should be less than 10000000"""'}), "(10000000, message='Amount should be less than 10000000')\n", (12344, 12401), False, 'from django.core.validators import RegexValidator, MinValueValidator, MaxValueValidator\n'), ((13542, 13556), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (13554, 13556), False, 'from django.utils import timezone\n'), ((16495, 16509), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (16507, 16509), False, 'from django.utils import timezone\n'), ((13906, 13920), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (13918, 13920), False, 'from django.utils import timezone\n'), ((16847, 16861), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (16859, 16861), False, 'from django.utils import timezone\n')] |
from config import Configuration


def main():
    """Load the AnyLink configuration and register the default user."""
    config = Configuration("/home/orikeidar01/config.json", "anylink")
    config.database.add_user(
        "<EMAIL>",
        "ECD71870D1963316A97E3AC3408C9835AD8CF0F3C1BC703527C30265534F75AE",
        "anylink",
    )


if __name__ == "__main__":
    main()
| [
"config.Configuration"
] | [((74, 131), 'config.Configuration', 'Configuration', (['"""/home/orikeidar01/config.json"""', '"""anylink"""'], {}), "('/home/orikeidar01/config.json', 'anylink')\n", (87, 131), False, 'from config import Configuration\n')] |
from django.contrib.auth import get_user_model
from django.db import models
from ordered_model.models import OrderedModel
class Rule(OrderedModel):
    """A subreddit rule that a moderator action may be linked to."""

    # Short label shown wherever the rule is referenced.
    name = models.CharField(
        help_text='The name of the rule',
        max_length=255,
    )
    # Optional longer explanation of the rule.
    description = models.TextField(
        help_text='Text to further explain or define the rule',
        blank=True,
        default='',
    )

    def __str__(self):
        return self.name
class Entry(models.Model):
    """A single moderation action recorded against a reddit user."""

    ACTION_WARN = 1
    ACTION_TEMP_BAN = 2
    ACTION_PERM_BAN = 3
    ACTION_CHOICES = (
        (ACTION_WARN, 'Warn'),
        (ACTION_TEMP_BAN, 'Temporary Ban'),
        (ACTION_PERM_BAN, 'Permanent Ban'),
    )

    # Moderator who took the action; kept (as NULL) if the account is removed.
    moderator = models.ForeignKey(
        get_user_model(),
        on_delete=models.SET_NULL,
        null=True,
        editable=False,
        related_name='entries',
    )
    date = models.DateField(auto_now_add=True)
    # Reddit username the action targets.
    user = models.CharField(db_index=True, blank=False, max_length=20)
    rule = models.ForeignKey(Rule, related_name='+', null=True, on_delete=models.SET_NULL)
    action = models.PositiveSmallIntegerField(default=ACTION_WARN, choices=ACTION_CHOICES)
    ban_length = models.PositiveSmallIntegerField(
        default=None,
        null=True,
        help_text='The length of a temporary ban',
    )
    notes = models.TextField(
        default='',
        blank=True,
        help_text='A private note to attach to this entry.',
    )

    @property
    def action_string(self):
        """Human-readable description of the action taken."""
        if self.action == Entry.ACTION_PERM_BAN:
            return 'Permanent Ban'
        if self.action == Entry.ACTION_TEMP_BAN:
            return f'{self.ban_length}-day Ban'
        return 'Warning Issued'

    class Meta:
        ordering = ['-date', '-id']
        verbose_name_plural = 'entries'

    def __str__(self):
        return f'Action for /u/{self.user} on {self.date}'
| [
"django.contrib.auth.get_user_model",
"django.db.models.DateField",
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.models.PositiveSmallIntegerField",
"django.db.models.CharField"
] | [((240, 306), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'help_text': '"""The name of the rule"""'}), "(max_length=255, help_text='The name of the rule')\n", (256, 306), False, 'from django.db import models\n'), ((325, 426), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'default': '""""""', 'help_text': '"""Text to further explain or define the rule"""'}), "(blank=True, default='', help_text=\n 'Text to further explain or define the rule')\n", (341, 426), False, 'from django.db import models\n'), ((892, 927), 'django.db.models.DateField', 'models.DateField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (908, 927), False, 'from django.db import models\n'), ((939, 998), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)', 'blank': '(False)', 'db_index': '(True)'}), '(max_length=20, blank=False, db_index=True)\n', (955, 998), False, 'from django.db import models\n'), ((1010, 1089), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Rule'], {'on_delete': 'models.SET_NULL', 'null': '(True)', 'related_name': '"""+"""'}), "(Rule, on_delete=models.SET_NULL, null=True, related_name='+')\n", (1027, 1089), False, 'from django.db import models\n'), ((1103, 1180), 'django.db.models.PositiveSmallIntegerField', 'models.PositiveSmallIntegerField', ([], {'choices': 'ACTION_CHOICES', 'default': 'ACTION_WARN'}), '(choices=ACTION_CHOICES, default=ACTION_WARN)\n', (1135, 1180), False, 'from django.db import models\n'), ((1198, 1303), 'django.db.models.PositiveSmallIntegerField', 'models.PositiveSmallIntegerField', ([], {'null': '(True)', 'default': 'None', 'help_text': '"""The length of a temporary ban"""'}), "(null=True, default=None, help_text=\n 'The length of a temporary ban')\n", (1230, 1303), False, 'from django.db import models\n'), ((1311, 1409), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'default': '""""""', 'help_text': '"""A private 
note to attach to this entry."""'}), "(blank=True, default='', help_text=\n 'A private note to attach to this entry.')\n", (1327, 1409), False, 'from django.db import models\n'), ((751, 767), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (765, 767), False, 'from django.contrib.auth import get_user_model\n')] |
#!/usr/bin/env python
#
# ----------------------------------------------------------------------
#
# <NAME>, U.S. Geological Survey
# <NAME>, GNS Science
# <NAME>, University of Chicago
#
# This code was developed as part of the Computational Infrastructure
# for Geodynamics (http://geodynamics.org).
#
# Copyright (c) 2010-2017 University of California, Davis
#
# See COPYING for license information.
#
# ----------------------------------------------------------------------
#
## @file pylith/topology/MeshRefiner.py
##
## @brief Python manager for refining mesh in parallel.
##
## Factory: mesh_refiner.
from pylith.utils.PetscComponent import PetscComponent
# MeshRefiner class
class MeshRefiner(PetscComponent):
"""
Python manager for refining mesh in parallel.
Factory: mesh_refiner
"""
# PUBLIC METHODS /////////////////////////////////////////////////////
def __init__(self, name="refiner"):
"""
Constructor.
"""
PetscComponent.__init__(self, name, facility="refiner")
return
def refine(self, mesh):
"""
Refine mesh.
"""
self._setupLogging()
logEvent = "%srefine" % self._loggingPrefix
self._eventLogger.eventBegin(logEvent)
self._eventLogger.eventEnd(logEvent)
return mesh
# PRIVATE METHODS ////////////////////////////////////////////////////
def _configure(self):
"""
Set members based using inventory.
"""
PetscComponent._configure(self)
return
def _setupLogging(self):
"""
Setup event logging.
"""
self._loggingPrefix = "Refin "
from pylith.utils.EventLogger import EventLogger
logger = EventLogger()
logger.className("FE Refinement")
logger.initialize()
events = ["refine"]
for event in events:
logger.registerEvent("%s%s" % (self._loggingPrefix, event))
self._eventLogger = logger
return
# FACTORIES ////////////////////////////////////////////////////////////
def mesh_refiner():
"""
Factory associated with MeshRefiner.
"""
return MeshRefiner()
# End of file
| [
"pylith.utils.EventLogger.EventLogger",
"pylith.utils.PetscComponent.PetscComponent.__init__",
"pylith.utils.PetscComponent.PetscComponent._configure"
] | [((956, 1011), 'pylith.utils.PetscComponent.PetscComponent.__init__', 'PetscComponent.__init__', (['self', 'name'], {'facility': '"""refiner"""'}), "(self, name, facility='refiner')\n", (979, 1011), False, 'from pylith.utils.PetscComponent import PetscComponent\n'), ((1417, 1448), 'pylith.utils.PetscComponent.PetscComponent._configure', 'PetscComponent._configure', (['self'], {}), '(self)\n', (1442, 1448), False, 'from pylith.utils.PetscComponent import PetscComponent\n'), ((1631, 1644), 'pylith.utils.EventLogger.EventLogger', 'EventLogger', ([], {}), '()\n', (1642, 1644), False, 'from pylith.utils.EventLogger import EventLogger\n')] |
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
from downloadautio import settings
import requests
import os
from downloadautio.file_handle import FileEntry
class DownloadautioPipeline(object):
def __init__(self):
self.fileEntry = FileEntry()
def process_item(self, item, spider):
downUrl = item['down_url'][0]
fileName = item['file_name'][0]
autio_title = item['autio_title'][0]
print("item fileName:" + fileName + ",downUrl:" + str(downUrl))
localPath = self.fileEntry.readPath()
dir_path = '%s/%s' % (localPath, autio_title)
if not os.path.exists(dir_path):
os.makedirs(dir_path)
houzui = downUrl[downUrl.index('.', len(downUrl) - 5, len(downUrl)): len(downUrl)]
autio_file_path = fileName + houzui
print("autio_path:" + autio_file_path)
autio_file = '%s/%s ' % (dir_path, autio_file_path)
print("autio_file:" + autio_file)
print("download_url:" + downUrl)
if not os.path.exists(autio_file):
with open(autio_file, 'wb') as handle:
response = requests.get(url=downUrl)
for block in response.iter_content(1024):
if not block:
break
handle.write(block)
return item
| [
"downloadautio.file_handle.FileEntry",
"os.path.exists",
"os.makedirs",
"requests.get"
] | [((390, 401), 'downloadautio.file_handle.FileEntry', 'FileEntry', ([], {}), '()\n', (399, 401), False, 'from downloadautio.file_handle import FileEntry\n'), ((756, 780), 'os.path.exists', 'os.path.exists', (['dir_path'], {}), '(dir_path)\n', (770, 780), False, 'import os\n'), ((794, 815), 'os.makedirs', 'os.makedirs', (['dir_path'], {}), '(dir_path)\n', (805, 815), False, 'import os\n'), ((1158, 1184), 'os.path.exists', 'os.path.exists', (['autio_file'], {}), '(autio_file)\n', (1172, 1184), False, 'import os\n'), ((1264, 1289), 'requests.get', 'requests.get', ([], {'url': 'downUrl'}), '(url=downUrl)\n', (1276, 1289), False, 'import requests\n')] |
import gym
import numpy as np
from copy import deepcopy
from gym_fabrikatioRL.envs.core import Core
from gym_fabrikatioRL.envs.core_state import State
from gym_fabrikatioRL.envs.interface_input import Input
from gym_fabrikatioRL.envs.env_utils import UndefinedOptimizerConfiguration
from gym_fabrikatioRL.envs.env_utils import UndefinedOptimizerTargetMode
from gym_fabrikatioRL.envs.env_utils import UndefinedLegalActionCall
from gym_fabrikatioRL.envs.env_utils import IllegalAction
class FabricatioRL(gym.Env):
def __init__(self, scheduling_inputs, seeds='', logfile_path='',
return_transformer=None, selectable_optimizers=None):
# SEED DEFINITION
if bool(seeds):
self.__seeds_remaining = seeds[1:]
self.__seeds_used = [seeds[0]]
init_seed = seeds[0]
else:
self.__seeds_remaining = []
self.__seeds_used = []
init_seed = -1
# SETUP DEFINITION
self.__parameters = Input(scheduling_inputs, init_seed, logfile_path)
# CORE
self.__core = Core(deepcopy(self.__parameters))
# INTERFACE OBJECTS
# return transformer
self.__return_transformer = return_transformer
# otimizers
self.__optimizer_configuration = -1 # becomes value in {0, .., 11}
self.__sequencing_optimizers = None
self.__transport_optimizers = None
self.__setup_optimizers(selectable_optimizers
if selectable_optimizers is not None else [])
# needed when transport and routing decisions are made by the same agent
self.__transport_decision_offset = None
# action and state space
self.action_space = None
self.observation_space = None
self.__get_action_space()
self.__get_observation_space()
# <editor-fold desc="Environment Interface">
def step(self, action: int):
try:
direct_action = self.__transform_action(action)
except IllegalAction:
state_repr = self.__return_transformer.transform_state(
self.__core.state)
return state_repr, -1, True, {}
state, done = self.__core.step(direct_action)
if self.__return_transformer is None:
# something other than RL is using this simulation
return state, None, done, {}
state_repr = self.__return_transformer.transform_state(state)
reward = self.__return_transformer.transform_reward(state)
while self.autoplay() and not done:
state_repr, reward, done, _ = self.step(0)
return state_repr, reward, done, {}
def autoplay(self):
return ((self.__core.state.scheduling_mode == 0 and
self.__optimizer_configuration in {4, 6, 7}) or
(self.__core.state.scheduling_mode == 1 and
self.__optimizer_configuration in {2, 6, 10}))
def reset(self) -> State:
# seed cycling if seeds were passed
if bool(self.__seeds_remaining) or bool(self.__seeds_used):
if len(self.__seeds_remaining) > 0:
seed = self.__seeds_remaining.pop(0)
self.__seeds_used.append(seed)
else:
self.__seeds_remaining = self.__seeds_used[1:]
seed = self.__seeds_used[0]
self.__seeds_used = [seed]
self.__parameters = Input(self.__parameters.scheduling_inputs,
seed, self.__parameters.logfile_path)
else:
self.__parameters = Input(
self.__parameters.scheduling_inputs,
logfile_path=self.__parameters.logfile_path)
self.__core = Core(self.__parameters)
if self.__return_transformer is not None:
return self.__return_transformer.transform_state(self.__core.state)
else:
return self.__core.state
def render(self, mode='dummy'):
raise NotImplementedError
def get_legal_actions(self):
"""
Returns a list of legal actions for each simulation mode and optimizer
mode combination.
:return: The legal actions in this state.
"""
# TODO: implement masking
toffs = self.__transport_decision_offset
n_to = self.__transport_optimizers.shape[0]
if self.__optimizer_configuration == 0:
if self.__core.state.scheduling_mode == 0:
return self.__core.state.legal_actions
else:
return [a + toffs - 1 for a in self.__core.state.legal_actions]
elif self.__optimizer_configuration in {1, 2}:
if self.__core.state.scheduling_mode == 0:
return self.__core.state.legal_actions
else:
raise UndefinedLegalActionCall(
self.__optimizer_configuration,
self.__core.state.scheduling_mode)
elif self.__optimizer_configuration == 3:
if self.__core.state.scheduling_mode == 0:
return self.__core.state.legal_actions
else:
return [toffs + i for i in range(n_to)]
elif self.__optimizer_configuration == 4:
if self.__core.state.scheduling_mode == 0:
raise UndefinedLegalActionCall(
self.__optimizer_configuration,
self.__core.state.scheduling_mode)
else:
return self.__core.state.legal_actions
elif self.__optimizer_configuration in {5, 6}:
raise UndefinedLegalActionCall(
self.__optimizer_configuration,
self.__core.state.scheduling_mode)
elif self.__optimizer_configuration == 7:
if self.__core.state.scheduling_mode == 0:
raise UndefinedLegalActionCall(
self.__optimizer_configuration,
self.__core.state.scheduling_mode)
else:
return list(range(n_to))
elif self.__optimizer_configuration == 8:
if self.__core.state.scheduling_mode == 0:
return list(range(toffs))
else:
return [a + toffs - 1 for a in self.__core.state.legal_actions]
elif self.__optimizer_configuration in {9, 10}:
if self.__core.state.scheduling_mode == 0:
return list(range(len(self.sequencing_optimizers)))
else:
raise UndefinedLegalActionCall(
self.__optimizer_configuration,
self.__core.state.scheduling_mode)
else: # self.__optimizer_configuration == 11:
if self.__core.state.scheduling_mode == 0:
return list(range(toffs))
else:
return [toffs + i for i in range(n_to)]
def make_deterministic(self):
"""
Purges all stochasticity from the simulation.
This breaks the environment in that one cannot recover the initial
stochastic events purged by this method.
:return: None.
"""
self.__core.make_deterministic()
def seed(self, seed=-1):
self.__seeds_remaining = seed
self.__seeds_used = []
# </editor-fold>
# <editor-fold desc="Optimizer Configuration">
def __setup_optimizers(self, selectable_opt: list):
"""
Splits the transport and sequencing optimizers according to their type
parameter, and initializes the optimizer_configuration parameter
defining the action space definition and action selection schemes.
:param selectable_opt: The list of optimizers.
:return: None
"""
seq_opt, tra_opt = [], []
for optimizer in selectable_opt:
if optimizer.target_mode == 'sequencing':
seq_opt.append(optimizer)
elif optimizer.target_mode == 'transport':
tra_opt.append(optimizer)
else:
raise UndefinedOptimizerTargetMode()
self.__sequencing_optimizers = np.array(seq_opt)
self.__transport_optimizers = np.array(tra_opt)
self.__setup_optimizer_config()
def __is_sequencing_only_simulation(self):
"""
If all types can be executed on exactly one machine, and the operation
ordering is sequential, then there is no transport decision to be made,
since jobs have only one downstream machine to be routed to. In such a
case, return True.
:return: True, if no transport decisions need to be made.
"""
type_to_machine = self.__parameters.matrices_m.machine_capabilities_dt
prec_list = self.__parameters.matrices_j.operation_precedence_l
for _, eligible_machines in type_to_machine.items():
if len(eligible_machines) > 1:
return False
for node_to_neighbor_map in prec_list:
for _, neighbors in node_to_neighbor_map.items():
if len(neighbors) > 1:
return False
return True
def __setup_optimizer_config(self):
"""
Initializes the optimizer_configuration parameter influencing the action
space definition and action translation to one of 11 integer values
defined as follows:
0: Direct sequencing action and direct transport action
1: Direct sequencing action (sequencing only simulation)
2: Direct sequencing action and fixed transport optimizer
3: Selectable sequencing optimizer and selectable transport optimizer
4: Fixed sequencing optimizer and direct transport action
5: Fixed sequencing optimizer run (sequencing only simulation)
6: Fixed sequencing and routing optimizer run
7: Fixed sequencing and selectable transport optimizer
8: Selectable sequencing optimizer and direct transport action
9: Selectable sequencing optimizer (sequencing only simulation)
10: Selectable sequencing optimizer and fixed transport optimizer
11: Selectable sequencing and transport optimizers
:return: None
"""
n_to = self.__transport_optimizers.shape[0]
n_so = self.__sequencing_optimizers.shape[0]
if n_so == 0 and n_to == 0: # direct actions only
if not self.__is_sequencing_only_simulation():
self.__optimizer_configuration = 0
else:
self.__optimizer_configuration = 1
elif n_so == 0 and n_to == 1:
self.__optimizer_configuration = 2
elif n_so == 0 and n_to > 1:
self.__optimizer_configuration = 3
elif n_so == 1 and n_to == 0:
if not self.__is_sequencing_only_simulation():
self.__optimizer_configuration = 4
else:
self.__optimizer_configuration = 5
elif n_so == 1 and n_to == 1:
self.__optimizer_configuration = 6
elif n_so == 1 and n_to > 1:
self.__optimizer_configuration = 7
elif n_so > 1 and n_to == 0:
if not self.__is_sequencing_only_simulation():
self.__optimizer_configuration = 8
else:
self.__optimizer_configuration = 9
elif n_so > 1 and n_to == 1:
self.__optimizer_configuration = 10
else: # n_so > 1 and n_to > 1:
self.__optimizer_configuration = 11
# </editor-fold>
# <editor-fold desc="Action and Observation Space Setup">
def __get_action_space(self):
"""
Initializes the action space parameter based on the
optimizer_configuration. The following scheme is applied:
1.) The agent action vector contains sequencing actions first,
then transport, except when there are no sequencing actions,
in which case only the transport options are actions
2.) For direct sequencing action, the total number of *visible*
operation indices constitute the actions + 1 for the wait signal
3.) For direct transport the number of machines in the system + 1 for
the wait signal constitute the actions
4.) For indirect optimizer actions the index of the respective optimizer
represents the action (here too 1. applies)
5.) If both routing and scheduling actions come from the agent, an
offset scalar (number of possible agent sequencing actions, n_s)
is kept to distinguish between the two, e.g. for agent action n
in transport mode transport action = n - n_s
:return: None
"""
assert -1 < self.__optimizer_configuration <= 11
n = self.__core.state.params.n_jobs
o = self.__core.state.params.max_n_operations
m = self.__core.state.params.n_machines
n_so = self.__sequencing_optimizers.shape[0]
n_to = self.__transport_optimizers.shape[0]
self.__transport_decision_offset = None
if self.__optimizer_configuration == 0:
self.__transport_decision_offset = n * o + 1
self.action_space = gym.spaces.Discrete(n * o + 1 + m + 1)
elif self.__optimizer_configuration in {1, 2}:
self.action_space = gym.spaces.Discrete(n * o + 1)
elif self.__optimizer_configuration == 3:
self.__transport_decision_offset = n * o + 1
self.action_space = gym.spaces.Discrete(n * o + 1 + n_to)
elif self.__optimizer_configuration == 4:
self.action_space = gym.spaces.Discrete(m + 1)
elif self.__optimizer_configuration in {5, 6}:
return # not RL; leave action space None
elif self.__optimizer_configuration == 7:
self.action_space = gym.spaces.Discrete(n_to)
elif self.__optimizer_configuration == 8:
self.__transport_decision_offset = n_so
self.action_space = gym.spaces.Discrete(n_so + m + 1)
elif self.__optimizer_configuration in {9, 10}:
self.action_space = gym.spaces.Discrete(n_so)
else: # self.__optimizer_configuration == 11:
self.__transport_decision_offset = n_so
self.action_space = gym.spaces.Discrete(n_so + n_to)
def __get_observation_space(self):
"""
Initializes the observation space required by gym to a Box object as
defined by gym.
The observation (i.e. state) space dimension is inferred from the state
representation returned by the state_transformer on the initial state.
:return: None
"""
if self.__return_transformer is None:
# something other than RL is using this simulation
return
state_repr = self.__return_transformer.transform_state(
self.__core.state)
self.observation_space = gym.spaces.Box(low=-np.inf, high=np.inf,
shape=state_repr.shape)
# </editor-fold>
# <editor-fold desc="Getters">
@property
def parameters(self):
return self.__parameters
@property
def core(self):
return self.__core
@property
def sequencing_optimizers(self):
return self.__sequencing_optimizers
@property
def transport_optimizers(self):
return self.__transport_optimizers
@property
def optimizer_configuration(self):
return self.__optimizer_configuration
# </editor-fold>
# <editor-fold desc="Action Transformation">
def __transform_action(self, agent_action):
"""
Switches between the 11 available decision interfaces and transforms the
agent action accordingly into an environment core compatible decision.
:param agent_action: The action as chosen by the agent.
:return: The action compatible with the core.
"""
if self.__optimizer_configuration in {0, 1}:
# both routing and sequencing direct actions
return self.__transform_a_direct_action_run(agent_action)
elif self.__optimizer_configuration == 2:
return self.__transform_a_direct_sequencing_fixed_transport(
agent_action)
elif self.__optimizer_configuration == 3:
return self.__transform_a_direct_sequencing_selectable_transport(
agent_action)
elif self.__optimizer_configuration == 4:
return self.__transform_a_fixed_sequencing_direct_transport(
agent_action)
elif self.__optimizer_configuration in {5, 6}:
return self.__transform_a_fixed_optimizer_run()
elif self.__optimizer_configuration == 7:
return self.__transform_a_fixed_sequencing_selectable_transport(
agent_action)
elif self.__optimizer_configuration in {8, 9}:
return self.__transform_a_selectable_sequencing_direct_transport(
agent_action)
elif self.__optimizer_configuration == 10:
return self.__transform_a_selectable_sequencing_fixed_transport(
agent_action)
elif self.__optimizer_configuration == 11:
return self.__transform_action_fully_selectable_optimizer_run(
agent_action)
else: # should not be possible at this point;
raise UndefinedOptimizerConfiguration()
def __transform_a_selectable_sequencing_direct_transport(
self, action: int) -> int:
"""
Translates an agent action into a simulation core action when sequencing
decisions (mode 0) are made indirectly through optimizers and transport
decisions (mode 1) are taken directly by the agent.
This function ensures that:
1. No transport action is taken in sequencing mode
(action > transport decision offset)
2. No transport decisions are made at all, if the simulation
instance only needs sequencing decisions (transport decision offset
is None)
3. The raw transport action passed by the agent is legal, as
perceived by the simulation core.
:param action: The action selected by the agent.
:return: The corresponding simulation core action.
"""
if self.__core.state.scheduling_mode == 0: # sequencing
if self.__transport_decision_offset is None:
# no transport decisions available
return self.__sequencing_optimizers[action].get_action(
self.__core.state)
elif action >= self.__transport_decision_offset:
# picked a transport action in sequencing mode
raise IllegalAction()
else:
# all goode :)
return self.__sequencing_optimizers[action].get_action(
self.__core.state)
else:
core_action = action - self.__transport_decision_offset + 1
if (action < self.__transport_decision_offset or
core_action not in self.__core.state.legal_actions):
raise IllegalAction()
# m starts from 1!
return core_action
def __transform_a_direct_sequencing_selectable_transport(
self, action: int) -> int:
if self.__core.state.scheduling_mode == 0:
if (action >= self.__transport_decision_offset or
action not in self.__core.state.legal_actions):
raise IllegalAction()
return action
else:
if action < self.__transport_decision_offset:
raise IllegalAction()
return self.__transport_optimizers[
action - self.__transport_decision_offset].get_action(
self.__core.state)
def __transform_a_fixed_optimizer_run(self) -> int:
# pure optimizer run. action space not relevant
# illegal actions not possible
if self.__core.state.scheduling_mode == 0:
direct_core_action = self.__sequencing_optimizers[0].get_action(
self.__core.state)
else:
direct_core_action = self.__transport_optimizers[0].get_action(
self.__core.state)
return direct_core_action
def __transform_a_selectable_sequencing_fixed_transport(
self, agent_action: int) -> int:
# illegal actions not possible
if self.__core.state.scheduling_mode == 0:
return self.__sequencing_optimizers[agent_action].get_action(
self.__core.state)
else:
return self.__transport_optimizers[0].get_action(self.__core.state)
def __transform_a_direct_sequencing_fixed_transport(
self, agent_action: int) -> int:
if self.__core.state.scheduling_mode == 0:
if agent_action not in self.__core.state.legal_actions:
raise IllegalAction()
return agent_action
else:
return self.__transport_optimizers[0].get_action(
self.__core.state)
def __transform_a_fixed_sequencing_selectable_transport(
self, agent_action: int) -> int:
if self.__core.state.scheduling_mode == 0:
return self.__sequencing_optimizers[0].get_action(
self.__core.state)
else:
# illegal actions not possible
return self.__transport_optimizers[agent_action].get_action(
self.__core.state)
def __transform_a_fixed_sequencing_direct_transport(
self, agent_action: int) -> int:
if self.__core.state.scheduling_mode == 0:
return self.__sequencing_optimizers[0].get_action(
self.__core.state)
else:
# illegal actions handled by the core?
core_action = agent_action + 1
if core_action not in self.__core.state.legal_actions:
raise IllegalAction()
return core_action
def __transform_a_direct_action_run(self, agent_action: int) -> int:
if self.__core.state.scheduling_mode == 0:
if self.__transport_decision_offset is None:
if agent_action not in self.__core.state.legal_actions:
raise IllegalAction()
elif (agent_action >= self.__transport_decision_offset or
agent_action not in self.__core.state.legal_actions):
raise IllegalAction()
return agent_action
else:
core_action = agent_action - self.__transport_decision_offset + 1
if (agent_action < self.__transport_decision_offset or
core_action not in self.__core.state.legal_actions):
raise IllegalAction()
return core_action
def __transform_action_fully_selectable_optimizer_run(
self, agent_action: int) -> int:
"""
Transforms action in the selectable routing and sequencing mode
(opt_conf==6).
When the core is in sequencing mode, the agent action
designates a sequencing optimizer index. When in routing mode, the agent
action designates a transport optimizer index. The first
self.__transport_decision_offset optimizers designate sequencing
optimizers while the next indices pertain to transport optimizers.
The get_action method of the optimizer selected by the agent is called
with the core state to return the core compatible action.
:param agent_action: The transport or sequencing optimizer index.
:return: The core compatible action.
"""
# Selectable Indirect Transport Action &
# Selectable Indirect Sequencing Action
if self.__core.state.scheduling_mode == 0:
if agent_action >= self.__transport_decision_offset:
raise IllegalAction()
direct_core_action = self.__sequencing_optimizers[
agent_action].get_action(self.__core.state)
else:
if agent_action < self.__transport_decision_offset:
raise IllegalAction()
direct_core_action = self.__transport_optimizers[
agent_action - self.__transport_decision_offset].get_action(
self.__core.state)
return direct_core_action
# </editor-fold>
| [
"gym_fabrikatioRL.envs.env_utils.UndefinedOptimizerTargetMode",
"gym.spaces.Discrete",
"gym.spaces.Box",
"gym_fabrikatioRL.envs.interface_input.Input",
"numpy.array",
"gym_fabrikatioRL.envs.env_utils.UndefinedOptimizerConfiguration",
"copy.deepcopy",
"gym_fabrikatioRL.envs.env_utils.IllegalAction",
... | [((999, 1048), 'gym_fabrikatioRL.envs.interface_input.Input', 'Input', (['scheduling_inputs', 'init_seed', 'logfile_path'], {}), '(scheduling_inputs, init_seed, logfile_path)\n', (1004, 1048), False, 'from gym_fabrikatioRL.envs.interface_input import Input\n'), ((3739, 3762), 'gym_fabrikatioRL.envs.core.Core', 'Core', (['self.__parameters'], {}), '(self.__parameters)\n', (3743, 3762), False, 'from gym_fabrikatioRL.envs.core import Core\n'), ((8102, 8119), 'numpy.array', 'np.array', (['seq_opt'], {}), '(seq_opt)\n', (8110, 8119), True, 'import numpy as np\n'), ((8158, 8175), 'numpy.array', 'np.array', (['tra_opt'], {}), '(tra_opt)\n', (8166, 8175), True, 'import numpy as np\n'), ((14891, 14955), 'gym.spaces.Box', 'gym.spaces.Box', ([], {'low': '(-np.inf)', 'high': 'np.inf', 'shape': 'state_repr.shape'}), '(low=-np.inf, high=np.inf, shape=state_repr.shape)\n', (14905, 14955), False, 'import gym\n'), ((1091, 1118), 'copy.deepcopy', 'deepcopy', (['self.__parameters'], {}), '(self.__parameters)\n', (1099, 1118), False, 'from copy import deepcopy\n'), ((3431, 3516), 'gym_fabrikatioRL.envs.interface_input.Input', 'Input', (['self.__parameters.scheduling_inputs', 'seed', 'self.__parameters.logfile_path'], {}), '(self.__parameters.scheduling_inputs, seed, self.__parameters.logfile_path\n )\n', (3436, 3516), False, 'from gym_fabrikatioRL.envs.interface_input import Input\n'), ((3596, 3688), 'gym_fabrikatioRL.envs.interface_input.Input', 'Input', (['self.__parameters.scheduling_inputs'], {'logfile_path': 'self.__parameters.logfile_path'}), '(self.__parameters.scheduling_inputs, logfile_path=self.__parameters.\n logfile_path)\n', (3601, 3688), False, 'from gym_fabrikatioRL.envs.interface_input import Input\n'), ((13174, 13212), 'gym.spaces.Discrete', 'gym.spaces.Discrete', (['(n * o + 1 + m + 1)'], {}), '(n * o + 1 + m + 1)\n', (13193, 13212), False, 'import gym\n'), ((13300, 13330), 'gym.spaces.Discrete', 'gym.spaces.Discrete', (['(n * o + 1)'], {}), '(n * o + 1)\n', 
(13319, 13330), False, 'import gym\n'), ((19163, 19178), 'gym_fabrikatioRL.envs.env_utils.IllegalAction', 'IllegalAction', ([], {}), '()\n', (19176, 19178), False, 'from gym_fabrikatioRL.envs.env_utils import IllegalAction\n'), ((19546, 19561), 'gym_fabrikatioRL.envs.env_utils.IllegalAction', 'IllegalAction', ([], {}), '()\n', (19559, 19561), False, 'from gym_fabrikatioRL.envs.env_utils import IllegalAction\n'), ((19682, 19697), 'gym_fabrikatioRL.envs.env_utils.IllegalAction', 'IllegalAction', ([], {}), '()\n', (19695, 19697), False, 'from gym_fabrikatioRL.envs.env_utils import IllegalAction\n'), ((20970, 20985), 'gym_fabrikatioRL.envs.env_utils.IllegalAction', 'IllegalAction', ([], {}), '()\n', (20983, 20985), False, 'from gym_fabrikatioRL.envs.env_utils import IllegalAction\n'), ((21999, 22014), 'gym_fabrikatioRL.envs.env_utils.IllegalAction', 'IllegalAction', ([], {}), '()\n', (22012, 22014), False, 'from gym_fabrikatioRL.envs.env_utils import IllegalAction\n'), ((22810, 22825), 'gym_fabrikatioRL.envs.env_utils.IllegalAction', 'IllegalAction', ([], {}), '()\n', (22823, 22825), False, 'from gym_fabrikatioRL.envs.env_utils import IllegalAction\n'), ((23940, 23955), 'gym_fabrikatioRL.envs.env_utils.IllegalAction', 'IllegalAction', ([], {}), '()\n', (23953, 23955), False, 'from gym_fabrikatioRL.envs.env_utils import IllegalAction\n'), ((24179, 24194), 'gym_fabrikatioRL.envs.env_utils.IllegalAction', 'IllegalAction', ([], {}), '()\n', (24192, 24194), False, 'from gym_fabrikatioRL.envs.env_utils import IllegalAction\n'), ((4825, 4921), 'gym_fabrikatioRL.envs.env_utils.UndefinedLegalActionCall', 'UndefinedLegalActionCall', (['self.__optimizer_configuration', 'self.__core.state.scheduling_mode'], {}), '(self.__optimizer_configuration, self.__core.state.\n scheduling_mode)\n', (4849, 4921), False, 'from gym_fabrikatioRL.envs.env_utils import UndefinedLegalActionCall\n'), ((8032, 8062), 'gym_fabrikatioRL.envs.env_utils.UndefinedOptimizerTargetMode', 
'UndefinedOptimizerTargetMode', ([], {}), '()\n', (8060, 8062), False, 'from gym_fabrikatioRL.envs.env_utils import UndefinedOptimizerTargetMode\n'), ((13470, 13507), 'gym.spaces.Discrete', 'gym.spaces.Discrete', (['(n * o + 1 + n_to)'], {}), '(n * o + 1 + n_to)\n', (13489, 13507), False, 'import gym\n'), ((18745, 18760), 'gym_fabrikatioRL.envs.env_utils.IllegalAction', 'IllegalAction', ([], {}), '()\n', (18758, 18760), False, 'from gym_fabrikatioRL.envs.env_utils import IllegalAction\n'), ((22326, 22341), 'gym_fabrikatioRL.envs.env_utils.IllegalAction', 'IllegalAction', ([], {}), '()\n', (22339, 22341), False, 'from gym_fabrikatioRL.envs.env_utils import IllegalAction\n'), ((22508, 22523), 'gym_fabrikatioRL.envs.env_utils.IllegalAction', 'IllegalAction', ([], {}), '()\n', (22521, 22523), False, 'from gym_fabrikatioRL.envs.env_utils import IllegalAction\n'), ((13590, 13616), 'gym.spaces.Discrete', 'gym.spaces.Discrete', (['(m + 1)'], {}), '(m + 1)\n', (13609, 13616), False, 'import gym\n'), ((5319, 5415), 'gym_fabrikatioRL.envs.env_utils.UndefinedLegalActionCall', 'UndefinedLegalActionCall', (['self.__optimizer_configuration', 'self.__core.state.scheduling_mode'], {}), '(self.__optimizer_configuration, self.__core.state.\n scheduling_mode)\n', (5343, 5415), False, 'from gym_fabrikatioRL.envs.env_utils import UndefinedLegalActionCall\n'), ((5598, 5694), 'gym_fabrikatioRL.envs.env_utils.UndefinedLegalActionCall', 'UndefinedLegalActionCall', (['self.__optimizer_configuration', 'self.__core.state.scheduling_mode'], {}), '(self.__optimizer_configuration, self.__core.state.\n scheduling_mode)\n', (5622, 5694), False, 'from gym_fabrikatioRL.envs.env_utils import UndefinedLegalActionCall\n'), ((13808, 13833), 'gym.spaces.Discrete', 'gym.spaces.Discrete', (['n_to'], {}), '(n_to)\n', (13827, 13833), False, 'import gym\n'), ((5850, 5946), 'gym_fabrikatioRL.envs.env_utils.UndefinedLegalActionCall', 'UndefinedLegalActionCall', (['self.__optimizer_configuration', 
'self.__core.state.scheduling_mode'], {}), '(self.__optimizer_configuration, self.__core.state.\n scheduling_mode)\n', (5874, 5946), False, 'from gym_fabrikatioRL.envs.env_utils import UndefinedLegalActionCall\n'), ((13968, 14001), 'gym.spaces.Discrete', 'gym.spaces.Discrete', (['(n_so + m + 1)'], {}), '(n_so + m + 1)\n', (13987, 14001), False, 'import gym\n'), ((14090, 14115), 'gym.spaces.Discrete', 'gym.spaces.Discrete', (['n_so'], {}), '(n_so)\n', (14109, 14115), False, 'import gym\n'), ((14255, 14287), 'gym.spaces.Discrete', 'gym.spaces.Discrete', (['(n_so + n_to)'], {}), '(n_so + n_to)\n', (14274, 14287), False, 'import gym\n'), ((6506, 6602), 'gym_fabrikatioRL.envs.env_utils.UndefinedLegalActionCall', 'UndefinedLegalActionCall', (['self.__optimizer_configuration', 'self.__core.state.scheduling_mode'], {}), '(self.__optimizer_configuration, self.__core.state.\n scheduling_mode)\n', (6530, 6602), False, 'from gym_fabrikatioRL.envs.env_utils import UndefinedLegalActionCall\n'), ((17375, 17408), 'gym_fabrikatioRL.envs.env_utils.UndefinedOptimizerConfiguration', 'UndefinedOptimizerConfiguration', ([], {}), '()\n', (17406, 17408), False, 'from gym_fabrikatioRL.envs.env_utils import UndefinedOptimizerConfiguration\n')] |
import os
from .version import BaseVersion
class MetaContainer(type):
    """Metaclass that auto-registers ``BaseVersion`` attributes declared on
    container classes, so versions become lazily-built properties."""

    def __new__(cls, name, bases, attrs):
        new_class = super(MetaContainer, cls).__new__(cls, name, bases, attrs)
        new_class._versions = {}
        for attr_name, attr_value in attrs.items():
            # 'vs_*' attributes are per-version parameters, not versions.
            if attr_name.startswith('vs_'):
                continue
            if not isinstance(attr_value, BaseVersion):
                continue
            # A version literally named 'self' is the special "original
            # file" version (see BaseContainer docstring).
            new_class.version_register(attr_name, attr_value,
                                       original=(attr_name == 'self'))
        return new_class
class BaseContainer(metaclass=MetaContainer):
    """
    Lazily creates and caches version files for a single source file.

    Note: version with name "self" has special meaning - it processes original
    source file and runs only once at file creation and saving.
    """
    attrname = 'dc'
    _versions = None            # replaced with a registry dict by MetaContainer
    _version_original = None    # holds the special "self" version, if declared
    # names of 'vs_*' class attributes forwarded to every registered version
    _version_params = ('conveyor', 'versionfile', 'accessor',
                       'filename', 'extension', 'storage',)

    @classmethod
    def version_params(cls):
        """Collect the 'vs_*' overrides actually defined on this class."""
        params = [(n, getattr(cls, 'vs_%s' % n))
                  for n in cls._version_params if hasattr(cls, 'vs_%s' % n)]
        return dict(params)

    @classmethod
    def version_register(cls, name, value, original=False):
        """Register a version object (called by MetaContainer at class creation)."""
        # set container specific version params
        # and force attrname assign to version
        params = cls.version_params()
        value.params(**params) if params else None
        value.params(attrname=name, force=True)
        # register version in internal registry
        # and del same named container property
        if original:
            cls._version_original = value
        else:
            cls._versions.__setitem__(name, value)
            hasattr(cls, name) and delattr(cls, name)

    def __init__(self, source_file, data=None):
        self.source_file = source_file
        self.data = data
        self._versionfiles = {}  # per-instance cache of built version files

    def __getattr__(self, name):
        """Resolve registered version names to (cached) version files."""
        if name in self._versionfiles:
            versfile = self._versionfiles[name]
        elif name in self._versions:
            versfile = self._versionfiles[name] = self.version(name)
        else:
            # BUGFIX: the original discarded this lookup's result and fell
            # through to `return versfile`, which would have raised
            # UnboundLocalError had the lookup ever succeeded.  Return it
            # directly; unknown names still raise AttributeError as before.
            return self.__getattribute__(name)
        return versfile

    def version(self, name, instantiate=True):
        """
        version get method, for overwrite behaviour of version creating to set
        some attrs in all(any) versions directly (use on your own risk),
        for example, like that:
        def version(self, name, instantiate=False):
            cls, args, kwargs = super(SomeClass, self).version(name, False)
            kwargs.update({'lazy': True, 'quiet': False,})
            return cls(*args, **kwargs)
        """
        version = (self._version_original if name == 'self' else
                   self._versions.get(name, None))
        if not version:
            raise IndexError('Version with name "%s" does not exists.' % name)
        return version.version(self.source_file, data=self.data,
                               instantiate=instantiate)

    def change_original(self):
        """change source file before save with "self" version"""
        if self._version_original:
            cls, args, kwargs = self.version('self', instantiate=False)
            kwargs.update(
                filename='%s%%s' % os.path.splitext(self.source_file.name)[0]
            )
            versionfile = cls(*args, **kwargs)
            versionfile.create(force=True)

    def create_versions(self):
        """call "create" for each version (policy)"""
        for name in self._versions.keys():
            self.__getattr__(name).create()

    def delete_versions(self):
        """call "delete" for each version (policy)"""
        for name in self._versions.keys():
            self.__getattr__(name).delete()
| [
"os.path.splitext"
] | [((3197, 3236), 'os.path.splitext', 'os.path.splitext', (['self.source_file.name'], {}), '(self.source_file.name)\n', (3213, 3236), False, 'import os\n')] |
import subprocess
# Blind boolean-oracle exploit: recovers an integer flag by binary search,
# asking the remote service whether `flag > mid` — one bit per connection.
# NOTE(review): this script is Python 2 only — communicate() is fed a str,
# out[0] is compared against the str '1', and str.decode('hex') was removed
# in Python 3 (use bytes.fromhex / codecs there).
code = 'if (flag > {}) then 1 else 0'
# assumes the flag fits in 36 bytes (288 bits) — TODO confirm
n = 8 * 36
low = 0
high = 1 << n
while low <= high:
    mid = (low + high) >> 1
    # one fresh netcat connection per oracle query
    process = subprocess.Popen(['nc', 'wolf.chal.pwning.xxx', '6808'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    out, err = process.communicate(code.format(mid))
    if out[0] == '1':
        low = mid + 1
    else:
        high = mid - 1
print (low, high)
# Render the recovered integer as zero-padded hex, then decode to raw bytes.
# NOTE(review): pads to n hex digits (n is the *bit* count, so twice the byte
# width) — the extra leading zeros decode to a harmless null-byte prefix.
print ('%0{}x'.format(n) % low).decode('hex')
| [
"subprocess.Popen"
] | [((154, 262), 'subprocess.Popen', 'subprocess.Popen', (["['nc', 'wolf.chal.pwning.xxx', '6808']"], {'stdin': 'subprocess.PIPE', 'stdout': 'subprocess.PIPE'}), "(['nc', 'wolf.chal.pwning.xxx', '6808'], stdin=subprocess.\n PIPE, stdout=subprocess.PIPE)\n", (170, 262), False, 'import subprocess\n')] |
# copied from https://github.com/probml/pyprobml/blob/master/scripts/sgmcmc_nuts_demo.py
# Compare NUTS, SGLD and Adam on sampling from a multivariate Gaussian
from collections import namedtuple
from typing import Any, Callable, Dict, List, NamedTuple, Optional, Tuple, Union
import jax.numpy as jnp
import optax
from blackjax import nuts, stan_warmup
from jax import jit, random, vmap
from jax.lax import scan
from jax.random import normal, split
from sgmcmcjax.optimizer import build_optax_optimizer
from sgmcmcjax.samplers import build_sgld_sampler
from .sgmcmc_utils import build_nuts_sampler
# We use the 'quickstart' example from
# https://github.com/jeremiecoullon/SGMCMCJax
def loglikelihood(theta, x):
    """Unnormalized Gaussian log-likelihood: -0.5 * ||x - theta||^2."""
    residual = x - theta
    return -0.5 * jnp.dot(residual, residual)
def logprior(theta):
    """Unnormalized isotropic Gaussian log-prior with precision 0.01."""
    quad = jnp.dot(theta, theta)
    return -0.5 * quad * 0.01
# generate dataset
# NOTE(review): the same PRNG key is reused for mu_true, X_data and every
# sampler call — presumably intentional for this demo; jax.random.split
# would be the standard practice.
N, D = 1000, 100
key = random.PRNGKey(0)
mu_true = random.normal(key, (D,))
X_data = random.normal(key, shape=(N, D)) + mu_true
# Adam — MAP optimization on 10% minibatches; should recover mu_true
# to within atol=1e-1.
batch_size = int(0.1 * N)
opt = optax.adam(learning_rate=1e-2)
optimizer = build_optax_optimizer(opt, loglikelihood, logprior, (X_data,), batch_size)
Nsamples = 10_000
params, log_post_list = optimizer(key, Nsamples, jnp.zeros(D))
print(log_post_list.shape)
print(params.shape)
assert jnp.allclose(params, mu_true, atol=1e-1)
print("adam test passed")
# SGLD — stochastic-gradient Langevin sampling; the posterior mean of the
# chain should match mu_true.
batch_size = int(0.1 * N)
dt = 1e-5
sampler = build_sgld_sampler(dt, loglikelihood, logprior, (X_data,), batch_size)
Nsamples = 10_000
samples = sampler(key, Nsamples, jnp.zeros(D))
print(samples.shape)
mu_est = jnp.mean(samples, axis=0)
assert jnp.allclose(mu_est, mu_true, atol=1e-1)
print("sgld test passed")
# NUTS / blackjax — full-data NUTS with 500 warmup steps via the local
# build_nuts_sampler wrapper; same posterior-mean check.
num_warmup = 500
sampler = build_nuts_sampler(num_warmup, loglikelihood, logprior, (X_data,))
Nsamples = 10_000
samples = sampler(key, Nsamples, jnp.zeros(D))
print(samples.shape)
mu_est = jnp.mean(samples, axis=0)
assert jnp.allclose(mu_est, mu_true, atol=1e-1)
print("nuts test passed")
| [
"jax.numpy.allclose",
"optax.adam",
"jax.numpy.zeros",
"jax.random.PRNGKey",
"jax.random.normal",
"sgmcmcjax.optimizer.build_optax_optimizer",
"jax.numpy.dot",
"jax.numpy.mean",
"sgmcmcjax.samplers.build_sgld_sampler"
] | [((880, 897), 'jax.random.PRNGKey', 'random.PRNGKey', (['(0)'], {}), '(0)\n', (894, 897), False, 'from jax import jit, random, vmap\n'), ((908, 932), 'jax.random.normal', 'random.normal', (['key', '(D,)'], {}), '(key, (D,))\n', (921, 932), False, 'from jax import jit, random, vmap\n'), ((1026, 1056), 'optax.adam', 'optax.adam', ([], {'learning_rate': '(0.01)'}), '(learning_rate=0.01)\n', (1036, 1056), False, 'import optax\n'), ((1069, 1143), 'sgmcmcjax.optimizer.build_optax_optimizer', 'build_optax_optimizer', (['opt', 'loglikelihood', 'logprior', '(X_data,)', 'batch_size'], {}), '(opt, loglikelihood, logprior, (X_data,), batch_size)\n', (1090, 1143), False, 'from sgmcmcjax.optimizer import build_optax_optimizer\n'), ((1279, 1318), 'jax.numpy.allclose', 'jnp.allclose', (['params', 'mu_true'], {'atol': '(0.1)'}), '(params, mu_true, atol=0.1)\n', (1291, 1318), True, 'import jax.numpy as jnp\n'), ((1400, 1470), 'sgmcmcjax.samplers.build_sgld_sampler', 'build_sgld_sampler', (['dt', 'loglikelihood', 'logprior', '(X_data,)', 'batch_size'], {}), '(dt, loglikelihood, logprior, (X_data,), batch_size)\n', (1418, 1470), False, 'from sgmcmcjax.samplers import build_sgld_sampler\n'), ((1566, 1591), 'jax.numpy.mean', 'jnp.mean', (['samples'], {'axis': '(0)'}), '(samples, axis=0)\n', (1574, 1591), True, 'import jax.numpy as jnp\n'), ((1599, 1638), 'jax.numpy.allclose', 'jnp.allclose', (['mu_est', 'mu_true'], {'atol': '(0.1)'}), '(mu_est, mu_true, atol=0.1)\n', (1611, 1638), True, 'import jax.numpy as jnp\n'), ((1875, 1900), 'jax.numpy.mean', 'jnp.mean', (['samples'], {'axis': '(0)'}), '(samples, axis=0)\n', (1883, 1900), True, 'import jax.numpy as jnp\n'), ((1908, 1947), 'jax.numpy.allclose', 'jnp.allclose', (['mu_est', 'mu_true'], {'atol': '(0.1)'}), '(mu_est, mu_true, atol=0.1)\n', (1920, 1947), True, 'import jax.numpy as jnp\n'), ((942, 974), 'jax.random.normal', 'random.normal', (['key'], {'shape': '(N, D)'}), '(key, shape=(N, D))\n', (955, 974), False, 'from jax import 
jit, random, vmap\n'), ((1211, 1223), 'jax.numpy.zeros', 'jnp.zeros', (['D'], {}), '(D)\n', (1220, 1223), True, 'import jax.numpy as jnp\n'), ((1522, 1534), 'jax.numpy.zeros', 'jnp.zeros', (['D'], {}), '(D)\n', (1531, 1534), True, 'import jax.numpy as jnp\n'), ((1831, 1843), 'jax.numpy.zeros', 'jnp.zeros', (['D'], {}), '(D)\n', (1840, 1843), True, 'import jax.numpy as jnp\n'), ((736, 765), 'jax.numpy.dot', 'jnp.dot', (['(x - theta)', '(x - theta)'], {}), '(x - theta, x - theta)\n', (743, 765), True, 'import jax.numpy as jnp\n'), ((807, 828), 'jax.numpy.dot', 'jnp.dot', (['theta', 'theta'], {}), '(theta, theta)\n', (814, 828), True, 'import jax.numpy as jnp\n')] |
import errno
import os
import hashlib
import requests
import subprocess
import json
class Colors:
    """ANSI terminal escape sequences for foreground colors and text
    attributes; print RESET to restore the default style."""
    BLACK = "\033[0;30m"
    RED = "\033[0;31m"
    GREEN = "\033[0;32m"
    BROWN = "\033[0;33m"
    BLUE = "\033[0;34m"
    PURPLE = "\033[0;35m"
    CYAN = "\033[0;36m"
    LIGHT_GRAY = "\033[0;37m"
    DARK_GRAY = "\033[1;30m"
    LIGHT_RED = "\033[1;31m"
    LIGHT_GREEN = "\033[1;32m"
    YELLOW = "\033[1;33m"
    LIGHT_BLUE = "\033[1;34m"
    LIGHT_PURPLE = "\033[1;35m"
    LIGHT_CYAN = "\033[1;36m"
    LIGHT_WHITE = "\033[1;37m"
    BOLD = "\033[1m"
    FAINT = "\033[2m"
    ITALIC = "\033[3m"
    UNDERLINE = "\033[4m"
    BLINK = "\033[5m"
    NEGATIVE = "\033[7m"
    CROSSED = "\033[9m"
    RESET = "\033[0m"
class CliException(Exception):
    """CLI-facing error carrying a user-readable message.

    The message is kept on ``msg`` for existing callers and is also passed
    to ``Exception.__init__`` so that ``str(exc)`` shows it (the original
    left ``str(exc)`` empty).
    """

    def __init__(self, msg: str):
        super().__init__(msg)
        self.msg = msg
def stripDups(l: list[str]) -> list[str]:
    """Remove duplicates from a list, keeping only the last occurrence.

    Order of the surviving (last) occurrences is preserved, e.g.
    ['a', 'b', 'a'] -> ['b', 'a'].  Runs in O(n) via a seen-set instead of
    the previous O(n^2) remove-from-list approach.
    """
    seen: set[str] = set()
    result: list[str] = []
    # Walk backwards: the first sighting in reverse IS the last occurrence.
    for item in reversed(l):
        if item not in seen:
            seen.add(item)
            result.append(item)
    # Restore the original (last-occurrence) relative order.
    result.reverse()
    return result
def findFiles(dir: str, exts: list[str] = []) -> list[str]:
    """Return paths of files in ``dir``, optionally filtered by extension.

    BUGFIX: the original returned bare file names when ``exts`` was empty
    but dir-joined paths when filtering by extension; paths are now always
    joined with ``dir`` for a consistent result.

    Returns an empty list when ``dir`` is not a directory.
    (The mutable default ``[]`` is safe here: it is never mutated.)
    """
    if not os.path.isdir(dir):
        return []
    result: list[str] = []
    for f in os.listdir(dir):
        # Empty exts means "accept everything"; otherwise any suffix match.
        if not exts or any(f.endswith(ext) for ext in exts):
            result.append(os.path.join(dir, f))
    return result
def objSha256(obj: dict, keys: list[str] = []) -> str:
    """Return the sha256 hex digest of ``obj`` serialized as canonical
    (sorted-key) JSON.

    When ``keys`` is non-empty, only those keys (where present in ``obj``)
    participate in the hash; otherwise the whole object is hashed.
    """
    if keys:
        subset = {k: obj[k] for k in keys if k in obj}
    else:
        subset = obj
    serialized = json.dumps(subset, sort_keys=True)
    return hashlib.sha256(serialized.encode("utf-8")).hexdigest()
def objKey(obj: dict, keys: list[str]) -> str:
    """Build a '-'-joined key string from ``obj`` values for ``keys``.

    A boolean True contributes the key *name* itself, False contributes
    nothing, missing keys are skipped, and any other value is joined as-is.
    """
    parts = []
    for k in keys:
        if k not in obj:
            continue
        v = obj[k]
        if isinstance(v, bool):
            if v:
                parts.append(k)
        else:
            parts.append(v)
    return "-".join(parts)
def mkdirP(path):
    """Create ``path`` (including parents), like ``mkdir -p``, and return it.

    Uses ``os.makedirs(..., exist_ok=True)`` instead of the legacy
    errno.EEXIST try/except dance: an existing directory is fine, while an
    existing *file* at ``path`` still raises (FileExistsError), matching
    the original behavior.
    """
    os.makedirs(path, exist_ok=True)
    return path
def downloadFile(url: str) -> str:
    """Download ``url`` into the local cache and return the cached path.

    The destination name is the sha256 of the url, so repeated calls are
    served from ``.cache/remote`` without re-downloading.  Data is streamed
    into a ``.tmp`` file first and renamed into place only on success.

    Raises Exception (chained to the underlying error) on any failure.
    The old bare ``except:`` also swallowed KeyboardInterrupt/SystemExit
    and discarded the original traceback; this catches ``Exception`` only
    and chains the cause.
    """
    dest = ".cache/remote/" + hashlib.sha256(url.encode('utf-8')).hexdigest()
    tmp = dest + ".tmp"
    if os.path.isfile(dest):
        return dest
    print(f"Downloading {url} to {dest}")
    try:
        r = requests.get(url, stream=True)
        r.raise_for_status()
        mkdirP(os.path.dirname(dest))
        with open(tmp, 'wb') as f:
            for chunk in r.iter_content(chunk_size=8192):
                if chunk:  # skip keep-alive chunks
                    f.write(chunk)
        os.rename(tmp, dest)
        return dest
    except Exception as err:
        raise Exception(f"Failed to download {url}") from err
def runCmd(*args: str) -> bool:
    """Run a command given as argv components and return True on success.

    Raises Exception when the process exits non-zero.  The varargs
    annotation is now per-argument (``str``), fixing the previous
    ``*args: list[str]`` mis-annotation (each positional arg is one
    string, not a list).
    """
    proc = subprocess.run(args)
    if proc.returncode != 0:
        raise Exception(f"Failed to run {' '.join(args)}")
    return True
| [
"os.listdir",
"os.makedirs",
"os.rename",
"json.dumps",
"subprocess.run",
"os.path.join",
"requests.get",
"os.path.isfile",
"os.path.dirname",
"os.path.isdir"
] | [((1240, 1255), 'os.listdir', 'os.listdir', (['dir'], {}), '(dir)\n', (1250, 1255), False, 'import os\n'), ((1723, 1757), 'json.dumps', 'json.dumps', (['toHash'], {'sort_keys': '(True)'}), '(toHash, sort_keys=True)\n', (1733, 1757), False, 'import json\n'), ((2477, 2497), 'os.path.isfile', 'os.path.isfile', (['dest'], {}), '(dest)\n', (2491, 2497), False, 'import os\n'), ((3002, 3022), 'subprocess.run', 'subprocess.run', (['args'], {}), '(args)\n', (3016, 3022), False, 'import subprocess\n'), ((1160, 1178), 'os.path.isdir', 'os.path.isdir', (['dir'], {}), '(dir)\n', (1173, 1178), False, 'import os\n'), ((2158, 2175), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (2169, 2175), False, 'import os\n'), ((2584, 2614), 'requests.get', 'requests.get', (['url'], {'stream': '(True)'}), '(url, stream=True)\n', (2596, 2614), False, 'import requests\n'), ((2845, 2865), 'os.rename', 'os.rename', (['tmp', 'dest'], {}), '(tmp, dest)\n', (2854, 2865), False, 'import os\n'), ((2659, 2680), 'os.path.dirname', 'os.path.dirname', (['dest'], {}), '(dest)\n', (2674, 2680), False, 'import os\n'), ((2244, 2263), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (2257, 2263), False, 'import os\n'), ((1426, 1446), 'os.path.join', 'os.path.join', (['dir', 'f'], {}), '(dir, f)\n', (1438, 1446), False, 'import os\n')] |
#This is a basic starter script for figuring out salvaging calculations in GW2
"""Basic salvage cost calculating in GW2
To determine if it is profitable:
salvage item + cost to salvage < value of goods after TP fees
To calculate the value of the goods after TP fees, I need:
Salvage rates
Value of raw salvage materials
Value of refined salvage materials
TP fees on items (fixed percentage)
Cost to salvage (fixed value per method)
"""
"""Program Flow
This first attempt is very fixed so it will be very stupid
Do not worry about what happens if you reduce the cost by 1c here and there
Steps are currently:
Get and sort values from GW2 API
Generate optimal values of material price + decision for selling
Print chart of values and decision for selling with profit
Calculate value of salvage item with the 3 types of salvage methods
Print result
Ideal steps:
Get and sort values from GW2 API
Generate optimal values of material price + decision for selling
Calculate all salvage values and sums for selling with profit
Print everything in a chart per material type
Have a summary at the end about what is profitable to salvage and by how much
Additional work:
Instead of salvaging to sell, calculate the cost savings of buying and salvaging vs buying
Computing the 4 combinations of buy-sell profits
Computing how many items I'd need to salvage to get x amount of material
"""
"""API item and number
Salvage items:
Metal Salvage
21690=Brittle Clump of Ore
21678=Bit of Metal Scrap
21691=Weak Clump of Ore
21679=Pile of Metal Scrap
21692=Clump of Ore
21680=Jagged Metal Scrap
21693=Laden Clump of Ore
21681=Metal Scrap
21694=Loaded Clump of Ore
21682=Salvageable Metal Scrap
21695=Rich Clump of Ore
21683=Valuable Metal Scrap
79079=Unstable Metal Chunk
Leather Salvage
21661=Tattered Hide
21684=Rawhide Leather Strap
21653=Tattered Pelt
21664=Ripped Hide
21685=Thin Leather Strap
21654=Ripped Pelt
21667=Torn Hide
21686=Coarse Leather Strap
21655=Torn Pelt
21668=Frayed Hide
21687=Thick Leather Strap
21656=Frayed Pelt
21670=Filthy Hide
21688=Rugged Leather Strap
21657=Filthy Pelt
22331=Salvageable Hide
21689=Hard Leather Strap
21658=Salvageable Pelt
79213=Unstable Hide
80681=Bloodstone-Warped Hide
Cloth Salvage
21669=Shredded Garment
22325=Half-Eaten Mass
21659=Shredded Rag
21671=Worn Garment
22326=Decaying Mass
21660=Worn Rag
21672=Ragged Garment
22327=Fetid Mass
21662=Soiled Rag
21673=Frayed Garment
22328=Malodorous Mass
21663=Frayed Rag
21674=Torn Garment
22329=Half-Digested Mass
21665=Torn Rag
21675=Discarded Garment
22330=Regurgitated Mass
21666=Rag
79138=Unstable Rag
Wood Salvage
79423=Reclaimed Wood Chunk
Rare Metal Salvage
43552=Bit of Aetherized Metal Scrap
41733=Bit of Fused Metal Scrap
45039=Bit of Twisted Watchwork Scrap
43553=Pile of Aetherized Metal Scrap
41734=Pile of Fused Metal Scrap
45040=Pile of Twisted Watchwork Scrap
43554=Jagged Aetherized Metal Scrap
41735=Jagged Fused Metal Scrap
45041=Jagged Twisted Watchwork Scrap
43555=Aetherized Metal Scrap
41736=Fused Metal Scrap
45042=Twisted Watchwork Scrap
43556=Salvageable Aetherized Metal Scrap
41737=Salvageable Fused Metal Scrap
Other
82488=Salvageable Intact Forged Scrap
Direct salvage output material:
Metal Material
19697=Copper Ore
19703=Silver Ore
19699=Iron Ore
19698=Gold Ore
19702=Platinum Ore
19700=Mithril Ore
19701=Orichalcum Ore
Leather Material
19719=Rawhide Leather Section
19728=Thin Leather Section
19730=Coarse Leather Section
19731=Rugged Leather Section
19729=Thick Leather Section
19732=Hardened Leather Section
Cloth Material
19718=Jute Scrap
19739=Wool Scrap
19741=Cotton Scrap
19743=Linen Scrap
19748=Silk Scrap
19745=Gossamer Scrap
Wood Material
19723=Green Wood Log
19726=Soft Wood Log
19727=Seasoned Wood Log
19724=Hard Wood Log
19722=Elder Wood Log
19725=Ancient Wood Log
Rare Materials
24301=Charged Sliver
24302=Charged Fragment
24303=Charged Shard
24304=Charged Core
24305=Charged Lodestone
24311=Molten Sliver
24312=Molten Fragment
24313=Molten Shard
24314=Molten Core
24315=Molten Lodestone
24316=Glacial Sliver
24317=Glacial Fragment
24318=Glacial Shard
24319=Glacial Core
24320=Glacial Lodestone
24307=Onyx Fragment
24308=Onyx Shard
24309=Onyx Core
44941=Watchwork Sprocket
Refinement materials:
Metal Refinement
19680=Copper Ingot
19679=Bronze Ingot
19687=Silver Ingot
19683=Iron Ingot
19688=Steel Ingot
19682=Gold Ingot
19686=Platinum Ingot
19681=Darksteel Ingot
19684=Mithril Ingot
19685=Orichalcum Ingot
Leather Refinement
19738=Stretched Rawhide Leather Square
19733=Cured Thin Leather Square
19734=Cured Coarse Leather Square
19736=Cured Rugged Leather Square
19735=Cured Thick Leather Square
19737=Cured Hardened Leather Square
Cloth Refinement
19720=Bolt of Jute
19740=Bolt of Wool
19742=Bolt of Cotton
19744=Bolt of Linen
19747=Bolt of Silk
19746=Bolt of Gossamer
Wood Refinement
19710=Green Wood Plank
19713=Soft Wood Plank
19714=Seasoned Wood Plank
19711=Hard Wood Plank
19709=Elder Wood Plank
19712=Ancient Wood Plank
Additional refinement materials:
19704=Lump of Tin (Bronze Ingots)
19750=Lump of Coal (Steel Ingot)
19924=Lump of Primordium (Darksteel)
"""
"""Refinement Equations
Metal:
1x Copper Ingot = 2x Copper Ore
5x Bronze Ingot = 10x Copper + Lump of Tin
1x Silver Ingot = 2x Silver Ore
1x Iron Ingot = 3x Iron Ore
1x Steel Ingot = 3x Iron Ore + 1x Lump of Coal
1x Gold Ingot = 2x Gold Ore
1x Platinum Ingot = 2x Platinum Ore
1x Darksteel Ingot = 2x Platinum Ore + Lump of Primordium
1x Mithril Ingot = 2x Mithril Ore
1x Orichalcum Ingot = 2x Orichalcum Ore
Leather
1x Stretched Rawhide Leather Square = 2x Rawhide Leather Section
1x Cured Thin Leather Square = 2x Thin Leather Section
1x Cured Coarse Leather Square = 2x Coarse Leather Square
1x Cured Rugged Leather Square = 2x Rugged Leather Square
1x Cured Thick Leather Square = 4x Thick Leather Section
1x Cured Hardened Leather Square = 3x Hardened Leather Section
Cloth
1x Bolt of Jute = 2x Jute Scrap
1x Bolt of Wool = 2x Wool Scrap
1x Bolt of Cotton = 2x Cotton Scrap
1x Bolt of Linen = 2x Linen Scrap
1x Bolt of Silk = 3x Silk Scrap
1x Bolt of Gossamer = 2x Gossamer Scrap
Wood
1x Green Wood Plank = 3x Green Wood Log
1x Soft Wood Plank = 2x Soft Wood Log
1x Seasoned Wood Plank = 3x Seasoned Wood Plank
1x Hard Wood Plank = 3x Hard Wood Log
1x Elder Wood Plank = 3x Elder Wood Log
1x Ancient Wood Plank = 3x Ancient Wood Log
"""
"""Salvage results
The wiki sometimes indicates a number for each possible salvage result
Specific numbers of salvage per salvage item doesn't really matter - want average value
Metal Salvage:
Metal
Brittle Clump of Ore = (1-3) Copper Ore
Bit of Metal Scrap = (1-3) Copper Ore
Weak Clump of Ore = (1-3) Copper Ore + (1-3) Silver Ore + (1-3) Iron Ore
Pile of Metal Scrap = (1-3) Copper Ore + (1-3) Silver Ore + (1-3) Iron Ore
Clump of Ore = (1-3) Silver Ore + (1-3) Iron Ore + (1-3) Gold Ore
Jagged Metal Scrap = (1-3) Silver Ore + (1-3) Iron Ore + (1-3) Gold Ore
Laden Clump of Ore = (1-3) Iron Ore + (1-3) Gold Ore + (1-3) Platinum Ore
Metal Scrap = (1-3) Iron Ore + (1-3) Gold Ore + (1-3) Platinum Ore
Loaded Clump of Ore = (1-3) Platinum Ore + (1-3) Mithril Ore
Salvageable Metal Scrap = (1-3) Platinum Ore + (1-3) Mithril Ore
Rich Clump of Ore = (1-3) Mithril Ore + (?0-1?) Orichalcum Ore
Valuable Metal Scrap = (1-3) Mithril Ore + (?0-1?) Orichalcum Ore
Unstable Metal Chunk = Copper, Iron, Platinum, Mithril, Orichalcum (no gold or silver apparently)
Leather
Tattered Hide = (1-3) Rawhide Leather Section
Rawhide Leather Strap = (1-3) Rawhide Leather Section
Tattered Pelt = (1-3) Rawhide Leather Section
Ripped Hide = (1-3) Rawhide Leather Section + (1-3) Thin Leather Section
Thin Leather Strap = (1-3) Rawhide Leather Section + (1-3) Thin Leather Section
Ripped Pelt = (1-3) Rawhide Leather Section + (1-3) Thin Leather Section
Torn Hide = (1-3) Thin Leather Section + (1-3) Coarse Leather Section
Coarse Leather Strap = (1-3) Thin Leather Section + (1-3) Coarse Leather Section
Torn Pelt = (1-3) Thin Leather Section + (1-3) Coarse Leather Section
Frayed Hide = (1-3) Coarse Leather Section + (1-3) Rugged Leather Section
Thick Leather Strap = (1-3) Coarse Leather Section + (1-3) Rugged Leather Section
Frayed Pelt = (1-3) Coarse Leather Section + (1-3) Rugged Leather Section
Filthy Hide = (1-3) Rugged Leather Section + (1-3) Thick Leather Section
Rugged Leather Strap = (1-3) Rugged Leather Section + (1-3) Thick Leather Section
Filthy Pelt = (1-3) Rugged Leather Section + (1-3) Thick Leather Section
Salvageable Hide = (1-3) Thick Leather Section + Hardened Leather Section
Hard Leather Strap = (1-3) Thick Leather Section + Hardened Leather Section
Salvageable Pelt = (1-3) Thick Leather Section + Hardened Leather Section
Unstable Hide = Rawhide, Thin Leather, Coarse, Rugged, Thick, Hardened
Bloodstone-Warped Hide = Rawhide, Thin Leather, Coarse, Rugged, Thick, Hardened
Cloth
Shredded Garment = (1-3) Jute Scrap
Half-Eaten Mass = (1-3) Jute Scrap
Shredded Rag = (1-3) Jute Scrap
Worn Garment = (1-3) Jute Scrap + (1-3) Wool Scrap
Decaying Mass = (1-3) Jute Scrap + (1-3) Wool Scrap
Worn Rag = (1-3) Jute Scrap + (1-3) Wool Scrap
Ragged Garment = (1-3) Wool Scrap + (1-3) Cotton Scrap
Fetid Mass = (1-3) Wool Scrap + (1-3) Cotton Scrap
Soiled Rag = (1-3) Wool Scrap + (1-3) Cotton Scrap
Frayed Garment = (1-3) Cotton Scrap + (1-3) Linen Scrap
Malodorous Mass = (1-3) Cotton Scrap + (1-3) Linen Scrap
Frayed Rag = (1-3) Cotton Scrap + (1-3) Linen Scrap
Torn Garment = (1-3) Linen Scrap + (1-3) Silk Scrap
Half-Digested Mass = (1-3) Linen Scrap + (1-3) Silk Scrap
Torn Rag = (1-3) Linen Scrap + (1-3) Silk Scrap
Discarded Garment = (1-3) Silk Scrap + Gossamer Scrap
Regurgitated Mass = (1-3) Silk Scrap + Gossamer Scrap
Rag = (1-3) Silk Scrap + Gossamer Scrap
Unstable Rag = Jute, Woll, Cotton, Linen, Silk, Gossamer
Wood
        Reclaimed Wood Chunk = Green, Soft, Seasoned, Hard, Elder, Ancient
Rare Metal Salvage
        Bit of Aetherized Metal = Copper, Silver, Iron, Gold, Mithril, Charged Sliver
Bit of Fused Metal Scrap = Copper, Silver, Iron, Mithril, Molten Sliver, Glacial Sliver
Bit of Twisted Watchwork Scrap = Copper, Silver, Iron, Charged Sliver, Watchwork Sprocket
Pile of Aetherized Metal Scrap = Silver, Iron, Gold, Charged Fragment
Pile of Fused Metal Scrap = Silver, Iron, Gold, Molten Fragment, Glacial Fragment
Pile of Twisted Watchwork Scrap = Silver, Iron, Gold, Onyx Fragment, Watchwork Sprocket
Jagged Aetherized Metal Scrap = Iron, Gold, Platinum, Charged Shard
        Jagged Fused Metal Scrap = Iron, Gold, Platinum, Molten Shard, Glacial Shard
Jagged Twisted Watchwork Scrap = Iron, Gold, Platinum, Charged Shard, Onyx Shard, Watchwork Sprocket
Aetherized Metal Scrap = Silver, Iron, Gold, Platinum, Mithril, Orichalcum, Charged Sliver, Charged Fragment, Charged Core
Fused Metal Scrap = Copper, Iron, Platinum, Mithril, Orichalcum, Molten Sliver, Molten Core, Glacial Core
Twisted Watchwork Scrap = Gold, Platinum, Mithril, Charged Core, Onyx Core, Watchwork Sprocket
Salvageable Aetherized Metal Scrap = Mithril, Orichalcum, Charged Core, Charge Lodestone
Salvageable Fused Metal Scrap = Mithril, Orichalcum, Molten Core, Molten Lodestone, Glacial Core, Glacial Lodestone
Salvageable Twisted Watchwork Scrap = Mithril, Orichalcum, Watchwork Sprocket, Charged Core, Charged Lodestone, Onyx Core, Onyx Lodestone
Other
Salvageable Intact Forged Scrap = (3-9) Mithril xor (3) Orichalcum xor (1,5) Forgemetal + (1-3) 10 luck xor (1-3) 50 luck xor 100 luck xor 200 luck
Don't care about Ambrite
Ectoplasm use cases deserve their own script since the drop rate is so well understood already
"""
"""
Function Declarations
"""
#Organize API entries
def sort_allAPI(allAPI):
    """Sort raw GW2 API wrapper return dictionaries (originally JSON objects)
    into the dictionaries this calc_salvage script needs.

    Input:
        allAPI = list (not dict) of commerceprices API objects, each shaped like
            {'id': 79423, 'whitelisted': False,
             'buys': {'quantity': 13684, 'unit_price': 114},
             'sells': {'quantity': 22649, 'unit_price': 119}}

    Output:
        unrefined_prices = dictionary of all unrefined material prices ("raw"
            and "unrefined" are used interchangeably here)
        refined_prices = dictionary of all refined material prices
        salvageLeather = dictionary of corresponding salvage item names and prices
        salvageWood = dictionary of corresponding salvage item names and prices
        salvageMetal = dictionary of corresponding salvage item names and prices
        salvageCloth = dictionary of corresponding salvage item names and prices
        All use the format of:
            "salvage item name" : [buy order price, sell listing price]

    Design Note:
        The commerceprices endpoint returns only numeric ids, so id:name
        lookup tables are needed to produce name-keyed result dictionaries.
    """
    #there is only 1 wood salvage item so it just needs a single case in the sorting loop
    # id -> name lookup tables, one per material family
    api_salvageMetal = {21690:'Brittle Clump of Ore',21678:'Bit of Metal Scrap',#T1
                21691:'Weak Clump of Ore',21679:'Pile of Metal Scrap',#T2
                21692:'Clump of Ore',21680:'Jagged Metal Scrap',
                21693:'Laden Clump of Ore',21681:'Metal Scrap',
                21694:'Loaded Clump of Ore',21682:'Salvageable Metal Scrap',
                21695:'Rich Clump of Ore',21683:'Valuable Metal Scrap',
                79079:'Unstable Metal Chunk'}
    api_salvageLeather = {21661:'Tattered Hide',21684:'Rawhide Leather Strap',21653:'Tattered Pelt',
                21664:'Ripped Hide',21685:'Thin Leather Strap',21654:'Ripped Pelt',
                21667:'Torn Hide',21686:'Coarse Leather Strap',21655:'Torn Pelt',
                21668:'Frayed Hide',21687:'Thick Leather Strap',21656:'Frayed Pelt',
                21670:'Filthy Hide',21688:'Rugged Leather Strap',21657:'Filthy Pelt',
                22331:'Salvageable Hide',21689:'Hard Leather Strap',21658:'Salvageable Pelt',
                79213:'Unstable Hide',80681:'Bloodstone-Warped Hide'}
    api_salvageCloth = {21669:'Shredded Garment',22325:'Half-Eaten Mass',21659:'Shredded Rag',
                21671:'Worn Garment',22326:'Decaying Mass',21660:'Worn Rag',
                21672:'Ragged Garment',22327:'Fetid Mass',21662:'Soiled Rag',
                21673:'Frayed Garment',22328:'Malodorous Mass',21663:'Frayed Rag',
                21674:'Torn Garment',22329:'Half-Digested Mass',21665:'Torn Rag',
                21675:'Discarded Garment',22330:'Regurgitated Mass',21666:'Rag',
                21676:'Garment_28',21677:'Garment_32',
                79138:'Unstable Rag'}
    api_unrefined_prices = {19697:'Copper Ore',19703:'Silver Ore',19699:'Iron Ore',19698:'Gold Ore',19702:'Platinum Ore',19700:'Mithril Ore',19701:'Orichalcum Ore',
                19719:'Rawhide Leather Section',19728:'Thin Leather Section',19730:'Coarse Leather Section',19731:'Rugged Leather Section',19729:'Thick Leather Section',19732:'Hardened Leather Section',
                19718:'Jute Scrap',19739:'Wool Scrap',19741:'Cotton Scrap',19743:'Linen Scrap',19748:'Silk Scrap',19745:'Gossamer Scrap',
                19723:'Green Wood Log',19726:'Soft Wood Log',19727:'Seasoned Wood Log',19724:'Hard Wood Log',19722:'Elder Wood Log',19725:'Ancient Wood Log'}
    api_refined_prices = {19680:'Copper Ingot',19679:'Bronze Ingot',19687:'Silver Ingot',19683:'Iron Ingot',19688:'Steel Ingot',19682:'Gold Ingot',19686:'Platinum Ingot',19681:'Darksteel Ingot',19684:'Mithril Ingot',19685:'Orichalcum Ingot',
                19738:'Stretched Rawhide Leather Square',19733:'Cured Thin Leather Square',19734:'Cured Coarse Leather Square',19736:'Cured Rugged Leather Square',19735:'Cured Thick Leather Square',19737:'Cured Hardened Leather Square',
                19720:'Bolt of Jute',19740:'Bolt of Wool',19742:'Bolt of Cotton',19744:'Bolt of Linen',19747:'Bolt of Silk',19746:'Bolt of Gossamer',
                19710:'Green Wood Plank',19713:'Soft Wood Plank',19714:'Seasoned Wood Plank',19711:'Hard Wood Plank',19709:'Elder Wood Plank',19712:'Ancient Wood Plank'}
    #Return dictionaries with 'item':[buy sell] key:value pairs
    salvageWood = {}#Yes it has one entry but dict is the required data type
    salvageMetal = {}
    salvageLeather = {}
    salvageCloth = {}
    unrefined_prices = {}
    refined_prices = {}
    #If the id exists in a specific api_{dict} then it is added into it's corresponding return dict. This replaces repetitive elif statements
    for entryAPI in allAPI:
        if(entryAPI['id']==79423):#special case because there's only 1
            salvageWood['Reclaimed Wood Chunk'] = [entryAPI['buys']['unit_price'], entryAPI['sells']['unit_price']]
        elif entryAPI['id'] in api_salvageMetal:
            salvageMetal[api_salvageMetal[entryAPI['id']]] = [entryAPI['buys']['unit_price'], entryAPI['sells']['unit_price']]
        elif entryAPI['id'] in api_salvageLeather:
            salvageLeather[api_salvageLeather[entryAPI['id']]] = [entryAPI['buys']['unit_price'], entryAPI['sells']['unit_price']]
        elif entryAPI['id'] in api_salvageCloth:
            salvageCloth[api_salvageCloth[entryAPI['id']]] = [entryAPI['buys']['unit_price'], entryAPI['sells']['unit_price']]
        elif entryAPI['id'] in api_unrefined_prices:
            unrefined_prices[api_unrefined_prices[entryAPI['id']]] = [entryAPI['buys']['unit_price'], entryAPI['sells']['unit_price']]
        elif entryAPI['id'] in api_refined_prices:
            refined_prices[api_refined_prices[entryAPI['id']]] = [entryAPI['buys']['unit_price'], entryAPI['sells']['unit_price']]
        else:
            # Unknown id: report it but keep sorting the rest.
            print("Unexpected API return")
            print(entryAPI)
    return unrefined_prices, refined_prices, salvageLeather, salvageWood, salvageMetal, salvageCloth
#General compute and print report
def salvagePrint(itemName_str,itemCost_dct,multiplier_dct,droprate_dict,salvageCost_dct,buysell):
    """Print a per-kit salvage value report for one item and classify its
    profitability.

    Input:
        itemName_str = string name of salvage item
        itemCost_dct = dictionary with salvage item costs. salvage item name : [buy order price, sell listing price]
        multiplier_dct = dictionary of all material values. raw material name : buy or sell value
        droprate_dict = droprate of item in question. Should have all 3 levels of kit
        salvageCost_dct = the cost per salvage kit type
        buysell = 0 selects buy-order prices, 1 selects sell-listing prices

    Output layout per item:
        Hard Leather Strap : {cost}
        Salvage Kit        :     Profit  |  Total Value  | item 1 item 2
        ----------------------------------------------------------------
        Copper             :   {profit}  |  {sum right}  |
        ...

    Returns a profit-metric list [item, kit, profit, profit %, verdict]:
        [7-20)   "Consider"  - low profit, profit % may be terrible
        [20-50)  "Good"      - decent profit per salvage
        [50-100) "BUYBUYBUY" - can be bought, priced lower, good profit
        [100-)   "MEGA BUY"  - profit close to the cost of some salvage items
    A higher verdict is never overwritten by a later, lower one.
    """
    worthit_list = []
    # 'Copper' chosen arbitrarily (first kit always present) just to obtain
    # the ordered material column names for the header.
    orderedkeys = list(droprate_dict['Copper'].keys())
    # C-style "%" substitutes "%d" widths into the format template, sizing
    # each material column to its header's width.
    formatline = "{:<14} : {:>10} | {:>12} | " + ' '.join(["{:>%d}" % len(l) for l in orderedkeys])
    print("\n{salvageName} : {salvagePrice}".format(salvageName=itemName_str, salvagePrice=itemCost_dct[itemName_str][buysell]))
    print("-"*(len(itemName_str)+8))
    print(formatline.format(*["Salvage Kit", "Profit","Total Value"]+orderedkeys))
    for salvage_rarity,droprate_x in droprate_dict.items():
        itemValues_dct,itemSum_val = compute_result(droprate_x,multiplier_dct,True)
        methodprofit=round(itemSum_val - salvageCost_dct[salvage_rarity]-itemCost_dct[itemName_str][buysell],4)
        print(formatline.format(*[salvage_rarity,round(methodprofit,4),round(itemSum_val,4)]+[itemValues_dct[x] for x in orderedkeys]))
        # Profit as a percentage of total outlay (kit cost + item cost),
        # computed once.  (The original also evaluated this once as a dead,
        # discarded expression before the branches - removed.)
        profit_pct = "%d%%"%(100*(methodprofit/(salvageCost_dct[salvage_rarity]+itemCost_dct[itemName_str][buysell])))
        # The any() checks keep a higher verdict from being downgraded by a
        # later kit's lower-tier result.
        if (methodprofit >= 100):
            worthit_list = [itemName_str, "Check Kit", methodprofit, profit_pct, "MEGA BUY"]
        elif (methodprofit >=50) and ("MEGA BUY" not in worthit_list):
            worthit_list = [itemName_str, salvage_rarity, methodprofit, profit_pct, "BUYBUYBUY"]
        elif (methodprofit >=20) and not any(x in ["MEGA BUY", "BUYBUYBUY"] for x in worthit_list):
            worthit_list = [itemName_str, salvage_rarity, methodprofit, profit_pct, "Good"]
        elif (methodprofit >=7) and not any(x in ["MEGA BUY", "BUYBUYBUY", "Good"] for x in worthit_list):
            worthit_list = [itemName_str, salvage_rarity, methodprofit, profit_pct, "Consider"]
    return worthit_list
#End of salvagePrint function
"""************************************
************ DROP RATES ************
************************************"""
"""New case needs the following information:
droprate dictionary
material IDs added to allAPI list
material IDs added to sort_allAPI function
variable to allAPI output if needed
salvagePrint function call
"""
"""
Drop rates: Metals
"""
""" T1 """
#Brittle Clump of Ore
droprate_BrittleClumpofOre={}
#All Peureki
droprate_BrittleClumpofOre['Copper']={'Copper Ore':1.896}
droprate_BrittleClumpofOre['Runecrafter']={'Copper Ore':1.86}
droprate_BrittleClumpofOre['Rare']={'Copper Ore':1.888}
#Bit of Metal Scrap
droprate_BitofMetalScrap = {}
#All Peureki
droprate_BitofMetalScrap['Copper']={'Copper Ore':1.796}
droprate_BitofMetalScrap['Runecrafter']={'Copper Ore':1.884}
droprate_BitofMetalScrap['Rare']={'Copper Ore':1.856}
""" T2 """
#Weak Clump of Ore
droprate_WeakClumpofOre = {}
#Peu
droprate_WeakClumpofOre['Copper']={'Copper Ore':0.37,'Silver Ore':0.65,'Iron Ore':0.81}
droprate_WeakClumpofOre['Runecrafter']={'Copper Ore':0.25,'Silver Ore':0.78,'Iron Ore':0.75}
droprate_WeakClumpofOre['Rare']={'Copper Ore':0.43,'Silver Ore':0.81,'Iron Ore':0.77}
#Pile of Metal Scrap
droprate_PileofMetalScrap = {}
#Peu
droprate_PileofMetalScrap['Copper']={'Copper Ore':0.608,'Silver Ore':0.748,'Iron Ore':0.504}
droprate_PileofMetalScrap['Runecrafter']={'Copper Ore':0.484,'Silver Ore':0.712,'Iron Ore':0.66}
droprate_PileofMetalScrap['Rare']={'Copper Ore':0.408,'Silver Ore':0.632,'Iron Ore':0.812}
""" T3 """
#Pile of Clump of Ore
droprate_ClumpofOre = {}
#Peu
droprate_ClumpofOre['Copper']={'Silver Ore':0.24,'Iron Ore':0.916,'Gold Ore':0.604}
droprate_ClumpofOre['Runecrafter']={'Silver Ore':0.148,'Iron Ore':1.008,'Gold Ore':0.728}
droprate_ClumpofOre['Rare']={'Silver Ore':0.2,'Iron Ore':0.924,'Gold Ore':0.792}
#Jagged Metal Scrap
droprate_JaggedMetalScrap = {}
#Peu
droprate_JaggedMetalScrap['Copper']={'Silver Ore':0.228,'Iron Ore':0.836,'Gold Ore':0.752}
droprate_JaggedMetalScrap['Runecrafter']={'Silver Ore':0.176,'Iron Ore':0.924,'Gold Ore':0.752}
droprate_JaggedMetalScrap['Rare']={'Silver Ore':0.212,'Iron Ore':1.012,'Gold Ore':0.704}
""" T4 """
#Laden Clump of Ore
droprate_LadenClumpofOre = {}
#Peu
droprate_LadenClumpofOre['Copper']={'Iron Ore':0.224,'Gold Ore':0.176,'Platinum Ore':1.484}
droprate_LadenClumpofOre['Runecrafter']={'Iron Ore':0.204,'Gold Ore':0.212,'Platinum Ore':1.436}
droprate_LadenClumpofOre['Rare']={'Iron Ore':0.22,'Gold Ore':0.16,'Platinum Ore':1.424}
#Metal Scrap
droprate_MetalScrap = {}
#Peu
droprate_MetalScrap['Copper']={'Iron Ore':0.212,'Gold Ore':0.276,'Platinum Ore':1.3}
droprate_MetalScrap['Runecrafter']={'Iron Ore':0.176,'Gold Ore':0.164,'Platinum Ore':1.476}
droprate_MetalScrap['Rare']={'Iron Ore':0.184,'Gold Ore':0.136,'Platinum Ore':1.488}
""" T5 """
#Loaded Clump of Ore
droprate_LoadedClumpofOre = {}
#Peu
droprate_LoadedClumpofOre['Copper']={'Platinum Ore':0.524,'Mithril Ore':1.088}
droprate_LoadedClumpofOre['Runecrafter']={'Platinum Ore':0.456,'Mithril Ore':1.312}
droprate_LoadedClumpofOre['Rare']={'Platinum Ore':0.392,'Mithril Ore':1.32}
#Salvageable Metal Scrap
droprate_SalvageableMetalScrap = {}
#Peu
droprate_SalvageableMetalScrap['Copper']={'Platinum Ore':0.53,'Mithril Ore':1.07}
droprate_SalvageableMetalScrap['Runecrafter']={'Platinum Ore':0.51,'Mithril Ore':1.1}
droprate_SalvageableMetalScrap['Rare']={'Platinum Ore':0.39,'Mithril Ore':1.32}
""" T6 """
#Rich Clump of Ore
droprate_RichClumpofOre = {}
#Peu
droprate_RichClumpofOre['Copper']={'Mithril Ore':1.172,'Orichalcum Ore':0.244}
droprate_RichClumpofOre['Runecrafter']={'Mithril Ore':1.472,'Orichalcum Ore':0.192}
droprate_RichClumpofOre['Rare']={'Mithril Ore':1.24,'Orichalcum Ore':0.212}
#Valuable Metal Scrap
droprate_ValuableMetalScrap = {}
#Peu
droprate_ValuableMetalScrap['Copper']={'Mithril Ore':1.216,'Orichalcum Ore':0.196}
droprate_ValuableMetalScrap['Runecrafter']={'Mithril Ore':1.276,'Orichalcum Ore':0.2}
droprate_ValuableMetalScrap['Rare']={'Mithril Ore':1.468,'Orichalcum Ore':0.204}
""" All Tiers """
#Unstable Metal Chunk
droprate_UnstableMetalChunk = {}
#Me
droprate_UnstableMetalChunk['Copper']={'Copper Ore':0.2035,'Iron Ore':0.9506,'Platinum Ore':0.5039,'Mithril Ore':0.1453,'Orichalcum Ore':0.2946}
droprate_UnstableMetalChunk['Runecrafter']={'Copper Ore':0.1531,'Iron Ore':0.911,'Platinum Ore':0.9593,'Mithril Ore':0.1966,'Orichalcum Ore':0.3427}
#Peu
droprate_UnstableMetalChunk['Rare']={'Copper Ore':0.136,'Iron Ore':1.004,'Platinum Ore':0.523,'Mithril Ore':0.151,'Orichalcum Ore':0.31}
"""
Drop rates: Leathers
"""
""" T1 """
#Tattered Hide
droprate_TatteredHide = {}
#Peureki
droprate_TatteredHide['Copper'] = {'Rawhide Leather Section':1.84}
droprate_TatteredHide['Runecrafter'] = {'Rawhide Leather Section':1.79}
droprate_TatteredHide['Rare'] = {'Rawhide Leather Section':1.87}
#Rawhide Leather Strap
droprate_RawhideLeatherStrap = {}
#Peureki
droprate_RawhideLeatherStrap['Copper'] = {'Rawhide Leather Section':1.788}
droprate_RawhideLeatherStrap['Runecrafter'] = {'Rawhide Leather Section':1.848}
droprate_RawhideLeatherStrap['Rare'] = {'Rawhide Leather Section':1.9}
#Tattered Pelt
droprate_TatteredPelt = {}
#Peureki
droprate_TatteredPelt['Copper'] = {'Rawhide Leather Section':1.9}
droprate_TatteredPelt['Runecrafter'] = {'Rawhide Leather Section':1.92}
droprate_TatteredPelt['Rare'] = {'Rawhide Leather Section':1.87}
""" T2 """
#Ripped Hide
droprate_RippedHide = {}
#Peureki
droprate_RippedHide['Copper'] = {'Rawhide Leather Section':0.46,'Thin Leather Section':1.33}
droprate_RippedHide['Runecrafter'] = {'Rawhide Leather Section':0.35,'Thin Leather Section':1.48}
droprate_RippedHide['Rare'] = {'Rawhide Leather Section':0.35,'Thin Leather Section':1.57}
#Thin Leather Strap
droprate_ThinLeatherStrap = {}
#Peureki
droprate_ThinLeatherStrap['Copper'] = {'Rawhide Leather Section':0.55,'Thin Leather Section':1.29}
droprate_ThinLeatherStrap['Runecrafter'] = {'Rawhide Leather Section':0.41,'Thin Leather Section':1.38}
droprate_ThinLeatherStrap['Rare'] = {'Rawhide Leather Section':0.35,'Thin Leather Section':1.59}
#Ripped Pelt
droprate_RippedPelt = {}
#Peureki
droprate_RippedPelt['Copper'] = {'Rawhide Leather Section':0.58,'Thin Leather Section':1.18}
droprate_RippedPelt['Runecrafter'] = {'Rawhide Leather Section':0.45,'Thin Leather Section':1.44}
droprate_RippedPelt['Rare'] = {'Rawhide Leather Section':0.35,'Thin Leather Section':1.56}
""" T3 """
#Torn Hide
droprate_TornHide = {}
#Peureki
droprate_TornHide['Copper'] = {'Thin Leather Section':0.48,'Coarse Leather Section':1.41}
droprate_TornHide['Runecrafter'] = {'Thin Leather Section':0.26,'Coarse Leather Section':1.6}
droprate_TornHide['Rare'] = {'Thin Leather Section':0.32,'Coarse Leather Section':1.6}
#Coarse Leather Strap
droprate_CoarseLeatherStrap = {}
#Peureki
droprate_CoarseLeatherStrap['Copper'] = {'Thin Leather Section':0.422,'Coarse Leather Section':1.38}
droprate_CoarseLeatherStrap['Runecrafter'] = {'Thin Leather Section':0.348,'Coarse Leather Section':1.44}
droprate_CoarseLeatherStrap['Rare'] = {'Thin Leather Section':0.456,'Coarse Leather Section':1.42}
#Torn Pelt
droprate_TornPelt = {}
#Peureki
droprate_TornPelt['Copper'] = {'Thin Leather Section':0.38,'Coarse Leather Section':1.48}
droprate_TornPelt['Runecrafter'] = {'Thin Leather Section':0.26,'Coarse Leather Section':1.6}
droprate_TornPelt['Rare'] = {'Thin Leather Section':0.32,'Coarse Leather Section':1.6}
""" T4 """
#Frayed Hide
droprate_FrayedHide={}
#Peu
droprate_FrayedHide['Copper']={'Coarse Leather Section':0.57,'Rugged Leather Section':1.16}
#mine
droprate_FrayedHide['Runecrafter']={'Coarse Leather Section':0.4167,'Rugged Leather Section':1.4132}
droprate_FrayedHide['Rare']={'Coarse Leather Section':0.3641,'Rugged Leather Section':1.5538}
#Thick Leather Strap
droprate_ThickLeatherStrap = {}
#Peureki
droprate_ThickLeatherStrap['Copper'] = {'Coarse Leather Section':0.52,'Rugged Leather Section':1.24}
droprate_ThickLeatherStrap['Runecrafter'] = {'Coarse Leather Section':0.29,'Rugged Leather Section':1.64}
droprate_ThickLeatherStrap['Rare'] = {'Coarse Leather Section':0.3,'Rugged Leather Section':1.53}
#Frayed Pelt
droprate_FrayedPelt = {}
#Peureki
droprate_FrayedPelt['Copper'] = {'Coarse Leather Section':0.52,'Rugged Leather Section':1.22}
droprate_FrayedPelt['Runecrafter'] = {'Coarse Leather Section':0.36,'Rugged Leather Section':1.4}
droprate_FrayedPelt['Rare'] = {'Coarse Leather Section':0.3,'Rugged Leather Section':1.62}
""" T5 """
#Filthy Hide
droprate_FilthyHIde = {}
#Peureki
droprate_FilthyHIde['Copper'] = {'Rugged Leather Section':1.36,'Thick Leather Section':0.4}
droprate_FilthyHIde['Runecrafter'] = {'Rugged Leather Section':0.7,'Thick Leather Section':0.96}
droprate_FilthyHIde['Rare'] = {'Rugged Leather Section':0.78,'Thick Leather Section':1.08}
#Rugged Leather Strap
droprate_RuggedLeatherStrap = {}
#Peureki
droprate_RuggedLeatherStrap['Copper'] = {'Rugged Leather Section':1.12,'Thick Leather Section':0.62}
droprate_RuggedLeatherStrap['Runecrafter'] = {'Rugged Leather Section':1.02,'Thick Leather Section':0.77}
droprate_RuggedLeatherStrap['Rare'] = {'Rugged Leather Section':0.83,'Thick Leather Section':0.9}
#Filthy Pelt
droprate_FilthyPelt = {}
#Peureki
droprate_FilthyPelt['Copper'] = {'Rugged Leather Section':1.28,'Thick Leather Section':0.48}
droprate_FilthyPelt['Runecrafter'] = {'Rugged Leather Section':1.24,'Thick Leather Section':0.58}
droprate_FilthyPelt['Rare'] = {'Rugged Leather Section':0.98,'Thick Leather Section':0.84}
""" T6 """
#Salvageable Hide
droprate_SalvageableHide = {}
#Peureki
droprate_SalvageableHide['Copper'] = {'Thick Leather Section':1.316,'Hardened Leather Section':0.064}
droprate_SalvageableHide['Runecrafter'] = {'Thick Leather Section':1.3,'Hardened Leather Section':0.076}
droprate_SalvageableHide['Rare'] = {'Thick Leather Section':1.236,'Hardened Leather Section':0.1}
#Hard Leather Strap
droprate_HardLeatherStrap={}
#Mine
droprate_HardLeatherStrap['Copper'] = {'Thick Leather Section':1.2844,'Hardened Leather Section':0.0791}
droprate_HardLeatherStrap['Runecrafter'] = {'Thick Leather Section':1.3045,'Hardened Leather Section':0.0813}
droprate_HardLeatherStrap['Rare'] = {'Thick Leather Section':1.2588,'Hardened Leather Section':0.0975}
#Salvageable Pelt
droprate_SalvageablePelt = {}
#Peureki
droprate_SalvageablePelt['Copper'] = {'Thick Leather Section':1.24,'Hardened Leather Section':0.100}
droprate_SalvageablePelt['Runecrafter'] = {'Thick Leather Section':1.21,'Hardened Leather Section':0.11}
droprate_SalvageablePelt['Rare'] = {'Thick Leather Section':1.22,'Hardened Leather Section':0.11}
""" All Tiers """
#Unstable Hide
droprate_UnstableHide = {}
#My data
droprate_UnstableHide['Copper'] = {'Rawhide Leather Section':0.1822,'Thin Leather Section':0.4846,'Coarse Leather Section':0.4884,'Rugged Leather Section':0.4612,'Thick Leather Section':0.1537,'Hardened Leather Section':0.3004}
droprate_UnstableHide['Runecrafter'] = {'Rawhide Leather Section':0.1746,'Thin Leather Section':0.4780,'Coarse Leather Section':0.4793,'Rugged Leather Section':0.4920,'Thick Leather Section':0.1646,'Hardened Leather Section':0.3170}
droprate_UnstableHide['Rare'] = {'Rawhide Leather Section':0.1747,'Thin Leather Section':0.4603,'Coarse Leather Section':0.4833,'Rugged Leather Section':0.5240,'Thick Leather Section':0.1606,'Hardened Leather Section':0.3366}
#Bloodstone-Warped Hide
droprate_BloodstoneWarpedHide={}
#my data only
droprate_BloodstoneWarpedHide['Copper'] = {'Rawhide Leather Section':0.0462,'Thin Leather Section':0.0533,'Coarse Leather Section':0.0445,'Rugged Leather Section':0.0467,'Thick Leather Section':0.4533,'Hardened Leather Section':0.4714}
droprate_BloodstoneWarpedHide['Runecrafter'] = {'Rawhide Leather Section':0.0483,'Thin Leather Section':0.0463,'Coarse Leather Section':0.0461,'Rugged Leather Section':0.0468,'Thick Leather Section':0.4820,'Hardened Leather Section':0.5337}
droprate_BloodstoneWarpedHide['Rare'] = {'Rawhide Leather Section':0.0534,'Thin Leather Section':0.0647,'Coarse Leather Section':0.0605,'Rugged Leather Section':0.0578,'Thick Leather Section':0.4863,'Hardened Leather Section':0.5581}
"""
Drop rates: Cloth
"""
""" T1 """
#Shredded Garment
droprate_ShreddedGarment = {}
#Peureki
droprate_ShreddedGarment['Copper']={'Jute Scrap':1.884}
droprate_ShreddedGarment['Runecrafter']={'Jute Scrap':1.836}
droprate_ShreddedGarment['Rare']={'Jute Scrap':2.016}
#Half-Eaten Mass
droprate_HalfEatenMass = {}
#Peureki
droprate_HalfEatenMass['Copper']={'Jute Scrap':1.73}
droprate_HalfEatenMass['Runecrafter']={'Jute Scrap':1.74}
droprate_HalfEatenMass['Rare']={'Jute Scrap':1.89}
#Shredded Rag
droprate_ShreddedRag = {}
#Peureki
droprate_ShreddedRag['Copper']={'Jute Scrap':1.784}
droprate_ShreddedRag['Runecrafter']={'Jute Scrap':1.844}
droprate_ShreddedRag['Rare']={'Jute Scrap':1.852}
""" T2 """
#Worn Garment
droprate_WornGarment = {}
#me
droprate_WornGarment['Copper']={'Jute Scrap':0.3560,'Wool Scrap':1.4320}
droprate_WornGarment['Runecrafter']={'Jute Scrap':0.4232,'Wool Scrap':1.4232}
droprate_WornGarment['Rare']={'Jute Scrap':0.3938,'Wool Scrap':1.4831}
#Decaying
droprate_DecayingMass = {}
#Peureki
droprate_DecayingMass['Copper']={'Jute Scrap':0.4,'Wool Scrap':1.42}
droprate_WornGarment['Runecrafter']={'Jute Scrap':0.68,'Wool Scrap':1.24}
droprate_WornGarment['Rare']={'Jute Scrap':0.38,'Wool Scrap':1.44}
#Worn Rag
droprate_WornRag = {}
#Me
droprate_WornRag['Copper']={'Jute Scrap':0.4772,'Wool Scrap':1.3423}
droprate_WornRag['Runecrafter']={'Jute Scrap':0.4283,'Wool Scrap':1.3811}
droprate_WornRag['Rare']={'Jute Scrap':0.3742,'Wool Scrap':1.5470}
""" T3 """
#Ragged Garment
droprate_RaggedGarment = {}
#Peu
droprate_RaggedGarment['Copper']={'Wool Scrap':00.492,'Cotton Scrap':1.372}
droprate_RaggedGarment['Runecrafter']={'Wool Scrap':00.416,'Cotton Scrap':1.424}
droprate_RaggedGarment['Rare']={'Wool Scrap':00.34,'Cotton Scrap':1.522}
#Fetid Mass
droprate_FetidMass = {}
#Peu
droprate_FetidMass['Copper']={'Wool Scrap':00.28,'Cotton Scrap':1.44}
droprate_FetidMass['Runecrafter']={'Wool Scrap':00.46,'Cotton Scrap':1.4}
droprate_FetidMass['Rare']={'Wool Scrap':00.26,'Cotton Scrap':1.54}
#Soiled Rag
droprate_SoiledRag = {}
#Peu
droprate_SoiledRag['Copper']={'Wool Scrap':00.36,'Cotton Scrap':1.54}
droprate_SoiledRag['Runecrafter']={'Wool Scrap':00.34,'Cotton Scrap':1.45}
droprate_SoiledRag['Rare']={'Wool Scrap':00.34,'Cotton Scrap':1.38}
""" T4 """
#Frayed Garment
droprate_FrayedGarment = {}
#wiki
droprate_FrayedGarment['Copper']={'Cotton Scrap':00.55,'Linen Scrap':1.25}
#Peu
droprate_FrayedGarment['Runecrafter']={'Cotton Scrap':00.484,'Linen Scrap':1.4}
droprate_FrayedGarment['Rare']={'Cotton Scrap':00.432,'Linen Scrap':0.976}
#Malodorous Mass
droprate_MalodorousMass = {}
#Peu
droprate_MalodorousMass['Copper']={'Cotton Scrap':00.43,'Linen Scrap':1.36}
droprate_MalodorousMass['Runecrafter']={'Cotton Scrap':00.45,'Linen Scrap':1.5}
droprate_MalodorousMass['Rare']={'Cotton Scrap':00.37,'Linen Scrap':1.46}
#Frayed Rag
droprate_FrayedRag = {}
#Peu
droprate_FrayedRag['Copper']={'Cotton Scrap':00.488,'Linen Scrap':1.308}
droprate_FrayedRag['Runecrafter']={'Cotton Scrap':00.424,'Linen Scrap':1.484}
droprate_FrayedRag['Rare']={'Cotton Scrap':00.324,'Linen Scrap':1.556}
""" T5 """
#Torn Garment
droprate_TornGarment = {}
#Peu
droprate_TornGarment['Copper']={'Linen Scrap':00.428,'Silk Scrap':1.4}
droprate_TornGarment['Runecrafter']={'Linen Scrap':00.436,'Silk Scrap':1.356}
droprate_TornGarment['Rare']={'Linen Scrap':00.448,'Silk Scrap':1.46}
#Half-Digested Mass
droprate_HalfDigestedMass = {}
#Peu
droprate_HalfDigestedMass['Copper']={'Linen Scrap':00.32,'Silk Scrap':1.42}
droprate_HalfDigestedMass['Runecrafter']={'Linen Scrap':00.53,'Silk Scrap':1.27}
droprate_HalfDigestedMass['Rare']={'Linen Scrap':00.35,'Silk Scrap':1.51}
#Torn Rag
droprate_TornRag = {}
#Peu
droprate_TornRag['Copper']={'Linen Scrap':00.35,'Silk Scrap':1.47}
droprate_TornRag['Runecrafter']={'Linen Scrap':00.43,'Silk Scrap':1.36}
#wiki
droprate_TornRag['Rare']={'Linen Scrap':00.324,'Silk Scrap':1.596}
""" T6 """
#Discarded Garment
droprate_DiscardedGarment = {}
#Peu
droprate_DiscardedGarment['Copper']={'Silk Scrap':1.31,'Gossamer Scrap':00.098}
droprate_DiscardedGarment['Runecrafter']={'Silk Scrap':1.366,'Gossamer Scrap':00.081}
droprate_DiscardedGarment['Rare']={'Silk Scrap':1.296,'Gossamer Scrap':00.121}
#Regurgitated Mass
droprate_RegurgitatedMass = {}
#Peu
droprate_RegurgitatedMass['Copper']={'Silk Scrap':1.61,'Gossamer Scrap':00.1}
droprate_RegurgitatedMass['Runecrafter']={'Silk Scrap':1.5,'Gossamer Scrap':00.13}
droprate_RegurgitatedMass['Rare']={'Silk Scrap':1.49,'Gossamer Scrap':00.08}
#Rag
droprate_Rag = {}
#Peu
droprate_Rag['Copper']={'Silk Scrap':1.596,'Gossamer Scrap':00.076}
droprate_Rag['Runecrafter']={'Silk Scrap':1.53,'Gossamer Scrap':00.124}
droprate_Rag['Rare']={'Silk Scrap':1.55,'Gossamer Scrap':00.104}
""" Additional Garments """
#Garment 28
droprate_Garment28 = {}
#No data anywhere. Placehoder for completeness
droprate_Garment28['Copper']={'Linen Scrap':00.00,'Silk Scrap':00.00}
droprate_Garment28['Runecrafter']={'Linen Scrap':00.00,'Silk Scrap':00.00}
droprate_Garment28['Rare']={'Linen Scrap':00.00,'Silk Scrap':00.00}
#Garment 32
droprate_Garment32 = {}
#No data anywhere. Placehoder for completeness
droprate_Garment32['Copper']={'Linen Scrap':00.00,'Silk Scrap':00.00}
droprate_Garment32['Runecrafter']={'Linen Scrap':00.00,'Silk Scrap':00.00}
droprate_Garment32['Rare']={'Linen Scrap':00.00,'Silk Scrap':00.00}
""" All Tiers """
#Unstable Cloth
droprate_UnstableRag = {}
#Peu
droprate_UnstableRag['Copper']={'Jute Scrap':0.1855,'Wool Scrap':0.5135,'Cotton Scrap':0.4850,'Linen Scrap':0.5166,'Silk Scrap':0.1855,'Gossamer Scrap':0.1917}
droprate_UnstableRag['Runecrafter']={'Jute Scrap':0.1746,'Wool Scrap':0.5373,'Cotton Scrap':0.5317,'Linen Scrap':0.4857,'Silk Scrap':0.1833,'Gossamer Scrap':0.1825}
droprate_UnstableRag['Rare']={'Jute Scrap':0.1604,'Wool Scrap':0.5076,'Cotton Scrap':0.5761,'Linen Scrap':0.4855,'Silk Scrap':0.2109,'Gossamer Scrap':0.1680}
"""
Drop rates: Wood
"""
#Yes, there's only 1
droprate_ReclaimedWoodChunk={}
#Wiki
droprate_ReclaimedWoodChunk['Copper']={'Green Wood Log':0.102,'Soft Wood Log':0.4703,'Seasoned Wood Log':0.504,'Hard Wood Log':0.5206,'Elder Wood Log':0.163,'Ancient Wood Log':0.277}
#Peu
droprate_ReclaimedWoodChunk['Runecrafter']={'Green Wood Log':0.109,'Soft Wood Log':0.523,'Seasoned Wood Log':0.546,'Hard Wood Log':0.436,'Elder Wood Log':0.178,'Ancient Wood Log':0.344}
droprate_ReclaimedWoodChunk['Rare']={'Green Wood Log':0.12,'Soft Wood Log':0.459,'Seasoned Wood Log':0.511,'Hard Wood Log':0.469,'Elder Wood Log':0.149,'Ancient Wood Log':0.331}
"""
Helper stuff
"""
#All relevant IDs
allIDs = [79423,#Wood salvage
21690,21678,21691,21679,21692,21680,21693,21681,21694,21682,21695,21683,79079,#Metal salvage
21661,21684,21653,21664,21685,21654,21667,21686,21655,21668,21687,21656,21670,21688,21657,22331,21689,21658,79213,80681,#Leather salvage
21669,22325,21659,21671,22326,21660,21672,22327,21662,21673,22328,21663,21674,22329,21665,21675,22330,21666,79138,#Cloth salvage
21676,21677,#The random other Rags
19723,19726,19727,19724,19722,19725,#raw wood
19710,19713,19714,19711,19709,19712,#refined wood
19697,19703,19699,19698,19702,19700,19701,#raw metal
19680,19679,19687,19683,19688,19682,19686,19681,19684,19685,#refined metal
19718,19739,19741,19743,19748,19745,#raw cloth
19720,19740,19742,19744,19747,19746,#refined cloth
19719,19728,19730,19731,19729,19732,#raw leather
19738,19733,19734,19736,19735,19737]#refined leather
#Salvage options
#salvageOptions 'Mystic':10.5, 'Copper':5 , 'Runecrafter':30, 'Silver':60
salvageCost = {'Copper':5 , 'Runecrafter':30, 'Rare':60}
#Containers
#defaulting to main ingots for refined to avoid problems. generate_multiplier will change as needed
unrefined_to_refined = {'Hardened Leather Section':'Cured Hardened Leather Square','Thick Leather Section':'Cured Thick Leather Square','Rugged Leather Section':'Cured Rugged Leather Square','Coarse Leather Section':'Cured Coarse Leather Square','Thin Leather Section':'Cured Thin Leather Square','Rawhide Leather Section':'Stretched Rawhide Leather Square',
'Copper Ore':'Copper Ingot','Silver Ore':'Silver Ingot','Iron Ore':'Iron Ingot','Gold Ore':'Gold Ingot','Platinum Ore':'Platinum Ingot','Mithril Ore':'Mithril Ingot','Orichalcum Ore':'Orichalcum Ingot',
'Jute Scrap':'Bolt of Jute','Wool Scrap':'Bolt of Wool','Cotton Scrap':'Bolt of Cotton','Linen Scrap':'Bolt of Linen','Silk Scrap':'Bolt of Silk','Gossamer Scrap':'Bolt of Gossamer',
'Green Wood Log':'Green Wood Plank','Soft Wood Log':'Soft Wood Plank','Seasoned Wood Log':'Seasoned Wood Plank','Hard Wood Log':'Hard Wood Plank','Elder Wood Log':'Elder Wood Plank','Ancient Wood Log':'Ancient Wood Plank'}
refined_scalar = {'Stretched Rawhide Leather Square':2,'Cured Thin Leather Square':2,'Cured Coarse Leather Square':2,'Cured Rugged Leather Square':2,'Cured Thick Leather Square':4,'Cured Hardened Leather Square':3,
'Copper Ingot':2,'Bronze Ingot':2,'Silver Ingot':2,'Iron Ingot':3,'Steel Ingot':3,'Gold Ingot':2,'Platinum Ingot':2,'Darksteel Ingot':2,'Mithril Ingot':2,'Orichalcum Ingot':2,
'Bolt of Jute':2,'Bolt of Wool':2,'Bolt of Cotton':2,'Bolt of Linen':2,'Bolt of Silk':3,'Bolt of Gossamer':2,
'Green Wood Plank':3,'Soft Wood Plank':2,'Seasoned Wood Plank':3,'Hard Wood Plank':3,'Elder Wood Plank':3,'Ancient Wood Plank':3,
'Pile of Lucent Crystal':10}
#Raw to refined lookup
#I don't think I need this
"""
Main Program
"""
if __name__ == '__main__':
    # Project-local helpers: sort_allAPI and generate_multiplier come from here.
    from calc_helpers import *
    # Python GW2 API wrapper library
    from gw2api import GuildWars2Client

    gw2_client = GuildWars2Client()
    # One bulk trading-post price request covering every salvage item and
    # every raw/refined material.
    allAPI = gw2_client.commerceprices.get(ids=allIDs)
    unrefined_prices, refined_prices, salvageLeather, salvageWood, salvageMetal, salvageCloth = sort_allAPI(allAPI)

    # Multiplier and raw-vs-refined decision are based off of sell prices.
    multiplier_prices, decision = generate_multiplier(unrefined_prices, refined_prices, refined_scalar, unrefined_to_refined, 1)

    # Price chart. Raw/refined columns only exist for materials with a refine
    # decision; everything else (e.g. charms/symbols) prints price only.
    print('{:<24} : {:>10} {:<10} {:<10} {:<10}'.format('Material','Sell Price','State','Raw','Refined'))
    print('-'*74)
    for key, value in multiplier_prices.items():
        if key in decision:
            print('{:<24} : {:>10} {:<10} {:<10} {:<10}'.format(key,value, decision[key],unrefined_prices[key][1],refined_prices[unrefined_to_refined[key]][1]))
        else:
            print('{:<24} : {:>10}'.format(key,value))

    # Salvage-value report, one section per material family. Each section is
    # (banner, price table for that family's salvage items,
    #  [(item display name, droprate table), ...]); the banner is printed
    # before its items, preserving the original output order exactly.
    sections = [
        ("Metal", salvageMetal, [
            ('Bit of Metal Scrap', droprate_BitofMetalScrap),             # T1
            ('Brittle Clump of Ore', droprate_BrittleClumpofOre),
            ('Weak Clump of Ore', droprate_WeakClumpofOre),                # T2
            ('Pile of Metal Scrap', droprate_PileofMetalScrap),
            ('Clump of Ore', droprate_ClumpofOre),                         # T3
            ('Jagged Metal Scrap', droprate_JaggedMetalScrap),
            ('Laden Clump of Ore', droprate_LadenClumpofOre),              # T4
            ('Metal Scrap', droprate_MetalScrap),
            ('Loaded Clump of Ore', droprate_LoadedClumpofOre),            # T5
            ('Salvageable Metal Scrap', droprate_SalvageableMetalScrap),
            ('Rich Clump of Ore', droprate_RichClumpofOre),                # T6
            ('Valuable Metal Scrap', droprate_ValuableMetalScrap),
            ('Unstable Metal Chunk', droprate_UnstableMetalChunk),         # all tiers
        ]),
        ("Metal / / / Leather", salvageLeather, [
            ('Tattered Hide', droprate_TatteredHide),                      # T1
            ('Rawhide Leather Strap', droprate_RawhideLeatherStrap),
            ('Tattered Pelt', droprate_TatteredPelt),
            ('Ripped Hide', droprate_RippedHide),                          # T2
            ('Thin Leather Strap', droprate_ThinLeatherStrap),
            ('Ripped Pelt', droprate_RippedPelt),
            ('Torn Hide', droprate_TornHide),                              # T3
            ('Coarse Leather Strap', droprate_CoarseLeatherStrap),
            ('Torn Pelt', droprate_TornPelt),
            ('Frayed Hide', droprate_FrayedHide),                          # T4
            ('Thick Leather Strap', droprate_ThickLeatherStrap),
            ('Frayed Pelt', droprate_FrayedPelt),
            ('Filthy Hide', droprate_FilthyHIde),                          # T5
            ('Rugged Leather Strap', droprate_RuggedLeatherStrap),
            ('Filthy Pelt', droprate_FilthyPelt),
            ('Salvageable Hide', droprate_SalvageableHide),                # T6
            ('Hard Leather Strap', droprate_HardLeatherStrap),
            ('Salvageable Pelt', droprate_SalvageablePelt),
            ('Unstable Hide', droprate_UnstableHide),                      # all tiers
            ('Bloodstone-Warped Hide', droprate_BloodstoneWarpedHide),
        ]),
        ("Leather / / / Cloth", salvageCloth, [
            ('Shredded Garment', droprate_ShreddedGarment),                # T1
            ('Half-Eaten Mass', droprate_HalfEatenMass),
            ('Shredded Rag', droprate_ShreddedRag),
            ('Worn Garment', droprate_WornGarment),                        # T2
            ('Decaying Mass', droprate_DecayingMass),
            ('Worn Rag', droprate_WornRag),
            ('Ragged Garment', droprate_RaggedGarment),                    # T3
            ('Fetid Mass', droprate_FetidMass),
            ('Soiled Rag', droprate_SoiledRag),
            ('Frayed Garment', droprate_FrayedGarment),                    # T4
            ('Malodorous Mass', droprate_MalodorousMass),
            ('Frayed Rag', droprate_FrayedRag),
            ('Torn Garment', droprate_TornGarment),                        # T5
            ('Half-Digested Mass', droprate_HalfDigestedMass),
            ('Torn Rag', droprate_TornRag),
            ('Discarded Garment', droprate_DiscardedGarment),              # T6
            ('Regurgitated Mass', droprate_RegurgitatedMass),
            ('Rag', droprate_Rag),
            ('Garment_28', droprate_Garment28),                            # extra garments
            ('Garment_32', droprate_Garment32),
            ('Unstable Rag', droprate_UnstableRag),                        # all tiers
        ]),
        ("Cloth / / / Wood", salvageWood, [
            ('Reclaimed Wood Chunk', droprate_ReclaimedWoodChunk),
        ]),
    ]

    worthbuyinglist = []
    for banner, family_prices, items in sections:
        print('\n', '#'*10, banner, '#'*10)
        for item_name, droprates in items:
            # 0 selects buy-order prices for the salvage item cost.
            worthbuyinglist.append(
                salvagePrint(item_name, family_prices, multiplier_prices, droprates, salvageCost, 0))

    print('\n', '#'*10, "Wood / / / Summary ", '#'*10)
    # Summary: one pass per recommendation tier, highest profit first.
    # entry[2] is absolute profit -- the sort key -- not the profit
    # percentage held in entry[3]. One entry per line, so each tier is
    # printed in its own loop rather than as a list of lists.
    for category in ('MEGA BUY', 'BUYBUYBUY', 'Good', 'Consider'):
        for entry in sorted((e for e in worthbuyinglist if category in e),
                            key=lambda e: e[2], reverse=True):
            print(entry)
| [
"gw2api.GuildWars2Client"
] | [((46466, 46484), 'gw2api.GuildWars2Client', 'GuildWars2Client', ([], {}), '()\n', (46482, 46484), False, 'from gw2api import GuildWars2Client\n')] |
import os
import pytest
from io import StringIO
pytest.register_assert_rewrite('tests.common')
@pytest.fixture
def content():
    """Fixture providing a helper that returns a file's full text."""
    def _reader(filename):
        with open(filename) as handle:
            return handle.read()

    return _reader
@pytest.fixture
def expected(request):
    """Load the `<module>.<test_function>.exp` file next to the test module."""
    base = os.path.splitext(request.module.__file__)[0]
    exp_path = f"{base}.{request.function.__name__}.exp"
    with open(exp_path) as handle:
        return handle.read()
@pytest.fixture
def rpath(request):
    """Fixture resolving a file next to the test module to a path relative
    to this conftest's parent directory."""
    module_dir = os.path.dirname(request.module.__file__)
    anchor = os.path.join(os.path.dirname(__file__), '..')

    def _path_resolver(filename):
        return os.path.relpath(os.path.join(module_dir, filename), anchor)

    return _path_resolver
@pytest.fixture
def stringio():
    """Provide a fresh in-memory text buffer."""
    return StringIO()
class _StringIOTTY(StringIO):
    """In-memory text buffer that reports itself as attached to a TTY."""

    def isatty(self):
        """Always report TTY status, unlike plain StringIO."""
        return True
@pytest.fixture
def stringio_tty():
    """Provide an in-memory text buffer that claims to be a TTY."""
    return _StringIOTTY()
| [
"os.path.dirname",
"io.StringIO",
"os.path.splitext",
"pytest.register_assert_rewrite"
] | [((51, 97), 'pytest.register_assert_rewrite', 'pytest.register_assert_rewrite', (['"""tests.common"""'], {}), "('tests.common')\n", (81, 97), False, 'import pytest\n'), ((832, 842), 'io.StringIO', 'StringIO', ([], {}), '()\n', (840, 842), False, 'from io import StringIO\n'), ((296, 337), 'os.path.splitext', 'os.path.splitext', (['request.module.__file__'], {}), '(request.module.__file__)\n', (312, 337), False, 'import os\n'), ((566, 606), 'os.path.dirname', 'os.path.dirname', (['request.module.__file__'], {}), '(request.module.__file__)\n', (581, 606), False, 'import os\n'), ((716, 741), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (731, 741), False, 'import os\n')] |
from datetime import datetime, timedelta
from NodeDefender.db.sql import SQL, HeatModel, NodeModel, iCPEModel, GroupModel
from sqlalchemy import func
from sqlalchemy.sql import label
from itertools import groupby
def current(group):
    """Return the current (last 30 minutes) average heat per node in a group.

    :param group: group name to look up
    :return: list of per-node dicts (``name``, ``heat``) followed by a group
        summary dict (``name``, ``heat`` = sum of node averages), or ``False``
        when no group with that name exists.
    """
    group = SQL.session.query(GroupModel).filter(GroupModel.name ==
                                               group).first()
    if group is None:
        return False
    ret_data = []
    group_data = {}
    group_data['name'] = group.name
    group_data['heat'] = 0.0
    for node in group.nodes:
        # Nodes without a registered iCPE have no sensor readings to report.
        if not node.icpe:
            continue
        node_data = {}
        node_data['name'] = node.name
        min_ago = (datetime.now() - timedelta(hours=0.5))
        # Sum and count of HeatModel.average rows within the last half hour,
        # joined through the node's iCPE MAC address.
        latest_heat = SQL.session.query(HeatModel,\
                    label('sum', func.sum(HeatModel.average)),
                    label('count', func.count(HeatModel.average))).\
                join(HeatModel.icpe).\
                filter(iCPEModel.mac_address == node.icpe.mac_address).\
                filter(HeatModel.date > min_ago).first()
        if latest_heat.count:
            node_data['heat'] = latest_heat.sum / latest_heat.count
            group_data['heat'] += node_data['heat']
        else:
            # No readings in the window: report zero rather than omitting the node.
            node_data['heat'] = 0.0
        ret_data.append(node_data)
    ret_data.append(group_data)
    return ret_data
def _window_average(icpes, since):
    """Average ``HeatModel.average`` over the given iCPE MACs since ``since``.

    Returns 0.0 when no rows fall inside the window.
    """
    result = SQL.session.query(
            HeatModel,
            label('sum', func.sum(HeatModel.average)),
            label('count', func.count(HeatModel.average))).\
        join(HeatModel.icpe).\
        filter(iCPEModel.mac_address.in_(icpes)).\
        filter(HeatModel.date > since).first()
    if result.count:
        return result.sum / result.count
    return 0.0


def average(group):
    """Return current/daily/weekly/monthly average heat for a group.

    :param group: group name to look up
    :return: dict with keys ``name``, ``current`` (30 min), ``daily``,
        ``weekly`` and ``monthly`` averages, or ``False`` when the group
        does not exist.
    """
    group = SQL.session.query(GroupModel).filter(GroupModel.name ==
                                               group).first()
    if group is None:
        return False
    now = datetime.now()
    # Only nodes that actually have an iCPE contribute sensor data.
    icpes = [node.icpe.mac_address for node in group.nodes if node.icpe]
    # The four windows differ only in their cutoff; the query itself is shared.
    return {
        'name': group.name,
        'current': _window_average(icpes, now - timedelta(hours=0.5)),
        'daily': _window_average(icpes, now - timedelta(days=1)),
        'weekly': _window_average(icpes, now - timedelta(days=7)),
        'monthly': _window_average(icpes, now - timedelta(days=30)),
    }
def chart(group):
    """Return per-node heat time series for the last 30 days.

    :param group: group name to look up
    :return: list of dicts, one per node with an iCPE and data, each holding
        ``name`` and ``heat`` (a list of ``{'date': str, 'value': float}``
        entries, one per distinct reading date), or ``False`` when the group
        does not exist.
    """
    from_date = (datetime.now() - timedelta(days=30))
    to_date = datetime.now()
    group = SQL.session.query(GroupModel).filter(GroupModel.name ==
                                               group).first()
    if group is None:
        return False
    ret_data = []
    for node in group.nodes:
        if not node.icpe:
            continue
        heat_data = SQL.session.query(HeatModel).\
                join(HeatModel.icpe).\
                filter(iCPEModel.mac_address == node.icpe.mac_address).\
                filter(HeatModel.date > from_date).\
                filter(HeatModel.date < to_date).all()
        if not heat_data:
            continue
        node_data = {'name': node.name, 'heat': []}
        # NOTE: groupby only merges *adjacent* rows with equal dates; this
        # assumes heat_data comes back ordered by date -- confirm the query.
        grouped_data = [list(v) for k, v in groupby(heat_data, lambda p:
                                                    p.date)]
        for data in grouped_data:
            entry = {'date': str(data[0].date)}
            for heat in data:
                # BUG FIX: the original read entry['heat'], a key that is
                # never set, so the KeyError branch always fired and the
                # running average collapsed to the last sample.
                if 'value' in entry:
                    entry['value'] = (heat.average + entry['value']) / 2
                else:
                    entry['value'] = heat.average
            node_data['heat'].append(entry)
        ret_data.append(node_data)
    return ret_data
| [
"NodeDefender.db.sql.iCPEModel.mac_address.in_",
"sqlalchemy.func.count",
"itertools.groupby",
"NodeDefender.db.sql.SQL.session.query",
"sqlalchemy.func.sum",
"datetime.datetime.now",
"datetime.timedelta"
] | [((4175, 4189), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4187, 4189), False, 'from datetime import datetime, timedelta\n'), ((1603, 1617), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1615, 1617), False, 'from datetime import datetime, timedelta\n'), ((1620, 1640), 'datetime.timedelta', 'timedelta', ([], {'hours': '(0.5)'}), '(hours=0.5)\n', (1629, 1640), False, 'from datetime import datetime, timedelta\n'), ((1657, 1671), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1669, 1671), False, 'from datetime import datetime, timedelta\n'), ((1674, 1691), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (1683, 1691), False, 'from datetime import datetime, timedelta\n'), ((1709, 1723), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1721, 1723), False, 'from datetime import datetime, timedelta\n'), ((1726, 1743), 'datetime.timedelta', 'timedelta', ([], {'days': '(7)'}), '(days=7)\n', (1735, 1743), False, 'from datetime import datetime, timedelta\n'), ((1762, 1776), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1774, 1776), False, 'from datetime import datetime, timedelta\n'), ((1779, 1797), 'datetime.timedelta', 'timedelta', ([], {'days': '(30)'}), '(days=30)\n', (1788, 1797), False, 'from datetime import datetime, timedelta\n'), ((4124, 4138), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4136, 4138), False, 'from datetime import datetime, timedelta\n'), ((4141, 4159), 'datetime.timedelta', 'timedelta', ([], {'days': '(30)'}), '(days=30)\n', (4150, 4159), False, 'from datetime import datetime, timedelta\n'), ((682, 696), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (694, 696), False, 'from datetime import datetime, timedelta\n'), ((699, 719), 'datetime.timedelta', 'timedelta', ([], {'hours': '(0.5)'}), '(hours=0.5)\n', (708, 719), False, 'from datetime import datetime, timedelta\n'), ((4934, 4970), 'itertools.groupby', 'groupby', (['heat_data', 
'(lambda p: p.date)'], {}), '(heat_data, lambda p: p.date)\n', (4941, 4970), False, 'from itertools import groupby\n'), ((246, 275), 'NodeDefender.db.sql.SQL.session.query', 'SQL.session.query', (['GroupModel'], {}), '(GroupModel)\n', (263, 275), False, 'from NodeDefender.db.sql import SQL, HeatModel, NodeModel, iCPEModel, GroupModel\n'), ((1426, 1455), 'NodeDefender.db.sql.SQL.session.query', 'SQL.session.query', (['GroupModel'], {}), '(GroupModel)\n', (1443, 1455), False, 'from NodeDefender.db.sql import SQL, HeatModel, NodeModel, iCPEModel, GroupModel\n'), ((4207, 4236), 'NodeDefender.db.sql.SQL.session.query', 'SQL.session.query', (['GroupModel'], {}), '(GroupModel)\n', (4224, 4236), False, 'from NodeDefender.db.sql import SQL, HeatModel, NodeModel, iCPEModel, GroupModel\n'), ((2298, 2333), 'NodeDefender.db.sql.iCPEModel.mac_address.in_', 'iCPEModel.mac_address.in_', (['*[icpes]'], {}), '(*[icpes])\n', (2323, 2333), False, 'from NodeDefender.db.sql import SQL, HeatModel, NodeModel, iCPEModel, GroupModel\n'), ((2632, 2667), 'NodeDefender.db.sql.iCPEModel.mac_address.in_', 'iCPEModel.mac_address.in_', (['*[icpes]'], {}), '(*[icpes])\n', (2657, 2667), False, 'from NodeDefender.db.sql import SQL, HeatModel, NodeModel, iCPEModel, GroupModel\n'), ((2967, 3002), 'NodeDefender.db.sql.iCPEModel.mac_address.in_', 'iCPEModel.mac_address.in_', (['*[icpes]'], {}), '(*[icpes])\n', (2992, 3002), False, 'from NodeDefender.db.sql import SQL, HeatModel, NodeModel, iCPEModel, GroupModel\n'), ((3300, 3335), 'NodeDefender.db.sql.iCPEModel.mac_address.in_', 'iCPEModel.mac_address.in_', (['*[icpes]'], {}), '(*[icpes])\n', (3325, 3335), False, 'from NodeDefender.db.sql import SQL, HeatModel, NodeModel, iCPEModel, GroupModel\n'), ((2141, 2168), 'sqlalchemy.func.sum', 'func.sum', (['HeatModel.average'], {}), '(HeatModel.average)\n', (2149, 2168), False, 'from sqlalchemy import func\n'), ((2202, 2231), 'sqlalchemy.func.count', 'func.count', (['HeatModel.average'], {}), 
'(HeatModel.average)\n', (2212, 2231), False, 'from sqlalchemy import func\n'), ((2475, 2502), 'sqlalchemy.func.sum', 'func.sum', (['HeatModel.average'], {}), '(HeatModel.average)\n', (2483, 2502), False, 'from sqlalchemy import func\n'), ((2536, 2565), 'sqlalchemy.func.count', 'func.count', (['HeatModel.average'], {}), '(HeatModel.average)\n', (2546, 2565), False, 'from sqlalchemy import func\n'), ((2810, 2837), 'sqlalchemy.func.sum', 'func.sum', (['HeatModel.average'], {}), '(HeatModel.average)\n', (2818, 2837), False, 'from sqlalchemy import func\n'), ((2871, 2900), 'sqlalchemy.func.count', 'func.count', (['HeatModel.average'], {}), '(HeatModel.average)\n', (2881, 2900), False, 'from sqlalchemy import func\n'), ((3143, 3170), 'sqlalchemy.func.sum', 'func.sum', (['HeatModel.average'], {}), '(HeatModel.average)\n', (3151, 3170), False, 'from sqlalchemy import func\n'), ((3204, 3233), 'sqlalchemy.func.count', 'func.count', (['HeatModel.average'], {}), '(HeatModel.average)\n', (3214, 3233), False, 'from sqlalchemy import func\n'), ((807, 834), 'sqlalchemy.func.sum', 'func.sum', (['HeatModel.average'], {}), '(HeatModel.average)\n', (815, 834), False, 'from sqlalchemy import func\n'), ((872, 901), 'sqlalchemy.func.count', 'func.count', (['HeatModel.average'], {}), '(HeatModel.average)\n', (882, 901), False, 'from sqlalchemy import func\n'), ((4490, 4518), 'NodeDefender.db.sql.SQL.session.query', 'SQL.session.query', (['HeatModel'], {}), '(HeatModel)\n', (4507, 4518), False, 'from NodeDefender.db.sql import SQL, HeatModel, NodeModel, iCPEModel, GroupModel\n')] |
# -*- coding: utf-8 -*-
"""
main program for IMRT QA PDF report parser
Created on Thu May 30 2019
@author: <NAME>, PhD
"""
from os.path import isdir, join, splitext, normpath
from os import walk, listdir
import zipfile
from datetime import datetime
from dateutil.parser import parse as date_parser
import numpy as np
import codecs
DELIMITER = ',' # delimiter for the csv output file for process_files
ALTERNATE = '^' # replace the delimiter character with this so not to confuse csv file parsing
def are_all_strings_in_text(text, list_of_strings):
    """
    :param text: output from convert_pdf_to_text
    :type text: list of str
    :param list_of_strings: a list of strings used to identify document type
    :type list_of_strings: list of str
    :return: Will return true if every string in list_of_strings is found in the text data
    :rtype: bool
    """
    # all() with a generator short-circuits on the first missing string,
    # matching the original early-return loop (vacuously True when empty).
    return all(str_to_find in text for str_to_find in list_of_strings)
#############################################################
# CSV related functions
#############################################################
def get_csv(data, columns):
    """
    Convert a dictionary of data into a row for a csv file
    :param data: a dictionary with values with str representations
    :type data: dict
    :param columns: a list of keys dictating the order of the csv
    :type columns: list
    :return: a csv string delimited by DELIMITER
    :rtype: str
    """
    # Swap any embedded delimiter for ALTERNATE so the row stays parseable.
    sanitized = (str(data[column]).replace(DELIMITER, ALTERNATE)
                 for column in columns)
    return DELIMITER.join(sanitized)
def load_csv_file(file_path):
    """Read a csv file tolerantly (utf-8, bad bytes ignored) into rows of fields."""
    stream = codecs.open(file_path, 'r', encoding='utf-8', errors='ignore')
    with stream as doc:
        return [row.split(',') for row in doc]
def import_csv(file_path, day_first=False):
    """Load an IQDM results csv and return its columns sorted by plan date.

    :param file_path: path to the results csv
    :param day_first: interpret ambiguous dates as day-first
    :return: dict of column-name -> list, plus a 'date_time_obj' column,
        with every column reordered by ascending plan date
    """
    raw_data = load_csv_file(file_path)
    header = raw_data.pop(0)  # first row holds the column names
    keys = [key.strip() for key in header if key.strip()] + ['file_name']
    data = {key: [] for key in keys}
    for row in raw_data:
        for col, key in enumerate(keys):
            data[key].append(row[col])
    date_time_objs = get_date_times(data, day_first=day_first)
    order = get_sorted_indices(date_time_objs)
    sorted_data = {key: [data[key][i] for i in order] for key in keys}
    sorted_data['date_time_obj'] = [date_time_objs[i] for i in order]
    return sorted_data
def get_file_names_from_csv_file(file_path):
    """Return the normalized file-name field from each row of a results csv.

    The file name is everything past the header columns; it may itself
    contain commas, so the trailing fields are rejoined before normalizing.
    """
    raw_data = load_csv_file(file_path)
    column_headers = raw_data.pop(0)  # drop the header row
    fp_start = len(column_headers)
    file_names = []
    for row in raw_data:
        rejoined = ','.join(row[fp_start:])
        file_names.append(normpath(rejoined.strip()))
    return file_names
#############################################################
# Plotting and Stat related functions
#############################################################
def collapse_into_single_dates(x, y):
    """
    Function used for a time plot to convert multiple values into one value, while retaining enough information
    to perform a moving average over time
    :param x: a list of dates in ascending order
    :param y: a list of values and can use the '+' operator as a function of date
    :return: a unique list of dates, sum of y for that date, and number of original points for that date
    :rtype: dict
    """
    # Guard the empty case: the original raised IndexError on x[0].
    if not x:
        return {'x': [], 'y': [], 'w': []}
    # average daily data and keep track of points per day
    x_collapsed = [x[0]]
    y_collapsed = [y[0]]
    w_collapsed = [1]
    for n in range(1, len(x)):
        if x[n] == x_collapsed[-1]:
            # Same date as the previous entry: accumulate value and weight.
            y_collapsed[-1] = (y_collapsed[-1] + y[n])
            w_collapsed[-1] += 1
        else:
            x_collapsed.append(x[n])
            y_collapsed.append(y[n])
            w_collapsed.append(1)
    return {'x': x_collapsed, 'y': y_collapsed, 'w': w_collapsed}
def moving_avg(xyw, avg_len):
    """
    Calculate a moving average for a given averaging length
    :param xyw: output from collapse_into_single_dates
    :type xyw: dict
    :param avg_len: average of these number of points, i.e., look-back window
    :type avg_len: int
    :return: list of x values, list of y values
    :rtype: tuple
    """
    # Prefix sums of per-date means (sum / weight), seeded with 0 so each
    # window average is just a difference of two prefix entries.
    prefix = [0]
    averages = []
    for idx, value in enumerate(xyw['y'], 1):
        prefix.append(prefix[idx - 1] + value / xyw['w'][idx - 1])
        if idx >= avg_len:
            averages.append((prefix[idx] - prefix[idx - avg_len]) / avg_len)
    if averages:
        dates = [xyw['x'][i] for i in range(avg_len - 1, len(xyw['x']))]
    else:
        # Fewer points than the window: no averages, no dates.
        dates = []
    return dates, averages
def get_sorted_indices(some_list):
    """Return the indices that would sort ``some_list`` (an argsort),
    tolerating lists that mix strings and numbers."""
    def _argsort(values):
        return [pair[0] for pair in sorted(enumerate(values), key=lambda pair: pair[1])]

    try:
        return _argsort(some_list)
    except TypeError:  # can't sort if a mix of str and float
        try:
            # Treat the literal string 'None' as -inf so it sorts first.
            substituted = [[value, -float('inf')][value == 'None'] for value in some_list]
            return _argsort(substituted)
        except TypeError:
            # Still mixed types: fall back to lexicographic string order.
            return _argsort([str(value) for value in some_list])
def get_date_times(data, datetime_key='Plan Date', row_id_key='Patient ID', day_first=False):
    """Parse ``data[datetime_key]`` strings into ``date`` objects.

    Unparsable entries are reported on stdout and replaced with today's date
    so the returned list always matches the input length.
    """
    dates = []
    for i, date_str in enumerate(data[datetime_key]):
        try:
            parsed = date_parser(date_str, dayfirst=day_first).date()
        except ValueError:
            print('ERROR: Could not parse the following into a date: %s' % date_str)
            print("\tPatient ID: %s" % data[row_id_key][i])
            print("\tUsing today's date instead")
            parsed = datetime.today().date()
        dates.append(parsed)
    return dates
def get_control_limits(y):
    """
    Calculate control limits for Control Chart
    :param y: data
    :type y: list
    :return: center line, upper control limit, and lower control limit
    """
    values = np.asarray(y)
    center_line = np.mean(values)
    # Mean moving range of consecutive points, scaled by the standard
    # d2 constant (1.128 for subgroups of size 2) to estimate sigma.
    avg_moving_range = np.mean(np.abs(np.diff(values)))
    three_sigma = 3 * avg_moving_range / 1.128
    return center_line, center_line + three_sigma, center_line - three_sigma
#############################################################
# File related functions
#############################################################
def extract_files_from_zipped_files(init_directory, extract_to_path, extension='.pdf'):
    """
    Function to extract .pdf files from zipped files
    :param init_directory: initial top-level directory to walk through
    :type init_directory: str
    :param extract_to_path: directory to extract pdfs into
    :type extract_to_path: str
    :param extension: file extension of file type to extract, set to None to extract all files
    :type extension: str or None
    """
    for dirName, subdirList, fileList in walk(init_directory):  # iterate through files and all sub-directories
        for fileName in fileList:
            # BUG FIX: the original compared the bound method
            # splitext(...)[1].lower (no call parentheses) to '.zip',
            # which is always False, so nothing was ever extracted.
            if splitext(fileName)[1].lower() == '.zip':
                zip_file_path = join(dirName, fileName)
                with zipfile.ZipFile(zip_file_path, 'r') as z:
                    for file_name in z.namelist():
                        if isdir(file_name):
                            continue
                        # Same missing-call bug fixed on the extension check.
                        if extension is None or splitext(file_name)[1].lower() == extension:
                            z.extract(file_name, path=extract_to_path)
def find_latest_results(init_directory, no_recursive_search=False):
    """
    Find the most recent IQDM results csv file within the provided directory
    :param init_directory: initial scan directory
    :type init_directory: str
    :param no_recursive_search: set to True to ignore subdirectories
    :type no_recursive_search: bool
    :return: a dictionary like {report_type: {'time_stamp': datetime, 'file_path': str}}
    :rtype: dict
    """
    results = {}
    if no_recursive_search:
        process_result_csvs(listdir(init_directory), results)
        return results
    # Recursive case: visit every sub-directory under init_directory.
    for directory, _, file_names in walk(init_directory):
        process_result_csvs(file_names, results, directory_name=directory)
    return results
def process_result_csvs(file_list, results, directory_name=None):
    """
    Parse each file for report type and time stamp, edit results with the latest file_path for each report_type
    :param file_list: files to be parsed
    :type file_list: list
    :param results: results dict from find_latest_results()
    :type results: dict
    :param directory_name: optionally specify the directory
    :type directory_name: str
    """
    for file_name in file_list:
        fn = splitext(file_name)[0].lower()
        ext = splitext(file_name)[1].lower()
        if ext != '.csv' or '_results_' not in fn:
            continue
        try:
            result_info = file_name.split('_')
            report_type = result_info[0]
            time_stamp = result_info[2].replace(ext, '')
            # Drop the trailing fractional seconds ('.ffffff') before parsing.
            time_stamp = datetime.strptime(time_stamp[:-7], '%Y-%m-%d %H-%M-%S')
            # BUG FIX: the original mixed `and`/`or` without parentheses,
            # so a falsy report_type fell through to a KeyError lookup on
            # `results[report_type]`. Keep truthy types that are new or newer.
            if report_type and (report_type not in results
                                or results[report_type]['time_stamp'] < time_stamp):
                if directory_name is None:
                    file_path = file_name
                else:
                    file_path = join(directory_name, file_name)
                results[report_type] = {'time_stamp': time_stamp,
                                        'file_path': file_path}
        except (IndexError, ValueError, KeyError):
            # Malformed file name or unparsable time stamp: skip this file.
            continue
def get_processed_files(init_directory, no_recursive_search=False):
    """Collect every file name recorded in results csvs under ``init_directory``.

    :param init_directory: directory to scan for ``*_results_*.csv`` files
    :param no_recursive_search: set to True to ignore subdirectories
    :return: de-duplicated list of file names found in those csvs
    """
    processed = []
    if no_recursive_search:
        get_file_names_from_result_csvs(listdir(init_directory), processed)
        return list(set(processed))
    for directory, _, file_names in walk(init_directory):  # iterate through files and all sub-directories
        get_file_names_from_result_csvs(file_names, processed, directory_name=directory)
    return list(set(processed))
def get_file_names_from_result_csvs(file_list, processed, directory_name=None):
    """Append the file names recorded inside each results csv to ``processed``.

    :param file_list: candidate file names to inspect
    :param processed: list mutated in place with the extracted names
    :param directory_name: optional directory to join onto each file name
    """
    for file_name in file_list:
        base = splitext(file_name)[0].lower()
        ext = splitext(file_name)[1].lower()
        if ext != '.csv' or '_results_' not in base:
            continue
        file_path = file_name if directory_name is None else join(directory_name, file_name)
        try:
            processed.extend(get_file_names_from_csv_file(file_path))
        except Exception:
            # Best-effort scan: an unreadable or malformed csv is skipped.
            continue
def is_file_name_found_in_processed_files(file_name, directory, processed_files):
    """Return True if ``file_name`` appears to already be in ``processed_files``.

    NOTE(review): the two checks are asymmetric -- the first is a *substring*
    test against each individual ``processed_file`` string, while the second
    is an exact-membership test against the whole ``processed_files`` list
    (the names differ by one letter). This looks like a typo for
    ``processed_file``; confirm which behavior was intended before changing.
    """
    for processed_file in processed_files:
        if normpath(file_name) in processed_file or normpath(join(directory, file_name)) in processed_files:
            return True
    return False
| [
"numpy.mean",
"dateutil.parser.parse",
"os.listdir",
"zipfile.ZipFile",
"datetime.datetime.strptime",
"os.path.join",
"numpy.diff",
"os.path.splitext",
"os.path.normpath",
"numpy.array",
"os.path.isdir",
"datetime.datetime.today",
"codecs.open",
"os.walk"
] | [((6035, 6046), 'numpy.array', 'np.array', (['y'], {}), '(y)\n', (6043, 6046), True, 'import numpy as np\n'), ((6066, 6076), 'numpy.mean', 'np.mean', (['y'], {}), '(y)\n', (6073, 6076), True, 'import numpy as np\n'), ((6970, 6990), 'os.walk', 'walk', (['init_directory'], {}), '(init_directory)\n', (6974, 6990), False, 'from os import walk, listdir\n'), ((1650, 1712), 'codecs.open', 'codecs.open', (['file_path', '"""r"""'], {'encoding': '"""utf-8"""', 'errors': '"""ignore"""'}), "(file_path, 'r', encoding='utf-8', errors='ignore')\n", (1661, 1712), False, 'import codecs\n'), ((8160, 8180), 'os.walk', 'walk', (['init_directory'], {}), '(init_directory)\n', (8164, 8180), False, 'from os import walk, listdir\n'), ((9922, 9942), 'os.walk', 'walk', (['init_directory'], {}), '(init_directory)\n', (9926, 9942), False, 'from os import walk, listdir\n'), ((6120, 6130), 'numpy.diff', 'np.diff', (['y'], {}), '(y)\n', (6127, 6130), True, 'import numpy as np\n'), ((8071, 8094), 'os.listdir', 'listdir', (['init_directory'], {}), '(init_directory)\n', (8078, 8094), False, 'from os import walk, listdir\n'), ((9831, 9854), 'os.listdir', 'listdir', (['init_directory'], {}), '(init_directory)\n', (9838, 9854), False, 'from os import walk, listdir\n'), ((7161, 7184), 'os.path.join', 'join', (['dirName', 'fileName'], {}), '(dirName, fileName)\n', (7165, 7184), False, 'from os.path import isdir, join, splitext, normpath\n'), ((9134, 9189), 'datetime.datetime.strptime', 'datetime.strptime', (['time_stamp[:-7]', '"""%Y-%m-%d %H-%M-%S"""'], {}), "(time_stamp[:-7], '%Y-%m-%d %H-%M-%S')\n", (9151, 9189), False, 'from datetime import datetime\n'), ((10488, 10519), 'os.path.join', 'join', (['directory_name', 'file_name'], {}), '(directory_name, file_name)\n', (10492, 10519), False, 'from os.path import isdir, join, splitext, normpath\n'), ((10844, 10863), 'os.path.normpath', 'normpath', (['file_name'], {}), '(file_name)\n', (10852, 10863), False, 'from os.path import isdir, join, splitext, 
normpath\n'), ((7206, 7241), 'zipfile.ZipFile', 'zipfile.ZipFile', (['zip_file_path', '"""r"""'], {}), "(zip_file_path, 'r')\n", (7221, 7241), False, 'import zipfile\n'), ((8807, 8826), 'os.path.splitext', 'splitext', (['file_name'], {}), '(file_name)\n', (8815, 8826), False, 'from os.path import isdir, join, splitext, normpath\n'), ((8852, 8871), 'os.path.splitext', 'splitext', (['file_name'], {}), '(file_name)\n', (8860, 8871), False, 'from os.path import isdir, join, splitext, normpath\n'), ((10241, 10260), 'os.path.splitext', 'splitext', (['file_name'], {}), '(file_name)\n', (10249, 10260), False, 'from os.path import isdir, join, splitext, normpath\n'), ((10286, 10305), 'os.path.splitext', 'splitext', (['file_name'], {}), '(file_name)\n', (10294, 10305), False, 'from os.path import isdir, join, splitext, normpath\n'), ((10894, 10920), 'os.path.join', 'join', (['directory', 'file_name'], {}), '(directory, file_name)\n', (10898, 10920), False, 'from os.path import isdir, join, splitext, normpath\n'), ((5488, 5529), 'dateutil.parser.parse', 'date_parser', (['date_str'], {'dayfirst': 'day_first'}), '(date_str, dayfirst=day_first)\n', (5499, 5529), True, 'from dateutil.parser import parse as date_parser\n'), ((7090, 7108), 'os.path.splitext', 'splitext', (['fileName'], {}), '(fileName)\n', (7098, 7108), False, 'from os.path import isdir, join, splitext, normpath\n'), ((9493, 9524), 'os.path.join', 'join', (['directory_name', 'file_name'], {}), '(directory_name, file_name)\n', (9497, 9524), False, 'from os.path import isdir, join, splitext, normpath\n'), ((5785, 5801), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (5799, 5801), False, 'from datetime import datetime\n'), ((7457, 7478), 'os.path.join', 'join', (['extract_to_path'], {}), '(extract_to_path)\n', (7461, 7478), False, 'from os.path import isdir, join, splitext, normpath\n'), ((7330, 7346), 'os.path.isdir', 'isdir', (['file_name'], {}), '(file_name)\n', (7335, 7346), False, 'from os.path 
import isdir, join, splitext, normpath\n'), ((7373, 7392), 'os.path.splitext', 'splitext', (['file_name'], {}), '(file_name)\n', (7381, 7392), False, 'from os.path import isdir, join, splitext, normpath\n')] |
# Last phase of the ETL: Load into an external source
import logging
import os
import pandas as pd
from config import ETL_CSV_ARGS, ETL_DATA_PATH, ETL_DATASET_CONFIG
from src.common.utils import check_df, get_folder_path
def save_dataset(dataframe, complete_path: str, csv_arguments=None) -> bool:
    """Write ``dataframe`` to ``complete_path`` as csv.

    :param dataframe: pandas DataFrame to persist
    :param complete_path: destination file path
    :param csv_arguments: optional pandas csv settings; the read-only keys
        ('engine', 'error_bad_lines', 'nrows') are stripped because
        ``DataFrame.to_csv`` does not accept them
    :return: True once the file has been written
    """
    logging.info(f"Saving dataset: {complete_path}")
    if csv_arguments:
        # Copy so the caller's dict is never mutated (the original also had a
        # mutable default argument, replaced here with None).
        args = csv_arguments.copy()
        # Pop with a default: the original popped unconditionally once
        # 'engine' was present, raising KeyError on partial dicts.
        for read_only_key in ("engine", "error_bad_lines", "nrows"):
            args.pop(read_only_key, None)
        dataframe.to_csv(complete_path, **args)
    else:
        dataframe.to_csv(complete_path)
    return True
def load(dataframe: dict) -> bool:
    """Persist the transformed datasets to csv files in the data directory.

    :param dataframe: mapping of dataset key -> pandas DataFrame (the original
        annotation said ``pd.DataFrame``, but ``.values()`` shows a dict is
        what is actually passed)
    :return: True when every dataset has been written
    """
    logging.info("Start final loading")
    folder_path = get_folder_path(".")
    data_path = os.path.join(folder_path, ETL_DATA_PATH)
    # Pair each DataFrame with its configured output file name instead of the
    # original triplicated index-by-index plumbing; zip also tolerates a
    # mismatch in counts instead of raising IndexError.
    for frame, file_name in zip(dataframe.values(), ETL_DATASET_CONFIG.values()):
        save_dataset(frame, os.path.join(data_path, file_name), ETL_CSV_ARGS)
    return True
| [
"os.path.join",
"logging.info",
"src.common.utils.get_folder_path",
"config.ETL_DATASET_CONFIG.values"
] | [((305, 353), 'logging.info', 'logging.info', (['f"""Saving dataset: {complete_path}"""'], {}), "(f'Saving dataset: {complete_path}')\n", (317, 353), False, 'import logging\n'), ((753, 788), 'logging.info', 'logging.info', (['"""Start final loading"""'], {}), "('Start final loading')\n", (765, 788), False, 'import logging\n'), ((1063, 1083), 'src.common.utils.get_folder_path', 'get_folder_path', (['"""."""'], {}), "('.')\n", (1078, 1083), False, 'from src.common.utils import check_df, get_folder_path\n'), ((1129, 1164), 'os.path.join', 'os.path.join', (['folder_path', 'data_dir'], {}), '(folder_path, data_dir)\n', (1141, 1164), False, 'import os\n'), ((1319, 1354), 'os.path.join', 'os.path.join', (['data_path', 'file1_name'], {}), '(data_path, file1_name)\n', (1331, 1354), False, 'import os\n'), ((1372, 1407), 'os.path.join', 'os.path.join', (['data_path', 'file2_name'], {}), '(data_path, file2_name)\n', (1384, 1407), False, 'import os\n'), ((1425, 1460), 'os.path.join', 'os.path.join', (['data_path', 'file3_name'], {}), '(data_path, file3_name)\n', (1437, 1460), False, 'import os\n'), ((924, 951), 'config.ETL_DATASET_CONFIG.values', 'ETL_DATASET_CONFIG.values', ([], {}), '()\n', (949, 951), False, 'from config import ETL_CSV_ARGS, ETL_DATA_PATH, ETL_DATASET_CONFIG\n')] |
#!/usr/bin/env python3
"""Algorithms for determining connected components of graphs.
Edges must be symmetric (u, v) <==> (v, u).
TODOS
-----
- Allow user input 'seed' labels
- Create generic `concomp` method (maybe use fast_sv for a few iterations then
seed that to bfs_lp_rs).
"""
__all__ = ["bfs_lp", "bfs_lp_rs", "fast_sv", "lps"]
from typing import Tuple, Union
from warnings import warn
import numpy as np
import arkouda as ak
from akgraph.util import get_perm, minimum
_WARN = ("Componenents likely incorrect.\n"
"Try again with `max_steps=None` or a higher value.\n"
"Or try a different algorithm.")
def bfs_lp(
V: ak.pdarray,
U: ak.pdarray,
randomize: bool = False,
shortcut: bool = False,
max_steps: Union[None, int] = 100,
verbose: bool = False
) -> Tuple[int, ak.pdarray]:
"""
Calculate connected components of a graph.
Uses a parallel breadth-first search and label propogation with
optional randomization and shortcutting.
Parameters
----------
V : ak.pdarray[int64]
out nodes
U : ak.pdarray[int64]
in nodes
randomized : bool (default False)
start with randomly permuted labels on each node
shortcut : bool (default False)
perform a label propogation through known components at each step
max_steps : Union[int, None] (default 100)
quit after this many steps
verbose : bool (default False)
print progress
Returns
-------
k : int
number of steps to converge
c : ak.pdarray[int64]
component label for each node
"""
n = V.max() + 1
if not V.is_sorted():
pi = ak.argsort(V)
V, U = V[pi], U[pi]
g = ak.GroupBy(U, assume_sorted=False)
c = get_perm(n) if randomize else ak.arange(n)
c_prev = ak.zeros_like(c)
k = 0
converged, n_comps = False, c.size
while not converged and n_comps > 1:
k += 1
if max_steps is not None and k > max_steps:
warn(f"Exceeded max_steps={max_steps} iterations.\n" + _WARN)
break
c_prev[:] = c[:]
cV = g.broadcast(c_prev, permute=False)
cU = cV[g.permutation]
C = minimum(cV, cU)
_, c = g.min(C)
if shortcut:
gl = ak.GroupBy(c_prev)
_, comp_labels = gl.min(c)
c = gl.broadcast(comp_labels, permute=True)
converged = (c == c_prev).all()
n_comps = ak.unique(c).size
if verbose:
print(f' k = {k}\n'
f' |C| = {ak.unique(c).size}\n')
return (k, c)
def bfs_lp_rs(V: ak.pdarray, U: ak.pdarray, max_steps=100) -> Tuple[int, ak.pdarray]:
"""BFS connected components algorithm with randomization and shortcutting."""
return bfs_lp(V, U, randomize=True, shortcut=True, max_steps=max_steps)
def fast_sv(
    V: ak.pdarray,
    U: ak.pdarray,
    max_steps: Union[int, None] = 100
) -> Tuple[int, ak.pdarray]:
    """
    Calculate connected components of a graph.

    Distributed Shiloach-Vishkin inspired algorithm.

    Parameters
    ----------
    V : ak.pdarray[int64]
        out nodes
    U : ak.pdarray[int64]
        in nodes
    max_steps : Union[int, None] (default 100)
        quit after this many steps

    Returns
    -------
    k : int
        number of steps to converge
    L : ak.pdarray[int64]
        component label for each node (minimal node in connected component)

    References
    ----------
    FastSV: a distributed-memory connected component algorithm with fast
    convergence. arXiv:1910.05971v2 (2020)

    Parallel algorithms for finding connected components using linear
    algebra. Journal of Parallel and Distributed Computing, Volume 144,
    2020, pp. 14-27.
    """
    # n assumes node ids dense in [0, max(V)] — TODO confirm (see bfs_lp).
    n = ak.max(V) + 1
    # nf/ng: next parent / next grandparent vectors; f/g: current values.
    nf, ng = ak.arange(n), ak.arange(n)
    f, g = ak.zeros_like(nf), ak.zeros_like(ng)
    if not V.is_sorted():
        pi = ak.argsort(V)
        V, U = V[pi], U[pi]
    gV = ak.GroupBy(V, assume_sorted=True)
    gU = ak.GroupBy(U, assume_sorted=False)
    k = 0
    converged, n_comps = False, nf.size
    while not converged and n_comps > 1:
        k += 1
        if max_steps is not None and k > max_steps:
            warn(f"Exceeded max_steps={max_steps} iterations.\n" + _WARN)
            break
        g[:] = ng[:]
        f[:] = nf[:]
        # hooking phase
        # f_k = A @ g using (Select2nd, min) semiring
        # NOTE(review): gV.min returns one value per unique V key — the
        # assignments below assume every node appears in V so that
        # len(f_k) == n; confirm against callers.
        grand_U = gU.broadcast(g, permute=True)
        _, f_k = gV.min(grand_U)
        f[f] = f_k  # stochastic hooking
        f = minimum(f, f_k)  # aggressive hooking
        nf = minimum(f, g)  # shortcutting
        ng = nf[nf]  # calculate grandparents
        converged = (ng == g).all()
        n_comps = ak.unique(nf).size
    return (k, nf)
def lps(
    V: ak.pdarray,
    U: ak.pdarray,
    max_steps: Union[int, None] = 100
) -> Tuple[int, ak.pdarray]:
    """
    Calculate connected components of a graph.

    Label propagation + symmetrization algorithm.

    Parameters
    ----------
    V : ak.pdarray[int64]
        out nodes
    U : ak.pdarray[int64]
        in nodes
    max_steps : Union[int, None] (default 100)
        quit after this many steps

    Returns
    -------
    k : int
        number of steps to converge
    L : ak.pdarray[int64]
        component label for each node (minimal node in connected component)

    References
    ----------
    Graph connectivity in log steps using label propagation.
    arXiv:1808.06705v4 (2021)
    """
    perm = ak.argsort(V)
    X, Y = V[perm], U[perm]
    # NOTE(review): `gy` initially groups X (sources), not Y — the name only
    # becomes accurate after the rotation inside the loop; confirm intent.
    gy = ak.GroupBy(X, assume_sorted=True)
    # Initial label per node: min(neighbour id, own id).
    lbl_nxt = minimum(*gy.min(Y))
    lbl_cur = ak.zeros_like(lbl_nxt)
    Y_nxt = ak.zeros_like(Y)
    # Rotate the three edge buffers so the loop body can reuse them in place.
    X, Y, Y_nxt = Y_nxt, X, Y
    k = 0
    converged, n_comps = False, lbl_nxt.size
    while not converged and n_comps > 1:
        k += 1
        if max_steps is not None and k > max_steps:
            warn(f"Exceeded max_steps={max_steps} iterations.\n" + _WARN)
            break
        lbl_cur[:] = lbl_nxt[:]
        X, Y, Y_nxt = Y, Y_nxt, X
        gx, gy = gy, ak.GroupBy(Y)
        # Per-edge labels of each endpoint, scattered back onto edge order.
        Lx = gx.broadcast(lbl_cur[gx.unique_keys], permute=True)
        Ly = gy.broadcast(lbl_cur[gy.unique_keys], permute=True)
        Y_nxt[:] = X[:]  # Symmetrization
        prop_mask = Y != Lx  # Label Propagation
        Y_nxt[prop_mask] = Lx[prop_mask]  # Label Propagation
        Ly[prop_mask] = minimum(Lx, Ly)[prop_mask]  # Label Propagation
        # Fold the propagated labels back: each node takes its minimum.
        y, ly = gy.min(Ly)
        lbl_nxt[y] = ly
        converged = (lbl_nxt == lbl_cur).all()
        n_comps = ak.unique(lbl_nxt).size
    return (k, lbl_nxt)
| [
"arkouda.unique",
"akgraph.util.get_perm",
"arkouda.argsort",
"akgraph.util.minimum",
"arkouda.GroupBy",
"arkouda.arange",
"warnings.warn",
"arkouda.max",
"arkouda.zeros_like"
] | [((1723, 1757), 'arkouda.GroupBy', 'ak.GroupBy', (['U'], {'assume_sorted': '(False)'}), '(U, assume_sorted=False)\n', (1733, 1757), True, 'import arkouda as ak\n'), ((1822, 1838), 'arkouda.zeros_like', 'ak.zeros_like', (['c'], {}), '(c)\n', (1835, 1838), True, 'import arkouda as ak\n'), ((4040, 4073), 'arkouda.GroupBy', 'ak.GroupBy', (['V'], {'assume_sorted': '(True)'}), '(V, assume_sorted=True)\n', (4050, 4073), True, 'import arkouda as ak\n'), ((4083, 4117), 'arkouda.GroupBy', 'ak.GroupBy', (['U'], {'assume_sorted': '(False)'}), '(U, assume_sorted=False)\n', (4093, 4117), True, 'import arkouda as ak\n'), ((5628, 5641), 'arkouda.argsort', 'ak.argsort', (['V'], {}), '(V)\n', (5638, 5641), True, 'import arkouda as ak\n'), ((5679, 5712), 'arkouda.GroupBy', 'ak.GroupBy', (['X'], {'assume_sorted': '(True)'}), '(X, assume_sorted=True)\n', (5689, 5712), True, 'import arkouda as ak\n'), ((5761, 5783), 'arkouda.zeros_like', 'ak.zeros_like', (['lbl_nxt'], {}), '(lbl_nxt)\n', (5774, 5783), True, 'import arkouda as ak\n'), ((5797, 5813), 'arkouda.zeros_like', 'ak.zeros_like', (['Y'], {}), '(Y)\n', (5810, 5813), True, 'import arkouda as ak\n'), ((1674, 1687), 'arkouda.argsort', 'ak.argsort', (['V'], {}), '(V)\n', (1684, 1687), True, 'import arkouda as ak\n'), ((1766, 1777), 'akgraph.util.get_perm', 'get_perm', (['n'], {}), '(n)\n', (1774, 1777), False, 'from akgraph.util import get_perm, minimum\n'), ((1796, 1808), 'arkouda.arange', 'ak.arange', (['n'], {}), '(n)\n', (1805, 1808), True, 'import arkouda as ak\n'), ((2207, 2222), 'akgraph.util.minimum', 'minimum', (['cV', 'cU'], {}), '(cV, cU)\n', (2214, 2222), False, 'from akgraph.util import get_perm, minimum\n'), ((3846, 3855), 'arkouda.max', 'ak.max', (['V'], {}), '(V)\n', (3852, 3855), True, 'import arkouda as ak\n'), ((3873, 3885), 'arkouda.arange', 'ak.arange', (['n'], {}), '(n)\n', (3882, 3885), True, 'import arkouda as ak\n'), ((3887, 3899), 'arkouda.arange', 'ak.arange', (['n'], {}), '(n)\n', (3896, 3899), True, 
'import arkouda as ak\n'), ((3911, 3928), 'arkouda.zeros_like', 'ak.zeros_like', (['nf'], {}), '(nf)\n', (3924, 3928), True, 'import arkouda as ak\n'), ((3930, 3947), 'arkouda.zeros_like', 'ak.zeros_like', (['ng'], {}), '(ng)\n', (3943, 3947), True, 'import arkouda as ak\n'), ((3988, 4001), 'arkouda.argsort', 'ak.argsort', (['V'], {}), '(V)\n', (3998, 4001), True, 'import arkouda as ak\n'), ((4637, 4652), 'akgraph.util.minimum', 'minimum', (['f', 'f_k'], {}), '(f, f_k)\n', (4644, 4652), False, 'from akgraph.util import get_perm, minimum\n'), ((4692, 4705), 'akgraph.util.minimum', 'minimum', (['f', 'g'], {}), '(f, g)\n', (4699, 4705), False, 'from akgraph.util import get_perm, minimum\n'), ((2009, 2070), 'warnings.warn', 'warn', (["(f'Exceeded max_steps={max_steps} iterations.\\n' + _WARN)"], {}), "(f'Exceeded max_steps={max_steps} iterations.\\n' + _WARN)\n", (2013, 2070), False, 'from warnings import warn\n'), ((2286, 2304), 'arkouda.GroupBy', 'ak.GroupBy', (['c_prev'], {}), '(c_prev)\n', (2296, 2304), True, 'import arkouda as ak\n'), ((2459, 2471), 'arkouda.unique', 'ak.unique', (['c'], {}), '(c)\n', (2468, 2471), True, 'import arkouda as ak\n'), ((4289, 4350), 'warnings.warn', 'warn', (["(f'Exceeded max_steps={max_steps} iterations.\\n' + _WARN)"], {}), "(f'Exceeded max_steps={max_steps} iterations.\\n' + _WARN)\n", (4293, 4350), False, 'from warnings import warn\n'), ((4839, 4852), 'arkouda.unique', 'ak.unique', (['nf'], {}), '(nf)\n', (4848, 4852), True, 'import arkouda as ak\n'), ((6020, 6081), 'warnings.warn', 'warn', (["(f'Exceeded max_steps={max_steps} iterations.\\n' + _WARN)"], {}), "(f'Exceeded max_steps={max_steps} iterations.\\n' + _WARN)\n", (6024, 6081), False, 'from warnings import warn\n'), ((6188, 6201), 'arkouda.GroupBy', 'ak.GroupBy', (['Y'], {}), '(Y)\n', (6198, 6201), True, 'import arkouda as ak\n'), ((6568, 6583), 'akgraph.util.minimum', 'minimum', (['Lx', 'Ly'], {}), '(Lx, Ly)\n', (6575, 6583), False, 'from akgraph.util import get_perm, 
minimum\n'), ((6733, 6751), 'arkouda.unique', 'ak.unique', (['lbl_nxt'], {}), '(lbl_nxt)\n', (6742, 6751), True, 'import arkouda as ak\n'), ((2560, 2572), 'arkouda.unique', 'ak.unique', (['c'], {}), '(c)\n', (2569, 2572), True, 'import arkouda as ak\n')] |
import numpy as np
from PIL import Image
from retina.retina import warp_image
class DatasetGenerator(object):
    """Generate transformed variants of an image dataset (e.g. MNIST) under
    several experimental 'scenarios': noise-padding (1), upscaling (2), or
    upscale-then-noise-pad (4). The transformed images end up in
    ``self.output``."""

    def __init__(self, data, output_dim=28, scenario=1, noise_var=None, common_dim=200):
        """ DatasetGenerator initialization.
        :param data: original dataset, MNIST
        :param output_dim: the dimensionality for the first transformation
        :param scenario: one of the paradigm proposed [1, 2, 4]
        :param noise_var: useful in paradigm 1, 4
        :param common_dim: dimensionality of output for scenario 4
        """
        self.data = data
        self.output_dim = output_dim
        self.scenario = scenario
        if noise_var is None:
            noise_var = 2e-1
        self.noise_var = noise_var
        # Expecting data shaped (n_samples, height, width) — square images
        # are assumed by foveation(); TODO confirm.
        n_samples, dim1, dim2 = self.data.shape
        # here we want to split
        self.n_samples = n_samples
        self.dim1 = dim1
        self.dim2 = dim2
        self.common_dim = common_dim  # we upscale and then add noise
        # Padding on each side when embedding the image in a larger canvas.
        self.edge = int((self.output_dim - self.dim1) / 2)
        if self.scenario == 4:
            # Scenario 4 pads the already-upscaled image up to common_dim.
            self.edge = int((self.common_dim - self.output_dim) / 2)
        self.output = None

    def add_noise_and_std(self):
        """ Add noise to the original image and standardize the entire image.
        The pixels for this image are between values [0,1].
        We generate the larger image, where the noise is such to be positive.
        We then standardize every image, so that its pixels distribution become Gaussian.
        """
        # Positive noise canvas (abs of Gaussian) with the image pasted in
        # the centre, then per-image standardization to zero mean / unit std.
        out = self.noise_var * np.abs(np.random.randn(self.n_samples,
                                                     2 * self.edge + self.dim1,
                                                     2 * self.edge + self.dim2))
        out[:, self.edge:self.edge+self.dim1, self.edge:self.edge+self.dim2] = self.data
        out_std = np.zeros_like(out)
        mean_ = np.mean(out, axis=(1, 2))
        std_ = np.std(out, axis=(1, 2))
        for k_, (m_, s_) in enumerate(zip(mean_, std_)):
            out_std[k_] = (out[k_] - m_) / s_
        self.output = out_std
        return self

    def upscale_std(self):
        """
        Automatic PIL upscale of the image with standardization.
        """
        new_x = np.zeros((self.n_samples, self.output_dim, self.output_dim))
        for n_, old_image_ in enumerate(self.data):
            image = Image.fromarray(old_image_)
            tmp_x = image.resize(size=(self.output_dim, self.output_dim))
            # tmp_x is a PIL Image; numpy coerces it to an array inside
            # mean/std and the arithmetic below — relies on PIL's array
            # interface, TODO confirm dtype handling for float inputs.
            tmp_std_x = (tmp_x - np.mean(tmp_x)) / np.std(tmp_x)
            new_x[n_] = tmp_std_x
        self.output = new_x
        return self

    def _upscale_no_std(self):
        """ Upscale for experiment 4 wo standardization
        """
        new_x = np.zeros((self.n_samples, self.output_dim, self.output_dim))
        for n_, old_image_ in enumerate(self.data):
            image = Image.fromarray(old_image_)
            new_x[n_] = image.resize(size=(self.output_dim, self.output_dim))
        # The upscaled size becomes the new base dimension for the
        # subsequent noise-padding step.
        self.dim1 = self.output_dim
        self.dim2 = self.output_dim
        return new_x

    def upscale_add_noise_std(self):
        """Scenario 4: upscale first, then noise-pad and standardize.
        NOTE(review): unlike the other steps this mutates ``self.data`` and
        returns None rather than ``self``."""
        upscaled_mnist = self._upscale_no_std()
        self.data = upscaled_mnist
        self.add_noise_and_std()

    def foveation(self):
        """ In the original implementation, the image is rescaled to a smaller dimension
        and then lifted to the original dimensions. We do not want to lose information.
        To prevent this we keep the scaling factor as it is, and we stick to the implementation:
        https://github.com/dicarlolab/retinawarp
        We assume here that the image has square dimension.
        :returns foveated_dataset: the output after foveation.
        """
        ret_img = np.zeros_like(self.data)
        for n_ in range(self.n_samples):
            ret_img[n_] = warp_image(self.data[n_], output_size=self.dim1, input_size=self.dim1)
        self.output = ret_img

    def run(self):
        """Dispatch to the transformation matching ``self.scenario``.
        Note: foveation() is never invoked from here."""
        if self.scenario == 1:
            self.add_noise_and_std()
        elif self.scenario == 2:
            self.upscale_std()
        elif self.scenario == 4:
            self.upscale_add_noise_std()
        else:
            raise ValueError('Nope')
| [
"numpy.mean",
"PIL.Image.fromarray",
"numpy.zeros",
"numpy.random.randn",
"retina.retina.warp_image",
"numpy.std",
"numpy.zeros_like"
] | [((1868, 1886), 'numpy.zeros_like', 'np.zeros_like', (['out'], {}), '(out)\n', (1881, 1886), True, 'import numpy as np\n'), ((1903, 1928), 'numpy.mean', 'np.mean', (['out'], {'axis': '(1, 2)'}), '(out, axis=(1, 2))\n', (1910, 1928), True, 'import numpy as np\n'), ((1944, 1968), 'numpy.std', 'np.std', (['out'], {'axis': '(1, 2)'}), '(out, axis=(1, 2))\n', (1950, 1968), True, 'import numpy as np\n'), ((2255, 2315), 'numpy.zeros', 'np.zeros', (['(self.n_samples, self.output_dim, self.output_dim)'], {}), '((self.n_samples, self.output_dim, self.output_dim))\n', (2263, 2315), True, 'import numpy as np\n'), ((2756, 2816), 'numpy.zeros', 'np.zeros', (['(self.n_samples, self.output_dim, self.output_dim)'], {}), '((self.n_samples, self.output_dim, self.output_dim))\n', (2764, 2816), True, 'import numpy as np\n'), ((3748, 3772), 'numpy.zeros_like', 'np.zeros_like', (['self.data'], {}), '(self.data)\n', (3761, 3772), True, 'import numpy as np\n'), ((2389, 2416), 'PIL.Image.fromarray', 'Image.fromarray', (['old_image_'], {}), '(old_image_)\n', (2404, 2416), False, 'from PIL import Image\n'), ((2889, 2916), 'PIL.Image.fromarray', 'Image.fromarray', (['old_image_'], {}), '(old_image_)\n', (2904, 2916), False, 'from PIL import Image\n'), ((3840, 3910), 'retina.retina.warp_image', 'warp_image', (['self.data[n_]'], {'output_size': 'self.dim1', 'input_size': 'self.dim1'}), '(self.data[n_], output_size=self.dim1, input_size=self.dim1)\n', (3850, 3910), False, 'from retina.retina import warp_image\n'), ((1565, 1654), 'numpy.random.randn', 'np.random.randn', (['self.n_samples', '(2 * self.edge + self.dim1)', '(2 * self.edge + self.dim2)'], {}), '(self.n_samples, 2 * self.edge + self.dim1, 2 * self.edge +\n self.dim2)\n', (1580, 1654), True, 'import numpy as np\n'), ((2542, 2555), 'numpy.std', 'np.std', (['tmp_x'], {}), '(tmp_x)\n', (2548, 2555), True, 'import numpy as np\n'), ((2524, 2538), 'numpy.mean', 'np.mean', (['tmp_x'], {}), '(tmp_x)\n', (2531, 2538), True, 'import numpy as 
np\n')] |
import os
import sqlite3
from typing import Optional
from loguru import logger
get_relative_path = lambda p: os.path.join(os.path.dirname(__file__), p)
class DataBase(object):
    """SQLite-backed store for per-user settings rows.

    On first use the backing file ``../data/data.db`` (relative to this
    module) is created together with a single ``settings`` table.

    NOTE(review): ``check_same_thread=False`` permits cross-thread use but
    no locking is done here — callers are assumed to serialize access.
    """

    def __init__(self):
        logger.info("Connecting database...")
        db_path = get_relative_path('../data/data.db')
        # Check existence *before* connecting: sqlite3.connect() creates an
        # empty file, which would make the table-creation check misfire.
        already_initialized = os.path.exists(db_path)
        self.conn = sqlite3.connect(db_path, check_same_thread=False)
        self.cursor = self.conn.cursor()
        if not already_initialized:
            self.cursor.execute("CREATE TABLE settings (uuid text, username text, data json);")

    def __del__(self):
        # Best-effort cleanup; ordering of __del__ at interpreter exit is
        # not guaranteed.
        logger.info("Disconnecting database...")
        self.cursor.close()
        self.conn.commit()
        self.conn.close()

    def create(self, uuid: str):
        """Insert a fresh, empty settings row for *uuid*."""
        row = (uuid, None, None)
        self.cursor.execute('INSERT INTO settings VALUES (?, ?, ?);', row)
        self.conn.commit()

    def update_settings(self, uuid: str, data: str):
        """Replace the JSON settings blob stored for *uuid*."""
        self.cursor.execute('UPDATE settings SET data = ? WHERE uuid = ?;', (data, uuid))
        self.conn.commit()

    def update_username(self, uuid: str, username: str):
        """Replace the username stored for *uuid*."""
        self.cursor.execute('UPDATE settings SET username = ? WHERE uuid = ?;', (username, uuid))
        self.conn.commit()

    def read_all(self):
        """Return every row of the settings table as a list of tuples."""
        self.cursor.execute('SELECT * FROM settings;')
        return self.cursor.fetchall()
| [
"os.path.dirname",
"loguru.logger.info"
] | [((124, 149), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (139, 149), False, 'import os\n'), ((211, 248), 'loguru.logger.info', 'logger.info', (['"""Connecting database..."""'], {}), "('Connecting database...')\n", (222, 248), False, 'from loguru import logger\n'), ((678, 718), 'loguru.logger.info', 'logger.info', (['"""Disconnecting database..."""'], {}), "('Disconnecting database...')\n", (689, 718), False, 'from loguru import logger\n')] |
import os
import sys
import re
def parse_sample_files(sample_files, names):
    """Map sample names to their count-file paths.

    *sample_files* is a comma-separated list of paths. *names*, when given,
    is a comma-separated list of matching sample names; when None, each
    file's basename (without extension) is used instead. Exits via
    ``sys.exit`` on the first missing file.
    """
    paths = sample_files.split(',')
    if names is None:
        labels = [os.path.splitext(os.path.basename(path))[0] for path in paths]
    else:
        labels = names.split(',')
    mapping = {}
    for path, label in zip(paths, labels):
        if not os.path.isfile(path):
            sys.exit("Error: sample file " + path + " not found")
        mapping[label] = path
    return mapping
def collapse_feature_counts(sample_files, out_file, names):
    """Merge per-sample feature-count files into one long-format TSV.

    Writes a ``sample\\tfeature\\tcell\\tcount`` header followed by one row
    per input line. Input files that already carry a ``cell`` column are
    passed through with the sample name prepended; plain
    ``feature<TAB>count`` tables get an empty cell field inserted.

    Fixes over the previous version: files are opened with context managers
    (no leaked handles on error), and the output newline is written
    explicitly instead of relying on ``\\n`` surviving ``split('\\t')`` —
    the old code dropped the newline on a file's final unterminated line.
    """
    sample_file_dict = parse_sample_files(sample_files, names)
    with open(out_file, 'w') as o:
        print("sample", "feature", "cell", "count", sep="\t", file=o)
        for sample, count_file in sample_file_dict.items():
            with open(count_file, 'r') as i:
                header = i.readline()
                if re.search("\tcell\t", header):
                    # Already has a cell column: prepend the sample name.
                    for line in i:
                        o.write(sample + '\t' + line)
                else:
                    # feature<TAB>count format: insert an empty cell field.
                    for line in i:
                        l = line.rstrip('\n').split('\t')
                        o.write('\t'.join([sample, l[0], "", l[1]]) + '\n')
if __name__ == "__main__":
    ##############################################################################
    # CLI entry point: parse arguments and run the collapse.
    import argparse
    parser = argparse.ArgumentParser(description='Combine multiple feature barcode count files')
    # Positional arguments (note: output path comes first on the command line).
    parser.add_argument('out_file', help = 'Where to write the CSV output file', type=str)
    parser.add_argument('sample_files', help='Comma-separated list of sample count files', type=str)
    # Optional arguments
    parser.add_argument('--names', help='Comma-separated list of sample names',
                        default=None, type=str)
    args = parser.parse_args()
    ##############################################################################
    collapse_feature_counts(args.sample_files, args.out_file, args.names)
| [
"argparse.ArgumentParser",
"os.path.isfile",
"os.path.basename",
"sys.exit",
"re.search"
] | [((1406, 1494), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Combine multiple feature barcode count files"""'}), "(description=\n 'Combine multiple feature barcode count files')\n", (1429, 1494), False, 'import argparse\n'), ((953, 982), 're.search', 're.search', (['"""\tcell\t"""', 'header'], {}), "('\\tcell\\t', header)\n", (962, 982), False, 'import re\n'), ((421, 448), 'os.path.isfile', 'os.path.isfile', (['sample_file'], {}), '(sample_file)\n', (435, 448), False, 'import os\n'), ((462, 522), 'sys.exit', 'sys.exit', (["('Error: sample file ' + sample_file + ' not found')"], {}), "('Error: sample file ' + sample_file + ' not found')\n", (470, 522), False, 'import sys\n'), ((207, 236), 'os.path.basename', 'os.path.basename', (['sample_file'], {}), '(sample_file)\n', (223, 236), False, 'import os\n')] |
# Smoke-test / demo script for the FeatureProcess wrappers.
# NOTE(review): the classes below look like thin wrappers over scikit-learn
# (StandardScaler / OneHotEncoder / PCA / VarianceThreshold) — confirm
# against the FeatureProcess module.
from FeatureProcess import *
import pandas as pd
import numpy as np
# Standardization demo: fit on a tiny 2-column dataset, print mean and
# the transformed values.
fsd = FeaturesStandard()
data = [[0, 0], [0, 0], [1, 1], [1, 1]]
scr=fsd.fit(data)
print(scr.mean_)
print(scr.transform(data))
print('--------------------')
# One-hot-encoding demo: unknown categories are ignored at transform time.
fe = FeaturesEncoder(handle_unknown='ignore')
X = [['Male', 1], ['Female', 3], ['Female', 2]]
enc =fe.fit(X)
print(enc.categories_)
print(enc.transform([['Female', 1], ['Male', 4]]).toarray())
print(enc.inverse_transform([[0, 1, 1, 0, 0], [0, 0, 0, 1, 0]]))
print(enc.get_feature_names(['gender', 'group']))
# Decomposition demo: project 2-D points onto 2 principal components.
fd = FeaturesDecomposition(n_components=2)
X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
p = fd.fit(X)
print(p.explained_variance_ratio_)
print(p.singular_values_)
# Feature-selection demo: drop low-variance boolean columns
# (threshold = p*(1-p) with p = 0.8).
fs = FeaturesSelection(threshold=(.8 * (1 - .8)))
X = [[0, 0, 1], [0, 1, 0], [1, 0, 0], [0, 1, 1], [0, 1, 0], [0, 1, 1]]
fs.fit(X)
X = fs.transform(X)
print(X)
| [
"numpy.array"
] | [((586, 650), 'numpy.array', 'np.array', (['[[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]]'], {}), '([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])\n', (594, 650), True, 'import numpy as np\n')] |
import matplotlib.pyplot as plt
import numpy as np
import os
import pandas as pd
from pathlib import Path
import ptitprince as pt
# ----------
# Loss Plots
# ----------
def save_loss_plot(path, loss_function, v_path=None, show=True):
    """Load a training-loss CSV (plus optional validation CSV) and save the
    resulting figure next to the input as ``<stem>_loss.png``.

    :param path: training-loss CSV path
    :param loss_function: y-axis label naming the loss
    :param v_path: optional validation-loss CSV path
    :param show: display the figure interactively
    """
    df = pd.read_csv(path)
    vdf = pd.read_csv(v_path) if v_path is not None else None
    source = Path(path)
    out_path = os.path.join(source.parents[0], source.stem + '_loss.png')
    fig, ax = plot_loss(df, vdf=vdf, x_lab='Iteration', y_lab=loss_function,
                        save=out_path, show=show)
def plot_loss(df, vdf=None, x_lab='Iteration', y_lab='BCE Loss', save=None, show=True):
    """Plot per-iteration training loss with epoch-end markers and, when
    *vdf* is given, an overlaid validation-loss curve.

    :param df: training-loss frame with 'Unnamed: 0' (iteration index),
        'loss' and 'epoch' columns
    :param vdf: optional validation frame with 'validation_loss' (and
        'batch_id' when recorded more than once per epoch)
    :param save: output path; figure is saved when not None
    :param show: display the figure interactively
    :returns: (fig, ax)
    """
    x = df['Unnamed: 0'].values
    y = df['loss'].values
    epochs = len(df['epoch'].unique())
    # Assumes every epoch ran the same number of batches — TODO confirm.
    no_batches = int(len(x) / epochs)
    # Index of the last batch of each epoch, used for the scatter markers.
    epoch_ends = np.array([((i + 1) * no_batches) - 1 for i in range(epochs)])
    epoch_end_x = x[epoch_ends]
    epoch_end_y = y[epoch_ends]
    fig, ax = plt.subplots()
    leg = ['loss',]
    ax.plot(x, y, linewidth=2)
    ax.scatter(epoch_end_x, epoch_end_y)
    title = 'Training loss'
    if vdf is not None:
        if len(vdf) > epochs:
            # Validation recorded several times per epoch: average per batch.
            vy = vdf.groupby('batch_id').mean()['validation_loss'].values
            vx = vdf['batch_id'].unique()
        else:
            # One validation value per epoch: align with the epoch-end x's.
            vy = vdf['validation_loss'].values
            vx = epoch_end_x
        title = title + ' with validation loss'
        leg.append('validation loss')
        if len(vdf) > epochs:
            #vy_err = v_df.groupby('batch_id').sem()['validation_loss'].values
            #ax.errorbar(vx, vy, vy_err, marker='.')
            ax.plot(vx, vy, linewidth=2, marker='o')
        else:
            ax.plot(vx, vy, linewidth=2, marker='o')
    ax.set(xlabel=x_lab, ylabel=y_lab)
    ax.set_title(title)
    ax.legend(leg)
    fig.set_size_inches(13, 9)
    if save is not None:
        plt.savefig(save, dpi=300)
    if show:
        plt.show()
    return fig, ax
def save_channel_loss_plot(path, show=True):
    """Load a per-channel loss CSV and save the 2x2 channel-loss figure next
    to the input as ``<stem>_channel-loss.png``."""
    df = pd.read_csv(path)
    source = Path(path)
    out_path = os.path.join(source.parents[0], source.stem + '_channel-loss.png')
    fig, ax = plot_channel_losses(df, save=out_path, show=show)
def plot_channel_losses(df, x_lab='Iteration', y_lab='BCE Loss', save=None, show=True):
    """Plot per-channel losses on a 2x2 grid: z-affinities, y-affinities,
    x-affinities and centreness, grouped by column-name prefix.

    :param df: frame with 'Unnamed: 0' (iteration) plus one column per
        channel; channel columns start with 'z', 'y', 'x' or 'centre'
    :param save: output path; figure is saved when not None
    :param show: display the figure interactively
    :returns: (fig, axs)
    """
    cols = list(df.columns)
    x = df['Unnamed: 0'].values
    # Everything except these bookkeeping columns is a channel-loss series.
    non_channel_cols = ['Unnamed: 0', 'epoch', 'batch_num', 'loss', 'data_id']
    channel_losses = [col for col in cols if col not in non_channel_cols]
    fig, axs = plt.subplots(2, 2)
    # Track series per panel so each gets a distinct linestyle and legend entry.
    zs, ys, xs, cs = [], [], [], []
    for col in channel_losses:
        y = df[col].values
        if col.startswith('z'):
            ls = _get_linestyle(zs)
            axs[0, 0].plot(x, y, linewidth=1, linestyle=ls)
            zs.append(col)
        if col.startswith('y'):
            ls = _get_linestyle(ys)
            axs[0, 1].plot(x, y, linewidth=1, linestyle=ls)
            ys.append(col)
        if col.startswith('x'):
            ls = _get_linestyle(xs)
            axs[1, 0].plot(x, y, linewidth=1, linestyle=ls)
            xs.append(col)
        if col.startswith('centre'):
            ls = _get_linestyle(cs)
            axs[1, 1].plot(x, y, linewidth=1, linestyle=ls)
            cs.append(col)
    axs[0, 0].set_title('Z affinities losses')
    axs[0, 0].legend(zs)
    axs[0, 1].set_title('Y affinities losses')
    axs[0, 1].legend(ys)
    axs[1, 0].set_title('X affinities losses')
    axs[1, 0].legend(xs)
    axs[1, 1].set_title('Centreness losses')
    axs[1, 1].legend(cs)
    for ax in axs.flat:
        ax.set(xlabel=x_lab, ylabel=y_lab)
    fig.set_size_inches(13, 9)
    if save is not None:
        plt.savefig(save, dpi=300)
    if show:
        plt.show()
    return fig, axs
def _get_linestyle(lis):
    """Pick a linestyle for the next plotted series: solid for the first,
    dashed for the second, dotted for any after that."""
    count = len(lis)
    if count < 2:
        return ('-', '--')[count]
    return ':'
# --------
# VI Plots
# --------
def VI_plot(
    path,
    cond_ent_over="GT | Output",
    cond_ent_under="Output | GT",
    lab="",
    save=False,
    show=True):
    """Raincloud plot of the two conditional-entropy halves of the
    Variation of Information for one results CSV.

    :param path: CSV with columns named by *cond_ent_over* (over-segmentation
        term) and *cond_ent_under* (under-segmentation term)
    :param lab: extra label inserted into the saved file name
    :param save: when truthy, save ``<stem><lab>_VI_rainclout_plot.png``
        next to the input CSV
    :param show: display the figure interactively
    """
    df = pd.read_csv(path)
    overseg = df[cond_ent_over].values
    o_groups = [cond_ent_over] * len(overseg)
    underseg = df[cond_ent_under].values
    u_groups = [cond_ent_under] * len(underseg)
    # Long format: one grouping column, one value column.
    groups = o_groups + u_groups
    x = 'Variation of information'
    y = 'Conditional entropy'
    data = {
        x : groups,
        y : np.concatenate([overseg, underseg])
    }
    data = pd.DataFrame(data)
    o = 'h'  # horizontal orientation
    pal = 'Set2'
    sigma = .2  # kernel bandwidth for the half-violin
    f, ax = plt.subplots(figsize=(12, 10))
    pt.RainCloud(x = x, y = y, data = data, palette = pal, bw = sigma,
                 width_viol = .6, ax = ax, orient = o)
    p = Path(path)
    plt.title(p.stem)
    if save:
        save_path = os.path.join(p.parents[0], p.stem + lab + '_VI_rainclout_plot.png')
        plt.savefig(save_path, bbox_inches='tight')
    if show:
        plt.show()
def experiment_VI_plots(
    paths,
    names,
    title,
    out_name,
    out_dir,
    cond_ent_over="GT | Output",
    cond_ent_under="Output | GT",
    save=True,
    show=True,
    ):
    """Side-by-side raincloud plots of the two VI conditional-entropy terms
    across several experiments.

    :param paths: list of per-experiment result CSVs
    :param names: experiment name for each path (same length as *paths*)
    :param title: figure title
    :param out_name: prefix for the saved file name
    :param out_dir: directory to save the figure into
    :param save: save the figure (new keyword; the old code referenced an
        undefined global ``save``)
    :param show: display the figure (likewise previously an undefined global)
    """
    groups = []
    ce0 = []
    ce1 = []
    for i, p in enumerate(paths):
        df = pd.read_csv(p)
        ce0.append(df[cond_ent_over].values)
        ce1.append(df[cond_ent_under].values)
        groups += [names[i]] * len(df)
    x = 'Experiment'
    data = {
        x : groups,
        cond_ent_over : np.concatenate(ce0),
        cond_ent_under : np.concatenate(ce1)
    }
    data = pd.DataFrame(data)
    # plt.subplots(1, 2) returns a 1-D axes array, so index with axs[0] /
    # axs[1] — the previous axs[0, 0] raised IndexError.
    f, axs = plt.subplots(1, 2, figsize=(12, 10))
    ax0, ax1 = axs[0], axs[1]
    o = 'h'
    pal = 'Set2'
    sigma = .2
    pt.RainCloud(x = x, y = cond_ent_over, data = data, palette = pal, bw = sigma,
                 width_viol = .6, ax = ax0, orient = o)
    pt.RainCloud(x = x, y = cond_ent_under, data = data, palette = pal, bw = sigma,
                 width_viol = .6, ax = ax1, orient = o)
    plt.title(title)
    if save:
        # out_name distinguishes outputs from different experiment sets
        # (it was previously accepted but never used).
        save_path = os.path.join(out_dir, out_name + '_VI_rainclould_plots.png')
        plt.savefig(save_path, bbox_inches='tight')
    if show:
        plt.show()
if __name__ == '__main__':
    # Ad-hoc driver with hard-coded local paths for one training run;
    # adjust name/dir_ to plot a different run.
    #name = 'loss_z-1_z-2_y-1_y-2_y-3_x-1_x-2_x-3_c_cl.csv'
    name = 'loss_210401_150158_z-1_y-1_x-1__wBCE2-1-1.csv'
    #dir_ = '/Users/amcg0011/Data/pia-tracking/cang_training/210331_training_0'
    dir_ = '/Users/amcg0011/Data/pia-tracking/cang_training/210401_150158_z-1_y-1_x-1__wBCE2-1-1'
    path = os.path.join(dir_, name)
    save_channel_loss_plot(path)
    #v_name = 'validation-loss_z-1_z-2_y-1_y-2_y-3_x-1_x-2_x-3_c_cl.csv'
    v_name = 'validation-loss_210401_150158_z-1_y-1_x-1__wBCE2-1-1.csv'
    v_path = os.path.join(dir_, v_name)
    loss_function = 'Weighted BCE Loss (2, 1, 1)'
    save_loss_plot(path, loss_function, v_path)
| [
"matplotlib.pyplot.savefig",
"pandas.read_csv",
"pathlib.Path",
"ptitprince.RainCloud",
"os.path.join",
"numpy.concatenate",
"pandas.DataFrame",
"matplotlib.pyplot.title",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] | [((245, 262), 'pandas.read_csv', 'pd.read_csv', (['path'], {}), '(path)\n', (256, 262), True, 'import pandas as pd\n'), ((361, 371), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (365, 371), False, 'from pathlib import Path\n'), ((423, 455), 'os.path.join', 'os.path.join', (['d', "(n + '_loss.png')"], {}), "(d, n + '_loss.png')\n", (435, 455), False, 'import os\n'), ((942, 956), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (954, 956), True, 'import matplotlib.pyplot as plt\n'), ((1986, 2003), 'pandas.read_csv', 'pd.read_csv', (['path'], {}), '(path)\n', (1997, 2003), True, 'import pandas as pd\n'), ((2012, 2022), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (2016, 2022), False, 'from pathlib import Path\n'), ((2074, 2114), 'os.path.join', 'os.path.join', (['d', "(n + '_channel-loss.png')"], {}), "(d, n + '_channel-loss.png')\n", (2086, 2114), False, 'import os\n'), ((2498, 2516), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(2)', '(2)'], {}), '(2, 2)\n', (2510, 2516), True, 'import matplotlib.pyplot as plt\n'), ((4114, 4131), 'pandas.read_csv', 'pd.read_csv', (['path'], {}), '(path)\n', (4125, 4131), True, 'import pandas as pd\n'), ((4507, 4525), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {}), '(data)\n', (4519, 4525), True, 'import pandas as pd\n'), ((4582, 4612), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(12, 10)'}), '(figsize=(12, 10))\n', (4594, 4612), True, 'import matplotlib.pyplot as plt\n'), ((4617, 4711), 'ptitprince.RainCloud', 'pt.RainCloud', ([], {'x': 'x', 'y': 'y', 'data': 'data', 'palette': 'pal', 'bw': 'sigma', 'width_viol': '(0.6)', 'ax': 'ax', 'orient': 'o'}), '(x=x, y=y, data=data, palette=pal, bw=sigma, width_viol=0.6, ax\n =ax, orient=o)\n', (4629, 4711), True, 'import ptitprince as pt\n'), ((4747, 4757), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (4751, 4757), False, 'from pathlib import Path\n'), ((4762, 4779), 'matplotlib.pyplot.title', 'plt.title', (['p.stem'], {}), 
'(p.stem)\n', (4771, 4779), True, 'import matplotlib.pyplot as plt\n'), ((5556, 5574), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {}), '(data)\n', (5568, 5574), True, 'import pandas as pd\n'), ((5588, 5624), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(2)'], {'figsize': '(12, 10)'}), '(1, 2, figsize=(12, 10))\n', (5600, 5624), True, 'import matplotlib.pyplot as plt\n'), ((5713, 5819), 'ptitprince.RainCloud', 'pt.RainCloud', ([], {'x': 'x', 'y': 'cond_ent_over', 'data': 'data', 'palette': 'pal', 'bw': 'sigma', 'width_viol': '(0.6)', 'ax': 'ax0', 'orient': 'o'}), '(x=x, y=cond_ent_over, data=data, palette=pal, bw=sigma,\n width_viol=0.6, ax=ax0, orient=o)\n', (5725, 5819), True, 'import ptitprince as pt\n'), ((5852, 5959), 'ptitprince.RainCloud', 'pt.RainCloud', ([], {'x': 'x', 'y': 'cond_ent_under', 'data': 'data', 'palette': 'pal', 'bw': 'sigma', 'width_viol': '(0.6)', 'ax': 'ax1', 'orient': 'o'}), '(x=x, y=cond_ent_under, data=data, palette=pal, bw=sigma,\n width_viol=0.6, ax=ax1, orient=o)\n', (5864, 5959), True, 'import ptitprince as pt\n'), ((5992, 6008), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (6001, 6008), True, 'import matplotlib.pyplot as plt\n'), ((6514, 6538), 'os.path.join', 'os.path.join', (['dir_', 'name'], {}), '(dir_, name)\n', (6526, 6538), False, 'import os\n'), ((6730, 6756), 'os.path.join', 'os.path.join', (['dir_', 'v_name'], {}), '(dir_, v_name)\n', (6742, 6756), False, 'import os\n'), ((304, 323), 'pandas.read_csv', 'pd.read_csv', (['v_path'], {}), '(v_path)\n', (315, 323), True, 'import pandas as pd\n'), ((1851, 1877), 'matplotlib.pyplot.savefig', 'plt.savefig', (['save'], {'dpi': '(300)'}), '(save, dpi=300)\n', (1862, 1877), True, 'import matplotlib.pyplot as plt\n'), ((1899, 1909), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1907, 1909), True, 'import matplotlib.pyplot as plt\n'), ((3653, 3679), 'matplotlib.pyplot.savefig', 'plt.savefig', (['save'], {'dpi': '(300)'}), '(save, 
dpi=300)\n', (3664, 3679), True, 'import matplotlib.pyplot as plt\n'), ((3701, 3711), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3709, 3711), True, 'import matplotlib.pyplot as plt\n'), ((4450, 4485), 'numpy.concatenate', 'np.concatenate', (['[overseg, underseg]'], {}), '([overseg, underseg])\n', (4464, 4485), True, 'import numpy as np\n'), ((4813, 4880), 'os.path.join', 'os.path.join', (['p.parents[0]', "(p.stem + lab + '_VI_rainclout_plot.png')"], {}), "(p.parents[0], p.stem + lab + '_VI_rainclout_plot.png')\n", (4825, 4880), False, 'import os\n'), ((4889, 4932), 'matplotlib.pyplot.savefig', 'plt.savefig', (['save_path'], {'bbox_inches': '"""tight"""'}), "(save_path, bbox_inches='tight')\n", (4900, 4932), True, 'import matplotlib.pyplot as plt\n'), ((4954, 4964), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4962, 4964), True, 'import matplotlib.pyplot as plt\n'), ((5248, 5262), 'pandas.read_csv', 'pd.read_csv', (['p'], {}), '(p)\n', (5259, 5262), True, 'import pandas as pd\n'), ((5472, 5491), 'numpy.concatenate', 'np.concatenate', (['ce0'], {}), '(ce0)\n', (5486, 5491), True, 'import numpy as np\n'), ((5519, 5538), 'numpy.concatenate', 'np.concatenate', (['ce1'], {}), '(ce1)\n', (5533, 5538), True, 'import numpy as np\n'), ((6042, 6091), 'os.path.join', 'os.path.join', (['out_dir', '"""_VI_rainclould_plots.png"""'], {}), "(out_dir, '_VI_rainclould_plots.png')\n", (6054, 6091), False, 'import os\n'), ((6100, 6143), 'matplotlib.pyplot.savefig', 'plt.savefig', (['save_path'], {'bbox_inches': '"""tight"""'}), "(save_path, bbox_inches='tight')\n", (6111, 6143), True, 'import matplotlib.pyplot as plt\n'), ((6165, 6175), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6173, 6175), True, 'import matplotlib.pyplot as plt\n')] |
import argparse
import asyncio
from getpass import getuser
from .mxtunnel import open_tunnel
def main():
    """CLI entry point: open an mx-tunnel exposing a local HTTP server.

    Parses ``-p/--port`` (required) and ``-V/--version``; runs the
    websocket tunnel until interrupted with Ctrl-C.
    """
    parser = argparse.ArgumentParser(description='Live And HTTPS Localhost')
    parser.add_argument('-p', '--port', type=int, default=False, help='Port number of the local server')
    parser.add_argument('-V', '--version', action='store_true', help='Version number of jpmx-tunnel')
    args = parser.parse_args()
    if args.version:
        print("0.1.8")
        return
    if not args.port:
        print("Please specify -p/--port argument and port.")
        return
    loop = asyncio.get_event_loop()
    try:
        # open_tunnel(...) returns an awaitable; hand it straight to the
        # event loop. (The previous `await` here was a SyntaxError because
        # main() is not an async function.)
        loop.run_until_complete(
            open_tunnel(
                ws_uri=f'wss://mx.tunnel.messengerx.io/_ws/?username=&port={args.port}',
                http_uri=f'http://127.0.0.1:{args.port}'
            )
        )
    except KeyboardInterrupt:
        print("\nmx-tunnel tunnel closed")
| [
"asyncio.get_event_loop",
"argparse.ArgumentParser"
] | [((120, 183), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Live And HTTPS Localhost"""'}), "(description='Live And HTTPS Localhost')\n", (143, 183), False, 'import argparse\n'), ((651, 675), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (673, 675), False, 'import asyncio\n')] |
from torch import nn
from torch.nn import functional as F
from .norm_module import *
# adopted from https://github.com/rosinality/stylegan2-pytorch/blob/master/model.py#L280
class NoiseInjection(nn.Module):
    """Add Gaussian noise to an image tensor, scaled by a learned weight.

    The scale is ``softplus(seed)`` with seed initialised to 0, i.e. the
    initial scale is ln(2). With ``full=True`` independent noise is drawn
    per channel; otherwise one noise plane is broadcast over all channels.
    """

    def __init__(self, full=False):
        super().__init__()
        self.noise_weight_seed = nn.Parameter(torch.tensor(0.0))
        self.full = full

    def forward(self, image, noise=None):
        if noise is None:
            batch, channel, height, width = image.shape
            noise_channels = channel if self.full else 1
            noise = image.new_empty(batch, noise_channels, height, width).normal_()
        scale = F.softplus(self.noise_weight_seed)
        return image + scale * noise
class ChannelwiseNoiseInjection(nn.Module):
    """Noise injection with an independent learnable amplitude per channel."""

    def __init__(self, num_channels, full=False):
        super().__init__()
        # One softplus-activated weight per channel, broadcast over H and W.
        self.noise_weight_seed = nn.Parameter(torch.zeros((1, num_channels, 1, 1)))
        self.num_channels = num_channels
        self.full = full

    def forward(self, image, noise=None):
        if noise is None:
            b, c, h, w = image.shape
            noise = image.new_empty(b, c if self.full else 1, h, w).normal_()
        return image + F.softplus(self.noise_weight_seed) * noise

    def __repr__(self):
        return '{}({})'.format(self.__class__.__name__, self.num_channels)
class ResBlockG(nn.Module):
    """Generator residual block.

    Residual path: adaptive norm -> LeakyReLU -> (optional 2x nearest
    upsample) -> conv -> channel noise -> adaptive norm -> LeakyReLU ->
    conv -> channel noise.  The result is scaled by a learnable ``alpha``
    (initialised at 0) and added to the shortcut, which is projected by a
    1x1 conv whenever the channel count or resolution changes.
    """

    def __init__(self, in_ch, out_ch, h_ch=None, ksize=3, pad=1, upsample=False, num_w=128):
        super().__init__()
        self.upsample = upsample
        # Hidden width defaults to the output width.
        self.h_ch = h_ch if h_ch else out_ch
        self.conv1 = conv2d(in_ch, self.h_ch, ksize, pad=pad, bias=False)
        self.conv2 = conv2d(self.h_ch, out_ch, ksize, pad=pad, bias=False)
        self.b1 = SpatialAdaptiveSynBatchGroupNorm2d(in_ch, num_w=num_w)
        self.b2 = SpatialAdaptiveSynBatchGroupNorm2d(self.h_ch, num_w=num_w)
        self.learnable_sc = in_ch != out_ch or upsample
        if self.learnable_sc:
            self.c_sc = conv2d(in_ch, out_ch, 1, 1, 0)
        self.activation = nn.LeakyReLU(0.01)
        self.alpha = nn.Parameter(torch.tensor(0.0))
        self.out_ch = out_ch
        self.noise1 = ChannelwiseNoiseInjection(self.h_ch)
        self.noise2 = ChannelwiseNoiseInjection(out_ch)

    def residual(self, in_feat, w, bbox):
        h = self.activation(self.b1(in_feat, w, bbox))
        if self.upsample:
            h = F.interpolate(h, scale_factor=2, mode='nearest')
        h = self.noise1(self.conv1(h))
        h = self.activation(self.b2(h, w, bbox))
        return self.noise2(self.conv2(h))

    def shortcut(self, x):
        if not self.learnable_sc:
            return x
        if self.upsample:
            x = F.interpolate(x, scale_factor=2, mode='nearest')
        return self.c_sc(x)

    def forward(self, in_feat, w, bbox):
        return self.alpha * self.residual(in_feat, w, bbox) + self.shortcut(in_feat)
class ResBlockD(nn.Module):
    """Discriminator residual block (pre-activation, optional avg-pool).

    Output is ``alpha.clamp(-1, 1) * residual(x) + shortcut(x)``; the
    shortcut gets a 1x1 projection when channels change or downsampling
    is enabled.
    """

    def __init__(self, in_ch, out_ch, ksize=3, pad=1, downsample=False):
        super().__init__()
        self.conv1 = conv2d(in_ch, out_ch, ksize, 1, pad)
        self.conv2 = conv2d(out_ch, out_ch, ksize, 1, pad)
        self.activation = nn.LeakyReLU(0.01)
        self.downsample = downsample
        self.learnable_sc = (in_ch != out_ch) or downsample
        if self.learnable_sc:
            self.c_sc = conv2d(in_ch, out_ch, 1, 1, 0)
        self.alpha = nn.Parameter(torch.tensor(0.0))

    def residual(self, in_feat):
        h = self.conv1(self.activation(in_feat))
        h = self.conv2(self.activation(h))
        return F.avg_pool2d(h, 2) if self.downsample else h

    def shortcut(self, x):
        out = self.c_sc(x) if self.learnable_sc else x
        if self.downsample:
            out = F.avg_pool2d(out, 2)
        return out

    def forward(self, in_feat):
        # alpha is clamped so the residual branch stays bounded.
        return self.alpha.clamp(-1, 1) * self.residual(in_feat) + self.shortcut(in_feat)
def conv2d(in_feat, out_feat, kernel_size=3, stride=1, pad=1, spectral_norm=True, bias=True):
    """Build a Conv2d layer, optionally wrapped in spectral normalisation.

    :param in_feat: number of input channels
    :param out_feat: number of output channels
    :param spectral_norm: when True, apply ``nn.utils.spectral_norm``
        (with ``eps=1e-4``, matching the original numerical setting)
    :return: the (possibly wrapped) ``nn.Conv2d`` module
    """
    layer = nn.Conv2d(in_feat, out_feat, kernel_size, stride, pad, bias=bias)
    if not spectral_norm:
        return layer
    return nn.utils.spectral_norm(layer, eps=1e-4)
class MaskRegressBlock(nn.Module):
    """Residual refinement block: ``x + alpha * Conv(LeakyReLU(BN(x)))``."""

    def __init__(self, channels, kernel_size=3, bias=False):
        super().__init__()
        layers = [
            nn.BatchNorm2d(channels),
            nn.LeakyReLU(0.01),
            conv2d(channels, channels, kernel_size, bias=bias),
        ]
        self.conv = nn.Sequential(*layers)
        # Residual contribution starts at zero and is learned.
        self.alpha = nn.Parameter(torch.tensor(0.0))

    def forward(self, x):
        return x + self.alpha * self.conv(x)
# BGN+SPADE
class SpatialAdaptiveSynBatchGroupNorm2d(nn.Module):
    """Spatially-adaptive (SPADE-style) normalisation over a batch/group
    norm blend ("BGN").

    The input is normalised by a learnable mixture of BatchNorm and
    GroupNorm (mixing ratio ``rho``), then modulated by per-pixel scale
    and bias maps that are predicted from per-object latent vectors and
    scattered into the image plane via bounding-box masks.
    """

    def __init__(self, num_features, num_w=512):
        super().__init__()
        self.num_features = num_features
        # Spectrally-normalised linear projections from the latent vector
        # to per-channel scale (weight) and shift (bias).
        self.weight_proj = nn.utils.spectral_norm(
            nn.Linear(num_w, num_features))
        self.bias_proj = nn.utils.spectral_norm(nn.Linear(num_w, num_features))
        # Both norms are affine-free: the affine transform comes from the
        # projected latent vectors instead.
        self.batch_norm2d = nn.BatchNorm2d(num_features, eps=1e-5, affine=False,
                                           momentum=0.1, track_running_stats=True)
        self.group_norm = nn.GroupNorm(4, num_features, eps=1e-5, affine=False)
        self.rho = nn.Parameter(torch.tensor(0.1))  # the ratio of GN
        self.alpha = nn.Parameter(torch.tensor(0.0))  # the scale of the affined

    def forward(self, x, vector, bbox):
        """Normalise ``x`` and apply latent-conditioned spatial modulation.

        :param x: input feature map (b, c, h, w)
        :param vector: latent vector (b*o, dim_w), one row per object
        :param bbox: bbox map (b, o, h, w) -- per-object soft masks
        :return: modulated map with the same shape as ``x``
        """
        self.batch_norm2d._check_input_dim(x)
        # Blend BatchNorm and GroupNorm outputs; rho (clamped to [0, 1])
        # is the learned fraction of GroupNorm.
        output_b = self.batch_norm2d(x)
        output_g = self.group_norm(x)
        output = output_b + self.rho.clamp(0,1) * (output_g - output_b)
        b, o, _, _ = bbox.size()
        _, _, h, w = x.size()
        bbox = F.interpolate(bbox, size=(h, w), mode='bilinear', align_corners=False) # b o h w
        weight, bias = self.weight_proj(vector), self.bias_proj(vector) # b*o d
        # Flatten the spatial dims and normalise the masks so overlapping
        # boxes sum to ~1 per pixel (1e-4 guards against division by zero).
        bbox_non_spatial = bbox.view(b, o, -1) # b o h*w
        bbox_non_spatial_margin = bbox_non_spatial.sum(dim=1, keepdim=True) + torch.tensor(1e-4) # b 1 h*w
        bbox_non_spatial.div_(bbox_non_spatial_margin)
        # Scatter each object's (weight, bias) into the image plane via a
        # batched matmul with the normalised masks.
        weight, bias = weight.view(b, o, -1), bias.view(b, o, -1) # b o d
        weight.transpose_(1, 2), bias.transpose_(1, 2) # b d o
        weight, bias = torch.bmm(weight, bbox_non_spatial), torch.bmm(bias, bbox_non_spatial) # b d h*w
        # weight.div_(bbox_non_spatial_margin), bias.div_(bbox_non_spatial_margin) # b d h*w
        weight, bias = weight.view(b, -1, h, w), bias.view(b, -1, h, w)
        # weight = torch.sum(bbox * weight, dim=1, keepdim=False) / \
        #     (torch.sum(bbox, dim=1, keepdim=False) + 1e-6) # b d h w
        # bias = torch.sum(bbox * bias, dim=1, keepdim=False) / \
        #     (torch.sum(bbox, dim=1, keepdim=False) + 1e-6) # b d h w
        affined = weight * output + bias
        # alpha (clamped to [-1, 1]) gates how much modulation is applied.
        return output + self.alpha.clamp(-1, 1) * affined

    def __repr__(self):
        return self.__class__.__name__ + '(' + str(self.num_features) + ')'
| [
"torch.nn.BatchNorm2d",
"torch.nn.GroupNorm",
"torch.nn.LeakyReLU",
"torch.nn.Sequential",
"torch.nn.functional.avg_pool2d",
"torch.nn.Conv2d",
"torch.nn.functional.softplus",
"torch.nn.utils.spectral_norm",
"torch.nn.functional.interpolate",
"torch.nn.Linear"
] | [((4189, 4254), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_feat', 'out_feat', 'kernel_size', 'stride', 'pad'], {'bias': 'bias'}), '(in_feat, out_feat, kernel_size, stride, pad, bias=bias)\n', (4198, 4254), False, 'from torch import nn\n'), ((1999, 2017), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['(0.01)'], {}), '(0.01)\n', (2011, 2017), False, 'from torch import nn\n'), ((3313, 3331), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['(0.01)'], {}), '(0.01)\n', (3325, 3331), False, 'from torch import nn\n'), ((4292, 4332), 'torch.nn.utils.spectral_norm', 'nn.utils.spectral_norm', (['conv'], {'eps': '(0.0001)'}), '(conv, eps=0.0001)\n', (4314, 4332), False, 'from torch import nn\n'), ((4690, 4710), 'torch.nn.Sequential', 'nn.Sequential', (['*conv'], {}), '(*conv)\n', (4703, 4710), False, 'from torch import nn\n'), ((5228, 5325), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['num_features'], {'eps': '(1e-05)', 'affine': '(False)', 'momentum': '(0.1)', 'track_running_stats': '(True)'}), '(num_features, eps=1e-05, affine=False, momentum=0.1,\n track_running_stats=True)\n', (5242, 5325), False, 'from torch import nn\n'), ((5376, 5430), 'torch.nn.GroupNorm', 'nn.GroupNorm', (['(4)', 'num_features'], {'eps': '(1e-05)', 'affine': '(False)'}), '(4, num_features, eps=1e-05, affine=False)\n', (5388, 5430), False, 'from torch import nn\n'), ((6098, 6168), 'torch.nn.functional.interpolate', 'F.interpolate', (['bbox'], {'size': '(h, w)', 'mode': '"""bilinear"""', 'align_corners': '(False)'}), "(bbox, size=(h, w), mode='bilinear', align_corners=False)\n", (6111, 6168), True, 'from torch.nn import functional as F\n'), ((2474, 2522), 'torch.nn.functional.interpolate', 'F.interpolate', (['x'], {'scale_factor': '(2)', 'mode': '"""nearest"""'}), "(x, scale_factor=2, mode='nearest')\n", (2487, 2522), True, 'from torch.nn import functional as F\n'), ((3751, 3769), 'torch.nn.functional.avg_pool2d', 'F.avg_pool2d', (['x', '(2)'], {}), '(x, 2)\n', (3763, 3769), True, 'from torch.nn import functional as 
F\n'), ((4530, 4554), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['channels'], {}), '(channels)\n', (4544, 4554), False, 'from torch import nn\n'), ((4576, 4594), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['(0.01)'], {}), '(0.01)\n', (4588, 4594), False, 'from torch import nn\n'), ((5088, 5118), 'torch.nn.Linear', 'nn.Linear', (['num_w', 'num_features'], {}), '(num_w, num_features)\n', (5097, 5118), False, 'from torch import nn\n'), ((5168, 5198), 'torch.nn.Linear', 'nn.Linear', (['num_w', 'num_features'], {}), '(num_w, num_features)\n', (5177, 5198), False, 'from torch import nn\n'), ((602, 636), 'torch.nn.functional.softplus', 'F.softplus', (['self.noise_weight_seed'], {}), '(self.noise_weight_seed)\n', (612, 636), True, 'from torch.nn import functional as F\n'), ((1158, 1192), 'torch.nn.functional.softplus', 'F.softplus', (['self.noise_weight_seed'], {}), '(self.noise_weight_seed)\n', (1168, 1192), True, 'from torch.nn import functional as F\n'), ((2817, 2865), 'torch.nn.functional.interpolate', 'F.interpolate', (['x'], {'scale_factor': '(2)', 'mode': '"""nearest"""'}), "(x, scale_factor=2, mode='nearest')\n", (2830, 2865), True, 'from torch.nn import functional as F\n'), ((3926, 3944), 'torch.nn.functional.avg_pool2d', 'F.avg_pool2d', (['x', '(2)'], {}), '(x, 2)\n', (3938, 3944), True, 'from torch.nn import functional as F\n')] |
from dataclasses import dataclass, field
from enum import Enum
from typing import List, Optional
__NAMESPACE__ = "http://schemas.microsoft.com/office/excel/2003/xml"
class MapInfoTypeHideInactiveListBorder(Enum):
    """Allowed values of MapInfoType's HideInactiveListBorder attribute."""
    TRUE = "true"
    FALSE = "false"
@dataclass
class SchemaType:
    """Binding for the SpreadsheetML ``Schema`` element.

    Appears to be machine-generated (xsdata-style) from the XML schema;
    the ``metadata`` dicts drive XML (de)serialisation.
    """

    # Arbitrary child elements of the inline schema (wildcard content).
    any_element: List[object] = field(
        default_factory=list,
        metadata={
            "type": "Wildcard",
            "namespace": "##any",
        }
    )
    # Required "ID" attribute.
    id: Optional[str] = field(
        default=None,
        metadata={
            "name": "ID",
            "type": "Attribute",
            "namespace": "http://schemas.microsoft.com/office/excel/2003/xml",
            "required": True,
        }
    )
    # Required "Namespace" attribute.
    namespace: Optional[str] = field(
        default=None,
        metadata={
            "name": "Namespace",
            "type": "Attribute",
            "namespace": "http://schemas.microsoft.com/office/excel/2003/xml",
            "required": True,
        }
    )
    # Optional "SchemaRef" attribute.
    schema_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "SchemaRef",
            "type": "Attribute",
            "namespace": "http://schemas.microsoft.com/office/excel/2003/xml",
        }
    )
class TruefalseType(Enum):
    """Generic XML boolean token ("true"/"false")."""
    TRUE = "true"
    FALSE = "false"
@dataclass
class MapInfoType:
    """Binding for the SpreadsheetML ``MapInfo`` element content model."""

    # "Schema" child elements; the schema requires at least one.
    schema: List[SchemaType] = field(
        default_factory=list,
        metadata={
            "name": "Schema",
            "type": "Element",
            "namespace": "http://schemas.microsoft.com/office/excel/2003/xml",
            "min_occurs": 1,
        }
    )
    # "HideInactiveListBorder" attribute; defaults to FALSE.
    hide_inactive_list_border: MapInfoTypeHideInactiveListBorder = field(
        default=MapInfoTypeHideInactiveListBorder.FALSE,
        metadata={
            "name": "HideInactiveListBorder",
            "type": "Attribute",
            "namespace": "http://schemas.microsoft.com/office/excel/2003/xml",
        }
    )
    # "SelectionNamespaces" attribute (optional string).
    selection_namespaces: Optional[str] = field(
        default=None,
        metadata={
            "name": "SelectionNamespaces",
            "type": "Attribute",
            "namespace": "http://schemas.microsoft.com/office/excel/2003/xml",
        }
    )
    # "HideSingleMappedCellBorder" attribute; defaults to TRUE.
    hide_single_mapped_cell_border: TruefalseType = field(
        default=TruefalseType.TRUE,
        metadata={
            "name": "HideSingleMappedCellBorder",
            "type": "Attribute",
            "namespace": "http://schemas.microsoft.com/office/excel/2003/xml",
        }
    )
@dataclass
class MapInfo(MapInfoType):
    """Root ``MapInfo`` element in the SpreadsheetML namespace."""

    class Meta:
        # Binding metadata: element namespace used for (de)serialisation.
        namespace = "http://schemas.microsoft.com/office/excel/2003/xml"
| [
"dataclasses.field"
] | [((317, 402), 'dataclasses.field', 'field', ([], {'default_factory': 'list', 'metadata': "{'type': 'Wildcard', 'namespace': '##any'}"}), "(default_factory=list, metadata={'type': 'Wildcard', 'namespace': '##any'}\n )\n", (322, 402), False, 'from dataclasses import dataclass, field\n'), ((479, 637), 'dataclasses.field', 'field', ([], {'default': 'None', 'metadata': "{'name': 'ID', 'type': 'Attribute', 'namespace':\n 'http://schemas.microsoft.com/office/excel/2003/xml', 'required': True}"}), "(default=None, metadata={'name': 'ID', 'type': 'Attribute',\n 'namespace': 'http://schemas.microsoft.com/office/excel/2003/xml',\n 'required': True})\n", (484, 637), False, 'from dataclasses import dataclass, field\n'), ((742, 907), 'dataclasses.field', 'field', ([], {'default': 'None', 'metadata': "{'name': 'Namespace', 'type': 'Attribute', 'namespace':\n 'http://schemas.microsoft.com/office/excel/2003/xml', 'required': True}"}), "(default=None, metadata={'name': 'Namespace', 'type': 'Attribute',\n 'namespace': 'http://schemas.microsoft.com/office/excel/2003/xml',\n 'required': True})\n", (747, 907), False, 'from dataclasses import dataclass, field\n'), ((1013, 1156), 'dataclasses.field', 'field', ([], {'default': 'None', 'metadata': "{'name': 'SchemaRef', 'type': 'Attribute', 'namespace':\n 'http://schemas.microsoft.com/office/excel/2003/xml'}"}), "(default=None, metadata={'name': 'SchemaRef', 'type': 'Attribute',\n 'namespace': 'http://schemas.microsoft.com/office/excel/2003/xml'})\n", (1018, 1156), False, 'from dataclasses import dataclass, field\n'), ((1352, 1519), 'dataclasses.field', 'field', ([], {'default_factory': 'list', 'metadata': "{'name': 'Schema', 'type': 'Element', 'namespace':\n 'http://schemas.microsoft.com/office/excel/2003/xml', 'min_occurs': 1}"}), "(default_factory=list, metadata={'name': 'Schema', 'type': 'Element',\n 'namespace': 'http://schemas.microsoft.com/office/excel/2003/xml',\n 'min_occurs': 1})\n", (1357, 1519), False, 'from dataclasses 
import dataclass, field\n'), ((1660, 1855), 'dataclasses.field', 'field', ([], {'default': 'MapInfoTypeHideInactiveListBorder.FALSE', 'metadata': "{'name': 'HideInactiveListBorder', 'type': 'Attribute', 'namespace':\n 'http://schemas.microsoft.com/office/excel/2003/xml'}"}), "(default=MapInfoTypeHideInactiveListBorder.FALSE, metadata={'name':\n 'HideInactiveListBorder', 'type': 'Attribute', 'namespace':\n 'http://schemas.microsoft.com/office/excel/2003/xml'})\n", (1665, 1855), False, 'from dataclasses import dataclass, field\n'), ((1959, 2116), 'dataclasses.field', 'field', ([], {'default': 'None', 'metadata': "{'name': 'SelectionNamespaces', 'type': 'Attribute', 'namespace':\n 'http://schemas.microsoft.com/office/excel/2003/xml'}"}), "(default=None, metadata={'name': 'SelectionNamespaces', 'type':\n 'Attribute', 'namespace':\n 'http://schemas.microsoft.com/office/excel/2003/xml'})\n", (1964, 2116), False, 'from dataclasses import dataclass, field\n'), ((2230, 2408), 'dataclasses.field', 'field', ([], {'default': 'TruefalseType.TRUE', 'metadata': "{'name': 'HideSingleMappedCellBorder', 'type': 'Attribute', 'namespace':\n 'http://schemas.microsoft.com/office/excel/2003/xml'}"}), "(default=TruefalseType.TRUE, metadata={'name':\n 'HideSingleMappedCellBorder', 'type': 'Attribute', 'namespace':\n 'http://schemas.microsoft.com/office/excel/2003/xml'})\n", (2235, 2408), False, 'from dataclasses import dataclass, field\n')] |
# -*- coding: utf-8 -*-
"""
LPRM time series reader
"""
from pynetcf.time_series import GriddedNcOrthoMultiTs
import os
import pygeogrids.netcdf as nc
from cadati.jd_date import jd2dt
from io_utils.utils import mjd2jd
from io_utils.read.geo_ts_readers.mixins import OrthoMultiTsCellReaderMixin
class LPRMTs(GriddedNcOrthoMultiTs, OrthoMultiTsCellReaderMixin):
    """Reader for gridded LPRM time series stored as OrthoMulti netCDF cells."""

    # Variable holding per-observation scan times as modified Julian dates.
    _t0 = 'SCANTIME_MJD'

    def __init__(self, ts_path, exact_index=False, grid_path=None, **kwargs):
        """
        Parameters
        ----------
        ts_path : str
            Path to the cell-wise time series files.
        exact_index : bool, optional (default: False)
            Re-index read data by the exact scan times in SCANTIME_MJD.
        grid_path : str, optional (default: None)
            Path to the grid file; defaults to ``<ts_path>/grid.nc``.
        **kwargs :
            Passed on to GriddedNcOrthoMultiTs.
        """
        if grid_path is None:
            grid_path = os.path.join(ts_path, "grid.nc")
        grid = nc.load_grid(grid_path)
        super(LPRMTs, self).__init__(ts_path, grid, automask=True, **kwargs)
        self.exact_index = exact_index
        if exact_index and (self.parameters is not None):
            # Ensure the scan-time variable is always read; it is needed
            # to build the exact index.
            self.parameters.append(self._t0)

    def read(self, *args, **kwargs):
        """Read a time series; re-index by exact scan time if requested."""
        df = super(LPRMTs, self).read(*args, **kwargs)
        if self.exact_index:
            # Convert modified Julian dates to datetimes, use them as the
            # index, and drop rows without a scan time.
            df[self._t0] = jd2dt(mjd2jd(df[self._t0].values))
            df = df.set_index(self._t0)  # drop nan index
            df = df.loc[df.index.dropna()]
        return df
"os.path.join",
"io_utils.utils.mjd2jd",
"pygeogrids.netcdf.load_grid"
] | [((570, 593), 'pygeogrids.netcdf.load_grid', 'nc.load_grid', (['grid_path'], {}), '(grid_path)\n', (582, 593), True, 'import pygeogrids.netcdf as nc\n'), ((521, 553), 'os.path.join', 'os.path.join', (['ts_path', '"""grid.nc"""'], {}), "(ts_path, 'grid.nc')\n", (533, 553), False, 'import os\n'), ((969, 996), 'io_utils.utils.mjd2jd', 'mjd2jd', (['df[self._t0].values'], {}), '(df[self._t0].values)\n', (975, 996), False, 'from io_utils.utils import mjd2jd\n')] |
from django.contrib import admin
from django_summernote.admin import SummernoteModelAdmin
from . import models
@admin.register(models.Post)
class PostAdmin(SummernoteModelAdmin):
    """Admin for Post using the Summernote rich-text editor widget."""

    # NOTE(review): if re-enabled, this must be a tuple/list such as
    # ('post_type',) -- the original form below is a plain string.
    # list_filter = ('post_type')
    # Columns shown in the change-list table.
    list_display = ['id','title', 'post_type', 'creator',
                    'view_count','created_at']
    # Fields shown on the add/change form, in this order.
    fields = ['title', 'post_type', 'creator', 'description',
              'view_count', 'isImportant']
    # Both of these columns link to the change form.
    list_display_links = ('title','creator')
@admin.register(models.PostLike)
class LikeAdmin(admin.ModelAdmin):
    """Default admin for PostLike; no customisation needed."""
    pass
@admin.register(models.PostComment)
class CommentAdmin(SummernoteModelAdmin):
    """Admin for PostComment with the Summernote editor; defaults otherwise."""
    pass
| [
"django.contrib.admin.register"
] | [((114, 141), 'django.contrib.admin.register', 'admin.register', (['models.Post'], {}), '(models.Post)\n', (128, 141), False, 'from django.contrib import admin\n'), ((472, 503), 'django.contrib.admin.register', 'admin.register', (['models.PostLike'], {}), '(models.PostLike)\n', (486, 503), False, 'from django.contrib import admin\n'), ((550, 584), 'django.contrib.admin.register', 'admin.register', (['models.PostComment'], {}), '(models.PostComment)\n', (564, 584), False, 'from django.contrib import admin\n')] |
#!/usr/bin/env python2.7
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit test suite for common.cros.chromite."""
import test_env
import base64
import json
import unittest
from common import cros_chromite
class MockConfigCache(object):
  """Test double for a config cache backed by a plain dict.

  Keys are (name, version) tuples; missing entries yield None.
  """

  def __init__(self, data):
    self.data = data

  def Get(self, name, version=None):
    key = (name, version)
    return self.data.get(key)
class ChromiteConfigTestCase(unittest.TestCase):
  """Unit tests for ChromiteConfig parsing, defaults, and categorization."""

  # Fixture mirroring chromite's cbuildbot config dump: '_default' supplies
  # fallthrough values and '_templates' supplies named templates that
  # configs opt into via '_template'.
  CHROMITE_CONFIG = {
    '_default': {
      'foo': 'bar',
      'key': 'value',
      'hw_tests': [
        'default0',
        'default1',
      ],
    },
    '_templates': {
      'no-hwtest-pre-cq': {
        'hw_tests': [
        ],
      },
    },
    'test': {
      'foo': 'baz',
    },
    'no-hwtest-pre-cq': {
      '_template': 'no-hwtest-pre-cq',
      'name': 'no-hwtest-pre-cq',
    },
    'parent': {
      'name': 'parent',
      'child_configs': [
        {
          'name': 'alice',
          'vm_tests': [
            'test',
          ],
          'hw_tests': [
            'test',
          ],
          'unittests': True,
        },
        {'name': 'bob'}
      ],
    },
    'parent-template': {
      'name': 'parent-template',
      'hw_tests': [],
      'child_configs': [
        {
          '_template': 'no-hwtest-pre-cq',
          'name': 'joe',
        }
      ],
    },
    'baremetal-pre-cq': {
      'vm_tests': [
        'test',
      ],
      'hw_tests': [
        'test',
      ],
      'unittests': True,
    },
    'pre-cq-group': {},
    'master-thing': {
      'master': True,
    },
  }

  def setUp(self):
    self.config = cros_chromite.ChromiteConfig.FromConfigDict(
        self.CHROMITE_CONFIG)
    self.test = self.config['test']
    self.no_hwtest_pre_cq = self.config['no-hwtest-pre-cq']
    self.parent = self.config['parent']
    self.parent_template = self.config['parent-template']
    self.baremetal = self.config['baremetal-pre-cq']

  def testChildren(self):
    self.assertEqual(len(self.test.children), 0)
    self.assertEqual(len(self.parent.children), 2)
    self.assertEqual(self.parent.children[0]['name'], 'alice')
    self.assertEqual(self.parent.children[1]['name'], 'bob')

  def testDefaultFallthrough_UsesLocalWhenAvailable(self):
    self.assertEqual(self.test['foo'], 'baz')

  def testDefaultFallthrough_UsesDefaultWhenMissing(self):
    self.assertEqual(self.test['key'], 'value')

  def testDefaultFallthrough_ParentUsesDefaults(self):
    self.assertEqual(self.parent['hw_tests'], ['default0', 'default1'])

  def testHasTests(self):
    self.assertFalse(self.test.HasVmTests())
    self.assertTrue(self.test.HasHwTests())
    self.assertFalse(self.no_hwtest_pre_cq.HasHwTests())
    self.assertFalse(self.test.HasUnitTests())
    self.assertTrue(self.baremetal.HasVmTests())
    self.assertTrue(self.baremetal.HasHwTests())
    self.assertTrue(self.baremetal.HasUnitTests())

  def testHasTests_DetectsInChildren(self):
    self.assertTrue(self.parent.HasVmTests())
    self.assertTrue(self.parent.HasHwTests())
    self.assertFalse(self.parent_template.HasHwTests())
    self.assertTrue(self.baremetal.HasUnitTests())

  def testPreCqDetection(self):
    self.assertFalse(self.test.IsPreCqBuilder())
    self.assertTrue(self.baremetal.IsPreCqBuilder())
    self.assertFalse(self.baremetal.IsGeneralPreCqBuilder())

    pre_cq_group = self.config['pre-cq-group']
    self.assertTrue(pre_cq_group.IsPreCqBuilder())
    self.assertTrue(pre_cq_group.IsGeneralPreCqBuilder())

  def testIsMaster(self):
    self.assertTrue(self.config['master-thing'].is_master)

  def testCategorize(self):
    # BUG FIX: the original assigned an empty `expectations` tuple here
    # (dead code, immediately overwritten below); it has been removed.
    # Name-based: name => base, suffix, category
    expectations = (
      # (With Board Type)
      ('pre-cq-launcher', 'priest', 'pre-cq-launcher', None, 'PRE_CQ_LAUNCHER'),
      # The canary board type should override the name-based inferences,
      # marking this board as a canary.
      ('odd-name-paladin', 'canary', 'odd-name-paladin', None, 'CANARY'),
      ('my-board-asan', None, 'my-board', 'asan', 'ASAN'),
      ('my-board-pre-cq', None, 'my-board', 'pre-cq', 'PRE_CQ'),
      ('my-board-chrome-pfq', None, 'my-board', 'chrome-pfq', 'PFQ'),
      ('my-board-chromium-pfq', None, 'my-board', 'chromium-pfq', 'PFQ'),
      ('my-board-paladin', None, 'my-board', 'paladin', 'PALADIN'),
      ('my-board-release', None, 'my-board', 'release', 'CANARY'),
      ('my-board-release-group', None, 'my-board', 'release-group', 'CANARY'),
      ('my-board-firmware', None, 'my-board', 'firmware', 'FIRMWARE'),
      ('my-board-incremental', None, 'my-board', 'incremental', 'INCREMENTAL'),
      ('my-board-factory', None, 'my-board', 'factory', 'FACTORY'),
      ('my-board-project-sdk', None, 'my-board-project', 'sdk', 'SDK'),
      ('my-board-toolchain-major', None,
       'my-board', 'toolchain-major', 'TOOLCHAIN'),
      ('my-board-toolchain-minor', None,
       'my-board', 'toolchain-minor', 'TOOLCHAIN'),
      ('master-toolchain-release', None,
       'master', 'toolchain-release', 'CANARY_TOOLCHAIN'),
    )

    for name, build_type, exp_base, exp_suffix, exp_cat_attr in expectations:
      exp_category = getattr(cros_chromite.ChromiteTarget, exp_cat_attr)
      base, suffix, category = cros_chromite.ChromiteTarget.Categorize(
          name,
          build_type=build_type)
      self.assertEqual(
          (base, suffix, category), (exp_base, exp_suffix, exp_category))
class ChromitePinManagerTestCase(unittest.TestCase):
  """Tests branch-name resolution in ChromitePinManager.

  A pinned branch maps to its replacement; `require` controls whether an
  unpinned branch falls through unchanged (False) or raises (True).
  """

  def testGetPinnedBranch_PinnedBranchReturnsPinnedValue(self):
    pm = cros_chromite.ChromitePinManager(
        'test',
        pinned={'a': 'b'},
        require=True)
    self.assertEqual(pm.GetPinnedBranch('a'), 'b')

  def testGetPinnedBranch_UnpinnedBranchReturnsBranch(self):
    pm = cros_chromite.ChromitePinManager(
        'test',
        pinned={'a': 'b'},
        require=False)
    self.assertEqual(pm.GetPinnedBranch('foo'), 'foo')

  def testGetPinnedBranch_UnpinnedBranchReturnsErrorWithRequiredPinning(self):
    pm = cros_chromite.ChromitePinManager(
        'test',
        pinned={'a': 'b'},
        require=True)
    self.assertRaises(cros_chromite.ChromiteError,
                      pm.GetPinnedBranch, 'foo')
class ChromiteConfigManagerTestCase(unittest.TestCase):
  """Tests config loading and validation in ChromiteConfigManager."""

  def setUp(self):
    # Cache keyed by (name, version): one valid JSON config, one broken.
    self.cache = MockConfigCache({
      ('test', 'v1'): '{}',
      ('test', 'v_invalid'): '{NOT JSON}',
    })

  def testGetConfig_ValidSucceeds(self):
    manager = cros_chromite.ChromiteConfigManager(self.cache,
        cros_chromite.ChromitePinManager(
            'test',
            {'test': 'v1'}))
    self.assertTrue(isinstance(manager.GetConfig('test'),
        cros_chromite.ChromiteConfig))

  def testGetConfig_InvalidJsonRaises(self):
    manager = cros_chromite.ChromiteConfigManager(self.cache,
        cros_chromite.ChromitePinManager(
            'test',
            {'test': 'v_invalid'}))
    self.assertRaises(cros_chromite.ChromiteError, manager.GetConfig, 'test')

  def testGetConfig_MissingRaises(self):
    # No pin manager: the named config has no cache entry at all.
    manager = cros_chromite.ChromiteConfigManager(self.cache)
    self.assertRaises(cros_chromite.ChromiteError, manager.GetConfig, 'foo')
class ChromiteFetcherTestCase(unittest.TestCase):
  """Tests ChromiteFetcher URL construction and base64 decoding."""

  def setUp(self):
    self.fetcher = cros_chromite.ChromiteFetcher(
        cros_chromite.ChromitePinManager(
            'test',
            {'test': 'v1'})
    )

  @staticmethod
  def _configUrlForBranch(branch):
    # Gitiles raw-text URL of the chromite config at `branch`.
    return '%s/+/%s/%s?format=text' % (
        cros_chromite.ChromiteFetcher.CHROMITE_GITILES_BASE,
        branch,
        cros_chromite.ChromiteFetcher.CHROMITE_CONFIG_PATH,
    )

  def testFetch_Valid(self):
    fetched_urls = []
    def _MockGetText(url):
      # Record the requested URL and return base64 text, as Gitiles would.
      # NOTE(review): b64encode on a str is Python 2 only; the file's
      # shebang targets python2.7.
      fetched_urls.append(url)
      return base64.b64encode('content')
    self.fetcher._GetText = _MockGetText

    data = self.fetcher('test', None)
    self.assertEqual(data, ('content', 'v1'))
    self.assertEqual(fetched_urls, [self._configUrlForBranch('v1')])

  def testFetch_NotBase64(self):
    def _MockGetText(_url):
      return 'Not Valid Base64'
    self.fetcher._GetText = _MockGetText
    self.assertRaises(cros_chromite.GitilesError, self.fetcher, 'test', None)
# Allow running this suite directly (e.g. `python test_cros_chromite.py`).
if __name__ == '__main__':
  unittest.main()
| [
"common.cros_chromite.ChromiteConfigManager",
"common.cros_chromite.ChromitePinManager",
"base64.b64encode",
"common.cros_chromite.ChromiteTarget.Categorize",
"common.cros_chromite.ChromiteConfig.FromConfigDict",
"unittest.main"
] | [((8371, 8386), 'unittest.main', 'unittest.main', ([], {}), '()\n', (8384, 8386), False, 'import unittest\n'), ((1717, 1782), 'common.cros_chromite.ChromiteConfig.FromConfigDict', 'cros_chromite.ChromiteConfig.FromConfigDict', (['self.CHROMITE_CONFIG'], {}), '(self.CHROMITE_CONFIG)\n', (1760, 1782), False, 'from common import cros_chromite\n'), ((5742, 5815), 'common.cros_chromite.ChromitePinManager', 'cros_chromite.ChromitePinManager', (['"""test"""'], {'pinned': "{'a': 'b'}", 'require': '(True)'}), "('test', pinned={'a': 'b'}, require=True)\n", (5774, 5815), False, 'from common import cros_chromite\n'), ((5963, 6037), 'common.cros_chromite.ChromitePinManager', 'cros_chromite.ChromitePinManager', (['"""test"""'], {'pinned': "{'a': 'b'}", 'require': '(False)'}), "('test', pinned={'a': 'b'}, require=False)\n", (5995, 6037), False, 'from common import cros_chromite\n'), ((6207, 6280), 'common.cros_chromite.ChromitePinManager', 'cros_chromite.ChromitePinManager', (['"""test"""'], {'pinned': "{'a': 'b'}", 'require': '(True)'}), "('test', pinned={'a': 'b'}, require=True)\n", (6239, 6280), False, 'from common import cros_chromite\n'), ((7207, 7254), 'common.cros_chromite.ChromiteConfigManager', 'cros_chromite.ChromiteConfigManager', (['self.cache'], {}), '(self.cache)\n', (7242, 7254), False, 'from common import cros_chromite\n'), ((5425, 5493), 'common.cros_chromite.ChromiteTarget.Categorize', 'cros_chromite.ChromiteTarget.Categorize', (['name'], {'build_type': 'build_type'}), '(name, build_type=build_type)\n', (5464, 5493), False, 'from common import cros_chromite\n'), ((6695, 6751), 'common.cros_chromite.ChromitePinManager', 'cros_chromite.ChromitePinManager', (['"""test"""', "{'test': 'v1'}"], {}), "('test', {'test': 'v1'})\n", (6727, 6751), False, 'from common import cros_chromite\n'), ((6987, 7050), 'common.cros_chromite.ChromitePinManager', 'cros_chromite.ChromitePinManager', (['"""test"""', "{'test': 'v_invalid'}"], {}), "('test', {'test': 'v_invalid'})\n", 
(7019, 7050), False, 'from common import cros_chromite\n'), ((7462, 7518), 'common.cros_chromite.ChromitePinManager', 'cros_chromite.ChromitePinManager', (['"""test"""', "{'test': 'v1'}"], {}), "('test', {'test': 'v1'})\n", (7494, 7518), False, 'from common import cros_chromite\n'), ((7904, 7931), 'base64.b64encode', 'base64.b64encode', (['"""content"""'], {}), "('content')\n", (7920, 7931), False, 'import base64\n')] |
################################################################################
#
# test_xtram.py - testing the pyfeat xtram class
#
# author: <NAME> <<EMAIL>>
# author: <NAME> <<EMAIL>>
#
################################################################################
from nose.tools import assert_raises, assert_true
from pyfeat.estimator import XTRAM
from pytram import ExpressionError, NotConvergedWarning
import numpy as np
#XTRAM testing
def test_expression_error_None():
    """XTRAM must raise ExpressionError when any required argument is None."""
    C_K_ij = np.ones(shape=(2, 3, 3), dtype=np.intc)
    u_I_x = np.ones(shape=(2, 10), dtype=np.float64)
    T_x = np.ones(shape=(10,), dtype=np.intc)
    M_x = np.ones(shape=(10,), dtype=np.intc)
    N_K_i = np.ones(shape=(2, 3), dtype=np.intc)
    assert_raises(ExpressionError, XTRAM, C_K_ij, None, T_x, M_x, N_K_i)
    assert_raises(ExpressionError, XTRAM, C_K_ij, u_I_x, None, M_x, N_K_i)
    assert_raises(ExpressionError, XTRAM, C_K_ij, u_I_x, T_x, None, N_K_i)
    assert_raises(ExpressionError, XTRAM, C_K_ij, u_I_x, T_x, M_x, None)
def test_expression_error_dim():
    """Arrays with the wrong dimensionality must be rejected by XTRAM."""
    C_ok = np.ones(shape=(2, 3, 3), dtype=np.intc)
    u_ok = np.ones(shape=(2, 10), dtype=np.float64)
    T_ok = np.ones(shape=(10,), dtype=np.intc)
    M_ok = np.ones(shape=(10,), dtype=np.intc)
    N_ok = np.ones(shape=(2, 3), dtype=np.intc)
    C_bad = np.ones(shape=(2, 3), dtype=np.intc)          # 2-D, should be 3-D
    u_bad = np.ones(shape=(10,), dtype=np.float64)        # 1-D, should be 2-D
    N_bad = np.ones(shape=(3,), dtype=np.intc)            # 1-D, should be 2-D
    assert_raises(ExpressionError, XTRAM, C_bad, u_ok, T_ok, M_ok, N_ok)
    assert_raises(ExpressionError, XTRAM, C_ok, u_bad, T_ok, M_ok, N_ok)
    assert_raises(ExpressionError, XTRAM, C_ok, u_ok, T_ok, M_ok, N_bad)
def test_expression_error_markov():
    """An N_K_i whose Markov-state axis mismatches C_K_ij must be rejected."""
    C_K_ij = np.ones(shape=(2, 3, 3), dtype=np.intc)
    u_I_x = np.ones(shape=(2, 10), dtype=np.float64)
    T_x = np.ones(shape=(10,), dtype=np.intc)
    M_x = np.ones(shape=(10,), dtype=np.intc)
    N_bad = np.ones(shape=(2, 4), dtype=np.intc)  # 4 states vs 3 in C_K_ij
    assert_raises(ExpressionError, XTRAM, C_K_ij, u_I_x, T_x, M_x, N_bad)
def test_expression_error_therm():
    """Inputs whose thermodynamic-state axis mismatches C_K_ij must be rejected."""
    C_K_ij = np.ones(shape=(2, 3, 3), dtype=np.intc)
    T_x = np.ones(shape=(10,), dtype=np.intc)
    M_x = np.ones(shape=(10,), dtype=np.intc)
    u_bad = np.ones(shape=(3, 10), dtype=np.float64)  # 3 therm states vs 2
    u_ok = np.ones(shape=(2, 10), dtype=np.float64)
    N_ok = np.ones(shape=(2, 3), dtype=np.intc)
    N_bad = np.ones(shape=(3, 4), dtype=np.intc)      # 3 therm states vs 2
    assert_raises(ExpressionError, XTRAM, C_K_ij, u_bad, T_x, M_x, N_ok)
    assert_raises(ExpressionError, XTRAM, C_K_ij, u_ok, T_x, M_x, N_bad)
| [
"numpy.ones"
] | [((531, 570), 'numpy.ones', 'np.ones', ([], {'shape': '(2, 3, 3)', 'dtype': 'np.intc'}), '(shape=(2, 3, 3), dtype=np.intc)\n', (538, 570), True, 'import numpy as np\n'), ((578, 610), 'numpy.ones', 'np.ones', ([], {'shape': '(10)', 'dtype': 'np.intc'}), '(shape=10, dtype=np.intc)\n', (585, 610), True, 'import numpy as np\n'), ((616, 648), 'numpy.ones', 'np.ones', ([], {'shape': '(10)', 'dtype': 'np.intc'}), '(shape=10, dtype=np.intc)\n', (623, 648), True, 'import numpy as np\n'), ((653, 689), 'numpy.ones', 'np.ones', ([], {'shape': '(2, 3)', 'dtype': 'np.intc'}), '(shape=(2, 3), dtype=np.intc)\n', (660, 689), True, 'import numpy as np\n'), ((736, 775), 'numpy.ones', 'np.ones', ([], {'shape': '(2, 3, 3)', 'dtype': 'np.intc'}), '(shape=(2, 3, 3), dtype=np.intc)\n', (743, 775), True, 'import numpy as np\n'), ((777, 817), 'numpy.ones', 'np.ones', ([], {'shape': '(2, 10)', 'dtype': 'np.float64'}), '(shape=(2, 10), dtype=np.float64)\n', (784, 817), True, 'import numpy as np\n'), ((826, 858), 'numpy.ones', 'np.ones', ([], {'shape': '(10)', 'dtype': 'np.intc'}), '(shape=10, dtype=np.intc)\n', (833, 858), True, 'import numpy as np\n'), ((864, 900), 'numpy.ones', 'np.ones', ([], {'shape': '(2, 3)', 'dtype': 'np.intc'}), '(shape=(2, 3), dtype=np.intc)\n', (871, 900), True, 'import numpy as np\n'), ((947, 986), 'numpy.ones', 'np.ones', ([], {'shape': '(2, 3, 3)', 'dtype': 'np.intc'}), '(shape=(2, 3, 3), dtype=np.intc)\n', (954, 986), True, 'import numpy as np\n'), ((988, 1028), 'numpy.ones', 'np.ones', ([], {'shape': '(2, 10)', 'dtype': 'np.float64'}), '(shape=(2, 10), dtype=np.float64)\n', (995, 1028), True, 'import numpy as np\n'), ((1031, 1063), 'numpy.ones', 'np.ones', ([], {'shape': '(10)', 'dtype': 'np.intc'}), '(shape=10, dtype=np.intc)\n', (1038, 1063), True, 'import numpy as np\n'), ((1074, 1110), 'numpy.ones', 'np.ones', ([], {'shape': '(2, 3)', 'dtype': 'np.intc'}), '(shape=(2, 3), dtype=np.intc)\n', (1081, 1110), True, 'import numpy as np\n'), ((1157, 1196), 
'numpy.ones', 'np.ones', ([], {'shape': '(2, 3, 3)', 'dtype': 'np.intc'}), '(shape=(2, 3, 3), dtype=np.intc)\n', (1164, 1196), True, 'import numpy as np\n'), ((1198, 1238), 'numpy.ones', 'np.ones', ([], {'shape': '(2, 10)', 'dtype': 'np.float64'}), '(shape=(2, 10), dtype=np.float64)\n', (1205, 1238), True, 'import numpy as np\n'), ((1241, 1273), 'numpy.ones', 'np.ones', ([], {'shape': '(10)', 'dtype': 'np.intc'}), '(shape=10, dtype=np.intc)\n', (1248, 1273), True, 'import numpy as np\n'), ((1278, 1310), 'numpy.ones', 'np.ones', ([], {'shape': '(10)', 'dtype': 'np.intc'}), '(shape=10, dtype=np.intc)\n', (1285, 1310), True, 'import numpy as np\n'), ((1400, 1436), 'numpy.ones', 'np.ones', ([], {'shape': '(2, 3)', 'dtype': 'np.intc'}), '(shape=(2, 3), dtype=np.intc)\n', (1407, 1436), True, 'import numpy as np\n'), ((1439, 1479), 'numpy.ones', 'np.ones', ([], {'shape': '(2, 10)', 'dtype': 'np.float64'}), '(shape=(2, 10), dtype=np.float64)\n', (1446, 1479), True, 'import numpy as np\n'), ((1483, 1515), 'numpy.ones', 'np.ones', ([], {'shape': '(10)', 'dtype': 'np.intc'}), '(shape=10, dtype=np.intc)\n', (1490, 1515), True, 'import numpy as np\n'), ((1521, 1553), 'numpy.ones', 'np.ones', ([], {'shape': '(10)', 'dtype': 'np.intc'}), '(shape=10, dtype=np.intc)\n', (1528, 1553), True, 'import numpy as np\n'), ((1559, 1595), 'numpy.ones', 'np.ones', ([], {'shape': '(2, 3)', 'dtype': 'np.intc'}), '(shape=(2, 3), dtype=np.intc)\n', (1566, 1595), True, 'import numpy as np\n'), ((1642, 1681), 'numpy.ones', 'np.ones', ([], {'shape': '(2, 3, 3)', 'dtype': 'np.intc'}), '(shape=(2, 3, 3), dtype=np.intc)\n', (1649, 1681), True, 'import numpy as np\n'), ((1683, 1718), 'numpy.ones', 'np.ones', ([], {'shape': '(10)', 'dtype': 'np.float64'}), '(shape=10, dtype=np.float64)\n', (1690, 1718), True, 'import numpy as np\n'), ((1725, 1757), 'numpy.ones', 'np.ones', ([], {'shape': '(10)', 'dtype': 'np.intc'}), '(shape=10, dtype=np.intc)\n', (1732, 1757), True, 'import numpy as np\n'), ((1763, 
1795), 'numpy.ones', 'np.ones', ([], {'shape': '(10)', 'dtype': 'np.intc'}), '(shape=10, dtype=np.intc)\n', (1770, 1795), True, 'import numpy as np\n'), ((1801, 1837), 'numpy.ones', 'np.ones', ([], {'shape': '(2, 3)', 'dtype': 'np.intc'}), '(shape=(2, 3), dtype=np.intc)\n', (1808, 1837), True, 'import numpy as np\n'), ((1884, 1923), 'numpy.ones', 'np.ones', ([], {'shape': '(2, 3, 3)', 'dtype': 'np.intc'}), '(shape=(2, 3, 3), dtype=np.intc)\n', (1891, 1923), True, 'import numpy as np\n'), ((1925, 1965), 'numpy.ones', 'np.ones', ([], {'shape': '(2, 10)', 'dtype': 'np.float64'}), '(shape=(2, 10), dtype=np.float64)\n', (1932, 1965), True, 'import numpy as np\n'), ((1969, 2001), 'numpy.ones', 'np.ones', ([], {'shape': '(10)', 'dtype': 'np.intc'}), '(shape=10, dtype=np.intc)\n', (1976, 2001), True, 'import numpy as np\n'), ((2007, 2039), 'numpy.ones', 'np.ones', ([], {'shape': '(10)', 'dtype': 'np.intc'}), '(shape=10, dtype=np.intc)\n', (2014, 2039), True, 'import numpy as np\n'), ((2045, 2076), 'numpy.ones', 'np.ones', ([], {'shape': '(3)', 'dtype': 'np.intc'}), '(shape=3, dtype=np.intc)\n', (2052, 2076), True, 'import numpy as np\n'), ((2163, 2202), 'numpy.ones', 'np.ones', ([], {'shape': '(2, 3, 3)', 'dtype': 'np.intc'}), '(shape=(2, 3, 3), dtype=np.intc)\n', (2170, 2202), True, 'import numpy as np\n'), ((2204, 2244), 'numpy.ones', 'np.ones', ([], {'shape': '(2, 10)', 'dtype': 'np.float64'}), '(shape=(2, 10), dtype=np.float64)\n', (2211, 2244), True, 'import numpy as np\n'), ((2247, 2279), 'numpy.ones', 'np.ones', ([], {'shape': '(10)', 'dtype': 'np.intc'}), '(shape=10, dtype=np.intc)\n', (2254, 2279), True, 'import numpy as np\n'), ((2284, 2316), 'numpy.ones', 'np.ones', ([], {'shape': '(10)', 'dtype': 'np.intc'}), '(shape=10, dtype=np.intc)\n', (2291, 2316), True, 'import numpy as np\n'), ((2322, 2358), 'numpy.ones', 'np.ones', ([], {'shape': '(2, 4)', 'dtype': 'np.intc'}), '(shape=(2, 4), dtype=np.intc)\n', (2329, 2358), True, 'import numpy as np\n'), ((2440, 
2479), 'numpy.ones', 'np.ones', ([], {'shape': '(2, 3, 3)', 'dtype': 'np.intc'}), '(shape=(2, 3, 3), dtype=np.intc)\n', (2447, 2479), True, 'import numpy as np\n'), ((2481, 2521), 'numpy.ones', 'np.ones', ([], {'shape': '(3, 10)', 'dtype': 'np.float64'}), '(shape=(3, 10), dtype=np.float64)\n', (2488, 2521), True, 'import numpy as np\n'), ((2524, 2556), 'numpy.ones', 'np.ones', ([], {'shape': '(10)', 'dtype': 'np.intc'}), '(shape=10, dtype=np.intc)\n', (2531, 2556), True, 'import numpy as np\n'), ((2561, 2593), 'numpy.ones', 'np.ones', ([], {'shape': '(10)', 'dtype': 'np.intc'}), '(shape=10, dtype=np.intc)\n', (2568, 2593), True, 'import numpy as np\n'), ((2599, 2635), 'numpy.ones', 'np.ones', ([], {'shape': '(2, 3)', 'dtype': 'np.intc'}), '(shape=(2, 3), dtype=np.intc)\n', (2606, 2635), True, 'import numpy as np\n'), ((2682, 2721), 'numpy.ones', 'np.ones', ([], {'shape': '(2, 3, 3)', 'dtype': 'np.intc'}), '(shape=(2, 3, 3), dtype=np.intc)\n', (2689, 2721), True, 'import numpy as np\n'), ((2723, 2763), 'numpy.ones', 'np.ones', ([], {'shape': '(2, 10)', 'dtype': 'np.float64'}), '(shape=(2, 10), dtype=np.float64)\n', (2730, 2763), True, 'import numpy as np\n'), ((2766, 2798), 'numpy.ones', 'np.ones', ([], {'shape': '(10)', 'dtype': 'np.intc'}), '(shape=10, dtype=np.intc)\n', (2773, 2798), True, 'import numpy as np\n'), ((2803, 2835), 'numpy.ones', 'np.ones', ([], {'shape': '(10)', 'dtype': 'np.intc'}), '(shape=10, dtype=np.intc)\n', (2810, 2835), True, 'import numpy as np\n'), ((2841, 2877), 'numpy.ones', 'np.ones', ([], {'shape': '(3, 4)', 'dtype': 'np.intc'}), '(shape=(3, 4), dtype=np.intc)\n', (2848, 2877), True, 'import numpy as np\n')] |
import torch
import torch.nn as nn
from lightconvpoint.nn.deprecated.module import Module as LCPModule
from lightconvpoint.nn.deprecated.convolutions import FKAConv
from lightconvpoint.nn.deprecated.pooling import max_pool
from lightconvpoint.spatial.deprecated import sampling_quantized, knn, upsample_nearest
from lightconvpoint.utils.functional import batch_gather
class ResidualBlock(LCPModule):
    """Bottleneck residual block for point-cloud features.

    1x1 conv halves the channel width, a spatial convolution (``conv_layer``)
    operates at the reduced width, and a final 1x1 conv restores the output
    width.  The identity path is max-pooled when the block downsamples and
    projected with a 1x1 conv when the channel counts differ.
    """

    def __init__(self, in_channels, out_channels, kernel_size, conv_layer, sampling, spatial_search, ratio, neighborhood_size):
        super().__init__()
        mid_channels = in_channels // 2

        # Reduce width, apply the spatial convolution, then expand.
        self.cv0 = nn.Conv1d(in_channels, mid_channels, 1)
        self.bn0 = nn.BatchNorm1d(mid_channels)
        self.cv1 = conv_layer(
            mid_channels, mid_channels, kernel_size, bias=False,
            sampling=sampling, spatial_search=spatial_search,
            ratio=ratio, neighborhood_size=neighborhood_size)
        self.bn1 = nn.BatchNorm1d(mid_channels)
        self.cv2 = nn.Conv1d(mid_channels, out_channels, 1)
        self.bn2 = nn.BatchNorm1d(out_channels)
        self.activation = nn.ReLU()

        # Identity path: project only when the widths differ.
        if in_channels == out_channels:
            self.shortcut = nn.Identity()
        else:
            self.shortcut = nn.Conv1d(in_channels, out_channels, 1)
        self.ratio = ratio

    def forward_without_features(self, pos, support_points=None, indices=None):
        # Spatial-only pass: delegate support point / index computation
        # to the inner convolution.
        return self.cv1(None, pos)

    def forward_with_features(self, x, pos, support_points, indices):
        residual = x
        out = self.activation(self.bn0(self.cv0(x)))
        out = self.activation(self.bn1(self.cv1(out, pos, support_points, indices)))
        out = self.bn2(self.cv2(out))
        # When the block downsampled, pool the identity path to the same
        # number of points before adding.
        if residual.shape[2] != out.shape[2]:
            residual = max_pool(residual, indices)
        residual = self.shortcut(residual)
        return self.activation(out + residual)
class FKAConvNetwork(LCPModule):
    """FKAConv point-cloud network.

    Encoder: a stem convolution (``cv0``) followed by four pairs of
    ResidualBlocks; in each pair the first block downsamples (ratio 0.25) and
    doubles the channel width, the second refines at constant resolution
    (ratio 1).  With ``segmentation=True`` a decoder mirrors the encoder with
    nearest-neighbor upsampling and skip connections, emitting per-point
    scores; otherwise the deepest features are mean-pooled for classification.

    Fix: the classification head previously hard-coded ``nn.Linear(1024, ...)``
    which only matches the pooled feature width (``16 * hidden``) when
    ``hidden == 64``.  It now follows ``hidden`` (identical behavior for the
    default value).
    """
    def __init__(self, in_channels, out_channels, segmentation=False, hidden=64, conv_layer=FKAConv, sampling=sampling_quantized, neighborhood_search=knn):
        super().__init__()
        # Flag read by the lightconvpoint pipeline: spatial indices can be
        # precomputed separately (see forward_without_features).
        self.lcp_preprocess = True
        self.segmentation = segmentation
        # Stem: in_channels -> hidden at full resolution.
        self.cv0 = conv_layer(in_channels, hidden, 16, sampling=sampling,
                 neighborhood_search=neighborhood_search, ratio=1, neighborhood_size=16)
        self.bn0 = nn.BatchNorm1d(hidden)
        # Encoder stages: resnetbX0 downsamples (ratio 0.25) and widens,
        # resnetbX1 refines at constant resolution (ratio 1).
        # NOTE(review): ResidualBlock forwards its search argument to the
        # convolution as ``spatial_search=`` while cv0 above uses
        # ``neighborhood_search=`` -- confirm conv_layer accepts both names.
        self.resnetb01 = ResidualBlock(hidden, hidden, 16, conv_layer, sampling, neighborhood_search, 1, 16)
        self.resnetb10 = ResidualBlock(hidden, 2*hidden, 16, conv_layer, sampling, neighborhood_search, 0.25, 16)
        self.resnetb11 = ResidualBlock(2*hidden, 2*hidden, 16, conv_layer, sampling, neighborhood_search, 1, 16)
        self.resnetb20 = ResidualBlock(2*hidden, 4*hidden, 16, conv_layer, sampling, neighborhood_search, 0.25, 16)
        self.resnetb21 = ResidualBlock(4*hidden, 4*hidden, 16, conv_layer, sampling, neighborhood_search, 1, 16)
        self.resnetb30 = ResidualBlock(4*hidden, 8*hidden, 16, conv_layer, sampling, neighborhood_search, 0.25, 16)
        self.resnetb31 = ResidualBlock(8*hidden, 8*hidden, 16, conv_layer, sampling, neighborhood_search, 1, 16)
        self.resnetb40 = ResidualBlock(8*hidden, 16*hidden, 16, conv_layer, sampling, neighborhood_search, 0.25, 16)
        self.resnetb41 = ResidualBlock(16*hidden, 16*hidden, 16, conv_layer, sampling, neighborhood_search, 1, 16)
        if self.segmentation:
            # Decoder: 1x1 convolutions fusing upsampled features with the
            # matching encoder stage (channel-wise concatenation, hence the
            # summed input widths).
            self.cv5 = nn.Conv1d(32*hidden, 16 * hidden, 1)
            self.bn5 = nn.BatchNorm1d(16*hidden)
            self.cv3d = nn.Conv1d(24*hidden, 8 * hidden, 1)
            self.bn3d = nn.BatchNorm1d(8 * hidden)
            self.cv2d = nn.Conv1d(12 * hidden, 4 * hidden, 1)
            self.bn2d = nn.BatchNorm1d(4 * hidden)
            self.cv1d = nn.Conv1d(6 * hidden, 2 * hidden, 1)
            self.bn1d = nn.BatchNorm1d(2 * hidden)
            self.cv0d = nn.Conv1d(3 * hidden, hidden, 1)
            self.bn0d = nn.BatchNorm1d(hidden)
            self.fcout = nn.Conv1d(hidden, out_channels, 1)
        else:
            # Classification head applied after mean pooling over points.
            # The pooled feature width is 16 * hidden (was hard-coded 1024,
            # correct only for hidden == 64).
            self.fcout = nn.Linear(16 * hidden, out_channels)

        self.dropout = nn.Dropout(0.5)
        self.activation = nn.ReLU()

    def forward_without_features(self, pos, support_points=None, indices=None):
        """Precompute support points and neighborhood indices only (no features)."""
        _, _, ids_conv0 = self.cv0(None, pos)
        _, support1, ids10 = self.resnetb10(None, pos)
        _, _, ids11 = self.resnetb11(None, support1[0])
        _, support2, ids20 = self.resnetb20(None, support1[0])
        _, _, ids21 = self.resnetb21(None, support2[0])
        _, support3, ids30 = self.resnetb30(None, support2[0])
        _, _, ids31 = self.resnetb31(None, support3[0])
        _, support4, ids40 = self.resnetb40(None, support3[0])
        _, _, ids41 = self.resnetb41(None, support4[0])
        support_points = support1 + support2 + support3 + support4
        indices = ids_conv0 + ids10 + ids11 + ids20 + ids21 + ids30 + ids31 + ids40 + ids41
        if self.segmentation:
            # Decoder needs nearest-neighbor upsampling maps between
            # consecutive resolutions, appended after the encoder indices.
            ids3u = upsample_nearest(support4[0], support3[0])
            ids2u = upsample_nearest(support3[0], support2[0])
            ids1u = upsample_nearest(support2[0], support1[0])
            ids0u = upsample_nearest(support1[0], pos)
            indices += [ids3u, ids2u, ids1u, ids0u]
        return None, support_points, indices

    def forward_with_features(self, x, pos, support_points=None, indices=None):
        """Run the network on features ``x`` at positions ``pos``.

        ``support_points``/``indices`` are computed on the fly when not
        supplied (see forward_without_features for their layout).
        """
        if (support_points is None) or (indices is None):
            _, indices, support_points = self.compute_indices(pos)
        if self.segmentation:
            ids0, ids10, ids11, ids20, ids21, ids30, ids31, ids40, ids41, ids3u, ids2u, ids1u, ids0u = indices
        else:
            ids0, ids10, ids11, ids20, ids21, ids30, ids31, ids40, ids41 = indices
        support1, support2, support3, support4 = support_points
        # Encoder.
        x0 = self.activation(self.bn0(self.cv0(x, pos, pos, ids0)))
        x0 = self.resnetb01(x0, pos, pos, ids0)
        x1 = self.resnetb10(x0, pos, support1, ids10)
        x1 = self.resnetb11(x1, support1, support1, ids11)
        x2 = self.resnetb20(x1, support1, support2, ids20)
        x2 = self.resnetb21(x2, support2, support2, ids21)
        x3 = self.resnetb30(x2, support2, support3, ids30)
        x3 = self.resnetb31(x3, support3, support3, ids31)
        x4 = self.resnetb40(x3, support3, support4, ids40)
        x4 = self.resnetb41(x4, support4, support4, ids41)
        if self.segmentation:
            # Global context: broadcast the per-cloud max over all points.
            x5 = x4.max(dim=2, keepdim=True)[0].expand_as(x4)
            x4 = self.activation(self.bn5(self.cv5(torch.cat([x4, x5], dim=1))))
            # Decoder: upsample, concatenate the skip connection, fuse.
            xout = batch_gather(x4, 2, ids3u)
            xout = self.activation(self.bn3d(self.cv3d(torch.cat([xout, x3], dim=1))))
            xout = batch_gather(xout, 2, ids2u)
            xout = self.activation(self.bn2d(self.cv2d(torch.cat([xout, x2], dim=1))))
            xout = batch_gather(xout, 2, ids1u)
            xout = self.activation(self.bn1d(self.cv1d(torch.cat([xout, x1], dim=1))))
            xout = batch_gather(xout, 2, ids0u)
            xout = self.activation(self.bn0d(self.cv0d(torch.cat([xout, x0], dim=1))))
            xout = self.dropout(xout)
            xout = self.fcout(xout)
        else:
            # Classification: mean-pool over points, then the linear head.
            xout = x4.mean(dim=2)
            xout = self.dropout(xout)
            xout = self.fcout(xout)
        return xout
"torch.nn.ReLU",
"torch.nn.Dropout",
"lightconvpoint.nn.deprecated.pooling.max_pool",
"lightconvpoint.utils.functional.batch_gather",
"torch.nn.BatchNorm1d",
"torch.nn.Linear",
"torch.nn.Identity",
"torch.nn.Conv1d",
"torch.cat",
"lightconvpoint.spatial.deprecated.upsample_nearest"
] | [((577, 620), 'torch.nn.Conv1d', 'nn.Conv1d', (['in_channels', '(in_channels // 2)', '(1)'], {}), '(in_channels, in_channels // 2, 1)\n', (586, 620), True, 'import torch.nn as nn\n'), ((638, 670), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['(in_channels // 2)'], {}), '(in_channels // 2)\n', (652, 670), True, 'import torch.nn as nn\n'), ((904, 936), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['(in_channels // 2)'], {}), '(in_channels // 2)\n', (918, 936), True, 'import torch.nn as nn\n'), ((954, 998), 'torch.nn.Conv1d', 'nn.Conv1d', (['(in_channels // 2)', 'out_channels', '(1)'], {}), '(in_channels // 2, out_channels, 1)\n', (963, 998), True, 'import torch.nn as nn\n'), ((1016, 1044), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['out_channels'], {}), '(out_channels)\n', (1030, 1044), True, 'import torch.nn as nn\n'), ((1071, 1080), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (1078, 1080), True, 'import torch.nn as nn\n'), ((2276, 2298), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['hidden'], {}), '(hidden)\n', (2290, 2298), True, 'import torch.nn as nn\n'), ((4074, 4089), 'torch.nn.Dropout', 'nn.Dropout', (['(0.5)'], {}), '(0.5)\n', (4084, 4089), True, 'import torch.nn as nn\n'), ((4116, 4125), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (4123, 4125), True, 'import torch.nn as nn\n'), ((1106, 1145), 'torch.nn.Conv1d', 'nn.Conv1d', (['in_channels', 'out_channels', '(1)'], {}), '(in_channels, out_channels, 1)\n', (1115, 1145), True, 'import torch.nn as nn\n'), ((1182, 1195), 'torch.nn.Identity', 'nn.Identity', ([], {}), '()\n', (1193, 1195), True, 'import torch.nn as nn\n'), ((1672, 1698), 'lightconvpoint.nn.deprecated.pooling.max_pool', 'max_pool', (['x_short', 'indices'], {}), '(x_short, indices)\n', (1680, 1698), False, 'from lightconvpoint.nn.deprecated.pooling import max_pool\n'), ((3394, 3432), 'torch.nn.Conv1d', 'nn.Conv1d', (['(32 * hidden)', '(16 * hidden)', '(1)'], {}), '(32 * hidden, 16 * hidden, 1)\n', (3403, 3432), True, 'import torch.nn as nn\n'), 
((3454, 3481), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['(16 * hidden)'], {}), '(16 * hidden)\n', (3468, 3481), True, 'import torch.nn as nn\n'), ((3505, 3542), 'torch.nn.Conv1d', 'nn.Conv1d', (['(24 * hidden)', '(8 * hidden)', '(1)'], {}), '(24 * hidden, 8 * hidden, 1)\n', (3514, 3542), True, 'import torch.nn as nn\n'), ((3565, 3591), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['(8 * hidden)'], {}), '(8 * hidden)\n', (3579, 3591), True, 'import torch.nn as nn\n'), ((3616, 3653), 'torch.nn.Conv1d', 'nn.Conv1d', (['(12 * hidden)', '(4 * hidden)', '(1)'], {}), '(12 * hidden, 4 * hidden, 1)\n', (3625, 3653), True, 'import torch.nn as nn\n'), ((3678, 3704), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['(4 * hidden)'], {}), '(4 * hidden)\n', (3692, 3704), True, 'import torch.nn as nn\n'), ((3729, 3765), 'torch.nn.Conv1d', 'nn.Conv1d', (['(6 * hidden)', '(2 * hidden)', '(1)'], {}), '(6 * hidden, 2 * hidden, 1)\n', (3738, 3765), True, 'import torch.nn as nn\n'), ((3790, 3816), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['(2 * hidden)'], {}), '(2 * hidden)\n', (3804, 3816), True, 'import torch.nn as nn\n'), ((3841, 3873), 'torch.nn.Conv1d', 'nn.Conv1d', (['(3 * hidden)', 'hidden', '(1)'], {}), '(3 * hidden, hidden, 1)\n', (3850, 3873), True, 'import torch.nn as nn\n'), ((3898, 3920), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['hidden'], {}), '(hidden)\n', (3912, 3920), True, 'import torch.nn as nn\n'), ((3946, 3980), 'torch.nn.Conv1d', 'nn.Conv1d', (['hidden', 'out_channels', '(1)'], {}), '(hidden, out_channels, 1)\n', (3955, 3980), True, 'import torch.nn as nn\n'), ((4020, 4049), 'torch.nn.Linear', 'nn.Linear', (['(1024)', 'out_channels'], {}), '(1024, out_channels)\n', (4029, 4049), True, 'import torch.nn as nn\n'), ((4942, 4984), 'lightconvpoint.spatial.deprecated.upsample_nearest', 'upsample_nearest', (['support4[0]', 'support3[0]'], {}), '(support4[0], support3[0])\n', (4958, 4984), False, 'from lightconvpoint.spatial.deprecated import sampling_quantized, knn, 
upsample_nearest\n'), ((5005, 5047), 'lightconvpoint.spatial.deprecated.upsample_nearest', 'upsample_nearest', (['support3[0]', 'support2[0]'], {}), '(support3[0], support2[0])\n', (5021, 5047), False, 'from lightconvpoint.spatial.deprecated import sampling_quantized, knn, upsample_nearest\n'), ((5068, 5110), 'lightconvpoint.spatial.deprecated.upsample_nearest', 'upsample_nearest', (['support2[0]', 'support1[0]'], {}), '(support2[0], support1[0])\n', (5084, 5110), False, 'from lightconvpoint.spatial.deprecated import sampling_quantized, knn, upsample_nearest\n'), ((5131, 5165), 'lightconvpoint.spatial.deprecated.upsample_nearest', 'upsample_nearest', (['support1[0]', 'pos'], {}), '(support1[0], pos)\n', (5147, 5165), False, 'from lightconvpoint.spatial.deprecated import sampling_quantized, knn, upsample_nearest\n'), ((6552, 6578), 'lightconvpoint.utils.functional.batch_gather', 'batch_gather', (['x4', '(2)', 'ids3u'], {}), '(x4, 2, ids3u)\n', (6564, 6578), False, 'from lightconvpoint.utils.functional import batch_gather\n'), ((6685, 6713), 'lightconvpoint.utils.functional.batch_gather', 'batch_gather', (['xout', '(2)', 'ids2u'], {}), '(xout, 2, ids2u)\n', (6697, 6713), False, 'from lightconvpoint.utils.functional import batch_gather\n'), ((6820, 6848), 'lightconvpoint.utils.functional.batch_gather', 'batch_gather', (['xout', '(2)', 'ids1u'], {}), '(xout, 2, ids1u)\n', (6832, 6848), False, 'from lightconvpoint.utils.functional import batch_gather\n'), ((6955, 6983), 'lightconvpoint.utils.functional.batch_gather', 'batch_gather', (['xout', '(2)', 'ids0u'], {}), '(xout, 2, ids0u)\n', (6967, 6983), False, 'from lightconvpoint.utils.functional import batch_gather\n'), ((6503, 6529), 'torch.cat', 'torch.cat', (['[x4, x5]'], {'dim': '(1)'}), '([x4, x5], dim=1)\n', (6512, 6529), False, 'import torch\n'), ((6634, 6662), 'torch.cat', 'torch.cat', (['[xout, x3]'], {'dim': '(1)'}), '([xout, x3], dim=1)\n', (6643, 6662), False, 'import torch\n'), ((6769, 6797), 'torch.cat', 
'torch.cat', (['[xout, x2]'], {'dim': '(1)'}), '([xout, x2], dim=1)\n', (6778, 6797), False, 'import torch\n'), ((6904, 6932), 'torch.cat', 'torch.cat', (['[xout, x1]'], {'dim': '(1)'}), '([xout, x1], dim=1)\n', (6913, 6932), False, 'import torch\n'), ((7039, 7067), 'torch.cat', 'torch.cat', (['[xout, x0]'], {'dim': '(1)'}), '([xout, x0], dim=1)\n', (7048, 7067), False, 'import torch\n')] |
import requests
import os

# Download two sample files and store them under data/temp/ using the
# file name taken from the last component of each URL.
urls = [
    'https://www.python.org/static/community_logos/python-logo.png',
    'http://www.post.japanpost.jp/zipcode/dl/oogaki/zip/13tokyo.zip',
]

for url in urls:
    response = requests.get(url)
    print(response.headers['Content-Type'])
    # image/png for the logo, application/zip for the postal-code archive
    filename = os.path.basename(url)
    print(filename)
    # python-logo.png / 13tokyo.zip
    with open('data/temp/' + filename, 'wb') as f:
        f.write(response.content)
| [
"os.path.basename",
"requests.get"
] | [((114, 137), 'requests.get', 'requests.get', (['url_image'], {}), '(url_image)\n', (126, 137), False, 'import requests\n'), ((208, 235), 'os.path.basename', 'os.path.basename', (['url_image'], {}), '(url_image)\n', (224, 235), False, 'import os\n'), ((444, 465), 'requests.get', 'requests.get', (['url_zip'], {}), '(url_zip)\n', (456, 465), False, 'import requests\n'), ((538, 563), 'os.path.basename', 'os.path.basename', (['url_zip'], {}), '(url_zip)\n', (554, 563), False, 'import os\n')] |
"""JsonSchemaToRDF module."""
from typing import List
from rdflib.graph import Graph
import yaml
from jsonschematordf.modelldcatnofactory import create_model_element
from jsonschematordf.parsedschema import ParsedSchema
from jsonschematordf.schema import Schema
from jsonschematordf.utils import add_elements_to_graph
def json_schema_to_graph(json_schema_string: str, base_uri: str) -> Graph:
    """Serialize a JSON Schema string into an RDF Graph.

    Args:
        json_schema_string: a valid JSON Schema string.
        base_uri: base URI of the schema.

    Returns:
        an RDF Graph representing the JSON Schema using modelldcatno.

    Example:
        >>> from jsonschematordf.parse import json_schema_to_graph
        >>> json_schema_string = "{ 'Element': { 'type': 'object' } }"
        >>> base_uri = "http://uri.com"
        >>> graph = json_schema_to_graph(json_schema_string, base_uri)
    """
    parsed = json_schema_to_modelldcatno(json_schema_string, base_uri)
    # Both regular model elements and orphaned elements go into the graph.
    all_elements = list(parsed.model_elements) + list(parsed.orphan_elements)
    return add_elements_to_graph(Graph(), all_elements)
def json_schema_to_modelldcatno(json_schema_string: str, base_uri: str) -> ParsedSchema:
    """Parse a JSON Schema string into modelldcatno elements.

    Args:
        json_schema_string: A valid JSON Schema string.
        base_uri: Base URI of the schema.

    Returns:
        A ParsedSchema object containing the parsed modelldcatno ModelElements
        and orphaned elements.

    Example:
        >>> from jsonschematordf.parse import json_schema_to_modelldcatno
        >>> json_schema_string = "{ 'Element': { 'type': 'object' } }"
        >>> base_uri = "http://uri.com"
        >>> model_elements, orphan_elements = json_schema_to_modelldcatno(
        ...     json_schema_string, base_uri
        ...)
    """
    parsed_document = yaml.safe_load(json_schema_string)
    # Anything other than a mapping cannot contain schema components.
    if not isinstance(parsed_document, dict):
        return ParsedSchema()

    schema = Schema(base_uri, parsed_document)
    model_elements = []
    orphan_elements = []
    # Each top-level key is treated as the root of a component path.
    for root_name in parsed_document:
        component = json_schema_component_to_modelldcatno(schema, [root_name])
        model_elements.extend(component.model_elements)
        orphan_elements.extend(component.orphan_elements)
    return ParsedSchema(model_elements, orphan_elements)
def json_schema_component_to_modelldcatno(
    schema: Schema, path: List[str]
) -> ParsedSchema:
    """Parse a single JSON Schema component into modelldcatno elements.

    Args:
        schema: A jsonschematordf Schema object.
        path: Path to the component to be serialized.

    Returns:
        A ParsedSchema object containing the parsed modelldcatno ModelElements
        and orphaned elements.

    Example:
        >>> from jsonschematordf.parse import json_schema_component_to_modelldcatno
        >>> from jsonschematordf.schema import Schema
        >>> json_schema_string = "{ 'schemas': { 'Element': { 'type': 'object' } } }"
        >>> base_uri = "http://uri.com"
        >>> schema = Schema(base_uri, json_schema_string)
        >>> path = ["schemas", "Element"]
        >>> model_elements, orphan_elements = json_schema_component_to_modelldcatno(
        ...     schema, path
        ...)
    """
    # Map every component at this path to a model element, dropping the
    # ones the factory could not produce (falsy results).
    candidates = (
        create_model_element(component, schema)
        for component in schema.get_components_by_path_list(path)
    )
    model_elements = [element for element in candidates if element]
    return ParsedSchema(model_elements, schema.orphan_elements)
| [
"jsonschematordf.parsedschema.ParsedSchema",
"rdflib.graph.Graph",
"jsonschematordf.modelldcatnofactory.create_model_element",
"yaml.safe_load",
"jsonschematordf.schema.Schema"
] | [((1837, 1871), 'yaml.safe_load', 'yaml.safe_load', (['json_schema_string'], {}), '(json_schema_string)\n', (1851, 1871), False, 'import yaml\n'), ((2366, 2380), 'jsonschematordf.parsedschema.ParsedSchema', 'ParsedSchema', ([], {}), '()\n', (2378, 2380), False, 'from jsonschematordf.parsedschema import ParsedSchema\n'), ((3554, 3606), 'jsonschematordf.parsedschema.ParsedSchema', 'ParsedSchema', (['model_elements', 'schema.orphan_elements'], {}), '(model_elements, schema.orphan_elements)\n', (3566, 3606), False, 'from jsonschematordf.parsedschema import ParsedSchema\n'), ((1051, 1058), 'rdflib.graph.Graph', 'Graph', ([], {}), '()\n', (1056, 1058), False, 'from rdflib.graph import Graph\n'), ((1973, 1998), 'jsonschematordf.schema.Schema', 'Schema', (['base_uri', 'in_dict'], {}), '(base_uri, in_dict)\n', (1979, 1998), False, 'from jsonschematordf.schema import Schema\n'), ((2308, 2353), 'jsonschematordf.parsedschema.ParsedSchema', 'ParsedSchema', (['model_elements', 'orphan_elements'], {}), '(model_elements, orphan_elements)\n', (2320, 2353), False, 'from jsonschematordf.parsedschema import ParsedSchema\n'), ((3425, 3464), 'jsonschematordf.modelldcatnofactory.create_model_element', 'create_model_element', (['component', 'schema'], {}), '(component, schema)\n', (3445, 3464), False, 'from jsonschematordf.modelldcatnofactory import create_model_element\n')] |
from django.db import models
class DroneCategory(models.Model):
    """A category (type) that groups related drones."""
    # Human-readable category name; unique so no two categories collide.
    name = models.CharField(max_length=250, unique=True)

    class Meta:
        # Default queryset ordering: alphabetical by name.
        ordering = ('name',)

    def __str__(self):
        return self.name
class Drone(models.Model):
    """A drone registered by a user, belonging to a DroneCategory."""
    # Registering user; deleting the user also deletes their drones.
    owner = models.ForeignKey('auth.User',
        related_name='drones',
        on_delete=models.CASCADE)
    # Unique drone name.
    name = models.CharField(max_length=250, unique=True)
    # Category this drone belongs to; deleting the category deletes its drones.
    drone_category = models.ForeignKey('DroneCategory',
        related_name='drones',
        on_delete=models.CASCADE)
    manufacturing_date = models.DateTimeField()
    # Whether the drone has ever taken part in a competition.
    has_it_competed = models.BooleanField(default=False)
    # Set automatically when the row is first created.
    inserted_timestamp = models.DateTimeField(auto_now_add=True)

    class Meta:
        # Default queryset ordering: alphabetical by name.
        ordering = ('name',)

    def __str__(self):
        return self.name
class Pilot(models.Model):
    """A pilot who competes in drone races."""
    # Stored gender codes and their display labels.
    MALE = 'M'
    FEMALE = 'F'
    GENDER_CHOICES = (
        (MALE, 'Male'),
        (FEMALE, 'Female'),
    )
    # Unique, required pilot name.
    name = models.CharField(max_length=150,
        blank=False,
        unique=True,
        )
    gender = models.CharField(max_length=2,
        choices=GENDER_CHOICES,
        default=MALE)
    # Number of races this pilot has taken part in.
    races_count = models.IntegerField()
    # Set automatically when the row is first created.
    inserted_timestamp = models.DateTimeField(auto_now_add=True)

    class Meta:
        # Default queryset ordering: alphabetical by name.
        ordering = ('name',)

    def __str__(self):
        return self.name
class Competition(models.Model):
    """A single competition result: a pilot flying a drone a given distance."""
    # Competing pilot; deleting the pilot deletes their results.
    pilot = models.ForeignKey('Pilot',
        related_name='competitions',
        on_delete=models.CASCADE)
    # Drone used in this competition; deleting the drone deletes the result.
    drone = models.ForeignKey('Drone',
        on_delete=models.CASCADE)
    distance_in_feet = models.IntegerField()
    distance_achievement_date = models.DateTimeField()

    class Meta:
        # Default queryset ordering: longest distance first.
        ordering = ('-distance_in_feet',)
| [
"django.db.models.IntegerField",
"django.db.models.ForeignKey",
"django.db.models.BooleanField",
"django.db.models.DateTimeField",
"django.db.models.CharField"
] | [((77, 122), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)', 'unique': '(True)'}), '(max_length=250, unique=True)\n', (93, 122), False, 'from django.db import models\n'), ((259, 338), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""auth.User"""'], {'related_name': '"""drones"""', 'on_delete': 'models.CASCADE'}), "('auth.User', related_name='drones', on_delete=models.CASCADE)\n", (276, 338), False, 'from django.db import models\n'), ((410, 455), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)', 'unique': '(True)'}), '(max_length=250, unique=True)\n', (426, 455), False, 'from django.db import models\n'), ((477, 565), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""DroneCategory"""'], {'related_name': '"""drones"""', 'on_delete': 'models.CASCADE'}), "('DroneCategory', related_name='drones', on_delete=models.\n CASCADE)\n", (494, 565), False, 'from django.db import models\n'), ((664, 686), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (684, 686), False, 'from django.db import models\n'), ((709, 743), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (728, 743), False, 'from django.db import models\n'), ((769, 808), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (789, 808), False, 'from django.db import models\n'), ((1057, 1115), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(150)', 'blank': '(False)', 'unique': '(True)'}), '(max_length=150, blank=False, unique=True)\n', (1073, 1115), False, 'from django.db import models\n'), ((1215, 1283), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(2)', 'choices': 'GENDER_CHOICES', 'default': 'MALE'}), '(max_length=2, choices=GENDER_CHOICES, default=MALE)\n', (1231, 1283), False, 'from django.db import models\n'), ((1362, 1383), 
'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (1381, 1383), False, 'from django.db import models\n'), ((1409, 1448), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (1429, 1448), False, 'from django.db import models\n'), ((1591, 1677), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""Pilot"""'], {'related_name': '"""competitions"""', 'on_delete': 'models.CASCADE'}), "('Pilot', related_name='competitions', on_delete=models.\n CASCADE)\n", (1608, 1677), False, 'from django.db import models\n'), ((1745, 1797), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""Drone"""'], {'on_delete': 'models.CASCADE'}), "('Drone', on_delete=models.CASCADE)\n", (1762, 1797), False, 'from django.db import models\n'), ((1851, 1872), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (1870, 1872), False, 'from django.db import models\n'), ((1905, 1927), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (1925, 1927), False, 'from django.db import models\n')] |
# test_files.py
import unittest2 as unittest
from graphviz.files import File, Source
class TestBase(unittest.TestCase):
    """Invalid values for File attribute setters must be rejected."""

    def setUp(self):
        # A fresh File with default settings for every test.
        self.file = File()

    def test_format(self):
        # Unknown output formats raise ValueError mentioning 'format'.
        with self.assertRaisesRegexp(ValueError, 'format'):
            self.file.format = 'spam'

    def test_engine(self):
        # Unknown layout engines raise ValueError mentioning 'engine'.
        with self.assertRaisesRegexp(ValueError, 'engine'):
            self.file.engine = 'spam'

    def test_encoding(self):
        # Unknown encodings raise LookupError mentioning 'encoding'.
        with self.assertRaisesRegexp(LookupError, 'encoding'):
            self.file.encoding = 'spam'
class TestFile(unittest.TestCase):
    """Constructor arguments are stored, with format/engine lower-cased."""

    def test_init(self):
        created = File('name', 'dir', 'PNG', 'NEATO', 'latin1')
        observed = (created.filename, created.format,
                    created.engine, created.encoding)
        self.assertEqual(observed, ('name', 'png', 'neato', 'latin1'))
class TestNoent(unittest.TestCase):
    """Rendering with a registered-but-nonexistent engine must fail cleanly."""

    def setUp(self):
        import graphviz.files
        # Temporarily register a bogus engine name so File accepts it;
        # the executable 'spam' does not exist on the system.
        graphviz.files.ENGINES.add('spam')
        self.file = File('spam.gv', 'test-output', engine='spam')
        self.file.source = 'spam'

    def tearDown(self):
        import graphviz.files
        # Undo the registration so other tests see the original engine set.
        graphviz.files.ENGINES.discard('spam')

    def test_pipe(self):
        with self.assertRaisesRegexp(RuntimeError, 'failed to execute'):
            self.file.pipe()

    def test_render(self):
        with self.assertRaisesRegexp(RuntimeError, 'failed to execute'):
            self.file.render()
class TestSource(unittest.TestCase):
    """Source stores the raw DOT string it was constructed with."""

    def test_init(self):
        dot = 'graph { hello -> world }'
        self.assertEqual(Source(dot).source, dot)
| [
"graphviz.files.File",
"graphviz.files.Source"
] | [((166, 172), 'graphviz.files.File', 'File', ([], {}), '()\n', (170, 172), False, 'from graphviz.files import File, Source\n'), ((633, 678), 'graphviz.files.File', 'File', (['"""name"""', '"""dir"""', '"""PNG"""', '"""NEATO"""', '"""latin1"""'], {}), "('name', 'dir', 'PNG', 'NEATO', 'latin1')\n", (637, 678), False, 'from graphviz.files import File, Source\n'), ((1010, 1055), 'graphviz.files.File', 'File', (['"""spam.gv"""', '"""test-output"""'], {'engine': '"""spam"""'}), "('spam.gv', 'test-output', engine='spam')\n", (1014, 1055), False, 'from graphviz.files import File, Source\n'), ((1573, 1587), 'graphviz.files.Source', 'Source', (['source'], {}), '(source)\n', (1579, 1587), False, 'from graphviz.files import File, Source\n')] |
"""
This instrument description contains information
that is instrument-specific and abstracts out how we obtain
information from the data file
"""
#pylint: disable=invalid-name, too-many-instance-attributes, line-too-long, bare-except
from __future__ import absolute_import, division, print_function
import sys
import os
import math
import logging
import numpy as np
# Import mantid according to the application configuration
from . import ApplicationConfiguration
application_conf = ApplicationConfiguration()
# Make the configured Mantid build importable before pulling in its API.
sys.path.insert(0, application_conf.mantid_path)
import mantid.simpleapi as api
# Option to use the slow flipper logs rather than the Analyzer/Polarizer logs
USE_SLOW_FLIPPER_LOG = True
# Physical constants: Planck constant and neutron mass
# (presumably for wavelength/velocity conversions -- used elsewhere in the module).
h = 6.626e-34  # Planck constant [m^2 kg s^-1]
m = 1.675e-27  # neutron mass [kg]
def get_cross_section_label(ws, entry_name):
    """
    Return the proper cross-section label (e.g. '+-') for a workspace.

    The entry name encodes the two flipper states as 'On'/'Off' prefix and
    suffix. When the run carries PolarizerLabel/AnalyzerLabel logs, they tell
    whether ON or OFF corresponds to '+'; otherwise the entry name itself is
    returned with '_' replaced by '-'.

    :param ws: Mantid workspace whose run logs are inspected
    :param entry_name: cross-section entry name such as 'Off_On'
    """
    entry_name = str(entry_name)
    pol_is_on = entry_name.lower().startswith('on')
    ana_is_on = entry_name.lower().endswith('on')

    pol_label = ''
    ana_label = ''
    run = ws.getRun()

    # Look for logs that define whether OFF or ON is +
    if 'PolarizerLabel' in run:
        pol_id = run.getProperty("PolarizerLabel").value
        if isinstance(pol_id, np.ndarray):
            pol_id = int(pol_id[0])
        if pol_id == 1:
            pol_label = '+' if pol_is_on else '-'
        elif pol_id == 0:
            pol_label = '-' if pol_is_on else '+'

    if 'AnalyzerLabel' in run:
        ana_id = run.getProperty("AnalyzerLabel").value
        if isinstance(ana_id, np.ndarray):
            ana_id = int(ana_id[0])
        if ana_id == 1:
            ana_label = '+' if ana_is_on else '-'
        elif ana_id == 0:
            # BUG FIX: both branches previously produced '-'; mirror the
            # polarizer logic so OFF maps to '+' when the label id is 0.
            ana_label = '-' if ana_is_on else '+'

    entry_name = entry_name.replace('_', '-')
    if ana_label == '' and pol_label == '':
        return entry_name
    else:
        return '%s%s' % (pol_label, ana_label)
class Instrument(object):
    """
    Instrument class. Holds the data handling that is unique to a specific
    instrument (REF_M).
    """
    n_x_pixel = 304
    n_y_pixel = 256
    huber_x_cut = 6.5
    peak_range_offset = 50
    tolerance = 0.05
    pixel_width = 0.0007
    instrument_name = "REF_M"
    instrument_dir = "/SNS/REF_M"
    file_search_template = "/SNS/REF_M/*/nexus/REF_M_%s"
    legacy_search_template = "/SNS/REF_M/*/data/REF_M_%s"

    def __init__(self):
        # Polarizer/analyzer state and veto log names used for filtering
        self.pol_state = application_conf.POL_STATE
        self.pol_veto = application_conf.POL_VETO
        self.ana_state = application_conf.ANA_STATE
        self.ana_veto = application_conf.ANA_VETO

    def dummy_filter_cross_sections(self, ws):
        """
        Filter events according to an aggregated state log.
        :param ws: workspace to filter

            BL4A:SF:ICP:getDI
            015 (0000 1111): SF1=OFF, SF2=OFF, SF1Veto=OFF, SF2Veto=OFF
            047 (0010 1111): SF1=ON, SF2=OFF, SF1Veto=OFF, SF2Veto=OFF
            031 (0001 1111): SF1=OFF, SF2=ON, SF1Veto=OFF, SF2Veto=OFF
            063 (0011 1111): SF1=ON, SF2=ON, SF1Veto=OFF, SF2Veto=OFF
        """
        state_log = "BL4A:SF:ICP:getDI"
        states = {'Off_Off': 15,
                  'On_Off': 47,
                  'Off_On': 31,
                  'On_On': 63}
        cross_sections = []
        for pol_state in ['Off_Off', 'On_On', 'Off_On', 'On_Off']:
            try:
                _ws = api.FilterByLogValue(InputWorkspace=ws, LogName=state_log, TimeTolerance=0.1,
                                            MinimumValue=states[pol_state],
                                            MaximumValue=states[pol_state], LogBoundary='Left',
                                            OutputWorkspace='%s_entry-%s' % (ws.getRunNumber(), pol_state))
                _ws.getRun()['cross_section_id'] = pol_state
                cross_sections.append(_ws)
            except Exception:
                # Best-effort: skip cross-sections that cannot be filtered
                # (was a bare except:, which also swallowed KeyboardInterrupt)
                logging.error("Could not filter %s: %s", pol_state, sys.exc_info()[1])
        return cross_sections

    def load_data(self, file_path):
        """
        Load a data set according to the needs ot the instrument.
        Returns a WorkspaceGroup with any number of cross-sections.
        :param str file_path: path to the data file
        """
        # Be careful with legacy data
        is_legacy = file_path.endswith(".nxs")
        if is_legacy or not USE_SLOW_FLIPPER_LOG:
            base_name = os.path.basename(file_path)
            _xs_list = api.MRFilterCrossSections(Filename=file_path,
                                                  PolState=self.pol_state,
                                                  AnaState=self.ana_state,
                                                  PolVeto=self.pol_veto,
                                                  AnaVeto=self.ana_veto,
                                                  CrossSectionWorkspaces="%s_entry" % base_name)
            # Only keep good workspaces and get rid of the rejected events
            xs_list = [ws for ws in _xs_list if not ws.getRun()['cross_section_id'].value == 'unfiltered']
        else:
            ws = api.LoadEventNexus(Filename=file_path, OutputWorkspace="raw_events")
            xs_list = self.dummy_filter_cross_sections(ws)
        return xs_list

    @classmethod
    def mid_q_value(cls, ws):
        """
        Get the mid q value, at the requested wl mid-point.
        This is used when sorting out data sets and doesn't need any overwrites.
        :param workspace ws: Mantid workspace
        """
        wl = ws.getRun().getProperty('LambdaRequest').value[0]
        theta_d = api.MRGetTheta(ws)
        return 4.0*math.pi*math.sin(theta_d) / wl

    @classmethod
    def scattering_angle_from_data(cls, data_object):
        """
        Compute the scattering angle from a CrossSectionData object, in degrees.
        @param data_object: CrossSectionData object
        """
        _dirpix = data_object.configuration.direct_pixel_overwrite if data_object.configuration.set_direct_pixel else None
        _dangle0 = data_object.configuration.direct_angle_offset_overwrite if data_object.configuration.set_direct_angle_offset else None

        # MRGetTheta returns radians; convert to degrees for the caller
        return api.MRGetTheta(data_object.event_workspace,
                              SpecularPixel=data_object.configuration.peak_position,
                              DAngle0Overwrite=_dangle0,
                              DirectPixelOverwrite=_dirpix) * 180.0 / math.pi

    @classmethod
    def check_direct_beam(cls, ws):
        """
        Determine whether this data is a direct beam
        """
        try:
            return ws.getRun().getProperty("data_type").value[0] == 1
        except Exception:
            # Older files may not carry the data_type log: treat as scattering
            # (was a bare except:)
            return False

    def direct_beam_match(self, scattering, direct_beam, skip_slits=False):
        """
        Verify whether two data sets are compatible: matching requested
        wavelength and (optionally) matching slit openings.
        """
        lambda_match = math.fabs(scattering.lambda_center - direct_beam.lambda_center) < self.tolerance
        slits_match = skip_slits or (
            math.fabs(scattering.slit1_width - direct_beam.slit1_width) < self.tolerance
            and math.fabs(scattering.slit2_width - direct_beam.slit2_width) < self.tolerance
            and math.fabs(scattering.slit3_width - direct_beam.slit3_width) < self.tolerance)
        return lambda_match and slits_match

    @classmethod
    def get_info(cls, workspace, data_object):
        """
        Retrieve information that is specific to this particular instrument
        @param workspace: Mantid workspace
        @param data_object: CrossSectionData object
        """
        data = workspace.getRun()
        data_object.lambda_center = data['LambdaRequest'].value[0]
        data_object.dangle = data['DANGLE'].getStatistics().mean
        # Newer runs log the slit gaps under BL4A names; fall back to legacy names
        if 'BL4A:Mot:S1:X:Gap' in data:
            data_object.slit1_width = data['BL4A:Mot:S1:X:Gap'].value[0]
            data_object.slit2_width = data['BL4A:Mot:S2:X:Gap'].value[0]
            data_object.slit3_width = data['BL4A:Mot:S3:X:Gap'].value[0]
        else:
            data_object.slit1_width = data['S1HWidth'].value[0]
            data_object.slit2_width = data['S2HWidth'].value[0]
            data_object.slit3_width = data['S3HWidth'].value[0]
        data_object.huber_x = data['HuberX'].getStatistics().mean

        data_object.sangle = data['SANGLE'].getStatistics().mean

        # Distances logged in mm; converted to meters here
        data_object.dist_sam_det = data['SampleDetDis'].value[0]*1e-3
        data_object.dist_mod_det = data['ModeratorSamDis'].value[0]*1e-3+data_object.dist_sam_det
        data_object.dist_mod_mon = data['ModeratorSamDis'].value[0]*1e-3-2.75

        # Get these from instrument
        data_object.pixel_width = float(workspace.getInstrument().getNumberParameter("pixel-width")[0]) / 1000.0
        data_object.n_det_size_x = int(workspace.getInstrument().getNumberParameter("number-of-x-pixels")[0])  # 304
        data_object.n_det_size_y = int(workspace.getInstrument().getNumberParameter("number-of-y-pixels")[0])  # 256
        data_object.det_size_x = data_object.n_det_size_x * data_object.pixel_width  # horizontal size of detector [m]
        data_object.det_size_y = data_object.n_det_size_y * data_object.pixel_width  # vertical size of detector [m]

        # The following active area used to be taken from instrument.DETECTOR_REGION
        data_object.active_area_x = (8, 295)
        data_object.active_area_y = (8, 246)

        # Convert to standard names
        data_object.direct_pixel = data['DIRPIX'].getStatistics().mean
        data_object.angle_offset = data['DANGLE0'].getStatistics().mean

        # Get proper cross-section label
        data_object.cross_section_label = get_cross_section_label(workspace, data_object.entry_name)
        try:
            data_object.is_direct_beam = data["data_type"].value[0] == 1
        except Exception:
            # data_type log missing: assume scattering data (was a bare except:)
            data_object.is_direct_beam = False

    def integrate_detector(self, ws, specular=True):
        """
        Integrate a workspace along either the main direction (specular=False) or
        the low-resolution direction (specular=True).
        :param ws: Mantid workspace
        :param specular bool: if True, the low-resolution direction is integrated over
        """
        ws_summed = api.RefRoi(InputWorkspace=ws, IntegrateY=specular,
                               NXPixel=self.n_x_pixel, NYPixel=self.n_y_pixel,
                               ConvertToQ=False,
                               OutputWorkspace="ws_summed")
        integrated = api.Integration(ws_summed)
        integrated = api.Transpose(integrated)
        return integrated
| [
"sys.path.insert",
"mantid.simpleapi.Transpose",
"mantid.simpleapi.MRGetTheta",
"mantid.simpleapi.RefRoi",
"sys.exc_info",
"math.fabs",
"mantid.simpleapi.Integration",
"os.path.basename",
"mantid.simpleapi.MRFilterCrossSections",
"mantid.simpleapi.LoadEventNexus",
"math.sin"
] | [((525, 573), 'sys.path.insert', 'sys.path.insert', (['(0)', 'application_conf.mantid_path'], {}), '(0, application_conf.mantid_path)\n', (540, 573), False, 'import sys\n'), ((5672, 5690), 'mantid.simpleapi.MRGetTheta', 'api.MRGetTheta', (['ws'], {}), '(ws)\n', (5686, 5690), True, 'import mantid.simpleapi as api\n'), ((10303, 10452), 'mantid.simpleapi.RefRoi', 'api.RefRoi', ([], {'InputWorkspace': 'ws', 'IntegrateY': 'specular', 'NXPixel': 'self.n_x_pixel', 'NYPixel': 'self.n_y_pixel', 'ConvertToQ': '(False)', 'OutputWorkspace': '"""ws_summed"""'}), "(InputWorkspace=ws, IntegrateY=specular, NXPixel=self.n_x_pixel,\n NYPixel=self.n_y_pixel, ConvertToQ=False, OutputWorkspace='ws_summed')\n", (10313, 10452), True, 'import mantid.simpleapi as api\n'), ((10564, 10590), 'mantid.simpleapi.Integration', 'api.Integration', (['ws_summed'], {}), '(ws_summed)\n', (10579, 10590), True, 'import mantid.simpleapi as api\n'), ((10612, 10637), 'mantid.simpleapi.Transpose', 'api.Transpose', (['integrated'], {}), '(integrated)\n', (10625, 10637), True, 'import mantid.simpleapi as api\n'), ((4470, 4497), 'os.path.basename', 'os.path.basename', (['file_path'], {}), '(file_path)\n', (4486, 4497), False, 'import os\n'), ((4521, 4717), 'mantid.simpleapi.MRFilterCrossSections', 'api.MRFilterCrossSections', ([], {'Filename': 'file_path', 'PolState': 'self.pol_state', 'AnaState': 'self.ana_state', 'PolVeto': 'self.pol_veto', 'AnaVeto': 'self.ana_veto', 'CrossSectionWorkspaces': "('%s_entry' % base_name)"}), "(Filename=file_path, PolState=self.pol_state,\n AnaState=self.ana_state, PolVeto=self.pol_veto, AnaVeto=self.ana_veto,\n CrossSectionWorkspaces='%s_entry' % base_name)\n", (4546, 4717), True, 'import mantid.simpleapi as api\n'), ((5168, 5236), 'mantid.simpleapi.LoadEventNexus', 'api.LoadEventNexus', ([], {'Filename': 'file_path', 'OutputWorkspace': '"""raw_events"""'}), "(Filename=file_path, OutputWorkspace='raw_events')\n", (5186, 5236), True, 'import mantid.simpleapi as api\n'), 
((5718, 5735), 'math.sin', 'math.sin', (['theta_d'], {}), '(theta_d)\n', (5726, 5735), False, 'import math\n'), ((6255, 6419), 'mantid.simpleapi.MRGetTheta', 'api.MRGetTheta', (['data_object.event_workspace'], {'SpecularPixel': 'data_object.configuration.peak_position', 'DAngle0Overwrite': '_dangle0', 'DirectPixelOverwrite': '_dirpix'}), '(data_object.event_workspace, SpecularPixel=data_object.\n configuration.peak_position, DAngle0Overwrite=_dangle0,\n DirectPixelOverwrite=_dirpix)\n', (6269, 6419), True, 'import mantid.simpleapi as api\n'), ((6947, 7010), 'math.fabs', 'math.fabs', (['(scattering.lambda_center - direct_beam.lambda_center)'], {}), '(scattering.lambda_center - direct_beam.lambda_center)\n', (6956, 7010), False, 'import math\n'), ((7074, 7133), 'math.fabs', 'math.fabs', (['(scattering.slit1_width - direct_beam.slit1_width)'], {}), '(scattering.slit1_width - direct_beam.slit1_width)\n', (7083, 7133), False, 'import math\n'), ((7167, 7226), 'math.fabs', 'math.fabs', (['(scattering.slit2_width - direct_beam.slit2_width)'], {}), '(scattering.slit2_width - direct_beam.slit2_width)\n', (7176, 7226), False, 'import math\n'), ((7260, 7319), 'math.fabs', 'math.fabs', (['(scattering.slit3_width - direct_beam.slit3_width)'], {}), '(scattering.slit3_width - direct_beam.slit3_width)\n', (7269, 7319), False, 'import math\n'), ((4001, 4015), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (4013, 4015), False, 'import sys\n')] |
import logging
from bson import ObjectId
from flask_restful import Resource, reqparse
from sintel.db import schema
from sintel.resources.auth_utils import requires_auth
from sintel.resources.datarun import validate_signalrun_id
LOGGER = logging.getLogger(__name__)
def get_event(event_doc):
    """Serialize an Event document (and its annotations) into a JSON-ready dict."""
    annotation_docs = schema.Annotation.find(event=event_doc.id)
    comments = []
    if annotation_docs is not None:
        for annotation_doc in annotation_docs:
            comments.append({
                'id': str(annotation_doc.id),
                'text': annotation_doc.comment,
                'insert_time': annotation_doc.insert_time.isoformat(),
                'created_by': annotation_doc.created_by
            })
    return {
        'id': str(event_doc.id),
        'insert_time': event_doc.insert_time.isoformat(),
        'start_time': event_doc.start_time,
        'stop_time': event_doc.stop_time,
        'score': event_doc.severity,
        'tag': event_doc.tag,
        'datarun': str(event_doc.signalrun.id),
        'source': event_doc.source,
        'comments': comments
    }
def validate_event_id(event_id):
    """Resolve an event id string to its document.

    Returns (event_doc, 200) on success, or an error payload with status 400
    when the id is malformed or no such event exists.
    """
    try:
        oid = ObjectId(event_id)
    except Exception as exc:
        LOGGER.exception(exc)
        return {'message': str(exc)}, 400

    event_doc = schema.Event.find_one(id=oid)
    if event_doc is not None:
        return event_doc, 200

    LOGGER.exception('Event %s does not exist.', event_id)
    return {
        'message': 'Event {} does not exist'.format(event_id)
    }, 400
class Event(Resource):
    """REST resource for a single event: GET, PUT (update) and DELETE."""

    def __init__(self):
        # JSON-body parser for PUT
        parser_put = reqparse.RequestParser(bundle_errors=True)
        parser_put.add_argument('start_time', type=int, required=True,
                                location='json')
        parser_put.add_argument('stop_time', type=int, required=True,
                                location='json')
        parser_put.add_argument('score', type=float, default=0, required=True,
                                location='json')
        parser_put.add_argument('created_by', type=str, default=None,
                                required=True, location='json')
        parser_put.add_argument('tag', type=str, default='Untagged',
                                required=False, location='json')
        self.parser_put = parser_put

        # Query-string parser for DELETE
        parser_delete = reqparse.RequestParser(bundle_errors=True)
        parser_delete.add_argument('created_by', type=str, default=None,
                                   required=True, location='args')
        self.parser_delete = parser_delete

    @requires_auth
    def get(self, event_id):
        """
        Get an event by ID
        ---
        tags:
          - event
        security:
          - tokenAuth: []
        parameters:
          - name: event_id
            in: path
            schema:
              type: string
            required: true
            description: ID of the event to get
        responses:
          200:
            description: Event to be returned
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/Event'
          400:
            $ref: '#/components/responses/ErrorMessage'
          401:
            $ref: '#/components/responses/UnauthorizedError'
          500:
            $ref: '#/components/responses/ErrorMessage'
        """
        # validate event_id
        validate_result = validate_event_id(event_id)
        if validate_result[1] == 400:
            return validate_result
        event_doc = validate_result[0]

        # return result
        try:
            res = get_event(event_doc)
        except Exception as e:
            LOGGER.exception(e)
            return {'message': str(e), 'code': 500}, 500
        else:
            return res

    @requires_auth
    def put(self, event_id):
        """
        Update an event
        ---
        tags:
          - event
        security:
          - tokenAuth: []
        parameters:
          - name: event_id
            in: path
            schema:
              type: string
            required: true
            description: ID of the event to update
        requestBody:
          required: true
          content:
            application/json:
              schema:
                type: object
                properties:
                  start_time:
                    type: integer
                  stop_time:
                    type: integer
                  score:
                    type: integer
                  tag:
                    type: string
                required: ['start_time', 'stop_time', 'score']
        responses:
          200:
            description: Event after updating
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/Event'
          400:
            $ref: '#/components/responses/ErrorMessage'
          401:
            $ref: '#/components/responses/UnauthorizedError'
          500:
            $ref: '#/components/responses/ErrorMessage'
        """
        # validate event_id
        validate_result = validate_event_id(event_id)
        if validate_result[1] == 400:
            return validate_result
        event_doc = validate_result[0]

        try:
            args = self.parser_put.parse_args()
        except Exception as e:
            LOGGER.exception(str(e))
            # BUG FIX: was the set literal {'message', str(e)}, which is not
            # JSON-serializable; return a proper dict instead.
            return {'message': str(e)}, 400

        # update event
        action_modify = False
        if (args['start_time'] != event_doc.start_time
                or args['stop_time'] != event_doc.stop_time):
            action_modify = True
        event_doc.start_time = args['start_time']
        event_doc.stop_time = args['stop_time']
        # NOTE(review): get_event() serializes `event_doc.severity`; confirm
        # that `score` is a schema alias for severity and not silently ignored.
        event_doc.score = args['score']

        action_tag = False
        if args['tag'] != 'Untagged' and args['tag'] != 'None':
            if (args['tag'] != event_doc.tag):
                action_tag = True
            event_doc.tag = args['tag']

        # save and record the interactions that actually happened
        try:
            event_doc.save()
            user = args['created_by']
            if action_modify:
                doc = {
                    'event': event_doc.id,
                    'action': 'MODIFY',
                    'start_time': event_doc.start_time,
                    'stop_time': event_doc.stop_time,
                    'created_by': user
                }
                schema.EventInteraction.insert(**doc)
            if action_tag:
                doc = {
                    'event': event_doc.id,
                    'action': 'TAG',
                    'tag': event_doc.tag,
                    'created_by': user
                }
                schema.EventInteraction.insert(**doc)
            res = get_event(event_doc)
        except Exception as e:
            LOGGER.exception('Error saving event. ' + str(e))
            return {'message': str(e), 'code': 500}, 500
        else:
            return res

    @requires_auth
    def delete(self, event_id):
        """
        Delete an event
        ---
        tags:
          - event
        security:
          - tokenAuth: []
        parameters:
          - name: event_id
            in: path
            schema:
              type: string
            required: true
            description: ID of the event to get
        responses:
          200:
            description: Event to be returned
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/Event'
          400:
            $ref: '#/components/responses/ErrorMessage'
          401:
            $ref: '#/components/responses/UnauthorizedError'
          500:
            $ref: '#/components/responses/ErrorMessage'
        """
        # validate event_id
        validate_result = validate_event_id(event_id)
        if validate_result[1] == 400:
            return validate_result
        event_doc = validate_result[0]

        try:
            args = self.parser_delete.parse_args()
        except Exception as e:
            LOGGER.exception(str(e))
            # BUG FIX: was the set literal {'message', str(e)}; return a dict.
            return {'message': str(e)}, 400

        try:
            event_doc.delete()
            doc = {
                'event': event_doc.id,
                'action': 'DELETE',
                'created_by': args['created_by']
            }
            schema.EventInteraction.insert(**doc)
        except Exception as e:
            LOGGER.exception(e)
            return {'message': str(e), 'code': 500}, 500
        # NOTE(review): no explicit success payload is returned (body is null);
        # confirm that is the intended contract for DELETE.
class Events(Resource):
    """REST resource for the event collection: list (GET) and create (POST)."""

    def __init__(self):
        # NOTE(review): the GET docstring documents `datarun_id` as a query
        # parameter, but the parser reads it from the JSON body — confirm.
        parser_get = reqparse.RequestParser(bundle_errors=True)
        parser_get.add_argument('datarun_id', type=str, required=False,
                                location='json')
        self.parser_get = parser_get

        # JSON-body parser for POST
        parser_post = reqparse.RequestParser(bundle_errors=True)
        parser_post.add_argument('start_time', type=str, required=True,
                                 location='json')
        parser_post.add_argument('stop_time', type=str, required=True,
                                 location='json')
        parser_post.add_argument('datarun_id', type=str, required=True,
                                 location='json')
        parser_post.add_argument('created_by', type=str, required=True,
                                 location='json')
        parser_post.add_argument('source', type=str, default='MANUALLY_CREATED',
                                 location='json')
        parser_post.add_argument('score', type=float, default=0,
                                 location='json')
        parser_post.add_argument('tag', type=str, default=None,
                                 location='json')
        self.parser_post = parser_post

    # NOTE(review): unlike the other endpoints, this GET is not decorated with
    # @requires_auth — confirm the listing endpoint is meant to be public.
    def get(self):
        """
        Return all events of a given signalrun
        If signalrun is not given, it will return all events.
        ---
        tags:
          - event
        security:
          - tokenAuth: []
        parameters:
          - name: datarun_id
            in: query
            schema:
              type: string
            required: true
            description: ID of the signalrun to filter events (We will \
                update the name to signalrun)
        responses:
          200:
            description: A list of events of the specified signalrun
            content:
              application/json:
                schema:
                  type: object
                  properties:
                    events:
                      type: array
                      items:
                        $ref: '#/components/schemas/Event'
          400:
            $ref: '#/components/responses/ErrorMessage'
          401:
            $ref: '#/components/responses/UnauthorizedError'
          500:
            $ref: '#/components/responses/ErrorMessage'
        """
        try:
            args = self.parser_get.parse_args()
        except Exception as e:
            LOGGER.exception(str(e))
            return {'message': str(e)}, 400

        datarun_id = args['datarun_id']
        query = dict()
        # validate datarun_id
        if datarun_id is not None and datarun_id != '':
            validate_result = validate_signalrun_id(datarun_id)
            if validate_result[1] == 400:
                return validate_result
            datarun_doc = validate_result[0]
            query['signalrun'] = datarun_doc.id

        event_docs = schema.Event.find(**query).order_by('+start_time')
        if event_docs is None:
            return []
        try:
            events = [get_event(event_doc) for event_doc in event_docs]
        except Exception as e:
            LOGGER.exception(e)
            return {'message': str(e), 'code': 500}, 500
        else:
            return {'events': events}

    @requires_auth
    def post(self):
        """
        Create an event
        ---
        tags:
          - event
        security:
          - tokenAuth: []
        requestBody:
          required: true
          content:
            application/json:
              schema:
                type: object
                properties:
                  start_time:
                    type: integer
                  stop_time:
                    type: integer
                  score:
                    type: integer
                  datarun_id:
                    type: string
                    description: This is signalrun_id in fact (
                        to be fixed later).
                  created_by:
                    type: string
                  tag:
                    type: string
                  source:
                    type: string
                    enum: ["SHAPE_MATCHING", "MANUALLY_CREATED", "ORION"]
                required: ['start_time', 'stop_time', 'datarun_id',
                           'created_by']
        responses:
          200:
            description: The newly created Event
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/Event'
          400:
            $ref: '#/components/responses/ErrorMessage'
          401:
            $ref: '#/components/responses/UnauthorizedError'
          500:
            $ref: '#/components/responses/ErrorMessage'
        """
        try:
            args = self.parser_post.parse_args()
        except Exception as e:
            LOGGER.exception(str(e))
            # BUG FIX: was the set literal {'message', str(e)}, which is not
            # JSON-serializable; return a proper dict instead.
            return {'message': str(e)}, 400

        # further validate datarun
        validate_result = validate_signalrun_id(args['datarun_id'])
        if validate_result[1] == 400:
            return validate_result

        # create and return event
        try:
            doc = {
                key: args[key]
                for key in ['start_time', 'stop_time', 'tag', 'source']
                if args[key] is not None
            }
            doc['signalrun'] = args['datarun_id']
            doc['severity'] = args['score']
            signalrun_doc = schema.Signalrun.find_one(
                signalrun=args['datarun_id'])
            doc['signal'] = str(signalrun_doc.signal.id)
            event_doc = schema.Event.insert(**doc)
            doc = {
                'event': event_doc.id,
                'action': 'CREATE',
                'start_time': event_doc.start_time,
                'stop_time': event_doc.stop_time,
                'created_by': args['created_by']
            }
            schema.EventInteraction.insert(**doc)
            if event_doc.tag is not None:
                doc = {
                    'event': event_doc.id,
                    'action': 'TAG',
                    'tag': event_doc.tag,
                    'created_by': args['created_by']
                }
                schema.EventInteraction.insert(**doc)
            res = get_event(event_doc)
        except Exception as e:
            LOGGER.exception('Error creating event. ' + str(e))
            return {'message': str(e), 'code': 500}, 500
        else:
            return res
class EventInteraction(Resource):
    # REST resource listing the interaction history (create/modify/tag/...)
    # recorded against a single event.

    def __init__(self):
        # Query-string parser for GET; `action` is restricted to the known
        # interaction types.
        parser_get = reqparse.RequestParser(bundle_errors=True)
        parser_get.add_argument('event_id', type=str, required=True,
                                location='args')
        parser_get.add_argument('action', location='args',
                                choices=['DELETE', 'CREATE', 'MODIFY',
                                         'TAG', 'COMMENT'])
        self.parser_get = parser_get

    def _get_event_interaction(self, doc):
        """Serialize an EventInteraction document into a JSON-ready dict."""
        annotation_id = None
        if doc.annotation is not None:
            annotation_id = str(doc.annotation.id)
        record = {
            'id': str(doc.id),
            'event': str(doc.event.id),
            'action': doc.action,
            'tag': doc.tag,
            'annotation': annotation_id,
            'start_time': doc.start_time,
            'stop_time': doc.stop_time,
            'insert_time': doc.insert_time.isoformat(),
            'created_by': doc.created_by
        }
        return record

    @requires_auth
    def get(self):
        """
        Get event interaction history by Event ID
        ---
        tags:
          - event
        security:
          - tokenAuth: []
        parameters:
          - name: event_id
            in: query
            schema:
              type: string
            required: true
            description: ID of the event to filter interactions
          - name: action
            in: query
            schema:
              type: string
            required: false
            description: action type to filter interactions
        responses:
          200:
            description: A list of interactions of the specified event
            content:
              application/json:
                schema:
                  type: object
                  properties:
                    records:
                      type: array
                      items:
                        $ref: '#/components/schemas/EventInteraction'
          400:
            $ref: '#/components/responses/ErrorMessage'
          401:
            $ref: '#/components/responses/UnauthorizedError'
          500:
            $ref: '#/components/responses/ErrorMessage'
        """
        try:
            args = self.parser_get.parse_args()
        except Exception as e:
            LOGGER.exception(str(e))
            return {'message': str(e), 'code': 400}, 400

        # Ensure the event actually exists before querying its history
        validate_result = validate_event_id(args['event_id'])
        if validate_result[1] == 400:
            return validate_result

        query = {
            'event': ObjectId(args['event_id'])
        }
        # Optional filter on the interaction type
        if args.action is not None:
            query['action'] = args.action

        docs = schema.EventInteraction.find(**query)
        try:
            records = [self._get_event_interaction(doc) for doc in docs]
        except Exception as e:
            LOGGER.exception(e)
            return {'message': str(e), 'code': 500}, 500
        else:
            return {'records': records}
| [
"logging.getLogger",
"sintel.db.schema.Event.insert",
"flask_restful.reqparse.RequestParser",
"sintel.db.schema.Event.find_one",
"sintel.db.schema.EventInteraction.insert",
"sintel.db.schema.Signalrun.find_one",
"sintel.db.schema.Event.find",
"sintel.resources.datarun.validate_signalrun_id",
"bson.O... | [((240, 267), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (257, 267), False, 'import logging\n'), ((337, 379), 'sintel.db.schema.Annotation.find', 'schema.Annotation.find', ([], {'event': 'event_doc.id'}), '(event=event_doc.id)\n', (359, 379), False, 'from sintel.db import schema\n'), ((1305, 1334), 'sintel.db.schema.Event.find_one', 'schema.Event.find_one', ([], {'id': 'eid'}), '(id=eid)\n', (1326, 1334), False, 'from sintel.db import schema\n'), ((1174, 1192), 'bson.ObjectId', 'ObjectId', (['event_id'], {}), '(event_id)\n', (1182, 1192), False, 'from bson import ObjectId\n'), ((1621, 1663), 'flask_restful.reqparse.RequestParser', 'reqparse.RequestParser', ([], {'bundle_errors': '(True)'}), '(bundle_errors=True)\n', (1643, 1663), False, 'from flask_restful import Resource, reqparse\n'), ((2361, 2403), 'flask_restful.reqparse.RequestParser', 'reqparse.RequestParser', ([], {'bundle_errors': '(True)'}), '(bundle_errors=True)\n', (2383, 2403), False, 'from flask_restful import Resource, reqparse\n'), ((8629, 8671), 'flask_restful.reqparse.RequestParser', 'reqparse.RequestParser', ([], {'bundle_errors': '(True)'}), '(bundle_errors=True)\n', (8651, 8671), False, 'from flask_restful import Resource, reqparse\n'), ((8853, 8895), 'flask_restful.reqparse.RequestParser', 'reqparse.RequestParser', ([], {'bundle_errors': '(True)'}), '(bundle_errors=True)\n', (8875, 8895), False, 'from flask_restful import Resource, reqparse\n'), ((13569, 13610), 'sintel.resources.datarun.validate_signalrun_id', 'validate_signalrun_id', (["args['datarun_id']"], {}), "(args['datarun_id'])\n", (13590, 13610), False, 'from sintel.resources.datarun import validate_signalrun_id\n'), ((15151, 15193), 'flask_restful.reqparse.RequestParser', 'reqparse.RequestParser', ([], {'bundle_errors': '(True)'}), '(bundle_errors=True)\n', (15173, 15193), False, 'from flask_restful import Resource, reqparse\n'), ((17812, 17849), 
'sintel.db.schema.EventInteraction.find', 'schema.EventInteraction.find', ([], {}), '(**query)\n', (17840, 17849), False, 'from sintel.db import schema\n'), ((8399, 8436), 'sintel.db.schema.EventInteraction.insert', 'schema.EventInteraction.insert', ([], {}), '(**doc)\n', (8429, 8436), False, 'from sintel.db import schema\n'), ((11247, 11280), 'sintel.resources.datarun.validate_signalrun_id', 'validate_signalrun_id', (['datarun_id'], {}), '(datarun_id)\n', (11268, 11280), False, 'from sintel.resources.datarun import validate_signalrun_id\n'), ((14033, 14088), 'sintel.db.schema.Signalrun.find_one', 'schema.Signalrun.find_one', ([], {'signalrun': "args['datarun_id']"}), "(signalrun=args['datarun_id'])\n", (14058, 14088), False, 'from sintel.db import schema\n'), ((14188, 14214), 'sintel.db.schema.Event.insert', 'schema.Event.insert', ([], {}), '(**doc)\n', (14207, 14214), False, 'from sintel.db import schema\n'), ((14488, 14525), 'sintel.db.schema.EventInteraction.insert', 'schema.EventInteraction.insert', ([], {}), '(**doc)\n', (14518, 14525), False, 'from sintel.db import schema\n'), ((17682, 17708), 'bson.ObjectId', 'ObjectId', (["args['event_id']"], {}), "(args['event_id'])\n", (17690, 17708), False, 'from bson import ObjectId\n'), ((6458, 6495), 'sintel.db.schema.EventInteraction.insert', 'schema.EventInteraction.insert', ([], {}), '(**doc)\n', (6488, 6495), False, 'from sintel.db import schema\n'), ((6742, 6779), 'sintel.db.schema.EventInteraction.insert', 'schema.EventInteraction.insert', ([], {}), '(**doc)\n', (6772, 6779), False, 'from sintel.db import schema\n'), ((11478, 11504), 'sintel.db.schema.Event.find', 'schema.Event.find', ([], {}), '(**query)\n', (11495, 11504), False, 'from sintel.db import schema\n'), ((14802, 14839), 'sintel.db.schema.EventInteraction.insert', 'schema.EventInteraction.insert', ([], {}), '(**doc)\n', (14832, 14839), False, 'from sintel.db import schema\n')] |
import numpy as np
from os.path import join
from os import listdir
from .utils import *
from sklearn.preprocessing import normalize
from sklearn.preprocessing import scale
from sklearn.preprocessing import MinMaxScaler
from scipy.signal import resample
from scipy.signal import decimate
import warnings
def load_data():
    """Generator over the PaHaW corpus.

    Yields, subject by subject, a tuple ``(subject, label, age)`` where
    ``subject`` is the list of 8 task arrays (float timesteps), ``label`` is
    1 for Parkinson's ("ON") and 0 for control, and ``age`` is an int.
    Trims data with error measure (cf. data exploration).

    NOTE(review): paths to the corpus and metadata are hard-coded relative
    paths — confirm they match the deployment layout.
    """
    data_path=join("..","PaHaW","PaHaW_public")#/00026/00026__1_1.svc"
    folder_path=listdir(data_path)
    folder_path.sort()
    meta_path=join("data","PaHaW","corpus_PaHaW.csv")
    meta_data=np.loadtxt(meta_path,dtype=str,skiprows=1,delimiter=";")#skip the first line == headers
    # Column 4 holds the diagnosis ("ON" = Parkinson's), column 5 the age
    labels=list(map(lambda x: 1 if x =="ON" else 0, meta_data[:,4]))
    ages=meta_data[:,5].astype(int)
    #Subjects 46 (control), 60 (PD) and 66 (control) didn't perform the spiral !
    #data=[]
    for i,folder in enumerate(folder_path):
        subject=[]
        task_path=listdir(join(data_path,folder))
        task_path.sort()
        if len(task_path)!=8:#subject didn't perform the spiral
            #so we discard it
            continue
        #subject.append([])#add an empty array so that all tasks are on the same column number
        for task_name in task_path:
            path=join(data_path,folder,task_name)
            #load data as float (not int because we will need to standardize it afterwards)
            #and throw out the first line == number of lines in the file
            task=np.loadtxt(path, dtype=float, skiprows=1,delimiter=" ")
            if task[0][measure2index["button_status"]]!=1:#exam starts in air
                # NOTE(review): if the pen never touches the pad, k ends at the
                # last row and almost the whole task is discarded — confirm.
                for k,timestep in enumerate(task):
                    if(timestep[measure2index["button_status"]]==1):#wait for on paper button status
                        break
                #then trims the data
                task=task[k:]
            elif any(task[:,measure2index["timestamp"]]>1e7):#defect of recording (see data exploration)
                task=task[:-12]
            subject.append(task)
        yield subject,labels[i],ages[i]
## augmentation
def flip(task, axis_i):
    """
    Mirror (in place) the values of column *axis_i* of *task* around the
    value found in the first row, and return the modified array.

    :param task: 2-D array of timesteps; only column ``axis_i`` is touched
    :param axis_i: 0 or 1, the coordinate column to mirror
    :raises ValueError: if ``axis_i`` is neither 0 nor 1
    """
    # BUG FIX: the module name was misspelled ('warnigs'), raising NameError
    # on every call.
    warnings.warn("flip should be deprecated and replaced by -x or -y depending on axis")
    # BUG FIX: 'axis_i is not 0' tested object identity against an int
    # literal (a SyntaxWarning on modern Python); use equality instead.
    if axis_i != 0 and axis_i != 1:
        raise ValueError("expected 0 or 1 for value of axis_i, got {}".format(axis_i))
    axis = task[0][axis_i]
    for i, point in enumerate(task[:, axis_i]):
        if point < axis:
            task[i][axis_i] = axis + (axis - point)
        else:
            task[i][axis_i] = axis - (point - axis)
    return task
def rotate(task, delta_rotate):
    """
    Rotate the first two coordinate columns of *task* by *delta_rotate*
    radians around the first recorded point, then standardize the result.

    NOTE(review): rows are unpacked as (y, x) while the pivot is taken from
    task[0][0]/task[0][1] as if they were (x, y). Since scale() re-centers
    afterwards and a rotation about any pivot differs only by a translation,
    the output is unaffected — but confirm the intended column convention.
    """
    ref_a = task[0][0]  # rotation pivot, first coordinate of the first row
    ref_b = task[0][1]
    for idx, (row_y, row_x) in enumerate(task[:, :2]):
        offset = [row_x - ref_a, row_y - ref_b]
        radius = np.linalg.norm(offset)
        theta = np.angle(offset[0] + offset[1] * 1j)  # complex form x + i*y
        task[idx][1] = np.cos(theta + delta_rotate) * radius  # new x
        task[idx][0] = np.sin(theta + delta_rotate) * radius  # new y
    return scale(task, axis=0)  # recenters the task
#rotated=rotate_(task.copy(),np.pi/10)
# NOTE(review): the triple-quoted block below is dead code — a bare string
# literal with no runtime effect — apparently kept as a scratch pad for
# further augmentation ideas (flips, translation, zoom).
"""
h_flip=horizontal_flip(task.copy())
v_flip=vertical_flip(task.copy())
double_flip=horizontal_flip(v_flip.copy())

translation=np.random.rand()-0.5#because the std is one
translated=task.copy()
translated[:,0]+=translation
translated[:,1]+=translation
#~ match the translation scale
#as the standardized data ranges ~ from -2 to 2
zoom_factor=np.random.uniform(0.8,1.2)
zoomed=task.copy()
zoomed[:,0]*=zoom_factor
zoomed[:,1]*=zoom_factor"""
## preprocessing
def compute_movement(data):
    """Compute movement: replace each timestep by the difference to the next
    one (in place), keeping the product of consecutive button statuses.
    Transforms data as Zhang et al. (cf Report #5).
    Each task loses its last timestep.
    """
    print("computing movement\n")
    button_i=measure2index["button_status"]
    for i,task in enumerate(data):
        # NOTE: task aliases data[i]; at step t only rows < t have been
        # overwritten, so task[t] and task[t+1] are still original values
        # when read below.
        for t in range(len(task)-1):
            # button stays "on paper" (1) only if both endpoints were on paper
            button=task[t+1][button_i]*task[t][button_i]
            data[i][t]=task[t+1]-task[t]
            data[i][t][button_i]=button
        data[i]=data[i][:-1]#throw out the last point
    return data
def task_selection(data, task_i, newhandpd=False):
    """Optionally restrict *data* to a single task.

    Set `task_i` to None to keep all tasks (early fusion training); otherwise
    pass the desired task index (cf. task2index). With `task_i` None and
    `newhandpd` True, task_i is treated as -1 and the data is left untouched.
    """
    if task_i is None:
        if newhandpd:
            print("setting task_i to -1")
            task_i = -1
        else:
            print("task_i is None so we will use all tasks to train the model")
    else:
        print("\ntask index, name")
        print(task_i, index2task[task_i])
        # keep only the requested task for every subject
        data = [subject[task_i] for subject in data]
    print("len(data), len(data[0]) :")
    print(len(data), len(data[0]))
    return data
def compute_speed_accel(data):
    """Append instantaneous speed and acceleration columns to every timestep.

    Intended for single-task training: each sequence gains two extra columns
    and loses its first two timesteps (where speed/accel are undefined).
    Mutates ``data`` in place and returns it.
    """
    print("computing speed and acceleration")
    for idx, task in enumerate(data):
        n = len(task)
        speed = np.zeros((n - 1, 1))
        for t in range(n - 1):
            # magnitude of the displacement between consecutive (y, x) points
            speed[t][0] = np.linalg.norm(task[t + 1][:2] - task[t][:2])
        accel = np.zeros((len(speed) - 1, 1))
        for t in range(len(speed) - 1):
            accel[t][0] = speed[t + 1] - speed[t]
        # the first speed sample has no matching acceleration, so drop it
        extra_cols = np.concatenate((speed[1:], accel), axis=1)
        # align with the original rows by discarding the first two timesteps
        data[idx] = np.concatenate((task[2:], extra_cols), axis=1)
    return data
# Hand-curated corrections for the PaHaW stroke segmentation, indexed by task
# (0 = spiral, 1 = "l", 2 = "le", 3 = "les").

# Subjects whose recording ends with a spurious in-air stroke after the last
# letter; that trailing stroke gets dropped.
last_stroke_in_air_index=[[],#spiral
                    [4, 36, 71],#l
                    [11, 14, 16, 42],#le
                    [1, 13, 14, 20, 54]#les
                    ]
# (subject, stroke) pairs that are not actual letters (hesitations, pen
# rests, ... -- presumably; curated by hand) and must be merged or removed.
non_letters_indexes=[[],#spiral
                 [(22,1), (26,2), (36,5), (37,1), (41,4), (46,4), (48,1),(3,4),
                  (3,2),(6,5), (6,3), (14,6), (14,4),(14,2), (16,6), (16,4), (16,2), (21,5), (71,6), (71,2)],#l
                 [(3,4), (6,5), (6,4), (6,2), (9,4), (9,3), (11,5), (12,1), (13, 1),
                 (14, 6), (14, 1), (16, 5), (18, 3), (18, 2), (18, 1), (20, 3), (26, 2),
                 (26, 1), (27, 4), (41, 5), (41, 2), (42, 7), (42, 5), (42, 3), (65, 5), (65, 3)],#le
                 [(1, 7),(1, 6),(3, 4),(6, 4),(6, 1),(9, 1),(13, 5),(14, 10), (14, 9), (14, 8), (14, 7),(14, 4),(14, 2),
                 (18, 4), (18, 3), (18, 2), (18, 1),(20, 8),(20, 6),(20, 4),(20, 2),(23, 4),(26, 4),(26, 1),(38, 3),
                 (48, 4),(50, 4),(54, 9),(54, 7),(54, 5),(54, 3),(54, 1),(62, 4),(65, 6),(65, 4),(65, 1)]#les
                 ]
# Subjects who produced more repetitions than expected; the extra ones are
# popped off the end (cf. LetterSplit / DiscardNonLetters below).
too_many_letters_indexes=[[],#spiral
                      [12, 21, 23, 44, 67],#l
                      [],#le
                      [1,37,62]#les
                      ]
def LetterSplit(data,task_i):
    """Merge consecutive stroke pairs into letters, then apply the hand-curated
    fixes so every subject ends up with exactly 5 letters.

    `data` is a list of subjects, each a list of stroke arrays (as produced by
    the paper/air split); strokes are paired (presumably on-paper stroke plus
    the following in-air stroke -- TODO confirm).  `task_i` selects which
    correction tables to use.  Mutates `data` in place and returns it.
    """
    print("Merging strokes into letters")
    for j in range(len(data)):
        tmp=[]
        for i in range(0,len(data[j]),2):
            try :
                data[j][i+1]
            except IndexError:
                # odd number of strokes: keep the trailing stroke as-is
                tmp.append(data[j][i])
            else:
                # fuse stroke i with stroke i+1 along the time axis
                tmp.append(np.concatenate((data[j][i],data[j][i+1]),axis=0))
        data[j]=tmp
    def pop(i,j):
        # merge stroke j of subject i into the previous stroke, then remove it
        data[i][j-1]=np.concatenate((data[i][j-1],data[i][j]))
        data[i].pop(j)
    for i,j in non_letters_indexes[task_i]:
        pop(i,j)
    for i in too_many_letters_indexes[task_i]:
        data[i].pop()
    # after the fixes every subject must have exactly 5 letters
    assert [i for i,s in enumerate(data) if len(s) != 5]==[]
    return data
def DiscardNonLetters(data,task_i):
    """Remove hand-curated non-letter strokes so each subject keeps exactly 9
    strokes.  Mutates `data` in place and returns it.

    Note the 2*j mapping below: each curated index j refers to a letter
    position, which corresponds to strokes 2j and 2j+1 in the raw stroke list.
    """
    print("discarding non letters from stroke list")
    for i,j in non_letters_indexes[task_i]:
        if 2*j+1<len(data[i]):
            data[i].pop(2*j+1)
        data[i].pop(2*j)
    for i in too_many_letters_indexes[task_i]:#did 6 l instead of 5
        data[i].pop()
        data[i].pop()
    for i in last_stroke_in_air_index[task_i]:#in air stroke after last l
        data[i].pop()
    # after the fixes every subject must have exactly 9 strokes
    assert [i for i,s in enumerate(data) if len(s) != 9]==[]
    return data
def massage_data(data,task_i,compute_speed_accel_,compute_movement_,downsampling_factor,
        window_size,paper_air_split=False,newhandpd=False,max_len=None,letter_split=False,discard_non_letters=False,pad_subs=False,trim=False):
    """
    returns data
    set `task_i` to None if you want to train the model on all tasks at once (i.e. early fusion)
    Else set `task_i` to the desired task index (cf. task2index)
    compute_movement Transforms data as Zhang et al. (cf Report #5)
    Set `downsampling_factor` to `1` if you don't want to downsample
    Set `window_size` to `None` if you don't want to split data into subsequence of fixed length
    Set `paper_air_split` to `False` if you don't want to split data into strokes
    Set `max_len` to zero-pad sequences up to a fixed length (`trim=True` also
    truncates longer ones); `pad_subs` pads the number of subsequences per
    subject up to `max_strokes[task_i]`
    `letter_split` / `discard_non_letters` apply the hand-curated stroke fixes
    """
    data=task_selection(data,task_i,newhandpd)
    if compute_speed_accel_:
        data=compute_speed_accel(data)
    elif compute_movement_:
        data=compute_movement(data)
    else:
        print("\nneither speed nor movement was computed (i.e. data was not transformed)\n")
    ## Split in subsequence (or not)
    #Set `window_size` to `None` if you don't want to split data into subsequence of fixed length
    if task_i is not None:
        overlap=90  # hard-coded overlap between consecutive windows, in timesteps
        if window_size is not None:
            print("\nsplitting data into subsequences")
            for i,task in enumerate(data):
                data[i]=[task[w:w+window_size] for w in range(0,len(task)-window_size,window_size-overlap)]
            print("len(data), data[0].shape, total n° of subsequences (i.e. training examples) :")
            print(len(data),",",len(data[0]),len(data[0][0]),len(data[0][0][0]),",",sum([len(subs) for subs in data]))
        elif paper_air_split:
            print("\nsplitting data into strokes")
            for j, task in enumerate(data):
                changes = []
                # a stroke boundary is wherever the pen goes up or down
                for i in range(len(task)-1):
                    if task[i][measure2index["button_status"]]!=task[i+1][measure2index["button_status"]]:
                        changes.append(i+1)
                task=np.split(task,changes)
                data[j]=task
            if letter_split:#todo : rename in token split
                data=LetterSplit(data,task_i)
            elif discard_non_letters:
                data=DiscardNonLetters(data,task_i)
            print("len(data), data[0].shape, total n° of subsequences (i.e. training examples) :")
            print(len(data),",",len(data[0]),len(data[0][0]),len(data[0][0][0]),",",sum([len(subs) for subs in data]))
        else:
            print("the task is represented as one single sequence (i.e. data was not transformed)")
    if window_size is not None or paper_air_split or task_i is None:#subsequences or multiple tasks
        print('computing global means')
        for i,subject in enumerate(data):
            for j,sub in enumerate(subject):
                #removes t0 from each timestamps so the time stamp measure represents the length of the exams
                data[i][j][:,measure2index["timestamp"]]-=data[i][j][0,measure2index["timestamp"]]
        if task_i is None:
            #computes overall measures and stds per task
            data=np.asarray(data)
            means,stds=[],[]
            for task in range(data.shape[1]):
                flat=flat_list(data[:,task])
                means.append(np.mean(flat,axis=0)[measure2index["timestamp"]])
                stds.append(np.std(flat,axis=0)[measure2index["timestamp"]])
        else:
            #computes overall measures and stds
            flat=np.asarray(flat_list(flat_list(data)))
            means,stds=np.mean(flat,axis=0)[measure2index["timestamp"]],np.std(flat,axis=0)[measure2index["timestamp"]]
        print("scaling")
        for i,subject in enumerate(data):
            for j,sub in enumerate(subject):
                data[i][j]=scale(sub,axis=0)
                #keep the button_status unscaled
                data[i][j][:,[measure2index["button_status"]]]=sub[:,[measure2index["button_status"]]]
                #globally scale the timestamp
                if task_i is None:
                    data[i][j][:,[measure2index["timestamp"]]]=(sub[:,[measure2index["timestamp"]]]-means[j])/stds[j]
                else:
                    data[i][j][:,[measure2index["timestamp"]]]=(sub[:,[measure2index["timestamp"]]]-means)/stds
                if downsampling_factor != 1:
                    if i ==0 and j==0:
                        print("and downsampling")
                    data[i][j]=decimate(data[i][j], downsampling_factor,axis=0)#then downsample
                    #rounds the button status because decimate applies a filter
                    data[i][j][:,[measure2index["button_status"]]]=[[round(b[0])] for b in data[i][j][:,[measure2index["button_status"]]]]
    else:
        print('computing global means')
        for i in range(len(data)):
            #removes t0 from each timestamps so the time stamp measure represents the length of the exams
            data[i][:,measure2index["timestamp"]]-=data[i][0,measure2index["timestamp"]]
        #computes overall measures and stds
        flat=np.asarray(flat_list(data))
        means,stds=np.mean(flat,axis=0)[measure2index["timestamp"]],np.std(flat,axis=0)[measure2index["timestamp"]]
        ## Scale then downsample (or not) then concatenate task id (or not)
        print("scaling")
        for i,subject in enumerate(data):
            data[i]=scale(subject,axis=0)
            #keep the button_status unscaled
            data[i][:,[measure2index["button_status"]]]=subject[:,[measure2index["button_status"]]]
            #globally scale the timestamp
            data[i][:,[measure2index["timestamp"]]]=(subject[:,[measure2index["timestamp"]]]-means)/stds
            if downsampling_factor != 1:
                if i ==0:
                    print("and downsampling")
                data[i]=decimate(data[i], downsampling_factor,axis=0)#then downsample
                #rounds the button status because decimate applies a filter
                data[i][:,[measure2index["button_status"]]]=[[round(b[0])] for b in data[i][:,[measure2index["button_status"]]]]
    if max_len is not None:
        print("padding data at {} timesteps. Trimming : {} ".format(max_len,trim))
        # NOTE(review): in every branch below, sequences longer than max_len are
        # left untouched when trim is False -- confirm this is intended
        if task_i is None :
            for i,subject in enumerate(data):
                for j,task in enumerate(subject):#task
                    if len(task) > max_len[j]:
                        if trim:
                            data[i][j]=task[:max_len[j]]
                    else:
                        data[i][j]=np.concatenate((task,np.zeros(shape=(max_len[j]-len(task),task.shape[1]))))
        elif window_size is not None or paper_air_split :
            for i,subject in enumerate(data):
                for j,sub in enumerate(subject):#sub
                    if len(sub) > max_len:
                        if trim:
                            data[i][j]=sub[:max_len]
                    else:
                        data[i][j]=np.concatenate((sub,np.zeros(shape=(max_len-len(sub),sub.shape[1]))))
                if pad_subs:
                    if i == 0:
                        print("padding # of subsequences to",max_strokes[task_i])
                    for _ in range(max_strokes[task_i]-len(subject)):
                        data[i].append(np.zeros(shape=(max_len,sub.shape[1])))
        else:#only one task
            for i,task in enumerate(data):
                if len(task) > max_len:
                    if trim:
                        data[i]=task[:max_len]
                else:
                    data[i]=np.concatenate((task,np.zeros(shape=(max_len-len(task),task.shape[1]))))
    print("converting data to numpy array")
    data=np.asarray(data)
    print("data shape :",data.shape)
    return data
| [
"numpy.mean",
"os.listdir",
"numpy.std",
"numpy.asarray",
"os.path.join",
"numpy.angle",
"scipy.signal.decimate",
"numpy.split",
"numpy.zeros",
"numpy.cos",
"numpy.concatenate",
"numpy.linalg.norm",
"numpy.sin",
"numpy.loadtxt",
"sklearn.preprocessing.scale"
] | [((478, 513), 'os.path.join', 'join', (['""".."""', '"""PaHaW"""', '"""PaHaW_public"""'], {}), "('..', 'PaHaW', 'PaHaW_public')\n", (482, 513), False, 'from os.path import join\n'), ((551, 569), 'os.listdir', 'listdir', (['data_path'], {}), '(data_path)\n', (558, 569), False, 'from os import listdir\n'), ((608, 649), 'os.path.join', 'join', (['"""data"""', '"""PaHaW"""', '"""corpus_PaHaW.csv"""'], {}), "('data', 'PaHaW', 'corpus_PaHaW.csv')\n", (612, 649), False, 'from os.path import join\n'), ((662, 721), 'numpy.loadtxt', 'np.loadtxt', (['meta_path'], {'dtype': 'str', 'skiprows': '(1)', 'delimiter': '""";"""'}), "(meta_path, dtype=str, skiprows=1, delimiter=';')\n", (672, 721), True, 'import numpy as np\n'), ((3047, 3066), 'sklearn.preprocessing.scale', 'scale', (['task'], {'axis': '(0)'}), '(task, axis=0)\n', (3052, 3066), False, 'from sklearn.preprocessing import scale\n'), ((15678, 15694), 'numpy.asarray', 'np.asarray', (['data'], {}), '(data)\n', (15688, 15694), True, 'import numpy as np\n'), ((2810, 2832), 'numpy.linalg.norm', 'np.linalg.norm', (['vector'], {}), '(vector)\n', (2824, 2832), True, 'import numpy as np\n'), ((2847, 2885), 'numpy.angle', 'np.angle', (['(vector[0] + vector[1] * 1.0j)'], {}), '(vector[0] + vector[1] * 1.0j)\n', (2855, 2885), True, 'import numpy as np\n'), ((5627, 5669), 'numpy.concatenate', 'np.concatenate', (['(speed[1:], accel)'], {'axis': '(1)'}), '((speed[1:], accel), axis=1)\n', (5641, 5669), True, 'import numpy as np\n'), ((5724, 5771), 'numpy.concatenate', 'np.concatenate', (['(task[2:], speed_accel)'], {'axis': '(1)'}), '((task[2:], speed_accel), axis=1)\n', (5738, 5771), True, 'import numpy as np\n'), ((7181, 7225), 'numpy.concatenate', 'np.concatenate', (['(data[i][j - 1], data[i][j])'], {}), '((data[i][j - 1], data[i][j]))\n', (7195, 7225), True, 'import numpy as np\n'), ((1039, 1062), 'os.path.join', 'join', (['data_path', 'folder'], {}), '(data_path, folder)\n', (1043, 1062), False, 'from os.path import join\n'), 
((1355, 1389), 'os.path.join', 'join', (['data_path', 'folder', 'task_name'], {}), '(data_path, folder, task_name)\n', (1359, 1389), False, 'from os.path import join\n'), ((1570, 1626), 'numpy.loadtxt', 'np.loadtxt', (['path'], {'dtype': 'float', 'skiprows': '(1)', 'delimiter': '""" """'}), "(path, dtype=float, skiprows=1, delimiter=' ')\n", (1580, 1626), True, 'import numpy as np\n'), ((2941, 2969), 'numpy.cos', 'np.cos', (['(angle + delta_rotate)'], {}), '(angle + delta_rotate)\n', (2947, 2969), True, 'import numpy as np\n'), ((2998, 3026), 'numpy.sin', 'np.sin', (['(angle + delta_rotate)'], {}), '(angle + delta_rotate)\n', (3004, 3026), True, 'import numpy as np\n'), ((5316, 5361), 'numpy.linalg.norm', 'np.linalg.norm', (['(task[t + 1][:2] - task[t][:2])'], {}), '(task[t + 1][:2] - task[t][:2])\n', (5330, 5361), True, 'import numpy as np\n'), ((11109, 11125), 'numpy.asarray', 'np.asarray', (['data'], {}), '(data)\n', (11119, 11125), True, 'import numpy as np\n'), ((13375, 13397), 'sklearn.preprocessing.scale', 'scale', (['subject'], {'axis': '(0)'}), '(subject, axis=0)\n', (13380, 13397), False, 'from sklearn.preprocessing import scale\n'), ((11779, 11797), 'sklearn.preprocessing.scale', 'scale', (['sub'], {'axis': '(0)'}), '(sub, axis=0)\n', (11784, 11797), False, 'from sklearn.preprocessing import scale\n'), ((13115, 13136), 'numpy.mean', 'np.mean', (['flat'], {'axis': '(0)'}), '(flat, axis=0)\n', (13122, 13136), True, 'import numpy as np\n'), ((13164, 13184), 'numpy.std', 'np.std', (['flat'], {'axis': '(0)'}), '(flat, axis=0)\n', (13170, 13184), True, 'import numpy as np\n'), ((13826, 13872), 'scipy.signal.decimate', 'decimate', (['data[i]', 'downsampling_factor'], {'axis': '(0)'}), '(data[i], downsampling_factor, axis=0)\n', (13834, 13872), False, 'from scipy.signal import decimate\n'), ((7072, 7124), 'numpy.concatenate', 'np.concatenate', (['(data[j][i], data[j][i + 1])'], {'axis': '(0)'}), '((data[j][i], data[j][i + 1]), axis=0)\n', (7086, 7124), True, 
'import numpy as np\n'), ((9991, 10014), 'numpy.split', 'np.split', (['task', 'changes'], {}), '(task, changes)\n', (9999, 10014), True, 'import numpy as np\n'), ((11543, 11564), 'numpy.mean', 'np.mean', (['flat'], {'axis': '(0)'}), '(flat, axis=0)\n', (11550, 11564), True, 'import numpy as np\n'), ((11592, 11612), 'numpy.std', 'np.std', (['flat'], {'axis': '(0)'}), '(flat, axis=0)\n', (11598, 11612), True, 'import numpy as np\n'), ((12447, 12496), 'scipy.signal.decimate', 'decimate', (['data[i][j]', 'downsampling_factor'], {'axis': '(0)'}), '(data[i][j], downsampling_factor, axis=0)\n', (12455, 12496), False, 'from scipy.signal import decimate\n'), ((11275, 11296), 'numpy.mean', 'np.mean', (['flat'], {'axis': '(0)'}), '(flat, axis=0)\n', (11282, 11296), True, 'import numpy as np\n'), ((11353, 11373), 'numpy.std', 'np.std', (['flat'], {'axis': '(0)'}), '(flat, axis=0)\n', (11359, 11373), True, 'import numpy as np\n'), ((15275, 15314), 'numpy.zeros', 'np.zeros', ([], {'shape': '(max_len, sub.shape[1])'}), '(shape=(max_len, sub.shape[1]))\n', (15283, 15314), True, 'import numpy as np\n')] |
import NBAStatsScraper

if __name__ == "__main__":
    # Instantiate the scraper application and start it.
    App = NBAStatsScraper.NBAStatsScraper().run()
"NBAStatsScraper.NBAStatsScraper"
] | [((60, 93), 'NBAStatsScraper.NBAStatsScraper', 'NBAStatsScraper.NBAStatsScraper', ([], {}), '()\n', (91, 93), False, 'import NBAStatsScraper\n')] |
import pandas as pd
from sqlalchemy import Column
from sqlalchemy.sql.sqltypes import Integer, String
from datapipe.compute import run_pipeline
from datapipe.core_steps import UpdateExternalTable
from datapipe.datatable import DataStore
from datapipe.store.database import DBConn, TableStoreDB
from datapipe.compute import Table, Catalog, Pipeline
from datapipe.run_config import RunConfig
from .util import assert_df_equal
def test_external_table_updater_filter(dbconn: DBConn):
    """A plain pipeline run syncs the external table completely, and a run
    filtered to composite_id_1 == 2 must leave the previously synced rows
    visible through the catalog."""
    meta = DBConn(dbconn.connstr, dbconn.schema)
    store = TableStoreDB(
        dbconn=dbconn,
        name="test_data",
        data_sql_schema=[
            Column("composite_id_1", Integer(), primary_key=True),
            Column("composite_id_2", Integer(), primary_key=True),
            Column("data", String()),
        ],
    )
    source_df = pd.DataFrame({
        "composite_id_1": [1, 1, 2, 2],
        "composite_id_2": [3, 4, 5, 6],
        "data": ["a", "b", "c", "d"],
    })
    catalog = Catalog({"test": Table(store=store)})
    pipeline = Pipeline([UpdateExternalTable(output="test")])
    ds = DataStore(meta)
    store.insert_rows(source_df)
    key_cols = ["composite_id_1", "composite_id_2"]
    # Unfiltered run: the whole table must come through.
    run_pipeline(ds, catalog, pipeline)
    assert_df_equal(catalog.get_datatable(ds, "test").get_data(),
                    source_df, index_cols=key_cols)
    # Filtered run: restricting the sync must not drop the other rows.
    run_pipeline(ds, catalog, pipeline,
                 run_config=RunConfig(filters={"composite_id_1": 2}))
    assert_df_equal(catalog.get_datatable(ds, "test").get_data(),
                    source_df, index_cols=key_cols)
| [
"datapipe.store.database.DBConn",
"datapipe.datatable.DataStore",
"sqlalchemy.sql.sqltypes.Integer",
"sqlalchemy.sql.sqltypes.String",
"datapipe.compute.run_pipeline",
"datapipe.core_steps.UpdateExternalTable",
"datapipe.compute.Table",
"pandas.DataFrame",
"datapipe.run_config.RunConfig"
] | [((500, 537), 'datapipe.store.database.DBConn', 'DBConn', (['dbconn.connstr', 'dbconn.schema'], {}), '(dbconn.connstr, dbconn.schema)\n', (506, 537), False, 'from datapipe.store.database import DBConn, TableStoreDB\n'), ((845, 958), 'pandas.DataFrame', 'pd.DataFrame', (["{'composite_id_1': [1, 1, 2, 2], 'composite_id_2': [3, 4, 5, 6], 'data': [\n 'a', 'b', 'c', 'd']}"], {}), "({'composite_id_1': [1, 1, 2, 2], 'composite_id_2': [3, 4, 5, 6\n ], 'data': ['a', 'b', 'c', 'd']})\n", (857, 958), True, 'import pandas as pd\n'), ((1164, 1186), 'datapipe.datatable.DataStore', 'DataStore', (['meta_dbconn'], {}), '(meta_dbconn)\n', (1173, 1186), False, 'from datapipe.datatable import DataStore\n'), ((1229, 1264), 'datapipe.compute.run_pipeline', 'run_pipeline', (['ds', 'catalog', 'pipeline'], {}), '(ds, catalog, pipeline)\n', (1241, 1264), False, 'from datapipe.compute import run_pipeline\n'), ((1443, 1483), 'datapipe.run_config.RunConfig', 'RunConfig', ([], {'filters': "{'composite_id_1': 2}"}), "(filters={'composite_id_1': 2})\n", (1452, 1483), False, 'from datapipe.run_config import RunConfig\n'), ((1488, 1542), 'datapipe.compute.run_pipeline', 'run_pipeline', (['ds', 'catalog', 'pipeline'], {'run_config': 'config'}), '(ds, catalog, pipeline, run_config=config)\n', (1500, 1542), False, 'from datapipe.compute import run_pipeline\n'), ((1025, 1048), 'datapipe.compute.Table', 'Table', ([], {'store': 'test_store'}), '(store=test_store)\n', (1030, 1048), False, 'from datapipe.compute import Table, Catalog, Pipeline\n'), ((1091, 1125), 'datapipe.core_steps.UpdateExternalTable', 'UpdateExternalTable', ([], {'output': '"""test"""'}), "(output='test')\n", (1110, 1125), False, 'from datapipe.core_steps import UpdateExternalTable\n'), ((681, 690), 'sqlalchemy.sql.sqltypes.Integer', 'Integer', ([], {}), '()\n', (688, 690), False, 'from sqlalchemy.sql.sqltypes import Integer, String\n'), ((748, 757), 'sqlalchemy.sql.sqltypes.Integer', 'Integer', ([], {}), '()\n', (755, 757), False, 
'from sqlalchemy.sql.sqltypes import Integer, String\n'), ((805, 813), 'sqlalchemy.sql.sqltypes.String', 'String', ([], {}), '()\n', (811, 813), False, 'from sqlalchemy.sql.sqltypes import Integer, String\n')] |
import falcon
import six
from monitorrent.settings_manager import SettingsManager
# noinspection PyUnusedLocal
class SettingsNotifyOn(object):
    """Falcon resource for reading and updating external notification levels."""

    def __init__(self, settings_manager):
        """
        :type settings_manager: SettingsManager
        """
        self.settings_manager = settings_manager

    def on_get(self, req, resp):
        """Respond with the list of currently enabled notification levels."""
        resp.json = self.settings_manager.get_external_notifications_levels()

    def on_put(self, req, resp):
        """Validate and store the posted list of notification levels."""
        levels = req.json
        if levels is None or len(levels) == 0:
            raise falcon.HTTPBadRequest('BodyRequired', 'Expecting not empty JSON body')
        is_string_list = isinstance(levels, list) and all(
            isinstance(item, six.text_type) for item in levels)
        if not is_string_list:
            raise falcon.HTTPBadRequest('ArrayOfStringExpected', 'Expecting list of string values')
        known = self.settings_manager.get_existing_external_notifications_levels()
        unknown = [level for level in levels if level not in known]
        if unknown:
            raise falcon.HTTPBadRequest('UnknownLevels', '{0} are unknow levels'.format(unknown))
        self.settings_manager.set_external_notifications_levels(levels)
        resp.status = falcon.HTTP_NO_CONTENT
| [
"falcon.HTTPBadRequest"
] | [((523, 593), 'falcon.HTTPBadRequest', 'falcon.HTTPBadRequest', (['"""BodyRequired"""', '"""Expecting not empty JSON body"""'], {}), "('BodyRequired', 'Expecting not empty JSON body')\n", (544, 593), False, 'import falcon\n'), ((717, 802), 'falcon.HTTPBadRequest', 'falcon.HTTPBadRequest', (['"""ArrayOfStringExpected"""', '"""Expecting list of string values"""'], {}), "('ArrayOfStringExpected',\n 'Expecting list of string values')\n", (738, 802), False, 'import falcon\n')] |
# Generated by Django 3.2.9 on 2021-11-07 16:07
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('rumergy', '0002_auto_20211107_1520'),
]
operations = [
migrations.AlterField(
model_name='meter',
name='comments',
field=models.CharField(default='No comment provided', max_length=200),
),
]
| [
"django.db.models.CharField"
] | [((337, 400), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""No comment provided"""', 'max_length': '(200)'}), "(default='No comment provided', max_length=200)\n", (353, 400), False, 'from django.db import migrations, models\n')] |
# Importing external package dependency:
import os
import logging
from datetime import datetime
from flask import Flask, render_template, request, url_for, redirect, flash, session, abort, jsonify
from flask_migrate import Migrate
from flask_login import LoginManager, AnonymousUserMixin
from flask_mail import Mail, Message
from flask_sqlalchemy import sqlalchemy, SQLAlchemy
from flask_wtf.csrf import CSRFProtect
from flask_moment import Moment
from logging.handlers import SMTPHandler
# Importing internal module dependency:
from sellerhub.config import config, BaseConfig
from sellerhub.logger import setup_logging
# File-upload extensions accepted by the app:
ALLOWED_EXTENSIONS = set(["csv", "txt", "tsv", "xlsx"])
# Initiating our primary Flask application. '__name__' shall currently direct to app.py:
app = Flask(__name__, static_folder = "static", template_folder="templates")
app.config.from_object(BaseConfig)
# Setting up logging levels [Currently with 'default' value]:
setup_logging()
# Database / auth extensions (SQLAlchemy ORM, Alembic migrations, Flask-Login):
login_manager = LoginManager()
db = SQLAlchemy(app)
Migrate(app, db)
login_manager.init_app(app)
login_manager.login_view = "login"  # endpoint unauthenticated users are redirected to
# Email push initiation (Flask-Mail):
mail = Mail(app)
# Flask-Login "anonymous" user: unauthenticated sessions appear as "Guest".
class Anonymous(AnonymousUserMixin):
    def __init__(self):
        self.username = "Guest"
login_manager.anonymous_user = Anonymous  # register the class itself, not an instance
CSRFProtect(app)  # CSRF protection for all state-changing form submissions
moment = Moment(app)  # client-side date/time rendering helper
# Imported after initialization so models and error handlers see `app`/`db`:
from sellerhub import models, errors
# In production (non-debug), email uncaught errors to the configured admins:
if not app.debug:
    if app.config["MAIL_SERVER"]:
        auth = None
        if app.config["MAIL_USERNAME"] or app.config["MAIL_PASSWORD"]:
            auth = (app.config["MAIL_USERNAME"], app.config["MAIL_PASSWORD"])
        secure = None
        if app.config["MAIL_USE_TLS"] or app.config["MAIL_USE_SSL"]:
            secure = ()
        mail_handler = SMTPHandler(
            mailhost = (app.config["MAIL_SERVER"], app.config["MAIL_PORT"]),
            fromaddr = "no-reply@" + app.config["MAIL_SERVER"],
            toaddrs = app.config["ADMINS"], subject="SellerHub Failure",
            credentials=auth, secure=secure)
        # Ignoring Warnings and Informational or Debugging messages:
        mail_handler.setLevel(logging.ERROR)
        app.logger.addHandler(mail_handler)
| [
"flask_mail.Mail",
"flask_login.LoginManager",
"logging.handlers.SMTPHandler",
"flask.Flask",
"flask_wtf.csrf.CSRFProtect",
"flask_moment.Moment",
"flask_migrate.Migrate",
"flask_sqlalchemy.SQLAlchemy",
"sellerhub.logger.setup_logging"
] | [((838, 906), 'flask.Flask', 'Flask', (['__name__'], {'static_folder': '"""static"""', 'template_folder': '"""templates"""'}), "(__name__, static_folder='static', template_folder='templates')\n", (843, 906), False, 'from flask import Flask, render_template, request, url_for, redirect, flash, session, abort, jsonify\n'), ((1011, 1026), 'sellerhub.logger.setup_logging', 'setup_logging', ([], {}), '()\n', (1024, 1026), False, 'from sellerhub.logger import setup_logging\n'), ((1098, 1112), 'flask_login.LoginManager', 'LoginManager', ([], {}), '()\n', (1110, 1112), False, 'from flask_login import LoginManager, AnonymousUserMixin\n'), ((1119, 1134), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', (['app'], {}), '(app)\n', (1129, 1134), False, 'from flask_sqlalchemy import sqlalchemy, SQLAlchemy\n'), ((1136, 1152), 'flask_migrate.Migrate', 'Migrate', (['app', 'db'], {}), '(app, db)\n', (1143, 1152), False, 'from flask_migrate import Migrate\n'), ((1285, 1294), 'flask_mail.Mail', 'Mail', (['app'], {}), '(app)\n', (1289, 1294), False, 'from flask_mail import Mail, Message\n'), ((1460, 1476), 'flask_wtf.csrf.CSRFProtect', 'CSRFProtect', (['app'], {}), '(app)\n', (1471, 1476), False, 'from flask_wtf.csrf import CSRFProtect\n'), ((1487, 1498), 'flask_moment.Moment', 'Moment', (['app'], {}), '(app)\n', (1493, 1498), False, 'from flask_moment import Moment\n'), ((1909, 2134), 'logging.handlers.SMTPHandler', 'SMTPHandler', ([], {'mailhost': "(app.config['MAIL_SERVER'], app.config['MAIL_PORT'])", 'fromaddr': "('no-reply@' + app.config['MAIL_SERVER'])", 'toaddrs': "app.config['ADMINS']", 'subject': '"""SellerHub Failure"""', 'credentials': 'auth', 'secure': 'secure'}), "(mailhost=(app.config['MAIL_SERVER'], app.config['MAIL_PORT']),\n fromaddr='no-reply@' + app.config['MAIL_SERVER'], toaddrs=app.config[\n 'ADMINS'], subject='SellerHub Failure', credentials=auth, secure=secure)\n", (1920, 2134), False, 'from logging.handlers import SMTPHandler\n')] |
# Copyright (c) 2009 The Foundry Visionmongers Ltd. All Rights Reserved.
import nuke
import nukescripts
import random
import os
import textwrap
def copy_knobs(args):
  """Paste knob values from the clipboard nodes onto the selected nodes.

  Knob values are matched by knob name; 'name', 'xpos' and 'ypos' are left
  untouched so pasted settings neither rename nor move the targets.
  Raises RuntimeError when the enclosing group is locked or published.
  """
  thisGroup = nuke.thisGroup()
  if( thisGroup is not nuke.root() and ( thisGroup.locked() or thisGroup.subgraphLocked() ) ):
    raise RuntimeError("Can't paste knob values because " + thisGroup.name() + " is locked")
  selNodes = thisGroup.selectedNodes()
  # Paste the clipboard into a throw-away group so its nodes can be inspected.
  groupCopy = nuke.nodes.Group(name = "____tempcopyknobgroup__")
  with groupCopy:
    nuke.nodePaste(nukescripts.cut_paste_file())
  excludedKnobs = ["name", "xpos", "ypos"]
  # try/finally replaces the old duplicated cleanup (delete in `except` plus a
  # second delete after the try) and guarantees the temp group is removed once.
  try:
    for source in groupCopy.nodes():
      for target in selNodes:
        targetKnobs = target.knobs()
        for knobName in source.knobs():
          if knobName in excludedKnobs or knobName not in targetKnobs:
            continue
          # Serialize the source knob and load it into the matching target knob.
          target[knobName].fromScript(source[knobName].toScript())
  finally:
    nuke.delete(groupCopy)
def connect_selected_to_viewer(inputIndex):
  """Connects the selected node to the given viewer input index, ignoring errors if no node is selected."""
  try:
    selection = nuke.selectedNode()
  except ValueError:
    selection = None  # nothing selected
  if selection is not None and selection.Class() == 'Viewer':
    selection = None  # never wire a viewer into itself
  nuke.connectViewer(inputIndex, selection)
def toggle_monitor_out():
  """Toggles monitor out (switches it on if it's off, or vice versa) for the currently active viewer."""
  if nuke.activeViewer() is not None:
    knob = nuke.toNode('MonitorOutNode').knob("enable")
    knob.setValue(not knob.value())
def clear_selection_recursive(group = None):
  """Sets all nodes to unselected, including in child groups.

  Defaults to the root node graph.  The previous signature used
  ``group = nuke.root()`` as the default, which is evaluated once at import
  time; resolving it per call avoids holding a stale reference.
  """
  if group is None:
    group = nuke.root()
  for n in group.selectedNodes():
    n.setSelected(False)
  # Recurse into every child group.
  for child in group.nodes():
    if child.Class() == 'Group':
      clear_selection_recursive(child)
def goofy_title():
  """Returns a random message for use as an untitled script name.
  Can be assigned to nuke.untitled as a callable.
  Put a goofy_title.txt somewhere in your NUKE_PATH to customise."""
  goofyFile = None
  for dir in nuke.pluginPath():
    fileName = os.path.join(dir, "goofy_title.txt")
    if os.path.exists(fileName):
      goofyFile = fileName
      break
  if goofyFile is None:
    return "Missing goofy_title.txt"
  # Context manager guarantees the handle is closed (the old code leaked it on error).
  with open(goofyFile) as file:
    lines = file.readlines()
  lines = [line.strip() for line in lines]
  # Skip blank lines and '#' comments.
  lines = [line for line in lines if len(line) > 0 and line[0] != '#']
  if len(lines) < 1:
    return "Empty goofy_title.txt"
  return random.choice(lines)
def declone(node):
  """Turn a clone into an independent node carrying the same knob values.

  Creates a fresh node of the same class, copies the clone's non-default
  knob values onto it, rewires the inputs, and deletes the original clone.
  No-op when the node has no clones.
  """
  if node.clones() == 0:
    return
  # Serialize every non-default knob (including user knob definitions).
  args = node.writeKnobs(nuke.WRITE_ALL | nuke.WRITE_USER_KNOB_DEFS | nuke.WRITE_NON_DEFAULT_ONLY | nuke.TO_SCRIPT)
  newnode = nuke.createNode(node.Class(), knobs = args)
  # Recreate the clone's input wiring on the replacement node.
  nuke.inputs(newnode, nuke.inputs(node))
  num_inputs = nuke.inputs(node)
  for i in range(num_inputs):
    newnode.setInput(i, node.input(i))
  # Splice the new node in front of the clone before removing it.
  node.setInput(0, newnode)
  nuke.delete(node)
def showname():
  '''Shows the current script path and, if the selected node is a Read or Write node, the filename from it.'''
  # The script path is always shown.
  nukescript = nuke.value("root.name")
  p = nuke.Panel("Current Info", 500)
  node = None
  try:
    node = nuke.selectedNode()
  except ValueError:
    # No node selected: fall through and show only the script path.
    # (The old bare `except:` around the whole body hid real errors too.)
    pass
  if node is not None and node.Class() in ("Read", "Write"):
    first = nuke.value(node.name() + ".first", nuke.value("root.first_frame"))
    last = nuke.value(node.name() + ".last", nuke.value("root.last_frame"))
    curfile = node.knob("file").value() + " " + str(first) + "-" + str(last)
    p.addSingleLineInput("Filename", curfile)
  p.addSingleLineInput("Script", nukescript)
  p.show()
def swapAB(n):
  """Swaps the first two inputs of a node."""
  thisGroup = nuke.thisGroup()
  if thisGroup is not nuke.root() and ( thisGroup.locked() or thisGroup.subgraphLocked() ) :
    lockedReason = "published" if thisGroup.subgraphLocked() else "locked"
    raise RuntimeError("Can't swap nodes because " + thisGroup.name() + " is " + lockedReason)
  # Only swap when the node actually exposes at least two inputs.
  if max(n.inputs(), n.minimumInputs()) > 1:
    inputA, inputB = n.input(0), n.input(1)
    n.setInput(0, inputB)
    n.setInput(1, inputA)
def print_callback_info(verbose=False, callbackTypes=None):
  """
  Prints all currently active callbacks, with the following optional
  arguments:
      verbose=False : prints the documentation as well as the callback
      callbackTypes=None : limit the callback info to a particular callback
      type (e.g. ['OnCreates'])
  """
  # list of all callback types
  all_Callback_Types = [ 'onUserCreates',
                         'onCreates',
                         'onScriptLoads',
                         'onScriptSaves',
                         'onScriptCloses',
                         'onDestroys',
                         'knobChangeds',
                         'updateUIs',
                         'autolabels',
                         'beforeRenders',
                         'beforeFrameRenders',
                         'afterRenders',
                         'afterFrameRenders',
                         'renderProgresses',
                         'filenameFilters',
                         'validateFilenames',
                         'autoSaveFilters',
                         'autoSaveRestoreFilters',
                         'autoSaveDeleteFilters',
                         ]
  # if no callbackTypes given, or not a list/dict, default to all callback types
  if not callbackTypes or not isinstance(callbackTypes, (list, dict)):
    callbackTypes = all_Callback_Types
  # getattr() replaces the previous eval('nuke.%s' % ...): same lookup, but it
  # cannot execute arbitrary code if a hostile string is passed in.
  callback_defs = {}
  for callbackType in callbackTypes:
    callback_defs[callbackType] = getattr(nuke, callbackType)
  # find the max target name length (used to column-align the printed ids)
  maxTargetNameLen = max( list(map( len,' '.join( [ (k+'').join( list(callback_defs[k].keys()) ) for k in list(callback_defs.keys()) ] ).split(' ') )) )
  indent = (maxTargetNameLen+8)*' '
  sortedCallbackTypes = sorted( callback_defs.keys() )
  for callbackType in sortedCallbackTypes:
    for callbackTarget in list(callback_defs[callbackType].keys()):
      for func, a, b, c in callback_defs[callbackType][callbackTarget]:
        id = '%s%s%s : '%(callbackType[:-1],'_'*(maxTargetNameLen-len(callbackType)-len(callbackTarget)+1),callbackTarget)
        print('%s%s' %(id, func.__name__))
        if verbose:
          # show a wrapped, whitespace-normalised version of the docstring
          doc = func.__doc__ or 'NO DOCUMENTATION'
          docNoReturns = str(doc).lstrip().replace('\n','')
          docNoConsecutiveSpaces = " ".join(docNoReturns.split())
          for line in textwrap.wrap(docNoConsecutiveSpaces, 60):
            print(indent + line.replace('\n', '\n' + indent))
| [
"os.path.exists",
"random.choice",
"nuke.connectViewer",
"nuke.nodes.Group",
"nuke.value",
"os.path.join",
"nuke.toNode",
"nuke.Panel",
"nukescripts.cut_paste_file",
"nuke.selectedNode",
"textwrap.wrap",
"nuke.thisGroup",
"nuke.root",
"nuke.pluginPath",
"nuke.delete",
"nuke.activeViewe... | [((184, 200), 'nuke.thisGroup', 'nuke.thisGroup', ([], {}), '()\n', (198, 200), False, 'import nuke\n'), ((445, 493), 'nuke.nodes.Group', 'nuke.nodes.Group', ([], {'name': '"""____tempcopyknobgroup__"""'}), "(name='____tempcopyknobgroup__')\n", (461, 493), False, 'import nuke\n'), ((1057, 1079), 'nuke.delete', 'nuke.delete', (['groupCopy'], {}), '(groupCopy)\n', (1068, 1079), False, 'import nuke\n'), ((1434, 1475), 'nuke.connectViewer', 'nuke.connectViewer', (['inputIndex', 'selection'], {}), '(inputIndex, selection)\n', (1452, 1475), False, 'import nuke\n'), ((1620, 1639), 'nuke.activeViewer', 'nuke.activeViewer', ([], {}), '()\n', (1637, 1639), False, 'import nuke\n'), ((1815, 1826), 'nuke.root', 'nuke.root', ([], {}), '()\n', (1824, 1826), False, 'import nuke\n'), ((2306, 2323), 'nuke.pluginPath', 'nuke.pluginPath', ([], {}), '()\n', (2321, 2323), False, 'import nuke\n'), ((2775, 2795), 'random.choice', 'random.choice', (['lines'], {}), '(lines)\n', (2788, 2795), False, 'import random\n'), ((3082, 3099), 'nuke.inputs', 'nuke.inputs', (['node'], {}), '(node)\n', (3093, 3099), False, 'import nuke\n'), ((3200, 3217), 'nuke.delete', 'nuke.delete', (['node'], {}), '(node)\n', (3211, 3217), False, 'import nuke\n'), ((3416, 3439), 'nuke.value', 'nuke.value', (['"""root.name"""'], {}), "('root.name')\n", (3426, 3439), False, 'import nuke\n'), ((3519, 3550), 'nuke.Panel', 'nuke.Panel', (['"""Current Info"""', '(500)'], {}), "('Current Info', 500)\n", (3529, 3550), False, 'import nuke\n'), ((4174, 4190), 'nuke.thisGroup', 'nuke.thisGroup', ([], {}), '()\n', (4188, 4190), False, 'import nuke\n'), ((1277, 1296), 'nuke.selectedNode', 'nuke.selectedNode', ([], {}), '()\n', (1294, 1296), False, 'import nuke\n'), ((2342, 2378), 'os.path.join', 'os.path.join', (['dir', '"""goofy_title.txt"""'], {}), "(dir, 'goofy_title.txt')\n", (2354, 2378), False, 'import os\n'), ((2388, 2412), 'os.path.exists', 'os.path.exists', (['fileName'], {}), '(fileName)\n', 
(2402, 2412), False, 'import os\n'), ((3048, 3065), 'nuke.inputs', 'nuke.inputs', (['node'], {}), '(node)\n', (3059, 3065), False, 'import nuke\n'), ((3566, 3585), 'nuke.selectedNode', 'nuke.selectedNode', ([], {}), '()\n', (3583, 3585), False, 'import nuke\n'), ((225, 236), 'nuke.root', 'nuke.root', ([], {}), '()\n', (234, 236), False, 'import nuke\n'), ((533, 561), 'nukescripts.cut_paste_file', 'nukescripts.cut_paste_file', ([], {}), '()\n', (559, 561), False, 'import nukescripts\n'), ((1020, 1042), 'nuke.delete', 'nuke.delete', (['groupCopy'], {}), '(groupCopy)\n', (1031, 1042), False, 'import nuke\n'), ((4213, 4224), 'nuke.root', 'nuke.root', ([], {}), '()\n', (4222, 4224), False, 'import nuke\n'), ((1682, 1711), 'nuke.toNode', 'nuke.toNode', (['"""MonitorOutNode"""'], {}), "('MonitorOutNode')\n", (1693, 1711), False, 'import nuke\n'), ((3678, 3708), 'nuke.value', 'nuke.value', (['"""root.first_frame"""'], {}), "('root.first_frame')\n", (3688, 3708), False, 'import nuke\n'), ((3749, 3778), 'nuke.value', 'nuke.value', (['"""root.last_frame"""'], {}), "('root.last_frame')\n", (3759, 3778), False, 'import nuke\n'), ((7100, 7141), 'textwrap.wrap', 'textwrap.wrap', (['docNoConsecutiveSpaces', '(60)'], {}), '(docNoConsecutiveSpaces, 60)\n', (7113, 7141), False, 'import textwrap\n')] |
from functionali import (
first,
ffirst,
second,
third,
fourth,
fifth,
last,
butlast,
rest,
)
def test_first():
    """first() yields the initial element of any iterable; None when empty."""
    assert first([1, 2, 3]) == 1
    assert first((1, 2, 3)) == 1
    assert first({1, 2, 3}) == 1
    assert first({1: "a", 2: "b"}) == (1, "a")
    assert first([]) is None
def test_ffirst():
    """ffirst() yields the first element of the iterable's first element."""
    assert ffirst([[1], [2], [3]]) == 1
    assert ffirst(((1,), (2,), (3,))) == 1
    assert ffirst({(1, 2), (3, 4), (5, 6)}) == 1
    assert ffirst([]) is None
def test_last():
    """last() yields the final element of any iterable; None when empty."""
    assert last([1, 2, 3]) == 3
    assert last((1, 2, 3)) == 3
    assert last({1, 2, 3}) == 3
    assert last({1: "a", 2: "b", 3: "c"}) == (3, "c")
    assert last([]) is None
def test_rest():
    """rest() yields everything after the first element, as an iterator."""
    # rest() returns an iterator, so materialise it before comparing.
    assert tuple(rest([1, 2, 3])) == (2, 3)
    assert tuple(rest((1, 2, 3))) == (2, 3)
    assert tuple(rest({1, 2, 3})) == (2, 3)
    assert tuple(rest({1: "a", 2: "b", 3: "c"})) == ((2, "b"), (3, "c"))
    assert tuple(rest([])) == ()
def test_second():
    """second() yields the second element; falls back to the last one."""
    assert second([1, 2, 3]) == 2
    assert second((1, 2, 3)) == 2
    assert second({1, 2, 3}) == 2
    assert second({1: "a", 2: "b"}) == (2, "b")
    assert second([]) is None
    # With fewer than two items, the last available item is returned.
    assert second([1]) == 1
def test_third():
    """third() yields the third element; falls back to the last one."""
    assert third([1, 2, 3]) == 3
    assert third((1, 2, 3)) == 3
    assert third({1, 2, 3}) == 3
    assert third({1: "a", 2: "b", 3: "c"}) == (3, "c")
    assert third([]) is None
    # With fewer than three items, the last available item is returned.
    assert third([1, 2]) == 2
def test_butlast():
    """butlast() yields all elements except the final one; None when empty."""
    assert butlast([1, 2, 3]) == (1, 2)
    assert butlast((1, 2, 3)) == (1, 2)
    assert butlast({1, 2, 3}) == (1, 2)
    assert butlast({1: "a", 2: "b", 3: "c"}) == ((1, "a"), (2, "b"))
    assert butlast([]) is None
| [
"functionali.rest",
"functionali.second",
"functionali.butlast",
"functionali.first",
"functionali.last",
"functionali.ffirst",
"functionali.third"
] | [((166, 182), 'functionali.first', 'first', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (171, 182), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((199, 215), 'functionali.first', 'first', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (204, 215), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((232, 248), 'functionali.first', 'first', (['{1, 2, 3}'], {}), '({1, 2, 3})\n', (237, 248), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((272, 299), 'functionali.first', 'first', (["{(1): 'a', (2): 'b'}"], {}), "({(1): 'a', (2): 'b'})\n", (277, 299), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((315, 324), 'functionali.first', 'first', (['[]'], {}), '([])\n', (320, 324), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((362, 385), 'functionali.ffirst', 'ffirst', (['[[1], [2], [3]]'], {}), '([[1], [2], [3]])\n', (368, 385), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((402, 428), 'functionali.ffirst', 'ffirst', (['((1,), (2,), (3,))'], {}), '(((1,), (2,), (3,)))\n', (408, 428), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((445, 477), 'functionali.ffirst', 'ffirst', (['{(1, 2), (3, 4), (5, 6)}'], {}), '({(1, 2), (3, 4), (5, 6)})\n', (451, 477), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((497, 507), 'functionali.ffirst', 'ffirst', (['[]'], {}), '([])\n', (503, 507), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((543, 558), 'functionali.last', 'last', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (547, 558), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, 
butlast, rest\n'), ((575, 590), 'functionali.last', 'last', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (579, 590), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((607, 622), 'functionali.last', 'last', (['{1, 2, 3}'], {}), '({1, 2, 3})\n', (611, 622), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((646, 682), 'functionali.last', 'last', (["{(1): 'a', (2): 'b', (3): 'c'}"], {}), "({(1): 'a', (2): 'b', (3): 'c'})\n", (650, 682), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((696, 704), 'functionali.last', 'last', (['[]'], {}), '([])\n', (700, 704), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((1050, 1067), 'functionali.second', 'second', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (1056, 1067), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((1084, 1101), 'functionali.second', 'second', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (1090, 1101), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((1118, 1135), 'functionali.second', 'second', (['{1, 2, 3}'], {}), '({1, 2, 3})\n', (1124, 1135), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((1159, 1187), 'functionali.second', 'second', (["{(1): 'a', (2): 'b'}"], {}), "({(1): 'a', (2): 'b'})\n", (1165, 1187), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((1203, 1213), 'functionali.second', 'second', (['[]'], {}), '([])\n', (1209, 1213), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((1313, 1324), 'functionali.second', 'second', (['[1]'], {}), '([1])\n', (1319, 1324), False, 'from functionali import first, ffirst, second, third, fourth, fifth, 
last, butlast, rest\n'), ((1361, 1377), 'functionali.third', 'third', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (1366, 1377), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((1394, 1410), 'functionali.third', 'third', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (1399, 1410), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((1427, 1443), 'functionali.third', 'third', (['{1, 2, 3}'], {}), '({1, 2, 3})\n', (1432, 1443), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((1467, 1504), 'functionali.third', 'third', (["{(1): 'a', (2): 'b', (3): 'c'}"], {}), "({(1): 'a', (2): 'b', (3): 'c'})\n", (1472, 1504), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((1518, 1527), 'functionali.third', 'third', (['[]'], {}), '([])\n', (1523, 1527), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((1629, 1642), 'functionali.third', 'third', (['[1, 2]'], {}), '([1, 2])\n', (1634, 1642), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((1686, 1704), 'functionali.butlast', 'butlast', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (1693, 1704), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((1726, 1744), 'functionali.butlast', 'butlast', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (1733, 1744), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((1766, 1784), 'functionali.butlast', 'butlast', (['{1, 2, 3}'], {}), '({1, 2, 3})\n', (1773, 1784), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((1820, 1859), 'functionali.butlast', 'butlast', (["{(1): 'a', (2): 'b', (3): 'c'}"], {}), "({(1): 'a', (2): 'b', (3): 'c'})\n", (1827, 1859), False, 
'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((1873, 1884), 'functionali.butlast', 'butlast', (['[]'], {}), '([])\n', (1880, 1884), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((802, 817), 'functionali.rest', 'rest', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (806, 817), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((846, 861), 'functionali.rest', 'rest', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (850, 861), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((890, 905), 'functionali.rest', 'rest', (['{1, 2, 3}'], {}), '({1, 2, 3})\n', (894, 905), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((948, 984), 'functionali.rest', 'rest', (["{(1): 'a', (2): 'b', (3): 'c'}"], {}), "({(1): 'a', (2): 'b', (3): 'c'})\n", (952, 984), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n'), ((1003, 1011), 'functionali.rest', 'rest', (['[]'], {}), '([])\n', (1007, 1011), False, 'from functionali import first, ffirst, second, third, fourth, fifth, last, butlast, rest\n')] |
import os
import sys

# Resolve the repository root (the parent of this file's directory) and make
# the bundled 'slim' package importable, taking care not to add it twice.
cwd = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")
_slim_dir = os.path.join(cwd, 'slim')
if _slim_dir not in sys.path:
    sys.path.append(_slim_dir)
| [
"os.path.abspath",
"os.path.join"
] | [((94, 119), 'os.path.join', 'os.path.join', (['cwd', '"""slim"""'], {}), "(cwd, 'slim')\n", (106, 119), False, 'import os\n'), ((57, 82), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (72, 82), False, 'import os\n'), ((157, 182), 'os.path.join', 'os.path.join', (['cwd', '"""slim"""'], {}), "(cwd, 'slim')\n", (169, 182), False, 'import os\n')] |
import torch
from lab import logger
from lab.logger.indicators import Histogram
def add_model_indicators(model: torch.nn.Module, model_name: str = "model"):
    """Register histogram indicators for every trainable parameter of *model*.

    One indicator is added for the parameter values and one for the gradients,
    both tagged ``<model_name>.<param_name>``.
    """
    for name, param in model.named_parameters():
        if not param.requires_grad:
            continue
        tag = f"{model_name}.{name}"
        logger.add_indicator(Histogram(tag))
        logger.add_indicator(Histogram(tag + ".grad"))
def store_model_indicators(model: torch.nn.Module, model_name: str = "model"):
    """Record the current values and gradients of each trainable parameter.

    The keys mirror those registered by ``add_model_indicators``.
    """
    for name, param in model.named_parameters():
        if not param.requires_grad:
            continue
        key = f"{model_name}.{name}"
        logger.store(key, param)
        logger.store(key + ".grad", param.grad)
| [
"lab.logger.store",
"lab.logger.indicators.Histogram"
] | [((556, 599), 'lab.logger.store', 'logger.store', (['f"""{model_name}.{name}"""', 'param'], {}), "(f'{model_name}.{name}', param)\n", (568, 599), False, 'from lab import logger\n'), ((612, 665), 'lab.logger.store', 'logger.store', (['f"""{model_name}.{name}.grad"""', 'param.grad'], {}), "(f'{model_name}.{name}.grad', param.grad)\n", (624, 665), False, 'from lab import logger\n'), ((274, 307), 'lab.logger.indicators.Histogram', 'Histogram', (['f"""{model_name}.{name}"""'], {}), "(f'{model_name}.{name}')\n", (283, 307), False, 'from lab.logger.indicators import Histogram\n'), ((342, 380), 'lab.logger.indicators.Histogram', 'Histogram', (['f"""{model_name}.{name}.grad"""'], {}), "(f'{model_name}.{name}.grad')\n", (351, 380), False, 'from lab.logger.indicators import Histogram\n')] |
"""A mock experiment."""
import sys
import unittest
from unittest import TestCase
from unittest.mock import MagicMock
from unittest.mock import Mock
import experiment
from workload_experiment import WorkloadExperiment
class Test_Experiment(TestCase):
    """Implements a generic experiment with dependencies mocked away."""

    def test_verify__mock(self):
        """Test passes when the experiment runs to end."""
        sys.argv = ["mock.py", "--testnet", "abc", "--wg_testnet", "def", "--skip_generate_report", "True"]
        experiment.parse_command_line_args()

        exp = ExperimentMock()
        # Stub out everything that would need a live IC deployment.
        for attr in (
            "get_ic_version",
            "get_subnet_for_target",
            "store_hardware_info",
            "get_iter_logs_from_targets",
            "install_canister",
            "run_workload_generator",
            "init_metrics",
            "kill_workload_generator",
            "turn_off_replica",
        ):
            setattr(exp, attr, MagicMock())
        exp.get_targets = Mock(return_value=["1.1.1.1", "2.2.2.2"])
        exp.get_hostnames = Mock(return_value=["3.3.3.3", "4.4.4.4"])
        exp.get_subnet_info = Mock(return_value="{}")
        exp.get_topology = Mock(return_value="{}")
        exp.check_workload_generator_installed = Mock(return_value=True)
        exp.get_ic_version = MagicMock(return_value="deadbeef")
        exp.wait_for_quiet = MagicMock(return_value=None)

        # Drive a full experiment lifecycle end to end.
        exp.init()
        exp.init_experiment()
        exp.install_canister("some canister")
        exp.start_experiment()
        exp.run_experiment({})
        exp.subnet_id = "abc"
        exp.write_summary_file("test", {}, [], "some x value")
        exp.end_experiment()

        exp.install_canister.assert_called_once()
        exp.run_workload_generator.assert_called_once()
        exp.init_metrics.assert_called_once()
class ExperimentMock(WorkloadExperiment):
    """Logic for experiment 1."""

    def __init__(self):
        """Construct experiment 1."""
        super().__init__()

    def run_experiment_internal(self, config):
        """Mock similar to experiment 1."""
        # Forward a fixed request rate (200) against the configured targets.
        run_kwargs = {"outdir": self.iter_outdir, "duration": 60}
        return self.run_workload_generator(
            self.machines, self.target_nodes, 200, **run_kwargs
        )
# Allow running this test module directly with `python <file>`.
if __name__ == "__main__":
    unittest.main()
| [
"unittest.main",
"unittest.mock.MagicMock",
"unittest.mock.Mock",
"experiment.parse_command_line_args"
] | [((2449, 2464), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2462, 2464), False, 'import unittest\n'), ((537, 573), 'experiment.parse_command_line_args', 'experiment.parse_command_line_args', ([], {}), '()\n', (571, 573), False, 'import experiment\n'), ((705, 746), 'unittest.mock.Mock', 'Mock', ([], {'return_value': "['1.1.1.1', '2.2.2.2']"}), "(return_value=['1.1.1.1', '2.2.2.2'])\n", (709, 746), False, 'from unittest.mock import Mock\n'), ((775, 816), 'unittest.mock.Mock', 'Mock', ([], {'return_value': "['3.3.3.3', '4.4.4.4']"}), "(return_value=['3.3.3.3', '4.4.4.4'])\n", (779, 816), False, 'from unittest.mock import Mock\n'), ((846, 857), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (855, 857), False, 'from unittest.mock import MagicMock\n'), ((894, 905), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (903, 905), False, 'from unittest.mock import MagicMock\n'), ((937, 960), 'unittest.mock.Mock', 'Mock', ([], {'return_value': '"""{}"""'}), "(return_value='{}')\n", (941, 960), False, 'from unittest.mock import Mock\n'), ((988, 1011), 'unittest.mock.Mock', 'Mock', ([], {'return_value': '"""{}"""'}), "(return_value='{}')\n", (992, 1011), False, 'from unittest.mock import Mock\n'), ((1046, 1057), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1055, 1057), False, 'from unittest.mock import MagicMock\n'), ((1099, 1110), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1108, 1110), False, 'from unittest.mock import MagicMock\n'), ((1142, 1153), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1151, 1153), False, 'from unittest.mock import MagicMock\n'), ((1191, 1202), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1200, 1202), False, 'from unittest.mock import MagicMock\n'), ((1230, 1241), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1239, 1241), False, 'from unittest.mock import MagicMock\n'), ((1280, 1291), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1289, 
1291), False, 'from unittest.mock import MagicMock\n'), ((1323, 1334), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1332, 1334), False, 'from unittest.mock import MagicMock\n'), ((1384, 1407), 'unittest.mock.Mock', 'Mock', ([], {'return_value': '(True)'}), '(return_value=True)\n', (1388, 1407), False, 'from unittest.mock import Mock\n'), ((1437, 1471), 'unittest.mock.MagicMock', 'MagicMock', ([], {'return_value': '"""deadbeef"""'}), "(return_value='deadbeef')\n", (1446, 1471), False, 'from unittest.mock import MagicMock\n'), ((1501, 1529), 'unittest.mock.MagicMock', 'MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (1510, 1529), False, 'from unittest.mock import MagicMock\n')] |
# Copyright DST Group. Licensed under the MIT license.
import datetime
from ipaddress import IPv4Address, IPv4Network
import CybORG.Shared.Enums as CyEnums
class NetworkInterface:
    """A class for storing network interface information """

    def __init__(self,
                 hostid: str = None,
                 interface_name: str = None,
                 ip_address: IPv4Address = None,
                 subnet: IPv4Network = None):
        # Coerce the address arguments into ipaddress objects; the
        # constructors raise on malformed input.
        self.hostid = hostid
        self.interface_name = interface_name
        self.ip_address = IPv4Address(ip_address)
        self.subnet = IPv4Network(subnet)

    def get_info(self) -> dict:
        """Return network interface as dict.

        Keys of dict match arguments of Observation.add_interface_info()
        """
        info = {
            "hostid": self.hostid,
            "interface_name": self.interface_name,
            "ip_address": self.ip_address,
            "subnet": self.subnet,
        }
        return info

    def __str__(self):
        # Render as "<ClassName>: key=value ..." skipping unset fields.
        parts = [f"{self.__class__.__name__}:"]
        parts.extend(
            f"{key}={val}"
            for key, val in self.get_info().items()
            if val is not None
        )
        return " ".join(parts)
class File:
    """A dataclass for storing information about a single file """

    def __init__(self,
                 name: str,
                 path: str,
                 file_type: str = None,
                 vendor: str = None,
                 version: str = None,
                 user: str = None,
                 user_permissionss: int = None,
                 group: str = None,
                 group_permissions: int = None,
                 default_permissions: int = None,
                 last_modified_time: datetime.datetime = None):
        """Store file metadata, defaulting unset permissions to 7 (rwx).

        `last_modified_time` may be passed as a "%d %b %Y %H:%M" string and is
        parsed into a datetime. `file_type`/`version` strings are parsed into
        their CyEnums counterparts.
        """
        self.name = name
        self.path = path
        self.user = user
        self.group = group
        self.vendor = vendor
        # NOTE: the misspelled attribute name 'user_permissionss' is kept for
        # backward compatibility with existing callers.
        self.user_permissionss = user_permissionss
        if self.user_permissionss is None and self.user is not None:
            # BUGFIX: this previously assigned group_permissions by mistake,
            # leaving user permissions unset (and the value was clobbered
            # two lines below anyway).
            self.user_permissionss = 7
        self.group_permissions = group_permissions
        if self.group_permissions is None and self.group is not None:
            self.group_permissions = 7
        self.default_permissions = default_permissions
        if self.default_permissions is None:
            self.default_permissions = 7
        self.last_modified_time = last_modified_time
        if self.last_modified_time is not None:
            # BUGFIX: strptime lives on datetime.datetime, not on the
            # datetime module (the file does `import datetime`).
            self.last_modified_time = datetime.datetime.strptime(
                self.last_modified_time, "%d %b %Y %H:%M"
            )
        self.file_type = file_type
        if file_type is None:
            self.file_type = CyEnums.FileType.UNKNOWN
        elif not isinstance(file_type, CyEnums.FileType):
            self.file_type = CyEnums.FileType.parse_string(file_type)
        self.version = version
        # BUGFIX: isinstance was called with a single argument, which raises
        # TypeError; check the version value against the enum type.
        if version is not None and not isinstance(version, CyEnums.FileVersion):
            self.version = CyEnums.FileVersion.parse_string(version)

    def get_info(self) -> dict:
        """Return file info as dict.

        Keys of dict match arguments of Observation.add_file_info()
        """
        return {
            "path": self.path,
            "name": self.name,
            "vendor": self.vendor,
            "version": self.version,
            "file_type": self.file_type,
            "user": self.user,
            "user_permissions": self.user_permissionss,
            "group": self.group,
            "group_permissions": self.group_permissions,
            "default_permissions": self.default_permissions,
            "last_modified_time": self.last_modified_time
        }

    def __str__(self):
        # Render as "<ClassName>: key=value ..." skipping unset fields.
        output = [f"{self.__class__.__name__}:"]
        for k, v in self.get_info().items():
            if v is None:
                continue
            output.append(f"{k}={v}")
        return " ".join(output)
class Credentials:
    """A class for storing a set of credentials """

    def __init__(self,
                 username: str,
                 password: str = None,
                 key_path: str = None,
                 password_hash: str = None,
                 password_hash_type: str = None,
                 groups: list = None):
        self.username = username
        self.password = password
        self.key_path = key_path
        self.password_hash = password_hash
        self.password_hash_type = password_hash_type
        # Construct a fresh list here so instances never share one default.
        self.groups = groups if groups is not None else []

    def get_info(self) -> dict:
        """Return credentials as dict

        Keys of dict match arguments of Observation.add_user_info()
        """
        info = dict(
            username=self.username,
            password=self.password,
            password_hash=self.password_hash,
            password_hash_type=self.password_hash_type,
            key_path=self.key_path,
        )
        return info

    def __str__(self):
        # Render as "<ClassName>: key=value ..." skipping unset fields.
        parts = [f"{self.__class__.__name__}:"]
        parts.extend(
            f"{key}={val}"
            for key, val in self.get_info().items()
            if val is not None
        )
        return " ".join(parts)
class OperatingSystemInfo:
    """A class for storing information about the OS of a VM """

    def __init__(self,
                 os_type: CyEnums.OperatingSystemType = None,
                 dist: CyEnums.OperatingSystemDistribution = None,
                 version: CyEnums.OperatingSystemVersion = None,
                 kernel: CyEnums.OperatingSystemKernelVersion = None,
                 architecture: CyEnums.Architecture = None,
                 patch: CyEnums.OperatingSystemPatch = None):
        self.os_type = os_type
        self.dist = dist
        self.version = version
        self.kernel = kernel
        self.architecture = architecture
        self.patch = patch

    def get_info(self) -> dict:
        """Return OS info as dict

        Keys of dict match arguments of Observation.add_system_info()
        """
        # NOTE: "os_verson" (sic) is the key expected downstream; keep it.
        info = {
            "os_type": self.os_type,
            "os_distribution": self.dist,
            "os_verson": self.version,
            "os_kernel": self.kernel,
            "os_patches": self.patch,
            "architecture": self.architecture,
        }
        return info

    def __str__(self):
        # Render as "<ClassName>: key=value ..." skipping unset fields.
        parts = [f"{self.__class__.__name__}:"]
        parts.extend(
            f"{key}={val}"
            for key, val in self.get_info().items()
            if val is not None
        )
        return " ".join(parts)
class Image:
    """An class for storing VM Image information """

    def __init__(self,
                 name: str,
                 services: list = None,
                 os_info: OperatingSystemInfo = None,
                 credentials: dict = None,
                 root_user: str = None,
                 key_access: bool = False,
                 files: dict = None,
                 aux_info: dict = None):
        """
        Parameters
        ----------
        name : str
            The name of the image. This is used to distinguish between images
            with the same OS type, distribution and version in a human
            readable format. e.g. between standard ubuntu 14.04 and the
            Metasploitable 3 ubuntu 14.04
        services : list, optional
            Service objects defining services running on machine (default=None)
        os_info : OperatingSystemInfo, optional
            image os information (i.e. type, distribution, version)
            (default=None)
        credentials : dict, optional
            map of user to credentials for the VM image (default=None)
        root_user : str, optional
            the root user for the image. This is the user whose credentials are
            used when configuring any instances using this image (default=None)
        key_access : bool, optional
            whether SSH access to instance is restricted to key only
            (default=False)
        files : dict, optional
            any known/specified files that are on the image (default=None)
        aux_info: dict, optional
            any extra Image specific information (e.g. MSF or Host monitoring
            info) (Default=None)

        Raises
        ------
        AssertionError
            If root_user is given but has no entry in credentials.
        """
        self.name = name
        self.services = services
        # BUGFIX: previously assigned the OperatingSystemInfo *class* itself
        # instead of an empty instance when no os_info was supplied.
        self.os_info = OperatingSystemInfo() if os_info is None else os_info
        self.credentials = {} if credentials is None else credentials
        self.root_user = root_user
        self.key_access = key_access
        self.files = {} if files is None else files
        self.aux_info = {} if aux_info is None else aux_info
        # BUGFIX: validate against self.credentials (never None) so that a
        # root_user with credentials=None raises a clear AssertionError
        # instead of "TypeError: argument of type 'NoneType' is not iterable".
        assert root_user is None or root_user in self.credentials, \
            "Root user of Image must have matching entry in credentials"

    def get_root_user_creds(self) -> Credentials:
        """Get the credentials of the root user of Image.

        Returns
        -------
        Credentials
            Root user credentials

        Raises
        ------
        AttributeError
            If no valid credentials can be found

        Notes
        -----
        If root_user attribute of image is not defined, this will return the
        first user in the image credentials dict
        """
        if self.root_user is not None:
            return self.credentials[self.root_user]
        for username, creds in self.credentials.items():
            # With key-only access, only key-based credentials are usable.
            if not self.key_access or creds.key_path is not None:
                return creds
        # BUGFIX: the message was missing the f-string prefix, so the literal
        # text "{self}" was emitted instead of the image description.
        raise AttributeError(f"No valid root user credentials found for {self}")

    def __str__(self):
        creds = [f"{u}: {c}" for u, c in self.credentials.items()]
        creds_str = "[" + ", ".join(creds) + "]"
        output = [f"{self.__class__.__name__}:",
                  f"Name={self.name}"
                  f"Services={self.services}"
                  f"OS Info={str(self.os_info)}"
                  f"Credentials={creds_str}",
                  f"Root User={self.root_user}"
                  f"Key Access={self.key_access}",
                  f"Files={self.files}",
                  f"Aux Info={self.aux_info}"]
        return " ".join(output)

    def __eq__(self, other):
        # Two images are equal when all descriptive fields match; root_user
        # and key_access are deliberately excluded (as in the original).
        if not isinstance(other, Image):
            return False
        return (other.name == self.name
                and other.services == self.services
                and other.os_info == self.os_info
                and other.credentials == self.credentials
                and other.files == self.files
                and other.aux_info == self.aux_info)
| [
"CybORG.Shared.Enums.FileVersion.parse_string",
"ipaddress.IPv4Address",
"CybORG.Shared.Enums.FileType.parse_string",
"datetime.strptime",
"ipaddress.IPv4Network"
] | [((545, 568), 'ipaddress.IPv4Address', 'IPv4Address', (['ip_address'], {}), '(ip_address)\n', (556, 568), False, 'from ipaddress import IPv4Address, IPv4Network\n'), ((591, 610), 'ipaddress.IPv4Network', 'IPv4Network', (['subnet'], {}), '(subnet)\n', (602, 610), False, 'from ipaddress import IPv4Address, IPv4Network\n'), ((2496, 2556), 'datetime.strptime', 'datetime.strptime', (['self.last_modified_time', '"""%d %b %Y %H:%M"""'], {}), "(self.last_modified_time, '%d %b %Y %H:%M')\n", (2513, 2556), False, 'import datetime\n'), ((2966, 3007), 'CybORG.Shared.Enums.FileVersion.parse_string', 'CyEnums.FileVersion.parse_string', (['version'], {}), '(version)\n', (2998, 3007), True, 'import CybORG.Shared.Enums as CyEnums\n'), ((2794, 2834), 'CybORG.Shared.Enums.FileType.parse_string', 'CyEnums.FileType.parse_string', (['file_type'], {}), '(file_type)\n', (2823, 2834), True, 'import CybORG.Shared.Enums as CyEnums\n')] |
import matplotlib.pyplot as plt
import numpy as np
import os
import pickle
def get_mean_stds(data):
    """Return (mean, 95% CI half-width) of *data* via the normal approximation."""
    sample_mean = np.mean(data)
    # Standard error of the mean scaled by the z-score for 95% confidence.
    half_width = np.std(data) / np.sqrt(len(data)) * 1.96
    return sample_mean, half_width
if __name__ == '__main__':
    # Bar-chart comparison of OOD scores per detection category for three
    # methods; reads precomputed stats pickles and writes a PNG figure.
    labels = ['OpenTAL', 'EDL', 'SoftMax']
    result_folders = ['edl_oshead_iou', 'edl_15kc', 'default']
    colors = ['k', 'g', 'm']
    split = '0'
    tiou_target = 0.3  # NOTE(review): apparently unused; tidx below selects the tIoU
    tidx = 0  # 0-4 for [0,3...,0.7]
    items = ['$TP_{u2u}$', '$TP_{k2k}$', '$FP_{u2k}$', '$FP_{k2k}$', '$FP_{k2u}$', '$FP_{bg2u}$', '$FP_{bg2k}$']
    fontsize = 18
    width = 0.25  # width of each grouped bar
    fig_path = 'experiments/figs'
    os.makedirs(fig_path, exist_ok=True)

    xrng = np.arange(len(items))
    fig, ax = plt.subplots(1,1, figsize=(8,5))
    plt.rcParams["font.family"] = "Arial"
    for idx, (folder, label, color) in enumerate(zip(result_folders, labels, colors)):
        # load result file
        result_file = os.path.join('output', folder, f'split_{split}', 'open_stats.pkl')
        with open(result_file, 'rb') as f:
            stats = pickle.load(f)
        print(label)
        # Scores in the pickle are in-distribution scores; invert to OOD.
        all_scores = 1 - np.array(stats['ood_scores'])
        # Mean and 95% CI of OOD scores within each of the 7 categories,
        # masked to the predictions that fall in that category at tIoU tidx.
        mean_scores = np.zeros((7))
        std_scores = np.zeros((7))
        mean_scores[0], std_scores[0] = get_mean_stds(all_scores[stats['tp_u2u'][tidx] > 0])
        mean_scores[1], std_scores[1] = get_mean_stds(all_scores[stats['tp_k2k'][tidx].sum(axis=0) > 0])
        mean_scores[2], std_scores[2] = get_mean_stds(all_scores[stats['fp_u2k'][tidx].sum(axis=0) > 0])
        mean_scores[3], std_scores[3] = get_mean_stds(all_scores[stats['fp_k2k'][tidx].sum(axis=0) > 0])
        mean_scores[4], std_scores[4] = get_mean_stds(all_scores[stats['fp_k2u'][tidx] > 0])
        mean_scores[5], std_scores[5] = get_mean_stds(all_scores[stats['fp_bg2u'][tidx] > 0])
        mean_scores[6], std_scores[6] = get_mean_stds(all_scores[stats['fp_bg2k'][tidx].sum(axis=0) > 0])

        # Offset each method's bars so the three groups sit side by side.
        h = ax.bar(xrng + (idx-1) * width, mean_scores, yerr=std_scores, width=width, label=f'{label}', align='center', alpha=0.5, ecolor='black', color=color)

    ax.set_ylim(0, 1.2)
    ax.set_ylabel('OOD Scores', fontsize=fontsize)
    ax.set_xticks(xrng)
    ax.set_xticklabels(items, fontsize=fontsize-3)
    ax.legend(fontsize=fontsize, loc='upper center', ncol=3)
    plt.yticks(fontsize=fontsize)
    plt.tight_layout()
    plt.savefig(os.path.join(fig_path, 'OOD_Score_compare.png'))
"numpy.mean",
"os.makedirs",
"os.path.join",
"pickle.load",
"numpy.array",
"numpy.zeros",
"matplotlib.pyplot.yticks",
"matplotlib.pyplot.tight_layout",
"numpy.std",
"matplotlib.pyplot.subplots"
] | [((595, 631), 'os.makedirs', 'os.makedirs', (['fig_path'], {'exist_ok': '(True)'}), '(fig_path, exist_ok=True)\n', (606, 631), False, 'import os\n'), ((681, 715), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'figsize': '(8, 5)'}), '(1, 1, figsize=(8, 5))\n', (693, 715), True, 'import matplotlib.pyplot as plt\n'), ((2262, 2291), 'matplotlib.pyplot.yticks', 'plt.yticks', ([], {'fontsize': 'fontsize'}), '(fontsize=fontsize)\n', (2272, 2291), True, 'import matplotlib.pyplot as plt\n'), ((2296, 2314), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (2312, 2314), True, 'import matplotlib.pyplot as plt\n'), ((112, 125), 'numpy.mean', 'np.mean', (['data'], {}), '(data)\n', (119, 125), True, 'import numpy as np\n'), ((892, 958), 'os.path.join', 'os.path.join', (['"""output"""', 'folder', 'f"""split_{split}"""', '"""open_stats.pkl"""'], {}), "('output', folder, f'split_{split}', 'open_stats.pkl')\n", (904, 958), False, 'import os\n'), ((1135, 1146), 'numpy.zeros', 'np.zeros', (['(7)'], {}), '(7)\n', (1143, 1146), True, 'import numpy as np\n'), ((1170, 1181), 'numpy.zeros', 'np.zeros', (['(7)'], {}), '(7)\n', (1178, 1181), True, 'import numpy as np\n'), ((2331, 2378), 'os.path.join', 'os.path.join', (['fig_path', '"""OOD_Score_compare.png"""'], {}), "(fig_path, 'OOD_Score_compare.png')\n", (2343, 2378), False, 'import os\n'), ((1022, 1036), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1033, 1036), False, 'import pickle\n'), ((1083, 1112), 'numpy.array', 'np.array', (["stats['ood_scores']"], {}), "(stats['ood_scores'])\n", (1091, 1112), True, 'import numpy as np\n'), ((127, 139), 'numpy.std', 'np.std', (['data'], {}), '(data)\n', (133, 139), True, 'import numpy as np\n')] |
"""test_cvvidproc.py
First, shows image of ISCO pump setup, closes when user clicks spacebar
Second, shows first frame of video of inner stream, closes when user clicks spacebar
Third, computes background of samples video. Should look like first frame^ w/o objects.
"""
import time
import cv2
import cvvidproc
import sys
sys.path.append('../src/')
import cvimproc.improc as improc
# test 0: shows a static image of the ISCO pump; any key press closes it
image = cv2.imread('../input/images/img.jpg')
cv2.imshow('Click spacebar', image)
cv2.waitKey(0)
cv2.destroyAllWindows()

# test 1: shows the first frame from a video of the inner stream
vid_path = '../input/videos/vid2.mp4'
# loads video and reads a single frame
cap = cv2.VideoCapture(vid_path)
ret, frame = cap.read()
cv2.imshow('Click spacebar', frame)
cv2.waitKey(0)
cv2.destroyAllWindows()
cap.release()

# test 2: computes the median background over 1000 frames (12 worker
# threads) and shows the result; should match the first frame w/o objects
bkgd = improc.compute_bkgd_med_thread(vid_path, num_frames=1000, max_threads=12)
cv2.imshow('Background -- click spacebar', bkgd)
cv2.waitKey(0)
cv2.destroyAllWindows()
| [
"cv2.imshow",
"cv2.destroyAllWindows",
"cv2.VideoCapture",
"sys.path.append",
"cvimproc.improc.compute_bkgd_med_thread",
"cv2.waitKey",
"cv2.imread"
] | [((323, 349), 'sys.path.append', 'sys.path.append', (['"""../src/"""'], {}), "('../src/')\n", (338, 349), False, 'import sys\n'), ((428, 465), 'cv2.imread', 'cv2.imread', (['"""../input/images/img.jpg"""'], {}), "('../input/images/img.jpg')\n", (438, 465), False, 'import cv2\n'), ((466, 501), 'cv2.imshow', 'cv2.imshow', (['"""Click spacebar"""', 'image'], {}), "('Click spacebar', image)\n", (476, 501), False, 'import cv2\n'), ((502, 516), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (513, 516), False, 'import cv2\n'), ((517, 540), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (538, 540), False, 'import cv2\n'), ((650, 676), 'cv2.VideoCapture', 'cv2.VideoCapture', (['vid_path'], {}), '(vid_path)\n', (666, 676), False, 'import cv2\n'), ((701, 736), 'cv2.imshow', 'cv2.imshow', (['"""Click spacebar"""', 'frame'], {}), "('Click spacebar', frame)\n", (711, 736), False, 'import cv2\n'), ((737, 751), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (748, 751), False, 'import cv2\n'), ((752, 775), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (773, 775), False, 'import cv2\n'), ((856, 929), 'cvimproc.improc.compute_bkgd_med_thread', 'improc.compute_bkgd_med_thread', (['vid_path'], {'num_frames': '(1000)', 'max_threads': '(12)'}), '(vid_path, num_frames=1000, max_threads=12)\n', (886, 929), True, 'import cvimproc.improc as improc\n'), ((930, 978), 'cv2.imshow', 'cv2.imshow', (['"""Background -- click spacebar"""', 'bkgd'], {}), "('Background -- click spacebar', bkgd)\n", (940, 978), False, 'import cv2\n'), ((979, 993), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (990, 993), False, 'import cv2\n'), ((994, 1017), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (1015, 1017), False, 'import cv2\n')] |
# -*- coding: utf-8 -*-
# Copyright (c) 2020, LEAM Technology System and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.integrations.utils import make_post_request
from frappe.model.naming import make_autoname
from renovation_core.utils.fcm import make_communication_doc, \
is_valid_session_id
def send_huawei_notification_to_topic(topic, title, body, data=None,
                                      custom_android_configuration=None):
  """Push a notification to every subscriber of *topic* via Huawei Push Kit.

  A unique ``message_id`` is stamped onto *data* before sending and, when
  Push Kit accepts the message, a Communication document is recorded so
  the notification is auditable.
  """
  data = data or frappe._dict({})
  data.message_id = "HUAWEI-{}-{}".format(
      topic, make_autoname("hash", "Communication"))
  response = send_huawei_notifications(
      topic=topic, title=title, body=body, data=data,
      custom_android_configuration=custom_android_configuration)
  # Only log the communication when the push actually went out.
  if response:
    make_communication_doc(data.message_id, title, body, data, topic=topic)
def send_huawei_notifications(tokens=None, topic=None, title=None, body=None,
                              data=None, custom_android_configuration=None):
  """Send a push message through the Huawei Push Kit REST API.

  Exactly one recipient selector is used: *tokens* (list of device tokens)
  takes precedence over *topic* (topic name). Returns the parsed API
  response, or None when site configuration / authorization is missing or
  the HTTP request raised.
  """
  # Site config must provide app_id, client_id and client_secret.
  config = frappe.get_site_config().get("huawei_push_kit_config")
  if not config or not config.get('app_id') or not config.get(
      'client_id') or not config.get('client_secret'):
    frappe.log_error(
      title="Huawei Push Kit Error",
      message="Message: {}".format(frappe._("Missing secret keys in config")))
    return
  authorization_token = get_huawei_auth_token(config)
  if not authorization_token:
    frappe.log_error(
      title="Huawei Push Kit Error",
      message="Message: {}".format(frappe._("Authorization token missing.")))
    return
  url = "https://push-api.cloud.huawei.com/v1/{}/messages:send".format(
    config.get('app_id'))
  # message format
  # {
  #   data:str ,
  #   notification: { 'title' , 'body' , 'image' },
  #   android: check docs..,
  #   apns: check docs..,
  #   webpush: check docs..,
  #   token: [] ,
  #   topic: [] ,
  #   condition : '' check docs...
  # }
  message = {
    "data": frappe.as_json(data) if data else {},
    "notification": {"title": title, "body": body},
    "android": {
      "notification": {
        # NOTE(review): click_action type 3 — see Huawei Push Kit
        # click_action docs for the exact tap behaviour this selects.
        "click_action": {
          "type": 3
        }
      }
    }
  }
  # Callers may override/extend the android section wholesale.
  if custom_android_configuration and isinstance(custom_android_configuration,
                                                 dict):
    message['android'].update(custom_android_configuration)
  response = None
  headers = {"Content-Type": "application/json",
             "Authorization": authorization_token}
  if tokens and len(tokens):
    message.update({"token": tokens})
    try:
      payload = frappe._dict(validate_only=False, message=message)
      response = make_post_request(url, data=frappe.as_json(payload),
                                   headers=headers)
      # The handler is called on success too: it inspects the response
      # code, handles partial failures and purges invalid tokens.
      huawei_push_kit_error_handler(tokens=tokens, topic=topic, title=title,
                                    body=body,
                                    data=data,
                                    recipient_count=len(tokens),
                                    request_params=message)
    except Exception as exc:
      huawei_push_kit_error_handler(tokens=tokens, topic=topic, title=title,
                                    body=body,
                                    data=data,
                                    exc=exc,
                                    recipient_count=len(tokens),
                                    request_params=message)
    print("Sending to tokens: {}".format(tokens))
  elif topic:
    message.update({"topic": topic})
    try:
      payload = frappe._dict(validate_only=False, message=message)
      response = make_post_request(url, data=frappe.as_json(payload),
                                   headers=headers)
      huawei_push_kit_error_handler(tokens=tokens, topic=topic, title=title,
                                    body=body,
                                    data=data,
                                    request_params=message)
    except Exception as exc:
      huawei_push_kit_error_handler(tokens=tokens, topic=topic, title=title,
                                    body=body,
                                    data=data,
                                    exc=exc,
                                    request_params=message)
  # NOTE(review): this print runs for the token path as well (topic=None).
  print("Sent TOPIC {} Msg: {}".format(topic, response))
  return response
def get_huawei_tokens_for(target, roles=None, users=None):
  """Collect Huawei device tokens for *target* ("Users" or "Roles").

  For "Roles" the role list is first expanded to the set of users holding
  any of those roles; empty tokens are filtered out of the result.
  """
  if target == "Roles":
    rows = frappe.db.get_all(
        "Has Role", fields=["distinct parent"],
        filters={"role": ["IN", roles or []]})
    users = [row.parent for row in rows]
    target = "Users"
  if target != "Users":
    frappe.throw("Invalid Target")
  collected = []
  for user in users:
    collected.extend(get_huawei_client_tokens(user=user))
  return [token for token in collected if token]
def send_huawei_notification_to_user(user, title, body, data=None,
                                     custom_android_configuration=None):
  """Push a notification to all of *user*'s registered Huawei devices.

  A unique ``message_id`` is stamped onto *data* for record keeping; when
  the batch send succeeds, a Communication document is created against the
  user.
  """
  tokens = get_huawei_tokens_for("Users", users=[user])
  data = data or frappe._dict({})
  # message_id is used when saving the Communication below.
  data.message_id = "HUAWEI-{}-{}".format(
      user, make_autoname("hash", "Communication"))
  response = send_huawei_notifications(
      tokens=tokens, title=title, body=body, data=data,
      custom_android_configuration=custom_android_configuration)
  if response:
    make_communication_doc(data.message_id, title, body, data, user=user)
def get_huawei_client_tokens(user=None):
  """Return the list of valid Huawei device tokens registered for *user*.

  Defaults to the current session user (or "Guest"). Tokens tied to an
  expired session are deleted on the fly and excluded from the result.
  """
  if not user:
    user = frappe.session.user if frappe.session else "Guest"
  records = frappe.get_all("Huawei User Token",
                           fields=["name", "token", "linked_sid"],
                           filters={"user": user})
  valid_tokens = []
  for record in records:
    if record.linked_sid and not is_valid_session_id(record.linked_sid):
      # Stale session: purge the token record instead of returning it.
      frappe.delete_doc("Huawei User Token", record.name,
                        ignore_permissions=True)
    else:
      valid_tokens.append(record.token)
  return valid_tokens
def delete_huawei_invalid_tokens(tokens):
  """Delete the Huawei User Token records matching each token in *tokens*."""
  for token in tokens:
    doc_name = frappe.db.get_value("Huawei User Token", {"token": token})
    if doc_name:
      frappe.delete_doc("Huawei User Token", doc_name,
                        ignore_permissions=True)
def get_huawei_auth_token(config):
  """Return an OAuth "<token_type> <access_token>" string for Push Kit.

  Serves the token from the redis cache when present; otherwise performs
  the client_credentials grant against Huawei's OAuth endpoint and caches
  the result. Returns '' (empty string, falsy) on failure.
  """
  if not config or not config.get('app_id') or not config.get(
      'client_id') or not config.get('client_secret'):
    frappe.log_error(
      title="Huawei Push Kit Error",
      message="Message: {}".format(frappe._("Missing secret keys in config")))
    return
  # Fast path: reuse a still-valid cached token.
  cache_auth_token = check_redis_cache_for_huawei_auth_token()
  if cache_auth_token:
    return cache_auth_token
  url = "https://oauth-login.cloud.huawei.com/oauth2/v3/token"
  headers = {"Content-Type": "application/x-www-form-urlencoded",
             "Accept": "application/json"}
  payload = {
    "grant_type": "client_credentials",
    "client_id": config.get("client_id"),
    "client_secret": config.get("client_secret")
  }
  access_token = ''
  try:
    response = make_post_request(url, data=payload,
                                  headers=headers)
    # e.g. "Bearer <access_token>" — the header value used by senders.
    access_token = "{} {}".format(response.get('token_type'),
                                  response.get('access_token'))
    set_redis_cache_huawei_auth_token(access_token, response.get('expires_in'))
  except Exception as exc:
    # make_post_request stores the failed request on frappe.flags;
    # pull Huawei's error body from there for logging.
    status_code = frappe.flags.integration_request.status_code
    error = frappe.parse_json(frappe.flags.integration_request.json())
    huawei_error_code = error.get('error')
    sub_error = error.get('sub_error')
    error_description = error.get('error_description')
    print(
      "{}\nStatus Code: {}\nHuawei Error: {}\nSub Error: {}\nError Description: {}".format(
        str(exc), status_code,
        huawei_error_code,
        sub_error,
        error_description))
    frappe.log_error(
      title="Huawei Push Kit Error",
      message="{}\n{}\nStatus Code: {}\nHuawei Error: {}\nSub Error: {}\nError Description: {}".format(
        "Get Authorization token error.",
        str(exc),
        status_code,
        huawei_error_code,
        sub_error,
        error_description))
  return access_token
def check_redis_cache_for_huawei_auth_token():
  """Return the Huawei OAuth token cached in redis, or None if absent/expired.

  Reads from the same (user, key) slot that
  :func:`set_redis_cache_huawei_auth_token` writes to.
  """
  # Fetch the defaults once instead of calling the helper twice.
  defaults = get_default_values_for_redis_key()
  return frappe.cache().get_value(defaults.key, user=defaults.user,
                                  expires=True)
def get_default_values_for_redis_key():
  """Return the (user, key) pair under which the Huawei auth token is cached."""
  return frappe._dict({"user": "Administrator", "key": "huawei_auth_token"})
def set_redis_cache_huawei_auth_token(auth_token: str, expires_in_sec):
  """Cache *auth_token* in redis, expiring 10s before Huawei's own expiry.

  The 10-second safety margin avoids handing out a token that would lapse
  mid-request.
  """
  # Fetch the defaults once instead of calling the helper twice.
  defaults = get_default_values_for_redis_key()
  frappe.cache().set_value(defaults.key, auth_token, user=defaults.user,
                            expires_in_sec=expires_in_sec - 10)
def huawei_push_kit_error_handler(tokens=None, topic=None, title=None,
                                  exc=None, body=None, data=None,
                                  recipient_count=1, request_params=None):
  """Inspect the last Push Kit response; log failures and purge bad tokens.

  Called after every send attempt (successful or not). Reads the raw HTTP
  result from ``frappe.flags.integration_request``. On full success
  (code 80000000) it returns silently; on partial success (code 80100000)
  it deletes the tokens Huawei reported as illegal; anything else is
  written to the Error Log.
  """
  # Example success body:
  # {
  #   "code": "80000000",
  #   "msg": "Success",
  #   "requestId": "157440955549500001002006"
  # }
  status_code = frappe.flags.integration_request.status_code
  response = None
  try:
    response = frappe.parse_json(frappe.flags.integration_request.json())
  except Exception as e:
    # Body may not be JSON (or may be empty); fall through with response=None.
    pass
  huawei_error_code = response.get('error') if isinstance(response,
                                                          dict) else ''
  sub_error = response.get('sub_error') if isinstance(response, dict) else ''
  error_description = response.get('error_description') if isinstance(response,
                                                                      dict) else ''
  success_count = 0
  failure_count = 0
  if isinstance(response, dict) and response.get('code') == '80000000':
    # Full success: everyone received the message.
    success_count = recipient_count
  elif isinstance(response, dict) and response.get('code') == '80100000':
    # Partial success: 'msg' is a JSON string with per-token results.
    msg = frappe.parse_json(response.get('msg'))
    success_count = msg.get('success', 0)
    failure_count = msg.get('failure', 0)
    delete_huawei_invalid_tokens(
      frappe.parse_json(msg.get("illegal_tokens", "")))
  preMessage = "Tokens: {}\nTopic: {}\nTitle: {}\nBody: {}\nData: {}\nSuccess/Recipients: {}/{} \nFailure:{}".format(
    tokens, topic, title, body, data, success_count, recipient_count,
    failure_count)
  # Nothing to report on full success.
  if response and response.get('code') == '80000000':
    return
  code = response.get('code') if isinstance(response, dict) else ''
  message = response.get('msg') if isinstance(response, dict) else ''
  print(
    "- EXC\nCode: {}\nMessage: {}".format(code, message))
  frappe.log_error(
    title="Huawei Push Kit Error",
    message="{}\nEXC: {}\nCode: {}\nMessage: {}\nStatus Code: {}\nHuawei Error Code: {}\nSub Error: {}\nError Description: {}\n Request Params: {}".format(
      preMessage,
      str(exc),
      code,
      message,
      status_code,
      huawei_error_code,
      sub_error,
      error_description,
      request_params
    ))
def notify_via_hpk(title, body, data=None, roles=None, users=None, topics=None,
                   tokens=None, custom_android_configuration=None):
  """Queue a Huawei Push Kit notification to be sent after the DB commit.

  Thin asynchronous wrapper around :func:`_notify_via_hpk`; every argument
  is forwarded unchanged through the background job queue.
  """
  job_kwargs = dict(
      title=title, body=body, data=data, roles=roles, users=users,
      topics=topics, tokens=tokens,
      custom_android_configuration=custom_android_configuration)
  frappe.enqueue("renovation_core.utils.hpk._notify_via_hpk",
                 enqueue_after_commit=True, **job_kwargs)
def _notify_via_hpk(title, body, data=None, roles=None, users=None,
                    topics=None, tokens=None,
                    custom_android_configuration=None):
  """Background worker behind :func:`notify_via_hpk`.

  Fans the notification out to every explicit user, every user holding one
  of *roles*, each topic in *topics*, and the raw device *tokens*.

  Raises (via ``frappe.throw``) when *data* is neither None nor a dict.
  """
  users = set(users or [])
  if roles:
    # BUGFIX: set.union() returns a NEW set and does not mutate in place,
    # so the original call silently discarded the role-derived users.
    # update() mutates `users` as intended.
    users.update(x.parent for x in frappe.db.get_all(
        "Has Role", fields=["distinct parent"],
        filters={"role": ["IN", roles or []]}))
  if data is None:  # PEP 8: compare to None with `is`, not `==`
    data = frappe._dict()
  if not isinstance(data, dict):
    frappe.throw("Data should be a key-value pair for HPK")
  else:
    data = frappe._dict(data)
  for user in users:
    send_huawei_notification_to_user(
        user, title=title, body=body, data=data,
        custom_android_configuration=custom_android_configuration)
  for topic in set(topics or []):
    send_huawei_notification_to_topic(
        topic=topic, title=title, body=body, data=data,
        custom_android_configuration=custom_android_configuration)
  tokens = set(tokens or [])
  if tokens:
    send_huawei_notifications(
        list(tokens), title=title, body=body, data=data,
        custom_android_configuration=custom_android_configuration)
| [
"frappe.flags.integration_request.json",
"frappe._dict",
"frappe.db.get_value",
"frappe.db.get_all",
"frappe.throw",
"renovation_core.utils.fcm.make_communication_doc",
"frappe.integrations.utils.make_post_request",
"frappe.get_site_config",
"frappe._",
"frappe.enqueue",
"frappe.delete_doc",
"... | [((5824, 5927), 'frappe.get_all', 'frappe.get_all', (['"""Huawei User Token"""'], {'fields': "['name', 'token', 'linked_sid']", 'filters': "{'user': user}"}), "('Huawei User Token', fields=['name', 'token', 'linked_sid'],\n filters={'user': user})\n", (5838, 5927), False, 'import frappe\n'), ((8575, 8634), 'frappe._dict', 'frappe._dict', ([], {'user': '"""Administrator"""', 'key': '"""huawei_auth_token"""'}), "(user='Administrator', key='huawei_auth_token')\n", (8587, 8634), False, 'import frappe\n'), ((11281, 11530), 'frappe.enqueue', 'frappe.enqueue', (['"""renovation_core.utils.hpk._notify_via_hpk"""'], {'enqueue_after_commit': '(True)', 'title': 'title', 'body': 'body', 'data': 'data', 'roles': 'roles', 'users': 'users', 'topics': 'topics', 'tokens': 'tokens', 'custom_android_configuration': 'custom_android_configuration'}), "('renovation_core.utils.hpk._notify_via_hpk',\n enqueue_after_commit=True, title=title, body=body, data=data, roles=\n roles, users=users, topics=topics, tokens=tokens,\n custom_android_configuration=custom_android_configuration)\n", (11295, 11530), False, 'import frappe\n'), ((551, 567), 'frappe._dict', 'frappe._dict', (['{}'], {}), '({})\n', (563, 567), False, 'import frappe\n'), ((660, 698), 'frappe.model.naming.make_autoname', 'make_autoname', (['"""hash"""', '"""Communication"""'], {}), "('hash', 'Communication')\n", (673, 698), False, 'from frappe.model.naming import make_autoname\n'), ((954, 1025), 'renovation_core.utils.fcm.make_communication_doc', 'make_communication_doc', (['data.message_id', 'title', 'body', 'data'], {'topic': 'topic'}), '(data.message_id, title, body, data, topic=topic)\n', (976, 1025), False, 'from renovation_core.utils.fcm import make_communication_doc, is_valid_session_id\n'), ((4747, 4777), 'frappe.throw', 'frappe.throw', (['"""Invalid Target"""'], {}), "('Invalid Target')\n", (4759, 4777), False, 'import frappe\n'), ((5131, 5147), 'frappe._dict', 'frappe._dict', (['{}'], {}), '({})\n', (5143, 5147), 
False, 'import frappe\n'), ((5261, 5299), 'frappe.model.naming.make_autoname', 'make_autoname', (['"""hash"""', '"""Communication"""'], {}), "('hash', 'Communication')\n", (5274, 5299), False, 'from frappe.model.naming import make_autoname\n'), ((5552, 5621), 'renovation_core.utils.fcm.make_communication_doc', 'make_communication_doc', (['data.message_id', 'title', 'body', 'data'], {'user': 'user'}), '(data.message_id, title, body, data, user=user)\n', (5574, 5621), False, 'from renovation_core.utils.fcm import make_communication_doc, is_valid_session_id\n'), ((6247, 6301), 'frappe.db.get_value', 'frappe.db.get_value', (['"""Huawei User Token"""', "{'token': t}"], {}), "('Huawei User Token', {'token': t})\n", (6266, 6301), False, 'import frappe\n'), ((7164, 7217), 'frappe.integrations.utils.make_post_request', 'make_post_request', (['url'], {'data': 'payload', 'headers': 'headers'}), '(url, data=payload, headers=headers)\n', (7181, 7217), False, 'from frappe.integrations.utils import make_post_request\n'), ((11973, 11987), 'frappe._dict', 'frappe._dict', ([], {}), '()\n', (11985, 11987), False, 'import frappe\n'), ((12026, 12081), 'frappe.throw', 'frappe.throw', (['"""Data should be a key-value pair for HPK"""'], {}), "('Data should be a key-value pair for HPK')\n", (12038, 12081), False, 'import frappe\n'), ((12101, 12119), 'frappe._dict', 'frappe._dict', (['data'], {}), '(data)\n', (12113, 12119), False, 'import frappe\n'), ((1194, 1218), 'frappe.get_site_config', 'frappe.get_site_config', ([], {}), '()\n', (1216, 1218), False, 'import frappe\n'), ((2109, 2129), 'frappe.as_json', 'frappe.as_json', (['data'], {}), '(data)\n', (2123, 2129), False, 'import frappe\n'), ((2719, 2769), 'frappe._dict', 'frappe._dict', ([], {'validate_only': '(False)', 'message': 'message'}), '(validate_only=False, message=message)\n', (2731, 2769), False, 'import frappe\n'), ((6046, 6117), 'frappe.delete_doc', 'frappe.delete_doc', (['"""Huawei User Token"""', 't.name'], 
{'ignore_permissions': '(True)'}), "('Huawei User Token', t.name, ignore_permissions=True)\n", (6063, 6117), False, 'import frappe\n'), ((6318, 6384), 'frappe.delete_doc', 'frappe.delete_doc', (['"""Huawei User Token"""', 't'], {'ignore_permissions': '(True)'}), "('Huawei User Token', t, ignore_permissions=True)\n", (6335, 6384), False, 'import frappe\n'), ((8456, 8470), 'frappe.cache', 'frappe.cache', ([], {}), '()\n', (8468, 8470), False, 'import frappe\n'), ((8807, 8821), 'frappe.cache', 'frappe.cache', ([], {}), '()\n', (8819, 8821), False, 'import frappe\n'), ((9364, 9403), 'frappe.flags.integration_request.json', 'frappe.flags.integration_request.json', ([], {}), '()\n', (9401, 9403), False, 'import frappe\n'), ((3684, 3734), 'frappe._dict', 'frappe._dict', ([], {'validate_only': '(False)', 'message': 'message'}), '(validate_only=False, message=message)\n', (3696, 3734), False, 'import frappe\n'), ((4586, 4686), 'frappe.db.get_all', 'frappe.db.get_all', (['"""Has Role"""'], {'fields': "['distinct parent']", 'filters': "{'role': ['IN', roles or []]}"}), "('Has Role', fields=['distinct parent'], filters={'role':\n ['IN', roles or []]})\n", (4603, 4686), False, 'import frappe\n'), ((6005, 6038), 'renovation_core.utils.fcm.is_valid_session_id', 'is_valid_session_id', (['t.linked_sid'], {}), '(t.linked_sid)\n', (6024, 6038), False, 'from renovation_core.utils.fcm import make_communication_doc, is_valid_session_id\n'), ((7577, 7616), 'frappe.flags.integration_request.json', 'frappe.flags.integration_request.json', ([], {}), '()\n', (7614, 7616), False, 'import frappe\n'), ((1459, 1500), 'frappe._', 'frappe._', (['"""Missing secret keys in config"""'], {}), "('Missing secret keys in config')\n", (1467, 1500), False, 'import frappe\n'), ((1692, 1732), 'frappe._', 'frappe._', (['"""Authorization token missing."""'], {}), "('Authorization token missing.')\n", (1700, 1732), False, 'import frappe\n'), ((2815, 2838), 'frappe.as_json', 'frappe.as_json', (['payload'], {}), 
'(payload)\n', (2829, 2838), False, 'import frappe\n'), ((6632, 6673), 'frappe._', 'frappe._', (['"""Missing secret keys in config"""'], {}), "('Missing secret keys in config')\n", (6640, 6673), False, 'import frappe\n'), ((3780, 3803), 'frappe.as_json', 'frappe.as_json', (['payload'], {}), '(payload)\n', (3794, 3803), False, 'import frappe\n'), ((11836, 11936), 'frappe.db.get_all', 'frappe.db.get_all', (['"""Has Role"""'], {'fields': "['distinct parent']", 'filters': "{'role': ['IN', roles or []]}"}), "('Has Role', fields=['distinct parent'], filters={'role':\n ['IN', roles or []]})\n", (11853, 11936), False, 'import frappe\n')] |