# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'cellPoseUI.ui'
# Created by: PyQt5 UI code generator 5.11.3
import os, platform, ctypes, sys
from PyQt5 import QtWidgets
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QFontDatabase
from scellseg.guis.scellsegUi import Ui_MainWindow
class scellsegGui(Ui_MainWindow):
def __init__(self, image=None, parent = None):
super(scellsegGui, self).__init__(parent)
self.setupUi(self)
self.splitter.setSizes([500, 250])
self.splitter.handle(1).setAttribute(Qt.WA_Hover, True)
self.splitter2.handle(1).setAttribute(Qt.WA_Hover, True)
def closeEvent(self, event):
        answer = QtWidgets.QMessageBox.question(
            self, 'Close', 'Close Scellseg',
            QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No,
            QtWidgets.QMessageBox.No)
if answer == QtWidgets.QMessageBox.Yes:
event.accept()
elif answer == QtWidgets.QMessageBox.No:
event.ignore()
def start_gui():
Translucent = 'rgba(255,255,255,0)'
Primary = '#fafafa'
PrimaryLight = '#C0C0C0'
ListColor = '#F0F0F0'
SliderColor = '#0078D7'
LabelColor = '#7A581E'
BlackColor = '#000000'
BtnColor = '#0066FF'
Secondary = '#D3D3D3'
SecondaryLight = '#D3D3D3'
SecondaryDark = '#D3D3D3'
SecondaryText = '#000000'
border_image_path = os.path.dirname(os.path.abspath(__file__)).replace('\\', '/') + '/assets/slider_handle.png'
sheet = [
'QWidget',
'{',
'outline: 0;',
        'font: 11pt "文泉驿微米黑";',  # WenQuanYi Micro Hei (wqy-microhei.ttc, loaded below)
'selection-color: {0:s};'.format(SecondaryText),
'selection-background-color: {0:s};'.format(Secondary),
' } ',
        'QSlider::handle:horizontal#rangeslider',
        '{',
'border-image: url({0:s});'.format(border_image_path),
'}',
'QLabel#label_seg',
'{',
'color: {0:s};'.format(LabelColor),
'font: bold 18px "Arial"',
'}',
'QLabel#label_batchseg',
'{',
'color: {0:s};'.format(LabelColor),
'font: bold 18px "Arial"',
'}',
'QLabel#label_getsingle',
'{',
'color: {0:s};'.format(LabelColor),
'font: bold 18px "Arial"',
'}',
'QSplitter::handle:horizontal',
'{',
'width: 10px;',
'}',
'QSplitter::handle:vertical',
'{',
'height: 10px;',
'}',
'QSplitter::handle',
'{',
'background-color: {0:s};'.format(Translucent),
'}',
'QSplitter::handle:hover',
'{',
'background-color: {0:s};'.format(Secondary),
'}',
'QSplitter::handle:pressed',
'{',
'background-color: {0:s};'.format(Secondary),
'}',
'QTableView',
'{',
'background-color: {0:s};'.format(ListColor),
'border-style: none;',
'}',
'QHeaderView',
'{',
'background-color: {0:s};'.format(Translucent),
'border-bottom: 2px solid #505050',
'}',
'QHeaderView::section',
'{',
'background-color: {0:s};'.format(Translucent),
'border-bottom: 2px solid #505050',
'}',
'QMenuBar',
'{',
'background-color: {0:s};'.format(Primary),
'border-width: 1px;',
'border-style: none;',
'border-color: {0:s};'.format(SecondaryDark),
'color: {0:s};'.format(SecondaryText),
'margin: 0px;',
'}',
'QMenuBar::item:selected',
'{',
'background-color: {0:s};'.format(Secondary),
'color: {0:s};'.format(SecondaryText),
'}',
'QMenu',
'{',
'background-color:{0:s};'.format(PrimaryLight),
'border-width: 2px;',
'border-style: solid;',
'border-color: {0:s};'.format(SecondaryDark),
'margin: 0px;',
'}',
        'QMenu::separator',
        '{',
        'height: 2px;',
'background-color: {0:s};'.format(Primary),
'margin: 0px 2px;',
'}',
'QMenu::icon:checked',
'{',
'background-color: {0:s};'.format(Secondary),
'border-width: 1px;',
'border-style: solid;',
'border-color: {0:s};'.format(Primary),
'}',
'QMenu::item',
'{',
'padding: 4px 25px 4px 20px;',
'}',
'QMenu::item:selected',
'{',
'background-color: {0:s};'.format(Secondary),
'color: {0:s};'.format(SecondaryText),
'}',
'QToolBox::tab',
'{',
'background-color: {0:s};'.format(SecondaryLight),
'border: 2px solid #e3e3e3;',
'padding: 5px;',
'}',
'QToolBox::tab:selected',
'{',
'background-color: {0:s};'.format(SecondaryDark),
'color: {0:s};'.format(SecondaryText),
'border: 2px solid #333;',
'}',
'QWidget#page,QWidget#page_2,QWidget#page_3',
'{',
        'background-color: #F0F0F0;',
# 'background-image: url(./assets/background.jpg);',
'}',
'QProgressBar {',
'border: 1px solid rgb(0,0,0);',
'border-radius: 2px;',
'background-color: {0:s};'.format(SecondaryLight),
'}',
'QProgressBar::chunk {',
'border: 1px solid rgb(0,0,0);',
'border-radius: 0px;',
'background-color: {0:s};'.format(SecondaryDark),
'width: 10px;',
'margin: 2px;',
'}',
'QLabel#jLabelPicture',
'{',
'border-width: 2px;',
'border-radius: 0px;',
'border-style: solid;',
'border-color: {0:s};'.format(SecondaryDark),
'}',
'QScrollBar,QScrollBar::add-line,QScrollBar::add-page,QScrollBar::sub-line,QScrollBar::sub-page',
'{',
'background-color: {0:s};'.format(Translucent),
'}',
'QScrollBar:horizontal',
'{',
'height: 10px;',
'}',
'QScrollBar:vertical',
'{',
'width: 10px;',
'}',
'QScrollBar::handle',
'{',
'background-color: {0:s};'.format(Translucent),
'}',
'QScrollBar::handle:hover',
'{',
'background-color: {0:s};'.format(Secondary),
'}',
'QScrollBar::handle:pressed',
'{',
'background-color: {0:s};'.format(Secondary),
'}',
]
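    # The rules above are joined with newlines and applied application-wide below.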
app = QtWidgets.QApplication(sys.argv)
loadedFontID = QFontDatabase.addApplicationFont(
os.path.join(os.path.dirname(os.path.abspath(__file__)), "assets", "Font", "wqy-microhei.ttc"))
print('operating system: ', platform.system())
if platform.system() == 'Windows':
ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID("scellseg")
gui = scellsegGui()
app.setStyleSheet('\n'.join(sheet))
gui.show()
sys.exit(app.exec_())
if __name__ == "__main__":
    start_gui()
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This is the simplest TFF program that performs a federated computation of sums.
# It defines a program that locally computes a range at each federated node, sums it,
# and then creates a federated sum across all participants.
#
#
import tensorflow as tf
import tensorflow_federated as tff
import numpy as np
import nest_asyncio
nest_asyncio.apply()
from collections import OrderedDict
from group_by_key_lib import gather_data, key_list_func
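
# A hedged sketch of the "local range sum + federated sum" idea described in the
# header comment. The names `_local_range_sum` / `_federated_range_sum` are
# illustrative, not part of this project; only standard TFF intrinsics are used.
@tff.tf_computation(tf.int32)
def _local_range_sum(n):
    # Each client sums 0..n-1 locally.
    return tf.reduce_sum(tf.range(n))

@tff.federated_computation(tff.FederatedType(tf.int32, tff.CLIENTS))
def _federated_range_sum(client_n):
    # The server then sums the per-client results.
    return tff.federated_sum(tff.federated_map(_local_range_sum, client_n))
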
def run():
dataset = gather_data("1")
key_list = key_list_func(dataset)
key_list_t = [t.numpy() for t in key_list]
@tf.function
def count_by_key(ds):
key_size = len(key_list_t)
idx_list = tf.range(key_size, dtype=tf.int64)
key_lookup = tf.lookup.StaticHashTable(
tf.lookup.KeyValueTensorInitializer(key_list_t, idx_list),
default_value=-1)
@tf.function
        def _count_keys(accumulator, values):
            indices = key_lookup.lookup(values["KEY"])
            onehot = tf.one_hot(indices, depth=tf.cast(key_size, tf.int32), dtype=tf.int32)
            return accumulator + onehot
return ds.reduce(
initial_state=tf.zeros([key_size], tf.int32),
reduce_func=_count_keys)
@tff.federated_computation(tff.FederatedType(tf.string, tff.CLIENTS))
def federated_group_agg(id):
# wrap the used function into tff computations
tff_gather_data_func = tff.tf_computation(gather_data, tf.string)
# Derive the dataset type from the gather function
tff_dataset_type = tff_gather_data_func.type_signature.result # tff.SequenceType(OrderedDict([('TRANS_ID', tf.string), ('SEND_BIC', tf.int64), ('REC_BIC', tf.int64), ('KEY', tf.int64)]))
# continue to wrap functions
tff_count_by_key = tff.tf_computation(count_by_key, tff_dataset_type)
tff_key_list_func = tff.tf_computation(key_list_func, tff_dataset_type)
# print out type signature (for dev purposes)
print(tff_gather_data_func.type_signature)
print(tff_count_by_key.type_signature)
print(tff_key_list_func.type_signature)
# Get dataset on client side
tff_client_dataset = tff.federated_map(tff_gather_data_func, id)
# Calculate the aggregates per client
client_aggregates = tff.federated_map(tff_count_by_key, tff_client_dataset)
# Start to build the aggregation function
@tff.tf_computation()
def build_zeros():
key_size = len(key_list_t)
return tf.zeros([key_size], tf.int32)
@tff.tf_computation(build_zeros.type_signature.result,build_zeros.type_signature.result)
def accumulate(accum, delta):
return accum + delta
@tff.tf_computation(accumulate.type_signature.result)
def report(accum):
return tf.convert_to_tensor(key_list_t), accum
aggregate = tff.federated_aggregate(
value=client_aggregates,
zero=build_zeros(),
accumulate=accumulate,
merge=accumulate,
report=report,
)
# Second one to print out type signatures (for dev purposes)
print(build_zeros.type_signature) # ( -> int32[key_size])
print(accumulate.type_signature) # (<int32[key_size],int32[key_size]> -> int32[key_size])
print(report.type_signature) # (int32[key_size] -> <string[K],int32[]>)
print(aggregate.type_signature)
return aggregate
## Now execute the federated
result = federated_group_agg(["1","2"])
print(result)
if __name__ == "__main__":
print("Running this in federated mode" )
run()
import os
from ibm_watson import LanguageTranslatorV3
from ibm_cloud_sdk_core.authenticators import IAMAuthenticator
from dotenv import load_dotenv
load_dotenv()
apikey = os.environ['apikey']
url = os.environ['url']
VERSION = '2018-05-01'
authenticator = IAMAuthenticator(apikey)
language_translator = LanguageTranslatorV3(version=VERSION, authenticator=authenticator)
language_translator.set_service_url(url)
def englishToFrench(english_text):
    french_translation = language_translator.translate(text=english_text, model_id='en-fr')
    result = french_translation.get_result()
    french_text = result['translations'][0]['translation']
    return french_text
def frenchToEnglish(french_text):
    english_translation = language_translator.translate(text=french_text, model_id='fr-en')
    result = english_translation.get_result()
    english_text = result['translations'][0]['translation']
    return english_text
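
# Example round-trip, assuming valid IBM Cloud `apikey`/`url` values are present in
# the environment (the translations shown are illustrative):
#   englishToFrench("Hello")     # -> "Bonjour"
#   frenchToEnglish("Bonjour")   # -> "Hello"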
| [
"ibm_watson.LanguageTranslatorV3",
"ibm_cloud_sdk_core.authenticators.IAMAuthenticator",
"dotenv.load_dotenv"
] | [((149, 162), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (160, 162), False, 'from dotenv import load_dotenv\n'), ((253, 277), 'ibm_cloud_sdk_core.authenticators.IAMAuthenticator', 'IAMAuthenticator', (['apikey'], {}), '(apikey)\n', (269, 277), False, 'from ibm_cloud_sdk_core.authenticators import IAMAuthenticator\n'), ((299, 365), 'ibm_watson.LanguageTranslatorV3', 'LanguageTranslatorV3', ([], {'version': 'VERSION', 'authenticator': 'authenticator'}), '(version=VERSION, authenticator=authenticator)\n', (319, 365), False, 'from ibm_watson import LanguageTranslatorV3\n')] |
import trace_malloc as trace
'''Trace the 10 files allocating the most memory'''
trace.start()
# ... run your code ...
snapshot = trace.take_snapshot()
top_stats = snapshot.statistics('lineno')
print("[ Top 10 ]")
for stat in top_stats[:10]:
print(stat)
'''Backtrack the largest memory block'''
# Store 25 frames
trace.start(25)
# ... run your code ...
snapshot = trace.take_snapshot()
top_stats = snapshot.statistics('traceback')
# pick the biggest memory block
stat = top_stats[0]
print("%s memory blocks: %.1f KiB" % (stat.count, stat.size / 1024))
for line in stat.traceback.format():
print(line)
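
# The standard-library `tracemalloc` module, whose API this usage mirrors, can also
# diff two snapshots; a sketch, assuming `trace` exposes the same `compare_to` API:
#   snapshot1 = trace.take_snapshot()
#   # ... run more code ...
#   snapshot2 = trace.take_snapshot()
#   for stat in snapshot2.compare_to(snapshot1, 'lineno')[:10]:
#       print(stat)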
''' '''
from django.test import TestCase, Client
from django.urls import reverse
from django.test.utils import setup_test_environment
from bs4 import BeautifulSoup
import re
import time
from projects.models import *
from projects.forms import *
client = Client()
# length of base template, used to test for empty pages
LEN_BASE = 2600
class BaseWebsiteTestCase(TestCase):
def setUp(self):
super()
def test_homepage_load(self):
url = reverse("projects:home")
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_homepage_not_empty(self):
url = reverse("projects:home")
response = self.client.get(url)
self.assertGreater(len(response.content), LEN_BASE)
def test_project_list_load(self):
url = reverse("projects:projects_list")
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_project_list_not_empty(self):
url = reverse("projects:projects_list")
response = self.client.get(url)
self.assertGreater(len(response.content), LEN_BASE)
def test_project_students_load(self):
url = reverse("projects:students")
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_project_students_not_empty(self):
url = reverse("projects:students")
response = self.client.get(url)
self.assertGreater(len(response.content), LEN_BASE)
def test_project_educators_load(self):
url = reverse("projects:educators")
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_project_educators_not_empty(self):
url = reverse("projects:educators")
response = self.client.get(url)
self.assertGreater(len(response.content), LEN_BASE)
def test_project_leaders_load(self):
url = reverse("projects:leaders")
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_project_leaders_not_empty(self):
url = reverse("projects:leaders")
response = self.client.get(url)
self.assertGreater(len(response.content), LEN_BASE)
VERBOSE = False
class TraverseLinksTest(TestCase):
def setUp(self):
# By default, login as superuser
um = OpenSUTDUserManager()
um.create_user("tom", display_name="<NAME>",
display_picture="https://via.placeholder.com/150",
graduation_year=2018, pillar="ISTD",
password="<PASSWORD>")
self.client = Client()
self.superuser = User.objects.get(username="tom")
self.client.login(username="tom", password="<PASSWORD>")
@classmethod
def setUpTestData(cls):
pm = OpenSUTDProjectManager()
um = OpenSUTDUserManager()
pm.create_project(project_uid="ACAD_00001",
title="OpenSUTD Web Platform",
caption="Sample project 1",
category="ACAD",
url="https://github.com/OpenSUTD/web-platform-prototype",
poster_url="https://via.placeholder.com/150",
featured_image="https://via.placeholder.com/150")
um.create_user("dick", display_name="<NAME>",
display_picture="https://via.placeholder.com/150",
graduation_year=2019, pillar="ISTD")
um.create_user("jane", display_name="<NAME>",
display_picture="https://via.placeholder.com/150",
graduation_year=2021, pillar="ESD")
pm.create_project(project_uid="ACAD_00002",
title="RandomZZZZZ",
caption="Sample project 2",
category="ACAD",
url="https://github.com/OpenSUTD/web-platform-prototype",
poster_url="https://via.placeholder.com/150",
featured_image="https://via.placeholder.com/150")
pm.set_project_status("ACAD_00001", "ACCEPT")
pm.add_user_to_project("ACAD_00001", "dick")
pm.add_user_to_project("ACAD_00001", "jane")
pm.add_tag_to_project(
"ACAD_00001", "rand1,rand2,education,student,policy")
pm.add_user_to_project("ACAD_00002", "jane")
pm.add_tag_to_project(
"ACAD_00002", "rand1,rand2,education,student,policy")
def test_traverse_urls(self):
# Fill these lists as needed with your site specific URLs to check and to avoid
to_traverse_list = ["/", "/projects/",
"/students/", "/educators/", "/leaders/"]
        to_avoid_list = [r"javascript:history\.back()", "https://*",
                         r"javascript:history\.go\(-1\)", r"^mailto:.*"]
done_list = []
error_list = []
source_of_link = dict()
for link in to_traverse_list:
source_of_link[link] = "initial"
(to_traverse_list, to_avoid_list, done_list, error_list, source_of_link) = \
self.recurse_into_path(
to_traverse_list, to_avoid_list, done_list, error_list, source_of_link)
print("END REACHED\nStats:")
if VERBOSE:
print("\nto_traverse_list = " + str(to_traverse_list))
if VERBOSE:
print("\nto_avoid_list = " + str(to_avoid_list))
if VERBOSE:
print("\nsource_of_link = " + str(source_of_link))
if VERBOSE:
print("\ndone_list = " + str(done_list))
print("Followed " + str(len(done_list)) + " links successfully")
print("Avoided " + str(len(to_avoid_list)) + " links")
if error_list:
print("!! " + str(len(error_list)) + " error(s) : ")
for error in error_list:
print(str(error) + " found in page " +
source_of_link[error[0]])
print("Errors found traversing links")
assert False
else:
print("No errors")
def recurse_into_path(self, to_traverse_list, to_avoid_list, done_list, error_list, source_of_link):
""" Dives into first item of to_traverse_list
Returns: (to_traverse_list, to_avoid_list, done_list, source_of_link)
"""
if to_traverse_list:
url = to_traverse_list.pop()
if not match_any(url, to_avoid_list):
print("\nSurfing to " + str(url) +
", discovered in " + str(source_of_link[url]))
response = self.client.get(url, follow=True)
if response.status_code == 200:
soup = BeautifulSoup(response.content, "html.parser")
text = soup.get_text()
for link in soup.find_all("a"):
new_link = link.get("href")
if VERBOSE:
print(" Found link: " + str(new_link))
if match_any(new_link, to_avoid_list):
if VERBOSE:
print(" Avoiding it")
elif new_link in done_list:
if VERBOSE:
print(" Already done, ignoring")
elif new_link in to_traverse_list:
if VERBOSE:
print(" Already in to traverse list, ignoring")
else:
if VERBOSE:
print(
" New, unknown link: Storing it to traverse later")
source_of_link[new_link] = url
to_traverse_list.append(new_link)
done_list.append(url)
if VERBOSE:
print("Done")
else:
error_list.append((url, response.status_code))
to_avoid_list.append(url)
if VERBOSE:
print("Diving into next level")
return self.recurse_into_path(to_traverse_list, to_avoid_list, done_list, error_list, source_of_link)
else:
# Nothing to traverse
if VERBOSE:
print("Returning to upper level")
return to_traverse_list, to_avoid_list, done_list, error_list, source_of_link
def match_any(my_string, regexp_list):
if my_string:
combined = "(" + ")|(".join(regexp_list) + ")"
return re.match(combined, my_string)
    else:
        # A missing href (None or empty) always counts as a match, so the link is avoided
        return True
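
# e.g. match_any("mailto:a@b.c", [r"^mailto:.*", r"https://*"]) builds the combined
# pattern "(^mailto:.*)|(https://*)" and is truthy; match_any(None, patterns) is True.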
class SecuredPageTestCase(TestCase):
def setUp(self):
pm = OpenSUTDProjectManager()
pm.create_project(project_uid="ACAD_00001",
title="OpenSUTD Web Platform",
caption="Sample project 1",
category="ACAD",
url="https://github.com/OpenSUTD/web-platform-prototype",
poster_url="https://via.placeholder.com/150",
featured_image="https://via.placeholder.com/150")
um = OpenSUTDUserManager()
um.create_user("tom", display_name="<NAME>",
display_picture="https://via.placeholder.com/150",
graduation_year=2018, pillar="ISTD")
def test_auth_approval_view(self):
url = reverse("projects:approval")
response = self.client.get(url)
self.assertEqual(response.status_code, 302)
def test_auth_submit_view(self):
url = reverse("projects:submit_new")
response = self.client.get(url)
self.assertEqual(response.status_code, 302)
def test_auth_submit_reject(self):
url = reverse("projects:reject", args=("ACAD_00001",))
response = self.client.get(url)
self.assertEqual(response.status_code, 302)
def test_auth_submit_approve(self):
url = reverse("projects:approve", args=("ACAD_00001",))
response = self.client.get(url)
self.assertEqual(response.status_code, 302)
def test_auth_user_edit(self):
url = reverse("projects:user_edit", args=("tom",))
response = self.client.get(url)
self.assertEqual(response.status_code, 302)
def test_auth_project_edit(self):
url = reverse("projects:project_edit", args=("ACAD_00001",))
response = self.client.get(url)
self.assertEqual(response.status_code, 302)
def test_auth_project_bypass(self):
url = reverse("projects:project_page_bypass", args=("ACAD_00001",))
response = self.client.get(url)
# actually a custom 404 page
self.assertEqual(response.status_code, 200)
class SubmissionFormTest(TestCase):
def setUp(self):
self.client = Client()
um = OpenSUTDUserManager()
um.create_user("tom", display_name="<NAME>",
display_picture="https://via.placeholder.com/150",
graduation_year=2018, pillar="ISTD",
password="<PASSWORD>")
self.client.login(username="tom", password="<PASSWORD>")
def test_submission_form_entry(self):
response = self.client.get(reverse("projects:submit_new"))
self.assertEqual(response.status_code, 200)
# test submission mechanism
form = SubmissionForm({"project_name": "test",
"caption": "test caption",
"category": "ACAD",
"featured_image": "http://pluspng.com/img-png/user-png-icon-male-user-icon-512.png",
"github_url": "https://github.com/OpenSUTD/web-platform-prototype",
"poster_url": "http://pluspng.com/img-png/user-png-icon-male-user-icon-512.png"})
self.assertEqual(form.is_valid(), True)
def test_submission_form_entry_invalid(self):
response = self.client.get(reverse("projects:submit_new"))
self.assertEqual(response.status_code, 200)
# test submission mechanism
form = SubmissionForm({"project_name": "",
"caption": "",
"category": "",
"featured_image": "",
"github_url": "",
"poster_url": ""})
self.assertEqual(form.is_valid(), False)
def test_submission_form_entry_not_github(self):
response = self.client.get(reverse("projects:submit_new"))
self.assertEqual(response.status_code, 200)
# test submission mechanism
form = SubmissionForm({"project_name": "test",
"caption": "test caption",
"category": "ACAD",
"featured_image": "http://pluspng.com/img-png/user-png-icon-male-user-icon-512.png",
"github_url": "https://lolcats.com/OpenSUTD/web-platform-prototype",
"poster_url": "http://pluspng.com/img-png/user-png-icon-male-user-icon-512.png"})
self.assertEqual(form.is_valid(), False)
class UserProfileFormTest(TestCase):
def setUp(self):
self.client = Client()
um = OpenSUTDUserManager()
um.create_user("tom", display_name="<NAME>",
display_picture="https://via.placeholder.com/150",
graduation_year=2018, pillar="ISTD",
password="<PASSWORD>")
self.client.login(username="tom", password="<PASSWORD>")
def test_submission_form_entry(self):
# test user can actually get to the page
response = self.client.get(
reverse("projects:user_edit", args=("tom",)))
self.assertEqual(response.status_code, 200)
# test submission mechanism
form = UserProfileForm({"display_name": "tom2",
"display_picture": "http://pluspng.com/img-png/user-png-icon-male-user-icon-512.png",
"graduation_year": 2019,
"pillar": "ISTD",
"bio": "Hi I am Tom",
"contact_email": "<EMAIL>",
"personal_links": "tlkh.design"})
self.assertEqual(form.is_valid(), True)
def test_submission_form_entry_invalid(self):
# test user can actually get to the page
response = self.client.get(
reverse("projects:user_edit", args=("tom",)))
self.assertEqual(response.status_code, 200)
# test submission mechanism
form = UserProfileForm({"display_name": "",
"display_picture": "",
"graduation_year": 2019,
"pillar": "",
"bio": "",
"contact_email": "",
"personal_links": ""})
self.assertEqual(form.is_valid(), False)
class ProjectEditFormTest(TestCase):
def setUp(self):
self.client = Client()
um = OpenSUTDUserManager()
um.create_user("tom", display_name="<NAME>",
display_picture="https://via.placeholder.com/150",
graduation_year=2018, pillar="ISTD",
password="<PASSWORD>")
pm = OpenSUTDProjectManager()
pm.create_project(project_uid="ACAD_00001",
title="OpenSUTD Web Platform",
caption="Sample project 1",
category="ACAD",
url="https://github.com/OpenSUTD/web-platform-prototype",
poster_url="https://via.placeholder.com/150",
featured_image="https://via.placeholder.com/150")
pm.set_project_status("ACAD_00001", "ACCEPT")
self.client.login(username="tom", password="<PASSWORD>")
def test_submission_form_entry_invalid(self):
# test user can actually get to the page
response = self.client.get(
reverse("projects:project_edit", args=("ACAD_00001",)))
self.assertEqual(response.status_code, 200)
# test submission mechanism
form = ProjectEditForm({"title": "",
"caption": "",
"featured_image": "",
"url": "",
"poster_url": ""})
self.assertEqual(form.is_valid(), False)
def test_submission_form_entry(self):
# test user can actually get to the page
response = self.client.get(
reverse("projects:project_edit", args=("ACAD_00001",)))
self.assertEqual(response.status_code, 200)
# test submission mechanism
form = ProjectEditForm({"title": "lalalal",
"caption": "lalalal",
"featured_image": "lalalal.com",
"url": "https://github.com/OpenSUTD/web-platform-prototype",
"poster_url": "lalalal.com"})
self.assertEqual(form.is_valid(), True)
class LogintoSecuredPageTestCase(TestCase):
def setUp(self):
self.client = Client()
um = OpenSUTDUserManager()
um.create_user("tom", display_name="<NAME>",
display_picture="https://via.placeholder.com/150",
graduation_year=2018, pillar="ISTD",
password="<PASSWORD>")
pm = OpenSUTDProjectManager()
pm.create_project(project_uid="ACAD_00001",
title="OpenSUTD Web Platform",
caption="Sample project 1",
category="ACAD",
url="https://github.com/OpenSUTD/web-platform-prototype",
poster_url="https://via.placeholder.com/150",
featured_image="https://via.placeholder.com/150")
self.client.login(username="tom", password="<PASSWORD>")
def test_login_approval_view(self):
response = self.client.get(reverse("projects:approval"))
self.assertEqual(response.status_code, 200)
def test_login_submission_view(self):
response = self.client.get(reverse("projects:submit_new"))
self.assertEqual(response.status_code, 200)
def test_login_user_edit(self):
url = reverse("projects:user_edit", args=("tom",))
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_login_project_edit(self):
pm = OpenSUTDProjectManager()
pm.set_project_status("ACAD_00001", "ACCEPT")
url = reverse("projects:project_edit", args=("ACAD_00001",))
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
class UserTestCase(TestCase):
def setUp(self):
um = OpenSUTDUserManager()
um.create_user("tom", display_name="<NAME>",
display_picture="https://via.placeholder.com/150",
graduation_year=2018, pillar="ISTD")
um.create_user("jane", display_name="<NAME>",
display_picture="https://via.placeholder.com/150",
graduation_year=2021, pillar="ESD")
def test_user_get_name(self):
tom = User.objects.get(username="tom")
self.assertEqual(tom.display_name, "<NAME>")
jane = User.objects.get(username="jane")
self.assertEqual(jane.display_name, "<NAME>")
def test_user_get_year(self):
tom = User.objects.get(username="tom")
self.assertEqual(tom.graduation_year, 2018)
jane = User.objects.get(username="jane")
self.assertEqual(jane.graduation_year, 2021)
def test_user_get_pillar(self):
tom = User.objects.get(username="tom")
self.assertEqual(tom.pillar, "ISTD")
jane = User.objects.get(username="jane")
self.assertEqual(jane.pillar, "ESD")
# test user profile page contents
def test_user_page_load(self):
url = reverse("projects:user", args=("tom",))
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
url = reverse("projects:user", args=("jane",))
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_user_page_not_empty(self):
url = reverse("projects:user", args=("tom",))
response = self.client.get(url)
self.assertGreater(len(response.content), LEN_BASE)
url = reverse("projects:user", args=("jane",))
response = self.client.get(url)
self.assertGreater(len(response.content), LEN_BASE)
def test_user_page_name(self):
url = reverse("projects:user", args=("tom",))
response = str(self.client.get(url).content)
self.assertEqual("<NAME>" in response, True)
url = reverse("projects:user", args=("jane",))
response = str(self.client.get(url).content)
self.assertEqual("<NAME>" in response, True)
def test_user_page_year(self):
url = reverse("projects:user", args=("tom",))
response = str(self.client.get(url).content)
self.assertEqual("2018" in response, True)
url = reverse("projects:user", args=("jane",))
response = str(self.client.get(url).content)
self.assertEqual("2021" in response, True)
def test_user_page_pillar(self):
url = reverse("projects:user", args=("tom",))
response = str(self.client.get(url).content)
self.assertEqual("ISTD" in response, True)
url = reverse("projects:user", args=("jane",))
response = str(self.client.get(url).content)
self.assertEqual("ESD" in response, True)
def test_user_page_performance(self):
start = time.time()
for i in range(10):
url = reverse("projects:user", args=("tom",))
response = self.client.get(url)
url = reverse("projects:user", args=("jane",))
response = self.client.get(url)
duration = time.time() - start
self.assertLess(duration, 1.5)
class ProjectShowcaseTestCase(TestCase):
def setUp(self):
pm = OpenSUTDProjectManager()
pm.create_project(project_uid="ACAD_00001",
title="OpenSUTD Web Platform",
caption="Sample project 1",
category="ACAD",
url="https://github.com/OpenSUTD/web-platform-prototype",
poster_url="https://via.placeholder.com/150",
featured_image="https://via.placeholder.com/150")
um = OpenSUTDUserManager()
um.create_user("tom", display_name="<NAME>",
display_picture="https://via.placeholder.com/150",
graduation_year=2018, pillar="ISTD")
um.create_user("jane", display_name="<NAME>",
display_picture="https://via.placeholder.com/150",
graduation_year=2021, pillar="ESD")
def test_project_properties(self):
proj = Project.objects.get(project_uid="ACAD_00001")
self.assertEqual(proj.title, "OpenSUTD Web Platform")
def test_add_user_project(self):
pm = OpenSUTDProjectManager()
pm.add_user_to_project("ACAD_00001", "tom")
proj = Project.objects.get(project_uid="ACAD_00001")
self.assertEqual(len(proj.users.all()), 1)
pm.add_user_to_project("ACAD_00001", "jane")
self.assertEqual(len(proj.users.all()), 2)
def test_add_tag_project(self):
pm = OpenSUTDProjectManager()
pm.add_tag_to_project("ACAD_00001", "rand1,rand2")
proj = Project.objects.get(project_uid="ACAD_00001")
self.assertEqual(len(proj.tags.all()), 2)
def test_add_del_user_project(self):
tom = User.objects.get(username="tom")
jane = User.objects.get(username="jane")
proj = Project.objects.get(project_uid="ACAD_00001")
proj.users.add(tom)
proj.users.add(jane)
proj.users.remove(jane)
self.assertEqual(len(proj.users.all()), 1)
def test_project_page_not_approved(self):
pm = OpenSUTDProjectManager()
pm.set_project_status("ACAD_00001", "REJECT")
url = reverse("projects:project_page", args=("ACAD_00001",))
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.assertEqual("Error 404: Page Not Found!" in str(
response.content), True)
self.assertGreater(len(response.content), LEN_BASE)
def test_project_page_approved(self):
pm = OpenSUTDProjectManager()
pm.set_project_status("ACAD_00001", "ACCEPT")
url = reverse("projects:project_page", args=("ACAD_00001",))
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.assertGreater(len(response.content), LEN_BASE)
def test_project_page_name(self):
pm = OpenSUTDProjectManager()
pm.set_project_status("ACAD_00001", "ACCEPT")
url = reverse("projects:project_page", args=("ACAD_00001",))
response = str(self.client.get(url).content)
self.assertEqual("OpenSUTD Web Platform" in response, True)
def test_project_tag(self):
pm = OpenSUTDProjectManager()
pm.set_project_status("ACAD_00001", "ACCEPT")
pm.add_tag_to_project("ACAD_00001", "tag1,tag2")
url = reverse("projects:project_page", args=("ACAD_00001",))
response = str(self.client.get(url).content)
self.assertEqual("tag1" in response, True)
self.assertEqual("tag2" in response, True)
def test_project_page_contents(self):
pm = OpenSUTDProjectManager()
pm.set_project_status("ACAD_00001", "ACCEPT")
url = reverse("projects:project_page", args=("ACAD_00001",))
response = str(self.client.get(url).content)
# print(response)
# test top and bottom of contents
# this does not pass on Travis for Pull Request builds
# due to them disabling env variables for security reasons
#self.assertEqual("Prototype for the Eventual OpenSUTD Web Platform" in response, True)
#self.assertEqual("Data Model" in response, True)
self.assertGreater(len(response), LEN_BASE)
def test_project_page_load(self):
pm = OpenSUTDProjectManager()
pm.set_project_status("ACAD_00001", "ACCEPT")
url = reverse("projects:project_page", args=("ACAD_00001",))
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_project_page_not_empty(self):
pm = OpenSUTDProjectManager()
pm.set_project_status("ACAD_00001", "ACCEPT")
url = reverse("projects:project_page", args=("ACAD_00001",))
response = str(self.client.get(url).content)
self.assertGreater(len(response), LEN_BASE)
def test_project_author_name(self):
pm = OpenSUTDProjectManager()
pm.set_project_status("ACAD_00001", "ACCEPT")
pm.add_user_to_project("ACAD_00001", "tom")
url = reverse("projects:project_page", args=("ACAD_00001",))
response = str(self.client.get(url).content)
self.assertEqual("<NAME>" in response, True)
def test_project_author_pillar(self):
pm = OpenSUTDProjectManager()
pm.set_project_status("ACAD_00001", "ACCEPT")
pm.add_user_to_project("ACAD_00001", "tom")
url = reverse("projects:project_page", args=("ACAD_00001",))
response = str(self.client.get(url).content)
self.assertEqual("ISTD" in response, True)
def test_project_list_page(self):
pm = OpenSUTDProjectManager()
pm.set_project_status("ACAD_00001", "ACCEPT")
url = reverse("projects:projects_list")
response = str(self.client.get(url).content)
self.assertEqual("OpenSUTD Web Platform" in response, True)
self.assertEqual("Sample project 1" in response, True)
def test_project_page_performance(self):
start = time.time()
for _ in range(10):
url = reverse("projects:project_page", args=("ACAD_00001",))
response = self.client.get(url)
duration = time.time() - start
self.assertLess(duration, 1.5)
import tensorflow as tf
from layers.attention_layers import attention_layer
from layers.common_layers import init_inputs
from layers.feed_forward_layers import feed_forward_diff_features, feed_forward_diff_layers
from utils.hyperparams import Hyperparams as hp
class AttentionBasedEnsembleNets:
def __init__(self, selection_methods, num_features, learning_rate=0.01):
with tf.name_scope('input'):
self.nn_inputs = init_inputs(num_features, selection_methods)
self.labels = tf.placeholder(dtype=tf.float32, shape=[None, 1], name='labels')
with tf.name_scope('ff'):
nets = {'fisher': [200, 50], 'ttest': [200, 50], 'corr': [200, 50], 'random': [200, 50]}
feed_forward = feed_forward_diff_layers(self.nn_inputs, nets)
with tf.name_scope('output'):
out = attention_layer(feed_forward, attention_size=50)
logits = tf.layers.dense(out, units=1)
sig = tf.nn.sigmoid(logits)
predictions = tf.round(sig)
with tf.name_scope('train'):
self.loss = tf.losses.sigmoid_cross_entropy(
multi_class_labels=self.labels, logits=logits)
self.opt = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(self.loss)
with tf.name_scope('summaries'):
self.acc = tf.reduce_mean((predictions * self.labels) + ((1 - predictions) * (1 - self.labels)))
self.precision, precision_op = tf.metrics.precision(self.labels, predictions)
# summaries
tf.summary.scalar('loss', self.loss)
tf.summary.scalar('accuracy', self.acc)
tf.summary.scalar('precision_op', precision_op)
self.merged_summary_op = tf.summary.merge_all()
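
# Hedged TF1-style usage sketch. The feed-dict keys for the per-selection-method
# inputs come from `init_inputs`, which is project-specific, so only the known
# `labels` placeholder is shown and `y_batch` is assumed to exist:
#   net = AttentionBasedEnsembleNets(['fisher', 'ttest', 'corr', 'random'], num_features=500)
#   with tf.Session() as sess:
#       # local variables are needed because tf.metrics.precision creates them
#       sess.run([tf.global_variables_initializer(), tf.local_variables_initializer()])
#       _, loss = sess.run([net.opt, net.loss], feed_dict={net.labels: y_batch, ...})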
# Copyright 2013 10gen Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import types
import mock
import sys
from tests import MongoWSTestCase
from mongows.validators.ValidationTest import ValidationTest
class ValidatorsTestCase(MongoWSTestCase):
def test_imports_and_runs_the_specified_file(self):
# Create module test_script in scripts
test_script = types.ModuleType('test_script')
run_mock = mock.MagicMock()
class ValidationTestCase(ValidationTest):
def run(self):
run_mock(self.res_id)
return 'ok', 200
test_script.__dict__.update({'ValidationTestCase': ValidationTestCase})
sys.modules['mongows.validators.scripts.test_script'] = test_script
response = self.app.post('/validate/test_script',
data={'res_id': 'foo'})
self.assertEqual(response.data, 'ok')
self.assertEqual(response.status_code, 200)
run_mock.assert_called_once_with('foo')
del sys.modules['mongows.validators.scripts.test_script']
def test_returns_404_when_accessing_nonexistent_script(self):
response = self.app.post('/validate/test_script',
data={'res_id': 'foo'})
expected_message = 'Unknown validation script test_script'
self.assertEqual(response.data, expected_message)
self.assertEqual(response.status_code, 404)
| [
"types.ModuleType",
"mock.MagicMock"
] | [((902, 933), 'types.ModuleType', 'types.ModuleType', (['"""test_script"""'], {}), "('test_script')\n", (918, 933), False, 'import types\n'), ((953, 969), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (967, 969), False, 'import mock\n')] |
import math
from itertools import product
from typing import Tuple, List, Optional, Union
import numpy as np
import torch
import torch.nn as nn
import torch.nn.init as init
class EMA:
"""
Class that keeps track of exponential moving average of model parameters of a particular model.
Also see https://github.com/chrischute/squad/blob/master/util.py#L174-L220.
"""
def __init__(self, model: torch.nn.Module, decay: float):
"""
Initialization method for the EMA class.
Parameters
----------
model: torch.nn.Module
Torch model for which the EMA instance is used to track the exponential moving average of parameter values
decay: float
            Decay rate used for exponential moving average of parameters calculation:
            ema_t = (1 - decay) * p_t + decay * ema_(t-1)
"""
self.decay = decay
self.shadow = {}
self.original = {}
# Register model parameters
for name, param in model.named_parameters():
if param.requires_grad:
self.shadow[name] = param.clone().detach()
def __call__(self, model):
"""
Implements call method of EMA class
Parameters
----------
model: torch.nn.Module
Current model based on which the EMA parameters are updated
"""
with torch.no_grad():
for name, param in model.named_parameters():
if param.requires_grad:
assert name in self.shadow
new_average = (1.0 - self.decay) * param + self.decay * self.shadow[
name
]
self.shadow[name] = new_average
def assign(self, model: torch.nn.Module):
"""
This method assigns the parameter EMAs saved in self.shadow to the given model. The current parameter values
of the model are saved to self.original. These original parameters can be restored using self.resume.
Parameters
----------
model: torch.nn.Module
Model to which the current parameter EMAs are assigned.
"""
for name, param in model.named_parameters():
if param.requires_grad:
self.original[name] = param.clone()
param.data.copy_(self.shadow[name].data)
def resume(self, model: torch.nn.Module):
"""
This method restores the parameters saved in self.original to the given model. It is usually called after
the `assign` method.
Parameters
----------
model: torch.nn.Module
Torch model to which the original parameters are restored
"""
for name, param in model.named_parameters():
if param.requires_grad:
param.data.copy_(self.original[name].data)
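# Illustrative usage sketch for the EMA class above; `loader`, `criterion`,
# `optimizer` and `validate` are hypothetical placeholders, not part of this
# module:
#
#     ema = EMA(model, decay=0.999)
#     for batch_x, batch_y in loader:
#         loss = criterion(model(batch_x), batch_y)
#         loss.backward()
#         optimizer.step()
#         optimizer.zero_grad()
#         ema(model)        # update the shadow averages after each step
#     ema.assign(model)     # swap in the EMA weights for evaluation
#     validate(model)
#     ema.resume(model)     # restore the raw weights before training resumes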
class ModelWrapper:
"""
ModelWrapper which can be used to extract outputs of intermediate layer of a network.
"""
def __init__(self, task_model: nn.Module, to_extract: Tuple):
"""
Initializes a model wrapper for the specified task model and layer names to extract.
Parameters
----------
task_model: torch.nn.Module
Torch model to which the original parameters are restored
to_extract: Tuple
Tuple that holds names of layers for which intermediate results should be extracted and returned,
e.g. to_extract=(`avgpool`, `fc`) to extract intermediate results after the avgpool layer and last fully
connected layer in a ResNet for example.
"""
self.task_model = task_model
self.to_extract = to_extract
def __call__(self, x: torch.Tensor):
"""
The __call__ method iterates through all modules of the provided `task_model` separately. It extracts and
returns the intermediate results at layers specified by to_extract
Parameters
----------
x: torch.Tensor
Batch of samples, e.g. images, which are passed through the network and for which specified intermediate
results are extracted
Returns
----------
        results: Union[torch.Tensor, List[torch.Tensor]]
Results of forward pass of input batch through the given task model. If len(to_extract) is 1, only the
single result tensor is returned. Otherwise, a list of tensors is returned, which holds the intermediate
results of specified layers in the order of occurrence in the network.
"""
results = []
for name, child in self.task_model.named_children():
x = child(x)
if name == "avgpool":
x = torch.flatten(x, 1)
if name in self.to_extract:
results.append(x)
return results[-1] if len(results) == 1 else results
def train(self):
self.task_model.train()
def eval(self):
self.task_model.eval()
def cuda(self):
self.task_model.cuda()
def to(self, device: Union[str, torch.device]):
self.task_model.to(device)
def get_embedding_dim(self):
last_layer = list(self.task_model.modules())[-1]
return last_layer.in_features
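# Illustrative usage sketch, assuming torchvision is available (it is not a
# dependency of this module):
#
#     from torchvision.models import resnet18
#     wrapper = ModelWrapper(resnet18(), to_extract=("avgpool", "fc"))
#     features, logits = wrapper(torch.randn(4, 3, 224, 224))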
def model_init(m: torch.nn.Module):
"""
Method that initializes torch modules depending on their type:
- Convolutional Layers: Xavier Uniform Initialization
- BatchNorm Layers: Standard initialization
    - Fully connected / linear layers: Xavier Normal Initialization
Parameters
----------
m: torch.nn.Module
Torch module which to be initialized. The specific initialization used depends on the type of module.
"""
classname = m.__class__.__name__
if classname.find("Conv") != -1:
init.xavier_uniform_(m.weight, gain=math.sqrt(2))
if m.bias is not None:
init.constant_(m.bias, 0)
elif classname.find("BatchNorm") != -1:
init.constant_(m.weight, 1)
init.constant_(m.bias, 0)
elif classname.find("Linear") != -1:
init.xavier_normal_(m.weight, gain=math.sqrt(2))
if m.bias is not None:
init.constant_(m.bias, 0)
def wd_check(wd_tuple: Tuple, name: str):
"""
Method that checks if parameter name matches the key words in wd_tuple. This check is used to filter certain
types of parameters independent of the layer, which it belongs to, e.g. `conv1.weight`.
Parameters
----------
wd_tuple: Tuple
Tuple which contains the phrases which are checked for, e.g. (`conv`, `weight`) or (`fc`, `weight`)
name: str
Name of parameter as saved in state dict, e.g. `conv1.weight`
Returns
----------
wd_check: bool
Returns a bool indicating whether all strings in wd_tuple are contained in name.
"""
return all([x in name for x in wd_tuple])
def apply_wd(model: torch.nn.Module, wd: float, param_names: List = ["conv", "fc"], types: List = ["weight"]):
"""
Method that manually applies weight decay to model parameters that match the specified parameter names and types.
Parameters
----------
model: torch.nn.Module
Model to which weight decay is applied
wd: float
Float specifying weight decay. Parameters are updated to: param = (1-wd) * param
param_names: List (default: ["conv", "fc"])
Parameter names (or substring of names) for which the weight decay is applied.
types: List (default: ["weight"])
Parameter types for which weight decay is applied.
"""
with torch.no_grad():
for name, param in model.state_dict().items():
if any(
[wd_check(wd_tuple, name) for wd_tuple in product(param_names, types)]
):
param.mul_(1 - wd)
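# Example of the manual decay above: apply_wd(model, wd=1e-4) multiplies every
# parameter whose name contains "weight" together with either "conv" or "fc"
# by (1 - 1e-4), leaving biases and batch-norm parameters untouched.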
def set_bn_running_updates(model, enable: bool, bn_momentum: float = 0.001):
"""
Method that enables or disables updates of the running batch norm vars by setting the momentum parameter to 0
"""
for m in model.modules():
if isinstance(m, nn.BatchNorm2d):
m.momentum = bn_momentum if enable else 0.0
def linear_rampup(current: int, rampup_length: int):
if rampup_length == 0:
return 1.0
else:
current = np.clip(current / rampup_length, 0.0, 1.0)
return float(current)
def set_grads(model: torch.nn.Module, trainable_layers: List[str]):
"""
Method that enables or disables gradients of model parameters according to specified layers.
Parameters
----------
model: torch.nn.Module
Torch model for which parameter gradients should be set
trainable_layers: List
List of strings, i.e. layer / parameter names, for which training is enabled. For model parameters, which do not
match any pattern specified in trainable_layers, training is disable by setting requires_grad to False.
"""
def is_trainable(x, trainable_layers):
return any([(layer in x) or ('fc' in x) for layer in trainable_layers])
for p in model.parameters():
p.requires_grad = False
trainable_parameters = [n for n, p in model.named_parameters() if is_trainable(n, trainable_layers)]
for n, p in model.named_parameters():
if n in trainable_parameters:
p.requires_grad = True
| [
"numpy.clip",
"torch.nn.init.constant_",
"itertools.product",
"math.sqrt",
"torch.no_grad",
"torch.flatten"
] | [((7623, 7638), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (7636, 7638), False, 'import torch\n'), ((8322, 8364), 'numpy.clip', 'np.clip', (['(current / rampup_length)', '(0.0)', '(1.0)'], {}), '(current / rampup_length, 0.0, 1.0)\n', (8329, 8364), True, 'import numpy as np\n'), ((1390, 1405), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1403, 1405), False, 'import torch\n'), ((5931, 5956), 'torch.nn.init.constant_', 'init.constant_', (['m.bias', '(0)'], {}), '(m.bias, 0)\n', (5945, 5956), True, 'import torch.nn.init as init\n'), ((6009, 6036), 'torch.nn.init.constant_', 'init.constant_', (['m.weight', '(1)'], {}), '(m.weight, 1)\n', (6023, 6036), True, 'import torch.nn.init as init\n'), ((6045, 6070), 'torch.nn.init.constant_', 'init.constant_', (['m.bias', '(0)'], {}), '(m.bias, 0)\n', (6059, 6070), True, 'import torch.nn.init as init\n'), ((4755, 4774), 'torch.flatten', 'torch.flatten', (['x', '(1)'], {}), '(x, 1)\n', (4768, 4774), False, 'import torch\n'), ((5874, 5886), 'math.sqrt', 'math.sqrt', (['(2)'], {}), '(2)\n', (5883, 5886), False, 'import math\n'), ((6212, 6237), 'torch.nn.init.constant_', 'init.constant_', (['m.bias', '(0)'], {}), '(m.bias, 0)\n', (6226, 6237), True, 'import torch.nn.init as init\n'), ((6155, 6167), 'math.sqrt', 'math.sqrt', (['(2)'], {}), '(2)\n', (6164, 6167), False, 'import math\n'), ((7777, 7804), 'itertools.product', 'product', (['param_names', 'types'], {}), '(param_names, types)\n', (7784, 7804), False, 'from itertools import product\n')] |
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
from scipy.signal import find_peaks_cwt
from scipy.signal import boxcar
sns.set(font_scale=1.2)
sns.set_style("white")
colors = ["#95a5a6", "amber"]
sns.set_palette(sns.color_palette())
hr_24 = np.loadtxt("MDA DTX_1 4_24hr.txt", skiprows=1)
ctl = np.loadtxt("MDA DTX_1 Ctl.txt", skiprows=1)
hr_4 = np.loadtxt("MDA DTX_1 4hr.txt", skiprows=1)
# hr_2 = np.loadtxt("MDA-DTX-#2hr.txt", skiprows=1)
hr_8 = np.loadtxt("MDA DTX 8hr.txt", skiprows=1)
dmso = np.loadtxt("MDA DTX DMSO.txt", skiprows=1)
def filterDat(data):
num = 9
ones = boxcar(num)/num
result = np.abs(np.convolve(data, ones, mode='same'))
return np.interp(result, (result.min(), result.max()), (0, 100))
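# filterDat above is a boxcar smoother: convolving with a length-9 window of
# 1/9 values averages each sample with its neighbours, and np.interp then
# rescales the smoothed trace onto a 0-100 "% of Max" axis.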
def shift(data):
"""
firstIndex = 200
index = np.argmax(data)
if index < firstIndex:
data = np.insert(data, 0, np.zeros(
firstIndex-index))[:-(firstIndex-index)]
elif index > firstIndex:
data = data[index-firstIndex:]
data = np.insert(data, len(data)-1, np.zeros(index-firstIndex))
"""
# Stretch
secondIndex = 400
indexes = find_peaks_cwt(data, np.arange(1, 100))
# find max of indexes
peaks = data[indexes]
secondMax = 0
lastPeak = 0
for x in range(len(peaks)):
if peaks[x] < 95.0:
if peaks[x] > lastPeak:
lastPeak = peaks[x]
secondMax = x
secondMaxIndex = indexes[secondMax]
difference = secondIndex-secondMaxIndex
ratio = secondIndex/(secondIndex-difference)
old_x = np.linspace(0, int(len(data))-1, int(len(data)))
new_x = np.linspace(0, int(len(data))-1, int(len(data)*ratio))
new_data = np.interp(new_x, old_x, data)
return new_data, np.linspace(0, int(len(new_x))-1, int(len(new_x)))
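# shift stretches each trace along x so that its tallest peak below 95% of
# max (found via continuous-wavelet peak detection) lands at index 400,
# making curves from different conditions comparable.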
fig, axes = plt.subplots(figsize=(8, 6))
filterData = filterDat(ctl[:, 2])
y, x = shift(filterData)
axes.plot(x, y, label="Control", color='black')
axes.fill_between(x, y, alpha=0.3)
"""filterData = filterDat(hr_4[:, 2])
y, x = shift(filterData)
axes.plot(x, y, label="4 hour")
axes.fill_between(x, y, alpha=0.3)
filterData = filterDat(hr_8[:, 2])
y, x = shift(filterData)
axes.plot(x, y, label="8 hour")
axes.fill_between(x, y, alpha=0.3)
"""
filterData = filterDat(hr_24[:, 2])
y, x = shift(filterData)
axes.plot(x, y, label="24 hour", color='maroon')
axes.fill_between(x, y, alpha=0.3)
axes.legend()
axes.set_ylabel('% of Max')
axes.set_xlabel('Fluorescence')
axes.set_xlim((0, 800))
plt.show()
| [
"seaborn.set",
"numpy.convolve",
"seaborn.color_palette",
"seaborn.set_style",
"scipy.signal.boxcar",
"numpy.interp",
"numpy.loadtxt",
"matplotlib.pyplot.subplots",
"numpy.arange",
"matplotlib.pyplot.show"
] | [((264, 287), 'seaborn.set', 'sns.set', ([], {'font_scale': '(1.2)'}), '(font_scale=1.2)\n', (271, 287), True, 'import seaborn as sns\n'), ((288, 310), 'seaborn.set_style', 'sns.set_style', (['"""white"""'], {}), "('white')\n", (301, 310), True, 'import seaborn as sns\n'), ((387, 433), 'numpy.loadtxt', 'np.loadtxt', (['"""MDA DTX_1 4_24hr.txt"""'], {'skiprows': '(1)'}), "('MDA DTX_1 4_24hr.txt', skiprows=1)\n", (397, 433), True, 'import numpy as np\n'), ((440, 483), 'numpy.loadtxt', 'np.loadtxt', (['"""MDA DTX_1 Ctl.txt"""'], {'skiprows': '(1)'}), "('MDA DTX_1 Ctl.txt', skiprows=1)\n", (450, 483), True, 'import numpy as np\n'), ((491, 534), 'numpy.loadtxt', 'np.loadtxt', (['"""MDA DTX_1 4hr.txt"""'], {'skiprows': '(1)'}), "('MDA DTX_1 4hr.txt', skiprows=1)\n", (501, 534), True, 'import numpy as np\n'), ((594, 635), 'numpy.loadtxt', 'np.loadtxt', (['"""MDA DTX 8hr.txt"""'], {'skiprows': '(1)'}), "('MDA DTX 8hr.txt', skiprows=1)\n", (604, 635), True, 'import numpy as np\n'), ((643, 685), 'numpy.loadtxt', 'np.loadtxt', (['"""MDA DTX DMSO.txt"""'], {'skiprows': '(1)'}), "('MDA DTX DMSO.txt', skiprows=1)\n", (653, 685), True, 'import numpy as np\n'), ((1958, 1986), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(8, 6)'}), '(figsize=(8, 6))\n', (1970, 1986), True, 'import matplotlib.pyplot as plt\n'), ((2636, 2646), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2644, 2646), True, 'import matplotlib.pyplot as plt\n'), ((357, 376), 'seaborn.color_palette', 'sns.color_palette', ([], {}), '()\n', (374, 376), True, 'import seaborn as sns\n'), ((1841, 1870), 'numpy.interp', 'np.interp', (['new_x', 'old_x', 'data'], {}), '(new_x, old_x, data)\n', (1850, 1870), True, 'import numpy as np\n'), ((732, 743), 'scipy.signal.boxcar', 'boxcar', (['num'], {}), '(num)\n', (738, 743), False, 'from scipy.signal import boxcar\n'), ((768, 804), 'numpy.convolve', 'np.convolve', (['data', 'ones'], {'mode': '"""same"""'}), "(data, ones, mode='same')\n", (779, 804), True, 'import numpy as np\n'), ((1294, 1311), 'numpy.arange', 'np.arange', (['(1)', '(100)'], {}), '(1, 100)\n', (1303, 1311), True, 'import numpy as np\n')] |
import mock
import pytest
from prf.tests.prf_testcase import PrfTestCase
from pyramid.exceptions import ConfigurationExecutionError
from prf.resource import Resource, get_view_class, get_parent_elements
from prf.view import BaseView
class TestResource(PrfTestCase):
def test_init_(self):
res = Resource(self.conf)
assert res.member_name == ''
assert res.collection_name == ''
assert res.parent == None
assert res.uid == ''
with pytest.raises(ValueError):
            # member name can't be empty
res.add('', view=BaseView)
def test_repr_(self):
res = Resource(self.conf, 'member', 'collection', uid='uid')
assert 'uid' in res.__repr__()
def test_get_ancestors(self):
root = Resource(self.conf)
one = root.add('one', view=BaseView)
assert one.get_ancestors() == []
two = one.add('two', view=BaseView)
anc = two.get_ancestors()
assert anc[0] == one
def test_add(self):
root = Resource(self.conf)
two = root.add('two', view=BaseView, id_name='two')
assert two.parent == root
assert two.member_name == 'two'
assert two.collection_name == 'twos'
assert two.uid == 'twos'
assert two.is_singular is False
three = two.add('tree', 'trix', view=BaseView, id_name='three')
assert three.parent == two
assert three.member_name == 'tree'
assert three.collection_name == 'trix'
assert three.uid == 'twos:trix'
assert three.is_singular is False
assert three in two.children
four = three.add('four', view=BaseView)
sing = two.add('sing', collection_name=None, view=BaseView)
assert sing.is_singular is True
pref = root.add('five', prefix='pref', view=BaseView)
assert pref.uid == 'pref:fives'
def test_add_id_name(self):
root = Resource(self.conf)
two = root.add('two', view=BaseView, id_name='username')
assert two.id_name == 'username'
three = two.add('tree', view=BaseView, id_name='username')
assert three.path == 'twos/{two_username}/trees'
@mock.patch('prf.resource.maybe_dotted')
def test_get_view_class(self, fake_maybe_dotted):
root = Resource(self.conf)
fake_maybe_dotted.return_value = BaseView
assert get_view_class(BaseView, root) == BaseView
assert get_view_class('prf.view.BaseView', root) == BaseView
fake_maybe_dotted.reset_mock()
def test_get_parent_elements(self):
root = Resource(self.conf)
ppref, npref = get_parent_elements(
root.add('one', view=BaseView).add('two', view=BaseView).add('three', view=BaseView))
assert ppref == 'ones/{one_id}/twos/{two_id}'
assert npref == 'ones:twos:'
@pytest.mark.skip('route_prefix is broken')
def test_get_parent_elements_w_route_prefix(self):
self.conf.route_prefix = 'route_prefix'
root = Resource(self.conf)
ppref, npref = get_parent_elements(
root.add('one', view=BaseView).add('two', view=BaseView).add('three', view=BaseView))
assert ppref == 'route_prefix/ones/{one_id}/twos/{two_id}'
assert npref == 'route_prefix:ones:'
| [
"mock.patch",
"pytest.mark.skip",
"pytest.raises",
"prf.resource.get_view_class",
"prf.resource.Resource"
] | [((2185, 2224), 'mock.patch', 'mock.patch', (['"""prf.resource.maybe_dotted"""'], {}), "('prf.resource.maybe_dotted')\n", (2195, 2224), False, 'import mock\n'), ((2852, 2894), 'pytest.mark.skip', 'pytest.mark.skip', (['"""route_prefix is broken"""'], {}), "('route_prefix is broken')\n", (2868, 2894), False, 'import pytest\n'), ((309, 328), 'prf.resource.Resource', 'Resource', (['self.conf'], {}), '(self.conf)\n', (317, 328), False, 'from prf.resource import Resource, get_view_class, get_parent_elements\n'), ((630, 684), 'prf.resource.Resource', 'Resource', (['self.conf', '"""member"""', '"""collection"""'], {'uid': '"""uid"""'}), "(self.conf, 'member', 'collection', uid='uid')\n", (638, 684), False, 'from prf.resource import Resource, get_view_class, get_parent_elements\n'), ((774, 793), 'prf.resource.Resource', 'Resource', (['self.conf'], {}), '(self.conf)\n', (782, 793), False, 'from prf.resource import Resource, get_view_class, get_parent_elements\n'), ((1028, 1047), 'prf.resource.Resource', 'Resource', (['self.conf'], {}), '(self.conf)\n', (1036, 1047), False, 'from prf.resource import Resource, get_view_class, get_parent_elements\n'), ((1927, 1946), 'prf.resource.Resource', 'Resource', (['self.conf'], {}), '(self.conf)\n', (1935, 1946), False, 'from prf.resource import Resource, get_view_class, get_parent_elements\n'), ((2294, 2313), 'prf.resource.Resource', 'Resource', (['self.conf'], {}), '(self.conf)\n', (2302, 2313), False, 'from prf.resource import Resource, get_view_class, get_parent_elements\n'), ((2588, 2607), 'prf.resource.Resource', 'Resource', (['self.conf'], {}), '(self.conf)\n', (2596, 2607), False, 'from prf.resource import Resource, get_view_class, get_parent_elements\n'), ((3013, 3032), 'prf.resource.Resource', 'Resource', (['self.conf'], {}), '(self.conf)\n', (3021, 3032), False, 'from prf.resource import Resource, get_view_class, get_parent_elements\n'), ((484, 509), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (497, 509), False, 'import pytest\n'), ((2380, 2410), 'prf.resource.get_view_class', 'get_view_class', (['BaseView', 'root'], {}), '(BaseView, root)\n', (2394, 2410), False, 'from prf.resource import Resource, get_view_class, get_parent_elements\n'), ((2439, 2480), 'prf.resource.get_view_class', 'get_view_class', (['"""prf.view.BaseView"""', 'root'], {}), "('prf.view.BaseView', root)\n", (2453, 2480), False, 'from prf.resource import Resource, get_view_class, get_parent_elements\n')] |
"""Test aggregation of config files and command-line options."""
import os
import pytest
from flake8.main import options
from flake8.options import aggregator
from flake8.options import config
from flake8.options import manager
@pytest.fixture
def optmanager():
"""Create a new OptionManager."""
option_manager = manager.OptionManager(
version="3.0.0",
plugin_versions="",
parents=[],
)
options.register_default_options(option_manager)
return option_manager
@pytest.fixture
def flake8_config(tmp_path):
cfg_s = """\
[flake8]
ignore =
E123,
W234,
E111
exclude =
foo/,
bar/,
bogus/
quiet = 1
"""
cfg = tmp_path.joinpath("tox.ini")
cfg.write_text(cfg_s)
return str(cfg)
def test_aggregate_options_with_config(optmanager, flake8_config):
"""Verify we aggregate options and config values appropriately."""
arguments = [
"flake8",
"--select",
"E11,E34,E402,W,F",
"--exclude",
"tests/*",
]
cfg, cfg_dir = config.load_config(flake8_config, [])
options = aggregator.aggregate_options(
optmanager,
cfg,
cfg_dir,
arguments,
)
assert options.select == ["E11", "E34", "E402", "W", "F"]
assert options.ignore == ["E123", "W234", "E111"]
assert options.exclude == [os.path.abspath("tests/*")]
def test_aggregate_options_when_isolated(optmanager, flake8_config):
"""Verify we aggregate options and config values appropriately."""
arguments = [
"flake8",
"--select",
"E11,E34,E402,W,F",
"--exclude",
"tests/*",
]
cfg, cfg_dir = config.load_config(flake8_config, [], isolated=True)
optmanager.extend_default_ignore(["E8"])
options = aggregator.aggregate_options(optmanager, cfg, cfg_dir, arguments)
assert options.select == ["E11", "E34", "E402", "W", "F"]
assert options.ignore is None
assert options.exclude == [os.path.abspath("tests/*")]
| [
"flake8.main.options.register_default_options",
"flake8.options.config.load_config",
"flake8.options.aggregator.aggregate_options",
"os.path.abspath",
"flake8.options.manager.OptionManager"
] | [((325, 395), 'flake8.options.manager.OptionManager', 'manager.OptionManager', ([], {'version': '"""3.0.0"""', 'plugin_versions': '""""""', 'parents': '[]'}), "(version='3.0.0', plugin_versions='', parents=[])\n", (346, 395), False, 'from flake8.options import manager\n'), ((431, 479), 'flake8.main.options.register_default_options', 'options.register_default_options', (['option_manager'], {}), '(option_manager)\n', (463, 479), False, 'from flake8.main import options\n'), ((1046, 1083), 'flake8.options.config.load_config', 'config.load_config', (['flake8_config', '[]'], {}), '(flake8_config, [])\n', (1064, 1083), False, 'from flake8.options import config\n'), ((1098, 1163), 'flake8.options.aggregator.aggregate_options', 'aggregator.aggregate_options', (['optmanager', 'cfg', 'cfg_dir', 'arguments'], {}), '(optmanager, cfg, cfg_dir, arguments)\n', (1126, 1163), False, 'from flake8.options import aggregator\n'), ((1670, 1722), 'flake8.options.config.load_config', 'config.load_config', (['flake8_config', '[]'], {'isolated': '(True)'}), '(flake8_config, [], isolated=True)\n', (1688, 1722), False, 'from flake8.options import config\n'), ((1782, 1847), 'flake8.options.aggregator.aggregate_options', 'aggregator.aggregate_options', (['optmanager', 'cfg', 'cfg_dir', 'arguments'], {}), '(optmanager, cfg, cfg_dir, arguments)\n', (1810, 1847), False, 'from flake8.options import aggregator\n'), ((1351, 1377), 'os.path.abspath', 'os.path.abspath', (['"""tests/*"""'], {}), "('tests/*')\n", (1366, 1377), False, 'import os\n'), ((1976, 2002), 'os.path.abspath', 'os.path.abspath', (['"""tests/*"""'], {}), "('tests/*')\n", (1991, 2002), False, 'import os\n')] |
# Imports
import sys, os, time, logging, json
# QR code scanning is on a separate file
from qr import qrscan
# Configuration using config.json
with open('config.json', 'r') as f:
config = json.load(f)
if 'outfile' in config:
outfile = config['outfile']
if 'path' in config:
path = config['path']
extensions = config['extensions']
level = -1
if 'loglevel' in config:
if config['loglevel'] == 'info':
level = logging.INFO
    elif config['loglevel'] == 'debug':
level = logging.DEBUG
elif config['loglevel'] == 'error':
level = logging.ERROR
if level != -1:
handlers = [logging.StreamHandler(sys.stdout)]
if 'logfile' in config:
handlers.append(logging.FileHandler(filename=config['logfile']))
logging.basicConfig(encoding='utf-8',
level=level, format=config['logformat'],
handlers=handlers)
# Read optional parameters that override config.json
if len(sys.argv) > 2:
path = sys.argv[1]
outfile = sys.argv[2]
elif len(sys.argv) > 1:
path = sys.argv[1]
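# Illustrative config.json (hypothetical values; these are the keys the
# script actually reads):
#
# {
#     "path": "images/",
#     "outfile": "codes.json",
#     "extensions": [".png", ".jpg", ".jpeg"],
#     "loglevel": "info",
#     "logformat": "%(asctime)s %(levelname)s %(message)s",
#     "logfile": "qr.log"
# }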
# File scan function
def filescan(filepath):
try:
        if os.path.splitext(filepath)[1].lower() in extensions:
logging.info('Scanning file: %s', filepath)
start = time.time()
codes = qrscan(filepath)
if len(codes) > 0:
logging.info('Found %s code' + ('s' if len(codes) >
1 else '') + ': %s', len(codes), codes)
else:
logging.info('No codes found.')
file_time = time.time() - start
logging.info('Scanned in %ss', round(file_time, 2))
return codes
    except Exception as e:
        logging.error('Error: %s', e)
    return []
# Initiate empty codes list
codes = []
# Start scanning with both single file and directory support, appending to our codes list
if os.path.isfile(path):
filecodes = filescan(path)
codes.append({
'file': path,
'codes': filecodes
})
elif os.path.isdir(path):
start = time.time()
for file in os.listdir(path):
filepath = os.path.join(path, os.fsdecode(file))
filecodes = filescan(filepath)
codes.append({
'file': filepath,
'codes': filecodes
})
logging.info('All scans finished in %ss', round(time.time() - start, 2))
else:
logging.error('Invalid path.')
# Output if codes were found
if len(codes) > 0 and outfile:
with open(outfile, 'w') as out:
json.dump(codes, out, indent=2)
        logging.info('Codes available in %s', outfile)
 | [
"logging.basicConfig",
"logging.StreamHandler",
"os.listdir",
"qr.qrscan",
"os.fsdecode",
"os.path.splitext",
"os.path.isfile",
"os.path.isdir",
"logging.FileHandler",
"time.time",
"json.load",
"logging.info",
"logging.error",
"json.dump"
] | [((1789, 1809), 'os.path.isfile', 'os.path.isfile', (['path'], {}), '(path)\n', (1803, 1809), False, 'import sys, os, time, logging, json\n'), ((192, 204), 'json.load', 'json.load', (['f'], {}), '(f)\n', (201, 204), False, 'import sys, os, time, logging, json\n'), ((1908, 1927), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (1921, 1927), False, 'import sys, os, time, logging, json\n'), ((2393, 2439), 'logging.info', 'logging.info', (['"""Codes available in %s"""', 'outfile'], {}), "('Codes available in %s', outfile)\n", (2405, 2439), False, 'import sys, os, time, logging, json\n'), ((594, 627), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stdout'], {}), '(sys.stdout)\n', (615, 627), False, 'import sys, os, time, logging, json\n'), ((728, 830), 'logging.basicConfig', 'logging.basicConfig', ([], {'encoding': '"""utf-8"""', 'level': 'level', 'format': "config['logformat']", 'handlers': 'handlers'}), "(encoding='utf-8', level=level, format=config[\n 'logformat'], handlers=handlers)\n", (747, 830), False, 'import sys, os, time, logging, json\n'), ((1939, 1950), 'time.time', 'time.time', ([], {}), '()\n', (1948, 1950), False, 'import sys, os, time, logging, json\n'), ((1965, 1981), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (1975, 1981), False, 'import sys, os, time, logging, json\n'), ((2229, 2259), 'logging.error', 'logging.error', (['"""Invalid path."""'], {}), "('Invalid path.')\n", (2242, 2259), False, 'import sys, os, time, logging, json\n'), ((2359, 2390), 'json.dump', 'json.dump', (['codes', 'out'], {'indent': '(2)'}), '(codes, out, indent=2)\n', (2368, 2390), False, 'import sys, os, time, logging, json\n'), ((675, 722), 'logging.FileHandler', 'logging.FileHandler', ([], {'filename': "config['logfile']"}), "(filename=config['logfile'])\n", (694, 722), False, 'import sys, os, time, logging, json\n'), ((1163, 1206), 'logging.info', 'logging.info', (['"""Scanning file: %s"""', 'filepath'], {}), "('Scanning file: %s', filepath)\n", (1175, 1206), False, 'import sys, os, time, logging, json\n'), ((1221, 1232), 'time.time', 'time.time', ([], {}), '()\n', (1230, 1232), False, 'import sys, os, time, logging, json\n'), ((1247, 1263), 'qr.qrscan', 'qrscan', (['filepath'], {}), '(filepath)\n', (1253, 1263), False, 'from qr import qrscan\n'), ((1625, 1654), 'logging.error', 'logging.error', (['"""Error: %s"""', 'e'], {}), "('Error: %s', e)\n", (1638, 1654), False, 'import sys, os, time, logging, json\n'), ((1449, 1480), 'logging.info', 'logging.info', (['"""No codes found."""'], {}), "('No codes found.')\n", (1461, 1480), False, 'import sys, os, time, logging, json\n'), ((1499, 1510), 'time.time', 'time.time', ([], {}), '()\n', (1508, 1510), False, 'import sys, os, time, logging, json\n'), ((2017, 2034), 'os.fsdecode', 'os.fsdecode', (['file'], {}), '(file)\n', (2028, 2034), False, 'import sys, os, time, logging, json\n'), ((2196, 2207), 'time.time', 'time.time', ([], {}), '()\n', (2205, 2207), False, 'import sys, os, time, logging, json\n'), ((1113, 1139), 'os.path.splitext', 'os.path.splitext', (['filepath'], {}), '(filepath)\n', (1129, 1139), False, 'import sys, os, time, logging, json\n')] |
import cv2
import numpy as np
from rich import print
dewarped = cv2.imread('../dewarped.png')
'''
SIZE = 600
# Get ROI corners
arucoDict = cv2.aruco.Dictionary_get(cv2.aruco.DICT_APRILTAG_36h11)
arucoParams = cv2.aruco.DetectorParameters_create()
(corners, ids, rejected) = cv2.aruco.detectMarkers(image, arucoDict, parameters=arucoParams)
assert len(corners) == len(ids) == 4
detected = [[ids[i], corners[i]] for i in range(4)]
detected.sort(key = lambda x: x[0])
print(detected)
bounding_box = [
detected[0][1][0][2],
detected[1][1][0][3],
detected[2][1][0][0],
detected[3][1][0][1]
]
img_boxed = image.copy()
cv2.polylines(img_boxed, np.int32([bounding_box]), True, (0, 255, 0), 2)
# cv2.imshow('Fiducial Detection', img_boxed)
# Dewarp
vertices = [
[0, 0],
[SIZE, 0],
[SIZE, SIZE],
[0, SIZE]
]
matrix = cv2.getPerspectiveTransform(np.float32(bounding_box), np.float32(vertices))
dewarped = cv2.warpPerspective(image, matrix, (SIZE, SIZE))
cv2.imwrite('dewarped.png', dewarped)
'''
cv2.imshow('Dewarped', dewarped)
# Marker selection
markers = []
def selectMarker(event, x, y, flags, param):
global markers, dewarped
if event == cv2.EVENT_LBUTTONDOWN:
markers.append((x, y))
print(f'Marker {len(markers)} selected at ({x}, {y})')
dewarped = cv2.circle(dewarped, (x,y), radius=5, color=(0, 0, 255), thickness=-1)
if len(markers) == 27:
print('All 27 markers selected')
print(markers)
cv2.imshow('Dewarped', dewarped)
cv2.setMouseCallback('Dewarped', selectMarker)
cv2.waitKey()
cv2.destroyAllWindows()
 | [
"cv2.setMouseCallback",
"cv2.imshow",
"cv2.circle",
"rich.print",
"cv2.destroyAllWindows",
"cv2.waitKey",
"cv2.imread"
] | [((65, 94), 'cv2.imread', 'cv2.imread', (['"""../dewarped.png"""'], {}), "('../dewarped.png')\n", (75, 94), False, 'import cv2\n'), ((1024, 1056), 'cv2.imshow', 'cv2.imshow', (['"""Dewarped"""', 'dewarped'], {}), "('Dewarped', dewarped)\n", (1034, 1056), False, 'import cv2\n'), ((1531, 1577), 'cv2.setMouseCallback', 'cv2.setMouseCallback', (['"""Dewarped"""', 'selectMarker'], {}), "('Dewarped', selectMarker)\n", (1551, 1577), False, 'import cv2\n'), ((1579, 1592), 'cv2.waitKey', 'cv2.waitKey', ([], {}), '()\n', (1590, 1592), False, 'import cv2\n'), ((1593, 1616), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (1614, 1616), False, 'import cv2\n'), ((1315, 1386), 'cv2.circle', 'cv2.circle', (['dewarped', '(x, y)'], {'radius': '(5)', 'color': '(0, 0, 255)', 'thickness': '(-1)'}), '(dewarped, (x, y), radius=5, color=(0, 0, 255), thickness=-1)\n', (1325, 1386), False, 'import cv2\n'), ((1497, 1529), 'cv2.imshow', 'cv2.imshow', (['"""Dewarped"""', 'dewarped'], {}), "('Dewarped', dewarped)\n", (1507, 1529), False, 'import cv2\n'), ((1429, 1461), 'rich.print', 'print', (['"""All 27 markers selected"""'], {}), "('All 27 markers selected')\n", (1434, 1461), False, 'from rich import print\n'), ((1474, 1488), 'rich.print', 'print', (['markers'], {}), '(markers)\n', (1479, 1488), False, 'from rich import print\n')] |
#!/usr/bin/python
# Copyright (c) 2014 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
import trie
class TrieTest(unittest.TestCase):
def MakeUncompressedTrie(self):
uncompressed = trie.Node()
accept = trie.AcceptInfo(input_rr='%eax', output_rr='%edx')
trie.AddToUncompressedTrie(uncompressed, ['0', '1', '2'], accept)
trie.AddToUncompressedTrie(uncompressed, ['0', '1', '2', '3'], accept)
trie.AddToUncompressedTrie(uncompressed, ['0', '1', '3'], accept)
trie.AddToUncompressedTrie(uncompressed, ['0', '1', '4'], accept)
trie.AddToUncompressedTrie(uncompressed, ['0', '1', '5'], accept)
return uncompressed
def CheckTrieAccepts(self, accept_sequences):
accept = trie.AcceptInfo(input_rr='%eax', output_rr='%edx')
self.assertEquals([(accept, ['0', '1', '2']),
(accept, ['0', '1', '2', '3']),
(accept, ['0', '1', '3']),
(accept, ['0', '1', '4']),
(accept, ['0', '1', '5'])],
accept_sequences)
def testTrieAddAndMerge(self):
uncompressed = self.MakeUncompressedTrie()
self.CheckTrieAccepts(trie.GetAllAcceptSequences(uncompressed))
# n0 -0-> n1 -1-> n2 -2-> n3 -3-> n4
# | -3-> n5
# | -4-> n6
# | -5-> n7
self.assertEquals(8, len(trie.GetAllUniqueNodes(uncompressed)))
node_cache = trie.NodeCache()
compressed_trie = node_cache.Merge(node_cache.empty_node, uncompressed)
self.CheckTrieAccepts(trie.GetAllAcceptSequences(compressed_trie))
# (n4, n5. n6, n7) can be grouped together from above
self.assertEquals(5, len(trie.GetAllUniqueNodes(compressed_trie)))
def testTrieSerializationAndDeserialization(self):
uncompressed = self.MakeUncompressedTrie()
node_cache = trie.NodeCache()
compressed_trie = node_cache.Merge(node_cache.empty_node, uncompressed)
reconstructed_trie = trie.TrieFromDict(trie.TrieToDict(compressed_trie),
node_cache)
self.CheckTrieAccepts(trie.GetAllAcceptSequences(reconstructed_trie))
self.assertEquals(5, len(trie.GetAllUniqueNodes(reconstructed_trie)))
def testTrieDiff(self):
trie1 = trie.Node()
trie2 = trie.Node()
accept1 = trie.AcceptInfo(input_rr='%eax', output_rr='%edx')
accept2 = trie.AcceptInfo(input_rr='%eax', output_rr='%ecx')
trie.AddToUncompressedTrie(trie1, ['0', '1', '2'], accept1)
trie.AddToUncompressedTrie(trie1, ['0', '1', '3'], accept1)
trie.AddToUncompressedTrie(trie1, ['0', '1', '4'], accept1)
trie.AddToUncompressedTrie(trie1, ['0', '1', '5'], accept1)
trie.AddToUncompressedTrie(trie2, ['0', '1', '2'], accept1)
trie.AddToUncompressedTrie(trie2, ['0', '1', '3'], accept1)
trie.AddToUncompressedTrie(trie2, ['0', '1', '4'], accept2)
node_cache = trie.NodeCache()
compressed_trie1 = node_cache.Merge(node_cache.empty_node, trie1)
compressed_trie2 = node_cache.Merge(node_cache.empty_node, trie2)
diffs = set()
compressed_diffs = set()
for diff in trie.DiffTries(trie1, trie2, node_cache.empty_node, ()):
diffs.add(diff)
for diff in trie.DiffTries(compressed_trie1, compressed_trie2,
node_cache.empty_node, ()):
compressed_diffs.add(diff)
self.assertEquals(
diffs,
set([(('0', '1', '4'), accept1, accept2),
(('0', '1', '5'), accept1, None)]))
self.assertEquals(diffs, compressed_diffs)
if __name__ == '__main__':
unittest.main()
| [
"trie.AcceptInfo",
"trie.GetAllUniqueNodes",
"trie.Node",
"trie.NodeCache",
"trie.TrieToDict",
"trie.AddToUncompressedTrie",
"trie.DiffTries",
"unittest.main",
"trie.GetAllAcceptSequences"
 ] | [((3683, 3698), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3696, 3698), False, 'import unittest\n'), ((310, 321), 'trie.Node', 'trie.Node', ([], {}), '()\n', (319, 321), False, 'import trie\n'), ((335, 385), 'trie.AcceptInfo', 'trie.AcceptInfo', ([], {'input_rr': '"""%eax"""', 'output_rr': '"""%edx"""'}), "(input_rr='%eax', output_rr='%edx')\n", (350, 385), False, 'import trie\n'), ((390, 455), 'trie.AddToUncompressedTrie', 'trie.AddToUncompressedTrie', (['uncompressed', "['0', '1', '2']", 'accept'], {}), "(uncompressed, ['0', '1', '2'], accept)\n", (416, 455), False, 'import trie\n'), ((460, 530), 'trie.AddToUncompressedTrie', 'trie.AddToUncompressedTrie', (['uncompressed', "['0', '1', '2', '3']", 'accept'], {}), "(uncompressed, ['0', '1', '2', '3'], accept)\n", (486, 530), False, 'import trie\n'), ((535, 600), 'trie.AddToUncompressedTrie', 'trie.AddToUncompressedTrie', (['uncompressed', "['0', '1', '3']", 'accept'], {}), "(uncompressed, ['0', '1', '3'], accept)\n", (561, 600), False, 'import trie\n'), ((605, 670), 'trie.AddToUncompressedTrie', 'trie.AddToUncompressedTrie', (['uncompressed', "['0', '1', '4']", 'accept'], {}), "(uncompressed, ['0', '1', '4'], accept)\n", (631, 670), False, 'import trie\n'), ((675, 740), 'trie.AddToUncompressedTrie', 'trie.AddToUncompressedTrie', (['uncompressed', "['0', '1', '5']", 'accept'], {}), "(uncompressed, ['0', '1', '5'], accept)\n", (701, 740), False, 'import trie\n'), ((827, 877), 'trie.AcceptInfo', 'trie.AcceptInfo', ([], {'input_rr': '"""%eax"""', 'output_rr': '"""%edx"""'}), "(input_rr='%eax', output_rr='%edx')\n", (842, 877), False, 'import trie\n'), ((1549, 1565), 'trie.NodeCache', 'trie.NodeCache', ([], {}), '()\n', (1563, 1565), False, 'import trie\n'), ((1960, 1976), 'trie.NodeCache', 'trie.NodeCache', ([], {}), '()\n', (1974, 1976), False, 'import trie\n'), ((2372, 2383), 'trie.Node', 'trie.Node', ([], {}), '()\n', (2381, 2383), False, 'import trie\n'), ((2396, 2407), 'trie.Node', 'trie.Node', ([], {}), '()\n', (2405, 2407), False, 'import trie\n'), ((2422, 2472), 'trie.AcceptInfo', 'trie.AcceptInfo', ([], {'input_rr': '"""%eax"""', 'output_rr': '"""%edx"""'}), "(input_rr='%eax', output_rr='%edx')\n", (2437, 2472), False, 'import trie\n'), ((2487, 2537), 'trie.AcceptInfo', 'trie.AcceptInfo', ([], {'input_rr': '"""%eax"""', 'output_rr': '"""%ecx"""'}), "(input_rr='%eax', output_rr='%ecx')\n", (2502, 2537), False, 'import trie\n'), ((2543, 2602), 'trie.AddToUncompressedTrie', 'trie.AddToUncompressedTrie', (['trie1', "['0', '1', '2']", 'accept1'], {}), "(trie1, ['0', '1', '2'], accept1)\n", (2569, 2602), False, 'import trie\n'), ((2607, 2666), 'trie.AddToUncompressedTrie', 'trie.AddToUncompressedTrie', (['trie1', "['0', '1', '3']", 'accept1'], {}), "(trie1, ['0', '1', '3'], accept1)\n", (2633, 2666), False, 'import trie\n'), ((2671, 2730), 'trie.AddToUncompressedTrie', 'trie.AddToUncompressedTrie', (['trie1', "['0', '1', '4']", 'accept1'], {}), "(trie1, ['0', '1', '4'], accept1)\n", (2697, 2730), False, 'import trie\n'), ((2735, 2794), 'trie.AddToUncompressedTrie', 'trie.AddToUncompressedTrie', (['trie1', "['0', '1', '5']", 'accept1'], {}), "(trie1, ['0', '1', '5'], accept1)\n", (2761, 2794), False, 'import trie\n'), ((2800, 2859), 'trie.AddToUncompressedTrie', 'trie.AddToUncompressedTrie', (['trie2', "['0', '1', '2']", 'accept1'], {}), "(trie2, ['0', '1', '2'], accept1)\n", (2826, 2859), False, 'import trie\n'), ((2864, 2923), 'trie.AddToUncompressedTrie', 'trie.AddToUncompressedTrie', (['trie2', "['0', '1', '3']", 'accept1'], {}), "(trie2, ['0', '1', '3'], accept1)\n", (2890, 2923), False, 'import trie\n'), ((2928, 2987), 'trie.AddToUncompressedTrie', 'trie.AddToUncompressedTrie', (['trie2', "['0', '1', '4']", 'accept2'], {}), "(trie2, ['0', '1', '4'], accept2)\n", (2954, 2987), False, 'import trie\n'), ((3006, 3022), 'trie.NodeCache', 'trie.NodeCache', ([], {}), '()\n', (3020, 3022), False, 'import trie\n'), ((3228, 3283), 'trie.DiffTries', 'trie.DiffTries', (['trie1', 'trie2', 'node_cache.empty_node', '()'], {}), '(trie1, trie2, node_cache.empty_node, ())\n', (3242, 3283), False, 'import trie\n'), ((3324, 3401), 'trie.DiffTries', 'trie.DiffTries', (['compressed_trie1', 'compressed_trie2', 'node_cache.empty_node', '()'], {}), '(compressed_trie1, compressed_trie2, node_cache.empty_node, ())\n', (3338, 3401), False, 'import trie\n'), ((1281, 1321), 'trie.GetAllAcceptSequences', 'trie.GetAllAcceptSequences', (['uncompressed'], {}), '(uncompressed)\n', (1307, 1321), False, 'import trie\n'), ((1668, 1711), 'trie.GetAllAcceptSequences', 'trie.GetAllAcceptSequences', (['compressed_trie'], {}), '(compressed_trie)\n', (1694, 1711), False, 'import trie\n'), ((2096, 2128), 'trie.TrieToDict', 'trie.TrieToDict', (['compressed_trie'], {}), '(compressed_trie)\n', (2111, 2128), False, 'import trie\n'), ((2211, 2257), 'trie.GetAllAcceptSequences', 'trie.GetAllAcceptSequences', (['reconstructed_trie'], {}), '(reconstructed_trie)\n', (2237, 2257), False, 'import trie\n'), ((1492, 1528), 'trie.GetAllUniqueNodes', 'trie.GetAllUniqueNodes', (['uncompressed'], {}), '(uncompressed)\n', (1514, 1528), False, 'import trie\n'), ((1800, 1839), 'trie.GetAllUniqueNodes', 'trie.GetAllUniqueNodes', (['compressed_trie'], {}), '(compressed_trie)\n', (1822, 1839), False, 'import trie\n'), ((2288, 2330), 'trie.GetAllUniqueNodes', 'trie.GetAllUniqueNodes', (['reconstructed_trie'], {}), '(reconstructed_trie)\n', (2310, 2330), False, 'import trie\n')]
# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options
from oslo_log import log as logging
from deckhand.conf import config
CONF = config.CONF
LOG = logging.getLogger(__name__)
_CACHE_OPTS = {
'cache.type': 'memory',
'expire': CONF.barbican.cache_timeout,
}
_CACHE = CacheManager(**parse_cache_config_options(_CACHE_OPTS))
_BARBICAN_CACHE = _CACHE.get_cache('barbican_cache')
# NOTE(felipemonteiro): The functions below realize a lookup and reverse-lookup
# to allow for much faster retrieval of encrypted data from Barbican, which
# doesn't currently support batched requests in its Secrets API. This behavior
# is necessary since Deckhand has to potentially retrieve and store up to
# dozens of secrets per request. Note that data for both lookup functions
# below are invalidated together, as they are tied to the same cache.
def lookup_by_ref(barbicanclient, secret_ref):
"""Look up secret object using secret reference.
Allows for quick lookup of secret payloads using ``secret_ref`` via
caching.
"""
def do_lookup():
"""Returns secret object stored in Barbican."""
return barbicanclient.call("secrets.get", secret_ref)
if CONF.barbican.enable_cache:
return _BARBICAN_CACHE.get(key=secret_ref, createfunc=do_lookup)
else:
return do_lookup()
def lookup_by_payload(barbicanclient, **kwargs):
"""Look up secret reference using the secret payload.
Allows for quick lookup of secret references using ``secret_payload`` via
caching (essentially a reverse-lookup).
Useful for ensuring that documents with the same secret payload (which
occurs when the same document is recreated across different revisions)
persist the same secret reference in the database -- and thus quicker
future ``lookup_by_ref`` lookups.
"""
def do_lookup():
"""Returns secret Barbican reference."""
secret = barbicanclient.call("secrets.create", **kwargs)
return secret.store()
secret_payload = kwargs['payload']
if CONF.barbican.enable_cache:
return _BARBICAN_CACHE.get(key=secret_payload, createfunc=do_lookup)
else:
return do_lookup()
def invalidate():
_BARBICAN_CACHE.clear()
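# Illustrative round trip; `barbicanclient` stands in for a configured
# Barbican driver instance and the payload value is hypothetical:
#
#     ref = lookup_by_payload(barbicanclient, payload='s3cr3t')
#     secret = lookup_by_ref(barbicanclient, ref)
#     invalidate()  # drops both cached directions together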
| [
"beaker.util.parse_cache_config_options",
"oslo_log.log.getLogger"
] | [((805, 832), 'oslo_log.log.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (822, 832), True, 'from oslo_log import log as logging\n'), ((947, 986), 'beaker.util.parse_cache_config_options', 'parse_cache_config_options', (['_CACHE_OPTS'], {}), '(_CACHE_OPTS)\n', (973, 986), False, 'from beaker.util import parse_cache_config_options\n')] |
import os
import reader
import json
# todo: get this logger from elsewhere
from celery.utils.log import get_task_logger
log = get_task_logger(__name__)
defaultFieldMappings = [
### SET
(['info','protocol'], 'getReplayProtocolVersion'),
(['info','bytes'], 'getReplayFileByteSize'),
(['info','gameloops'], 'getMatchLengthGameloops'),
(['info','seconds'], 'getMatchLengthSeconds'),
(['info','start_timestamp'], 'getMatchUTCTimestamp'),
(['info','speed'], 'getMatchSpeed'),
(['info','match_type'], 'getMatchType'),
(['info','hero_selelection_mode'], 'getHeroSelectionMode'),
(['map','name'], 'getMapName'),
(['map',{'m_mapSizeX':'width', 'm_mapSizeY':'height'}], 'getGameDescription'),
(['team', [], 'levels'], 'getTeamLevels'),
#(['players', [], 'talents'], 'getTalents'),
#(['players', [], 'talents', [], {'name':'name'}], 'getTalents'),
#(['players', [], {'m_teamId': 'team', 'm_name': 'name', 'm_toonId': 'toon_id'}], 'getPlayers'),
(['raw','players'], 'getPlayers'),
(['raw','details'], 'getReplayDetails'),
(['raw','init_data'], 'getReplayInitData'),
#(['raw','translated_attributes_events'], 'getTranslatedReplayAttributesEvents'),
#(['players', [], 'hero'], 'getPlayersHeroChoiceArray'),
]
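# Key-path semantics, as implemented by StormReplayAnalyzer.analyze() below:
# a string indexes into a dict, an embedded list fans the value out element
# by element, and an embedded dict picks (and optionally renames) keys.
# For example, (['team', [], 'levels'], 'getTeamLevels') stores
# getTeamLevels()[i] under result['team'][i]['levels'] for each team, and
# the {'m_mapSizeX': 'width', 'm_mapSizeY': 'height'} entry copies just
# those two fields out of getGameDescription() as 'width' and 'height'.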
named_field_mappings = {
'RawReplayDetails': [(['raw','details'], 'getReplayDetails')],
'RawReplayInitData': [(['raw','init_data'], 'getReplayInitData')],
'RawReplayTrackerEvents': [(['raw','tracker_events'], 'getReplayTrackerEvents')],
'RawReplayAttributesEvents': [(['raw','attributes_events'], 'getReplayAttributesEvents')],
'RawReplayGameEvents': [(['raw','game_events'], 'getReplayGameEvents')],
'RawReplayMessageEvents': [(['raw','message_events'], 'getReplayMessageEvents')],
'RawTalentSelectionGameEvents': [(['raw','selections'], 'getTalentSelectionGameEvents')],
}
class StormReplayAnalyzer:
@staticmethod
def getAllFieldMappingNames():
return named_field_mappings.keys()
@staticmethod
def getFieldMappingForNames(names):
fieldMapping = []
for name in names:
fieldMapping = fieldMapping + named_field_mappings.get(name, [])
return fieldMapping
def __init__(self, reader):
self.reader = reader
def analyze(self, fieldMappings=None):
if fieldMappings is None:
fieldMappings = defaultFieldMappings
retval = {}
for field in fieldMappings:
value = getattr(self, field[1])()
worklist = [(retval, field[0], value)]
while len(worklist) > 0:
workItem = worklist.pop()
obj = workItem[0]
keyPath = workItem[1]
value = workItem[2]
key = keyPath[0]
isArray = isinstance(key, (int, long))
if isArray and key >= len(obj):
obj.extend([None]*(key + 1 - len(obj)))
if len(keyPath) == 1:
obj[key] = value
elif isinstance(keyPath[1], basestring):
if isArray:
if obj[key] is None:
obj[key] = {}
obj = obj[key]
else:
obj = obj.setdefault(key, {})
worklist.append( (obj, keyPath[1:], value) )
elif isinstance(keyPath[1], list):
if isArray:
if obj[key] is None:
obj[key] = []
obj = obj[key]
else:
obj = obj.setdefault(key, [])
for index, element in enumerate(value):
worklist.append( (obj, [index] + keyPath[2:], element) )
elif isinstance(keyPath[1], dict):
if isArray:
if obj[key] is None:
obj[key] = {}
obj = obj[key]
else:
obj = obj.setdefault(key, {})
for dictKey in value:
if 0 == len(keyPath[1]):
keyToWrite = dictKey
elif keyPath[1].has_key(dictKey):
keyToWrite = keyPath[1][dictKey]
else:
continue
worklist.append( (obj, [keyToWrite] + keyPath[2:], value[dictKey]) )
else:
raise Exception('Key of invalid type: %s' % str(key))
return retval
def getReplayFileByteSize(self):
return self.reader.getReplayFileByteSize()
def getTalentSelectionGameEvents(self):
events = []
for event in self.reader.getReplayGameEvents():
if (event['_event'] != 'NNet.Game.SHeroTalentTreeSelectedEvent'):
continue
events.append(event)
return events
def getReplayProtocolVersion(self):
return self.reader.getReplayProtocolVersion()
def getReplayInitData(self):
return self.reader.getReplayInitData()
def getReplayAttributesEvents(self):
return self.reader.getReplayAttributesEvents()
def getReplayDetails(self):
return self.reader.getReplayDetails()
def getReplayTrackerEvents(self):
return self.reader.getReplayTrackerEvents()
def getReplayGameEvents(self):
return self.reader.getReplayGameEvents()
def getReplayMessageEvents(self):
return self.reader.getReplayMessageEvents()
def getTranslatedReplayAttributesEvents(self):
talentsReader = self.getTalentsReader()
return talentsReader.translate_replay_attributes_events(self.getReplayAttributesEvents())
def getGameDescription(self):
initData = self.getReplayInitData()
return initData['m_syncLobbyState']['m_gameDescription']
def getGameSpeed(self):
try:
return self.gameSpeed
except AttributeError:
self.gameSpeed = 0
return self.gameSpeed
def getTalentsReader(self):
try:
return self.talentsReader
except AttributeError:
replayVersion = self.reader.getReplayProtocolVersion()
try:
self.talentsReader = __import__('stormreplay.talents%s' % replayVersion, fromlist=['talents'])
except ImportError:
raise Exception('Unsupported StormReplay build number for talents: %i' % replayVersion)
return self.talentsReader
def getTalents(self):
try:
return self.talents
except AttributeError:
self.talents = [[] for _ in xrange(10)]
talentsReader = self.getTalentsReader()
generator = talentsReader.decode_game_events_talent_choices(self.reader.getReplayGameEvents(), self.getPlayersHeroChoiceArray())
for choice in generator:
self.talents[choice['_userid']].append({
'seconds': self.gameloopToSeconds(choice['_gameloop']),
'level': choice['m_level'],
'name': choice['m_talentName'],
'description': choice['m_talentDescription'],
'index': choice['m_talentIndex'],
})
return self.talents
def getTeamTalentTierTimes(self):
try:
return self.teamTalentTierTimes
except AttributeError:
teamTalentTierLevel = [[], []]
teamTalentTiersFirstPick = [[], []]
teamTalentTiersLastPick = [[], []]
players = self.getPlayers()
for playerIndex, playerTalentPicks in enumerate(self.getTalents()):
player = players[playerIndex]
for talentTierIndex, talentPick in enumerate(playerTalentPicks):
talentPickTime = talentPick['seconds']
teamIndex = player['m_teamId']
tiersFirstPick = teamTalentTiersFirstPick[teamIndex]
if (talentTierIndex >= len(tiersFirstPick)):
tiersFirstPick.append(talentPickTime)
elif (talentPickTime < tiersFirstPick[talentTierIndex]):
tiersFirstPick[talentTierIndex] = talentPickTime
tiersLastPick = teamTalentTiersLastPick[teamIndex]
if (talentTierIndex >= len(tiersLastPick)):
tiersLastPick.append(talentPickTime)
elif (talentPickTime > tiersLastPick[talentTierIndex]):
tiersLastPick[talentTierIndex] = talentPickTime
if (talentTierIndex >= len(teamTalentTierLevel[teamIndex])):
teamTalentTierLevel[teamIndex].append(talentPick['level'])
else:
teamTalentTierLevel[teamIndex][talentTierIndex] = talentPick['level']
self.teamTalentTierTimes = [[], []]
for teamIndex in xrange(2):
for talentTierIndex, level in enumerate(teamTalentTierLevel[teamIndex]):
self.teamTalentTierTimes[teamIndex].append({
'earliest': teamTalentTiersFirstPick[teamIndex][talentTierIndex],
'latest': teamTalentTiersLastPick[teamIndex][talentTierIndex],
'level': level,
})
return self.teamTalentTierTimes
def getTeamLevels(self):
try:
return self.teamLevels
except AttributeError:
teamTalentTierTimes = self.getTeamTalentTierTimes()
self.teamLevels = [[], []]
for teamIndex in xrange(2):
talentTierTimes = teamTalentTierTimes[teamIndex]
levelTimes = [0] * talentTierTimes[-1]['level']
for firstTier, nextTier in zip(talentTierTimes, talentTierTimes[1:]):
levelRange = nextTier['level'] - firstTier['level']
for level in xrange(firstTier['level'], nextTier['level']+1):
levelIndex = level-1
lerp = float(level - firstTier['level']) / levelRange
time = lerp * (nextTier['earliest'] - firstTier['earliest']) + firstTier['earliest']
levelTimes[levelIndex] = time
levelToTalentTierInfo = {}
for tierInfo in talentTierTimes:
levelToTalentTierInfo[str(tierInfo['level'])] = tierInfo
for levelIndex, time in enumerate(levelTimes):
level = levelIndex + 1
levelInfo = {
'level': levelIndex + 1,
'seconds': time,
'is_talent_tier': False,
}
if levelToTalentTierInfo.has_key(str(level)):
tierInfo = levelToTalentTierInfo[str(level)]
levelInfo['is_talent_tier'] = True
levelInfo['earliest_talent_picked_time'] = tierInfo['earliest']
levelInfo['latest_talent_picked_time'] = tierInfo['latest']
self.teamLevels[teamIndex].append(levelInfo)
return self.teamLevels
def getMapName(self):
try:
return self.mapName
except AttributeError:
self.mapName = self.reader.getReplayDetails()['m_title']['utf8']
return self.mapName
def getPlayersHeroChoiceArray(self):
try:
return self.playersHeroArray
except AttributeError:
self.playersHeroArray = [None] * 10
for i, player in enumerate(self.getPlayerSpawnInfo()):
self.playersHeroArray[i] = player['hero']
return self.playersHeroArray
# returns array indexed by user ID
def getPlayers(self):
try:
return self.players
except AttributeError:
self.players = [None] * 10
for i, player in enumerate(self.getReplayDetails()['m_playerList']):
#TODO: confirm that m_workingSetSlotId == i always
toon = player['m_toon']
player['m_toonId'] = "%i-%s-%i-%i" % (toon['m_region'], toon['m_programId'], toon['m_realm'], toon['m_id'])
player['m_name'] = player['m_name']['utf8']
player['m_controlPlayerId'] = i+1
self.players[i] = player
return self.players
# returns array indexed by user ID
def getPlayerSpawnInfo(self):
try:
return self.playerSpawnInfo
except AttributeError:
self.playerSpawnInfo = [None] * 10
playerIdToUserId = {}
for event in self.getReplayTrackerEvents():
if event['_event'] == 'NNet.Replay.Tracker.SPlayerSetupEvent':
playerIdToUserId[event['m_playerId']] = event['m_userId']
elif event['_event'] == 'NNet.Replay.Tracker.SUnitBornEvent' and (int(event['_gameloop']) > 0):
playerId = event['m_controlPlayerId']
if (playerIdToUserId.has_key(playerId)):
playerIndex = playerIdToUserId[playerId] # always playerId-1 so far, but this is safer
self.playerSpawnInfo[playerIndex] = {
'hero': event['m_unitTypeName']['utf8'],
'unit_tag': event['m_unitTag']
}
del playerIdToUserId[playerId]
if len(playerIdToUserId) == 0:
break
return self.playerSpawnInfo
def getMatchSpeed(self):
attributes = self.getTranslatedReplayAttributesEvents()
return attributes[16]['m_gameSpeed']
def getMatchType(self):
attributes = self.getTranslatedReplayAttributesEvents()
return attributes[16]['m_gameType']
def getHeroSelectionMode(self):
attributes = self.getTranslatedReplayAttributesEvents()
return attributes[16]['m_heroSelectionMode']
def getMatchUTCTimestamp(self):
try:
return self.utcTimestamp
except AttributeError:
self.utcTimestamp = (self.getReplayDetails()['m_timeUTC'] / 10000000) - 11644473600
return self.utcTimestamp
def getMatchLengthGameloops(self):
lastEvent = self.getReplayTrackerEvents()[-1]
return lastEvent['_gameloop']
def getMatchLengthSeconds(self):
return self.gameloopToSeconds(self.getMatchLengthGameloops())
    def gameloopToSeconds(self, gameloop):
        # replays tick at 16 gameloops per second
        return gameloop / 16.0
    def gameloopToTimestamp(self, gameloop):
        return self.getMatchUTCTimestamp() + gameloop / 16.0
def getChat(self):
try:
return self.chat
except AttributeError:
self.chat = []
for messageEvent in self.getReplayMessageEvents():
if (messageEvent['_event'] != 'NNet.Game.SChatMessage'):
continue
userId = messageEvent['_userid']['m_userId']
chatData = {
't': self.gameloopToTimestamp(messageEvent['_gameloop']),
'user': userId,
'msg': messageEvent['m_string']['utf8'],
}
self.chat.append(chatData)
return self.chat
| [
"celery.utils.log.get_task_logger"
] | [((128, 153), 'celery.utils.log.get_task_logger', 'get_task_logger', (['__name__'], {}), '(__name__)\n', (143, 153), False, 'from celery.utils.log import get_task_logger\n')] |
# -*- coding: utf-8 -*-
import matplotlib.colors as colorplt
import matplotlib.pyplot as plt
import numpy as np
from sktime.distances._distance import distance_alignment_path, pairwise_distance
gray_cmap = colorplt.LinearSegmentedColormap.from_list("", ["#c9cacb", "white"])
def _path_mask(cost_matrix, path, ax, theme=gray_cmap):
plot_matrix = np.zeros_like(cost_matrix)
max_size = max(cost_matrix.shape)
for i in range(max_size):
for j in range(max_size):
if (i, j) in path:
plot_matrix[i, j] = 1.0
elif cost_matrix[i, j] == np.inf:
plot_matrix[i, j] = 0.0
else:
plot_matrix[i, j] = 0.25
for i in range(max_size):
for j in range(max_size):
c = cost_matrix[j, i]
ax.text(i, j, str(round(c, 2)), va="center", ha="center", size=10)
ax.matshow(plot_matrix, cmap=theme)
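# _path_mask draws warping-path cells as 1.0, unreachable (np.inf) cells as 0.0,
# and everything else as 0.25 on the gray colormap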
def _pairwise_path(x, y, metric):
pw_matrix = pairwise_distance(x, y, metric=metric)
path = []
for i in range(pw_matrix.shape[0]):
for j in range(pw_matrix.shape[1]):
if i == j:
path.append((i, j))
return path, pw_matrix.trace(), pw_matrix
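# fallback for metrics without an alignment path: use the main diagonal as the
# "path" and the trace of the pairwise matrix as the distance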
def _plot_path(
x: np.ndarray,
y: np.ndarray,
metric: str,
dist_kwargs: dict = None,
title: str = "",
plot_over_pw: bool = False,
):
if dist_kwargs is None:
dist_kwargs = {}
try:
path, dist, cost_matrix = distance_alignment_path(
x, y, metric=metric, return_cost_matrix=True, **dist_kwargs
)
if metric == "lcss":
_path = []
for tup in path:
_path.append(tuple(x + 1 for x in tup))
path = _path
if plot_over_pw is True:
if metric == "lcss":
pw = pairwise_distance(x, y, metric="euclidean")
cost_matrix = np.zeros_like(cost_matrix)
cost_matrix[1:, 1:] = pw
else:
pw = pairwise_distance(x, y, metric="squared")
cost_matrix = pw
except NotImplementedError:
path, dist, cost_matrix = _pairwise_path(x, y, metric)
plt.figure(1, figsize=(8, 8))
x_size = x.shape[0]
# definitions for the axes
left, bottom = 0.01, 0.1
w_ts = h_ts = 0.2
left_h = left + w_ts + 0.02
width = height = 0.65
bottom_h = bottom + height + 0.02
rect_s_y = [left, bottom, w_ts, height]
rect_gram = [left_h, bottom, width, height]
rect_s_x = [left_h, bottom_h, width, h_ts]
ax_gram = plt.axes(rect_gram)
ax_s_x = plt.axes(rect_s_x)
ax_s_y = plt.axes(rect_s_y)
_path_mask(cost_matrix, path, ax_gram)
ax_gram.axis("off")
ax_gram.autoscale(False)
# ax_gram.plot([j for (i, j) in path], [i for (i, j) in path], "w-",
# linewidth=3.)
ax_s_x.plot(np.arange(x_size), y, "b-", linewidth=3.0, color="#818587")
ax_s_x.axis("off")
ax_s_x.set_xlim((0, x_size - 1))
ax_s_y.plot(-x, np.arange(x_size), "b-", linewidth=3.0, color="#818587")
ax_s_y.axis("off")
ax_s_y.set_ylim((0, x_size - 1))
ax_s_x.set_title(title, size=10)
return plt
def _plot_alignment(x, y, metric, dist_kwargs: dict = None, title: str = ""):
if dist_kwargs is None:
dist_kwargs = {}
try:
path, dist, cost_matrix = distance_alignment_path(
x, y, metric=metric, return_cost_matrix=True, **dist_kwargs
)
except NotImplementedError:
path, dist, cost_matrix = _pairwise_path(x, y, metric)
plt.figure(1, figsize=(8, 8))
plt.plot(x, "b-", color="black")
plt.plot(y, "g-", color="black")
for positions in path:
try:
plt.plot(
[positions[0], positions[1]],
[x[positions[0]], y[positions[1]]],
"--",
color="#818587",
)
        except Exception:
continue
plt.legend()
plt.title(title)
plt.tight_layout()
return plt
if __name__ == "__main__":
x = np.array(
[
-0.7553383207,
0.4460987596,
1.197682907,
0.1714334808,
0.5639929213,
0.6891222874,
1.793828873,
0.06570866314,
0.2877381702,
1.633620422,
]
)
y = np.array(
[
0.01765193577,
1.536784164,
-0.1413292622,
-0.7609346135,
-0.1767363331,
-2.192007072,
-0.1933165696,
-0.4648166839,
-0.9444888843,
-0.239523623,
]
)
import os
def _save_plt(plt):
plt[0].savefig(f"{metric_path}/{plt[1]}")
plt[0].cla()
plt[0].clf()
if not os.path.exists("./plots"):
os.makedirs("./plots")
metrics = [
"euclidean",
"erp",
"edr",
"lcss",
"squared",
"dtw",
"ddtw",
"wdtw",
"wddtw",
"msm",
]
# metrics = ['lcss']
for metric in metrics:
metric_path = f"./plots/{metric}"
if not os.path.exists(metric_path):
os.makedirs(metric_path)
        _save_plt(
(
_plot_path(x, y, metric, {"epsilon": 1.0}),
f"{metric}_path_through_cost_matrix",
)
)
_save_plt(
(
_plot_path(x, y, metric, {"window": 0.2, "epsilon": 1.0}),
f"{metric}_path_through_20_cost_matrix",
)
)
if metric == "wdtw":
g_val = [0.2, 0.3]
for g in g_val:
file_save = str(g).split(".")
_save_plt(
(
_plot_path(x, y, metric, {"g": g}),
f"{metric}_path_through_g{file_save[1]}_cost_matrix",
)
)
_save_plt((_plot_alignment(x, y, metric), f"{metric}_alignment"))
_save_plt(
(_plot_alignment(x, y, metric, {"window": 0.2}), f"{metric}_alignment_20")
)
| [
"os.path.exists",
"os.makedirs",
"numpy.arange",
"matplotlib.pyplot.plot",
"matplotlib.colors.LinearSegmentedColormap.from_list",
"sktime.distances._distance.pairwise_distance",
"numpy.array",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.axes",
"matplotlib.pyplot.tight_layout",
"matplotlib.pyp... | [((208, 276), 'matplotlib.colors.LinearSegmentedColormap.from_list', 'colorplt.LinearSegmentedColormap.from_list', (['""""""', "['#c9cacb', 'white']"], {}), "('', ['#c9cacb', 'white'])\n", (250, 276), True, 'import matplotlib.colors as colorplt\n'), ((353, 379), 'numpy.zeros_like', 'np.zeros_like', (['cost_matrix'], {}), '(cost_matrix)\n', (366, 379), True, 'import numpy as np\n'), ((1048, 1086), 'sktime.distances._distance.pairwise_distance', 'pairwise_distance', (['x', 'y'], {'metric': 'metric'}), '(x, y, metric=metric)\n', (1065, 1086), False, 'from sktime.distances._distance import distance_alignment_path, pairwise_distance\n'), ((2259, 2288), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {'figsize': '(8, 8)'}), '(1, figsize=(8, 8))\n', (2269, 2288), True, 'import matplotlib.pyplot as plt\n'), ((2647, 2666), 'matplotlib.pyplot.axes', 'plt.axes', (['rect_gram'], {}), '(rect_gram)\n', (2655, 2666), True, 'import matplotlib.pyplot as plt\n'), ((2680, 2698), 'matplotlib.pyplot.axes', 'plt.axes', (['rect_s_x'], {}), '(rect_s_x)\n', (2688, 2698), True, 'import matplotlib.pyplot as plt\n'), ((2712, 2730), 'matplotlib.pyplot.axes', 'plt.axes', (['rect_s_y'], {}), '(rect_s_y)\n', (2720, 2730), True, 'import matplotlib.pyplot as plt\n'), ((3646, 3675), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {'figsize': '(8, 8)'}), '(1, figsize=(8, 8))\n', (3656, 3675), True, 'import matplotlib.pyplot as plt\n'), ((3681, 3713), 'matplotlib.pyplot.plot', 'plt.plot', (['x', '"""b-"""'], {'color': '"""black"""'}), "(x, 'b-', color='black')\n", (3689, 3713), True, 'import matplotlib.pyplot as plt\n'), ((3718, 3750), 'matplotlib.pyplot.plot', 'plt.plot', (['y', '"""g-"""'], {'color': '"""black"""'}), "(y, 'g-', color='black')\n", (3726, 3750), True, 'import matplotlib.pyplot as plt\n'), ((4022, 4034), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (4032, 4034), True, 'import matplotlib.pyplot as plt\n'), ((4039, 4055), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (4048, 4055), True, 'import matplotlib.pyplot as plt\n'), ((4061, 4079), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (4077, 4079), True, 'import matplotlib.pyplot as plt\n'), ((4132, 4291), 'numpy.array', 'np.array', (['[-0.7553383207, 0.4460987596, 1.197682907, 0.1714334808, 0.5639929213, \n 0.6891222874, 1.793828873, 0.06570866314, 0.2877381702, 1.633620422]'], {}), '([-0.7553383207, 0.4460987596, 1.197682907, 0.1714334808, \n 0.5639929213, 0.6891222874, 1.793828873, 0.06570866314, 0.2877381702, \n 1.633620422])\n', (4140, 4291), True, 'import numpy as np\n'), ((4436, 4601), 'numpy.array', 'np.array', (['[0.01765193577, 1.536784164, -0.1413292622, -0.7609346135, -0.1767363331, -\n 2.192007072, -0.1933165696, -0.4648166839, -0.9444888843, -0.239523623]'], {}), '([0.01765193577, 1.536784164, -0.1413292622, -0.7609346135, -\n 0.1767363331, -2.192007072, -0.1933165696, -0.4648166839, -0.9444888843,\n -0.239523623])\n', (4444, 4601), True, 'import numpy as np\n'), ((1545, 1634), 'sktime.distances._distance.distance_alignment_path', 'distance_alignment_path', (['x', 'y'], {'metric': 'metric', 'return_cost_matrix': '(True)'}), '(x, y, metric=metric, return_cost_matrix=True, **\n dist_kwargs)\n', (1568, 1634), False, 'from sktime.distances._distance import distance_alignment_path, pairwise_distance\n'), ((2951, 2968), 'numpy.arange', 'np.arange', (['x_size'], {}), '(x_size)\n', (2960, 2968), True, 'import numpy as np\n'), ((3092, 3109), 'numpy.arange', 
'np.arange', (['x_size'], {}), '(x_size)\n', (3101, 3109), True, 'import numpy as np\n'), ((3439, 3528), 'sktime.distances._distance.distance_alignment_path', 'distance_alignment_path', (['x', 'y'], {'metric': 'metric', 'return_cost_matrix': '(True)'}), '(x, y, metric=metric, return_cost_matrix=True, **\n dist_kwargs)\n', (3462, 3528), False, 'from sktime.distances._distance import distance_alignment_path, pairwise_distance\n'), ((4881, 4906), 'os.path.exists', 'os.path.exists', (['"""./plots"""'], {}), "('./plots')\n", (4895, 4906), False, 'import os\n'), ((4916, 4938), 'os.makedirs', 'os.makedirs', (['"""./plots"""'], {}), "('./plots')\n", (4927, 4938), False, 'import os\n'), ((3804, 3905), 'matplotlib.pyplot.plot', 'plt.plot', (['[positions[0], positions[1]]', '[x[positions[0]], y[positions[1]]]', '"""--"""'], {'color': '"""#818587"""'}), "([positions[0], positions[1]], [x[positions[0]], y[positions[1]]],\n '--', color='#818587')\n", (3812, 3905), True, 'import matplotlib.pyplot as plt\n'), ((5235, 5262), 'os.path.exists', 'os.path.exists', (['metric_path'], {}), '(metric_path)\n', (5249, 5262), False, 'import os\n'), ((5276, 5300), 'os.makedirs', 'os.makedirs', (['metric_path'], {}), '(metric_path)\n', (5287, 5300), False, 'import os\n'), ((1903, 1946), 'sktime.distances._distance.pairwise_distance', 'pairwise_distance', (['x', 'y'], {'metric': '"""euclidean"""'}), "(x, y, metric='euclidean')\n", (1920, 1946), False, 'from sktime.distances._distance import distance_alignment_path, pairwise_distance\n'), ((1977, 2003), 'numpy.zeros_like', 'np.zeros_like', (['cost_matrix'], {}), '(cost_matrix)\n', (1990, 2003), True, 'import numpy as np\n'), ((2084, 2125), 'sktime.distances._distance.pairwise_distance', 'pairwise_distance', (['x', 'y'], {'metric': '"""squared"""'}), "(x, y, metric='squared')\n", (2101, 2125), False, 'from sktime.distances._distance import distance_alignment_path, pairwise_distance\n')] |
# RT Ext - Useful Util
# Note: this is different from a normal extension.
import asyncio
from json import dumps
from time import time
import discord
from aiofile import async_open
from discord.ext import commands, tasks
class RtUtil(commands.Cog):
def __init__(self, bot):
self.bot = bot
self.data = {
"list_embed": {}
}
self.ARROW_EMOJI = ["◀️", "▶️"]
self.now = time()
self.save_queue = []
self.list_embed_timeout_loop.start()
def list_embed(self, member_id: int, embeds: list[discord.Embed],
timeout: int = 60, anyone: bool = False
                   ) -> tuple[discord.Embed, list[str]]:
self.data["list_embed"][member_id] = {
"embeds": [embed.to_dict() for embed in embeds],
"timeout": self.now + timeout,
"anyone": anyone
}
return embeds[0], self.ARROW_EMOJI
@tasks.loop(seconds=5)
async def list_embed_timeout_loop(self):
for user_id in self.data["list_embed"]:
if self.now > self.data["list_embed"][user_id]["timeout"]:
del self.data["list_embed"][user_id]
async def list_embed_reaction_task(self, reaction, user):
# === list embed === #
if (not reaction.message.embeds
or str(reaction.emoji) not in self.ARROW_EMOJI
or reaction.message.author.id != self.bot.user.id
or user.id not in self.data["list_embed"]):
return
embed = reaction.message.embeds[0]
now_embed = embed.to_dict()
for user_id in self.data["list_embed"]:
if (now_embed in self.data["list_embed"][user.id]["embeds"]
and user_id == user.id):
data = self.data["list_embed"][user.id]
now = data["embeds"].index(now_embed)
next_page = 0
if str(reaction.emoji) == self.ARROW_EMOJI[0]:
next_page = now - 1
elif str(reaction.emoji) == self.ARROW_EMOJI[1]:
next_page = now + 1
if len(data["embeds"]) != next_page and now != 0:
embed = data["embeds"][next_page]
await reaction.message.edit(embed=embed)
break
try:
await reaction.message.remove_reaction(str(reaction.emoji), user)
except Exception:
pass
@commands.Cog.listener()
async def on_reaction_add(self, reaction, user):
await self.list_embed_reaction_task(reaction, user)
def unload_cog(self):
self.list_embed_timeout_loop.cancel()
# Webhook Sender
async def send(channel, author, content=None, embeds=None,
files=None, wait=False, name='RT-Tool'):
wb = discord.utils.get(await channel.webhooks(), name=name)
wb = wb if wb else await channel.create_webhook(name=name)
return await wb.send(wait=wait, username=author.name,
avatar_url=author.avatar_url, content=content,
embeds=embeds, files=files)
async def not_author_send(channel, author_name, icon_url, content=None,
embeds=None, files=None, wait=False, name='RT-Tool'):
wb = discord.utils.get(await channel.webhooks(), name=name)
wb = wb if wb else await channel.create_webhook(name=name)
return await wb.send(wait=wait, username=author_name, avatar_url=icon_url,
content=content, embeds=embeds, files=files)
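# usage sketch (names illustrative; inside an async context with a TextChannel
# `channel` and a Member `author`):
#   await send(channel, author, content="hello")  # posts as `author` via the RT-Tool webhook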
# easy_embed
def easy_embed(content, color=discord.Embed.Empty):
es = ">>"
spl = content.splitlines()
title = spl[0][len(es):]
desc, fields = [], {}
footer = None if ';;' not in spl[-1] else spl[-1][2:]
if footer:
spl.pop(-1)
spl.pop(0)
f = None
for c in spl:
if c == "":
continue
if c[0] == '<':
f = c[1:] if '!' != c[1] else c[2:]
fields[f] = {'i': True if '!' != c[1] else False, 'c': []}
continue
if f:
fields[f]['c'].append(c)
continue
desc.append(c)
e = discord.Embed(
title=title,
description='\n'.join(desc),
color=color
)
for f in fields.keys():
e.add_field(
name=f,
value='\n'.join(fields[f]['c']),
inline=fields[f]['i']
)
if footer:
e.set_footer(text=footer)
return e
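# usage sketch of the markup parsed above: first line ">>Title" sets the title,
# "<Field" starts an inline field ("<!Field" for non-inline), and a final line
# starting with ";;" becomes the footer:
#   e = easy_embed(">>Title\ndescription\n<Stats\nvalue line\n;;some footer")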
# Role TOOL
def check_int(v):
try:
int(v)
except BaseException:
return False
else:
return True
def has_roles(member, roles):
return any(bool(discord.utils.get(
member.roles, id=role.id)) for role in roles)
def role2obj(guild, arg):
roles_raw, roles = arg.split(','), []
for role in roles_raw:
if '@' in role:
roles.append(guild.get_role(int(role[3:-1])))
elif check_int(role):
roles.append(guild.get_role(int(role)))
else:
roles.append(discord.utils.get(guild.roles, name=role))
return roles
class Roler(discord.ext.commands.Converter):
async def convert(self, ctx, arg):
return role2obj(ctx.guild, arg)
def similer(b, a, m):
return any(a[i:i + m] in b for i in range(len(a) - m))
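# i.e. True when some length-m window of `a` also occurs in `b`
# (a fuzzy containment check)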
def setup(bot):
return RtUtil(bot)
| [
"discord.ext.commands.Cog.listener",
"discord.utils.get",
"discord.ext.tasks.loop",
"time.time"
] | [((906, 927), 'discord.ext.tasks.loop', 'tasks.loop', ([], {'seconds': '(5)'}), '(seconds=5)\n', (916, 927), False, 'from discord.ext import commands, tasks\n'), ((2425, 2448), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (2446, 2448), False, 'from discord.ext import commands, tasks\n'), ((399, 405), 'time.time', 'time', ([], {}), '()\n', (403, 405), False, 'from time import time\n'), ((4627, 4670), 'discord.utils.get', 'discord.utils.get', (['member.roles'], {'id': 'role.id'}), '(member.roles, id=role.id)\n', (4644, 4670), False, 'import discord\n'), ((5000, 5041), 'discord.utils.get', 'discord.utils.get', (['guild.roles'], {'name': 'role'}), '(guild.roles, name=role)\n', (5017, 5041), False, 'import discord\n')] |
import numpy as np
import pytest
from src.models.noise_transformation import average_true_var_real, average_true_var_imag, average_true_cov, \
average_true_noise_covariance, naive_noise_covariance
test_cases_real_variance = [
(2 - 3j, 0, 0, 0),
(0, 1, 1, np.exp(-2) * (2 * np.cosh(2) - np.cosh(1))),
(2j, 1, 1, 4 * np.exp(-2) * (np.sinh(2) - np.sinh(1)) + np.exp(-2) * (2 * np.sinh(2) - np.sinh(1))),
(-2j, 1, 1, 4 * np.exp(-2) * (np.sinh(2) - np.sinh(1)) + np.exp(-2) * (2 * np.sinh(2) - np.sinh(1))),
]
test_cases_imag_variance = [
(4 - 3j, 0, 0, 0),
(0, 1, 1, np.exp(-2) * (2 * np.sinh(2) - np.sinh(1))),
(2j, 1, 1, 4 * np.exp(-2) * (np.cosh(2) - np.cosh(1)) + np.exp(-2) * (2 * np.cosh(2) - np.cosh(1))),
(-2j, 1, 1, 4 * np.exp(-2) * (np.cosh(2) - np.cosh(1)) + np.exp(-2) * (2 * np.cosh(2) - np.cosh(1))),
]
test_cases_covariance = [
(4 - 3j, 0, 0, 0),
(0, 1, 1, 0),
(2j, 1, 1, 0),
(-2j, 1, 1, 0),
(np.sqrt(2) * (1 + 1j), 1, 1, 0.5 * np.exp(-4) * (1 + 5 * (1 - np.exp(1)))),
]
@pytest.mark.parametrize("m,sd_magnitude,sd_phase,expected", test_cases_real_variance)
def test_variance_of_real_noise(m, sd_magnitude, sd_phase, expected):
res = average_true_var_real(m, sd_magnitude, sd_phase)
np.testing.assert_allclose(res, expected)
@pytest.mark.parametrize("m,sd_magnitude,sd_phase,expected", test_cases_imag_variance)
def test_variance_of_imag_noise(m, sd_magnitude, sd_phase, expected):
res = average_true_var_imag(m, sd_magnitude, sd_phase)
np.testing.assert_allclose(res, expected)
@pytest.mark.parametrize("m,sd_magnitude,sd_phase,expected", test_cases_covariance)
def test_covariance_of_noise(m, sd_magnitude, sd_phase, expected):
res = average_true_cov(m, sd_magnitude, sd_phase)
np.testing.assert_allclose(res, expected, rtol=0, atol=1e-10)
def test_cartesian_noise_covariance_matrix():
sd_magnitude = 1
sd_phase = 1
measurement = np.zeros(2)
res = average_true_noise_covariance(measurement, sd_magnitude, sd_phase)
expected = np.diag(
[np.exp(-2) * (2 * np.cosh(2) - np.cosh(1))] * 2 + [np.exp(-2) * (2 * np.sinh(2) - np.sinh(1))] * 2)
np.testing.assert_allclose(res.todense(), expected)
def test_naive_covariance_matrix():
sd_magnitude = 1
sd_phase = 1
measurement = np.array([0, 1j])
expected = np.array([
[1, 0, 0, 0],
[0, 1, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 1],
    ], dtype=float)
res = naive_noise_covariance(measurement, sd_magnitude, sd_phase)
np.testing.assert_allclose(res.todense(), expected, rtol=0, atol=1e-10)
| [
"src.models.noise_transformation.naive_noise_covariance",
"numpy.sqrt",
"src.models.noise_transformation.average_true_var_imag",
"numpy.testing.assert_allclose",
"src.models.noise_transformation.average_true_cov",
"numpy.sinh",
"numpy.exp",
"pytest.mark.parametrize",
"numpy.zeros",
"src.models.noi... | [((1045, 1134), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""m,sd_magnitude,sd_phase,expected"""', 'test_cases_real_variance'], {}), "('m,sd_magnitude,sd_phase,expected',\n test_cases_real_variance)\n", (1068, 1134), False, 'import pytest\n'), ((1309, 1398), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""m,sd_magnitude,sd_phase,expected"""', 'test_cases_imag_variance'], {}), "('m,sd_magnitude,sd_phase,expected',\n test_cases_imag_variance)\n", (1332, 1398), False, 'import pytest\n'), ((1573, 1659), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""m,sd_magnitude,sd_phase,expected"""', 'test_cases_covariance'], {}), "('m,sd_magnitude,sd_phase,expected',\n test_cases_covariance)\n", (1596, 1659), False, 'import pytest\n'), ((1211, 1259), 'src.models.noise_transformation.average_true_var_real', 'average_true_var_real', (['m', 'sd_magnitude', 'sd_phase'], {}), '(m, sd_magnitude, sd_phase)\n', (1232, 1259), False, 'from src.models.noise_transformation import average_true_var_real, average_true_var_imag, average_true_cov, average_true_noise_covariance, naive_noise_covariance\n'), ((1264, 1305), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['res', 'expected'], {}), '(res, expected)\n', (1290, 1305), True, 'import numpy as np\n'), ((1475, 1523), 'src.models.noise_transformation.average_true_var_imag', 'average_true_var_imag', (['m', 'sd_magnitude', 'sd_phase'], {}), '(m, sd_magnitude, sd_phase)\n', (1496, 1523), False, 'from src.models.noise_transformation import average_true_var_real, average_true_var_imag, average_true_cov, average_true_noise_covariance, naive_noise_covariance\n'), ((1528, 1569), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['res', 'expected'], {}), '(res, expected)\n', (1554, 1569), True, 'import numpy as np\n'), ((1733, 1776), 'src.models.noise_transformation.average_true_cov', 'average_true_cov', (['m', 'sd_magnitude', 'sd_phase'], {}), '(m, sd_magnitude, sd_phase)\n', (1749, 1776), False, 'from src.models.noise_transformation import average_true_var_real, average_true_var_imag, average_true_cov, average_true_noise_covariance, naive_noise_covariance\n'), ((1781, 1842), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['res', 'expected'], {'rtol': '(0)', 'atol': '(1e-10)'}), '(res, expected, rtol=0, atol=1e-10)\n', (1807, 1842), True, 'import numpy as np\n'), ((1947, 1958), 'numpy.zeros', 'np.zeros', (['(2)'], {}), '(2)\n', (1955, 1958), True, 'import numpy as np\n'), ((1969, 2035), 'src.models.noise_transformation.average_true_noise_covariance', 'average_true_noise_covariance', (['measurement', 'sd_magnitude', 'sd_phase'], {}), '(measurement, sd_magnitude, sd_phase)\n', (1998, 2035), False, 'from src.models.noise_transformation import average_true_var_real, average_true_var_imag, average_true_cov, average_true_noise_covariance, naive_noise_covariance\n'), ((2319, 2338), 'numpy.array', 'np.array', (['[0, 1.0j]'], {}), '([0, 1.0j])\n', (2327, 2338), True, 'import numpy as np\n'), ((2352, 2439), 'numpy.array', 'np.array', (['[[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 0, 0], [0, 0, 0, 1]]'], {'dtype': 'np.float'}), '([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 0, 0], [0, 0, 0, 1]], dtype=np\n .float)\n', (2360, 2439), True, 'import numpy as np\n'), ((2484, 2543), 'src.models.noise_transformation.naive_noise_covariance', 'naive_noise_covariance', (['measurement', 'sd_magnitude', 'sd_phase'], {}), '(measurement, sd_magnitude, sd_phase)\n', (2506, 2543), False, 'from 
src.models.noise_transformation import average_true_var_real, average_true_var_imag, average_true_cov, average_true_noise_covariance, naive_noise_covariance\n'), ((269, 279), 'numpy.exp', 'np.exp', (['(-2)'], {}), '(-2)\n', (275, 279), True, 'import numpy as np\n'), ((594, 604), 'numpy.exp', 'np.exp', (['(-2)'], {}), '(-2)\n', (600, 604), True, 'import numpy as np\n'), ((964, 974), 'numpy.sqrt', 'np.sqrt', (['(2)'], {}), '(2)\n', (971, 974), True, 'import numpy as np\n'), ((300, 310), 'numpy.cosh', 'np.cosh', (['(1)'], {}), '(1)\n', (307, 310), True, 'import numpy as np\n'), ((374, 384), 'numpy.exp', 'np.exp', (['(-2)'], {}), '(-2)\n', (380, 384), True, 'import numpy as np\n'), ((480, 490), 'numpy.exp', 'np.exp', (['(-2)'], {}), '(-2)\n', (486, 490), True, 'import numpy as np\n'), ((625, 635), 'numpy.sinh', 'np.sinh', (['(1)'], {}), '(1)\n', (632, 635), True, 'import numpy as np\n'), ((699, 709), 'numpy.exp', 'np.exp', (['(-2)'], {}), '(-2)\n', (705, 709), True, 'import numpy as np\n'), ((805, 815), 'numpy.exp', 'np.exp', (['(-2)'], {}), '(-2)\n', (811, 815), True, 'import numpy as np\n'), ((999, 1009), 'numpy.exp', 'np.exp', (['(-4)'], {}), '(-4)\n', (1005, 1009), True, 'import numpy as np\n'), ((287, 297), 'numpy.cosh', 'np.cosh', (['(2)'], {}), '(2)\n', (294, 297), True, 'import numpy as np\n'), ((333, 343), 'numpy.exp', 'np.exp', (['(-2)'], {}), '(-2)\n', (339, 343), True, 'import numpy as np\n'), ((347, 357), 'numpy.sinh', 'np.sinh', (['(2)'], {}), '(2)\n', (354, 357), True, 'import numpy as np\n'), ((360, 370), 'numpy.sinh', 'np.sinh', (['(1)'], {}), '(1)\n', (367, 370), True, 'import numpy as np\n'), ((405, 415), 'numpy.sinh', 'np.sinh', (['(1)'], {}), '(1)\n', (412, 415), True, 'import numpy as np\n'), ((439, 449), 'numpy.exp', 'np.exp', (['(-2)'], {}), '(-2)\n', (445, 449), True, 'import numpy as np\n'), ((453, 463), 'numpy.sinh', 'np.sinh', (['(2)'], {}), '(2)\n', (460, 463), True, 'import numpy as np\n'), ((466, 476), 'numpy.sinh', 'np.sinh', (['(1)'], {}), '(1)\n', (473, 476), True, 'import numpy as np\n'), ((511, 521), 'numpy.sinh', 'np.sinh', (['(1)'], {}), '(1)\n', (518, 521), True, 'import numpy as np\n'), ((612, 622), 'numpy.sinh', 'np.sinh', (['(2)'], {}), '(2)\n', (619, 622), True, 'import numpy as np\n'), ((658, 668), 'numpy.exp', 'np.exp', (['(-2)'], {}), '(-2)\n', (664, 668), True, 'import numpy as np\n'), ((672, 682), 'numpy.cosh', 'np.cosh', (['(2)'], {}), '(2)\n', (679, 682), True, 'import numpy as np\n'), ((685, 695), 'numpy.cosh', 'np.cosh', (['(1)'], {}), '(1)\n', (692, 695), True, 'import numpy as np\n'), ((730, 740), 'numpy.cosh', 'np.cosh', (['(1)'], {}), '(1)\n', (737, 740), True, 'import numpy as np\n'), ((764, 774), 'numpy.exp', 'np.exp', (['(-2)'], {}), '(-2)\n', (770, 774), True, 'import numpy as np\n'), ((778, 788), 'numpy.cosh', 'np.cosh', (['(2)'], {}), '(2)\n', (785, 788), True, 'import numpy as np\n'), ((791, 801), 'numpy.cosh', 'np.cosh', (['(1)'], {}), '(1)\n', (798, 801), True, 'import numpy as np\n'), ((836, 846), 'numpy.cosh', 'np.cosh', (['(1)'], {}), '(1)\n', (843, 846), True, 'import numpy as np\n'), ((392, 402), 'numpy.sinh', 'np.sinh', (['(2)'], {}), '(2)\n', (399, 402), True, 'import numpy as np\n'), ((498, 508), 'numpy.sinh', 'np.sinh', (['(2)'], {}), '(2)\n', (505, 508), True, 'import numpy as np\n'), ((717, 727), 'numpy.cosh', 'np.cosh', (['(2)'], {}), '(2)\n', (724, 727), True, 'import numpy as np\n'), ((823, 833), 'numpy.cosh', 'np.cosh', (['(2)'], {}), '(2)\n', (830, 833), True, 'import numpy as np\n'), ((1026, 1035), 'numpy.exp', 
'np.exp', (['(1)'], {}), '(1)\n', (1032, 1035), True, 'import numpy as np\n'), ((2069, 2079), 'numpy.exp', 'np.exp', (['(-2)'], {}), '(-2)\n', (2075, 2079), True, 'import numpy as np\n'), ((2120, 2130), 'numpy.exp', 'np.exp', (['(-2)'], {}), '(-2)\n', (2126, 2130), True, 'import numpy as np\n'), ((2100, 2110), 'numpy.cosh', 'np.cosh', (['(1)'], {}), '(1)\n', (2107, 2110), True, 'import numpy as np\n'), ((2151, 2161), 'numpy.sinh', 'np.sinh', (['(1)'], {}), '(1)\n', (2158, 2161), True, 'import numpy as np\n'), ((2087, 2097), 'numpy.cosh', 'np.cosh', (['(2)'], {}), '(2)\n', (2094, 2097), True, 'import numpy as np\n'), ((2138, 2148), 'numpy.sinh', 'np.sinh', (['(2)'], {}), '(2)\n', (2145, 2148), True, 'import numpy as np\n')] |
"""Finds out all the people you need to follow to follow all the same people as another user. Then, optionally, follows them for you."""
import configparser
import csv
import errno
import os
import tweepy
from tqdm import tqdm
#Useful Constants
PATH_TO_TARGET_CSV = "./output/targetfriends.csv"
PATH_TO_USER_CSV = "./output/yourfriends.csv"
PATH_TO_DIFF_CSV = "./output/difffriends.csv"
# Getting API Keys
SECRETS = configparser.ConfigParser()
SECRETS.read("secrets.ini")
# Gonna have to get ur own keys to use this
API_KEY = SECRETS["API KEYS"]["ConsumerKey"]
API_SECRET = SECRETS["API KEYS"]["ConsumerSecret"]
# https://gist.github.com/garrettdreyfus/8153571
def yes_or_no(question):
while "the answer is invalid":
reply = str(input(question + ' (y/n): ')).lower().strip()
if reply[0] == 'y':
return True
if reply[0] == 'n':
return False
#Creating Folders
#https://stackoverflow.com/a/273227
try:
os.makedirs("./output")
except OSError as e:
if e.errno != errno.EEXIST:
raise
# Setting Up API
AUTH = tweepy.OAuthHandler(API_KEY, API_SECRET, 'oob')
REDIRECT_URL = AUTH.get_authorization_url()
print("Go to " + REDIRECT_URL +
" and sign in on the account you want to transfer your Following list to.")
API = tweepy.API(AUTH, wait_on_rate_limit=True, wait_on_rate_limit_notify=True)
AUTH.get_access_token(input("Enter the key that comes up here: "))
# https://stackoverflow.com/a/19302732
def list_to_csv(list_to_dump, filename):
with open(filename, "w") as output:
writer = csv.writer(output, lineterminator='\n')
for val in list_to_dump:
writer.writerow([val])
# https://stackoverflow.com/a/19302732
def two_lists_to_csv(header, in_list1, in_list2, filename):
list1 = [header] + in_list1
list2 = [" "] + in_list2
with open(filename, 'w', newline='') as csv_file:
writer = csv.writer(csv_file)
writer.writerows(zip(list1, list2))
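# resulting layout: row 1 is (header, " "), then one (id, name) pair per row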
# Wrote this one myself actually. It's for debugging.
def check_limits():
limits = API.rate_limit_status()['resources']
for category_name in limits:
category = limits[category_name]
for item_name in category:
item = limits[category_name][item_name]
if item['limit'] != item['remaining']:
print(item_name, item)
# https://stackoverflow.com/a/312464
def chunks(my_list, len_of_chunk):
"""Yield successive n-sized chunks from l."""
for i in range(0, len(my_list), len_of_chunk):
yield my_list[i:i + len_of_chunk]
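# e.g. list(chunks([1, 2, 3, 4, 5], 2)) -> [[1, 2], [3, 4], [5]]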
# https://stackoverflow.com/a/39320334
def get_100_usernames(list_of_ids):
""" can only do lookup in steps of 100;
so 'ids' should be a list of 100 ids
"""
user_objs = API.lookup_users(user_ids=list_of_ids)
return [user.screen_name for user in user_objs]
# This one too
def get_usernames(ids):
usernames = []
for chunk in tqdm(chunks(ids, 100), unit="hundred names"):
usernames += get_100_usernames(chunk)
return usernames
# Wow, this one as well
def retrieve_usernames(list_of_ids, dict_of_ids_to_names):
"""
For retrieving usernames when we've already gotten them from twitter.
For saving on API requests
"""
usernames = []
for user_id in list_of_ids:
usernames.append(dict_of_ids_to_names[user_id])
return usernames
# https://codereview.stackexchange.com/a/101947
def get_list_of_friends(target_id):
ids = []
for friend in tqdm(tweepy.Cursor(API.friends_ids, id=target_id).items(), unit="Friend"):
ids.append(friend)
return ids
def _check_csv_header_(filename, text_to_check_for):
try:
with open(filename) as csvfile:
if list(csv.reader(csvfile))[0][0] == text_to_check_for:
return True
return False
except IOError:
return False
def detect_progress(name_of_target, name_of_user):
"""
Returns Tuple (is_target_finished, is_user_finished, is_diff_finished)
"""
is_target_finished = _check_csv_header_(PATH_TO_TARGET_CSV, name_of_target)
is_user_finished = _check_csv_header_(PATH_TO_USER_CSV, name_of_user)
is_diff_finished = _check_csv_header_(PATH_TO_DIFF_CSV, name_of_target + " - " + name_of_user)
return (is_target_finished, is_user_finished, is_diff_finished)
def restore_progress(filename):
"""
Returns
-------
id_list : list
List of ids restored from the CSV
name_list : list
List of names restored from the CSV
"""
with open(filename) as csvfile:
csvfile = csv.reader(csvfile)
csvfile = list(map(list, zip(*csvfile))) #https://stackoverflow.com/a/6473724 Transposing lists
id_list = csvfile[0][1:]
id_list = [int(s) for s in id_list]
name_list = csvfile[1][1:]
return id_list, name_list
TARGET = input("Target Username (who we'll be copying from): ")
MY_SCREEN_NAME = API.me().screen_name
PROGRESS = detect_progress(TARGET, MY_SCREEN_NAME)
USE_PROGRESS = yes_or_no("Should we use progress from last time? (Choose no if the target has followed anyone since last time or you haven't run this before)")
if not PROGRESS[0] or not USE_PROGRESS: #If we haven't already finished getting friends from the target
print("Getting List of Friends (Following) of Target...")
TARGET_FRIEND_IDS = get_list_of_friends(TARGET)
print("Converting IDs to names...")
TARGET_FRIEND_NAMES = get_usernames(TARGET_FRIEND_IDS)
print("Saving to CSV...")
two_lists_to_csv(TARGET, TARGET_FRIEND_IDS, TARGET_FRIEND_NAMES, PATH_TO_TARGET_CSV)
else:
print("Restoring Progress on Target...")
TARGET_FRIEND_IDS, TARGET_FRIEND_NAMES = restore_progress(PATH_TO_TARGET_CSV)
#Save names for later
NAMES_DICT = dict(zip(TARGET_FRIEND_IDS, TARGET_FRIEND_NAMES))
print("Getting List of Your Friends (Following)...")
YOUR_FRIEND_IDS = get_list_of_friends(API.me().id)
print("Converting IDs to names...")
YOUR_FRIEND_NAMES = get_usernames(YOUR_FRIEND_IDS)
print("Saving to CSV...")
two_lists_to_csv(MY_SCREEN_NAME, YOUR_FRIEND_IDS, YOUR_FRIEND_NAMES, PATH_TO_USER_CSV)
print("Subtracting who you've already followed...")
DIFF_FRIEND_IDS = [f for f in TARGET_FRIEND_IDS if f not in YOUR_FRIEND_IDS]
print("Converting ids to names...")
DIFF_FRIEND_NAMES = retrieve_usernames(DIFF_FRIEND_IDS, NAMES_DICT)
print("Saving to CSV...")
two_lists_to_csv(TARGET+" - "+MY_SCREEN_NAME,DIFF_FRIEND_IDS, DIFF_FRIEND_NAMES, "./output/diffriends.csv")
print(TARGET_FRIEND_IDS)
print("To follow everyone that Target follows, you need to follow:\n\n\n" +
"\n@".join(DIFF_FRIEND_NAMES))
print("At some point your account may be limited and unable to follow any more people. Probably will go away. ¯\\_(ツ)_/¯")
if yes_or_no("Are you sure you want to (try to) follow %s users?" % len(DIFF_FRIEND_IDS)):
print("Begin following.")
for followtuple in zip(tqdm(DIFF_FRIEND_IDS, unit="Friend"), DIFF_FRIEND_NAMES):
user_id, name = followtuple
tqdm.write("Following @" + name + "...")
API.create_friendship(user_id)
| [
"configparser.ConfigParser",
"os.makedirs",
"tqdm.tqdm.write",
"tweepy.Cursor",
"csv.writer",
"tqdm.tqdm",
"tweepy.API",
"csv.reader",
"tweepy.OAuthHandler"
] | [((436, 463), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (461, 463), False, 'import configparser\n'), ((1119, 1166), 'tweepy.OAuthHandler', 'tweepy.OAuthHandler', (['API_KEY', 'API_SECRET', '"""oob"""'], {}), "(API_KEY, API_SECRET, 'oob')\n", (1138, 1166), False, 'import tweepy\n'), ((1335, 1408), 'tweepy.API', 'tweepy.API', (['AUTH'], {'wait_on_rate_limit': '(True)', 'wait_on_rate_limit_notify': '(True)'}), '(AUTH, wait_on_rate_limit=True, wait_on_rate_limit_notify=True)\n', (1345, 1408), False, 'import tweepy\n'), ((997, 1020), 'os.makedirs', 'os.makedirs', (['"""./output"""'], {}), "('./output')\n", (1008, 1020), False, 'import os\n'), ((1620, 1659), 'csv.writer', 'csv.writer', (['output'], {'lineterminator': '"""\n"""'}), "(output, lineterminator='\\n')\n", (1630, 1659), False, 'import csv\n'), ((1969, 1989), 'csv.writer', 'csv.writer', (['csv_file'], {}), '(csv_file)\n', (1979, 1989), False, 'import csv\n'), ((4726, 4745), 'csv.reader', 'csv.reader', (['csvfile'], {}), '(csvfile)\n', (4736, 4745), False, 'import csv\n'), ((7107, 7143), 'tqdm.tqdm', 'tqdm', (['DIFF_FRIEND_IDS'], {'unit': '"""Friend"""'}), "(DIFF_FRIEND_IDS, unit='Friend')\n", (7111, 7143), False, 'from tqdm import tqdm\n'), ((7211, 7251), 'tqdm.tqdm.write', 'tqdm.write', (["('Following @' + name + '...')"], {}), "('Following @' + name + '...')\n", (7221, 7251), False, 'from tqdm import tqdm\n'), ((3597, 3641), 'tweepy.Cursor', 'tweepy.Cursor', (['API.friends_ids'], {'id': 'target_id'}), '(API.friends_ids, id=target_id)\n', (3610, 3641), False, 'import tweepy\n'), ((3839, 3858), 'csv.reader', 'csv.reader', (['csvfile'], {}), '(csvfile)\n', (3849, 3858), False, 'import csv\n')] |
# Thanks `https://github.com/pypa/sampleproject`!!
from setuptools import setup, find_packages
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'VERSION'), 'r', encoding='utf-8') as f:
version = f.read().strip()
with open(path.join(here, 'README.md'), 'r', encoding='utf-8') as f:
long_description = f.read()
setup(
name = 'brutelogger',
version = version,
description = 'A brutish file logger for when you just need to `tee` your screen.',
long_description = long_description,
long_description_content_type='text/markdown',
url = 'https://github.com/giuse/brutelogger',
author = '<NAME>',
author_email = '<EMAIL>',
license = 'MIT',
classifiers = [
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
],
keywords = 'logging tee',
packages = find_packages(exclude=['contrib', 'docs', 'tests']), # Required
python_requires = '>=3.6, <4',
install_requires = [],
project_urls={
'Bug Reports' : 'https://github.com/giuse/brutelogger/issues',
'Source' : 'https://github.com/giuse/brutelogger/',
},
download_url = f"https://github.com/giuse/brutelogger/archive/{version}.tar.gz",
)
| [
"os.path.dirname",
"setuptools.find_packages",
"os.path.join"
] | [((136, 158), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (148, 158), False, 'from os import path\n'), ((171, 197), 'os.path.join', 'path.join', (['here', '"""VERSION"""'], {}), "(here, 'VERSION')\n", (180, 197), False, 'from os import path\n'), ((267, 295), 'os.path.join', 'path.join', (['here', '"""README.md"""'], {}), "(here, 'README.md')\n", (276, 295), False, 'from os import path\n'), ((1216, 1267), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['contrib', 'docs', 'tests']"}), "(exclude=['contrib', 'docs', 'tests'])\n", (1229, 1267), False, 'from setuptools import setup, find_packages\n')] |
# -*- coding: utf-8 -*-
"""
Created on Wed Jun 26 19:39:00 2019
@author: hehehe
"""
from __future__ import absolute_import, print_function
import json
from tweepy import OAuthHandler, Stream, StreamListener
# Create your Twitter API app at the following link: https://developer.twitter.com/en/apps
consumer_key = "masukkan consumer_key"
consumer_secret = "masukkan consumer_secret"
access_token = "masukkan access_token"
access_token_secret = "masukkan access_token_secret"
class TukangBaca(StreamListener):
""" A listener handles tweets that are received from the stream.
This is a basic listener that just prints received tweets to stdout.
"""
def on_data(self, data):
baca = json.loads(data)
print("\n\n pada", baca["created_at"])
print(baca["id_str"])
print("Tweet: ",baca["text"])
print("Jumlah Retweet: ",baca["retweet_count"])
print("Jumlah favorit: ", baca["favorite_count"])
print(baca["user"])
return True
if __name__ == '__main__':
l = TukangBaca()
auth = OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
query=input("Masukkan Keyword : ")
stream = Stream(auth, l)
stream.filter(track=[query])
| [
"tweepy.Stream",
"tweepy.OAuthHandler"
] | [((1181, 1196), 'tweepy.Stream', 'Stream', (['auth', 'l'], {}), '(auth, l)\n', (1187, 1196), False, 'from tweepy import OAuthHandler, Stream, StreamListener\n'), ((1031, 1074), 'tweepy.OAuthHandler', 'OAuthHandler', (['consumer_key', 'consumer_secret'], {}), '(consumer_key, consumer_secret)\n', (1043, 1074), False, 'from tweepy import OAuthHandler, Stream, StreamListener\n')] |
from flask import Blueprint
api = Blueprint("data_manage_api", __name__)
from . import data_venation
from . import operator_manage
from . import model_manage
from . import resource_manage
from . import pipeline_manage
from . import trainedmodel_manage
| [
"flask.Blueprint"
] | [((35, 73), 'flask.Blueprint', 'Blueprint', (['"""data_manage_api"""', '__name__'], {}), "('data_manage_api', __name__)\n", (44, 73), False, 'from flask import Blueprint\n')] |
import os
import typing
import cfg_exporter.custom as custom
from cfg_exporter.const import DataType
from cfg_exporter.const import TEMPLATE_EXTENSION
from cfg_exporter.exports.base.export import BaseExport
from cfg_exporter.lang_template import lang
from cfg_exporter.tables.base.type import DefaultValue
EXTENSION = 'py'
BASE_TEMPLATE_PATH = os.path.join(os.path.dirname(__file__), 'template', EXTENSION)
BASE_TEMPLATE = f'{EXTENSION}_base.{TEMPLATE_EXTENSION}'
def _by_default(value):
"""
    Default formatting for Iter-type values
"""
return f'{value}'
def _by_reference(replace_table):
"""
    Reference-based formatting for Iter-type values
"""
return lambda value: _get_reference(replace_table, value)
def _get_reference(replace_table, value):
key = _by_default(value)
if key in replace_table:
_, layer_num, index_num = replace_table[key]
return f'_rt_{layer_num}[{index_num}]'
return f'{value}'
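# e.g. a value stored in replace_table at layer 1, index 3 is rendered as "_rt_1[3]"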
# formatter function for Iter-type values
_format_iter_value = _by_default
def format_value(value):
if isinstance(value, DataType.str):
return f'"{value}"'
elif isinstance(value, DataType.lang):
return f'"{lang(value.text)}"'
elif isinstance(value, DataType.iter):
return _format_iter_value(value)
elif isinstance(value, DefaultValue):
return format_value(value.text)
else:
return f'{value}'
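# e.g. format_value("abc") -> '"abc"', format_value(3) -> '3'; DefaultValue
# wrappers are unwrapped and formatted recursively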
_data_type_details = {
'lang': 'str',
'iter': 'typing.Union[list, tuple]',
'raw': 'typing.Any'
}
class PyExport(BaseExport):
def __init__(self, args):
global_vars = {'format_value': format_value}
super().__init__(args, BASE_TEMPLATE_PATH, [EXTENSION], global_vars)
def export(self, table_obj) -> typing.NoReturn:
global _format_iter_value
if self.args.py_optimize:
replace_table, reference_table = _analyze_reference_table(table_obj)
default_values = custom.analyze_default_value(table_obj)
_format_iter_value = _by_reference(replace_table)
else:
reference_table = []
default_values = {}
_format_iter_value = _by_default
ctx = {
'table_obj': table_obj, 'prefix': self.args.file_prefix,
'reference_table': reference_table, 'default_values': default_values
}
table_name = table_obj.table_name
filename = f'{table_name}.{EXTENSION}'
if table_name in self.extend_templates.get(EXTENSION, []):
self.render(filename, f'{table_name}.{EXTENSION}.{TEMPLATE_EXTENSION}', ctx)
else:
self.render(filename, BASE_TEMPLATE, ctx)
def file_desc(self) -> str:
return "######################################\n" \
"# AUTO GENERATE BY CFG_EXPORTER #\n" \
"######################################\n"
@staticmethod
def naming_convention() -> typing.Any:
import cfg_exporter.util as util
return util.snake_case
@staticmethod
def data_type_detail(data_type_str) -> str:
return _data_type_details.get(data_type_str, data_type_str)
def _analyze_reference_table(table_obj):
"""
    Build the replacement table and derive the per-layer reference table
"""
replace_table = {}
for field_name, data_type in zip(table_obj.field_names, table_obj.data_types):
if field_name in table_obj.key_field_name_iter:
continue
if data_type is DataType.iter:
for value in table_obj.data_iter_by_field_names(field_name):
_stat_replace_table_layer(replace_table, value, 0)
_stat_replace_table_index(replace_table)
reference_table = _stat_reference_table(replace_table)
return replace_table, reference_table
def _stat_replace_table_layer(reference_table, value, layer_num):
"""
    Record the highest nesting layer seen for each value in the replacement table
"""
if isinstance(value, DataType.iter.value.real_type):
for child_value in value:
if isinstance(child_value, DataType.iter.value.real_type):
_stat_replace_table_layer(reference_table, child_value, layer_num + 1)
key = _by_default(value)
if key in reference_table:
if reference_table[key][1] < layer_num:
reference_table[key] = (value, layer_num)
else:
reference_table[key] = (value, layer_num)
def _stat_replace_table_index(reference_table):
"""
    Assign per-layer element indices to entries of the replacement table
"""
index_dict = {}
for key, (value, layer_num) in reference_table.items():
replace_rt = _replace_rt_table(reference_table, value)
index = index_dict.get(layer_num, 0)
        index_dict[layer_num] = index + 1
reference_table[key] = (replace_rt, layer_num, index)
def _replace_rt_table(reference_table, value):
"""
    Rewrite nested child values as references into the replacement table
"""
if isinstance(value, (list, tuple)):
t = ', '.join(_get_reference(reference_table, v) if isinstance(v, (list, tuple)) else f'{v}' for v in value)
if isinstance(value, list):
return f'[{t}]'
elif isinstance(value, tuple):
return f'({t})'
return f'{value}'
def _stat_reference_table(replace_table):
"""
    Group replacement-table entries by layer to build the reference table
"""
rt_dict = {}
for rt_value, layer_num, _index_num in replace_table.values():
layer_list = rt_dict.get(layer_num, [])
layer_list.append(rt_value)
rt_dict[layer_num] = layer_list
return sorted(rt_dict.items(), key=lambda item: item[0], reverse=True)
| [
"os.path.dirname",
"cfg_exporter.custom.analyze_default_value",
"cfg_exporter.lang_template.lang"
] | [((359, 384), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (374, 384), False, 'import os\n'), ((1861, 1900), 'cfg_exporter.custom.analyze_default_value', 'custom.analyze_default_value', (['table_obj'], {}), '(table_obj)\n', (1889, 1900), True, 'import cfg_exporter.custom as custom\n'), ((1106, 1122), 'cfg_exporter.lang_template.lang', 'lang', (['value.text'], {}), '(value.text)\n', (1110, 1122), False, 'from cfg_exporter.lang_template import lang\n')] |
from typing import Any
from starlette.datastructures import State
class DefaultState:
state = State()
def get(self,key:str, value: Any = None) -> Any:
if hasattr(self.state, key):
return getattr(self.state, key)
else:
            if value is None:
                raise Exception('state has no attribute %s' % key)
else:
return value
def set(self, key:str, value: Any) -> None:
if hasattr(self.state, key):
            raise Exception('state already has attribute %s' % key)
else:
setattr(self.state, key, value)
def update(self, key:str, value: Any) -> None:
if hasattr(self.state, key):
setattr(self.state, key, value)
default_state = DefaultState()
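# usage sketch (names illustrative):
#   default_state.set("db", conn)      # first assignment only; setting twice raises
#   conn = default_state.get("db")     # raises unless a fallback `value` is given
#   default_state.update("db", other)  # silently does nothing if the key is absent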
| [
"starlette.datastructures.State"
] | [((102, 109), 'starlette.datastructures.State', 'State', ([], {}), '()\n', (107, 109), False, 'from starlette.datastructures import State\n')] |
# -*- coding: utf-8 -*-
import os
import sys
# ensure `tests` directory path is on top of Python's module search
filedir = os.path.dirname(__file__)
sys.path.insert(0, filedir)
while filedir in sys.path[1:]:
sys.path.pop(sys.path.index(filedir)) # avoid duplication
import pytest
import numpy as np
from copy import deepcopy
from backend import K, AE_CONFIGS, BASEDIR, tempdir, notify, make_autoencoder
from backend import _init_session, _do_test_load, _get_test_names
from deeptrain.util.preprocessors import Preprocessor
from deeptrain.metrics import _standardize, _weighted_loss
#### CONFIGURE TESTING #######################################################
batch_size = 128
width, height = 28, 28
channels = 1
datadir = os.path.join(BASEDIR, 'tests', 'data', 'image')
tests_done = {}
CONFIGS = deepcopy(AE_CONFIGS)
CONFIGS['model']['batch_shape'] = (batch_size, width, height, channels)
CONFIGS['datagen']['batch_size'] = batch_size
CONFIGS['val_datagen']['batch_size'] = batch_size
def init_session(C, weights_path=None, loadpath=None, model=None):
return _init_session(C, weights_path=weights_path, loadpath=loadpath,
model=model, model_fn=make_autoencoder)
def mean_L_error(y_true, y_pred, sample_weight=1):
L = 1.5 # configurable
y_true, y_pred, sample_weight = _standardize(y_true, y_pred,
sample_weight)
return _weighted_loss(np.mean(np.abs(y_true - y_pred) ** L, axis=-1),
sample_weight)
def mLe(y_true, y_pred):
L = 1.5 # configurable
return K.mean(K.pow(K.abs(y_true - y_pred), L), axis=-1)
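# both losses compute mean(|y_true - y_pred| ** 1.5) over the last axis;
# mLe is the backend-tensor counterpart of the numpy mean_L_error above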
def numpy_loader(self, set_num):
# allow_pickle is irrelevant here, just for demo
return np.load(self._path(set_num), allow_pickle=True)
class RandCropPreprocessor(Preprocessor):
"""2D random crop. MNIST is 28x28, we try 25x25 crops,
e.g. batch[2:27, 3:28]."""
def __init__(self, size, crop_batch=True, crop_labels=False,
crop_same=False):
# length -> (length, length)
# (width, height) -> (width, height)
assert isinstance(size, (tuple, int))
self.size = size if isinstance(size, tuple) else (size, size)
self.crop_batch = crop_batch
self.crop_labels = crop_labels
self.crop_same = crop_same
def process(self, batch, labels):
if self.crop_batch:
(x_start, x_end), (y_start, y_end) = self._make_crop_mask(batch)
batch = batch[:, x_start:x_end, y_start:y_end]
if self.crop_labels:
if not self.crop_same or not self.crop_batch:
(x_start, x_end), (y_start, y_end
) = self._make_crop_mask(labels)
labels = labels[:, x_start:x_end, y_start:y_end]
return batch, labels
def _make_crop_mask(self, data):
_, w, h, *_ = data.shape # (samples, width, height, channels)
x_offset = np.random.randint(0, w - self.size[0])
y_offset = np.random.randint(0, h - self.size[1])
x_start, x_end = x_offset, x_offset + self.size[0]
y_start, y_end = y_offset, y_offset + self.size[1]
return (x_start, x_end), (y_start, y_end)
##############################################################################
@notify(tests_done)
def test_main():
C = deepcopy(AE_CONFIGS)
C['model' ].update({'loss': mLe,
'batch_shape': (128, 24, 24, 1)})
C['datagen' ].update({'data_loader': numpy_loader,
'preprocessor': RandCropPreprocessor(size=24)})
C['val_datagen'].update({'data_loader': numpy_loader,
'preprocessor': RandCropPreprocessor(size=24)})
C['traingen']['custom_metrics'] = {'mLe': mean_L_error}
with tempdir(C['traingen']['logs_dir']), \
tempdir(C['traingen']['best_models_dir']):
tg = init_session(C)
tg.train()
_do_test_load(tg, C, init_session)
##############################################################################
tests_done.update({name: None for name in _get_test_names(__name__)})
if __name__ == '__main__':
pytest.main([__file__, "-s"])
| [
"numpy.abs",
"sys.path.insert",
"backend.notify",
"backend.tempdir",
"os.path.join",
"backend._do_test_load",
"pytest.main",
"os.path.dirname",
"numpy.random.randint",
"sys.path.index",
"backend._get_test_names",
"copy.deepcopy",
"backend.K.abs",
"deeptrain.metrics._standardize",
"backen... | [((123, 148), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (138, 148), False, 'import os\n'), ((149, 176), 'sys.path.insert', 'sys.path.insert', (['(0)', 'filedir'], {}), '(0, filedir)\n', (164, 176), False, 'import sys\n'), ((734, 781), 'os.path.join', 'os.path.join', (['BASEDIR', '"""tests"""', '"""data"""', '"""image"""'], {}), "(BASEDIR, 'tests', 'data', 'image')\n", (746, 781), False, 'import os\n'), ((809, 829), 'copy.deepcopy', 'deepcopy', (['AE_CONFIGS'], {}), '(AE_CONFIGS)\n', (817, 829), False, 'from copy import deepcopy\n'), ((3320, 3338), 'backend.notify', 'notify', (['tests_done'], {}), '(tests_done)\n', (3326, 3338), False, 'from backend import K, AE_CONFIGS, BASEDIR, tempdir, notify, make_autoencoder\n'), ((1078, 1184), 'backend._init_session', '_init_session', (['C'], {'weights_path': 'weights_path', 'loadpath': 'loadpath', 'model': 'model', 'model_fn': 'make_autoencoder'}), '(C, weights_path=weights_path, loadpath=loadpath, model=model,\n model_fn=make_autoencoder)\n', (1091, 1184), False, 'from backend import _init_session, _do_test_load, _get_test_names\n'), ((1323, 1366), 'deeptrain.metrics._standardize', '_standardize', (['y_true', 'y_pred', 'sample_weight'], {}), '(y_true, y_pred, sample_weight)\n', (1335, 1366), False, 'from deeptrain.metrics import _standardize, _weighted_loss\n'), ((3364, 3384), 'copy.deepcopy', 'deepcopy', (['AE_CONFIGS'], {}), '(AE_CONFIGS)\n', (3372, 3384), False, 'from copy import deepcopy\n'), ((4193, 4222), 'pytest.main', 'pytest.main', (["[__file__, '-s']"], {}), "([__file__, '-s'])\n", (4204, 4222), False, 'import pytest\n'), ((225, 248), 'sys.path.index', 'sys.path.index', (['filedir'], {}), '(filedir)\n', (239, 248), False, 'import sys\n'), ((2973, 3011), 'numpy.random.randint', 'np.random.randint', (['(0)', '(w - self.size[0])'], {}), '(0, w - self.size[0])\n', (2990, 3011), True, 'import numpy as np\n'), ((3031, 3069), 'numpy.random.randint', 'np.random.randint', (['(0)', '(h - self.size[1])'], {}), '(0, h - self.size[1])\n', (3048, 3069), True, 'import numpy as np\n'), ((3830, 3864), 'backend.tempdir', 'tempdir', (["C['traingen']['logs_dir']"], {}), "(C['traingen']['logs_dir'])\n", (3837, 3864), False, 'from backend import K, AE_CONFIGS, BASEDIR, tempdir, notify, make_autoencoder\n'), ((3876, 3917), 'backend.tempdir', 'tempdir', (["C['traingen']['best_models_dir']"], {}), "(C['traingen']['best_models_dir'])\n", (3883, 3917), False, 'from backend import K, AE_CONFIGS, BASEDIR, tempdir, notify, make_autoencoder\n'), ((3975, 4009), 'backend._do_test_load', '_do_test_load', (['tg', 'C', 'init_session'], {}), '(tg, C, init_session)\n', (3988, 4009), False, 'from backend import _init_session, _do_test_load, _get_test_names\n'), ((1610, 1632), 'backend.K.abs', 'K.abs', (['(y_true - y_pred)'], {}), '(y_true - y_pred)\n', (1615, 1632), False, 'from backend import K, AE_CONFIGS, BASEDIR, tempdir, notify, make_autoencoder\n'), ((4133, 4158), 'backend._get_test_names', '_get_test_names', (['__name__'], {}), '(__name__)\n', (4148, 4158), False, 'from backend import _init_session, _do_test_load, _get_test_names\n'), ((1450, 1473), 'numpy.abs', 'np.abs', (['(y_true - y_pred)'], {}), '(y_true - y_pred)\n', (1456, 1473), True, 'import numpy as np\n')] |
"""
File copy from https://github.com/moddevices/lilvlib
"""
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------------------------------------
# Imports
import json
import lilv
import os
from math import fmod
# ------------------------------------------------------------------------------------------------------------
# Utilities
def LILV_FOREACH(collection, func):
itr = collection.begin()
while itr:
yield func(collection.get(itr))
itr = collection.next(itr)
class NS(object):
def __init__(self, world, base):
self.world = world
self.base = base
self._cache = {}
def __getattr__(self, attr):
if attr.endswith("_"):
attr = attr[:-1]
if attr not in self._cache:
self._cache[attr] = lilv.Node(self.world.new_uri(self.base+attr))
return self._cache[attr]
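# e.g. NS(world, lilv.LILV_NS_RDF).type_ builds (and caches) the node for the
# rdf:type URI; the trailing underscore is stripped so attribute names can
# dodge Python keywords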
def is_integer(string):
return string.strip().lstrip("-+").isdigit()
def get_short_port_name(portName):
if len(portName) <= 16:
return portName
portName = portName.split("/",1)[0].split(" (",1)[0].split(" [",1)[0].strip()
# cut stuff if too big
if len(portName) > 16:
portName = portName[0] + portName[1:].replace("a","").replace("e","").replace("i","").replace("o","").replace("u","")
if len(portName) > 16:
portName = portName[:16]
return portName.strip()
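# names longer than 16 chars keep the first character, drop later lowercase
# vowels, and are finally truncated to 16 characters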
# ------------------------------------------------------------------------------------------------------------
def get_category(nodes):
lv2_category_indexes = {
'DelayPlugin': ['Delay'],
'DistortionPlugin': ['Distortion'],
'WaveshaperPlugin': ['Distortion', 'Waveshaper'],
'DynamicsPlugin': ['Dynamics'],
'AmplifierPlugin': ['Dynamics', 'Amplifier'],
'CompressorPlugin': ['Dynamics', 'Compressor'],
'ExpanderPlugin': ['Dynamics', 'Expander'],
'GatePlugin': ['Dynamics', 'Gate'],
'LimiterPlugin': ['Dynamics', 'Limiter'],
'FilterPlugin': ['Filter'],
'AllpassPlugin': ['Filter', 'Allpass'],
'BandpassPlugin': ['Filter', 'Bandpass'],
'CombPlugin': ['Filter', 'Comb'],
'EQPlugin': ['Filter', 'Equaliser'],
'MultiEQPlugin': ['Filter', 'Equaliser', 'Multiband'],
'ParaEQPlugin': ['Filter', 'Equaliser', 'Parametric'],
'HighpassPlugin': ['Filter', 'Highpass'],
'LowpassPlugin': ['Filter', 'Lowpass'],
'GeneratorPlugin': ['Generator'],
'ConstantPlugin': ['Generator', 'Constant'],
'InstrumentPlugin': ['Generator', 'Instrument'],
'OscillatorPlugin': ['Generator', 'Oscillator'],
'ModulatorPlugin': ['Modulator'],
'ChorusPlugin': ['Modulator', 'Chorus'],
'FlangerPlugin': ['Modulator', 'Flanger'],
'PhaserPlugin': ['Modulator', 'Phaser'],
'ReverbPlugin': ['Reverb'],
'SimulatorPlugin': ['Simulator'],
'SpatialPlugin': ['Spatial'],
'SpectralPlugin': ['Spectral'],
'PitchPlugin': ['Spectral', 'Pitch Shifter'],
'UtilityPlugin': ['Utility'],
'AnalyserPlugin': ['Utility', 'Analyser'],
'ConverterPlugin': ['Utility', 'Converter'],
'FunctionPlugin': ['Utility', 'Function'],
'MixerPlugin': ['Utility', 'Mixer'],
#'MIDIPlugin': ['MIDI', 'Utility'],
}
mod_category_indexes = {
'DelayPlugin': ['Delay'],
'DistortionPlugin': ['Distortion'],
'DynamicsPlugin': ['Dynamics'],
'FilterPlugin': ['Filter'],
'GeneratorPlugin': ['Generator'],
'ModulatorPlugin': ['Modulator'],
'ReverbPlugin': ['Reverb'],
'SimulatorPlugin': ['Simulator'],
'SpatialPlugin': ['Spatial'],
'SpectralPlugin': ['Spectral'],
'UtilityPlugin': ['Utility'],
'MIDIPlugin': ['Utility', 'MIDI'],
}
def fill_in_lv2_category(node):
category = node.as_string().replace("http://lv2plug.in/ns/lv2core#","")
if category in lv2_category_indexes.keys():
return lv2_category_indexes[category]
return []
def fill_in_mod_category(node):
category = node.as_string().replace("http://moddevices.com/ns/mod#","")
if category in mod_category_indexes.keys():
return mod_category_indexes[category]
return []
categories = []
for cat in [cat for catlist in LILV_FOREACH(nodes, fill_in_mod_category) for cat in catlist]:
if cat not in categories:
categories.append(cat)
if len(categories) > 0:
return categories
for cat in [cat for catlist in LILV_FOREACH(nodes, fill_in_lv2_category) for cat in catlist]:
if cat not in categories:
categories.append(cat)
return categories
def get_port_data(port, subj):
nodes = port.get_value(subj.me)
data = []
it = lilv.lilv_nodes_begin(nodes)
while not lilv.lilv_nodes_is_end(nodes, it):
dat = lilv.lilv_nodes_get(nodes, it)
it = lilv.lilv_nodes_next(nodes, it)
if dat is None:
continue
data.append(lilv.lilv_node_as_string(dat))
return data
def get_port_unit(miniuri):
# using label, render, symbol
units = {
's': ["seconds", "%f s", "s"],
'ms': ["milliseconds", "%f ms", "ms"],
'min': ["minutes", "%f mins", "min"],
'bar': ["bars", "%f bars", "bars"],
'beat': ["beats", "%f beats", "beats"],
'frame': ["audio frames", "%f frames", "frames"],
'm': ["metres", "%f m", "m"],
'cm': ["centimetres", "%f cm", "cm"],
'mm': ["millimetres", "%f mm", "mm"],
'km': ["kilometres", "%f km", "km"],
'inch': ["inches", """%f\"""", "in"],
'mile': ["miles", "%f mi", "mi"],
'db': ["decibels", "%f dB", "dB"],
'pc': ["percent", "%f%%", "%"],
'coef': ["coefficient", "* %f", "*"],
'hz': ["hertz", "%f Hz", "Hz"],
'khz': ["kilohertz", "%f kHz", "kHz"],
'mhz': ["megahertz", "%f MHz", "MHz"],
'bpm': ["beats per minute", "%f BPM", "BPM"],
'oct': ["octaves", "%f octaves", "oct"],
'cent': ["cents", "%f ct", "ct"],
'semitone12TET': ["semitones", "%f semi", "semi"],
'degree': ["degrees", "%f deg", "deg"],
'midiNote': ["MIDI note", "MIDI note %d", "note"],
}
if miniuri in units.keys():
return units[miniuri]
return ("","","")
# ------------------------------------------------------------------------------------------------------------
# get_bundle_dirname
def get_bundle_dirname(bundleuri):
bundle = lilv.lilv_uri_to_path(bundleuri)
if not os.path.exists(bundle):
raise IOError(bundleuri)
if os.path.isfile(bundle):
bundle = os.path.dirname(bundle)
return bundle
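# e.g. get_bundle_dirname("file:///usr/lib/lv2/foo.lv2/manifest.ttl")
# -> "/usr/lib/lv2/foo.lv2" (a hypothetical path, for illustration).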
# ------------------------------------------------------------------------------------------------------------
# get_pedalboard_info
# Get info from an lv2 bundle
# @a bundle is a string, consisting of a directory in the filesystem (absolute pathname).
def get_pedalboard_info(bundle):
# lilv wants the last character as the separator
bundle = os.path.abspath(bundle)
if not bundle.endswith(os.sep):
bundle += os.sep
# Create our own unique lilv world
# We'll load a single bundle and get all plugins from it
world = lilv.World()
# this is needed when loading specific bundles instead of load_all
# (these functions are not exposed via World yet)
lilv.lilv_world_load_specifications(world.me)
lilv.lilv_world_load_plugin_classes(world.me)
# convert bundle string into a lilv node
bundlenode = lilv.lilv_new_file_uri(world.me, None, bundle)
# load the bundle
world.load_bundle(bundlenode)
# free bundlenode, no longer needed
lilv.lilv_node_free(bundlenode)
# get all plugins in the bundle
plugins = world.get_all_plugins()
# make sure the bundle includes 1 and only 1 plugin (the pedalboard)
if plugins.size() != 1:
        raise Exception('get_pedalboard_info(%s) - bundle has 0 or > 1 plugin' % bundle)
# no indexing in python-lilv yet, just get the first item
plugin = None
for p in plugins:
plugin = p
break
if plugin is None:
        raise Exception('get_pedalboard_info(%s) - failed to get plugin, you are using an old lilv!' % bundle)
# define the needed stuff
ns_rdf = NS(world, lilv.LILV_NS_RDF)
ns_lv2core = NS(world, lilv.LILV_NS_LV2)
ns_ingen = NS(world, "http://drobilla.net/ns/ingen#")
ns_mod = NS(world, "http://moddevices.com/ns/mod#")
ns_modpedal = NS(world, "http://moddevices.com/ns/modpedal#")
# check if the plugin is a pedalboard
def fill_in_type(node):
return node.as_string()
plugin_types = [i for i in LILV_FOREACH(plugin.get_value(ns_rdf.type_), fill_in_type)]
if "http://moddevices.com/ns/modpedal#Pedalboard" not in plugin_types:
        raise Exception('get_pedalboard_info(%s) - plugin has no mod:Pedalboard type' % bundle)
# let's get all the info now
ingenarcs = []
ingenblocks = []
info = {
'name' : plugin.get_name().as_string(),
'uri' : plugin.get_uri().as_string(),
'author': plugin.get_author_name().as_string() or "", # Might be empty
'hardware': {
# we save this info later
'audio': {
'ins' : 0,
'outs': 0
},
'cv': {
'ins' : 0,
'outs': 0
},
'midi': {
'ins' : 0,
'outs': 0
}
},
'size': {
'width' : plugin.get_value(ns_modpedal.width).get_first().as_int(),
'height': plugin.get_value(ns_modpedal.height).get_first().as_int(),
},
'screenshot' : os.path.basename(plugin.get_value(ns_modpedal.screenshot).get_first().as_string() or ""),
'thumbnail' : os.path.basename(plugin.get_value(ns_modpedal.thumbnail).get_first().as_string() or ""),
'connections': [], # we save this info later
'plugins' : [] # we save this info later
}
# connections
arcs = plugin.get_value(ns_ingen.arc)
it = arcs.begin()
while not arcs.is_end(it):
arc = arcs.get(it)
it = arcs.next(it)
if arc.me is None:
continue
head = lilv.lilv_world_get(world.me, arc.me, ns_ingen.head.me, None)
tail = lilv.lilv_world_get(world.me, arc.me, ns_ingen.tail.me, None)
if head is None or tail is None:
continue
ingenarcs.append({
"source": lilv.lilv_uri_to_path(lilv.lilv_node_as_string(tail)).replace(bundle,"",1),
"target": lilv.lilv_uri_to_path(lilv.lilv_node_as_string(head)).replace(bundle,"",1)
})
# hardware ports
handled_port_uris = []
ports = plugin.get_value(ns_lv2core.port)
it = ports.begin()
while not ports.is_end(it):
port = ports.get(it)
it = ports.next(it)
if port.me is None:
continue
# check if we already handled this port
port_uri = port.as_uri()
if port_uri in handled_port_uris:
continue
if port_uri.endswith("/control_in") or port_uri.endswith("/control_out"):
continue
handled_port_uris.append(port_uri)
# get types
port_types = lilv.lilv_world_find_nodes(world.me, port.me, ns_rdf.type_.me, None)
if port_types is None:
continue
portDir = "" # input or output
portType = "" # atom, audio or cv
it2 = lilv.lilv_nodes_begin(port_types)
while not lilv.lilv_nodes_is_end(port_types, it2):
port_type = lilv.lilv_nodes_get(port_types, it2)
it2 = lilv.lilv_nodes_next(port_types, it2)
if port_type is None:
continue
port_type_uri = lilv.lilv_node_as_uri(port_type)
if port_type_uri == "http://lv2plug.in/ns/lv2core#InputPort":
portDir = "input"
elif port_type_uri == "http://lv2plug.in/ns/lv2core#OutputPort":
portDir = "output"
elif port_type_uri == "http://lv2plug.in/ns/lv2core#AudioPort":
portType = "audio"
elif port_type_uri == "http://lv2plug.in/ns/lv2core#CVPort":
portType = "cv"
elif port_type_uri == "http://lv2plug.in/ns/ext/atom#AtomPort":
portType = "atom"
if not (portDir or portType):
continue
if portType == "audio":
if portDir == "input":
info['hardware']['audio']['ins'] += 1
else:
info['hardware']['audio']['outs'] += 1
elif portType == "atom":
if portDir == "input":
info['hardware']['midi']['ins'] += 1
else:
info['hardware']['midi']['outs'] += 1
elif portType == "cv":
if portDir == "input":
info['hardware']['cv']['ins'] += 1
else:
info['hardware']['cv']['outs'] += 1
# plugins
blocks = plugin.get_value(ns_ingen.block)
it = blocks.begin()
while not blocks.is_end(it):
block = blocks.get(it)
it = blocks.next(it)
if block.me is None:
continue
protouri1 = lilv.lilv_world_get(world.me, block.me, ns_lv2core.prototype.me, None)
protouri2 = lilv.lilv_world_get(world.me, block.me, ns_ingen.prototype.me, None)
if protouri1 is not None:
proto = protouri1
elif protouri2 is not None:
proto = protouri2
else:
continue
instance = lilv.lilv_uri_to_path(lilv.lilv_node_as_string(block.me)).replace(bundle,"",1)
uri = lilv.lilv_node_as_uri(proto)
enabled = lilv.lilv_world_get(world.me, block.me, ns_ingen.enabled.me, None)
builder = lilv.lilv_world_get(world.me, block.me, ns_mod.builderVersion.me, None)
release = lilv.lilv_world_get(world.me, block.me, ns_mod.releaseNumber.me, None)
minorver = lilv.lilv_world_get(world.me, block.me, ns_lv2core.minorVersion.me, None)
microver = lilv.lilv_world_get(world.me, block.me, ns_lv2core.microVersion.me, None)
ingenblocks.append({
"instance": instance,
"uri" : uri,
"x" : lilv.lilv_node_as_float(lilv.lilv_world_get(world.me, block.me, ns_ingen.canvasX.me, None)),
"y" : lilv.lilv_node_as_float(lilv.lilv_world_get(world.me, block.me, ns_ingen.canvasY.me, None)),
"enabled" : lilv.lilv_node_as_bool(enabled) if enabled is not None else False,
"builder" : lilv.lilv_node_as_int(builder) if builder else 0,
"release" : lilv.lilv_node_as_int(release) if release else 0,
"minorVersion": lilv.lilv_node_as_int(minorver) if minorver else 0,
"microVersion": lilv.lilv_node_as_int(microver) if microver else 0,
})
info['connections'] = ingenarcs
info['plugins'] = ingenblocks
return info
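# The returned dict has the shape (values abbreviated):
# { 'name': ..., 'uri': ..., 'author': ...,
#   'hardware': {'audio'/'cv'/'midi': {'ins', 'outs'}}, 'size': {'width', 'height'},
#   'screenshot': ..., 'thumbnail': ...,
#   'connections': [{'source', 'target'}, ...],
#   'plugins': [{'instance', 'uri', 'x', 'y', 'enabled', ...}, ...] }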
# ------------------------------------------------------------------------------------------------------------
# get_pedalboard_name
# Faster version of get_pedalboard_info when we just need to know the pedalboard name
# @a bundle is a string, consisting of a directory in the filesystem (absolute pathname).
def get_pedalboard_name(bundle):
# lilv wants the last character as the separator
bundle = os.path.abspath(bundle)
if not bundle.endswith(os.sep):
bundle += os.sep
# Create our own unique lilv world
# We'll load a single bundle and get all plugins from it
world = lilv.World()
# this is needed when loading specific bundles instead of load_all
# (these functions are not exposed via World yet)
lilv.lilv_world_load_specifications(world.me)
lilv.lilv_world_load_plugin_classes(world.me)
# convert bundle string into a lilv node
bundlenode = lilv.lilv_new_file_uri(world.me, None, bundle)
# load the bundle
world.load_bundle(bundlenode)
# free bundlenode, no longer needed
lilv.lilv_node_free(bundlenode)
# get all plugins in the bundle
plugins = world.get_all_plugins()
# make sure the bundle includes 1 and only 1 plugin (the pedalboard)
if plugins.size() != 1:
        raise Exception('get_pedalboard_name(%s) - bundle has 0 or > 1 plugin' % bundle)
# no indexing in python-lilv yet, just get the first item
plugin = None
for p in plugins:
plugin = p
break
if plugin is None:
        raise Exception('get_pedalboard_name(%s) - failed to get plugin, you are using an old lilv!' % bundle)
# define the needed stuff
ns_rdf = NS(world, lilv.LILV_NS_RDF)
# check if the plugin is a pedalboard
def fill_in_type(node):
return node.as_string()
plugin_types = [i for i in LILV_FOREACH(plugin.get_value(ns_rdf.type_), fill_in_type)]
if "http://moddevices.com/ns/modpedal#Pedalboard" not in plugin_types:
        raise Exception('get_pedalboard_name(%s) - plugin has no mod:Pedalboard type' % bundle)
return plugin.get_name().as_string()
# ------------------------------------------------------------------------------------------------------------
# plugin_has_modgui
# Check if a plugin has modgui
def plugin_has_modgui(world, plugin):
# define the needed stuff
ns_modgui = NS(world, "http://moddevices.com/ns/modgui#")
# --------------------------------------------------------------------------------------------------------
# get the proper modgui
modguigui = None
nodes = plugin.get_value(ns_modgui.gui)
it = nodes.begin()
while not nodes.is_end(it):
mgui = nodes.get(it)
it = nodes.next(it)
if mgui.me is None:
continue
resdir = world.find_nodes(mgui.me, ns_modgui.resourcesDirectory.me, None).get_first()
if resdir.me is None:
continue
modguigui = mgui
if os.path.expanduser("~") in lilv.lilv_uri_to_path(resdir.as_string()):
# found a modgui in the home dir, stop here and use it
break
del nodes, it
# --------------------------------------------------------------------------------------------------------
# check selected modgui
if modguigui is None or modguigui.me is None:
return False
# resourcesDirectory *must* be present
modgui_resdir = world.find_nodes(modguigui.me, ns_modgui.resourcesDirectory.me, None).get_first()
if modgui_resdir.me is None:
return False
return os.path.exists(lilv.lilv_uri_to_path(modgui_resdir.as_string()))
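# e.g. plugin_has_modgui(world, plugin) is True only when some modgui:gui node
# carries a resourcesDirectory that exists on disk; a gui found under the
# user's home directory takes precedence during the scan.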
# ------------------------------------------------------------------------------------------------------------
# get_plugin_info
# Get info from a lilv plugin
# This is used in get_plugins_info below and MOD-SDK
def get_plugin_info(world, plugin, useAbsolutePath = True):
# define the needed stuff
ns_doap = NS(world, lilv.LILV_NS_DOAP)
ns_foaf = NS(world, lilv.LILV_NS_FOAF)
ns_rdf = NS(world, lilv.LILV_NS_RDF)
ns_rdfs = NS(world, lilv.LILV_NS_RDFS)
ns_lv2core = NS(world, lilv.LILV_NS_LV2)
ns_atom = NS(world, "http://lv2plug.in/ns/ext/atom#")
ns_midi = NS(world, "http://lv2plug.in/ns/ext/midi#")
ns_morph = NS(world, "http://lv2plug.in/ns/ext/morph#")
ns_pprops = NS(world, "http://lv2plug.in/ns/ext/port-props#")
ns_pset = NS(world, "http://lv2plug.in/ns/ext/presets#")
ns_units = NS(world, "http://lv2plug.in/ns/extensions/units#")
ns_mod = NS(world, "http://moddevices.com/ns/mod#")
ns_modgui = NS(world, "http://moddevices.com/ns/modgui#")
bundleuri = plugin.get_bundle_uri().as_string()
bundle = lilv.lilv_uri_to_path(bundleuri)
errors = []
warnings = []
# --------------------------------------------------------------------------------------------------------
# uri
uri = plugin.get_uri().as_string() or ""
if not uri:
errors.append("plugin uri is missing or invalid")
elif uri.startswith("file:"):
errors.append("plugin uri is local, and thus not suitable for redistribution")
#elif not (uri.startswith("http:") or uri.startswith("https:")):
#warnings.append("plugin uri is not a real url")
# --------------------------------------------------------------------------------------------------------
# name
name = plugin.get_name().as_string() or ""
if not name:
errors.append("plugin name is missing")
# --------------------------------------------------------------------------------------------------------
# binary
binary = lilv.lilv_uri_to_path(plugin.get_library_uri().as_string() or "")
if not binary:
errors.append("plugin binary is missing")
elif not useAbsolutePath:
binary = binary.replace(bundle,"",1)
# --------------------------------------------------------------------------------------------------------
# license
license = plugin.get_value(ns_doap.license).get_first().as_string() or ""
if not license:
prj = plugin.get_value(ns_lv2core.project).get_first()
if prj.me is not None:
licsnode = lilv.lilv_world_get(world.me, prj.me, ns_doap.license.me, None)
if licsnode is not None:
license = lilv.lilv_node_as_string(licsnode)
del licsnode
del prj
if not license:
errors.append("plugin license is missing")
elif license.startswith(bundleuri):
license = license.replace(bundleuri,"",1)
warnings.append("plugin license entry is a local path instead of a string")
# --------------------------------------------------------------------------------------------------------
# comment
comment = (plugin.get_value(ns_rdfs.comment).get_first().as_string() or "").strip()
# sneaky empty comments!
if len(comment) > 0 and comment == len(comment) * comment[0]:
comment = ""
if not comment:
errors.append("plugin comment is missing")
# --------------------------------------------------------------------------------------------------------
# version
microver = plugin.get_value(ns_lv2core.microVersion).get_first()
minorver = plugin.get_value(ns_lv2core.minorVersion).get_first()
if microver.me is None and minorver.me is None:
errors.append("plugin is missing version information")
minorVersion = 0
microVersion = 0
else:
if minorver.me is None:
errors.append("plugin is missing minorVersion")
minorVersion = 0
else:
minorVersion = minorver.as_int()
if microver.me is None:
errors.append("plugin is missing microVersion")
microVersion = 0
else:
microVersion = microver.as_int()
del minorver
del microver
version = "%d.%d" % (minorVersion, microVersion)
# 0.x is experimental
if minorVersion == 0:
stability = "experimental"
# odd x.2 or 2.x is testing/development
elif minorVersion % 2 != 0 or microVersion % 2 != 0:
stability = "testing"
# otherwise it's stable
else:
stability = "stable"
# --------------------------------------------------------------------------------------------------------
# author
author = {
'name' : plugin.get_author_name().as_string() or "",
'homepage': plugin.get_author_homepage().as_string() or "",
'email' : plugin.get_author_email().as_string() or "",
}
if not author['name']:
errors.append("plugin author name is missing")
if not author['homepage']:
prj = plugin.get_value(ns_lv2core.project).get_first()
if prj.me is not None:
maintainer = lilv.lilv_world_get(world.me, prj.me, ns_doap.maintainer.me, None)
if maintainer is not None:
homepage = lilv.lilv_world_get(world.me, maintainer, ns_foaf.homepage.me, None)
if homepage is not None:
author['homepage'] = lilv.lilv_node_as_string(homepage)
del homepage
del maintainer
del prj
if not author['homepage']:
warnings.append("plugin author homepage is missing")
if not author['email']:
pass
elif author['email'].startswith(bundleuri):
author['email'] = author['email'].replace(bundleuri,"",1)
warnings.append("plugin author email entry is missing 'mailto:' prefix")
elif author['email'].startswith("mailto:"):
author['email'] = author['email'].replace("mailto:","",1)
# --------------------------------------------------------------------------------------------------------
# brand
brand = plugin.get_value(ns_mod.brand).get_first().as_string() or ""
if not brand:
brand = author['name'].split(" - ",1)[0].split(" ",1)[0]
brand = brand.rstrip(",").rstrip(";")
if len(brand) > 11:
brand = brand[:11]
warnings.append("plugin brand is missing")
elif len(brand) > 11:
brand = brand[:11]
errors.append("plugin brand has more than 11 characters")
# --------------------------------------------------------------------------------------------------------
# label
label = plugin.get_value(ns_mod.label).get_first().as_string() or ""
if not label:
if len(name) <= 16:
label = name
else:
labels = name.split(" - ",1)[0].split(" ")
if labels[0].lower() in bundle.lower() and len(labels) > 1 and not labels[1].startswith(("(","[")):
label = labels[1]
else:
label = labels[0]
if len(label) > 16:
label = label[:16]
warnings.append("plugin label is missing")
del labels
elif len(label) > 16:
label = label[:16]
errors.append("plugin label has more than 16 characters")
# --------------------------------------------------------------------------------------------------------
# bundles
bundles = []
if useAbsolutePath:
bnodes = lilv.lilv_plugin_get_data_uris(plugin.me)
it = lilv.lilv_nodes_begin(bnodes)
while not lilv.lilv_nodes_is_end(bnodes, it):
bnode = lilv.lilv_nodes_get(bnodes, it)
it = lilv.lilv_nodes_next(bnodes, it)
if bnode is None:
continue
if not lilv.lilv_node_is_uri(bnode):
continue
bpath = os.path.abspath(os.path.dirname(lilv.lilv_uri_to_path(lilv.lilv_node_as_uri(bnode))))
if not bpath.endswith(os.sep):
bpath += os.sep
if bpath not in bundles:
bundles.append(bpath)
if bundle not in bundles:
bundles.append(bundle)
del bnodes, it
# --------------------------------------------------------------------------------------------------------
# get the proper modgui
modguigui = None
nodes = plugin.get_value(ns_modgui.gui)
it = nodes.begin()
while not nodes.is_end(it):
mgui = nodes.get(it)
it = nodes.next(it)
if mgui.me is None:
continue
resdir = world.find_nodes(mgui.me, ns_modgui.resourcesDirectory.me, None).get_first()
if resdir.me is None:
continue
modguigui = mgui
if not useAbsolutePath:
# special build, use first modgui found
break
if os.path.expanduser("~") in lilv.lilv_uri_to_path(resdir.as_string()):
# found a modgui in the home dir, stop here and use it
break
del nodes, it
# --------------------------------------------------------------------------------------------------------
# gui
gui = {}
if modguigui is None or modguigui.me is None:
warnings.append("no modgui available")
else:
# resourcesDirectory *must* be present
modgui_resdir = world.find_nodes(modguigui.me, ns_modgui.resourcesDirectory.me, None).get_first()
if modgui_resdir.me is None:
errors.append("modgui has no resourcesDirectory data")
else:
if useAbsolutePath:
gui['resourcesDirectory'] = lilv.lilv_uri_to_path(modgui_resdir.as_string())
# check if modgui is defined in a separate file
gui['usingSeeAlso'] = os.path.exists(os.path.join(bundle, "modgui.ttl"))
# check if the modgui definition is on its own file and in the user dir
gui['modificableInPlace'] = bool((bundle not in gui['resourcesDirectory'] or gui['usingSeeAlso']) and
os.path.expanduser("~") in gui['resourcesDirectory'])
else:
gui['resourcesDirectory'] = modgui_resdir.as_string().replace(bundleuri,"",1)
# icon and settings templates
modgui_icon = world.find_nodes(modguigui.me, ns_modgui.iconTemplate .me, None).get_first()
modgui_setts = world.find_nodes(modguigui.me, ns_modgui.settingsTemplate.me, None).get_first()
if modgui_icon.me is None:
errors.append("modgui has no iconTemplate data")
else:
iconFile = lilv.lilv_uri_to_path(modgui_icon.as_string())
if os.path.exists(iconFile):
gui['iconTemplate'] = iconFile if useAbsolutePath else iconFile.replace(bundle,"",1)
else:
errors.append("modgui iconTemplate file is missing")
del iconFile
if modgui_setts.me is not None:
settingsFile = lilv.lilv_uri_to_path(modgui_setts.as_string())
if os.path.exists(settingsFile):
gui['settingsTemplate'] = settingsFile if useAbsolutePath else settingsFile.replace(bundle,"",1)
else:
errors.append("modgui settingsTemplate file is missing")
del settingsFile
# javascript and stylesheet files
modgui_script = world.find_nodes(modguigui.me, ns_modgui.javascript.me, None).get_first()
modgui_style = world.find_nodes(modguigui.me, ns_modgui.stylesheet.me, None).get_first()
if modgui_script.me is not None:
javascriptFile = lilv.lilv_uri_to_path(modgui_script.as_string())
if os.path.exists(javascriptFile):
gui['javascript'] = javascriptFile if useAbsolutePath else javascriptFile.replace(bundle,"",1)
else:
errors.append("modgui javascript file is missing")
del javascriptFile
if modgui_style.me is None:
errors.append("modgui has no stylesheet data")
else:
stylesheetFile = lilv.lilv_uri_to_path(modgui_style.as_string())
if os.path.exists(stylesheetFile):
gui['stylesheet'] = stylesheetFile if useAbsolutePath else stylesheetFile.replace(bundle,"",1)
else:
errors.append("modgui stylesheet file is missing")
del stylesheetFile
# template data for backwards compatibility
# FIXME remove later once we got rid of all templateData files
modgui_templ = world.find_nodes(modguigui.me, ns_modgui.templateData.me, None).get_first()
if modgui_templ.me is not None:
warnings.append("modgui is using old deprecated templateData")
templFile = lilv.lilv_uri_to_path(modgui_templ.as_string())
if os.path.exists(templFile):
with open(templFile, 'r') as fd:
try:
data = json.loads(fd.read())
except:
data = {}
keys = list(data.keys())
if 'author' in keys:
gui['brand'] = data['author']
if 'label' in keys:
gui['label'] = data['label']
if 'color' in keys:
gui['color'] = data['color']
if 'knob' in keys:
gui['knob'] = data['knob']
if 'controls' in keys:
index = 0
ports = []
for ctrl in data['controls']:
ports.append({
'index' : index,
'name' : ctrl['name'],
'symbol': ctrl['symbol'],
})
index += 1
gui['ports'] = ports
del templFile
# screenshot and thumbnail
modgui_scrn = world.find_nodes(modguigui.me, ns_modgui.screenshot.me, None).get_first()
modgui_thumb = world.find_nodes(modguigui.me, ns_modgui.thumbnail .me, None).get_first()
if modgui_scrn.me is not None:
gui['screenshot'] = lilv.lilv_uri_to_path(modgui_scrn.as_string())
if not os.path.exists(gui['screenshot']):
errors.append("modgui screenshot file is missing")
if not useAbsolutePath:
gui['screenshot'] = gui['screenshot'].replace(bundle,"",1)
else:
errors.append("modgui has no screnshot data")
if modgui_thumb.me is not None:
gui['thumbnail'] = lilv.lilv_uri_to_path(modgui_thumb.as_string())
if not os.path.exists(gui['thumbnail']):
errors.append("modgui thumbnail file is missing")
if not useAbsolutePath:
gui['thumbnail'] = gui['thumbnail'].replace(bundle,"",1)
else:
errors.append("modgui has no thumbnail data")
# extra stuff, all optional
modgui_brand = world.find_nodes(modguigui.me, ns_modgui.brand.me, None).get_first()
modgui_label = world.find_nodes(modguigui.me, ns_modgui.label.me, None).get_first()
modgui_model = world.find_nodes(modguigui.me, ns_modgui.model.me, None).get_first()
modgui_panel = world.find_nodes(modguigui.me, ns_modgui.panel.me, None).get_first()
modgui_color = world.find_nodes(modguigui.me, ns_modgui.color.me, None).get_first()
modgui_knob = world.find_nodes(modguigui.me, ns_modgui.knob .me, None).get_first()
if modgui_brand.me is not None:
gui['brand'] = modgui_brand.as_string()
if modgui_label.me is not None:
gui['label'] = modgui_label.as_string()
if modgui_model.me is not None:
gui['model'] = modgui_model.as_string()
if modgui_panel.me is not None:
gui['panel'] = modgui_panel.as_string()
if modgui_color.me is not None:
gui['color'] = modgui_color.as_string()
if modgui_knob.me is not None:
gui['knob'] = modgui_knob.as_string()
# ports
errpr = False
sybls = []
ports = []
nodes = world.find_nodes(modguigui.me, ns_modgui.port.me, None)
it = lilv.lilv_nodes_begin(nodes.me)
while not lilv.lilv_nodes_is_end(nodes.me, it):
port = lilv.lilv_nodes_get(nodes.me, it)
it = lilv.lilv_nodes_next(nodes.me, it)
if port is None:
break
port_indx = world.find_nodes(port, ns_lv2core.index .me, None).get_first()
port_symb = world.find_nodes(port, ns_lv2core.symbol.me, None).get_first()
port_name = world.find_nodes(port, ns_lv2core.name .me, None).get_first()
if None in (port_indx.me, port_name.me, port_symb.me):
if not errpr:
errors.append("modgui has some invalid port data")
errpr = True
continue
port_indx = port_indx.as_int()
port_symb = port_symb.as_string()
port_name = port_name.as_string()
ports.append({
'index' : port_indx,
'symbol': port_symb,
'name' : port_name,
})
if port_symb not in sybls:
sybls.append(port_symb)
elif not errpr:
errors.append("modgui has some duplicated port symbols")
errpr = True
# sort ports
if len(ports) > 0:
ports2 = {}
for port in ports:
ports2[port['index']] = port
gui['ports'] = [ports2[i] for i in ports2]
del ports2
# cleanup
del ports, nodes, it
# --------------------------------------------------------------------------------------------------------
# ports
index = 0
ports = {
'audio' : { 'input': [], 'output': [] },
'control': { 'input': [], 'output': [] },
'midi' : { 'input': [], 'output': [] }
}
portsymbols = []
portnames = []
# function for filling port info
def fill_port_info(port):
# base data
portname = lilv.lilv_node_as_string(port.get_name()) or ""
if not portname:
portname = "_%i" % index
errors.append("port with index %i has no name" % index)
portsymbol = lilv.lilv_node_as_string(port.get_symbol()) or ""
if not portsymbol:
portsymbol = "_%i" % index
errors.append("port with index %i has no symbol" % index)
# check for duplicate names
        if portname in portnames:
warnings.append("port name '%s' is not unique" % portname)
else:
portnames.append(portname)
# check for duplicate symbols
if portsymbol in portsymbols:
errors.append("port symbol '%s' is not unique" % portsymbol)
else:
portsymbols.append(portsymbol)
# short name
psname = lilv.lilv_nodes_get_first(port.get_value(ns_lv2core.shortName.me))
if psname is not None:
psname = lilv.lilv_node_as_string(psname) or ""
if not psname:
psname = get_short_port_name(portname)
if len(psname) > 16:
warnings.append("port '%s' name is too big, reduce the name size or provide a shortName" % portname)
elif len(psname) > 16:
psname = psname[:16]
errors.append("port '%s' short name has more than 16 characters" % portname)
# check for old style shortName
if port.get_value(ns_lv2core.shortname.me) is not None:
errors.append("port '%s' short name is using old style 'shortname' instead of 'shortName'" % portname)
# port types
types = [typ.rsplit("#",1)[-1].replace("Port","",1) for typ in get_port_data(port, ns_rdf.type_)]
if "Atom" in types \
and port.supports_event(ns_midi.MidiEvent.me) \
and lilv.Nodes(port.get_value(ns_atom.bufferType.me)).get_first() == ns_atom.Sequence:
types.append("MIDI")
#if "Morph" in types:
#morphtyp = lilv.lilv_nodes_get_first(port.get_value(ns_morph.supportsType.me))
#if morphtyp is not None:
#morphtyp = lilv.lilv_node_as_uri(morphtyp)
#if morphtyp:
#types.append(morphtyp.rsplit("#",1)[-1].replace("Port","",1))
# port comment
pcomment = (get_port_data(port, ns_rdfs.comment) or [""])[0]
# port designation
designation = (get_port_data(port, ns_lv2core.designation) or [""])[0]
# port rangeSteps
rangeSteps = (get_port_data(port, ns_mod.rangeSteps) or get_port_data(port, ns_pprops.rangeSteps) or [None])[0]
# port properties
properties = [typ.rsplit("#",1)[-1] for typ in get_port_data(port, ns_lv2core.portProperty)]
# data
ranges = {}
scalepoints = []
# unit block
ulabel = ""
urender = ""
usymbol = ""
# control and cv must contain ranges, might contain scale points
if "Control" in types or "CV" in types:
isInteger = "integer" in properties
if isInteger and "CV" in types:
errors.append("port '%s' has integer property and CV type" % portname)
xdefault = lilv.lilv_nodes_get_first(port.get_value(ns_mod.default.me)) or \
lilv.lilv_nodes_get_first(port.get_value(ns_lv2core.default.me))
xminimum = lilv.lilv_nodes_get_first(port.get_value(ns_mod.minimum.me)) or \
lilv.lilv_nodes_get_first(port.get_value(ns_lv2core.minimum.me))
xmaximum = lilv.lilv_nodes_get_first(port.get_value(ns_mod.maximum.me)) or \
lilv.lilv_nodes_get_first(port.get_value(ns_lv2core.maximum.me))
if xminimum is not None and xmaximum is not None:
if isInteger:
if is_integer(lilv.lilv_node_as_string(xminimum)):
ranges['minimum'] = lilv.lilv_node_as_int(xminimum)
else:
ranges['minimum'] = lilv.lilv_node_as_float(xminimum)
if fmod(ranges['minimum'], 1.0) == 0.0:
warnings.append("port '%s' has integer property but minimum value is float" % portname)
else:
errors.append("port '%s' has integer property but minimum value has non-zero decimals" % portname)
ranges['minimum'] = int(ranges['minimum'])
if is_integer(lilv.lilv_node_as_string(xmaximum)):
ranges['maximum'] = lilv.lilv_node_as_int(xmaximum)
else:
ranges['maximum'] = lilv.lilv_node_as_float(xmaximum)
if fmod(ranges['maximum'], 1.0) == 0.0:
warnings.append("port '%s' has integer property but maximum value is float" % portname)
else:
errors.append("port '%s' has integer property but maximum value has non-zero decimals" % portname)
ranges['maximum'] = int(ranges['maximum'])
else:
ranges['minimum'] = lilv.lilv_node_as_float(xminimum)
ranges['maximum'] = lilv.lilv_node_as_float(xmaximum)
if is_integer(lilv.lilv_node_as_string(xminimum)):
warnings.append("port '%s' minimum value is an integer" % portname)
if is_integer(lilv.lilv_node_as_string(xmaximum)):
warnings.append("port '%s' maximum value is an integer" % portname)
if ranges['minimum'] >= ranges['maximum']:
ranges['maximum'] = ranges['minimum'] + (1 if isInteger else 0.1)
errors.append("port '%s' minimum value is equal or higher than its maximum" % portname)
if xdefault is not None:
if isInteger:
if is_integer(lilv.lilv_node_as_string(xdefault)):
ranges['default'] = lilv.lilv_node_as_int(xdefault)
else:
ranges['default'] = lilv.lilv_node_as_float(xdefault)
if fmod(ranges['default'], 1.0) == 0.0:
warnings.append("port '%s' has integer property but default value is float" % portname)
else:
errors.append("port '%s' has integer property but default value has non-zero decimals" % portname)
ranges['default'] = int(ranges['default'])
else:
ranges['default'] = lilv.lilv_node_as_float(xdefault)
if is_integer(lilv.lilv_node_as_string(xdefault)):
warnings.append("port '%s' default value is an integer" % portname)
testmin = ranges['minimum']
testmax = ranges['maximum']
if "sampleRate" in properties:
testmin *= 48000
testmax *= 48000
if not (testmin <= ranges['default'] <= testmax):
ranges['default'] = ranges['minimum']
errors.append("port '%s' default value is out of bounds" % portname)
else:
ranges['default'] = ranges['minimum']
if "Input" in types:
errors.append("port '%s' is missing default value" % portname)
else:
if isInteger:
ranges['minimum'] = 0
ranges['maximum'] = 1
ranges['default'] = 0
else:
ranges['minimum'] = -1.0 if "CV" in types else 0.0
ranges['maximum'] = 1.0
ranges['default'] = 0.0
if "CV" not in types and designation != "http://lv2plug.in/ns/lv2core#latency":
errors.append("port '%s' is missing value ranges" % portname)
nodes = port.get_scale_points()
if nodes is not None:
scalepoints_unsorted = []
it = lilv.lilv_scale_points_begin(nodes)
while not lilv.lilv_scale_points_is_end(nodes, it):
sp = lilv.lilv_scale_points_get(nodes, it)
it = lilv.lilv_scale_points_next(nodes, it)
if sp is None:
continue
label = lilv.lilv_scale_point_get_label(sp)
value = lilv.lilv_scale_point_get_value(sp)
if label is None:
errors.append("a port scalepoint is missing its label")
continue
label = lilv.lilv_node_as_string(label) or ""
if not label:
errors.append("a port scalepoint is missing its label")
continue
if value is None:
errors.append("port scalepoint '%s' is missing its value" % label)
continue
if isInteger:
if is_integer(lilv.lilv_node_as_string(value)):
value = lilv.lilv_node_as_int(value)
else:
value = lilv.lilv_node_as_float(value)
if fmod(value, 1.0) == 0.0:
warnings.append("port '%s' has integer property but scalepoint '%s' value is float" % (portname, label))
else:
errors.append("port '%s' has integer property but scalepoint '%s' value has non-zero decimals" % (portname, label))
value = int(value)
else:
if is_integer(lilv.lilv_node_as_string(value)):
warnings.append("port '%s' scalepoint '%s' value is an integer" % (portname, label))
value = lilv.lilv_node_as_float(value)
if ranges['minimum'] <= value <= ranges['maximum']:
scalepoints_unsorted.append((value, label))
else:
errors.append(("port scalepoint '%s' has an out-of-bounds value:\n" % label) +
("%d < %d < %d" if isInteger else "%f < %f < %f") % (ranges['minimum'], value, ranges['maximum']))
if len(scalepoints_unsorted) != 0:
unsorted = dict(s for s in scalepoints_unsorted)
values = list(v for v, l in scalepoints_unsorted)
values.sort()
scalepoints = list({ 'value': v, 'label': unsorted[v] } for v in values)
del unsorted, values
del scalepoints_unsorted
if "enumeration" in properties and len(scalepoints) <= 1:
errors.append("port '%s' wants to use enumeration but doesn't have enough values" % portname)
properties.remove("enumeration")
# control ports might contain unit
if "Control" in types:
# unit
uunit = lilv.lilv_nodes_get_first(port.get_value(ns_units.unit.me))
if uunit is not None:
uuri = lilv.lilv_node_as_uri(uunit)
# using pre-existing lv2 unit
if uuri is not None and uuri.startswith("http://lv2plug.in/ns/"):
uuri = uuri.replace("http://lv2plug.in/ns/extensions/units#","",1)
alnum = uuri.isalnum()
if not alnum:
errors.append("port '%s' has wrong lv2 unit uri" % portname)
uuri = uuri.rsplit("#",1)[-1].rsplit("/",1)[-1]
ulabel, urender, usymbol = get_port_unit(uuri)
if alnum and not (ulabel and urender and usymbol):
errors.append("port '%s' has unknown lv2 unit (our bug?, data is '%s', '%s', '%s')" % (portname,
ulabel,
urender,
usymbol))
# using custom unit
else:
xlabel = world.find_nodes(uunit, ns_rdfs .label.me, None).get_first()
xrender = world.find_nodes(uunit, ns_units.render.me, None).get_first()
xsymbol = world.find_nodes(uunit, ns_units.symbol.me, None).get_first()
if xlabel.me is not None:
ulabel = xlabel.as_string()
else:
errors.append("port '%s' has custom unit with no label" % portname)
if xrender.me is not None:
urender = xrender.as_string()
else:
errors.append("port '%s' has custom unit with no render" % portname)
if xsymbol.me is not None:
usymbol = xsymbol.as_string()
else:
errors.append("port '%s' has custom unit with no symbol" % portname)
return (types, {
'name' : portname,
'symbol' : portsymbol,
'ranges' : ranges,
'units' : {
'label' : ulabel,
'render': urender,
'symbol': usymbol,
} if "Control" in types and ulabel and urender and usymbol else {},
'comment' : pcomment,
'designation': designation,
'properties' : properties,
'rangeSteps' : rangeSteps,
'scalePoints': scalepoints,
'shortName' : psname,
})
for p in (plugin.get_port_by_index(i) for i in range(plugin.get_num_ports())):
types, info = fill_port_info(p)
info['index'] = index
index += 1
isInput = "Input" in types
types.remove("Input" if isInput else "Output")
for typ in [typl.lower() for typl in types]:
if typ not in ports.keys():
ports[typ] = { 'input': [], 'output': [] }
ports[typ]["input" if isInput else "output"].append(info)
# --------------------------------------------------------------------------------------------------------
# presets
def get_preset_data(preset):
world.load_resource(preset.me)
uri = preset.as_string() or ""
label = world.find_nodes(preset.me, ns_rdfs.label.me, None).get_first().as_string() or ""
if not uri:
errors.append("preset with label '%s' has no uri" % (label or "<unknown>"))
if not label:
errors.append("preset with uri '%s' has no label" % (uri or "<unknown>"))
return (uri, label)
presets = []
presets_related = plugin.get_related(ns_pset.Preset)
presets_data = list(LILV_FOREACH(presets_related, get_preset_data))
if len(presets_data) != 0:
unsorted = dict(p for p in presets_data)
uris = list(unsorted.keys())
uris.sort()
presets = list({ 'uri': p, 'label': unsorted[p] } for p in uris)
del unsorted, uris
del presets_related
# --------------------------------------------------------------------------------------------------------
# done
return {
'uri' : uri,
'name': name,
'binary' : binary,
'brand' : brand,
'label' : label,
'license': license,
'comment': comment,
'category' : get_category(plugin.get_value(ns_rdf.type_)),
'microVersion': microVersion,
'minorVersion': minorVersion,
'version' : version,
'stability': stability,
'author' : author,
'bundles': bundles,
'gui' : gui,
'ports' : ports,
'presets': presets,
'errors' : errors,
'warnings': warnings,
}
# ------------------------------------------------------------------------------------------------------------
# get_plugin_info_helper
# Get info from a simple URI, without the need of your own lilv world
# This is used by get_plugins_info in MOD-SDK
def get_plugin_info_helper(uri):
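    # NOTE: as written, the 'uri' argument is unused; this returns info for
    # every plugin found in a freshly loaded lilv world.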
world = lilv.World()
world.load_all()
plugins = world.get_all_plugins()
return [get_plugin_info(world, p, False) for p in plugins]
# ------------------------------------------------------------------------------------------------------------
# get_plugins_info
# Get plugin-related info from a list of lv2 bundles
# @a bundles is a list of strings, consisting of directories in the filesystem (absolute pathnames).
def get_plugins_info(bundles):
# if empty, do nothing
if len(bundles) == 0:
raise Exception('get_plugins_info() - no bundles provided')
# Create our own unique lilv world
# We'll load the selected bundles and get all plugins from it
world = lilv.World()
# this is needed when loading specific bundles instead of load_all
# (these functions are not exposed via World yet)
lilv.lilv_world_load_specifications(world.me)
lilv.lilv_world_load_plugin_classes(world.me)
# load all bundles
for bundle in bundles:
# lilv wants the last character as the separator
bundle = os.path.abspath(bundle)
if not bundle.endswith(os.sep):
bundle += os.sep
# convert bundle string into a lilv node
bundlenode = lilv.lilv_new_file_uri(world.me, None, bundle)
# load the bundle
world.load_bundle(bundlenode)
# free bundlenode, no longer needed
lilv.lilv_node_free(bundlenode)
# get all plugins available in the selected bundles
plugins = world.get_all_plugins()
# make sure the bundles include something
if plugins.size() == 0:
raise Exception('get_plugins_info() - selected bundles have no plugins')
# return all the info
return [get_plugin_info(world, p, False) for p in plugins]
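# e.g. get_plugins_info(['/usr/lib/lv2/foo.lv2/']) returns one info dict per
# plugin found in the given bundles ('/usr/lib/lv2/foo.lv2/' is a hypothetical
# path, for illustration).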
# ------------------------------------------------------------------------------------------------------------
if __name__ == '__main__':
from sys import argv, exit
from pprint import pprint
#get_plugins_info(argv[1:])
#for i in get_plugins_info(argv[1:]): pprint(i)
#exit(0)
for i in get_plugins_info(argv[1:]):
warnings = i['warnings'].copy()
if 'plugin brand is missing' in warnings:
i['warnings'].remove('plugin brand is missing')
if 'plugin label is missing' in warnings:
i['warnings'].remove('plugin label is missing')
if 'no modgui available' in warnings:
i['warnings'].remove('no modgui available')
for warn in warnings:
if "has no short name" in warn:
i['warnings'].remove(warn)
pprint({
'uri' : i['uri'],
'errors' : i['errors'],
'warnings': i['warnings']
}, width=200)
# ------------------------------------------------------------------------------------------------------------
| [
"lilv.lilv_world_load_plugin_classes",
"lilv.lilv_uri_to_path",
"lilv.lilv_node_free",
"lilv.lilv_nodes_next",
"lilv.lilv_scale_point_get_value",
"lilv.lilv_world_get",
"lilv.lilv_node_is_uri",
"pprint.pprint",
"lilv.lilv_scale_points_get",
"os.path.exists",
"lilv.lilv_scale_points_is_end",
"l... | [((4919, 4947), 'lilv.lilv_nodes_begin', 'lilv.lilv_nodes_begin', (['nodes'], {}), '(nodes)\n', (4940, 4947), False, 'import lilv\n'), ((6609, 6641), 'lilv.lilv_uri_to_path', 'lilv.lilv_uri_to_path', (['bundleuri'], {}), '(bundleuri)\n', (6630, 6641), False, 'import lilv\n'), ((6718, 6740), 'os.path.isfile', 'os.path.isfile', (['bundle'], {}), '(bundle)\n', (6732, 6740), False, 'import os\n'), ((7156, 7179), 'os.path.abspath', 'os.path.abspath', (['bundle'], {}), '(bundle)\n', (7171, 7179), False, 'import os\n'), ((7354, 7366), 'lilv.World', 'lilv.World', ([], {}), '()\n', (7364, 7366), False, 'import lilv\n'), ((7497, 7542), 'lilv.lilv_world_load_specifications', 'lilv.lilv_world_load_specifications', (['world.me'], {}), '(world.me)\n', (7532, 7542), False, 'import lilv\n'), ((7547, 7592), 'lilv.lilv_world_load_plugin_classes', 'lilv.lilv_world_load_plugin_classes', (['world.me'], {}), '(world.me)\n', (7582, 7592), False, 'import lilv\n'), ((7656, 7702), 'lilv.lilv_new_file_uri', 'lilv.lilv_new_file_uri', (['world.me', 'None', 'bundle'], {}), '(world.me, None, bundle)\n', (7678, 7702), False, 'import lilv\n'), ((7805, 7836), 'lilv.lilv_node_free', 'lilv.lilv_node_free', (['bundlenode'], {}), '(bundlenode)\n', (7824, 7836), False, 'import lilv\n'), ((15621, 15644), 'os.path.abspath', 'os.path.abspath', (['bundle'], {}), '(bundle)\n', (15636, 15644), False, 'import os\n'), ((15819, 15831), 'lilv.World', 'lilv.World', ([], {}), '()\n', (15829, 15831), False, 'import lilv\n'), ((15962, 16007), 'lilv.lilv_world_load_specifications', 'lilv.lilv_world_load_specifications', (['world.me'], {}), '(world.me)\n', (15997, 16007), False, 'import lilv\n'), ((16012, 16057), 'lilv.lilv_world_load_plugin_classes', 'lilv.lilv_world_load_plugin_classes', (['world.me'], {}), '(world.me)\n', (16047, 16057), False, 'import lilv\n'), ((16121, 16167), 'lilv.lilv_new_file_uri', 'lilv.lilv_new_file_uri', (['world.me', 'None', 'bundle'], {}), '(world.me, None, bundle)\n', (16143, 16167), False, 'import lilv\n'), ((16270, 16301), 'lilv.lilv_node_free', 'lilv.lilv_node_free', (['bundlenode'], {}), '(bundlenode)\n', (16289, 16301), False, 'import lilv\n'), ((19957, 19989), 'lilv.lilv_uri_to_path', 'lilv.lilv_uri_to_path', (['bundleuri'], {}), '(bundleuri)\n', (19978, 19989), False, 'import lilv\n'), ((54570, 54582), 'lilv.World', 'lilv.World', ([], {}), '()\n', (54580, 54582), False, 'import lilv\n'), ((55261, 55273), 'lilv.World', 'lilv.World', ([], {}), '()\n', (55271, 55273), False, 'import lilv\n'), ((55404, 55449), 'lilv.lilv_world_load_specifications', 'lilv.lilv_world_load_specifications', (['world.me'], {}), '(world.me)\n', (55439, 55449), False, 'import lilv\n'), ((55454, 55499), 'lilv.lilv_world_load_plugin_classes', 'lilv.lilv_world_load_plugin_classes', (['world.me'], {}), '(world.me)\n', (55489, 55499), False, 'import lilv\n'), ((4962, 4995), 'lilv.lilv_nodes_is_end', 'lilv.lilv_nodes_is_end', (['nodes', 'it'], {}), '(nodes, it)\n', (4984, 4995), False, 'import lilv\n'), ((5011, 5041), 'lilv.lilv_nodes_get', 'lilv.lilv_nodes_get', (['nodes', 'it'], {}), '(nodes, it)\n', (5030, 5041), False, 'import lilv\n'), ((5056, 5087), 'lilv.lilv_nodes_next', 'lilv.lilv_nodes_next', (['nodes', 'it'], {}), '(nodes, it)\n', (5076, 5087), False, 'import lilv\n'), ((6654, 6676), 'os.path.exists', 'os.path.exists', (['bundle'], {}), '(bundle)\n', (6668, 6676), False, 'import os\n'), ((6759, 6782), 'os.path.dirname', 'os.path.dirname', (['bundle'], {}), '(bundle)\n', (6774, 6782), False, 'import os\n'), ((10438, 
10499), 'lilv.lilv_world_get', 'lilv.lilv_world_get', (['world.me', 'arc.me', 'ns_ingen.head.me', 'None'], {}), '(world.me, arc.me, ns_ingen.head.me, None)\n', (10457, 10499), False, 'import lilv\n'), ((10515, 10576), 'lilv.lilv_world_get', 'lilv.lilv_world_get', (['world.me', 'arc.me', 'ns_ingen.tail.me', 'None'], {}), '(world.me, arc.me, ns_ingen.tail.me, None)\n', (10534, 10576), False, 'import lilv\n'), ((11466, 11534), 'lilv.lilv_world_find_nodes', 'lilv.lilv_world_find_nodes', (['world.me', 'port.me', 'ns_rdf.type_.me', 'None'], {}), '(world.me, port.me, ns_rdf.type_.me, None)\n', (11492, 11534), False, 'import lilv\n'), ((11686, 11719), 'lilv.lilv_nodes_begin', 'lilv.lilv_nodes_begin', (['port_types'], {}), '(port_types)\n', (11707, 11719), False, 'import lilv\n'), ((13455, 13525), 'lilv.lilv_world_get', 'lilv.lilv_world_get', (['world.me', 'block.me', 'ns_lv2core.prototype.me', 'None'], {}), '(world.me, block.me, ns_lv2core.prototype.me, None)\n', (13474, 13525), False, 'import lilv\n'), ((13546, 13614), 'lilv.lilv_world_get', 'lilv.lilv_world_get', (['world.me', 'block.me', 'ns_ingen.prototype.me', 'None'], {}), '(world.me, block.me, ns_ingen.prototype.me, None)\n', (13565, 13614), False, 'import lilv\n'), ((13899, 13927), 'lilv.lilv_node_as_uri', 'lilv.lilv_node_as_uri', (['proto'], {}), '(proto)\n', (13920, 13927), False, 'import lilv\n'), ((13948, 14014), 'lilv.lilv_world_get', 'lilv.lilv_world_get', (['world.me', 'block.me', 'ns_ingen.enabled.me', 'None'], {}), '(world.me, block.me, ns_ingen.enabled.me, None)\n', (13967, 14014), False, 'import lilv\n'), ((14034, 14105), 'lilv.lilv_world_get', 'lilv.lilv_world_get', (['world.me', 'block.me', 'ns_mod.builderVersion.me', 'None'], {}), '(world.me, block.me, ns_mod.builderVersion.me, None)\n', (14053, 14105), False, 'import lilv\n'), ((14125, 14195), 'lilv.lilv_world_get', 'lilv.lilv_world_get', (['world.me', 'block.me', 'ns_mod.releaseNumber.me', 'None'], {}), '(world.me, block.me, ns_mod.releaseNumber.me, None)\n', (14144, 14195), False, 'import lilv\n'), ((14215, 14288), 'lilv.lilv_world_get', 'lilv.lilv_world_get', (['world.me', 'block.me', 'ns_lv2core.minorVersion.me', 'None'], {}), '(world.me, block.me, ns_lv2core.minorVersion.me, None)\n', (14234, 14288), False, 'import lilv\n'), ((14308, 14381), 'lilv.lilv_world_get', 'lilv.lilv_world_get', (['world.me', 'block.me', 'ns_lv2core.microVersion.me', 'None'], {}), '(world.me, block.me, ns_lv2core.microVersion.me, None)\n', (14327, 14381), False, 'import lilv\n'), ((26442, 26483), 'lilv.lilv_plugin_get_data_uris', 'lilv.lilv_plugin_get_data_uris', (['plugin.me'], {}), '(plugin.me)\n', (26472, 26483), False, 'import lilv\n'), ((26498, 26527), 'lilv.lilv_nodes_begin', 'lilv.lilv_nodes_begin', (['bnodes'], {}), '(bnodes)\n', (26519, 26527), False, 'import lilv\n'), ((55625, 55648), 'os.path.abspath', 'os.path.abspath', (['bundle'], {}), '(bundle)\n', (55640, 55648), False, 'import os\n'), ((55789, 55835), 'lilv.lilv_new_file_uri', 'lilv.lilv_new_file_uri', (['world.me', 'None', 'bundle'], {}), '(world.me, None, bundle)\n', (55811, 55835), False, 'import lilv\n'), ((55954, 55985), 'lilv.lilv_node_free', 'lilv.lilv_node_free', (['bundlenode'], {}), '(bundlenode)\n', (55973, 55985), False, 'import lilv\n'), ((57158, 57248), 'pprint.pprint', 'pprint', (["{'uri': i['uri'], 'errors': i['errors'], 'warnings': i['warnings']}"], {'width': '(200)'}), "({'uri': i['uri'], 'errors': i['errors'], 'warnings': i['warnings']},\n width=200)\n", (57164, 57248), False, 'from pprint import pprint\n'), 
((5153, 5182), 'lilv.lilv_node_as_string', 'lilv.lilv_node_as_string', (['dat'], {}), '(dat)\n', (5177, 5182), False, 'import lilv\n'), ((11738, 11777), 'lilv.lilv_nodes_is_end', 'lilv.lilv_nodes_is_end', (['port_types', 'it2'], {}), '(port_types, it2)\n', (11760, 11777), False, 'import lilv\n'), ((11803, 11839), 'lilv.lilv_nodes_get', 'lilv.lilv_nodes_get', (['port_types', 'it2'], {}), '(port_types, it2)\n', (11822, 11839), False, 'import lilv\n'), ((11858, 11895), 'lilv.lilv_nodes_next', 'lilv.lilv_nodes_next', (['port_types', 'it2'], {}), '(port_types, it2)\n', (11878, 11895), False, 'import lilv\n'), ((11985, 12017), 'lilv.lilv_node_as_uri', 'lilv.lilv_node_as_uri', (['port_type'], {}), '(port_type)\n', (12006, 12017), False, 'import lilv\n'), ((18185, 18208), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (18203, 18208), False, 'import os\n'), ((21447, 21510), 'lilv.lilv_world_get', 'lilv.lilv_world_get', (['world.me', 'prj.me', 'ns_doap.license.me', 'None'], {}), '(world.me, prj.me, ns_doap.license.me, None)\n', (21466, 21510), False, 'import lilv\n'), ((24059, 24125), 'lilv.lilv_world_get', 'lilv.lilv_world_get', (['world.me', 'prj.me', 'ns_doap.maintainer.me', 'None'], {}), '(world.me, prj.me, ns_doap.maintainer.me, None)\n', (24078, 24125), False, 'import lilv\n'), ((26546, 26580), 'lilv.lilv_nodes_is_end', 'lilv.lilv_nodes_is_end', (['bnodes', 'it'], {}), '(bnodes, it)\n', (26568, 26580), False, 'import lilv\n'), ((26602, 26633), 'lilv.lilv_nodes_get', 'lilv.lilv_nodes_get', (['bnodes', 'it'], {}), '(bnodes, it)\n', (26621, 26633), False, 'import lilv\n'), ((26654, 26686), 'lilv.lilv_nodes_next', 'lilv.lilv_nodes_next', (['bnodes', 'it'], {}), '(bnodes, it)\n', (26674, 26686), False, 'import lilv\n'), ((27826, 27849), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (27844, 27849), False, 'import os\n'), ((35795, 35826), 'lilv.lilv_nodes_begin', 'lilv.lilv_nodes_begin', (['nodes.me'], {}), '(nodes.me)\n', (35816, 35826), False, 'import lilv\n'), ((21574, 21608), 'lilv.lilv_node_as_string', 'lilv.lilv_node_as_string', (['licsnode'], {}), '(licsnode)\n', (21598, 21608), False, 'import lilv\n'), ((24192, 24260), 'lilv.lilv_world_get', 'lilv.lilv_world_get', (['world.me', 'maintainer', 'ns_foaf.homepage.me', 'None'], {}), '(world.me, maintainer, ns_foaf.homepage.me, None)\n', (24211, 24260), False, 'import lilv\n'), ((26762, 26790), 'lilv.lilv_node_is_uri', 'lilv.lilv_node_is_uri', (['bnode'], {}), '(bnode)\n', (26783, 26790), False, 'import lilv\n'), ((29691, 29715), 'os.path.exists', 'os.path.exists', (['iconFile'], {}), '(iconFile)\n', (29705, 29715), False, 'import os\n'), ((30089, 30117), 'os.path.exists', 'os.path.exists', (['settingsFile'], {}), '(settingsFile)\n', (30103, 30117), False, 'import os\n'), ((30766, 30796), 'os.path.exists', 'os.path.exists', (['javascriptFile'], {}), '(javascriptFile)\n', (30780, 30796), False, 'import os\n'), ((31263, 31293), 'os.path.exists', 'os.path.exists', (['stylesheetFile'], {}), '(stylesheetFile)\n', (31277, 31293), False, 'import os\n'), ((31992, 32017), 'os.path.exists', 'os.path.exists', (['templFile'], {}), '(templFile)\n', (32006, 32017), False, 'import os\n'), ((35849, 35885), 'lilv.lilv_nodes_is_end', 'lilv.lilv_nodes_is_end', (['nodes.me', 'it'], {}), '(nodes.me, it)\n', (35871, 35885), False, 'import lilv\n'), ((35910, 35943), 'lilv.lilv_nodes_get', 'lilv.lilv_nodes_get', (['nodes.me', 'it'], {}), '(nodes.me, it)\n', (35929, 35943), False, 'import lilv\n'), ((35967, 36001), 
'lilv.lilv_nodes_next', 'lilv.lilv_nodes_next', (['nodes.me', 'it'], {}), '(nodes.me, it)\n', (35987, 36001), False, 'import lilv\n'), ((38855, 38887), 'lilv.lilv_node_as_string', 'lilv.lilv_node_as_string', (['psname'], {}), '(psname)\n', (38879, 38887), False, 'import lilv\n'), ((46187, 46222), 'lilv.lilv_scale_points_begin', 'lilv.lilv_scale_points_begin', (['nodes'], {}), '(nodes)\n', (46215, 46222), False, 'import lilv\n'), ((49383, 49411), 'lilv.lilv_node_as_uri', 'lilv.lilv_node_as_uri', (['uunit'], {}), '(uunit)\n', (49404, 49411), False, 'import lilv\n'), ((13823, 13857), 'lilv.lilv_node_as_string', 'lilv.lilv_node_as_string', (['block.me'], {}), '(block.me)\n', (13847, 13857), False, 'import lilv\n'), ((14523, 14589), 'lilv.lilv_world_get', 'lilv.lilv_world_get', (['world.me', 'block.me', 'ns_ingen.canvasX.me', 'None'], {}), '(world.me, block.me, ns_ingen.canvasX.me, None)\n', (14542, 14589), False, 'import lilv\n'), ((14640, 14706), 'lilv.lilv_world_get', 'lilv.lilv_world_get', (['world.me', 'block.me', 'ns_ingen.canvasY.me', 'None'], {}), '(world.me, block.me, ns_ingen.canvasY.me, None)\n', (14659, 14706), False, 'import lilv\n'), ((14733, 14764), 'lilv.lilv_node_as_bool', 'lilv.lilv_node_as_bool', (['enabled'], {}), '(enabled)\n', (14755, 14764), False, 'import lilv\n'), ((14824, 14854), 'lilv.lilv_node_as_int', 'lilv.lilv_node_as_int', (['builder'], {}), '(builder)\n', (14845, 14854), False, 'import lilv\n'), ((14898, 14928), 'lilv.lilv_node_as_int', 'lilv.lilv_node_as_int', (['release'], {}), '(release)\n', (14919, 14928), False, 'import lilv\n'), ((14976, 15007), 'lilv.lilv_node_as_int', 'lilv.lilv_node_as_int', (['minorver'], {}), '(minorver)\n', (14997, 15007), False, 'import lilv\n'), ((15056, 15087), 'lilv.lilv_node_as_int', 'lilv.lilv_node_as_int', (['microver'], {}), '(microver)\n', (15077, 15087), False, 'import lilv\n'), ((24343, 24377), 'lilv.lilv_node_as_string', 'lilv.lilv_node_as_string', (['homepage'], {}), '(homepage)\n', (24367, 24377), False, 'import lilv\n'), ((28761, 28795), 'os.path.join', 'os.path.join', (['bundle', '"""modgui.ttl"""'], {}), "(bundle, 'modgui.ttl')\n", (28773, 28795), False, 'import os\n'), ((33634, 33667), 'os.path.exists', 'os.path.exists', (["gui['screenshot']"], {}), "(gui['screenshot'])\n", (33648, 33667), False, 'import os\n'), ((34090, 34122), 'os.path.exists', 'os.path.exists', (["gui['thumbnail']"], {}), "(gui['thumbnail'])\n", (34104, 34122), False, 'import os\n'), ((43108, 43141), 'lilv.lilv_node_as_float', 'lilv.lilv_node_as_float', (['xminimum'], {}), '(xminimum)\n', (43131, 43141), False, 'import lilv\n'), ((43182, 43215), 'lilv.lilv_node_as_float', 'lilv.lilv_node_as_float', (['xmaximum'], {}), '(xmaximum)\n', (43205, 43215), False, 'import lilv\n'), ((46249, 46289), 'lilv.lilv_scale_points_is_end', 'lilv.lilv_scale_points_is_end', (['nodes', 'it'], {}), '(nodes, it)\n', (46278, 46289), False, 'import lilv\n'), ((46316, 46353), 'lilv.lilv_scale_points_get', 'lilv.lilv_scale_points_get', (['nodes', 'it'], {}), '(nodes, it)\n', (46342, 46353), False, 'import lilv\n'), ((46379, 46417), 'lilv.lilv_scale_points_next', 'lilv.lilv_scale_points_next', (['nodes', 'it'], {}), '(nodes, it)\n', (46406, 46417), False, 'import lilv\n'), ((46516, 46551), 'lilv.lilv_scale_point_get_label', 'lilv.lilv_scale_point_get_label', (['sp'], {}), '(sp)\n', (46547, 46551), False, 'import lilv\n'), ((46580, 46615), 'lilv.lilv_scale_point_get_value', 'lilv.lilv_scale_point_get_value', (['sp'], {}), '(sp)\n', (46611, 46615), False, 'import lilv\n'), 
((26892, 26920), 'lilv.lilv_node_as_uri', 'lilv.lilv_node_as_uri', (['bnode'], {}), '(bnode)\n', (26913, 26920), False, 'import lilv\n'), ((41768, 41802), 'lilv.lilv_node_as_string', 'lilv.lilv_node_as_string', (['xminimum'], {}), '(xminimum)\n', (41792, 41802), False, 'import lilv\n'), ((41849, 41880), 'lilv.lilv_node_as_int', 'lilv.lilv_node_as_int', (['xminimum'], {}), '(xminimum)\n', (41870, 41880), False, 'import lilv\n'), ((41951, 41984), 'lilv.lilv_node_as_float', 'lilv.lilv_node_as_float', (['xminimum'], {}), '(xminimum)\n', (41974, 41984), False, 'import lilv\n'), ((42424, 42458), 'lilv.lilv_node_as_string', 'lilv.lilv_node_as_string', (['xmaximum'], {}), '(xmaximum)\n', (42448, 42458), False, 'import lilv\n'), ((42505, 42536), 'lilv.lilv_node_as_int', 'lilv.lilv_node_as_int', (['xmaximum'], {}), '(xmaximum)\n', (42526, 42536), False, 'import lilv\n'), ((42607, 42640), 'lilv.lilv_node_as_float', 'lilv.lilv_node_as_float', (['xmaximum'], {}), '(xmaximum)\n', (42630, 42640), False, 'import lilv\n'), ((43251, 43285), 'lilv.lilv_node_as_string', 'lilv.lilv_node_as_string', (['xminimum'], {}), '(xminimum)\n', (43275, 43285), False, 'import lilv\n'), ((43415, 43449), 'lilv.lilv_node_as_string', 'lilv.lilv_node_as_string', (['xmaximum'], {}), '(xmaximum)\n', (43439, 43449), False, 'import lilv\n'), ((44635, 44668), 'lilv.lilv_node_as_float', 'lilv.lilv_node_as_float', (['xdefault'], {}), '(xdefault)\n', (44658, 44668), False, 'import lilv\n'), ((46797, 46828), 'lilv.lilv_node_as_string', 'lilv.lilv_node_as_string', (['label'], {}), '(label)\n', (46821, 46828), False, 'import lilv\n'), ((48080, 48110), 'lilv.lilv_node_as_float', 'lilv.lilv_node_as_float', (['value'], {}), '(value)\n', (48103, 48110), False, 'import lilv\n'), ((10712, 10742), 'lilv.lilv_node_as_string', 'lilv.lilv_node_as_string', (['tail'], {}), '(tail)\n', (10736, 10742), False, 'import lilv\n'), ((10810, 10840), 'lilv.lilv_node_as_string', 'lilv.lilv_node_as_string', (['head'], {}), '(head)\n', (10834, 10840), False, 'import lilv\n'), ((29052, 29075), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (29070, 29075), False, 'import os\n'), ((42012, 42040), 'math.fmod', 'fmod', (["ranges['minimum']", '(1.0)'], {}), "(ranges['minimum'], 1.0)\n", (42016, 42040), False, 'from math import fmod\n'), ((42668, 42696), 'math.fmod', 'fmod', (["ranges['maximum']", '(1.0)'], {}), "(ranges['maximum'], 1.0)\n", (42672, 42696), False, 'from math import fmod\n'), ((43912, 43946), 'lilv.lilv_node_as_string', 'lilv.lilv_node_as_string', (['xdefault'], {}), '(xdefault)\n', (43936, 43946), False, 'import lilv\n'), ((43997, 44028), 'lilv.lilv_node_as_int', 'lilv.lilv_node_as_int', (['xdefault'], {}), '(xdefault)\n', (44018, 44028), False, 'import lilv\n'), ((44107, 44140), 'lilv.lilv_node_as_float', 'lilv.lilv_node_as_float', (['xdefault'], {}), '(xdefault)\n', (44130, 44140), False, 'import lilv\n'), ((44708, 44742), 'lilv.lilv_node_as_string', 'lilv.lilv_node_as_string', (['xdefault'], {}), '(xdefault)\n', (44732, 44742), False, 'import lilv\n'), ((47219, 47250), 'lilv.lilv_node_as_string', 'lilv.lilv_node_as_string', (['value'], {}), '(value)\n', (47243, 47250), False, 'import lilv\n'), ((47289, 47317), 'lilv.lilv_node_as_int', 'lilv.lilv_node_as_int', (['value'], {}), '(value)\n', (47310, 47317), False, 'import lilv\n'), ((47384, 47414), 'lilv.lilv_node_as_float', 'lilv.lilv_node_as_float', (['value'], {}), '(value)\n', (47407, 47414), False, 'import lilv\n'), ((47901, 47932), 'lilv.lilv_node_as_string', 
'lilv.lilv_node_as_string', (['value'], {}), '(value)\n', (47925, 47932), False, 'import lilv\n'), ((44172, 44200), 'math.fmod', 'fmod', (["ranges['default']", '(1.0)'], {}), "(ranges['default'], 1.0)\n", (44176, 44200), False, 'from math import fmod\n'), ((47446, 47462), 'math.fmod', 'fmod', (['value', '(1.0)'], {}), '(value, 1.0)\n', (47450, 47462), False, 'from math import fmod\n')] |
import os
# NOTE: this script additionally relies on BeatmapIO, StdMapData, ReplayIO,
# StdReplayData, StdScoreData, StdScoreDataEnums and CmdUtils, which are
# assumed to be provided by an osu! analysis toolkit on the import path;
# the original file omits those imports.
'''
This script takes a beatmap file and a set of replays, computes the hit offsets for each replay, and saves
them to a *.csv file. It works for the std (osu!standard) gamemode only.
'''
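# A minimal usage sketch (illustrative paths only; SaveHitoffsets is defined
# below). Each replay in the folder yields one CSV of (time, hit_offset) rows
# under download/osu/hitoffsets/<beatmap_name>/:
#
#   saver = SaveHitoffsets()
#   saver.run('my_map', 'download/osu/my_map.osu', 'download/osu/replays')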
class SaveHitoffsets():
def create_dir(self, dir_path):
if not os.path.exists(dir_path):
            try:
                os.mkdir(dir_path)
            except OSError:
                print(f'failed to create folder: {dir_path}')
def run(self, beatmap_name, beatmap_filepath, replay_folder):
self.create_dir('download/osu/hitoffsets')
self.create_dir(f'download/osu/hitoffsets/{beatmap_name}')
replay_filenames = [ f for f in os.listdir(replay_folder) if os.path.isfile(os.path.join(replay_folder, f)) ]
replay_filepaths = [ f'{replay_folder}/{replay_filename}' for replay_filename in replay_filenames ]
print('Loading map...')
beatmap = BeatmapIO.open_beatmap(beatmap_filepath)
print('Loading map data...')
map_data = StdMapData.get_aimpoint_data(beatmap.hitobjects)
print('Loading replays...')
replays = [ ReplayIO.open_replay(replay_filepath) for replay_filepath in replay_filepaths ]
print('Loading replay data...')
replay_data = [ StdReplayData.get_replay_data(replay.play_data) for replay in replays ]
print('Loading scores...')
score_data = [ StdScoreData.get_score_data(data, map_data) for data in replay_data ]
for replay_filename, score in zip(replay_filenames, score_data):
replay_filename = replay_filename.split('.')[0]
data = score[:, [StdScoreDataEnums.TIME.value, StdScoreDataEnums.HIT_OFFSET.value ]]
CmdUtils.export_csv(f'download/osu/hitoffsets/{beatmap_name}/{replay_filename}', data.T) | [
"os.path.exists",
"os.listdir",
"os.path.join",
"os.mkdir"
] | [((264, 288), 'os.path.exists', 'os.path.exists', (['dir_path'], {}), '(dir_path)\n', (278, 288), False, 'import os\n'), ((307, 325), 'os.mkdir', 'os.mkdir', (['dir_path'], {}), '(dir_path)\n', (315, 325), False, 'import os\n'), ((631, 656), 'os.listdir', 'os.listdir', (['replay_folder'], {}), '(replay_folder)\n', (641, 656), False, 'import os\n'), ((675, 705), 'os.path.join', 'os.path.join', (['replay_folder', 'f'], {}), '(replay_folder, f)\n', (687, 705), False, 'import os\n')] |
import re
import pytest
from mixins.fsm import FinalStateMachineMixin
MSG_START = 'start'
MSG_COMPLETE = 'complete'
MSG_BREAK = 'break'
MSG_RESTART = 'repair'
MSG_UNREGISTERED = 'unknown'
class SampleTask(FinalStateMachineMixin):
STATE_NEW = 'new'
STATE_RUNNING = 'running'
STATE_READY = 'ready'
STATE_FAILED = 'failed'
def __init__(self):
self.status = self.initial_state
@property
def state_field_name(self):
return 'status'
@property
def registered_messages(self):
return [MSG_START, MSG_COMPLETE, MSG_BREAK, MSG_RESTART]
@property
def state_transitions(self):
return {
self.STATE_NEW: {
MSG_START: self._make_transition(self.STATE_RUNNING),
MSG_COMPLETE: self._make_transition(self.STATE_READY),
},
self.STATE_RUNNING: {
MSG_COMPLETE: self._make_transition(self.STATE_READY),
MSG_BREAK: self._make_transition(self.STATE_FAILED),
},
self.STATE_READY: {
# all messages are ignored
},
self.STATE_FAILED: {
MSG_RESTART: self._make_transition(self.STATE_RUNNING),
},
}
@property
def initial_state(self):
return self.STATE_NEW
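# The mixin is driven entirely through accept_message(); a quick walk-through
# of the transition table above (the tests below exercise the same paths):
#
#   task = SampleTask()               # status == 'new'
#   task.accept_message(MSG_START)    # status == 'running'
#   task.accept_message(MSG_BREAK)    # status == 'failed'
#   task.accept_message(MSG_RESTART)  # status == 'running'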
@pytest.fixture
def sample_task():
return SampleTask()
def test_fsm_uses_state(sample_task):
sample_task.status = SampleTask.STATE_READY
assert getattr(sample_task, sample_task.state_field_name) == sample_task.status
def test_full_success_path(sample_task):
assert sample_task.status == SampleTask.STATE_NEW
sample_task.accept_message(MSG_START)
assert sample_task.status == SampleTask.STATE_RUNNING
sample_task.accept_message(MSG_BREAK)
assert sample_task.status == SampleTask.STATE_FAILED
sample_task.accept_message(MSG_RESTART)
assert sample_task.status == SampleTask.STATE_RUNNING
sample_task.accept_message(MSG_COMPLETE)
assert sample_task.status == SampleTask.STATE_READY
def test_unregistered_msg_causes_failure(sample_task):
expected_msg = "FSM: Unexpected message ({}) is received.".format(MSG_UNREGISTERED)
    # pytest removed the old `message=` kwarg; `match=` asserts on the exception text
    with pytest.raises(Exception, match=re.escape(expected_msg)):
sample_task.accept_message(MSG_UNREGISTERED)
def test_short_success_path(sample_task):
assert sample_task.status == SampleTask.STATE_NEW
sample_task.accept_message(MSG_BREAK)
assert sample_task.status == SampleTask.STATE_NEW
sample_task.accept_message(MSG_COMPLETE)
assert sample_task.status == SampleTask.STATE_READY
for msg in [MSG_RESTART, MSG_START, MSG_BREAK]:
sample_task.accept_message(msg)
assert sample_task.status == SampleTask.STATE_READY
class UnrestrictedFsm(SampleTask):
@property
def registered_messages(self):
return []
@pytest.fixture
def unrestricted_fsm():
return UnrestrictedFsm()
def test_unregistered_msg_ignored(unrestricted_fsm):
assert unrestricted_fsm.status == SampleTask.STATE_NEW
unrestricted_fsm.accept_message(MSG_UNREGISTERED)
assert unrestricted_fsm.status == SampleTask.STATE_NEW
def test_unregistered_but_effective_msg(unrestricted_fsm):
assert unrestricted_fsm.status == SampleTask.STATE_NEW
unrestricted_fsm.accept_message(MSG_COMPLETE)
assert unrestricted_fsm.status == SampleTask.STATE_READY
class NotImplementedFsm(FinalStateMachineMixin):
pass
@pytest.fixture
def incomplete_fsm():
return NotImplementedFsm()
def test_state_field_name(incomplete_fsm):
with pytest.raises(NotImplementedError):
incomplete_fsm.state_field_name
def test_registered_messages(incomplete_fsm):
with pytest.raises(NotImplementedError):
incomplete_fsm.registered_messages
def test_state_transitions(incomplete_fsm):
with pytest.raises(NotImplementedError):
incomplete_fsm.state_transitions
| [
"pytest.raises"
] | [((2205, 2251), 'pytest.raises', 'pytest.raises', (['Exception'], {'message': 'expected_msg'}), '(Exception, message=expected_msg)\n', (2218, 2251), False, 'import pytest\n'), ((3572, 3606), 'pytest.raises', 'pytest.raises', (['NotImplementedError'], {}), '(NotImplementedError)\n', (3585, 3606), False, 'import pytest\n'), ((3705, 3739), 'pytest.raises', 'pytest.raises', (['NotImplementedError'], {}), '(NotImplementedError)\n', (3718, 3739), False, 'import pytest\n'), ((3839, 3873), 'pytest.raises', 'pytest.raises', (['NotImplementedError'], {}), '(NotImplementedError)\n', (3852, 3873), False, 'import pytest\n')] |
import requests
import json
import os
import sys
import shutil
from .azureblob import AzureBlob
from .azuretable import AzureTable
from .timeutil import get_time_offset, str_to_dt, dt_to_str
from .series import Series
from .constant import STATUS_SUCCESS, STATUS_FAIL
from telemetry import log
# Get the meta of a specific metric from TSANA
# Parameters:
#   config: a dict object which should include TSANA_API_KEY, TSANA_API_ENDPOINT, SERIES_LIMIT
#   metric_id: a UUID string
# Return:
#   meta: the meta of the specified metric, or None if the request fails
def get_metric_meta(config, metric_id):
headers = {
"x-api-key": config.tsana_api_key,
"Content-Type": "application/json"
}
response = requests.get(config.tsana_api_endpoint + '/metrics/' + metric_id + '/meta', headers = headers)
if response.status_code == 200:
return response.json()
else:
return None
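# Illustrative only: a typical call to get_metric_meta(). The metric ID below
# is a placeholder UUID, not a real metric.
def _demo_get_metric_meta(config):
    meta = get_metric_meta(config, '00000000-0000-0000-0000-000000000000')
    if meta is not None:
        # e.g. 'Daily', 'Hourly' or 'Custom'; these fields are used by do_verify() below
        print(meta['granularityName'], meta['dataStartFrom'])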
# Verify that the data can be used by this application
# Parameters:
#   series_sets: an array of series sets
# parameters: parameters of this application.
# Return:
# result: STATUS_FAIL / STATUS_SUCCESS
# message: a description of the result
def do_verify(config, parameters, subscription):
# common headers
headers = {
# The key to access TSANA
"x-api-key": config.tsana_api_key,
"Content-Type": "application/json"
}
# ------TO BE REPLACED: Other application just replace below part-------
    # For forecast, check that the factors and the target have the same granularity, and that each factor contains only one series
meta = get_metric_meta(config, parameters['instance']['params']['target']['metricId'])
if meta is None:
        return STATUS_FAIL, 'Target not found. '
target_gran = meta['granularityName']
# Only for custom, the granularity amount is meaningful which is the number of seconds
target_gran_amount = meta['granularityAmount']
for data in parameters['seriesSets']:
if target_gran != data['metricMeta']['granularityName'] or (target_gran == 'Custom' and target_gran_amount != data['metricMeta']['granularityAmount']):
return STATUS_FAIL, 'Granularity must be identical between target and factors. '
    # Check the series count; each factor should contain only one series
seriesCount = 0
for data in parameters['seriesSets']:
dim = {}
for dimkey in data['dimensionFilter']:
dim[dimkey] = [data['dimensionFilter'][dimkey]]
dt = dt_to_str(str_to_dt(meta['dataStartFrom']))
        para = dict(metricId=data['metricId'], dimensions=dim, count=2, startTime=dt)  # count=2 is enough to reveal an ambiguous factor (more than one matching series)
response = requests.post(config.tsana_api_endpoint + '/metrics/' + data['metricId'] + '/rank-series', data = json.dumps(para), headers = headers)
ret = response.json()
if ret is None or response.status_code != 200 or 'value' not in ret:
            return STATUS_FAIL, 'Reading series rank failed. '
seriesCount += len(ret['value'])
if seriesCount > config.series_limit:
return STATUS_FAIL, 'Cannot accept ambiguous factors or too many series in the group, limit is ' + str(config.series_limit) + '.'
return STATUS_SUCCESS, ''
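# For reference, the minimal shape of `parameters` that do_verify() reads
# (field values are illustrative):
#
#   {
#       'instance': {'params': {'target': {'metricId': '<uuid>'}}},
#       'seriesSets': [{
#           'metricId': '<uuid>',
#           'dimensionFilter': {'city': 'Berlin'},
#           'metricMeta': {'granularityName': 'Daily', 'granularityAmount': 0},
#       }],
#   }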
# Query time series from TSANA
# Parameters:
# config: a dict object which should include TSANA_API_KEY, TSANA_API_ENDPOINT
# series_sets: Array of series set
#   start_time: inclusive, the first timestamp to be queried
# end_time: exclusive
# offset: a number will be added to each timestamp of each time-series. The unit is defined by granularity
# granularityName: if Offset > 0, the granularityName is Monthly / Weekly / Daily / Hourly / Minutely / Secondly / Custom
# granularityAmount: if granularityName is Custom, granularityAmount is the seconds of the exact granularity
# Return:
#   An array of Series objects
def get_timeseries(config, series_sets, start_time, end_time, offset = 0, granularityName = None, granularityAmount = 0):
# common headers
headers = {
"x-api-key": config.tsana_api_key,
"Content-Type": "application/json"
}
if offset != 0 and granularityName is None:
offset = 0
end_str = dt_to_str(end_time)
start_str = dt_to_str(start_time)
dedup = {}
series = []
# Query each series's tag
for data in series_sets:
dim = {}
if 'dimensionFilter' not in data:
data['dimensionFilter'] = data['filters']
for dimkey in data['dimensionFilter']:
dim[dimkey] = [data['dimensionFilter'][dimkey]]
para = dict(metricId=data['metricId'], dimensions=dim, count=1, startTime=start_str, endTime=end_str)
response = requests.post(config.tsana_api_endpoint + '/metrics/' + data['metricId'] + '/rank-series', data = json.dumps(para), headers = headers)
if response.status_code == 200:
ret = response.json()
for s in ret['value']:
if s['seriesId'] not in dedup:
s['startTime'] = start_str
s['endTime'] = end_str
s['dimension'] = s['dimensions']
del s['dimensions']
series.append(s)
dedup[s['seriesId']] = True
else:
log.info("Fail to call rank %s", json.dumps(para))
return None
# Query the data
multi_series_data = None
if len(series) > 0:
response = requests.post(config.tsana_api_endpoint + '/metrics/series/data', data = json.dumps(dict(value=series)), headers = headers)
if response.status_code == 200:
ret = response.json()
if granularityName is not None:
multi_series_data = [
Series(factor['id']['metricId'], factor['id']['seriesId'], factor['id']['dimension'],
[dict(timestamp = get_time_offset(str_to_dt(y[0]), (granularityName, granularityAmount),
offset)
, value = y[1])
for y in factor['values']])
for factor in ret['value']
]
else:
multi_series_data = [
Series(factor['id']['metricId'], factor['id']['seriesId'], factor['id']['dimension'],
value = [dict(timestamp = y[0]
, value = y[1])
for y in factor['values']])
for factor in ret['value']
]
else:
log.info("Fail to call %s ", json.dumps(para))
else:
log.info("Series is empty")
return multi_series_data
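# Illustrative only: fetching data for a single filtered series. The metric ID
# and dimension values are placeholders; start_time/end_time are datetimes.
def _demo_get_timeseries(config, start_time, end_time):
    series_sets = [{
        'metricId': '00000000-0000-0000-0000-000000000000',
        'dimensionFilter': {'region': 'eu'},
    }]
    # Returns a list of Series objects, or None if the rank-series call failed
    return get_timeseries(config, series_sets, start_time, end_time)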
def upload_data(config, data_dir, model_key, time_key):
zip_file_base = os.path.join(config.model_temp_dir, 'data')
zip_file = zip_file_base + '.zip'
if os.path.exists(zip_file):
os.remove(zip_file)
shutil.make_archive(zip_file_base, 'zip', data_dir)
azure_blob = AzureBlob(config.az_tsana_model_blob_connection)
container_name = config.tsana_app_name
blob_name = model_key + '_' + time_key
try:
azure_blob.create_container(container_name)
except:
print("Unexpected error:", sys.exc_info()[0])
with open(zip_file, "rb") as data:
azure_blob.upload_blob(container_name, blob_name, data)
os.remove(zip_file)
data_blob_info = {}
data_blob_info['az_blob_connection'] = config.az_tsana_model_blob_connection
data_blob_info['container_name'] = container_name
data_blob_info['blob_name'] = blob_name
return data_blob_info | [
"os.path.exists",
"shutil.make_archive",
"json.dumps",
"os.path.join",
"requests.get",
"sys.exc_info",
"telemetry.log.info",
"os.remove"
] | [((742, 838), 'requests.get', 'requests.get', (["(config.tsana_api_endpoint + '/metrics/' + metric_id + '/meta')"], {'headers': 'headers'}), "(config.tsana_api_endpoint + '/metrics/' + metric_id + '/meta',\n headers=headers)\n", (754, 838), False, 'import requests\n'), ((7019, 7062), 'os.path.join', 'os.path.join', (['config.model_temp_dir', '"""data"""'], {}), "(config.model_temp_dir, 'data')\n", (7031, 7062), False, 'import os\n'), ((7108, 7132), 'os.path.exists', 'os.path.exists', (['zip_file'], {}), '(zip_file)\n', (7122, 7132), False, 'import os\n'), ((7166, 7217), 'shutil.make_archive', 'shutil.make_archive', (['zip_file_base', '"""zip"""', 'data_dir'], {}), "(zip_file_base, 'zip', data_dir)\n", (7185, 7217), False, 'import shutil\n'), ((7612, 7631), 'os.remove', 'os.remove', (['zip_file'], {}), '(zip_file)\n', (7621, 7631), False, 'import os\n'), ((6880, 6907), 'telemetry.log.info', 'log.info', (['"""Series is empty"""'], {}), "('Series is empty')\n", (6888, 6907), False, 'from telemetry import log\n'), ((7142, 7161), 'os.remove', 'os.remove', (['zip_file'], {}), '(zip_file)\n', (7151, 7161), False, 'import os\n'), ((2810, 2826), 'json.dumps', 'json.dumps', (['para'], {}), '(para)\n', (2820, 2826), False, 'import json\n'), ((4863, 4879), 'json.dumps', 'json.dumps', (['para'], {}), '(para)\n', (4873, 4879), False, 'import json\n'), ((5398, 5414), 'json.dumps', 'json.dumps', (['para'], {}), '(para)\n', (5408, 5414), False, 'import json\n'), ((6843, 6859), 'json.dumps', 'json.dumps', (['para'], {}), '(para)\n', (6853, 6859), False, 'import json\n'), ((7484, 7498), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (7496, 7498), False, 'import sys\n')] |
import click
from rastervision.command import Command
class TrainCommand(Command):
def __init__(self, task):
self.task = task
def run(self, tmp_dir=None):
if not tmp_dir:
tmp_dir = self.get_tmp_dir()
msg = 'Training model...'
click.echo(click.style(msg, fg='green'))
self.task.train(tmp_dir)
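# Usage sketch (the task object is assumed to expose a train(tmp_dir) method,
# as required by this command; tmp_dir defaults to a fresh temporary folder):
#
#   TrainCommand(task).run()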
| [
"click.style"
] | [((293, 321), 'click.style', 'click.style', (['msg'], {'fg': '"""green"""'}), "(msg, fg='green')\n", (304, 321), False, 'import click\n')] |
import inspect
from IPython.core.interactiveshell import InteractiveShell
from IPython.core.magic import cell_magic, magics_class, Magics
from IPython.core.magic_arguments import (argument, magic_arguments,
parse_argstring)
import warnings
from htools.meta import timebox
@magics_class
class InteractiveMagic(Magics):
@cell_magic
@magic_arguments()
@argument('-p', action='store_true',
help='Boolean flag. If passed, the change will apply for the '
'rest of the notebook, or until the user changes it again. '
'The default behavior is to apply the change only to the '
'current cell.')
def talk(self, line=None, cell=None):
"""When Jupyter notebook is in default mode where
        ast_node_interactivity='last' (i.e. only the value of the last statement
        is displayed), this will run the current cell while printing all
statements. It then resets the mode so future cells only print the last
statement again.
Examples
---------
In the example below, each cell contains two statements. Notice that
the cell containing the magic displays both lines of output, while the
other cells only display the last output.
>>> 5 + 10
>>> 6 + 11
17
%%talk
>>> 6 + 2
>>> 3 + 1
8
4
>>> 1 + 2
>>> 3 + 4
7
"""
self._adjust_verbosity(cell, 'all', parse_argstring(self.talk, line))
@cell_magic
@magic_arguments()
@argument('-p', action='store_true',
help='Boolean flag. If passed, the change will apply for the '
'rest of the notebook, or until the user changes it again. '
'The default behavior is to apply the change only to the '
'current cell.')
def hush(self, line=None, cell=None):
"""The reverse of the `talk` magic. When the notebook is in
        ast_node_interactivity='all' mode, this can be used to suppress outputs
other than the last one for a single cell. Cells that follow will
return to the display mode set for the whole notebook.
Examples
---------
In the example below, each cell contains two statements. Notice that
the cell containing the magic only displays the last line of output,
while the other cells display both outputs.
>>> 5 + 10
>>> 6 + 11
15
17
%%hush
>>> 6 + 2
>>> 3 + 1
4
>>> 1 + 2
>>> 3 + 4
3
7
"""
self._adjust_verbosity(cell, 'last', parse_argstring(self.hush, line))
@cell_magic
@magic_arguments()
@argument('-p', action='store_true',
help='Boolean flag. If passed, the change will apply for the '
'rest of the notebook, or until the user changes it again. '
'The default behavior is to apply the change only to the '
'current cell.')
def mute(self, line=None, cell=None):
"""A more extreme version of the `hush` magic that suppresses all
        output from a cell. Cells that follow will return to the previously
        configured display mode unless the -p flag (for persist) is provided.
Examples
---------
In the example below, each cell contains two statements. Notice that
the cell containing the magic displays no output, while the other cells
display the final output.
>>> 5 + 10
>>> 6 + 11
17
%%mute
>>> 6 + 2
>>> 3 + 1
>>> 1 + 2
>>> 3 + 4
7
"""
self._adjust_verbosity(cell, 'none', parse_argstring(self.mute, line))
def _adjust_verbosity(self, cell, mode, args):
old_setting = InteractiveShell.ast_node_interactivity
InteractiveShell.ast_node_interactivity = mode
self.shell.run_cell(cell)
if not args.p:
InteractiveShell.ast_node_interactivity = old_setting
@magics_class
class WarningMagic(Magics):
@cell_magic
@magic_arguments()
@argument('-p', action='store_true', help='Boolean flag. If passed, the '
'change will apply for the rest of the notebook, or until the '
'user changes it again. The default behavior is to apply the '
'change only to the current cell.')
def lax(self, line, cell):
"""Silence warnings for a cell. The -p flag can be used to make the
change persist, at least until the user changes it again.
"""
args = parse_argstring(self.lax, line)
self._warn(cell, 'ignore', args.p)
@cell_magic
@magic_arguments()
@argument('-p', action='store_true', help='Boolean flag. If passed, the '
'change will apply for the rest of the notebook, or until the '
'user changes it again. The default behavior is to apply the '
'change only to the current cell.')
def nag(self, line, cell):
"""Silence warnings for a cell. The -p flag can be used to make the
change persist, at least until the user changes it again.
"""
args = parse_argstring(self.nag, line)
self._warn(cell, 'always', args.p)
def _warn(self, cell, mode, persist):
"""Base method for lax and nag. These could easily be handled in a
single method with optional flags, but I find the usage to be more
intuitive when the names are different, and generally prefer flag-free
magics since the goal is ease of use.
The persist flag is processed in the child methods because parsing
references the method that was called.
"""
warnings.filterwarnings(mode)
self.shell.run_cell(cell)
# Reset manually because warnings.resetwarnings() behaved erratically.
if not persist:
out_modes = {'ignore', 'always'}
out_modes.remove(mode)
warnings.filterwarnings(list(out_modes)[0])
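    # Usage sketch (these are cell magics, so each example is its own cell):
    #
    #   %%lax
    #   noisy_call()      # warnings from this cell are silenced
    #
    #   %%nag -p
    #   noisy_call()      # warnings are forced on for the rest of the notebook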
@magics_class
class FunctionRacerMagic(Magics):
@cell_magic
@magic_arguments()
@argument('-n', help='Number of loops when timing functions (inner loop).')
@argument('-r', help='Number of runs when timing functions (outer loop).')
def race(self, line, cell):
"""Time 2 or more functions to allow the user to easily compare speeds.
Each line will be timed separately, so a function call cannot take up
multiple lines. This is essentially a convenient wrapper for the
%%timeit magic that ensures all functions are timed with the same
choice of parameters. (When timing each function separately, I found
that during the testing process I would often end up changing some
function or timeit parameters in one case but forget to change it for
another. This magic aims to prevent that situation.)
Examples
---------
Example 1: A fairly standard case where we time three possible
implementations of a function to see which is fastest.
%%race -n 10 -r 3
>>> tokenizer_v1(text)
>>> tokenizer_v2(text)
>>> tokenizer_v3(text)
Example 2: If a function requires many arguments or if parameter
names are long, consider passing in a list or dictionary of arguments.
%%race
>>> many_args_func_v1(**params)
>>> many_args_func_v2(**params)
"""
args = parse_argstring(self.race, line)
n = args.n or 5
r = args.r or 3
# Split cell into lines of code to execute.
rows = [row for row in cell.strip().split('\n')
if not row.startswith('#')]
prefix = f'%timeit -n {n} -r {r} '
for row in rows:
self.shell.run_cell(prefix + row)
@magics_class
class TimeboxMagic(Magics):
"""Timebox a cell's execution to a user-specified duration. As with any
standard try/except block, note that values can change during execution
even if an error is eventually thrown (i.e. no rollback occurs).
Sample usage:
%%timebox 3
# Throw error if cell takes longer than 3 seconds to execute.
output = slow_function(*args)
%%timebox 3 -p
# Attempt to execute cell for 3 seconds, then give up. Message is printed
# stating that time is exceeded but no error is thrown.
output = slow_function(*args)
"""
@cell_magic
@magic_arguments()
@argument('time', type=int,
help='Max number of seconds before throwing error.')
@argument('-p', action='store_true',
help='Boolean flag: if provided, use permissive '
'execution (if the cell exceeds the specified '
'time, no error will be thrown, meaning '
'following cells can still execute.) If '
'flag is not provided, default behavior is to '
'raise a TimeExceededError and halt notebook '
'execution.')
def timebox(self, line=None, cell=None):
args = parse_argstring(self.timebox, line)
if args.p: cell = self._make_cell_permissive(cell)
with timebox(args.time) as tb:
self.shell.run_cell(cell)
@staticmethod
def _make_cell_permissive(cell):
"""Place whole cell in try/except block. Built-in error handling in
timebox context manager doesn't work because ipython shell has
its own logic for error handling, so we need to do this messy string
manipulation.
"""
robust_cell = (
'try:\n\t' + cell.replace('\n', '\n\t')
+ '\nexcept:\n\tprint("Time exceeded. '
'\\nWarning: objects may have changed during execution.")'
)
return robust_cell
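        # For reference, a cell body of "x = slow()" is rewritten into:
        #
        #   try:
        #       x = slow()
        #   except:
        #       print("Time exceeded. \nWarning: objects may have changed during execution.")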
# Automatically register all magics defined in this module.
magics = (obj for obj in map(locals().get, dir())
if inspect.isclass(obj)
and obj.__name__ != 'Magics'
and issubclass(obj, Magics))
get_ipython().register_magics(*magics)
| [
"IPython.core.magic_arguments.parse_argstring",
"htools.meta.timebox",
"IPython.core.magic_arguments.argument",
"IPython.core.magic_arguments.magic_arguments",
"inspect.isclass",
"warnings.filterwarnings"
] | [((385, 402), 'IPython.core.magic_arguments.magic_arguments', 'magic_arguments', ([], {}), '()\n', (400, 402), False, 'from IPython.core.magic_arguments import argument, magic_arguments, parse_argstring\n'), ((408, 644), 'IPython.core.magic_arguments.argument', 'argument', (['"""-p"""'], {'action': '"""store_true"""', 'help': '"""Boolean flag. If passed, the change will apply for the rest of the notebook, or until the user changes it again. The default behavior is to apply the change only to the current cell."""'}), "('-p', action='store_true', help=\n 'Boolean flag. If passed, the change will apply for the rest of the notebook, or until the user changes it again. The default behavior is to apply the change only to the current cell.'\n )\n", (416, 644), False, 'from IPython.core.magic_arguments import argument, magic_arguments, parse_argstring\n'), ((1595, 1612), 'IPython.core.magic_arguments.magic_arguments', 'magic_arguments', ([], {}), '()\n', (1610, 1612), False, 'from IPython.core.magic_arguments import argument, magic_arguments, parse_argstring\n'), ((1618, 1854), 'IPython.core.magic_arguments.argument', 'argument', (['"""-p"""'], {'action': '"""store_true"""', 'help': '"""Boolean flag. If passed, the change will apply for the rest of the notebook, or until the user changes it again. The default behavior is to apply the change only to the current cell."""'}), "('-p', action='store_true', help=\n 'Boolean flag. If passed, the change will apply for the rest of the notebook, or until the user changes it again. The default behavior is to apply the change only to the current cell.'\n )\n", (1626, 1854), False, 'from IPython.core.magic_arguments import argument, magic_arguments, parse_argstring\n'), ((2789, 2806), 'IPython.core.magic_arguments.magic_arguments', 'magic_arguments', ([], {}), '()\n', (2804, 2806), False, 'from IPython.core.magic_arguments import argument, magic_arguments, parse_argstring\n'), ((2812, 3048), 'IPython.core.magic_arguments.argument', 'argument', (['"""-p"""'], {'action': '"""store_true"""', 'help': '"""Boolean flag. If passed, the change will apply for the rest of the notebook, or until the user changes it again. The default behavior is to apply the change only to the current cell."""'}), "('-p', action='store_true', help=\n 'Boolean flag. If passed, the change will apply for the rest of the notebook, or until the user changes it again. The default behavior is to apply the change only to the current cell.'\n )\n", (2820, 3048), False, 'from IPython.core.magic_arguments import argument, magic_arguments, parse_argstring\n'), ((4236, 4253), 'IPython.core.magic_arguments.magic_arguments', 'magic_arguments', ([], {}), '()\n', (4251, 4253), False, 'from IPython.core.magic_arguments import argument, magic_arguments, parse_argstring\n'), ((4259, 4495), 'IPython.core.magic_arguments.argument', 'argument', (['"""-p"""'], {'action': '"""store_true"""', 'help': '"""Boolean flag. If passed, the change will apply for the rest of the notebook, or until the user changes it again. The default behavior is to apply the change only to the current cell."""'}), "('-p', action='store_true', help=\n 'Boolean flag. If passed, the change will apply for the rest of the notebook, or until the user changes it again. 
The default behavior is to apply the change only to the current cell.'\n )\n", (4267, 4495), False, 'from IPython.core.magic_arguments import argument, magic_arguments, parse_argstring\n'), ((4834, 4851), 'IPython.core.magic_arguments.magic_arguments', 'magic_arguments', ([], {}), '()\n', (4849, 4851), False, 'from IPython.core.magic_arguments import argument, magic_arguments, parse_argstring\n'), ((4857, 5093), 'IPython.core.magic_arguments.argument', 'argument', (['"""-p"""'], {'action': '"""store_true"""', 'help': '"""Boolean flag. If passed, the change will apply for the rest of the notebook, or until the user changes it again. The default behavior is to apply the change only to the current cell."""'}), "('-p', action='store_true', help=\n 'Boolean flag. If passed, the change will apply for the rest of the notebook, or until the user changes it again. The default behavior is to apply the change only to the current cell.'\n )\n", (4865, 5093), False, 'from IPython.core.magic_arguments import argument, magic_arguments, parse_argstring\n'), ((6247, 6264), 'IPython.core.magic_arguments.magic_arguments', 'magic_arguments', ([], {}), '()\n', (6262, 6264), False, 'from IPython.core.magic_arguments import argument, magic_arguments, parse_argstring\n'), ((6270, 6344), 'IPython.core.magic_arguments.argument', 'argument', (['"""-n"""'], {'help': '"""Number of loops when timing functions (inner loop)."""'}), "('-n', help='Number of loops when timing functions (inner loop).')\n", (6278, 6344), False, 'from IPython.core.magic_arguments import argument, magic_arguments, parse_argstring\n'), ((6350, 6423), 'IPython.core.magic_arguments.argument', 'argument', (['"""-r"""'], {'help': '"""Number of runs when timing functions (outer loop)."""'}), "('-r', help='Number of runs when timing functions (outer loop).')\n", (6358, 6423), False, 'from IPython.core.magic_arguments import argument, magic_arguments, parse_argstring\n'), ((8597, 8614), 'IPython.core.magic_arguments.magic_arguments', 'magic_arguments', ([], {}), '()\n', (8612, 8614), False, 'from IPython.core.magic_arguments import argument, magic_arguments, parse_argstring\n'), ((8620, 8699), 'IPython.core.magic_arguments.argument', 'argument', (['"""time"""'], {'type': 'int', 'help': '"""Max number of seconds before throwing error."""'}), "('time', type=int, help='Max number of seconds before throwing error.')\n", (8628, 8699), False, 'from IPython.core.magic_arguments import argument, magic_arguments, parse_argstring\n'), ((8719, 9037), 'IPython.core.magic_arguments.argument', 'argument', (['"""-p"""'], {'action': '"""store_true"""', 'help': '"""Boolean flag: if provided, use permissive execution (if the cell exceeds the specified time, no error will be thrown, meaning following cells can still execute.) If flag is not provided, default behavior is to raise a TimeExceededError and halt notebook execution."""'}), "('-p', action='store_true', help=\n 'Boolean flag: if provided, use permissive execution (if the cell exceeds the specified time, no error will be thrown, meaning following cells can still execute.) 
If flag is not provided, default behavior is to raise a TimeExceededError and halt notebook execution.'\n )\n", (8727, 9037), False, 'from IPython.core.magic_arguments import argument, magic_arguments, parse_argstring\n'), ((4737, 4768), 'IPython.core.magic_arguments.parse_argstring', 'parse_argstring', (['self.lax', 'line'], {}), '(self.lax, line)\n', (4752, 4768), False, 'from IPython.core.magic_arguments import argument, magic_arguments, parse_argstring\n'), ((5335, 5366), 'IPython.core.magic_arguments.parse_argstring', 'parse_argstring', (['self.nag', 'line'], {}), '(self.nag, line)\n', (5350, 5366), False, 'from IPython.core.magic_arguments import argument, magic_arguments, parse_argstring\n'), ((5871, 5900), 'warnings.filterwarnings', 'warnings.filterwarnings', (['mode'], {}), '(mode)\n', (5894, 5900), False, 'import warnings\n'), ((7618, 7650), 'IPython.core.magic_arguments.parse_argstring', 'parse_argstring', (['self.race', 'line'], {}), '(self.race, line)\n', (7633, 7650), False, 'from IPython.core.magic_arguments import argument, magic_arguments, parse_argstring\n'), ((9234, 9269), 'IPython.core.magic_arguments.parse_argstring', 'parse_argstring', (['self.timebox', 'line'], {}), '(self.timebox, line)\n', (9249, 9269), False, 'from IPython.core.magic_arguments import argument, magic_arguments, parse_argstring\n'), ((1539, 1571), 'IPython.core.magic_arguments.parse_argstring', 'parse_argstring', (['self.talk', 'line'], {}), '(self.talk, line)\n', (1554, 1571), False, 'from IPython.core.magic_arguments import argument, magic_arguments, parse_argstring\n'), ((2733, 2765), 'IPython.core.magic_arguments.parse_argstring', 'parse_argstring', (['self.hush', 'line'], {}), '(self.hush, line)\n', (2748, 2765), False, 'from IPython.core.magic_arguments import argument, magic_arguments, parse_argstring\n'), ((3844, 3876), 'IPython.core.magic_arguments.parse_argstring', 'parse_argstring', (['self.mute', 'line'], {}), '(self.mute, line)\n', (3859, 3876), False, 'from IPython.core.magic_arguments import argument, magic_arguments, parse_argstring\n'), ((9342, 9360), 'htools.meta.timebox', 'timebox', (['args.time'], {}), '(args.time)\n', (9349, 9360), False, 'from htools.meta import timebox\n'), ((10081, 10101), 'inspect.isclass', 'inspect.isclass', (['obj'], {}), '(obj)\n', (10096, 10101), False, 'import inspect\n')] |
import asyncio
from datetime import datetime, timedelta
import functools
from http import HTTPStatus
import logging
import os
import pickle
from random import random
import re
import struct
from typing import Any, Callable, Coroutine, Dict, List, Optional, Sequence, Tuple
import warnings
import aiohttp.web
from aiohttp.web_runner import GracefulExit
import aiomcache
from connexion.exceptions import AuthenticationProblem, OAuthProblem, Unauthorized
from connexion.lifecycle import ConnexionRequest
import connexion.security
from connexion.utils import deep_get
with warnings.catch_warnings():
# this will suppress all warnings in this block
warnings.filterwarnings("ignore", message="int_from_bytes is deprecated")
from jose import jwt
from multidict import CIMultiDict
import sentry_sdk
from sqlalchemy import select
from athenian.api.async_utils import gather
from athenian.api.cache import cached
from athenian.api.controllers.account import get_user_account_status
from athenian.api.kms import AthenianKMS
from athenian.api.models.state.models import Account, God, UserToken
from athenian.api.models.web import ForbiddenError, GenericError
from athenian.api.models.web.user import User
from athenian.api.request import AthenianWebRequest
from athenian.api.response import ResponseError
from athenian.api.tracing import sentry_span
from athenian.api.typing_utils import wraps
class Auth0:
"""Class for Auth0 middleware compatible with aiohttp."""
AUTH0_DOMAIN = os.getenv("AUTH0_DOMAIN")
AUTH0_AUDIENCE = os.getenv("AUTH0_AUDIENCE")
AUTH0_CLIENT_ID = os.getenv("AUTH0_CLIENT_ID")
AUTH0_CLIENT_SECRET = os.getenv("AUTH0_CLIENT_SECRET")
DEFAULT_USER = os.getenv("ATHENIAN_DEFAULT_USER")
KEY = os.getenv("ATHENIAN_INVITATION_KEY")
USERINFO_CACHE_TTL = 60 # seconds
log = logging.getLogger("auth")
def __init__(self,
domain=AUTH0_DOMAIN,
audience=AUTH0_AUDIENCE,
client_id=AUTH0_CLIENT_ID,
client_secret=AUTH0_CLIENT_SECRET, whitelist: Sequence[str] = tuple(),
default_user=DEFAULT_USER,
key=KEY,
cache: Optional[aiomcache.Client] = None,
lazy=False,
force_user: str = ""):
"""
Create a new Auth0 middleware.
See:
- https://auth0.com/docs/tokens/guides/get-access-tokens#control-access-token-audience
- https://auth0.com/docs/api-auth/tutorials/client-credentials
:param domain: Auth0 domain.
:param audience: JWT audience parameter.
:param client_id: Application's Client ID.
:param client_secret: Application's Client Secret.
:param whitelist: Routes that do not need authorization.
:param default_user: Default user ID - the one that's assigned to public, unauthorized \
requests.
:param key: Global secret used to encrypt sensitive personal information.
:param cache: memcached client to cache the user profiles.
:param lazy: Value that indicates whether Auth0 Management API tokens and JWKS data \
            must be fetched lazily, on the first related method call, instead of at construction.
:param force_user: Ignore all the incoming bearer tokens and make all requests on behalf \
of this user ID.
"""
for var, env_name in ((domain, "AUTH0_DOMAIN"),
(audience, "AUTH0_AUDIENCE"),
(client_id, "AUTH0_CLIENT_ID"),
(client_secret, "AUTH0_CLIENT_SECRET"),
(default_user, "ATHENIAN_DEFAULT_USER"),
(key, "ATHENIAN_INVITATION_KEY")):
if not var:
raise EnvironmentError("%s environment variable must be set." % env_name)
self._domain = domain
self._audience = audience
self._whitelist = whitelist
self._cache = cache
self._client_id = client_id
self._client_secret = client_secret
self._default_user_id = default_user
self._default_user = None # type: Optional[User]
self._key = key
self.force_user = force_user
if force_user:
self.log.warning("Forced user authorization mode: %s", force_user)
self._session = aiohttp.ClientSession()
self._kids_event = asyncio.Event()
if not lazy:
self._jwks_loop = asyncio.ensure_future(self._fetch_jwks_loop())
else:
self._jwks_loop = None # type: Optional[asyncio.Future]
self._kids: Dict[str, Any] = {}
self._mgmt_event = asyncio.Event()
self._mgmt_token = None # type: Optional[str]
if not lazy:
self._mgmt_loop = asyncio.ensure_future(self._acquire_management_token_loop())
else:
self._mgmt_loop = None # type: Optional[asyncio.Future]
async def kids(self) -> Dict[str, Any]:
"""Return the mapping kid -> Auth0 jwks record with that kid; wait until fetched."""
if self._jwks_loop is None:
self._jwks_loop = asyncio.ensure_future(self._fetch_jwks_loop())
await self._kids_event.wait()
return self._kids
async def mgmt_token(self) -> str:
"""Return the Auth0 management API token; wait until fetched."""
if self._mgmt_loop is None:
self._mgmt_loop = asyncio.ensure_future(self._acquire_management_token_loop())
await self._mgmt_event.wait()
if not self._mgmt_token:
raise LookupError("Could not acquire the Auth0 Management token.")
return self._mgmt_token
async def default_user(self) -> User:
"""Return the user of unauthorized, public requests."""
if self._default_user is not None:
return self._default_user
self._default_user = await self.get_user(self._default_user_id)
if self._default_user is None:
message = "Failed to fetch the default user (%s) details. " \
"Try changing ATHENIAN_DEFAULT_USER" % self._default_user_id
self.log.error(message)
raise GracefulExit(message)
return self._default_user
@property
def domain(self) -> str:
"""Return the assigned Auth0 domain, e.g. "athenian.auth0.com"."""
return self._domain
@property
def audience(self) -> str:
"""Return the assigned Auth0 audience URL, e.g. "https://api.athenian.co"."""
return self._audience
@property
def key(self) -> str:
"""Return the global secret used to encrypt sensitive personal information."""
return self._key
async def close(self):
"""Free resources and close connections associated with the object."""
if self._jwks_loop is not None:
self._jwks_loop.cancel()
        if self._mgmt_loop is not None:  # may still be None if lazy=True
self._mgmt_loop.cancel()
session = self._session
# FIXME(vmarkovtsev): remove this bloody mess when this issue is resolved:
# https://github.com/aio-libs/aiohttp/issues/1925#issuecomment-575754386
transports = 0
all_is_lost = asyncio.Event()
if session.connector is not None:
for conn in session.connector._conns.values():
for handler, _ in conn:
proto = getattr(handler.transport, "_ssl_protocol", None)
if proto is None:
continue
transports += 1
def connection_lost(orig_lost, exc):
orig_lost(exc)
nonlocal transports
transports -= 1
if transports == 0:
all_is_lost.set()
def eof_received(orig_eof_received):
try:
orig_eof_received()
except AttributeError:
# It may happen that eof_received() is called after
# _app_protocol and _transport are set to None.
# Jeez, asyncio sucks sometimes.
pass
proto.connection_lost = functools.partial(
connection_lost, proto.connection_lost)
proto.eof_received = functools.partial(eof_received, proto.eof_received)
await session.close()
if transports > 0:
await all_is_lost.wait()
async def get_user(self, user: str) -> Optional[User]:
"""Retrieve a user using Auth0 mgmt API by ID."""
users = await self.get_users([user])
if len(users) == 0:
return None
return next(iter(users.values()))
@sentry_span
async def get_users(self, users: Sequence[str]) -> Dict[str, User]:
"""
Retrieve several users using Auth0 mgmt API by ID.
        :return: Mapping from user ID to the found user details. Some users may not be found, \
                 and some may be duplicates.
"""
token = await self.mgmt_token()
assert len(users) >= 0 # we need __len__
async def get_batch(batch: List[str]) -> List[User]:
nonlocal token
query = "user_id:(%s)" % " ".join('"%s"' % u for u in batch)
for retries in range(1, 31):
try:
resp = await self._session.get(
"https://%s/api/v2/users?q=%s" % (self._domain, query),
headers={"Authorization": "Bearer " + token})
except aiohttp.ClientOSError as e:
if e.errno in (-3, 101, 103, 104):
self.log.warning("Auth0 Management API: %s", e)
                        # -3: Temporary failure in name resolution
                        # 101: Network is unreachable
                        # 103: Software caused connection abort
                        # 104: Connection reset by peer
await asyncio.sleep(0.1)
continue
raise e from None
except RuntimeError:
# our loop is closed and we are doomed
return []
if resp.status == HTTPStatus.TOO_MANY_REQUESTS:
self.log.warning("Auth0 Management API rate limit hit while listing "
"%d/%d users, retry %d",
len(batch), len(users), retries)
await asyncio.sleep(0.5 + random())
elif resp.status in (HTTPStatus.REQUEST_URI_TOO_LONG, HTTPStatus.BAD_REQUEST):
if len(batch) == 1:
return []
m = len(batch) // 2
self.log.warning("Auth0 Management API /users raised HTTP %d, bisecting "
"%d/%d -> %d, %d",
resp.status, len(batch), len(users), m, len(batch) - m)
b1, b2 = await gather(get_batch(batch[:m]), get_batch(batch[m:]))
return b1 + b2
elif resp.status == HTTPStatus.UNAUTHORIZED:
# force refresh the token
self._mgmt_loop.cancel()
self._mgmt_loop = None
self._mgmt_token = None
token = await self.mgmt_token()
else:
if resp.status >= 400:
try:
response_body = await resp.json()
except aiohttp.ContentTypeError:
response_body = await resp.text()
self.log.error("Auth0 Management API /users raised HTTP %d: %s",
resp.status, response_body)
break
else: # for retries in range
return []
if resp.status != HTTPStatus.OK:
return []
found = await resp.json()
return [User.from_auth0(**u, encryption_key=self.key) for u in found]
return {u.id: u for u in await get_batch(list(users))}
async def _fetch_jwks_loop(self) -> None:
while True:
await self._fetch_jwks()
await asyncio.sleep(3600) # 1 hour
async def _acquire_management_token_loop(self) -> None:
while True:
expires_in = await self._acquire_management_token(1)
await asyncio.sleep(expires_in)
async def _fetch_jwks(self) -> None:
req = await self._session.get("https://%s/.well-known/jwks.json" % self._domain)
jwks = await req.json()
self.log.info("Fetched %d JWKS records", len(jwks))
self._kids = {key["kid"]: {k: key[k] for k in ("kty", "kid", "use", "n", "e")}
for key in jwks["keys"]}
self._kids_event.set()
async def _acquire_management_token(self, attempt: int) -> float:
max_attempts = 10
error = None
try:
resp = await self._session.post("https://%s/oauth/token" % self._domain, headers={
"content-type": "application/x-www-form-urlencoded",
}, data={
"grant_type": "client_credentials",
"client_id": self._client_id,
"client_secret": self._client_secret,
"audience": "https://%s/api/v2/" % self._domain,
}, timeout=5)
data = await resp.json()
self._mgmt_token = data["access_token"]
self._mgmt_event.set()
expires_in = int(data["expires_in"])
except Exception as e:
error = e
try:
resp_text = await resp.text()
except Exception:
resp_text = "N/A"
# do not use %s - Sentry does not display it properly
if attempt >= max_attempts:
self.log.exception("Failed to renew the Auth0 management token: " + resp_text)
raise GracefulExit() from e
if error is not None:
self.log.warning("Failed to renew the Auth0 management token %d / %d: %s: %s",
attempt, max_attempts, error, resp_text)
await asyncio.sleep(1)
return await self._acquire_management_token(attempt + 1)
self.log.info("Acquired new Auth0 management token %s...%s for the next %s",
self._mgmt_token[:12], self._mgmt_token[-12:], timedelta(seconds=expires_in))
expires_in -= 5 * 60 # 5 minutes earlier
if expires_in < 0:
expires_in = 0
return expires_in
def _is_whitelisted(self, request: aiohttp.web.Request) -> bool:
for pattern in self._whitelist:
if re.match(pattern, request.path):
return True
return False
async def _get_user_info(self, token: str) -> User:
if token == "null":
return await self.default_user()
return await self._get_user_info_cached(token)
@cached(
exptime=lambda self, **_: self.USERINFO_CACHE_TTL,
serialize=pickle.dumps,
deserialize=pickle.loads,
key=lambda token, **_: (token,),
cache=lambda self, **_: self._cache,
)
async def _get_user_info_cached(self, token: str) -> User:
resp = await self._session.get("https://%s/userinfo" % self._domain,
headers={"Authorization": "Bearer " + token})
try:
user = await resp.json()
except aiohttp.ContentTypeError:
raise ResponseError(GenericError(
"/errors/Auth0", title=resp.reason, status=resp.status,
detail=await resp.text()))
if resp.status != 200:
raise ResponseError(GenericError(
"/errors/Auth0", title=resp.reason, status=resp.status,
detail=user.get("description", str(user))))
return User.from_auth0(**user, encryption_key=self.key)
async def _set_user(self, request: AthenianWebRequest, token: str, method: str) -> None:
if method == "bearer":
token_info = await self._extract_bearer_token(token)
request.uid, request.account = token_info["sub"], None
elif method == "apikey":
request.uid, request.account = await self._extract_api_key(token, request)
else:
raise AssertionError("Unsupported auth method: %s" % method)
god = await request.sdb.fetch_one(
select([God.mapped_id]).where(God.user_id == request.uid))
if god is not None:
request.god_id = request.uid
if "X-Identity" in request.headers:
mapped_id = request.headers["X-Identity"]
else:
mapped_id = god[God.mapped_id.name]
if mapped_id is not None:
request.uid = mapped_id
self.log.info("God mode: %s became %s", request.god_id, mapped_id)
request.is_default_user = request.uid == self._default_user_id
sentry_sdk.set_user({"id": request.uid})
async def get_user_info():
if method != "bearer" or (god is not None and request.god_id is not None):
user_info = await self.get_user(key := request.uid)
else:
user_info = await self._get_user_info(key := token)
if user_info is None:
raise ResponseError(GenericError(
"/errors/Auth0", title="Failed to retrieve user details from Auth0",
status=HTTPStatus.SERVICE_UNAVAILABLE,
detail=key,
))
sentry_sdk.set_user({"username": user_info.login, "email": user_info.email})
return user_info
request.user = get_user_info
async def _extract_bearer_token(self, token: str) -> Dict[str, Any]:
if token == "null":
return {"sub": self.force_user or self._default_user_id}
# People who understand what's going on here:
# - @dennwc
# - @vmarkovtsev
try:
unverified_header = jwt.get_unverified_header(token)
except jwt.JWTError as e:
raise OAuthProblem(
description="Invalid header: %s. Use an RS256 signed JWT Access Token." % e)
if unverified_header["alg"] != "RS256":
raise OAuthProblem(
description="Invalid algorithm %s. Use an RS256 signed JWT Access Token." %
unverified_header["alg"])
kids = await self.kids()
try:
rsa_key = kids[unverified_header["kid"]]
except KeyError:
raise OAuthProblem(description="Unable to find the matching Auth0 RSA public key")
try:
return jwt.decode(
token,
rsa_key,
algorithms=["RS256"],
audience=self._audience,
issuer="https://%s/" % self._domain,
)
except jwt.ExpiredSignatureError as e:
raise OAuthProblem(description="JWT expired: %s" % e)
except jwt.JWTClaimsError as e:
raise OAuthProblem(description="invalid claims: %s" % e)
except jwt.JWTError as e:
raise OAuthProblem(description="Unable to parse the authentication token: %s" % e)
async def _extract_api_key(self, token: str, request: AthenianWebRequest) -> Tuple[str, int]:
kms = request.app["kms"] # type: AthenianKMS
if kms is None:
raise AuthenticationProblem(
status=HTTPStatus.UNAUTHORIZED,
title="Unable to authenticate with an API key.",
detail="The backend was not properly configured and there is no connection with "
"Google Key Management Service to decrypt API keys.")
try:
plaintext = await kms.decrypt(token)
except aiohttp.ClientResponseError:
raise Unauthorized()
try:
token_id = struct.unpack("<q", plaintext)[0]
except (ValueError, struct.error):
raise Unauthorized() from None
token_obj = await request.sdb.fetch_one(
select([UserToken]).where(UserToken.id == token_id))
if token_obj is None:
raise Unauthorized()
uid = token_obj[UserToken.user_id.name]
account = token_obj[UserToken.account_id.name]
return uid, account
class AthenianAioHttpSecurityHandlerFactory(connexion.security.AioHttpSecurityHandlerFactory):
"""Override verify_security() to re-route the security affairs to our Auth0 class."""
def __init__(self, auth: Auth0, pass_context_arg_name):
"""`auth` is supplied by AthenianAioHttpApi."""
super().__init__(pass_context_arg_name=pass_context_arg_name)
self.auth = auth
def verify_security(self, auth_funcs, required_scopes, function,
) -> Callable[[ConnexionRequest], Coroutine[None, None, Any]]:
"""
Decorate the request pipeline to check the security, either JWT or APIKey.
If we don't see any authorization details, we assume the "default" user.
"""
auth = self.auth # type: Auth0
async def get_token_info(request: ConnexionRequest):
token_info = self.no_value
for func in auth_funcs:
token_info = func(request, required_scopes)
while asyncio.iscoroutine(token_info):
token_info = await token_info
if token_info is not self.no_value:
break
return token_info
@functools.wraps(function)
async def wrapper(request: ConnexionRequest):
token_info = self.no_value if auth.force_user else await get_token_info(request)
if token_info is self.no_value:
# "null" is the "magic" JWT that loads the default or forced user
request.headers = CIMultiDict(request.headers)
request.headers["Authorization"] = "Bearer null"
token_info = await get_token_info(request)
if token_info is self.no_value:
raise Unauthorized("The endpoint you are calling requires X-API-Key header.")
# token_info = {"token": <token>, "method": "bearer" or "apikey"}
await auth._set_user(context := request.context, **token_info)
# check whether the user may access the specified account
if isinstance(request.json, dict):
if (account := request.json.get("account")) is not None:
assert isinstance(account, int)
with sentry_sdk.configure_scope() as scope:
scope.set_tag("account", account)
await get_user_account_status(
context.uid, account, context.sdb, context.cache)
elif (account := getattr(context, "account", None)) is not None:
canonical = context.match_info.route.resource.canonical
route_specs = context.app["route_spec"]
if (spec := route_specs.get(canonical, None)) is not None:
try:
required = "account" in deep_get(spec, [
"requestBody", "content", "application/json", "schema",
"required",
])
except KeyError:
required = False
if required:
request.json["account"] = account
context.account = account
# check whether the account is enabled
if context.account is not None:
expires_at = await context.sdb.fetch_val(
select([Account.expires_at]).where(Account.id == context.account))
if not getattr(context, "god_id", False) and (
expires_at is None or expires_at < datetime.now(expires_at.tzinfo)):
auth.log.warning("Attempt to use an expired account %d by user %s",
context.account, context.uid)
raise Unauthorized("Your account has expired.")
# finish the auth processing and chain forward
return await function(request)
return wrapper
def disable_default_user(func):
"""Decorate an endpoint handler to raise 403 if the user is the default one."""
async def wrapped_disable_default_user(request: AthenianWebRequest,
*args, **kwargs) -> aiohttp.web.Response:
if request.is_default_user:
raise ResponseError(ForbiddenError("%s is the default user" % request.uid))
return await func(request, *args, **kwargs)
wraps(wrapped_disable_default_user, func)
return wrapped_disable_default_user
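# Usage sketch (hypothetical endpoint handler; the decorator raises 403 before
# the handler body runs when the request comes from the default user):
#
#   @disable_default_user
#   async def set_token(request: AthenianWebRequest, body: dict):
#       ...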
| [
"logging.getLogger",
"sentry_sdk.configure_scope",
"athenian.api.controllers.account.get_user_account_status",
"connexion.exceptions.Unauthorized",
"multidict.CIMultiDict",
"sqlalchemy.select",
"datetime.timedelta",
"functools.wraps",
"athenian.api.models.web.user.User.from_auth0",
"asyncio.sleep"... | [((570, 595), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (593, 595), False, 'import warnings\n'), ((653, 726), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {'message': '"""int_from_bytes is deprecated"""'}), "('ignore', message='int_from_bytes is deprecated')\n", (676, 726), False, 'import warnings\n'), ((1493, 1518), 'os.getenv', 'os.getenv', (['"""AUTH0_DOMAIN"""'], {}), "('AUTH0_DOMAIN')\n", (1502, 1518), False, 'import os\n'), ((1540, 1567), 'os.getenv', 'os.getenv', (['"""AUTH0_AUDIENCE"""'], {}), "('AUTH0_AUDIENCE')\n", (1549, 1567), False, 'import os\n'), ((1590, 1618), 'os.getenv', 'os.getenv', (['"""AUTH0_CLIENT_ID"""'], {}), "('AUTH0_CLIENT_ID')\n", (1599, 1618), False, 'import os\n'), ((1645, 1677), 'os.getenv', 'os.getenv', (['"""AUTH0_CLIENT_SECRET"""'], {}), "('AUTH0_CLIENT_SECRET')\n", (1654, 1677), False, 'import os\n'), ((1697, 1731), 'os.getenv', 'os.getenv', (['"""ATHENIAN_DEFAULT_USER"""'], {}), "('ATHENIAN_DEFAULT_USER')\n", (1706, 1731), False, 'import os\n'), ((1742, 1778), 'os.getenv', 'os.getenv', (['"""ATHENIAN_INVITATION_KEY"""'], {}), "('ATHENIAN_INVITATION_KEY')\n", (1751, 1778), False, 'import os\n'), ((1828, 1853), 'logging.getLogger', 'logging.getLogger', (['"""auth"""'], {}), "('auth')\n", (1845, 1853), False, 'import logging\n'), ((15161, 15348), 'athenian.api.cache.cached', 'cached', ([], {'exptime': '(lambda self, **_: self.USERINFO_CACHE_TTL)', 'serialize': 'pickle.dumps', 'deserialize': 'pickle.loads', 'key': '(lambda token, **_: (token,))', 'cache': '(lambda self, **_: self._cache)'}), '(exptime=lambda self, **_: self.USERINFO_CACHE_TTL, serialize=pickle.\n dumps, deserialize=pickle.loads, key=lambda token, **_: (token,), cache\n =lambda self, **_: self._cache)\n', (15167, 15348), False, 'from athenian.api.cache import cached\n'), ((25053, 25094), 'athenian.api.typing_utils.wraps', 'wraps', (['wrapped_disable_default_user', 'func'], {}), '(wrapped_disable_default_user, func)\n', (25058, 25094), False, 'from athenian.api.typing_utils import wraps\n'), ((4450, 4465), 'asyncio.Event', 'asyncio.Event', ([], {}), '()\n', (4463, 4465), False, 'import asyncio\n'), ((4714, 4729), 'asyncio.Event', 'asyncio.Event', ([], {}), '()\n', (4727, 4729), False, 'import asyncio\n'), ((7284, 7299), 'asyncio.Event', 'asyncio.Event', ([], {}), '()\n', (7297, 7299), False, 'import asyncio\n'), ((16087, 16135), 'athenian.api.models.web.user.User.from_auth0', 'User.from_auth0', ([], {'encryption_key': 'self.key'}), '(**user, encryption_key=self.key)\n', (16102, 16135), False, 'from athenian.api.models.web.user import User\n'), ((17201, 17241), 'sentry_sdk.set_user', 'sentry_sdk.set_user', (["{'id': request.uid}"], {}), "({'id': request.uid})\n", (17220, 17241), False, 'import sentry_sdk\n'), ((21802, 21827), 'functools.wraps', 'functools.wraps', (['function'], {}), '(function)\n', (21817, 21827), False, 'import functools\n'), ((6227, 6248), 'aiohttp.web_runner.GracefulExit', 'GracefulExit', (['message'], {}), '(message)\n', (6239, 6248), False, 'from aiohttp.web_runner import GracefulExit\n'), ((14602, 14631), 'datetime.timedelta', 'timedelta', ([], {'seconds': 'expires_in'}), '(seconds=expires_in)\n', (14611, 14631), False, 'from datetime import datetime, timedelta\n'), ((14888, 14919), 're.match', 're.match', (['pattern', 'request.path'], {}), '(pattern, request.path)\n', (14896, 14919), False, 'import re\n'), ((17814, 17890), 'sentry_sdk.set_user', 'sentry_sdk.set_user', (["{'username': 
user_info.login, 'email': user_info.email}"], {}), "({'username': user_info.login, 'email': user_info.email})\n", (17833, 17890), False, 'import sentry_sdk\n'), ((18273, 18305), 'jose.jwt.get_unverified_header', 'jwt.get_unverified_header', (['token'], {}), '(token)\n', (18298, 18305), False, 'from jose import jwt\n'), ((18531, 18654), 'connexion.exceptions.OAuthProblem', 'OAuthProblem', ([], {'description': "('Invalid algorithm %s. Use an RS256 signed JWT Access Token.' %\n unverified_header['alg'])"}), "(description=\n 'Invalid algorithm %s. Use an RS256 signed JWT Access Token.' %\n unverified_header['alg'])\n", (18543, 18654), False, 'from connexion.exceptions import AuthenticationProblem, OAuthProblem, Unauthorized\n'), ((18931, 19045), 'jose.jwt.decode', 'jwt.decode', (['token', 'rsa_key'], {'algorithms': "['RS256']", 'audience': 'self._audience', 'issuer': "('https://%s/' % self._domain)"}), "(token, rsa_key, algorithms=['RS256'], audience=self._audience,\n issuer='https://%s/' % self._domain)\n", (18941, 19045), False, 'from jose import jwt\n'), ((19683, 19933), 'connexion.exceptions.AuthenticationProblem', 'AuthenticationProblem', ([], {'status': 'HTTPStatus.UNAUTHORIZED', 'title': '"""Unable to authenticate with an API key."""', 'detail': '"""The backend was not properly configured and there is no connection with Google Key Management Service to decrypt API keys."""'}), "(status=HTTPStatus.UNAUTHORIZED, title=\n 'Unable to authenticate with an API key.', detail=\n 'The backend was not properly configured and there is no connection with Google Key Management Service to decrypt API keys.'\n )\n", (19704, 19933), False, 'from connexion.exceptions import AuthenticationProblem, OAuthProblem, Unauthorized\n'), ((20451, 20465), 'connexion.exceptions.Unauthorized', 'Unauthorized', ([], {}), '()\n', (20463, 20465), False, 'from connexion.exceptions import AuthenticationProblem, OAuthProblem, Unauthorized\n'), ((12139, 12184), 'athenian.api.models.web.user.User.from_auth0', 'User.from_auth0', ([], {'encryption_key': 'self.key'}), '(**u, encryption_key=self.key)\n', (12154, 12184), False, 'from athenian.api.models.web.user import User\n'), ((12387, 12406), 'asyncio.sleep', 'asyncio.sleep', (['(3600)'], {}), '(3600)\n', (12400, 12406), False, 'import asyncio\n'), ((12581, 12606), 'asyncio.sleep', 'asyncio.sleep', (['expires_in'], {}), '(expires_in)\n', (12594, 12606), False, 'import asyncio\n'), ((14362, 14378), 'asyncio.sleep', 'asyncio.sleep', (['(1)'], {}), '(1)\n', (14375, 14378), False, 'import asyncio\n'), ((18358, 18452), 'connexion.exceptions.OAuthProblem', 'OAuthProblem', ([], {'description': "('Invalid header: %s. Use an RS256 signed JWT Access Token.' % e)"}), "(description=\n 'Invalid header: %s. Use an RS256 signed JWT Access Token.' 
% e)\n", (18370, 18452), False, 'from connexion.exceptions import AuthenticationProblem, OAuthProblem, Unauthorized\n'), ((18822, 18898), 'connexion.exceptions.OAuthProblem', 'OAuthProblem', ([], {'description': '"""Unable to find the matching Auth0 RSA public key"""'}), "(description='Unable to find the matching Auth0 RSA public key')\n", (18834, 18898), False, 'from connexion.exceptions import AuthenticationProblem, OAuthProblem, Unauthorized\n'), ((19202, 19249), 'connexion.exceptions.OAuthProblem', 'OAuthProblem', ([], {'description': "('JWT expired: %s' % e)"}), "(description='JWT expired: %s' % e)\n", (19214, 19249), False, 'from connexion.exceptions import AuthenticationProblem, OAuthProblem, Unauthorized\n'), ((19308, 19358), 'connexion.exceptions.OAuthProblem', 'OAuthProblem', ([], {'description': "('invalid claims: %s' % e)"}), "(description='invalid claims: %s' % e)\n", (19320, 19358), False, 'from connexion.exceptions import AuthenticationProblem, OAuthProblem, Unauthorized\n'), ((19411, 19487), 'connexion.exceptions.OAuthProblem', 'OAuthProblem', ([], {'description': "('Unable to parse the authentication token: %s' % e)"}), "(description='Unable to parse the authentication token: %s' % e)\n", (19423, 19487), False, 'from connexion.exceptions import AuthenticationProblem, OAuthProblem, Unauthorized\n'), ((20118, 20132), 'connexion.exceptions.Unauthorized', 'Unauthorized', ([], {}), '()\n', (20130, 20132), False, 'from connexion.exceptions import AuthenticationProblem, OAuthProblem, Unauthorized\n'), ((20169, 20199), 'struct.unpack', 'struct.unpack', (['"""<q"""', 'plaintext'], {}), "('<q', plaintext)\n", (20182, 20199), False, 'import struct\n'), ((20264, 20278), 'connexion.exceptions.Unauthorized', 'Unauthorized', ([], {}), '()\n', (20276, 20278), False, 'from connexion.exceptions import AuthenticationProblem, OAuthProblem, Unauthorized\n'), ((21601, 21632), 'asyncio.iscoroutine', 'asyncio.iscoroutine', (['token_info'], {}), '(token_info)\n', (21620, 21632), False, 'import asyncio\n'), ((22135, 22163), 'multidict.CIMultiDict', 'CIMultiDict', (['request.headers'], {}), '(request.headers)\n', (22146, 22163), False, 'from multidict import CIMultiDict\n'), ((22354, 22425), 'connexion.exceptions.Unauthorized', 'Unauthorized', (['"""The endpoint you are calling requires X-API-Key header."""'], {}), "('The endpoint you are calling requires X-API-Key header.')\n", (22366, 22425), False, 'from connexion.exceptions import AuthenticationProblem, OAuthProblem, Unauthorized\n'), ((24940, 24994), 'athenian.api.models.web.ForbiddenError', 'ForbiddenError', (["('%s is the default user' % request.uid)"], {}), "('%s is the default user' % request.uid)\n", (24954, 24994), False, 'from athenian.api.models.web import ForbiddenError, GenericError\n'), ((8374, 8431), 'functools.partial', 'functools.partial', (['connection_lost', 'proto.connection_lost'], {}), '(connection_lost, proto.connection_lost)\n', (8391, 8431), False, 'import functools\n'), ((8498, 8549), 'functools.partial', 'functools.partial', (['eof_received', 'proto.eof_received'], {}), '(eof_received, proto.eof_received)\n', (8515, 8549), False, 'import functools\n'), ((14131, 14145), 'aiohttp.web_runner.GracefulExit', 'GracefulExit', ([], {}), '()\n', (14143, 14145), False, 'from aiohttp.web_runner import GracefulExit\n'), ((17589, 17731), 'athenian.api.models.web.GenericError', 'GenericError', (['"""/errors/Auth0"""'], {'title': '"""Failed to retrieve user details from Auth0"""', 'status': 'HTTPStatus.SERVICE_UNAVAILABLE', 'detail': 
'key'}), "('/errors/Auth0', title=\n 'Failed to retrieve user details from Auth0', status=HTTPStatus.\n SERVICE_UNAVAILABLE, detail=key)\n", (17601, 17731), False, 'from athenian.api.models.web import ForbiddenError, GenericError\n'), ((24429, 24470), 'connexion.exceptions.Unauthorized', 'Unauthorized', (['"""Your account has expired."""'], {}), "('Your account has expired.')\n", (24441, 24470), False, 'from connexion.exceptions import AuthenticationProblem, OAuthProblem, Unauthorized\n'), ((16656, 16679), 'sqlalchemy.select', 'select', (['[God.mapped_id]'], {}), '([God.mapped_id])\n', (16662, 16679), False, 'from sqlalchemy import select\n'), ((20350, 20369), 'sqlalchemy.select', 'select', (['[UserToken]'], {}), '([UserToken])\n', (20356, 20369), False, 'from sqlalchemy import select\n'), ((22846, 22874), 'sentry_sdk.configure_scope', 'sentry_sdk.configure_scope', ([], {}), '()\n', (22872, 22874), False, 'import sentry_sdk\n'), ((22969, 23042), 'athenian.api.controllers.account.get_user_account_status', 'get_user_account_status', (['context.uid', 'account', 'context.sdb', 'context.cache'], {}), '(context.uid, account, context.sdb, context.cache)\n', (22992, 23042), False, 'from athenian.api.controllers.account import get_user_account_status\n'), ((24214, 24245), 'datetime.datetime.now', 'datetime.now', (['expires_at.tzinfo'], {}), '(expires_at.tzinfo)\n', (24226, 24245), False, 'from datetime import datetime, timedelta\n'), ((10063, 10081), 'asyncio.sleep', 'asyncio.sleep', (['(0.1)'], {}), '(0.1)\n', (10076, 10081), False, 'import asyncio\n'), ((10611, 10619), 'random.random', 'random', ([], {}), '()\n', (10617, 10619), False, 'from random import random\n'), ((24025, 24053), 'sqlalchemy.select', 'select', (['[Account.expires_at]'], {}), '([Account.expires_at])\n', (24031, 24053), False, 'from sqlalchemy import select\n'), ((23445, 23533), 'connexion.utils.deep_get', 'deep_get', (['spec', "['requestBody', 'content', 'application/json', 'schema', 'required']"], {}), "(spec, ['requestBody', 'content', 'application/json', 'schema',\n 'required'])\n", (23453, 23533), False, 'from connexion.utils import deep_get\n')] |
import subprocess
def check_working_tree():
try:
subprocess.check_output(['git', 'diff', '--exit-code'])
except subprocess.CalledProcessError as e:
print('called process error')
out_bytes = e.output # Output generated before error
code = e.returncode # Return code
        raise Exception('Your working tree is not clean, please commit all changes.')
else:
return True
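# Illustrative usage of the two helpers in this module (run from inside a
# git checkout; get_git_revision_short_hash is defined just below):
#   if check_working_tree():
#       print(get_git_revision_short_hash())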
def get_git_revision_short_hash():
try:
commit_id = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).decode('ascii').strip()
except subprocess.CalledProcessError:
        raise Exception('Something is wrong with your git workspace!')
else:
return commit_id | [
"subprocess.check_output"
] | [((63, 118), 'subprocess.check_output', 'subprocess.check_output', (["['git', 'diff', '--exit-code']"], {}), "(['git', 'diff', '--exit-code'])\n", (86, 118), False, 'import subprocess\n'), ((497, 561), 'subprocess.check_output', 'subprocess.check_output', (["['git', 'rev-parse', '--short', 'HEAD']"], {}), "(['git', 'rev-parse', '--short', 'HEAD'])\n", (520, 561), False, 'import subprocess\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('assets', '0013_metadocumentasset_metadocumentsecureasset'),
]
operations = [
migrations.AlterField(
model_name='metadocumentasset',
name='asset',
field=models.OneToOneField(to='assets.Asset', null=True, related_name='meta_document'),
),
migrations.AlterField(
model_name='metadocumentsecureasset',
name='asset',
field=models.OneToOneField(to='assets.SecureAsset', null=True, related_name='meta_document'),
),
]
| [
"django.db.models.OneToOneField"
] | [((384, 469), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'to': '"""assets.Asset"""', 'null': '(True)', 'related_name': '"""meta_document"""'}), "(to='assets.Asset', null=True, related_name='meta_document'\n )\n", (404, 469), False, 'from django.db import migrations, models\n'), ((602, 693), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'to': '"""assets.SecureAsset"""', 'null': '(True)', 'related_name': '"""meta_document"""'}), "(to='assets.SecureAsset', null=True, related_name=\n 'meta_document')\n", (622, 693), False, 'from django.db import migrations, models\n')] |
# -*- coding: utf-8 -*-
"""
Logging configuration functions
"""
from logbook import NullHandler, FileHandler, NestedSetup
from logbook.more import ColorizedStderrHandler
from logbook.queues import ThreadedWrapperHandler
def logging_options(parser):
"""Add cli options for logging to parser"""
LOG_LEVELS = ("critical", "error", "warning", "notice", "info", "debug")
parser.add_argument("--log-file")
parser.add_argument(
"--log-file-level", choices=LOG_LEVELS, default="debug"
)
stderr_parser = parser.add_mutually_exclusive_group()
stderr_parser.add_argument(
"--stderr-level", choices=LOG_LEVELS, default="notice"
)
stderr_parser.add_argument(
"--quiet", "-q", default=False, action="store_true",
)
stderr_parser.add_argument(
"--verbose", "-v", default=False, action="store_true",
)
def log_handler(args, thread_wrapping=True):
"""
Return log handler with given config
"""
if not isinstance(args, dict):
args = vars(args)
if args.get("quiet"):
stderr_handler = ColorizedStderrHandler(level="ERROR")
elif args.get("verbose"):
stderr_handler = ColorizedStderrHandler(level="DEBUG")
else:
stderr_handler = ColorizedStderrHandler(
level=args.get("stderr_level", "NOTICE").upper(), bubble=True
)
if args.get("log_file"):
file_handler = FileHandler(
args.get("log_file"),
level=args.get("log_file_level", "DEBUG").upper(), bubble=True
)
else:
file_handler = NullHandler()
if thread_wrapping:
file_handler = ThreadedWrapperHandler(file_handler)
stderr_handler = ThreadedWrapperHandler(stderr_handler)
return NestedSetup([
NullHandler(), # catch everything else
file_handler, stderr_handler
])
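# Sketch of typical usage (a minimal example; assumes logbook's standard
# applicationbound() context manager on the returned NestedSetup):
#   parser = argparse.ArgumentParser()
#   logging_options(parser)
#   with log_handler(parser.parse_args()).applicationbound():
#       ...  # log records are now routed according to the cli options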
| [
"logbook.more.ColorizedStderrHandler",
"logbook.NullHandler",
"logbook.queues.ThreadedWrapperHandler"
] | [((1084, 1121), 'logbook.more.ColorizedStderrHandler', 'ColorizedStderrHandler', ([], {'level': '"""ERROR"""'}), "(level='ERROR')\n", (1106, 1121), False, 'from logbook.more import ColorizedStderrHandler\n'), ((1575, 1588), 'logbook.NullHandler', 'NullHandler', ([], {}), '()\n', (1586, 1588), False, 'from logbook import NullHandler, FileHandler, NestedSetup\n'), ((1637, 1673), 'logbook.queues.ThreadedWrapperHandler', 'ThreadedWrapperHandler', (['file_handler'], {}), '(file_handler)\n', (1659, 1673), False, 'from logbook.queues import ThreadedWrapperHandler\n'), ((1699, 1737), 'logbook.queues.ThreadedWrapperHandler', 'ThreadedWrapperHandler', (['stderr_handler'], {}), '(stderr_handler)\n', (1721, 1737), False, 'from logbook.queues import ThreadedWrapperHandler\n'), ((1177, 1214), 'logbook.more.ColorizedStderrHandler', 'ColorizedStderrHandler', ([], {'level': '"""DEBUG"""'}), "(level='DEBUG')\n", (1199, 1214), False, 'from logbook.more import ColorizedStderrHandler\n'), ((1772, 1785), 'logbook.NullHandler', 'NullHandler', ([], {}), '()\n', (1783, 1785), False, 'from logbook import NullHandler, FileHandler, NestedSetup\n')] |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack_dashboard import api
from openstack_dashboard.test import helpers as test
class CinderApiTests(test.APITestCase):
def test_volume_list(self):
search_opts = {'all_tenants': 1}
volumes = self.volumes.list()
cinderclient = self.stub_cinderclient()
cinderclient.volumes = self.mox.CreateMockAnything()
cinderclient.volumes.list(search_opts=search_opts,).AndReturn(volumes)
self.mox.ReplayAll()
# No assertions are necessary. Verification is handled by mox.
api.cinder.volume_list(self.request, search_opts=search_opts)
def test_volume_snapshot_list(self):
volume_snapshots = self.volume_snapshots.list()
cinderclient = self.stub_cinderclient()
cinderclient.volume_snapshots = self.mox.CreateMockAnything()
cinderclient.volume_snapshots.list().AndReturn(volume_snapshots)
self.mox.ReplayAll()
api.cinder.volume_snapshot_list(self.request)
def test_volume_snapshot_list_no_volume_configured(self):
# remove volume from service catalog
catalog = self.service_catalog
for service in catalog:
if service["type"] == "volume":
self.service_catalog.remove(service)
volume_snapshots = self.volume_snapshots.list()
cinderclient = self.stub_cinderclient()
cinderclient.volume_snapshots = self.mox.CreateMockAnything()
cinderclient.volume_snapshots.list().AndReturn(volume_snapshots)
self.mox.ReplayAll()
api.cinder.volume_snapshot_list(self.request)
| [
"openstack_dashboard.api.cinder.volume_list",
"openstack_dashboard.api.cinder.volume_snapshot_list"
] | [((1191, 1252), 'openstack_dashboard.api.cinder.volume_list', 'api.cinder.volume_list', (['self.request'], {'search_opts': 'search_opts'}), '(self.request, search_opts=search_opts)\n', (1213, 1252), False, 'from openstack_dashboard import api\n'), ((1580, 1625), 'openstack_dashboard.api.cinder.volume_snapshot_list', 'api.cinder.volume_snapshot_list', (['self.request'], {}), '(self.request)\n', (1611, 1625), False, 'from openstack_dashboard import api\n'), ((2188, 2233), 'openstack_dashboard.api.cinder.volume_snapshot_list', 'api.cinder.volume_snapshot_list', (['self.request'], {}), '(self.request)\n', (2219, 2233), False, 'from openstack_dashboard import api\n')] |
# Copyright 2020 Makani Technologies LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for checks."""
from makani.analysis.checks import base_check
from makani.lib.python import import_util
# TODO: Move this to //analysis/checks/base_check.py
def LoadListOfChecks(path_to_checks):
"""Load the ListOfChecks object given the path to its file and class.
Args:
path_to_checks: A string specifying the location of the checks.
E.g. makani.analysis.my_checks.MyCheck.
Returns:
The ListOfChecks object.
"""
cls = import_util.ImportClass(path_to_checks)
return cls(for_log=True)
def LoadJsonCheck(path_to_check, parameters_json):
r"""Load the Check object given the path to its classpath and parameters.
Args:
path_to_check: A string specifying the location of the checks.
E.g. makani.analysis.my_checks.MyCheck
parameters_json: A JSON serialized string of the parameters needed to
instantiate the class.
E.g. "{\"for_log\": true, \"warning_ranges\": {\"ranges\": [0, 180]},
\"normal_ranges\": {\"ranges\": [80, 150]}}"
Returns:
The Check object.
"""
cls = import_util.ImportClass(path_to_check)
parameters = base_check.ParseCheckSpecs(parameters_json)
return cls(**parameters)
def LoadCheck(path_to_check, params):
"""Load the ListOfChecks object given the path to its file and class.
Args:
path_to_check: A string specifying the location of the checks.
E.g. makani.analysis.my_checks.MyCheck.
params: A string specifying parameters to be passed into the check.
Returns:
The CheckItem object.
"""
cls = import_util.ImportClass(path_to_check)
return cls(**params)
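# Illustrative use, mirroring the docstring examples (class paths hypothetical):
#   checks = LoadListOfChecks("makani.analysis.my_checks.MyChecks")
#   check = LoadCheck("makani.analysis.my_checks.MyCheck", {"for_log": True})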
| [
"makani.analysis.checks.base_check.ParseCheckSpecs",
"makani.lib.python.import_util.ImportClass"
] | [((1051, 1090), 'makani.lib.python.import_util.ImportClass', 'import_util.ImportClass', (['path_to_checks'], {}), '(path_to_checks)\n', (1074, 1090), False, 'from makani.lib.python import import_util\n'), ((1659, 1697), 'makani.lib.python.import_util.ImportClass', 'import_util.ImportClass', (['path_to_check'], {}), '(path_to_check)\n', (1682, 1697), False, 'from makani.lib.python import import_util\n'), ((1713, 1756), 'makani.analysis.checks.base_check.ParseCheckSpecs', 'base_check.ParseCheckSpecs', (['parameters_json'], {}), '(parameters_json)\n', (1739, 1756), False, 'from makani.analysis.checks import base_check\n'), ((2144, 2182), 'makani.lib.python.import_util.ImportClass', 'import_util.ImportClass', (['path_to_check'], {}), '(path_to_check)\n', (2167, 2182), False, 'from makani.lib.python import import_util\n')] |
# p2wsh input (cust-close script: revocation path or CSV-delayed self spend)
# p2wpkh output
import argparse
import hashlib
import ecdsa
def dSHA256(data):
hash_1 = hashlib.sha256(data).digest()
hash_2 = hashlib.sha256(hash_1).digest()
return hash_2
def hash160(s):
'''sha256 followed by ripemd160'''
return hashlib.new('ripemd160', hashlib.sha256(s).digest()).digest()
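# Illustrative usage of the two helpers above (raw_tx is a hypothetical
# serialized transaction): dSHA256(raw_tx)[::-1] gives the displayed txid,
# and hash160(pubkey) gives the 20-byte P2WPKH witness program.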
def privkey_to_pubkey(privkey):
signing_key = ecdsa.SigningKey.from_string(privkey, curve=ecdsa.SECP256k1) # Don't forget to specify the curve
verifying_key = signing_key.get_verifying_key()
# Use this code block if the address you gave corresponds to the compressed public key
x_cor = bytes.fromhex(verifying_key.to_string().hex())[:32] # The first 32 bytes are the x coordinate
y_cor = bytes.fromhex(verifying_key.to_string().hex())[32:] # The last 32 bytes are the y coordinate
if int.from_bytes(y_cor, byteorder="big", signed=True) % 2 == 0: # We need to turn the y_cor into a number.
public_key = bytes.fromhex("02" + x_cor.hex())
else:
public_key = bytes.fromhex("03" + x_cor.hex())
return public_key
################################
parser = argparse.ArgumentParser()
parser.add_argument("--cust_close_privkey", "-ccpk", help="public key of cust close to-self output")
parser.add_argument("--output_pubkey", "-cpk", help="pubkey of output for the cust")
parser.add_argument("--merch_disp_pubkey", "-mdpk", help="public key of merchant dispute")
parser.add_argument("--revocation_lock", "-rl", help="revocation lock (hash160{revocation_secret})")
parser.add_argument("--to_self_delay", "-tsd", help="to_self_delay (in unit of blocks) for the merchant's to-self output")
parser.add_argument("--txid", "-tx", help="txid of outpoint as hex string")
parser.add_argument("--index", "-ind", help="index of outpoint")
parser.add_argument("--amount_btc", "-a", help="amount of btc in")
parser.add_argument("--output_btc", "-mo", help="btc to merchant close output")
args = parser.parse_args()
################################
# version is 4-bytes little endian. Version 2 should be default
version = bytes.fromhex("0200 0000")
marker = bytes.fromhex("00") # this must be 00
flag = bytes.fromhex("01") # this must be 01
# txID_str = "f4df16149735c2963832ccaa9627f4008a06291e8b932c2fc76b3a5d62d462e1"
# tx_index = 0 # index starts at 0
txID_str = args.txid
txid = (bytes.fromhex(txID_str))[::-1]
tx_index = int(args.index)
index = tx_index.to_bytes(4, byteorder="little", signed=False)
nSequence_as_blocks = int(args.to_self_delay, 16)
sequence = nSequence_as_blocks.to_bytes(4, byteorder="little", signed=False)
# todo: find a nicer way to do this
l = int(len(args.to_self_delay)/2)
short_sequence = nSequence_as_blocks.to_bytes(l, byteorder="little", signed=False)
input_amount_sat = int(float(args.amount_btc) * 100000000)
output_value_sat = int(float(args.output_btc) * 100000000)
input_amount = input_amount_sat.to_bytes(8, byteorder="little", signed=True)
output_value = output_value_sat.to_bytes(8, byteorder="little", signed=True)
cust_close_privkey_hex = args.cust_close_privkey
cust_close_privkey = bytes.fromhex(cust_close_privkey_hex)
cust_close_pubkey = privkey_to_pubkey(cust_close_privkey)
merch_disp_pubkey = bytes.fromhex(args.merch_disp_pubkey)
revocation_lock = bytes.fromhex(args.revocation_lock)
# P2WSH cust-close scriptPubKey
# 0x63 OP_IF
# 0xa8 OP_SHA256
# 0x20 OP_DATA - len(revocation_lock) (the code pushes a 32-byte lock)
# revocation_lock
# 0x88 OP_EQUALVERIFY
# 0x21 OP_DATA - len(merch_disp_pubkey)
# merch_disp_pubkey
# 0x67 OP_ELSE
# 0x__ OP_DATA - len(to_self_delay) (probably ~0x02)
# to_self_delay
# 0xb2 OP_CHECKSEQUENCEVERIFY
# 0x75 OP_DROP
# 0x21 OP_DATA - len(cust_close_pubkey)
# cust_close_pk
# 0x68 OP_ENDIF
# 0xac OP_CHECKSIG
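# Illustrative CSV-delay encoding (hypothetical to_self_delay of "90"):
# int("90", 16) == 144 blocks, which fits in one byte, so the script pushes
# b'\x90' with a 0x01 length prefix before OP_CHECKSEQUENCEVERIFY.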
cust_close_script = (
bytes.fromhex("63 a8 20")
+ revocation_lock
+ bytes.fromhex("88 21")
+ merch_disp_pubkey
+ bytes.fromhex("67")
+ (len(short_sequence)).to_bytes(1, byteorder="little", signed=False)
+ short_sequence
+ bytes.fromhex("b2 75 21")
+ cust_close_pubkey
+ bytes.fromhex("68 ac")
)
# send output to another P2WPKH address
output_pubkey = bytes.fromhex(args.output_pubkey)
output_scriptPK = bytes.fromhex("0014") + hash160(output_pubkey)
locktime = bytes.fromhex("00000000")
sighash = bytes.fromhex("01000000")
sighash_type_flag = bytes.fromhex("01")
tx_in_count = bytes.fromhex("01")
tx_out_count = bytes.fromhex("01")
##########################################
# hashPrevOuts and outpoint
outpoint = (
txid
+ index
)
hashPrevOuts = dSHA256(outpoint)
# hashSequence
hashSequence = dSHA256(sequence)
# hashOutputs and output
output = (
output_value
+ (len(output_scriptPK)).to_bytes(1, byteorder="little", signed=False)
+ output_scriptPK
)
hashOutputs = dSHA256(output)
scriptcode = (
(len(cust_close_script)).to_bytes(1, byteorder="little", signed=False)
+ cust_close_script
)
# serialized bip_143 object
bip_143 = (
version
+ hashPrevOuts
+ hashSequence
+ outpoint
+ scriptcode
+ input_amount
+ sequence
+ hashOutputs
+ locktime
+ sighash
)
hashed_bip_143 = dSHA256(bip_143)
signing_key_cust_close = ecdsa.SigningKey.from_string(cust_close_privkey, curve=ecdsa.SECP256k1) # Don't forget to specify the curve
signature_cust_close = signing_key_cust_close.sign_digest(hashed_bip_143, sigencode=ecdsa.util.sigencode_der_canonize)
witness = (
# indicate the number of stack items for the txin
bytes.fromhex("03")
# signature
+ (len(signature_cust_close)+1).to_bytes(1, byteorder="little", signed=False)
+ signature_cust_close
+ sighash_type_flag
# So that we enter OP_ELSE in the script
+ bytes.fromhex("00")
# witnessScript
# This is the script that the creator of this transaction needs to provide, and
# solve, in order to redeem the UTXO listed in the input
+ (len(cust_close_script)).to_bytes(1, byteorder="little", signed=False)
+ cust_close_script
)
scriptSig = (
bytes.fromhex("00") # length of empty scriptSig
)
final_tx = (
version
+ marker
+ flag
+ tx_in_count
+ outpoint
+ scriptSig
+ sequence
+ tx_out_count
+ output
+ witness
+ locktime
)
print(final_tx.hex())
# print(merch_close_script.hex())
| [
"hashlib.sha256",
"argparse.ArgumentParser",
"ecdsa.SigningKey.from_string"
] | [((1149, 1174), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1172, 1174), False, 'import argparse\n'), ((5586, 5657), 'ecdsa.SigningKey.from_string', 'ecdsa.SigningKey.from_string', (['cust_close_privkey'], {'curve': 'ecdsa.SECP256k1'}), '(cust_close_privkey, curve=ecdsa.SECP256k1)\n', (5614, 5657), False, 'import ecdsa\n'), ((399, 459), 'ecdsa.SigningKey.from_string', 'ecdsa.SigningKey.from_string', (['privkey'], {'curve': 'ecdsa.SECP256k1'}), '(privkey, curve=ecdsa.SECP256k1)\n', (427, 459), False, 'import ecdsa\n'), ((126, 146), 'hashlib.sha256', 'hashlib.sha256', (['data'], {}), '(data)\n', (140, 146), False, 'import hashlib\n'), ((169, 191), 'hashlib.sha256', 'hashlib.sha256', (['hash_1'], {}), '(hash_1)\n', (183, 191), False, 'import hashlib\n'), ((311, 328), 'hashlib.sha256', 'hashlib.sha256', (['s'], {}), '(s)\n', (325, 328), False, 'import hashlib\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This file imports your code from localization_logic.py and uses the Visualisation class from visualisation.py.
Additionally, it imports code from the sensor_fusion.py file.
You can change the code at your own risk!
"""
import easygopigo3 as go
import signal
import pyqtgraph as pg
import cv2
import threading
import visualisation
import read_sensors as sensors
import localization_logic as loc
import sys
import sensor_fusion as fusion
# Dictionary for holding positions
positions = fusion.positions # See sensor_fusion.py file for the positions dictionary
def slow_worker():
"""
Slower code
Low update rate is suitable for slow processes, such as image processing, displaying data to graph, etc;
"""
global positions
ret, frame = cap.read()
# Get the blob size and convert it to distance from the wall
keypoints = loc.detect_blobs(frame)
blob_size = loc.get_blob_size(keypoints)
# Save this distance to the positions dictionary
positions['current_cam'] = loc.get_distance_with_cam(blob_size)
# Call the callback function on new camera measurement from sensor_fusion module
fusion.on_camera_measurement(positions['current_cam'])
# Plot the positions and velocities
visual.draw(positions['current_us'], positions['current_enc'], positions['current_cam'],
positions['current_moving_avg_us'], positions['current_complementary'], positions['current_kalman'],
fusion.velocities.velocities['us'], fusion.velocities.velocities['enc'], fusion.velocities.velocities['cam'],
fusion.velocities.velocities['moving_avg_us'], fusion.velocities.velocities['complementary'], fusion.velocities.velocities['kalman'],
fusion.camera_gaussian, fusion.encoder_diff_gaussian, fusion.kalman_filter.filtered_result)
def signal_handler(sig, frame):
"""
This function will be called when CTRL+C is pressed
"""
close('\nYou pressed Ctrl+C! Closing the program nicely :)')
def close(message=""):
"""
Fusion visualisation specific cleanup function
"""
global running, ser, robot, timer
print(message)
running = False
robot.stop()
if ser.is_open:
ser.close()
timer.stop()
    if fast_thread.is_alive():
        try:
            fast_thread.join()
        except Exception:
            pass
sys.exit(0)
if __name__ == "__main__":
# Register a callback for CTRL+C
signal.signal(signal.SIGINT, signal_handler)
running, ser = sensors.initialize_serial('/dev/ttyUSB0')
robot = go.EasyGoPiGo3()
robot.set_speed(60)
# Open the camera
cap = cv2.VideoCapture(0)
# Create timer
timer = pg.QtCore.QTimer()
# Initialize visualization logic
visual = visualisation.initialize_visualisation(fusion.TASK, close)
# Create fast_worker in a separate thread
fast_thread = threading.Thread(
target=loc.fast_worker,
args=(running,
robot,
positions,
ser,
close)
)
fast_thread.daemon = True
fast_thread.start()
# Connecting slow_worker to timer, it will be executed with certain interval
timer.timeout.connect(slow_worker)
# Start timer with interval 100 msec
timer.start(100)
# Start the visualisation app
visual.run()
close()
| [
"localization_logic.get_distance_with_cam",
"sensor_fusion.on_camera_measurement",
"signal.signal",
"easygopigo3.EasyGoPiGo3",
"visualisation.initialize_visualisation",
"localization_logic.get_blob_size",
"localization_logic.detect_blobs",
"read_sensors.initialize_serial",
"cv2.VideoCapture",
"sys... | [((895, 918), 'localization_logic.detect_blobs', 'loc.detect_blobs', (['frame'], {}), '(frame)\n', (911, 918), True, 'import localization_logic as loc\n'), ((935, 963), 'localization_logic.get_blob_size', 'loc.get_blob_size', (['keypoints'], {}), '(keypoints)\n', (952, 963), True, 'import localization_logic as loc\n'), ((1048, 1084), 'localization_logic.get_distance_with_cam', 'loc.get_distance_with_cam', (['blob_size'], {}), '(blob_size)\n', (1073, 1084), True, 'import localization_logic as loc\n'), ((1174, 1228), 'sensor_fusion.on_camera_measurement', 'fusion.on_camera_measurement', (["positions['current_cam']"], {}), "(positions['current_cam'])\n", (1202, 1228), True, 'import sensor_fusion as fusion\n'), ((2388, 2399), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (2396, 2399), False, 'import sys\n'), ((2470, 2514), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'signal_handler'], {}), '(signal.SIGINT, signal_handler)\n', (2483, 2514), False, 'import signal\n'), ((2535, 2576), 'read_sensors.initialize_serial', 'sensors.initialize_serial', (['"""/dev/ttyUSB0"""'], {}), "('/dev/ttyUSB0')\n", (2560, 2576), True, 'import read_sensors as sensors\n'), ((2590, 2606), 'easygopigo3.EasyGoPiGo3', 'go.EasyGoPiGo3', ([], {}), '()\n', (2604, 2606), True, 'import easygopigo3 as go\n'), ((2664, 2683), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (2680, 2683), False, 'import cv2\n'), ((2716, 2734), 'pyqtgraph.QtCore.QTimer', 'pg.QtCore.QTimer', ([], {}), '()\n', (2732, 2734), True, 'import pyqtgraph as pg\n'), ((2786, 2844), 'visualisation.initialize_visualisation', 'visualisation.initialize_visualisation', (['fusion.TASK', 'close'], {}), '(fusion.TASK, close)\n', (2824, 2844), False, 'import visualisation\n'), ((2910, 3000), 'threading.Thread', 'threading.Thread', ([], {'target': 'loc.fast_worker', 'args': '(running, robot, positions, ser, close)'}), '(target=loc.fast_worker, args=(running, robot, positions,\n ser, close))\n', (2926, 3000), False, 'import threading\n')] |
import os, sys, inspect, logging, time
lib_folder = os.path.join(os.path.split(inspect.getfile( inspect.currentframe() ))[0], '..')
lib_load = os.path.realpath(os.path.abspath(lib_folder))
if lib_load not in sys.path:
sys.path.insert(0, lib_load)
import capablerobot_usbhub
hub = capablerobot_usbhub.USBHub()
## Input is also enabled on the output pin so that reading the output's current state works
hub.gpio.configure(ios=[0], output=True, input=True)
hub.gpio.configure(ios=[1], input=True, pull_down=True)
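# A simple blink-and-read loop: IO0 is toggled once per second while both
# configured IOs are read back; IO1 has a pull-down, so it should read low
# unless driven externally (e.g. by wiring it to IO0).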
while True:
hub.gpio.io0 = True
print("IO {} {}".format(*hub.gpio.io))
time.sleep(1)
hub.gpio.io0 = False
print("IO {} {}".format(*hub.gpio.io))
time.sleep(1) | [
"capablerobot_usbhub.USBHub",
"sys.path.insert",
"inspect.currentframe",
"time.sleep",
"os.path.abspath"
] | [((288, 316), 'capablerobot_usbhub.USBHub', 'capablerobot_usbhub.USBHub', ([], {}), '()\n', (314, 316), False, 'import capablerobot_usbhub\n'), ((161, 188), 'os.path.abspath', 'os.path.abspath', (['lib_folder'], {}), '(lib_folder)\n', (176, 188), False, 'import os, sys, inspect, logging, time\n'), ((224, 252), 'sys.path.insert', 'sys.path.insert', (['(0)', 'lib_load'], {}), '(0, lib_load)\n', (239, 252), False, 'import os, sys, inspect, logging, time\n'), ((601, 614), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (611, 614), False, 'import os, sys, inspect, logging, time\n'), ((689, 702), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (699, 702), False, 'import os, sys, inspect, logging, time\n'), ((97, 119), 'inspect.currentframe', 'inspect.currentframe', ([], {}), '()\n', (117, 119), False, 'import os, sys, inspect, logging, time\n')] |
from setuptools import setup
# Get the long description by reading the README
try:
readme_content = open("README.rst").read()
except Exception as e:
readme_content = ""
# Create the actual setup method
setup(
name="pycinga",
version="1.0.0",
description="Python library to write Icinga plugins.",
long_description=readme_content,
author="<NAME>",
author_email="<EMAIL>",
maintainer="<NAME>",
maintainer_email="<EMAIL>",
url="https://github.com/hurricanelabs/python-pycinga",
license="MIT License",
keywords=["nagios", "pynagios", "icinga", "pycinga", "monitoring"],
packages=["pycinga"],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: System :: Systems Administration"
]
)
| [
"setuptools.setup"
] | [((212, 899), 'setuptools.setup', 'setup', ([], {'name': '"""pycinga"""', 'version': '"""1.0.0"""', 'description': '"""Python library to write Icinga plugins."""', 'long_description': 'readme_content', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'maintainer': '"""<NAME>"""', 'maintainer_email': '"""<EMAIL>"""', 'url': '"""https://github.com/hurricanelabs/python-pycinga"""', 'license': '"""MIT License"""', 'keywords': "['nagios', 'pynagios', 'icinga', 'pycinga', 'monitoring']", 'packages': "['pycinga']", 'classifiers': "['Development Status :: 4 - Beta',\n 'Intended Audience :: System Administrators',\n 'License :: OSI Approved :: MIT License',\n 'Operating System :: OS Independent', 'Programming Language :: Python',\n 'Topic :: System :: Systems Administration']"}), "(name='pycinga', version='1.0.0', description=\n 'Python library to write Icinga plugins.', long_description=\n readme_content, author='<NAME>', author_email='<EMAIL>', maintainer=\n '<NAME>', maintainer_email='<EMAIL>', url=\n 'https://github.com/hurricanelabs/python-pycinga', license=\n 'MIT License', keywords=['nagios', 'pynagios', 'icinga', 'pycinga',\n 'monitoring'], packages=['pycinga'], classifiers=[\n 'Development Status :: 4 - Beta',\n 'Intended Audience :: System Administrators',\n 'License :: OSI Approved :: MIT License',\n 'Operating System :: OS Independent', 'Programming Language :: Python',\n 'Topic :: System :: Systems Administration'])\n", (217, 899), False, 'from setuptools import setup\n')] |
"""
Main call.
TODO:
    - parallelize the mda processing portion? (dask)
"""
import numpy as np
import matplotlib.pyplot as plt
import MDAnalysis as mda
from command_line import create_cmd_arguments, handle_command_line
from calc_relax import Calc_19F_Relaxation
from calc_fh_dists import Calc_FH_Dists
from plot_relax import Plot_Relaxation
# if python file is being used
if __name__ == '__main__':
# args_list to save time for now (TODO)
magnet = 14.1 # Tesla (600 MHz of 1H+)
tc = 8.2e-9 # 8.2ns for CypA, tc in sec
"""
Command line
"""
# Create command line arguments with argparse
argument_parser = create_cmd_arguments()
# Retrieve list of args
args = handle_command_line(argument_parser)
    # TODO: hack for now, later put as separate args?
    # CSA tensor components for the chosen fluoro-Trp system (4F/5F/6F/7F)
if args.system == "w4f":
sgm11 = 11.2
sgm22 = -48.3
sgm33 = -112.8
elif args.system == "w5f":
sgm11 = 4.8
sgm22 = -60.5
sgm33 = -86.1
elif args.system == "w6f":
sgm11 = 12.9
sgm22 = -51.2
sgm33 = -91.6
elif args.system == "w7f":
sgm11 = 4.6
sgm22 = -48.3
sgm33 = -123.3
"""
Load trajectory or pdb data and calc all F-H distances.
# TODO: do for each frame, also test with water
"""
# TODO: for big trajectories, can't load in_memory, must stream it but this can be slow
traj = mda.Universe(args.parm, args.crd, in_memory=True, in_memory_step=args.step_size)
fh_dist_base = Calc_FH_Dists(traj, dist=3).run()
"""
For each distance value, calculate the R1 and R2 value.
"""
# TODO: update to ndarrays, maybe make into function, seperate script?
# test speed and optimize
# TODO: make this able to take multiple files and find stdev, maybe a seperate proc function
# array of size frames x 3 columns (frame, avg R1, avg R2) # TODO: add stdev?
r1_r2 = np.zeros(shape=(len(fh_dist_base.results[:,1:]), 3))
r1_r2[:, 0] = fh_dist_base.results[:,0]
# Here: calling each calc class seperately and only sum the dd contributions, csa is not dependent
# note this new implementation is alot slower... (compared to having just one calc_relax and averaging later)
# but not sure, didn't test the difference
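    # Per frame: R1 = sum_i R1_dd(r_i) + R1_csa (and likewise for R2); the
    # dipole-dipole terms from each F-H pair are additive, while the CSA term
    # does not depend on distance and is added once.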
for num, dists in enumerate(fh_dist_base.results[:,1:]):
calc_relax = Calc_19F_Relaxation(tc, magnet, sgm11, sgm22, sgm33)
r1_csa = calc_relax.calc_csa_r1()
r2_csa = calc_relax.calc_csa_r2()
# TODO: these are relatively small lists, may not need to change to ndarray
# but if I do, then I need to cut out the NaN or zero values before the np.mean step
r1_dd = 0
r2_dd = 0
for fh_dist in dists:
if fh_dist == 0:
continue # TODO: is there a better way to do this?
# instantiate the calc_relax class and then call individual class methods
calc_relax = Calc_19F_Relaxation(tc, magnet, sgm11, sgm22, sgm33, fh_dist)
# sum each dd contribution
r1_dd += calc_relax.calc_dd_r1()
r2_dd += calc_relax.calc_dd_r2()
# fill in col 1 (R1), col 2 (R2)
r1_r2[num, 1] = r1_dd + r1_csa
r1_r2[num, 2] = r2_dd + r2_csa
# test seperate values
print(r1_dd, r1_csa)
print(r2_dd, r2_csa)
"""
Save the frame, avg and stdev R1 and R2 data as a tsv?
"""
if args.output_file is not None:
np.savetxt(args.output_file, r1_r2, delimiter="\t")
"""
Plot the R1 and R2 data.
"""
# plt.plot(fh_dist_base.results[:,0], r1_r2[:,0])
# plt.plot(fh_dist_base.results[:,0], r1_r2[:,1])
plt.plot(r1_r2[:, 0], r1_r2[:, 1])
plt.plot(r1_r2[:, 0], r1_r2[:, 2])
print(f"R1-AVG={np.mean(r1_r2[:,1])}\nR2-AVG={np.mean(r1_r2[:,2])}")
#plt.hlines(1.99, xmin=0, xmax=fh_dist_base.results[-1,0]) # R1
#plt.hlines(109.1, xmin=0, xmax=fh_dist_base.results[-1,0]) # R2
plt.show()
# plotter class
# plotter = Plot_Relaxation(r1_r2, "dist")
# plotter.plot_r2()
# plt.show()
| [
"numpy.mean",
"calc_fh_dists.Calc_FH_Dists",
"calc_relax.Calc_19F_Relaxation",
"command_line.handle_command_line",
"matplotlib.pyplot.plot",
"command_line.create_cmd_arguments",
"numpy.savetxt",
"MDAnalysis.Universe",
"matplotlib.pyplot.show"
] | [((681, 703), 'command_line.create_cmd_arguments', 'create_cmd_arguments', ([], {}), '()\n', (701, 703), False, 'from command_line import create_cmd_arguments, handle_command_line\n'), ((743, 779), 'command_line.handle_command_line', 'handle_command_line', (['argument_parser'], {}), '(argument_parser)\n', (762, 779), False, 'from command_line import create_cmd_arguments, handle_command_line\n'), ((1482, 1567), 'MDAnalysis.Universe', 'mda.Universe', (['args.parm', 'args.crd'], {'in_memory': '(True)', 'in_memory_step': 'args.step_size'}), '(args.parm, args.crd, in_memory=True, in_memory_step=args.step_size\n )\n', (1494, 1567), True, 'import MDAnalysis as mda\n'), ((3759, 3793), 'matplotlib.pyplot.plot', 'plt.plot', (['r1_r2[:, 0]', 'r1_r2[:, 1]'], {}), '(r1_r2[:, 0], r1_r2[:, 1])\n', (3767, 3793), True, 'import matplotlib.pyplot as plt\n'), ((3798, 3832), 'matplotlib.pyplot.plot', 'plt.plot', (['r1_r2[:, 0]', 'r1_r2[:, 2]'], {}), '(r1_r2[:, 0], r1_r2[:, 2])\n', (3806, 3832), True, 'import matplotlib.pyplot as plt\n'), ((4052, 4062), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4060, 4062), True, 'import matplotlib.pyplot as plt\n'), ((2444, 2496), 'calc_relax.Calc_19F_Relaxation', 'Calc_19F_Relaxation', (['tc', 'magnet', 'sgm11', 'sgm22', 'sgm33'], {}), '(tc, magnet, sgm11, sgm22, sgm33)\n', (2463, 2496), False, 'from calc_relax import Calc_19F_Relaxation\n'), ((3549, 3600), 'numpy.savetxt', 'np.savetxt', (['args.output_file', 'r1_r2'], {'delimiter': '"""\t"""'}), "(args.output_file, r1_r2, delimiter='\\t')\n", (3559, 3600), True, 'import numpy as np\n'), ((1582, 1609), 'calc_fh_dists.Calc_FH_Dists', 'Calc_FH_Dists', (['traj'], {'dist': '(3)'}), '(traj, dist=3)\n', (1595, 1609), False, 'from calc_fh_dists import Calc_FH_Dists\n'), ((3035, 3096), 'calc_relax.Calc_19F_Relaxation', 'Calc_19F_Relaxation', (['tc', 'magnet', 'sgm11', 'sgm22', 'sgm33', 'fh_dist'], {}), '(tc, magnet, sgm11, sgm22, sgm33, fh_dist)\n', (3054, 3096), False, 'from calc_relax import Calc_19F_Relaxation\n'), ((3853, 3873), 'numpy.mean', 'np.mean', (['r1_r2[:, 1]'], {}), '(r1_r2[:, 1])\n', (3860, 3873), True, 'import numpy as np\n'), ((3883, 3903), 'numpy.mean', 'np.mean', (['r1_r2[:, 2]'], {}), '(r1_r2[:, 2])\n', (3890, 3903), True, 'import numpy as np\n')] |
import numpy as np
class BoundBox:
"""
Adopted from https://github.com/thtrieu/darkflow/blob/master/darkflow/utils/box.py
"""
    def __init__(self, obj_prob, probs=None, box_coord=None):
        # Avoid a shared mutable default argument; fall back to a zeroed box.
        box_coord = [0.0, 0.0, 0.0, 0.0] if box_coord is None else box_coord
        self.x, self.y = float(box_coord[0]), float(box_coord[1])
        self.w, self.h = float(box_coord[2]), float(box_coord[3])
self.c = 0.
self.obj_prob = obj_prob
self.class_probs = None if probs is None else np.array(probs)
def get_score(self):
return max(self.class_probs)
def get_classindex(self):
return np.argmax(self.class_probs) # class_index = np.argmax(box.classes)
def get_coordinates(self):
return self.x, self.y, self.w, self.h
def overlap(x1, w1, x2, w2):
l1 = x1 - w1 / 2.
l2 = x2 - w2 / 2.
left = max(l1, l2)
r1 = x1 + w1 / 2.
r2 = x2 + w2 / 2.
right = min(r1, r2)
return right - left
def box_intersection(a, b):
w = overlap(a.x, a.w, b.x, b.w)
h = overlap(a.y, a.h, b.y, b.h)
    if w < 0 or h < 0:
        return 0
area = w * h
return area
def box_union(a, b):
i = box_intersection(a, b)
u = a.w * a.h + b.w * b.h - i
return u
def box_iou(a, b):
# Box intersect over union.
return box_intersection(a, b) / box_union(a, b)
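# Minimal sanity check (box coordinates are center x/y plus width/height):
# two identical 2x2 boxes intersect fully, so box_iou(a, a) == 4.0 / 4.0 == 1.0.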
def prob_compare(box):
return box.probs[box.class_num]
def prob_compare2(boxa, boxb):
if (boxa.pi < boxb.pi):
return 1
elif (boxa.pi == boxb.pi):
return 0
else:
return -1
| [
"numpy.array",
"numpy.argmax"
] | [((594, 621), 'numpy.argmax', 'np.argmax', (['self.class_probs'], {}), '(self.class_probs)\n', (603, 621), True, 'import numpy as np\n'), ((469, 484), 'numpy.array', 'np.array', (['probs'], {}), '(probs)\n', (477, 484), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
"""
Created on Feb 28 20:56:45 2020
Copyright (c) Huawei Technologies Co., Ltd. 2019. All rights reserved.
"""
import json
import os
import sys
if __name__ == '__main__':
if len(sys.argv) != 3:
print(sys.argv)
        print('argv error, usage: insert_op_info.py your_op_file lib_op_file')
exit(2)
with open(sys.argv[1], 'r') as load_f:
insert_op = json.load(load_f)
all_op = {}
if os.path.exists(sys.argv[2]):
if os.path.getsize(sys.argv[2]) != 0:
with open(sys.argv[2], 'r') as load_f:
all_op = json.load(load_f)
for k in insert_op.keys():
if k in all_op.keys():
print('replace op:[', k, '] success')
else:
print('insert op:[', k, '] success')
all_op[k] = insert_op[k]
with open(sys.argv[2], 'w') as f:
f.write(json.dumps(all_op, indent=4))
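# Example invocation (hypothetical file names):
#   python insert_op_info.py new_ops.json all_ops.json
# merges the keys of new_ops.json into all_ops.json, overwriting duplicates.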
| [
"json.load",
"os.path.exists",
"os.path.getsize",
"json.dumps"
] | [((441, 468), 'os.path.exists', 'os.path.exists', (['sys.argv[2]'], {}), '(sys.argv[2])\n', (455, 468), False, 'import os\n'), ((399, 416), 'json.load', 'json.load', (['load_f'], {}), '(load_f)\n', (408, 416), False, 'import json\n'), ((481, 509), 'os.path.getsize', 'os.path.getsize', (['sys.argv[2]'], {}), '(sys.argv[2])\n', (496, 509), False, 'import os\n'), ((874, 902), 'json.dumps', 'json.dumps', (['all_op'], {'indent': '(4)'}), '(all_op, indent=4)\n', (884, 902), False, 'import json\n'), ((592, 609), 'json.load', 'json.load', (['load_f'], {}), '(load_f)\n', (601, 609), False, 'import json\n')] |
import unittest
import docs as bp
class ApplicationClassTest(unittest.TestCase):
def setUp(self):
self.app = bp.Application()
def test_set_fps(self):
self.app.set_fps(30)
self.assertEqual(self.app.fps, 30)
self.app.set_fps(60)
self.assertEqual(self.app.fps, 60)
if __name__ == '__main__':
unittest.main()
| [
"unittest.main",
"docs.Application"
] | [((347, 362), 'unittest.main', 'unittest.main', ([], {}), '()\n', (360, 362), False, 'import unittest\n'), ((123, 139), 'docs.Application', 'bp.Application', ([], {}), '()\n', (137, 139), True, 'import docs as bp\n')] |
# -*- coding: utf-8 -*-
from django.conf.urls import include, url
from .user_admin_views import UserAdminListViewSet
user_admin_list=UserAdminListViewSet.as_view({
"get":"get"
})
urlpatterns = (
url(r'^user$', user_admin_list, name='user-admin-list'),
)
| [
"django.conf.urls.url"
] | [((209, 263), 'django.conf.urls.url', 'url', (['"""^user$"""', 'user_admin_list'], {'name': '"""user-admin-list"""'}), "('^user$', user_admin_list, name='user-admin-list')\n", (212, 263), False, 'from django.conf.urls import include, url\n')] |
import pandas as pd
import numpy as np
import click
import os
PRIORITY = ('Read-through', 'Protein coding',
'Pseudogene', 'TUCP', 'lncrna', 'lncRNA', 'other', 'ncRNA,other')
type_map = {
'other': 'lncRNA',
'ncRNA,other': 'lncRNA',
'lncrna': 'lncRNA',
'protein_coding': 'Protein coding',
'pseudogene': 'Pseudogene',
'read_through': 'Read-through'
}
@click.command()
@click.option(
'-m',
'--meta_table',
type=click.Path(exists=True, dir_okay=False),
help='taco compare metadata',
required=True,
)
@click.option(
'-t',
'--tucp',
type=click.Path(exists=True, dir_okay=False),
help='tucp transcripts.',
required=True,
)
@click.option(
'-o',
'--out_dir',
type=click.Path(file_okay=False),
help='gene classify/summary directory based on \
taco compare result and feelnc classify.',
required=True
)
@click.option(
'-n',
'--name',
type=click.STRING,
help='Summary table name',
default=None
)
def main(meta_table, tucp, out_dir, name):
meta_table_df = pd.read_table(meta_table, index_col=0)
tucp_df = pd.read_table(tucp, header=None, index_col=0)
tucp_series = tucp_df.index.intersection(meta_table_df.index)
# label TUCP
meta_table_df.loc[tucp_series, 'category'] = 'TUCP'
# label read_through
mask = meta_table_df.category_relative_detail == 'read_through'
meta_table_df.loc[mask, 'category'] = 'read_through'
# filter out intronic transcripts
meta_table_df = meta_table_df[meta_table_df.category_relative_detail !=
'intronic_same_strand']
# rename gene type to analysis name
meta_table_df.loc[:, 'category'].replace(type_map, inplace=True)
# function to summarize transcript/gene type
def type_summary(type_df, col_name):
type_df.columns = ['category', 'novel_status']
type_summary = type_df.groupby(
['category', 'novel_status']).size()
type_summary.name = col_name
type_summary = pd.DataFrame(type_summary)
f_sum = type_summary.unstack('novel_status', fill_value=0)
f_sum.loc[:, (col_name, 'Total')] = (
f_sum.loc[:, (col_name, 'Annotated')] +
f_sum.loc[:, (col_name, 'Unannotated')])
return f_sum
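    # Illustrative result of type_summary (hypothetical counts); the columns
    # form a (col_name, novel_status) MultiIndex:
    #                  Transcript
    # novel_status      Annotated  Unannotated  Total
    # category
    # lncRNA                  120          450    570
    # Protein coding         8000          300   8300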
# annotation status according to exonic_overlap
meta_table_df.loc[:, 'novel_status'] = np.where(
meta_table_df.category_relative == 'exonic_overlap',
'Annotated', 'Unannotated')
meta_table_df = meta_table_df.reset_index()
tr_sum = type_summary(meta_table_df.loc[:, ['category', 'novel_status']],
'Transcript')
meta_table_df.loc[:, 'new_gene_id'] = meta_table_df.novel_status + \
'.' + meta_table_df.gene_id
tr_type_df = meta_table_df.loc[:, ['transcript_id', 'new_gene_id', 'category']]
meta_table_type_df = meta_table_df.loc[:, [
'new_gene_id', 'category', 'novel_status']]
meta_table_type_df.columns = ['gene_id', 'category', 'novel_status']
gene_type_map = meta_table_type_df.groupby(
['gene_id', 'novel_status'])['category'].unique()
meta_table_df = meta_table_df.reset_index()
gene_name_df = meta_table_df.loc[:, ['new_gene_id',
'category_relative',
'ref_gene_id',
'ref_gene_name']]
gene_name_df.columns = [
'gene_id', 'category_relative', 'ref_gene_id', 'ref_gene_name']
gene_name_df = gene_name_df[gene_name_df.category_relative ==
'exonic_overlap']
gene_name_df = gene_name_df.loc[:, [
'gene_id', 'ref_gene_id', 'ref_gene_name']].drop_duplicates()
def get_type(type_list):
for each_type in PRIORITY:
if each_type in type_list:
return type_map.get(each_type, each_type)
gene_type_list = map(get_type, gene_type_map)
gene_type_df = pd.DataFrame(
gene_type_list, index=gene_type_map.index, columns=['type'])
gene_type_df = gene_type_df.reset_index().set_index('gene_id')
read_through_genes = gene_type_df[gene_type_df.type ==
"Read-through"].index
gene_name_df = gene_name_df[~gene_name_df.gene_id.isin(read_through_genes)]
gene_name_df = gene_name_df.set_index('gene_id')
read_through_sup = gene_name_df[
gene_name_df.index.value_counts() > 1].index.unique()
gene_type_df.loc[read_through_sup, 'type'] = 'Read-through'
g_sum = type_summary(gene_type_df.loc[:, ['type', 'novel_status']],
'Gene')
type_stats = pd.concat([tr_sum, g_sum], axis=1)
type_stats.index.name = 'Category'
summary_file = os.path.join(out_dir, 'assembly.number.summary.txt')
classify_file = os.path.join(out_dir, 'gene.classify.txt')
tr_classify_file = os.path.join(out_dir, 'tr.classify.txt')
name_file = os.path.join(out_dir, 'gene.name.txt')
if name is not None:
type_stats.loc[:, ('', 'Name')] = name
output_header = False
else:
output_header = True
gene_type_df = gene_type_df.drop('novel_status', axis=1)
type_stats.to_csv(summary_file, sep='\t', header=output_header)
gene_type_df.to_csv(classify_file, sep='\t')
tr_type_df.to_csv(tr_classify_file, sep='\t', index=False)
gene_name_df = gene_name_df[gene_name_df.index.value_counts() == 1]
gene_name_df.to_csv(name_file, sep='\t')
if __name__ == '__main__':
main()
| [
"click.option",
"numpy.where",
"os.path.join",
"click.Path",
"pandas.read_table",
"pandas.DataFrame",
"click.command",
"pandas.concat"
] | [((390, 405), 'click.command', 'click.command', ([], {}), '()\n', (403, 405), False, 'import click\n'), ((897, 989), 'click.option', 'click.option', (['"""-n"""', '"""--name"""'], {'type': 'click.STRING', 'help': '"""Summary table name"""', 'default': 'None'}), "('-n', '--name', type=click.STRING, help='Summary table name',\n default=None)\n", (909, 989), False, 'import click\n'), ((1071, 1109), 'pandas.read_table', 'pd.read_table', (['meta_table'], {'index_col': '(0)'}), '(meta_table, index_col=0)\n', (1084, 1109), True, 'import pandas as pd\n'), ((1124, 1169), 'pandas.read_table', 'pd.read_table', (['tucp'], {'header': 'None', 'index_col': '(0)'}), '(tucp, header=None, index_col=0)\n', (1137, 1169), True, 'import pandas as pd\n'), ((2397, 2490), 'numpy.where', 'np.where', (["(meta_table_df.category_relative == 'exonic_overlap')", '"""Annotated"""', '"""Unannotated"""'], {}), "(meta_table_df.category_relative == 'exonic_overlap', 'Annotated',\n 'Unannotated')\n", (2405, 2490), True, 'import numpy as np\n'), ((3983, 4056), 'pandas.DataFrame', 'pd.DataFrame', (['gene_type_list'], {'index': 'gene_type_map.index', 'columns': "['type']"}), "(gene_type_list, index=gene_type_map.index, columns=['type'])\n", (3995, 4056), True, 'import pandas as pd\n'), ((4670, 4704), 'pandas.concat', 'pd.concat', (['[tr_sum, g_sum]'], {'axis': '(1)'}), '([tr_sum, g_sum], axis=1)\n', (4679, 4704), True, 'import pandas as pd\n'), ((4763, 4815), 'os.path.join', 'os.path.join', (['out_dir', '"""assembly.number.summary.txt"""'], {}), "(out_dir, 'assembly.number.summary.txt')\n", (4775, 4815), False, 'import os\n'), ((4836, 4878), 'os.path.join', 'os.path.join', (['out_dir', '"""gene.classify.txt"""'], {}), "(out_dir, 'gene.classify.txt')\n", (4848, 4878), False, 'import os\n'), ((4902, 4942), 'os.path.join', 'os.path.join', (['out_dir', '"""tr.classify.txt"""'], {}), "(out_dir, 'tr.classify.txt')\n", (4914, 4942), False, 'import os\n'), ((4959, 4997), 'os.path.join', 'os.path.join', (['out_dir', '"""gene.name.txt"""'], {}), "(out_dir, 'gene.name.txt')\n", (4971, 4997), False, 'import os\n'), ((2035, 2061), 'pandas.DataFrame', 'pd.DataFrame', (['type_summary'], {}), '(type_summary)\n', (2047, 2061), True, 'import pandas as pd\n'), ((460, 499), 'click.Path', 'click.Path', ([], {'exists': '(True)', 'dir_okay': '(False)'}), '(exists=True, dir_okay=False)\n', (470, 499), False, 'import click\n'), ((604, 643), 'click.Path', 'click.Path', ([], {'exists': '(True)', 'dir_okay': '(False)'}), '(exists=True, dir_okay=False)\n', (614, 643), False, 'import click\n'), ((747, 774), 'click.Path', 'click.Path', ([], {'file_okay': '(False)'}), '(file_okay=False)\n', (757, 774), False, 'import click\n')] |
from django import template
register = template.Library()
@register.filter
def is_bbb_mod(room, user):
return room.is_moderator(user)
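# Illustrative template usage (after {% load %}-ing this tag library):
#   {% if room|is_bbb_mod:user %} ... moderator-only markup ... {% endif %}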
| [
"django.template.Library"
] | [((40, 58), 'django.template.Library', 'template.Library', ([], {}), '()\n', (56, 58), False, 'from django import template\n')] |
# -*- coding: utf-8 -*-
import scinum as sn
import numpy as np
def create_config(base_cfg):
# setup the config for 2018 data
from analysis.config.campaign_UltraLegacy18 import campaign as campaign_UltraLegacy18
from analysis.config.jet_tagging_sf import ch_ee, ch_emu, ch_mumu, ch_e, ch_mu
cfg = base_cfg.copy(campaign=campaign_UltraLegacy18)
# add datasets
dataset_names = [
"data_A_ee", "data_B_ee", "data_C_ee", "data_D_ee",
"data_A_emu", "data_B_emu", "data_C_emu", "data_D_emu",
"data_A_mumu", "data_B_mumu", "data_C_mumu", "data_D_mumu",
"data_A_e", "data_B_e", "data_C_e", "data_D_e",
"data_A_mu", "data_B_mu", "data_C_mu", "data_D_mu",
"tt_dl", "tt_sl",
"dy_lep_10To50",
#"dy_lep_50ToInf",
"dy_lep_LO_50ToInf",
#"dy_lep_0Jets", "dy_lep_1Jets", "dy_lep_2Jets",
"st_s_lep",
"st_t_t", "st_t_tbar",
"st_tW_t", "st_tW_tbar",
"WW", "WZ", "ZZ",
"W_lep",
#"ttH",
#"ttWJets_lep", "ttWJets_had", "ttZJets_lep", "ttZJets_had",
]
for dataset_name in dataset_names:
dataset = campaign_UltraLegacy18.get_dataset(dataset_name)
cfg.add_dataset(dataset)
# store channels per real dataset
cfg.set_aux("dataset_channels", {
dataset: cfg.get_channel(dataset.name.split("_")[-1])
for dataset in cfg.datasets.values()
if dataset.is_data
})
# store b-tagger working points
cfg.set_aux("working_points", {
"deepcsv": {
"loose": 0.1208,
"medium": 0.4168,
"tight": 0.7665,
},
"deepjet": {
"loose": 0.0490,
"medium": 0.2783,
"tight": 0.7100,
}
})
# luminosities per channel in /pb
cfg.set_aux("lumi", {
ch_ee: 59830.,
ch_emu: 59830.,
ch_mumu: 59830.,
ch_e: 59830.,
ch_mu: 59830.,
})
# run ranges
rr = cfg.set_aux("run_ranges", {
"A": (315252, 316995),
"B": (316998, 319312),
"C": (319313, 320393),
"D": (320394, 325273),
})
# global tags
cfg.set_aux("global_tag", {
"data": "106X_dataRun2_v28",
"mc": "106X_upgrade2018_realistic_v11_L1v1",
})
# lumi, normtag and pileup file
cfg.set_aux("lumi_file", "/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions18/13TeV/"
"Legacy_2018/Cert_314472-325175_13TeV_Legacy2018_Collisions18_JSON.txt")
# https://twiki.cern.ch/twiki/bin/view/CMS/TWikiLUM
cfg.set_aux("normtag_file", "/cvmfs/cms-bril.cern.ch/cms-lumi-pog/Normtags/normtag_PHYSICS.json")
cfg.set_aux("pileup_file", "/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions18/13TeV/"
"PileUp/pileup_latest.txt")
# triggers
# https://twiki.cern.ch/twiki/bin/view/CMS/TopTriggerYear2018
cfg.set_aux("triggers", {
ch_ee: [
"HLT_Ele23_Ele12_CaloIdL_TrackIdL_IsoVL_v*",
"HLT_Ele23_Ele12_CaloIdL_TrackIdL_IsoVL_DZ_v*",
],
ch_emu: [
"HLT_Mu23_TrkIsoVVL_Ele12_CaloIdL_TrackIdL_IsoVL_v*",
"HLT_Mu23_TrkIsoVVL_Ele12_CaloIdL_TrackIdL_IsoVL_DZ_v*",
"HLT_Mu12_TrkIsoVVL_Ele23_CaloIdL_TrackIdL_IsoVL_DZ_v*",
"HLT_Mu8_TrkIsoVVL_Ele23_CaloIdL_TrackIdL_IsoVL_DZ_v*",
],
ch_mumu: [
"HLT_Mu17_TrkIsoVVL_Mu8_TrkIsoVVL_DZ_Mass3p8_v*",
"HLT_Mu17_TrkIsoVVL_Mu8_TrkIsoVVL_DZ_Mass8_v*",
],
ch_e: [
"HLT_Ele35_WPTight_Gsf_v*",
"HLT_Ele28_eta2p1_WPTight_Gsf_HT150_v*",
],
ch_mu: [
"HLT_IsoMu24_v*",
],
})
# special triggers per real dataset
cfg.set_aux("data_triggers", {})
# MET filters
# https://twiki.cern.ch/twiki/bin/view/CMS/MissingETOptionalFiltersRun2
cfg.set_aux("metFilters", {
"data": [
"Flag_goodVertices", "Flag_globalSuperTightHalo2016Filter", "Flag_HBHENoiseFilter",
"Flag_HBHENoiseIsoFilter", "Flag_EcalDeadCellTriggerPrimitiveFilter",
"Flag_BadPFMuonFilter", #"Flag_BadChargedCandidateFilter",
"Flag_eeBadScFilter", #"Flag_ecalBadCalibReducedMINIAODFilter",
],
"mc": [
"Flag_goodVertices", "Flag_globalSuperTightHalo2016Filter", "Flag_HBHENoiseFilter",
"Flag_HBHENoiseIsoFilter", "Flag_EcalDeadCellTriggerPrimitiveFilter",
"Flag_BadPFMuonFilter", #"Flag_BadChargedCandidateFilter",
#"Flag_ecalBadCalibReducedMINIAODFilter",
],
})
# JER
cfg.set_aux("jer_version", "Summer19UL18_JRV2")
# JES
cfg.set_aux("jes_version", {
"data": [
rr["A"] + ("Summer19UL18_RunA_V5_DATA",),
rr["B"] + ("Summer19UL18_RunB_V5_DATA",),
rr["C"] + ("Summer19UL18_RunC_V5_DATA",),
rr["D"] + ("Summer19UL18_RunD_V5_DATA",),
],
"mc": [
(1, int(1e9), "Summer19UL18_V5_MC"),
],
})
# JES veto maps
cfg.set_aux("jes_veto_map", {
"file": "Summer19UL18_V1/hotjets-UL18.root",
"hist_name": "h2hot_ul18_plus_hem1516_plus_hbp2m1",
})
cfg.set_aux("jes_uncertainty_file", {
"factorized": None, # take file from jes github
"reduced": "",
})
# https://github.com/cms-sw/cmssw/blob/master/SimGeneral/MixingModule/python/mix_2018_25ns_UltraLegacy_PoissonOOTPU_cfi.py
cfg.set_aux("pileup_mc", [
8.89374611122e-07, 1.1777062868e-05, 3.99725585118e-05, 0.000129888015252, 0.000265224848687,
0.000313088635109, 0.000353781668514, 0.000508787237162, 0.000873670065767, 0.00147166880932,
0.00228230649018, 0.00330375581273, 0.00466047608406, 0.00624959203029, 0.00810375867901,
0.010306521821, 0.0129512453978, 0.0160303925502, 0.0192913204592, 0.0223108613632,
0.0249798930986, 0.0273973789867, 0.0294402350483, 0.031029854302, 0.0324583524255,
0.0338264469857, 0.0351267479019, 0.0360320204259, 0.0367489568401, 0.0374133183052,
0.0380352633799, 0.0386200967002, 0.039124376968, 0.0394201612616, 0.0394673457109,
0.0391705388069, 0.0384758587461, 0.0372984548399, 0.0356497876549, 0.0334655175178,
0.030823567063, 0.0278340752408, 0.0246009685048, 0.0212676009273, 0.0180250593982,
0.0149129830776, 0.0120582333486, 0.00953400069415, 0.00738546929512, 0.00563442079939,
0.00422052915668, 0.00312446316347, 0.00228717533955, 0.00164064894334, 0.00118425084792,
0.000847785826565, 0.000603466454784, 0.000419347268964, 0.000291768785963, 0.000199761337863,
0.000136624574661, 9.46855200945e-05, 6.80243180179e-05, 4.94806013765e-05, 3.53122628249e-05,
2.556765786e-05, 1.75845711623e-05, 1.23828210848e-05, 9.31669724108e-06, 6.0713272037e-06,
3.95387384933e-06, 2.02760874107e-06, 1.22535149516e-06, 9.79612472109e-07, 7.61730246474e-07,
4.2748847738e-07, 2.41170461205e-07, 1.38701083552e-07, 3.37678010922e-08, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0
])
# https://twiki.cern.ch/twiki/bin/viewauth/CMS/PileupJSONFileforData#Pileup_JSON_Files_For_Run_II
cfg.set_aux("min_bias_xs", sn.Number(69.2, (sn.Number.REL, 0.046))) # mb
# file merging information (stage -> dataset -> files after merging)
cfg.set_aux("file_merging", {
"trees": {
"data_D_e": 2,
"data_A_mu": 2,
"data_D_mu": 3,
"tt_dl": 456,
"tt_sl": 491,
"dy_lep_LO_50ToInf": 30,
"st_s_lep": 14,
"st_t_t": 14,
"st_t_tbar": 7,
"st_tW_t": 34,
"st_tW_tbar": 31,
"WW": 3,
"WZ": 2,
"W_lep": 3
}
})
# versions
cfg.set_aux("versions", {
"WriteTrees": "prod2", # including SL events
"MergeTrees": "prod2",
"MergeMetaData": "prod2",
"WriteHistograms": "prod2",
"MergeHistograms": "prod2",
"MeasureCScaleFactors": "prod1",
"MeasureScaleFactors": "prod1",
"FitScaleFactors": "prod1",
"BundleScaleFactors": "prod1",
"GetScaleFactorWeights": "prod1",
"MergeScaleFactorWeights": "prod1",
"OptimizeBinning": "prod1",
"CreateScaleFactorResults": "prod1",
})
return cfg
| [
"analysis.config.campaign_UltraLegacy18.campaign.get_dataset",
"scinum.Number"
] | [((1151, 1199), 'analysis.config.campaign_UltraLegacy18.campaign.get_dataset', 'campaign_UltraLegacy18.get_dataset', (['dataset_name'], {}), '(dataset_name)\n', (1185, 1199), True, 'from analysis.config.campaign_UltraLegacy18 import campaign as campaign_UltraLegacy18\n'), ((7288, 7327), 'scinum.Number', 'sn.Number', (['(69.2)', '(sn.Number.REL, 0.046)'], {}), '(69.2, (sn.Number.REL, 0.046))\n', (7297, 7327), True, 'import scinum as sn\n')] |
#!/usr/bin/env python
# jojo_xia
import os
import re
import socket
from urllib import request
from urllib.error import ContentTooShortError
import apk_info
import data_utils
import file_utils
socket.setdefaulttimeout(30)
class qihu:
def __init__(self):
self.url_list = []
self.apk_list = []
self.index = 0
self.download_path = data_utils.parse_cfg('download', 'path', '../apks')
self.baseurl = 'http://zhushou.360.cn/list/index/cid/1?page='
def get_url(self, page):
for i in range(1, page + 1):
self.url_list.append(self.baseurl + str(i))
def get_app(self):
print('download root dir is : %s' % self.download_path)
if not os.path.exists(self.download_path):
os.makedirs(self.download_path)
for index in range(len(self.url_list)):
self.index = 0
self.apk_list = []
response = request.urlopen(self.url_list[index], timeout=15)
html = response.read()
html = html.decode('utf-8')
# print('url list is : ', re.findall(r"(?<=&url=).*?apk", html))
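            # capture the url-encoded apk download links that follow the '&url=' query parameter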
link_list = re.findall(r"(?<=&url=).*?%26v%3D%26f%3Dz.apk", html)
patten = re.compile(r'thirdlink&name=(.*?)&icon=')
app_name_list = patten.findall(html)
print("当前分类: %d, 本页共计%d个app,将依次进行下载,详情如下:" % (index, len(app_name_list)), app_name_list)
for url in link_list:
try:
app_name = '{0}.apk'.format(app_name_list[self.index])
if " " in app_name:
print("app name constrains ")
app_name = app_name.replace(" ", '')
# http://s.shouji.qihucdn.com/210615/88e3d6ad97f17836fc2be9c7f10f8ee8/com.doumi.jianzhi_134.apk
# ?en=curpage%3D%26exp%3D1626254620%26from%3DAppList_json%26m2%3D%26ts%3D1625649820%26tok%3Dbb89589c22a22c76bc917767b8083660%26v%3D%26f%3Dz.apk
file_path = os.path.join(self.download_path, app_name)
if not os.path.isfile(file_path):
count = 1
while count <= 5:
try:
print('\rtry to download %s with %d times' % (file_path, count))
self.real_down(url=url, file_path=file_path)
break
except socket.timeout:
error_info = 'Reloading for %d time' % count if count == 1 else 'Reloading for %d times' % count
print("\rerror info : %s" % error_info)
count += 1
except ContentTooShortError:
                                print('Network conditions are not good. Reloading...')
self.real_down(url=url, file_path=file_path)
if count > 5:
print('\ndownload failed!')
else:
print('\nfile already exists! file path is : %s' % file_path)
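                        # verify the cached apk against its stored md5; purge stale or corrupt files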
md5_file = '{0}.md5'.format(file_path)
if not os.path.isfile(md5_file):
os.remove(file_path)
else:
file_out = open(md5_file, "r")
if file_out.read() == file_utils.gen_file_md5(file_path):
apk_info.get_apk_info(file_path)
else:
os.remove(file_path)
os.remove(md5_file)
self.index = self.index + 1
except Exception as e:
print('\rexception >> %s --> %s' % (url, str(e)))
def real_down(self, url, file_path):
def reporthook(block_num, block_size, block_total):
print('\rdownload progress: %.2f%%' % (block_num * block_size * 100.0 / block_total), end="")
request.urlretrieve(url, file_path, reporthook=reporthook)
apk_info.get_apk_info(file_path)
request.urlcleanup()
file_size = os.path.getsize(file_path)
print('\rdownload finished, file size : %.2f MB' % (file_size / 1024 / 1024))
file_utils.gen_file_md5(file_path)
# time.sleep(3)
def start(self):
self.get_url(50)
self.get_app()
| [
"os.path.exists",
"os.path.getsize",
"os.makedirs",
"urllib.request.urlretrieve",
"re.compile",
"data_utils.parse_cfg",
"file_utils.gen_file_md5",
"os.path.join",
"apk_info.get_apk_info",
"urllib.request.urlcleanup",
"os.path.isfile",
"re.findall",
"urllib.request.urlopen",
"socket.setdefa... | [((194, 222), 'socket.setdefaulttimeout', 'socket.setdefaulttimeout', (['(30)'], {}), '(30)\n', (218, 222), False, 'import socket\n'), ((367, 418), 'data_utils.parse_cfg', 'data_utils.parse_cfg', (['"""download"""', '"""path"""', '"""../apks"""'], {}), "('download', 'path', '../apks')\n", (387, 418), False, 'import data_utils\n'), ((4078, 4136), 'urllib.request.urlretrieve', 'request.urlretrieve', (['url', 'file_path'], {'reporthook': 'reporthook'}), '(url, file_path, reporthook=reporthook)\n', (4097, 4136), False, 'from urllib import request\n'), ((4145, 4177), 'apk_info.get_apk_info', 'apk_info.get_apk_info', (['file_path'], {}), '(file_path)\n', (4166, 4177), False, 'import apk_info\n'), ((4186, 4206), 'urllib.request.urlcleanup', 'request.urlcleanup', ([], {}), '()\n', (4204, 4206), False, 'from urllib import request\n'), ((4227, 4253), 'os.path.getsize', 'os.path.getsize', (['file_path'], {}), '(file_path)\n', (4242, 4253), False, 'import os\n'), ((4348, 4382), 'file_utils.gen_file_md5', 'file_utils.gen_file_md5', (['file_path'], {}), '(file_path)\n', (4371, 4382), False, 'import file_utils\n'), ((715, 749), 'os.path.exists', 'os.path.exists', (['self.download_path'], {}), '(self.download_path)\n', (729, 749), False, 'import os\n'), ((763, 794), 'os.makedirs', 'os.makedirs', (['self.download_path'], {}), '(self.download_path)\n', (774, 794), False, 'import os\n'), ((925, 974), 'urllib.request.urlopen', 'request.urlopen', (['self.url_list[index]'], {'timeout': '(15)'}), '(self.url_list[index], timeout=15)\n', (940, 974), False, 'from urllib import request\n'), ((1151, 1203), 're.findall', 're.findall', (['"""(?<=&url=).*?%26v%3D%26f%3Dz.apk"""', 'html'], {}), "('(?<=&url=).*?%26v%3D%26f%3Dz.apk', html)\n", (1161, 1203), False, 'import re\n'), ((1226, 1266), 're.compile', 're.compile', (['"""thirdlink&name=(.*?)&icon="""'], {}), "('thirdlink&name=(.*?)&icon=')\n", (1236, 1266), False, 'import re\n'), ((2016, 2058), 'os.path.join', 'os.path.join', (['self.download_path', 'app_name'], {}), '(self.download_path, app_name)\n', (2028, 2058), False, 'import os\n'), ((2086, 2111), 'os.path.isfile', 'os.path.isfile', (['file_path'], {}), '(file_path)\n', (2100, 2111), False, 'import os\n'), ((3249, 3273), 'os.path.isfile', 'os.path.isfile', (['md5_file'], {}), '(md5_file)\n', (3263, 3273), False, 'import os\n'), ((3303, 3323), 'os.remove', 'os.remove', (['file_path'], {}), '(file_path)\n', (3312, 3323), False, 'import os\n'), ((3463, 3497), 'file_utils.gen_file_md5', 'file_utils.gen_file_md5', (['file_path'], {}), '(file_path)\n', (3486, 3497), False, 'import file_utils\n'), ((3531, 3563), 'apk_info.get_apk_info', 'apk_info.get_apk_info', (['file_path'], {}), '(file_path)\n', (3552, 3563), False, 'import apk_info\n'), ((3630, 3650), 'os.remove', 'os.remove', (['file_path'], {}), '(file_path)\n', (3639, 3650), False, 'import os\n'), ((3683, 3702), 'os.remove', 'os.remove', (['md5_file'], {}), '(md5_file)\n', (3692, 3702), False, 'import os\n')] |
import subprocess
import time
import socket
import os
import smtplib
class CyberCPLogFileWriter:
fileName = "/home/cyberpanel/error-logs.txt"
@staticmethod
def SendEmail(sender, receivers, message, subject=None, type=None):
try:
smtpPath = '/home/cyberpanel/smtpDetails'
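            # prefer the admin-configured SMTP server if present, otherwise fall back to the local MTA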
if os.path.exists(smtpPath):
import json
mailSettings = json.loads(open(smtpPath, 'r').read())
smtpHost = mailSettings['smtpHost']
smtpPort = mailSettings['smtpPort']
smtpUserName = mailSettings['smtpUserName']
smtpPassword = mailSettings['smtpPassword']
smtpServer = smtplib.SMTP(str(smtpHost), int(smtpPort))
smtpServer.login(smtpUserName, smtpPassword)
##
if subject != None:
message = 'Subject: {}\n\n{}'.format(subject, message)
smtpServer.sendmail(smtpUserName, receivers, message)
else:
smtpObj = smtplib.SMTP('localhost')
smtpObj.sendmail(sender, receivers, message)
except BaseException as msg:
CyberCPLogFileWriter.writeToFile(str(msg))
@staticmethod
def writeToFile(message, email=None):
try:
file = open(CyberCPLogFileWriter.fileName,'a')
file.writelines("[" + time.strftime(
"%m.%d.%Y_%H-%M-%S") + "] "+ message + "\n")
file.close()
## Send Email
emailPath = '/usr/local/CyberCP/emailDebug'
try:
if os.path.exists(emailPath):
SUBJECT = "CyberPanel log reporting"
adminEmailPath = '/home/cyberpanel/adminEmail'
adminEmail = open(adminEmailPath, 'r').read().rstrip('\n')
sender = 'root@%s' % (socket.gethostname())
TO = [adminEmail]
message = """\
From: %s
To: %s
Subject: %s
%s
""" % (
sender, ", ".join(TO), SUBJECT, '[%s] %s. \n' % (time.strftime("%m.%d.%Y_%H-%M-%S"), message))
if email == None or email == 1:
CyberCPLogFileWriter.SendEmail(sender, TO, message)
except BaseException as msg:
file = open(CyberCPLogFileWriter.fileName, 'a')
file.writelines("[" + time.strftime(
"%m.%d.%Y_%H-%M-%S") + "] " + str(msg) + "\n")
file.close()
except BaseException as msg:
return "Can not write to error file."
@staticmethod
def writeforCLI(message, level, method):
try:
file = open(CyberCPLogFileWriter.fileName, 'a')
file.writelines("[" + time.strftime(
"%m.%d.%Y_%H-%M-%S") + "] [" + level + ":" + method + "] " + message + "\n")
file.close()
file.close()
except BaseException:
return "Can not write to error file!"
@staticmethod
def readLastNFiles(numberOfLines,fileName):
try:
lastFewLines = str(subprocess.check_output(["tail", "-n",str(numberOfLines),fileName]).decode("utf-8"))
return lastFewLines
except subprocess.CalledProcessError as msg:
return "File was empty"
@staticmethod
def statusWriter(tempStatusPath, mesg, append = None):
try:
if append == None:
statusFile = open(tempStatusPath, 'w')
else:
statusFile = open(tempStatusPath, 'a')
statusFile.writelines(mesg + '\n')
statusFile.close()
print((mesg + '\n'))
except BaseException as msg:
CyberCPLogFileWriter.writeToFile(str(msg) + ' [statusWriter]')
            #print str(msg)
| [
"os.path.exists",
"smtplib.SMTP",
"socket.gethostname",
"time.strftime"
] | [((321, 345), 'os.path.exists', 'os.path.exists', (['smtpPath'], {}), '(smtpPath)\n', (335, 345), False, 'import os\n'), ((1051, 1076), 'smtplib.SMTP', 'smtplib.SMTP', (['"""localhost"""'], {}), "('localhost')\n", (1063, 1076), False, 'import smtplib\n'), ((1623, 1648), 'os.path.exists', 'os.path.exists', (['emailPath'], {}), '(emailPath)\n', (1637, 1648), False, 'import os\n'), ((1895, 1915), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (1913, 1915), False, 'import socket\n'), ((1397, 1431), 'time.strftime', 'time.strftime', (['"""%m.%d.%Y_%H-%M-%S"""'], {}), "('%m.%d.%Y_%H-%M-%S')\n", (1410, 1431), False, 'import time\n'), ((2099, 2133), 'time.strftime', 'time.strftime', (['"""%m.%d.%Y_%H-%M-%S"""'], {}), "('%m.%d.%Y_%H-%M-%S')\n", (2112, 2133), False, 'import time\n'), ((2417, 2451), 'time.strftime', 'time.strftime', (['"""%m.%d.%Y_%H-%M-%S"""'], {}), "('%m.%d.%Y_%H-%M-%S')\n", (2430, 2451), False, 'import time\n'), ((2787, 2821), 'time.strftime', 'time.strftime', (['"""%m.%d.%Y_%H-%M-%S"""'], {}), "('%m.%d.%Y_%H-%M-%S')\n", (2800, 2821), False, 'import time\n')] |
#!/usr/bin/env python
"""Module for global fitting titrations (pH and cl) on 2 datasets
"""
import os
import sys
import argparse
import numpy as np
from lmfit import Parameters, Minimizer, minimize, conf_interval, report_fit
import pandas as pd
import matplotlib.pyplot as plt
# from scipy import optimize
def ci_report(ci):
"""return text of a report for confidence intervals"""
maxlen = max([len(i) for i in ci])
buff = []
add = buff.append
convp = lambda x: ("%.2f" % (x[0]*100))+'%'
# I modified "%.5f"
conv = lambda x: "%.6G" % x[1]
title_shown = False
for name, row in ci.items():
if not title_shown:
add("".join([''.rjust(maxlen)] +
[i.rjust(10) for i in map(convp, row)]))
title_shown = True
add("".join([name.rjust(maxlen)] +
[i.rjust(10) for i in map(conv, row)]))
return '\n'.join(buff)
def residual(pars, x, data=None, titration_type=None):
"""residual function for lmfit
Parameters
----------
pars: lmfit Parameters()
x : list of x vectors
data : list of y vectors
Return
------
    a vector of residuals (yfit - data)
or the fitted values
"""
vals = pars.valuesdict()
SA1 = vals['SA1']
SB1 = vals['SB1']
K = vals['K']
SA2 = vals['SA2']
SB2 = vals['SB2']
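    # pH model: sigmoid in 10**(K - pH), with K playing the role of a pKa;
    # cl model: single-site binding isotherm, with K the dissociation constant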
if titration_type == 'pH':
model1 = (SB1 + SA1 * 10 ** (K - x[0])) / (1 + 10 ** (K - x[0]))
model2 = (SB2 + SA2 * 10 ** (K - x[1])) / (1 + 10 ** (K - x[1]))
elif titration_type == 'cl':
model1 = (SA1 + SB1 * x[0] / K) / (1 + x[0] / K)
model2 = (SA2 + SB2 * x[1] / K) / (1 + x[1] / K)
else:
print('Error: residual call must indicate a titration type')
sys.exit()
if data is None:
return np.r_[model1, model2]
return np.r_[model1 - data[0], model2 - data[1]]
def main():
description = "Fit a pH or Cl titration file: x y1 y2"
parser = argparse.ArgumentParser(description=description)
parser.add_argument('file',
help='the file <x y1 y2> without heads')
parser.add_argument('out_folder',
help='The folder to output the .txt and .png files')
parser.add_argument('-t', '--titration-of', dest='titration_type',
action="store", default="pH", choices=["pH", "cl"],
help='Type of titration, pH or cl')
parser.add_argument('-v', '--verbose', action='store_true',
help='Printout runtime information.increase verbosity')
parser.add_argument('--boot', dest='nboot', type=int,
help='bootstraping using <n> iterations')
args = parser.parse_args()
ttype = args.titration_type
#df = pd.read_csv(args.file, sep=' ', names=['x', 'y1', 'y2'])
df = pd.read_csv(args.file)
if not os.path.isdir(args.out_folder):
os.makedirs(args.out_folder)
fit_params = Parameters()
fit_params.add('SA1', value=df.y1[df.x == min(df.x)].values[0], min=0)
fit_params.add('SB1', value=df.y1[df.x == max(df.x)].values[0], min=0)
fit_params.add('SA2', value=df.y2[df.x == min(df.x)].values[0], min=0)
fit_params.add('SB2', value=df.y2[df.x == max(df.x)].values[0], min=0)
if args.titration_type == "pH":
fit_params.add('K', value=7, min=4, max=10)
elif args.titration_type == "cl":
fit_params.add('K', value=20, min=0, max=1000)
mini = Minimizer(residual, fit_params, fcn_args=([df.x, df.x],),
fcn_kws={'data': [df.y1, df.y2], 'titration_type': ttype})
res = mini.minimize()
report_fit(fit_params)
ci = conf_interval(mini, res, sigmas=[.674, .95])
print(ci_report(ci))
# plotting
xfit = np.linspace(df.x.min(), df.x.max(), 100)
yfit = residual(fit_params, [xfit, xfit], titration_type=ttype) # kws={}
yfit = yfit.reshape(2, len(yfit) // 2)
plt.plot(df.x, df.y1, 'o', df.x, df.y2, 's', xfit, yfit[0], '-',
xfit, yfit[1], '-')
plt.grid(True)
f_out = os.path.join(args.out_folder, os.path.split(args.file)[1])
plt.savefig(f_out + ".png")
if args.nboot:
bootstrap(df, args.nboot, fit_params, f_out, ttype)
def bootstrap(df, nboot, fit_params, f_out, ttype):
"""Perform bootstrap to estimate parameters variance
Parameters
----------
df : DataFrame
nboot : int
fit_params: lmfit.fit_params
f_out : string
Output
------
print results
plot
"""
import seaborn as sns
n_points = len(df)
kds = []
sa1 = []
sb1 = []
sa2 = []
sb2 = []
for i in range(nboot):
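        # resample the data rows with replacement, independently for each of the two signals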
boot_idxs = np.random.randint(0, n_points-1, n_points)
df2 = df.loc[boot_idxs]
df2.reset_index(drop=True, inplace=True)
boot_idxs = np.random.randint(0, n_points-1, n_points)
df3 = df.loc[boot_idxs]
df3.reset_index(drop=True, inplace=True)
try:
res = minimize(residual, fit_params, args=([df2.x, df3.x],),
kws={'data': [df2.y1, df3.y2], 'titration_type': ttype})
kds.append(res.params['K'].value)
sa1.append(res.params['SA1'].value)
sb1.append(res.params['SB1'].value)
sa2.append(res.params['SA2'].value)
sb2.append(res.params['SB2'].value)
except:
print(df2)
print(df3)
dff = pd.DataFrame({'K': kds, 'SA1': sa1, 'SB1': sb1, 'SA2': sa2,
'SB2': sb2})
print("bootstrap: ",
round(dff.K.quantile(.025), 3),
round(dff.K.quantile(.163), 3),
round(dff.K.median(), 3),
round(dff.K.quantile(.837), 3),
round(dff.K.quantile(.975), 3))
sns.set_style('darkgrid')
g = sns.PairGrid(dff)
# g.map_diag(sns.kdeplot, lw=3)
g.map_diag(plt.hist, alpha=0.4)
g.map_upper(plt.scatter, s=9, alpha=0.6)
g.map_lower(sns.kdeplot, cmap="Blues_d")
plt.savefig(f_out + "-bs" + ".png")
if __name__ == '__main__':
main()
| [
"matplotlib.pyplot.grid",
"matplotlib.pyplot.savefig",
"lmfit.Minimizer",
"argparse.ArgumentParser",
"pandas.read_csv",
"lmfit.conf_interval",
"os.makedirs",
"matplotlib.pyplot.plot",
"os.path.split",
"seaborn.set_style",
"numpy.random.randint",
"os.path.isdir",
"lmfit.report_fit",
"sys.ex... | [((1986, 2034), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': 'description'}), '(description=description)\n', (2009, 2034), False, 'import argparse\n'), ((2861, 2883), 'pandas.read_csv', 'pd.read_csv', (['args.file'], {}), '(args.file)\n', (2872, 2883), True, 'import pandas as pd\n'), ((2981, 2993), 'lmfit.Parameters', 'Parameters', ([], {}), '()\n', (2991, 2993), False, 'from lmfit import Parameters, Minimizer, minimize, conf_interval, report_fit\n'), ((3486, 3606), 'lmfit.Minimizer', 'Minimizer', (['residual', 'fit_params'], {'fcn_args': '([df.x, df.x],)', 'fcn_kws': "{'data': [df.y1, df.y2], 'titration_type': ttype}"}), "(residual, fit_params, fcn_args=([df.x, df.x],), fcn_kws={'data':\n [df.y1, df.y2], 'titration_type': ttype})\n", (3495, 3606), False, 'from lmfit import Parameters, Minimizer, minimize, conf_interval, report_fit\n'), ((3652, 3674), 'lmfit.report_fit', 'report_fit', (['fit_params'], {}), '(fit_params)\n', (3662, 3674), False, 'from lmfit import Parameters, Minimizer, minimize, conf_interval, report_fit\n'), ((3684, 3730), 'lmfit.conf_interval', 'conf_interval', (['mini', 'res'], {'sigmas': '[0.674, 0.95]'}), '(mini, res, sigmas=[0.674, 0.95])\n', (3697, 3730), False, 'from lmfit import Parameters, Minimizer, minimize, conf_interval, report_fit\n'), ((3947, 4036), 'matplotlib.pyplot.plot', 'plt.plot', (['df.x', 'df.y1', '"""o"""', 'df.x', 'df.y2', '"""s"""', 'xfit', 'yfit[0]', '"""-"""', 'xfit', 'yfit[1]', '"""-"""'], {}), "(df.x, df.y1, 'o', df.x, df.y2, 's', xfit, yfit[0], '-', xfit, yfit\n [1], '-')\n", (3955, 4036), True, 'import matplotlib.pyplot as plt\n'), ((4049, 4063), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (4057, 4063), True, 'import matplotlib.pyplot as plt\n'), ((4139, 4166), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(f_out + '.png')"], {}), "(f_out + '.png')\n", (4150, 4166), True, 'import matplotlib.pyplot as plt\n'), ((5436, 5508), 'pandas.DataFrame', 'pd.DataFrame', (["{'K': kds, 'SA1': sa1, 'SB1': sb1, 'SA2': sa2, 'SB2': sb2}"], {}), "({'K': kds, 'SA1': sa1, 'SB1': sb1, 'SA2': sa2, 'SB2': sb2})\n", (5448, 5508), True, 'import pandas as pd\n'), ((5766, 5791), 'seaborn.set_style', 'sns.set_style', (['"""darkgrid"""'], {}), "('darkgrid')\n", (5779, 5791), True, 'import seaborn as sns\n'), ((5800, 5817), 'seaborn.PairGrid', 'sns.PairGrid', (['dff'], {}), '(dff)\n', (5812, 5817), True, 'import seaborn as sns\n'), ((5984, 6019), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(f_out + '-bs' + '.png')"], {}), "(f_out + '-bs' + '.png')\n", (5995, 6019), True, 'import matplotlib.pyplot as plt\n'), ((2895, 2925), 'os.path.isdir', 'os.path.isdir', (['args.out_folder'], {}), '(args.out_folder)\n', (2908, 2925), False, 'import os\n'), ((2935, 2963), 'os.makedirs', 'os.makedirs', (['args.out_folder'], {}), '(args.out_folder)\n', (2946, 2963), False, 'import os\n'), ((4693, 4737), 'numpy.random.randint', 'np.random.randint', (['(0)', '(n_points - 1)', 'n_points'], {}), '(0, n_points - 1, n_points)\n', (4710, 4737), True, 'import numpy as np\n'), ((4837, 4881), 'numpy.random.randint', 'np.random.randint', (['(0)', '(n_points - 1)', 'n_points'], {}), '(0, n_points - 1, n_points)\n', (4854, 4881), True, 'import numpy as np\n'), ((1778, 1788), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1786, 1788), False, 'import sys\n'), ((4106, 4130), 'os.path.split', 'os.path.split', (['args.file'], {}), '(args.file)\n', (4119, 4130), False, 'import os\n'), ((4992, 5107), 'lmfit.minimize', 'minimize', 
(['residual', 'fit_params'], {'args': '([df2.x, df3.x],)', 'kws': "{'data': [df2.y1, df3.y2], 'titration_type': ttype}"}), "(residual, fit_params, args=([df2.x, df3.x],), kws={'data': [df2.y1,\n df3.y2], 'titration_type': ttype})\n", (5000, 5107), False, 'from lmfit import Parameters, Minimizer, minimize, conf_interval, report_fit\n')] |
import os
import sys
import neuron
import json
from pprint import pprint
from neuron import h
import matplotlib.pyplot as plt
import numpy as np
import h5py
## Runs the 5 cell iclamp simulation but in NEURON for each individual cell
# $ python pure_nrn.py <gid>
neuron.load_mechanisms('../components/mechanisms')
h.load_file('stdgui.hoc')
h.load_file('import3d.hoc')
cells_table = {
# gid = [model id, cre line, morph file]
0: [472363762, 'Scnn1a', 'Scnn1a_473845048_m.swc'],
1: [473863510, 'Rorb', 'Rorb_325404214_m.swc'],
2: [473863035, 'Nr5a1', 'Nr5a1_471087815_m.swc'],
3: [472912177, 'PV1', 'Pvalb_470522102_m.swc'],
4: [473862421, 'PV2', 'Pvalb_469628681_m.swc']
}
def run_simulation(gid, morphologies_dir='../components/morphologies', plot_results=True):
swc_file = os.path.join(morphologies_dir, cells_table[gid][2])
model_file = 'model_gid{}_{}_{}.json'.format(gid, cells_table[gid][0], cells_table[gid][1])
params_dict = json.load(open(model_file, 'r'))
# pprint(params_dict)
# load the cell
nrn_swc = h.Import3d_SWC_read()
nrn_swc.input(str(swc_file))
imprt = h.Import3d_GUI(nrn_swc, 0)
h("objref this")
imprt.instantiate(h.this)
# Cut the axon
h("soma[0] area(0.5)")
for sec in h.allsec():
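        # pick an odd nseg that grows with section length, so a node sits at the section midpoint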
sec.nseg = 1 + 2 * int(sec.L / 40.0)
if sec.name()[:4] == "axon":
h.delete_section(sec=sec)
h('create axon[2]')
for sec in h.axon:
sec.L = 30
sec.diam = 1
sec.nseg = 1 + 2 * int(sec.L / 40.0)
h.axon[0].connect(h.soma[0], 0.5, 0.0)
h.axon[1].connect(h.axon[0], 1.0, 0.0)
h.define_shape()
# set model params
h("access soma")
for sec in h.allsec():
sec_name = sec.name().split('[')[0]
# special case for passive channels rev. potential
sec.insert('pas')
for seg in sec:
if sec_name not in params_dict['e_pas']:
continue
seg.pas.e = params_dict['e_pas'][sec_name]
# insert mechanisms (if req.) and set density
for prop in params_dict[sec_name]:
if 'mechanism' in prop:
sec.insert(prop['mechanism'])
setattr(sec, prop['name'], prop['value'])
# simulation properties
h.stdinit()
h.tstop = 4000.0
h.dt = 0.1
h.steps_per_ms = 1/h.dt
h.celsius = 34.0
h.v_init = -80.0
# stimuli is an increasing series of 3 step currents
cclamp1 = h.IClamp(h.soma[0](0.5))
cclamp1.delay = 500.0
cclamp1.dur = 500.0
cclamp1.amp = 0.1500
cclamp2 = h.IClamp(h.soma[0](0.5))
cclamp2.delay = 1500.0
cclamp2.dur = 500.0
cclamp2.amp = 0.1750
cclamp3 = h.IClamp(h.soma[0](0.5))
cclamp3.delay = 2500.0
cclamp3.dur = 500.0
cclamp3.amp = 0.2000
# run simulation
v_vec = h.Vector()
v_vec.record(h.soma[0](0.5)._ref_v)
h.startsw()
h.run(h.tstop)
voltages = [v for v in v_vec]
cell_var_name = 'cellvar_gid{}_{}_{}.h5'.format(gid, cells_table[gid][0], cells_table[gid][1])
with h5py.File(cell_var_name, 'w') as h5:
# fake a mapping table just for convience
h5.create_dataset('/mapping/gids', data=[gid], dtype=np.uint16)
h5.create_dataset('/mapping/element_pos', data=[0.5], dtype=np.float)
h5.create_dataset('/mapping/element_id', data=[0], dtype=np.uint16)
h5.create_dataset('/mapping/index_pointer', data=[0], dtype=np.uint16)
h5.create_dataset('/v/data', data=voltages, dtype=np.float64)
if plot_results:
times = np.linspace(0.0, h.tstop, len(voltages))
plt.plot(times, voltages)
plt.show()
if __name__ == '__main__':
if __file__ != sys.argv[-1]:
run_simulation(sys.argv[-1])
else:
for gid in range(5):
run_simulation(gid, plot_results=False)
| [
"neuron.h.startsw",
"neuron.h",
"neuron.h.run",
"neuron.h.Import3d_GUI",
"os.path.join",
"neuron.h.define_shape",
"matplotlib.pyplot.plot",
"h5py.File",
"neuron.h.Import3d_SWC_read",
"neuron.h.load_file",
"neuron.h.allsec",
"neuron.h.delete_section",
"neuron.load_mechanisms",
"neuron.h.std... | [((264, 314), 'neuron.load_mechanisms', 'neuron.load_mechanisms', (['"""../components/mechanisms"""'], {}), "('../components/mechanisms')\n", (286, 314), False, 'import neuron\n'), ((315, 340), 'neuron.h.load_file', 'h.load_file', (['"""stdgui.hoc"""'], {}), "('stdgui.hoc')\n", (326, 340), False, 'from neuron import h\n'), ((341, 368), 'neuron.h.load_file', 'h.load_file', (['"""import3d.hoc"""'], {}), "('import3d.hoc')\n", (352, 368), False, 'from neuron import h\n'), ((806, 857), 'os.path.join', 'os.path.join', (['morphologies_dir', 'cells_table[gid][2]'], {}), '(morphologies_dir, cells_table[gid][2])\n', (818, 857), False, 'import os\n'), ((1066, 1087), 'neuron.h.Import3d_SWC_read', 'h.Import3d_SWC_read', ([], {}), '()\n', (1085, 1087), False, 'from neuron import h\n'), ((1133, 1159), 'neuron.h.Import3d_GUI', 'h.Import3d_GUI', (['nrn_swc', '(0)'], {}), '(nrn_swc, 0)\n', (1147, 1159), False, 'from neuron import h\n'), ((1164, 1180), 'neuron.h', 'h', (['"""objref this"""'], {}), "('objref this')\n", (1165, 1180), False, 'from neuron import h\n'), ((1235, 1257), 'neuron.h', 'h', (['"""soma[0] area(0.5)"""'], {}), "('soma[0] area(0.5)')\n", (1236, 1257), False, 'from neuron import h\n'), ((1273, 1283), 'neuron.h.allsec', 'h.allsec', ([], {}), '()\n', (1281, 1283), False, 'from neuron import h\n'), ((1409, 1428), 'neuron.h', 'h', (['"""create axon[2]"""'], {}), "('create axon[2]')\n", (1410, 1428), False, 'from neuron import h\n'), ((1627, 1643), 'neuron.h.define_shape', 'h.define_shape', ([], {}), '()\n', (1641, 1643), False, 'from neuron import h\n'), ((1672, 1688), 'neuron.h', 'h', (['"""access soma"""'], {}), "('access soma')\n", (1673, 1688), False, 'from neuron import h\n'), ((1704, 1714), 'neuron.h.allsec', 'h.allsec', ([], {}), '()\n', (1712, 1714), False, 'from neuron import h\n'), ((2272, 2283), 'neuron.h.stdinit', 'h.stdinit', ([], {}), '()\n', (2281, 2283), False, 'from neuron import h\n'), ((2828, 2838), 'neuron.h.Vector', 'h.Vector', ([], {}), '()\n', (2836, 2838), False, 'from neuron import h\n'), ((2883, 2894), 'neuron.h.startsw', 'h.startsw', ([], {}), '()\n', (2892, 2894), False, 'from neuron import h\n'), ((2899, 2913), 'neuron.h.run', 'h.run', (['h.tstop'], {}), '(h.tstop)\n', (2904, 2913), False, 'from neuron import h\n'), ((3057, 3086), 'h5py.File', 'h5py.File', (['cell_var_name', '"""w"""'], {}), "(cell_var_name, 'w')\n", (3066, 3086), False, 'import h5py\n'), ((3607, 3632), 'matplotlib.pyplot.plot', 'plt.plot', (['times', 'voltages'], {}), '(times, voltages)\n', (3615, 3632), True, 'import matplotlib.pyplot as plt\n'), ((3641, 3651), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3649, 3651), True, 'import matplotlib.pyplot as plt\n'), ((1379, 1404), 'neuron.h.delete_section', 'h.delete_section', ([], {'sec': 'sec'}), '(sec=sec)\n', (1395, 1404), False, 'from neuron import h\n')] |
# Recipe creation tool - create command build system handlers
#
# Copyright (C) 2014 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import re
import os
import logging
import bb
from recipetool.create import RecipeHandler, read_pkgconfig_provides
logger = logging.getLogger('recipetool')
tinfoil = None
def tinfoil_init(instance):
global tinfoil
tinfoil = instance
class CmakeRecipeHandler(RecipeHandler):
def process(self, srctree, classes, lines_before, lines_after, handled):
if 'buildsystem' in handled:
return False
if RecipeHandler.checkfiles(srctree, ['CMakeLists.txt']):
classes.append('cmake')
lines_after.append('# Specify any options you want to pass to cmake using EXTRA_OECMAKE:')
lines_after.append('EXTRA_OECMAKE = ""')
lines_after.append('')
handled.append('buildsystem')
return True
return False
class SconsRecipeHandler(RecipeHandler):
def process(self, srctree, classes, lines_before, lines_after, handled):
if 'buildsystem' in handled:
return False
if RecipeHandler.checkfiles(srctree, ['SConstruct', 'Sconstruct', 'sconstruct']):
classes.append('scons')
lines_after.append('# Specify any options you want to pass to scons using EXTRA_OESCONS:')
lines_after.append('EXTRA_OESCONS = ""')
lines_after.append('')
handled.append('buildsystem')
return True
return False
class QmakeRecipeHandler(RecipeHandler):
def process(self, srctree, classes, lines_before, lines_after, handled):
if 'buildsystem' in handled:
return False
if RecipeHandler.checkfiles(srctree, ['*.pro']):
classes.append('qmake2')
handled.append('buildsystem')
return True
return False
class AutotoolsRecipeHandler(RecipeHandler):
def process(self, srctree, classes, lines_before, lines_after, handled):
if 'buildsystem' in handled:
return False
autoconf = False
if RecipeHandler.checkfiles(srctree, ['configure.ac', 'configure.in']):
autoconf = True
values = AutotoolsRecipeHandler.extract_autotools_deps(lines_before, srctree)
classes.extend(values.pop('inherit', '').split())
for var, value in values.iteritems():
lines_before.append('%s = "%s"' % (var, value))
else:
conffile = RecipeHandler.checkfiles(srctree, ['configure'])
if conffile:
# Check if this is just a pre-generated autoconf configure script
with open(conffile[0], 'r') as f:
for i in range(1, 10):
if 'Generated by GNU Autoconf' in f.readline():
autoconf = True
break
if autoconf:
lines_before.append('# NOTE: if this software is not capable of being built in a separate build directory')
lines_before.append('# from the source, you should replace autotools with autotools-brokensep in the')
lines_before.append('# inherit line')
classes.append('autotools')
lines_after.append('# Specify any options you want to pass to the configure script using EXTRA_OECONF:')
lines_after.append('EXTRA_OECONF = ""')
lines_after.append('')
handled.append('buildsystem')
return True
return False
@staticmethod
def extract_autotools_deps(outlines, srctree, acfile=None):
import shlex
import oe.package
values = {}
inherits = []
# FIXME this mapping is very thin
progmap = {'flex': 'flex-native',
'bison': 'bison-native',
'm4': 'm4-native'}
progclassmap = {'gconftool-2': 'gconf',
'pkg-config': 'pkgconfig'}
ignoredeps = ['gcc-runtime', 'glibc', 'uclibc']
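        # regexes for pulling dependency names out of common autoconf macros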
        pkg_re = re.compile(r'PKG_CHECK_MODULES\(\[?[a-zA-Z0-9]*\]?, \[?([^,\]]*)[),].*')
        lib_re = re.compile(r'AC_CHECK_LIB\(\[?([a-zA-Z0-9]*)\]?, .*')
        progs_re = re.compile(r'_PROGS?\(\[?[a-zA-Z0-9]*\]?, \[?([^,\]]*)\]?[),].*')
        dep_re = re.compile(r'([^ ><=]+)( [<>=]+ [^ ><=]+)?')
# Build up lib library->package mapping
shlib_providers = oe.package.read_shlib_providers(tinfoil.config_data)
libdir = tinfoil.config_data.getVar('libdir', True)
base_libdir = tinfoil.config_data.getVar('base_libdir', True)
libpaths = list(set([base_libdir, libdir]))
        libname_re = re.compile(r'^lib(.+)\.so.*$')
pkglibmap = {}
for lib, item in shlib_providers.iteritems():
for path, pkg in item.iteritems():
if path in libpaths:
res = libname_re.match(lib)
if res:
libname = res.group(1)
if not libname in pkglibmap:
pkglibmap[libname] = pkg[0]
else:
logger.debug('unable to extract library name from %s' % lib)
# Now turn it into a library->recipe mapping
recipelibmap = {}
pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR', True)
for libname, pkg in pkglibmap.iteritems():
try:
with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f:
for line in f:
if line.startswith('PN:'):
recipelibmap[libname] = line.split(':', 1)[-1].strip()
break
except IOError as ioe:
if ioe.errno == 2:
logger.warn('unable to find a pkgdata file for package %s' % pkg)
else:
raise
# Since a configure.ac file is essentially a program, this is only ever going to be
# a hack unfortunately; but it ought to be enough of an approximation
if acfile:
srcfiles = [acfile]
else:
srcfiles = RecipeHandler.checkfiles(srctree, ['configure.ac', 'configure.in'])
pcdeps = []
deps = []
unmapped = []
unmappedlibs = []
with open(srcfiles[0], 'r') as f:
for line in f:
if 'PKG_CHECK_MODULES' in line:
res = pkg_re.search(line)
if res:
res = dep_re.findall(res.group(1))
if res:
pcdeps.extend([x[0] for x in res])
inherits.append('pkgconfig')
if line.lstrip().startswith('AM_GNU_GETTEXT'):
inherits.append('gettext')
elif 'AC_CHECK_PROG' in line or 'AC_PATH_PROG' in line:
res = progs_re.search(line)
if res:
for prog in shlex.split(res.group(1)):
prog = prog.split()[0]
progclass = progclassmap.get(prog, None)
if progclass:
inherits.append(progclass)
else:
progdep = progmap.get(prog, None)
if progdep:
deps.append(progdep)
else:
if not prog.startswith('$'):
unmapped.append(prog)
elif 'AC_CHECK_LIB' in line:
res = lib_re.search(line)
if res:
lib = res.group(1)
libdep = recipelibmap.get(lib, None)
if libdep:
deps.append(libdep)
else:
if libdep is None:
if not lib.startswith('$'):
unmappedlibs.append(lib)
elif 'AC_PATH_X' in line:
deps.append('libx11')
if unmapped:
outlines.append('# NOTE: the following prog dependencies are unknown, ignoring: %s' % ' '.join(unmapped))
if unmappedlibs:
outlines.append('# NOTE: the following library dependencies are unknown, ignoring: %s' % ' '.join(unmappedlibs))
outlines.append('# (this is based on recipes that have previously been built and packaged)')
recipemap = read_pkgconfig_provides(tinfoil.config_data)
unmapped = []
for pcdep in pcdeps:
recipe = recipemap.get(pcdep, None)
if recipe:
deps.append(recipe)
else:
if not pcdep.startswith('$'):
unmapped.append(pcdep)
deps = set(deps).difference(set(ignoredeps))
if unmapped:
outlines.append('# NOTE: unable to map the following pkg-config dependencies: %s' % ' '.join(unmapped))
outlines.append('# (this is based on recipes that have previously been built and packaged)')
if deps:
values['DEPENDS'] = ' '.join(deps)
if inherits:
values['inherit'] = ' '.join(list(set(inherits)))
return values
class MakefileRecipeHandler(RecipeHandler):
def process(self, srctree, classes, lines_before, lines_after, handled):
if 'buildsystem' in handled:
return False
makefile = RecipeHandler.checkfiles(srctree, ['Makefile'])
if makefile:
lines_after.append('# NOTE: this is a Makefile-only piece of software, so we cannot generate much of the')
lines_after.append('# recipe automatically - you will need to examine the Makefile yourself and ensure')
lines_after.append('# that the appropriate arguments are passed in.')
lines_after.append('')
scanfile = os.path.join(srctree, 'configure.scan')
skipscan = False
try:
stdout, stderr = bb.process.run('autoscan', cwd=srctree, shell=True)
except bb.process.ExecutionError as e:
skipscan = True
if scanfile and os.path.exists(scanfile):
values = AutotoolsRecipeHandler.extract_autotools_deps(lines_before, srctree, acfile=scanfile)
classes.extend(values.pop('inherit', '').split())
for var, value in values.iteritems():
if var == 'DEPENDS':
lines_before.append('# NOTE: some of these dependencies may be optional, check the Makefile and/or upstream documentation')
lines_before.append('%s = "%s"' % (var, value))
lines_before.append('')
for f in ['configure.scan', 'autoscan.log']:
fp = os.path.join(srctree, f)
if os.path.exists(fp):
os.remove(fp)
self.genfunction(lines_after, 'do_configure', ['# Specify any needed configure commands here'])
func = []
func.append('# You will almost certainly need to add additional arguments here')
func.append('oe_runmake')
self.genfunction(lines_after, 'do_compile', func)
installtarget = True
try:
stdout, stderr = bb.process.run('make -qn install', cwd=srctree, shell=True)
except bb.process.ExecutionError as e:
if e.exitcode != 1:
installtarget = False
func = []
if installtarget:
func.append('# This is a guess; additional arguments may be required')
makeargs = ''
with open(makefile[0], 'r') as f:
for i in range(1, 100):
if 'DESTDIR' in f.readline():
makeargs += " 'DESTDIR=${D}'"
break
func.append('oe_runmake install%s' % makeargs)
else:
func.append('# NOTE: unable to determine what to put here - there is a Makefile but no')
func.append('# target named "install", so you will need to define this yourself')
self.genfunction(lines_after, 'do_install', func)
handled.append('buildsystem')
else:
lines_after.append('# NOTE: no Makefile found, unable to determine what needs to be done')
lines_after.append('')
self.genfunction(lines_after, 'do_configure', ['# Specify any needed configure commands here'])
self.genfunction(lines_after, 'do_compile', ['# Specify compilation commands here'])
self.genfunction(lines_after, 'do_install', ['# Specify install commands here'])
def plugin_init(pluginlist):
pass
def register_recipe_handlers(handlers):
# These are in a specific order so that the right one is detected first
handlers.append(CmakeRecipeHandler())
handlers.append(AutotoolsRecipeHandler())
handlers.append(SconsRecipeHandler())
handlers.append(QmakeRecipeHandler())
handlers.append(MakefileRecipeHandler())
| [
"logging.getLogger",
"recipetool.create.RecipeHandler.checkfiles",
"recipetool.create.read_pkgconfig_provides",
"re.compile"
] | [((850, 881), 'logging.getLogger', 'logging.getLogger', (['"""recipetool"""'], {}), "('recipetool')\n", (867, 881), False, 'import logging\n'), ((1162, 1215), 'recipetool.create.RecipeHandler.checkfiles', 'RecipeHandler.checkfiles', (['srctree', "['CMakeLists.txt']"], {}), "(srctree, ['CMakeLists.txt'])\n", (1186, 1215), False, 'from recipetool.create import RecipeHandler, read_pkgconfig_provides\n'), ((1724, 1801), 'recipetool.create.RecipeHandler.checkfiles', 'RecipeHandler.checkfiles', (['srctree', "['SConstruct', 'Sconstruct', 'sconstruct']"], {}), "(srctree, ['SConstruct', 'Sconstruct', 'sconstruct'])\n", (1748, 1801), False, 'from recipetool.create import RecipeHandler, read_pkgconfig_provides\n'), ((2310, 2354), 'recipetool.create.RecipeHandler.checkfiles', 'RecipeHandler.checkfiles', (['srctree', "['*.pro']"], {}), "(srctree, ['*.pro'])\n", (2334, 2354), False, 'from recipetool.create import RecipeHandler, read_pkgconfig_provides\n'), ((2702, 2769), 'recipetool.create.RecipeHandler.checkfiles', 'RecipeHandler.checkfiles', (['srctree', "['configure.ac', 'configure.in']"], {}), "(srctree, ['configure.ac', 'configure.in'])\n", (2726, 2769), False, 'from recipetool.create import RecipeHandler, read_pkgconfig_provides\n'), ((4640, 4716), 're.compile', 're.compile', (['"""PKG_CHECK_MODULES\\\\(\\\\[?[a-zA-Z0-9]*\\\\]?, \\\\[?([^,\\\\]]*)[),].*"""'], {}), "('PKG_CHECK_MODULES\\\\(\\\\[?[a-zA-Z0-9]*\\\\]?, \\\\[?([^,\\\\]]*)[),].*')\n", (4650, 4716), False, 'import re\n'), ((4729, 4784), 're.compile', 're.compile', (['"""AC_CHECK_LIB\\\\(\\\\[?([a-zA-Z0-9]*)\\\\]?, .*"""'], {}), "('AC_CHECK_LIB\\\\(\\\\[?([a-zA-Z0-9]*)\\\\]?, .*')\n", (4739, 4784), False, 'import re\n'), ((4801, 4871), 're.compile', 're.compile', (['"""_PROGS?\\\\(\\\\[?[a-zA-Z0-9]*\\\\]?, \\\\[?([^,\\\\]]*)\\\\]?[),].*"""'], {}), "('_PROGS?\\\\(\\\\[?[a-zA-Z0-9]*\\\\]?, \\\\[?([^,\\\\]]*)\\\\]?[),].*')\n", (4811, 4871), False, 'import re\n'), ((4883, 4926), 're.compile', 're.compile', (['"""([^ ><=]+)( [<>=]+ [^ ><=]+)?"""'], {}), "('([^ ><=]+)( [<>=]+ [^ ><=]+)?')\n", (4893, 4926), False, 'import re\n'), ((5258, 5288), 're.compile', 're.compile', (['"""^lib(.+)\\\\.so.*$"""'], {}), "('^lib(.+)\\\\.so.*$')\n", (5268, 5288), False, 'import re\n'), ((9218, 9262), 'recipetool.create.read_pkgconfig_provides', 'read_pkgconfig_provides', (['tinfoil.config_data'], {}), '(tinfoil.config_data)\n', (9241, 9262), False, 'from recipetool.create import RecipeHandler, read_pkgconfig_provides\n'), ((10208, 10255), 'recipetool.create.RecipeHandler.checkfiles', 'RecipeHandler.checkfiles', (['srctree', "['Makefile']"], {}), "(srctree, ['Makefile'])\n", (10232, 10255), False, 'from recipetool.create import RecipeHandler, read_pkgconfig_provides\n'), ((3102, 3150), 'recipetool.create.RecipeHandler.checkfiles', 'RecipeHandler.checkfiles', (['srctree', "['configure']"], {}), "(srctree, ['configure'])\n", (3126, 3150), False, 'from recipetool.create import RecipeHandler, read_pkgconfig_provides\n'), ((6751, 6818), 'recipetool.create.RecipeHandler.checkfiles', 'RecipeHandler.checkfiles', (['srctree', "['configure.ac', 'configure.in']"], {}), "(srctree, ['configure.ac', 'configure.in'])\n", (6775, 6818), False, 'from recipetool.create import RecipeHandler, read_pkgconfig_provides\n')] |
import unittest
import sys
import os
sys.path.append('../')
from src.class_query import ClassQuery
from src.query_tool import QueryTool, Mode
base_path = os.path.dirname(__file__)
cq = ClassQuery(base_path + '/sample_queries/class_queries.xml')
class TestClassQuery(unittest.TestCase):
def test_class_cluster(self):
qt = QueryTool(base_path + '/sample_ttls/doc1.ttl', Mode.CLUSTER)
responses, stat, errors = cq.ask_all(qt)
res = [len(x.find('justifications')) for x in responses.getchildren()]
self.assertFalse(errors)
self.assertEqual(res, [2, 1])
| [
"os.path.dirname",
"src.class_query.ClassQuery",
"sys.path.append",
"src.query_tool.QueryTool"
] | [((37, 59), 'sys.path.append', 'sys.path.append', (['"""../"""'], {}), "('../')\n", (52, 59), False, 'import sys\n'), ((155, 180), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (170, 180), False, 'import os\n'), ((186, 245), 'src.class_query.ClassQuery', 'ClassQuery', (["(base_path + '/sample_queries/class_queries.xml')"], {}), "(base_path + '/sample_queries/class_queries.xml')\n", (196, 245), False, 'from src.class_query import ClassQuery\n'), ((336, 396), 'src.query_tool.QueryTool', 'QueryTool', (["(base_path + '/sample_ttls/doc1.ttl')", 'Mode.CLUSTER'], {}), "(base_path + '/sample_ttls/doc1.ttl', Mode.CLUSTER)\n", (345, 396), False, 'from src.query_tool import QueryTool, Mode\n')] |
import init_file as variables
import cj_function_lib as cj
from datetime import datetime
fert_table = cj.extract_table_from_mdb(variables.QSWAT_MDB, "fert", variables.path + "\\fert.tmp~")
fert = ""
for fert_line in fert_table:
    f = fert_line.split(",")
    fert += (cj.trailing_spaces(4, f[1], 0)
             + cj.string_trailing_spaces(9, f[2])
             + cj.trailing_spaces(8, f[3], 3)
             + cj.trailing_spaces(8, f[4], 3)
             + cj.trailing_spaces(8, f[5], 3)
             + cj.trailing_spaces(8, f[6], 3)
             + cj.trailing_spaces(8, f[7], 3)
             + cj.trailing_spaces(4, f[8], 2) + "E+00"
             + cj.trailing_spaces(4, f[9], 2) + "E+00"
             + cj.trailing_spaces(8, f[10], 3) + "\n")
fileName = "fert.dat"
cj.write_to(variables.DefaultSimDir + "TxtInOut\\" + fileName, fert)
#print fileName
| [
"cj_function_lib.write_to",
"cj_function_lib.extract_table_from_mdb"
] | [((103, 193), 'cj_function_lib.extract_table_from_mdb', 'cj.extract_table_from_mdb', (['variables.QSWAT_MDB', '"""fert"""', "(variables.path + '\\\\fert.tmp~')"], {}), "(variables.QSWAT_MDB, 'fert', variables.path +\n '\\\\fert.tmp~')\n", (128, 193), True, 'import cj_function_lib as cj\n'), ((813, 881), 'cj_function_lib.write_to', 'cj.write_to', (["(variables.DefaultSimDir + 'TxtInOut\\\\' + fileName)", 'fert'], {}), "(variables.DefaultSimDir + 'TxtInOut\\\\' + fileName, fert)\n", (824, 881), True, 'import cj_function_lib as cj\n')] |
from scrapy.item import Field, Item
# pylint: disable-msg=too-many-ancestors
class FundItem(Item):
code = Field()
name = Field()
tier = Field()
start_date = Field()
date = Field()
price = Field()
| [
"scrapy.item.Field"
] | [((111, 118), 'scrapy.item.Field', 'Field', ([], {}), '()\n', (116, 118), False, 'from scrapy.item import Field, Item\n'), ((130, 137), 'scrapy.item.Field', 'Field', ([], {}), '()\n', (135, 137), False, 'from scrapy.item import Field, Item\n'), ((149, 156), 'scrapy.item.Field', 'Field', ([], {}), '()\n', (154, 156), False, 'from scrapy.item import Field, Item\n'), ((174, 181), 'scrapy.item.Field', 'Field', ([], {}), '()\n', (179, 181), False, 'from scrapy.item import Field, Item\n'), ((194, 201), 'scrapy.item.Field', 'Field', ([], {}), '()\n', (199, 201), False, 'from scrapy.item import Field, Item\n'), ((214, 221), 'scrapy.item.Field', 'Field', ([], {}), '()\n', (219, 221), False, 'from scrapy.item import Field, Item\n')] |
__author__ = 'dimd'
from zope.interface import Interface, Attribute
class IProtocolStogareInterface(Interface):
"""
    This interface defines our session storage.
    Every custom storage has to implement this interface.
"""
    session = Attribute(""" Container for our session """)
| [
"zope.interface.Attribute"
] | [((252, 292), 'zope.interface.Attribute', 'Attribute', (['""" Container for our session """'], {}), "(' Container for our session ')\n", (261, 292), False, 'from zope.interface import Interface, Attribute\n')] |
#!/usr/bin/env python
from __future__ import division
import numpy as np
from lfd.environment.simulation import DynamicSimulationRobotWorld
from lfd.environment.simulation_object import XmlSimulationObject, BoxSimulationObject
from lfd.environment import environment
from lfd.environment import sim_util
from lfd.demonstration.demonstration import Demonstration
from lfd.registration.registration import TpsRpmRegistrationFactory
from lfd.registration.plotting_openrave import registration_plot_cb
from lfd.transfer.transfer import FingerTrajectoryTransferer
from lfd.transfer.registration_transfer import TwoStepRegistrationAndTrajectoryTransferer
from move_rope import create_augmented_traj, create_rope
def create_rope_demo(env, rope_poss):
rope_sim_obj = create_rope(rope_poss)
env.sim.add_objects([rope_sim_obj])
env.sim.settle()
scene_state = env.observe_scene()
env.sim.remove_objects([rope_sim_obj])
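    # grasp 10% along the first rope segment; release 10% along the last one, offset 0.2 m in y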
pick_pos = rope_poss[0] + .1 * (rope_poss[1] - rope_poss[0])
drop_pos = rope_poss[3] + .1 * (rope_poss[2] - rope_poss[3]) + np.r_[0, .2, 0]
pick_R = np.array([[0, 0, 1], [0, 1, 0], [-1, 0, 0]])
drop_R = np.array([[0, 1, 0], [0, 0, -1], [-1, 0, 0]])
move_height = .2
aug_traj = create_augmented_traj(env.sim.robot, pick_pos, drop_pos, pick_R, drop_R, move_height)
demo = Demonstration("rope_demo", scene_state, aug_traj)
return demo
def main():
# define simulation objects
table_height = 0.77
sim_objs = []
sim_objs.append(XmlSimulationObject("robots/pr2-beta-static.zae", dynamic=False))
sim_objs.append(BoxSimulationObject("table", [1, 0, table_height-.1], [.85, .85, .1], dynamic=False))
# initialize simulation world and environment
sim = DynamicSimulationRobotWorld()
sim.add_objects(sim_objs)
sim.create_viewer()
sim.robot.SetDOFValues([0.25], [sim.robot.GetJoint('torso_lift_joint').GetJointIndex()])
sim.robot.SetDOFValues([1.25], [sim.robot.GetJoint('head_tilt_joint').GetJointIndex()]) # move head down so it can see the rope
sim_util.reset_arms_to_side(sim)
env = environment.LfdEnvironment(sim, sim, downsample_size=0.025)
demo_rope_poss = np.array([[.2, -.2, table_height+0.006],
[.8, -.2, table_height+0.006],
[.8, .2, table_height+0.006],
[.2, .2, table_height+0.006]])
demo = create_rope_demo(env, demo_rope_poss)
test_rope_poss = np.array([[.2, -.2, table_height+0.006],
[.5, -.4, table_height+0.006],
[.8, .0, table_height+0.006],
[.8, .2, table_height+0.006],
[.6, .0, table_height+0.006],
[.4, .2, table_height+0.006],
[.2, .2, table_height+0.006]])
test_rope_sim_obj = create_rope(test_rope_poss)
sim.add_objects([test_rope_sim_obj])
sim.settle()
test_scene_state = env.observe_scene()
reg_factory = TpsRpmRegistrationFactory()
traj_transferer = FingerTrajectoryTransferer(sim)
plot_cb = lambda i, i_em, x_nd, y_md, xtarg_nd, wt_n, f, corr_nm, rad: registration_plot_cb(sim, x_nd, y_md, f)
reg_and_traj_transferer = TwoStepRegistrationAndTrajectoryTransferer(reg_factory, traj_transferer)
test_aug_traj = reg_and_traj_transferer.transfer(demo, test_scene_state, callback=plot_cb, plotting=True)
env.execute_augmented_trajectory(test_aug_traj)
if __name__ == '__main__':
main()
| [
"move_rope.create_rope",
"lfd.environment.environment.LfdEnvironment",
"move_rope.create_augmented_traj",
"lfd.transfer.registration_transfer.TwoStepRegistrationAndTrajectoryTransferer",
"lfd.environment.sim_util.reset_arms_to_side",
"lfd.registration.registration.TpsRpmRegistrationFactory",
"lfd.enviro... | [((767, 789), 'move_rope.create_rope', 'create_rope', (['rope_poss'], {}), '(rope_poss)\n', (778, 789), False, 'from move_rope import create_augmented_traj, create_rope\n'), ((1098, 1142), 'numpy.array', 'np.array', (['[[0, 0, 1], [0, 1, 0], [-1, 0, 0]]'], {}), '([[0, 0, 1], [0, 1, 0], [-1, 0, 0]])\n', (1106, 1142), True, 'import numpy as np\n'), ((1156, 1201), 'numpy.array', 'np.array', (['[[0, 1, 0], [0, 0, -1], [-1, 0, 0]]'], {}), '([[0, 1, 0], [0, 0, -1], [-1, 0, 0]])\n', (1164, 1201), True, 'import numpy as np\n'), ((1238, 1327), 'move_rope.create_augmented_traj', 'create_augmented_traj', (['env.sim.robot', 'pick_pos', 'drop_pos', 'pick_R', 'drop_R', 'move_height'], {}), '(env.sim.robot, pick_pos, drop_pos, pick_R, drop_R,\n move_height)\n', (1259, 1327), False, 'from move_rope import create_augmented_traj, create_rope\n'), ((1340, 1389), 'lfd.demonstration.demonstration.Demonstration', 'Demonstration', (['"""rope_demo"""', 'scene_state', 'aug_traj'], {}), "('rope_demo', scene_state, aug_traj)\n", (1353, 1389), False, 'from lfd.demonstration.demonstration import Demonstration\n'), ((1750, 1779), 'lfd.environment.simulation.DynamicSimulationRobotWorld', 'DynamicSimulationRobotWorld', ([], {}), '()\n', (1777, 1779), False, 'from lfd.environment.simulation import DynamicSimulationRobotWorld\n'), ((2068, 2100), 'lfd.environment.sim_util.reset_arms_to_side', 'sim_util.reset_arms_to_side', (['sim'], {}), '(sim)\n', (2095, 2100), False, 'from lfd.environment import sim_util\n'), ((2116, 2175), 'lfd.environment.environment.LfdEnvironment', 'environment.LfdEnvironment', (['sim', 'sim'], {'downsample_size': '(0.025)'}), '(sim, sim, downsample_size=0.025)\n', (2142, 2175), False, 'from lfd.environment import environment\n'), ((2202, 2360), 'numpy.array', 'np.array', (['[[0.2, -0.2, table_height + 0.006], [0.8, -0.2, table_height + 0.006], [0.8,\n 0.2, table_height + 0.006], [0.2, 0.2, table_height + 0.006]]'], {}), '([[0.2, -0.2, table_height + 0.006], [0.8, -0.2, table_height + \n 0.006], [0.8, 0.2, table_height + 0.006], [0.2, 0.2, table_height + 0.006]]\n )\n', (2210, 2360), True, 'import numpy as np\n'), ((2508, 2773), 'numpy.array', 'np.array', (['[[0.2, -0.2, table_height + 0.006], [0.5, -0.4, table_height + 0.006], [0.8,\n 0.0, table_height + 0.006], [0.8, 0.2, table_height + 0.006], [0.6, 0.0,\n table_height + 0.006], [0.4, 0.2, table_height + 0.006], [0.2, 0.2, \n table_height + 0.006]]'], {}), '([[0.2, -0.2, table_height + 0.006], [0.5, -0.4, table_height + \n 0.006], [0.8, 0.0, table_height + 0.006], [0.8, 0.2, table_height + \n 0.006], [0.6, 0.0, table_height + 0.006], [0.4, 0.2, table_height + \n 0.006], [0.2, 0.2, table_height + 0.006]])\n', (2516, 2773), True, 'import numpy as np\n'), ((2952, 2979), 'move_rope.create_rope', 'create_rope', (['test_rope_poss'], {}), '(test_rope_poss)\n', (2963, 2979), False, 'from move_rope import create_augmented_traj, create_rope\n'), ((3104, 3131), 'lfd.registration.registration.TpsRpmRegistrationFactory', 'TpsRpmRegistrationFactory', ([], {}), '()\n', (3129, 3131), False, 'from lfd.registration.registration import TpsRpmRegistrationFactory\n'), ((3154, 3185), 'lfd.transfer.transfer.FingerTrajectoryTransferer', 'FingerTrajectoryTransferer', (['sim'], {}), '(sim)\n', (3180, 3185), False, 'from lfd.transfer.transfer import FingerTrajectoryTransferer\n'), ((3337, 3409), 'lfd.transfer.registration_transfer.TwoStepRegistrationAndTrajectoryTransferer', 'TwoStepRegistrationAndTrajectoryTransferer', (['reg_factory', 'traj_transferer'], 
{}), '(reg_factory, traj_transferer)\n', (3379, 3409), False, 'from lfd.transfer.registration_transfer import TwoStepRegistrationAndTrajectoryTransferer\n'), ((1513, 1577), 'lfd.environment.simulation_object.XmlSimulationObject', 'XmlSimulationObject', (['"""robots/pr2-beta-static.zae"""'], {'dynamic': '(False)'}), "('robots/pr2-beta-static.zae', dynamic=False)\n", (1532, 1577), False, 'from lfd.environment.simulation_object import XmlSimulationObject, BoxSimulationObject\n'), ((1599, 1693), 'lfd.environment.simulation_object.BoxSimulationObject', 'BoxSimulationObject', (['"""table"""', '[1, 0, table_height - 0.1]', '[0.85, 0.85, 0.1]'], {'dynamic': '(False)'}), "('table', [1, 0, table_height - 0.1], [0.85, 0.85, 0.1],\n dynamic=False)\n", (1618, 1693), False, 'from lfd.environment.simulation_object import XmlSimulationObject, BoxSimulationObject\n'), ((3266, 3306), 'lfd.registration.plotting_openrave.registration_plot_cb', 'registration_plot_cb', (['sim', 'x_nd', 'y_md', 'f'], {}), '(sim, x_nd, y_md, f)\n', (3286, 3306), False, 'from lfd.registration.plotting_openrave import registration_plot_cb\n')] |
import csv
import os.path
import random
import numpy as np
import scipy.io
import torch
import torchvision
from torch.utils.data import Dataset
# from .util import *
from data.util import default_loader, read_img, augment, get_image_paths
class PIPALFolder(Dataset):
def __init__(self, root=None, index=None, transform=None, opt=None):
if index is None:
index = list(range(0, 200))
if opt is not None:
self.opt = opt
root = opt['datasets']['pipal']
patch_num = opt['patch_num']
else:
patch_num = 32
refpath = os.path.join(root, 'Train_Ref')
refname = self.getFileName(refpath, '.bmp')
dispath = os.path.join(root, 'Train_Dis')
txtpath = os.path.join(root, 'Train_Label')
sample = []
for i, item in enumerate(index):
ref = refname[item]
# print(ref, end=' ')
txtname = ref.split('.')[0] + '.txt'
fh = open(os.path.join(txtpath, txtname), 'r')
for line in fh:
line = line.split('\n')
words = line[0].split(',')
for aug in range(patch_num):
sample.append((
(os.path.join(dispath, words[0]), os.path.join(refpath, ref)),
np.array(words[1]).astype(np.float32) / 1000.0
))
# print('')
self.samples = sorted(sample)
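        # normalise with the standard ImageNet channel mean/std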
self.transform = torchvision.transforms.Normalize(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])
self.patch_size = opt['patch_size']
# self.loader = default_loader
def __getitem__(self, index):
path, target = self.samples[index]
'''img_dis = self.loader(path[0])
img_ref = self.loader(path[1])'''
img_dis = read_img(env=None, path=path[0])
img_ref = read_img(env=None, path=path[1])
'''if self.transform is not None:
img_dis = self.transform(img_dis)
img_ref = self.transform(img_ref)'''
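        # random-crop a patch when the requested size is below the native 288x288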
if self.patch_size < 288:
H, W, _ = img_ref.shape
crop_size = self.patch_size
rnd_h = random.randint(0, max(0, (H - crop_size)))
rnd_w = random.randint(0, max(0, (W - crop_size)))
img_dis = img_dis[rnd_h:rnd_h + crop_size, rnd_w:rnd_w + crop_size, :]
img_ref = img_ref[rnd_h:rnd_h + crop_size, rnd_w:rnd_w + crop_size, :]
# augmentation - flip, rotate
img_dis, img_ref = augment([img_dis, img_ref], self.opt['use_flip'], rot=False)
if img_ref.shape[2] == 3:
img_ref = img_ref[:, :, [2, 1, 0]]
img_dis = img_dis[:, :, [2, 1, 0]]
img_ref = torch.from_numpy(np.ascontiguousarray(np.transpose(img_ref, (2, 0, 1)))).float()
img_dis = torch.from_numpy(np.ascontiguousarray(np.transpose(img_dis, (2, 0, 1)))).float()
img_dis = self.transform(img_dis)
img_ref = self.transform(img_ref)
return {'Dis': img_dis, 'Ref': img_ref, 'Label': target}
def __len__(self):
length = len(self.samples)
return length
@staticmethod
def getFileName(path, suffix):
filename = []
f_list = os.listdir(path)
# print f_list
for i in f_list:
if os.path.splitext(i)[1] == suffix:
filename.append(i)
filename.sort()
return filename
# TODO
class IQATestDataset(Dataset):
def __init__(self, opt):
super(IQATestDataset, self).__init__()
        self.opt = opt
        self.paths_Dis = None
        self.paths_Ref = None
        # `root` and `index` were used below but never defined here (see the TODO
        # above); derive them from `opt` the same way PIPALFolder does
        root = opt['datasets']['pipal']
        index = list(range(0, 200))
        refpath = os.path.join(root, 'Train_Ref')
refname = self.getFileName(refpath, '.bmp')
dispath = os.path.join(root, 'Train_Dis')
txtpath = os.path.join(root, 'Train_Label')
sample = []
for i, item in enumerate(index):
ref = refname[item]
# print(ref, end=' ')
txtname = ref.split('.')[0] + '.txt'
            with open(os.path.join(txtpath, txtname), 'r') as fh:
                for line in fh:
                    line = line.split('\n')
                    words = line[0].split(',')
                    sample.append((
                        (os.path.join(dispath, words[0]), os.path.join(refpath, ref)),
                        np.array(words[1]).astype(np.float32)
                    ))
# print('')
self.samples = sample
self.transform = torchvision.transforms.Normalize(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])
def __getitem__(self, index):
path, target = self.samples[index]
img_dis = read_img(env=None, path=path[0])
img_ref = read_img(env=None, path=path[1])
'''H, W, _ = img_ref.shape
crop_size = 224
rnd_h = random.randint(0, max(0, (H - crop_size) // 2))
rnd_w = random.randint(0, max(0, (W - crop_size) // 2))
img_dis = img_dis[rnd_h:rnd_h + crop_size, rnd_w:rnd_w + crop_size, :]
img_ref = img_ref[rnd_h:rnd_h + crop_size, rnd_w:rnd_w + crop_size, :]
# augmentation - flip, rotate
img_dis, img_ref = augment([img_dis, img_ref], self.opt['use_flip'], rot=False)'''
if img_ref.shape[2] == 3:
img_ref = img_ref[:, :, [2, 1, 0]]
img_dis = img_dis[:, :, [2, 1, 0]]
img_ref = torch.from_numpy(np.ascontiguousarray(np.transpose(img_ref, (2, 0, 1)))).float()
img_dis = torch.from_numpy(np.ascontiguousarray(np.transpose(img_dis, (2, 0, 1)))).float()
img_dis = self.transform(img_dis)
img_ref = self.transform(img_ref)
return {'Dis': img_dis, 'Ref': img_ref, 'Label': target, 'Dis_path': path[0]}
def __len__(self):
return len(self.samples)
@staticmethod
def getFileName(path, suffix):
filename = []
f_list = os.listdir(path)
# print f_list
for i in f_list:
if os.path.splitext(i)[1] == suffix:
filename.append(i)
filename.sort()
return filename
| [
"data.util.augment",
"numpy.array",
"torchvision.transforms.Normalize",
"data.util.read_img",
"numpy.transpose"
] | [((1538, 1630), 'torchvision.transforms.Normalize', 'torchvision.transforms.Normalize', ([], {'mean': '[0.485, 0.456, 0.406]', 'std': '[0.229, 0.224, 0.225]'}), '(mean=[0.485, 0.456, 0.406], std=[0.229, \n 0.224, 0.225])\n', (1570, 1630), False, 'import torchvision\n'), ((1956, 1988), 'data.util.read_img', 'read_img', ([], {'env': 'None', 'path': 'path[0]'}), '(env=None, path=path[0])\n', (1964, 1988), False, 'from data.util import default_loader, read_img, augment, get_image_paths\n'), ((2008, 2040), 'data.util.read_img', 'read_img', ([], {'env': 'None', 'path': 'path[1]'}), '(env=None, path=path[1])\n', (2016, 2040), False, 'from data.util import default_loader, read_img, augment, get_image_paths\n'), ((2663, 2723), 'data.util.augment', 'augment', (['[img_dis, img_ref]', "self.opt['use_flip']"], {'rot': '(False)'}), "([img_dis, img_ref], self.opt['use_flip'], rot=False)\n", (2670, 2723), False, 'from data.util import default_loader, read_img, augment, get_image_paths\n'), ((4654, 4746), 'torchvision.transforms.Normalize', 'torchvision.transforms.Normalize', ([], {'mean': '[0.485, 0.456, 0.406]', 'std': '[0.229, 0.224, 0.225]'}), '(mean=[0.485, 0.456, 0.406], std=[0.229, \n 0.224, 0.225])\n', (4686, 4746), False, 'import torchvision\n'), ((4903, 4935), 'data.util.read_img', 'read_img', ([], {'env': 'None', 'path': 'path[0]'}), '(env=None, path=path[0])\n', (4911, 4935), False, 'from data.util import default_loader, read_img, augment, get_image_paths\n'), ((4955, 4987), 'data.util.read_img', 'read_img', ([], {'env': 'None', 'path': 'path[1]'}), '(env=None, path=path[1])\n', (4963, 4987), False, 'from data.util import default_loader, read_img, augment, get_image_paths\n'), ((2916, 2948), 'numpy.transpose', 'np.transpose', (['img_ref', '(2, 0, 1)'], {}), '(img_ref, (2, 0, 1))\n', (2928, 2948), True, 'import numpy as np\n'), ((3016, 3048), 'numpy.transpose', 'np.transpose', (['img_dis', '(2, 0, 1)'], {}), '(img_dis, (2, 0, 1))\n', (3028, 3048), True, 'import numpy as np\n'), ((5666, 5698), 'numpy.transpose', 'np.transpose', (['img_ref', '(2, 0, 1)'], {}), '(img_ref, (2, 0, 1))\n', (5678, 5698), True, 'import numpy as np\n'), ((5766, 5798), 'numpy.transpose', 'np.transpose', (['img_dis', '(2, 0, 1)'], {}), '(img_dis, (2, 0, 1))\n', (5778, 5798), True, 'import numpy as np\n'), ((4516, 4534), 'numpy.array', 'np.array', (['words[1]'], {}), '(words[1])\n', (4524, 4534), True, 'import numpy as np\n'), ((1379, 1397), 'numpy.array', 'np.array', (['words[1]'], {}), '(words[1])\n', (1387, 1397), True, 'import numpy as np\n')] |
import unittest
from Familytree.individual import Person
from Familytree import variables
class Testperson(unittest.TestCase):
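    """Unit tests for the Person class from Familytree.individual."""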
def setUp(self):
self.person = Person(1, "Jane", "Female")
def test_initialization(self):
# check instance
self.assertEqual(isinstance(self.person, Person), True)
# check properties
self.assertEqual(self.person.id, 1)
self.assertEqual(self.person.name, "Jane")
self.assertEqual(self.person.gender, "Female")
self.assertEqual(self.person.mother, None)
self.assertEqual(self.person.father, None)
self.assertEqual(self.person.spouse, None)
self.assertEqual(self.person.children, [])
def test_assign_mother(self):
mother_error_case = "error_value"
mother_error_male_case = Person(2, "male_person", "Male")
mother_success_case = Person(3, "Mother", "Female")
# error case
self.assertRaises(ValueError, self.person.assign_mother, mother_error_case)
self.assertRaises(ValueError, self.person.assign_mother, mother_error_male_case)
# success case
self.person.assign_mother(mother_success_case)
self.assertEqual(self.person.mother.name, "Mother")
self.assertTrue(self.person.mother.gender, "Female")
def test_assign_father(self):
father_error_case = "error_value"
father_error_female_case = Person(2, "female_father", "Female")
father_success_case = Person(3, "Father", "Male")
# error cases
self.assertRaises(ValueError, self.person.assign_father, father_error_case)
self.assertRaises(ValueError, self.person.assign_father, father_error_female_case)
# success case
self.person.assign_father(father_success_case)
self.assertEqual(self.person.father.name, "Father")
self.assertTrue(self.person.father.gender, "Male")
def test_assign_spouse(self):
spouse_error_case = "error_value"
spouse_error_same_gender = Person(2, "same_gender_spouse", "Female")
spouse_success_case = Person(3, "Husband", "Male")
# error cases
self.assertRaises(ValueError, self.person.assign_spouse, spouse_error_case)
self.assertRaises(ValueError, self.person.assign_spouse, spouse_error_same_gender)
# success case
self.person.assign_spouse(spouse_success_case)
self.assertEqual(self.person.spouse.name, "Husband")
self.assertEqual(self.person.spouse.gender, "Male")
def test_add_children(self):
child_error_case = "error_Case"
child_success_case = Person(4, "Daughter", "Female")
# error case
self.assertRaises(ValueError, self.person.add_children, child_error_case)
# success case
self.person.add_children(child_success_case)
self.assertEqual(len(self.person.children), 1)
self.assertEqual(self.person.children[0].name, "Daughter")
self.assertEqual(self.person.children[0].gender, "Female")
if __name__ == '__main__':
unittest.main()
| [
"unittest.main",
"Familytree.individual.Person"
] | [((3058, 3073), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3071, 3073), False, 'import unittest\n'), ((173, 200), 'Familytree.individual.Person', 'Person', (['(1)', '"""Jane"""', '"""Female"""'], {}), "(1, 'Jane', 'Female')\n", (179, 200), False, 'from Familytree.individual import Person\n'), ((818, 850), 'Familytree.individual.Person', 'Person', (['(2)', '"""male_person"""', '"""Male"""'], {}), "(2, 'male_person', 'Male')\n", (824, 850), False, 'from Familytree.individual import Person\n'), ((881, 910), 'Familytree.individual.Person', 'Person', (['(3)', '"""Mother"""', '"""Female"""'], {}), "(3, 'Mother', 'Female')\n", (887, 910), False, 'from Familytree.individual import Person\n'), ((1418, 1454), 'Familytree.individual.Person', 'Person', (['(2)', '"""female_father"""', '"""Female"""'], {}), "(2, 'female_father', 'Female')\n", (1424, 1454), False, 'from Familytree.individual import Person\n'), ((1485, 1512), 'Familytree.individual.Person', 'Person', (['(3)', '"""Father"""', '"""Male"""'], {}), "(3, 'Father', 'Male')\n", (1491, 1512), False, 'from Familytree.individual import Person\n'), ((2021, 2062), 'Familytree.individual.Person', 'Person', (['(2)', '"""same_gender_spouse"""', '"""Female"""'], {}), "(2, 'same_gender_spouse', 'Female')\n", (2027, 2062), False, 'from Familytree.individual import Person\n'), ((2093, 2121), 'Familytree.individual.Person', 'Person', (['(3)', '"""Husband"""', '"""Male"""'], {}), "(3, 'Husband', 'Male')\n", (2099, 2121), False, 'from Familytree.individual import Person\n'), ((2623, 2654), 'Familytree.individual.Person', 'Person', (['(4)', '"""Daughter"""', '"""Female"""'], {}), "(4, 'Daughter', 'Female')\n", (2629, 2654), False, 'from Familytree.individual import Person\n')] |
import codecs
import collections
import io
import os
import re
import struct
from .instruction import Instruction
from .opcode import Opcodes
from .registers import Registers
from .section import Section
from .symbol import Symbol
def p32(v):
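    """Pack an integer as a little-endian unsigned 32-bit value."""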
return struct.pack('<I', v)
def unescape_str_to_bytes(x):
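    """Interpret backslash escape sequences in a str and return the raw bytes."""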
return codecs.escape_decode(x.encode('utf8'))[0]
class QueueReader(object):
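    """Read lines from a queue of file objects, draining each in turn."""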
def __init__(self, *files):
self.fq = list(files)
def add_file(self, f):
self.fq.append(f)
def insert_file(self, f, idx=0):
self.fq.insert(idx, f)
def readline(self):
while len(self.fq) > 0:
r = self.fq[0].readline()
if not r:
self.fq.pop(0)
continue
return r
return ''
class Parser(object):
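    """Two-pass assembler: parse() collects sections, labels and instructions;
    build() resolves symbols and emits the final binary image."""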
def __init__(self, fin):
self.sections = None
self.section_bodies = {}
self.entry = None
if type(fin) is str:
fin = io.StringIO(fin)
self.reader = QueueReader(fin)
self.parse()
def parse(self):
sections = collections.OrderedDict()
current_section = None
lineno = 0
while True:
lineno += 1
raw = self.reader.readline()
if not raw:
break
line = raw.split(';')[0].strip()
if not line:
continue
elif line.startswith('.sect'):
args = line.split(maxsplit=1)[1].split(' ')
name = args[0].upper()
if len(args) > 1:
addr = int(args[1], 16)
else:
if name == 'TEXT':
addr = 0x4000
else:
addr = 0x6000
new_sect = Section(addr)
sections[name] = new_sect
current_section = new_sect
elif line.startswith('.include'):
filename = line.split(maxsplit=1)[1].strip()
if filename.startswith('zstdlib/'):
filename = os.path.join(os.path.dirname(__file__), '../../..', filename)
self.reader.insert_file(open(filename))
elif line.startswith('.entry'):
                entry = line.split()[1]
                # record the entry point; returning here would abort the parse loop
                self.entry = self.try_parse_imm(entry)
elif line.startswith('.align'):
current_section.align(self._parse_int(line.split()[1]))
elif line.startswith('.db'):
data = line[3:].split(',')
bytes_data = bytes(int(i.strip(), 16) for i in data)
current_section.write(bytes_data)
elif line.startswith('.zero'):
data = line[5:].strip()
if data.startswith('0x'):
n = int(data, 16)
else:
n = int(data)
current_section.write(b'\0' * n)
elif line.startswith('.str'):
data = line[4:].strip()
bytes_data = unescape_str_to_bytes(data[1:-1])
current_section.write(bytes_data + b'\0\0')
elif line[-1] == ':':
label_name = line[:-1]
current_section.label(label_name)
else:
for ins in self.parse_instruction(line):
current_section.write(ins)
self.sections = sections
def resolve_label(self, name):
for section in self.sections.values():
addr = section.labels.get(name, None)
if addr:
return addr
def get_entry(self):
if type(self.entry) is Symbol:
return self.entry.resolve(self.resolve_label)
elif self.entry is not None:
return self.entry
elif self.resolve_label('start'):
return self.resolve_label('start')
else:
return 0x4000
def build(self):
sections = []
bodies = []
for name, section in self.sections.items():
buff = io.BytesIO()
ip = section.addr
for data in section.container:
if type(data) is Instruction:
ins = data
if type(ins.imm) is Symbol:
sym = ins.imm
buff.write(ins.compose(sym.resolve(self.resolve_label, ip)))
else:
buff.write(ins.compose())
ip += 4
elif type(data) is Symbol:
val = data.resolve(self.resolve_label, ip)
buff.write(p32(val))
ip += 4
elif type(data) is bytes:
buff.write(data)
ip += len(data)
body = buff.getvalue()
self.section_bodies[name] = body
bodies.append(body)
sections.append(struct.pack('<HH',
section.addr, # section_addr
len(body), # section_size
))
header = struct.pack('<ccHHH',
b'Z', b'z', # magic
0, # file_ver
self.get_entry(), # entry
len(bodies), # section_count
)
return header + b''.join(sections) + b''.join(bodies)
def parse_instruction(self, line):
try:
ins_name, args = line.split(maxsplit=1)
args = [ i.strip() for i in args.split(',') ]
        except ValueError:
ins_name = line
args = []
if ins_name.upper() == 'JMP':
is_jmp = True
ins_name = 'ADDI'
args = ['IP', 'IP', args[0]]
else:
is_jmp = False
if len(args) > 0:
if ins_name[0].upper() == 'J' or ins_name.upper() == 'CALL' or is_jmp:
rel = True
else:
rel = False
imm = self.try_parse_imm(args[-1], rel=rel)
if imm is None:
if rel:
raise ValueError('jump instruction must have target\nline: %r' % line)
regs = args
else:
regs = args[:-1]
yield Instruction(ins_name, *regs, imm=imm)
else:
yield Instruction(ins_name, *args)
def try_parse_imm(self, val, rel=False):
if val[0] == '$':
if '+' in val:
name, offset = val[1:].split('+')
offset = self._parse_int(offset)
return Symbol(name, offset, is_relative=rel)
else:
return Symbol(val[1:], is_relative=rel)
try:
return self._parse_int(val)
        except (ValueError, IndexError):
            return None
def _parse_int(self, s):
s = s.strip()
if s[:2] == '0x':
return int(s, 16)
elif s[0] == '#':
return int(s[1:], 10)
else:
            return int(s)
| [
"collections.OrderedDict",
"io.BytesIO",
"struct.pack",
"os.path.dirname",
"io.StringIO"
] | [((256, 276), 'struct.pack', 'struct.pack', (['"""<I"""', 'v'], {}), "('<I', v)\n", (267, 276), False, 'import struct\n'), ((1095, 1120), 'collections.OrderedDict', 'collections.OrderedDict', ([], {}), '()\n', (1118, 1120), False, 'import collections\n'), ((976, 992), 'io.StringIO', 'io.StringIO', (['fin'], {}), '(fin)\n', (987, 992), False, 'import io\n'), ((4058, 4070), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (4068, 4070), False, 'import io\n'), ((2112, 2137), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2127, 2137), False, 'import os\n')] |
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import pickle as pkl
from argparse import ArgumentParser
from collections import OrderedDict
from typing import Dict
import numpy as np
import torch
from build_index import load_model
from omegaconf import DictConfig, OmegaConf
from nemo.utils import logging
try:
import faiss
except ModuleNotFoundError:
logging.warning("Faiss is required for building the index. Please install faiss-gpu")
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
def get_query_embedding(query, model):
"""Use entity linking encoder to get embedding for index query"""
model_input = model.tokenizer(
query,
add_special_tokens=True,
padding=True,
truncation=True,
max_length=512,
return_token_type_ids=True,
return_attention_mask=True,
)
query_emb = model.forward(
input_ids=torch.LongTensor([model_input["input_ids"]]).to(device),
token_type_ids=torch.LongTensor([model_input["token_type_ids"]]).to(device),
attention_mask=torch.LongTensor([model_input["attention_mask"]]).to(device),
)
return query_emb
def query_index(
query: str, cfg: DictConfig, model: object, index: object, pca: object, idx2id: dict, id2string: dict,
) -> Dict:
"""
Query the nearest neighbor index of entities to find the
concepts in the index dataset that are most similar to the
query.
Args:
query (str): entity to look up in the index
        cfg (DictConfig): config object to specify query parameters
model (EntityLinkingModel): entity linking encoder model
index (object): faiss index
pca (object): sklearn pca transformation to be applied to queries
idx2id (dict): dictionary mapping unique concept dataset index to
its CUI
        id2string (dict): dictionary mapping each unique CUI to a
representative english description of
the concept
Returns:
A dictionary with the concept ids of the index's most similar
entities as the keys and a tuple containing the string
representation of that concept and its cosine similarity to
the query as the values.
"""
query_emb = get_query_embedding(query, model).detach().cpu().numpy()
if cfg.apply_pca:
query_emb = pca.transform(query_emb)
dist, neighbors = index.search(query_emb.astype(np.float32), cfg.query_num_factor * cfg.top_n)
dist, neighbors = dist[0], neighbors[0]
unique_ids = OrderedDict()
neighbor_idx = 0
# Many of nearest neighbors could map to the same concept id, their idx is their unique identifier
while len(unique_ids) < cfg.top_n and neighbor_idx < len(neighbors):
concept_id_idx = neighbors[neighbor_idx]
concept_id = idx2id[concept_id_idx]
# Only want one instance of each unique concept
if concept_id not in unique_ids:
concept = id2string[concept_id]
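                # keep (description, similarity); search returned a distance, so similarity = 1 - dist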
unique_ids[concept_id] = (concept, 1 - dist[neighbor_idx])
neighbor_idx += 1
unique_ids = dict(unique_ids)
return unique_ids
def main(cfg: DictConfig, restore: bool):
"""
Loads faiss index and allows commandline queries
to the index. Builds new index if one hasn't been built yet.
Args:
cfg: Config file specifying index parameters
restore: Whether to restore model weights trained
by the user. Otherwise will load weights
used before self alignment pretraining.
"""
if not os.path.isfile(cfg.index.index_save_name) or (
cfg.apply_pca and not os.path.isfile(cfg.index.pca.pca_save_name) or not os.path.isfile(cfg.index.idx_to_id)
):
logging.warning("Either no index and/or no mapping from entity idx to ids exists. Please run `build_index.py`")
return
logging.info("Loading entity linking encoder model")
model = load_model(cfg.model, restore)
logging.info("Loading index and associated files")
index = faiss.read_index(cfg.index.index_save_name)
idx2id = pkl.load(open(cfg.index.idx_to_id, "rb"))
id2string = pkl.load(open(cfg.index.id_to_string, "rb")) # Should be created during dataset prep
if cfg.index.apply_pca:
pca = pkl.load(open(cfg.index.pca.pca_save_name, "rb"))
while True:
query = input("enter index query: ")
        # query_index takes the whole query config as one argument (see its
        # signature above); assuming top_n/query_num_factor live under cfg.index,
        # matching the cfg.index.apply_pca access earlier in main()
        output = query_index(query, cfg.index, model, index, pca, idx2id, id2string)
if query == "exit":
break
for concept_id in output:
concept_details = output[concept_id]
concept_id = "C" + str(concept_id).zfill(7)
print(concept_id, concept_details)
print("----------------\n")
if __name__ == '__main__':
parser = ArgumentParser()
parser.add_argument(
"--restore", action="store_true", help="Whether to restore encoder model weights from nemo path"
)
parser.add_argument("--project_dir", required=False, type=str, default=".")
parser.add_argument("--cfg", required=False, type=str, default="./conf/umls_medical_entity_linking_config.yaml")
args = parser.parse_args()
cfg = OmegaConf.load(args.cfg)
cfg.project_dir = args.project_dir
main(cfg, args.restore)
| [
"nemo.utils.logging.info",
"collections.OrderedDict",
"argparse.ArgumentParser",
"torch.LongTensor",
"omegaconf.OmegaConf.load",
"faiss.read_index",
"nemo.utils.logging.warning",
"os.path.isfile",
"torch.cuda.is_available",
"build_index.load_model"
] | [((3169, 3182), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (3180, 3182), False, 'from collections import OrderedDict\n'), ((4511, 4563), 'nemo.utils.logging.info', 'logging.info', (['"""Loading entity linking encoder model"""'], {}), "('Loading entity linking encoder model')\n", (4523, 4563), False, 'from nemo.utils import logging\n'), ((4576, 4606), 'build_index.load_model', 'load_model', (['cfg.model', 'restore'], {}), '(cfg.model, restore)\n', (4586, 4606), False, 'from build_index import load_model\n'), ((4612, 4662), 'nemo.utils.logging.info', 'logging.info', (['"""Loading index and associated files"""'], {}), "('Loading index and associated files')\n", (4624, 4662), False, 'from nemo.utils import logging\n'), ((4675, 4718), 'faiss.read_index', 'faiss.read_index', (['cfg.index.index_save_name'], {}), '(cfg.index.index_save_name)\n', (4691, 4718), False, 'import faiss\n'), ((5440, 5456), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (5454, 5456), False, 'from argparse import ArgumentParser\n'), ((5832, 5856), 'omegaconf.OmegaConf.load', 'OmegaConf.load', (['args.cfg'], {}), '(args.cfg)\n', (5846, 5856), False, 'from omegaconf import DictConfig, OmegaConf\n'), ((937, 1027), 'nemo.utils.logging.warning', 'logging.warning', (['"""Faiss is required for building the index. Please install faiss-gpu"""'], {}), "(\n 'Faiss is required for building the index. Please install faiss-gpu')\n", (952, 1027), False, 'from nemo.utils import logging\n'), ((1056, 1081), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1079, 1081), False, 'import torch\n'), ((4379, 4500), 'nemo.utils.logging.warning', 'logging.warning', (['"""Either no index and/or no mapping from entity idx to ids exists. Please run `build_index.py`"""'], {}), "(\n 'Either no index and/or no mapping from entity idx to ids exists. Please run `build_index.py`'\n )\n", (4394, 4500), False, 'from nemo.utils import logging\n'), ((4200, 4241), 'os.path.isfile', 'os.path.isfile', (['cfg.index.index_save_name'], {}), '(cfg.index.index_save_name)\n', (4214, 4241), False, 'import os\n'), ((4328, 4363), 'os.path.isfile', 'os.path.isfile', (['cfg.index.idx_to_id'], {}), '(cfg.index.idx_to_id)\n', (4342, 4363), False, 'import os\n'), ((1487, 1531), 'torch.LongTensor', 'torch.LongTensor', (["[model_input['input_ids']]"], {}), "([model_input['input_ids']])\n", (1503, 1531), False, 'import torch\n'), ((1567, 1616), 'torch.LongTensor', 'torch.LongTensor', (["[model_input['token_type_ids']]"], {}), "([model_input['token_type_ids']])\n", (1583, 1616), False, 'import torch\n'), ((1652, 1701), 'torch.LongTensor', 'torch.LongTensor', (["[model_input['attention_mask']]"], {}), "([model_input['attention_mask']])\n", (1668, 1701), False, 'import torch\n'), ((4277, 4320), 'os.path.isfile', 'os.path.isfile', (['cfg.index.pca.pca_save_name'], {}), '(cfg.index.pca.pca_save_name)\n', (4291, 4320), False, 'import os\n')] |
"""
test_Payload.py
Copyright 2012 <NAME>
This file is part of w3af, http://w3af.org/ .
w3af is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation version 2 of the License.
w3af is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with w3af; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
import unittest
from mock import MagicMock
from w3af.plugins.attack.payloads.base_payload import Payload
from w3af.plugins.attack.payloads.payloads.tests.test_payload_handler import (FakeReadShell,
FakeExecShell)
class TestBasePayload(unittest.TestCase):
def setUp(self):
self.bp = Payload(FakeReadShell())
def test_can_run(self):
self.assertEqual(self.bp.can_run(), set())
def test_run_only_read(self):
bp = Payload(FakeReadShell())
self.assertRaises(AttributeError, bp.run, 'filename')
def test_run_execute(self):
class Executable(Payload):
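            # minimal payload that records which hooks were invoked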
called_run_execute = False
called_api_execute = False
def run_execute(self, cmd):
self.called_run_execute = True
self.shell.execute(cmd)
def api_execute(self, cmd):
self.called_api_execute = True
shell = FakeExecShell()
shell.execute = MagicMock(return_value='')
executable = Executable(shell)
self.assertEqual(self.bp.can_run(), set())
executable.run('command')
self.assertTrue(executable.called_run_execute)
self.assertEqual(executable.shell.execute.call_count, 1)
executable.run_api('command')
self.assertTrue(executable.called_api_execute)
| [
"w3af.plugins.attack.payloads.payloads.tests.test_payload_handler.FakeExecShell",
"mock.MagicMock",
"w3af.plugins.attack.payloads.payloads.tests.test_payload_handler.FakeReadShell"
] | [((1724, 1739), 'w3af.plugins.attack.payloads.payloads.tests.test_payload_handler.FakeExecShell', 'FakeExecShell', ([], {}), '()\n', (1737, 1739), False, 'from w3af.plugins.attack.payloads.payloads.tests.test_payload_handler import FakeReadShell, FakeExecShell\n'), ((1764, 1790), 'mock.MagicMock', 'MagicMock', ([], {'return_value': '""""""'}), "(return_value='')\n", (1773, 1790), False, 'from mock import MagicMock\n'), ((1085, 1100), 'w3af.plugins.attack.payloads.payloads.tests.test_payload_handler.FakeReadShell', 'FakeReadShell', ([], {}), '()\n', (1098, 1100), False, 'from w3af.plugins.attack.payloads.payloads.tests.test_payload_handler import FakeReadShell, FakeExecShell\n'), ((1246, 1261), 'w3af.plugins.attack.payloads.payloads.tests.test_payload_handler.FakeReadShell', 'FakeReadShell', ([], {}), '()\n', (1259, 1261), False, 'from w3af.plugins.attack.payloads.payloads.tests.test_payload_handler import FakeReadShell, FakeExecShell\n')] |
import moeda
p = float(input('Enter the price: '))
t = int(input('What is the tax rate? '))
moeda.resumo(p, t)
| [
"moeda.resumo"
] | [((92, 110), 'moeda.resumo', 'moeda.resumo', (['p', 't'], {}), '(p, t)\n', (104, 110), False, 'import moeda\n')] |
#!/usr/bin/env python
r'''
https://www.hackerrank.com/challenges/journey-to-the-moon/problem
'''
import math
import os
import random
import re
import sys
class Node:
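    """Graph vertex; addN() links both endpoints, so edges are undirected."""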
def __init__(self, v):
self.v = v
self.neighbors = set()
self.visit = False
def addN(self, n):
if n not in self.neighbors:
self.neighbors.add(n)
n.addN(self)
def __hash__(self):
return hash(self.v)
def __eq__(self, other):
return self.__class__ == other.__class__ and self.v == other.v
def n(self):
for n in self.neighbors:
yield n
def dfs(self):
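        """Traverse this node's connected component, returning visited values.

        Note: deque.popleft() makes this a breadth-first walk despite the name.
        """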
from collections import deque
root = self
root.visit = True
nlist = deque()
nlist.append(root)
vlist = []
while len(nlist) > 0:
node = nlist.popleft()
vlist.append(node.v)
for n in node.n():
if not n.visit:
nlist.append(n)
n.visit = True
return vlist
# Complete the journeyToMoon function below.
def journeyToMoon(n, astronaut):
ndict = {}
cty_list = []
# Create graph
for a, b in astronaut:
if a not in ndict:
ndict[a] = Node(a)
if b not in ndict:
ndict[b] = Node(b)
ndict[a].addN(ndict[b])
# Search disjoin set
for node in ndict.values():
if not node.visit:
cty_list.append(node.dfs())
print('Group-{}: {}'.format(node.v, cty_list[-1]))
# Other distinct countury
for i in range(n):
if i not in ndict:
cty_list.append(set([i]))
print('Total {} unique countries...{}'.format(len(cty_list), cty_list))
# Calculate unique pairs
if len(cty_list) == 1:
return 0
elif len(cty_list) == 2:
return len(cty_list[0]) * len(cty_list[1])
else:
        cty_len_list = list(map(len, cty_list))  # list() so it is indexable on Python 3
        # running-sum trick: psum accumulates cross-group pair counts,
        # nsum the total size of the groups seen so far
        psum = cty_len_list[0] * cty_len_list[1]
        nsum = cty_len_list[0] + cty_len_list[1]
        for i in range(2, len(cty_len_list)):
            psum += nsum * cty_len_list[i]
            nsum += cty_len_list[i]
return psum
#print("{}".format(journeyToMoon(5, [(0, 1), (2, 3), (0, 4)])))
#print("{}".format(journeyToMoon(4, [(0, 2)])))
import unittest
class FAT(unittest.TestCase):
def setUp(self):
pass
def test_01(self):
tdatas = [
(5, [(0, 1), (2, 3), (0, 4)], 6),
(4, [(0, 2)], 5)
]
for n, astronaut, a in tdatas:
r = journeyToMoon(n, astronaut)
self.assertEqual(a, r, 'Expect={}; Real={}'.format(a, r))
def test_02(self):
tid = [1]
tdatas = []
for id in tid:
with open('journey-to-the-moon.t{}'.format(id), 'r') as fh:
na, pn = fh.readline().strip().split(' ')
astronaut = []
for i in range(int(pn)):
                    astronaut.append(list(map(int, fh.readline().split(' '))))
with open('journey-to-the-moon.a{}'.format(id), 'r') as fh2:
tdatas.append((int(na), astronaut, int(fh2.readline())))
for n, astronaut, a in tdatas:
r = journeyToMoon(n, astronaut)
self.assertEqual(a, r, 'Expect={}; Real={}\n{}'.format(a, r, astronaut))
| [
"collections.deque"
] | [((737, 744), 'collections.deque', 'deque', ([], {}), '()\n', (742, 744), False, 'from collections import deque\n')] |
import pytest
import numpy as np
import pandas as pd
from SPARTACUS10 import spatial_silhouette as spasi
import sklearn.metrics as metrics
import os
def find_path(name, path=None):
    """Walk `path` (the current directory by default) and return the first file named `name`."""
if path is None:
path = os.getcwd()
for root, dirs, files in os.walk(path):
if name in files:
return os.path.join(root, name)
def test_silhouette():
"""
Does silhouette_coefficient() function produce the same results as
silhouette_score() function from sklearn.metrics using Euclidean metric?
"""
# Test on matrixA
X = np.genfromtxt(find_path("matrixA.csv"), delimiter=",", skip_header=1, usecols = range(1,21))
V = X.shape[1]
for i in range(3, 11):
labels = np.random.randint(1, i+1, V)
sil_score1 = spasi.silhouette_coefficient(X, labels, metric = "euclidean", iter_max = 10)
sil_score2 = metrics.silhouette_score(X.T, labels, metric = "euclidean")
assert np.round(sil_score1,10) == np.round(sil_score2, 10), "Silhouette function (Euclidean) produces different results than that implemented in scikit-learn"
# Test on random data comparison with existing function
V = 100
X = np.random.normal(size = (10, V))
for i in range(3, 11):
labels = np.random.randint(1, i+1, V)
sil_score1 = spasi.silhouette_coefficient(X, labels, metric = "euclidean", iter_max = 10)
sil_score2 = metrics.silhouette_score(X.T, labels, metric = "euclidean")
assert np.round(sil_score1,10) == np.round(sil_score2, 10), "Silhouette function (Euclidean) produces different results than that implemented in scikit-learn"
# Test on random data
random_data = np.genfromtxt(find_path("random_data.csv"), delimiter=",")
random_labels = np.genfromtxt(find_path("random_labels.csv"), delimiter=",")
silhouette_score_Eucl = spasi.silhouette_coefficient(random_data, random_labels, metric = "euclidean")
assert np.isclose(silhouette_score_Eucl, -0.018137954346288798), "Error in Euclidean silhouette_coefficient function"
silhouette_score_corr = spasi.silhouette_coefficient(random_data, random_labels, metric = "correlation")
assert np.isclose(silhouette_score_corr, -0.01710701512585803), "Error in correlation silhouette_coefficient function"
def test_ensemble_silhouette():
X = np.array([[1,1,2,2,3,3,4,4],
[1,1,2,2,3,3,4,4],
[1,1,2,2,3,3,4,4],
[1,1,2,2,5,5,6,6],
[1,1,1,2,3,3,3,4],
[1,1,1,2,3,3,3,4]])
labels = [1,1,2,2,3,3,4,4]
assert spasi.silhouette_coefficient(X[0:4,], labels, metric = "jaccard", iter_max = 4) == 1, "Ensemble silhouette produces wrong results"
sil_score1 = spasi.silhouette_coefficient(X, labels, metric = "jaccard", iter_max = 4)
assert np.round(sil_score1, 8) == 0.79166667, "Ensemble silhouette produces wrong results"
X1 = np.array([[1,1,2,2], [1,2,2,2], [1,1,1,2]])
labels1 = [1,1,2,2]
sil_score2 = spasi.silhouette_coefficient(X1, labels1, metric = "jaccard", iter_max = 4)
assert np.round(sil_score2, 8) == 0.46666667, "Ensemble silhouette produces wrong results"
def test_simplified_silhouette():
# Test on random data
random_data = np.genfromtxt(find_path("random_data.csv"), delimiter=",")
random_labels = np.genfromtxt(find_path("random_labels.csv"), delimiter=",")
simp_silhouette_score_Eucl = spasi.simplified_silhouette_coefficient(random_data, random_labels, metric = "euclidean")
assert np.isclose(simp_silhouette_score_Eucl, 0.01761300723620632), "Error in Euclidean simplified_silhouette_coefficient function"
simp_silhouette_score_corr = spasi.simplified_silhouette_coefficient(random_data, random_labels, metric = "correlation")
assert np.isclose(simp_silhouette_score_corr, 0.07464102055366918), "Error in correlation simplified_silhouette_coefficient function"
def test_spatial_silhouette():
# Test on random data
random_data = np.genfromtxt(find_path("random_data_spatial.csv"), delimiter=",")
matXYZ = np.argwhere(np.zeros((8, 3, 2)) == 0)
labels = np.repeat(np.array([1,2,3,4]), 2*3*2)
list_neighbors = spasi.get_list_neighbors(matXYZ)
spatial_silhouette_score_Eucl = spasi.silhouette_coefficient_spatial(random_data, labels, list_neighbors, metric = "euclidean")
assert np.isclose(spatial_silhouette_score_Eucl, -0.0019062813008068388), "Error in Euclidean silhouette_coefficient_spatial function"
spatial_silhouette_score_corr = spasi.silhouette_coefficient_spatial(random_data, labels, list_neighbors, metric = "correlation")
assert np.isclose(spatial_silhouette_score_corr, -0.0013034499248535598), "Error in correlation silhouette_coefficient_spatial function"
def test_spatial_simplified_silhouette():
# Test on random data
random_data = np.genfromtxt(find_path("random_data_spatial.csv"), delimiter=",")
matXYZ = np.argwhere(np.zeros((8, 3, 2)) == 0)
labels = np.repeat(np.array([1,2,3,4]), 2*3*2)
list_neighbors = spasi.get_list_neighbors(matXYZ)
spatial_simp_silhouette_score_Eucl = spasi.simplified_silhouette_coefficient_spatial(random_data, labels, list_neighbors, metric = "euclidean")
assert np.isclose(spatial_simp_silhouette_score_Eucl, 0.06783823739924444), "Error in Euclidean simplified_silhouette_coefficient_spatial function"
spatial_simp_silhouette_score_corr = spasi.simplified_silhouette_coefficient_spatial(random_data, labels, list_neighbors, metric = "correlation")
assert np.isclose(spatial_simp_silhouette_score_corr, 0.22422765231602626), "Error in correlation simplified_silhouette_coefficient_spatial function"
def test_list_neighbors():
list_neighbors_true = pd.read_csv(find_path("list_neighbors.csv"))
list_neighbors_true.columns = pd.RangeIndex(start=0, stop=5, step=1)
matXYZ = np.argwhere(np.zeros((4, 3, 2)) == 0)
list_neighbors = spasi.get_list_neighbors(matXYZ)
list_neighbors = pd.DataFrame(list_neighbors)
list_neighbors.columns = pd.RangeIndex(start=0, stop=5, step=1)
assert pd.DataFrame.equals(list_neighbors_true, list_neighbors), "list_neighbors does not work"
# pd.testing.assert_frame_equal(list_neighbors_true, list_neighbors, check_dtype = False, check_column_type = False)
# def test_main():
# assert main([]) == 0
| [
"numpy.random.normal",
"numpy.isclose",
"SPARTACUS10.spatial_silhouette.simplified_silhouette_coefficient_spatial",
"numpy.round",
"os.path.join",
"SPARTACUS10.spatial_silhouette.silhouette_coefficient_spatial",
"os.getcwd",
"numpy.array",
"numpy.random.randint",
"SPARTACUS10.spatial_silhouette.ge... | [((268, 281), 'os.walk', 'os.walk', (['path'], {}), '(path)\n', (275, 281), False, 'import os\n'), ((1207, 1237), 'numpy.random.normal', 'np.random.normal', ([], {'size': '(10, V)'}), '(size=(10, V))\n', (1223, 1237), True, 'import numpy as np\n'), ((1882, 1958), 'SPARTACUS10.spatial_silhouette.silhouette_coefficient', 'spasi.silhouette_coefficient', (['random_data', 'random_labels'], {'metric': '"""euclidean"""'}), "(random_data, random_labels, metric='euclidean')\n", (1910, 1958), True, 'from SPARTACUS10 import spatial_silhouette as spasi\n'), ((1972, 2028), 'numpy.isclose', 'np.isclose', (['silhouette_score_Eucl', '(-0.018137954346288798)'], {}), '(silhouette_score_Eucl, -0.018137954346288798)\n', (1982, 2028), True, 'import numpy as np\n'), ((2111, 2189), 'SPARTACUS10.spatial_silhouette.silhouette_coefficient', 'spasi.silhouette_coefficient', (['random_data', 'random_labels'], {'metric': '"""correlation"""'}), "(random_data, random_labels, metric='correlation')\n", (2139, 2189), True, 'from SPARTACUS10 import spatial_silhouette as spasi\n'), ((2203, 2258), 'numpy.isclose', 'np.isclose', (['silhouette_score_corr', '(-0.01710701512585803)'], {}), '(silhouette_score_corr, -0.01710701512585803)\n', (2213, 2258), True, 'import numpy as np\n'), ((2360, 2535), 'numpy.array', 'np.array', (['[[1, 1, 2, 2, 3, 3, 4, 4], [1, 1, 2, 2, 3, 3, 4, 4], [1, 1, 2, 2, 3, 3, 4, \n 4], [1, 1, 2, 2, 5, 5, 6, 6], [1, 1, 1, 2, 3, 3, 3, 4], [1, 1, 1, 2, 3,\n 3, 3, 4]]'], {}), '([[1, 1, 2, 2, 3, 3, 4, 4], [1, 1, 2, 2, 3, 3, 4, 4], [1, 1, 2, 2, \n 3, 3, 4, 4], [1, 1, 2, 2, 5, 5, 6, 6], [1, 1, 1, 2, 3, 3, 3, 4], [1, 1,\n 1, 2, 3, 3, 3, 4]])\n', (2368, 2535), True, 'import numpy as np\n'), ((2788, 2857), 'SPARTACUS10.spatial_silhouette.silhouette_coefficient', 'spasi.silhouette_coefficient', (['X', 'labels'], {'metric': '"""jaccard"""', 'iter_max': '(4)'}), "(X, labels, metric='jaccard', iter_max=4)\n", (2816, 2857), True, 'from SPARTACUS10 import spatial_silhouette as spasi\n'), ((2966, 3018), 'numpy.array', 'np.array', (['[[1, 1, 2, 2], [1, 2, 2, 2], [1, 1, 1, 2]]'], {}), '([[1, 1, 2, 2], [1, 2, 2, 2], [1, 1, 1, 2]])\n', (2974, 3018), True, 'import numpy as np\n'), ((3051, 3122), 'SPARTACUS10.spatial_silhouette.silhouette_coefficient', 'spasi.silhouette_coefficient', (['X1', 'labels1'], {'metric': '"""jaccard"""', 'iter_max': '(4)'}), "(X1, labels1, metric='jaccard', iter_max=4)\n", (3079, 3122), True, 'from SPARTACUS10 import spatial_silhouette as spasi\n'), ((3482, 3574), 'SPARTACUS10.spatial_silhouette.simplified_silhouette_coefficient', 'spasi.simplified_silhouette_coefficient', (['random_data', 'random_labels'], {'metric': '"""euclidean"""'}), "(random_data, random_labels, metric=\n 'euclidean')\n", (3521, 3574), True, 'from SPARTACUS10 import spatial_silhouette as spasi\n'), ((3583, 3642), 'numpy.isclose', 'np.isclose', (['simp_silhouette_score_Eucl', '(0.01761300723620632)'], {}), '(simp_silhouette_score_Eucl, 0.01761300723620632)\n', (3593, 3642), True, 'import numpy as np\n'), ((3741, 3835), 'SPARTACUS10.spatial_silhouette.simplified_silhouette_coefficient', 'spasi.simplified_silhouette_coefficient', (['random_data', 'random_labels'], {'metric': '"""correlation"""'}), "(random_data, random_labels, metric=\n 'correlation')\n", (3780, 3835), True, 'from SPARTACUS10 import spatial_silhouette as spasi\n'), ((3844, 3903), 'numpy.isclose', 'np.isclose', (['simp_silhouette_score_corr', '(0.07464102055366918)'], {}), '(simp_silhouette_score_corr, 0.07464102055366918)\n', 
(3854, 3903), True, 'import numpy as np\n'), ((4237, 4269), 'SPARTACUS10.spatial_silhouette.get_list_neighbors', 'spasi.get_list_neighbors', (['matXYZ'], {}), '(matXYZ)\n', (4261, 4269), True, 'from SPARTACUS10 import spatial_silhouette as spasi\n'), ((4307, 4404), 'SPARTACUS10.spatial_silhouette.silhouette_coefficient_spatial', 'spasi.silhouette_coefficient_spatial', (['random_data', 'labels', 'list_neighbors'], {'metric': '"""euclidean"""'}), "(random_data, labels, list_neighbors,\n metric='euclidean')\n", (4343, 4404), True, 'from SPARTACUS10 import spatial_silhouette as spasi\n'), ((4417, 4482), 'numpy.isclose', 'np.isclose', (['spatial_silhouette_score_Eucl', '(-0.0019062813008068388)'], {}), '(spatial_silhouette_score_Eucl, -0.0019062813008068388)\n', (4427, 4482), True, 'import numpy as np\n'), ((4581, 4680), 'SPARTACUS10.spatial_silhouette.silhouette_coefficient_spatial', 'spasi.silhouette_coefficient_spatial', (['random_data', 'labels', 'list_neighbors'], {'metric': '"""correlation"""'}), "(random_data, labels, list_neighbors,\n metric='correlation')\n", (4617, 4680), True, 'from SPARTACUS10 import spatial_silhouette as spasi\n'), ((4693, 4758), 'numpy.isclose', 'np.isclose', (['spatial_silhouette_score_corr', '(-0.0013034499248535598)'], {}), '(spatial_silhouette_score_corr, -0.0013034499248535598)\n', (4703, 4758), True, 'import numpy as np\n'), ((5100, 5132), 'SPARTACUS10.spatial_silhouette.get_list_neighbors', 'spasi.get_list_neighbors', (['matXYZ'], {}), '(matXYZ)\n', (5124, 5132), True, 'from SPARTACUS10 import spatial_silhouette as spasi\n'), ((5175, 5283), 'SPARTACUS10.spatial_silhouette.simplified_silhouette_coefficient_spatial', 'spasi.simplified_silhouette_coefficient_spatial', (['random_data', 'labels', 'list_neighbors'], {'metric': '"""euclidean"""'}), "(random_data, labels,\n list_neighbors, metric='euclidean')\n", (5222, 5283), True, 'from SPARTACUS10 import spatial_silhouette as spasi\n'), ((5296, 5363), 'numpy.isclose', 'np.isclose', (['spatial_simp_silhouette_score_Eucl', '(0.06783823739924444)'], {}), '(spatial_simp_silhouette_score_Eucl, 0.06783823739924444)\n', (5306, 5363), True, 'import numpy as np\n'), ((5478, 5588), 'SPARTACUS10.spatial_silhouette.simplified_silhouette_coefficient_spatial', 'spasi.simplified_silhouette_coefficient_spatial', (['random_data', 'labels', 'list_neighbors'], {'metric': '"""correlation"""'}), "(random_data, labels,\n list_neighbors, metric='correlation')\n", (5525, 5588), True, 'from SPARTACUS10 import spatial_silhouette as spasi\n'), ((5601, 5668), 'numpy.isclose', 'np.isclose', (['spatial_simp_silhouette_score_corr', '(0.22422765231602626)'], {}), '(spatial_simp_silhouette_score_corr, 0.22422765231602626)\n', (5611, 5668), True, 'import numpy as np\n'), ((5881, 5919), 'pandas.RangeIndex', 'pd.RangeIndex', ([], {'start': '(0)', 'stop': '(5)', 'step': '(1)'}), '(start=0, stop=5, step=1)\n', (5894, 5919), True, 'import pandas as pd\n'), ((5992, 6024), 'SPARTACUS10.spatial_silhouette.get_list_neighbors', 'spasi.get_list_neighbors', (['matXYZ'], {}), '(matXYZ)\n', (6016, 6024), True, 'from SPARTACUS10 import spatial_silhouette as spasi\n'), ((6046, 6074), 'pandas.DataFrame', 'pd.DataFrame', (['list_neighbors'], {}), '(list_neighbors)\n', (6058, 6074), True, 'import pandas as pd\n'), ((6104, 6142), 'pandas.RangeIndex', 'pd.RangeIndex', ([], {'start': '(0)', 'stop': '(5)', 'step': '(1)'}), '(start=0, stop=5, step=1)\n', (6117, 6142), True, 'import pandas as pd\n'), ((6154, 6210), 'pandas.DataFrame.equals', 'pd.DataFrame.equals', 
(['list_neighbors_true', 'list_neighbors'], {}), '(list_neighbors_true, list_neighbors)\n', (6173, 6210), True, 'import pandas as pd\n'), ((227, 238), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (236, 238), False, 'import os\n'), ((741, 771), 'numpy.random.randint', 'np.random.randint', (['(1)', '(i + 1)', 'V'], {}), '(1, i + 1, V)\n', (758, 771), True, 'import numpy as np\n'), ((791, 863), 'SPARTACUS10.spatial_silhouette.silhouette_coefficient', 'spasi.silhouette_coefficient', (['X', 'labels'], {'metric': '"""euclidean"""', 'iter_max': '(10)'}), "(X, labels, metric='euclidean', iter_max=10)\n", (819, 863), True, 'from SPARTACUS10 import spatial_silhouette as spasi\n'), ((893, 950), 'sklearn.metrics.silhouette_score', 'metrics.silhouette_score', (['X.T', 'labels'], {'metric': '"""euclidean"""'}), "(X.T, labels, metric='euclidean')\n", (917, 950), True, 'import sklearn.metrics as metrics\n'), ((1284, 1314), 'numpy.random.randint', 'np.random.randint', (['(1)', '(i + 1)', 'V'], {}), '(1, i + 1, V)\n', (1301, 1314), True, 'import numpy as np\n'), ((1334, 1406), 'SPARTACUS10.spatial_silhouette.silhouette_coefficient', 'spasi.silhouette_coefficient', (['X', 'labels'], {'metric': '"""euclidean"""', 'iter_max': '(10)'}), "(X, labels, metric='euclidean', iter_max=10)\n", (1362, 1406), True, 'from SPARTACUS10 import spatial_silhouette as spasi\n'), ((1436, 1493), 'sklearn.metrics.silhouette_score', 'metrics.silhouette_score', (['X.T', 'labels'], {'metric': '"""euclidean"""'}), "(X.T, labels, metric='euclidean')\n", (1460, 1493), True, 'import sklearn.metrics as metrics\n'), ((2640, 2715), 'SPARTACUS10.spatial_silhouette.silhouette_coefficient', 'spasi.silhouette_coefficient', (['X[0:4,]', 'labels'], {'metric': '"""jaccard"""', 'iter_max': '(4)'}), "(X[0:4,], labels, metric='jaccard', iter_max=4)\n", (2668, 2715), True, 'from SPARTACUS10 import spatial_silhouette as spasi\n'), ((2873, 2896), 'numpy.round', 'np.round', (['sil_score1', '(8)'], {}), '(sil_score1, 8)\n', (2881, 2896), True, 'import numpy as np\n'), ((3139, 3162), 'numpy.round', 'np.round', (['sil_score2', '(8)'], {}), '(sil_score2, 8)\n', (3147, 3162), True, 'import numpy as np\n'), ((4188, 4210), 'numpy.array', 'np.array', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (4196, 4210), True, 'import numpy as np\n'), ((5051, 5073), 'numpy.array', 'np.array', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (5059, 5073), True, 'import numpy as np\n'), ((328, 352), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (340, 352), False, 'import os\n'), ((970, 994), 'numpy.round', 'np.round', (['sil_score1', '(10)'], {}), '(sil_score1, 10)\n', (978, 994), True, 'import numpy as np\n'), ((997, 1021), 'numpy.round', 'np.round', (['sil_score2', '(10)'], {}), '(sil_score2, 10)\n', (1005, 1021), True, 'import numpy as np\n'), ((1513, 1537), 'numpy.round', 'np.round', (['sil_score1', '(10)'], {}), '(sil_score1, 10)\n', (1521, 1537), True, 'import numpy as np\n'), ((1540, 1564), 'numpy.round', 'np.round', (['sil_score2', '(10)'], {}), '(sil_score2, 10)\n', (1548, 1564), True, 'import numpy as np\n'), ((4139, 4158), 'numpy.zeros', 'np.zeros', (['(8, 3, 2)'], {}), '((8, 3, 2))\n', (4147, 4158), True, 'import numpy as np\n'), ((5002, 5021), 'numpy.zeros', 'np.zeros', (['(8, 3, 2)'], {}), '((8, 3, 2))\n', (5010, 5021), True, 'import numpy as np\n'), ((5945, 5964), 'numpy.zeros', 'np.zeros', (['(4, 3, 2)'], {}), '((4, 3, 2))\n', (5953, 5964), True, 'import numpy as np\n')] |
# Standard
import os
import platform
# Pip
import typer
import yaml
from PIL import Image
from PyPDF2 import PdfFileReader, PdfFileWriter
from yaml.scanner import ScannerError
from yaml.loader import SafeLoader
# Custom
from auxiliary.message_keys import MessageKeys as mk
from auxiliary.file_explorer import FileExplorer
# Typer app
app = typer.Typer()
# Files
current_dir = os.getcwd()
files = FileExplorer(home_dir=current_dir)
# Message keys
generate = mk.GeneratePdf
add_meta = mk.AddMetadata
gen_dir = mk.GenerateDir
# Mac and Windows use different slashes.
system: str = platform.system()
if system == "Darwin":
slash = "/"
elif system == "Windows":
slash = "\\"
@app.command(name=gen_dir.generate_dir, help=gen_dir.generate_dir_help)
def generate_directories() -> None:
"""
Generating directories wherein the file that should be combined
are to reside.
example:
python main_app.py gen-dir
:return:
None
"""
try:
typer.echo(gen_dir.generating_dir)
        for f in ("config", "images", "pdfs", "results"):
            os.makedirs(f)
typer.echo(gen_dir.directory_generated)
except FileExistsError:
typer.echo(gen_dir.folders_exists)
@app.command(name=generate.generate_pdf_name,
help=generate.generate_pdf_command)
def generate_pdf(save_name: str = typer.Argument("generated",
help=generate.generate_pdf_help
)) -> None:
"""
description:
Images gathered from the images directory are combined into a single
        .pdf file that is then placed in the pdfs directory. Using the PIL
        library, .jpg, .jpeg, .gif, .png and .tga are supported.
example:
python main_app.py gen-pdf
:arg:
save_name: str the name of the .pdf file being saved.
:returns
no returns
"""
image_dir: str = files.get_folders().get("images", "")
path_exist: bool = os.path.exists(image_dir)
if not path_exist:
raise SystemExit(typer.echo(generate.missing_directory))
images: list = []
valid_images: list = [".jpg", ".jpeg", ".gif", ".png", ".tga"]
for file_name in sorted(os.listdir(image_dir)):
ext: str = os.path.splitext(file_name)[1]
if ext.lower() not in valid_images:
continue
img: str = os.path.join(image_dir, file_name)
images.append(Image.open(img))
if images:
first_image = images[0]
folders = files.get_folders()
save: str = fr"{folders.get('pdfs')}{slash}{save_name}.pdf"
# .pdf generation
typer.echo(generate.images_generate)
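        # save_all=True with append_images writes all frames into one multi-page PDF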
first_image.save(save, save_all=True, append_images=images[1:])
typer.echo(generate.file_created)
else:
typer.echo(generate.no_images)
@app.command(name=add_meta.add_metadata_name, help=add_meta.add_metadata_help)
def add_metadata(pdf_name: str = typer.Argument("", help=add_meta.meta_pdf),
config_name: str = typer.Argument("", help=add_meta.yaml_config),
save_name: str = typer.Argument("results", help=add_meta.save_name)
) -> None:
"""
description:
the data from the .yaml file is added to the respective .pdf file
as metadata
example:
python main_app.py add-metadata gen.pdf test.yaml
:arg:
pdf_name: str is the name of the .pdf which should have metadata added
to it
config_name: str is the name of the .yaml file which contains the
metadata.
:returns
None
"""
# Loading .pdf file
try:
pdf: str = files.get_files("pdfs").get(pdf_name)
pdf_in = open(pdf, "rb")
except TypeError:
        raise SystemExit(typer.echo(add_meta.pdf_not_exists))
# Loading .yaml file
try:
        config_file: str = files.get_files("config").get(config_name)
        with open(config_file, mode="r") as yfile:
            yaml_meta = yaml.load(yfile, Loader=SafeLoader)
except (TypeError, ScannerError, AttributeError) as error:
if "yaml" in str(error):
raise SystemExit(typer.echo(add_meta.yaml_error))
else:
raise SystemExit(typer.echo(add_meta.yaml_not_exist))
try:
# Loading .pdf
reader = PdfFileReader(pdf_in)
writer = PdfFileWriter()
writer.appendPagesFromReader(reader)
metadata = reader.getDocumentInfo()
writer.addMetadata(metadata)
# config file
writer.addMetadata(yaml_meta)
# .pdf with metadata
save_path: str = files.get_folders().get("results")
pdf_out = open(rf"{save_path}{slash}{save_name}_{pdf_name}", "wb")
writer.write(pdf_out)
# Closing files
pdf_out.close()
pdf_in.close()
# Added metadata
typer.echo(add_meta.metadata_added)
except OSError:
        raise SystemExit(typer.echo(add_meta.pdf_corrupt))
if __name__ == "__main__":
    app()
| [
"os.path.exists",
"os.listdir",
"PIL.Image.open",
"os.makedirs",
"auxiliary.file_explorer.FileExplorer",
"typer.Typer",
"os.path.join",
"os.getcwd",
"yaml.load",
"os.path.splitext",
"platform.system",
"typer.echo",
"PyPDF2.PdfFileWriter",
"PyPDF2.PdfFileReader",
"typer.Argument"
] | [((343, 356), 'typer.Typer', 'typer.Typer', ([], {}), '()\n', (354, 356), False, 'import typer\n'), ((380, 391), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (389, 391), False, 'import os\n'), ((400, 434), 'auxiliary.file_explorer.FileExplorer', 'FileExplorer', ([], {'home_dir': 'current_dir'}), '(home_dir=current_dir)\n', (412, 434), False, 'from auxiliary.file_explorer import FileExplorer\n'), ((584, 601), 'platform.system', 'platform.system', ([], {}), '()\n', (599, 601), False, 'import platform\n'), ((1353, 1413), 'typer.Argument', 'typer.Argument', (['"""generated"""'], {'help': 'generate.generate_pdf_help'}), "('generated', help=generate.generate_pdf_help)\n", (1367, 1413), False, 'import typer\n'), ((2005, 2030), 'os.path.exists', 'os.path.exists', (['image_dir'], {}), '(image_dir)\n', (2019, 2030), False, 'import os\n'), ((2976, 3018), 'typer.Argument', 'typer.Argument', (['""""""'], {'help': 'add_meta.meta_pdf'}), "('', help=add_meta.meta_pdf)\n", (2990, 3018), False, 'import typer\n'), ((3047, 3092), 'typer.Argument', 'typer.Argument', (['""""""'], {'help': 'add_meta.yaml_config'}), "('', help=add_meta.yaml_config)\n", (3061, 3092), False, 'import typer\n'), ((3119, 3169), 'typer.Argument', 'typer.Argument', (['"""results"""'], {'help': 'add_meta.save_name'}), "('results', help=add_meta.save_name)\n", (3133, 3169), False, 'import typer\n'), ((994, 1028), 'typer.echo', 'typer.echo', (['gen_dir.generating_dir'], {}), '(gen_dir.generating_dir)\n', (1004, 1028), False, 'import typer\n'), ((1111, 1150), 'typer.echo', 'typer.echo', (['gen_dir.directory_generated'], {}), '(gen_dir.directory_generated)\n', (1121, 1150), False, 'import typer\n'), ((2239, 2260), 'os.listdir', 'os.listdir', (['image_dir'], {}), '(image_dir)\n', (2249, 2260), False, 'import os\n'), ((2398, 2432), 'os.path.join', 'os.path.join', (['image_dir', 'file_name'], {}), '(image_dir, file_name)\n', (2410, 2432), False, 'import os\n'), ((2662, 2698), 'typer.echo', 'typer.echo', (['generate.images_generate'], {}), '(generate.images_generate)\n', (2672, 2698), False, 'import typer\n'), ((2779, 2812), 'typer.echo', 'typer.echo', (['generate.file_created'], {}), '(generate.file_created)\n', (2789, 2812), False, 'import typer\n'), ((2831, 2861), 'typer.echo', 'typer.echo', (['generate.no_images'], {}), '(generate.no_images)\n', (2841, 2861), False, 'import typer\n'), ((3992, 4027), 'yaml.load', 'yaml.load', (['yfile'], {'Loader': 'SafeLoader'}), '(yfile, Loader=SafeLoader)\n', (4001, 4027), False, 'import yaml\n'), ((4316, 4337), 'PyPDF2.PdfFileReader', 'PdfFileReader', (['pdf_in'], {}), '(pdf_in)\n', (4329, 4337), False, 'from PyPDF2 import PdfFileReader, PdfFileWriter\n'), ((4355, 4370), 'PyPDF2.PdfFileWriter', 'PdfFileWriter', ([], {}), '()\n', (4368, 4370), False, 'from PyPDF2 import PdfFileReader, PdfFileWriter\n'), ((4859, 4894), 'typer.echo', 'typer.echo', (['add_meta.metadata_added'], {}), '(add_meta.metadata_added)\n', (4869, 4894), False, 'import typer\n'), ((1038, 1052), 'os.makedirs', 'os.makedirs', (['f'], {}), '(f)\n', (1049, 1052), False, 'import os\n'), ((1187, 1221), 'typer.echo', 'typer.echo', (['gen_dir.folders_exists'], {}), '(gen_dir.folders_exists)\n', (1197, 1221), False, 'import typer\n'), ((2080, 2118), 'typer.echo', 'typer.echo', (['generate.missing_directory'], {}), '(generate.missing_directory)\n', (2090, 2118), False, 'import typer\n'), ((2282, 2309), 'os.path.splitext', 'os.path.splitext', (['file_name'], {}), '(file_name)\n', (2298, 2309), False, 'import os\n'), ((2455, 2470), 
'PIL.Image.open', 'Image.open', (['img'], {}), '(img)\n', (2465, 2470), False, 'from PIL import Image\n'), ((3785, 3820), 'typer.echo', 'typer.echo', (['add_meta.pdf_not_exists'], {}), '(add_meta.pdf_not_exists)\n', (3795, 3820), False, 'import typer\n'), ((4941, 4973), 'typer.echo', 'typer.echo', (['add_meta.pdf_corrupt'], {}), '(add_meta.pdf_corrupt)\n', (4951, 4973), False, 'import typer\n'), ((4153, 4184), 'typer.echo', 'typer.echo', (['add_meta.yaml_error'], {}), '(add_meta.yaml_error)\n', (4163, 4184), False, 'import typer\n'), ((4229, 4264), 'typer.echo', 'typer.echo', (['add_meta.yaml_not_exist'], {}), '(add_meta.yaml_not_exist)\n', (4239, 4264), False, 'import typer\n')] |
from django.shortcuts import render
from django.views import View, generic
from .services.predictor import get_results
class Index(View):
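    """Render prediction results; a POST lets the user pick the model and season."""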
template_name = 'predictions/index.html'
model = 'xgboost'
season = '16/17'
results = ''
leadboard = ''
def get(self, request):
self.results = get_results(self.season)
#self.results = predict_season(self.season, self.model)
        return render(request, self.template_name, {'results': self.results,
                                                    'leadboard': self.leadboard})
def post(self, request):
self.model = request.POST['model']
self.season = request.POST['season']
self.results = get_results(self.season)
        return render(request, self.template_name, {'results': self.results,
                                                    'leadboard': self.leadboard})
| [
"django.shortcuts.render"
] | [((419, 514), 'django.shortcuts.render', 'render', (['request', 'self.template_name', "{'results': self.results, 'leadboard': self.leadboard}"], {}), "(request, self.template_name, {'results': self.results, 'leadboard':\n self.leadboard})\n", (425, 514), False, 'from django.shortcuts import render\n'), ((744, 839), 'django.shortcuts.render', 'render', (['request', 'self.template_name', "{'results': self.results, 'leadboard': self.leadboard}"], {}), "(request, self.template_name, {'results': self.results, 'leadboard':\n self.leadboard})\n", (750, 839), False, 'from django.shortcuts import render\n')] |
import numpy as np
import pandas as pd
import pickle as pk
import random
from sklearn.metrics import accuracy_score
from sklearn import preprocessing
from sklearn.ensemble import RandomForestClassifier
# from sklearn.preprocessing import Imputer  # unused; Imputer was removed in sklearn 0.22
def read_csv(csv_path):
df = pd.read_csv(csv_path)
return df
def encode_label(Y):
    le = preprocessing.LabelEncoder()
    le.fit(Y)
    return le.transform(Y)
return cls
def split_test(num_data, percent):
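    """Randomly pick `percent` of the row indices to hold out as a test split."""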
select_id = random.sample(range(num_data), int(num_data*percent))
return select_id
def save_pk(data, pk_path):
with open(pk_path, 'wb') as f:
pk.dump(data, f)
def read_pk(pk_path):
with open(pk_path, 'rb') as f:
data = pk.load(f)
return data
def random_split_test_save(num_data, pk_path, ratio=0.1):
selected_id = split_test(num_data, ratio)
save_pk(selected_id, pk_path)
def list_to_float(data):
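    """Concatenate the decimal segments of a dotted IP address into one integer,
    e.g. ['192', '168', '0', '1'] -> 19216801."""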
power = 0
val = 0
data = data[::-1]
for d in data:
val += int(d)*(10**power)
power += len(d)
return val
def X_preprocessing(X, scenario):
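    """Convert IP-address columns to numbers and zero out NaN/'Infinity' cells."""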
# print ('X.shape = {}'.format(X.shape))
r = X.shape[0]
c = X.shape[1]
# convert ip to float
for i in range(r):
for j in [0, 2]:
if scenario == 'A':
X[i, j] = list_to_float(X[i, j].split('.'))
elif scenario == 'B':
pass
    # note: 'X == np.nan' is always False (NaN != NaN); use pd.isnull instead
    nan_idx = np.where(pd.isnull(X))
    print ('nan_idx = {}'.format(nan_idx))
    inf_idx = np.where(X == 'Infinity')
print ('inf_idx = {}'.format(inf_idx))
print('finite_idx = {}'.format(np.isfinite(X.all())))
X[nan_idx] = 0
X[inf_idx] = 0
return X
if __name__ == '__main__':
csv_path = '../../TorCSV/CSV/Scenario-A/merged_5s.csv'
df = read_csv(csv_path)
print ('read CSV !!!')
    df_mat = df.to_numpy()  # DataFrame.as_matrix() was removed in pandas 1.0
# get input X and label Y #
X = df_mat[:, :-1]
Y = df_mat[:, -1]
    X = X_preprocessing(X, 'A')  # scenario 'A' matches the Scenario-A CSV path above
# read the list idx to test #
pk_path = 'selected_id.pkl'
test_idx = read_pk(pk_path)
# print (test_idx)
# encode label #
le = preprocessing.LabelEncoder()
cls = le.fit(Y)
Y = le.transform(Y)
X_test = X[test_idx, :]
Y_test = Y[test_idx]
X_train = np.delete(X, test_idx, axis=0)
Y_train = np.delete(Y, test_idx, axis=0)
clf = RandomForestClassifier(max_depth=2, random_state=0)
clf.fit(X_train, Y_train)
Y_pred = clf.predict(X_test)
print ('accuracy = {}'.format(accuracy_score(Y_test, Y_pred)))
filename = 'randomForest.sav'
pk.dump(clf, open(filename, 'wb'))
| [
"sklearn.preprocessing.LabelEncoder",
"pickle.dump",
"pandas.read_csv",
"numpy.where",
"numpy.delete",
"pickle.load",
"sklearn.ensemble.RandomForestClassifier",
"sklearn.metrics.accuracy_score"
] | [((281, 302), 'pandas.read_csv', 'pd.read_csv', (['csv_path'], {}), '(csv_path)\n', (292, 302), True, 'import pandas as pd\n'), ((349, 377), 'sklearn.preprocessing.LabelEncoder', 'preprocessing.LabelEncoder', ([], {}), '()\n', (375, 377), False, 'from sklearn import preprocessing\n'), ((2115, 2143), 'sklearn.preprocessing.LabelEncoder', 'preprocessing.LabelEncoder', ([], {}), '()\n', (2141, 2143), False, 'from sklearn import preprocessing\n'), ((2256, 2286), 'numpy.delete', 'np.delete', (['X', 'test_idx'], {'axis': '(0)'}), '(X, test_idx, axis=0)\n', (2265, 2286), True, 'import numpy as np\n'), ((2301, 2331), 'numpy.delete', 'np.delete', (['Y', 'test_idx'], {'axis': '(0)'}), '(Y, test_idx, axis=0)\n', (2310, 2331), True, 'import numpy as np\n'), ((2343, 2394), 'sklearn.ensemble.RandomForestClassifier', 'RandomForestClassifier', ([], {'max_depth': '(2)', 'random_state': '(0)'}), '(max_depth=2, random_state=0)\n', (2365, 2394), False, 'from sklearn.ensemble import RandomForestClassifier\n'), ((641, 657), 'pickle.dump', 'pk.dump', (['data', 'f'], {}), '(data, f)\n', (648, 657), True, 'import pickle as pk\n'), ((732, 742), 'pickle.load', 'pk.load', (['f'], {}), '(f)\n', (739, 742), True, 'import pickle as pk\n'), ((1423, 1444), 'numpy.where', 'np.where', (['(X == np.nan)'], {}), '(X == np.nan)\n', (1431, 1444), True, 'import numpy as np\n'), ((1505, 1530), 'numpy.where', 'np.where', (["(X == 'Infinity')"], {}), "(X == 'Infinity')\n", (1513, 1530), True, 'import numpy as np\n'), ((2493, 2523), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['Y_test', 'Y_pred'], {}), '(Y_test, Y_pred)\n', (2507, 2523), False, 'from sklearn.metrics import accuracy_score\n')] |
from decimal import Decimal
import simplejson as json
import requests
from .converter import RatesNotAvailableError, DecimalFloatMismatchError
class BtcConverter(object):
"""
    Get bitcoin rates and conversions
"""
def __init__(self, force_decimal=False):
self._force_decimal = force_decimal
def _decode_rates(self, response, use_decimal=False):
if self._force_decimal or use_decimal:
decoded_data = json.loads(response.text, use_decimal=True)
else:
decoded_data = response.json()
return decoded_data
def get_latest_price(self, currency):
"""
        Get the latest price of one bitcoin in the given currency (1 BTC => X USD)
"""
url = 'https://api.coindesk.com/v1/bpi/currentprice/{}.json'.format(currency)
response = requests.get(url)
if response.status_code == 200:
data = response.json()
price = data.get('bpi').get(currency, {}).get('rate_float', None)
if self._force_decimal:
return Decimal(price)
return price
return None
def get_previous_price(self, currency, date_obj):
"""
        Get the price of one bitcoin on the given date
"""
start = date_obj.strftime('%Y-%m-%d')
end = date_obj.strftime('%Y-%m-%d')
url = (
'https://api.coindesk.com/v1/bpi/historical/close.json'
'?start={}&end={}¤cy={}'.format(
start, end, currency
)
)
response = requests.get(url)
if response.status_code == 200:
data = response.json()
price = data.get('bpi', {}).get(start, None)
if self._force_decimal:
return Decimal(price)
return price
raise RatesNotAvailableError("BitCoin Rates Source Not Ready For Given date")
def get_previous_price_list(self, currency, start_date, end_date):
"""
Get List of prices between two dates
"""
start = start_date.strftime('%Y-%m-%d')
end = end_date.strftime('%Y-%m-%d')
url = (
'https://api.coindesk.com/v1/bpi/historical/close.json'
'?start={}&end={}¤cy={}'.format(
start, end, currency
)
)
response = requests.get(url)
if response.status_code == 200:
data = self._decode_rates(response)
price_dict = data.get('bpi', {})
return price_dict
return {}
def convert_to_btc(self, amount, currency):
"""
        Convert X amount of the given currency to bitcoins
"""
if isinstance(amount, Decimal):
use_decimal = True
else:
use_decimal = self._force_decimal
url = 'https://api.coindesk.com/v1/bpi/currentprice/{}.json'.format(currency)
response = requests.get(url)
if response.status_code == 200:
data = response.json()
price = data.get('bpi').get(currency, {}).get('rate_float', None)
if price:
if use_decimal:
price = Decimal(price)
try:
converted_btc = amount/price
return converted_btc
except TypeError:
raise DecimalFloatMismatchError("convert_to_btc requires amount parameter is of type Decimal when force_decimal=True")
raise RatesNotAvailableError("BitCoin Rates Source Not Ready For Given date")
def convert_btc_to_cur(self, coins, currency):
"""
        Convert X bitcoins to an amount in the given currency
"""
if isinstance(coins, Decimal):
use_decimal = True
else:
use_decimal = self._force_decimal
url = 'https://api.coindesk.com/v1/bpi/currentprice/{}.json'.format(currency)
response = requests.get(url)
if response.status_code == 200:
data = response.json()
price = data.get('bpi').get(currency, {}).get('rate_float', None)
if price:
if use_decimal:
price = Decimal(price)
try:
converted_amount = coins * price
return converted_amount
except TypeError:
raise DecimalFloatMismatchError("convert_btc_to_cur requires coins parameter is of type Decimal when force_decimal=True")
raise RatesNotAvailableError("BitCoin Rates Source Not Ready For Given date")
def convert_to_btc_on(self, amount, currency, date_obj):
"""
Convert X amount to BTC based on given date rate
"""
if isinstance(amount, Decimal):
use_decimal = True
else:
use_decimal = self._force_decimal
start = date_obj.strftime('%Y-%m-%d')
end = date_obj.strftime('%Y-%m-%d')
url = (
'https://api.coindesk.com/v1/bpi/historical/close.json'
'?start={}&end={}¤cy={}'.format(
start, end, currency
)
)
response = requests.get(url)
if response.status_code == 200:
data = response.json()
price = data.get('bpi', {}).get(start, None)
if price:
if use_decimal:
price = Decimal(price)
try:
converted_btc = amount/price
return converted_btc
except TypeError:
raise DecimalFloatMismatchError("convert_to_btc_on requires amount parameter is of type Decimal when force_decimal=True")
raise RatesNotAvailableError("BitCoin Rates Source Not Ready For Given Date")
def convert_btc_to_cur_on(self, coins, currency, date_obj):
"""
Convert X BTC to valid currency amount based on given date
"""
if isinstance(coins, Decimal):
use_decimal = True
else:
use_decimal = self._force_decimal
start = date_obj.strftime('%Y-%m-%d')
end = date_obj.strftime('%Y-%m-%d')
url = (
'https://api.coindesk.com/v1/bpi/historical/close.json'
'?start={}&end={}¤cy={}'.format(
start, end, currency
)
)
response = requests.get(url)
if response.status_code == 200:
data = response.json()
price = data.get('bpi', {}).get(start, None)
if price:
if use_decimal:
price = Decimal(price)
try:
converted_btc = coins*price
return converted_btc
except TypeError:
raise DecimalFloatMismatchError("convert_btc_to_cur_on requires amount parameter is of type Decimal when force_decimal=True")
raise RatesNotAvailableError("BitCoin Rates Source Not Ready For Given Date")
def get_symbol(self):
"""
        Unicode symbol commonly used for bitcoin (the Thai baht sign)
"""
return "\u0E3F"
_Btc_Converter = BtcConverter()
get_btc_symbol = _Btc_Converter.get_symbol
convert_btc_to_cur_on = _Btc_Converter.convert_btc_to_cur_on
convert_to_btc_on = _Btc_Converter.convert_to_btc_on
convert_btc_to_cur = _Btc_Converter.convert_btc_to_cur
convert_to_btc = _Btc_Converter.convert_to_btc
get_latest_price = _Btc_Converter.get_latest_price
get_previous_price = _Btc_Converter.get_previous_price
get_previous_price_list = _Btc_Converter.get_previous_price_list
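
# Minimal usage sketch (an assumption, not shipped with this module; the
# currency codes and date below are illustrative):
#
# from datetime import date
# b = BtcConverter(force_decimal=True)
# b.get_latest_price('USD')                      # price of 1 BTC in USD
# b.convert_to_btc(Decimal('100'), 'USD')        # 100 USD expressed in BTC
# b.get_previous_price('EUR', date(2020, 1, 1))  # historical close price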
| [
"decimal.Decimal",
"simplejson.loads",
"requests.get"
] | [((821, 838), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (833, 838), False, 'import requests\n'), ((1550, 1567), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1562, 1567), False, 'import requests\n'), ((2339, 2356), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (2351, 2356), False, 'import requests\n'), ((2886, 2903), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (2898, 2903), False, 'import requests\n'), ((3889, 3906), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (3901, 3906), False, 'import requests\n'), ((5123, 5140), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (5135, 5140), False, 'import requests\n'), ((6341, 6358), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (6353, 6358), False, 'import requests\n'), ((449, 492), 'simplejson.loads', 'json.loads', (['response.text'], {'use_decimal': '(True)'}), '(response.text, use_decimal=True)\n', (459, 492), True, 'import simplejson as json\n'), ((1051, 1065), 'decimal.Decimal', 'Decimal', (['price'], {}), '(price)\n', (1058, 1065), False, 'from decimal import Decimal\n'), ((1759, 1773), 'decimal.Decimal', 'Decimal', (['price'], {}), '(price)\n', (1766, 1773), False, 'from decimal import Decimal\n'), ((3139, 3153), 'decimal.Decimal', 'Decimal', (['price'], {}), '(price)\n', (3146, 3153), False, 'from decimal import Decimal\n'), ((4142, 4156), 'decimal.Decimal', 'Decimal', (['price'], {}), '(price)\n', (4149, 4156), False, 'from decimal import Decimal\n'), ((5355, 5369), 'decimal.Decimal', 'Decimal', (['price'], {}), '(price)\n', (5362, 5369), False, 'from decimal import Decimal\n'), ((6573, 6587), 'decimal.Decimal', 'Decimal', (['price'], {}), '(price)\n', (6580, 6587), False, 'from decimal import Decimal\n')] |
# iPhone Manager bot by Oldmole
# No support will be provided, this code is provided "as is" without warranty of any kind, either express or implied. Use at your own risk.
# The use of the software and scripts is done at your own discretion and risk and with agreement that you will be solely responsible for any damage
# to your computer system or loss of data that results from such activities.
#
# If you like the bot and would like buy me a pint, DM @oldmole#3895 and ask for my Paypal info
# If you update the bot, please send me a copy! :-)
# GITHUB : https://github.com/sonofmole/iPhone_Manager
import sys
import yaml
import sqlite3
import hashlib
import discord
import psutil
from discord.ext import commands
import subprocess
import asyncio
from subprocess import Popen, PIPE
from sqlite3 import OperationalError
import time
import math
class IPhone:
def __init__(self, device_uuid, iphone_name, iphone_id):
self.device_uuid = device_uuid
self.iphone_name = iphone_name
self.iphone_id = iphone_id
# A list of the commands
command_list = [
"!sc {name of iphone} or !sc {iphone ID}",
"Screenshots an iphone and uploads that screenshot to discord\n",
"!list iphones", "Lists the name and ID of all the available iphones\n",
"!kill usb","Finds the proccess ID for usbmuxd and kill's it\n",
"!mac grab",
"Takes a screengrab of your Mac and uploads that screengrab to discord\n",
"!reboot {name of iphone} or !reboot {iphone ID}",
"Reboot's an iPhone\n",
"!reload {name of iphone} or !reload {iphone ID}",
"Find's and kill's the PID for an iPhone's Xcode. Pogo will start again\n",
"!log {name of iphone} or !log {iphone ID}",
"The bot will print out x lines from the device log file (x is set the config file)\n",
"!uplog {name of iphone} or !uplog {iphone ID}",
"The bot upload the device's log file with the last x lines (x is set the config file)\n",
"!help",
"Displays this list"
]
print("The iPhone Manager by Oldmole ready!")
try:
with open(r'config.yaml') as file:
documents = yaml.safe_load(file)
except FileNotFoundError:
print ("**** FULL STOP! ***** config.yaml NOT FOUND! ****")
sys.exit()
#logpath = documents.get("logpath")
loglines = documents.get("loglines")
uploglines = documents.get("uploglines")
token = documents.get("token")
role = documents.get("role")
channel = documents.get("channel")
iphone_list = []
log_list = documents.get("logpath")
database_list = documents.get("paths")
db_count = len(database_list)
db_error = 0
for dpath in database_list:
try:
connection = sqlite3.connect(dpath)
cursor = connection.cursor()
cursor.execute('SELECT * FROM device LIMIT 1,100')
rows = cursor.fetchall()
for row in rows:
uuid, name = row[0], row[1]
digest = hashlib.sha1((uuid + name).encode()).hexdigest()
iphone_list.append(IPhone(uuid, name, digest[:4]))
connection.commit()
except sqlite3.OperationalError:
db_error += 1
print ("*** Error reading from %s database" % dpath)
print ("*** Wrong database name or path? ***")
print ("\n")
finally:
connection.close()
if db_error == db_count:
print ("**** FULL STOP! ***** Can't read from any database ****")
sys.exit()
async def reboot_command(params, message):
params = ''.join(params)
for x in iphone_list:
if params == x.iphone_name or params == x.iphone_id:
a = x.iphone_name
b = x.device_uuid
cp = subprocess.run(["idevicediagnostics", "-u", b, "restart"], universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if cp.returncode == 0:
await message.channel.send("%s is rebooting" % a)
else:
await message.channel.send("Sorry, something has gone wrong... is the device connected?")
async def reload_command(params, message):
params = ''.join(params)
await message.channel.send("Can I get a reload")
for x in iphone_list:
if params == x.iphone_name or params == x.iphone_id:
a = x.iphone_name
b_device_uuid = x.device_uuid
for proc in psutil.process_iter():
try:
pinfo = proc.as_dict(attrs=['pid', 'name', 'cmdline'])
except psutil.NoSuchProcess:
pass
else:
if pinfo["name"] == "xcodebuild":
cmdline = " ".join(pinfo["cmdline"])
if (b_device_uuid) in cmdline:
p = psutil.Process(pinfo["pid"])
p.kill()
await message.channel.send("Yes, Done")
return
await message.channel.send("Something has gone wrong")
async def kill_command(params, message):
params = ''.join(params)
if params == "usb":
name = ""
await message.channel.send("Trying to finding and Kill usbmuxd. If I find it I will let you know")
for proc in psutil.process_iter():
try:
pinfo = proc.as_dict(attrs=['pid', 'name'])
except psutil.NoSuchProcess:
pass
else:
if pinfo["name"] == "usbmuxd":
p = psutil.Process(pinfo["pid"])
p.kill()
await message.channel.send("Found and Killed it")
return
else:
await message.channel.send("Sorry, something has gone wrong")
return
async def help_command(params,message):
params = ''.join(params)
if len(params) ==0:
await message.channel.send("You have these commands available: \n")
await message.channel.send("\n".join(command_list))
async def mac_command(params, message):
params = ''.join(params)
if params == "grab":
cp = subprocess.run(["screencapture", "mac.jpg"], universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if cp.returncode == 0:
await message.channel.send("Taken a Mac Screengrab")
await asyncio.sleep(1)
await message.channel.send(file=discord.File('mac.jpg'))
async def list_iphones_command(params,message):
params = ''.join(params)
if params != "iphones":
return
else:
await message.channel.send("You have these iphones in your list:")
name_list = []
for x in iphone_list:
name_and_id = " with an ID of ".join([x.iphone_name,x.iphone_id])
name_list.append(name_and_id)
await message.channel.send("\n".join(name_list))
async def screengrab_command(params, message):
params = ''.join(params)
for x in iphone_list:
if params == x.iphone_name or params == x.iphone_id:
a = x.iphone_name
b = x.device_uuid
cp = subprocess.run(["idevicescreenshot", "-u", b, "phone.jpg"], universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if cp.returncode == 0:
await message.channel.send("Taken a screenshot")
await asyncio.sleep(1)
await message.channel.send(file=discord.File('phone.jpg'))
return
else:
await message.channel.send("Sorry, something has gone wrong... is the device connected?")
return
await message.channel.send("Sorry, something has gone wrong... can't find this device")
async def get_log(params, message):
params = ''.join(params)
for x in iphone_list:
if params == x.iphone_name or params == x.iphone_id:
dname = x.iphone_name
dname_formatted = '"*'+dname+'*"'
cmd = "find . -name %s -name \"*full*\" -mtime -30m | head -1" %dname_formatted
for xpath in log_list:
logpath = xpath
try:
getlogfilename = subprocess.run(cmd,shell=True, check=True, stdout=subprocess.PIPE,stderr=subprocess.STDOUT, universal_newlines=True,cwd=logpath)
glfn_output = getlogfilename.stdout.strip('\n')
if glfn_output != (""):
break
except subprocess.CalledProcessError:
return
glfn_output = '"'+glfn_output+'"'
cmd2 = "tail -%d %s > tempfile" % (loglines ,glfn_output)
try:
readfile = subprocess.run(cmd2,shell=True, check=True, stdout=subprocess.PIPE,stderr=subprocess.STDOUT, universal_newlines=True,cwd=logpath)
rf_output2 = readfile.stdout.strip('\n')
except subprocess.CalledProcessError:
await message.channel.send("Sorry command failed, log not found")
return
            i, line_loop, line_loop1 = 0, 1, 10
log_loop = loglines / 10
log_loop = (math.ceil(log_loop))
while i < log_loop:
cmd3 = "sed -n %d,%dp tempfile > tempout" %(line_loop,line_loop1)
cmd3p = subprocess.run(cmd3,shell=True, check=True, stdout=subprocess.PIPE,stderr=subprocess.STDOUT, universal_newlines=True,cwd=logpath)
cmd4 = "head -10 tempout"
readline1 = subprocess.run(cmd4,shell=True, check=True, stdout=subprocess.PIPE,stderr=subprocess.STDOUT, universal_newlines=True,cwd=logpath)
rf_output3 = readline1.stdout.strip('\n')
                await asyncio.sleep(0.9)  # non-blocking pause between Discord sends
await message.channel.send("```%s```" % rf_output3)
i += 1
line_loop += 10
line_loop1 += 10
return
async def up_log(params, message):
params = ''.join(params)
for x in iphone_list:
if params == x.iphone_name or params == x.iphone_id:
dname = x.iphone_name
dname_formatted = '"*'+dname+'*"'
cmd = "find . -name %s -name \"*full*\" -mtime -30m | head -1" %dname_formatted
for xpath in log_list:
logpath = xpath
try:
getlogfilename = subprocess.run(cmd,shell=True, check=True, stdout=subprocess.PIPE,stderr=subprocess.STDOUT, universal_newlines=True,cwd=logpath)
glfn_output = getlogfilename.stdout.strip('\n')
if glfn_output != (""):
break
except subprocess.CalledProcessError:
return
glfn_output = '"'+glfn_output+'"'
logname = '"'+dname+'.log"'
cmd2 = "tail -%d %s > %s" % (uploglines ,glfn_output, logname)
try:
readfile = subprocess.run(cmd2,shell=True, check=True, stdout=subprocess.PIPE,stderr=subprocess.STDOUT, universal_newlines=True,cwd=logpath)
rf_output2 = readfile.stdout.strip('\n')
except subprocess.CalledProcessError:
await message.channel.send("Sorry command failed, log not found")
return
await message.channel.send("Uploading your last %d logfile line" %uploglines)
logname = ''+dname+'.log'
await message.channel.send(file=discord.File('%s/%s' % (logpath,logname)))
return
command_dict = {
"!sc": screengrab_command,
"!help": help_command,
"!reboot": reboot_command,
"!reload": reload_command,
"!mac": mac_command,
"!list" : list_iphones_command,
"!kill" : kill_command,
"!log" : get_log,
"!uplog" : up_log
}
async def check_command(message_text,message):
parts = message_text.split(" ",1)
cmd = parts[0]
params = parts[1:]
if cmd in command_dict:
await command_dict[cmd](params,message)
else:
return
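
# Dispatch example (device name is illustrative): a message "!sc kitchen"
# splits into cmd="!sc" and params=["kitchen"], so command_dict["!sc"]
# resolves to screengrab_command and is awaited with those params.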
client = discord.Client()
@client.event
async def on_ready():
activity = discord.Game(name="Taking Selfies")
await client.change_presence(status=discord.Status.online, activity=activity)
async def send_message(message, text):
    await message.channel.send(text)
@client.event
async def on_message(message):
if message.author == client.user:
return
if str(message.channel) != channel:
return
author_roles = map(lambda x: x.name, message.author.roles)
if role not in author_roles:
return
message_text = message.content
await check_command(message_text,message)
client.run(token)
| [
"math.ceil",
"sqlite3.connect",
"asyncio.sleep",
"discord.Game",
"subprocess.run",
"psutil.process_iter",
"psutil.Process",
"time.sleep",
"yaml.safe_load",
"sys.exit",
"discord.Client",
"discord.File"
] | [((10214, 10230), 'discord.Client', 'discord.Client', ([], {}), '()\n', (10228, 10230), False, 'import discord\n'), ((10281, 10316), 'discord.Game', 'discord.Game', ([], {'name': '"""Taking Selfies"""'}), "(name='Taking Selfies')\n", (10293, 10316), False, 'import discord\n'), ((2040, 2060), 'yaml.safe_load', 'yaml.safe_load', (['file'], {}), '(file)\n', (2054, 2060), False, 'import yaml\n'), ((2149, 2159), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2157, 2159), False, 'import sys\n'), ((2555, 2577), 'sqlite3.connect', 'sqlite3.connect', (['dpath'], {}), '(dpath)\n', (2570, 2577), False, 'import sqlite3\n'), ((4625, 4646), 'psutil.process_iter', 'psutil.process_iter', ([], {}), '()\n', (4644, 4646), False, 'import psutil\n'), ((5303, 5424), 'subprocess.run', 'subprocess.run', (["['screencapture', 'mac.jpg']"], {'universal_newlines': '(True)', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), "(['screencapture', 'mac.jpg'], universal_newlines=True,\n stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n", (5317, 5424), False, 'import subprocess\n'), ((3176, 3186), 'sys.exit', 'sys.exit', ([], {}), '()\n', (3184, 3186), False, 'import sys\n'), ((3386, 3521), 'subprocess.run', 'subprocess.run', (["['idevicediagnostics', '-u', b, 'restart']"], {'universal_newlines': '(True)', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), "(['idevicediagnostics', '-u', b, 'restart'],\n universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n", (3400, 3521), False, 'import subprocess\n'), ((3969, 3990), 'psutil.process_iter', 'psutil.process_iter', ([], {}), '()\n', (3988, 3990), False, 'import psutil\n'), ((6176, 6312), 'subprocess.run', 'subprocess.run', (["['idevicescreenshot', '-u', b, 'phone.jpg']"], {'universal_newlines': '(True)', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), "(['idevicescreenshot', '-u', b, 'phone.jpg'],\n universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n", (6190, 6312), False, 'import subprocess\n'), ((7846, 7865), 'math.ceil', 'math.ceil', (['log_loop'], {}), '(log_loop)\n', (7855, 7865), False, 'import math\n'), ((5514, 5530), 'asyncio.sleep', 'asyncio.sleep', (['(1)'], {}), '(1)\n', (5527, 5530), False, 'import asyncio\n'), ((7460, 7597), 'subprocess.run', 'subprocess.run', (['cmd2'], {'shell': '(True)', 'check': '(True)', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.STDOUT', 'universal_newlines': '(True)', 'cwd': 'logpath'}), '(cmd2, shell=True, check=True, stdout=subprocess.PIPE, stderr\n =subprocess.STDOUT, universal_newlines=True, cwd=logpath)\n', (7474, 7597), False, 'import subprocess\n'), ((7972, 8109), 'subprocess.run', 'subprocess.run', (['cmd3'], {'shell': '(True)', 'check': '(True)', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.STDOUT', 'universal_newlines': '(True)', 'cwd': 'logpath'}), '(cmd3, shell=True, check=True, stdout=subprocess.PIPE, stderr\n =subprocess.STDOUT, universal_newlines=True, cwd=logpath)\n', (7986, 8109), False, 'import subprocess\n'), ((8148, 8285), 'subprocess.run', 'subprocess.run', (['cmd4'], {'shell': '(True)', 'check': '(True)', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.STDOUT', 'universal_newlines': '(True)', 'cwd': 'logpath'}), '(cmd4, shell=True, check=True, stdout=subprocess.PIPE, stderr\n =subprocess.STDOUT, universal_newlines=True, cwd=logpath)\n', (8162, 8285), False, 'import subprocess\n'), ((8328, 8343), 'time.sleep', 'time.sleep', (['(0.9)'], {}), '(0.9)\n', (8338, 8343), False, 'import time\n'), ((9261, 9398), 'subprocess.run', 
'subprocess.run', (['cmd2'], {'shell': '(True)', 'check': '(True)', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.STDOUT', 'universal_newlines': '(True)', 'cwd': 'logpath'}), '(cmd2, shell=True, check=True, stdout=subprocess.PIPE, stderr\n =subprocess.STDOUT, universal_newlines=True, cwd=logpath)\n', (9275, 9398), False, 'import subprocess\n'), ((4800, 4828), 'psutil.Process', 'psutil.Process', (["pinfo['pid']"], {}), "(pinfo['pid'])\n", (4814, 4828), False, 'import psutil\n'), ((6399, 6415), 'asyncio.sleep', 'asyncio.sleep', (['(1)'], {}), '(1)\n', (6412, 6415), False, 'import asyncio\n'), ((7062, 7198), 'subprocess.run', 'subprocess.run', (['cmd'], {'shell': '(True)', 'check': '(True)', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.STDOUT', 'universal_newlines': '(True)', 'cwd': 'logpath'}), '(cmd, shell=True, check=True, stdout=subprocess.PIPE, stderr=\n subprocess.STDOUT, universal_newlines=True, cwd=logpath)\n', (7076, 7198), False, 'import subprocess\n'), ((8827, 8963), 'subprocess.run', 'subprocess.run', (['cmd'], {'shell': '(True)', 'check': '(True)', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.STDOUT', 'universal_newlines': '(True)', 'cwd': 'logpath'}), '(cmd, shell=True, check=True, stdout=subprocess.PIPE, stderr=\n subprocess.STDOUT, universal_newlines=True, cwd=logpath)\n', (8841, 8963), False, 'import subprocess\n'), ((5566, 5589), 'discord.File', 'discord.File', (['"""mac.jpg"""'], {}), "('mac.jpg')\n", (5578, 5589), False, 'import discord\n'), ((9704, 9746), 'discord.File', 'discord.File', (["('%s/%s' % (logpath, logname))"], {}), "('%s/%s' % (logpath, logname))\n", (9716, 9746), False, 'import discord\n'), ((4246, 4274), 'psutil.Process', 'psutil.Process', (["pinfo['pid']"], {}), "(pinfo['pid'])\n", (4260, 4274), False, 'import psutil\n'), ((6452, 6477), 'discord.File', 'discord.File', (['"""phone.jpg"""'], {}), "('phone.jpg')\n", (6464, 6477), False, 'import discord\n')] |
# Generated by Django 3.1.7 on 2021-03-09 16:24
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Material',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('series', models.CharField(db_column='系列', max_length=100, verbose_name='系列')),
('mark', models.CharField(db_column='牌号', max_length=100, verbose_name='牌号')),
('manufacturer', models.CharField(db_column='制造商', max_length=50, verbose_name='制造商')),
('link', models.CharField(db_column='链接', max_length=100, verbose_name='链接')),
('acronym', models.CharField(db_column='材料名称缩写', max_length=20, verbose_name='材料名称缩写')),
('material_type', models.CharField(db_column='材料类型', max_length=100, verbose_name='材料类型')),
('data_source', models.CharField(db_column='数据来源', max_length=100, verbose_name='数据来源')),
('last_modified_date', models.CharField(db_column='上次修改日期', max_length=50, verbose_name='上次修改日期')),
('test_date', models.CharField(db_column='测试日期', max_length=50, verbose_name='测试日期')),
('data_status', models.CharField(db_column='数据状态', max_length=50, verbose_name='数据状态')),
('material_id', models.CharField(db_column='材料ID', max_length=20, verbose_name='材料ID')),
('level_code', models.CharField(db_column='等级代码', max_length=50, verbose_name='等级代码')),
('vendor_code', models.CharField(db_column='供应商代码', max_length=50, verbose_name='供应商代码')),
('fibre_or_infill', models.CharField(db_column='纤维/填充物', max_length=100, verbose_name='纤维/填充物')),
],
),
]
| [
"django.db.models.AutoField",
"django.db.models.CharField"
] | [((304, 397), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (320, 397), False, 'from django.db import migrations, models\n'), ((423, 490), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""系列"""', 'max_length': '(100)', 'verbose_name': '"""系列"""'}), "(db_column='系列', max_length=100, verbose_name='系列')\n", (439, 490), False, 'from django.db import migrations, models\n'), ((518, 585), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""牌号"""', 'max_length': '(100)', 'verbose_name': '"""牌号"""'}), "(db_column='牌号', max_length=100, verbose_name='牌号')\n", (534, 585), False, 'from django.db import migrations, models\n'), ((621, 689), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""制造商"""', 'max_length': '(50)', 'verbose_name': '"""制造商"""'}), "(db_column='制造商', max_length=50, verbose_name='制造商')\n", (637, 689), False, 'from django.db import migrations, models\n'), ((717, 784), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""链接"""', 'max_length': '(100)', 'verbose_name': '"""链接"""'}), "(db_column='链接', max_length=100, verbose_name='链接')\n", (733, 784), False, 'from django.db import migrations, models\n'), ((815, 889), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""材料名称缩写"""', 'max_length': '(20)', 'verbose_name': '"""材料名称缩写"""'}), "(db_column='材料名称缩写', max_length=20, verbose_name='材料名称缩写')\n", (831, 889), False, 'from django.db import migrations, models\n'), ((926, 997), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""材料类型"""', 'max_length': '(100)', 'verbose_name': '"""材料类型"""'}), "(db_column='材料类型', max_length=100, verbose_name='材料类型')\n", (942, 997), False, 'from django.db import migrations, models\n'), ((1032, 1103), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""数据来源"""', 'max_length': '(100)', 'verbose_name': '"""数据来源"""'}), "(db_column='数据来源', max_length=100, verbose_name='数据来源')\n", (1048, 1103), False, 'from django.db import migrations, models\n'), ((1145, 1219), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""上次修改日期"""', 'max_length': '(50)', 'verbose_name': '"""上次修改日期"""'}), "(db_column='上次修改日期', max_length=50, verbose_name='上次修改日期')\n", (1161, 1219), False, 'from django.db import migrations, models\n'), ((1252, 1322), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""测试日期"""', 'max_length': '(50)', 'verbose_name': '"""测试日期"""'}), "(db_column='测试日期', max_length=50, verbose_name='测试日期')\n", (1268, 1322), False, 'from django.db import migrations, models\n'), ((1357, 1427), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""数据状态"""', 'max_length': '(50)', 'verbose_name': '"""数据状态"""'}), "(db_column='数据状态', max_length=50, verbose_name='数据状态')\n", (1373, 1427), False, 'from django.db import migrations, models\n'), ((1462, 1532), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""材料ID"""', 'max_length': '(20)', 'verbose_name': '"""材料ID"""'}), "(db_column='材料ID', max_length=20, verbose_name='材料ID')\n", (1478, 1532), False, 'from django.db import migrations, models\n'), ((1566, 1636), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""等级代码"""', 'max_length': '(50)', 'verbose_name': '"""等级代码"""'}), "(db_column='等级代码', 
max_length=50, verbose_name='等级代码')\n", (1582, 1636), False, 'from django.db import migrations, models\n'), ((1671, 1743), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""供应商代码"""', 'max_length': '(50)', 'verbose_name': '"""供应商代码"""'}), "(db_column='供应商代码', max_length=50, verbose_name='供应商代码')\n", (1687, 1743), False, 'from django.db import migrations, models\n'), ((1782, 1857), 'django.db.models.CharField', 'models.CharField', ([], {'db_column': '"""纤维/填充物"""', 'max_length': '(100)', 'verbose_name': '"""纤维/填充物"""'}), "(db_column='纤维/填充物', max_length=100, verbose_name='纤维/填充物')\n", (1798, 1857), False, 'from django.db import migrations, models\n')] |
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from django.db import migrations, models
import django.utils.timezone
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Announcement',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=50, verbose_name='title')),
('level', models.IntegerField(default=1, choices=[(1, 'General'), (2, 'Warning'), (3, 'Critical')])),
('content', models.TextField(verbose_name='content')),
('creation_date',
models.DateTimeField(default=django.utils.timezone.now, verbose_name='creation_date')),
('site_wide', models.BooleanField(default=False, verbose_name='site wide')),
('members_only', models.BooleanField(default=False, verbose_name='members only')),
('dismissal_type',
models.IntegerField(
default=2,
choices=[
(1, 'No Dismissals Allowed'),
(2, 'Session Only Dismissal'),
(3, 'Permanent Dismissal Allowed')])),
('publish_start',
models.DateTimeField(default=django.utils.timezone.now, verbose_name='publish_start')),
('publish_end', models.DateTimeField(null=True, verbose_name='publish_end', blank=True)),
('creator', models.ForeignKey(verbose_name='creator',
to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),
],
options={
'verbose_name': 'announcement',
'verbose_name_plural': 'announcements',
},
),
migrations.CreateModel(
name='Dismissal',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('dismissed_at', models.DateTimeField(default=django.utils.timezone.now)),
('announcement', models.ForeignKey(related_name='dismissals',
to='announcements.Announcement', on_delete=models.CASCADE)),
('user', models.ForeignKey(related_name='announcement_dismissals',
to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),
],
),
]
| [
"django.db.models.TextField",
"django.db.models.IntegerField",
"django.db.models.ForeignKey",
"django.db.models.BooleanField",
"django.db.models.AutoField",
"django.db.models.DateTimeField",
"django.db.migrations.swappable_dependency",
"django.db.models.CharField"
] | [((1012, 1069), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (1043, 1069), False, 'from django.db import migrations, models\n'), ((1206, 1299), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (1222, 1299), False, 'from django.db import migrations, models\n'), ((1324, 1377), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'verbose_name': '"""title"""'}), "(max_length=50, verbose_name='title')\n", (1340, 1377), False, 'from django.db import migrations, models\n'), ((1406, 1499), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)', 'choices': "[(1, 'General'), (2, 'Warning'), (3, 'Critical')]"}), "(default=1, choices=[(1, 'General'), (2, 'Warning'), (3,\n 'Critical')])\n", (1425, 1499), False, 'from django.db import migrations, models\n'), ((1526, 1566), 'django.db.models.TextField', 'models.TextField', ([], {'verbose_name': '"""content"""'}), "(verbose_name='content')\n", (1542, 1566), False, 'from django.db import migrations, models\n'), ((1623, 1713), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'django.utils.timezone.now', 'verbose_name': '"""creation_date"""'}), "(default=django.utils.timezone.now, verbose_name=\n 'creation_date')\n", (1643, 1713), False, 'from django.db import migrations, models\n'), ((1741, 1801), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""site wide"""'}), "(default=False, verbose_name='site wide')\n", (1760, 1801), False, 'from django.db import migrations, models\n'), ((1837, 1900), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""members only"""'}), "(default=False, verbose_name='members only')\n", (1856, 1900), False, 'from django.db import migrations, models\n'), ((1958, 2099), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(2)', 'choices': "[(1, 'No Dismissals Allowed'), (2, 'Session Only Dismissal'), (3,\n 'Permanent Dismissal Allowed')]"}), "(default=2, choices=[(1, 'No Dismissals Allowed'), (2,\n 'Session Only Dismissal'), (3, 'Permanent Dismissal Allowed')])\n", (1977, 2099), False, 'from django.db import migrations, models\n'), ((2286, 2376), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'django.utils.timezone.now', 'verbose_name': '"""publish_start"""'}), "(default=django.utils.timezone.now, verbose_name=\n 'publish_start')\n", (2306, 2376), False, 'from django.db import migrations, models\n'), ((2406, 2477), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'null': '(True)', 'verbose_name': '"""publish_end"""', 'blank': '(True)'}), "(null=True, verbose_name='publish_end', blank=True)\n", (2426, 2477), False, 'from django.db import migrations, models\n'), ((2508, 2608), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'verbose_name': '"""creator"""', 'to': 'settings.AUTH_USER_MODEL', 'on_delete': 'models.CASCADE'}), "(verbose_name='creator', to=settings.AUTH_USER_MODEL,\n on_delete=models.CASCADE)\n", (2525, 2608), False, 'from django.db import migrations, models\n'), ((2926, 3019), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': 
'(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (2942, 3019), False, 'from django.db import migrations, models\n'), ((3051, 3106), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'django.utils.timezone.now'}), '(default=django.utils.timezone.now)\n', (3071, 3106), False, 'from django.db import migrations, models\n'), ((3142, 3250), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'related_name': '"""dismissals"""', 'to': '"""announcements.Announcement"""', 'on_delete': 'models.CASCADE'}), "(related_name='dismissals', to=\n 'announcements.Announcement', on_delete=models.CASCADE)\n", (3159, 3250), False, 'from django.db import migrations, models\n'), ((3324, 3441), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'related_name': '"""announcement_dismissals"""', 'to': 'settings.AUTH_USER_MODEL', 'on_delete': 'models.CASCADE'}), "(related_name='announcement_dismissals', to=settings.\n AUTH_USER_MODEL, on_delete=models.CASCADE)\n", (3341, 3441), False, 'from django.db import migrations, models\n')] |
import re
from dataclasses import dataclass
from collections import defaultdict
from itertools import cycle
@dataclass
class Line:
x1: int
y1: int
x2: int
y2: int
def all_points(self):
stepx = 1 if self.x1 < self.x2 else -1
stepy = 1 if self.y1 < self.y2 else -1
if self.x1 == self.x2:
x = cycle((self.x1,))
else:
x = range(self.x1, self.x2+stepx, stepx)
if self.y1 == self.y2:
y = cycle((self.y1,))
else:
y = range(self.y1, self.y2+stepy, stepy)
yield from zip(x, y)
def __repr__(self):
return f"{self.x1},{self.y1} -> {self.x2},{self.y2}"
lines = []
with open("input.txt") as input_file:
for line in input_file:
match = re.match(r"(\d+),(\d+) -> (\d+),(\d+)", line)
coords = [int(n) for n in match.groups()]
lines.append(Line(*coords))
points = defaultdict(int)
for line in lines:
for point in line.all_points():
points[point] += 1
print(len(list(filter(lambda x: x > 1, points.values()))))
| [
"itertools.cycle",
"collections.defaultdict",
"re.match"
] | [((927, 943), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (938, 943), False, 'from collections import defaultdict\n'), ((785, 833), 're.match', 're.match', (['"""(\\\\d+),(\\\\d+) -> (\\\\d+),(\\\\d+)"""', 'line'], {}), "('(\\\\d+),(\\\\d+) -> (\\\\d+),(\\\\d+)', line)\n", (793, 833), False, 'import re\n'), ((349, 366), 'itertools.cycle', 'cycle', (['(self.x1,)'], {}), '((self.x1,))\n', (354, 366), False, 'from itertools import cycle\n'), ((490, 507), 'itertools.cycle', 'cycle', (['(self.y1,)'], {}), '((self.y1,))\n', (495, 507), False, 'from itertools import cycle\n')] |
# Generated by Django 3.0.7 on 2020-12-20 15:16
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('academica', '0002_auto_20201220_0117'),
]
operations = [
migrations.RemoveField(
model_name='clase',
name='nivel',
),
migrations.RemoveField(
model_name='clase_profesor',
name='clase',
),
migrations.RemoveField(
model_name='clase_profesor',
name='profesor',
),
migrations.RemoveField(
model_name='nota',
name='alumno',
),
migrations.RemoveField(
model_name='nota',
name='clase',
),
migrations.RemoveField(
model_name='nota',
name='periodo',
),
migrations.RemoveField(
model_name='perfil_profesor',
name='nivel',
),
migrations.RemoveField(
model_name='perfil_profesor',
name='usuario',
),
migrations.DeleteModel(
name='Alumno',
),
migrations.DeleteModel(
name='Clase',
),
migrations.DeleteModel(
name='Clase_Profesor',
),
migrations.DeleteModel(
name='Nota',
),
migrations.DeleteModel(
name='Perfil_Profesor',
),
migrations.DeleteModel(
name='Periodo',
),
]
| [
"django.db.migrations.DeleteModel",
"django.db.migrations.RemoveField"
] | [((229, 285), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""clase"""', 'name': '"""nivel"""'}), "(model_name='clase', name='nivel')\n", (251, 285), False, 'from django.db import migrations\n'), ((330, 395), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""clase_profesor"""', 'name': '"""clase"""'}), "(model_name='clase_profesor', name='clase')\n", (352, 395), False, 'from django.db import migrations\n'), ((440, 508), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""clase_profesor"""', 'name': '"""profesor"""'}), "(model_name='clase_profesor', name='profesor')\n", (462, 508), False, 'from django.db import migrations\n'), ((553, 609), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""nota"""', 'name': '"""alumno"""'}), "(model_name='nota', name='alumno')\n", (575, 609), False, 'from django.db import migrations\n'), ((654, 709), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""nota"""', 'name': '"""clase"""'}), "(model_name='nota', name='clase')\n", (676, 709), False, 'from django.db import migrations\n'), ((754, 811), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""nota"""', 'name': '"""periodo"""'}), "(model_name='nota', name='periodo')\n", (776, 811), False, 'from django.db import migrations\n'), ((856, 922), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""perfil_profesor"""', 'name': '"""nivel"""'}), "(model_name='perfil_profesor', name='nivel')\n", (878, 922), False, 'from django.db import migrations\n'), ((967, 1035), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""perfil_profesor"""', 'name': '"""usuario"""'}), "(model_name='perfil_profesor', name='usuario')\n", (989, 1035), False, 'from django.db import migrations\n'), ((1080, 1117), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""Alumno"""'}), "(name='Alumno')\n", (1102, 1117), False, 'from django.db import migrations\n'), ((1150, 1186), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""Clase"""'}), "(name='Clase')\n", (1172, 1186), False, 'from django.db import migrations\n'), ((1219, 1264), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""Clase_Profesor"""'}), "(name='Clase_Profesor')\n", (1241, 1264), False, 'from django.db import migrations\n'), ((1297, 1332), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""Nota"""'}), "(name='Nota')\n", (1319, 1332), False, 'from django.db import migrations\n'), ((1365, 1411), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""Perfil_Profesor"""'}), "(name='Perfil_Profesor')\n", (1387, 1411), False, 'from django.db import migrations\n'), ((1444, 1482), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""Periodo"""'}), "(name='Periodo')\n", (1466, 1482), False, 'from django.db import migrations\n')] |
from event.models import Event
from talk.models import Talk
def committee_member_context_processor(request):
if request.user.is_authenticated:
return {
"is_committee_member": request.user.has_perms(
("talk.add_vote", "talk.add_talkcomment")
)
}
else:
return {"is_committee_member": False}
def reservation_context_processor(request):
event = Event.objects.current_event()
if event.sessions_published and not event.is_started():
return {
"reservable_sessions": Talk.objects.filter(
event=event, track__isnull=False, spots__gt=0
).exists()
}
return {"reservable_sessions": False}
| [
"talk.models.Talk.objects.filter",
"event.models.Event.objects.current_event"
] | [((421, 450), 'event.models.Event.objects.current_event', 'Event.objects.current_event', ([], {}), '()\n', (448, 450), False, 'from event.models import Event\n'), ((563, 629), 'talk.models.Talk.objects.filter', 'Talk.objects.filter', ([], {'event': 'event', 'track__isnull': '(False)', 'spots__gt': '(0)'}), '(event=event, track__isnull=False, spots__gt=0)\n', (582, 629), False, 'from talk.models import Talk\n')] |
import code.PdfReader as PdfReaderModule
import code.ExcelReader as ExcelReader
import code.TemplateParser as TemplateParser
import code.PdfAnalyzer as PdfAnalyzer
import code.EmailSender as EmailSender
def getFileContent(fileName):
read_data = ""
with open(fileName, encoding="utf-8") as f:
read_data = f.read()
return read_data
def main():
    # TODO Do not forget to remind the user that [MONTH] should be updated before continuing!
print("If you use [MONTH] in you template, don't forget to update it in InvoiceSenderControl.xlsx")
input("Press Enter to continue... (close window with script to CANCEL)")
print("Parsing excel...")
excelReader = ExcelReader.ExcelReader()
excelContent = excelReader.getData()
excelSmtpData = excelReader.getSmtpData()
print("Parsing pdf...")
pdfReader = PdfReaderModule.PdfReader()
pdfFileNameToItsContentMap = pdfReader.getReadedInvoicesMap()
print("Searching pdfs...")
pdfAnalyzer = PdfAnalyzer.PdfAnalyzer(pdfFileNameToItsContentMap)
emailContentAttachmentList = []
for (invoiceText, emailAddress, templateName, keyWordMap, emailSubject, messageId) in excelContent:
invoicesToAttach = pdfAnalyzer.searchSentenceAndUpdateStats(invoiceText)
if len(invoicesToAttach) == 0:
print("No invoices for: " + emailAddress + " SKIPPING!")
continue
templateContent = getFileContent("emailTemplates/" + templateName)
if templateContent is None or templateContent == "":
print("template not existing or empty for: " + emailAddress + " SKIPPING!")
continue
templateParser = TemplateParser.TemplateParser(templateContent, keyWordMap)
emailFilledTemplate = templateParser.getFilledTemplate()
emailContentAttachmentList.append( (emailAddress, emailSubject, emailFilledTemplate, invoicesToAttach, messageId) )
print("What will be sent:")
for (emailAddress, emailSubject, emailFilledTemplate, invoicesToAttach, messageId) in emailContentAttachmentList:
print("To " + emailAddress + " will be send " + str(invoicesToAttach))
print("Checking if all PDFs can be delivered:")
pdfAnalyzer.dropStatistics()
input("Press Enter to send emails.. (close window with script to CANCEL)")
print("Sending emails...")
(smtpAddress, smtpPort, ownerEmail, ownerPassword) = excelSmtpData
emailSender = EmailSender.EmailSender(smtpAddress, smtpPort, ownerEmail, ownerPassword)
for (emailAddress, emailSubject, emailFilledTemplate, invoicesToAttach, messageId) in emailContentAttachmentList:
if messageId == None or messageId == "":
emailSender.sendEmail(emailAddress, emailSubject, emailFilledTemplate, invoicesToAttach)
print("Sent an email to " + emailAddress + " with " + str(invoicesToAttach))
else:
emailSender.replayEmail(emailAddress, emailSubject, emailFilledTemplate, invoicesToAttach, messageId)
print("Sent response to " + emailAddress + " with " + str(invoicesToAttach))
emailSender.close()
if __name__ == '__main__':
main()
| [
"code.PdfAnalyzer.PdfAnalyzer",
"code.ExcelReader.ExcelReader",
"code.EmailSender.EmailSender",
"code.TemplateParser.TemplateParser",
"code.PdfReader.PdfReader"
] | [((686, 711), 'code.ExcelReader.ExcelReader', 'ExcelReader.ExcelReader', ([], {}), '()\n', (709, 711), True, 'import code.ExcelReader as ExcelReader\n'), ((848, 875), 'code.PdfReader.PdfReader', 'PdfReaderModule.PdfReader', ([], {}), '()\n', (873, 875), True, 'import code.PdfReader as PdfReaderModule\n'), ((996, 1047), 'code.PdfAnalyzer.PdfAnalyzer', 'PdfAnalyzer.PdfAnalyzer', (['pdfFileNameToItsContentMap'], {}), '(pdfFileNameToItsContentMap)\n', (1019, 1047), True, 'import code.PdfAnalyzer as PdfAnalyzer\n'), ((2451, 2524), 'code.EmailSender.EmailSender', 'EmailSender.EmailSender', (['smtpAddress', 'smtpPort', 'ownerEmail', 'ownerPassword'], {}), '(smtpAddress, smtpPort, ownerEmail, ownerPassword)\n', (2474, 2524), True, 'import code.EmailSender as EmailSender\n'), ((1681, 1739), 'code.TemplateParser.TemplateParser', 'TemplateParser.TemplateParser', (['templateContent', 'keyWordMap'], {}), '(templateContent, keyWordMap)\n', (1710, 1739), True, 'import code.TemplateParser as TemplateParser\n')] |
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import os
import textwrap
import time
import bs4
from django.core.urlresolvers import get_resolver
from django.http import HttpResponse
from django.http import HttpResponseBadRequest
from django.http import HttpResponseNotFound
from django.http import HttpResponseNotModified
from django.http import HttpResponseRedirect
from django.http import HttpResponseServerError
from django.http import JsonResponse
from django.utils.cache import patch_response_headers
from django.utils.http import http_date
from django.views import View
from kolibri.core.content.utils.paths import get_content_storage_file_path
from zimply_core.zim_core import to_bytes
from zimply_core.zim_core import ZIMClient
# This provides an API similar to the zipfile view in Kolibri core's zip_wsgi.
# In the future, we should replace this with a change adding Zim file support
# in the same place: <https://github.com/endlessm/kolibri/pull/3>.
#
# We are avoiding Django REST Framework here in case this code needs to be
# moved to the alternative zip_wsgi server.
YEAR_IN_SECONDS = 60 * 60 * 24 * 365
SNIPPET_MAX_CHARS = 280
class ZimFileNotFoundError(Exception):
pass
class ZimFileReadError(Exception):
pass
class _ZimFileViewMixin(View):
zim_client_args = {"enable_search": False}
def dispatch(self, request, *args, **kwargs):
zim_filename = kwargs["zim_filename"]
try:
self.zim_file = self.__get_zim_file(zim_filename)
except ZimFileNotFoundError:
return HttpResponseNotFound("Zim file does not exist")
except ZimFileReadError:
return HttpResponseServerError("Error reading Zim file")
return super(_ZimFileViewMixin, self).dispatch(request, *args, **kwargs)
def __get_zim_file(self, zim_filename):
zim_file_path = get_content_storage_file_path(zim_filename)
if not os.path.exists(zim_file_path):
raise ZimFileNotFoundError()
# Raises RuntimeError
try:
# A ZIMClient requires an encoding (usually UTF-8). The
# auto_delete property only applies to an FTS index and will
# automagically recreate an index if any issues are detected.
zim_file = ZIMClient(
zim_file_path,
encoding="utf-8",
auto_delete=True,
**self.zim_client_args
)
except RuntimeError as error:
raise ZimFileReadError(str(error))
return zim_file
class _ImmutableViewMixin(View):
def dispatch(self, request, *args, **kwargs):
if request.method != "GET":
return super(_ImmutableViewMixin, self).dispatch(request, *args, **kwargs)
elif request.META.get("HTTP_IF_MODIFIED_SINCE"):
return HttpResponseNotModified()
else:
response = super(_ImmutableViewMixin, self).dispatch(
request, *args, **kwargs
)
if response.status_code == 200:
patch_response_headers(response, cache_timeout=YEAR_IN_SECONDS)
return response
class ZimIndexView(_ImmutableViewMixin, _ZimFileViewMixin, View):
http_method_names = (
"get",
"options",
)
def get(self, request, zim_filename):
main_page = self.zim_file.main_page
if main_page is None:
return HttpResponseNotFound("Article does not exist")
article_url = _zim_article_url(request, zim_filename, main_page.full_url)
return HttpResponseRedirect(article_url)
class ZimArticleView(_ImmutableViewMixin, _ZimFileViewMixin, View):
http_method_names = (
"get",
"options",
)
def get(self, request, zim_filename, zim_article_path):
try:
if not zim_article_path:
return self._get_response_for_article(self.zim_file.main_page)
else:
zim_article = self.zim_file.get_article(zim_article_path)
return self._get_response_for_article(zim_article)
except KeyError:
return HttpResponseNotFound("Article does not exist")
@staticmethod
def _get_response_for_article(article):
if article is None:
return HttpResponseNotFound("Article does not exist")
response = HttpResponse()
article_bytes = to_bytes(article.data, "utf-8")
response["Content-Length"] = len(article_bytes)
# Ensure the browser knows not to try byte-range requests, as we don't support them here
response["Accept-Ranges"] = "none"
response["Last-Modified"] = http_date(time.time())
response["Content-Type"] = article.mimetype
response.write(article_bytes)
return response
class ZimRandomArticleView(_ZimFileViewMixin, View):
http_method_names = (
"get",
"options",
)
def get(self, request, zim_filename):
article_url = _zim_article_url(
request, zim_filename, self.zim_file.random_article_url
)
return HttpResponseRedirect(article_url)
class ZimSearchView(_ZimFileViewMixin, View):
zim_client_args = {"enable_search": True}
MAX_RESULTS_MAXIMUM = 100
def get(self, request, zim_filename):
query = request.GET.get("query")
suggest = "suggest" in request.GET
start = request.GET.get("start", 0)
max_results = request.GET.get("max_results", 30)
if suggest:
snippet_length = None
else:
snippet_length = request.GET.get("snippet_length", SNIPPET_MAX_CHARS)
if not query:
return HttpResponseBadRequest('Missing "query"')
try:
start = int(start)
except ValueError:
return HttpResponseBadRequest('Invalid "start"')
try:
max_results = int(max_results)
except ValueError:
return HttpResponseBadRequest('Invalid "max_results"')
if max_results < 0 or max_results > self.MAX_RESULTS_MAXIMUM:
return HttpResponseBadRequest('Invalid "max_results"')
# This results in a list of SearchResult objects ordered by their
# score (lower is better is earlier in the list)...
if suggest:
count = self.zim_file.get_suggestions_results_count(query)
search = self.zim_file.suggest(query, start=start, end=start + max_results)
else:
count = self.zim_file.get_search_results_count(query)
search = self.zim_file.search(query, start=start, end=start + max_results)
articles = list(
self.__article_metadata(result, snippet_length) for result in search
)
return JsonResponse({"articles": articles, "count": count})
def __article_metadata(self, search_result, snippet_length):
full_url = search_result.namespace + "/" + search_result.url
result = {"title": search_result.title, "path": full_url}
if snippet_length:
zim_article = self.zim_file.get_article(full_url)
result["snippet"] = _html_snippet(
to_bytes(zim_article.data, "utf-8"), max_chars=snippet_length
)
return result
def _zim_article_url(request, zim_filename, zim_article_path):
# I don't know why I need to torment the resolver like this instead of
# using django.urls.reverse, but something is trying to add a language
# prefix incorrectly and causing an error.
resolver = get_resolver(None)
redirect_url = resolver.reverse(
"zim_article", zim_filename=zim_filename, zim_article_path=zim_article_path
)
return request.build_absolute_uri("/" + redirect_url)
def _html_snippet(html_str, max_chars):
soup = bs4.BeautifulSoup(html_str, "lxml")
snippet_text = _html_snippet_text(soup)
return textwrap.shorten(snippet_text, width=max_chars, placeholder="")
def _html_snippet_text(soup):
meta_description = soup.find("meta", attrs={"name": "description"})
if meta_description:
return meta_description.get("content")
article_elems = soup.find("body").find_all(["h2", "h3", "h4", "h5", "h6", "p"])
article_elems_text = "\n".join(elem.get_text() for elem in article_elems)
if len(article_elems_text) > 0:
return article_elems_text
return soup.find("body").get_text()
| [
"django.http.HttpResponseRedirect",
"os.path.exists",
"django.http.HttpResponseBadRequest",
"textwrap.shorten",
"django.http.JsonResponse",
"django.http.HttpResponse",
"django.core.urlresolvers.get_resolver",
"bs4.BeautifulSoup",
"django.http.HttpResponseServerError",
"kolibri.core.content.utils.p... | [((7576, 7594), 'django.core.urlresolvers.get_resolver', 'get_resolver', (['None'], {}), '(None)\n', (7588, 7594), False, 'from django.core.urlresolvers import get_resolver\n'), ((7833, 7868), 'bs4.BeautifulSoup', 'bs4.BeautifulSoup', (['html_str', '"""lxml"""'], {}), "(html_str, 'lxml')\n", (7850, 7868), False, 'import bs4\n'), ((7924, 7987), 'textwrap.shorten', 'textwrap.shorten', (['snippet_text'], {'width': 'max_chars', 'placeholder': '""""""'}), "(snippet_text, width=max_chars, placeholder='')\n", (7940, 7987), False, 'import textwrap\n'), ((1924, 1967), 'kolibri.core.content.utils.paths.get_content_storage_file_path', 'get_content_storage_file_path', (['zim_filename'], {}), '(zim_filename)\n', (1953, 1967), False, 'from kolibri.core.content.utils.paths import get_content_storage_file_path\n'), ((3612, 3645), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['article_url'], {}), '(article_url)\n', (3632, 3645), False, 'from django.http import HttpResponseRedirect\n'), ((4399, 4413), 'django.http.HttpResponse', 'HttpResponse', ([], {}), '()\n', (4411, 4413), False, 'from django.http import HttpResponse\n'), ((4438, 4469), 'zimply_core.zim_core.to_bytes', 'to_bytes', (['article.data', '"""utf-8"""'], {}), "(article.data, 'utf-8')\n", (4446, 4469), False, 'from zimply_core.zim_core import to_bytes\n'), ((5136, 5169), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['article_url'], {}), '(article_url)\n', (5156, 5169), False, 'from django.http import HttpResponseRedirect\n'), ((6794, 6846), 'django.http.JsonResponse', 'JsonResponse', (["{'articles': articles, 'count': count}"], {}), "({'articles': articles, 'count': count})\n", (6806, 6846), False, 'from django.http import JsonResponse\n'), ((1984, 2013), 'os.path.exists', 'os.path.exists', (['zim_file_path'], {}), '(zim_file_path)\n', (1998, 2013), False, 'import os\n'), ((2338, 2427), 'zimply_core.zim_core.ZIMClient', 'ZIMClient', (['zim_file_path'], {'encoding': '"""utf-8"""', 'auto_delete': '(True)'}), "(zim_file_path, encoding='utf-8', auto_delete=True, **self.\n zim_client_args)\n", (2347, 2427), False, 'from zimply_core.zim_core import ZIMClient\n'), ((3108, 3171), 'django.utils.cache.patch_response_headers', 'patch_response_headers', (['response'], {'cache_timeout': 'YEAR_IN_SECONDS'}), '(response, cache_timeout=YEAR_IN_SECONDS)\n', (3130, 3171), False, 'from django.utils.cache import patch_response_headers\n'), ((3467, 3513), 'django.http.HttpResponseNotFound', 'HttpResponseNotFound', (['"""Article does not exist"""'], {}), "('Article does not exist')\n", (3487, 3513), False, 'from django.http import HttpResponseNotFound\n'), ((4332, 4378), 'django.http.HttpResponseNotFound', 'HttpResponseNotFound', (['"""Article does not exist"""'], {}), "('Article does not exist')\n", (4352, 4378), False, 'from django.http import HttpResponseNotFound\n'), ((4712, 4723), 'time.time', 'time.time', ([], {}), '()\n', (4721, 4723), False, 'import time\n'), ((5715, 5756), 'django.http.HttpResponseBadRequest', 'HttpResponseBadRequest', (['"""Missing "query\\""""'], {}), '(\'Missing "query"\')\n', (5737, 5756), False, 'from django.http import HttpResponseBadRequest\n'), ((6131, 6178), 'django.http.HttpResponseBadRequest', 'HttpResponseBadRequest', (['"""Invalid "max_results\\""""'], {}), '(\'Invalid "max_results"\')\n', (6153, 6178), False, 'from django.http import HttpResponseBadRequest\n'), ((1623, 1670), 'django.http.HttpResponseNotFound', 'HttpResponseNotFound', (['"""Zim file does not 
exist"""'], {}), "('Zim file does not exist')\n", (1643, 1670), False, 'from django.http import HttpResponseNotFound\n'), ((1723, 1772), 'django.http.HttpResponseServerError', 'HttpResponseServerError', (['"""Error reading Zim file"""'], {}), "('Error reading Zim file')\n", (1746, 1772), False, 'from django.http import HttpResponseServerError\n'), ((2895, 2920), 'django.http.HttpResponseNotModified', 'HttpResponseNotModified', ([], {}), '()\n', (2918, 2920), False, 'from django.http import HttpResponseNotModified\n'), ((4175, 4221), 'django.http.HttpResponseNotFound', 'HttpResponseNotFound', (['"""Article does not exist"""'], {}), "('Article does not exist')\n", (4195, 4221), False, 'from django.http import HttpResponseNotFound\n'), ((5848, 5889), 'django.http.HttpResponseBadRequest', 'HttpResponseBadRequest', (['"""Invalid "start\\""""'], {}), '(\'Invalid "start"\')\n', (5870, 5889), False, 'from django.http import HttpResponseBadRequest\n'), ((5993, 6040), 'django.http.HttpResponseBadRequest', 'HttpResponseBadRequest', (['"""Invalid "max_results\\""""'], {}), '(\'Invalid "max_results"\')\n', (6015, 6040), False, 'from django.http import HttpResponseBadRequest\n'), ((7201, 7236), 'zimply_core.zim_core.to_bytes', 'to_bytes', (['zim_article.data', '"""utf-8"""'], {}), "(zim_article.data, 'utf-8')\n", (7209, 7236), False, 'from zimply_core.zim_core import to_bytes\n')] |
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 22 21:44:55 2017
@author: Mike
"""
import numpy as np
import cv2
import glob
import pickle
import matplotlib.pyplot as plt
from matplotlib.pyplot import *
import os
from scipy import stats
from moviepy.editor import VideoFileClip
from IPython.display import HTML
from camera_calibration import calibrate_camera, distortion_correct
from sobel_library import abs_sobel_image, sobel_mag_thresh, sobel_dir_thresh
from collections import deque
run_camera_cal = 1
#HLS Color space threshold filter
def color_binary(img, colorspace, color_thresh):
    if colorspace != 'HLS':
        # Only the HLS path is implemented; fail loudly instead of hitting a
        # NameError on an undefined binary_output further down.
        raise ValueError('Unsupported colorspace: {}'.format(colorspace))
    hls = cv2.cvtColor(img, cv2.COLOR_RGB2HLS)
    H = hls[:,:,0]
    L = hls[:,:,1]
    S = hls[:,:,2]
    binary_output = np.zeros_like(S)
    binary_output[(S > color_thresh[0]) & (S < color_thresh[1])] = 1
    return binary_output
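# Rationale (added note): the S (saturation) channel is a common choice for
# lane detection because painted lane lines tend to stay saturated under
# shadows and changing light, where a plain grayscale threshold flickers.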
#combine the thresholds for the color map and the gradient threshold
# send in an image with binary color scheme and binary gradient scheme
def bin_color_gradient(binary_gradient , binary_color):
binary_output = np.zeros_like(binary_gradient)
binary_output[((binary_gradient == 1) | (binary_color == 1))] = 1
# polys = np.array([[(350,720),(580,500),(800,500),(1000,720)]], dtype = np.int32)
polys = np.array([[(350,720),(580,500),(800,500),(900,720)]], dtype = np.int32)
cv2.fillPoly(binary_output, polys, 0, lineType=8, shift=0)
return binary_output
#Function to warp images to birds eye view
def warp(img,source_points, destination_points):
img_shape = (img.shape[1], img.shape[0])
src = np.float32(source_points)
dst = np.float32(destination_points)
M = cv2.getPerspectiveTransform(src,dst)
Minv = cv2.getPerspectiveTransform(dst,src)
warped = cv2.warpPerspective(img,M,img_shape, flags = cv2.INTER_LINEAR)
return warped, M, Minv
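# Returning Minv alongside M lets callers undo the transform; a minimal sketch
# of the round trip (polyfit() below does exactly this with the lane overlay):
#
#     warped, M, Minv = warp(img, src, dst)
#     unwarped = cv2.warpPerspective(warped, Minv, (img.shape[1], img.shape[0]))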
global left_fit_deque
global right_fit_deque
deque_size = 3
left_fit_deque = []
left_fit_deque = deque(maxlen = deque_size)
right_fit_deque = []
right_fit_deque = deque(maxlen = deque_size)
class Lane():
def __init__(self):
self.llm = []
self.rlm = []
mylane = Lane()
coeffs = []
C0_L = np.zeros(deque_size)
C1_L = np.zeros(deque_size)
C2_L = np.zeros(deque_size)
C0_R = np.zeros(deque_size)
C1_R = np.zeros(deque_size)
C2_R = np.zeros(deque_size)
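# The zero-filled arrays above hold one coefficient each (C0 = quadratic,
# C1 = linear, C2 = constant term) for the last `deque_size` fits, so the
# lane polynomials can be smoothed across frames. A minimal sketch of the
# same idea with made-up numbers (not taken from a real frame):
#
#     fits = deque(maxlen=3)
#     fits.append(np.array([1.0e-4, -0.20, 300.0]))
#     fits.append(np.array([1.2e-4, -0.22, 302.0]))
#     smoothed = np.mean(list(fits), axis=0)  # element-wise coefficient mean
#
# Caveat: until the deque is full, the untouched zeros in C0_*/C1_*/C2_* are
# still averaged in below, which biases the first few frames toward zero.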
def polyfit(warped_image, orig_img, Minv):
#def polyfit(warped_image):
# print('Initiating line overlay onto binary warped image')
# Assuming you have created a warped binary image called "binary_warped"
# Take a histogram of the bottom half of the image
histogram = np.sum(warped_image[warped_image.shape[0]//2:,:], axis=0)
#histogram = np.sum(binary_warped[binary_warped.shape[0]/2:,:], axis=0)
# Create an output image to draw on and visualize the result
out_img = np.dstack((warped_image, warped_image, warped_image))*255
# Find the peak of the left and right halves of the histogram
# These will be the starting point for the left and right lines
midpoint = np.int(histogram.shape[0]/2)
leftx_base = np.argmax(histogram[:midpoint])
rightx_base = np.argmax(histogram[midpoint:]) + midpoint
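    # Example of the idea: if the bottom-half column histogram peaks at x=310
    # (left of midpoint) and x=980 (right of midpoint), those two columns seed
    # the first pair of sliding search windows below.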
# Choose the number of sliding windows
nwindows = 9
# Set height of windows
window_height = np.int(warped_image.shape[0]/nwindows)
# Identify the x and y positions of all nonzero pixels in the image
nonzero = warped_image.nonzero()
nonzeroy = np.array(nonzero[0])
nonzerox = np.array(nonzero[1])
# Current positions to be updated for each window
leftx_current = leftx_base
rightx_current = rightx_base
# Set the width of the windows +/- margin
margin = 100
# Set minimum number of pixels found to recenter window
minpix = 50
# Create empty lists to receive left and right lane pixel indices
left_lane_inds = []
right_lane_inds = []
# Step through the windows one by one
for window in range(nwindows):
# Identify window boundaries in x and y (and right and left)
win_y_low = warped_image.shape[0] - (window+1)*window_height
win_y_high = warped_image.shape[0] - window*window_height
win_xleft_low = leftx_current - margin
win_xleft_high = leftx_current + margin
win_xright_low = rightx_current - margin
win_xright_high = rightx_current + margin
# Draw the windows on the visualization image
cv2.rectangle(out_img,(win_xleft_low,win_y_low),(win_xleft_high,win_y_high),
(0,255,0), 2)
cv2.rectangle(out_img,(win_xright_low,win_y_low),(win_xright_high,win_y_high),
(0,255,0), 2)
# Identify the nonzero pixels in x and y within the window
good_left_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) &
(nonzerox >= win_xleft_low) & (nonzerox < win_xleft_high)).nonzero()[0]
good_right_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) &
(nonzerox >= win_xright_low) & (nonzerox < win_xright_high)).nonzero()[0]
# Append these indices to the lists
left_lane_inds.append(good_left_inds)
right_lane_inds.append(good_right_inds)
# If you found > minpix pixels, recenter next window on their mean position
if len(good_left_inds) > minpix:
leftx_current = np.int(np.mean(nonzerox[good_left_inds]))
if len(good_right_inds) > minpix:
rightx_current = np.int(np.mean(nonzerox[good_right_inds]))
# Concatenate the arrays of indices
left_lane_inds = np.concatenate(left_lane_inds)
right_lane_inds = np.concatenate(right_lane_inds)
# Extract left and right line pixel positions
leftx = nonzerox[left_lane_inds]
lefty = nonzeroy[left_lane_inds]
rightx = nonzerox[right_lane_inds]
righty = nonzeroy[right_lane_inds]
# Fit a second order polynomial to each
left_fit = np.polyfit(lefty, leftx, 2)
#Store the left poly coefficient in a deque for later use
left_fit_deque.append(left_fit)
    # Take the deque of polynomial data and extract the three coefficients, average them for stability
for idx, coeffs in enumerate(left_fit_deque):
C0_L[idx] = coeffs[0]
C1_L[idx] = coeffs[1]
C2_L[idx] = coeffs[2]
average_C0_L = np.mean(C0_L)
average_C1_L = np.mean(C1_L)
average_C2_L = np.mean(C2_L)
left_fit[0] = average_C0_L
left_fit[1] = average_C1_L
left_fit[2] = average_C2_L
right_fit = np.polyfit(righty, rightx, 2)
#Store the left poly coefficient in a deque for later use
right_fit_deque.append(right_fit)
    # Take the deque of polynomial data and extract the three coefficients, average them for stability
for idx, coeffs in enumerate(right_fit_deque):
C0_R[idx] = coeffs[0]
C1_R[idx] = coeffs[1]
C2_R[idx] = coeffs[2]
average_C0_R = np.mean(C0_R)
average_C1_R = np.mean(C1_R)
average_C2_R = np.mean(C2_R)
right_fit[0] = average_C0_R
right_fit[1] = average_C1_R
right_fit[2] = average_C2_R
# Generate x and y values for plotting
ploty = np.linspace(0, warped_image.shape[0]-1, warped_image.shape[0] )
left_fitx = left_fit[0]*ploty**2 + left_fit[1]*ploty + left_fit[2]
# left_fitx = left_fit_deque[0]*ploty**2 + left_fit_deque[1]*ploty + left_fit[2]
right_fitx = right_fit[0]*ploty**2 + right_fit[1]*ploty + right_fit[2]
out_img[nonzeroy[left_lane_inds], nonzerox[left_lane_inds]] = [255, 0, 0]
out_img[nonzeroy[right_lane_inds], nonzerox[right_lane_inds]] = [0, 0, 255]
# plt.figure(figsize = (20,10))
# plt.imshow(out_img)
# plt.plot(left_fitx, ploty, color='blue')
# plt.plot(right_fitx, ploty, color='red')
# plt.xlim(0, 1280)
# plt.ylim(720, 0)
# plt.show()
# Create an image to draw the lines on
warp_zero = np.zeros_like(warped_image).astype(np.uint8)
color_warp = np.dstack((warp_zero, warp_zero, warp_zero))
# Recast the x and y points into usable format for cv2.fillPoly()
pts_left = np.array([np.transpose(np.vstack([left_fitx, ploty]))])
pts_right = np.array([np.flipud(np.transpose(np.vstack([right_fitx, ploty])))])
pts = np.hstack((pts_left, pts_right))
# Draw the lane onto the warped blank image
cv2.fillPoly(color_warp, np.int_([pts]), (0,255, 0))
# =============================================================================
# In this section we calculate the radius of curvature for the warped lines
# =============================================================================
# Define y-value where we want radius of curvature
# I'll choose the maximum y-value, corresponding to the bottom of the image
y_eval = np.max(ploty)
left_curverad = ((1 + (2*left_fit[0]*y_eval + left_fit[1])**2)**1.5) / np.absolute(2*left_fit[0])
right_curverad = ((1 + (2*right_fit[0]*y_eval + right_fit[1])**2)**1.5) / np.absolute(2*right_fit[0])
# print(left_curverad, right_curverad)
# Example values: 1926.74 1908.48
# Define conversions in x and y from pixels space to meters
ym_per_pix = 30/720 # meters per pixel in y dimension
xm_per_pix = 3.7/700 # meters per pixel in x dimension
# Fit new polynomials to x,y in world space
left_fit_cr = np.polyfit(ploty*ym_per_pix, left_fitx*xm_per_pix, 2)
right_fit_cr = np.polyfit(ploty*ym_per_pix, right_fitx*xm_per_pix, 2)
# Calculate the new radii of curvature
left_curverad = ((1 + (2*left_fit_cr[0]*y_eval*ym_per_pix + left_fit_cr[1])**2)**1.5) / np.absolute(2*left_fit_cr[0])
right_curverad = ((1 + (2*right_fit_cr[0]*y_eval*ym_per_pix + right_fit_cr[1])**2)**1.5) / np.absolute(2*right_fit_cr[0])
# Now our radius of curvature is in meters
# print(left_curverad, 'm', right_curverad, 'm')
# Example values: 632.1 m 626.2 m
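    # The computation above is the standard radius-of-curvature formula for a
    # curve expressed as x(y) = A*y**2 + B*y + C:
    #
    #     R = (1 + (2*A*y + B)**2)**1.5 / abs(2*A)
    #
    # Refitting in world space (meters) first is what makes the reported
    # radius physical rather than in pixel units.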
# =============================================================================
# Calculate the position from center for the vehicle relative to the left lane
# =============================================================================
# Warp the blank back to original image space using inverse perspective matrix (Minv)
newwarp = cv2.warpPerspective(color_warp, Minv, (orig_img.shape[1], orig_img.shape[0]))
font = cv2.FONT_HERSHEY_SIMPLEX
cv2.putText(newwarp,'Recording: project_video',(10,50), font, 1,(255,0,0),3,cv2.LINE_AA)
cv2.putText(newwarp,'Road Radius of curvature: {} km'.format(left_curverad/1000),(10,100), font, 1,(255,0,0),3,cv2.LINE_AA)
# =============================================================================
# Add the Section for fitting the radius of curvature to the image
# =============================================================================
vehicle_center = newwarp.shape[1]/2 #assuming that the video feed is from veh center
y_pixels = np.arange(newwarp.shape[0]-10, newwarp.shape[0]+1)
# y_pixels = 719
lx_loc = left_fit_cr[0]*y_pixels**2+left_fit_cr[1]*y_pixels+left_fit_cr[2]
rx_loc = right_fit_cr[0]*y_pixels**2+right_fit_cr[1]*y_pixels+right_fit_cr[2]
lane_center_pixel = (right_fitx[0] + left_fitx[0])/2
vehicle_offset = (vehicle_center - lane_center_pixel)*xm_per_pix
# pct_difference = vehicle_offset/
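    # Rough sanity check on the arithmetic (illustrative numbers only): with
    # xm_per_pix = 3.7/700 ~ 0.00529 m/px, a 40 px gap between image center and
    # lane center reports as roughly 0.21 m of offset. Note that left_fitx[0]
    # and right_fitx[0] are evaluated at ploty[0] = 0, the *top* row of the
    # warped image; sampling the bottom row ([-1]) would arguably better match
    # the vehicle's position.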
if vehicle_offset > 0:
cv2.putText(newwarp,'Ego Vehicle is {} meters right of lane center'.format(vehicle_offset),(10,150), font, 1,(255,0,0),3,cv2.LINE_AA)
if vehicle_offset < 0:
cv2.putText(newwarp,'Ego Vehicle is {} meters left of lane center'.format(vehicle_offset),(10,150), font, 1,(255,0,0),3,cv2.LINE_AA)
if vehicle_offset == 0:
cv2.putText(newwarp,'Ego Vehicle is directly on center!! Great job!',(10,150), font, 1,(255,0,0),3,cv2.LINE_AA)
# =============================================================================
# This plots the lane line data for debugging vehicle center
# =============================================================================
# plt.plot(lx_loc,y_pixels,'x')
# plt.title('Left Lane Line Pixel Locations')
# plt.show()
#
# plt.plot(rx_loc,y_pixels,'x')
# plt.title('Right Lane Line Pixel Locations')
# plt.show()
#
# plt.plot(left_fitx,'x')
# plt.plot(right_fitx,'o')
#    plt.title('Left Lane and Right Lane overlay, horizontal dir is "y" in image space')
# plt.show()
#
# plt.figure(figsize = (15,15))
# plt.imshow(newwarp)
# plt.show()
#
# Combine the result with the original image
#img = cv2.imread(img)
img = cv2.cvtColor(orig_img,cv2.COLOR_BGR2RGB)
# result = cv2.addWeighted(orig_img, 1, newwarp, 0.3, 0)
result = cv2.addWeighted(img, 1, newwarp, 0.3, 0)
    # This is the final overlaid image with the text on it
# plt.figure(figsize = (10,10))
# plt.title('final result')
# plt.imshow(result)
# plt.show()
return result, left_fitx, right_fitx, ploty
if run_camera_cal == 1:
#--------------------- Call functions and initiate camera cal and distortion correct-----------------------
#This section calls the camera calibration function
# Call the function to parse through the calibration image array and return
#the base object point, corners and a grascale image for reference size
#***** TURN THIS ON LATER!!!!!! when you want to calibrate the camera
# Make a list of calibration images
image_dir = "C:\\Users\\mrpal\\Documents\\Projects\\CarND-Advanced-Lane-Lines\\camera_cal\\"
images = os.listdir('camera_cal')
corners, imgpoints, objpoints, gray = calibrate_camera(image_dir, images)
##Generate the distortion coefficients and camera matrix, trans vector and rot vector
print('Generating distortion coefficients and camera matrix parameters')
ret, mtx, dist, rvecs, tvecs = cv2.calibrateCamera(objpoints, imgpoints,gray.shape[::-1], None, None)
#Undistort the images in the test_images folder
image_dir = "C:\\Users\\mrpal\\Documents\\Projects\\CarND-Advanced-Lane-Lines\\test_images\\"
images = os.listdir('test_images')
print('Selected image directory is: {} '.format(image_dir))
print('The images in the directory are: {}' .format(images))
distortion_corrected = distortion_correct(image_dir, images, mtx, dist)
cv2.destroyAllWindows()
#--------------------- Call functions to initiate a pipeline for image processing----------------------
image_dir = "C:\\Users\\mrpal\\Documents\\Projects\\CarND-Advanced-Lane-Lines\\test_images\\"
images = os.listdir('test_images')
print('Selected image directory is: {} '.format(image_dir))
print('The images in the directory are: {} \n' .format(images))
#print('The images in the directory are: {} \n' .format(images_new))
sobel_kernel = 9
#mag_thresh = [30,255]
#keep it
grad_threshold = [50,150]
sobel_mag = [0,255]
#distortion correct
if len(glob.glob('./test_images/*Distortion*.jpg')) == 0:
print('there are no distortion corrected images in the directory, let us create them')
distortion_corrected = distortion_correct(image_dir, images, mtx, dist)
images = glob.glob('./test_images/*Distortion*.jpg')
def process_image(images):
# for idx, fname in enumerate(images):
img = cv2.cvtColor(images, cv2.COLOR_BGR2RGB)
# img = cv2.cvtColor(images, cv2.COLOR_RGB2BGR)
# orig_image = img
# img = cv2.imread(fname)
# plt.figure(figsize = (20,10))
# plt.imshow(img)
# plt.show()
#pull in the absolute binary gradient data in X and Y
gradx_binary = abs_sobel_image(img,'x',grad_threshold , sobel_kernel)
# plt.figure(figsize = (20,10))
# plt.title('Binary Gradient Thresholding in X direction')
# plt.imshow(gradx_binary, cmap='gray')
# plt.show()
grady_binary = abs_sobel_image(img,'y',grad_threshold , sobel_kernel)
# plt.figure(figsize = (20,10))
# plt.title('Binary Gradient Thresholding in Y direction')
# plt.imshow(grady_binary, cmap='gray')
# plt.show()
#Calculate the Sobel direction gradient binary threshold
dir_binary = sobel_dir_thresh(img, sobel_kernel=15, thresh=(0.6, np.pi/2))
# print(dir_binary.dtype)
# plt.figure(figsize = (20,10))
# plt.title('Binary Sobel (Absolute) Gradient Thresholding')
# plt.imshow(dir_binary, cmap = 'gray')
# mag_binary = sobel_mag_thresh(img, sobel_kernel, mag_thresh= (50, 150))
mag_binary = sobel_mag_thresh(img, sobel_kernel, mag_thresh= (80, 150))
# plt.figure(figsize = (20,10))
# plt.title('Binary Gradient Magnitude Thresholding')
# plt.imshow(mag_binary, cmap='gray')
# mag_binary
#Combine the gradient thresholds into a coherent image, there still may be gaps where color thresholding comes in
combined_binary = np.zeros_like(dir_binary)
# combined_binary[(gradx_binary == 1) | ((mag_binary == 1) | (dir_binary == 1))] = 1
combined_binary[(gradx_binary == 1) | ((mag_binary == 1) & (dir_binary == 1))] = 1
#combined_binary[((gradx_binary == 1) & (grady_binary == 1)) | ((mag_binary == 1) & (dir_binary == 1))] = 1
# plt.figure(figsize = (20,10))
# plt.title('Combined Binary Gradient Thresholding (X,Mag,Dir)')
# plt.imshow(combined_binary, cmap = 'gray')
# plt.show()
binary_color = color_binary(img, 'HLS', color_thresh = [80,255])
# binary_color = color_binary(img, 'HLS', color_thresh = [80,180])
# plt.figure(figsize = (20,10))
# plt.title('Binary Color Thresholding in HLS')
# plt.imshow(binary_color, cmap = 'gray')
# plt.show()
#Visualize the overall combined thresholding on the test images
color_grad_combined = bin_color_gradient(combined_binary , binary_color)
# plt.figure(figsize = (20,10))
# plt.title('Combined color and gradient mag thresholding')
# plt.imshow(color_grad_combined, cmap = 'gray')
# plt.show()
img_size = img.shape
offset = 100
src = np.float32([(200, 720), (580, 480), (720, 480), (1050, 720)])
dst = np.float32([(280, 720), (400, 190), (920, 190), (960, 720)])
destination_points = np.float32([[offset, img_size[1]-offset], [img_size[0]-offset, img_size[1]-offset],
[img_size[0]-offset, offset],
[offset, offset]])
source_points = np.float32(([450,780], [680, 1050], [680,250], [450, 500]))
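    # Added note: these alternate source/destination point sets (and the
    # commented-out warp call below) are unused; the warp() call that follows
    # uses the src/dst arrays defined just above.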
binary_warped, M, Minv = warp(color_grad_combined,src, dst)
#warped_image_test = warp(img,source_points, destination_points)
# plt.figure(figsize = (20,10))
# plt.imshow(binary_warped, cmap='gray')
# plt.show()
#
#
# import numpy as np
# plt.figure(figsize = (20,10))
# histogram = np.sum(binary_warped[binary_warped.shape[0]//2:,:], axis=0)
# plt.plot(histogram)
# plt.show()
#
#Need the line data to be fed back out
out, left_fitx, right_fitx, ploty = polyfit(binary_warped,img, Minv)
# out = cv2.cvtColor(out, cv2.COLOR_BGR2RGB)
return out
#######--------------------------
##os.system("ffmpeg -i project_video.mp4 -vf fps=15/1 out_%03d.jpg'
Test_Video_dir = os.listdir("test_videos/")
video_output = 'project_video_output.mp4'
clip1 = VideoFileClip("test_videos/project_video.mp4").subclip(13,18)
#clip1 = VideoFileClip("test_videos/project_video.mp4")
clip = clip1.fl_image(process_image) #NOTE: this function expects color images!!
clip.write_videofile(video_output, audio=False)
#-------------------------------------------
| [
"cv2.rectangle",
"numpy.polyfit",
"numpy.hstack",
"numpy.array",
"cv2.warpPerspective",
"sobel_library.abs_sobel_image",
"cv2.destroyAllWindows",
"cv2.calibrateCamera",
"camera_calibration.calibrate_camera",
"numpy.arange",
"numpy.mean",
"os.listdir",
"collections.deque",
"camera_calibrati... | [((2065, 2089), 'collections.deque', 'deque', ([], {'maxlen': 'deque_size'}), '(maxlen=deque_size)\n', (2070, 2089), False, 'from collections import deque\n'), ((2133, 2157), 'collections.deque', 'deque', ([], {'maxlen': 'deque_size'}), '(maxlen=deque_size)\n', (2138, 2157), False, 'from collections import deque\n'), ((2315, 2335), 'numpy.zeros', 'np.zeros', (['deque_size'], {}), '(deque_size)\n', (2323, 2335), True, 'import numpy as np\n'), ((2344, 2364), 'numpy.zeros', 'np.zeros', (['deque_size'], {}), '(deque_size)\n', (2352, 2364), True, 'import numpy as np\n'), ((2373, 2393), 'numpy.zeros', 'np.zeros', (['deque_size'], {}), '(deque_size)\n', (2381, 2393), True, 'import numpy as np\n'), ((2402, 2422), 'numpy.zeros', 'np.zeros', (['deque_size'], {}), '(deque_size)\n', (2410, 2422), True, 'import numpy as np\n'), ((2431, 2451), 'numpy.zeros', 'np.zeros', (['deque_size'], {}), '(deque_size)\n', (2439, 2451), True, 'import numpy as np\n'), ((2460, 2480), 'numpy.zeros', 'np.zeros', (['deque_size'], {}), '(deque_size)\n', (2468, 2480), True, 'import numpy as np\n'), ((20018, 20044), 'os.listdir', 'os.listdir', (['"""test_videos/"""'], {}), "('test_videos/')\n", (20028, 20044), False, 'import os\n'), ((804, 820), 'numpy.zeros_like', 'np.zeros_like', (['S'], {}), '(S)\n', (817, 820), True, 'import numpy as np\n'), ((1153, 1183), 'numpy.zeros_like', 'np.zeros_like', (['binary_gradient'], {}), '(binary_gradient)\n', (1166, 1183), True, 'import numpy as np\n'), ((1355, 1431), 'numpy.array', 'np.array', (['[[(350, 720), (580, 500), (800, 500), (900, 720)]]'], {'dtype': 'np.int32'}), '([[(350, 720), (580, 500), (800, 500), (900, 720)]], dtype=np.int32)\n', (1363, 1431), True, 'import numpy as np\n'), ((1434, 1492), 'cv2.fillPoly', 'cv2.fillPoly', (['binary_output', 'polys', '(0)'], {'lineType': '(8)', 'shift': '(0)'}), '(binary_output, polys, 0, lineType=8, shift=0)\n', (1446, 1492), False, 'import cv2\n'), ((1691, 1716), 'numpy.float32', 'np.float32', (['source_points'], {}), '(source_points)\n', (1701, 1716), True, 'import numpy as np\n'), ((1728, 1758), 'numpy.float32', 'np.float32', (['destination_points'], {}), '(destination_points)\n', (1738, 1758), True, 'import numpy as np\n'), ((1768, 1805), 'cv2.getPerspectiveTransform', 'cv2.getPerspectiveTransform', (['src', 'dst'], {}), '(src, dst)\n', (1795, 1805), False, 'import cv2\n'), ((1817, 1854), 'cv2.getPerspectiveTransform', 'cv2.getPerspectiveTransform', (['dst', 'src'], {}), '(dst, src)\n', (1844, 1854), False, 'import cv2\n'), ((1868, 1930), 'cv2.warpPerspective', 'cv2.warpPerspective', (['img', 'M', 'img_shape'], {'flags': 'cv2.INTER_LINEAR'}), '(img, M, img_shape, flags=cv2.INTER_LINEAR)\n', (1887, 1930), False, 'import cv2\n'), ((2773, 2833), 'numpy.sum', 'np.sum', (['warped_image[warped_image.shape[0] // 2:, :]'], {'axis': '(0)'}), '(warped_image[warped_image.shape[0] // 2:, :], axis=0)\n', (2779, 2833), True, 'import numpy as np\n'), ((3200, 3230), 'numpy.int', 'np.int', (['(histogram.shape[0] / 2)'], {}), '(histogram.shape[0] / 2)\n', (3206, 3230), True, 'import numpy as np\n'), ((3247, 3278), 'numpy.argmax', 'np.argmax', (['histogram[:midpoint]'], {}), '(histogram[:midpoint])\n', (3256, 3278), True, 'import numpy as np\n'), ((3459, 3499), 'numpy.int', 'np.int', (['(warped_image.shape[0] / nwindows)'], {}), '(warped_image.shape[0] / nwindows)\n', (3465, 3499), True, 'import numpy as np\n'), ((3625, 3645), 'numpy.array', 'np.array', (['nonzero[0]'], {}), '(nonzero[0])\n', (3633, 3645), True, 'import numpy as 
np\n'), ((3662, 3682), 'numpy.array', 'np.array', (['nonzero[1]'], {}), '(nonzero[1])\n', (3670, 3682), True, 'import numpy as np\n'), ((5759, 5789), 'numpy.concatenate', 'np.concatenate', (['left_lane_inds'], {}), '(left_lane_inds)\n', (5773, 5789), True, 'import numpy as np\n'), ((5813, 5844), 'numpy.concatenate', 'np.concatenate', (['right_lane_inds'], {}), '(right_lane_inds)\n', (5827, 5844), True, 'import numpy as np\n'), ((6119, 6146), 'numpy.polyfit', 'np.polyfit', (['lefty', 'leftx', '(2)'], {}), '(lefty, leftx, 2)\n', (6129, 6146), True, 'import numpy as np\n'), ((6560, 6573), 'numpy.mean', 'np.mean', (['C0_L'], {}), '(C0_L)\n', (6567, 6573), True, 'import numpy as np\n'), ((6594, 6607), 'numpy.mean', 'np.mean', (['C1_L'], {}), '(C1_L)\n', (6601, 6607), True, 'import numpy as np\n'), ((6628, 6641), 'numpy.mean', 'np.mean', (['C2_L'], {}), '(C2_L)\n', (6635, 6641), True, 'import numpy as np\n'), ((6775, 6804), 'numpy.polyfit', 'np.polyfit', (['righty', 'rightx', '(2)'], {}), '(righty, rightx, 2)\n', (6785, 6804), True, 'import numpy as np\n'), ((7225, 7238), 'numpy.mean', 'np.mean', (['C0_R'], {}), '(C0_R)\n', (7232, 7238), True, 'import numpy as np\n'), ((7259, 7272), 'numpy.mean', 'np.mean', (['C1_R'], {}), '(C1_R)\n', (7266, 7272), True, 'import numpy as np\n'), ((7293, 7306), 'numpy.mean', 'np.mean', (['C2_R'], {}), '(C2_R)\n', (7300, 7306), True, 'import numpy as np\n'), ((7475, 7539), 'numpy.linspace', 'np.linspace', (['(0)', '(warped_image.shape[0] - 1)', 'warped_image.shape[0]'], {}), '(0, warped_image.shape[0] - 1, warped_image.shape[0])\n', (7486, 7539), True, 'import numpy as np\n'), ((8305, 8349), 'numpy.dstack', 'np.dstack', (['(warp_zero, warp_zero, warp_zero)'], {}), '((warp_zero, warp_zero, warp_zero))\n', (8314, 8349), True, 'import numpy as np\n'), ((8595, 8627), 'numpy.hstack', 'np.hstack', (['(pts_left, pts_right)'], {}), '((pts_left, pts_right))\n', (8604, 8627), True, 'import numpy as np\n'), ((9157, 9170), 'numpy.max', 'np.max', (['ploty'], {}), '(ploty)\n', (9163, 9170), True, 'import numpy as np\n'), ((9727, 9784), 'numpy.polyfit', 'np.polyfit', (['(ploty * ym_per_pix)', '(left_fitx * xm_per_pix)', '(2)'], {}), '(ploty * ym_per_pix, left_fitx * xm_per_pix, 2)\n', (9737, 9784), True, 'import numpy as np\n'), ((9801, 9859), 'numpy.polyfit', 'np.polyfit', (['(ploty * ym_per_pix)', '(right_fitx * xm_per_pix)', '(2)'], {}), '(ploty * ym_per_pix, right_fitx * xm_per_pix, 2)\n', (9811, 9859), True, 'import numpy as np\n'), ((10668, 10745), 'cv2.warpPerspective', 'cv2.warpPerspective', (['color_warp', 'Minv', '(orig_img.shape[1], orig_img.shape[0])'], {}), '(color_warp, Minv, (orig_img.shape[1], orig_img.shape[0]))\n', (10687, 10745), False, 'import cv2\n'), ((10795, 10895), 'cv2.putText', 'cv2.putText', (['newwarp', '"""Recording: project_video"""', '(10, 50)', 'font', '(1)', '(255, 0, 0)', '(3)', 'cv2.LINE_AA'], {}), "(newwarp, 'Recording: project_video', (10, 50), font, 1, (255, 0,\n 0), 3, cv2.LINE_AA)\n", (10806, 10895), False, 'import cv2\n'), ((11385, 11439), 'numpy.arange', 'np.arange', (['(newwarp.shape[0] - 10)', '(newwarp.shape[0] + 1)'], {}), '(newwarp.shape[0] - 10, newwarp.shape[0] + 1)\n', (11394, 11439), True, 'import numpy as np\n'), ((13097, 13138), 'cv2.cvtColor', 'cv2.cvtColor', (['orig_img', 'cv2.COLOR_BGR2RGB'], {}), '(orig_img, cv2.COLOR_BGR2RGB)\n', (13109, 13138), False, 'import cv2\n'), ((13215, 13255), 'cv2.addWeighted', 'cv2.addWeighted', (['img', '(1)', 'newwarp', '(0.3)', '(0)'], {}), '(img, 1, newwarp, 0.3, 0)\n', (13230, 13255), 
False, 'import cv2\n'), ((14131, 14155), 'os.listdir', 'os.listdir', (['"""camera_cal"""'], {}), "('camera_cal')\n", (14141, 14155), False, 'import os\n'), ((14209, 14244), 'camera_calibration.calibrate_camera', 'calibrate_camera', (['image_dir', 'images'], {}), '(image_dir, images)\n', (14225, 14244), False, 'from camera_calibration import calibrate_camera, distortion_correct\n'), ((14456, 14527), 'cv2.calibrateCamera', 'cv2.calibrateCamera', (['objpoints', 'imgpoints', 'gray.shape[::-1]', 'None', 'None'], {}), '(objpoints, imgpoints, gray.shape[::-1], None, None)\n', (14475, 14527), False, 'import cv2\n'), ((14705, 14730), 'os.listdir', 'os.listdir', (['"""test_images"""'], {}), "('test_images')\n", (14715, 14730), False, 'import os\n'), ((14890, 14938), 'camera_calibration.distortion_correct', 'distortion_correct', (['image_dir', 'images', 'mtx', 'dist'], {}), '(image_dir, images, mtx, dist)\n', (14908, 14938), False, 'from camera_calibration import calibrate_camera, distortion_correct\n'), ((14944, 14967), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (14965, 14967), False, 'import cv2\n'), ((15208, 15233), 'os.listdir', 'os.listdir', (['"""test_images"""'], {}), "('test_images')\n", (15218, 15233), False, 'import os\n'), ((15895, 15938), 'glob.glob', 'glob.glob', (['"""./test_images/*Distortion*.jpg"""'], {}), "('./test_images/*Distortion*.jpg')\n", (15904, 15938), False, 'import glob\n'), ((16037, 16076), 'cv2.cvtColor', 'cv2.cvtColor', (['images', 'cv2.COLOR_BGR2RGB'], {}), '(images, cv2.COLOR_BGR2RGB)\n', (16049, 16076), False, 'import cv2\n'), ((16352, 16407), 'sobel_library.abs_sobel_image', 'abs_sobel_image', (['img', '"""x"""', 'grad_threshold', 'sobel_kernel'], {}), "(img, 'x', grad_threshold, sobel_kernel)\n", (16367, 16407), False, 'from sobel_library import abs_sobel_image, sobel_mag_thresh, sobel_dir_thresh\n'), ((16589, 16644), 'sobel_library.abs_sobel_image', 'abs_sobel_image', (['img', '"""y"""', 'grad_threshold', 'sobel_kernel'], {}), "(img, 'y', grad_threshold, sobel_kernel)\n", (16604, 16644), False, 'from sobel_library import abs_sobel_image, sobel_mag_thresh, sobel_dir_thresh\n'), ((16896, 16959), 'sobel_library.sobel_dir_thresh', 'sobel_dir_thresh', (['img'], {'sobel_kernel': '(15)', 'thresh': '(0.6, np.pi / 2)'}), '(img, sobel_kernel=15, thresh=(0.6, np.pi / 2))\n', (16912, 16959), False, 'from sobel_library import abs_sobel_image, sobel_mag_thresh, sobel_dir_thresh\n'), ((17235, 17292), 'sobel_library.sobel_mag_thresh', 'sobel_mag_thresh', (['img', 'sobel_kernel'], {'mag_thresh': '(80, 150)'}), '(img, sobel_kernel, mag_thresh=(80, 150))\n', (17251, 17292), False, 'from sobel_library import abs_sobel_image, sobel_mag_thresh, sobel_dir_thresh\n'), ((17595, 17620), 'numpy.zeros_like', 'np.zeros_like', (['dir_binary'], {}), '(dir_binary)\n', (17608, 17620), True, 'import numpy as np\n'), ((18783, 18844), 'numpy.float32', 'np.float32', (['[(200, 720), (580, 480), (720, 480), (1050, 720)]'], {}), '([(200, 720), (580, 480), (720, 480), (1050, 720)])\n', (18793, 18844), True, 'import numpy as np\n'), ((18856, 18916), 'numpy.float32', 'np.float32', (['[(280, 720), (400, 190), (920, 190), (960, 720)]'], {}), '([(280, 720), (400, 190), (920, 190), (960, 720)])\n', (18866, 18916), True, 'import numpy as np\n'), ((18955, 19100), 'numpy.float32', 'np.float32', (['[[offset, img_size[1] - offset], [img_size[0] - offset, img_size[1] -\n offset], [img_size[0] - offset, offset], [offset, offset]]'], {}), '([[offset, img_size[1] - offset], [img_size[0] - 
offset, img_size\n [1] - offset], [img_size[0] - offset, offset], [offset, offset]])\n', (18965, 19100), True, 'import numpy as np\n'), ((19193, 19254), 'numpy.float32', 'np.float32', (['([450, 780], [680, 1050], [680, 250], [450, 500])'], {}), '(([450, 780], [680, 1050], [680, 250], [450, 500]))\n', (19203, 19254), True, 'import numpy as np\n'), ((668, 704), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_RGB2HLS'], {}), '(img, cv2.COLOR_RGB2HLS)\n', (680, 704), False, 'import cv2\n'), ((2990, 3043), 'numpy.dstack', 'np.dstack', (['(warped_image, warped_image, warped_image)'], {}), '((warped_image, warped_image, warped_image))\n', (2999, 3043), True, 'import numpy as np\n'), ((3298, 3329), 'numpy.argmax', 'np.argmax', (['histogram[midpoint:]'], {}), '(histogram[midpoint:])\n', (3307, 3329), True, 'import numpy as np\n'), ((4619, 4719), 'cv2.rectangle', 'cv2.rectangle', (['out_img', '(win_xleft_low, win_y_low)', '(win_xleft_high, win_y_high)', '(0, 255, 0)', '(2)'], {}), '(out_img, (win_xleft_low, win_y_low), (win_xleft_high,\n win_y_high), (0, 255, 0), 2)\n', (4632, 4719), False, 'import cv2\n'), ((4729, 4831), 'cv2.rectangle', 'cv2.rectangle', (['out_img', '(win_xright_low, win_y_low)', '(win_xright_high, win_y_high)', '(0, 255, 0)', '(2)'], {}), '(out_img, (win_xright_low, win_y_low), (win_xright_high,\n win_y_high), (0, 255, 0), 2)\n', (4742, 4831), False, 'import cv2\n'), ((8713, 8727), 'numpy.int_', 'np.int_', (['[pts]'], {}), '([pts])\n', (8720, 8727), True, 'import numpy as np\n'), ((9247, 9275), 'numpy.absolute', 'np.absolute', (['(2 * left_fit[0])'], {}), '(2 * left_fit[0])\n', (9258, 9275), True, 'import numpy as np\n'), ((9353, 9382), 'numpy.absolute', 'np.absolute', (['(2 * right_fit[0])'], {}), '(2 * right_fit[0])\n', (9364, 9382), True, 'import numpy as np\n'), ((9993, 10024), 'numpy.absolute', 'np.absolute', (['(2 * left_fit_cr[0])'], {}), '(2 * left_fit_cr[0])\n', (10004, 10024), True, 'import numpy as np\n'), ((10119, 10151), 'numpy.absolute', 'np.absolute', (['(2 * right_fit_cr[0])'], {}), '(2 * right_fit_cr[0])\n', (10130, 10151), True, 'import numpy as np\n'), ((12177, 12300), 'cv2.putText', 'cv2.putText', (['newwarp', '"""Ego Vehicle is directly on center!! Great job!"""', '(10, 150)', 'font', '(1)', '(255, 0, 0)', '(3)', 'cv2.LINE_AA'], {}), "(newwarp, 'Ego Vehicle is directly on center!! 
Great job!', (10,\n 150), font, 1, (255, 0, 0), 3, cv2.LINE_AA)\n", (12188, 12300), False, 'import cv2\n'), ((15826, 15874), 'camera_calibration.distortion_correct', 'distortion_correct', (['image_dir', 'images', 'mtx', 'dist'], {}), '(image_dir, images, mtx, dist)\n', (15844, 15874), False, 'from camera_calibration import calibrate_camera, distortion_correct\n'), ((20097, 20143), 'moviepy.editor.VideoFileClip', 'VideoFileClip', (['"""test_videos/project_video.mp4"""'], {}), "('test_videos/project_video.mp4')\n", (20110, 20143), False, 'from moviepy.editor import VideoFileClip\n'), ((8242, 8269), 'numpy.zeros_like', 'np.zeros_like', (['warped_image'], {}), '(warped_image)\n', (8255, 8269), True, 'import numpy as np\n'), ((15647, 15690), 'glob.glob', 'glob.glob', (['"""./test_images/*Distortion*.jpg"""'], {}), "('./test_images/*Distortion*.jpg')\n", (15656, 15690), False, 'import glob\n'), ((5531, 5564), 'numpy.mean', 'np.mean', (['nonzerox[good_left_inds]'], {}), '(nonzerox[good_left_inds])\n', (5538, 5564), True, 'import numpy as np\n'), ((5654, 5688), 'numpy.mean', 'np.mean', (['nonzerox[good_right_inds]'], {}), '(nonzerox[good_right_inds])\n', (5661, 5688), True, 'import numpy as np\n'), ((8466, 8495), 'numpy.vstack', 'np.vstack', (['[left_fitx, ploty]'], {}), '([left_fitx, ploty])\n', (8475, 8495), True, 'import numpy as np\n'), ((8549, 8579), 'numpy.vstack', 'np.vstack', (['[right_fitx, ploty]'], {}), '([right_fitx, ploty])\n', (8558, 8579), True, 'import numpy as np\n')] |
"""
homeassistant.components.light.insteon
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Support for Insteon Hub lights.
"""
from homeassistant.components.insteon_hub import INSTEON, InsteonToggleDevice
def setup_platform(hass, config, add_devices, discovery_info=None):
""" Sets up the Insteon Hub light platform. """
devs = []
for device in INSTEON.devices:
if device.DeviceCategory == "Switched Lighting Control":
devs.append(InsteonToggleDevice(device))
if device.DeviceCategory == "Dimmable Lighting Control":
devs.append(InsteonToggleDevice(device))
add_devices(devs)
| [
"homeassistant.components.insteon_hub.InsteonToggleDevice"
] | [((455, 482), 'homeassistant.components.insteon_hub.InsteonToggleDevice', 'InsteonToggleDevice', (['device'], {}), '(device)\n', (474, 482), False, 'from homeassistant.components.insteon_hub import INSTEON, InsteonToggleDevice\n'), ((573, 600), 'homeassistant.components.insteon_hub.InsteonToggleDevice', 'InsteonToggleDevice', (['device'], {}), '(device)\n', (592, 600), False, 'from homeassistant.components.insteon_hub import INSTEON, InsteonToggleDevice\n')] |
from IMLearn.learners import UnivariateGaussian, MultivariateGaussian
import numpy as np
import plotly.graph_objects as go
import plotly.io as pio
pio.templates.default = "simple_white"
SAMPLES_NUM = 1000
LEFT_CIRCLE = '('
RIGHT_CIRCLE = ')'
COMMA = ', '
GRAPH_SIZE = 500
HEATMAP_SIZE = 700
def test_univariate_gaussian():
# Question 1 - Draw samples and print fitted model
uni = UnivariateGaussian()
mu, sigma = 10, 1
s = np.random.normal(mu, sigma, SAMPLES_NUM)
res = uni.fit(s)
print(LEFT_CIRCLE + str(res.mu_) + COMMA + str(res.var_) + RIGHT_CIRCLE)
# Question 2 - Empirically showing sample mean is consistent
ms = np.linspace(10, 1000, 100).astype(int)
diff = []
for m in ms:
diff.append(abs(uni.fit(s[0:m]).mu_ - mu))
    go.Figure([go.Scatter(x=ms, y=diff, mode='markers+lines')],
              layout=go.Layout(title=r"$\text{ Distance between estimated "
                                      r"and true value of the expectation as a function of the number of samples}$",
                                xaxis_title="$m\\text{ - number of samples}$",
                                yaxis_title=r"$distance$",
                                height=GRAPH_SIZE)).show()
# Question 3 - Plotting Empirical PDF of fitted model
pdf_values = uni.pdf(s)
    go.Figure([go.Scatter(x=s, y=pdf_values, mode='markers')],
              layout=go.Layout(title=r"$\text{ Sampled values distribution}$",
                                xaxis_title="$m\\text{ - sampled values}$",
                                yaxis_title=r"$ pdf - values$",
                                height=GRAPH_SIZE)).show()
# As I expected, the samples' distribution is gaussian around the expectation (10)
def test_multivariate_gaussian():
# Question 4 - Draw samples and print fitted model
multi_uni = MultivariateGaussian()
mu = np.array([0, 0, 4, 0])
sigma = np.asarray([[1, 0.2, 0, 0.5],
[0.2, 2, 0, 0],
[0, 0, 1, 0],
[0.5, 0, 0, 1]])
s = np.random.multivariate_normal(mu, sigma, SAMPLES_NUM)
res = multi_uni.fit(s)
print(str(res.mu_) + '\n' + str(res.cov_))
# Question 5 - Likelihood evaluation
ms = np.linspace(-10, 10, 200)
logs = np.zeros((200, 200))
i = 0
j = 0
for f1 in ms:
for f3 in ms:
logs[i][j] = (MultivariateGaussian.log_likelihood(np.transpose([f1, 0, f3, 0]), sigma, s))
j += 1
j = 0
i += 1
go.Figure([go.Heatmap(x=ms, y=ms, z=np.asarray(logs), colorbar=dict(title="Log Likelihood"))],
layout=go.Layout(title=
r"$\text{ Log Likelihood as function of "
r"different expectancies}$",
width=HEATMAP_SIZE, height=HEATMAP_SIZE,
xaxis_title="$f3$", yaxis_title="$f1$")).show()
# Question 6 - Maximum likelihood
index = np.argmax(logs)
row = int(index / 200)
col = int(index % 200)
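    # np.argmax on a 2-D array returns an index into the flattened array, so
    # row/column above are recovered by dividing by the row length (200 here);
    # e.g. a flat index of 417 maps to row 2, column 17. divmod(index, 200)
    # would express the same mapping in one call.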
print("Maximum value is achieved for the pair: f1 = " + str(round(ms[row], 3)) + " f3 = " + str(round(ms[col], 3)))
if __name__ == '__main__':
np.random.seed(0)
test_univariate_gaussian()
test_multivariate_gaussian()
| [
"numpy.random.normal",
"numpy.transpose",
"plotly.graph_objects.Layout",
"numpy.random.multivariate_normal",
"numpy.asarray",
"numpy.argmax",
"numpy.array",
"numpy.linspace",
"numpy.zeros",
"plotly.graph_objects.Scatter",
"numpy.random.seed",
"IMLearn.learners.UnivariateGaussian",
"IMLearn.l... | [((392, 412), 'IMLearn.learners.UnivariateGaussian', 'UnivariateGaussian', ([], {}), '()\n', (410, 412), False, 'from IMLearn.learners import UnivariateGaussian, MultivariateGaussian\n'), ((443, 483), 'numpy.random.normal', 'np.random.normal', (['mu', 'sigma', 'SAMPLES_NUM'], {}), '(mu, sigma, SAMPLES_NUM)\n', (459, 483), True, 'import numpy as np\n'), ((1848, 1870), 'IMLearn.learners.MultivariateGaussian', 'MultivariateGaussian', ([], {}), '()\n', (1868, 1870), False, 'from IMLearn.learners import UnivariateGaussian, MultivariateGaussian\n'), ((1880, 1902), 'numpy.array', 'np.array', (['[0, 0, 4, 0]'], {}), '([0, 0, 4, 0])\n', (1888, 1902), True, 'import numpy as np\n'), ((1915, 1991), 'numpy.asarray', 'np.asarray', (['[[1, 0.2, 0, 0.5], [0.2, 2, 0, 0], [0, 0, 1, 0], [0.5, 0, 0, 1]]'], {}), '([[1, 0.2, 0, 0.5], [0.2, 2, 0, 0], [0, 0, 1, 0], [0.5, 0, 0, 1]])\n', (1925, 1991), True, 'import numpy as np\n'), ((2066, 2119), 'numpy.random.multivariate_normal', 'np.random.multivariate_normal', (['mu', 'sigma', 'SAMPLES_NUM'], {}), '(mu, sigma, SAMPLES_NUM)\n', (2095, 2119), True, 'import numpy as np\n'), ((2246, 2271), 'numpy.linspace', 'np.linspace', (['(-10)', '(10)', '(200)'], {}), '(-10, 10, 200)\n', (2257, 2271), True, 'import numpy as np\n'), ((2283, 2303), 'numpy.zeros', 'np.zeros', (['(200, 200)'], {}), '((200, 200))\n', (2291, 2303), True, 'import numpy as np\n'), ((3176, 3191), 'numpy.argmax', 'np.argmax', (['logs'], {}), '(logs)\n', (3185, 3191), True, 'import numpy as np\n'), ((3399, 3416), 'numpy.random.seed', 'np.random.seed', (['(0)'], {}), '(0)\n', (3413, 3416), True, 'import numpy as np\n'), ((658, 684), 'numpy.linspace', 'np.linspace', (['(10)', '(1000)', '(100)'], {}), '(10, 1000, 100)\n', (669, 684), True, 'import numpy as np\n'), ((2426, 2454), 'numpy.transpose', 'np.transpose', (['[f1, 0, f3, 0]'], {}), '([f1, 0, f3, 0])\n', (2438, 2454), True, 'import numpy as np\n'), ((796, 842), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': 'ms', 'y': 'diff', 'mode': '"""markers+lines"""'}), "(x=ms, y=diff, mode='markers+lines')\n", (806, 842), True, 'import plotly.graph_objects as go\n'), ((866, 1096), 'plotly.graph_objects.Layout', 'go.Layout', ([], {'title': '"""$\\\\text{ Distance between estimated and true value of the expectation as a function of samples number}$"""', 'xaxis_title': '"""$m\\\\text{ - number of samples}$"""', 'yaxis_title': '"""r$distance$"""', 'height': 'GRAPH_SIZE'}), "(title=\n '$\\\\text{ Distance between estimated and true value of the expectation as a function of samples number}$'\n , xaxis_title='$m\\\\text{ - number of samples}$', yaxis_title=\n 'r$distance$', height=GRAPH_SIZE)\n", (875, 1096), True, 'import plotly.graph_objects as go\n'), ((1328, 1373), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': 's', 'y': 'pdf_values', 'mode': '"""markers"""'}), "(x=s, y=pdf_values, mode='markers')\n", (1338, 1373), True, 'import plotly.graph_objects as go\n'), ((1397, 1559), 'plotly.graph_objects.Layout', 'go.Layout', ([], {'title': '"""$\\\\text{ Sampled values distribution}$"""', 'xaxis_title': '"""$m\\\\text{ - sampled values}$"""', 'yaxis_title': '"""r$ pdf - values$"""', 'height': 'GRAPH_SIZE'}), "(title='$\\\\text{ Sampled values distribution}$', xaxis_title=\n '$m\\\\text{ - sampled values}$', yaxis_title='r$ pdf - values$', height=\n GRAPH_SIZE)\n", (1406, 1559), True, 'import plotly.graph_objects as go\n'), ((2687, 2863), 'plotly.graph_objects.Layout', 'go.Layout', ([], {'title': '"""$\\\\text{ Log Likelihood as function 
of different expectancies}$"""', 'width': 'HEATMAP_SIZE', 'height': 'HEATMAP_SIZE', 'xaxis_title': '"""$f3$"""', 'yaxis_title': '"""$f1$"""'}), "(title=\n '$\\\\text{ Log Likelihood as function of different expectancies}$',\n width=HEATMAP_SIZE, height=HEATMAP_SIZE, xaxis_title='$f3$',\n yaxis_title='$f1$')\n", (2696, 2863), True, 'import plotly.graph_objects as go\n'), ((2556, 2572), 'numpy.asarray', 'np.asarray', (['logs'], {}), '(logs)\n', (2566, 2572), True, 'import numpy as np\n')] |
results = open('test-results-gpu.out', 'a')
results.write('** Starting serial GPU tests **\n')
try:
# Fresnel
#import fresnel
#results.write('Fresnel version : {}\n'.format(fresnel.__version__))
#dev = fresnel.Device(mode='gpu', n=1)
#results.write('Fresnel device : {}\n'.format(dev))
# HOOMD
import hoomd
context = hoomd.context.initialize('--mode=gpu')
assert(context.on_gpu())
results.write('HOOMD version : {}\n'.format(hoomd.__version__))
results.write('HOOMD flags : {}\n'.format(hoomd._hoomd.hoomd_compile_flags()))
results.write('** Serial GPU tests PASSED **\n\n')
except:
results.write('** Serial GPU tests FAILED **\n\n')
raise
| [
"hoomd.context.initialize",
"hoomd._hoomd.hoomd_compile_flags"
] | [((356, 394), 'hoomd.context.initialize', 'hoomd.context.initialize', (['"""--mode=gpu"""'], {}), "('--mode=gpu')\n", (380, 394), False, 'import hoomd\n'), ((548, 582), 'hoomd._hoomd.hoomd_compile_flags', 'hoomd._hoomd.hoomd_compile_flags', ([], {}), '()\n', (580, 582), False, 'import hoomd\n')] |
from django.utils.translation import ugettext_lazy as _
from mayan.apps.authentication.link_conditions import condition_user_is_authenticated
from mayan.apps.navigation.classes import Link, Separator, Text
from mayan.apps.navigation.utils import factory_condition_queryset_access
from .icons import (
icon_current_user_details, icon_group_create, icon_group_delete_single,
icon_group_delete_multiple, icon_group_edit, icon_group_list,
icon_group_setup, icon_group_user_list, icon_user_create,
icon_user_edit, icon_user_group_list, icon_user_list,
icon_user_delete_single, icon_user_delete_multiple,
icon_user_set_options, icon_user_setup
)
from .link_conditions import condition_user_is_not_superuser
from .permissions import (
permission_group_create, permission_group_delete, permission_group_edit,
permission_group_view, permission_user_create, permission_user_delete,
permission_user_edit, permission_user_view
)
from .utils import get_user_label_text
# Current user
link_current_user_details = Link(
args='request.user.id',
condition=condition_user_is_authenticated,
icon=icon_current_user_details, text=_('User details'),
view='user_management:user_details'
)
# Group
link_group_create = Link(
icon=icon_group_create, permissions=(permission_group_create,),
text=_('Create new group'), view='user_management:group_create'
)
link_group_delete_single = Link(
args='object.id', icon=icon_group_delete_single,
permissions=(permission_group_delete,), tags='dangerous',
text=_('Delete'), view='user_management:group_delete_single'
)
link_group_delete_multiple = Link(
icon=icon_group_delete_multiple, tags='dangerous', text=_('Delete'),
view='user_management:group_delete_multiple'
)
link_group_edit = Link(
args='object.id', icon=icon_group_edit,
permissions=(permission_group_edit,), text=_('Edit'),
view='user_management:group_edit'
)
link_group_list = Link(
condition=factory_condition_queryset_access(
app_label='auth', model_name='Group',
object_permission=permission_group_view,
), icon=icon_group_list, text=_('Groups'),
view='user_management:group_list'
)
link_group_user_list = Link(
args='object.id', icon=icon_group_user_list,
permissions=(permission_group_edit,), text=_('Users'),
view='user_management:group_members'
)
link_group_setup = Link(
condition=factory_condition_queryset_access(
app_label='auth', model_name='Group',
callback=condition_user_is_not_superuser,
object_permission=permission_group_view,
view_permission=permission_group_create
), icon=icon_group_setup, text=_('Groups'),
view='user_management:group_list'
)
# User
link_user_create = Link(
condition=condition_user_is_authenticated, icon=icon_user_create,
permissions=(permission_user_create,), text=_('Create new user'),
view='user_management:user_create'
)
link_user_delete_single = Link(
args='object.id', condition=condition_user_is_authenticated,
icon=icon_user_delete_single, permissions=(permission_user_delete,),
tags='dangerous', text=_('Delete'),
view='user_management:user_delete_single'
)
link_user_delete_multiple = Link(
icon=icon_user_delete_multiple, tags='dangerous', text=_('Delete'),
view='user_management:user_delete_multiple'
)
link_user_edit = Link(
args='object.id', condition=condition_user_is_authenticated,
icon=icon_user_edit, permissions=(permission_user_edit,), text=_('Edit'),
view='user_management:user_edit'
)
link_user_group_list = Link(
args='object.id', condition=condition_user_is_authenticated,
icon=icon_user_group_list, permissions=(permission_user_edit,),
text=_('Groups'), view='user_management:user_groups'
)
link_user_list = Link(
icon=icon_user_list, text=_('Users'),
condition=factory_condition_queryset_access(
app_label='auth', model_name='User',
callback=condition_user_is_authenticated,
object_permission=permission_user_view,
view_permission=permission_user_create
), view='user_management:user_list'
)
link_user_set_options = Link(
args='object.id', condition=condition_user_is_authenticated,
icon=icon_user_set_options, permissions=(permission_user_edit,),
text=_('User options'), view='user_management:user_options'
)
link_user_setup = Link(
condition=factory_condition_queryset_access(
app_label='auth', model_name='User',
object_permission=permission_user_view,
view_permission=permission_user_create,
), icon=icon_user_setup, text=_('Users'),
view='user_management:user_list'
)
separator_user_label = Separator()
text_user_label = Text(
html_extra_classes='menu-user-name', text=get_user_label_text
)
| [
"mayan.apps.navigation.classes.Separator",
"mayan.apps.navigation.utils.factory_condition_queryset_access",
"django.utils.translation.ugettext_lazy",
"mayan.apps.navigation.classes.Text"
] | [((4680, 4691), 'mayan.apps.navigation.classes.Separator', 'Separator', ([], {}), '()\n', (4689, 4691), False, 'from mayan.apps.navigation.classes import Link, Separator, Text\n'), ((4711, 4778), 'mayan.apps.navigation.classes.Text', 'Text', ([], {'html_extra_classes': '"""menu-user-name"""', 'text': 'get_user_label_text'}), "(html_extra_classes='menu-user-name', text=get_user_label_text)\n", (4715, 4778), False, 'from mayan.apps.navigation.classes import Link, Separator, Text\n'), ((1161, 1178), 'django.utils.translation.ugettext_lazy', '_', (['"""User details"""'], {}), "('User details')\n", (1162, 1178), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1335, 1356), 'django.utils.translation.ugettext_lazy', '_', (['"""Create new group"""'], {}), "('Create new group')\n", (1336, 1356), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1553, 1564), 'django.utils.translation.ugettext_lazy', '_', (['"""Delete"""'], {}), "('Delete')\n", (1554, 1564), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1706, 1717), 'django.utils.translation.ugettext_lazy', '_', (['"""Delete"""'], {}), "('Delete')\n", (1707, 1717), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1885, 1894), 'django.utils.translation.ugettext_lazy', '_', (['"""Edit"""'], {}), "('Edit')\n", (1886, 1894), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1974, 2090), 'mayan.apps.navigation.utils.factory_condition_queryset_access', 'factory_condition_queryset_access', ([], {'app_label': '"""auth"""', 'model_name': '"""Group"""', 'object_permission': 'permission_group_view'}), "(app_label='auth', model_name='Group',\n object_permission=permission_group_view)\n", (2007, 2090), False, 'from mayan.apps.navigation.utils import factory_condition_queryset_access\n'), ((2138, 2149), 'django.utils.translation.ugettext_lazy', '_', (['"""Groups"""'], {}), "('Groups')\n", (2139, 2149), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2316, 2326), 'django.utils.translation.ugettext_lazy', '_', (['"""Users"""'], {}), "('Users')\n", (2317, 2326), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2410, 2614), 'mayan.apps.navigation.utils.factory_condition_queryset_access', 'factory_condition_queryset_access', ([], {'app_label': '"""auth"""', 'model_name': '"""Group"""', 'callback': 'condition_user_is_not_superuser', 'object_permission': 'permission_group_view', 'view_permission': 'permission_group_create'}), "(app_label='auth', model_name='Group',\n callback=condition_user_is_not_superuser, object_permission=\n permission_group_view, view_permission=permission_group_create)\n", (2443, 2614), False, 'from mayan.apps.navigation.utils import factory_condition_queryset_access\n'), ((2673, 2684), 'django.utils.translation.ugettext_lazy', '_', (['"""Groups"""'], {}), "('Groups')\n", (2674, 2684), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2878, 2898), 'django.utils.translation.ugettext_lazy', '_', (['"""Create new user"""'], {}), "('Create new user')\n", (2879, 2898), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3138, 3149), 'django.utils.translation.ugettext_lazy', '_', (['"""Delete"""'], {}), "('Delete')\n", (3139, 3149), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3292, 3303), 'django.utils.translation.ugettext_lazy', '_', (['"""Delete"""'], {}), "('Delete')\n", (3293, 3303), True, 'from django.utils.translation import 
ugettext_lazy as _\n'), ((3510, 3519), 'django.utils.translation.ugettext_lazy', '_', (['"""Edit"""'], {}), "('Edit')\n", (3511, 3519), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3731, 3742), 'django.utils.translation.ugettext_lazy', '_', (['"""Groups"""'], {}), "('Groups')\n", (3732, 3742), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3834, 3844), 'django.utils.translation.ugettext_lazy', '_', (['"""Users"""'], {}), "('Users')\n", (3835, 3844), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3860, 4061), 'mayan.apps.navigation.utils.factory_condition_queryset_access', 'factory_condition_queryset_access', ([], {'app_label': '"""auth"""', 'model_name': '"""User"""', 'callback': 'condition_user_is_authenticated', 'object_permission': 'permission_user_view', 'view_permission': 'permission_user_create'}), "(app_label='auth', model_name='User',\n callback=condition_user_is_authenticated, object_permission=\n permission_user_view, view_permission=permission_user_create)\n", (3893, 4061), False, 'from mayan.apps.navigation.utils import factory_condition_queryset_access\n'), ((4300, 4317), 'django.utils.translation.ugettext_lazy', '_', (['"""User options"""'], {}), "('User options')\n", (4301, 4317), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((4395, 4554), 'mayan.apps.navigation.utils.factory_condition_queryset_access', 'factory_condition_queryset_access', ([], {'app_label': '"""auth"""', 'model_name': '"""User"""', 'object_permission': 'permission_user_view', 'view_permission': 'permission_user_create'}), "(app_label='auth', model_name='User',\n object_permission=permission_user_view, view_permission=\n permission_user_create)\n", (4428, 4554), False, 'from mayan.apps.navigation.utils import factory_condition_queryset_access\n'), ((4605, 4615), 'django.utils.translation.ugettext_lazy', '_', (['"""Users"""'], {}), "('Users')\n", (4606, 4615), True, 'from django.utils.translation import ugettext_lazy as _\n')] |
from django.utils.translation import gettext_lazy as _, gettext
from .utils import get_main_menu_item, APPS
ENTITIES = {
'apart': ('apart', 'apartment'),
'meter': ('meters data', 'execute'),
'bill': ('bill', 'cost'),
'service': ('service', 'key'),
'price': ('tariff', 'application'),
'cars': ('car', 'car'),
'fuel': ('fueling', 'gas'),
'interval': ('spare part', 'part'),
    'service': ('replacement', 'key'),  # NOTE: duplicate 'service' key -- this entry silently overwrites the one defined above
'note': ('note', '/'),
'news': ('news', '/'),
'project': ('project', 'work'),
'expense': ('expense', 'cost'),
'entry': ('password', 'key'),
'person': ('person', 'user'),
'trip': ('trip', 'car'),
'department':('department', 'application'),
'post': ('post', 'application'),
'pay_title': ('pay title', 'application'),
'employee': ('employee', 'user'),
'surname': ('surname change history', 'application'),
'child': ('child', 'user'),
'appoint': ('appointment', 'application'),
'education': ('education', 'application'),
'payment': ('payment', 'cost'),
'task': ('task', 'application'),
'group': ('group', '/'),
'list': ('list', '/'),
}
class SearchResult():
def __init__(self, query):
self.query = query
self.items = []
def add(self, app, entity, id, created, name, info, main_entity = True, detail1 = '', detail2 = ''):
prefix = ''
if (not info):
info = ''
if (len(info) > 500):
pos = info.find(self.query)
if (pos > 250):
pos -= 250
prefix = '... '
else:
pos = 0
info = prefix + info[pos:pos+500] + ' ...'
self.items.append(SearchItem(app, entity, id, created, name, info.replace(self.query, '<strong>' + self.query + '</strong>'), main_entity, detail1, detail2))
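    # Worked illustration of the snippet window built above (hypothetical values):
    # for info of length 2000 with self.query found at pos=800, pos > 250 shifts
    # the window back by 250 characters, so the stored snippet becomes
    # '... ' + info[550:1050] + ' ...'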
class SearchItem():
def __init__(self, app, entity, id, created, name, info, main_entity = True, detail1 = '', detail2 = ''):
self.app = app
self.entity = entity
self.main = main_entity
self.id = id
self.created = created
self.name = name
self.info = info
self.detail1 = detail1
self.detail2 = detail2
def __repr__(self):
return 'Application: "{}", Entity: "{}", Created: "{}", Name: "{}" , Info: "{}"'.format(self.app, self.entity, self.created, self.name, self.info)
def href(self):
pass
def app_name(self):
return get_main_menu_item(self.app)
def app_icon(self):
return 'rok/icon/' + APPS[self.app][0] + '.png'
def ent_icon(self):
if self.entity in ENTITIES:
icon_name = ENTITIES[self.entity][1]
if (icon_name == '/'):
icon_name = self.entity
return 'rok/icon/' + icon_name + '.png'
return 'rok/icon/inline/separator.png'
def ent_name(self):
if self.entity in ENTITIES:
return _(ENTITIES[self.entity][0]).capitalize()
return self.entity
| [
"django.utils.translation.gettext_lazy"
] | [((3184, 3211), 'django.utils.translation.gettext_lazy', '_', (['ENTITIES[self.entity][0]'], {}), '(ENTITIES[self.entity][0])\n', (3185, 3211), True, 'from django.utils.translation import gettext_lazy as _, gettext\n')] |
'''Redo CHALLENGE 9, showing the multiplication table of a number chosen
by the user, but now using a for loop.'''
# ALAN'S SOLUTION
'''mult = int(input(' Enter a number to see its multiplication table: '))
for num in range(1, 11):
    rest = num * mult
    print('{} X {} = {} '.format(num, mult, rest))'''
# TEACHER'S SOLUTION
from time import sleep
num = int(input(' Enter a number to see its multiplication table: '))
print('PROCESSING. . . ')
sleep(2)
for c in range(1, 11):
    print('{} x {} = {}'.format(c, num, num * c))
    sleep(1)
sleep(1)
print('''
THANK YOU, PARTNER''')
| [
"time.sleep"
] | [((437, 445), 'time.sleep', 'sleep', (['(2)'], {}), '(2)\n', (442, 445), False, 'from time import sleep\n'), ((532, 540), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (537, 540), False, 'from time import sleep\n'), ((523, 531), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (528, 531), False, 'from time import sleep\n')] |
from django.utils.formats import localize
from rest_framework.serializers import (
ModelSerializer,
HyperlinkedIdentityField,
SerializerMethodField,
ValidationError,
)
from rest_framework import serializers
from django.contrib.auth import get_user_model
from ...purchase.models import PurchaseProduct as Table
from saleor.payment.models import PaymentOption
from structlog import get_logger
logger = get_logger(__name__)
User = get_user_model()
class TableListSerializer(serializers.ModelSerializer):
unit_cost = SerializerMethodField()
total_cost = SerializerMethodField()
paid = SerializerMethodField()
supplier_name = SerializerMethodField()
product_name = SerializerMethodField()
pay_option = SerializerMethodField()
date = SerializerMethodField()
credit_balance = SerializerMethodField()
class Meta:
model = Table
fields = (
'id',
'invoice_number',
'product_name',
'variant',
'quantity',
'unit_cost',
'total_cost',
'paid',
'credit_balance',
'supplier_name',
'pay_option',
'date',
)
def get_pay_option(self, obj):
try:
options = obj.payment_options.first().name
except Exception as e:
print(e)
options = ''
try:
return options + '<br> ' + obj.payment_number
except:
return ''
def get_credit_balance(self, obj):
try:
return "{:,}".format(obj.balance.gross)
except Exception as e:
print(e)
return ''
def get_paid(self, obj):
try:
return "{:,}".format(obj.amount_paid.gross)
except Exception as e:
print(e)
return ''
def get_product_name(self, obj):
try:
return obj.stock.variant.display_product()
except:
return ''
def get_supplier_name(self, obj):
try:
return obj.supplier.name
except:
return ''
def get_date(self, obj):
return localize(obj.created)
def get_unit_cost(self, obj):
try:
return obj.cost_price.gross
except Exception as e:
return 0
def get_total_cost(self, obj):
try:
return obj.total_cost.gross
except Exception as e:
return 0
class DistinctTableListSerializer(serializers.ModelSerializer):
purchase_url = HyperlinkedIdentityField(view_name='dashboard:sale_supplier_list')
unit_cost = SerializerMethodField()
total_cost = SerializerMethodField()
total_quantity = SerializerMethodField()
supplier_name = SerializerMethodField()
product_name = SerializerMethodField()
date = SerializerMethodField()
class Meta:
model = Table
fields = (
'id',
'invoice_number',
'product_name',
'variant',
'quantity',
'unit_cost',
'total_cost',
'total_quantity',
'supplier_name',
'date',
'purchase_url'
)
def get_product_name(self, obj):
return obj.stock.variant.display_product()
def get_date(self, obj):
return localize(obj.created)
def get_supplier_name(self, obj):
try:
return obj.supplier.name
except:
return ''
def get_unit_cost(self, obj):
try:
return obj.cost_price.gross
except Exception as e:
return 0
def get_total_quantity(self, obj):
try:
return Table.objects.total_quantity(obj)
except:
return 0
def get_total_cost(self, obj):
try:
return Table.objects.total_cost(obj)
except:
return 0
class PaymentOptionListSerializer(serializers.ModelSerializer):
tendered = SerializerMethodField()
transaction_number = SerializerMethodField()
payment_name = SerializerMethodField()
class Meta:
model = PaymentOption
fields = (
'id',
'name',
'transaction_number',
'payment_name',
'tendered'
)
def get_transaction_number(self, obj):
return ''
def get_tendered(self, obj):
return 0.00
def get_payment_name(self, obj):
try:
return obj.name
except:
return ''
| [
"structlog.get_logger",
"django.contrib.auth.get_user_model",
"rest_framework.serializers.SerializerMethodField",
"django.utils.formats.localize",
"rest_framework.serializers.HyperlinkedIdentityField"
] | [((484, 504), 'structlog.get_logger', 'get_logger', (['__name__'], {}), '(__name__)\n', (494, 504), False, 'from structlog import get_logger\n'), ((513, 529), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (527, 529), False, 'from django.contrib.auth import get_user_model\n'), ((604, 627), 'rest_framework.serializers.SerializerMethodField', 'SerializerMethodField', ([], {}), '()\n', (625, 627), False, 'from rest_framework.serializers import ModelSerializer, HyperlinkedIdentityField, SerializerMethodField, ValidationError\n'), ((645, 668), 'rest_framework.serializers.SerializerMethodField', 'SerializerMethodField', ([], {}), '()\n', (666, 668), False, 'from rest_framework.serializers import ModelSerializer, HyperlinkedIdentityField, SerializerMethodField, ValidationError\n'), ((680, 703), 'rest_framework.serializers.SerializerMethodField', 'SerializerMethodField', ([], {}), '()\n', (701, 703), False, 'from rest_framework.serializers import ModelSerializer, HyperlinkedIdentityField, SerializerMethodField, ValidationError\n'), ((724, 747), 'rest_framework.serializers.SerializerMethodField', 'SerializerMethodField', ([], {}), '()\n', (745, 747), False, 'from rest_framework.serializers import ModelSerializer, HyperlinkedIdentityField, SerializerMethodField, ValidationError\n'), ((767, 790), 'rest_framework.serializers.SerializerMethodField', 'SerializerMethodField', ([], {}), '()\n', (788, 790), False, 'from rest_framework.serializers import ModelSerializer, HyperlinkedIdentityField, SerializerMethodField, ValidationError\n'), ((808, 831), 'rest_framework.serializers.SerializerMethodField', 'SerializerMethodField', ([], {}), '()\n', (829, 831), False, 'from rest_framework.serializers import ModelSerializer, HyperlinkedIdentityField, SerializerMethodField, ValidationError\n'), ((843, 866), 'rest_framework.serializers.SerializerMethodField', 'SerializerMethodField', ([], {}), '()\n', (864, 866), False, 'from rest_framework.serializers import ModelSerializer, HyperlinkedIdentityField, SerializerMethodField, ValidationError\n'), ((888, 911), 'rest_framework.serializers.SerializerMethodField', 'SerializerMethodField', ([], {}), '()\n', (909, 911), False, 'from rest_framework.serializers import ModelSerializer, HyperlinkedIdentityField, SerializerMethodField, ValidationError\n'), ((2625, 2691), 'rest_framework.serializers.HyperlinkedIdentityField', 'HyperlinkedIdentityField', ([], {'view_name': '"""dashboard:sale_supplier_list"""'}), "(view_name='dashboard:sale_supplier_list')\n", (2649, 2691), False, 'from rest_framework.serializers import ModelSerializer, HyperlinkedIdentityField, SerializerMethodField, ValidationError\n'), ((2708, 2731), 'rest_framework.serializers.SerializerMethodField', 'SerializerMethodField', ([], {}), '()\n', (2729, 2731), False, 'from rest_framework.serializers import ModelSerializer, HyperlinkedIdentityField, SerializerMethodField, ValidationError\n'), ((2749, 2772), 'rest_framework.serializers.SerializerMethodField', 'SerializerMethodField', ([], {}), '()\n', (2770, 2772), False, 'from rest_framework.serializers import ModelSerializer, HyperlinkedIdentityField, SerializerMethodField, ValidationError\n'), ((2794, 2817), 'rest_framework.serializers.SerializerMethodField', 'SerializerMethodField', ([], {}), '()\n', (2815, 2817), False, 'from rest_framework.serializers import ModelSerializer, HyperlinkedIdentityField, SerializerMethodField, ValidationError\n'), ((2838, 2861), 'rest_framework.serializers.SerializerMethodField', 'SerializerMethodField', ([], {}), '()\n', (2859, 2861), False, 'from rest_framework.serializers import ModelSerializer, HyperlinkedIdentityField, SerializerMethodField, ValidationError\n'), ((2881, 2904), 'rest_framework.serializers.SerializerMethodField', 'SerializerMethodField', ([], {}), '()\n', (2902, 2904), False, 'from rest_framework.serializers import ModelSerializer, HyperlinkedIdentityField, SerializerMethodField, ValidationError\n'), ((2916, 2939), 'rest_framework.serializers.SerializerMethodField', 'SerializerMethodField', ([], {}), '()\n', (2937, 2939), False, 'from rest_framework.serializers import ModelSerializer, HyperlinkedIdentityField, SerializerMethodField, ValidationError\n'), ((4134, 4157), 'rest_framework.serializers.SerializerMethodField', 'SerializerMethodField', ([], {}), '()\n', (4155, 4157), False, 'from rest_framework.serializers import ModelSerializer, HyperlinkedIdentityField, SerializerMethodField, ValidationError\n'), ((4183, 4206), 'rest_framework.serializers.SerializerMethodField', 'SerializerMethodField', ([], {}), '()\n', (4204, 4206), False, 'from rest_framework.serializers import ModelSerializer, HyperlinkedIdentityField, SerializerMethodField, ValidationError\n'), ((4226, 4249), 'rest_framework.serializers.SerializerMethodField', 'SerializerMethodField', ([], {}), '()\n', (4247, 4249), False, 'from rest_framework.serializers import ModelSerializer, HyperlinkedIdentityField, SerializerMethodField, ValidationError\n'), ((2237, 2258), 'django.utils.formats.localize', 'localize', (['obj.created'], {}), '(obj.created)\n', (2245, 2258), False, 'from django.utils.formats import localize\n'), ((3486, 3507), 'django.utils.formats.localize', 'localize', (['obj.created'], {}), '(obj.created)\n', (3494, 3507), False, 'from django.utils.formats import localize\n')]
import inspect
try:
from unittest import mock
except ImportError:
import mock
import pytest
from selenium.webdriver.common.by import By
from selenium.webdriver.remote.webdriver import WebDriver, WebElement
from selenium.common.exceptions import NoSuchElementException
from page_objects import PageObject, PageElement, MultiPageElement
@pytest.fixture()
def webdriver():
return mock.Mock(spec=WebDriver)
class TestConstructor:
def test_page_element(self):
elem = PageElement(css='foo')
assert elem.locator == (By.CSS_SELECTOR, 'foo')
def test_multi_page_element(self):
elem = MultiPageElement(id_='bar')
assert elem.locator == (By.ID, 'bar')
def test_page_element_bad_args(self):
with pytest.raises(ValueError):
PageElement()
with pytest.raises(ValueError):
PageElement(id_='foo', xpath='bar')
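
# Illustrative page-object declaration of the kind exercised by the tests below
# (selectors and names are hypothetical):
#
#   class LoginPage(PageObject):
#       username = PageElement(css='input#user')
#       password = PageElement(css='input#pass')
#
#   page = LoginPage(webdriver=driver, root_uri='http://example.com')
#   page.username = 'alice'   # the descriptor resolves the element and sends keys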
class TestGet:
def test_get_descriptors(self, webdriver):
class TestPage(PageObject):
test_elem1 = PageElement(css='foo')
test_elem2 = PageElement(id_='bar')
webdriver.find_element.side_effect = ["XXX", "YYY"]
page = TestPage(webdriver=webdriver)
assert page.test_elem1 == "XXX"
assert page.test_elem2 == "YYY"
assert webdriver.find_element.mock_calls == [
mock.call(By.CSS_SELECTOR, 'foo'),
mock.call(By.ID, 'bar'),
]
def test_get_element_with_context(self, webdriver):
class TestPage(PageObject):
test_elem = PageElement(css='bar', context=True)
page = TestPage(webdriver=webdriver)
elem = mock.Mock(spec=WebElement, name="My Elem")
res = page.test_elem(elem)
        elem.find_element.assert_called_once_with(By.CSS_SELECTOR, 'bar')
assert res == elem.find_element.return_value
def test_get_not_found(self, webdriver):
class TestPage(PageObject):
test_elem = PageElement(css='bar')
page = TestPage(webdriver=webdriver)
webdriver.find_element.side_effect = NoSuchElementException
assert page.test_elem is None
def test_get_unattached(self):
assert PageElement(css='bar').__get__(None, None) is None
def test_get_multi(self, webdriver):
class TestPage(PageObject):
test_elems = MultiPageElement(css='foo')
webdriver.find_elements.return_value = ["XXX", "YYY"]
page = TestPage(webdriver=webdriver)
assert page.test_elems == ["XXX", "YYY"]
        webdriver.find_elements.assert_called_once_with(By.CSS_SELECTOR, 'foo')
def test_get_multi_not_found(self, webdriver):
class TestPage(PageObject):
test_elems = MultiPageElement(css='foo')
webdriver.find_elements.side_effect = NoSuchElementException
page = TestPage(webdriver=webdriver)
assert page.test_elems == []
class TestSet:
def test_set_descriptors(self, webdriver):
class TestPage(PageObject):
test_elem1 = PageElement(css='foo')
page = TestPage(webdriver=webdriver)
elem = mock.Mock(spec=WebElement, name="My Elem")
webdriver.find_element.return_value = elem
page.test_elem1 = "XXX"
        webdriver.find_element.assert_called_once_with(By.CSS_SELECTOR, 'foo')
elem.send_keys.assert_called_once_with('XXX')
def test_cannot_set_with_context(self, webdriver):
class TestPage(PageObject):
test_elem = PageElement(css='foo', context=True)
page = TestPage(webdriver=webdriver)
with pytest.raises(ValueError) as e:
page.test_elem = 'xxx'
assert "doesn't support elements with context" in e.value.args[0]
def test_cannot_set_not_found(self, webdriver):
class TestPage(PageObject):
test_elem = PageElement(css='foo')
page = TestPage(webdriver=webdriver)
webdriver.find_element.side_effect = NoSuchElementException
with pytest.raises(ValueError) as e:
page.test_elem = 'xxx'
assert "element not found" in e.value.args[0]
def test_set_multi(self, webdriver):
class TestPage(PageObject):
test_elems = MultiPageElement(css='foo')
page = TestPage(webdriver=webdriver)
elem1 = mock.Mock(spec=WebElement)
elem2 = mock.Mock(spec=WebElement)
webdriver.find_elements.return_value = [elem1, elem2]
page.test_elems = "XXX"
        webdriver.find_elements.assert_called_once_with(By.CSS_SELECTOR, 'foo')
elem1.send_keys.assert_called_once_with('XXX')
elem2.send_keys.assert_called_once_with('XXX')
def test_cannot_set_multi_with_context(self, webdriver):
class TestPage(PageObject):
test_elem = MultiPageElement(css='foo', context=True)
page = TestPage(webdriver=webdriver)
with pytest.raises(ValueError) as e:
page.test_elem = 'xxx'
assert "doesn't support elements with context" in e.value.args[0]
def test_cannot_set_multi_not_found(self, webdriver):
class TestPage(PageObject):
test_elem = MultiPageElement(css='foo')
page = TestPage(webdriver=webdriver)
webdriver.find_elements.side_effect = NoSuchElementException
with pytest.raises(ValueError) as e:
page.test_elem = 'xxx'
assert "no elements found" in e.value.args[0]
class TestRootURI:
class TestPage(PageObject):
pass
def test_from_constructor(self, webdriver):
page = self.TestPage(webdriver=webdriver, root_uri="http://example.com")
assert page.root_uri == 'http://example.com'
def test_from_webdriver(self):
webdriver = mock.Mock(spec=WebDriver, root_uri="http://example.com/foo")
page = self.TestPage(webdriver=webdriver)
assert page.root_uri == 'http://example.com/foo'
def test_get(self, webdriver):
page = self.TestPage(webdriver=webdriver, root_uri="http://example.com")
page.get('/foo/bar')
        webdriver.get.assert_called_once_with("http://example.com/foo/bar")
def test_get_no_root(self, webdriver):
page = self.TestPage(webdriver=webdriver)
page.get('/foo/bar')
        webdriver.get.assert_called_once_with("/foo/bar")
| [
"page_objects.PageElement",
"mock.Mock",
"pytest.raises",
"pytest.fixture",
"mock.call",
"page_objects.MultiPageElement"
] | [((350, 366), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (364, 366), False, 'import pytest\n'), ((395, 420), 'mock.Mock', 'mock.Mock', ([], {'spec': 'WebDriver'}), '(spec=WebDriver)\n', (404, 420), False, 'import mock\n'), ((495, 517), 'page_objects.PageElement', 'PageElement', ([], {'css': '"""foo"""'}), "(css='foo')\n", (506, 517), False, 'from page_objects import PageObject, PageElement, MultiPageElement\n'), ((629, 656), 'page_objects.MultiPageElement', 'MultiPageElement', ([], {'id_': '"""bar"""'}), "(id_='bar')\n", (645, 656), False, 'from page_objects import PageObject, PageElement, MultiPageElement\n'), ((1645, 1687), 'mock.Mock', 'mock.Mock', ([], {'spec': 'WebElement', 'name': '"""My Elem"""'}), "(spec=WebElement, name='My Elem')\n", (1654, 1687), False, 'import mock\n'), ((3103, 3145), 'mock.Mock', 'mock.Mock', ([], {'spec': 'WebElement', 'name': '"""My Elem"""'}), "(spec=WebElement, name='My Elem')\n", (3112, 3145), False, 'import mock\n'), ((4294, 4320), 'mock.Mock', 'mock.Mock', ([], {'spec': 'WebElement'}), '(spec=WebElement)\n', (4303, 4320), False, 'import mock\n'), ((4337, 4363), 'mock.Mock', 'mock.Mock', ([], {'spec': 'WebElement'}), '(spec=WebElement)\n', (4346, 4363), False, 'import mock\n'), ((5715, 5775), 'mock.Mock', 'mock.Mock', ([], {'spec': 'WebDriver', 'root_uri': '"""http://example.com/foo"""'}), "(spec=WebDriver, root_uri='http://example.com/foo')\n", (5724, 5775), False, 'import mock\n'), ((759, 784), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (772, 784), False, 'import pytest\n'), ((798, 811), 'page_objects.PageElement', 'PageElement', ([], {}), '()\n', (809, 811), False, 'from page_objects import PageObject, PageElement, MultiPageElement\n'), ((825, 850), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (838, 850), False, 'import pytest\n'), ((864, 899), 'page_objects.PageElement', 'PageElement', ([], {'id_': '"""foo"""', 'xpath': '"""bar"""'}), "(id_='foo', xpath='bar')\n", (875, 899), False, 'from page_objects import PageObject, PageElement, MultiPageElement\n'), ((1025, 1047), 'page_objects.PageElement', 'PageElement', ([], {'css': '"""foo"""'}), "(css='foo')\n", (1036, 1047), False, 'from page_objects import PageObject, PageElement, MultiPageElement\n'), ((1073, 1095), 'page_objects.PageElement', 'PageElement', ([], {'id_': '"""bar"""'}), "(id_='bar')\n", (1084, 1095), False, 'from page_objects import PageObject, PageElement, MultiPageElement\n'), ((1547, 1583), 'page_objects.PageElement', 'PageElement', ([], {'css': '"""bar"""', 'context': '(True)'}), "(css='bar', context=True)\n", (1558, 1583), False, 'from page_objects import PageObject, PageElement, MultiPageElement\n'), ((1956, 1978), 'page_objects.PageElement', 'PageElement', ([], {'css': '"""bar"""'}), "(css='bar')\n", (1967, 1978), False, 'from page_objects import PageObject, PageElement, MultiPageElement\n'), ((2336, 2363), 'page_objects.MultiPageElement', 'MultiPageElement', ([], {'css': '"""foo"""'}), "(css='foo')\n", (2352, 2363), False, 'from page_objects import PageObject, PageElement, MultiPageElement\n'), ((2714, 2741), 'page_objects.MultiPageElement', 'MultiPageElement', ([], {'css': '"""foo"""'}), "(css='foo')\n", (2730, 2741), False, 'from page_objects import PageObject, PageElement, MultiPageElement\n'), ((3019, 3041), 'page_objects.PageElement', 'PageElement', ([], {'css': '"""foo"""'}), "(css='foo')\n", (3030, 3041), False, 'from page_objects import PageObject, PageElement, MultiPageElement\n'), ((3479, 3515), 'page_objects.PageElement', 'PageElement', ([], {'css': '"""foo"""', 'context': '(True)'}), "(css='foo', context=True)\n", (3490, 3515), False, 'from page_objects import PageObject, PageElement, MultiPageElement\n'), ((3575, 3600), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3588, 3600), False, 'import pytest\n'), ((3829, 3851), 'page_objects.PageElement', 'PageElement', ([], {'css': '"""foo"""'}), "(css='foo')\n", (3840, 3851), False, 'from page_objects import PageObject, PageElement, MultiPageElement\n'), ((3980, 4005), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3993, 4005), False, 'import pytest\n'), ((4204, 4231), 'page_objects.MultiPageElement', 'MultiPageElement', ([], {'css': '"""foo"""'}), "(css='foo')\n", (4220, 4231), False, 'from page_objects import PageObject, PageElement, MultiPageElement\n'), ((4770, 4811), 'page_objects.MultiPageElement', 'MultiPageElement', ([], {'css': '"""foo"""', 'context': '(True)'}), "(css='foo', context=True)\n", (4786, 4811), False, 'from page_objects import PageObject, PageElement, MultiPageElement\n'), ((4871, 4896), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (4884, 4896), False, 'import pytest\n'), ((5131, 5158), 'page_objects.MultiPageElement', 'MultiPageElement', ([], {'css': '"""foo"""'}), "(css='foo')\n", (5147, 5158), False, 'from page_objects import PageObject, PageElement, MultiPageElement\n'), ((5288, 5313), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (5301, 5313), False, 'import pytest\n'), ((1348, 1381), 'mock.call', 'mock.call', (['By.CSS_SELECTOR', '"""foo"""'], {}), "(By.CSS_SELECTOR, 'foo')\n", (1357, 1381), False, 'import mock\n'), ((1395, 1418), 'mock.call', 'mock.call', (['By.ID', '"""bar"""'], {}), "(By.ID, 'bar')\n", (1404, 1418), False, 'import mock\n'), ((2182, 2204), 'page_objects.PageElement', 'PageElement', ([], {'css': '"""bar"""'}), "(css='bar')\n", (2193, 2204), False, 'from page_objects import PageObject, PageElement, MultiPageElement\n')]
import copy
import struct
class PointerTable:
END_OF_DATA = (0xff, )
"""
Class to manage a list of pointers to data objects
Can rewrite the rom to modify the data objects and still keep the pointers intact.
"""
def __init__(self, rom, info):
assert "count" in info
assert "pointers_bank" in info
assert "pointers_addr" in info
assert ("banks_bank" in info and "banks_addr" in info) or ("data_bank" in info)
self.__info = info
self.__data = []
self.__banks = []
self.__storage = []
count = info["count"]
addr = info["pointers_addr"]
pointers_bank = rom.banks[info["pointers_bank"]]
if "data_addr" in info:
pointers_raw = []
for n in range(count):
pointers_raw.append(info["data_addr"] + pointers_bank[addr + n] * info["data_size"])
else:
pointers_raw = struct.unpack("<" + "H" * count, pointers_bank[addr:addr+count*2])
if "data_bank" in info:
banks = [info["data_bank"]] * count
else:
addr = info["banks_addr"]
banks = rom.banks[info["banks_bank"]][addr:addr+count]
for n in range(count):
bank = banks[n] & 0x3f
pointer = pointers_raw[n]
pointer &= 0x3fff
self.__data.append(self._readData(rom, bank, pointer))
self.__banks.append(bank)
while self.__mergeStorage():
pass
self.__storage.sort(key=lambda n: n["start"])
if "claim_storage_gaps" in info and info["claim_storage_gaps"]:
self.__storage = [{"bank": self.__storage[0]["bank"], "start": self.__storage[0]["start"], "end": self.__storage[-1]["end"]}]
if "expand_to_end_of_bank" in info and info["expand_to_end_of_bank"]:
for st in self.__storage:
expand = True
for st2 in self.__storage:
if st["bank"] == st2["bank"] and st["end"] < st2["end"]:
expand = False
if expand:
st["end"] = 0x4000
# for s in sorted(self.__storage, key=lambda s: (s["bank"], s["start"])):
# print(self.__class__.__name__, s)
def __setitem__(self, item, value):
self.__data[item] = value
def __getitem__(self, item):
return self.__data[item]
def __len__(self):
return len(self.__data)
def store(self, rom):
storage = copy.deepcopy(self.__storage)
pointers_bank = self.__info["pointers_bank"]
pointers_addr = self.__info["pointers_addr"]
done = {}
for st in storage:
done[st["bank"]] = {}
for n, s in enumerate(self.__data):
if isinstance(s, int):
pointer = s
else:
s = bytes(s)
bank = self.__banks[n]
if s in done[bank]:
pointer = done[bank][s]
assert rom.banks[bank][pointer:pointer+len(s)] == s
else:
my_storage = None
for st in storage:
if st["end"] - st["start"] >= len(s) and st["bank"] == bank:
my_storage = st
break
assert my_storage is not None, "Not enough room in storage... %s" % (storage)
pointer = my_storage["start"]
my_storage["start"] = pointer + len(s)
rom.banks[bank][pointer:pointer+len(s)] = s
if "data_size" not in self.__info:
# aggressive de-duplication.
for skip in range(len(s)):
done[bank][s[skip:]] = pointer + skip
done[bank][s] = pointer
if "data_addr" in self.__info:
offset = pointer - self.__info["data_addr"]
if "data_size" in self.__info:
assert offset % self.__info["data_size"] == 0
offset //= self.__info["data_size"]
rom.banks[pointers_bank][pointers_addr + n] = offset
else:
rom.banks[pointers_bank][pointers_addr+n*2] = pointer & 0xff
rom.banks[pointers_bank][pointers_addr+n*2+1] = ((pointer >> 8) & 0xff) | 0x40
space_left = sum(map(lambda n: n["end"] - n["start"], storage))
# print(self.__class__.__name__, "Space left:", space_left)
def _readData(self, rom, bank_nr, pointer):
bank = rom.banks[bank_nr]
start = pointer
if "data_size" in self.__info:
pointer += self.__info["data_size"]
else:
while bank[pointer] not in self.END_OF_DATA:
pointer += 1
pointer += 1
self._addStorage(bank_nr, start, pointer)
return bank[start:pointer]
def _addStorage(self, bank, start, end):
for n, data in enumerate(self.__storage):
if data["bank"] == bank:
if data["start"] == end:
data["start"] = start
return
if data["end"] == start:
data["end"] = end
return
if data["start"] <= start and data["end"] >= end:
return
self.__storage.append({"bank": bank, "start": start, "end": end})
def __mergeStorage(self):
for n in range(len(self.__storage)):
n_end = self.__storage[n]["end"]
n_start = self.__storage[n]["start"]
for m in range(len(self.__storage)):
if m == n or self.__storage[n]["bank"] != self.__storage[m]["bank"]:
continue
m_end = self.__storage[m]["end"]
m_start = self.__storage[m]["start"]
if m_start - 1 <= n_end <= m_end:
self.__storage[n]["start"] = min(self.__storage[n]["start"], self.__storage[m]["start"])
self.__storage[n]["end"] = self.__storage[m]["end"]
self.__storage.pop(m)
return True
return False
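
# A minimal, hypothetical usage sketch (not part of the original project): build
# a fake two-bank ROM, read two 0xff-terminated records through PointerTable,
# edit one, and write everything back. Layout and addresses are invented.
if __name__ == "__main__":
    class FakeRom:
        def __init__(self):
            self.banks = [bytearray(0x4000) for _ in range(2)]

    rom = FakeRom()
    rom.banks[1][0x0000:0x0003] = b"\x01\x02\xff"   # record 0 (ends with END_OF_DATA)
    rom.banks[1][0x0003:0x0005] = b"\x03\xff"       # record 1
    # Pointer table in bank 0: little-endian addresses with bit 14 (0x4000) set,
    # matching the 0x3fff mask applied in __init__ and the | 0x40 written by store().
    rom.banks[0][0x0100:0x0104] = struct.pack("<HH", 0x4000, 0x4003)
    table = PointerTable(rom, {
        "count": 2,
        "pointers_bank": 0,
        "pointers_addr": 0x0100,
        "data_bank": 1,
    })
    print([bytes(table[n]) for n in range(len(table))])  # [b'\x01\x02\xff', b'\x03\xff']
    table[0] = b"\x05\xff"   # shrink record 0
    table.store(rom)         # rewrites the data blocks and patches the pointers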
| [
"struct.unpack",
"copy.deepcopy"
] | [((2586, 2615), 'copy.deepcopy', 'copy.deepcopy', (['self.__storage'], {}), '(self.__storage)\n', (2599, 2615), False, 'import copy\n'), ((975, 1045), 'struct.unpack', 'struct.unpack', (["('<' + 'H' * count)", 'pointers_bank[addr:addr + count * 2]'], {}), "('<' + 'H' * count, pointers_bank[addr:addr + count * 2])\n", (988, 1045), False, 'import struct\n')] |
#Telegram @javes05
import spamwatch, os, asyncio
from telethon import events
from userbot import client as javes, JAVES_NAME, JAVES_MSG
JAVES_NNAME = str(JAVES_NAME) if JAVES_NAME else str(JAVES_MSG)
swapi = os.environ.get("SPAMWATCH_API_KEY", None)
SPAM_PROTECT = os.environ.get("SPAM_PROTECT", None)
SPAMWATCH_SHOUT = os.environ.get("SPAMWATCH_SHOUT", None)
W_CHAT = set(int(x) for x in os.environ.get("WHITE_CHATS", "").split())
if SPAM_PROTECT:
@javes.on(events.ChatAction)
async def handler(rkG):
        if (rkG.user_joined or rkG.user_added) and rkG.chat_id not in W_CHAT and SPAM_PROTECT and swapi and not rkG.is_private:  # parenthesized: 'and' binds tighter than 'or'
chat = await rkG.get_chat()
admin = chat.admin_rights
creator = chat.creator
if admin or creator:
return
sw = spamwatch.Client(swapi)
guser = await rkG.get_user()
try:
sswatch = sw.get_ban(guser.id)
except:
return
if sswatch:
try:
await javes.edit_permissions(rkG.chat_id, guser.id, view_messages=False)
                    action = "`ban`"
                    return await rkG.reply(
                        f"`{JAVES_NNAME}:` ** This user is detected as spam by SpamWatch!!** \n"
                        f"**Reason ** : `{sswatch.reason}`\n"
                        f"**Victim Id**: [{guser.id}](tg://user?id={guser.id})\n"
                        f"**Action ** : {action}"
                    )
except:
return
#else:
#if SPAMWATCH_SHOUT:
#action = "`Reported to `@admins" ; return await rkG.reply(f"`{JAVES_NNAME}:` ** This user is detected as spam by SpamWatch!!** \n"
#f"**Reason ** : `{sswatch.reason}`\n"
#f"**Victim Id**: [{guser.id}](tg://user?id={guser.id})\n"
#f"**Action ** : {action}")
| [
"userbot.client.on",
"os.environ.get",
"spamwatch.Client",
"userbot.client.edit_permissions"
] | [((211, 252), 'os.environ.get', 'os.environ.get', (['"""SPAMWATCH_API_KEY"""', 'None'], {}), "('SPAMWATCH_API_KEY', None)\n", (225, 252), False, 'import spamwatch, os, asyncio\n'), ((268, 304), 'os.environ.get', 'os.environ.get', (['"""SPAM_PROTECT"""', 'None'], {}), "('SPAM_PROTECT', None)\n", (282, 304), False, 'import spamwatch, os, asyncio\n'), ((323, 362), 'os.environ.get', 'os.environ.get', (['"""SPAMWATCH_SHOUT"""', 'None'], {}), "('SPAMWATCH_SHOUT', None)\n", (337, 362), False, 'import spamwatch, os, asyncio\n'), ((456, 483), 'userbot.client.on', 'javes.on', (['events.ChatAction'], {}), '(events.ChatAction)\n', (464, 483), True, 'from userbot import client as javes, JAVES_NAME, JAVES_MSG\n'), ((790, 813), 'spamwatch.Client', 'spamwatch.Client', (['swapi'], {}), '(swapi)\n', (806, 813), False, 'import spamwatch, os, asyncio\n'), ((392, 425), 'os.environ.get', 'os.environ.get', (['"""WHITE_CHATS"""', '""""""'], {}), "('WHITE_CHATS', '')\n", (406, 425), False, 'import spamwatch, os, asyncio\n'), ((1127, 1193), 'userbot.client.edit_permissions', 'javes.edit_permissions', (['rkG.chat_id', 'guser.id'], {'view_messages': '(False)'}), '(rkG.chat_id, guser.id, view_messages=False)\n', (1149, 1193), True, 'from userbot import client as javes, JAVES_NAME, JAVES_MSG\n')] |
import json
from urllib import parse, request
from telegram.ext import CallbackContext, CommandHandler
from telegram.update import Update
from autonomia.core import bot_handler
BASE_URL = "https://query.yahooapis.com/v1/public/yql?"
def _get_weather_info(location):
query = (
"select * from weather.forecast where woeid in (select woeid "
'from geo.places(1) where text="%s") AND u="c"' % location
)
final_url = BASE_URL + parse.urlencode({"q": query}) + "&format=json"
result = json.loads(request.urlopen(final_url).read())
if result["query"]["count"] > 0:
return result["query"]["results"]
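
# For location "dublin" (illustrative), the YQL string built above is:
#   select * from weather.forecast where woeid in (select woeid
#   from geo.places(1) where text="dublin") AND u="c"
# It is url-encoded as the q parameter and appended to BASE_URL with &format=json.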
def cmd_weather(update: Update, context: CallbackContext):
args = context.args
if not args:
args = ["dublin"]
location = " ".join(args)
weather_info = _get_weather_info(location)
if not weather_info:
return
condition = weather_info["channel"]["item"]["condition"]
msg = "{location}, {date}, {temp}°C, {sky}".format(
location=location.capitalize(),
date=condition["date"],
temp=condition["temp"],
sky=condition["text"],
)
update.message.reply_text(msg)
@bot_handler
def weather_factory():
"""
/weather - show the current weather conditions for a given location
"""
return CommandHandler("weather", cmd_weather, pass_args=True)
| [
"urllib.parse.urlencode",
"telegram.ext.CommandHandler",
"urllib.request.urlopen"
] | [((1318, 1372), 'telegram.ext.CommandHandler', 'CommandHandler', (['"""weather"""', 'cmd_weather'], {'pass_args': '(True)'}), "('weather', cmd_weather, pass_args=True)\n", (1332, 1372), False, 'from telegram.ext import CallbackContext, CommandHandler\n'), ((456, 485), 'urllib.parse.urlencode', 'parse.urlencode', (["{'q': query}"], {}), "({'q': query})\n", (471, 485), False, 'from urllib import parse, request\n'), ((527, 553), 'urllib.request.urlopen', 'request.urlopen', (['final_url'], {}), '(final_url)\n', (542, 553), False, 'from urllib import parse, request\n')] |
"""Mock hardware implementation"""
import logging
from stage import exceptions
from unittest.mock import Mock
LOGGER = logging.getLogger("mock")
class MockStage:
"""A mock implemenation of a stepper motor driven linear stage"""
MAX_POS = 100
MIN_POS = 0
def __init__(self):
self._position = __class__.MIN_POS
self.home()
def home(self):
"""Move to home position"""
LOGGER.info("Homing stage")
self._position = __class__.MIN_POS
def end(self):
"""Move to end position"""
        LOGGER.info("Moving to end position")
self._position = self.max
@property
def max(self):
"""Return the maximum position index"""
return __class__.MAX_POS
@property
def position(self):
"""Return the current position index"""
return self._position
@position.setter
def position(self, request):
LOGGER.info("Setting position to %s", request)
too_large = request > __class__.MAX_POS
too_small = request < __class__.MIN_POS
if too_large or too_small:
raise exceptions.OutOfRangeError(
"Cannot go to position {}".format(request))
self._position = request
class Matrix:
"""
A mock for an led matrix device
"""
_width = 32
_height = 8
_mode = "1"
def __init__(self):
        LOGGER.info("Created mock led matrix device %r", self)
self.display = Mock()
@property
def width(self):
"""
Width of the display in pixels
"""
return Matrix._width
@property
def height(self):
"""
Height of the display in pixels
"""
return Matrix._height
@property
def mode(self):
"""
Returns mode which is needed for image drawing reasons
"""
return Matrix._mode
def clear(self):
"""
Clear the display
"""
        LOGGER.info("Clearing device %r", self)
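
# Quick illustrative smoke test (not part of the original module): in-range
# positions are accepted and out-of-range requests raise OutOfRangeError.
if __name__ == "__main__":
    stage = MockStage()
    stage.position = 42
    assert stage.position == 42
    try:
        stage.position = MockStage.MAX_POS + 1
    except exceptions.OutOfRangeError:
        print("rejected out-of-range request, as expected")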
| [
"logging.getLogger",
"unittest.mock.Mock"
] | [((121, 146), 'logging.getLogger', 'logging.getLogger', (['"""mock"""'], {}), "('mock')\n", (138, 146), False, 'import logging\n'), ((1469, 1475), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (1473, 1475), False, 'from unittest.mock import Mock\n')] |
from compmech.stiffpanelbay import StiffPanelBay
spb = StiffPanelBay()
spb.a = 2.
spb.b = 1.
spb.r = 2.
spb.stack = [0, 90, 90, 0, -45, +45]
spb.plyt = 1e-3*0.125
spb.laminaprop = (142.5e9, 8.7e9, 0.28, 5.1e9, 5.1e9, 5.1e9)
spb.model = 'cpanel_clt_donnell_bardell'
spb.m = 15
spb.n = 16
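# Bardell boundary-condition multipliers below, named <field><edge><t|r><axis>,
# e.g. w1rx = rotation (r) term of w at edge 1 along x. Presumably 0. suppresses
# and 1. keeps the corresponding edge term, i.e. translations fixed and
# rotations free on every edge (simply supported).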
spb.u1tx = 0.
spb.u1rx = 1.
spb.u2tx = 0.
spb.u2rx = 1.
spb.u1ty = 0.
spb.u1ry = 1.
spb.u2ty = 0.
spb.u2ry = 1.
spb.v1tx = 0.
spb.v1rx = 1.
spb.v2tx = 0.
spb.v2rx = 1.
spb.v1ty = 0.
spb.v1ry = 1.
spb.v2ty = 0.
spb.v2ry = 1.
spb.w1tx = 0.
spb.w1rx = 1.
spb.w2tx = 0.
spb.w2rx = 1.
spb.w1ty = 0.
spb.w1ry = 1.
spb.w2ty = 0.
spb.w2ry = 1.
spb.add_panel(y1=0, y2=spb.b, Nxx=-1.)
spb.lb(silent=False)
| [
"compmech.stiffpanelbay.StiffPanelBay"
] | [((56, 71), 'compmech.stiffpanelbay.StiffPanelBay', 'StiffPanelBay', ([], {}), '()\n', (69, 71), False, 'from compmech.stiffpanelbay import StiffPanelBay\n')] |
"""
<Program Name>
common.py
<Author>
<NAME> <<EMAIL>>
<NAME> <<EMAIL>>
<Started>
Sep 23, 2016
<Copyright>
See LICENSE for licensing information.
<Purpose>
Provides base classes for various classes in the model.
<Classes>
Metablock:
pretty printed canonical JSON representation and dump
Signable:
sign self, store signature to self and verify signatures
ComparableHashDict: (helper class)
      compare contained dictionary of hashes using "==", "!="
"""
import attr
import canonicaljson
from ..ssl_crypto import keys as ssl_crypto__keys
@attr.s(repr=False)
class Metablock(object):
"""Objects with base class Metablock have a __repr__ method
that returns a canonical pretty printed JSON string and can be dumped to a
file """
def __repr__(self):
return canonicaljson.encode_pretty_printed_json(attr.asdict(self))
def dump(self, filename):
with open(filename, 'wt') as fp:
fp.write("{}".format(self))
@attr.s(repr=False)
class Signable(Metablock):
"""Objects with base class Signable can sign their payload (a canonical
pretty printed JSON string not containing the signatures attribute) and store
the signature (signature format: ssl_crypto__formats.SIGNATURE_SCHEMA) """
signatures = attr.ib([])
@property
def payload(self):
payload = attr.asdict(self)
payload.pop("signatures")
return canonicaljson.encode_pretty_printed_json(payload)
def sign(self, key):
"""Signs the canonical JSON representation of itself (without the
signatures property) and adds the signatures to its signature properties."""
# XXX LP: Todo: Verify key format
signature = ssl_crypto__keys.create_signature(key, self.payload)
self.signatures.append(signature)
def verify_signatures(self, keys_dict):
"""Verifies all signatures of the object using the passed key_dict."""
if not self.signatures or len(self.signatures) <= 0:
raise Exception("No signatures found")
for signature in self.signatures:
keyid = signature["keyid"]
try:
key = keys_dict[keyid]
      except KeyError:
raise Exception("Signature key not found, key id is %s" % keyid)
if not ssl_crypto__keys.verify_signature(key, signature, self.payload):
raise Exception("Invalid signature")
@attr.s(repr=False, cmp=False)
class ComparableHashDict(object):
"""Helper class providing that wraps hash dicts (format:
toto.ssl_crypto.formats.HASHDICT_SCHEMA) in order to compare them using
`=` and `!=`"""
hash_dict = attr.ib({})
def __eq__(self, other):
"""Equal if the dicts have the same keys and the according values
(strings) are equal"""
if self.hash_dict.keys() != other.hash_dict.keys():
return False
for key in self.hash_dict.keys():
if self.hash_dict[key] != other.hash_dict[key]:
return False
return True
def __ne__(self, other):
return not self.__eq__(other)
# @attr.s(repr=False)
# class GenericPathList(object):
# """ Helper class implementing __contains__ to provide <path> in <path list>
# where <path> can start with "./" or not
# """
# path_list = attr.ib([])
# def __contains__(self, item):
# if item.startswith("./"):
# other_item = item.lstrip("./")
# else:
# other_item = "./" + item
# if item in self.path_list or \
# other_item in self.path_list:
# return True
# return False
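
# Hypothetical illustration (not part of the original module): the wrapper makes
# hash dicts comparable by value.
if __name__ == "__main__":
  a = ComparableHashDict({"sha256": "abc"})
  b = ComparableHashDict({"sha256": "abc"})
  c = ComparableHashDict({"sha256": "xyz"})
  assert a == b and a != c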
| [
"attr.s",
"attr.asdict",
"canonicaljson.encode_pretty_printed_json",
"attr.ib"
] | [((578, 596), 'attr.s', 'attr.s', ([], {'repr': '(False)'}), '(repr=False)\n', (584, 596), False, 'import attr\n'), ((968, 986), 'attr.s', 'attr.s', ([], {'repr': '(False)'}), '(repr=False)\n', (974, 986), False, 'import attr\n'), ((2299, 2328), 'attr.s', 'attr.s', ([], {'repr': '(False)', 'cmp': '(False)'}), '(repr=False, cmp=False)\n', (2305, 2328), False, 'import attr\n'), ((1260, 1271), 'attr.ib', 'attr.ib', (['[]'], {}), '([])\n', (1267, 1271), False, 'import attr\n'), ((2529, 2540), 'attr.ib', 'attr.ib', (['{}'], {}), '({})\n', (2536, 2540), False, 'import attr\n'), ((1320, 1337), 'attr.asdict', 'attr.asdict', (['self'], {}), '(self)\n', (1331, 1337), False, 'import attr\n'), ((1379, 1428), 'canonicaljson.encode_pretty_printed_json', 'canonicaljson.encode_pretty_printed_json', (['payload'], {}), '(payload)\n', (1419, 1428), False, 'import canonicaljson\n'), ((846, 863), 'attr.asdict', 'attr.asdict', (['self'], {}), '(self)\n', (857, 863), False, 'import attr\n')] |
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
class Application:
def __init__(self):
self.wd = webdriver.Chrome(executable_path='/Users/atvelova/Documents/python_training/chromedriver')
self.wd.implicitly_wait(60)
def open_page(self):
wd = self.wd
wd.get("http://hrm.seleniumminutes.com/symfony/web/index.php/auth/login")
def login(self):
wd = self.wd
self.open_page()
wd.find_element(By.ID, "txtUsername").click()
wd.find_element(By.ID, "txtUsername").send_keys("admin")
wd.find_element(By.ID, "txtPassword").send_keys("Password")
wd.find_element(By.ID, "txtPassword").send_keys(Keys.ENTER)
self.wd.implicitly_wait(60)
def logout(self):
wd = self.wd
wd.find_element(By.ID, "welcome").click()
self.wd.implicitly_wait(60)
wd.find_element(By.LINK_TEXT, "Logout").click()
def destroy(self):
self.wd.quit() | [
"selenium.webdriver.Chrome"
] | [((187, 282), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {'executable_path': '"""/Users/atvelova/Documents/python_training/chromedriver"""'}), "(executable_path=\n '/Users/atvelova/Documents/python_training/chromedriver')\n", (203, 282), False, 'from selenium import webdriver\n')] |
# Copyright 2017-2020 TensorHub, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
import logging
import os
import re
from guild import namespace
from guild import resolver as resolverlib
from guild import resource
from guild import util
log = logging.getLogger("guild")
RESOURCE_TERM = r"[a-zA-Z0-9_\-\.]+"
class DependencyError(Exception):
pass
class ResolutionContext(object):
def __init__(self, target_dir, opdef, resource_config):
self.target_dir = target_dir
self.opdef = opdef
self.resource_config = resource_config
class Resource(object):
def __init__(self, resdef, location, ctx):
self.resdef = resdef
self.location = location
self.ctx = ctx
self.config = self._init_resource_config()
self.dependency = None
def _init_resource_config(self):
for name, config in self.ctx.resource_config.items():
if name in [self.resdef.fullname, self.resdef.name]:
return config
return None
def resolve(self, unpack_dir=None):
resolved_acc = []
for source in self.resdef.sources:
paths = self.resolve_source(source, unpack_dir)
resolved_acc.extend(paths)
return resolved_acc
def resolve_source(self, source, unpack_dir=None):
resolver = resolverlib.for_resdef_source(source, self)
if not resolver:
raise DependencyError(
"unsupported source '%s' in %s resource" % (source, self.resdef.name)
)
try:
source_paths = resolver.resolve(unpack_dir)
except resolverlib.ResolutionError as e:
msg = "could not resolve '%s' in %s resource: %s" % (
source,
self.resdef.name,
e,
)
if source.help:
msg += "\n%s" % source.help
raise DependencyError(msg)
except Exception as e:
log.exception(
"resolving required source '%s' in %s resource",
source,
self.resdef.name,
)
raise DependencyError(
"unexpected error resolving '%s' in %s resource: %r"
% (source, self.resdef.name, e)
)
else:
for path in source_paths:
self._link_to_source(path, source)
return source_paths
def _link_to_source(self, source_path, source):
source_path = util.strip_trailing_sep(source_path)
link = self._link_path(source_path, source)
_symlink(source_path, link)
def _link_path(self, source_path, source):
basename = os.path.basename(source_path)
res_path = self.resdef.path or ""
if source.path:
res_path = os.path.join(res_path, source.path)
if os.path.isabs(res_path):
raise DependencyError(
"invalid path '%s' in %s resource (path must be relative)"
% (res_path, self.resdef.name)
)
if source.rename:
basename = _rename_source(basename, source.rename)
return os.path.join(self.ctx.target_dir, res_path, basename)
def _rename_source(name, rename):
for spec in rename:
try:
renamed = re.sub(spec.pattern, spec.repl, name)
except Exception as e:
raise DependencyError(
"error renaming source %s (%r %r): %s"
% (name, spec.pattern, spec.repl, e)
)
else:
if renamed != name:
return renamed
return name
def _symlink(source_path, link):
assert os.path.isabs(link), link
if os.path.lexists(link) or os.path.exists(link):
log.debug("%s already exists, skipping link", link)
return
util.ensure_dir(os.path.dirname(link))
log.debug("resolving source %s as link %s", source_path, link)
rel_source_path = _rel_source_path(source_path, link)
util.symlink(rel_source_path, link)
def _rel_source_path(source, link):
source_dir, source_name = os.path.split(source)
real_link = util.realpath(link)
link_dir = os.path.dirname(real_link)
source_rel_dir = os.path.relpath(source_dir, link_dir)
return os.path.join(source_rel_dir, source_name)
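
# Worked example for _rel_source_path (hypothetical paths; ignores the
# realpath() resolution step):
#   _rel_source_path("/cache/abc/data.csv", "/runs/1/data.csv")
#   source_dir = "/cache/abc", link_dir = "/runs/1"
#   os.path.relpath("/cache/abc", "/runs/1") == "../../cache/abc"
#   -> "../../cache/abc/data.csv", a target relative to the link's directory.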
class ResourceProxy(object):
def __init__(self, dependency, name, config, ctx):
self.dependency = dependency
self.name = name
self.config = config
self.ctx = ctx
def resolve(self):
source_path = self.config # the only type of config supported
if not os.path.exists(source_path):
raise DependencyError(
"could not resolve %s: %s does not exist" % (self.name, source_path)
)
log.info("Using %s for %s resource", source_path, self.name)
basename = os.path.basename(source_path)
link = os.path.join(self.ctx.target_dir, basename)
_symlink(source_path, link)
return [source_path]
def _dep_desc(dep):
return "%s:%s" % (dep.opdef.modeldef.name, dep.opdef.name)
def resolve(dependencies, ctx):
resolved = {}
for res in resources(dependencies, ctx):
log.info("Resolving %s dependency", res.resdef.name)
resolved_sources = res.resolve()
log.debug("resolved sources for %s: %r", res.dependency, resolved_sources)
if not resolved_sources:
log.warning("Nothing resolved for %s dependency", res.resdef.name)
resolved.setdefault(res.resdef.name, []).extend(resolved_sources)
return resolved
def resources(dependencies, ctx):
flag_vals = util.resolve_all_refs(ctx.opdef.flag_values())
return [_dependency_resource(dep, flag_vals, ctx) for dep in dependencies]
def _dependency_resource(dep, flag_vals, ctx):
if dep.inline_resource:
return _inline_resource(dep.inline_resource, ctx)
spec = util.resolve_refs(dep.spec, flag_vals)
try:
res = util.find_apply(
[_model_resource, _guildfile_resource, _packaged_resource], spec, ctx
)
except DependencyError as e:
if spec in ctx.resource_config:
log.warning(str(e))
return ResourceProxy(dep, spec, ctx.resource_config[spec], ctx)
raise
if res:
res.dependency = spec
return res
raise DependencyError(
"invalid dependency '%s' in operation '%s'" % (spec, ctx.opdef.fullname)
)
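
# Dependency spec forms handled by the matchers below (names are illustrative):
#   "data"            -> resource of the current model        (_model_resource)
#   "mnist:data"      -> resource of another guildfile model  (_guildfile_resource)
#   "gpkg.mnist/data" -> resource of an installed package     (_packaged_resource)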
def _inline_resource(resdef, ctx):
return Resource(resdef, resdef.modeldef.guildfile.dir, ctx)
def _model_resource(spec, ctx):
m = re.match(r"(%s)$" % RESOURCE_TERM, spec)
if m is None:
return None
res_name = m.group(1)
return _modeldef_resource(ctx.opdef.modeldef, res_name, ctx)
def _modeldef_resource(modeldef, res_name, ctx):
resdef = modeldef.get_resource(res_name)
if resdef is None:
raise DependencyError(
"resource '%s' required by operation '%s' is not defined"
% (res_name, ctx.opdef.fullname)
)
return Resource(resdef, modeldef.guildfile.dir, ctx)
def _guildfile_resource(spec, ctx):
m = re.match(r"(%s):(%s)$" % (RESOURCE_TERM, RESOURCE_TERM), spec)
if m is None:
return None
model_name = m.group(1)
modeldef = ctx.opdef.guildfile.models.get(model_name)
if modeldef is None:
raise DependencyError(
"model '%s' in resource '%s' required by operation "
"'%s' is not defined" % (model_name, spec, ctx.opdef.fullname)
)
res_name = m.group(2)
return _modeldef_resource(modeldef, res_name, ctx)
def _packaged_resource(spec, ctx):
m = re.match(r"(%s)/(%s)$" % (RESOURCE_TERM, RESOURCE_TERM), spec)
if m is None:
return None
pkg_name = m.group(1)
res_name = m.group(2)
try:
resources = list(resource.for_name(res_name))
except LookupError:
pass
else:
for res in resources:
if namespace.apply_namespace(res.dist.project_name) == pkg_name:
location = os.path.join(
res.dist.location, res.dist.key.replace(".", os.path.sep)
)
return Resource(res.resdef, location, ctx)
raise DependencyError(
"resource '%s' required by operation '%s' is not installed"
% (spec, ctx.opdef.fullname)
)
| [
"logging.getLogger",
"guild.resource.for_name",
"guild.resolver.for_resdef_source",
"os.path.exists",
"os.path.lexists",
"guild.util.realpath",
"os.path.split",
"os.path.relpath",
"os.path.isabs",
"guild.util.strip_trailing_sep",
"re.match",
"os.path.dirname",
"guild.util.resolve_refs",
"r... | [((816, 842), 'logging.getLogger', 'logging.getLogger', (['"""guild"""'], {}), "('guild')\n", (833, 842), False, 'import logging\n'), ((4232, 4251), 'os.path.isabs', 'os.path.isabs', (['link'], {}), '(link)\n', (4245, 4251), False, 'import os\n'), ((4559, 4594), 'guild.util.symlink', 'util.symlink', (['rel_source_path', 'link'], {}), '(rel_source_path, link)\n', (4571, 4594), False, 'from guild import util\n'), ((4663, 4684), 'os.path.split', 'os.path.split', (['source'], {}), '(source)\n', (4676, 4684), False, 'import os\n'), ((4701, 4720), 'guild.util.realpath', 'util.realpath', (['link'], {}), '(link)\n', (4714, 4720), False, 'from guild import util\n'), ((4736, 4762), 'os.path.dirname', 'os.path.dirname', (['real_link'], {}), '(real_link)\n', (4751, 4762), False, 'import os\n'), ((4784, 4821), 'os.path.relpath', 'os.path.relpath', (['source_dir', 'link_dir'], {}), '(source_dir, link_dir)\n', (4799, 4821), False, 'import os\n'), ((4833, 4874), 'os.path.join', 'os.path.join', (['source_rel_dir', 'source_name'], {}), '(source_rel_dir, source_name)\n', (4845, 4874), False, 'import os\n'), ((6487, 6525), 'guild.util.resolve_refs', 'util.resolve_refs', (['dep.spec', 'flag_vals'], {}), '(dep.spec, flag_vals)\n', (6504, 6525), False, 'from guild import util\n'), ((7171, 7210), 're.match', 're.match', (["('(%s)$' % RESOURCE_TERM)", 'spec'], {}), "('(%s)$' % RESOURCE_TERM, spec)\n", (7179, 7210), False, 'import re\n'), ((7719, 7780), 're.match', 're.match', (["('(%s):(%s)$' % (RESOURCE_TERM, RESOURCE_TERM))", 'spec'], {}), "('(%s):(%s)$' % (RESOURCE_TERM, RESOURCE_TERM), spec)\n", (7727, 7780), False, 'import re\n'), ((8238, 8299), 're.match', 're.match', (["('(%s)/(%s)$' % (RESOURCE_TERM, RESOURCE_TERM))", 'spec'], {}), "('(%s)/(%s)$' % (RESOURCE_TERM, RESOURCE_TERM), spec)\n", (8246, 8299), False, 'import re\n'), ((1899, 1942), 'guild.resolver.for_resdef_source', 'resolverlib.for_resdef_source', (['source', 'self'], {}), '(source, self)\n', (1928, 1942), True, 'from guild import resolver as resolverlib\n'), ((3060, 3096), 'guild.util.strip_trailing_sep', 'util.strip_trailing_sep', (['source_path'], {}), '(source_path)\n', (3083, 3096), False, 'from guild import util\n'), ((3252, 3281), 'os.path.basename', 'os.path.basename', (['source_path'], {}), '(source_path)\n', (3268, 3281), False, 'import os\n'), ((3418, 3441), 'os.path.isabs', 'os.path.isabs', (['res_path'], {}), '(res_path)\n', (3431, 3441), False, 'import os\n'), ((3718, 3771), 'os.path.join', 'os.path.join', (['self.ctx.target_dir', 'res_path', 'basename'], {}), '(self.ctx.target_dir, res_path, basename)\n', (3730, 3771), False, 'import os\n'), ((4265, 4286), 'os.path.lexists', 'os.path.lexists', (['link'], {}), '(link)\n', (4280, 4286), False, 'import os\n'), ((4290, 4310), 'os.path.exists', 'os.path.exists', (['link'], {}), '(link)\n', (4304, 4310), False, 'import os\n'), ((4407, 4428), 'os.path.dirname', 'os.path.dirname', (['link'], {}), '(link)\n', (4422, 4428), False, 'import os\n'), ((5436, 5465), 'os.path.basename', 'os.path.basename', (['source_path'], {}), '(source_path)\n', (5452, 5465), False, 'import os\n'), ((5481, 5524), 'os.path.join', 'os.path.join', (['self.ctx.target_dir', 'basename'], {}), '(self.ctx.target_dir, basename)\n', (5493, 5524), False, 'import os\n'), ((6549, 6639), 'guild.util.find_apply', 'util.find_apply', (['[_model_resource, _guildfile_resource, _packaged_resource]', 'spec', 'ctx'], {}), '([_model_resource, _guildfile_resource, _packaged_resource],\n spec, ctx)\n', (6564, 6639), False, 
'from guild import util\n'), ((3371, 3406), 'os.path.join', 'os.path.join', (['res_path', 'source.path'], {}), '(res_path, source.path)\n', (3383, 3406), False, 'import os\n'), ((3867, 3904), 're.sub', 're.sub', (['spec.pattern', 'spec.repl', 'name'], {}), '(spec.pattern, spec.repl, name)\n', (3873, 3904), False, 'import re\n'), ((5185, 5212), 'os.path.exists', 'os.path.exists', (['source_path'], {}), '(source_path)\n', (5199, 5212), False, 'import os\n'), ((8425, 8452), 'guild.resource.for_name', 'resource.for_name', (['res_name'], {}), '(res_name)\n', (8442, 8452), False, 'from guild import resource\n'), ((8546, 8594), 'guild.namespace.apply_namespace', 'namespace.apply_namespace', (['res.dist.project_name'], {}), '(res.dist.project_name)\n', (8571, 8594), False, 'from guild import namespace\n')] |
r"""Provides functions used by strategies that use a tree to select the
permutation.
To compute optimal permutations, we use the belief states
.. math::
b(y^{k-1}) := \mathbb{P}(s_0, s_k|y^{k-1}),
where the :math:`s_k` are the states of the HMM at step :math:`k`, and the
superscript :math:`y^{k-1}` is the sequence of observations up to step
:math:`k-1`.
Here, when we refer to a tree, we really mean a list of
:py:class:`~perm_hmm.strategies.belief.HMMBeliefState` objects. The i'th
object contains the beliefs for all the nodes at the i'th level of the tree.
"""
import torch
from perm_hmm.policies.belief import HMMBeliefState
class HMMBeliefTree(object):
r"""
Instances of this class have the following attributes:
``hmm``:
A :py:class:`~perm_hmm.models.hmms.PermutedDiscreteHMM` that is used to
calculate belief states.
``possible_perms``:
A :py:class:`~torch.Tensor` of type ``long`` that contains the possible
permutations. This is used to compute transition matrices for updating
belief states.
"""
def __init__(self, hmm, possible_perms, nsteps, root_belief: HMMBeliefState = None, data_len=None, terminal_offset=False):
r"""Generates the belief tree for the given HMM.
Builds a tree that is traversed by sequences :math:`y_0, \sigma_0, y_1,
\sigma_1, \ldots`, where the :math:`\sigma_k` are permutation indices, and
the :math:`y_k` are the observation indices. This tree has a layered
structure. Attached to each node in the tree is a belief state
:math:`\mathbb{P}(s_0, s_k|y^{k-1})`, or :math:`\mathbb{P}(s_0, s_k|y^k)`,
depending on whether the node is an even or odd number of steps from the
root, respectively. To go from a belief state attached to one node to a
belief state attached to one of that node's children, we either use a
transition or a Bayesian update, depending on whether the edge is a
permutation or an observation, respectively.
:param hmm: The HMM to compute likelihoods with.
:param possible_perms: The allowable permutations.
:param nsteps: The number of steps to compute for. (2 * nsteps + 1) is the
height of the tree.
:param HMMBeliefState root_belief: The belief state to start the tree with. If None,
defaults to the initial state distribution of the HMM.
:param data_len: The length of the data. If None, defaults to 1.
:param terminal_offset: Whether the leaves of the tree should be labeled by
observation indices.
:return: A list of belief states, to be interpreted as a tree by looking at
the ith element of the list as the set of all nodes at the ith level.
"""
self.hmm = hmm
self.possible_perms = possible_perms
self._build_tree(nsteps, root_belief, data_len, terminal_offset)
def _build_tree(self, nsteps, root_belief: HMMBeliefState = None, data_len=None, terminal_offset=False):
r"""Generates the belief tree for the given HMM.
Builds a tree that is traversed by sequences :math:`y_0, \sigma_0, y_1,
\sigma_1, \ldots`, where the :math:`\sigma_k` are permutation indices, and
the :math:`y_k` are the observation indices. This tree has a layered
structure. Attached to each node in the tree is a belief state
:math:`\mathbb{P}(s_0, s_k|y^{k-1})`, or :math:`\mathbb{P}(s_0, s_k|y^k)`,
depending on whether the node is an even or odd number of steps from the
root, respectively. To go from a belief state attached to one node to a
belief state attached to one of that node's children, we either use a
transition or a Bayesian update, depending on whether the edge is a
permutation or an observation, respectively.
:param nsteps: The number of steps to compute for. (2 * nsteps + 1) is the
height of the tree.
:param root_belief: The belief state to start the tree with. If None,
defaults to the initial state distribution of the HMM.
:param data_len: The length of the data. If None, defaults to 1.
:param terminal_offset: Whether the leaves of the tree should be labeled by
observation indices.
        The resulting tree is stored in ``self.beliefs``, a list of belief
        states, to be interpreted as a tree by looking at the ith element of
        the list as the set of all nodes at the ith level.
:raise ValueError: If ``nsteps`` is less than 1. Must look ahead at
least one step.
"""
if nsteps < 1:
raise ValueError("Cannot build a tree of less than 1 look ahead "
"steps.")
if data_len is None:
data_len = 1
if root_belief is None:
root_belief = HMMBeliefState.from_hmm(self.hmm)
root_belief.logits = root_belief.logits.expand(data_len, -1, -1)
self.beliefs = [root_belief]
if terminal_offset and (nsteps == 1):
return
b = root_belief.bayes_update(self.hmm.observation_dist.enumerate_support(expand=False).squeeze(-1), new_dim=True)
self.beliefs.append(b)
if (not terminal_offset) and (nsteps == 1):
return
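        # Alternate transition / Bayes-update layers until the tree reaches
        # the requested depth; each call to ``grow`` adds one of each.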
while len(self.beliefs) < (2 * (nsteps - 1)):
self.grow(self.possible_perms)
if not terminal_offset:
self.grow(self.possible_perms)
else:
self.beliefs.append(self.beliefs[-1].transition(self.possible_perms, new_dim=True))
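    # Resulting layer structure (a sketch): beliefs[0] is the root belief;
    # odd levels are produced by Bayes updates and are labeled by observation
    # indices, while even levels above the root are produced by transitions
    # and are labeled by permutation indices, matching the traversal
    # y_0, sigma_0, y_1, sigma_1, ... described in the docstring above.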
def broadcast_to_length(self, length):
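        """Expands each belief's logits to broadcast with a batch of size ``length``."""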
new_beliefs = []
for b in self.beliefs:
shape = torch.broadcast_shapes((length, 1, 1), b.logits.shape)
new_b = HMMBeliefState(b.logits.expand(shape).clone(), b.hmm, offset=b.offset)
new_beliefs.append(new_b)
self.beliefs = new_beliefs
def grow(self, possible_perms=None, hmm=None):
"""Expands the tree by two levels.
Assumes that the leaves have offset=True. Then, we expand the leaves by
transitioning the belief states at the leaves, and then again by Bayesian
updates.
:param possible_perms: The allowable permutations.
:param hmm: The HMM to compute likelihoods with.
:return: An expanded tree, in the form of a list of belief states.
"""
if possible_perms is None:
possible_perms = self.possible_perms
if hmm is None:
hmm = self.hmm
b = self.beliefs[-1].transition(possible_perms, hmm=hmm, new_dim=True)
self.beliefs.append(b)
b = self.beliefs[-1].bayes_update(hmm.observation_dist.enumerate_support(expand=False).squeeze(-1), hmm=hmm, new_dim=True)
self.beliefs.append(b)
def perm_idxs_from_log_cost(self, log_cost_func, return_log_costs=False, terminal_log_cost=None, is_cost_func=True):
r"""Computes :math:`\mathbb{E}_{Y_k^n|y^{k-1}}[c(y^{k-1},Y_k^n)]` and the
corresponding permutation indices that minimize this expectation.
Given a tree of belief states, computes the expected cost of the tree.
This computation is performed by first evaluating the cost function at the
leaves of the tree, then propagating the cost up the tree.
To compute the cost at an internal node whose children are labeled by data,
we take the expectation over the children's costs, using the belief state
to compute said expectation. To compute the cost at an internal node whose
children are labeled by permutations, we take the minimum over the
children's costs. This is a direct computation of the expected cost using
the `Bellman equation`_.
        We then return both the permutation indices and, if ``return_log_costs``
        is True, the expected cost.
The computation is done in log space, so the cost function must be in log
space as well.
.. _`Bellman equation`: https://en.wikipedia.org/wiki/Bellman_equation
        :param log_cost_func: The cost function to compute the expected cost of.
            Must be in log space, and must take a single argument, which is a
            tensor of shape ``tree_shape + (n_states, n_states)``, returning a
            tensor of shape ``tree_shape``. The last two dimensions of the input
            correspond to the initial and final states of the HMM. (See
            ``example_log_misclassification_cost`` below for a sketch of such a
            function.)
        :param bool return_log_costs: Whether to return the expected costs as well.
        :param terminal_log_cost: A tensor of terminal costs to start the calculation
            with. Defaults to ``log_cost_func(self.beliefs[-1].logits)``.
        :param bool is_cost_func: If True, choose permutations that minimize the
            expected cost; if False, treat ``log_cost_func`` as a reward and
            maximize it instead.
        :return: A :py:class:`PermIdxTree` of permutation indices, and, if
            ``return_log_costs`` is True, the list of expected log costs.
        """
if terminal_log_cost is None:
terminal_log_cost = log_cost_func(self.beliefs[-1].logits)
costs = [terminal_log_cost]
perm_idxs = []
for b in reversed(self.beliefs[:-1]):
if b.offset:
yksk = b.joint_yksks0(b.hmm.enumerate_support(expand=False).squeeze(-1), new_dim=True).logsumexp(-2)
yk = yksk.logsumexp(-1)
# Compute the expectation of the cost function
c = costs[-1] + yk
c = c.logsumexp(-2)
costs.append(c)
else:
# Gets the optimal permutation index.
if is_cost_func:
c, perm_idx = costs[-1].min(-2)
else:
c, perm_idx = costs[-1].max(-2)
costs.append(c)
perm_idxs.append(perm_idx)
costs = costs[::-1]
perm_idxs = perm_idxs[::-1]
perm_tree = PermIdxTree(perm_idxs)
if return_log_costs:
return perm_tree, costs
return perm_tree
def prune_tree(self, idx):
"""Prunes a tree according to the index.
:param idx: The index corresponding to the data or permutations.
"""
idx = idx.unsqueeze(-1).unsqueeze(-2)
new_tree = []
for b in self.beliefs[1:]:
idxb = torch.broadcast_tensors(idx, b.logits)[0]
new_b = HMMBeliefState(b.logits.gather(0, idxb)[0], b.hmm, b.offset)
new_tree.append(new_b)
self.beliefs = new_tree
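# A minimal example of a log-space cost usable with
# ``HMMBeliefTree.perm_idxs_from_log_cost`` (a sketch, not part of the
# original module). Per the module docstring, the belief logits are assumed
# to be normalized joint log-probabilities log P(s_0, s_k | y^{k-1}) over
# their last two dimensions.
def example_log_misclassification_cost(logits):
    """Log probability that a MAP guess of the initial state is wrong."""
    # Marginalize out the current state: log P(s_0 | y^{k-1}).
    log_post_initial = logits.logsumexp(-1)
    # Log posterior mass of the most likely initial state.
    log_map_mass = log_post_initial.max(-1).values
    # log(1 - p_map), computed from the log-space quantities above.
    return torch.log1p(-log_map_mass.exp())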
class PermIdxTree(object):
def __init__(self, idx_list):
self.perm_idxs = idx_list
def trim_list_tree(self):
r"""Trims the tree to remove permutation layers.
The tree is a list of tensors. The first tensor is the root of the tree, and
each subsequent tensor is a layer of the tree. The tree has a layered
structure, with a path to a node in the tree given by the indices
corresponding to the list :math:`(y_0, \sigma_0, y_1, \sigma_1, \ldots,)`,
where :math:`y_i` is the index of the observation at step :math:`i`, and
:math:`\sigma_i` is the index of the permutation at step :math:`i`.
Once the permutations have been selected, the tree should be trimmed to
remove the permutation layers, which is done by this function.
"""
new_tree = []
p = self.perm_idxs[0]
p = p.squeeze()
new_tree.append(p)
for p in self.perm_idxs[1:]:
p = p.squeeze()
for ntp in new_tree:
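                # Follow the already-trimmed ancestor choices ``ntp`` into
                # this layer: index the leading dimensions with a full grid
                # and pick the child reached along that path.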
idx = torch.meshgrid([torch.arange(s) for s in ntp.shape])
p = p[idx + (ntp,)]
new_tree.append(p)
self.perm_idxs = new_tree
def expand_batch(self, data_len):
r"""Adds a dimension of length data_len to each tensor in the tree.
This function is used to expand the tree.
:param int data_len: Length of new dimension.
:return: Same list of tensors, but with a new dimension added to each
tensor.
"""
self.perm_idxs = [b.unsqueeze(-1).expand((-1,)*(len(b.shape)) + (data_len,)) for b in self.perm_idxs]
def prune_perm_tree(self, data_idx):
r"""Prunes the tree after observing data.
Given data indexed by data_idx, this function prunes the tree to remove
the branches that are not relevant to the data.
:param torch.Tensor data_idx: Index of data.
:return: Same list of tensors, but with the branches not relevant to the
data removed.
"""
new_tree = []
for pl in self.perm_idxs[1:]:
new_b = pl[data_idx, ..., torch.arange(data_idx.shape[-1])]
new_b = new_b.movedim(0, -1)
new_tree.append(new_b)
self.perm_idxs = new_tree
| [
"torch.broadcast_tensors",
"torch.broadcast_shapes",
"torch.arange",
"perm_hmm.policies.belief.HMMBeliefState.from_hmm"
] | [((4847, 4880), 'perm_hmm.policies.belief.HMMBeliefState.from_hmm', 'HMMBeliefState.from_hmm', (['self.hmm'], {}), '(self.hmm)\n', (4870, 4880), False, 'from perm_hmm.policies.belief import HMMBeliefState\n'), ((5686, 5740), 'torch.broadcast_shapes', 'torch.broadcast_shapes', (['(length, 1, 1)', 'b.logits.shape'], {}), '((length, 1, 1), b.logits.shape)\n', (5708, 5740), False, 'import torch\n'), ((10247, 10285), 'torch.broadcast_tensors', 'torch.broadcast_tensors', (['idx', 'b.logits'], {}), '(idx, b.logits)\n', (10270, 10285), False, 'import torch\n'), ((12626, 12658), 'torch.arange', 'torch.arange', (['data_idx.shape[-1]'], {}), '(data_idx.shape[-1])\n', (12638, 12658), False, 'import torch\n'), ((11503, 11518), 'torch.arange', 'torch.arange', (['s'], {}), '(s)\n', (11515, 11518), False, 'import torch\n')] |
# All .ui and .so files are added via the package_data keyword because
# setuptools doesn't include them automatically.
from setuptools import setup, find_packages
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name = "xfntr",
version = "0.3.0",
author = "<NAME>",
author_email = "<EMAIL>",
description = "A software that analyzes xfntr data",
long_description = long_description,
long_description_content_type = "text/markdown",
url = "https://github.com/zhul9311/XFNTR.git",
packages = find_packages(),
package_dir = {'':'.'},
package_data = {
'' : ['xr_ref.cpython-37m-darwin.so',
'GUI/*',
'images/*',
'test/*']
},
exclude_package_data = {
'' : ['.git/','.setup.py.swp']
},
classifiers = [
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires = '>=3.6',
install_requires = [
'pyqt5',
'scipy',
'matplotlib',
'lmfit',
'periodictable',
'numba'
],
    entry_points = { # create launcher scripts on the user's PATH
'console_scripts':[
'xfntr1 = xfntr.main:main'
],
'gui_scripts': [
'xfntr = xfntr.main:main'
]
},
)
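# Typical usage from the project root (standard setuptools workflow, shown
# here as a sketch): running ``pip install .`` installs the package and
# exposes the ``xfntr`` GUI entry point and the ``xfntr1`` console entry
# point defined above.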
| [
"setuptools.find_packages"
] | [((581, 596), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (594, 596), False, 'from setuptools import setup, find_packages\n')] |
"""
module contains a function that polls the ukcovid api in order to find up to date
information about coronavirus in the UK
"""
import json
import logging
from uk_covid19 import Cov19API
from requests import get
logging.basicConfig(level=logging.DEBUG, filename='sys.log')
def get_covid() -> str:
""" Polls the ukcovid api in order to find up to date information about Covid-19 in the UK """
with open('config.json') as config_file:
data = json.load(config_file)
filters = [
'areaType=' + data["uk_covid19"][0]["area_type"],
'areaName=' + data["uk_covid19"][0]["area_name"]
]
structure = data["uk_covid19"][0]["structure"]
    api = Cov19API(filters=filters, structure=structure, latest_by="newDeathsByDeathDate")
covid_info = api.get_json()
data = covid_info['data'][0]
date = 'Date of information is ' + str(data['date']) + '.'
new_cases = ' The number of new cases is ' + str(data['newCasesByPublishDate']) + '.'
cum_cases = ' The number of cumulative cases is ' + str(data['cumCasesByPublishDate']) + '.'
new_deaths = ' The number of new deaths is ' + str(data['newDeathsByDeathDate']) + '.'
cum_deaths = ' The number of cumulative deaths is ' + str(data['cumDeathsByDeathDate']) + '. '
return date + new_cases + cum_cases + new_deaths + cum_deaths
def covid_api_checker() -> int:
"""Function finds the HTTP response code when polling the api """
    endpoint = (
        'https://api.coronavirus.data.gov.uk/v1/data?'
        'filters=areaType=nation;areaName=england&'
        'structure={"date":"date","newCases":"newCasesByPublishDate"}'
    )
response = get(endpoint, timeout=10)
if response.status_code >= 400:
logging.warning('HTTP GET request failed, response code is ' + str(response.status_code))
return response.status_code
logging.info('HTTP GET request succeeded')
return response.status_code
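if __name__ == '__main__':
    # A minimal manual check (a sketch; assumes a valid config.json providing
    # the "uk_covid19" settings that get_covid reads above).
    print('HTTP status:', covid_api_checker())
    print(get_covid())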
| [
"logging.basicConfig",
"requests.get",
"uk_covid19.Cov19API",
"json.load",
"logging.info"
] | [((227, 287), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG', 'filename': '"""sys.log"""'}), "(level=logging.DEBUG, filename='sys.log')\n", (246, 287), False, 'import logging\n'), ((696, 781), 'uk_covid19.Cov19API', 'Cov19API', ([], {'filters': 'filters', 'structure': 'structure', 'latest_by': '"""newDeathsByDeathDate"""'}), "(filters=filters, structure=structure, latest_by='newDeathsByDeathDate'\n )\n", (704, 781), False, 'from uk_covid19 import Cov19API\n'), ((1681, 1706), 'requests.get', 'get', (['endpoint'], {'timeout': '(10)'}), '(endpoint, timeout=10)\n', (1684, 1706), False, 'from requests import get\n'), ((1882, 1924), 'logging.info', 'logging.info', (['"""HTTP GET request succeeded"""'], {}), "('HTTP GET request succeeded')\n", (1894, 1924), False, 'import logging\n'), ((472, 494), 'json.load', 'json.load', (['config_file'], {}), '(config_file)\n', (481, 494), False, 'import json\n')] |