code stringlengths 21 1.03M | apis sequence | extract_api stringlengths 74 8.23M |
|---|---|---|
import sys
from google.cloud import vision_v1
from google.cloud.vision_v1 import enums
import io
import json
from google.cloud import storage
import os
def sample_batch_annotate_files(storage_uri):
    # os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = r"C:\Users\user\Desktop\doc_ai\rmi-insights-3e257c9c456c.json"
    """Perform batch file annotation on a PDF stored in GCS.

    Runs DOCUMENT_TEXT_DETECTION over the first three pages of the PDF at
    `storage_uri`, writes the extracted text to a local file, prints the
    stripped lines, and prepares a GCS blob for the JSON output.

    :param storage_uri: "gs://..." URI of the source PDF.
    """
    mime_type = "application/pdf"
    client = vision_v1.ImageAnnotatorClient()
    gcs_source = {"uri": storage_uri}
    input_config = {"gcs_source": gcs_source, "mime_type": mime_type}
    features = [{"type": enums.Feature.Type.DOCUMENT_TEXT_DETECTION}]
    # The service can process up to 5 pages per document file.
    # Here we specify the first, second, and third page of the document to be
    # processed.
    pages = [1, 2, 3]
    requests = [{"input_config": input_config, "features": features, "pages": pages}]
    response = client.batch_annotate_files(requests)
    out_path = "/home/srinidhi/angular/uploads/visionoutput.txt"
    # Write extracted text; context managers guarantee the handles are closed
    # (the old code never closed the read handle).
    with open(out_path, "w+") as out_file:
        for image_response in response.responses[0].responses:
            out_file.write(image_response.full_text_annotation.text)
    # Reading it line by line
    with open(out_path, "r") as in_file:
        line = in_file.readlines()
    # Bug fix: rstrip('\\n') stripped literal backslash/'n' characters, not
    # newlines; strip the newline first, then any remaining trailing whitespace.
    line = [x.rstrip('\n').rstrip() for x in line]
    print(line)
    # Storing in a dictionary
    dict_output = {}
    dict_output['data'] = line
    # Uploading file to bucket
    # Filename is the name you want to store in bucket
    storage_client = storage.Client()
    bucket = storage_client.get_bucket('sample_pdf')
    filename = "visionoutput.json"
    blob = bucket.blob(filename)
    # NOTE(review): the blob is created but never uploaded -- a call such as
    # blob.upload_from_string(json.dumps(dict_output)) appears to be missing.
    # Left unchanged to preserve existing behaviour.
    # Removing Internal memory
    # os.remove("visionoutput.txt")
    # os.remove("visionoutput.json")
if __name__ == '__main__':
    # First CLI argument: GCS URI ("gs://...") of the PDF to annotate.
    file_path = sys.argv[1]
sample_batch_annotate_files(file_path) | [
"google.cloud.vision_v1.ImageAnnotatorClient",
"google.cloud.storage.Client"
] | [((421, 453), 'google.cloud.vision_v1.ImageAnnotatorClient', 'vision_v1.ImageAnnotatorClient', ([], {}), '()\n', (451, 453), False, 'from google.cloud import vision_v1\n'), ((1631, 1647), 'google.cloud.storage.Client', 'storage.Client', ([], {}), '()\n', (1645, 1647), False, 'from google.cloud import storage\n')] |
# Based on ULN2003 driver lib: https://github.com/zhcong/ULN2003-for-ESP32
from utime import sleep_ms
from machine import Pin
from LogicalPins import physical_pin
STEPPER_INST = None
class StepperULN2003:
    """Driver for a 28BYJ-48 geared stepper motor behind a ULN2003 board.

    Energises the four coil pins in sequence (full- or half-step) and
    de-energises them again after each move so the motor does not hold.
    """
    FULL_ROTATION = int(4075.7728395061727 / 8) # http://www.jangeox.be/2013/10/stepper-motor-28byj-48_25.html
    def __init__(self, mode):
        """Select the coil firing sequence.

        :param mode: 'FULL' selects the 4-phase full-step table (~508
                     steps/rev); any other value selects the 8-phase
                     half-step table (~1016 steps/rev).
        """
        # Mode: FULL / HALF
        if mode == 'FULL':
            # FULL STEP - ~508
            self.mode = [[1, 0, 1, 0],
                     [0, 1, 1, 0],
                     [0, 1, 0, 1],
                     [1, 0, 0, 1]]
            # Full steps deliver more torque, so a longer per-step delay is used.
            self.delay = 10
        else:
            # HALF STEP - ~1016
            self.mode = [[0, 0, 0, 1],
                     [0, 0, 1, 1],
                     [0, 0, 1, 0],
                     [0, 1, 1, 0],
                     [0, 1, 0, 0],
                     [1, 1, 0, 0],
                     [1, 0, 0, 0],
                     [1, 0, 0, 1]]
            self.delay = 2
        # Init stepper pins
        self.pin1 = Pin(physical_pin('stppr_1'), Pin.OUT)
        self.pin2 = Pin(physical_pin('stppr_2'), Pin.OUT)
        self.pin3 = Pin(physical_pin('stppr_3'), Pin.OUT)
        self.pin4 = Pin(physical_pin('stppr_4'), Pin.OUT)
        # Initialize all value to 0 - "OFF"
        self.reset()
    def step(self, count, direction=1):
        """Rotate count steps. direction = -1 means backwards"""
        # A negative count is normalised to a positive count with reversed direction.
        if count < 0:
            direction = -1
            count = -count
        for x in range(count):
            # Walk the coil table forwards or backwards depending on direction.
            for bit in self.mode[::direction]:
                self.pin1(bit[0])
                self.pin2(bit[1])
                self.pin3(bit[2])
                self.pin4(bit[3])
                sleep_ms(self.delay)
        self.reset()
    def angle(self, r, direction=1):
        """Rotate `r` degrees; negative `r` reverses the direction."""
        if r < 0:
            r *= -1
            direction = -1
        self.step(round(self.FULL_ROTATION * r / 360), direction)
    def reset(self):
        """De-energise all coils."""
        # Reset to 0, no holding, these are geared, you can't move them
        self.pin1(0)
        self.pin2(0)
        self.pin3(0)
        self.pin4(0)
    @property
    def speed_ms(self):
        # Per-step delay in milliseconds (smaller = faster).
        return self.delay
    @speed_ms.setter
    def speed_ms(self, ms):
        # Clamp to a safe minimum delay so the motor does not stall.
        # HALF STEP - delay check
        if len(self.mode) > 4 and ms < 1:
            ms = 1
        # FULL STEP - delay check
        elif ms < 10:
            ms = 10
        self.delay = ms
def __init_stepper(mode='HALF'):
    """Lazily construct and cache the module-wide stepper driver."""
    global STEPPER_INST
    if STEPPER_INST is not None:
        return STEPPER_INST
    STEPPER_INST = StepperULN2003(mode)
    return STEPPER_INST
def load_n_init(mode="HALF"):
    """Eagerly create the stepper singleton (mode: "HALF" or "FULL")."""
    __init_stepper(mode)
def angle(dg, speed=None):
    """
    :param dg: +/- 0-360 degree
    :param speed: wait ms
    :return: Info
    """
    driver = __init_stepper()
    if speed:
        driver.speed_ms = speed
    driver.angle(dg)
    return "Move {} degree ({} ms)".format(dg, driver.speed_ms)
def step(st, speed=None):
    """Move the motor by `st` raw steps; `speed` is the per-step wait in ms."""
    driver = __init_stepper()
    if speed:
        driver.speed_ms = speed
    driver.step(st)
    return "Move {} step ({} ms)".format(st, driver.speed_ms)
def standby():
    """De-energise all coils so the motor can rest."""
    __init_stepper().reset()
    return "Standby"
#######################
# LM helper functions #
#######################
def pinmap():
    # Return module used PIN mapping
    return {name: physical_pin(name)
            for name in ('stppr_1', 'stppr_2', 'stppr_3', 'stppr_4')}
def help():
    """Return usage hints for the stepper load module.

    Bug fixed: a missing comma after 'pinmap' made Python concatenate the
    adjacent string literals, fusing 'pinmap' and the Info line into one
    tuple element.
    """
    return ('angle dg=+/-360 speed=<ms>',
            'step st=+/-2 speed=<ms>',
            'standby',
            'load_n_init mode=<"HALF"/"FULL">',
            'pinmap',
            'Info: stepper: 28byj-48 driver: ULN2003')
| [
"utime.sleep_ms",
"LogicalPins.physical_pin"
] | [((3304, 3327), 'LogicalPins.physical_pin', 'physical_pin', (['"""stppr_1"""'], {}), "('stppr_1')\n", (3316, 3327), False, 'from LogicalPins import physical_pin\n'), ((3340, 3363), 'LogicalPins.physical_pin', 'physical_pin', (['"""stppr_2"""'], {}), "('stppr_2')\n", (3352, 3363), False, 'from LogicalPins import physical_pin\n'), ((3388, 3411), 'LogicalPins.physical_pin', 'physical_pin', (['"""stppr_3"""'], {}), "('stppr_3')\n", (3400, 3411), False, 'from LogicalPins import physical_pin\n'), ((3424, 3447), 'LogicalPins.physical_pin', 'physical_pin', (['"""stppr_4"""'], {}), "('stppr_4')\n", (3436, 3447), False, 'from LogicalPins import physical_pin\n'), ((1058, 1081), 'LogicalPins.physical_pin', 'physical_pin', (['"""stppr_1"""'], {}), "('stppr_1')\n", (1070, 1081), False, 'from LogicalPins import physical_pin\n'), ((1116, 1139), 'LogicalPins.physical_pin', 'physical_pin', (['"""stppr_2"""'], {}), "('stppr_2')\n", (1128, 1139), False, 'from LogicalPins import physical_pin\n'), ((1174, 1197), 'LogicalPins.physical_pin', 'physical_pin', (['"""stppr_3"""'], {}), "('stppr_3')\n", (1186, 1197), False, 'from LogicalPins import physical_pin\n'), ((1232, 1255), 'LogicalPins.physical_pin', 'physical_pin', (['"""stppr_4"""'], {}), "('stppr_4')\n", (1244, 1255), False, 'from LogicalPins import physical_pin\n'), ((1743, 1763), 'utime.sleep_ms', 'sleep_ms', (['self.delay'], {}), '(self.delay)\n', (1751, 1763), False, 'from utime import sleep_ms\n')] |
"""
coding:utf-8
file: setting_window.py
@author: jiangwei
@contact: <EMAIL>
@time: 2020/6/27 23:07
@desc:
"""
import sys
from ui.setting_window import Ui_Form
from PyQt5.QtWidgets import QWidget, QApplication
from util.common_util import SYS_STYLE
class SettingWindow(Ui_Form, QWidget):
    """Settings dialog: builds the generated UI, applies the app stylesheet,
    and wires the save button."""
    def __init__(self):
        super(SettingWindow, self).__init__()
        self.setupUi(self)
        self.setStyleSheet(SYS_STYLE)
        self.init_ui()
        self.init_slot()
        self.init_data()
    def init_data(self):
        # Placeholder: no persisted settings are loaded yet.
        pass
    def init_ui(self):
        # 'Aqua' is a style class expected to be defined in SYS_STYLE.
        self.pushButton.setProperty('class', 'Aqua')
        self.pushButton.setMinimumWidth(70)
        self.setStyleSheet(SYS_STYLE)
    def init_slot(self):
        # Save button triggers save_setting.
        self.pushButton.clicked.connect(self.save_setting)
    def save_setting(self):
        # Placeholder: persisting settings is not implemented yet.
        pass
if __name__ == '__main__':
    # Standalone launch for manually testing the settings dialog.
    app = QApplication(sys.argv)
    win = SettingWindow()
    win.show()
    sys.exit(app.exec_())
| [
"PyQt5.QtWidgets.QApplication"
] | [((863, 885), 'PyQt5.QtWidgets.QApplication', 'QApplication', (['sys.argv'], {}), '(sys.argv)\n', (875, 885), False, 'from PyQt5.QtWidgets import QWidget, QApplication\n')] |
import os
from testtools import TestCase
from testtools.matchers import Contains
from . import makeprefs
from mock import Mock, patch
from StringIO import StringIO
from twisted.internet import defer
class CommandTest(TestCase):
    """Tests for lacli.main.LaCommand: construction, dispatch, and login flow.

    The API factory is mocked throughout; async results are simulated with
    twisted deferreds.
    """
    def setUp(self):
        super(CommandTest, self).setUp()
        self.prefs = makeprefs()
        self.home = os.path.join('t', 'data', 'home')
    def tearDown(self):
        super(CommandTest, self).tearDown()
    def _makeit(self, *args, **kwargs):
        # Deferred import keeps lacli's import side effects out of collection.
        from lacli.main import LaCommand
        return LaCommand(*args, **kwargs)
    def test_command(self):
        # Smoke test: construction succeeds with mocked dependencies.
        assert self._makeit(Mock(), makeprefs(Mock()))
    @patch('sys.stdin', new_callable=StringIO)
    def test_loop_none(self, mock_stdin):
        # cmdloop over empty stdin should return cleanly.
        factory = Mock()
        factory.return_value.async_account = defer.succeed(
            {'email': 'foo'})
        cli = self._makeit(Mock(), makeprefs(factory))
        cli.cmdloop()
    @patch('sys.stdout', new_callable=StringIO)
    def test_dispatch(self, stdout):
        # Dispatching an unknown command reports it on stdout.
        cli = self._makeit(Mock(), makeprefs(Mock()))
        cli.dispatch('foo', [])
        self.assertThat(stdout.getvalue(),
                        Contains('Unrecognized command: foo'))
    @patch('sys.stdout', new_callable=StringIO)
    def test_dispatch_foo(self, stdout):
        # A known subcommand is converted via makecmd and executed via onecmd.
        cli = self._makeit(Mock(), makeprefs(Mock()))
        with patch.object(cli, 'foo', create=True) as foo:
            foo.__doc__ = "Usage: lacli foo"
            foo.makecmd.return_value = 'bar'
            cli.dispatch('foo', [])
            self.assertEqual('', stdout.getvalue())
            foo.onecmd.assert_called_with('bar')
    @patch('sys.stdout', new_callable=StringIO)
    def test_dispatch_login(self, stdout):
        # 'login' dispatched with no args succeeds against the mocked account.
        factory = Mock()
        factory.return_value.async_account = defer.succeed(
            {'email': 'foo'})
        cli = self._makeit(Mock(), makeprefs(factory))
        cli.dispatch('login', [])
        self.assertThat(stdout.getvalue(),
                        Contains('authentication succesfull'))
    @patch('sys.stdout', new_callable=StringIO)
    def test_do_login(self, stdout):
        factory = Mock()
        factory.return_value.async_account = defer.succeed(
            {'email': 'foo'})
        cli = self._makeit(Mock(), makeprefs(factory))
        cli.onecmd('login')
        self.assertThat(stdout.getvalue(),
                        Contains('authentication succesfull'))
    @patch('sys.stdout', new_callable=StringIO)
    def test_do_login_with_creds(self, stdout):
        factory = Mock()
        factory.return_value.async_account = defer.succeed(
            {'email': 'foo'})
        cli = self._makeit(Mock(), makeprefs(factory))
        cli.onecmd('login username password')
        self.assertThat(stdout.getvalue(),
                        Contains('authentication succesfull'))
    @patch('sys.stdout', new_callable=StringIO)
    def test_do_login_with_bad_creds(self, stdout):
        # A failing deferred should surface as an authentication failure message.
        factory = Mock()
        factory.return_value.async_account = defer.fail(
            Exception())
        cli = self._makeit(Mock(), makeprefs(factory))
        cli.onecmd('login username password')
        self.assertThat(stdout.getvalue(),
                        Contains('authentication failed'))
| [
"os.path.join",
"mock.patch.object",
"testtools.matchers.Contains",
"twisted.internet.defer.succeed",
"mock.patch",
"mock.Mock",
"lacli.main.LaCommand"
] | [((663, 704), 'mock.patch', 'patch', (['"""sys.stdin"""'], {'new_callable': 'StringIO'}), "('sys.stdin', new_callable=StringIO)\n", (668, 704), False, 'from mock import Mock, patch\n'), ((945, 987), 'mock.patch', 'patch', (['"""sys.stdout"""'], {'new_callable': 'StringIO'}), "('sys.stdout', new_callable=StringIO)\n", (950, 987), False, 'from mock import Mock, patch\n'), ((1223, 1265), 'mock.patch', 'patch', (['"""sys.stdout"""'], {'new_callable': 'StringIO'}), "('sys.stdout', new_callable=StringIO)\n", (1228, 1265), False, 'from mock import Mock, patch\n'), ((1653, 1695), 'mock.patch', 'patch', (['"""sys.stdout"""'], {'new_callable': 'StringIO'}), "('sys.stdout', new_callable=StringIO)\n", (1658, 1695), False, 'from mock import Mock, patch\n'), ((2056, 2098), 'mock.patch', 'patch', (['"""sys.stdout"""'], {'new_callable': 'StringIO'}), "('sys.stdout', new_callable=StringIO)\n", (2061, 2098), False, 'from mock import Mock, patch\n'), ((2446, 2488), 'mock.patch', 'patch', (['"""sys.stdout"""'], {'new_callable': 'StringIO'}), "('sys.stdout', new_callable=StringIO)\n", (2451, 2488), False, 'from mock import Mock, patch\n'), ((2865, 2907), 'mock.patch', 'patch', (['"""sys.stdout"""'], {'new_callable': 'StringIO'}), "('sys.stdout', new_callable=StringIO)\n", (2870, 2907), False, 'from mock import Mock, patch\n'), ((346, 379), 'os.path.join', 'os.path.join', (['"""t"""', '"""data"""', '"""home"""'], {}), "('t', 'data', 'home')\n", (358, 379), False, 'import os\n'), ((546, 572), 'lacli.main.LaCommand', 'LaCommand', (['*args'], {}), '(*args, **kwargs)\n', (555, 572), False, 'from lacli.main import LaCommand\n'), ((765, 771), 'mock.Mock', 'Mock', ([], {}), '()\n', (769, 771), False, 'from mock import Mock, patch\n'), ((817, 848), 'twisted.internet.defer.succeed', 'defer.succeed', (["{'email': 'foo'}"], {}), "({'email': 'foo'})\n", (830, 848), False, 'from twisted.internet import defer\n'), ((1757, 1763), 'mock.Mock', 'Mock', ([], {}), '()\n', (1761, 1763), False, 'from 
mock import Mock, patch\n'), ((1809, 1840), 'twisted.internet.defer.succeed', 'defer.succeed', (["{'email': 'foo'}"], {}), "({'email': 'foo'})\n", (1822, 1840), False, 'from twisted.internet import defer\n'), ((2154, 2160), 'mock.Mock', 'Mock', ([], {}), '()\n', (2158, 2160), False, 'from mock import Mock, patch\n'), ((2206, 2237), 'twisted.internet.defer.succeed', 'defer.succeed', (["{'email': 'foo'}"], {}), "({'email': 'foo'})\n", (2219, 2237), False, 'from twisted.internet import defer\n'), ((2555, 2561), 'mock.Mock', 'Mock', ([], {}), '()\n', (2559, 2561), False, 'from mock import Mock, patch\n'), ((2607, 2638), 'twisted.internet.defer.succeed', 'defer.succeed', (["{'email': 'foo'}"], {}), "({'email': 'foo'})\n", (2620, 2638), False, 'from twisted.internet import defer\n'), ((2978, 2984), 'mock.Mock', 'Mock', ([], {}), '()\n', (2982, 2984), False, 'from mock import Mock, patch\n'), ((630, 636), 'mock.Mock', 'Mock', ([], {}), '()\n', (634, 636), False, 'from mock import Mock, patch\n'), ((889, 895), 'mock.Mock', 'Mock', ([], {}), '()\n', (893, 895), False, 'from mock import Mock, patch\n'), ((1052, 1058), 'mock.Mock', 'Mock', ([], {}), '()\n', (1056, 1058), False, 'from mock import Mock, patch\n'), ((1178, 1215), 'testtools.matchers.Contains', 'Contains', (['"""Unrecognized command: foo"""'], {}), "('Unrecognized command: foo')\n", (1186, 1215), False, 'from testtools.matchers import Contains\n'), ((1334, 1340), 'mock.Mock', 'Mock', ([], {}), '()\n', (1338, 1340), False, 'from mock import Mock, patch\n'), ((1374, 1411), 'mock.patch.object', 'patch.object', (['cli', '"""foo"""'], {'create': '(True)'}), "(cli, 'foo', create=True)\n", (1386, 1411), False, 'from mock import Mock, patch\n'), ((1881, 1887), 'mock.Mock', 'Mock', ([], {}), '()\n', (1885, 1887), False, 'from mock import Mock, patch\n'), ((2011, 2048), 'testtools.matchers.Contains', 'Contains', (['"""authentication succesfull"""'], {}), "('authentication succesfull')\n", (2019, 2048), False, 'from 
testtools.matchers import Contains\n'), ((2278, 2284), 'mock.Mock', 'Mock', ([], {}), '()\n', (2282, 2284), False, 'from mock import Mock, patch\n'), ((2401, 2438), 'testtools.matchers.Contains', 'Contains', (['"""authentication succesfull"""'], {}), "('authentication succesfull')\n", (2409, 2438), False, 'from testtools.matchers import Contains\n'), ((2679, 2685), 'mock.Mock', 'Mock', ([], {}), '()\n', (2683, 2685), False, 'from mock import Mock, patch\n'), ((2820, 2857), 'testtools.matchers.Contains', 'Contains', (['"""authentication succesfull"""'], {}), "('authentication succesfull')\n", (2828, 2857), False, 'from testtools.matchers import Contains\n'), ((3095, 3101), 'mock.Mock', 'Mock', ([], {}), '()\n', (3099, 3101), False, 'from mock import Mock, patch\n'), ((3236, 3269), 'testtools.matchers.Contains', 'Contains', (['"""authentication failed"""'], {}), "('authentication failed')\n", (3244, 3269), False, 'from testtools.matchers import Contains\n'), ((648, 654), 'mock.Mock', 'Mock', ([], {}), '()\n', (652, 654), False, 'from mock import Mock, patch\n'), ((1070, 1076), 'mock.Mock', 'Mock', ([], {}), '()\n', (1074, 1076), False, 'from mock import Mock, patch\n'), ((1352, 1358), 'mock.Mock', 'Mock', ([], {}), '()\n', (1356, 1358), False, 'from mock import Mock, patch\n')] |
import pytest
from hyper_prompt.theme import BasicTheme
@pytest.mark.parametrize(
    ("test", "result"),
    [
        ("RESET", BasicTheme.RESET),
        ("TEST_FG", BasicTheme.FG),
        ("TEST_BG", BasicTheme.BG),
    ],
)
def test_get_key(test, result):
    # An empty configuration should fall back to the theme defaults.
    theme = BasicTheme({})
    assert theme.get(test) == result
| [
"pytest.mark.parametrize",
"hyper_prompt.theme.BasicTheme"
] | [((59, 190), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""test, result"""', "[('RESET', BasicTheme.RESET), ('TEST_FG', BasicTheme.FG), ('TEST_BG',\n BasicTheme.BG)]"], {}), "('test, result', [('RESET', BasicTheme.RESET), (\n 'TEST_FG', BasicTheme.FG), ('TEST_BG', BasicTheme.BG)])\n", (82, 190), False, 'import pytest\n'), ((269, 283), 'hyper_prompt.theme.BasicTheme', 'BasicTheme', (['{}'], {}), '({})\n', (279, 283), False, 'from hyper_prompt.theme import BasicTheme\n')] |
#setup cython code
from setuptools import setup, find_packages
from Cython.Build import cythonize
import numpy
import os
import codecs
def read(rel_path):
    """Return the text of `rel_path`, resolved relative to this file's directory."""
    base_dir = os.path.abspath(os.path.dirname(__file__))
    with codecs.open(os.path.join(base_dir, rel_path), 'r') as handle:
        return handle.read()
def get_version(rel_path):
    """Extract the __version__ string from a source file without importing it."""
    for source_line in read(rel_path).splitlines():
        if not source_line.startswith('__version__'):
            continue
        quote = '"' if '"' in source_line else "'"
        return source_line.split(quote)[1]
    raise RuntimeError("Unable to find version string.")
# Package metadata; Cython extensions are compiled for the interpolators.
setup(
    name="LoopStructural",
    install_requires=[
        # Runtime dependencies are currently commented out (managed elsewhere):
        # 'Cython',
        # 'numpy',
        # 'pandas',
        # 'scipy',
        # 'matplotlib',
        # # 'lavavu',
        # 'scikit-image',
        # 'scikit-learn'
    ],
    # Version is read from the package __init__ without importing it.
    version=get_version("LoopStructural/__init__.py"),
    packages=find_packages(),
    ext_modules=cythonize("LoopStructural/interpolators/cython/*.pyx",compiler_directives={"language_level": "3"}),
    include_dirs=[numpy.get_include()],
    include_package_data=True,
    package_data={'LoopStructural':['datasets/data/*.csv','datasets/data/*.txt']},
)
| [
"os.path.join",
"os.path.dirname",
"setuptools.find_packages",
"numpy.get_include",
"Cython.Build.cythonize"
] | [((182, 207), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (197, 207), False, 'import os\n'), ((812, 827), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (825, 827), False, 'from setuptools import setup, find_packages\n'), ((842, 946), 'Cython.Build.cythonize', 'cythonize', (['"""LoopStructural/interpolators/cython/*.pyx"""'], {'compiler_directives': "{'language_level': '3'}"}), "('LoopStructural/interpolators/cython/*.pyx', compiler_directives=\n {'language_level': '3'})\n", (851, 946), False, 'from Cython.Build import cythonize\n'), ((230, 258), 'os.path.join', 'os.path.join', (['here', 'rel_path'], {}), '(here, rel_path)\n', (242, 258), False, 'import os\n'), ((957, 976), 'numpy.get_include', 'numpy.get_include', ([], {}), '()\n', (974, 976), False, 'import numpy\n')] |
import torch
import torch.nn as nn
class NCSNSampler(nn.Module):
    """Annealed Langevin dynamics sampler driven by an NCSN score network."""

    def __init__(self, score, sigmas, alphas, n_steps_each: int):
        super().__init__()
        self.score = score            # score network s(x, noise-level label)
        self.sigmas = sigmas          # sequence of noise levels
        self.alphas = alphas          # per-level Langevin step sizes
        self.n_steps_each = n_steps_each

    def forward(self, x_T: torch.Tensor):
        x = x_T
        for level, _sigma in enumerate(self.sigmas):
            # NOTE(review): labels are shaped like a single sample (x[0]);
            # confirm the score network expects that rather than (batch,).
            labels = (level * torch.ones_like(x[0])).long()
            alpha = self.alphas[level]
            for _ in range(self.n_steps_each):
                grad = self.score(x, labels)
                noise = torch.randn_like(x)
                x = x + alpha * grad / 2 + alpha * noise
        return x
| [
"torch.randn_like",
"torch.ones_like"
] | [((405, 428), 'torch.ones_like', 'torch.ones_like', (['x_T[0]'], {}), '(x_T[0])\n', (420, 428), False, 'import torch\n'), ((576, 597), 'torch.randn_like', 'torch.randn_like', (['x_T'], {}), '(x_T)\n', (592, 597), False, 'import torch\n')] |
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from datetime import datetime
def calculate_profit_from_columns(row):
    """Signed profit for a trade row: +Difference on a buy, -Difference on a sell, else 0."""
    if row['Buy']:
        return row['Difference']
    if row['Sell']:
        return -row['Difference']
    return 0
def convert_date(row):
    """Parse the row's 'Datetime' string ('%Y-%m-%d %H:%M:%S') into a datetime."""
    timestamp_text = row['Datetime']
    return datetime.strptime(timestamp_text, '%Y-%m-%d %H:%M:%S')
def get_day_of_week(row):
    """Weekday index (Monday=0) of the row's already-parsed 'Datetime'."""
    return row['Datetime'].weekday()
def get_hour(row):
    """Hour component of the row's already-parsed 'Datetime'."""
    return row['Datetime'].hour
def get_minute(row):
    """Minute component of the row's already-parsed 'Datetime'."""
    return row['Datetime'].minute
def filter_dataframe_by_days(dataframe, days):
    """Rows whose 'Weekday' value is one of `days`."""
    mask = dataframe['Weekday'].isin(days)
    return dataframe.loc[mask]
def filter_by_hours(dataframe, hours):
    """Rows whose 'Hour' value is one of `hours`."""
    mask = dataframe['Hour'].isin(hours)
    return dataframe.loc[mask]
def filter_by_minutes(dataframe, minutes):
    """Rows whose 'Minute' value is one of `minutes`."""
    mask = dataframe['Minute'].isin(minutes)
    return dataframe.loc[mask]
def filter_by_buy_or_sell(dataframe, action):
    """Rows where the given action column ('Buy' or 'Sell') equals 1."""
    return dataframe[dataframe[action] == 1]
def get_header_row(dataframe):
    """Comma-joined column names, suitable as a CSV header line."""
    return ','.join(dataframe.columns)
def append_moving_wealth_row(dataframe, start_wealth, bid_ask_spread=0, leverage=1, wealth_column_name='Wealth'):
    """Append a running-wealth column computed from per-row trade profits.

    Wealth compounds by `leverage * (Profit - bid_ask_spread)` on each row
    that has a Buy or Sell flag; the first row always carries `start_wealth`.
    When a spread is supplied under the default column name, the column is
    renamed 'Adjusted Wealth'.
    """
    if bid_ask_spread and wealth_column_name == 'Wealth':
        wealth_column_name = 'Adjusted Wealth'
    rows = dataframe.reset_index()
    wealth = [start_wealth]
    for idx in range(1, len(rows)):
        current = wealth[-1]
        if rows.loc[idx, 'Buy'] or rows.loc[idx, 'Sell']:
            current = current * (1 + leverage * (rows.loc[idx, 'Profit'] - bid_ask_spread))
        wealth.append(current)
    dataframe[wealth_column_name] = np.array(wealth)
def plot_dataframe(dataframe, title, columns, colours=['blue', 'red', 'green']):
    """Plot the given columns against 'Datetime' on a single shared axes."""
    # NOTE: the mutable default is kept for interface compatibility; it is
    # never mutated here.
    fig, ax = plt.subplots()
    for idx, column in enumerate(columns):
        ax = dataframe.plot(ax=ax, x='Datetime', y=column, c=colours[idx], title=title)
def print_analysis(dataframe, filter_name):
    """Print trade-count and profit summary statistics for a filtered frame."""
    trade_count = dataframe['Buy'].sum() + dataframe['Sell'].sum()
    profit_sum = dataframe.sum()['Profit']
    profit_mean = dataframe.mean()['Profit']
    best_trade = dataframe.max()['Profit']
    worst_trade = dataframe.min()['Profit']
    divider = '-------------------------------------------'
    print(f'\n\nSummary for data filtered by {filter_name}')
    print(divider)
    print(f'Total Trades Made: {trade_count}')
    print(f'Total Profit Made: {profit_sum}')
    print(f'Average Profit Made Per Trade: {profit_mean}')
    print(f'Largest Gain: {best_trade}')
    print(f'Largest Loss: {worst_trade}')
    print(divider)
def add_columns_to_base_data(base_data, start_wealth=10000):
    """Derive Profit/Datetime/Weekday/Hour/Minute columns in place.

    'Datetime' is parsed first so the Weekday/Hour/Minute extractors can
    operate on datetime objects. `start_wealth` is unused here; kept for
    backward compatibility with existing callers.
    """
    base_data['Profit'] = base_data.apply(calculate_profit_from_columns, axis=1)
    base_data['Datetime'] = base_data.apply(convert_date, axis=1)
    base_data['Weekday'] = base_data.apply(get_day_of_week, axis=1)
    base_data['Hour'] = base_data.apply(get_hour, axis=1)
    base_data['Minute'] = base_data.apply(get_minute, axis=1)
"numpy.array",
"matplotlib.pyplot.subplots",
"datetime.datetime.strptime"
] | [((326, 381), 'datetime.datetime.strptime', 'datetime.strptime', (["row['Datetime']", '"""%Y-%m-%d %H:%M:%S"""'], {}), "(row['Datetime'], '%Y-%m-%d %H:%M:%S')\n", (343, 381), False, 'from datetime import datetime\n'), ((1665, 1687), 'numpy.array', 'np.array', (['wealth_array'], {}), '(wealth_array)\n', (1673, 1687), True, 'import numpy as np\n'), ((1784, 1798), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (1796, 1798), True, 'import matplotlib.pyplot as plt\n')] |
# -*- coding: utf-8 -*-
"""
Created on Mon Sep 5 16:07:58 2016
@author: Administrator
"""
import unittest
from app import create_app,db
from app.models import User,Role
from flask import url_for
import re
class FlaskClientTestCase(unittest.TestCase):
    """End-to-end registration/login/logout tests using the Flask test client."""
    def setUp(self):
        # Fresh app and database per test.
        self.app = create_app('testing')
        self.app_context = self.app.app_context()
        self.app_context.push()
        db.create_all()
        Role.insert_roles()
        # NOTE(review): Flask's test_client parameter is `use_cookies`, not
        # `use_cookie` -- confirm this kwarg is honoured.
        self.client = self.app.test_client(use_cookie=True)
    def tearDown(self):
        db.session.remove()
        db.drop_all()
        self.app_context.pop()
    def test_home_page(self):
        # Anonymous visitors are greeted as 'Stranger'.
        response = self.client.get(url_for('main.index'))
        self.assertTrue('Stranger' in response.get_data(as_text=True))
    def test_register_and_login(self):
        # Register -> redirect, login -> unconfirmed banner, confirm via
        # token -> confirmation message, then logout.
        response = self.client.post(url_for('auth.register'),data={'email':'<EMAIL>','username':'john','password':'<PASSWORD>','password2':'<PASSWORD>'})
        self.assertTrue(response.status_code == 302)
        response = self.client.post(url_for('auth.login'),data={'email':'<EMAIL>','password':'<PASSWORD>'},follow_redirects=True)
        data = response.get_data(as_text=True)
        self.assertTrue(re.search('Hello,\s+john!',data))
        self.assertTrue('You have not confirmed your account yet' in data)
        user = User.query.filter_by(email='<EMAIL>').first()
        token = user.generate_confirmation_token()
        response = self.client.get(url_for('auth.confirm',token=token),follow_redirects=True)
        data = response.get_data(as_text=True)
        self.assertTrue('Your have confirmed your account' in data)
        response = self.client.get(url_for('auth.logout'),follow_redirects=True)
        data = response.get_data(as_text=True)
        self.assertTrue('You have been logged out' in data)
"flask.url_for",
"app.models.Role.insert_roles",
"app.create_app",
"app.db.create_all",
"app.models.User.query.filter_by",
"re.search",
"app.db.drop_all",
"app.db.session.remove"
] | [((310, 331), 'app.create_app', 'create_app', (['"""testing"""'], {}), "('testing')\n", (320, 331), False, 'from app import create_app, db\n'), ((425, 440), 'app.db.create_all', 'db.create_all', ([], {}), '()\n', (438, 440), False, 'from app import create_app, db\n'), ((450, 469), 'app.models.Role.insert_roles', 'Role.insert_roles', ([], {}), '()\n', (467, 469), False, 'from app.models import User, Role\n'), ((575, 594), 'app.db.session.remove', 'db.session.remove', ([], {}), '()\n', (592, 594), False, 'from app import create_app, db\n'), ((604, 617), 'app.db.drop_all', 'db.drop_all', ([], {}), '()\n', (615, 617), False, 'from app import create_app, db\n'), ((727, 748), 'flask.url_for', 'url_for', (['"""main.index"""'], {}), "('main.index')\n", (734, 748), False, 'from flask import url_for\n'), ((909, 933), 'flask.url_for', 'url_for', (['"""auth.register"""'], {}), "('auth.register')\n", (916, 933), False, 'from flask import url_for\n'), ((1128, 1149), 'flask.url_for', 'url_for', (['"""auth.login"""'], {}), "('auth.login')\n", (1135, 1149), False, 'from flask import url_for\n'), ((1295, 1329), 're.search', 're.search', (['"""Hello,\\\\s+john!"""', 'data'], {}), "('Hello,\\\\s+john!', data)\n", (1304, 1329), False, 'import re\n'), ((1565, 1601), 'flask.url_for', 'url_for', (['"""auth.confirm"""'], {'token': 'token'}), "('auth.confirm', token=token)\n", (1572, 1601), False, 'from flask import url_for\n'), ((1787, 1809), 'flask.url_for', 'url_for', (['"""auth.logout"""'], {}), "('auth.logout')\n", (1794, 1809), False, 'from flask import url_for\n'), ((1431, 1468), 'app.models.User.query.filter_by', 'User.query.filter_by', ([], {'email': '"""<EMAIL>"""'}), "(email='<EMAIL>')\n", (1451, 1468), False, 'from app.models import User, Role\n')] |
import cv2
import imagezmq
import simplejpeg
# Instantiate and provide the first publisher address
# Subscribe to a PUB/SUB image stream (REQ_REP=False selects SUB mode).
image_hub = imagezmq.ImageHub(open_port="tcp://192.168.12.29:5555", REQ_REP=False)
# image_hub.connect('tcp://192.168.86.38:5555')  # second publisher address
while True:  # show received images
    # sender_name, image = image_hub.recv_image()
    sender_name, jpg_buffer = image_hub.recv_jpg()
    # Decode the JPEG payload back into a BGR frame for OpenCV display.
    image = simplejpeg.decode_jpeg(jpg_buffer, colorspace="BGR")
    cv2.imshow(sender_name, image)
    key = cv2.waitKey(1) & 0xFF  # mask to 8 bits for cross-platform key codes
    if key in [113, 27]:  # 'q' or Esc quits the viewer
        break
| [
"cv2.imshow",
"simplejpeg.decode_jpeg",
"cv2.waitKey",
"imagezmq.ImageHub"
] | [((112, 182), 'imagezmq.ImageHub', 'imagezmq.ImageHub', ([], {'open_port': '"""tcp://192.168.12.29:5555"""', 'REQ_REP': '(False)'}), "(open_port='tcp://192.168.12.29:5555', REQ_REP=False)\n", (129, 182), False, 'import imagezmq\n'), ((411, 463), 'simplejpeg.decode_jpeg', 'simplejpeg.decode_jpeg', (['jpg_buffer'], {'colorspace': '"""BGR"""'}), "(jpg_buffer, colorspace='BGR')\n", (433, 463), False, 'import simplejpeg\n'), ((468, 498), 'cv2.imshow', 'cv2.imshow', (['sender_name', 'image'], {}), '(sender_name, image)\n', (478, 498), False, 'import cv2\n'), ((509, 523), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (520, 523), False, 'import cv2\n')] |
from __future__ import division
import gzip
import matplotlib.pyplot as plt
import numpy as np
import random
import math
def Randomize_Weights(weight_vector: np.ndarray):
    """Fill `weight_vector` in place with random values in [-1.0, 1.0].

    Bug fixed: the old code assigned a single random scalar to each row
    slice, which broadcast so that every weight in a row was identical.
    Each entry now gets its own draw (0.01 granularity).

    :param weight_vector: 2-D weight array; mutated in place.
    :return: the same (mutated) array, for call-site convenience.
    """
    rand_weights = weight_vector
    for j in range(0, len(rand_weights)):
        for k in range(len(rand_weights[j])):
            rand_weights[j][k] = random.randint(-100, 100) / 100
    return rand_weights
def Forward_Pass(data: np.ndarray, weights, biases):  # a Single Forward Pass, returns a vector output after the softmax
    """Affine layer followed by softmax; returns a length-10 probability vector."""
    logits = np.inner(weights, data) + biases
    exponentials = np.array([math.exp(v) for v in logits])
    return exponentials * (1 / exponentials.sum())
def Label_Probs(probabilities):  # returns a label, expects normalized data
    """Return the predicted class label as a plain int (index of the max prob).

    Bug fixed: np.unravel_index returned a 1-tuple such as (3,), which never
    compared equal to the integer labels used in Test_NN / Train_NN, so
    accuracy counting was always zero.
    """
    return int(np.argmax(probabilities, axis=None))
def Test_NN(weights, biases): # Tests network and prints accuracy
    """Evaluate the network on ./data/test_images.gz / test_labels.gz and print accuracy."""
    test_im = np.zeros((10000, 784))
    test_lb = None
    # MNIST idx format: 16-byte header for images, then raw pixel bytes.
    with open('./data/test_images.gz', 'rb') as f, gzip.GzipFile(fileobj=f) as bytestream:
        tmp = bytestream.read()
        tmp = np.frombuffer(tmp, dtype=np.dtype('b'), offset=16)
        tmp = np.reshape(tmp, (10000, 784))
        # Binarise pixels: 1 where >= 128, else 0.
        test_im[:, 0:783] = (tmp[0:10000, 0:783] / 128) // 1
        test_im.astype(int)
    # Labels: 8-byte header, then one byte per label.
    with open('./data/test_labels.gz', 'rb') as f, gzip.GzipFile(fileobj=f) as bytestream:
        tmp = bytestream.read()
        test_lb = np.frombuffer(tmp, dtype=np.dtype('b'), offset=8)
    num_correct = 0
    num_ran = 0
    for i in range(0, 10000):
        # NOTE(review): Label_Probs must return a scalar label for this
        # equality test against the integer label to ever succeed.
        output = Label_Probs(Forward_Pass(test_im[i], weights, biases))
        if output == test_lb[i]:
            num_correct += 1
        num_ran += 1
    print("TEST RESULTS\nNUMBER CORRECT: {0}\nACCURACY: {1}%".format(num_correct, round((num_correct/num_ran)*100, 5)))
def GraphCost(cost: list):
    """Scatter-plot the per-iteration training cost and show the figure."""
    for idx, value in enumerate(cost):
        plt.scatter(idx, value, c="black")
    plt.show()
def Train_NN(weights, biases, max_iter: int, epsilon: float, step_size:float, images, labels): # Train network with training data
    """Gradient-descent training loop; returns the updated (weights, biases).

    Halves the step size when cost increases; stops when the cost change is
    within [0, epsilon] or max_iter is reached. Plots the cost curve at the end.
    """
    cost = []
    iteration = -1
    while iteration < max_iter: # each loop is a training session
        iteration += 1
        cost.append(0)
        num_wrong = 0
        for i in range(1, 10000): # go through each image
            output = Forward_Pass(images[i], weights, biases) # probability vector indexed 0...9
            expected_label = labels[i]
            predicted_label = Label_Probs(output)
            if expected_label != predicted_label:
                num_wrong += 1
            # NOTE(review): range(9) never updates the weights for class 9 --
            # likely an off-by-one; range(10) looks intended.
            for j in range(9):
                if expected_label == j: # we want to maximize this if true
                    weights[j, 0:784] = weights[j, 0:784] - step_size * (images[i, 0:784] * -1 * (1-output[j]))
                    biases[j] = biases[j] - step_size * (-1 * (1-output[j]))
                else: # minimize this
                    weights[j, 0:784] = weights[j, 0:784] - step_size * (images[i, 0:784] * output[j])
                    biases[j] = biases[j] - step_size * (1 * output[j])
            # Accumulate negative log-likelihood of the correct class.
            cost[iteration] = cost[iteration] - math.log(output[expected_label])
        if cost[iteration - 1] < cost[iteration]:
            step_size /= 2
        # NOTE(review): at iteration 0, cost[iteration - 1] is cost[-1] ==
        # cost[0] itself, so the difference is 0 and this condition triggers
        # immediately -- confirm the intended convergence check.
        if cost[iteration] - cost[iteration - 1] <= epsilon and cost[iteration] - cost[iteration - 1] >= 0:
            break
        print("iteration: {0}/{1} Cost:{2} Num Wrong:{3}".format(iteration, max_iter, cost[iteration], num_wrong))
    GraphCost(cost)
    return weights, biases
if __name__ == "__main__":
    # Import data: first 10000 of the 60000 MNIST training images, binarised.
    train_im = np.zeros((10000, 784))
    train_lb = None
    with open('./data/training_images.gz', 'rb') as f, gzip.GzipFile(fileobj=f) as bytestream:
        tmp = bytestream.read()
        tmp = np.frombuffer(tmp, dtype=np.dtype('b'), offset=16)
        tmp = np.reshape(tmp, (60000, 784))
        train_im[:, 0:783] = (tmp[0:10000, 0:783] / 128) // 1
        train_im.astype(int)
    with open('./data/training_labels.gz', 'rb') as f, gzip.GzipFile(fileobj=f) as bytestream:
        tmp = bytestream.read()
        tmp = np.frombuffer(tmp, dtype=np.dtype('b'), offset=8)
        train_lb = tmp[0:10000]
    # Random initial weights, zero biases; evaluate before and after training.
    weights = Randomize_Weights(np.zeros((10, 784)))
    biases = np.zeros(10)
    print("BEFORE TRAINING")
    Test_NN(weights, biases)
    print("TRAINING")
    weights, biases = Train_NN(weights, biases, 20, .1, .2, train_im, train_lb)
    print("AFTER TRAINING")
    Test_NN(weights, biases)
    # Visualise each class's learned weight vector as a 28x28 image.
    for index,x in enumerate(weights):
        print(index)
        image = np.reshape(x, (28,28))
        plt.imshow(image)
        plt.show()
| [
"numpy.reshape",
"matplotlib.pyplot.scatter",
"matplotlib.pyplot.imshow",
"math.log",
"numpy.dtype",
"matplotlib.pyplot.show",
"numpy.inner",
"numpy.zeros",
"gzip.GzipFile",
"math.exp",
"numpy.argmax",
"random.randint"
] | [((500, 512), 'numpy.zeros', 'np.zeros', (['(10)'], {}), '(10)\n', (508, 512), True, 'import numpy as np\n'), ((998, 1020), 'numpy.zeros', 'np.zeros', (['(10000, 784)'], {}), '((10000, 784))\n', (1006, 1020), True, 'import numpy as np\n'), ((2000, 2010), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2008, 2010), True, 'import matplotlib.pyplot as plt\n'), ((3671, 3693), 'numpy.zeros', 'np.zeros', (['(10000, 784)'], {}), '((10000, 784))\n', (3679, 3693), True, 'import numpy as np\n'), ((4334, 4346), 'numpy.zeros', 'np.zeros', (['(10)'], {}), '(10)\n', (4342, 4346), True, 'import numpy as np\n'), ((532, 560), 'numpy.inner', 'np.inner', (['weights[::1]', 'data'], {}), '(weights[::1], data)\n', (540, 560), True, 'import numpy as np\n'), ((640, 651), 'math.exp', 'math.exp', (['x'], {}), '(x)\n', (648, 651), False, 'import math\n'), ((858, 893), 'numpy.argmax', 'np.argmax', (['probabilities'], {'axis': 'None'}), '(probabilities, axis=None)\n', (867, 893), True, 'import numpy as np\n'), ((1091, 1115), 'gzip.GzipFile', 'gzip.GzipFile', ([], {'fileobj': 'f'}), '(fileobj=f)\n', (1104, 1115), False, 'import gzip\n'), ((1242, 1271), 'numpy.reshape', 'np.reshape', (['tmp', '(10000, 784)'], {}), '(tmp, (10000, 784))\n', (1252, 1271), True, 'import numpy as np\n'), ((1412, 1436), 'gzip.GzipFile', 'gzip.GzipFile', ([], {'fileobj': 'f'}), '(fileobj=f)\n', (1425, 1436), False, 'import gzip\n'), ((1961, 1995), 'matplotlib.pyplot.scatter', 'plt.scatter', (['x', 'cost[x]'], {'c': '"""black"""'}), "(x, cost[x], c='black')\n", (1972, 1995), True, 'import matplotlib.pyplot as plt\n'), ((3769, 3793), 'gzip.GzipFile', 'gzip.GzipFile', ([], {'fileobj': 'f'}), '(fileobj=f)\n', (3782, 3793), False, 'import gzip\n'), ((3920, 3949), 'numpy.reshape', 'np.reshape', (['tmp', '(60000, 784)'], {}), '(tmp, (60000, 784))\n', (3930, 3949), True, 'import numpy as np\n'), ((4096, 4120), 'gzip.GzipFile', 'gzip.GzipFile', ([], {'fileobj': 'f'}), '(fileobj=f)\n', (4109, 4120), False, 'import 
gzip\n'), ((4300, 4319), 'numpy.zeros', 'np.zeros', (['(10, 784)'], {}), '((10, 784))\n', (4308, 4319), True, 'import numpy as np\n'), ((4641, 4664), 'numpy.reshape', 'np.reshape', (['x', '(28, 28)'], {}), '(x, (28, 28))\n', (4651, 4664), True, 'import numpy as np\n'), ((4672, 4689), 'matplotlib.pyplot.imshow', 'plt.imshow', (['image'], {}), '(image)\n', (4682, 4689), True, 'import matplotlib.pyplot as plt\n'), ((4698, 4708), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4706, 4708), True, 'import matplotlib.pyplot as plt\n'), ((288, 313), 'random.randint', 'random.randint', (['(-100)', '(100)'], {}), '(-100, 100)\n', (302, 313), False, 'import random\n'), ((1202, 1215), 'numpy.dtype', 'np.dtype', (['"""b"""'], {}), "('b')\n", (1210, 1215), True, 'import numpy as np\n'), ((1527, 1540), 'numpy.dtype', 'np.dtype', (['"""b"""'], {}), "('b')\n", (1535, 1540), True, 'import numpy as np\n'), ((3201, 3233), 'math.log', 'math.log', (['output[expected_label]'], {}), '(output[expected_label])\n', (3209, 3233), False, 'import math\n'), ((3880, 3893), 'numpy.dtype', 'np.dtype', (['"""b"""'], {}), "('b')\n", (3888, 3893), True, 'import numpy as np\n'), ((4207, 4220), 'numpy.dtype', 'np.dtype', (['"""b"""'], {}), "('b')\n", (4215, 4220), True, 'import numpy as np\n')] |
# External Import
from django.contrib import admin
# Internal Import
from .models import Post
admin.site.register(Post)
| [
"django.contrib.admin.site.register"
] | [((96, 121), 'django.contrib.admin.site.register', 'admin.site.register', (['Post'], {}), '(Post)\n', (115, 121), False, 'from django.contrib import admin\n')] |
import json
twinkle_twinkle = ['c4','c4','g4','g4','a4','a4','g4',\
'f4','f4','e4','e4','d4','d4','c4',\
'g5','g5','f4','f4','e4','e4','d4',\
'g5','g5','f4','f4','e4','e4','d4',\
'c4','c4','g4','g4','a4','a4','g4',\
'f4','f4','e4','e4','d4','d4','c4',\
]
happy_birthday = ["g4", "g4", "a4", "g4", "c5", "b4",\
"g4", "g4", "a4", "g4", "d5", "c5",\
"g4", "g4", "g5", "e5", "c5", "b4", "a4",\
"f5", "f5", "e5", "c5", "d5", "c5"]
jan_gan_man = ['c5', 'd5', 'e5', 'e5', 'e5', 'e5', 'e5',\
'e5', 'e5', 'e5', 'e5', 'd5', 'e5', 'f5',\
'e5', 'e5', 'e5', 'd5', 'd5', 'd5', 'b4',\
'd5', 'c5', 'c5', 'g5', 'g5', 'g5', 'g5',\
'g5', 'f-5', 'g5', 'g5', 'g5', 'f-5', 'a5',\
'g5', 'f5', 'f5', 'f5', 'e5', 'e5', 'f5',\
'd5', 'f5', 'e5', 'e5', 'e5', 'e5', 'e5',\
'd5', 'g5', 'g5', 'g5', 'f5', 'f5', 'e5',\
'e5', 'e5', 'd5', 'd5', 'd5', 'd5', 'b4',\
'd5', 'c5', 'c5', 'd5', 'e5', 'e5', 'e5',\
'e5', 'd5', 'e5', 'f5', 'e5', 'f5', 'g5',\
'g5', 'g5', 'f5', 'e5', 'd5', 'f5', 'e5',\
'e5', 'e5', 'd5', 'd5', 'd5', 'd5', 'b4',\
'd5', 'c5', 'g5', 'g5', 'g5', 'g5', 'g5',\
'g5', 'f-5', 'g5', 'g5', 'g5', 'f-5', 'a5',\
'g5', 'f5', 'f5', 'f5', 'e5', 'e5', 'f5',\
'df', 'e5', 'c5', 'b4', 'c5', 'b4', 'a5',\
'b4', 'a5', 'g5', 'a5', 'c5', 'c5', 'd5',\
'd5', 'e5', 'e5', 'd5', 'e5', 'f5']
o_mere_dil_ke_chain = ['a4', 'g4', 'a4', 'a4', 'g4', 'a4',\
'g4', 'e4', 'b4', 'g4', 'a4', 'a4',\
'g4', 'a4', 'g4', 'e4', 'g4', 'e4',\
'd4', 'd4', 'e4', 'e4', 'g4', 'g4',\
'a4', 'a4', 'b4', 'b4', 'g4', 'a4',\
'b4', 'b4', 'g4', 'a4', 'c5', 'b4',\
'a4', 'c5', 'b4', 'a4', 'c5', 'b4', 'a4']
naruto_theme = ['a4', 'b4', 'a4', 'g4', 'e4', 'g4', 'a4', 'd4',\
'c4', 'd4', 'c4', 'a3', 'b3', 'a4', 'b4', 'a4',\
'g4', 'e4', 'g4', 'a4', 'd4', 'c4', 'd4', 'c4',\
'a3', 'a3', 'e4', 'd4', 'c4', 'a3', 'e4', 'd4',\
'e4', 'a4', 'c5', 'b4', 'a4', 'g4', 'a4', 'e4',\
'd4', 'e4', 'd4', 'b3', 'a3', 'a3', 'e4', 'd4',\
'c4', 'a3', 'e4', 'd4', 'e4', 'a4', 'c5', 'b4',\
'a4', 'g4', 'a4', 'e4', 'g4', 'a4', 'a4', 'b4',\
'a4', 'g4', 'e4', 'g4', 'a4', 'd4', 'c4', 'd4',\
'c4', 'a3', 'b3', 'g3', 'a4', 'b4', 'a4', 'g4',\
'e4', 'g4', 'a4', 'd4', 'c4', 'd4', 'c4', 'a3',\
'a3', 'e4', 'd4', 'c4', 'a3', 'e4', 'd4', 'e4',\
'a4', 'c5', 'b4', 'a4', 'g4', 'a4', 'e4', 'd4',\
'e4', 'd4', 'b3', 'a3', 'a3', 'e4', 'd4', 'c4',\
'a3', 'e4', 'd4', 'e4', 'a4', 'c5', 'b4', 'a4',\
'g4', 'a4', 'e4', 'g4', 'a4', 'a4', 'b4', 'a4',\
'g4', 'e4', 'g4', 'a4', 'd4', 'c4', 'd4', 'c4',\
'a3', 'b3', 'g3', 'a4', 'b4', 'a4', 'g4', 'e4',\
'g4', 'a4', 'd4', 'c4', 'd4', 'c4', 'a3']
notes = {
'1' : twinkle_twinkle,
'2' : happy_birthday,
'3' : jan_gan_man,
'4' : o_mere_dil_ke_chain,
'5' : naruto_theme
}
with open('notes.json', 'w') as file:
json.dump(notes, file)
| [
"json.dump"
] | [((3553, 3575), 'json.dump', 'json.dump', (['notes', 'file'], {}), '(notes, file)\n', (3562, 3575), False, 'import json\n')] |
# Generated by Django 2.1.1 on 2018-11-01 00:26
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('GestiRED', '0020_phase_observation'),
]
operations = [
migrations.RemoveField(
model_name='phase',
name='observation',
),
migrations.AlterField(
model_name='qualitycontrol',
name='observation',
field=models.CharField(max_length=200),
),
]
| [
"django.db.models.CharField",
"django.db.migrations.RemoveField"
] | [((235, 297), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""phase"""', 'name': '"""observation"""'}), "(model_name='phase', name='observation')\n", (257, 297), False, 'from django.db import migrations, models\n'), ((456, 488), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (472, 488), False, 'from django.db import migrations, models\n')] |
from PyQt5.QtWidgets import QMainWindow, QStatusBar
from PyQt5.QtCore import QObject, QEvent
from PyQt5 import QtCore
class MainWindow(QMainWindow):
def __init__(self):
QMainWindow.__init__(self)
mousePressed = QtCore.pyqtSignal(str)
def eventFilter(self, obj: 'QObject', event: 'QEvent') -> bool:
if event.type() == QEvent.MouseButtonPress and obj.isWidgetType():
self.mousePressed.emit(obj.objectName())
return False
return super(MainWindow, self).eventFilter(obj, event)
| [
"PyQt5.QtCore.pyqtSignal",
"PyQt5.QtWidgets.QMainWindow.__init__"
] | [((239, 261), 'PyQt5.QtCore.pyqtSignal', 'QtCore.pyqtSignal', (['str'], {}), '(str)\n', (256, 261), False, 'from PyQt5 import QtCore\n'), ((190, 216), 'PyQt5.QtWidgets.QMainWindow.__init__', 'QMainWindow.__init__', (['self'], {}), '(self)\n', (210, 216), False, 'from PyQt5.QtWidgets import QMainWindow, QStatusBar\n')] |
# Generated by Django 2.0.9 on 2018-12-05 11:07
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('users', '0006_auto_20181202_1125'),
('student', '0005_auto_20181203_1208'),
('bge', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='CommunicationLog',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True, null=True)),
('updated_at', models.DateTimeField(auto_now=True, null=True)),
('date', models.DateField(blank=True, null=True)),
('category', models.CharField(blank=True, max_length=80, null=True)),
('priority', models.IntegerField(blank=True, null=True)),
('comment', models.TextField(blank=True, null=True)),
('file', models.FileField(blank=True, null=True, upload_to='file')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='HostFamily',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True, null=True)),
('updated_at', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(blank=True, max_length=80, null=True)),
('status', models.CharField(blank=True, choices=[('active', 'Active'), ('inactive', 'Inactive')], max_length=80, null=True)),
('address', models.CharField(blank=True, max_length=140, null=True)),
('phone', models.CharField(blank=True, max_length=80, null=True)),
('email', models.CharField(blank=True, max_length=140, null=True)),
('possible_school', models.TextField(blank=True, null=True)),
('occupation', models.CharField(blank=True, max_length=140, null=True)),
('employer', models.CharField(blank=True, max_length=80, null=True)),
('marital_status', models.CharField(blank=True, max_length=80, null=True)),
('children', models.CharField(blank=True, max_length=80, null=True)),
('pets', models.CharField(blank=True, max_length=80, null=True)),
('next_year_plan', models.CharField(blank=True, choices=[('same_student', 'Same-Student'), ('change_student', 'Change-student'), ('na', 'N/A'), ('a', 'A')], max_length=140, null=True)),
('student_preference', models.CharField(blank=True, choices=[('male', 'Male'), ('female', 'Female')], max_length=80, null=True)),
('hosting_capacity', models.CharField(blank=True, max_length=80, null=True)),
('starting_date', models.DateField(blank=True, null=True)),
('last_update_date', models.DateField(blank=True, null=True)),
('comment', models.TextField(blank=True, null=True)),
('provider', models.CharField(blank=True, max_length=80, null=True)),
('host_coordi', models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, to='users.BgeBranchAdminUser')),
('provider_branch', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='bge.BgeBranch')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='HostFile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True, null=True)),
('updated_at', models.DateTimeField(auto_now=True, null=True)),
('file', models.FileField(blank=True, null=True, upload_to='files')),
('host', models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='branch.HostFamily')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='HostPhoto',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True, null=True)),
('updated_at', models.DateTimeField(auto_now=True, null=True)),
('photo', models.ImageField(blank=True, null=True, upload_to='images')),
('host', models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='branch.HostFamily')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='HostStudent',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True, null=True)),
('updated_at', models.DateTimeField(auto_now=True, null=True)),
('host', models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='branch.HostFamily')),
('student', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='student.Student')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='HostStudentReport',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True, null=True)),
('updated_at', models.DateTimeField(auto_now=True, null=True)),
('description', models.TextField(blank=True, null=True)),
('rate', models.CharField(blank=True, max_length=80, null=True)),
('improvement', models.NullBooleanField()),
('cultural_fluency', models.TextField(blank=True, null=True)),
('house_rule_attitude', models.TextField(blank=True, null=True)),
('responsibility', models.TextField(blank=True, null=True)),
('communication', models.TextField(blank=True, null=True)),
('sleeping_habits', models.TextField(blank=True, null=True)),
('school_attendance', models.TextField(blank=True, null=True)),
('comment', models.TextField(blank=True, null=True)),
('host', models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='branch.HostFamily')),
('student', models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, to='student.Student')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='ReportFile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True, null=True)),
('updated_at', models.DateTimeField(auto_now=True, null=True)),
('file', models.FileField(blank=True, null=True, upload_to='file')),
('report', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='branch.HostStudentReport')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='ReportPhoto',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True, null=True)),
('updated_at', models.DateTimeField(auto_now=True, null=True)),
('photo', models.ImageField(blank=True, null=True, upload_to='image')),
('report', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='branch.HostStudentReport')),
],
options={
'abstract': False,
},
),
migrations.AddField(
model_name='communicationlog',
name='host',
field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='branch.HostFamily'),
),
migrations.AddField(
model_name='communicationlog',
name='writer',
field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, to='users.BgeBranchAdminUser'),
),
]
| [
"django.db.models.NullBooleanField",
"django.db.models.ForeignKey",
"django.db.models.IntegerField",
"django.db.models.ImageField",
"django.db.models.FileField",
"django.db.models.DateTimeField",
"django.db.models.DateField",
"django.db.models.AutoField",
"django.db.models.TextField",
"django.db.m... | [((8718, 8822), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""branch.HostFamily"""'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n to='branch.HostFamily')\n", (8738, 8822), False, 'from django.db import migrations, models\n'), ((8948, 9061), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': '"""users.BgeBranchAdminUser"""'}), "(null=True, on_delete=django.db.models.deletion.\n SET_NULL, to='users.BgeBranchAdminUser')\n", (8968, 9061), False, 'from django.db import migrations, models\n'), ((472, 565), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (488, 565), False, 'from django.db import migrations, models\n'), ((595, 645), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'null': '(True)'}), '(auto_now_add=True, null=True)\n', (615, 645), False, 'from django.db import migrations, models\n'), ((679, 725), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'null': '(True)'}), '(auto_now=True, null=True)\n', (699, 725), False, 'from django.db import migrations, models\n'), ((753, 792), 'django.db.models.DateField', 'models.DateField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (769, 792), False, 'from django.db import migrations, models\n'), ((824, 878), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(80)', 'null': '(True)'}), '(blank=True, max_length=80, null=True)\n', (840, 878), False, 'from django.db import migrations, models\n'), ((910, 952), 'django.db.models.IntegerField', 'models.IntegerField', ([], 
{'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (929, 952), False, 'from django.db import migrations, models\n'), ((983, 1022), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (999, 1022), False, 'from django.db import migrations, models\n'), ((1050, 1107), 'django.db.models.FileField', 'models.FileField', ([], {'blank': '(True)', 'null': '(True)', 'upload_to': '"""file"""'}), "(blank=True, null=True, upload_to='file')\n", (1066, 1107), False, 'from django.db import migrations, models\n'), ((1315, 1408), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1331, 1408), False, 'from django.db import migrations, models\n'), ((1438, 1488), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'null': '(True)'}), '(auto_now_add=True, null=True)\n', (1458, 1488), False, 'from django.db import migrations, models\n'), ((1522, 1568), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'null': '(True)'}), '(auto_now=True, null=True)\n', (1542, 1568), False, 'from django.db import migrations, models\n'), ((1596, 1650), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(80)', 'null': '(True)'}), '(blank=True, max_length=80, null=True)\n', (1612, 1650), False, 'from django.db import migrations, models\n'), ((1680, 1796), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'choices': "[('active', 'Active'), ('inactive', 'Inactive')]", 'max_length': '(80)', 'null': '(True)'}), "(blank=True, choices=[('active', 'Active'), ('inactive',\n 'Inactive')], max_length=80, null=True)\n", (1696, 1796), False, 'from django.db import migrations, models\n'), ((1823, 1878), 
'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(140)', 'null': '(True)'}), '(blank=True, max_length=140, null=True)\n', (1839, 1878), False, 'from django.db import migrations, models\n'), ((1907, 1961), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(80)', 'null': '(True)'}), '(blank=True, max_length=80, null=True)\n', (1923, 1961), False, 'from django.db import migrations, models\n'), ((1990, 2045), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(140)', 'null': '(True)'}), '(blank=True, max_length=140, null=True)\n', (2006, 2045), False, 'from django.db import migrations, models\n'), ((2084, 2123), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (2100, 2123), False, 'from django.db import migrations, models\n'), ((2157, 2212), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(140)', 'null': '(True)'}), '(blank=True, max_length=140, null=True)\n', (2173, 2212), False, 'from django.db import migrations, models\n'), ((2244, 2298), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(80)', 'null': '(True)'}), '(blank=True, max_length=80, null=True)\n', (2260, 2298), False, 'from django.db import migrations, models\n'), ((2336, 2390), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(80)', 'null': '(True)'}), '(blank=True, max_length=80, null=True)\n', (2352, 2390), False, 'from django.db import migrations, models\n'), ((2422, 2476), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(80)', 'null': '(True)'}), '(blank=True, max_length=80, null=True)\n', (2438, 2476), False, 'from django.db import migrations, models\n'), ((2504, 2558), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 
'max_length': '(80)', 'null': '(True)'}), '(blank=True, max_length=80, null=True)\n', (2520, 2558), False, 'from django.db import migrations, models\n'), ((2596, 2769), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'choices': "[('same_student', 'Same-Student'), ('change_student', 'Change-student'), (\n 'na', 'N/A'), ('a', 'A')]", 'max_length': '(140)', 'null': '(True)'}), "(blank=True, choices=[('same_student', 'Same-Student'), (\n 'change_student', 'Change-student'), ('na', 'N/A'), ('a', 'A')],\n max_length=140, null=True)\n", (2612, 2769), False, 'from django.db import migrations, models\n'), ((2802, 2911), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'choices': "[('male', 'Male'), ('female', 'Female')]", 'max_length': '(80)', 'null': '(True)'}), "(blank=True, choices=[('male', 'Male'), ('female', 'Female'\n )], max_length=80, null=True)\n", (2818, 2911), False, 'from django.db import migrations, models\n'), ((2946, 3000), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(80)', 'null': '(True)'}), '(blank=True, max_length=80, null=True)\n', (2962, 3000), False, 'from django.db import migrations, models\n'), ((3037, 3076), 'django.db.models.DateField', 'models.DateField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (3053, 3076), False, 'from django.db import migrations, models\n'), ((3116, 3155), 'django.db.models.DateField', 'models.DateField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (3132, 3155), False, 'from django.db import migrations, models\n'), ((3186, 3225), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (3202, 3225), False, 'from django.db import migrations, models\n'), ((3257, 3311), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(80)', 'null': '(True)'}), '(blank=True, 
max_length=80, null=True)\n', (3273, 3311), False, 'from django.db import migrations, models\n'), ((3346, 3459), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': '"""users.BgeBranchAdminUser"""'}), "(null=True, on_delete=django.db.models.deletion.\n SET_NULL, to='users.BgeBranchAdminUser')\n", (3366, 3459), False, 'from django.db import migrations, models\n'), ((3493, 3591), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': '"""bge.BgeBranch"""'}), "(null=True, on_delete=django.db.models.deletion.SET_NULL,\n to='bge.BgeBranch')\n", (3510, 3591), False, 'from django.db import migrations, models\n'), ((3793, 3886), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (3809, 3886), False, 'from django.db import migrations, models\n'), ((3916, 3966), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'null': '(True)'}), '(auto_now_add=True, null=True)\n', (3936, 3966), False, 'from django.db import migrations, models\n'), ((4000, 4046), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'null': '(True)'}), '(auto_now=True, null=True)\n', (4020, 4046), False, 'from django.db import migrations, models\n'), ((4074, 4132), 'django.db.models.FileField', 'models.FileField', ([], {'blank': '(True)', 'null': '(True)', 'upload_to': '"""files"""'}), "(blank=True, null=True, upload_to='files')\n", (4090, 4132), False, 'from django.db import migrations, models\n'), ((4160, 4264), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""branch.HostFamily"""'}), 
"(null=True, on_delete=django.db.models.deletion.CASCADE,\n to='branch.HostFamily')\n", (4180, 4264), False, 'from django.db import migrations, models\n'), ((4467, 4560), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (4483, 4560), False, 'from django.db import migrations, models\n'), ((4590, 4640), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'null': '(True)'}), '(auto_now_add=True, null=True)\n', (4610, 4640), False, 'from django.db import migrations, models\n'), ((4674, 4720), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'null': '(True)'}), '(auto_now=True, null=True)\n', (4694, 4720), False, 'from django.db import migrations, models\n'), ((4749, 4809), 'django.db.models.ImageField', 'models.ImageField', ([], {'blank': '(True)', 'null': '(True)', 'upload_to': '"""images"""'}), "(blank=True, null=True, upload_to='images')\n", (4766, 4809), False, 'from django.db import migrations, models\n'), ((4837, 4941), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""branch.HostFamily"""'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n to='branch.HostFamily')\n", (4857, 4941), False, 'from django.db import migrations, models\n'), ((5146, 5239), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (5162, 5239), False, 'from django.db import migrations, models\n'), ((5269, 5319), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'null': '(True)'}), '(auto_now_add=True, 
null=True)\n', (5289, 5319), False, 'from django.db import migrations, models\n'), ((5353, 5399), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'null': '(True)'}), '(auto_now=True, null=True)\n', (5373, 5399), False, 'from django.db import migrations, models\n'), ((5427, 5531), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""branch.HostFamily"""'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n to='branch.HostFamily')\n", (5447, 5531), False, 'from django.db import migrations, models\n'), ((5558, 5658), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': '"""student.Student"""'}), "(null=True, on_delete=django.db.models.deletion.SET_NULL,\n to='student.Student')\n", (5575, 5658), False, 'from django.db import migrations, models\n'), ((5869, 5962), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (5885, 5962), False, 'from django.db import migrations, models\n'), ((5992, 6042), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'null': '(True)'}), '(auto_now_add=True, null=True)\n', (6012, 6042), False, 'from django.db import migrations, models\n'), ((6076, 6122), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'null': '(True)'}), '(auto_now=True, null=True)\n', (6096, 6122), False, 'from django.db import migrations, models\n'), ((6157, 6196), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (6173, 6196), False, 'from django.db import migrations, models\n'), ((6224, 6278), 'django.db.models.CharField', 
'models.CharField', ([], {'blank': '(True)', 'max_length': '(80)', 'null': '(True)'}), '(blank=True, max_length=80, null=True)\n', (6240, 6278), False, 'from django.db import migrations, models\n'), ((6313, 6338), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {}), '()\n', (6336, 6338), False, 'from django.db import migrations, models\n'), ((6378, 6417), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (6394, 6417), False, 'from django.db import migrations, models\n'), ((6460, 6499), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (6476, 6499), False, 'from django.db import migrations, models\n'), ((6537, 6576), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (6553, 6576), False, 'from django.db import migrations, models\n'), ((6613, 6652), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (6629, 6652), False, 'from django.db import migrations, models\n'), ((6691, 6730), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (6707, 6730), False, 'from django.db import migrations, models\n'), ((6771, 6810), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (6787, 6810), False, 'from django.db import migrations, models\n'), ((6841, 6880), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (6857, 6880), False, 'from django.db import migrations, models\n'), ((6908, 7012), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""branch.HostFamily"""'}), 
"(null=True, on_delete=django.db.models.deletion.CASCADE,\n to='branch.HostFamily')\n", (6928, 7012), False, 'from django.db import migrations, models\n'), ((7039, 7143), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': '"""student.Student"""'}), "(null=True, on_delete=django.db.models.deletion.\n SET_NULL, to='student.Student')\n", (7059, 7143), False, 'from django.db import migrations, models\n'), ((7346, 7439), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (7362, 7439), False, 'from django.db import migrations, models\n'), ((7469, 7519), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'null': '(True)'}), '(auto_now_add=True, null=True)\n', (7489, 7519), False, 'from django.db import migrations, models\n'), ((7553, 7599), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'null': '(True)'}), '(auto_now=True, null=True)\n', (7573, 7599), False, 'from django.db import migrations, models\n'), ((7627, 7684), 'django.db.models.FileField', 'models.FileField', ([], {'blank': '(True)', 'null': '(True)', 'upload_to': '"""file"""'}), "(blank=True, null=True, upload_to='file')\n", (7643, 7684), False, 'from django.db import migrations, models\n'), ((7714, 7822), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""branch.HostStudentReport"""'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n to='branch.HostStudentReport')\n", (7731, 7822), False, 'from django.db import migrations, models\n'), ((8027, 8120), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 
'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (8043, 8120), False, 'from django.db import migrations, models\n'), ((8150, 8200), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'null': '(True)'}), '(auto_now_add=True, null=True)\n', (8170, 8200), False, 'from django.db import migrations, models\n'), ((8234, 8280), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'null': '(True)'}), '(auto_now=True, null=True)\n', (8254, 8280), False, 'from django.db import migrations, models\n'), ((8309, 8368), 'django.db.models.ImageField', 'models.ImageField', ([], {'blank': '(True)', 'null': '(True)', 'upload_to': '"""image"""'}), "(blank=True, null=True, upload_to='image')\n", (8326, 8368), False, 'from django.db import migrations, models\n'), ((8398, 8506), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""branch.HostStudentReport"""'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n to='branch.HostStudentReport')\n", (8415, 8506), False, 'from django.db import migrations, models\n')] |
#!/usr/bin/env python3
#
# submit_one_Door43_test.py
# Written: Jan 2019
# Last modified: 2019-12-09 RJH
#
# Python imports
import os
import sys
import json
import logging
import subprocess
# ======================================================================
# User settings
USE_LOCALCOMPOSE_URL = True  # POST to the local docker-compose stack instead of door43.org
LOAD_FROM_DISK_FILE = False  # read the payload from the saved file instead of the pasted string below
OVERWRITE = False  # rewrite the on-disk payload file even when it already exists
REVIEW_FLAG = True # Shows all the URLs again at the end
AUTO_OPEN_IN_BROWSER = True  # open the resulting preview URL in the default browser
# Choose one of the following
# TEST_PREFIXES = ('',)
TEST_PREFIXES = ('dev-',)  # '' = production deployment, 'dev-' = develop deployment
# TEST_PREFIXES = ('', 'dev-',)
TEST_FILENAME = 'Door43-Catalog--vi_ulb--fork' # .json will be appended to this
LOCAL_FILEPATH = '/mnt/SSD/uW/Software/'
# ======================================================================
# A local-compose run only makes sense against the dev prefix.
if USE_LOCALCOMPOSE_URL: assert TEST_PREFIXES == ('dev-',)
LOCAL_COMPOSE_URL = 'http://127.0.0.1:8080/'
TEST_FOLDER = f'{LOCAL_FILEPATH}/testPayloads/JSON/Door43/'
# Payload names use '--' in place of '/' so they are valid filenames.
TEST_FILENAME = TEST_FILENAME.replace('/','--')
filepath = os.path.join(TEST_FOLDER, TEST_FILENAME+'.json')
test_json = None
# Optionally load the webhook payload from a previously saved file.
if LOAD_FROM_DISK_FILE:
    if os.path.isfile(filepath):
        print(f"Loading '{filepath}'…")
        with open(filepath, 'rt') as jf:
            test_json = jf.read()
    else:
        logging.critical(f"LOAD_FROM_DISK_FILE was specified but '{filepath}' doesn't exist")
if not test_json:
print("Using locally pasted JSON stringβ¦")
test_json = """
{
"secret": "",
"forkee": {
"id": 22755,
"owner": {
"id": 4598,
"login": "Door43-Catalog",
"full_name": "Door43 Resource Catalog",
"email": "<EMAIL>",
"avatar_url": "https://git.door43.org/img/avatar_default.png",
"language": "",
"is_admin": false,
"last_login": "1970-01-01T00:00:00Z",
"created": "2016-10-18T19:03:36Z",
"username": "Door43-Catalog"
},
"name": "vi_ulb",
"full_name": "Door43-Catalog/vi_ulb",
"description": "Vietnamese ULB. STR https://git.door43.org/Door43/SourceTextRequestForm/issues/149\\r\\n\\r\\n",
"empty": false,
"private": false,
"fork": false,
"parent": null,
"mirror": false,
"size": 5376,
"html_url": "https://git.door43.org/Door43-Catalog/vi_ulb",
"ssh_url": "git@git.door43.org:Door43-Catalog/vi_ulb.git",
"clone_url": "https://git.door43.org/Door43-Catalog/vi_ulb.git",
"website": "",
"stars_count": 0,
"forks_count": 1,
"watchers_count": 9,
"open_issues_count": 0,
"default_branch": "master",
"archived": false,
"created_at": "2018-02-12T18:13:00Z",
"updated_at": "2020-08-24T04:10:55Z",
"permissions": {
"admin": true,
"push": true,
"pull": true
},
"has_issues": true,
"has_wiki": false,
"has_pull_requests": true,
"ignore_whitespace_conflicts": false,
"allow_merge_commits": true,
"allow_rebase": true,
"allow_rebase_explicit": true,
"allow_squash_merge": true,
"avatar_url": ""
},
"repository": {
"id": 58265,
"owner": {
"id": 6221,
"login": "STR",
"full_name": "",
"email": "",
"avatar_url": "https://git.door43.org/avatars/6221",
"language": "",
"is_admin": false,
"last_login": "1970-01-01T00:00:00Z",
"created": "2017-08-15T15:24:51Z",
"username": "STR"
},
"name": "vi_ulb",
"full_name": "STR/vi_ulb",
"description": "Vietnamese ULB. STR https://git.door43.org/Door43/SourceTextRequestForm/issues/149\\r\\n\\r\\n",
"empty": false,
"private": false,
"fork": true,
"parent": {
"id": 22755,
"owner": {
"id": 4598,
"login": "Door43-Catalog",
"full_name": "Door43 Resource Catalog",
"email": "<EMAIL>",
"avatar_url": "https://git.door43.org/img/avatar_default.png",
"language": "",
"is_admin": false,
"last_login": "1970-01-01T00:00:00Z",
"created": "2016-10-18T19:03:36Z",
"username": "Door43-Catalog"
},
"name": "vi_ulb",
"full_name": "Door43-Catalog/vi_ulb",
"description": "Vietnamese ULB. STR https://git.door43.org/Door43/SourceTextRequestForm/issues/149\\r\\n\\r\\n",
"empty": false,
"private": false,
"fork": false,
"parent": null,
"mirror": false,
"size": 5376,
"html_url": "https://git.door43.org/Door43-Catalog/vi_ulb",
"ssh_url": "<EMAIL>:Door43-Catalog/vi_ulb.git",
"clone_url": "https://git.door43.org/Door43-Catalog/vi_ulb.git",
"website": "",
"stars_count": 0,
"forks_count": 2,
"watchers_count": 9,
"open_issues_count": 0,
"default_branch": "master",
"archived": false,
"created_at": "2018-02-12T18:13:00Z",
"updated_at": "2020-08-24T04:10:55Z",
"permissions": {
"admin": true,
"push": true,
"pull": true
},
"has_issues": true,
"has_wiki": false,
"has_pull_requests": true,
"ignore_whitespace_conflicts": false,
"allow_merge_commits": true,
"allow_rebase": true,
"allow_rebase_explicit": true,
"allow_squash_merge": true,
"avatar_url": ""
},
"mirror": false,
"size": 0,
"html_url": "https://git.door43.org/STR/vi_ulb",
"ssh_url": "git@git.door43.org:STR/vi_ulb.git",
"clone_url": "https://git.door43.org/STR/vi_ulb.git",
"website": "",
"stars_count": 0,
"forks_count": 0,
"watchers_count": 0,
"open_issues_count": 0,
"default_branch": "master",
"archived": false,
"created_at": "2020-08-24T04:11:10Z",
"updated_at": "2020-08-24T04:11:10Z",
"permissions": {
"admin": true,
"push": true,
"pull": true
},
"has_issues": true,
"has_wiki": true,
"has_pull_requests": true,
"ignore_whitespace_conflicts": false,
"allow_merge_commits": true,
"allow_rebase": true,
"allow_rebase_explicit": true,
"allow_squash_merge": true,
"avatar_url": ""
},
"sender": {
"id": 6442,
"login": "RobH",
"full_name": "<NAME>",
"email": "<EMAIL>",
"avatar_url": "https://git.door43.org/avatars/f85d2867fead49449e89c6822dc77bc6",
"language": "en-US",
"is_admin": true,
"last_login": "2020-08-11T23:22:24Z",
"created": "2017-10-22T07:31:07Z",
"username": "RobH"
}
}
""" \
.replace('\\n','').replace('\n','') \
.replace("{'", '{"').replace("': ", '": ').replace(": '", ': "').replace("', ", '", ').replace(", '", ', "').replace("'}", '"}') \
.replace(': True,', ': true,').replace(': False,', ': false,').replace(': None,', ': null,') \
.replace(': True}', ': true}').replace(': False}', ': false}').replace(': None}', ': null}')
# print('test_json = ', test_json)
# Sanity check, deliberately disabled with 'if 0' — re-enable by changing the 0 to 1.
if 0 and TEST_FILENAME.replace('--','/') not in test_json:
    print(f"Seems '{TEST_FILENAME}' can't be found in the JSON -- is it correct?")
    print(f"  {test_json[:600]}…")
    sys.exit()
# Save the payload to disk so curl can POST it from a file below.
if OVERWRITE or not LOAD_FROM_DISK_FILE: # Write the json file
    print(f"Writing '{filepath}'…")
    with open(filepath, 'wt') as jf:
        jf.write(test_json)
else:
    print("(Not saving JSON file)")
# The payload file must exist by this point, whichever path produced it.
if not os.path.isfile(filepath):
    logging.critical(f"Unable to proceed coz '{filepath}' doesn't exist")
    sys.exit()
# NOTE(review): webURL is never reassigned below, so the 'View result' branch
# at the end of the loop never runs in this version of the script.
webURL = ''
# POST the payload to each selected deployment and report the responses.
for prefix in TEST_PREFIXES:
    long_prefix = 'develop' if prefix else 'git'
    webhook = LOCAL_COMPOSE_URL if USE_LOCALCOMPOSE_URL else f'https://{long_prefix}.door43.org/client/webhook/'
    print( f"\n{'(dev) ' if prefix else ''}'{TEST_FILENAME}' to {webhook}:" )
    jsonFilename = f'{TEST_FOLDER}{TEST_FILENAME}.json'
    with open(jsonFilename, 'rt') as jsonFile:
        jsonString = jsonFile.read()
    jsonDict = json.loads(jsonString)
    # Infer the Gitea event type from which keys the payload contains.
    if 'pusher' in jsonDict:
        event = 'push'
    elif 'release' in jsonDict:
        event = 'release'
    elif 'pull_request' in jsonDict:
        event = 'pull_request'
    elif 'ref_type' in jsonDict and jsonDict['ref_type']=='branch' and 'pusher_type' in jsonDict:
        event = 'delete'
    elif 'forkee' in jsonDict:
        event = 'fork'
    # elif 'ref_type' in jsonDict and jsonDict['ref_type']=='branch' and 'ref' in jsonDict:
        # event = 'create'
    else:
        logging.critical(f"Can't determine event (push/release/delete/fork, etc.) from JSON")
        halt  # undefined name: raises NameError, acting as a crude stop
    # Use curl to actually POST the JSON to the given webhook URL
    parameters = ['curl', webhook,
                    '--data', f'@{jsonFilename}',
                    '--header', "Content-Type: application/json",
                    '--header', f"X-Gitea-Event: {event}",
                    ]
    myProcess = subprocess.Popen( parameters, stdout=subprocess.PIPE, stderr=subprocess.PIPE )
    programOutputBytes, programErrorOutputBytes = myProcess.communicate()
    # Process the output from curl
    if programOutputBytes:
        programOutputString = programOutputBytes.decode(encoding='utf-8', errors='replace')
        #programOutputString = programOutputString.replace( baseFolder + ('' if baseFolder[-1]=='/' else '/'), '' ) # Remove long file paths to make it easier for the user to read
        #with open( os.path.join( outputFolder, 'ScriptOutput.txt" ), 'wt', encoding='utf-8' ) as myFile: myFile.write( programOutputString )
        #print( f"Response = {programOutputString!r}" )
        if programOutputString.startswith('{'): # Assume it's a json dict
            responseDict = json.loads(programOutputString)
            if responseDict['status'] == 'queued':
                print( "  Job successfully queued" )
            else:
                print( f"Response dict = {responseDict}" )
        else:
            print( f"Response = {programOutputString!r}" )
    if programErrorOutputBytes:
        programErrorOutputString = programErrorOutputBytes.decode(encoding='utf-8', errors='replace')
        #with open( os.path.join( outputFolder, 'ScriptErrorOutput.txt" ), 'wt', encoding='utf-8' ) as myFile: myFile.write( programErrorOutputString )
        # Suppress curl's normal progress table; show anything else.
        if not programErrorOutputString.startswith('  % Total'):
            print( f"pEOS = {programErrorOutputString!r}" )
    if webURL:
        url = f"https://{'dev.' if prefix else ''}door43.org/u/{webURL}/"
        print(f"View result at {url}")
        if AUTO_OPEN_IN_BROWSER:
            import webbrowser
            webbrowser.open(url, new=0, autoraise=True)
            #subprocess.Popen(['xdg-open', url])
| [
"os.path.join",
"subprocess.Popen",
"json.loads",
"os.path.isfile",
"webbrowser.open",
"logging.critical",
"sys.exit"
] | [((993, 1043), 'os.path.join', 'os.path.join', (['TEST_FOLDER', "(TEST_FILENAME + '.json')"], {}), "(TEST_FOLDER, TEST_FILENAME + '.json')\n", (1005, 1043), False, 'import os\n'), ((1090, 1114), 'os.path.isfile', 'os.path.isfile', (['filepath'], {}), '(filepath)\n', (1104, 1114), False, 'import os\n'), ((6905, 6915), 'sys.exit', 'sys.exit', ([], {}), '()\n', (6913, 6915), False, 'import sys\n'), ((7129, 7153), 'os.path.isfile', 'os.path.isfile', (['filepath'], {}), '(filepath)\n', (7143, 7153), False, 'import os\n'), ((7159, 7228), 'logging.critical', 'logging.critical', (['f"""Unable to proceed coz \'{filepath}\' doesn\'t exist"""'], {}), '(f"Unable to proceed coz \'{filepath}\' doesn\'t exist")\n', (7175, 7228), False, 'import logging\n'), ((7233, 7243), 'sys.exit', 'sys.exit', ([], {}), '()\n', (7241, 7243), False, 'import sys\n'), ((7681, 7703), 'json.loads', 'json.loads', (['jsonString'], {}), '(jsonString)\n', (7691, 7703), False, 'import json\n'), ((8607, 8683), 'subprocess.Popen', 'subprocess.Popen', (['parameters'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), '(parameters, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n', (8623, 8683), False, 'import subprocess\n'), ((1247, 1337), 'logging.critical', 'logging.critical', (['f"""LOAD_FROM_DISK_FILE was specified but \'{filepath}\' doesn\'t exist"""'], {}), '(\n f"LOAD_FROM_DISK_FILE was specified but \'{filepath}\' doesn\'t exist")\n', (1263, 1337), False, 'import logging\n'), ((9394, 9425), 'json.loads', 'json.loads', (['programOutputString'], {}), '(programOutputString)\n', (9404, 9425), False, 'import json\n'), ((10289, 10332), 'webbrowser.open', 'webbrowser.open', (['url'], {'new': '(0)', 'autoraise': '(True)'}), '(url, new=0, autoraise=True)\n', (10304, 10332), False, 'import webbrowser\n'), ((8196, 8286), 'logging.critical', 'logging.critical', (['f"""Can\'t determine event (push/release/delete/fork, etc.) 
from JSON"""'], {}), '(\n f"Can\'t determine event (push/release/delete/fork, etc.) from JSON")\n', (8212, 8286), False, 'import logging\n')] |
'''entre no programa com a altura e largura de uma parede
em mtr calcule a area e a quantidade de tinta necessΓ‘ria para
pintΓ‘-la considerando que cada litro de tinta pinta 2mΒ²'''
from math import ceil
cores = {'limpa': '\033[m', 'azul': '\033[1;34m'}
print('{:-^40}'.format('CALCULO DE ΓREA'))
largura = float(input('Digite a largura da Parede: '))
altura = float(input('Digite a altura da Parede: '))
area = largura * altura
tinta = area / 2
print('Sua parede tem uma dimensΓ£o de \033[1;34m{}\033[m x \033[1;34m{}\033[mmtr. '.format(largura, altura))
print('{}{:.2f}MΒ²{} Γ© sua Γ‘rea de Parede. '.format(cores['azul'], area, cores['limpa']))
print('{}{}-Litros{} de tinta para pintar estΓ‘ Γ‘rea. '.format(cores['azul'], ceil(tinta), cores['limpa']))
print('{:-^40}'.format('FIM'))
| [
"math.ceil"
] | [((722, 733), 'math.ceil', 'ceil', (['tinta'], {}), '(tinta)\n', (726, 733), False, 'from math import ceil\n')] |
import numpy as np
import sympy as sp
import pandas as pd
import sys
#give input equation
# Parse the user-entered formula string into a SymPy expression tree.
expr = sp.sympify("x_1**2 + x_2*x_3")
#get number of columns from X dataframe and y dataframe
num_of_columns = 3+1 #dataframe features len(X.columns) +1
num_of_y_columns = 2 #dataframe features len(Y.columns) +1
#create equation variables as user prefers to enter like x_1 ** 2 + x_2 * x_3
symbols = sp.symarray('x',num_of_columns)
symbols = symbols[1:]  # drop x_0 so the symbols are 1-based: x_1 .. x_n
y_symbols = sp.symarray('y', num_of_y_columns)
y_symbols = y_symbols[1:]  # likewise 1-based y symbols (unused below)
print(symbols)
df = pd.DataFrame(
    [[21, 72, 67.1],
    [23, 78, 69.5],
    [32, 74, 56.6],
    [52, 54, 76.2]],
    columns = ['x1','x2', 'x3'])
# One NumPy array per dataframe column, in column order, to feed the lambdified function.
variable_tuple = [df[c].to_numpy() for i,c in enumerate(df.columns, start=1)]
# Also expose each column as a module-level attribute x_1, x_2, ... (not used below).
mod = sys.modules[__name__]
for i,c in enumerate(df.columns, start=1):
    setattr(mod, "x_"+str(i), df[c])
# Compile the symbolic expression to a vectorised NumPy function and evaluate it column-wise.
f = sp.lambdify(symbols, expr, 'numpy')
result = f(*variable_tuple)
print(result)
# NOTE: sympify converts '^' to '**' by default (convert_xor=True), so this is 8*x_3/(pi*x_2**3).
expr2 = sp.sympify("8*x_3/(pi * x_2^3)")
f = sp.lambdify(symbols, expr2, 'numpy')
result = f(*variable_tuple)
print(result)
| [
"pandas.DataFrame",
"sympy.sympify",
"sympy.symarray",
"sympy.lambdify"
] | [((97, 127), 'sympy.sympify', 'sp.sympify', (['"""x_1**2 + x_2*x_3"""'], {}), "('x_1**2 + x_2*x_3')\n", (107, 127), True, 'import sympy as sp\n'), ((390, 422), 'sympy.symarray', 'sp.symarray', (['"""x"""', 'num_of_columns'], {}), "('x', num_of_columns)\n", (401, 422), True, 'import sympy as sp\n'), ((456, 490), 'sympy.symarray', 'sp.symarray', (['"""y"""', 'num_of_y_columns'], {}), "('y', num_of_y_columns)\n", (467, 490), True, 'import sympy as sp\n'), ((537, 648), 'pandas.DataFrame', 'pd.DataFrame', (['[[21, 72, 67.1], [23, 78, 69.5], [32, 74, 56.6], [52, 54, 76.2]]'], {'columns': "['x1', 'x2', 'x3']"}), "([[21, 72, 67.1], [23, 78, 69.5], [32, 74, 56.6], [52, 54, 76.2\n ]], columns=['x1', 'x2', 'x3'])\n", (549, 648), True, 'import pandas as pd\n'), ((841, 876), 'sympy.lambdify', 'sp.lambdify', (['symbols', 'expr', '"""numpy"""'], {}), "(symbols, expr, 'numpy')\n", (852, 876), True, 'import sympy as sp\n'), ((927, 959), 'sympy.sympify', 'sp.sympify', (['"""8*x_3/(pi * x_2^3)"""'], {}), "('8*x_3/(pi * x_2^3)')\n", (937, 959), True, 'import sympy as sp\n'), ((964, 1000), 'sympy.lambdify', 'sp.lambdify', (['symbols', 'expr2', '"""numpy"""'], {}), "(symbols, expr2, 'numpy')\n", (975, 1000), True, 'import sympy as sp\n')] |
"""
Read sample documents from mongo db and write sample metadata files
to iRODS.
"""
import argparse
from itertools import islice
import json
import os
import pprint
import re
import sys
import time
import pymongo
import imicrobe.util.irods as irods
def write_sample_metadata_files(target_root, file_limit):
    """
    Export sample metadata from the iMicrobe MongoDB to iRODS.

    This script is intended to run on a system with access to the iMicrobe MongoDB.
    For each document in the 'sample' collection of the 'imicrobe' database that
    names a FASTA/FASTQ specimen file under /iplant/, write the document contents
    as a JSON file alongside that specimen file in iRODS, skipping metadata files
    that already exist.

    :param target_root: iRODS collection that must already exist; the function
        exits the process with status 1 if it does not.
    :param file_limit: maximum number of sample documents to read from Mongo
        (0 means no limit, per pymongo's find(limit=...) semantics).
    """
    print('target iRODS directory is "{}"'.format(target_root))
    with irods.irods_session_manager() as irods_session:
        if irods.irods_collection_exists(irods_session, target_root):
            print('  target directory exists')
        else:
            print('  target directory does not exist')
            # sys.exit rather than the site builtin exit(): always available
            sys.exit(1)

    print('\nsearching for samples in Mongo DB')
    # Raw string: '\.' in a plain string literal is an invalid escape sequence.
    sequence_file_extensions = re.compile(r'\.(fa|fna|fasta|fastq)(\.tar)?(\.gz)?$')
    t0 = time.time()
    samples = {}
    samples_missing_specimen_file = []
    samples_missing_fasta_file = []
    for sample_metadata in pymongo.MongoClient().imicrobe.sample.find(limit=file_limit):
        sample_fn = None
        if 'specimen__file' in sample_metadata:
            specimen_files = sample_metadata['specimen__file'].split()
            # find the FASTA file
            for fp in specimen_files:
                if not fp.startswith('/iplant/'):
                    # avoid ftp
                    pass
                elif sequence_file_extensions.search(fp) is None:
                    pass
                else:
                    sample_fn = os.path.basename(fp)
                    # the metadata file sits next to the FASTA file, with a .json suffix
                    metadata_fp = sequence_file_extensions.sub('.json', fp)
                    samples[metadata_fp] = sample_metadata
                    break
            if sample_fn is None:
                samples_missing_fasta_file.append(sample_metadata)
                print('{}: no FASTA file in "{}"'.format(
                    len(samples_missing_fasta_file),
                    pprint.pformat(sample_metadata)))
        else:
            samples_missing_specimen_file.append(sample_metadata)
            print('{}: no specimen__file in "{}"'.format(
                len(samples_missing_specimen_file),
                pprint.pformat(sample_metadata['_id'])))

    print('found {} samples in {:5.2f}s'.format(len(samples), time.time()-t0))
    print('  {} samples have no specimen__file'.format(len(samples_missing_specimen_file)))
    print('  {} samples have no FASTA file'.format(len(samples_missing_fasta_file)))

    t0 = time.time()
    print('which files already exist?')
    files_to_be_written = {}
    with irods.irods_session_manager() as irods_session:
        for metadata_fp, sample_metadata in sorted(samples.items()):
            print('checking for "{}"'.format(metadata_fp))
            # only write metadata files that are not already present
            if not irods.irods_data_object_exists(irods_session, metadata_fp):
                files_to_be_written[metadata_fp] = sample_metadata
    print('found {} files to be written in {:5.2f}s'.format(len(files_to_be_written), time.time()-t0))

    t0 = time.time()
    print('\nwriting {} files'.format(len(files_to_be_written)))
    with irods.irods_session_manager() as irods_session:
        for metadata_fp, sample_metadata in sorted(files_to_be_written.items()):
            print('writing {}'.format(metadata_fp))
            # remove mongo _id field - it will not serialize
            del sample_metadata['_id']
            irods.irods_write_data_object(
                irods_session,
                metadata_fp,
                content=json.dumps(sample_metadata, indent=2))
    print('wrote {} metadata files in {:5.3f}s'.format(len(files_to_be_written), time.time()-t0))
def main(argv):
    """Parse command-line options and run the metadata export.

    --target-root : iRODS collection to write into
    --file-limit  : read at most this many sample documents (0 = no limit)
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--target-root', default='/iplant/home/shared/imicrobe/projects')
    parser.add_argument('--file-limit', type=int, default=0, required=False)
    options = parser.parse_args(args=argv)
    write_sample_metadata_files(
        target_root=options.target_root,
        file_limit=options.file_limit)
def cli():
    """Console entry point: forward the process's CLI arguments to main()."""
    argv = sys.argv[1:]
    main(argv)


if __name__ == '__main__':
    cli()
| [
"imicrobe.util.irods.irods_collection_exists",
"pprint.pformat",
"imicrobe.util.irods.irods_session_manager",
"re.compile",
"argparse.ArgumentParser",
"json.dumps",
"imicrobe.util.irods.irods_data_object_exists",
"pymongo.MongoClient",
"time.time",
"os.path.split"
] | [((955, 1010), 're.compile', 're.compile', (['"""\\\\.(fa|fna|fasta|fastq)(\\\\.tar)?(\\\\.gz)?$"""'], {}), "('\\\\.(fa|fna|fasta|fastq)(\\\\.tar)?(\\\\.gz)?$')\n", (965, 1010), False, 'import re\n'), ((1017, 1028), 'time.time', 'time.time', ([], {}), '()\n', (1026, 1028), False, 'import time\n'), ((2947, 2958), 'time.time', 'time.time', ([], {}), '()\n', (2956, 2958), False, 'import time\n'), ((3507, 3518), 'time.time', 'time.time', ([], {}), '()\n', (3516, 3518), False, 'import time\n'), ((4230, 4255), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (4253, 4255), False, 'import argparse\n'), ((619, 648), 'imicrobe.util.irods.irods_session_manager', 'irods.irods_session_manager', ([], {}), '()\n', (646, 648), True, 'import imicrobe.util.irods as irods\n'), ((678, 735), 'imicrobe.util.irods.irods_collection_exists', 'irods.irods_collection_exists', (['irods_session', 'target_root'], {}), '(irods_session, target_root)\n', (707, 735), True, 'import imicrobe.util.irods as irods\n'), ((3037, 3066), 'imicrobe.util.irods.irods_session_manager', 'irods.irods_session_manager', ([], {}), '()\n', (3064, 3066), True, 'import imicrobe.util.irods as irods\n'), ((3593, 3622), 'imicrobe.util.irods.irods_session_manager', 'irods.irods_session_manager', ([], {}), '()\n', (3620, 3622), True, 'import imicrobe.util.irods as irods\n'), ((3228, 3286), 'imicrobe.util.irods.irods_data_object_exists', 'irods.irods_data_object_exists', (['irods_session', 'metadata_fp'], {}), '(irods_session, metadata_fp)\n', (3258, 3286), True, 'import imicrobe.util.irods as irods\n'), ((2743, 2754), 'time.time', 'time.time', ([], {}), '()\n', (2752, 2754), False, 'import time\n'), ((3480, 3491), 'time.time', 'time.time', ([], {}), '()\n', (3489, 3491), False, 'import time\n'), ((4179, 4190), 'time.time', 'time.time', ([], {}), '()\n', (4188, 4190), False, 'import time\n'), ((1148, 1169), 'pymongo.MongoClient', 'pymongo.MongoClient', ([], {}), '()\n', (1167, 1169), False, 'import 
pymongo\n'), ((2639, 2677), 'pprint.pformat', 'pprint.pformat', (["sample_metadata['_id']"], {}), "(sample_metadata['_id'])\n", (2653, 2677), False, 'import pprint\n'), ((4001, 4038), 'json.dumps', 'json.dumps', (['sample_metadata'], {'indent': '(2)'}), '(sample_metadata, indent=2)\n', (4011, 4038), False, 'import json\n'), ((1887, 1904), 'os.path.split', 'os.path.split', (['fp'], {}), '(fp)\n', (1900, 1904), False, 'import os\n'), ((2299, 2330), 'pprint.pformat', 'pprint.pformat', (['sample_metadata'], {}), '(sample_metadata)\n', (2313, 2330), False, 'import pprint\n')] |
import json
import pathlib
import random
import jsonpickle
class BankAccount:
    """Base account: holds the owner's SSN and a running balance."""

    def __init__(self, ssn, balance=0):
        self._ssn = ssn
        self._balance = balance

    @property
    def ssn(self):
        """Owner's social security number (read-only)."""
        return self._ssn

    @property
    def balance(self):
        """Current account balance (read-only)."""
        return self._balance

    @staticmethod
    def authentication(filename, ssn, account_number):
        '''
        Read JSON file and checks whether account number is valid
        '''
        with open(filename, "r") as json_file:
            payload = json.load(json_file)
        # The top-level key matches the file's base name (without extension).
        key = pathlib.Path(filename).stem
        # An account is valid when some stored account matches both the
        # given ssn and the given account number.
        for encoded in payload[key]:
            candidate = jsonpickle.decode(encoded)
            if candidate.ssn == ssn and candidate.account_number == account_number:
                return True
        return False
class CheckingAccount(BankAccount):
    """Checking account: deposits are credited with an extra 1 unit and
    account numbers are five digits (10000-99999)."""

    def __init__(self, ssn, balance):
        super().__init__(ssn, balance)
        self._account_type = 'Checking'
        self._account_number = random.randint(10000, 99999)

    @property
    def account_type(self):
        """Fixed account-type label ('Checking')."""
        return self._account_type

    @property
    def account_number(self):
        """Randomly assigned five-digit account number."""
        return self._account_number

    @property
    def deposit(self):
        """Amount credited by the most recent deposit (includes the bonus)."""
        return self._deposit

    @deposit.setter
    def deposit(self, deposit):
        # Only positive amounts are accepted; a flat 1-unit bonus is added.
        if not deposit > 0:
            raise ValueError
        self._deposit = deposit + 1
        self._balance += self._deposit

    @property
    def withdrawal(self):
        """Amount debited by the most recent withdrawal."""
        return self._withdraw

    @withdrawal.setter
    def withdrawal(self, withdraw):
        # Only positive amounts are accepted.
        if not withdraw > 0:
            raise ValueError
        self._withdraw = withdraw
        self._balance -= withdraw
class SavingsAccount(BankAccount):
    """Savings account: deposits are credited with an extra 5 units and
    account numbers fall in the 100000-200000 range."""

    def __init__(self, ssn, balance):
        super().__init__(ssn, balance)
        self._account_type = "Savings"
        self._account_number = random.randint(100000, 200000)

    @property
    def account_type(self):
        """Fixed account-type label ("Savings")."""
        return self._account_type

    @property
    def account_number(self):
        """Randomly assigned account number."""
        return self._account_number

    @property
    def deposit(self):
        """Amount credited by the most recent deposit (includes the bonus)."""
        return self._deposit

    @deposit.setter
    def deposit(self, deposit):
        # Only positive amounts are accepted; a flat 5-unit bonus is added.
        if not deposit > 0:
            raise ValueError
        self._deposit = deposit + 5
        self._balance += self._deposit

    @property
    def withdrawal(self):
        """Amount debited by the most recent withdrawal."""
        return self._withdraw

    @withdrawal.setter
    def withdrawal(self, withdraw):
        # Only positive amounts are accepted.
        if not withdraw > 0:
            raise ValueError
        self._withdraw = withdraw
        self._balance -= withdraw
"jsonpickle.decode",
"json.load",
"pathlib.Path",
"random.randint"
] | [((1156, 1184), 'random.randint', 'random.randint', (['(10000)', '(99999)'], {}), '(10000, 99999)\n', (1170, 1184), False, 'import random\n'), ((2109, 2139), 'random.randint', 'random.randint', (['(100000)', '(200000)'], {}), '(100000, 200000)\n', (2123, 2139), False, 'import random\n'), ((543, 563), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (552, 563), False, 'import json\n'), ((583, 605), 'pathlib.Path', 'pathlib.Path', (['filename'], {}), '(filename)\n', (595, 605), False, 'import pathlib\n'), ((797, 823), 'jsonpickle.decode', 'jsonpickle.decode', (['account'], {}), '(account)\n', (814, 823), False, 'import jsonpickle\n')] |
#!/usr/bin/env python3
import sys
import argparse
import bz2
import pickle
import numpy
import textwrap
import tabulate
import copy
import math
import operator
import collections
import profileLib
import statistics
from xopen import xopen
def error(baseline, value, totalBaseline, totalValue, weight, fullTotalBaseline, fullTotalValue):
    """Signed difference between the measured value and its baseline.

    The remaining arguments are unused; every error function shares this
    signature so they can be selected interchangeably.
    """
    difference = value - baseline
    return difference
def weightedError(baseline, value, totalBaseline, totalValue, weight, fullTotalBaseline, fullTotalValue):
    """Signed error (value - baseline) scaled by the sample's weight."""
    return (value - baseline) * weight
def absoluteWeightedError(baseline, value, totalBaseline, totalValue, weight, fullTotalBaseline, fullTotalValue):
    """Magnitude of the weight-scaled signed error."""
    return abs((value - baseline) * weight)
def absoluteError(baseline, value, totalBaseline, totalValue, weight, fullTotalBaseline, fullTotalValue):
    """Magnitude of the signed error (value - baseline)."""
    return abs(value - baseline)
def relativeError(baseline, value, totalBaseline, totalValue, weight, fullTotalBaseline, fullTotalValue):
    """Signed error relative to the baseline; defined as 0 when the baseline is 0."""
    if baseline == 0:
        return 0
    return (value - baseline) / baseline
def absoluteRelativeError(baseline, value, totalBaseline, totalValue, weight, fullTotalBaseline, fullTotalValue):
    """Magnitude of the relative error (0 when the baseline is 0)."""
    if baseline == 0:
        return 0
    return abs((value - baseline) / baseline)
def weightedRelativeError(baseline, value, totalBaseline, totalValue, weight, fullTotalBaseline, fullTotalValue):
    """Relative error scaled by the sample's weight (0 when the baseline is 0)."""
    relative = (value - baseline) / baseline if baseline != 0 else 0
    return relative * weight
def absoluteWeightedRelativeError(baseline, value, totalBaseline, totalValue, weight, fullTotalBaseline, fullTotalValue):
    """Magnitude of the weight-scaled relative error (0 when the baseline is 0)."""
    relative = (value - baseline) / baseline if baseline != 0 else 0
    return abs(relative * weight)
# values are already processed by errorFunction
def aggregateSum(baselines, values, totalBaseline, totalValue, weights, fullTotalBaseline, fullTotalValue):
    """Aggregate the per-sample error values by adding them all up."""
    total = 0
    for entry in values:
        total += entry
    return total
# values are already processed by errorFunction
def aggregateMin(baselines, values, totalBaseline, totalValue, weights, fullTotalBaseline, fullTotalValue):
    """Smallest of the per-sample error values (other arguments are unused)."""
    smallest = min(values)
    return smallest
# values are already processed by errorFunction
def aggregateMax(baselines, values, totalBaseline, totalValue, weights, fullTotalBaseline, fullTotalValue):
    """Largest of the per-sample error values (other arguments are unused)."""
    largest = max(values)
    return largest
# values are already processed by errorFunction
def aggregateMean(baselines, values, totalBaseline, totalValue, weights, fullTotalBaseline, fullTotalValue):
    """Arithmetic mean of the per-sample error values."""
    count = len(values)
    total = sum(values)
    return total / count
# values are already processed by errorFunction
def aggregateRelativeBaseline(baselines, values, totalBaseline, totalValue, weights, fullTotalBaseline, fullTotalValue):
    """Total error as a fraction of the overall baseline total (0 when that total is 0)."""
    if fullTotalBaseline == 0:
        return 0
    return sum(values) / fullTotalBaseline
# values are already processed by errorFunction
def aggregateRelative(baselines, values, totalBaseline, totalValue, weights, fullTotalBaseline, fullTotalValue):
    """Total error as a fraction of the overall measured total (0 when that total is 0)."""
    if fullTotalValue == 0:
        return 0
    return sum(values) / fullTotalValue
def aggregateWeightedMean(baselines, values, totalBaseline, totalValue, weights, fullTotalBaseline, fullTotalValue):
    """Weight-scaled sum of the per-sample error values.

    Despite the name this is a weighted *sum*; it is a weighted mean only
    when the weights sum to 1.  The original implementation carried an
    alternative (exploding each value proportionally to its weight and
    taking statistics.mean) after the return statement; that code was
    unreachable dead code and has been removed.
    """
    return sum(value * weight for value, weight in zip(values, weights))
def aggregateRootMeanSquaredError(baselines, values, totalBaseline, totalValue, weights, fullTotalBaseline, fullTotalValue):
    """Root mean squared (unweighted) error over all samples."""
    squared_errors = [math.pow(val - base, 2)
                      for base, val, _wgt in zip(baselines, values, weights)]
    return math.sqrt(sum(squared_errors) / len(values))
def aggregateWeightedRootMeanSquaredError(baselines, values, totalBaseline, totalValue, weights, fullTotalBaseline, fullTotalValue):
    """Weight-scaled root-mean-squared error over all keys.

    Each squared per-key error (recomputed via the module-level ``error``
    function) is multiplied by its weight before summing, so no division by
    the key count is performed.
    """
    squaredTotal = 0
    for baseline, value, weight in zip(baselines, values, weights):
        perKeyError = error(baseline, value, totalBaseline, totalValue, weight, fullTotalBaseline, fullTotalValue)
        squaredTotal += math.pow(perKeyError, 2) * weight
    return math.sqrt(squaredTotal)
# Lookup tables mapping CLI choices to implementations.
# Columns: [ cli parameter, human-readable description, error function ]
errorFunctions = numpy.array([
    ['relative_error', 'Relative Error', relativeError],
    ['error', 'Error', error],
    ['absolute_error', 'Absolute Error', absoluteError],
    ['weighted_error', 'Weighted Error', weightedError],
    ['absolute_weighted_error', 'Absolute Weighted Error', absoluteWeightedError],
    ['absolute_relative_error', 'Absolute Relative Error', absoluteRelativeError],
    ['weighted_relative_error', 'Weighted Relative Error', weightedRelativeError],
    ['absolute_weighted_relative_error', 'Absolute Weighted Relative Error', absoluteWeightedRelativeError],
], dtype=object)
# Columns: [ cli parameter, human-readable description, aggregate function,
#            whether a per-key error function influences the result ]
# The 4th column is checked when both --aggregate and --error are given, to
# warn that e.g. 'sum'/'min'/'max' operate on already-computed errors while
# the RMSE variants recompute errors internally.
aggregateFunctions = numpy.array([
    ['sum', 'Total', aggregateSum, True],
    ['relative', 'Relative', aggregateRelative, True],
    ['relative_baseline', 'Relative', aggregateRelativeBaseline, True],
    ['min', 'Minimum', aggregateMin, True],
    ['max', 'Maximum', aggregateMax, True],
    ['mean', 'Mean', aggregateMean, True],
    ['wmean', 'Weighted Mean', aggregateWeightedMean, True],
    ['rmse', 'Root Mean Squared Error', aggregateRootMeanSquaredError, False],
    ['wrmse', 'Weighted Root Mean Squared Error', aggregateWeightedRootMeanSquaredError, False]
])
parser = argparse.ArgumentParser(description="Visualize profiles from intrvelf sampler.")
parser.add_argument("profile", help="baseline aggregated profile")
parser.add_argument("profiles", help="aggregated profiles to compare", nargs="+")
parser.add_argument("--use-time", help="compare time values", action="store_true", default=False)
parser.add_argument("--use-energy", help="compare energy values (default)", action="store_true", default=False)
parser.add_argument("--use-power", help="compare power values", action="store_true", default=False)
parser.add_argument("--use-samples", help="compare sample counters", action="store_true", default=False)
parser.add_argument("--use-share", help="compare the share (is combined with other --use options)", action="store_true", default=False)
parser.add_argument("--use-exec-times", help="compare execution time", action="store_true", default=False)
parser.add_argument("-e", "--error", help=f"error function (default: {errorFunctions[0][0]})", default=False, choices=errorFunctions[:, 0], type=str.lower)
parser.add_argument("-a", "--aggregate", help="aggregate erros", default=False, choices=aggregateFunctions[:, 0], type=str.lower)
parser.add_argument("-c", "--compensation", help="switch on latency compensation (experimental)", action="store_true", default=False)
parser.add_argument("--limit-time-top", help="include top n entries ranked after time", type=int, default=0)
parser.add_argument("--limit-time", help="include top entries until limit (in percent, e.g. 0.0 - 1.0)", type=float, default=0)
parser.add_argument("--time-threshold", help="time contribution threshold to include (in percent, e.g. 0.0 - 1.0)", type=float, default=0)
parser.add_argument("--limit-energy-top", help="include top n entries ranked after energy", type=int, default=0)
parser.add_argument("--limit-energy", help="include top entries until limit (in percent, e.g. 0.0 - 1.0)", type=float, default=0)
parser.add_argument("--energy-threshold", help="energy contribution threshold (in percent, e.g. 0.0 - 1.0)", type=float, default=0)
parser.add_argument("--exclude-binary", help="exclude these binaries", default=[], nargs='+', action="extend")
parser.add_argument("--exclude-file", help="exclude these files", default=[], nargs='+', action="extend")
parser.add_argument("--exclude-function", help="exclude these functions", default=[], nargs='+', action="extend")
parser.add_argument("--exclude-external", help="exclude external binaries", default=False, action="store_true")
parser.add_argument('--header', help='override header', default=None)
parser.add_argument('--names', help='names of the provided profiles', default=[], nargs="+", action="extend")
parser.add_argument('-n', '--name', action='append', help='name the provided profiles', default=[])
parser.add_argument("-t", "--table", help="output csv table")
parser.add_argument("--coverage", action="store_true", help="output coverage", default=False)
parser.add_argument("--totals", action="store_true", help="output total", default=False)
parser.add_argument("--weights", action="store_true", help="output importance", default=False)
parser.add_argument("-q", "--quiet", action="store_true", help="be quiet", default=False)
parser.add_argument("--cut-off-symbols", help="number of characters symbol to insert line break (positive) or cut off (negative)", type=int, default=64)
args = parser.parse_args()
if (not args.use_time and not args.use_energy and not args.use_power and not args.use_samples and not args.use_exec_times):
args.use_time = True
header = ""
cmpTime = 0
cmpPower = 1
cmpEnergy = 2
cmpRelSamples = 3
cmpExecs = 4
cmpShare = 5
subCmpOffset = cmpTime
if args.use_time:
header = "Time "
subCmpOffset = cmpTime
if args.use_power:
header = "Power "
subCmpOffset = cmpPower
if args.use_samples:
header = "Relative Samples "
subCmpOffset = cmpRelSamples
if args.use_exec_times:
header = "Execution Times "
subCmpOffset = cmpExecs
if args.use_energy:
header = "Energy "
subCmpOffset = cmpEnergy
cmpOffset = subCmpOffset
if args.use_share:
header += "Share "
cmpOffset = cmpShare
if (args.limit_time != 0 or args.limit_time_top != 0) and (args.limit_energy != 0 or args.limit_energy_top != 0):
print("ERROR: cannot simultanously limit after energy and time!")
parser.print_help()
sys.exit(1)
if args.limit_time_top != 0 and args.limit_time_top < 0:
print("ERROR: time limit top can't be negative")
parser.print_help()
sys.exit(0)
if (args.limit_time != 0 and (args.limit_time < 0 or args.limit_time > 1.0)):
print("ERROR: time limit out of range")
parser.print_help()
sys.exit(0)
if args.limit_energy_top != 0 and args.limit_energy_top < 0:
print("ERROR: energy limit top can't be negative")
parser.print_help()
sys.exit(0)
if (args.limit_energy != 0 and (args.limit_energy < 0 or args.limit_energy > 1.0)):
print("ERROR: energy limit out of range")
parser.print_help()
sys.exit(0)
if (args.time_threshold != 0 and (args.time_threshold < 0 or args.time_threshold > 1.0)):
print("ERROR: time threshold out of range")
parser.print_help()
sys.exit(0)
if (args.energy_threshold != 0 and (args.energy_threshold < 0 or args.energy_threshold > 1.0)):
print("ERROR: energy threshold out of range")
parser.print_help()
sys.exit(0)
if (args.quiet and not args.table):
print("ERROR: don't know what to do")
parser.print_help()
sys.exit(1)
if (not args.profiles) or (len(args.profiles) <= 0):
print("ERROR: unsufficient amount of profiles passed")
parser.print_help()
sys.exit(1)
try:
baselineProfile = pickle.load(xopen(args.profile, mode="rb"))
except Exception:
raise Exception(f'Could not open file {args.profile}')
if 'version' not in baselineProfile or baselineProfile['version'] != profileLib.aggProfileVersion:
raise Exception(f"Incompatible profile version (required: {profileLib.aggProfileVersion})")
errorFunction = False
aggregateFunction = False
if args.aggregate is not False:
chosenAggregateFunction = aggregateFunctions[numpy.where(aggregateFunctions == args.aggregate)[0][0]]
aggregateFunction = chosenAggregateFunction[2]
if args.aggregate is not False and not chosenAggregateFunction[3] and args.error is not False:
print(f"NOTICE: error function does not have an influence on '{chosenAggregateFunction[1]}'")
args.error = False
if args.error is False and args.aggregate is False: # default value
args.error = errorFunctions[0][0]
if args.error is not False:
chosenErrorFunction = errorFunctions[numpy.where(errorFunctions == args.error)[0][0]]
errorFunction = chosenErrorFunction[2]
chart = {'name': '', 'fullTotals': [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'totals': [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'keys': [], 'labels': [], 'values': [], 'errors': [], 'weights': []}
baselineChart = copy.deepcopy(chart)
baselineChart['name'] = f"{baselineProfile['samples'] / baselineProfile['samplingTime']:.2f} Hz, {baselineProfile['samplingTime']:.2f} s, {baselineProfile['latencyTime'] * 1000000 / baselineProfile['samples']:.2f} us"
if (args.limit_energy > 0 or args.limit_energy_top > 0):
baselineProfile['profile'] = collections.OrderedDict(sorted(baselineProfile['profile'].items(), key=lambda x: operator.itemgetter(profileLib.AGGSAMPLE.energy)(x[1]), reverse=True))
else:
baselineProfile['profile'] = collections.OrderedDict(sorted(baselineProfile['profile'].items(), key=lambda x: operator.itemgetter(profileLib.AGGSAMPLE.time)(x[1]), reverse=True))
filterAnything = args.exclude_external or len(args.exclude_binary) > 0 or len(args.exclude_file) > 0 or len(args.exclude_function) > 0
# Filter out exclude before anything else
if filterAnything:
for key in list(baselineProfile['profile'].keys()):
if (args.exclude_external and baselineProfile['profile'][key][profileLib.AGGSAMPLE.mappedSample][profileLib.SAMPLE.binary] != baselineProfile['target']) or \
(len(args.exclude_binary) > 0 and baselineProfile['profile'][key][profileLib.AGGSAMPLE.mappedSample][profileLib.SAMPLE.binary] in args.exclude_binary) or \
(len(args.exclude_file) > 0 and baselineProfile['profile'][key][profileLib.AGGSAMPLE.mappedSample][profileLib.SAMPLE.file] in args.exclude_file) or \
(len(args.exclude_function) > 0 and baselineProfile['profile'][key][profileLib.AGGSAMPLE.mappedSample][profileLib.SAMPLE.function] in args.exclude_function):
del baselineProfile['profile'][key]
for key in baselineProfile['profile']:
baselineChart['fullTotals'][cmpTime] += baselineProfile['profile'][key][profileLib.AGGSAMPLE.time]
baselineChart['fullTotals'][cmpEnergy] += baselineProfile['profile'][key][profileLib.AGGSAMPLE.energy]
baselineChart['fullTotals'][cmpExecs] += baselineProfile['profile'][key][profileLib.AGGSAMPLE.execs]
baselineChart['fullTotals'][cmpRelSamples] = 1
baselineChart['fullTotals'][cmpPower] = (baselineChart['fullTotals'][cmpEnergy] / baselineChart['fullTotals'][cmpTime])
baselineChart['fullTotals'][cmpShare] = 1
chart = {'name': '', 'fullTotals': [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'totals': [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'values': [], 'errors': [], 'weights': []}
errorCharts = [copy.deepcopy(chart) for x in args.profiles]
includedBaselineTime = 0.0
includedBaselineEnergy = 0.0
includedKeys = 0
for index, errorChart in enumerate(errorCharts):
# print(f"Compare profile {index+1}/{len(args.profiles)}")
try:
profile = pickle.load(xopen(args.profiles[index], mode="rb"))
except Exception:
raise Exception(f'Could not open file {args.profiles[index]}')
if 'version' not in profile or profile['version'] != profileLib.aggProfileVersion:
raise Exception(f"Incompatible profile version (required: {profileLib.aggProfileVersion})")
if len(args.name) > index:
errorCharts[index]['name'] = args.name[index]
else:
errorCharts[index]['name'] = f"{profile['samples'] / profile['samplingTime']:.2f} Hz, {profile['samplingTime']:.2f} s"
if filterAnything:
for key in list(profile['profile'].keys()):
if (args.exclude_external and profile['profile'][key][profileLib.AGGSAMPLE.mappedSample][profileLib.SAMPLE.binary] != profile['target']) or \
(len(args.exclude_binary) > 0 and profile['profile'][key][profileLib.AGGSAMPLE.mappedSample][profileLib.SAMPLE.binary] in args.exclude_binary) or \
(len(args.exclude_file) > 0 and profile['profile'][key][profileLib.AGGSAMPLE.mappedSample][profileLib.SAMPLE.file] in args.exclude_file) or \
(len(args.exclude_function) > 0 and profile['profile'][key][profileLib.AGGSAMPLE.mappedSample][profileLib.SAMPLE.function] in args.exclude_function):
del profile['profile'][key]
for key in profile['profile']:
errorChart['fullTotals'][cmpTime] += profile['profile'][key][profileLib.AGGSAMPLE.time]
errorChart['fullTotals'][cmpEnergy] += profile['profile'][key][profileLib.AGGSAMPLE.energy]
errorChart['fullTotals'][cmpExecs] += profile['profile'][key][profileLib.AGGSAMPLE.execs]
errorChart['fullTotals'][cmpRelSamples] = 1
errorChart['fullTotals'][cmpShare] = 1
errorChart['fullTotals'][cmpPower] = (errorChart['fullTotals'][cmpEnergy] / errorChart['fullTotals'][cmpTime])
for key in baselineProfile['profile']:
if key in profile['profile']:
# Key never seen before, so add it to the baseline and all charts
if key not in baselineChart['keys']:
# Key was never compared before, check thresholds and limitations whether to include or not
if (((args.limit_time_top != 0) and (includedKeys >= args.limit_time_top)) or
((args.limit_energy_top != 0) and (includedKeys >= args.limit_energy_top)) or
((args.limit_time != 0) and ((includedBaselineTime / baselineChart['fullTotals'][cmpTime]) >= args.limit_time)) or
((args.limit_energy != 0) and ((includedBaselineEnergy / baselineChart['fullTotals'][cmpEnergy]) >= args.limit_energy)) or
((args.time_threshold != 0) and ((baselineProfile['profile'][key][profileLib.AGGSAMPLE.time] / baselineChart['fullTotals'][cmpTime]) < args.time_threshold)) or
((args.energy_threshold != 0) and ((baselineProfile['profile'][key][profileLib.AGGSAMPLE.energy] / baselineChart['fullTotals'][cmpEnergy]) < args.energy_threshold))):
continue
baselineChart['keys'].append(key)
baselineChart['labels'].append(baselineProfile['profile'][key][profileLib.AGGSAMPLE.label])
baselineChart['values'].append([
baselineProfile['profile'][key][profileLib.AGGSAMPLE.time], # time
baselineProfile['profile'][key][profileLib.AGGSAMPLE.power], # power
baselineProfile['profile'][key][profileLib.AGGSAMPLE.energy], # energy
baselineProfile['profile'][key][profileLib.AGGSAMPLE.samples] / baselineProfile['samples'], # relSamples
baselineProfile['profile'][key][profileLib.AGGSAMPLE.time] / baselineProfile['profile'][key][profileLib.AGGSAMPLE.execs], # execTimes
0 # Share (will be filled in later)
])
includedBaselineTime += baselineProfile['profile'][key][profileLib.AGGSAMPLE.time]
includedBaselineEnergy += baselineProfile['profile'][key][profileLib.AGGSAMPLE.energy]
includedKeys += 1
for chart in errorCharts:
chart['values'].append([0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
# Index of the key correlates to the errorChart (same order)
keyIndex = baselineChart['keys'].index(key)
errorChart['values'][keyIndex] = [
profile['profile'][key][profileLib.AGGSAMPLE.time],
profile['profile'][key][profileLib.AGGSAMPLE.power],
profile['profile'][key][profileLib.AGGSAMPLE.energy],
profile['profile'][key][profileLib.AGGSAMPLE.samples] / profile['samples'],
profile['profile'][key][profileLib.AGGSAMPLE.time] / profile['profile'][key][profileLib.AGGSAMPLE.execs],
0
]
# Totals are the totals of only comparable keys
errorChart['totals'][cmpTime] += profile['profile'][key][profileLib.AGGSAMPLE.time]
errorChart['totals'][cmpEnergy] += profile['profile'][key][profileLib.AGGSAMPLE.energy]
errorChart['totals'][cmpExecs] += profile['profile'][key][profileLib.AGGSAMPLE.time] / profile['profile'][key][profileLib.AGGSAMPLE.execs]
# fullTotals are the metrics of all profile keys
# These can be calculated afterwards
errorChart['totals'][cmpPower] = (errorChart['totals'][cmpEnergy] / errorChart['totals'][cmpTime]) if errorChart['totals'][cmpTime] != 0 else 0
errorChart['totals'][cmpRelSamples] = 1
errorChart['totals'][cmpShare] = 1
del profile
if len(baselineChart['keys']) == 0:
raise Exception("Nothing found to compare, limit too strict?")
# calculate baseline total
values = numpy.array(baselineChart['values'])
baselineChart['totals'] = [
numpy.sum(values[:, 0]),
0.0,
numpy.sum(values[:, 2]),
1,
numpy.sum(values[:, 4]),
1
]
baselineChart['totals'][cmpPower] = (baselineChart['totals'][cmpEnergy] / baselineChart['totals'][cmpTime]) if baselineChart['totals'][cmpTime] != 0 else 0
baselineChart['totals'][cmpRelSamples] = 1
baselineChart['totals'][cmpShare] = 1
del values
# fill in the weights, based on baseline energy
for index, _ in enumerate(baselineChart['keys']):
baselineChart['values'][index][cmpShare] = baselineChart['values'][index][subCmpOffset] / baselineChart['totals'][subCmpOffset]
for chart in errorCharts:
chart['values'][index][cmpShare] = chart['values'][index][subCmpOffset] / chart['totals'][subCmpOffset]
if args.limit_energy:
chart['weights'].append(chart['values'][index][cmpEnergy] / baselineChart['fullTotals'][cmpEnergy])
else:
chart['weights'].append(chart['values'][index][cmpTime] / baselineChart['fullTotals'][cmpTime])
# fill in the errors
if errorFunction is not False:
for index, _ in enumerate(baselineChart['keys']):
for chart in errorCharts:
chart['errors'].append(errorFunction(baselineChart['values'][index][cmpOffset], chart['values'][index][cmpOffset], baselineChart['totals'][cmpOffset], chart['totals'][cmpOffset], chart['weights'][index], baselineChart['fullTotals'][cmpOffset], chart['fullTotals'][cmpOffset]))
# names = [ key, name1, name2, name3, name4 ]
# values = [ key, error1, error2, error3, error4 ]
#
if aggregateFunction:
header += f"{chosenAggregateFunction[1]} "
if errorFunction:
header += f"{chosenErrorFunction[1]}"
header = header.strip()
if args.header is not None:
header = args.header
if errorFunction is not False and aggregateFunction is False:
headers = numpy.array([chart['name'] for chart in errorCharts])
rows = numpy.array(baselineChart['labels']).reshape(-1, 1)
weights = numpy.empty((rows.shape[0], 0))
barLabels = numpy.empty((rows.shape[0], 0))
for chart in errorCharts:
rows = numpy.append(rows, numpy.array(chart['errors']).reshape(-1, 1), axis=1)
weights = numpy.append(weights, numpy.array(chart['weights']).reshape(-1, 1), axis=1)
barLabels = numpy.append(barLabels, numpy.array(chart['weights']).reshape(-1, 1), axis=1) # weights
# barLabels = numpy.append(barLabels, chartValues[:, 4].reshape(-1, 1), axis=1) # execTimes
try:
# Try to sort after numeric values
asort = numpy.array(rows[:, 1], dtype=float).argsort()
except Exception:
asort = rows[:, 1].argsort()
rows = rows[asort]
weights = weights[asort]
barLabels = barLabels[asort]
if aggregateFunction is not False:
baselineValues = numpy.array(baselineChart['values'])
rows = numpy.array([chart['name'] for chart in errorCharts], dtype=object).reshape(-1, 1)
barLabels = numpy.array([''] * len(rows)).reshape(1, -1)
errors = numpy.empty(0)
for chart in errorCharts:
chartValues = numpy.array(chart['values'])
errors = numpy.append(errors, aggregateFunction(
baselineValues[:, cmpOffset],
chart['errors'] if errorFunction is not False else chartValues[:, cmpOffset],
baselineChart['totals'][cmpOffset],
chart['totals'][cmpOffset],
chart['weights'],
baselineChart['fullTotals'][cmpOffset],
chart['fullTotals'][cmpOffset]
))
rows = numpy.append(rows, errors.reshape(-1, 1), axis=1)
headers = numpy.array([header], dtype=object)
if aggregateFunction is False:
if args.totals:
total = ['_total']
for i in range(1, len(rows[0])):
total = numpy.append(total, numpy.sum(numpy.array(rows[:, (i)], dtype=float)))
weights = numpy.concatenate(([[0] * (len(total) - 1)], weights), axis=0)
rows = numpy.concatenate(([total], rows), axis=0)
if args.coverage:
coverage = ['_coverage']
coverage.extend([chart['totals'][cmpOffset] / chart['fullTotals'][cmpOffset] if chart['fullTotals'][cmpOffset] != 0 else 0 for chart in errorCharts])
weights = numpy.concatenate(([[0] * (len(coverage) - 1)], weights), axis=0)
rows = numpy.concatenate(([coverage], rows), axis=0)
if args.weights:
for i in range(0, len(rows[0]) - 1):
headers = numpy.insert(headers, (i * 2), 'Weights')
rows = numpy.insert(rows, (i * 2) + 1, weights[:, i], axis=1)
else:
header = 'Profile' # baselineProfile['name']
rows = rows[::-1]
if (args.table):
if args.table.endswith("bz2"):
table = bz2.BZ2File.open(args.table, "w")
else:
table = open(args.table, "w")
table.write(header + ";" + ';'.join(headers) + "\n")
for i, x in enumerate(rows):
table.write(';'.join([f"{y:.16f}" if not isinstance(y, str) else y for y in x]) + "\n")
table.close()
if not args.quiet:
print(f"CSV saved to {args.table}")
if (not args.quiet):
headers = numpy.append([header], headers)
if (args.cut_off_symbols > 0):
rows[:, 0] = [textwrap.fill(x, args.cut_off_symbols) for x in rows[:, 0]]
elif (args.cut_off_symbols < 0):
rows[:, 0] = [f"{x[0:abs(args.cut_off_symbols)]}..." if len(x) > abs(args.cut_off_symbols) else x for x in rows[:, 0]]
print(tabulate.tabulate(rows, headers=headers, floatfmt=".16f"))
| [
"tabulate.tabulate",
"copy.deepcopy",
"bz2.BZ2File.open",
"numpy.append",
"argparse.ArgumentParser",
"statistics.mean",
"numpy.sum",
"numpy.where",
"textwrap.fill",
"numpy.concatenate",
"numpy.empty",
"numpy.insert",
"numpy.array",
"sys.exit",
"xopen.xopen",
"operator.itemgetter"
] | [((4375, 4969), 'numpy.array', 'numpy.array', (["[['relative_error', 'Relative Error', relativeError], ['error', 'Error',\n error], ['absolute_error', 'Absolute Error', absoluteError], [\n 'weighted_error', 'Weighted Error', weightedError], [\n 'absolute_weighted_error', 'Absolute Weighted Error',\n absoluteWeightedError], ['absolute_relative_error',\n 'Absolute Relative Error', absoluteRelativeError], [\n 'weighted_relative_error', 'Weighted Relative Error',\n weightedRelativeError], ['absolute_weighted_relative_error',\n 'Absolute Weighted Relative Error', absoluteWeightedRelativeError]]"], {'dtype': 'object'}), "([['relative_error', 'Relative Error', relativeError], ['error',\n 'Error', error], ['absolute_error', 'Absolute Error', absoluteError], [\n 'weighted_error', 'Weighted Error', weightedError], [\n 'absolute_weighted_error', 'Absolute Weighted Error',\n absoluteWeightedError], ['absolute_relative_error',\n 'Absolute Relative Error', absoluteRelativeError], [\n 'weighted_relative_error', 'Weighted Relative Error',\n weightedRelativeError], ['absolute_weighted_relative_error',\n 'Absolute Weighted Relative Error', absoluteWeightedRelativeError]],\n dtype=object)\n", (4386, 4969), False, 'import numpy\n'), ((4988, 5535), 'numpy.array', 'numpy.array', (["[['sum', 'Total', aggregateSum, True], ['relative', 'Relative',\n aggregateRelative, True], ['relative_baseline', 'Relative',\n aggregateRelativeBaseline, True], ['min', 'Minimum', aggregateMin, True\n ], ['max', 'Maximum', aggregateMax, True], ['mean', 'Mean',\n aggregateMean, True], ['wmean', 'Weighted Mean', aggregateWeightedMean,\n True], ['rmse', 'Root Mean Squared Error',\n aggregateRootMeanSquaredError, False], ['wrmse',\n 'Weighted Root Mean Squared Error',\n aggregateWeightedRootMeanSquaredError, False]]"], {}), "([['sum', 'Total', aggregateSum, True], ['relative', 'Relative',\n aggregateRelative, True], ['relative_baseline', 'Relative',\n aggregateRelativeBaseline, True], ['min', 'Minimum', 
aggregateMin, True\n ], ['max', 'Maximum', aggregateMax, True], ['mean', 'Mean',\n aggregateMean, True], ['wmean', 'Weighted Mean', aggregateWeightedMean,\n True], ['rmse', 'Root Mean Squared Error',\n aggregateRootMeanSquaredError, False], ['wrmse',\n 'Weighted Root Mean Squared Error',\n aggregateWeightedRootMeanSquaredError, False]])\n", (4999, 5535), False, 'import numpy\n'), ((5551, 5636), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Visualize profiles from intrvelf sampler."""'}), "(description='Visualize profiles from intrvelf sampler.'\n )\n", (5574, 5636), False, 'import argparse\n'), ((12459, 12479), 'copy.deepcopy', 'copy.deepcopy', (['chart'], {}), '(chart)\n', (12472, 12479), False, 'import copy\n'), ((20834, 20870), 'numpy.array', 'numpy.array', (["baselineChart['values']"], {}), "(baselineChart['values'])\n", (20845, 20870), False, 'import numpy\n'), ((3598, 3627), 'statistics.mean', 'statistics.mean', (['explodedData'], {}), '(explodedData)\n', (3613, 3627), False, 'import statistics\n'), ((9900, 9911), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (9908, 9911), False, 'import sys\n'), ((10052, 10063), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (10060, 10063), False, 'import sys\n'), ((10215, 10226), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (10223, 10226), False, 'import sys\n'), ((10372, 10383), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (10380, 10383), False, 'import sys\n'), ((10543, 10554), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (10551, 10554), False, 'import sys\n'), ((10722, 10733), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (10730, 10733), False, 'import sys\n'), ((10909, 10920), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (10917, 10920), False, 'import sys\n'), ((11028, 11039), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (11036, 11039), False, 'import sys\n'), ((11181, 11192), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (11189, 11192), False, 'import sys\n'), 
((14827, 14847), 'copy.deepcopy', 'copy.deepcopy', (['chart'], {}), '(chart)\n', (14840, 14847), False, 'import copy\n'), ((20903, 20926), 'numpy.sum', 'numpy.sum', (['values[:, 0]'], {}), '(values[:, 0])\n', (20912, 20926), False, 'import numpy\n'), ((20941, 20964), 'numpy.sum', 'numpy.sum', (['values[:, 2]'], {}), '(values[:, 2])\n', (20950, 20964), False, 'import numpy\n'), ((20977, 21000), 'numpy.sum', 'numpy.sum', (['values[:, 4]'], {}), '(values[:, 4])\n', (20986, 21000), False, 'import numpy\n'), ((22713, 22766), 'numpy.array', 'numpy.array', (["[chart['name'] for chart in errorCharts]"], {}), "([chart['name'] for chart in errorCharts])\n", (22724, 22766), False, 'import numpy\n'), ((22844, 22875), 'numpy.empty', 'numpy.empty', (['(rows.shape[0], 0)'], {}), '((rows.shape[0], 0))\n', (22855, 22875), False, 'import numpy\n'), ((22892, 22923), 'numpy.empty', 'numpy.empty', (['(rows.shape[0], 0)'], {}), '((rows.shape[0], 0))\n', (22903, 22923), False, 'import numpy\n'), ((23660, 23696), 'numpy.array', 'numpy.array', (["baselineChart['values']"], {}), "(baselineChart['values'])\n", (23671, 23696), False, 'import numpy\n'), ((23865, 23879), 'numpy.empty', 'numpy.empty', (['(0)'], {}), '(0)\n', (23876, 23879), False, 'import numpy\n'), ((24449, 24484), 'numpy.array', 'numpy.array', (['[header]'], {'dtype': 'object'}), '([header], dtype=object)\n', (24460, 24484), False, 'import numpy\n'), ((25934, 25965), 'numpy.append', 'numpy.append', (['[header]', 'headers'], {}), '([header], headers)\n', (25946, 25965), False, 'import numpy\n'), ((11233, 11263), 'xopen.xopen', 'xopen', (['args.profile'], {'mode': '"""rb"""'}), "(args.profile, mode='rb')\n", (11238, 11263), False, 'from xopen import xopen\n'), ((23932, 23960), 'numpy.array', 'numpy.array', (["chart['values']"], {}), "(chart['values'])\n", (23943, 23960), False, 'import numpy\n'), ((24793, 24835), 'numpy.concatenate', 'numpy.concatenate', (['([total], rows)'], {'axis': '(0)'}), '(([total], rows), axis=0)\n', 
(24810, 24835), False, 'import numpy\n'), ((25148, 25193), 'numpy.concatenate', 'numpy.concatenate', (['([coverage], rows)'], {'axis': '(0)'}), '(([coverage], rows), axis=0)\n', (25165, 25193), False, 'import numpy\n'), ((25545, 25578), 'bz2.BZ2File.open', 'bz2.BZ2File.open', (['args.table', '"""w"""'], {}), "(args.table, 'w')\n", (25561, 25578), False, 'import bz2\n'), ((26257, 26314), 'tabulate.tabulate', 'tabulate.tabulate', (['rows'], {'headers': 'headers', 'floatfmt': '""".16f"""'}), "(rows, headers=headers, floatfmt='.16f')\n", (26274, 26314), False, 'import tabulate\n'), ((15099, 15137), 'xopen.xopen', 'xopen', (['args.profiles[index]'], {'mode': '"""rb"""'}), "(args.profiles[index], mode='rb')\n", (15104, 15137), False, 'from xopen import xopen\n'), ((22778, 22814), 'numpy.array', 'numpy.array', (["baselineChart['labels']"], {}), "(baselineChart['labels'])\n", (22789, 22814), False, 'import numpy\n'), ((23708, 23775), 'numpy.array', 'numpy.array', (["[chart['name'] for chart in errorCharts]"], {'dtype': 'object'}), "([chart['name'] for chart in errorCharts], dtype=object)\n", (23719, 23775), False, 'import numpy\n'), ((25282, 25321), 'numpy.insert', 'numpy.insert', (['headers', '(i * 2)', '"""Weights"""'], {}), "(headers, i * 2, 'Weights')\n", (25294, 25321), False, 'import numpy\n'), ((25343, 25395), 'numpy.insert', 'numpy.insert', (['rows', '(i * 2 + 1)', 'weights[:, i]'], {'axis': '(1)'}), '(rows, i * 2 + 1, weights[:, i], axis=1)\n', (25355, 25395), False, 'import numpy\n'), ((26023, 26061), 'textwrap.fill', 'textwrap.fill', (['x', 'args.cut_off_symbols'], {}), '(x, args.cut_off_symbols)\n', (26036, 26061), False, 'import textwrap\n'), ((11669, 11718), 'numpy.where', 'numpy.where', (['(aggregateFunctions == args.aggregate)'], {}), '(aggregateFunctions == args.aggregate)\n', (11680, 11718), False, 'import numpy\n'), ((12172, 12213), 'numpy.where', 'numpy.where', (['(errorFunctions == args.error)'], {}), '(errorFunctions == args.error)\n', (12183, 12213), 
False, 'import numpy\n'), ((23412, 23448), 'numpy.array', 'numpy.array', (['rows[:, 1]'], {'dtype': 'float'}), '(rows[:, 1], dtype=float)\n', (23423, 23448), False, 'import numpy\n'), ((22988, 23016), 'numpy.array', 'numpy.array', (["chart['errors']"], {}), "(chart['errors'])\n", (22999, 23016), False, 'import numpy\n'), ((23081, 23110), 'numpy.array', 'numpy.array', (["chart['weights']"], {}), "(chart['weights'])\n", (23092, 23110), False, 'import numpy\n'), ((23179, 23208), 'numpy.array', 'numpy.array', (["chart['weights']"], {}), "(chart['weights'])\n", (23190, 23208), False, 'import numpy\n'), ((24656, 24692), 'numpy.array', 'numpy.array', (['rows[:, i]'], {'dtype': 'float'}), '(rows[:, i], dtype=float)\n', (24667, 24692), False, 'import numpy\n'), ((12870, 12918), 'operator.itemgetter', 'operator.itemgetter', (['profileLib.AGGSAMPLE.energy'], {}), '(profileLib.AGGSAMPLE.energy)\n', (12889, 12918), False, 'import operator\n'), ((13061, 13107), 'operator.itemgetter', 'operator.itemgetter', (['profileLib.AGGSAMPLE.time'], {}), '(profileLib.AGGSAMPLE.time)\n', (13080, 13107), False, 'import operator\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth.models import AbstractBaseUser
from django.db import models
# Create your models here.
class Account(AbstractBaseUser):
    """Custom user model that authenticates with an email address.

    ``AbstractBaseUser`` supplies only ``password`` and ``last_login``;
    every other field is declared here.
    """
    # Login identifier (see USERNAME_FIELD below); must be unique.
    email = models.EmailField(unique=True)
    # Display name, also required at user creation (see REQUIRED_FIELDS).
    username = models.CharField(max_length=40, unique=True)
    is_admin = models.BooleanField(default=False)
    created_at = models.DateTimeField(auto_now_add=True)  # set once, on insert
    updated_at = models.DateTimeField(auto_now=True)  # refreshed on every save
    # Authenticate with the email field instead of a username.
    USERNAME_FIELD = 'email'
    # Extra fields prompted for by `createsuperuser` besides USERNAME_FIELD.
    REQUIRED_FIELDS = ['username']
    # NOTE(review): Python-2-style __unicode__ (the file imports
    # unicode_literals); under Python 3 Django uses __str__ instead — confirm
    # target Python version. Also no custom manager is defined here, which
    # `createsuperuser` normally requires — verify it exists elsewhere.
    def __unicode__(self):
        return self.email
| [
"django.db.models.DateTimeField",
"django.db.models.CharField",
"django.db.models.BooleanField",
"django.db.models.EmailField"
] | [((228, 258), 'django.db.models.EmailField', 'models.EmailField', ([], {'unique': '(True)'}), '(unique=True)\n', (245, 258), False, 'from django.db import models\n'), ((274, 318), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(40)', 'unique': '(True)'}), '(max_length=40, unique=True)\n', (290, 318), False, 'from django.db import models\n'), ((334, 368), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (353, 368), False, 'from django.db import models\n'), ((386, 425), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (406, 425), False, 'from django.db import models\n'), ((443, 478), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (463, 478), False, 'from django.db import models\n')] |
# -*- coding: utf-8 -*-
"""
> :warning: This file is only for use by the Simmate team. Users should instead
access data via the load_remote_archive method.
This file is for pulling AFLOW data into the Simmate database.
AFLOW's supported REST API can be accessed via "AFLUX API". This is a separate
python package, which is maintained at https://github.com/rosenbrockc/aflow.
Note that this is not from the official AFLOW team, but it is made such that keywords
are pulled dynamically from the AFLOW servers -- any updates in AFLOW's API should
be properly handled. Also structures are loaded as ASE Atom objects, which we then
convert to pymatgen.
"""
from tqdm import tqdm
from pymatgen.io.ase import AseAtomsAdaptor
from simmate.database.third_parties.aflow import AflowStructure
# AFLOW is not a dependency of simmate, so make sure you install it before using
# this module
try:
from aflow import K as AflowKeywords
from aflow.control import Query as AflowQuery
except:
raise ModuleNotFoundError(
"You must install aflow client with `conda install -c conda-forge aflow`"
)
def load_all_structures():
    """
    Only use this function if you are part of the Simmate dev team!
    Loads all structures directly for the AFLOW database into the local
    Simmate database.

    Builds one AFLUX query over all four AFLOW catalogs, then iterates the
    results, converting each entry ASE -> pymatgen -> Simmate database row.
    """
    # The way we build a query looks similar to the Django API, where we start
    # with a Query object (similar to Table.objects manager) and build filters
    # off of it.
    data = (
        AflowQuery(
            # This is a list of the supported "catalogs" that AFLOW has -- which appear
            # to be separately stored databases. I just use all of them by default.
            catalog=[
                "icsd",  # 60,000 structures
                "lib1",  # 4,000 structures
                "lib2",  # 360,000 structures (binary phases)
                "lib3",  # 2,530,000 structures (ternary phases)
            ],
            # The batch size the number of results to return per HTTP request.
            batch_size=2000,
        )
        # .filter(
        #     # Now we want set the conditions for which structures to pull. Because we
        #     # want all of them, we normally comment this line out. For testing, we
        #     # can pull a smaller subset of the structures.
        #     # I use the element Dy because it gives about 1,300 structures
        #     AflowKeywords.species == "Dy",
        # )
        .select(
            # Indicate what data we want to grab from each result. Note that we don't
            # access the structure quite yet.
            AflowKeywords.auid,
            # This is the URL that leads to the rest of the data. Note it is a
            # interactive REST endpoint, while the dashboard link is different.
            # AflowKeywords.aurl,
            # The date that the entry was added
            # AflowKeywords.aflowlib_date,
            # Band gap
            # AflowKeywords.Egap,
            # The calculated energy of the unit cell
            AflowKeywords.enthalpy_cell,
            # BUG: or should we use energy_cell? Aren't these the same in
            # groundstate DFT?
        )
    )

    # Let's sanitize all structures first. So iterate through each one in the list
    # This also takes a while, so we use a progress bar (via tqdm)
    for entry in tqdm(data):

        # grab the structure -- this is loaded as an ASE atoms object
        structure_ase = entry.atoms()

        # convert the structure to pymatgen
        structure_pmg = AseAtomsAdaptor.get_structure(structure_ase)

        # now convert the entry to a database object
        # (auid colons are replaced so the id is filesystem/URL friendly)
        structure_db = AflowStructure.from_toolkit(
            id=entry.auid.replace(":", "-"),
            structure=structure_pmg,
            energy=entry.enthalpy_cell,
        )

        # and save it to our database!
        structure_db.save()
| [
"pymatgen.io.ase.AseAtomsAdaptor.get_structure",
"aflow.control.Query",
"tqdm.tqdm"
] | [((3358, 3368), 'tqdm.tqdm', 'tqdm', (['data'], {}), '(data)\n', (3362, 3368), False, 'from tqdm import tqdm\n'), ((3548, 3592), 'pymatgen.io.ase.AseAtomsAdaptor.get_structure', 'AseAtomsAdaptor.get_structure', (['structure_ase'], {}), '(structure_ase)\n', (3577, 3592), False, 'from pymatgen.io.ase import AseAtomsAdaptor\n'), ((1511, 1580), 'aflow.control.Query', 'AflowQuery', ([], {'catalog': "['icsd', 'lib1', 'lib2', 'lib3']", 'batch_size': '(2000)'}), "(catalog=['icsd', 'lib1', 'lib2', 'lib3'], batch_size=2000)\n", (1521, 1580), True, 'from aflow.control import Query as AflowQuery\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = "<NAME>"
__email__ = "<EMAIL>"
import datetime
import numpy as np
import pandas as pd
class DataType(object):
INT = 1
STRING = 2
DECIMAL = 3
TIMESTAMP = 4
DATE = 5
ARRAY = 6
_converted_types = {
DataType.INT: int,
DataType.DECIMAL: float,
DataType.ARRAY: list,
DataType.STRING: np.dtype('O'), #TODO: check string type
DataType.TIMESTAMP: datetime.datetime,
DataType.DATE: datetime.date,
}
| [
"numpy.dtype"
] | [((384, 397), 'numpy.dtype', 'np.dtype', (['"""O"""'], {}), "('O')\n", (392, 397), True, 'import numpy as np\n')] |
#!/usr/bin/env python
import sys
# import all necessary stuff
import osg
import osgDB
import osgGA
import osgViewer
# Build the scene-graph root node.
node = osg.Group()
# Extend osgDB's library/data search path with Python's sys.path so that
# plugins and data files can be located from a Python environment.
filepath = osgDB.getLibraryFilePathList()
for item in sys.path: filepath.append(item)
osgDB.setLibraryFilePathList(filepath)
# Load the demo model (searched along the path list configured above).
loadedmodel = osgDB.readNodeFile("cow.osg")
# Attach the loaded model under the root node.
node.addChild(loadedmodel)
# Create the viewer window.
viewer = osgViewer.Viewer()
# Use single-threaded rendering.
viewer.setThreadingModel(osgViewer.Viewer.SingleThreaded)
# Register the standard interactive handlers (stats, window resize,
# threading-model switching, on-screen help).
viewer.addEventHandler(osgViewer.StatsHandler())
viewer.addEventHandler(osgViewer.WindowSizeHandler())
viewer.addEventHandler(osgViewer.ThreadingHandler())
viewer.addEventHandler(osgViewer.HelpHandler())
# Attach the scene to the viewer.
viewer.setSceneData(node)
# Enter the render loop until the user closes the window.
viewer.run()
| [
"osgDB.getLibraryFilePathList",
"osgDB.setLibraryFilePathList",
"osgViewer.ThreadingHandler",
"osgViewer.Viewer",
"osgViewer.StatsHandler",
"osgViewer.HelpHandler",
"osg.Group",
"osgDB.readNodeFile",
"osgViewer.WindowSizeHandler"
] | [((147, 158), 'osg.Group', 'osg.Group', ([], {}), '()\n', (156, 158), False, 'import osg\n'), ((191, 221), 'osgDB.getLibraryFilePathList', 'osgDB.getLibraryFilePathList', ([], {}), '()\n', (219, 221), False, 'import osgDB\n'), ((266, 304), 'osgDB.setLibraryFilePathList', 'osgDB.setLibraryFilePathList', (['filepath'], {}), '(filepath)\n', (294, 304), False, 'import osgDB\n'), ((320, 349), 'osgDB.readNodeFile', 'osgDB.readNodeFile', (['"""cow.osg"""'], {}), "('cow.osg')\n", (338, 349), False, 'import osgDB\n'), ((420, 438), 'osgViewer.Viewer', 'osgViewer.Viewer', ([], {}), '()\n', (436, 438), False, 'import osgViewer\n'), ((567, 591), 'osgViewer.StatsHandler', 'osgViewer.StatsHandler', ([], {}), '()\n', (589, 591), False, 'import osgViewer\n'), ((616, 645), 'osgViewer.WindowSizeHandler', 'osgViewer.WindowSizeHandler', ([], {}), '()\n', (643, 645), False, 'import osgViewer\n'), ((670, 698), 'osgViewer.ThreadingHandler', 'osgViewer.ThreadingHandler', ([], {}), '()\n', (696, 698), False, 'import osgViewer\n'), ((723, 746), 'osgViewer.HelpHandler', 'osgViewer.HelpHandler', ([], {}), '()\n', (744, 746), False, 'import osgViewer\n')] |
import json
import csv
# Emotion class codes E10..E69; a label's position in this list is its
# integer class index used in the CSV output.
labels = ["E" + str(i) for i in range(10, 70)]
# Build train.csv
def create_train():
    """
    Convert train_data.json into train.csv.

    Each output row is ``[label index, "HS01 [SEP] SS01 [SEP] HS02"]`` where
    the label index is the position of the emotion code in ``labels``.
    The CSV is opened in append mode, so repeated runs append rows (and a
    header) to an existing file, matching the original behavior.
    """
    with open('train_data.json', 'r', encoding="utf-8") as f:
        json_data = json.load(f)
    # Use a context manager so the output handle is always closed (the
    # original leaked it), and create the csv.writer once instead of
    # re-creating it on every row.
    with open('train.csv', 'a', newline='') as f:
        wr = csv.writer(f)
        wr.writerow(['label', 'content'])
        for item in json_data:
            content = item['talk']['content']
            label = item['profile']['emotion']['type']
            text = content['HS01'] + ' [SEP] ' + content['SS01'] + ' [SEP] ' + content['HS02']
            wr.writerow([labels.index(label), text])
# Build dev.csv
def create_dev():
    """
    Convert dev_data.json into dev.csv.

    Each output row is ``[label index, "HS01 [SEP] SS01 [SEP] HS02"]`` where
    the label index is the position of the emotion code in ``labels``.
    The CSV is opened in append mode, so repeated runs append rows (and a
    header) to an existing file, matching the original behavior.
    """
    with open('dev_data.json', 'r', encoding="utf-8") as f:
        json_data = json.load(f)
    # Use a context manager so the output handle is always closed (the
    # original leaked it), and create the csv.writer once instead of
    # re-creating it on every row.
    with open('dev.csv', 'a', newline='') as f:
        wr = csv.writer(f)
        wr.writerow(['label', 'content'])
        for item in json_data:
            content = item['talk']['content']
            label = item['profile']['emotion']['type']
            text = content['HS01'] + ' [SEP] ' + content['SS01'] + ' [SEP] ' + content['HS02']
            wr.writerow([labels.index(label), text])
# Build test.csv
def create_test():
    """
    Convert final_test.json into final_test.csv.

    Each output row is ``["HS01 [SEP] SS01 [SEP] HS02", talk id]``.
    The CSV is opened in append mode, so repeated runs append rows (and a
    header) to an existing file, matching the original behavior.
    """
    with open('final_test.json', 'r', encoding="utf-8") as f:
        json_data = json.load(f)
    # Use a context manager so the output handle is always closed (the
    # original leaked it), and create the csv.writer once instead of
    # re-creating it on every row.
    with open('final_test.csv', 'a', newline='') as f:
        wr = csv.writer(f)
        wr.writerow(['content', 'talkid'])
        for item in json_data:
            content = item['talk']['content']
            talk_id = item['talk']['id']['talk-id']
            text = content['HS01'] + ' [SEP] ' + content['SS01'] + ' [SEP] ' + content['HS02']
            wr.writerow([text, talk_id])
# Script entry point: by default only the test split is regenerated.
if __name__ == "__main__":
    # create_train()  # build train.csv
    # create_dev()  # build dev.csv
    create_test()  # build test.csv
| [
"csv.writer",
"json.load"
] | [((612, 625), 'csv.writer', 'csv.writer', (['f'], {}), '(f)\n', (622, 625), False, 'import csv\n'), ((1193, 1206), 'csv.writer', 'csv.writer', (['f'], {}), '(f)\n', (1203, 1206), False, 'import csv\n'), ((1850, 1863), 'csv.writer', 'csv.writer', (['f'], {}), '(f)\n', (1860, 1863), False, 'import csv\n'), ((513, 525), 'json.load', 'json.load', (['f'], {}), '(f)\n', (522, 525), False, 'import json\n'), ((944, 957), 'csv.writer', 'csv.writer', (['f'], {}), '(f)\n', (954, 957), False, 'import csv\n'), ((1096, 1108), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1105, 1108), False, 'import json\n'), ((1526, 1539), 'csv.writer', 'csv.writer', (['f'], {}), '(f)\n', (1536, 1539), False, 'import csv\n'), ((1745, 1757), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1754, 1757), False, 'import json\n'), ((2153, 2166), 'csv.writer', 'csv.writer', (['f'], {}), '(f)\n', (2163, 2166), False, 'import csv\n')] |
# Generated by Django 3.2.7 on 2021-10-08 08:21
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds Firearm.opType (TextField, empty-string default) and alters
    # Firearm.SerialNumber to a CharField with max_length=30.

    dependencies = [
        ('firearm', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='firearm',
            name='opType',
            field=models.TextField(default=''),
        ),
        migrations.AlterField(
            model_name='firearm',
            name='SerialNumber',
            field=models.CharField(max_length=30),
        ),
    ]
| [
"django.db.models.CharField",
"django.db.models.TextField"
] | [((324, 352), 'django.db.models.TextField', 'models.TextField', ([], {'default': '""""""'}), "(default='')\n", (340, 352), False, 'from django.db import migrations, models\n'), ((481, 512), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)'}), '(max_length=30)\n', (497, 512), False, 'from django.db import migrations, models\n')] |
"""
This file is part of LiberaForms.
# SPDX-FileCopyrightText: 2021 LiberaForms.org
# SPDX-License-Identifier: AGPL-3.0-or-later
"""
import os, shutil
def ensure_uploads_dir_tree(app):
    """
    Create the uploads directory tree for the app if it is missing.

    Ensures the media and attachment directories exist under UPLOADS_DIR.
    When the media directory is created for the first time, the default
    brand assets are copied from ROOT_DIR/assets/brand into BRAND_DIR.
    """
    uploads_dir = app.config['UPLOADS_DIR']
    media_dir = os.path.join(uploads_dir, app.config['MEDIA_DIR'])
    attachment_dir = os.path.join(uploads_dir, app.config['ATTACHMENT_DIR'])
    if not os.path.isdir(media_dir):
        os.makedirs(media_dir)
        # First-time setup: seed the brand directory with the default assets.
        brand_src = os.path.join(app.config['ROOT_DIR'], 'assets', 'brand')
        brand_dst = os.path.join(uploads_dir, app.config['BRAND_DIR'])
        shutil.copytree(brand_src, brand_dst)
    if not os.path.isdir(attachment_dir):
        os.makedirs(attachment_dir)
    #app.logger.info("Uploads dir tree in place")
| [
"os.path.join",
"os.path.isdir",
"os.makedirs"
] | [((250, 300), 'os.path.join', 'os.path.join', (['uploads_dir', "app.config['MEDIA_DIR']"], {}), "(uploads_dir, app.config['MEDIA_DIR'])\n", (262, 300), False, 'import os, shutil\n'), ((322, 377), 'os.path.join', 'os.path.join', (['uploads_dir', "app.config['ATTACHMENT_DIR']"], {}), "(uploads_dir, app.config['ATTACHMENT_DIR'])\n", (334, 377), False, 'import os, shutil\n'), ((389, 413), 'os.path.isdir', 'os.path.isdir', (['media_dir'], {}), '(media_dir)\n', (402, 413), False, 'import os, shutil\n'), ((423, 445), 'os.makedirs', 'os.makedirs', (['media_dir'], {}), '(media_dir)\n', (434, 445), False, 'import os, shutil\n'), ((614, 643), 'os.path.isdir', 'os.path.isdir', (['attachment_dir'], {}), '(attachment_dir)\n', (627, 643), False, 'import os, shutil\n'), ((653, 680), 'os.makedirs', 'os.makedirs', (['attachment_dir'], {}), '(attachment_dir)\n', (664, 680), False, 'import os, shutil\n'), ((470, 525), 'os.path.join', 'os.path.join', (["app.config['ROOT_DIR']", '"""assets"""', '"""brand"""'], {}), "(app.config['ROOT_DIR'], 'assets', 'brand')\n", (482, 525), False, 'import os, shutil\n'), ((551, 601), 'os.path.join', 'os.path.join', (['uploads_dir', "app.config['BRAND_DIR']"], {}), "(uploads_dir, app.config['BRAND_DIR'])\n", (563, 601), False, 'import os, shutil\n')] |
import datetime
import pytz
from sqlalchemy import DateTime
from sqlalchemy.types import TypeDecorator
def tzware_datetime():
    """
    Return a timezone aware datetime in UTC.

    Uses the standard library's fixed UTC tzinfo instead of pytz.utc; the
    two produce equal aware datetimes, and this removes the third-party
    dependency from the call path.

    :return: Datetime
    """
    return datetime.datetime.now(datetime.timezone.utc)
class AwareDateTime(TypeDecorator):
    """
    A DateTime column type that only accepts tz-aware datetimes.

    Source:
    https://gist.github.com/inklesspen/90b554c864b99340747e
    """
    cache_ok = True
    impl = DateTime(timezone=True)

    def process_bind_param(self, value, dialect):
        # Reject naive datetimes before they reach the database.
        is_naive = isinstance(value, datetime.datetime) and value.tzinfo is None
        if is_naive:
            raise ValueError("{!r} must be TZ-aware".format(value))
        return value

    def __repr__(self):
        return "AwareDateTime()"
| [
"datetime.datetime.now",
"sqlalchemy.DateTime"
] | [((217, 248), 'datetime.datetime.now', 'datetime.datetime.now', (['pytz.utc'], {}), '(pytz.utc)\n', (238, 248), False, 'import datetime\n'), ((471, 494), 'sqlalchemy.DateTime', 'DateTime', ([], {'timezone': '(True)'}), '(timezone=True)\n', (479, 494), False, 'from sqlalchemy import DateTime\n')] |
from multiprocessing import Pool
import pandas as pd
import os
from model_based_analysis_tools import *
# Project-relative locations of the model-processed divergence inputs,
# keyed by data variant.
base_dir = '../../'
in_dirs = {
    'data':'new-processed-model-processed-1en01',
    'synthetic-data':'new-synthetic-model-processed-1en01',
    'score-matched-data':'new-synthetic-score-matched-model-processed-1en01'}
# Subset tag that processed game filenames must carry to be included.
subset = '1en01'
data_dirs = {
    'data':'new-processed',
    'synthetic-data':'new-synthetic-processed',
    'score-matched-data':'new-synthetic-score-matched-processed'}
out_dir = base_dir + 'model-processed-results/'
# exist_ok replaces the old bare try/except pass, which silently swallowed
# *every* makedirs failure (e.g. permission errors), not just "already exists".
os.makedirs(out_dir, exist_ok=True)
def run(model):
    """
    Compute divergences for every matching game file of one data variant
    and pickle the {filename: divergences} dict into ``out_dir``.

    Only files ending in '.out' whose name carries the selected ``subset``
    tag are processed.
    """
    in_dir = in_dirs[model]
    results = {}
    for game in os.listdir(base_dir + in_dir):
        if not game.endswith('.out'):
            continue
        if game.split('_')[-2].split('-')[1] != subset:
            continue
        # Pickle files must be opened in binary mode.
        with open(base_dir + in_dir + '/' + game, 'rb') as f:
            data = pickle.load(f)
        results[game] = get_all_divergences(data)
    with open(out_dir + model + '.out', 'wb') as h:
        pickle.dump(results, h)
# Fan out one worker process per data variant; each writes its own results file.
p = Pool(len(in_dirs))
p.map(run, in_dirs)
| [
"os.listdir",
"os.makedirs"
] | [((559, 579), 'os.makedirs', 'os.makedirs', (['out_dir'], {}), '(out_dir)\n', (570, 579), False, 'import os\n'), ((718, 747), 'os.listdir', 'os.listdir', (['(base_dir + in_dir)'], {}), '(base_dir + in_dir)\n', (728, 747), False, 'import os\n')] |
import sys
import numpy as np
import scipy as sp
import pandas as pd
import networkx as nx
import gensim.parsing.preprocessing as gpp
import sklearn.metrics.pairwise as smp
from .persistent_homology import PersistentHomology
__all__ = ['Model']
class Model(PersistentHomology):
    """
    Evolutionary model of a growing article graph: every node carries a
    sparse bag-of-words vector and a set of mutable "seed" vectors; each
    simulated year the seeds mutate and, once a seed drifts far enough
    from its parent (cosine similarity below its threshold), it is
    promoted to a new node.

    Attributes
    ----------
    graph: networkx.DiGraph
    graph_parent: networkx.DiGraph
    vectors: scipy.sparse.csc_matrix
    vectors_parent: scipy.sparse.csc_matrix
    seeds: {node string: [scipy.sparse.csc_matrix]}
    thresholds: {node string: [float]}
    year: int
    record: pandas.DataFrame
        record of evolution
    year_start: int
    n_seeds: int
        number of seeds per node
    point, insert, delete: tuple
        See ``mutate()``.
    rvs: lambda n->float
        random values for point mutations & insertions
    dct: gensim.corpora.dictionary
    create: lambda n-> float
        thresholds of cosine similarity with parent
        for node creation
    crossover: float
        threshold of cosine similarity with parent
        for crossing over nodes
    """
    def __init__(self, graph_parent, vectors_parent, year_start, start_nodes,
                 n_seeds, dct, point, insert, delete, rvs,
                 create, crossover=None):
        """
        Parameters
        ----------
        start_nodes: lambda wiki.Model -> list(networkx.Nodes)
        """
        PersistentHomology.__init__(self)
        self.graph_parent = graph_parent
        self.vectors_parent = vectors_parent
        self.year_start = year_start
        self.year = year_start
        self.seeds = {}
        self.thresholds = {}
        self.record = pd.DataFrame()
        nodes = list(graph_parent.nodes)
        self.start_nodes = start_nodes(self)
        # Work on a *copy* of the induced sub-graph so the parent graph is
        # never modified by the simulation.
        self.graph = graph_parent.subgraph(self.start_nodes).copy()
        # Column i of self.vectors corresponds to self.graph node i.
        self.vectors = sp.sparse.hstack([
            vectors_parent[:, nodes.index(n)]
            for n in self.start_nodes
        ])
        self.n_seeds = n_seeds
        self.dct = dct
        self.point = point
        self.insert = insert
        self.delete = delete
        self.rvs = rvs
        self.create = create
        self.crossover = crossover

    def __str__(self):
        return f"Model\tparent: '{self.graph_parent.name}'\n" +\
            f"\tyear_start: {self.year_start}\n" +\
            f"\tstart_nodes: {self.start_nodes}\n" +\
            f"\tn_seeds: {self.n_seeds}\n" +\
            f"\tpoint: ({self.point[0]:.4f}, {self.point[1]:.4f})\n" +\
            f"\tinsert: ({self.insert[0]}, {self.insert[1]:.4f}, {type(self.insert[2])})\n" +\
            f"\tdelete: ({self.delete[0]}, {self.delete[1]:.4f})"

    def __repr__(self):
        return self.__str__()

    def evolve(self, until, record=False):
        """ Evolves a graph based on vector representations
            until `until (lambda wiki.Model) == True`
        """
        year_start = self.year
        while not until(self):
            sys.stdout.write(f"\r{year_start} > {self.year} "+\
                f"n={self.graph.number_of_nodes()} ")
            sys.stdout.flush()
            self.initialize_seeds()
            self.mutate_seeds()
            self.create_nodes()
            if record:
                # Append one row per (node, seed) to the evolution record.
                self.record = pd.concat(
                    [self.record] + \
                    [
                        pd.DataFrame(
                            {
                                'Year': self.year,
                                'Parent': seed,
                                'Seed number': i,
                                'Seed vectors': seed_vec
                            },
                            index=[0]
                        )
                        for seed, seed_vecs in self.seeds.items()
                        for i, seed_vec in enumerate(seed_vecs)
                    ],
                    ignore_index=True, sort=False
                )
            self.year += 1
        print('')

    def initialize_seeds(self):
        """Top up every node with seed copies of its own vector and fresh
        creation thresholds until it has ``n_seeds`` of each."""
        nodes = list(self.graph.nodes)
        for i, node in enumerate(nodes):
            if node not in self.seeds.keys():
                self.seeds[node] = []
            if node not in self.thresholds.keys():
                self.thresholds[node] = []
            while len(self.seeds[node]) < self.n_seeds:
                self.seeds[node] += [self.vectors[:, i].copy()]
            while len(self.thresholds[node]) < self.n_seeds:
                self.thresholds[node] += [self.create(1)[0]]

    def mutate_seeds(self):
        """Apply point mutations, insertions and deletions to every seed."""
        for node, vecs in self.seeds.items():
            self.seeds[node] = [
                Model.mutate(
                    vec, self.rvs, self.point, self.insert, self.delete
                )
                for vec in vecs
            ]

    def crossover_seeds(self):
        """Cross over pairs of seeds from different nodes whose cosine
        similarity exceeds ``self.crossover``."""
        nodes = list(self.graph.nodes)
        for i in range(len(nodes)):
            seeds_i = sp.sparse.hstack(self.seeds[nodes[i]])
            for j in range(i + 1, len(nodes)):
                seeds_j = sp.sparse.hstack(self.seeds[nodes[j]])
                similarity = smp.cosine_similarity(
                    seeds_i.transpose(), seeds_j.transpose()
                )
                # BUG FIX: the crossover threshold is stored in
                # ``self.crossover`` (set in __init__); ``self.threshold``
                # was never defined and raised AttributeError here.
                for k, l in np.argwhere(similarity > self.crossover):
                    cross = Model.crossover(seeds_i[:, k], seeds_j[:, l])
                    choice = np.random.choice(2)
                    # One side keeps the crossed seed; the other is reset
                    # to its parent's vector.
                    self.seeds[nodes[i]][k] = cross if choice else self.vectors[:, i]
                    self.seeds[nodes[j]][l] = cross if not choice else self.vectors[:, j]

    def create_nodes(self):
        """Promote seeds that drifted below their similarity threshold to
        new graph nodes; surviving seeds keep mutating next year."""
        nodes = list(self.graph.nodes)
        for i, node in enumerate(nodes):
            parent = self.vectors[:, i]
            seeds = sp.sparse.hstack(self.seeds[node])
            sim_to_parent = smp.cosine_similarity(parent.transpose(), seeds.transpose())
            # BUG FIX: the original popped from self.seeds[node] while
            # enumerating it, which skips the seed after every spawn and
            # desynchronizes seed/threshold indices. Build the surviving
            # lists instead of mutating in place.
            kept_seeds, kept_thresholds = [], []
            for j, seed_vec in enumerate(self.seeds[node]):
                if sim_to_parent[0, j] < self.thresholds[node][j]:
                    # Seed drifted far enough: promote it to a new node
                    # and append its column to the vector matrix.
                    Model.connect(seed_vec, self.graph, self.vectors, self.dct)
                    self.vectors = sp.sparse.hstack([self.vectors, seed_vec])
                else:
                    kept_seeds.append(seed_vec)
                    kept_thresholds.append(self.thresholds[node][j])
            self.seeds[node] = kept_seeds
            self.thresholds[node] = kept_thresholds
        # Stamp newly created nodes with the current year.
        for node in self.graph.nodes:
            if 'year' not in self.graph.nodes[node].keys():
                self.graph.nodes[node]['year'] = self.year

    @staticmethod
    def mutate(x, rvs, point=(0, 0), insert=(0, 0, None), delete=(0, 0)):
        """ Mutates vector ``x`` with point mutations,
            insertions, and deletions. Insertions and point
            mutations draw from a random process ``rvs``.

            Parameters
            ----------
            x: spipy.sparse.csc_matrix
            rvs: lambda (n)-> float
                returns ``n`` random weights in [0,1]
            point: tuple (int n, float p)
                n = number of elements to insert
                p = probability of insertion for each trial
            insert: tuple (n, p, iterable s)
                s = set of elements from which to select
                if None, select from all zero elements
            delete: tuple (n, p)
        """
        data = x.data
        idx = x.indices
        if idx.size == 0:
            return x
        # point mutation: re-draw the weight of randomly chosen entries
        n_point = np.random.binomial(point[0], point[1])
        i_point = np.random.choice(x.size, size=n_point, replace=False)
        data[i_point] = rvs(n_point)
        # insertion: add new entries at indices not yet present
        n_insert = np.random.binomial(insert[0], insert[1])
        for _ in range(n_insert):
            while True:
                insert_idx = np.random.choice(insert[2]) if insert[2]\
                    else np.random.choice(x.shape[0])
                if insert_idx not in idx: break
            idx = np.append(idx, insert_idx)
            data = np.append(data, rvs(1))
        # deletion: drop randomly chosen entries
        n_delete = np.random.binomial(delete[0], delete[1])
        i_delete = np.random.choice(idx.size, size=n_delete, replace=False)
        idx = np.delete(idx, i_delete)
        data = np.delete(data, i_delete)
        y = sp.sparse.csc_matrix(
            (data, (idx, np.zeros(idx.shape, dtype=int))),
            shape=x.shape
        )
        return y

    @staticmethod
    def connect(seed_vector, graph, vectors, dct, top_words=10, match_n=6):
        """ Adds ``seed_vector`` to ``graph`` as a new node named after its
            top words, linking it to every existing node whose word profile
            overlaps with it.

            Parameters
            ----------
            seed_vector: scipy.sparse.csc_matrix
            graph: networkx.DiGraph (not optional)
            vectors: scipy.sparse.csc_matrix (not optional)
            dct: gensim.corpora.dictionary (not optional)
            top_words: int (default=10)
            match_n: int
                minimum number of shared word indices (either direction)
                for an edge to be created
        """
        seed_top_words, seed_top_idx = Model.find_top_words(seed_vector, dct)
        seed_name = ' '.join(seed_top_words)
        nodes = list(graph.nodes)
        graph.add_node(seed_name)
        for i, node in enumerate(nodes):
            node_vector = vectors[:, i]
            node_top_words, node_top_idx = Model.find_top_words(node_vector, dct)
            if len(set(seed_top_idx).intersection(set(node_vector.indices))) >= match_n or\
                    len(set(node_top_idx).intersection(set(seed_vector.indices))) >= match_n:
                graph.add_edge(node, seed_name)

    @staticmethod
    def find_top_words(x, dct, top_n=10):
        """ Returns the highest-weighted non-stopword words of ``x`` and
            their dictionary indices.

            Parameters
            ----------
            x: scipy.sparse.csc_matrix
            dct: gensim.corpora.dictionary
            top_n: int

            Returns
            -------
            words: list of words (stopwords removed)
            idx_vector: their indices in ``dct``
        """
        top_idx = np.argsort(x.data)[-top_n:]
        idx = [x.indices[i] for i in top_idx]
        words = [dct[i] for i in idx]
        words_nostop = gpp.remove_stopwords(' '.join(words)).split(' ')
        idx_keep = list(map(lambda x: words.index(x), set(words).intersection(words_nostop)))
        idx_nostop = list(map(idx.__getitem__, idx_keep))
        return words_nostop, idx_nostop

    @staticmethod
    def crossover(v1, v2):
        """ Crosses two vectors by combining half of one
            and half of the other.

            Parameters
            ----------
            v1, v2: scipy.sparse.matrix

            Returns
            -------
            v3: scipy.sparse.matrix
        """
        idx1 = np.random.choice(v1.size, size=int(v1.size / 2))
        idx2 = np.random.choice(v2.size, size=int(v2.size / 2))
        data = np.array(
            [v1.data[i] for i in idx1] + [v2.data[i] for i in idx2]
        )
        idx = np.array(
            [v1.indices[i] for i in idx1] + [v2.indices[i] for i in idx2]
        )
        v3 = sp.sparse.csc_matrix(
            (data, (idx, np.zeros(idx.shape, dtype=int))), shape=v1.shape
        )
        return v3
| [
"numpy.random.choice",
"numpy.append",
"numpy.argwhere",
"sys.stdout.flush",
"numpy.delete",
"numpy.random.binomial",
"numpy.zeros",
"numpy.array",
"pandas.DataFrame",
"numpy.argsort",
"scipy.sparse.hstack"
] | [((1666, 1680), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (1678, 1680), True, 'import pandas as pd\n'), ((7272, 7310), 'numpy.random.binomial', 'np.random.binomial', (['point[0]', 'point[1]'], {}), '(point[0], point[1])\n', (7290, 7310), True, 'import numpy as np\n'), ((7329, 7382), 'numpy.random.choice', 'np.random.choice', (['x.size'], {'size': 'n_point', 'replace': '(False)'}), '(x.size, size=n_point, replace=False)\n', (7345, 7382), True, 'import numpy as np\n'), ((7459, 7499), 'numpy.random.binomial', 'np.random.binomial', (['insert[0]', 'insert[1]'], {}), '(insert[0], insert[1])\n', (7477, 7499), True, 'import numpy as np\n'), ((7857, 7897), 'numpy.random.binomial', 'np.random.binomial', (['delete[0]', 'delete[1]'], {}), '(delete[0], delete[1])\n', (7875, 7897), True, 'import numpy as np\n'), ((7917, 7973), 'numpy.random.choice', 'np.random.choice', (['idx.size'], {'size': 'n_delete', 'replace': '(False)'}), '(idx.size, size=n_delete, replace=False)\n', (7933, 7973), True, 'import numpy as np\n'), ((7988, 8012), 'numpy.delete', 'np.delete', (['idx', 'i_delete'], {}), '(idx, i_delete)\n', (7997, 8012), True, 'import numpy as np\n'), ((8028, 8053), 'numpy.delete', 'np.delete', (['data', 'i_delete'], {}), '(data, i_delete)\n', (8037, 8053), True, 'import numpy as np\n'), ((10353, 10418), 'numpy.array', 'np.array', (['([v1.data[i] for i in idx1] + [v2.data[i] for i in idx2])'], {}), '([v1.data[i] for i in idx1] + [v2.data[i] for i in idx2])\n', (10361, 10418), True, 'import numpy as np\n'), ((10455, 10526), 'numpy.array', 'np.array', (['([v1.indices[i] for i in idx1] + [v2.indices[i] for i in idx2])'], {}), '([v1.indices[i] for i in idx1] + [v2.indices[i] for i in idx2])\n', (10463, 10526), True, 'import numpy as np\n'), ((3118, 3136), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (3134, 3136), False, 'import sys\n'), ((4931, 4969), 'scipy.sparse.hstack', 'sp.sparse.hstack', (['self.seeds[nodes[i]]'], {}), '(self.seeds[nodes[i]])\n', 
(4947, 4969), True, 'import scipy as sp\n'), ((5740, 5774), 'scipy.sparse.hstack', 'sp.sparse.hstack', (['self.seeds[node]'], {}), '(self.seeds[node])\n', (5756, 5774), True, 'import scipy as sp\n'), ((7749, 7775), 'numpy.append', 'np.append', (['idx', 'insert_idx'], {}), '(idx, insert_idx)\n', (7758, 7775), True, 'import numpy as np\n'), ((9552, 9570), 'numpy.argsort', 'np.argsort', (['x.data'], {}), '(x.data)\n', (9562, 9570), True, 'import numpy as np\n'), ((5040, 5078), 'scipy.sparse.hstack', 'sp.sparse.hstack', (['self.seeds[nodes[j]]'], {}), '(self.seeds[nodes[j]])\n', (5056, 5078), True, 'import scipy as sp\n'), ((5237, 5277), 'numpy.argwhere', 'np.argwhere', (['(similarity > self.threshold)'], {}), '(similarity > self.threshold)\n', (5248, 5277), True, 'import numpy as np\n'), ((5378, 5397), 'numpy.random.choice', 'np.random.choice', (['(2)'], {}), '(2)\n', (5394, 5397), True, 'import numpy as np\n'), ((6105, 6147), 'scipy.sparse.hstack', 'sp.sparse.hstack', (['[self.vectors, seed_vec]'], {}), '([self.vectors, seed_vec])\n', (6121, 6147), True, 'import scipy as sp\n'), ((7587, 7614), 'numpy.random.choice', 'np.random.choice', (['insert[2]'], {}), '(insert[2])\n', (7603, 7614), True, 'import numpy as np\n'), ((7654, 7682), 'numpy.random.choice', 'np.random.choice', (['x.shape[0]'], {}), '(x.shape[0])\n', (7670, 7682), True, 'import numpy as np\n'), ((8113, 8143), 'numpy.zeros', 'np.zeros', (['idx.shape'], {'dtype': 'int'}), '(idx.shape, dtype=int)\n', (8121, 8143), True, 'import numpy as np\n'), ((10609, 10639), 'numpy.zeros', 'np.zeros', (['idx.shape'], {'dtype': 'int'}), '(idx.shape, dtype=int)\n', (10617, 10639), True, 'import numpy as np\n'), ((3385, 3493), 'pandas.DataFrame', 'pd.DataFrame', (["{'Year': self.year, 'Parent': seed, 'Seed number': i, 'Seed vectors': seed_vec}"], {'index': '[0]'}), "({'Year': self.year, 'Parent': seed, 'Seed number': i,\n 'Seed vectors': seed_vec}, index=[0])\n", (3397, 3493), True, 'import pandas as pd\n')] |
import numpy as np
from scipy.special import expit
class NeuralNet:
    """
    Simple fully connected feed-forward network evaluated from a flat
    weight vector (e.g. for neuro-evolution): ReLU hidden layers and a
    sigmoid output layer, each with a constant bias of -1.
    """

    def __init__(self, input_qty, hidden_dts, output_qty):
        """
        input_qty : number of inputs the network receives.
        hidden_dts : tuple describing the hidden layers -
                     (number of hidden layers, neurons per hidden layer).
        output_qty : number of network outputs.
        """
        self.input_qty = input_qty
        self.hidden_dts = hidden_dts
        self.output_qty = output_qty

    def fitting(self, inputs, weights):
        """Run a forward pass consuming `weights` as a flat vector; returns
        the output-layer activations as a numpy array."""
        inputs = np.asarray(inputs)
        bias = -1
        ini = 0
        # INPUT LAYER
        end = self.input_qty * self.hidden_dts[1]
        newshape = (self.input_qty, self.hidden_dts[1])
        new_input = self.neuron(weights[ini:end], inputs, newshape, bias)
        ini = end
        # HIDDEN LAYERS
        # BUG FIX: each hidden-to-hidden transition consumes exactly
        # hidden_dts[1] ** 2 weights. The original used
        # hidden_dts[1] ** hidden_dts[0] per layer, which is only
        # coincidentally correct when hidden_dts[0] == 2 and breaks
        # (reshape error / weight mismatch) for any other depth.
        layer_size = self.hidden_dts[1] ** 2
        for _ in range(self.hidden_dts[0] - 1):
            end = layer_size + ini
            newshape = (self.hidden_dts[1], self.hidden_dts[1])
            new_input = self.neuron(weights[ini:end], new_input, newshape, bias)
            ini = end
        # OUTPUT LAYER
        end = self.hidden_dts[1] * self.output_qty + ini
        newshape = (self.hidden_dts[1], self.output_qty)
        output = self.neuron(weights[ini:end], new_input, newshape, bias, out=True)
        return output

    def neuron(self, weights, matrix_a, newshape, bias, out=False):
        """One dense layer: (matrix_a @ W) + bias, then ReLU for hidden
        layers or sigmoid (expit) for the output layer."""
        matrix_weights = np.reshape(weights, newshape)
        output = np.dot(matrix_a, matrix_weights) + bias
        if out:
            output = expit(output)
        else:
            output = np.maximum(output, 0)
        return output

    def generate_weights(self):
        """Return a uniform(-1, 1) flat weight vector sized exactly for
        this topology (input + hidden transitions + output)."""
        input_qty = self.input_qty * self.hidden_dts[1]
        # BUG FIX: there are (hidden_dts[0] - 1) hidden transitions of
        # hidden_dts[1] ** 2 weights each, not hidden_dts[1] ** hidden_dts[0].
        hidden_qty = (self.hidden_dts[0] - 1) * self.hidden_dts[1] ** 2
        output_qty = self.hidden_dts[1] * self.output_qty
        total = input_qty + hidden_qty + output_qty
        weights = np.random.uniform(-1, 1, total)
        return weights
| [
"numpy.reshape",
"numpy.asarray",
"numpy.random.uniform",
"numpy.maximum",
"scipy.special.expit",
"numpy.dot"
] | [((661, 679), 'numpy.asarray', 'np.asarray', (['inputs'], {}), '(inputs)\n', (671, 679), True, 'import numpy as np\n'), ((1579, 1608), 'numpy.reshape', 'np.reshape', (['weights', 'newshape'], {}), '(weights, newshape)\n', (1589, 1608), True, 'import numpy as np\n'), ((2075, 2106), 'numpy.random.uniform', 'np.random.uniform', (['(-1)', '(1)', 'total'], {}), '(-1, 1, total)\n', (2092, 2106), True, 'import numpy as np\n'), ((1626, 1658), 'numpy.dot', 'np.dot', (['matrix_a', 'matrix_weights'], {}), '(matrix_a, matrix_weights)\n', (1632, 1658), True, 'import numpy as np\n'), ((1703, 1716), 'scipy.special.expit', 'expit', (['output'], {}), '(output)\n', (1708, 1716), False, 'from scipy.special import expit\n'), ((1752, 1773), 'numpy.maximum', 'np.maximum', (['output', '(0)'], {}), '(output, 0)\n', (1762, 1773), True, 'import numpy as np\n')] |
import gzip
from collections import OrderedDict
from typing import Dict, Tuple
import numpy as np
import tensorflow as tf
from tensorflow.python.lib.io import file_io
def random_normal_initializer(_, dim):
    """Return a vector of ``dim`` values drawn from N(0, 0.01); the first
    argument (the word) is ignored."""
    return np.random.normal(loc=0, scale=0.01, size=dim)
def zero_initializer(_, dim):
    """Return an all-zero vector of length ``dim``; the first argument
    (the word) is ignored."""
    return np.zeros(shape=dim)
def read_vectors(path, max_vecs=1000000) -> Tuple[Dict[str, np.array], int]:
    """
    Read word vectors from a specified path as float32 numpy arrays.

    Each line is whitespace-separated: the first field is the word, the
    rest are vector components. The first valid line fixes the expected
    dimensionality; later lines with a different component count are
    skipped with a warning.

    :param path: path to .gz file or text file
    :param max_vecs: limit of vectors to read into memory
    :return: tuple of the vector dictionary and the vector dimensionality
    """
    vectors = OrderedDict()
    dim = 0
    # gzip-compressed files are read transparently; anything else goes
    # through TensorFlow's FileIO (presumably for remote/GCS path support
    # -- confirm against callers).
    with gzip.open(path, 'rt') if path.endswith('gz') else file_io.FileIO(path, 'r') as lines:
        for line in lines:
            if len(vectors) >= max_vecs:
                break
            fields = line.strip().split()
            # Skip blank/malformed lines (need at least word + one value).
            if len(fields) < 2:
                continue
            if dim == 0:
                # First valid line fixes the dimensionality.
                dim = len(fields) - 1
            elif dim != len(fields) - 1:
                tf.logging.warn('Skipping vector with unexpected number of dimensions in line %d: %s', len(vectors), line)
                continue
            vec = np.array([float(x) for x in fields[1:]], dtype=np.float32)
            vectors[fields[0]] = vec
    return vectors, dim
def write_vectors(vectors, path):
    """Write word vectors to ``path``, one ``word v1 v2 ...`` line each."""
    with file_io.FileIO(path, 'w') as out:
        for word, vector in vectors.items():
            values = ' '.join([str(ele) for ele in vector])
            out.write(word + ' ' + values)
            out.write('\n')
def initialize_embedding_from_dict(vector_map, dim, vocabulary, zero_init=False, standardize=False):
    """
    Initialize a numpy matrix from pre-existing vectors with indices
    corresponding to a given vocabulary. Words in the vocabulary missing
    from ``vector_map`` are initialized with zeros or small random normals
    and are also added to ``vector_map`` in place (the dict is mutated).

    :param vector_map: dictionary from words to numpy arrays
    :param dim: dimensionality of vectors
    :param vocabulary: dictionary from words to corresponding indices
    :param zero_init: if True, initialize missing words with zeros instead
        of N(0, 0.01) random values
    :param standardize: scale embedding values to have standard deviation of 1
    :return: numpy matrix with rows corresponding to vectors
    """
    # Pick the fallback initializer for words without a pre-trained vector.
    initializer = zero_initializer if zero_init else random_normal_initializer
    emb = np.zeros([len(vocabulary), dim], dtype=np.float32)
    for word, index in vocabulary.items():
        if word not in vector_map:
            vector_map[word] = initializer(word, dim)
        emb[index] = vector_map[word]
    if standardize:
        emb = emb / np.std(emb)
    return emb
| [
"tensorflow.python.lib.io.file_io.FileIO",
"numpy.std",
"numpy.random.normal",
"collections.OrderedDict",
"numpy.zeros",
"gzip.open"
] | [((220, 250), 'numpy.random.normal', 'np.random.normal', (['(0)', '(0.01)', 'dim'], {}), '(0, 0.01, dim)\n', (236, 250), True, 'import numpy as np\n'), ((294, 307), 'numpy.zeros', 'np.zeros', (['dim'], {}), '(dim)\n', (302, 307), True, 'import numpy as np\n'), ((665, 678), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (676, 678), False, 'from collections import OrderedDict\n'), ((1411, 1436), 'tensorflow.python.lib.io.file_io.FileIO', 'file_io.FileIO', (['path', '"""w"""'], {}), "(path, 'w')\n", (1425, 1436), False, 'from tensorflow.python.lib.io import file_io\n'), ((700, 721), 'gzip.open', 'gzip.open', (['path', '"""rt"""'], {}), "(path, 'rt')\n", (709, 721), False, 'import gzip\n'), ((750, 775), 'tensorflow.python.lib.io.file_io.FileIO', 'file_io.FileIO', (['path', '"""r"""'], {}), "(path, 'r')\n", (764, 775), False, 'from tensorflow.python.lib.io import file_io\n'), ((2701, 2712), 'numpy.std', 'np.std', (['emb'], {}), '(emb)\n', (2707, 2712), True, 'import numpy as np\n')] |
# MIT License
#
# Copyright (c) 2021 <NAME> and <NAME> and <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import Tensor
from typing import Tuple
from openspeech.modules.wrapper import Linear
class LocationAwareAttention(nn.Module):
    r"""
    Location-aware (hybrid) attention for speech recognition decoders.

    In addition to the usual content-based score, the previous timestep's
    alignment is convolved with a 1-D kernel and fed back into the scoring
    function, which helps the model attend monotonically over the encoder
    outputs. Implementation follows the ClovaCall attention style.

    Args:
        dim (int): dimension of the model (query/value feature size)
        attn_dim (int): internal dimension of the attention projections
        smoothing (bool): if True, use sigmoid smoothing instead of softmax
            when normalising the alignment energies

    Inputs: query, value, last_alignment_energy
        - **query** (batch, q_len, hidden_dim): decoder output features.
        - **value** (batch, v_len, hidden_dim): encoded input features.
        - **last_alignment_energy** (batch, v_len): previous timestep's
          alignment, or ``None`` on the first step.

    Returns: context, alignment_energy
        - **context** (batch, 1, hidden_dim): attention-weighted sum of values.
        - **alignment_energy** (batch, v_len): normalised attention weights.

    Reference:
        Chorowski et al.: Attention-Based Models for Speech Recognition.
        https://arxiv.org/abs/1506.07503
    """

    def __init__(self, dim: int = 1024, attn_dim: int = 1024, smoothing: bool = False) -> None:
        super(LocationAwareAttention, self).__init__()
        # Convolution over the previous alignment provides the "location" feature.
        self.location_conv = nn.Conv1d(in_channels=1, out_channels=attn_dim, kernel_size=3, padding=1)
        self.query_proj = Linear(dim, attn_dim, bias=False)
        self.value_proj = Linear(dim, attn_dim, bias=False)
        self.bias = nn.Parameter(torch.rand(attn_dim).uniform_(-0.1, 0.1))
        self.fc = Linear(attn_dim, 1, bias=True)
        self.smoothing = smoothing

    def forward(self, query: Tensor, value: Tensor, last_alignment_energy: Tensor) -> Tuple[Tensor, Tensor]:
        batch_size = query.size(0)
        seq_length = value.size(1)

        # First decoding step: start from an all-zero alignment.
        if last_alignment_energy is None:
            last_alignment_energy = value.new_zeros(batch_size, seq_length)

        # (batch, v_len) -> (batch, 1, v_len) -> conv -> (batch, attn_dim, v_len) -> (batch, v_len, attn_dim)
        location_feature = self.location_conv(last_alignment_energy.unsqueeze(dim=1)).transpose(1, 2)

        # Additive (Bahdanau-style) score combining content and location terms.
        combined = self.query_proj(query) + self.value_proj(value) + location_feature + self.bias
        alignment_energy = self.fc(torch.tanh(combined)).squeeze(dim=-1)

        if self.smoothing:
            # Sigmoid smoothing followed by explicit normalisation.
            alignment_energy = torch.sigmoid(alignment_energy)
            normalizer = alignment_energy.sum(dim=-1).unsqueeze(dim=-1)
            alignment_energy = torch.div(alignment_energy, normalizer)
        else:
            alignment_energy = F.softmax(alignment_energy, dim=-1)

        # Weighted sum over the value sequence.
        context = torch.bmm(alignment_energy.unsqueeze(dim=1), value)
        return context, alignment_energy
| [
"torch.rand",
"torch.nn.functional.softmax",
"openspeech.modules.wrapper.Linear",
"torch.nn.Conv1d",
"torch.sigmoid"
] | [((2769, 2842), 'torch.nn.Conv1d', 'nn.Conv1d', ([], {'in_channels': '(1)', 'out_channels': 'attn_dim', 'kernel_size': '(3)', 'padding': '(1)'}), '(in_channels=1, out_channels=attn_dim, kernel_size=3, padding=1)\n', (2778, 2842), True, 'import torch.nn as nn\n'), ((2869, 2902), 'openspeech.modules.wrapper.Linear', 'Linear', (['dim', 'attn_dim'], {'bias': '(False)'}), '(dim, attn_dim, bias=False)\n', (2875, 2902), False, 'from openspeech.modules.wrapper import Linear\n'), ((2929, 2962), 'openspeech.modules.wrapper.Linear', 'Linear', (['dim', 'attn_dim'], {'bias': '(False)'}), '(dim, attn_dim, bias=False)\n', (2935, 2962), False, 'from openspeech.modules.wrapper import Linear\n'), ((3056, 3086), 'openspeech.modules.wrapper.Linear', 'Linear', (['attn_dim', '(1)'], {'bias': '(True)'}), '(attn_dim, 1, bias=True)\n', (3062, 3086), False, 'from openspeech.modules.wrapper import Linear\n'), ((3886, 3918), 'torch.sigmoid', 'torch.sigmoid', (['alignmment_energy'], {}), '(alignmment_energy)\n', (3899, 3918), False, 'import torch\n'), ((4076, 4112), 'torch.nn.functional.softmax', 'F.softmax', (['alignmment_energy'], {'dim': '(-1)'}), '(alignmment_energy, dim=-1)\n', (4085, 4112), True, 'import torch.nn.functional as F\n'), ((2996, 3016), 'torch.rand', 'torch.rand', (['attn_dim'], {}), '(attn_dim)\n', (3006, 3016), False, 'import torch\n')] |
TOKEN = ""
# YOUR BOT TOKEN (Discord bot token; keep secret, never commit a real value)
guildid = []
# YOUR GUILD ID (list of int guild IDs where slash commands are registered)
import discord
from discord.ext import commands
# pip install -U git+https://github.com/Rapptz/discord.py.git (recommended)
from discord_slash import SlashContext, SlashCommand
from discord_slash.model import SlashCommandOptionType
from discord_slash.utils.manage_commands import create_option, create_choice
# pip install discord_py_slash_command
channels = {} # ticket channels list: maps str(user id) -> discord.TextChannel
bot = commands.Bot(command_prefix="!")
slash = SlashCommand(bot, sync_commands=True)
@bot.event
async def on_ready():
    """Log a startup-complete message once the bot is connected."""
    print("ꡬλ μλ£")
@bot.event
async def on_command_error(ctx, error):
    """Silently ignore prefix-command errors (only slash commands are used)."""
    pass
@slash.slash(
    name="λ¬Έμμ±λ",
    description="λ¬Έμμ±λμ μμ±ν©λλ€.",
    options=[
        create_option(
            name="method",
            description="μ¬μ©ν λμ (μμ±, μμ )",
            option_type=SlashCommandOptionType.STRING,
            required=True,
            choices=[
                create_choice(name="create", value="create"),
                create_choice(name="delete", value="delete")
            ]
        )
    ],
    guild_ids=guildid
)
async def ticket(ctx: SlashContext, method):
    """Create or delete the invoking user's private ticket channel.

    method == "create": makes a text channel visible only to the author
    and stores it in the module-level ``channels`` map keyed by user id.
    method == "delete": removes the author's channel and confirms via DM.
    """
    if method == "create":
        # One ticket channel per user at a time.
        if str(ctx.author.id) in channels:
            await ctx.send("λ¬Έμμ±λμ μμ νμ ν λ€μ μλν΄ μ£ΌμΈμ.")
            return
        if not ctx.guild:
            await ctx.send("DM μ±λμμλ μ¬μ©ν μ μμ΅λλ€.")
            return
        # Hide the channel from @everyone; grant the author full access.
        overwrites = {
            ctx.guild.default_role: discord.PermissionOverwrite(read_messages=False),
            ctx.author: discord.PermissionOverwrite(read_messages=True, send_messages=True, manage_messages=True)
        }
        ticket_channel = await ctx.author.guild.create_text_channel(f"{ctx.author}λμ λ¬Έμμ±λ", overwrites=overwrites)
        # Plain assignment: key absence was checked above, so setdefault was redundant.
        channels[str(ctx.author.id)] = ticket_channel
        await ticket_channel.send(f'{ctx.author.mention}, λ¬Έμμ±λμ΄ μ±κ³΅μ μΌλ‘ λ§λ€μ΄μ‘μ΅λλ€. λ¬Έμ ν λ¬Έμμ±λμ μμ νμΈμ.')
        await ctx.send("λ¬Έμμ±λμ΄ μ μμ μΌλ‘ λ§λ€μ΄μ‘μ΅λλ€.")
        return
    else:
        if str(ctx.author.id) not in channels:
            await ctx.send("μ΄μ© μ€μΈ λ¬Έμμ±λμ΄ μμ΅λλ€.")
            return
        if not ctx.guild:
            await ctx.send("DM μ±λμμλ μ¬μ©ν μ μμ΅λλ€.")
            return
        # pop() removes and returns the entry in one O(1) step; the old code
        # rebuilt the entire dict with a comprehension just to drop one key.
        ticket_channel = channels.pop(str(ctx.author.id))
        await ticket_channel.delete()
        dm = await ctx.author.create_dm()
        await dm.send("λ¬Έμμ±λ μμ κ° μλ£λμμ΅λλ€.")
@slash.slash(
name="κ°μ μμ ",
description="λ¬Έμμ±λμ κ°μ μμ ν©λλ€.",
options=[
create_option(
name="target",
description="κ°μ μμ ν μ μ ",
option_type=SlashCommandOptionType.USER,
required=True
)
],
guild_ids=guildid
)
async def κ°μ μμ (ctx: SlashContext, target):
global channels
if (not ctx.author.guild_permissions.administrator):
await ctx.send("κ΄λ¦¬μ κΆνμ΄ νμν©λλ€.")
return
if (not ctx.guild):
dm = await ctx.author.create_dm()
await dm.send("DM μ±λμμλ μ¬μ©ν μ μμ΅λλ€.")
return
if str(target.id) not in channels: # global channels
await ctx.send(f"{target}λμ΄ μ΄μ© μ€μΈ λ¬Έμμ±λμ΄ μμ΅λλ€.")
return
ticket_channel = channels.get(str(target.id)) # global channels
channels = {k: v for k, v in channels.items() if k != str(target.id)} # delete user
await ticket_channel.delete()
dm = await ctx.author.create_dm()
await dm.send("λ¬Έμμ±λ μμ κ° μλ£λμμ΅λλ€.")
bot.run(TOKEN)
| [
"discord_slash.utils.manage_commands.create_choice",
"discord.PermissionOverwrite",
"discord.ext.commands.Bot",
"discord_slash.utils.manage_commands.create_option",
"discord_slash.SlashCommand"
] | [((460, 492), 'discord.ext.commands.Bot', 'commands.Bot', ([], {'command_prefix': '"""!"""'}), "(command_prefix='!')\n", (472, 492), False, 'from discord.ext import commands\n'), ((501, 538), 'discord_slash.SlashCommand', 'SlashCommand', (['bot'], {'sync_commands': '(True)'}), '(bot, sync_commands=True)\n', (513, 538), False, 'from discord_slash import SlashContext, SlashCommand\n'), ((1491, 1539), 'discord.PermissionOverwrite', 'discord.PermissionOverwrite', ([], {'read_messages': '(False)'}), '(read_messages=False)\n', (1518, 1539), False, 'import discord\n'), ((1565, 1658), 'discord.PermissionOverwrite', 'discord.PermissionOverwrite', ([], {'read_messages': '(True)', 'send_messages': '(True)', 'manage_messages': '(True)'}), '(read_messages=True, send_messages=True,\n manage_messages=True)\n', (1592, 1658), False, 'import discord\n'), ((2649, 2763), 'discord_slash.utils.manage_commands.create_option', 'create_option', ([], {'name': '"""target"""', 'description': '"""κ°μ μμ ν μ μ """', 'option_type': 'SlashCommandOptionType.USER', 'required': '(True)'}), "(name='target', description='κ°μ μμ ν μ μ ', option_type=\n SlashCommandOptionType.USER, required=True)\n", (2662, 2763), False, 'from discord_slash.utils.manage_commands import create_option, create_choice\n'), ((944, 988), 'discord_slash.utils.manage_commands.create_choice', 'create_choice', ([], {'name': '"""create"""', 'value': '"""create"""'}), "(name='create', value='create')\n", (957, 988), False, 'from discord_slash.utils.manage_commands import create_option, create_choice\n'), ((1006, 1050), 'discord_slash.utils.manage_commands.create_choice', 'create_choice', ([], {'name': '"""delete"""', 'value': '"""delete"""'}), "(name='delete', value='delete')\n", (1019, 1050), False, 'from discord_slash.utils.manage_commands import create_option, create_choice\n')] |
End of preview. Expand in Data Studio.
README.md exists but its content is empty.
Downloads last month: 7